sdc / kis_tools / Commits / 9a8529e1

Commit 9a8529e1, authored Sep 28, 2020 by Carl Schaffer
ran code formatter
parent 912cd54d

Changes: 52 files
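The commit message only says "ran code formatter" and does not name the tool. The pattern of the edits below, single-quoted strings rewritten to double quotes, long calls split onto one argument per line with a trailing comma, and missing end-of-file newlines added, matches what a PEP 8 auto-formatter in the style of black produces. A minimal sketch of that behaviour, under the assumption that black (or a similarly configured tool) was used:

    # sketch_formatter.py -- illustrative only; the commit does not name the formatter.
    import black  # assumed tool, not confirmed by the commit message

    source = "f = open(yaml_file, 'rb')\n"
    formatted = black.format_str(source, mode=black.FileMode())
    print(formatted)  # -> f = open(yaml_file, "rb")  (quotes normalized, behaviour unchanged)

Applied across the package, this kind of purely cosmetic normalization accounts for the bulk of the 52 changed files in this commit.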
kis_tools/bbi/__init__.py
@@ -4,4 +4,4 @@
 """
 Created by schaffer at 7/30/19
-"""
\ No newline at end of file
+"""

kis_tools/bbi/bbi_header.py
@@ -42,11 +42,11 @@ class OrderedLoader(yaml.Loader):
 def get_separators(yaml_file):
     try:
-        f = open(yaml_file, 'rb')
+        f = open(yaml_file, "rb")
         yamldata = f.read()
         f.close()
     except:
-        print('Could not open file {:}'.format(yaml_file))
+        print("Could not open file {:}".format(yaml_file))
         return None
     sep_data = yaml.load(yamldata, OrderedLoader)["separators"]
@@ -62,31 +62,31 @@ def get_generic_header(filename=settings.header_template, version="reconstruction"):
     """
     try:
-        f = open(filename, 'rb')
+        f = open(filename, "rb")
         yamldata = f.read()
         f.close()
     except:
-        print('Could not open file {:}'.format(filename))
+        print("Could not open file {:}".format(filename))
         return None
     headerdata = yaml.load(yamldata, OrderedLoader)
     try:
         header = fits.Header(headerdata[version].keys())
     except:
-        print('Version {:} not found'.format(version))
+        print("Version {:} not found".format(version))
         return None
     headerversion = headerdata[version]
     for card in headerversion:
         try:
             gencard = headerversion.get(card)
-            standard = gencard['DEFAULT']
-            comment = gencard.get('COMMENT', '')
+            standard = gencard["DEFAULT"]
+            comment = gencard.get("COMMENT", "")
             if comment is None:
-                comment = ''
+                comment = ""
             header[card] = (standard, comment)
         except:
-            print('Could not update header')
+            print("Could not update header")
             print(card, gencard)
     return header
@@ -99,11 +99,11 @@ def get_generic_patches(filename, version):
     :return: patches ordered dict
     """
     try:
-        f = open(filename, 'rb')
+        f = open(filename, "rb")
         yamldata = f.read()
         f.close()
     except:
-        print('Could not open file {:}'.format(filename))
+        print("Could not open file {:}".format(filename))
         return None
     patchesdata = yaml.load(yamldata, OrderedLoader)
@@ -111,7 +111,7 @@ def get_generic_patches(filename, version):
     try:
         patches = patchesdata[version]
     except:
-        print('Version {:} not found'.format(version))
+        print("Version {:} not found".format(version))
         return None
     return patches
@@ -133,7 +133,7 @@ def apply_patches(new_header, old_header, patches):
         try:
             new_header = apply_patch(new_header, old_header, patch)
         except:
-            print('Could not apply patch {0}'.format(patch))
+            print("Could not apply patch {0}".format(patch))
             pass
     return new_header
@@ -158,21 +158,21 @@ def apply_patch(new_header, old_header, patch):
     """
     try:
-        kind = patch[1]['KIND']
-        old = patch[1]['OLD']
-        new = patch[1]['NEW']
-        function = patch[1]['FUNCTION']
-        if kind == 'UPDATEVALUE':
-            function = function.replace('OLD', "'" + old + "'")
-            function = function.replace('NEW', "'" + new + "'")
+        kind = patch[1]["KIND"]
+        old = patch[1]["OLD"]
+        new = patch[1]["NEW"]
+        function = patch[1]["FUNCTION"]
+        if kind == "UPDATEVALUE":
+            function = function.replace("OLD", "'" + old + "'")
+            function = function.replace("NEW", "'" + new + "'")
         else:
-            print('Unknown Kind: {0}'.format(kind))
+            print("Unknown Kind: {0}".format(kind))
         exec(function)
     except KeyError:
         print(f"Error Executing {function}")
     except:
-        print('Could not evaluate patch: {0}'.format(function))
+        print("Could not evaluate patch: {0}".format(function))
         pass
     return new_header
@@ -219,7 +219,7 @@ def insert_wcs_kws(header, filename):
     # add wcs kws
     x, y = get_coords(filename)
     data = fits.getdata(filename)
-    center = [int(ax / 2.) for ax in data.shape]
+    center = [int(ax / 2.0) for ax in data.shape]
     center.reverse()  # fix x and y order
     npix_x, npix_y = data.shape[1], data.shape[0]  # fix x and y order
@@ -234,13 +234,17 @@ def insert_wcs_kws(header, filename):
         CRPIX1=center[0],
         CRVAL1=float(x),
         CDELT1=step_width_x,
-        CSYER1=fov_x * .25 * step_width_x,  # assume coordinate error of 25 percent of image
+        CSYER1=fov_x * 0.25 * step_width_x,  # assume coordinate error of 25 percent of image
         CTYPE2="HPLT-TAN",
         CUNIT2="arcsec",
         CRPIX2=center[1],
         CRVAL2=float(y),
         CDELT2=step_width_y,
-        CSYER2=fov_y * .25 * step_width_y,  # assume coordinate error of 25 percent of image
+        CSYER2=fov_y * 0.25 * step_width_y,  # assume coordinate error of 25 percent of image
         # Compatibility for SunPy Maps
         HGLN_OBS=0,
         HGLT_OBS=0,
@@ -268,17 +272,17 @@ def insert_separators(new_header):
     for sep in sep_list:
         for sep_contents, sep_config in sep.items():
             target = sep_config["after"]
-            new_header.add_comment('-' * 70, after=target)
+            new_header.add_comment("-" * 70, after=target)
             if "additional_content" in sep_config:
-                content = sep_config['additional_content']
-                lines = content.split('\n')
+                content = sep_config["additional_content"]
+                lines = content.split("\n")
                 lines.reverse()
                 for line in lines:
                     new_header.add_comment(line, after=target)
-            new_header.add_comment(sep_contents.center(70, '-'), after=target)
-            new_header.add_comment('-' * 70, after=target)
+            new_header.add_comment(sep_contents.center(70, "-"), after=target)
+            new_header.add_comment("-" * 70, after=target)


 def convert_bbi_header(filename):
@@ -291,15 +295,15 @@ def convert_bbi_header(filename):
     """
     header = fits.getheader(filename)
-    bbi_header = get_generic_header(settings.header_template, 'reconstruction')
+    bbi_header = get_generic_header(settings.header_template, "reconstruction")
     bbi_fn_template = settings.fn_template
-    patches = get_generic_patches(settings.header_patches_template, 'reconstruction')
+    patches = get_generic_patches(settings.header_patches_template, "reconstruction")
     # Check how much changes are necessary and inform user
     diff = fits.HeaderDiff(header, bbi_header)
     if diff.identical:
-        print('Header is identical to generic header')
+        print("Header is identical to generic header")
         return header
     new_header = bbi_header.copy()
@@ -315,21 +319,23 @@ def convert_bbi_header(filename):
     new_header = apply_patches(new_header, header, patches)
     date_section = re.search(r"\d{8}[_\-]\d{6}", basename(filename)).group(0)
-    timestamp = datetime.datetime.strptime(date_section, '%Y%m%d-%H%M%S')
-    wavelength = new_header['WAVELNTH']
+    timestamp = datetime.datetime.strptime(date_section, "%Y%m%d-%H%M%S")
+    wavelength = new_header["WAVELNTH"]
     pointing_id = get_pointing_id(filename)
     new_header["POINT_ID"] = pointing_id
     new_header["EXTNAME"] = "_".join([new_header["POINT_ID"], f"WL{wavelength:03.0f}"])
     new_header["FILTER"] = get_filter(wavelength)
-    file_mode = "reco_bb" if (('Zyla-1' in filename) or ('narrow' in filename)) else "reco_nb"
+    file_mode = (
+        "reco_bb" if (("Zyla-1" in filename) or ("narrow" in filename)) else "reco_nb"
+    )
     fits_filename = bbi_fn_template.format(
         timestamp.strftime("%Y%m%d-%H%M%S"),
         pointing_id.split("_")[-1],
         int(wavelength),
         file_mode,
         "fits"
         "fits",
     )
     new_filename = fits_filename
@@ -341,13 +347,15 @@ def convert_bbi_header(filename):
         raise KeyError
         # speck = pipeline.speckle(filename + '.log')
     except:
-        print('Could not find metadata')
+        print("Could not find metadata")
         speck = None
     if speck is not None:
         estimated_alpha = speck.get_sub_alpha()
         estimated_alpha = estimated_alpha.mean((0, 1))[0]
-        estimated_fried = estimated_alpha * float(speck.parameter['telescopediameter_mm']) / 10.
-        new_header['ATMOS_R0'] = float(f"{estimated_fried:03.3f}")
+        estimated_fried = (
+            estimated_alpha * float(speck.parameter["telescopediameter_mm"]) / 10.0
+        )
+        new_header["ATMOS_R0"] = float(f"{estimated_fried:03.3f}")
     insert_wcs_kws(new_header, filename)
     add_history(new_header, __file__, "header_fix")
@@ -355,7 +363,7 @@ def convert_bbi_header(filename):
     return new_header


-if __name__ == '__main__':
+if __name__ == "__main__":
     infile = "/archive/bbi/new/20170519/Zyla1/RC/RC_data_andorZyla-1_20170519-123314144_HAlpha_656.fits"
     header_template = "/dat/schaffer/projects/bbi_header/generic_bbi_header.yaml"
     outpath = "/tmp/"
@@ -370,7 +378,7 @@ def run_bbi_header_conversion(args):
         "--folder_out",
         dest="folder_out",
         default="/dat/sdc/bbi/",
-        help="Output folder, all output files will be stored here, (default: %(default)s)"
+        help="Output folder, all output files will be stored here, (default: %(default)s)",
     )
     args = parser.parse_args(args)
     in_folder = args.folder_in
@@ -408,9 +416,16 @@ def run_bbi_header_conversion(args):
         out_dir = join(out_folder, new_header["POINT_ID"] + f"_WL{wl}")
         setup_outdir(out_dir)
-        out_file = join(out_dir, new_header['FILENAME'])
+        out_file = join(out_dir, new_header["FILENAME"])
         data = fits.getdata(infile)
         flipped = np.flip(data, axis=0)
-        fits.writeto(out_file, flipped, new_header, overwrite=True, checksum=True, output_verify='warn')
+        fits.writeto(
+            out_file,
+            flipped,
+            new_header,
+            overwrite=True,
+            checksum=True,
+            output_verify="warn",
+        )

kis_tools/bbi/bbi_sort_observations.py
@@ -13,7 +13,7 @@ import tqdm
 from kis_tools.generic.settings.settings import get_settings
 from kis_tools.util.util import date_from_fn

-settings = get_settings('bbi')
+settings = get_settings("bbi")
 path_obs_csv = settings.path_obs_csv
@@ -31,7 +31,7 @@ def copy_file(infile, run_number):
 def get_obs_info(filename):
     date = date_from_fn(filename)
-    with open(path_obs_csv, 'r') as csv_file:
+    with open(path_obs_csv, "r") as csv_file:
         obs_csv = csv.reader(csv_file)
         header = next(obs_csv)
         pos_date_end = header.index("DATE-END")
@@ -59,7 +59,7 @@ def get_obs_info(filename):
 def make_run_folders():
-    path = join(dirname(__file__), "bbi_observations_with_coordinates.csv"),
+    path = (join(dirname(__file__), "bbi_observations_with_coordinates.csv"),)
     df = pd.read_csv(
         path,

kis_tools/bbi/observation_counting.py
@@ -24,9 +24,7 @@ def get_times(folder):
     """ extract dates from all files from a given folder"""
     files = glob(join(folder, OBS_FILE_PATTERN))
     stamps = sorted([f.split("_")[2] for f in files])
-    times = np.array(
-        [
-            datetime.datetime.strptime(s, "%Y%m%d-%H%M%S%f") for s in stamps
-        ]
-    )
+    times = np.array(
+        [datetime.datetime.strptime(s, "%Y%m%d-%H%M%S%f") for s in stamps]
+    )
     return times
@@ -78,9 +76,7 @@ if __name__ == "__main__":
         beg, end = dates[0], dates[1]
         name = beg.strftime("%Y%m%d")
         name += f"_{i:03d}"
-        observations.append(
-            {"POINT_ID": name, "DATE-END": end, "DATE-BEG": beg}
-        )
+        observations.append({"POINT_ID": name, "DATE-END": end, "DATE-BEG": beg})
     df = pd.DataFrame(observations)
     df = df[["POINT_ID", "DATE-BEG", "DATE-END"]]
     df["DURATION"] = df["DATE-END"] - df["DATE-BEG"]

kis_tools/bbi/plotting.py
@@ -20,7 +20,7 @@ from tqdm import tqdm
 from .util import get_wl
 from ..util.util import merge_to_gif, gif_to_mp4

-mpl.use('Agg')
+mpl.use("Agg")
 import matplotlib.pyplot as plt  # noqa: E402
 from ..generic.fits import FitsFile  # noqa: E402
 from sunpy.map import Map  # noqa: E402
@@ -31,21 +31,23 @@ import numpy as np  # noqa: E402
 def loc_preview(fits_file, fn_out):
     """Generate a preview showing the center of the observation on a up to date image of the soalr disk."""
     m = Map(fits_file)
-    x, y = (m.top_right_coord.Tx + m.bottom_left_coord.Tx) / 2, (m.top_right_coord.Ty + m.bottom_left_coord.Ty) / 2
+    x, y = (m.top_right_coord.Tx + m.bottom_left_coord.Tx) / 2, (
+        m.top_right_coord.Ty + m.bottom_left_coord.Ty
+    ) / 2
     time = FitsFile(fits_file).obs_time
     with catch_warnings():
         warnings.simplefilter("ignore")
         fig, ax = make_loc_plot(x.value, y.value, time)
     # plt.tight_layout()
-    fig.savefig(fn_out, bbox_inches="tight", pad_inches=-.01, dpi=200)
+    fig.savefig(fn_out, bbox_inches="tight", pad_inches=-0.01, dpi=200)
     return fn_out


 def map_preview(fits_file, fn_out):
     """Generate a preview of the reconstructed data."""
     m = Map(fits_file)
-    m.plot_settings['vmin'] = np.percentile(m.data, 0.1)
-    m.plot_settings['vmax'] = np.percentile(m.data, 99.9)
+    m.plot_settings["vmin"] = np.percentile(m.data, 0.1)
+    m.plot_settings["vmax"] = np.percentile(m.data, 99.9)
     _ = m.plot()
     plt.gcf().savefig(fn_out, bbox_inches="tight", pad_inches=0.1, dpi=200)
@@ -56,10 +58,11 @@ def alpha_preview(fits_file, fn_out):
     """Generate a preview of the estimated seeing quality over the different reconstruction subfields."""
     datetime = FitsFile(fits_file).obs_time
     format_keys = dict(
         date=datetime.strftime("%Y%m%d"), time=datetime.strftime("%H%M%S")
     )
-    log = glob(
-        "/archive/bbi/new/{date}/Zyla1/RC/RC*{date}-{time}*.log".format(**format_keys)
-    )
+    log = glob("/archive/bbi/new/{date}/Zyla1/RC/RC*{date}-{time}*.log".format(**format_keys))
     if not log:
         raise IOError(f"Could not find reconstruction results for {fits_file}")
     speck = pipeline.speckle(log[0])
@@ -70,8 +73,7 @@ def alpha_preview(fits_file, fn_out):
 def bbi_gen_gif(folder):
     """Check a folder and merge all images of the same type into gifs and mp4s"""
-    for pattern in ['reco', 'location', 'alpha']:
+    for pattern in ["reco", "location", "alpha"]:
         files = glob(join(folder, f"*{pattern}*.png"))
         if not files:
             continue
@@ -82,16 +84,17 @@ def bbi_gen_gif(folder):
     for group, grouped_files in grouped:
         grouped_files = list(grouped_files)
         fn = basename(grouped_files[0])
-        fn = re.sub(r'-\d{6}', '', fn)
-        fn = fn.replace('.png', '.gif')
+        fn = re.sub(r"-\d{6}", "", fn)
+        fn = fn.replace(".png", ".gif")
         print(fn)
         merge_to_gif(sorted(grouped_files), join(folder, fn))
         gif_to_mp4(join(folder, fn))


 def run_bbi_preview(cmd_line_args):
-    mpl.use('Agg')
+    mpl.use("Agg")
     from ..interface.commandline_interface import add_arg_infiles

     parser = argparse.ArgumentParser()
     add_arg_infiles(parser)
     args = parser.parse_args(cmd_line_args)
@@ -107,7 +110,7 @@ def run_bbi_preview(cmd_line_args):
         map_preview(infile, fn_out_reco)
         loc_preview(infile, fn_out_loc)
         alpha_preview(infile, fn_out_alpha)
-        plt.close('all')
+        plt.close("all")
     # folder = dirname(infiles[0])
-    # bbi_gen_gif(folder)
\ No newline at end of file
+    # bbi_gen_gif(folder)

kis_tools/bbi/util.py
@@ -3,4 +3,4 @@ import re
 def get_wl(fn):
     """Extract wavelength from string"""
-    return int(re.search(r'WL(\d+)', fn).group(1))
\ No newline at end of file
+    return int(re.search(r"WL(\d+)", fn).group(1))

kis_tools/generic/__init__.py
-from .fits import FitsFile
\ No newline at end of file
+from .fits import FitsFile

kis_tools/generic/fits.py
@@ -42,7 +42,6 @@ class FitsFile:
         except KeyError as e:
             raise AttributeError

-
     def _get_value(self, keywords):
         """
         Try to retrieve a value from the header, allows multiple keys to be passed as a list. All given
@@ -137,7 +136,9 @@ class FitsFile:
     def _calc_heliocentric_angles(self):
         coords = self.sunpy_map.center
-        mu, theta = calc_heliocentric_angle(coords.Tx.arcsec, coords.Ty.arcsec, date=self.obs_time)
+        mu, theta = calc_heliocentric_angle(
+            coords.Tx.arcsec, coords.Ty.arcsec, date=self.obs_time
+        )
         self._mu = mu
         self._theta = theta
@@ -161,7 +162,7 @@ class FitsFile:
         obs_time: observation time given in filename
         """
         time_string = re.search(r"(\d{8}[\-_]\d{6})", self.filename).group(1)
-        time_string = time_string.replace('-', '_')
+        time_string = time_string.replace("-", "_")
         obs_time = datetime.datetime.strptime(time_string, "%Y%m%d_%H%M%S")
         return obs_time

kis_tools/generic/settings/settings.py
@@ -58,16 +58,19 @@ class ChroTelSettings(SDCSettings, metaclass=MetaClassSettings):
 class BBISettings(SDCSettings, metaclass=MetaClassSettings):
     def __init__(self):
         self.settings_file = join(_BASE_DIR, "generic/settings/db_structure.yaml")
         super().__init__(self.settings_file)
         # extract observations config from yaml
-        self.path_obs_csv = join(_BASE_DIR, 'bbi', self.obs_csv)
+        self.path_obs_csv = join(_BASE_DIR, "bbi", self.obs_csv)
         self.header_template = join(self._source_directory, self.header_template_fn)
-        self.header_patches_template = join(self._source_directory, self.header_patches_fn)
-        self.observations = pd.read_csv(self.path_obs_csv, parse_dates=["DATE-BEG", "DATE-END"])
+        self.header_patches_template = join(
+            self._source_directory, self.header_patches_fn
+        )
+        self.observations = pd.read_csv(
+            self.path_obs_csv, parse_dates=["DATE-BEG", "DATE-END"]
+        )


 def get_settings(instrument_name: str):
@@ -77,7 +80,6 @@ def get_settings(instrument_name: str):
         "lars": SDCSettings,
         "gris": SDCSettings,
         "chrotel": ChroTelSettings,
     }
-
     if instrument_name in settings_class:
@@ -86,4 +88,6 @@ def get_settings(instrument_name: str):
         settings_instance = settings_class()
         return settings_instance
     else:
-        raise ValueError(f"Failed retrieving settings for {instrument_name}: No such instrument.")
+        raise ValueError(
+            f"Failed retrieving settings for {instrument_name}: No such instrument."
+        )

kis_tools/gris/GrisDay.py
@@ -98,7 +98,7 @@ class GrisDay:
     def check_completeness(self):
         """check for completeness of a day, includes checking of l1
         data, l1s data, context_data, map_save_file, coord_save_file,
-        location_preview
-        """
+        location_preview"""
         self.has_level1 = isdir(self.subfolder("level1"))
         self.has_level1_split = isdir(self.subfolder("level1_split"))
         self.has_context_data = isdir(self.subfolder("context_data"))

kis_tools/gris/GrisFitsFile.py
@@ -114,13 +114,17 @@ class GrisFitsFile(FitsFile):
         delta_pix = [i - center_pixel for i in range(slit_length_pixels)]
         delta_pix = np.array(delta_pix)
         arcsec_per_pixel_x = h["STEPSIZE"]  # assuming square pixels
-        arcsec_per_pixel_y = h["STEPSIZE"] if h["STEPSIZE"] else 0.135  # assuming square pixels
+        arcsec_per_pixel_y = (
+            h["STEPSIZE"] if h["STEPSIZE"] else 0.135
+        )  # assuming square pixels
         X = x_slitcenter - np.sin(angle) * delta_pix * arcsec_per_pixel_x
         Y = y_slitcenter - np.cos(angle) * delta_pix * arcsec_per_pixel_y
-        message = "Using FITS file coordinates, they are probably wrong by at least 100 arcsec!" \
-                  " Also the slit angle is definitely off!"
+        message = (
+            "Using FITS file coordinates, they are probably wrong by at least 100 arcsec!"
+            " Also the slit angle is definitely off!"
+        )
         warn(message)
         coord_array = np.array([X, Y]) * u.arcsec
@@ -141,7 +145,7 @@ class GrisFitsFile(FitsFile):
     def old_header(self):
         """Check if the header of this file has already been processed"""
         old_header = True
-        if any([k in self.header for k in ['SOLARNET']]):
+        if any([k in self.header for k in ["SOLARNET"]]):
             old_header = False
         return old_header
@@ -190,17 +194,10 @@ class GrisFitsFile(FitsFile):
         # check if other files matching the same run are present. If so, check the running step number
         # for the same map to determine the actual number of steps. It is assumed, that all files belonging to the
         # same map are stored in the same directory
-        pattern = (
-            f"gris_*_*_*_{self.runnumber:03d}_{self.mapnumber:03d}_*.fits"
-        )
+        pattern = f"gris_*_*_*_{self.runnumber:03d}_{self.mapnumber:03d}_*.fits"
         matching_files = glob(join(self.dirname, pattern))
-        max_steps = int(
-            max(
-                [
-                    basename(m).split(".")[0].split("_")[-1]
-                    for m in matching_files
-                ]
-            )
-        )
+        max_steps = int(
+            max([basename(m).split(".")[0].split("_")[-1] for m in matching_files])
+        )
         if max_steps < nsteps:
@@ -350,7 +347,9 @@ class GrisFitsFile(FitsFile):
         Returns: arcsec, cube of (NAXIS1, NAXIS2, 2) Values are Helioprojective X and Y in arcseconds
         """
-        assert not self.old_header, f"Can't get coordinates from {self.path}! Run header conversion first!"
+        assert (
+            not self.old_header
+        ), f"Can't get coordinates from {self.path}! Run header conversion first!"
         wcs = WCS(self.header)
         pixel_shape = wcs.pixel_shape
@@ -381,31 +380,46 @@ class GrisFitsFile(FitsFile):
         y_max = max(Y) + plotting_padding

         # Create submap of FOV and plot
-        submap = m.submap(SkyCoord(x_min, y_min, frame=m.coordinate_frame), SkyCoord(x_max, y_max, frame=m.coordinate_frame))
+        submap = m.submap(
+            SkyCoord(x_min, y_min, frame=m.coordinate_frame),
+            SkyCoord(x_max, y_max, frame=m.coordinate_frame),
+        )
         fig = plt.figure()  # noqa F841
         ax = plt.subplot(projection=submap)
         im = submap.plot()  # noqa:F841

         # create SkyCoord objects and overplot
         slit_wcs = SkyCoord(X, Y, frame=submap.coordinate_frame)
-        ax.plot_coord(slit_wcs, 'g-', label="WCS Coordinates")
+        ax.plot_coord(slit_wcs, "g-", label="WCS Coordinates")

         # Create Legend and annotations
         title = "Gris Slit {0}_{1:03d}_{2:03d}_{3:03d}"
-        title = title.format(self.obs_time.strftime('%Y%m%d'), self.runnumber, self.mapnumber, self.step_index)
+        title = title.format(
+            self.obs_time.strftime("%Y%m%d"),
+            self.runnumber,
+            self.mapnumber,
+            self.step_index,
+        )
         plt.title(title)
-        annotation = f"Spectrum taken at\n{self.obs_time.strftime('%Y-%m-%d %H:%M:%S')}"
-        annotation += f"\nHMI Reference taken at\n{submap.date.strftime('%Y-%m-%d %H:%M:%S')}"
-        plt.text(0.05, 0.8, annotation, horizontalalignment='left', transform=ax.transAxes)
+        annotation = (
+            f"Spectrum taken at\n{self.obs_time.strftime('%Y-%m-%d %H:%M:%S')}"
+        )
+        annotation += (
+            f"\nHMI Reference taken at\n{submap.date.strftime('%Y-%m-%d %H:%M:%S')}"
+        )
+        plt.text(
+            0.05, 0.8, annotation, horizontalalignment="left", transform=ax.transAxes
+        )