pyreduce-astro 0.7a4 (cp314-cp314-win_amd64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyreduce/__init__.py +67 -0
- pyreduce/__main__.py +322 -0
- pyreduce/cli.py +342 -0
- pyreduce/clib/Release/_slitfunc_2d.cp311-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp311-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp312-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp312-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp313-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp313-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp314-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp314-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.obj +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp311-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp311-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp312-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp312-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp313-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp313-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp314-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp314-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.obj +0 -0
- pyreduce/clib/__init__.py +0 -0
- pyreduce/clib/_slitfunc_2d.cp311-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp312-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp313-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp314-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp311-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp312-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp313-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp314-win_amd64.pyd +0 -0
- pyreduce/clib/build_extract.py +75 -0
- pyreduce/clib/slit_func_2d_xi_zeta_bd.c +1313 -0
- pyreduce/clib/slit_func_2d_xi_zeta_bd.h +55 -0
- pyreduce/clib/slit_func_bd.c +362 -0
- pyreduce/clib/slit_func_bd.h +17 -0
- pyreduce/clipnflip.py +147 -0
- pyreduce/combine_frames.py +861 -0
- pyreduce/configuration.py +191 -0
- pyreduce/continuum_normalization.py +329 -0
- pyreduce/cwrappers.py +404 -0
- pyreduce/datasets.py +238 -0
- pyreduce/echelle.py +413 -0
- pyreduce/estimate_background_scatter.py +130 -0
- pyreduce/extract.py +1362 -0
- pyreduce/extraction_width.py +77 -0
- pyreduce/instruments/__init__.py +0 -0
- pyreduce/instruments/aj.py +9 -0
- pyreduce/instruments/aj.yaml +51 -0
- pyreduce/instruments/andes.py +102 -0
- pyreduce/instruments/andes.yaml +72 -0
- pyreduce/instruments/common.py +711 -0
- pyreduce/instruments/common.yaml +57 -0
- pyreduce/instruments/crires_plus.py +103 -0
- pyreduce/instruments/crires_plus.yaml +101 -0
- pyreduce/instruments/filters.py +195 -0
- pyreduce/instruments/harpn.py +203 -0
- pyreduce/instruments/harpn.yaml +140 -0
- pyreduce/instruments/harps.py +312 -0
- pyreduce/instruments/harps.yaml +144 -0
- pyreduce/instruments/instrument_info.py +140 -0
- pyreduce/instruments/jwst_miri.py +29 -0
- pyreduce/instruments/jwst_miri.yaml +53 -0
- pyreduce/instruments/jwst_niriss.py +98 -0
- pyreduce/instruments/jwst_niriss.yaml +60 -0
- pyreduce/instruments/lick_apf.py +35 -0
- pyreduce/instruments/lick_apf.yaml +60 -0
- pyreduce/instruments/mcdonald.py +123 -0
- pyreduce/instruments/mcdonald.yaml +56 -0
- pyreduce/instruments/metis_ifu.py +45 -0
- pyreduce/instruments/metis_ifu.yaml +62 -0
- pyreduce/instruments/metis_lss.py +45 -0
- pyreduce/instruments/metis_lss.yaml +62 -0
- pyreduce/instruments/micado.py +45 -0
- pyreduce/instruments/micado.yaml +62 -0
- pyreduce/instruments/models.py +257 -0
- pyreduce/instruments/neid.py +156 -0
- pyreduce/instruments/neid.yaml +61 -0
- pyreduce/instruments/nirspec.py +215 -0
- pyreduce/instruments/nirspec.yaml +63 -0
- pyreduce/instruments/nte.py +42 -0
- pyreduce/instruments/nte.yaml +55 -0
- pyreduce/instruments/uves.py +46 -0
- pyreduce/instruments/uves.yaml +65 -0
- pyreduce/instruments/xshooter.py +39 -0
- pyreduce/instruments/xshooter.yaml +63 -0
- pyreduce/make_shear.py +607 -0
- pyreduce/masks/mask_crires_plus_det1.fits.gz +0 -0
- pyreduce/masks/mask_crires_plus_det2.fits.gz +0 -0
- pyreduce/masks/mask_crires_plus_det3.fits.gz +0 -0
- pyreduce/masks/mask_ctio_chiron.fits.gz +0 -0
- pyreduce/masks/mask_elodie.fits.gz +0 -0
- pyreduce/masks/mask_feros3.fits.gz +0 -0
- pyreduce/masks/mask_flames_giraffe.fits.gz +0 -0
- pyreduce/masks/mask_harps_blue.fits.gz +0 -0
- pyreduce/masks/mask_harps_red.fits.gz +0 -0
- pyreduce/masks/mask_hds_blue.fits.gz +0 -0
- pyreduce/masks/mask_hds_red.fits.gz +0 -0
- pyreduce/masks/mask_het_hrs_2x5.fits.gz +0 -0
- pyreduce/masks/mask_jwst_miri_lrs_slitless.fits.gz +0 -0
- pyreduce/masks/mask_jwst_niriss_gr700xd.fits.gz +0 -0
- pyreduce/masks/mask_lick_apf_.fits.gz +0 -0
- pyreduce/masks/mask_mcdonald.fits.gz +0 -0
- pyreduce/masks/mask_nes.fits.gz +0 -0
- pyreduce/masks/mask_nirspec_nirspec.fits.gz +0 -0
- pyreduce/masks/mask_sarg.fits.gz +0 -0
- pyreduce/masks/mask_sarg_2x2a.fits.gz +0 -0
- pyreduce/masks/mask_sarg_2x2b.fits.gz +0 -0
- pyreduce/masks/mask_subaru_hds_red.fits.gz +0 -0
- pyreduce/masks/mask_uves_blue.fits.gz +0 -0
- pyreduce/masks/mask_uves_blue_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle_2x2_split.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_uves_red.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_2x2.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_2x2_split.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_xshooter_nir.fits.gz +0 -0
- pyreduce/pipeline.py +619 -0
- pyreduce/rectify.py +138 -0
- pyreduce/reduce.py +2065 -0
- pyreduce/settings/settings_AJ.json +19 -0
- pyreduce/settings/settings_ANDES.json +89 -0
- pyreduce/settings/settings_CRIRES_PLUS.json +89 -0
- pyreduce/settings/settings_HARPN.json +73 -0
- pyreduce/settings/settings_HARPS.json +69 -0
- pyreduce/settings/settings_JWST_MIRI.json +55 -0
- pyreduce/settings/settings_JWST_NIRISS.json +55 -0
- pyreduce/settings/settings_LICK_APF.json +62 -0
- pyreduce/settings/settings_MCDONALD.json +58 -0
- pyreduce/settings/settings_METIS_IFU.json +77 -0
- pyreduce/settings/settings_METIS_LSS.json +77 -0
- pyreduce/settings/settings_MICADO.json +78 -0
- pyreduce/settings/settings_NEID.json +73 -0
- pyreduce/settings/settings_NIRSPEC.json +58 -0
- pyreduce/settings/settings_NTE.json +60 -0
- pyreduce/settings/settings_UVES.json +54 -0
- pyreduce/settings/settings_XSHOOTER.json +78 -0
- pyreduce/settings/settings_pyreduce.json +184 -0
- pyreduce/settings/settings_schema.json +850 -0
- pyreduce/tools/__init__.py +0 -0
- pyreduce/tools/combine.py +117 -0
- pyreduce/trace.py +979 -0
- pyreduce/util.py +1366 -0
- pyreduce/wavecal/MICADO_HK_3arcsec_chip5.npz +0 -0
- pyreduce/wavecal/atlas/thar.fits +4946 -13
- pyreduce/wavecal/atlas/thar_list.txt +4172 -0
- pyreduce/wavecal/atlas/une.fits +0 -0
- pyreduce/wavecal/convert.py +38 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det1.npz +0 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det2.npz +0 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det3.npz +0 -0
- pyreduce/wavecal/harpn_harpn_2D.npz +0 -0
- pyreduce/wavecal/harps_blue_2D.npz +0 -0
- pyreduce/wavecal/harps_blue_pol_2D.npz +0 -0
- pyreduce/wavecal/harps_red_2D.npz +0 -0
- pyreduce/wavecal/harps_red_pol_2D.npz +0 -0
- pyreduce/wavecal/mcdonald.npz +0 -0
- pyreduce/wavecal/metis_lss_l_2D.npz +0 -0
- pyreduce/wavecal/metis_lss_m_2D.npz +0 -0
- pyreduce/wavecal/nirspec_K2.npz +0 -0
- pyreduce/wavecal/uves_blue_360nm_2D.npz +0 -0
- pyreduce/wavecal/uves_blue_390nm_2D.npz +0 -0
- pyreduce/wavecal/uves_blue_437nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_2x2_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_565nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_580nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_600nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_665nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_860nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_580nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_600nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_665nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_760nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_860nm_2D.npz +0 -0
- pyreduce/wavecal/xshooter_nir.npz +0 -0
- pyreduce/wavelength_calibration.py +1871 -0
- pyreduce_astro-0.7a4.dist-info/METADATA +106 -0
- pyreduce_astro-0.7a4.dist-info/RECORD +182 -0
- pyreduce_astro-0.7a4.dist-info/WHEEL +4 -0
- pyreduce_astro-0.7a4.dist-info/entry_points.txt +2 -0
- pyreduce_astro-0.7a4.dist-info/licenses/LICENSE +674 -0
@@ -0,0 +1,711 @@
"""
Abstract parent module for all other instruments
Contains some general functionality, which may be overridden by the children of course
"""

import datetime
import glob
import json
import logging
import os.path
from itertools import product

import numpy as np
import yaml
from astropy.io import fits
from astropy.time import Time
from dateutil import parser
from tqdm import tqdm

from ..clipnflip import clipnflip
from .filters import ArmFilter, Filter, InstrumentFilter, NightFilter, ObjectFilter
from .models import InstrumentConfig

logger = logging.getLogger(__name__)


def find_first_index(arr, value):
    """find the first element equal to value in the array arr"""
    try:
        return next(i for i, v in enumerate(arr) if v == value)
    except StopIteration as e:
        raise KeyError(f"Value {value} not found") from e


def observation_date_to_night(observation_date):
    """Convert an observation timestamp into the date of the observation night
    Nights start at noon and end at noon the next day

    Parameters
    ----------
    observation_date : datetime
        timestamp of the observation

    Returns
    -------
    night : datetime.date
        night of the observation
    """
    if observation_date == "":
        return None

    observation_date = parser.parse(observation_date)
    oneday = datetime.timedelta(days=1)

    if observation_date.hour < 12:
        observation_date -= oneday
    return observation_date.date()

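
# Illustrative sketch (not part of common.py; timestamps are hypothetical): how
# observation_date_to_night() assigns frames to nights.
#
#     >>> observation_date_to_night("2024-03-02T03:15:00")  # before noon -> previous night
#     datetime.date(2024, 3, 1)
#     >>> observation_date_to_night("2024-03-02T21:40:00")  # after noon -> same date
#     datetime.date(2024, 3, 2)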

class getter:
    """Get data from a header/dict, based on the given arm, and applies replacements"""

    def __init__(self, header, info, arm):
        self.header = header
        self.info = info.copy()
        try:
            self.index = find_first_index(info["arms"], arm.upper())
        except KeyError:
            logger.warning("No instrument arms found in instrument info")
            self.index = 0

        # Pick values for the given arm
        for k, v in self.info.items():
            if isinstance(v, list):
                self.info[k] = v[self.index]

    def __call__(self, key, alt=None):
        return self.get(key, alt)

    def get(self, key, alt=None):
        """Get data

        Parameters
        ----------
        key : str
            key of the data in the header
        alt : obj, optional
            alternative value, if key does not exist (default: None)

        Returns
        -------
        value : obj
            value found in header (or alternatively alt)
        """

        value = self.info.get(key, key)
        # if isinstance(value, list):
        #     value = value[self.index]
        if isinstance(value, str):
            value = value.format(**self.info)
            value = self.header.get(value, alt)
        return value

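
# Illustrative sketch (not part of common.py; keyword names and values are hypothetical):
# how getter resolves instrument info against a header.
#
#     >>> info = {"arms": ["BLUE", "RED"], "gain": "ESO DET OUT1 GAIN", "readnoise": [3.1, 4.2]}
#     >>> header = {"ESO DET OUT1 GAIN": 0.8}
#     >>> get = getter(header, info, "red")
#     >>> get("gain")        # string entries are header keywords
#     0.8
#     >>> get("readnoise")   # list entries were already resolved per arm in __init__
#     4.2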

class Instrument:
    """
    Abstract parent class for all instruments
    Handles the instrument specific information
    """

    def __init__(self):
        #:str: Name of the instrument (lowercase)
        self.name = self.__class__.__name__.lower()
        #:InstrumentConfig: Validated configuration model
        #:dict: Information about the instrument (for backward compatibility)
        self.config, self.info = self.load_info()

        self.filters = {
            "instrument": InstrumentFilter(self.config.instrument, regex=True),
            "night": NightFilter(self.config.date, timeformat=self.config.date_format),
            "target": ObjectFilter(self.config.target, regex=True),
            "bias": Filter(self.config.kw_bias),
            "flat": Filter(self.config.kw_flat),
            "orders": Filter(self.config.kw_orders),
            "curvature": Filter(self.config.kw_curvature),
            "scatter": Filter(self.config.kw_scatter),
            "wave": Filter(self.config.kw_wave),
            "comb": Filter(self.config.kw_comb),
            "spec": Filter(self.config.kw_spec),
        }

        self.night = "night"
        self.science = "science"
        self.shared = ["instrument", "night"]

        # Add arm filter if kw_arm is defined (for instruments with separate files per arm)
        if self.config.kw_arm is not None:
            self.filters["arm"] = ArmFilter(self.config.kw_arm)
            self.shared.append("arm")
        self.find_closest = [
            "bias",
            "flat",
            "wavecal_master",
            "freq_comb_master",
            "orders",
            "scatter",
            "curvature",
        ]

    def __str__(self):
        return self.name

    @property
    def arms(self) -> list[str] | None:
        """Available instrument arms (detectors/channels)."""
        return self.config.arms

    @property
    def extension(self) -> int | str | list:
        """FITS extension(s) to read."""
        return self.config.extension

    @property
    def orientation(self) -> int | list[int]:
        """Detector orientation code(s)."""
        return self.config.orientation

    @property
    def id_instrument(self) -> str:
        """Instrument identifier for header matching."""
        return self.config.id_instrument

    def get(self, key, header, arm, alt=None):
        get = getter(header, self.info, arm)
        return get(key, alt=alt)

    def get_extension(self, header, arm):
        arm = arm.upper()
        ext = self.extension  # Use property

        if isinstance(ext, list):
            iarm = find_first_index(self.arms, arm)
            ext = ext[iarm]

        return ext

    def load_info(self):
        """
        Load static instrument information
        Either as fits header keywords or static values

        Returns
        -------
        config : InstrumentConfig
            Validated Pydantic model
        info : dict(str:object)
            dictionary of REDUCE names for properties to Header keywords/static values
        """
        # Tips & Tricks:
        # if several arms are supported, use a list for arms
        # if a value changes depending on the arm, use a list with the same order as "arms"
        # you can also use values from this dictionary as placeholders using {name}, just like str.format

        this = os.path.dirname(__file__)

        # Try YAML first, fall back to JSON
        yaml_fname = os.path.join(this, f"{self.name}.yaml")
        json_fname = os.path.join(this, f"{self.name}.json")

        if os.path.exists(yaml_fname):
            with open(yaml_fname) as f:
                info = yaml.safe_load(f)
        elif os.path.exists(json_fname):
            with open(json_fname) as f:
                info = json.load(f)
        else:
            raise FileNotFoundError(
                f"No instrument config found for {self.name} "
                f"(tried {yaml_fname} and {json_fname})"
            )

        # Validate with Pydantic (strict - invalid config is a bug)
        config = InstrumentConfig(**info)

        return config, info

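
    # Illustrative sketch (not part of common.py): the kind of per-instrument YAML that
    # load_info() reads. The authoritative schema is InstrumentConfig in models.py and the
    # bundled *.yaml files; the keys and values below are hypothetical.
    #
    #     # myinstrument.yaml
    #     instrument: INSTRUME        # header keyword holding the instrument name
    #     date: DATE-OBS
    #     date_format: fits
    #     arms: [BLUE, RED]
    #     extension: [1, 2]           # one FITS extension per arm
    #     gain: ESO DET OUT1 GAIN     # strings name header keywords, numbers are static values
    #     readnoise: [3.1, 4.2]       # lists are resolved per arm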

    def load_fits(
        self, fname, arm, extension=None, mask=None, header_only=False, dtype=None
    ):
        """
        load fits file, REDUCE style

        primary and extension header are combined
        arm-specific info is applied to header
        data is clipnflipped
        mask is applied

        Parameters
        ----------
        fname : str
            filename
        arm : str
            instrument arm (detector/channel)
        extension : int
            data extension of the FITS file to load
        mask : array, optional
            mask to add to the data
        header_only : bool, optional
            only load the header, not the data
        dtype : str, optional
            numpy datatype to convert the read data to

        Returns
        -------
        data : masked_array
            FITS data, clipped and flipped, and with mask
        header : fits.header
            FITS header (Primary and Extension + arm info)

        ONLY the header is returned if header_only is True
        """

        arm = arm.upper()

        hdu = fits.open(fname)
        h_prime = hdu[0].header
        if extension is None:
            extension = self.get_extension(h_prime, arm)

        header = hdu[extension].header
        if extension != 0:
            header.extend(h_prime, strip=False)
        header = self.add_header_info(header, arm)
        header["e_input"] = (os.path.basename(fname), "Original input filename")

        if header_only:
            hdu.close()
            return header

        data = clipnflip(hdu[extension].data, header)

        if dtype is not None:
            data = data.astype(dtype)

        data = np.ma.masked_array(data, mask=mask)

        hdu.close()
        return data, header

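
    # Illustrative sketch (not part of common.py; file name is hypothetical): a typical
    # load_fits() call, here via the generic COMMON instrument defined at the end of this module.
    #
    #     >>> inst = COMMON()
    #     >>> data, header = inst.load_fits("science_0001.fits", arm="", dtype="f8")
    #     >>> header["e_input"], header["e_gain"]   # REDUCE keywords added on load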

    def add_header_info(self, header, arm, **kwargs):
        """read data from header and add it as REDUCE keyword back to the header

        Parameters
        ----------
        header : fits.header, dict
            header to read/write info from/to
        arm : str
            instrument arm (detector/channel)

        Returns
        -------
        header : fits.header, dict
            header with added information
        """

        info = self.info
        get = getter(header, info, arm)

        # Use HIERARCH prefix only for FITS Header objects to avoid warnings
        # For dict objects, HIERARCH is not needed and would break key access
        from astropy.io.fits import Header as FitsHeader

        hierarch = "HIERARCH " if isinstance(header, FitsHeader) else ""

        header[f"{hierarch}e_instrument"] = get("instrument", self.__class__.__name__)
        header[f"{hierarch}e_telescope"] = get("telescope", "")
        header[f"{hierarch}e_exptime"] = get("exposure_time", 0)

        jd = get("date")
        if jd is not None:
            jd = Time(jd, format=self.info.get("date_format", "fits"))
            jd = jd.to_value("mjd")

        header["e_orient"] = get("orientation", 0)
        # As per IDL rotate if orient is 4 or larger and transpose is undefined
        # the image is transposed
        header[f"{hierarch}e_transpose"] = get(
            "transpose", (header["e_orient"] % 8 >= 4)
        )

        naxis_x = get("naxis_x", 0)
        naxis_y = get("naxis_y", 0)

        prescan_x = get("prescan_x", 0)
        overscan_x = get("overscan_x", 0)
        prescan_y = get("prescan_y", 0)
        overscan_y = get("overscan_y", 0)

        header["e_xlo"] = prescan_x
        header["e_xhi"] = naxis_x - overscan_x

        header["e_ylo"] = prescan_y
        header["e_yhi"] = naxis_y - overscan_y

        header["e_gain"] = get("gain", 1)
        header["e_readn"] = get("readnoise", 0)

        header["e_sky"] = get("sky", 0)
        header["e_drk"] = get("dark", 0)
        header["e_backg"] = header["e_gain"] * (header["e_drk"] + header["e_sky"])

        header["e_imtype"] = get("image_type")
        header["e_ctg"] = get("category")

        header["e_ra"] = get("ra", 0)
        header["e_dec"] = get("dec", 0)
        header["e_jd"] = jd

        header["e_obslon"] = get("longitude")
        header["e_obslat"] = get("latitude")
        header["e_obsalt"] = get("altitude")

        if info.get("wavecal_element", None) is not None:
            header["HIERARCH e_wavecal_element"] = get(
                "wavecal_element", info.get("wavecal_element", None)
            )
        return header

    def find_files(self, input_dir):
        """Find fits files in the given folder

        Parameters
        ----------
        input_dir : string
            directory to look for fits and fits.gz files in, may include bash style wildcards

        Returns
        -------
        files: array(string)
            absolute path filenames
        """
        files = glob.glob(input_dir + "/*.fits")
        files += glob.glob(input_dir + "/*.fits.gz")
        files = np.array(files)
        return files

    def get_expected_values(self, target, night, arm=None, **kwargs):
        expectations = {
            "bias": {
                "instrument": self.config.id_instrument,
                "night": night,
                "bias": self.config.id_bias,
            },
            "flat": {
                "instrument": self.config.id_instrument,
                "night": night,
                "flat": self.config.id_flat,
            },
            "orders": {
                "instrument": self.config.id_instrument,
                "night": night,
                "orders": self.config.id_orders,
            },
            "scatter": {
                "instrument": self.config.id_instrument,
                "night": night,
                "scatter": self.config.id_scatter,
            },
            "curvature": {
                "instrument": self.config.id_instrument,
                "night": night,
                "curvature": self.config.id_curvature,
            },
            "wavecal_master": {
                "instrument": self.config.id_instrument,
                "night": night,
                "wave": self.config.id_wave,
            },
            "freq_comb_master": {
                "instrument": self.config.id_instrument,
                "night": night,
                "comb": self.config.id_comb,
            },
            "science": {
                "instrument": self.config.id_instrument,
                "night": night,
                "target": target,
                "spec": self.config.id_spec,
            },
        }

        # Add arm filter if this instrument has separate files per arm
        if arm is not None and self.config.kw_arm is not None:
            id_arm = self.config.id_arm
            arms = self.config.arms
            arm_id = id_arm[arms.index(arm)] if arm in arms else arm
            for key in expectations:
                expectations[key]["arm"] = arm_id

        return expectations


    def populate_filters(self, files):
        """Extract values from the fits headers and store them in self.filters

        Parameters
        ----------
        files : list(str)
            list of fits files

        Returns
        -------
        filters: list(Filter)
            list of populated filters (identical to self.filters)
        """
        # Empty filters
        for _, fil in self.filters.items():
            fil.clear()

        for f in tqdm(files):
            with fits.open(f) as hdu:
                h = hdu[0].header
                for _, fil in self.filters.items():
                    fil.collect(h)

        return self.filters

    def apply_filters(self, files, expected, allow_calibration_only=False):
        """
        Determine the relevant files for a given set of expected values.

        Parameters
        ----------
        files : list(str)
            list of fits files
        expected : dict
            dictionary with expected header values for each reduction step

        Returns
        -------
        files: list((dict, dict))
            list of files. The first element of each tuple is the used setting,
            and the second are the files for each step.
        """

        # Fill the filters with header information
        self.populate_filters(files)

        # Use the header information determined in populate filters
        # to find potential science and calibration files in the list of files
        # result = {step : [ {setting : value}, [files] ] }
        result = {}
        for step, values in expected.items():
            result[step] = []
            data = {}
            for name, value in values.items():
                if isinstance(value, list):
                    for v in value:
                        data[name] = self.filters[name].classify(v)
                        if len(data[name]) > 0:
                            break
                else:
                    data[name] = self.filters[name].classify(value)
            # Get all combinations of possible filter values
            # e.g. if several nights are allowed
            for thingy in product(*data.values()):
                mask = np.copy(thingy[0][1])
                for i in range(1, len(thingy)):
                    mask &= thingy[i][1]
                if np.count_nonzero(mask) == 0:
                    continue
                d = {k: v[0] for k, v in zip(values.keys(), thingy, strict=False)}
                f = files[mask]
                result[step].append((d, f))

        # Filter for only nights that have a science observation
        # files = [{setting: value}, {step: files}]
        files = []
        if allow_calibration_only:
            # Use all unique nights
            settings = {}
            for shared in self.shared:
                keys = [k for k in set(self.filters[shared].data) if k is not None]
                settings[shared] = keys
        else:
            # Or use only science nights
            settings = {}
            for shared in self.shared:
                keys = [key[shared] for key, _ in result[self.science]]
                settings[shared] = keys

        values = [settings[k] for k in self.shared]
        for setting in product(*values):
            setting = dict(zip(self.shared, setting, strict=False))
            night = setting[self.night]
            f = {}
            # For each step look for files with matching settings
            for step, step_data in result.items():
                f[step] = []
                for step_key, step_files in step_data:
                    match = [
                        setting[shared] == step_key[shared]
                        for shared in self.shared
                        if shared in step_key.keys()
                    ]
                    if all(match):
                        f[step] = step_files
                        break
                # If no matching files are found ...
                if len(f[step]) == 0:
                    if step not in self.find_closest:
                        # Show a warning
                        logger.warning(
                            "Could not find any files for step '%s' with settings %s, sharing parameters %s",
                            step,
                            setting,
                            self.shared,
                        )
                    else:
                        # Or find the closest night instead
                        j = None
                        for i, (step_key, _) in enumerate(step_data):
                            match = [
                                setting[shared] == step_key[shared]
                                for shared in self.shared
                                if shared in step_key.keys() and shared != self.night
                            ]
                            if all(match):
                                if j is None:
                                    j = i
                                else:
                                    diff_old = abs(step_data[j][0][self.night] - night)
                                    diff_new = abs(step_data[i][0][self.night] - night)
                                    if diff_new < diff_old:
                                        j = i
                        if j is None:
                            # We still don't find any files
                            logger.warning(
                                "Could not find any files for step '%s' in any night with settings %s, sharing parameters %s",
                                step,
                                setting,
                                self.shared,
                            )
                        else:
                            # We found files in a close night
                            closest_key, closest_files = step_data[j]
                            logger.warning(
                                "Using '%s' files from night %s for observations of night %s",
                                step,
                                night,
                                closest_key["night"],
                            )
                            f[step] = closest_files

            if any(len(a) > 0 for a in f.values()):
                files.append((setting, f))
        if len(files) == 0:
            logger.warning(
                "No %s files found matching the expected values %s",
                self.science,
                expected[self.science],
            )
        return files

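
    # Illustrative sketch (not part of common.py; values are hypothetical): the structure
    # returned by apply_filters()/sort_files(). Setting keys follow self.shared, step keys
    # follow get_expected_values(), and each file list is a numpy array of paths.
    #
    #     [
    #         ({"instrument": "HARPS", "night": "2024-03-01"},
    #          {"bias": [...], "flat": [...], "orders": [...], "scatter": [...],
    #           "curvature": [...], "wavecal_master": [...], "freq_comb_master": [...],
    #           "science": [...]}),
    #     ]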

    def sort_files(
        self, input_dir, target, night, *args, allow_calibration_only=False, **kwargs
    ):
        """
        Sort a set of fits files into different categories
        types are: bias, flat, wavecal, orderdef, spec

        Parameters
        ----------
        input_dir : str
            input directory containing the files to sort
        target : str
            name of the target as in the fits headers
        night : str
            observation night, possibly with wildcards
        arm : str
            instrument arm

        Returns
        -------
        files_per_night : list[dict{str:dict{str:list[str]}}]
            a list of file sets, one entry per night, where each night consists of a dictionary with one entry per setting,
            each fileset has five lists of filenames: "bias", "flat", "order", "wave", "spec", organised in another dict
        nights_out : list[datetime]
            a list of observation times, same order as files_per_night
        """
        input_dir = input_dir.format(
            **kwargs, target=target, night=night, instrument=self.name
        )
        files = self.find_files(input_dir)
        ev = self.get_expected_values(target, night, *args, **kwargs)
        files = self.apply_filters(
            files, ev, allow_calibration_only=allow_calibration_only
        )
        return files

    def get_wavecal_filename(self, header, arm, **kwargs):
        """Get the filename of the pre-existing wavelength solution for the current setting

        Parameters
        ----------
        header : fits.header, dict
            header of the wavelength calibration file
        arm : str
            instrument arm

        Returns
        -------
        filename : str
            name of the wavelength solution file
        """

        specifier = header.get(self.config.wavecal_specifier or "", "")
        instrument = "wavecal"

        cwd = os.path.dirname(__file__)
        fname = f"{instrument.lower()}_{arm}_{specifier}.npz"
        fname = os.path.join(cwd, "..", "wavecal", fname)
        return fname

    def get_supported_arms(self):
        return self.arms

    def get_mask_filename(self, arm, **kwargs):
        i = self.name.lower()
        a = arm.lower()
        fname = f"mask_{i}_{a}.fits.gz"
        cwd = os.path.dirname(__file__)
        fname = os.path.join(cwd, "..", "masks", fname)
        return fname

    def get_wavelength_range(self, header, arm, **kwargs):
        return self.get("wavelength_range", header, arm)


class COMMON(Instrument):
    pass


def create_custom_instrument(
    name, extension=0, info=None, mask_file=None, wavecal_file=None
):
    class CUSTOM(Instrument):
        def __init__(self):
            super().__init__()
            self.name = name

        def load_info(self):
            if info is None:
                return None, COMMON().info
            try:
                with open(info) as f:
                    data = json.load(f)
                return None, data
            except Exception:
                # info may already be a dict of values rather than a path to a JSON file
                return None, info

        def get_extension(self, header, arm):
            return extension

        def get_mask_filename(self, arm, **kwargs):
            return mask_file

        def get_wavecal_filename(self, header, arm, **kwargs):
            return wavecal_file

    return CUSTOM()
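
A concrete instrument in this package is usually a small subclass of Instrument plus a matching <name>.yaml placed in pyreduce/instruments/, since load_info() resolves the YAML from the lowercased class name; the bundled harps.py / harps.yaml pair follows this pattern. A minimal sketch of such a subclass, with a hypothetical instrument name, YAML file, and wavelength-solution path that are not part of the package:

    # mycam.py -- hypothetical module, placed in pyreduce/instruments/ next to mycam.yaml
    from pyreduce.instruments.common import Instrument


    class MYCAM(Instrument):
        """Minimal instrument: all static information comes from mycam.yaml via load_info()."""

        def get_wavecal_filename(self, header, arm, **kwargs):
            # Point to a custom initial wavelength solution instead of a bundled one.
            return "/data/mycam/wavecal_initial.npz"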