pyreduce-astro 0.7a4__cp314-cp314-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyreduce/__init__.py +67 -0
- pyreduce/__main__.py +322 -0
- pyreduce/cli.py +342 -0
- pyreduce/clib/Release/_slitfunc_2d.cp311-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp311-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp312-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp312-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp313-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp313-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp314-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_2d.cp314-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_2d.obj +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp311-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp311-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp312-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp312-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp313-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp313-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp314-win_amd64.exp +0 -0
- pyreduce/clib/Release/_slitfunc_bd.cp314-win_amd64.lib +0 -0
- pyreduce/clib/Release/_slitfunc_bd.obj +0 -0
- pyreduce/clib/__init__.py +0 -0
- pyreduce/clib/_slitfunc_2d.cp311-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp312-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp313-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_2d.cp314-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp311-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp312-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp313-win_amd64.pyd +0 -0
- pyreduce/clib/_slitfunc_bd.cp314-win_amd64.pyd +0 -0
- pyreduce/clib/build_extract.py +75 -0
- pyreduce/clib/slit_func_2d_xi_zeta_bd.c +1313 -0
- pyreduce/clib/slit_func_2d_xi_zeta_bd.h +55 -0
- pyreduce/clib/slit_func_bd.c +362 -0
- pyreduce/clib/slit_func_bd.h +17 -0
- pyreduce/clipnflip.py +147 -0
- pyreduce/combine_frames.py +861 -0
- pyreduce/configuration.py +191 -0
- pyreduce/continuum_normalization.py +329 -0
- pyreduce/cwrappers.py +404 -0
- pyreduce/datasets.py +238 -0
- pyreduce/echelle.py +413 -0
- pyreduce/estimate_background_scatter.py +130 -0
- pyreduce/extract.py +1362 -0
- pyreduce/extraction_width.py +77 -0
- pyreduce/instruments/__init__.py +0 -0
- pyreduce/instruments/aj.py +9 -0
- pyreduce/instruments/aj.yaml +51 -0
- pyreduce/instruments/andes.py +102 -0
- pyreduce/instruments/andes.yaml +72 -0
- pyreduce/instruments/common.py +711 -0
- pyreduce/instruments/common.yaml +57 -0
- pyreduce/instruments/crires_plus.py +103 -0
- pyreduce/instruments/crires_plus.yaml +101 -0
- pyreduce/instruments/filters.py +195 -0
- pyreduce/instruments/harpn.py +203 -0
- pyreduce/instruments/harpn.yaml +140 -0
- pyreduce/instruments/harps.py +312 -0
- pyreduce/instruments/harps.yaml +144 -0
- pyreduce/instruments/instrument_info.py +140 -0
- pyreduce/instruments/jwst_miri.py +29 -0
- pyreduce/instruments/jwst_miri.yaml +53 -0
- pyreduce/instruments/jwst_niriss.py +98 -0
- pyreduce/instruments/jwst_niriss.yaml +60 -0
- pyreduce/instruments/lick_apf.py +35 -0
- pyreduce/instruments/lick_apf.yaml +60 -0
- pyreduce/instruments/mcdonald.py +123 -0
- pyreduce/instruments/mcdonald.yaml +56 -0
- pyreduce/instruments/metis_ifu.py +45 -0
- pyreduce/instruments/metis_ifu.yaml +62 -0
- pyreduce/instruments/metis_lss.py +45 -0
- pyreduce/instruments/metis_lss.yaml +62 -0
- pyreduce/instruments/micado.py +45 -0
- pyreduce/instruments/micado.yaml +62 -0
- pyreduce/instruments/models.py +257 -0
- pyreduce/instruments/neid.py +156 -0
- pyreduce/instruments/neid.yaml +61 -0
- pyreduce/instruments/nirspec.py +215 -0
- pyreduce/instruments/nirspec.yaml +63 -0
- pyreduce/instruments/nte.py +42 -0
- pyreduce/instruments/nte.yaml +55 -0
- pyreduce/instruments/uves.py +46 -0
- pyreduce/instruments/uves.yaml +65 -0
- pyreduce/instruments/xshooter.py +39 -0
- pyreduce/instruments/xshooter.yaml +63 -0
- pyreduce/make_shear.py +607 -0
- pyreduce/masks/mask_crires_plus_det1.fits.gz +0 -0
- pyreduce/masks/mask_crires_plus_det2.fits.gz +0 -0
- pyreduce/masks/mask_crires_plus_det3.fits.gz +0 -0
- pyreduce/masks/mask_ctio_chiron.fits.gz +0 -0
- pyreduce/masks/mask_elodie.fits.gz +0 -0
- pyreduce/masks/mask_feros3.fits.gz +0 -0
- pyreduce/masks/mask_flames_giraffe.fits.gz +0 -0
- pyreduce/masks/mask_harps_blue.fits.gz +0 -0
- pyreduce/masks/mask_harps_red.fits.gz +0 -0
- pyreduce/masks/mask_hds_blue.fits.gz +0 -0
- pyreduce/masks/mask_hds_red.fits.gz +0 -0
- pyreduce/masks/mask_het_hrs_2x5.fits.gz +0 -0
- pyreduce/masks/mask_jwst_miri_lrs_slitless.fits.gz +0 -0
- pyreduce/masks/mask_jwst_niriss_gr700xd.fits.gz +0 -0
- pyreduce/masks/mask_lick_apf_.fits.gz +0 -0
- pyreduce/masks/mask_mcdonald.fits.gz +0 -0
- pyreduce/masks/mask_nes.fits.gz +0 -0
- pyreduce/masks/mask_nirspec_nirspec.fits.gz +0 -0
- pyreduce/masks/mask_sarg.fits.gz +0 -0
- pyreduce/masks/mask_sarg_2x2a.fits.gz +0 -0
- pyreduce/masks/mask_sarg_2x2b.fits.gz +0 -0
- pyreduce/masks/mask_subaru_hds_red.fits.gz +0 -0
- pyreduce/masks/mask_uves_blue.fits.gz +0 -0
- pyreduce/masks/mask_uves_blue_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle_2x2_split.fits.gz +0 -0
- pyreduce/masks/mask_uves_middle_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_uves_red.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_2x2.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_2x2_split.fits.gz +0 -0
- pyreduce/masks/mask_uves_red_binned_2_2.fits.gz +0 -0
- pyreduce/masks/mask_xshooter_nir.fits.gz +0 -0
- pyreduce/pipeline.py +619 -0
- pyreduce/rectify.py +138 -0
- pyreduce/reduce.py +2065 -0
- pyreduce/settings/settings_AJ.json +19 -0
- pyreduce/settings/settings_ANDES.json +89 -0
- pyreduce/settings/settings_CRIRES_PLUS.json +89 -0
- pyreduce/settings/settings_HARPN.json +73 -0
- pyreduce/settings/settings_HARPS.json +69 -0
- pyreduce/settings/settings_JWST_MIRI.json +55 -0
- pyreduce/settings/settings_JWST_NIRISS.json +55 -0
- pyreduce/settings/settings_LICK_APF.json +62 -0
- pyreduce/settings/settings_MCDONALD.json +58 -0
- pyreduce/settings/settings_METIS_IFU.json +77 -0
- pyreduce/settings/settings_METIS_LSS.json +77 -0
- pyreduce/settings/settings_MICADO.json +78 -0
- pyreduce/settings/settings_NEID.json +73 -0
- pyreduce/settings/settings_NIRSPEC.json +58 -0
- pyreduce/settings/settings_NTE.json +60 -0
- pyreduce/settings/settings_UVES.json +54 -0
- pyreduce/settings/settings_XSHOOTER.json +78 -0
- pyreduce/settings/settings_pyreduce.json +184 -0
- pyreduce/settings/settings_schema.json +850 -0
- pyreduce/tools/__init__.py +0 -0
- pyreduce/tools/combine.py +117 -0
- pyreduce/trace.py +979 -0
- pyreduce/util.py +1366 -0
- pyreduce/wavecal/MICADO_HK_3arcsec_chip5.npz +0 -0
- pyreduce/wavecal/atlas/thar.fits +4946 -13
- pyreduce/wavecal/atlas/thar_list.txt +4172 -0
- pyreduce/wavecal/atlas/une.fits +0 -0
- pyreduce/wavecal/convert.py +38 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det1.npz +0 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det2.npz +0 -0
- pyreduce/wavecal/crires_plus_J1228_Open_det3.npz +0 -0
- pyreduce/wavecal/harpn_harpn_2D.npz +0 -0
- pyreduce/wavecal/harps_blue_2D.npz +0 -0
- pyreduce/wavecal/harps_blue_pol_2D.npz +0 -0
- pyreduce/wavecal/harps_red_2D.npz +0 -0
- pyreduce/wavecal/harps_red_pol_2D.npz +0 -0
- pyreduce/wavecal/mcdonald.npz +0 -0
- pyreduce/wavecal/metis_lss_l_2D.npz +0 -0
- pyreduce/wavecal/metis_lss_m_2D.npz +0 -0
- pyreduce/wavecal/nirspec_K2.npz +0 -0
- pyreduce/wavecal/uves_blue_360nm_2D.npz +0 -0
- pyreduce/wavecal/uves_blue_390nm_2D.npz +0 -0
- pyreduce/wavecal/uves_blue_437nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_2x2_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_565nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_580nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_600nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_665nm_2D.npz +0 -0
- pyreduce/wavecal/uves_middle_860nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_580nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_600nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_665nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_760nm_2D.npz +0 -0
- pyreduce/wavecal/uves_red_860nm_2D.npz +0 -0
- pyreduce/wavecal/xshooter_nir.npz +0 -0
- pyreduce/wavelength_calibration.py +1871 -0
- pyreduce_astro-0.7a4.dist-info/METADATA +106 -0
- pyreduce_astro-0.7a4.dist-info/RECORD +182 -0
- pyreduce_astro-0.7a4.dist-info/WHEEL +4 -0
- pyreduce_astro-0.7a4.dist-info/entry_points.txt +2 -0
- pyreduce_astro-0.7a4.dist-info/licenses/LICENSE +674 -0
pyreduce/echelle.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Contains functions to read and modify echelle structures, just as in reduce
|
|
3
|
+
|
|
4
|
+
Mostly for compatibility reasons
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
|
|
9
|
+
import astropy.io.fits as fits
|
|
10
|
+
import numpy as np
|
|
11
|
+
import scipy.constants
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Echelle:
    """Container for an echelle spectrum, compatible with the REDUCE format.

    Data arrays are kept in an internal dictionary and the common fields
    ("spec", "sig", "wave", "cont", "columns", "mask") are exposed as
    properties. Arbitrary fields are reachable through the mapping
    interface, e.g. ``ech["orders"]``.
    """

    def __init__(self, head=None, filename="", data=None):
        """
        Parameters
        ----------
        head : fits.Header or dict, optional
            FITS header associated with the data (default: empty dict)
        filename : str, optional
            name of the file this structure was loaded from (default: "")
        data : dict, optional
            initial data fields (default: empty dict)
        """
        # Create fresh containers instead of sharing mutable defaults
        self.filename = filename
        self.header = head if head is not None else {}
        self._data = data if data is not None else {}

    @property
    def nord(self):
        """int or None: number of orders (rows of the spectrum), if a spectrum is present."""
        spec = self._data.get("spec")
        return spec.shape[0] if spec is not None else None

    @property
    def ncol(self):
        """int or None: number of columns of the spectrum, if a spectrum is present."""
        spec = self._data.get("spec")
        return spec.shape[1] if spec is not None else None

    @property
    def spec(self):
        """array or None: the spectrum."""
        return self._data.get("spec")

    @spec.setter
    def spec(self, value):
        self._data["spec"] = value

    @property
    def sig(self):
        """array or None: (absolute) uncertainty of the spectrum."""
        return self._data.get("sig")

    @sig.setter
    def sig(self, value):
        self._data["sig"] = value

    @property
    def wave(self):
        """array or None: wavelength solution."""
        return self._data.get("wave")

    @wave.setter
    def wave(self, value):
        self._data["wave"] = value

    @property
    def cont(self):
        """array or None: continuum."""
        return self._data.get("cont")

    @cont.setter
    def cont(self, value):
        self._data["cont"] = value

    @property
    def columns(self):
        """array or None: column range of each order."""
        return self._data.get("columns")

    @columns.setter
    def columns(self, value):
        self._data["columns"] = value

    @property
    def mask(self):
        """array or None: bad pixel mask."""
        return self._data.get("mask")

    @mask.setter
    def mask(self, value):
        self._data["mask"] = value

    def __getitem__(self, index):
        return self._data[index]

    def __setitem__(self, index, value):
        self._data[index] = value

    def __delitem__(self, index):
        del self._data[index]

    def __contains__(self, index):
        # Membership on the underlying data dict; no need for .keys()
        return index in self._data

    @staticmethod
    def read(
        fname,
        extension=1,
        raw=False,
        continuum_normalization=True,
        barycentric_correction=True,
        radial_velociy_correction=True,
    ):
        """
        Read data from an echelle file
        Expand wavelength and continuum polynomials
        Apply barycentric/radial velocity correction
        Apply continuum normalization

        Will load any fields in the binary table, however special attention
        is given only to specific names:
        "SPEC" : Spectrum
        "SIG" : Sigma, i.e. (absolute) uncertainty
        "CONT" : Continuum
        "WAVE" : Wavelength solution
        "COLUMNS" : Column range

        Parameters
        ----------
        fname : str
            filename to load
        extension : int, optional
            fits extension of the data within the file (default: 1)
        raw : bool, optional
            if true apply no corrections to the data (default: False)
        continuum_normalization : bool, optional
            apply continuum normalization (default: True)
        barycentric_correction : bool, optional
            apply barycentric correction (default: True)
        radial_velociy_correction : bool, optional
            apply radial velocity correction (default: True)
            (note: parameter name keeps its historical spelling for
            backwards compatibility)

        Returns
        -------
        ech : Echelle
            Echelle structure, with data contained in attributes
        """

        with fits.open(fname, memmap=False) as hdu:
            header = hdu[0].header
            data = hdu[extension].data

        # Each binary-table column stores one array in its first (only) row
        _data = {column.lower(): data[column][0] for column in data.dtype.names}
        ech = Echelle(filename=fname, head=header, data=_data)
        nord, ncol = ech.nord, ech.ncol

        if not raw:
            if "spec" in ech:
                # Physical order numbers start at the order base "obase"
                base_order = header.get("obase", 1)
                ech["orders"] = np.arange(base_order, base_order + nord)

            # Wavelength
            if "wave" in ech:
                ech["wave"] = expand_polynomial(ncol, ech["wave"])

                # Correct for radial velocity and barycentric correction
                # + : away from observer
                # - : towards observer
                velocity_correction = 0
                if barycentric_correction:
                    velocity_correction -= header.get("barycorr", 0)
                    # Zero the header value so the correction is not applied twice
                    header["barycorr"] = 0
                if radial_velociy_correction:
                    velocity_correction += header.get("radvel", 0)
                    header["radvel"] = 0

                # speed of light in km/s to match the header velocities
                speed_of_light = scipy.constants.speed_of_light * 1e-3
                ech["wave"] *= 1 + velocity_correction / speed_of_light

            # Continuum
            if "cont" in ech:
                ech["cont"] = expand_polynomial(ncol, ech["cont"])

            # Create Mask, based on column range
            if "columns" in ech:
                ech["mask"] = np.full((nord, ncol), True)
                for iord in range(nord):
                    ech["mask"][
                        iord, ech["columns"][iord, 0] : ech["columns"][iord, 1]
                    ] = False

                if "spec" in ech:
                    ech["spec"] = np.ma.masked_array(ech["spec"], mask=ech["mask"])
                if "sig" in ech:
                    ech["sig"] = np.ma.masked_array(ech["sig"], mask=ech["mask"])
                if "cont" in ech:
                    ech["cont"] = np.ma.masked_array(ech["cont"], mask=ech["mask"])
                if "wave" in ech:
                    ech["wave"] = np.ma.masked_array(ech["wave"], mask=ech["mask"])

            # Apply continuum normalization
            if continuum_normalization and "cont" in ech:
                if "spec" in ech:
                    ech["spec"] /= ech["cont"]
                if "sig" in ech:
                    ech["sig"] /= ech["cont"]

        return ech

    def save(self, fname):
        """
        Save data in an Echelle fits, i.e. a fits file with a Binary Table in Extension 1

        Parameters
        ----------
        fname : str
            filename
        """
        save(fname, self.header, **self._data)
|
|
232
|
+
def calc_2dpolynomial(solution2d):
    """Expand a 2d polynomial, where the data is given in a REDUCE make_wave format
    Note that the coefficients are for order/100 and column/1000 respectively, where the order is counted from order base up

    Parameters
    ----------
    solution2d : array
        data in a REDUCE make_wave format:
        0: version
        1: number of columns
        2: number of orders
        3: order base, i.e. 0th order number (should not be 0)
        4-6: empty
        7: number of cross coefficients
        8: number of column only coefficients
        9: number of order only coefficients
        10: coefficient - constant
        11-x: column coefficients
        x-y : order coefficients
        z- : cross coefficients (xy, xy2, x2y, x2y2, xy3, x3y), with x = orders, y = columns

    Returns
    -------
    poly : array[nord, ncol]
        expanded polynomial
    """

    # Unpack the header fields of the make_wave vector
    ncol, nord, order_base = (int(solution2d[k]) for k in (1, 2, 3))
    n_cross = int(solution2d[7])
    n_col = int(solution2d[8])
    n_ord = int(solution2d[9])
    raw = solution2d[10:]

    # Assemble the dense coefficient matrix expected by polygrid2d
    coeff = np.zeros((n_ord + 1, n_col + 1))
    coeff[0, 0] = raw[0]
    coeff[0, 1:] = raw[1 : n_col + 1]
    coeff[1:, 0] = raw[n_col + 1 : n_col + n_ord + 1]

    # Cross terms follow the pure column/order coefficients
    base = n_col + n_ord
    if n_cross in (4, 6):
        coeff[1, 1], coeff[1, 2] = raw[base + 1], raw[base + 2]
        coeff[2, 1], coeff[2, 2] = raw[base + 3], raw[base + 4]
        if n_cross == 6:
            coeff[1, 3] = raw[base + 5]
            coeff[3, 1] = raw[base + 6]

    # Orders are counted from the order base; columns from 0
    order_axis = np.arange(order_base, order_base + nord, dtype=float)
    column_axis = np.arange(ncol, dtype=float)

    # Coefficients are scaled for order/100 and column/1000; the result is
    # divided by the order number (one row per order)
    grid = np.polynomial.polynomial.polygrid2d(
        order_axis / 100, column_axis / 1000, coeff
    )
    return grid / order_axis[:, None]
|
+
def calc_1dpolynomials(ncol, poly):
    """Expand a set of 1d polynomials (one per order) seperately

    Parameters
    ----------
    ncol : int
        number of columns
    poly : array[nord, degree]
        polynomial coefficients

    Returns
    -------
    poly : array[nord, ncol]
        expanded polynomials
    """

    xs = np.arange(ncol)
    # Preallocate as float so the result dtype is independent of the input
    expanded = np.zeros((poly.shape[0], ncol))
    for row, coefficients in zip(expanded, poly):
        row[:] = np.polyval(coefficients, xs)
    return expanded
315
|
+
def expand_polynomial(ncol, poly):
    """Checks if and how to expand data poly, then expands the data if necessary

    Parameters
    ----------
    ncol : int
        number of columns in the image
    poly : array[nord, ...]
        polynomial coefficients to expand, or already expanded data

    Returns
    -------
    poly : array[nord, ncol]
        expanded data
    """

    if poly.ndim == 1:
        # A flat vector is a REDUCE make_wave style 2d polynomial description
        return calc_2dpolynomial(poly)
    if poly.shape[1] < 20:
        # Few columns: per-order 1d polynomial coefficients
        return calc_1dpolynomials(ncol, poly)
    # Anything wider is assumed to be already expanded data
    return poly
|
338
|
+
def read(fname, **kwargs):
    """Read an echelle structure from a FITS file.

    Thin module-level convenience wrapper around :meth:`Echelle.read`;
    all keyword arguments are forwarded unchanged.
    """
    return Echelle.read(fname, **kwargs)
+
|
|
342
|
+
def save(fname, header, **kwargs):
    """Save data in an Echelle fits, i.e. a fits file with a Binary Table in Extension 1

    The data is passed in kwargs, with the name of the binary table column as the key
    Floating point data is saved as float32 (E), Integer data as int16 (I)

    Parameters
    ----------
    fname : str
        filename
    header : fits.header
        FITS header
    **kwargs : array[]
        data to be saved in the file
    """

    if not isinstance(header, fits.Header):
        header = fits.Header(cards=header)

    primary = fits.PrimaryHDU(header=header)

    table_columns = []
    for name, value in kwargs.items():
        # None entries are simply skipped
        if value is None:
            continue

        if isinstance(value, list):
            # A list holds one array per entry; each becomes its own
            # "slitfu<i>" column so entries may have different lengths
            for idx, entry in enumerate(value):
                if np.issubdtype(entry.dtype, np.floating):
                    entry, code = entry.astype(np.float32), "E"
                elif np.issubdtype(entry.dtype, np.integer):
                    entry, code = entry.astype(np.int16), "I"
                else:
                    raise ValueError(
                        f"Unsupported data type for array {idx}: {entry.dtype}"
                    )

                table_columns.append(
                    fits.Column(
                        name=f"slitfu{idx}",
                        format=f"{len(entry)}{code}",
                        array=[entry],
                    )
                )

        else:
            # Store the whole array as a single table row; the original
            # shape is preserved in the column's dim attribute
            flat = value.ravel()[None, :]

            if np.issubdtype(flat.dtype, np.floating):
                # Wavelengths keep double precision, everything else float32
                if name == "wave":
                    flat, code = flat.astype(np.float64), "D"
                else:
                    flat, code = flat.astype(np.float32), "E"
            elif np.issubdtype(flat.dtype, np.integer):
                flat, code = flat.astype(np.int16), "I"
            elif np.issubdtype(flat.dtype, np.bool_):
                flat, code = flat.astype(np.bool_), "B"
            else:
                raise TypeError(f"Could not understand dtype {flat.dtype}")

            table_columns.append(
                fits.Column(
                    name=name.upper(),
                    array=flat,
                    format=f"{value.size}{code}",
                    dim=str(value.shape[::-1]),
                )
            )

    table = fits.BinTableHDU.from_columns(table_columns)

    hdulist = fits.HDUList(hdus=[primary, table])
    hdulist.writeto(fname, overwrite=True, output_verify="silentfix+ignore")
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module that estimates the background scatter
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
import matplotlib.pyplot as plt
|
|
8
|
+
import numpy as np
|
|
9
|
+
|
|
10
|
+
from . import util
|
|
11
|
+
from .extract import fix_parameters
|
|
12
|
+
from .util import make_index, polyfit2d
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def estimate_background_scatter(
    img,
    orders,
    column_range=None,
    extraction_width=0.1,
    scatter_degree=4,
    sigma_cutoff=2,
    border_width=10,
    plot=False,
    plot_title=None,
):
    """
    Estimate the background by fitting a 2d polynomial to interorder data

    Interorder data is all pixels minus the orders +- the extraction width

    Parameters
    ----------
    img : array[nrow, ncol]
        (flat) image data; may be a masked array, masked pixels are excluded
    orders : array[nord, degree]
        order polynomial coefficients
    column_range : array[nord, 2], optional
        range of columns to use in each order (default: None == all columns)
    extraction_width : float, array[nord, 2], optional
        extraction width for each order, values below 1.5 are considered fractional, others as number of pixels (default: 0.1)
    scatter_degree : int, optional
        polynomial degree of the 2d fit for the background scatter (default: 4)
    sigma_cutoff : float, optional
        interorder pixels brighter than median + sigma_cutoff * std are
        rejected before the fit (default: 2)
    border_width : int, optional
        width of the image border to exclude from the fit;
        None or 0 disables the border exclusion (default: 10)
    plot : bool, optional
        wether to plot the fitted polynomial and the data or not (default: False)
    plot_title : str, optional
        suptitle for the diagnostic plot (default: None)

    Returns
    -------
    coeff : array
        2d polynomial coefficients of the background scatter fit,
        as returned by polyfit2d
    """

    nrow, ncol = img.shape
    nord, _ = orders.shape

    # Normalize extraction_width / column_range inputs into per-order arrays
    extraction_width, column_range, orders = fix_parameters(
        extraction_width,
        column_range,
        orders,
        nrow,
        ncol,
        nord,
        ignore_column_range=True,
    )

    # Method 1: Select all pixels, but those known to be in orders
    bw = border_width
    mask = np.full(img.shape, True)
    if bw is not None and bw != 0:
        # Exclude the image borders from the background estimate
        mask[:bw] = mask[-bw:] = mask[:, :bw] = mask[:, -bw:] = False
    for i in range(nord):
        left, right = column_range[i]
        # Widen the column range beyond the order ends so pixels just past
        # the trace do not leak into the background sample
        left -= extraction_width[i, 1] * 2
        right += extraction_width[i, 0] * 2
        left = max(0, left)
        right = min(ncol, right)

        x_order = np.arange(left, right)
        # y position of the order trace at each column
        y_order = np.polyval(orders[i], x_order)

        # Pixels within the extraction width of the trace belong to the
        # order and must not contribute to the background
        y_above = y_order + extraction_width[i, 1]
        y_below = y_order - extraction_width[i, 0]

        y_above = np.floor(y_above)
        y_below = np.ceil(y_below)

        index = make_index(y_below, y_above, left, right, zero=True)
        # Clamp row indices so orders near the edge stay inside the image
        np.clip(index[0], 0, nrow - 1, out=index[0])

        mask[index] = False

    # Also exclude pixels already masked in the input image
    mask &= ~np.ma.getmask(img)

    # Coordinates and values of the remaining (interorder) pixels
    y, x = np.indices(mask.shape)
    y, x = y[mask].ravel(), x[mask].ravel()
    z = np.ma.getdata(img[mask]).ravel()

    # Reject bright outliers (e.g. leftover order signal, cosmics)
    mask = z <= np.median(z) + sigma_cutoff * z.std()
    y, x, z = y[mask], x[mask], z[mask]

    coeff = polyfit2d(x, y, z, degree=scatter_degree, plot=plot, plot_title=plot_title)
    logger.debug("Background scatter coefficients: %s", str(coeff))

    if plot:  # pragma: no cover
        # Evaluate the fitted background over the full image for display
        yp, xp = np.indices(img.shape)
        back = np.polynomial.polynomial.polyval2d(xp, yp, coeff)

        plt.subplot(121)
        plt.title("Input Image + In-between Order traces")
        plt.xlabel("x [pixel]")
        plt.ylabel("y [pixel]")
        vmin, vmax = np.percentile(img - back, (5, 95))
        plt.imshow(img - back, vmin=vmin, vmax=vmax, aspect="equal", origin="lower")
        plt.plot(x, y, ",")

        plt.subplot(122)
        plt.title("2D fit to the scatter between orders")
        plt.xlabel("x [pixel]")
        plt.ylabel("y [pixel]")
        plt.imshow(back, vmin=0, vmax=abs(np.max(back)), aspect="equal", origin="lower")

        if plot_title is not None:
            plt.suptitle(plot_title)
        util.show_or_save("scatter")

    return coeff
|