dkist-processing-common 11.7.1rc1__py3-none-any.whl → 11.8.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. changelog/245.feature.1.rst +1 -0
  2. dkist_processing_common/models/constants.py +333 -21
  3. dkist_processing_common/models/fits_access.py +16 -25
  4. dkist_processing_common/parsers/average_bud.py +48 -0
  5. dkist_processing_common/parsers/experiment_id_bud.py +8 -4
  6. dkist_processing_common/parsers/id_bud.py +35 -19
  7. dkist_processing_common/parsers/l0_fits_access.py +3 -3
  8. dkist_processing_common/parsers/l1_fits_access.py +47 -21
  9. dkist_processing_common/parsers/near_bud.py +4 -4
  10. dkist_processing_common/parsers/observing_program_id_bud.py +24 -0
  11. dkist_processing_common/parsers/proposal_id_bud.py +11 -5
  12. dkist_processing_common/parsers/single_value_single_key_flower.py +0 -1
  13. dkist_processing_common/parsers/time.py +147 -27
  14. dkist_processing_common/tasks/mixin/quality/_metrics.py +6 -4
  15. dkist_processing_common/tasks/parse_l0_input_data.py +246 -1
  16. dkist_processing_common/tasks/trial_catalog.py +38 -0
  17. dkist_processing_common/tests/mock_metadata_store.py +39 -0
  18. dkist_processing_common/tests/test_fits_access.py +19 -44
  19. dkist_processing_common/tests/test_input_dataset.py +1 -37
  20. dkist_processing_common/tests/test_parse_l0_input_data.py +45 -5
  21. dkist_processing_common/tests/test_quality_mixin.py +3 -11
  22. dkist_processing_common/tests/test_stems.py +162 -10
  23. dkist_processing_common/tests/test_task_parsing.py +6 -6
  24. dkist_processing_common/tests/test_trial_catalog.py +72 -2
  25. {dkist_processing_common-11.7.1rc1.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/METADATA +5 -5
  26. {dkist_processing_common-11.7.1rc1.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/RECORD +28 -26
  27. changelog/271.misc.rst +0 -1
  28. {dkist_processing_common-11.7.1rc1.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/WHEEL +0 -0
  29. {dkist_processing_common-11.7.1rc1.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/top_level.txt +0 -0
dkist_processing_common/parsers/l0_fits_access.py
@@ -27,6 +27,6 @@ class L0FitsAccess(L1FitsAccess):
         auto_squeeze: bool = True,
     ):
         super().__init__(hdu=hdu, name=name, auto_squeeze=auto_squeeze)
-        self._set_metadata_key_value(MetadataKey.ip_task_type)
-        self._set_metadata_key_value(MetadataKey.ip_start_time)
-        self._set_metadata_key_value(MetadataKey.ip_end_time)
+        self.ip_task_type: str = self.header[MetadataKey.ip_task_type]
+        self.ip_start_time: str = self.header[MetadataKey.ip_start_time]
+        self.ip_end_time: str = self.header[MetadataKey.ip_end_time]
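The L0/L1 access classes now assign typed attributes straight from the header instead of going through `_set_metadata_key_value`, so a frame missing a required key fails at construction time. A minimal sketch of that pattern with a plain astropy HDU; `DemoAccess` and the `TASKTYPE` keyword are illustrative stand-ins, not the library's real class or FITS keyword:

```python
from astropy.io import fits


class DemoAccess:
    """Toy analogue of the new direct-read pattern (not the library's FitsAccessBase)."""

    def __init__(self, hdu: fits.PrimaryHDU):
        self.header = hdu.header
        # A required key is read eagerly; if it is absent this raises KeyError right here.
        self.ip_task_type: str = self.header["TASKTYPE"]


hdu = fits.PrimaryHDU()
hdu.header["TASKTYPE"] = "observe"
print(DemoAccess(hdu).ip_task_type)  # -> observe
```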
dkist_processing_common/parsers/l1_fits_access.py
@@ -2,9 +2,12 @@
 
 from astropy.io import fits
 
+from dkist_processing_common.models.fits_access import HEADER_KEY_NOT_FOUND
 from dkist_processing_common.models.fits_access import FitsAccessBase
 from dkist_processing_common.models.fits_access import MetadataKey
 
+NOT_A_FLOAT = -999
+
 
 class L1FitsAccess(FitsAccessBase):
     """
@@ -28,25 +31,48 @@ class L1FitsAccess(FitsAccessBase):
     ):
         super().__init__(hdu=hdu, name=name, auto_squeeze=auto_squeeze)
 
-        self._set_metadata_key_value(MetadataKey.elevation)
-        self._set_metadata_key_value(MetadataKey.azimuth)
-        self._set_metadata_key_value(MetadataKey.table_angle)
-        self._set_metadata_key_value(MetadataKey.gos_level3_status)
-        self._set_metadata_key_value(MetadataKey.gos_level3_lamp_status)
-        self._set_metadata_key_value(MetadataKey.gos_polarizer_status)
-        self._set_metadata_key_value(MetadataKey.gos_retarder_status)
-        self._set_metadata_key_value(MetadataKey.gos_level0_status)
-        self._set_metadata_key_value(MetadataKey.time_obs)
-        self._set_metadata_key_value(MetadataKey.ip_id)
-        self._set_metadata_key_value(MetadataKey.instrument)
-        self._set_metadata_key_value(MetadataKey.wavelength)
-        self._set_metadata_key_value(MetadataKey.proposal_id)
-        self._set_metadata_key_value(MetadataKey.experiment_id)
-        self._set_metadata_key_value(MetadataKey.num_dsps_repeats)
-        self._set_metadata_key_value(MetadataKey.current_dsps_repeat)
-        self._set_metadata_key_value(MetadataKey.fpa_exposure_time_ms)
-        self._set_metadata_key_value(MetadataKey.sensor_readout_exposure_time_ms)
-        self._set_metadata_key_value(MetadataKey.num_raw_frames_per_fpa)
+        self.elevation: float = self.header[MetadataKey.elevation]
+        self.azimuth: float = self.header[MetadataKey.azimuth]
+        self.table_angle: float = self.header[MetadataKey.table_angle]
+        self.gos_level3_status: str = self.header[MetadataKey.gos_level3_status]
+        self.gos_level3_lamp_status: str = self.header[MetadataKey.gos_level3_lamp_status]
+        self.gos_polarizer_status: str = self.header[MetadataKey.gos_polarizer_status]
+        self.gos_retarder_status: str = self.header[MetadataKey.gos_retarder_status]
+        self.gos_level0_status: str = self.header[MetadataKey.gos_level0_status]
+        self.time_obs: str = self.header[MetadataKey.time_obs]
+        self.ip_id: str = self.header[MetadataKey.ip_id]
+        self.instrument: str = self.header[MetadataKey.instrument]
+        self.wavelength: float = self.header[MetadataKey.wavelength]
+        self.proposal_id: str = self.header[MetadataKey.proposal_id]
+        self.experiment_id: str = self.header[MetadataKey.experiment_id]
+        self.num_dsps_repeats: int = self.header[MetadataKey.num_dsps_repeats]
+        self.current_dsps_repeat: int = self.header[MetadataKey.current_dsps_repeat]
+        self.fpa_exposure_time_ms: float = self.header[MetadataKey.fpa_exposure_time_ms]
+        self.sensor_readout_exposure_time_ms: float = self.header[
+            MetadataKey.sensor_readout_exposure_time_ms
+        ]
+        self.num_raw_frames_per_fpa: int = self.header[MetadataKey.num_raw_frames_per_fpa]
+        self.camera_id: str = self.header[MetadataKey.camera_id]
+        self.camera_name: str = self.header[MetadataKey.camera_name]
+        self.camera_bit_depth: int = self.header[MetadataKey.camera_bit_depth]
+        self.hardware_binning_x: int = self.header[MetadataKey.hardware_binning_x]
+        self.hardware_binning_y: int = self.header[MetadataKey.hardware_binning_y]
+        self.software_binning_x: int = self.header[MetadataKey.software_binning_x]
+        self.software_binning_y: int = self.header[MetadataKey.software_binning_y]
+        self.observing_program_execution_id: str = self.header[
+            MetadataKey.observing_program_execution_id
+        ]
+        self.telescope_tracking_mode: str = self.header.get(
+            MetadataKey.telescope_tracking_mode, HEADER_KEY_NOT_FOUND
+        )
+        self.coude_table_tracking_mode: str = self.header.get(
+            MetadataKey.coude_table_tracking_mode, HEADER_KEY_NOT_FOUND
+        )
+        self.telescope_scanning_mode: str = self.header.get(
+            MetadataKey.telescope_scanning_mode, HEADER_KEY_NOT_FOUND
+        )
+        self.light_level: float = self.header[MetadataKey.light_level]
+        self.hls_version: str = self.header[MetadataKey.hls_version]
 
     @property
     def gos_polarizer_angle(self) -> float:
@@ -54,7 +80,7 @@ class L1FitsAccess(FitsAccessBase):
         try:
             return float(self.header[MetadataKey.gos_polarizer_angle])
         except ValueError:
-            return -999  # The angle is only used if the polarizer is in the beam
+            return NOT_A_FLOAT  # The angle is only used if the polarizer is in the beam
 
     @property
     def gos_retarder_angle(self) -> float:
@@ -62,4 +88,4 @@ class L1FitsAccess(FitsAccessBase):
         try:
             return float(self.header[MetadataKey.gos_retarder_angle])
         except ValueError:
-            return -999  # The angle is only used if the retarder is in the beam
+            return NOT_A_FLOAT  # The angle is only used if the retarder is in the beam
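Most keys are read with plain indexing, but the three tracking/scanning keywords use `Header.get()` with the `HEADER_KEY_NOT_FOUND` sentinel so older headers that predate them do not raise, and the angle properties keep returning `-999` (now spelled `NOT_A_FLOAT`) when the optic is out of the beam. A sketch of both fallback patterns; the keyword names and the sentinel string are made up for illustration:

```python
from astropy.io import fits

NOT_A_FLOAT = -999
HEADER_KEY_NOT_FOUND = "key not found"  # assumed sentinel value, for illustration only

header = fits.Header()
header["POLANGLE"] = "none"  # polarizer out of the beam; no TRACKMODE keyword at all

# Optional keyword: absent in older headers, so fall back to a sentinel instead of raising.
tracking_mode = header.get("TRACKMODE", HEADER_KEY_NOT_FOUND)

# Angle keyword: present but non-numeric when the optic is not in the beam.
try:
    angle = float(header["POLANGLE"])
except ValueError:
    angle = NOT_A_FLOAT

print(tracking_mode, angle)  # -> key not found -999
```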
dkist_processing_common/parsers/near_bud.py
@@ -66,14 +66,14 @@ class NearFloatBud(Stem):
         -------
         The mean value associated with this input key
         """
-        value_set = list(self.key_to_petal_dict.values())
-        biggest_value = max(value_set)
-        smallest_value = min(value_set)
+        value_list = list(self.key_to_petal_dict.values())
+        biggest_value = max(value_list)
+        smallest_value = min(value_list)
         if biggest_value - smallest_value > self.tolerance:
             raise ValueError(
                 f"{self.stem_name} values are not close enough. Max: {biggest_value}, Min: {smallest_value}, Tolerance: {self.tolerance}"
             )
-        return mean(value_set)
+        return mean(value_list)
 
 
 class TaskNearFloatBud(NearFloatBud):
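The tolerance-check-then-mean behaviour of `NearFloatBud` is unchanged by the `value_set` to `value_list` rename; a standalone sketch of that logic with made-up numbers:

```python
from statistics import mean


def tolerance_checked_mean(values: list[float], tolerance: float) -> float:
    """Average values only if their spread is within the allowed tolerance."""
    if max(values) - min(values) > tolerance:
        raise ValueError(f"values are not close enough: max={max(values)}, min={min(values)}")
    return mean(values)


print(tolerance_checked_mean([10.01, 10.02, 9.99], tolerance=0.05))  # ~10.0067
```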
dkist_processing_common/parsers/observing_program_id_bud.py
@@ -0,0 +1,24 @@
+"""Observing Program Id parser."""
+
+from typing import Callable
+
+from dkist_processing_common.models.fits_access import MetadataKey
+from dkist_processing_common.parsers.id_bud import TaskContributingIdsBud
+from dkist_processing_common.parsers.task import passthrough_header_ip_task
+
+
+class TaskContributingObservingProgramExecutionIdsBud(TaskContributingIdsBud):
+    """Class to create a Bud for the supporting observing_program_execution_ids."""
+
+    def __init__(
+        self,
+        constant_name: str,
+        ip_task_types: str | list[str],
+        task_type_parsing_function: Callable = passthrough_header_ip_task,
+    ):
+        super().__init__(
+            constant_name=constant_name,
+            metadata_key=MetadataKey.observing_program_execution_id,
+            ip_task_types=ip_task_types,
+            task_type_parsing_function=task_type_parsing_function,
+        )
dkist_processing_common/parsers/proposal_id_bud.py
@@ -2,21 +2,27 @@
 
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.models.fits_access import MetadataKey
+from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.id_bud import ContributingIdsBud
-from dkist_processing_common.parsers.id_bud import IdBud
+from dkist_processing_common.parsers.unique_bud import TaskUniqueBud
 
 
-class ProposalIdBud(IdBud):
+class ProposalIdBud(TaskUniqueBud):
     """Class to create a Bud for the proposal_id."""
 
     def __init__(self):
-        super().__init__(constant_name=BudName.proposal_id, metadata_key=MetadataKey.proposal_id)
+        super().__init__(
+            constant_name=BudName.proposal_id,
+            metadata_key=MetadataKey.proposal_id,
+            ip_task_types=TaskName.observe,
+        )
 
 
 class ContributingProposalIdsBud(ContributingIdsBud):
-    """Class to create a Bud for the proposal_ids."""
+    """Class to create a Bud for the supporting proposal_ids."""
 
     def __init__(self):
         super().__init__(
-            stem_name=BudName.contributing_proposal_ids, metadata_key=MetadataKey.proposal_id
+            constant_name=BudName.contributing_proposal_ids,
+            metadata_key=MetadataKey.proposal_id,
         )
dkist_processing_common/parsers/single_value_single_key_flower.py
@@ -2,7 +2,6 @@
 
 from enum import StrEnum
 
-from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
 
dkist_processing_common/parsers/time.py
@@ -22,7 +22,6 @@ from dkist_processing_common.parsers.single_value_single_key_flower import (
 )
 from dkist_processing_common.parsers.task import passthrough_header_ip_task
 from dkist_processing_common.parsers.unique_bud import TaskUniqueBud
-from dkist_processing_common.parsers.unique_bud import UniqueBud
 
 
 class ObsIpStartTimeBud(TaskUniqueBud):
@@ -36,15 +35,52 @@ class ObsIpStartTimeBud(TaskUniqueBud):
         )
 
 
-class CadenceBudBase(UniqueBud):
-    """Base class for all Cadence Buds."""
+class TaskDatetimeBudBase(Stem):
+    """
+    Base class for making datetime-related buds.
 
-    def __init__(self, constant_name: str):
-        super().__init__(constant_name, metadata_key=MetadataKey.time_obs)
+    Returns a tuple of sorted values converted from datetimes to unix seconds.
+
+    Complicated parsing of the header into a task type can be achieved by passing in a different
+    header task parsing function.
+
+    Parameters
+    ----------
+    stem_name
+        The name for the constant to be defined
+
+    metadata_key
+        The metadata key associated with the constant
+
+    ip_task_types
+        Only consider objects whose parsed header IP task type matches a string in this list
+
+    header_type_parsing_func
+        The function used to convert a header into an IP task type
+    """
+
+    key_to_petal_dict: dict[str, float]
+
+    def __init__(
+        self,
+        stem_name: str,
+        metadata_key: str | StrEnum,
+        ip_task_types: str | list[str],
+        task_type_parsing_function: Callable = passthrough_header_ip_task,
+    ):
+        super().__init__(stem_name=stem_name)
+
+        if isinstance(metadata_key, StrEnum):
+            metadata_key = metadata_key.name
+        self.metadata_key = metadata_key
+        if isinstance(ip_task_types, str):
+            ip_task_types = [ip_task_types]
+        self.ip_task_types = [task.casefold() for task in ip_task_types]
+        self.header_parsing_function = task_type_parsing_function
 
     def setter(self, fits_obj: L0FitsAccess) -> float | Type[SpilledDirt]:
         """
-        If the file is an observe file, its DATE-OBS value is stored as unix seconds.
+        Store the metadata key datetime value as unix seconds if the task type is in the desired types.
 
         Parameters
         ----------
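The new `TaskDatetimeBudBase.__init__` normalizes its inputs before use: a `StrEnum` metadata key is stored by its attribute name and a single task-type string is wrapped into a case-folded list. The same normalization, pulled out as a standalone sketch (`Key` is a stand-in enum, not the library's `MetadataKey`):

```python
from enum import StrEnum


class Key(StrEnum):
    """Stand-in for MetadataKey, mapping an attribute name to a FITS keyword."""

    time_obs = "DATE-OBS"


def normalize(metadata_key: str | StrEnum, ip_task_types: str | list[str]):
    if isinstance(metadata_key, StrEnum):
        metadata_key = metadata_key.name  # keep the attribute name, not the keyword value
    if isinstance(ip_task_types, str):
        ip_task_types = [ip_task_types]
    return metadata_key, [task.casefold() for task in ip_task_types]


print(normalize(Key.time_obs, "OBSERVE"))  # -> ('time_obs', ['observe'])
```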
@@ -52,16 +88,45 @@
             The input fits object
         Returns
         -------
-        The observe time in seconds
+        The datetime in seconds
         """
-        if fits_obj.ip_task_type.casefold() == TaskName.observe.value.casefold():
+        task = self.header_parsing_function(fits_obj)
+
+        if task.casefold() in self.ip_task_types:
             return (
                 datetime.fromisoformat(getattr(fits_obj, self.metadata_key))
                 .replace(tzinfo=timezone.utc)
                 .timestamp()
             )
+
         return SpilledDirt
 
+    def getter(self, key: Hashable) -> tuple[float, ...]:
+        """
+        Return a tuple of sorted times in unix seconds.
+
+        Parameters
+        ----------
+        key
+            The input key
+
+        Returns
+        -------
+        A tuple that is sorted times in unix seconds
+        """
+        return tuple(sorted(list(self.key_to_petal_dict.values())))
+
+
+class CadenceBudBase(TaskDatetimeBudBase):
+    """Base class for all Cadence Buds."""
+
+    def __init__(self, constant_name: str):
+        super().__init__(
+            stem_name=constant_name,
+            metadata_key=MetadataKey.time_obs,
+            ip_task_types=TaskName.observe,
+        )
+
 
 class AverageCadenceBud(CadenceBudBase):
     """Class for the average cadence Bud."""
@@ -82,7 +147,7 @@ class AverageCadenceBud(CadenceBudBase):
         -------
         The mean value of the cadences of the input frames
         """
-        return np.mean(np.diff(sorted(list(self.key_to_petal_dict.values()))))
+        return np.mean(np.diff(super().getter(key)))
 
 
 class MaximumCadenceBud(CadenceBudBase):
@@ -104,7 +169,7 @@
         -------
         The maximum cadence between frames
         """
-        return np.max(np.diff(sorted(list(self.key_to_petal_dict.values()))))
+        return np.max(np.diff(super().getter(key)))
 
 
 class MinimumCadenceBud(CadenceBudBase):
@@ -126,7 +191,7 @@
         -------
         The minimum cadence between frames
         """
-        return np.min(np.diff(sorted(list(self.key_to_petal_dict.values()))))
+        return np.min(np.diff(super().getter(key)))
 
 
 class VarianceCadenceBud(CadenceBudBase):
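All of the cadence buds now share the base getter's sorted tuple of unix seconds and differ only in the reduction they apply to `np.diff` of that tuple, for example:

```python
import numpy as np

# Sorted observe-frame times in unix seconds (toy values).
times = np.array([0.0, 10.0, 20.5, 30.0])
cadences = np.diff(times)  # -> [10. , 10.5,  9.5]

print(np.mean(cadences), np.max(cadences), np.min(cadences), np.var(cadences))
```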
@@ -147,11 +212,44 @@
         -------
         Return the variance of the cadences over the input frames
         """
-        return np.var(np.diff(sorted(list(self.key_to_petal_dict.values()))))
+        return np.var(np.diff(super().getter(key)))
+
+
+class TaskDateBeginBud(TaskDatetimeBudBase):
+    """Class for the date begin task Bud."""
+
+    def __init__(
+        self,
+        constant_name: str,
+        ip_task_types: str | list[str],
+        task_type_parsing_function: Callable = passthrough_header_ip_task,
+    ):
+        super().__init__(
+            stem_name=constant_name,
+            metadata_key=MetadataKey.time_obs,
+            ip_task_types=ip_task_types,
+            task_type_parsing_function=task_type_parsing_function,
+        )
+
+    def getter(self, key) -> str:
+        """
+        Return the earliest date begin for the ip task type converted from unix seconds to datetime string.
+
+        Parameters
+        ----------
+        key
+            The input key
+        Returns
+        -------
+        Return the minimum date begin as a datetime string
+        """
+        min_time = super().getter(key)[0]
+        min_time_dt = datetime.fromtimestamp(min_time, tz=timezone.utc)
+        return min_time_dt.strftime("%Y-%m-%dT%H:%M:%S.%f")
 
 
-class TimeFlowerBase(SingleValueSingleKeyFlower):
-    """Base task for SingleValueSingleKeyFlowers that need to round their values to avoid value jitter."""
+class RoundTimeFlowerBase(SingleValueSingleKeyFlower):
+    """Base flower for SingleValueSingleKeyFlowers that need to round their values to avoid value jitter."""
 
     def setter(self, fits_obj: L0FitsAccess):
         """
@@ -169,7 +267,7 @@ class TimeFlowerBase(SingleValueSingleKeyFlower):
         return round(raw_value, EXP_TIME_ROUND_DIGITS)
 
 
-class ExposureTimeFlower(TimeFlowerBase):
+class ExposureTimeFlower(RoundTimeFlowerBase):
     """For tagging the frame FPA exposure time."""
 
     def __init__(self):
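The renamed `RoundTimeFlowerBase` still exists to round exposure-time values before they are used as grouping keys, so float jitter in the headers cannot split identical exposures into separate groups. A sketch of that effect; `ROUND_DIGITS` stands in for the library's `EXP_TIME_ROUND_DIGITS` constant, whose actual value is not shown in this diff:

```python
ROUND_DIGITS = 6  # assumed stand-in for EXP_TIME_ROUND_DIGITS

raw_values = [10.000000071, 10.000000034, 9.999999981]  # jittery header values
print({round(value, ROUND_DIGITS) for value in raw_values})  # -> {10.0}
```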
@@ -178,7 +276,7 @@ class ExposureTimeFlower(TimeFlowerBase):
         )
 
 
-class ReadoutExpTimeFlower(TimeFlowerBase):
+class ReadoutExpTimeFlower(RoundTimeFlowerBase):
     """For tagging the exposure time of each readout that contributes to an FPA."""
 
     def __init__(self):
@@ -188,18 +286,18 @@ class ReadoutExpTimeFlower(TimeFlowerBase):
         )
 
 
-class TaskTimeBudBase(Stem):
+class TaskRoundTimeBudBase(Stem):
     """
-    Base class for making time-related buds that are computed for specific task types.
+    Base class for making buds that need a set of rounded times for computing for specific task types.
 
-    By "time-related" we mean values that generally need rounding when ingested into the database.
+    Metadata key values are already floats. Returns tuple of sorted unique rounded values.
 
     Complicated parsing of the header into a task type can be achieved by passing in a different
     header task parsing function.
 
     Parameters
     ----------
-    constant_name
+    stem_name
         The name for the constant to be defined
 
     metadata_key
@@ -212,6 +310,8 @@ class TaskTimeBudBase(Stem):
         The function used to convert a header into an IP task type
     """
 
+    key_to_petal_dict: dict[str, float]
+
     def __init__(
         self,
         stem_name: str,
@@ -229,8 +329,18 @@ class TaskTimeBudBase(Stem):
         self.ip_task_types = [task.casefold() for task in ip_task_types]
         self.header_parsing_function = header_task_parsing_func
 
-    def setter(self, fits_obj: L0FitsAccess):
-        """Return the desired metadata key only if the parsed task type matches the Bud's task type."""
+    def setter(self, fits_obj: L0FitsAccess) -> float | Type[SpilledDirt]:
+        """
+        Store the metadata key value if the parsed task type is in the desired types.
+
+        Parameters
+        ----------
+        fits_obj
+            The input fits object
+        Returns
+        -------
+        The rounded time
+        """
         task = self.header_parsing_function(fits_obj)
 
         if task.casefold() in self.ip_task_types:
@@ -240,12 +350,22 @@ class TaskTimeBudBase(Stem):
         return SpilledDirt
 
     def getter(self, key: Hashable) -> tuple[float, ...]:
-        """Return a tuple of all the unique values found."""
-        value_tuple = tuple(sorted(set(self.key_to_petal_dict.values())))
-        return value_tuple
+        """
+        Return a tuple of the sorted unique values found.
+
+        Parameters
+        ----------
+        key
+            The input key
+
+        Returns
+        -------
+        A tuple that is the sorted set of unique times
+        """
+        return tuple(sorted(set(self.key_to_petal_dict.values())))
 
 
-class TaskExposureTimesBud(TaskTimeBudBase):
+class TaskExposureTimesBud(TaskRoundTimeBudBase):
     """Produce a tuple of all FPA exposure times present in the dataset for a specific ip task type."""
 
     def __init__(
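The `TaskRoundTimeBudBase.getter` collapses the per-file values into the unique sorted tuple reported for the task type; with a toy `key_to_petal_dict`:

```python
# Toy stand-in for key_to_petal_dict: filename -> rounded exposure time.
exposure_times = {"frame_a.fits": 10.0, "frame_b.fits": 10.0, "frame_c.fits": 0.5}

print(tuple(sorted(set(exposure_times.values()))))  # -> (0.5, 10.0)
```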
@@ -262,7 +382,7 @@ class TaskExposureTimesBud(TaskTimeBudBase):
         )
 
 
-class TaskReadoutExpTimesBud(TaskTimeBudBase):
+class TaskReadoutExpTimesBud(TaskRoundTimeBudBase):
     """Produce a tuple of all sensor readout exposure times present in the dataset for a specific task type."""
 
     def __init__(
dkist_processing_common/tasks/mixin/quality/_metrics.py
@@ -1356,13 +1356,15 @@ class _WavecalQualityMixin:
         Note that the residuals are the *unweighed* residuals.
         """
         weight_data = np.ones(input_wavelength.size) if weights is None else weights
-        prepared_weights = fit_result.prepared_weights
+        prepared_weights = np.sqrt(weight_data / np.sum(weight_data))
         residuals = fit_result.minimizer_result.residual / prepared_weights
         residuals[~np.isfinite(residuals)] = 0.0
+        best_fit_atlas = input_spectrum - residuals
         normalized_residuals = residuals / input_spectrum
 
-        best_fit_atlas = fit_result.best_fit_atlas
-        best_fit_wavelength = fit_result.best_fit_wavelength_vector
+        best_fit_header = fit_result.wavelength_parameters.to_header(axis_num=1)
+        wcs = WCS(best_fit_header)
+        best_fit_wavelength = wcs.spectral.pixel_to_world(np.arange(input_spectrum.size))
 
         finite_idx = (
             np.isfinite(input_wavelength)
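The wavecal metric now rebuilds the quantities it needs instead of reading them off the fit result: weights are normalized as sqrt(w / sum(w)) before unweighting the lmfit-style residuals, and the best-fit atlas is recovered as the input spectrum minus those residuals. A numpy sketch of just that arithmetic with toy arrays:

```python
import numpy as np

weight_data = np.array([1.0, 2.0, 1.0])
prepared_weights = np.sqrt(weight_data / np.sum(weight_data))

weighted_residuals = np.array([0.05, -0.02, 0.01])  # stand-in for minimizer_result.residual
residuals = weighted_residuals / prepared_weights
residuals[~np.isfinite(residuals)] = 0.0

input_spectrum = np.array([1.00, 0.90, 1.10])
best_fit_atlas = input_spectrum - residuals
print(best_fit_atlas)
```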
@@ -1376,7 +1378,7 @@ class _WavecalQualityMixin:
         data = {
             "input_wavelength_nm": input_wavelength.to_value(u.nm)[finite_idx].tolist(),
             "input_spectrum": input_spectrum[finite_idx].tolist(),
-            "best_fit_wavelength_nm": best_fit_wavelength[finite_idx].tolist(),
+            "best_fit_wavelength_nm": best_fit_wavelength.to_value(u.nm)[finite_idx].tolist(),
             "best_fit_atlas": best_fit_atlas[finite_idx].tolist(),
             "normalized_residuals": normalized_residuals[finite_idx].tolist(),
             "weights": None if weights is None else weight_data[finite_idx].tolist(),