dkist-processing-common 12.1.0rc1__py3-none-any.whl → 12.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -285,18 +285,24 @@ class ConstantsBase:
  def dark_observing_program_execution_ids(self) -> list[str]:
  """Return the observing program execution ids constant for the dark task."""
  observing_programs = self._db_dict[BudName.dark_observing_program_execution_ids]
+ if isinstance(observing_programs, str):
+ observing_programs = [observing_programs]
  return list(observing_programs)

  @property
  def solar_gain_observing_program_execution_ids(self) -> list[str]:
  """Return the observing program execution ids constant for the solar_gain task."""
  observing_programs = self._db_dict[BudName.solar_gain_observing_program_execution_ids]
+ if isinstance(observing_programs, str):
+ observing_programs = [observing_programs]
  return list(observing_programs)

  @property
  def polcal_observing_program_execution_ids(self) -> list[str]:
  """Return the observing program execution ids constant."""
  observing_programs = self._db_dict[BudName.polcal_observing_program_execution_ids]
+ if isinstance(observing_programs, str):
+ observing_programs = [observing_programs]
  return list(observing_programs)

  @property
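
Note: the new `isinstance` guard matters because iterating a bare string splits it into characters. A minimal sketch of the behavior, using a hypothetical ID value:

    observing_programs = "OP1"  # a single ID stored as a bare string
    assert list(observing_programs) == ["O", "P", "1"]  # what the unguarded code would return
    if isinstance(observing_programs, str):
        observing_programs = [observing_programs]
    assert list(observing_programs) == ["OP1"]  # normalized to a one-element list
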
@@ -467,14 +473,14 @@ class ConstantsBase:
  return self._db_dict[BudName.solar_gain_gos_polarizer_status]

  @property
- def dark_gos_polarizer_angle(self) -> float:
+ def dark_gos_polarizer_angle(self) -> str:
  """Return the gos polarizer angle constant for the dark task."""
- return self._db_dict[BudName.dark_gos_polarizer_angle]
+ return str(self._db_dict[BudName.dark_gos_polarizer_angle])

  @property
- def solar_gain_gos_polarizer_angle(self) -> float:
+ def solar_gain_gos_polarizer_angle(self) -> str:
  """Return the gos polarizer angle constant for the solar gain task."""
- return self._db_dict[BudName.solar_gain_gos_polarizer_angle]
+ return str(self._db_dict[BudName.solar_gain_gos_polarizer_angle])

  @property
  def dark_gos_retarder_status(self) -> str:
@@ -487,14 +493,14 @@ class ConstantsBase:
  return self._db_dict[BudName.solar_gain_gos_retarder_status]

  @property
- def dark_gos_retarder_angle(self) -> float:
+ def dark_gos_retarder_angle(self) -> str:
  """Return the gos retarder angle constant for the dark task."""
- return self._db_dict[BudName.dark_gos_retarder_angle]
+ return str(self._db_dict[BudName.dark_gos_retarder_angle])

  @property
- def solar_gain_gos_retarder_angle(self) -> float:
+ def solar_gain_gos_retarder_angle(self) -> str:
  """Return the gos retarder angle constant for the solar gain task."""
- return self._db_dict[BudName.solar_gain_gos_retarder_angle]
+ return str(self._db_dict[BudName.solar_gain_gos_retarder_angle])

  @property
  def dark_gos_level0_status(self) -> str:
@@ -0,0 +1,35 @@
+ """Autocomplete access to dataset extra header sections."""
+
+ from enum import StrEnum
+
+
+ class DatasetExtraHeaderSection(StrEnum):
+ """Enum defining the possible header sections for dataset extras."""
+
+ common = "common"
+ aggregate = "aggregate"
+ iptask = "iptask"
+ gos = "gos"
+ wavecal = "wavecal"
+ atlas = "atlas"
+ test = "test"
+
+
+ class DatasetExtraType(StrEnum):
+ """Enum defining options for dataset extra names."""
+
+ dark = "DARK"
+ background_light = "BACKGROUND LIGHT"
+ solar_gain = "SOLAR GAIN"
+ characteristic_spectra = "CHARACTERISTIC SPECTRA"
+ modulation_state_offsets = "MODULATION STATE OFFSETS"
+ beam_angles = "BEAM ANGLES"
+ spectral_curvature_shifts = "SPECTRAL CURVATURE SHIFTS"
+ wavelength_calibration_input_spectrum = "WAVELENGTH CALIBRATION INPUT SPECTRUM"
+ wavelength_calibration_reference_spectrum = "WAVELENGTH CALIBRATION REFERENCE SPECTRUM"
+ reference_wavelength_vector = "REFERENCE WAVELENGTH VECTOR"
+ demodulation_matrices = "DEMODULATION MATRICES"
+ polcal_as_science = "POLCAL AS SCIENCE"
+ bad_pixel_map = "BAD PIXEL MAP"
+ beam_offsets = "BEAM OFFSETS"
+ spectral_curvature_scales = "SPECTRAL CURVATURE SCALES"
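
Note: because both new classes derive from `StrEnum`, members behave as plain strings wherever FITS header values or section keys are expected. A small illustration using values from the enum above:

    from dkist_processing_common.models.extras import DatasetExtraType

    assert DatasetExtraType.solar_gain == "SOLAR GAIN"  # StrEnum members compare equal to str
    assert DatasetExtraType.solar_gain.replace(" ", "-") == "SOLAR-GAIN"  # str methods apply
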
@@ -38,6 +38,8 @@ class StemName(StrEnum):
  dataset_inventory = "DATASET_INVENTORY"
  asdf = "ASDF"
  quality_report = "QUALITY_REPORT"
+ # Dataset extras
+ extra = "EXTRA"


  class Tag:
@@ -450,3 +452,14 @@ class Tag:
  An asdf tag
  """
  return cls.format_tag(StemName.asdf)
+
+ @classmethod
+ def extra(cls) -> str:
+ """
+ Return a dataset extra tag.
+
+ Returns
+ -------
+ A dataset extra tag
+ """
+ return cls.format_tag(StemName.extra)
@@ -1,16 +1,17 @@
  """Base classes for ID bud parsing."""

+ from collections import Counter
  from enum import StrEnum
  from typing import Callable
  from typing import Type

- from dkist_processing_common.models.flower_pot import SetStem
+ from dkist_processing_common.models.flower_pot import ListStem
  from dkist_processing_common.models.flower_pot import SpilledDirt
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
  from dkist_processing_common.parsers.task import passthrough_header_ip_task


- class ContributingIdsBud(SetStem):
+ class ContributingIdsBud(ListStem):
  """Base class for contributing ID buds."""

  def __init__(self, constant_name: str, metadata_key: str | StrEnum):
@@ -35,13 +36,15 @@ class ContributingIdsBud(SetStem):

  def getter(self) -> tuple[str, ...]:
  """
- Get all ids seen for any type of frame.
+ Get all ids seen for any type of frame, sorted by the number of appearances of that ID.

  Returns
  -------
  IDs from all types of frames
  """
- return tuple(self.value_set)
+ counts = Counter(self.value_list) # Count the number of appearances of each ID
+ sorted_ids = tuple(str(item) for item, count in counts.most_common())
+ return sorted_ids


  class TaskContributingIdsBud(ContributingIdsBud):
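
Note: `Counter.most_common()` orders IDs from most to least frequent, so the first element of the returned tuple is the ID that contributed the most frames; the new `write_extra.py` below relies on this ordering for the OBSPR_ID and EXTOBSID header keys. A minimal sketch with hypothetical per-frame values:

    from collections import Counter

    value_list = ["OP2", "OP1", "OP1", "OP3", "OP1", "OP2"]  # hypothetical per-frame IDs
    counts = Counter(value_list)
    assert tuple(str(item) for item, count in counts.most_common()) == ("OP1", "OP2", "OP3")
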
@@ -2,18 +2,14 @@

  import logging
  from abc import ABC
- from itertools import chain
  from pathlib import Path
  from typing import Iterable

- from dkist_processing_common.codecs.quality import quality_data_decoder
  from dkist_processing_common.codecs.quality import quality_data_encoder
  from dkist_processing_common.models.message import CatalogFrameMessage
  from dkist_processing_common.models.message import CatalogFrameMessageBody
  from dkist_processing_common.models.message import CatalogObjectMessage
  from dkist_processing_common.models.message import CatalogObjectMessageBody
- from dkist_processing_common.models.message import CreateQualityReportMessage
- from dkist_processing_common.models.message import CreateQualityReportMessageBody
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks.mixin.globus import GlobusMixin
  from dkist_processing_common.tasks.mixin.interservice_bus import InterserviceBusMixin
@@ -62,15 +58,19 @@ class TransferL1Data(TransferDataBase, GlobusMixin):
  with self.telemetry_span("Upload quality data"):
  self.transfer_quality_data()

- with self.telemetry_span("Upload science frames"):
+ with self.telemetry_span("Upload output frames"):
  self.transfer_output_frames()

  def transfer_output_frames(self):
- """Create a Globus transfer for all output data."""
- transfer_items = self.build_output_frame_transfer_list()
+ """Create a Globus transfer for all output data, as well as any available dataset extras."""
+ output_transfer_items = self.build_output_frame_transfer_list()
+ dataset_extra_transfer_items = self.build_dataset_extra_transfer_list()
+ transfer_items = output_transfer_items + dataset_extra_transfer_items

  logger.info(
  f"Preparing globus transfer {len(transfer_items)} items: "
+ f"{len(output_transfer_items)} output frames. "
+ f"{len(dataset_extra_transfer_items)} dataset extras. "
  f"recipe_run_id={self.recipe_run_id}. "
  f"transfer_items={transfer_items[:3]}..."
  )
@@ -189,7 +189,9 @@ class SubmitDatasetMetadata(L1OutputDataBase):
  class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
  """Task class for publishing Catalog and Quality Messages."""

- def frame_messages(self, paths: Iterable[Path]) -> list[CatalogFrameMessage]:
+ def frame_messages(
+ self, paths: Iterable[Path], folder_modifier: str | None = None
+ ) -> list[CatalogFrameMessage]:
  """
  Create the frame messages.

@@ -197,6 +199,8 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
  ----------
  paths
  The input paths for which to publish frame messages
+ folder_modifier
+ A subdirectory to use if the files in paths are not in the base directory

  Returns
  -------
@@ -204,7 +208,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
  """
  message_bodies = [
  CatalogFrameMessageBody(
- objectName=self.format_object_key(path=p),
+ objectName=self.format_object_key(path=p, folder_modifier=folder_modifier),
  conversationId=str(self.recipe_run_id),
  bucket=self.destination_bucket,
  )
@@ -233,7 +237,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
  message_bodies = [
  CatalogObjectMessageBody(
  objectType=object_type,
- objectName=self.format_object_key(p),
+ objectName=self.format_object_key(path=p),
  bucket=self.destination_bucket,
  conversationId=str(self.recipe_run_id),
  groupId=self.constants.dataset_id,
@@ -246,19 +250,24 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
  def run(self) -> None:
  """Run method for this task."""
  with self.telemetry_span("Gather output data"):
- frames = self.read(tags=self.output_frame_tags)
- movies = self.read(tags=[Tag.output(), Tag.movie()])
+ frames = self.read(
+ tags=self.output_frame_tags
+ ) # frames is kept as a generator as it is much longer than the other file categories
+ extras = list(self.read(tags=self.extra_frame_tags))
+ movies = list(self.read(tags=[Tag.output(), Tag.movie()]))
  quality_data = self.read(tags=[Tag.output(), Tag.quality_data()])
  with self.telemetry_span("Create message objects"):
  messages = []
  messages += self.frame_messages(paths=frames)
  frame_message_count = len(messages)
+ messages += self.frame_messages(paths=extras, folder_modifier="extra")
+ extra_message_count = len(extras)
  messages += self.object_messages(paths=movies, object_type="MOVIE")
- object_message_count = len(messages) - frame_message_count
+ object_message_count = len(movies)
  dataset_has_quality_data = self.dataset_has_quality_data
  if dataset_has_quality_data:
  messages += self.object_messages(paths=quality_data, object_type="QDATA")
  with self.telemetry_span(
- f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
+ f"Publish messages: {frame_message_count = }, {extra_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
  ):
  self.interservice_bus_publish(messages=messages)
@@ -22,19 +22,23 @@ class OutputDataBase(WorkflowTaskBase, ABC):
  """Get the destination bucket."""
  return self.metadata_store_recipe_run.configuration.destination_bucket

- def format_object_key(self, path: Path) -> str:
+ def format_object_key(self, path: Path, folder_modifier: str | None = None) -> str:
  """
  Convert output paths into object store keys.

  Parameters
  ----------
  path: the Path to convert
+ folder_modifier: optional folder name to insert into the path

  Returns
  -------
  formatted path in the object store
  """
- object_key = self.destination_folder / Path(path.name)
+ if folder_modifier:
+ object_key = self.destination_folder / Path(folder_modifier) / Path(path.name)
+ else:
+ object_key = self.destination_folder / Path(path.name)
  return str(object_key)

  @property
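
Note: when `folder_modifier` is given, the object key gains one extra directory level between the destination folder and the file name. A sketch with hypothetical POSIX-style paths:

    from pathlib import Path

    destination_folder = Path("proposal/dataset")  # hypothetical destination folder
    path = Path("/scratch/extra_file.fits")

    assert str(destination_folder / Path(path.name)) == "proposal/dataset/extra_file.fits"
    assert (
        str(destination_folder / Path("extra") / Path(path.name))
        == "proposal/dataset/extra/extra_file.fits"
    )
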
@@ -52,6 +56,11 @@ class OutputDataBase(WorkflowTaskBase, ABC):
  """Tags that uniquely identify L1 fits frames i.e. the dataset-inventory-able frames."""
  return [Tag.output(), Tag.frame()]

+ @property
+ def extra_frame_tags(self) -> list[str]:
+ """Tags that uniquely identify dataset extra fits frames."""
+ return [Tag.output(), Tag.extra()]
+

  class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):
  """Base class for transferring data from scratch to somewhere else."""
@@ -73,9 +82,21 @@ class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):
  """Build a list of GlobusTransfer items corresponding to all OUTPUT (i.e., L1) frames."""
  science_frame_paths: list[Path] = list(self.read(tags=self.output_frame_tags))

+ return self.build_transfer_list(science_frame_paths)
+
+ def build_dataset_extra_transfer_list(self) -> list[GlobusTransferItem]:
+ """Build a list of GlobusTransfer items corresponding to all extra dataset files."""
+ extra_paths: list[Path] = list(self.read(tags=self.extra_frame_tags))
+
+ return self.build_transfer_list(paths=extra_paths, destination_folder_modifier="extra")
+
+ def build_transfer_list(
+ self, paths: list[Path], destination_folder_modifier: str | None = None
+ ) -> list[GlobusTransferItem]:
+ """Given a list of paths, build a list of GlobusTransfer items."""
  transfer_items = []
- for p in science_frame_paths:
- object_key = self.format_object_key(p)
+ for p in paths:
+ object_key = self.format_object_key(path=p, folder_modifier=destination_folder_modifier)
  destination_path = Path(self.destination_bucket, object_key)
  item = GlobusTransferItem(
  source_path=p,
@@ -0,0 +1,333 @@
+ """Classes to support the generation of dataset extras."""
+
+ import uuid
+ from abc import ABC
+ from abc import abstractmethod
+ from datetime import datetime
+
+ import numpy as np
+ from astropy.io import fits
+ from astropy.time import Time
+ from dkist_fits_specifications.utils.formatter import reformat_dataset_extra_header
+ from dkist_header_validator.spec_validators import spec_extras_validator
+
+ from dkist_processing_common.codecs.fits import fits_hdulist_encoder
+ from dkist_processing_common.models.extras import DatasetExtraHeaderSection
+ from dkist_processing_common.models.extras import DatasetExtraType
+ from dkist_processing_common.models.tags import Tag
+ from dkist_processing_common.models.task_name import TaskName
+ from dkist_processing_common.tasks.write_l1_base import WriteL1Base
+
+
+ class WriteL1DatasetExtras(WriteL1Base, ABC):
+ """Class supporting the construction of dataset extras."""
+
+ def dataset_extra_headers(
+ self,
+ filename: str,
+ task_type: TaskName,
+ extra_name: DatasetExtraType,
+ end_time: str,
+ total_exposure: float | None = None,
+ readout_exposure: float | None = None,
+ ) -> dict:
+ """Provide common FITS header keys for dataset extras."""
+ # Build task specific header values
+ match task_type:
+ case TaskName.dark:
+ task_specific_observing_program_execution_id = (
+ self.constants.dark_observing_program_execution_ids
+ )
+ task_specific_date_begin = self.constants.dark_date_begin
+ task_specific_raw_frames_per_fpa = (
+ 0 # can be updated in construction of dataset extra if required
+ )
+ task_specific_telescope_tracking_mode = (
+ "None" # can be updated in construction of dataset extra if required
+ )
+ task_specific_coude_table_tracking_mode = (
+ "None" # can be updated in construction of dataset extra if required
+ )
+ task_specific_telescope_scanning_mode = (
+ "None" # can be updated in construction of dataset extra if required
+ )
+ task_specific_average_light_level = self.constants.dark_average_light_level
+ task_specific_average_telescope_elevation = (
+ self.constants.dark_average_telescope_elevation
+ )
+ task_specific_average_coude_table_angle = (
+ self.constants.dark_average_coude_table_angle
+ )
+ task_specific_average_telescope_azimuth = (
+ self.constants.dark_average_telescope_azimuth
+ )
+ task_specific_gos_level3_status = self.constants.dark_gos_level3_status
+ task_specific_gos_level3_lamp_status = self.constants.dark_gos_level3_lamp_status
+ task_specific_gos_polarizer_status = self.constants.dark_gos_polarizer_status
+ task_specific_gos_polarizer_angle = self.constants.dark_gos_polarizer_angle
+ task_specific_gos_retarder_status = self.constants.dark_gos_retarder_status
+ task_specific_gos_retarder_angle = self.constants.dark_gos_retarder_angle
+ task_specific_gos_level0_status = self.constants.dark_gos_level0_status
+ case TaskName.solar_gain:
+ task_specific_observing_program_execution_id = (
+ self.constants.solar_gain_observing_program_execution_ids
+ )
+
+ task_specific_date_begin = self.constants.solar_gain_date_begin
+ task_specific_raw_frames_per_fpa = self.constants.solar_gain_num_raw_frames_per_fpa
+ task_specific_telescope_tracking_mode = (
+ self.constants.solar_gain_telescope_tracking_mode
+ )
+ task_specific_coude_table_tracking_mode = (
+ self.constants.solar_gain_coude_table_tracking_mode
+ )
+ task_specific_telescope_scanning_mode = (
+ self.constants.solar_gain_telescope_scanning_mode
+ )
+ task_specific_average_light_level = self.constants.solar_gain_average_light_level
+ task_specific_average_telescope_elevation = (
+ self.constants.solar_gain_average_telescope_elevation
+ )
+ task_specific_average_coude_table_angle = (
+ self.constants.solar_gain_average_coude_table_angle
+ )
+ task_specific_average_telescope_azimuth = (
+ self.constants.solar_gain_average_telescope_azimuth
+ )
+ task_specific_gos_level3_status = self.constants.solar_gain_gos_level3_status
+ task_specific_gos_level3_lamp_status = (
+ self.constants.solar_gain_gos_level3_lamp_status
+ )
+ task_specific_gos_polarizer_status = self.constants.solar_gain_gos_polarizer_status
+ task_specific_gos_polarizer_angle = self.constants.solar_gain_gos_polarizer_angle
+ task_specific_gos_retarder_status = self.constants.solar_gain_gos_retarder_status
+ task_specific_gos_retarder_angle = self.constants.solar_gain_gos_retarder_angle
+ task_specific_gos_level0_status = self.constants.solar_gain_gos_level0_status
+ case TaskName.polcal:
+ task_specific_observing_program_execution_id = (
+ self.constants.polcal_observing_program_execution_ids
+ )
+
+ task_specific_date_begin = self.constants.polcal_date_begin
+ task_specific_raw_frames_per_fpa = self.constants.polcal_num_raw_frames_per_fpa
+ task_specific_telescope_tracking_mode = (
+ self.constants.polcal_telescope_tracking_mode
+ )
+ task_specific_coude_table_tracking_mode = (
+ self.constants.polcal_coude_table_tracking_mode
+ )
+ task_specific_telescope_scanning_mode = (
+ self.constants.polcal_telescope_scanning_mode
+ )
+ task_specific_average_light_level = self.constants.polcal_average_light_level
+ task_specific_average_telescope_elevation = (
+ self.constants.polcal_average_telescope_elevation
+ )
+ task_specific_average_coude_table_angle = (
+ self.constants.polcal_average_coude_table_angle
+ )
+ task_specific_average_telescope_azimuth = (
+ self.constants.polcal_average_telescope_azimuth
+ )
+ task_specific_gos_level3_status = None
+ task_specific_gos_level3_lamp_status = None
+ task_specific_gos_polarizer_status = None
+ task_specific_gos_polarizer_angle = None
+ task_specific_gos_retarder_status = None
+ task_specific_gos_retarder_angle = None
+ task_specific_gos_level0_status = None
+ case _:
+ raise ValueError(f"Unsupported task type {task_type}")
+
+ start_datetime = datetime.fromisoformat(task_specific_date_begin)
+ end_datetime = datetime.fromisoformat(end_time)
+
+ dataset_extra_header = {
+ DatasetExtraHeaderSection.common: {
+ "BUNIT": "count",
+ "DATE": Time.now().fits,
+ "DATE-BEG": task_specific_date_begin,
+ "DATE-END": end_time,
+ "TELAPSE": (end_datetime - start_datetime).total_seconds(),
+ "DATE-AVG": (start_datetime + (end_datetime - start_datetime) / 2).isoformat(),
+ "TIMESYS": "UTC",
+ "ORIGIN": "National Solar Observatory",
+ "TELESCOP": "Daniel K. Inouye Solar Telescope",
+ "OBSRVTRY": "Haleakala High Altitude Observatory Site",
+ "NETWORK": "NSF-DKIST",
+ "INSTRUME": self.constants.instrument,
+ "OBJECT": "unknown",
+ "CAM_ID": self.constants.camera_id,
+ "CAMERA": self.constants.camera_name,
+ "BITDEPTH": self.constants.camera_bit_depth,
+ "XPOSURE": total_exposure,
+ "TEXPOSUR": readout_exposure,
+ "HWBIN1": self.constants.hardware_binning_x,
+ "HWBIN2": self.constants.hardware_binning_y,
+ "SWBIN1": self.constants.software_binning_x,
+ "SWBIN2": self.constants.software_binning_y,
+ "NSUMEXP": task_specific_raw_frames_per_fpa,
+ "DSETID": self.constants.dataset_id,
+ "PROCTYPE": "L1_EXTRA",
+ "RRUNID": self.recipe_run_id,
+ "RECIPEID": self.metadata_store_recipe_run.recipeInstance.recipeId,
+ "RINSTID": self.metadata_store_recipe_run.recipeInstanceId,
+ "FILENAME": filename,
+ "HEAD_URL": "",
+ "INFO_URL": self.docs_base_url,
+ "CAL_URL": "",
+ "CALVERS": self.version_from_module_name(),
+ "IDSPARID": (
+ parameters.inputDatasetPartId
+ if (parameters := self.metadata_store_input_dataset_parameters)
+ else None
+ ),
+ "IDSOBSID": (
+ observe_frames.inputDatasetPartId
+ if (observe_frames := self.metadata_store_input_dataset_observe_frames)
+ else None
+ ),
+ "IDSCALID": (
+ calibration_frames.inputDatasetPartId
+ if (calibration_frames := self.metadata_store_input_dataset_calibration_frames)
+ else None
+ ),
+ "WKFLVERS": self.workflow_version,
+ "WKFLNAME": self.workflow_name,
+ "MANPROCD": self.workflow_had_manual_intervention,
+ "FILE_ID": uuid.uuid4().hex,
+ "OBSPR_ID": task_specific_observing_program_execution_id[
+ 0
+ ], # The OP IDs are stored sorted by number of appearances of each OP ID in the source task type frames
+ "EXTOBSID": ",".join(task_specific_observing_program_execution_id[1:]),
+ "EXPER_ID": self.constants.experiment_id,
+ "PROP_ID": self.constants.proposal_id,
+ "HLSVERS": self.constants.hls_version,
+ "LINEWAV": self.constants.wavelength,
+ "TELTRACK": (
+ task_specific_telescope_tracking_mode if task_type != TaskName.dark else None
+ ),
+ "TTBLTRCK": (
+ task_specific_coude_table_tracking_mode if task_type != TaskName.dark else None
+ ),
+ "TELSCAN": (
+ task_specific_telescope_scanning_mode if task_type != TaskName.dark else None
+ ),
+ "EXTNAME": extra_name,
+ },
+ DatasetExtraHeaderSection.aggregate: {
+ "AVGLLVL": task_specific_average_light_level,
+ "ATELEVAT": task_specific_average_telescope_elevation,
+ "ATTBLANG": task_specific_average_coude_table_angle,
+ "ATAZIMUT": task_specific_average_telescope_azimuth,
+ },
+ DatasetExtraHeaderSection.iptask: {
+ "IPTASK": "GAIN" if "GAIN" in task_type else task_type,
+ },
+ DatasetExtraHeaderSection.gos: {
+ "LVL3STAT": task_specific_gos_level3_status,
+ "LAMPSTAT": task_specific_gos_level3_lamp_status,
+ "LVL2STAT": task_specific_gos_polarizer_status,
+ "POLANGLE": task_specific_gos_polarizer_angle,
+ "LVL1STAT": task_specific_gos_retarder_status,
+ "RETANGLE": task_specific_gos_retarder_angle,
+ "LVL0STAT": task_specific_gos_level0_status,
+ },
+ }
+
+ # Remove specific headers from dark frames as they don't have constants to fill them
+ if task_type == TaskName.dark:
+ for key in ["TELTRACK", "TTBLTRCK", "TELSCAN"]:
+ del dataset_extra_header[DatasetExtraHeaderSection.common][key]
+
+ # Remove specific headers from polcal frames as they don't have constants to fill them
+ if task_type == TaskName.polcal:
+ for key in [
+ "LVL3STAT",
+ "LAMPSTAT",
+ "LVL2STAT",
+ "POLANGLE",
+ "LVL1STAT",
+ "RETANGLE",
+ "LVL0STAT",
+ ]:
+ del dataset_extra_header[DatasetExtraHeaderSection.gos][key]
+
+ return dataset_extra_header
+
+ def build_dataset_extra_header(
+ self,
+ sections: list[DatasetExtraHeaderSection],
+ filename: str,
+ task_type: TaskName,
+ extra_name: DatasetExtraType,
+ total_exposure: float | None = None,
+ readout_exposure: float | None = None,
+ end_time: str | None = None,
+ ) -> fits.Header:
+ """Build FITS header for dataset extra file."""
+ header = fits.Header()
+ all_section_headers = self.dataset_extra_headers(
+ filename=filename,
+ task_type=task_type,
+ total_exposure=total_exposure,
+ readout_exposure=readout_exposure,
+ extra_name=extra_name,
+ end_time=end_time,
+ )
+ for section in sections:
+ header.update(all_section_headers[section].items())
+ return header
+
+ def format_extra_filename(self, extra_name: DatasetExtraType | str, detail: str | None = None):
+ """Format the filename of dataset extras for consistency."""
+ base_filename = f"{self.constants.instrument}_{self.constants.dataset_id}_{extra_name.replace(' ', '-')}"
+ if detail:
+ base_filename += "_" + detail
+ filename_counter = str(self.filename_counter.increment(base_filename))
+ return f"{base_filename}_{filename_counter}.fits"
+
+ def assemble_and_write_dataset_extra(
+ self,
+ data: np.ndarray | list[np.ndarray],
+ header: fits.Header | list[fits.Header],
+ filename: str,
+ ):
+ """Given the data and header information, write the dataset extra."""
+ if isinstance(data, list) and isinstance(header, list):
+ if len(data) != len(header):
+ raise ValueError(
+ f"{len(data)} data arrays were provided with {len(header)} headers. These must be equal."
+ )
+ if isinstance(data, np.ndarray):
+ data = [data]
+ if isinstance(header, fits.Header):
+ header = [header]
+ hdus = [fits.PrimaryHDU()] # The first HDU in the list is an empty PrimaryHDU
+ for i, data_array in enumerate(data):
+ tile_size = self.compute_tile_size_for_array(data_array)
+ hdu = fits.CompImageHDU(header=header[i], data=data_array, tile_shape=tile_size)
+ formatted_header = reformat_dataset_extra_header(hdu.header)
+ hdu = fits.CompImageHDU(header=formatted_header, data=hdu.data, tile_shape=tile_size)
+ hdus.append(hdu)
+ self.write(
+ data=fits.HDUList(hdus),
+ tags=[Tag.extra(), Tag.output()],
+ encoder=fits_hdulist_encoder,
+ relative_path=filename,
+ )
+ self.update_framevol(relative_path=filename)
+
+ # Check that the written file passes spec 214 validation if requested
+ if self.validate_l1_on_write:
+ spec_extras_validator.validate(self.scratch.absolute_path(filename), extra=False)
+
+ @abstractmethod
+ def run(self) -> None:
+ """
+ For each dataset extra.
+
+ * Gather the source data in whatever manner is necessary
+ * Build a header using the `build_dataset_extra_header` method to help with header construction
+ * Write the dataset extra using `assemble_and_write_dataset_extra()`
+ """
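
Note: `format_extra_filename` yields names of the form `<INSTRUME>_<DSETID>_<EXTRA-NAME>[_<detail>]_<counter>.fits`, with spaces in the extra name replaced by hyphens. A sketch with hypothetical constant values:

    instrument = "VBI"  # hypothetical constants
    dataset_id = "DATASETID"
    extra_name = "SOLAR GAIN"
    base_filename = f"{instrument}_{dataset_id}_{extra_name.replace(' ', '-')}" + "_BEAM1"
    assert f"{base_filename}_1.fits" == "VBI_DATASETID_SOLAR-GAIN_BEAM1_1.fits"  # counter == 1
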
@@ -1,11 +1,9 @@
  """Task(s) for writing level 1 data as 214 compliant fits files."""

- import importlib
  import logging
  import uuid
  from abc import ABC
  from abc import abstractmethod
- from functools import cached_property
  from pathlib import Path
  from string import ascii_uppercase
  from typing import Literal
@@ -34,7 +32,7 @@ from dkist_processing_common.models.fried_parameter import r0_valid
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.models.wavelength import WavelengthRange
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
- from dkist_processing_common.tasks import WorkflowTaskBase
+ from dkist_processing_common.tasks.write_l1_base import WriteL1Base

  logger = logging.getLogger(__name__)

@@ -43,7 +41,7 @@ __all__ = ["WriteL1Frame"]
  from dkist_processing_common.tasks.mixin.metadata_store import MetadataStoreMixin


- class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
+ class WriteL1Frame(WriteL1Base, ABC):
  """
  Task to convert final calibrated science frames into spec 214 compliant level 1 frames.

@@ -108,45 +106,6 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
  self.scratch.absolute_path(relative_path), extra=False
  )

- @cached_property
- def tile_size_param(self) -> int | None:
- """Get the tile size parameter for compression."""
- return self.metadata_store_recipe_run.configuration.tile_size
-
- @cached_property
- def validate_l1_on_write(self) -> bool:
- """Check for validate on write."""
- return self.metadata_store_recipe_run.configuration.validate_l1_on_write
-
- @cached_property
- def workflow_had_manual_intervention(self):
- """Indicate determining if any provenance capturing steps had manual intervention."""
- for provenance_record in self.metadata_store_recipe_run.recipeRunProvenances:
- if provenance_record.isTaskManual:
- return True
- return False
-
- def compute_tile_size_for_array(self, data: np.ndarray) -> list | None:
- """Determine the tile size to use for compression accounting for array shape minimums."""
- if self.tile_size_param is None:
- return None
- tile_size = []
- for dim_size in data.shape:
- if dim_size < self.tile_size_param:
- tile_size.append(dim_size)
- else:
- tile_size.append(self.tile_size_param)
- return tile_size
-
- def update_framevol(self, relative_path: str) -> None:
- """Update FRAMEVOL key to be exactly the size of the file on-disk."""
- full_path = self.scratch.workflow_base_path / relative_path
- compressed_size = full_path.stat().st_size / 1024 / 1024
- hdul = fits.open(full_path, mode="update")
- hdul[1].header["FRAMEVOL"] = compressed_size
- hdul.flush()
- del hdul
-
  def replace_header_values(self, header: fits.Header, data: np.ndarray) -> fits.Header:
  """Replace header values that should already exist with new values."""
  header["FILE_ID"] = uuid.uuid4().hex
@@ -430,18 +389,6 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
  )
  return header

- def version_from_module_name(self) -> str:
- """
- Get the value of __version__ from a module given its name.
-
- Returns
- -------
- The value of __version__
- """
- package = self.__module__.split(".")[0]
- module = importlib.import_module(package)
- return module.__version__
-
  @abstractmethod
  def add_dataset_headers(
  self, header: fits.Header, stokes: Literal["I", "Q", "U", "V"]
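
Note: `version_from_module_name` (relocated to `WriteL1Base` below) derives the top-level package from the task's module path and imports it to read `__version__`. A sketch with a hypothetical module name:

    import importlib

    module_name = "dkist_processing_common.tasks.write_l1"  # hypothetical __module__ value
    package = module_name.split(".")[0]
    version = importlib.import_module(package).__version__  # e.g. "12.2.0" for this wheel
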
@@ -0,0 +1,67 @@
+ """Base class for writing L1 FITS products with headers."""
+
+ import importlib
+ from abc import ABC
+ from functools import cached_property
+
+ import numpy as np
+ from astropy.io import fits
+
+ from dkist_processing_common.tasks import WorkflowTaskBase
+ from dkist_processing_common.tasks.mixin.metadata_store import MetadataStoreMixin
+
+
+ class WriteL1Base(WorkflowTaskBase, MetadataStoreMixin, ABC):
+ """Base class for writing L1 FITS products with headers."""
+
+ def version_from_module_name(self) -> str:
+ """
+ Get the value of __version__ from a module given its name.
+
+ Returns
+ -------
+ The value of __version__
+ """
+ package = self.__module__.split(".")[0]
+ module = importlib.import_module(package)
+ return module.__version__
+
+ @cached_property
+ def workflow_had_manual_intervention(self) -> bool:
+ """Indicate determining if any provenance capturing steps had manual intervention."""
+ for provenance_record in self.metadata_store_recipe_run.recipeRunProvenances:
+ if provenance_record.isTaskManual:
+ return True
+ return False
+
+ def update_framevol(self, relative_path: str) -> None:
+ """Update FRAMEVOL key to be exactly the size of the file on-disk."""
+ full_path = self.scratch.workflow_base_path / relative_path
+ compressed_size = full_path.stat().st_size / 1024 / 1024
+ hdul = fits.open(full_path, mode="update")
+ for i in range(1, len(hdul)):
+ hdul[i].header["FRAMEVOL"] = compressed_size
+ hdul.flush()
+ del hdul
+
+ @cached_property
+ def tile_size_param(self) -> int | None:
+ """Get the tile size parameter for compression."""
+ return self.metadata_store_recipe_run.configuration.tile_size
+
+ def compute_tile_size_for_array(self, data: np.ndarray) -> list | None:
+ """Determine the tile size to use for compression accounting for array shape minimums."""
+ if self.tile_size_param is None:
+ return None
+ tile_size = []
+ for dim_size in data.shape:
+ if dim_size < self.tile_size_param:
+ tile_size.append(dim_size)
+ else:
+ tile_size.append(self.tile_size_param)
+ return tile_size
+
+ @cached_property
+ def validate_l1_on_write(self) -> bool:
+ """Check for validate on write."""
+ return self.metadata_store_recipe_run.configuration.validate_l1_on_write
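
Note: `compute_tile_size_for_array` clamps the configured compression tile size to each array dimension so a tile never exceeds the array itself; unlike the version removed from write_l1.py above, `update_framevol` now stamps FRAMEVOL into every image HDU. The clamping loop is equivalent to this sketch with a hypothetical tile-size parameter:

    import numpy as np

    tile_size_param = 64  # hypothetical recipe run configuration value
    data = np.zeros((1, 128, 32))
    tile_size = [min(dim_size, tile_size_param) for dim_size in data.shape]
    assert tile_size == [1, 64, 32]  # each dimension capped at 64
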
@@ -0,0 +1,224 @@
+ from dataclasses import asdict
+ from dataclasses import dataclass
+ from pathlib import Path
+
+ import numpy as np
+ import pytest
+ from astropy.io import fits
+
+ from dkist_processing_common._util.scratch import WorkflowFileSystem
+ from dkist_processing_common.codecs.fits import fits_array_decoder
+ from dkist_processing_common.codecs.fits import fits_array_encoder
+ from dkist_processing_common.models.extras import DatasetExtraHeaderSection
+ from dkist_processing_common.models.extras import DatasetExtraType
+ from dkist_processing_common.models.tags import Tag
+ from dkist_processing_common.models.task_name import TaskName
+ from dkist_processing_common.tasks.write_extra import WriteL1DatasetExtras
+ from dkist_processing_common.tests.mock_metadata_store import RecipeRunResponseMapping
+ from dkist_processing_common.tests.mock_metadata_store import fake_gql_client_factory
+ from dkist_processing_common.tests.mock_metadata_store import make_default_recipe_run_response
+
+
+ @dataclass
+ class FakeConstantDb:
+ INSTRUMENT: str = "VBI"
+ DATASET_ID: str = "DATASETID"
+ AVERAGE_CADENCE: float = 10.0
+ MINIMUM_CADENCE: float = 10.0
+ MAXIMUM_CADENCE: float = 10.0
+ VARIANCE_CADENCE: float = 0.0
+ STOKES_PARAMS: tuple = ("I", "Q", "U", "V")
+ PROPOSAL_ID: str = "PROPID1"
+ EXPERIMENT_ID: str = "EXPERID1"
+ CAMERA_ID: str = "CAMERA1"
+ CAMERA_NAME: str = "Camera One"
+ CAMERA_BIT_DEPTH: int = 16
+ HARDWARE_BINNING_X: int = 1
+ HARDWARE_BINNING_Y: int = 1
+ SOFTWARE_BINNING_X: int = 1
+ SOFTWARE_BINNING_Y: int = 1
+ HLS_VERSION: str = "1.8"
+ WAVELENGTH: float = 854.2
+ # Dark
+ DARK_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+ DARK_DATE_BEGIN: str = "2023-01-01T00:00:00"
+ DARK_DATE_END: str = "2023-01-01T01:00:00"
+ DARK_TELESCOPE_TRACKING_MODE: str = "None"
+ DARK_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+ DARK_TELESCOPE_SCANNING_MODE: str = "None"
+ DARK_AVERAGE_LIGHT_LEVEL: float = 5.0
+ DARK_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+ DARK_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+ DARK_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+ DARK_GOS_LEVEL3_STATUS: str = "clear"
+ DARK_GOS_LEVEL3_LAMP_STATUS: str = "off"
+ DARK_GOS_POLARIZER_STATUS: str = "clear"
+ DARK_GOS_POLARIZER_ANGLE: str = "0.0"
+ DARK_GOS_RETARDER_STATUS: str = "clear"
+ DARK_GOS_RETARDER_ANGLE: str = "0.0"
+ DARK_GOS_LEVEL0_STATUS: str = "DarkShutter"
+ # Solar Gain
+ SOLAR_GAIN_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+ SOLAR_GAIN_DATE_BEGIN: str = "2023-01-01T00:00:00"
+ SOLAR_GAIN_DATE_END: str = "2023-01-01T01:00:00"
+ SOLAR_GAIN_NUM_RAW_FRAMES_PER_FPA: int = 1
+ SOLAR_GAIN_TELESCOPE_TRACKING_MODE: str = "None"
+ SOLAR_GAIN_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+ SOLAR_GAIN_TELESCOPE_SCANNING_MODE: str = "None"
+ SOLAR_GAIN_AVERAGE_LIGHT_LEVEL: float = 5.0
+ SOLAR_GAIN_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+ SOLAR_GAIN_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+ SOLAR_GAIN_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+ SOLAR_GAIN_GOS_LEVEL3_STATUS: str = "clear"
+ SOLAR_GAIN_GOS_LEVEL3_LAMP_STATUS: str = "off"
+ SOLAR_GAIN_GOS_POLARIZER_STATUS: str = "clear"
+ SOLAR_GAIN_GOS_POLARIZER_ANGLE: str = "0.0"
+ SOLAR_GAIN_GOS_RETARDER_STATUS: str = "clear"
+ SOLAR_GAIN_GOS_RETARDER_ANGLE: str = "0.0"
+ SOLAR_GAIN_GOS_LEVEL0_STATUS: str = "DarkShutter"
+ # Polcal
+ POLCAL_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+ POLCAL_DATE_BEGIN: str = "2023-01-01T00:00:00"
+ POLCAL_DATE_END: str = "2023-01-01T01:00:00"
+ POLCAL_NUM_RAW_FRAMES_PER_FPA: int = 1
+ POLCAL_TELESCOPE_TRACKING_MODE: str = "None"
+ POLCAL_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+ POLCAL_TELESCOPE_SCANNING_MODE: str = "None"
+ POLCAL_AVERAGE_LIGHT_LEVEL: float = 5.0
+ POLCAL_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+ POLCAL_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+ POLCAL_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+
+
+ class ConstructDatasetExtrasTest(WriteL1DatasetExtras):
+ def run(self):
+ # Make a dataset extra for each task type
+
+ for task_type in [
+ TaskName.dark,
+ TaskName.solar_gain,
+ ]:
+ filename = self.format_extra_filename(task_type, detail="BEAM1")
+ data = next(
+ self.read(
+ tags=[Tag.task(task_type), Tag.intermediate()], decoder=fits_array_decoder
+ )
+ )
+ header = self.build_dataset_extra_header(
+ sections=[
+ DatasetExtraHeaderSection.common,
+ DatasetExtraHeaderSection.aggregate,
+ DatasetExtraHeaderSection.iptask,
+ DatasetExtraHeaderSection.gos,
+ ],
+ filename=filename,
+ task_type=task_type,
+ total_exposure=0.058,
+ readout_exposure=0.029,
+ extra_name=(
+ DatasetExtraType.dark if task_type == "DARK" else DatasetExtraType.solar_gain
+ ),
+ end_time="2025-01-01T00:00:00",
+ )
+
+ self.assemble_and_write_dataset_extra(data=data, header=header, filename=filename)
+
+ task_type = TaskName.polcal
+ filename = self.format_extra_filename(task_type, detail="BEAM1")
+ data = next(
+ self.read(tags=[Tag.task(task_type), Tag.intermediate()], decoder=fits_array_decoder)
+ )
+ header = self.build_dataset_extra_header(
+ sections=[
+ DatasetExtraHeaderSection.common,
+ DatasetExtraHeaderSection.aggregate,
+ DatasetExtraHeaderSection.iptask,
+ DatasetExtraHeaderSection.gos,
+ ],
+ filename=filename,
+ task_type=task_type,
+ total_exposure=0.058,
+ readout_exposure=0.029,
+ extra_name=DatasetExtraType.demodulation_matrices,
+ end_time="2025-01-01T00:00:00",
+ )
+ self.assemble_and_write_dataset_extra(data=data, header=header, filename=filename)
+
+
+ @pytest.fixture()
+ def construct_dataset_extras_task(request, recipe_run_id, tmp_path):
+ with ConstructDatasetExtrasTest(
+ recipe_run_id=recipe_run_id,
+ workflow_name="workflow_name",
+ workflow_version="workflow_version",
+ ) as task:
+ task.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, scratch_base_path=tmp_path)
+ # Write an intermediate product to be used as the source for each dataset extra
+ for task_type in [
+ TaskName.dark,
+ TaskName.solar_gain,
+ TaskName.polcal,
+ ]:
+ task.write(
+ data=np.random.random(size=(1, 128, 128)),
+ tags=[Tag.task(task_type), Tag.intermediate()],
+ encoder=fits_array_encoder,
+ )
+ task.constants._update(asdict(FakeConstantDb()))
+ yield task
+ task._purge()
+
+
+ @pytest.fixture
+ def fake_gql_client_default_configuration():
+ """Create GraphQL client Mock that returns result without recipe run configuration."""
+ recipe_run_response = make_default_recipe_run_response()
+ recipe_run_response.configuration = None
+ new_response_mapping = RecipeRunResponseMapping(response=recipe_run_response)
+ FakeGQLClientDefaultConfiguration = fake_gql_client_factory(
+ response_mapping_override=new_response_mapping
+ )
+
+ return FakeGQLClientDefaultConfiguration
+
+
+ def test_construct_dataset_extras(
+ construct_dataset_extras_task, mocker, fake_gql_client_default_configuration
+ ):
+ """
+ Given: A ConstructDatasetExtras task with source data
+ When: Running the ConstructDatasetExtras task
+ Then: Dataset extra files are produced with expected header values
+ """
+ mocker.patch(
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
+ new=fake_gql_client_default_configuration,
+ )
+ task = construct_dataset_extras_task
+ task()
+ dataset_extra_files = list(task.read(tags=[Tag.output(), Tag.extra()]))
+ assert len(dataset_extra_files) == 3
+ for filename in dataset_extra_files:
+ split_filename = Path(filename).name.split("_")
+ assert split_filename[0] == "VBI"
+ assert split_filename[1] == task.constants.dataset_id
+ assert split_filename[-2] == "BEAM1"
+ assert split_filename[-1] == "1.fits"
+ hdul = fits.open(filename)
+ for i in range(1, len(hdul)):
+ assert isinstance(hdul[i], fits.CompImageHDU)
+ header = hdul[i].header
+ assert header["LINEWAV"] == 854.2
+ assert header["INSTRUME"] == "VBI"
+ assert header["ATAZIMUT"] == 180.0
+ assert header["FRAMEVOL"] is not None
+ assert header["IDSOBSID"] == 2
+ assert header["XPOSURE"] == 0.058
+ assert header["OBSPR_ID"] == "OP1"
+ assert header["EXTOBSID"] == "OP2,OP3"
+ assert header["EXTNAME"] in ["DARK", "SOLAR GAIN", "DEMODULATION MATRICES"]
+ if header["IPTASK"] == "POLCAL":
+ assert "POLANGLE" not in header
+ else:
+ assert header.get("POLANGLE") == "0.0"
+ assert header.get("RETANGLE") == "0.0"
@@ -57,7 +57,11 @@ def transfer_data_task(recipe_run_id, tmp_path, mocker, fake_gql_client):
  unwanted_file_obj = uuid4().hex.encode("utf8")
  task.write(unwanted_file_obj, tags=[Tag.frame()])

- yield task, output_file_obj
+ # Write a dataset extra
+ extra_file_obj = uuid4().hex.encode("utf8")
+ task.write(extra_file_obj, tags=[Tag.output(), Tag.extra()])
+
+ yield task, output_file_obj, extra_file_obj
  task._purge()


@@ -81,7 +85,7 @@ def test_build_output_frame_transfer_list(transfer_data_task):
  When: Building a transfer list of all OUTPUT frames
  Then: All OUTPUT frames are listed and no non-OUTPUT frames are listed
  """
- task, output_file_obj = transfer_data_task
+ task, output_file_obj, _ = transfer_data_task

  transfer_list = task.build_output_frame_transfer_list()

@@ -89,3 +93,21 @@ def test_build_output_frame_transfer_list(transfer_data_task):
  transfer_item = transfer_list[0]
  with transfer_item.source_path.open(mode="rb") as f:
  assert output_file_obj == f.read()
+
+
+ def test_build_dataset_extra_transfer_list(transfer_data_task):
+ """
+ Given: A task based on TransferDataBase with some files, some of which are EXTRA_OUTPUT
+ When: Building a transfer list of all EXTRA_OUTPUT frames
+ Then: All EXTRA_OUTPUT frames are listed and no non-EXTRA_OUTPUT frames are listed
+ """
+ task, _, extra_file_obj = transfer_data_task
+
+ transfer_list = task.build_dataset_extra_transfer_list()
+
+ assert len(transfer_list) == 1
+ transfer_item = transfer_list[0]
+ assert "/extra/" not in str(transfer_item.source_path)
+ assert "/extra/" in str(transfer_item.destination_path)
+ with transfer_item.source_path.open(mode="rb") as f:
+ assert extra_file_obj == f.read()
@@ -25,6 +25,7 @@ def transfer_l1_data_task(recipe_run_id, tmp_path, fake_constants_db):
  task.constants._update(fake_constants_db)
  frame_path = task.scratch.workflow_base_path / Path("frame.fits")
  movie_path = task.scratch.workflow_base_path / Path("movie.mp4")
+ extra_path = task.scratch.workflow_base_path / Path("extra.fits")
  with open(frame_path, "w") as f:
  f.write("Frame")
  task.tag(path=frame_path, tags=[Tag.frame(), Tag.output()])
@@ -7,6 +7,8 @@ from uuid import uuid4
  import astropy.units as u
  import pytest
  from astropy.io import fits
+ from dkist_data_simulator.dataset_extras import DatasetExtraBase
+ from dkist_data_simulator.dataset_extras import InstrumentTables
  from dkist_data_simulator.spec214.vbi import SimpleVBIDataset
  from sqids import Sqids

@@ -6,7 +6,6 @@ from unittest.mock import Mock
  import astropy.units as u
  import numpy as np
  import pytest
- from astropy.coordinates import EarthLocation
  from astropy.io import fits
  from astropy.time import Time
  from astropy.time import TimeDelta
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-common
- Version: 12.1.0rc1
+ Version: 12.2.0
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -16,7 +16,7 @@ Description-Content-Type: text/x-rst
  Requires-Dist: asdf<4.0.0,>=3.5.0
  Requires-Dist: astropy>=7.0.0
  Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
- Requires-Dist: dkist-header-validator<6.0,>=5.0.0
+ Requires-Dist: dkist-header-validator<6.0,>=5.3.0
  Requires-Dist: dkist-processing-core==7.0.1
  Requires-Dist: dkist-processing-pac<4.0,>=3.1
  Requires-Dist: dkist-service-configuration<5.0,>=4.2.0
@@ -1,12 +1,4 @@
  changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- changelog/280.misc.rst,sha256=6k9RYfsyA5YMk2-FNPn1ou0FCqacpBfYA_2uGr6D9I8,107
- changelog/282.feature.2.rst,sha256=6OfMMb7pzbStUmHA4YldfprALhPR5Uc_qZrKh9VdenE,176
- changelog/282.feature.rst,sha256=WHCYqxTGJlTrrjttCqRnZizIx2CEz34mLYisW36zJRo,201
- changelog/284.feature.rst,sha256=Um3KupQuiTtX9tpLE4zLk3TW_wc3ZoCGxmFA-dGQJdU,120
- changelog/285.feature.rst,sha256=FdvhAXvf1je9xLNSgZMl32TYomFibMKd1k6cf8ihkXI,142
- changelog/285.misc.rst,sha256=7ie7VKD5yTMjbXs1V-jO2I2_vsCJ49WSK2g-B37NWFs,180
- changelog/286.feature.rst,sha256=Dzqo-mB_plyAhUZGuOJ_e0fXNoRLmwgfXrjfi1hqGHg,189
- changelog/287.misc.rst,sha256=mqc5FFqdtMFrZETadk6m_wkgtoH8kvJjnR-IGMYXDBY,52
  dkist_processing_common/__init__.py,sha256=GQ9EBnYhkOnt-qODclAoLS_g5YVhurxfg1tjVtI9rDI,320
  dkist_processing_common/config.py,sha256=f511KVpK24sQO4dDr4L6PMj5dz0jmWgnx2Y-3DpV0cw,5991
  dkist_processing_common/manual.py,sha256=bIVVyLsbXMh-g_2L3kGROL-1TtJe0_XviHsp7Br31x8,7023
@@ -29,8 +21,9 @@ dkist_processing_common/codecs/str.py,sha256=Xqt5k8IhLc95KiiNiFwB1JWcVVc6T8AfcLr
  dkist_processing_common/fonts/Lato-Regular.ttf,sha256=1jbkaDIx-THtoiLViOlE0IK_0726AvkovuRhwPGFslE,656568
  dkist_processing_common/fonts/__init__.py,sha256=hBvZRtkoGRPlNDWCK-ZePXdSIlThCcjwBDfYaamVgAw,101
  dkist_processing_common/models/__init__.py,sha256=6LMqemdzVZ87fRrpAsbEnTtWZ02_Gu_oajsUlwGRH_Q,74
- dkist_processing_common/models/constants.py,sha256=-UAJh7fz3BhPX4zaRRbQj1J-YnxVMuuvDWxLFmOll5Q,20184
+ dkist_processing_common/models/constants.py,sha256=Xo-HDY-POx9LY5s8lGtXZldrqb1c58t0mpiKKoNdmow,20502
  dkist_processing_common/models/dkist_location.py,sha256=6Nk0wvv4R8ptlrV7BXon7abq4YLvmTdUmPsDN5G8nWc,971
+ dkist_processing_common/models/extras.py,sha256=RI4JWOinYl1rRyA4anNDj5nCSIrvwrix_dOod9bcyHA,1207
  dkist_processing_common/models/fits_access.py,sha256=imKqL4-_g6gTR-IeIjZ6qkMhQX3JujdrKFrTd9gOXnw,5605
  dkist_processing_common/models/flower_pot.py,sha256=bSmnfN1r5ASx_E9GtvFzdT7lciLCpMy80TeSLztakZk,12289
  dkist_processing_common/models/fried_parameter.py,sha256=ro_H2Eo3I88lRf1wJjZfTc_XOjhgLt4whIQR_sjAFbM,1609
@@ -41,7 +34,7 @@ dkist_processing_common/models/message_queue_binding.py,sha256=Y4otwkkePrLRSjlry
  dkist_processing_common/models/metric_code.py,sha256=WSLF9yqcVzk9L9u8WBhgtpUYUWYsG4ZFWFRFtezdUCM,848
  dkist_processing_common/models/parameters.py,sha256=9An3SxUEBI-oYHjICQ_q-IIScTfpvVeAFH7jLzBzzWI,9649
  dkist_processing_common/models/quality.py,sha256=TmDVbvPbfl5CIIs1ioD5guLUoEOFTfiJESvDjLTLl5s,3981
- dkist_processing_common/models/tags.py,sha256=0YqiDrismOSds_3XtFBb2dfv0gjMs6CgRv2dJKsSthI,12082
+ dkist_processing_common/models/tags.py,sha256=emvQDsLwzfzqSvH2CwvOd85DRlb5pBCKgOgzjbEKxlY,12335
  dkist_processing_common/models/task_name.py,sha256=uAl7qTK4Xx1nqPAhNAe5nAXqxwPwQzAq58YmoccX6xQ,567
  dkist_processing_common/models/telemetry.py,sha256=XVcLNgHCZsP9L7oYiklyLUoqQtWt_xjEkuf70Kbudz4,839
  dkist_processing_common/models/wavelength.py,sha256=4UhRVoNvCHZitXo5S1oRdewadbmGfmDK6wetMV06POA,967
@@ -50,7 +43,7 @@ dkist_processing_common/parsers/average_bud.py,sha256=mTlrBlo7Pe5WnNmui4wQC24-8Q
  dkist_processing_common/parsers/cs_step.py,sha256=6SCSbCgVhEGxJIIrCBByWCmIad_vuje28jECFZ6qlZI,6454
  dkist_processing_common/parsers/dsps_repeat.py,sha256=Jg6oI9-PtFQbQHbGul6_eiRzBKr0Z2HIGOitG0G5CD4,1642
  dkist_processing_common/parsers/experiment_id_bud.py,sha256=LUehIqB56hmDwARph1itSUsPenFHScfrrRuZmcCi4xA,960
- dkist_processing_common/parsers/id_bud.py,sha256=ShHj7bHiPAmMdUW56WfwPTRrCEnBuLu8v45Aaxocd-M,2242
+ dkist_processing_common/parsers/id_bud.py,sha256=QvXTOF9kjekNd_M2ZTQwPhhQwoj2m24dqp2OuCNyHB0,2479
  dkist_processing_common/parsers/l0_fits_access.py,sha256=Ol3eo8yyNbGqbN2whhD2jBNoqhOrlwmH8DbMmCLtNk0,1033
  dkist_processing_common/parsers/l1_fits_access.py,sha256=BWojvcFl_RmkrRWHe1WxDCsPeexervlRFPothuXAyoI,4410
  dkist_processing_common/parsers/lookup_bud.py,sha256=IpDtwCsJRiOanDTpoaav3dZXe55WZpYWTnyxIiEWxvk,4047
@@ -67,15 +60,17 @@ dkist_processing_common/parsers/wavelength.py,sha256=P5C9mG8DAKK3GB3vWNRBI5l7pAW
  dkist_processing_common/tasks/__init__.py,sha256=l23ctjNsKJbHbbqaZBMeOPaOtw0hmITEljI_JJ-CVsU,627
  dkist_processing_common/tasks/assemble_movie.py,sha256=1ixDG-f4ODt0vywqVccG3aodLljVO5OGlvuMO9EEvcU,12767
  dkist_processing_common/tasks/base.py,sha256=itAHCvzcodo-q8_AjpWoRaM86BlcjWDpCIiUP7uwmP0,13236
- dkist_processing_common/tasks/l1_output_data.py,sha256=LxGnOSfG0wRQi5r3OuSQueZYSSUG27GBeCDjmg9HCpo,10380
- dkist_processing_common/tasks/output_data_base.py,sha256=r1Bu3FX5zTVj66GTMWtaV_NdhxjyjSm661Bt2Mxmfi4,3685
+ dkist_processing_common/tasks/l1_output_data.py,sha256=qvOiVyxuqD6efR0PYILERIZ1WPXsJQRIk-hSBf26F5Q,10968
+ dkist_processing_common/tasks/output_data_base.py,sha256=lJZ3olayA_nUWbxzozfbIbJFD3j2VpxPMt1CB9LB5IY,4763
  dkist_processing_common/tasks/parse_l0_input_data.py,sha256=xcWmwSKwxEeManltCFrqVG224Vk-BYRE6g32VOK--rI,19241
  dkist_processing_common/tasks/quality_metrics.py,sha256=cvGF6tJ8yAvxOvkeG3tWxYwL885BrFW5X3V7_MSzL-A,12481
  dkist_processing_common/tasks/teardown.py,sha256=rwT9lWINVDF11-az_nx-Z5ykMTX_SJCchobpU6sErgk,2360
  dkist_processing_common/tasks/transfer_input_data.py,sha256=4TJqlDjTc503QFvzSmMert99r9KHDwyd72r1kHAVhQA,5879
  dkist_processing_common/tasks/trial_catalog.py,sha256=Yf-BKNCT_OHwJsxxZP8p2eRW04CcY0tw5_YIe1e9RQY,10535
  dkist_processing_common/tasks/trial_output_data.py,sha256=pUdrNlAzuir4AUdfax5_MOplB-A9NrXErMJmAwtJmLA,6811
- dkist_processing_common/tasks/write_l1.py,sha256=Xy834RTp3F95kLcW4ba5gfHMUocfZd82ZQQKnvQcP2M,23204
+ dkist_processing_common/tasks/write_extra.py,sha256=1mg7AsFHeDEg_ncgGKv57aHixtbzB7jFc5zPLZ3-89Q,16211
+ dkist_processing_common/tasks/write_l1.py,sha256=upsaFN3S0r4MasrzjZ6i0gNF_bvQLf_oyYoRogB7odc,21163
+ dkist_processing_common/tasks/write_l1_base.py,sha256=iqejlYb3CSagUyi6U56nmgItzrwcQxLIDwgruxZho3A,2474
  dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
  dkist_processing_common/tasks/mixin/globus.py,sha256=ugejtZ_MR5LesQYuXM1uICd_yWDE7cZZr0qnWCh75R8,6732
  dkist_processing_common/tasks/mixin/interservice_bus.py,sha256=M6R922l7gJSmmU_vswUXxy-c5DWNrIRjQu9H9CSgGfU,1081
@@ -92,6 +87,7 @@ dkist_processing_common/tests/test_assemble_quality.py,sha256=-F22jMY6mPy65VZ1TZ
  dkist_processing_common/tests/test_base.py,sha256=gsyBG2R6Ufx7CzbHeGMagUwM9yCfpN4gCSZ6-aH2q48,6643
  dkist_processing_common/tests/test_codecs.py,sha256=WpF15UYklpNRgETI4EwXsgbNzxMcHlelfpprBbupC0I,23907
  dkist_processing_common/tests/test_constants.py,sha256=I_KcJs7ScCn53GYhEO6qjWrrnfZuyC1IVYOy87Pjlg4,6565
+ dkist_processing_common/tests/test_construct_dataset_extras.py,sha256=dss_RXQu0xfOiiuHDCeI0rGrUlDCfgNHKQ7VwkN6HqU,9318
  dkist_processing_common/tests/test_cs_step.py,sha256=RA0QD3D8eaL3YSOL_gIJ9wkngy14RQ2jbD-05KAziW4,2408
  dkist_processing_common/tests/test_dkist_location.py,sha256=-_OoSw4SZDLFyIuOltHvM6PQjxm5hTiJQsiTGZ8Sadc,456
  dkist_processing_common/tests/test_fits_access.py,sha256=a50B4IAAH5NH5zeudTqyy0b5uWKJwJuzQLUdK1LoOHM,12832
@@ -101,7 +97,7 @@ dkist_processing_common/tests/test_input_dataset.py,sha256=wnQbZxBYywG5CEXces2WW
  dkist_processing_common/tests/test_interservice_bus.py,sha256=QrBeZ8dh497h6nxA8-aVUIGDcSj8y9DIXIk9I_HkXr0,3001
  dkist_processing_common/tests/test_interservice_bus_mixin.py,sha256=IptJkW7Qeu2Y742NKXEgkok2VdS600keLgCD3Y9iw3A,4131
  dkist_processing_common/tests/test_manual_processing.py,sha256=iHF7yQPlar9niYAGXtFv28Gw3Undlds38yMfszk4ccY,1037
- dkist_processing_common/tests/test_output_data_base.py,sha256=D8b1XKvbE3C5cGOiHq58yJ2pzQL3iL0wLZy_AkDdB9Y,3085
+ dkist_processing_common/tests/test_output_data_base.py,sha256=VoXW7g5yE2Lzb-HpyhVQYbPpiCZ7YYQAuShoAFVO5nE,3983
  dkist_processing_common/tests/test_parameters.py,sha256=CUEUIGBPMCUXPll0G0UxFDbMXi8lmnjRwXBarGX1PAQ,14033
  dkist_processing_common/tests/test_parse_l0_input_data.py,sha256=7yn1VGwC3S-0JOYIMflNHhaXEnPhwjJtewD2WxBgugM,13239
  dkist_processing_common/tests/test_publish_catalog_messages.py,sha256=7WRsEwoLHGeaCmLTAW4tU_BlZw0e3hwx65uWSGzfuYE,2393
@@ -115,11 +111,11 @@ dkist_processing_common/tests/test_task_name.py,sha256=kqFr59XX2K87xzfTlClzDV4-J
  dkist_processing_common/tests/test_task_parsing.py,sha256=2_OOmeZQWD17XAd_ECYmodJzD_iRIBKjCYdGh38BOx4,4421
  dkist_processing_common/tests/test_teardown.py,sha256=DaliHSGsiQBZaFkf5wb3XBo6rHNPmx2bmQtVymYeBN4,5601
  dkist_processing_common/tests/test_transfer_input_data.py,sha256=eyAAWXpTHQ8aew87-MncWpYBn4DAZrTSOL3LvlQfR5Q,12611
- dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=PVGDJBEUk4kAeu8ivrhlCE7yd29R18t9kZLFx-mpBwY,2063
- dkist_processing_common/tests/test_trial_catalog.py,sha256=CxjtVABE5Fw2EvyXR56IJ3PPi9QvEOjccH0OzzRWk30,9424
+ dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=OV2XMEkCbEuL9_i2S3P9Jfyf15tcBkAP3JNE8Jn_A9k,2137
+ dkist_processing_common/tests/test_trial_catalog.py,sha256=290h3cr_ToNtYOm9ze8xQQz9d82sF9XLqmyf9_L7g34,9554
  dkist_processing_common/tests/test_trial_output_data.py,sha256=fu3iGNV_FI8LOacezyt4HvXnxY3g1_UiBuRI63yz5Oo,11977
  dkist_processing_common/tests/test_workflow_task_base.py,sha256=LTVusltNrsGUOvw9G323am4CXebgE4tJhP6gZCcS0CQ,10457
- dkist_processing_common/tests/test_write_l1.py,sha256=alN-lozKEm6vKNdhtvzjnuPqv-NjHyUg16Op7SkMH-c,27964
+ dkist_processing_common/tests/test_write_l1.py,sha256=qraecD9Vv6jsqg00XOSlM1IdK9l8i36VoHLoLj8ypiU,27918
  docs/Makefile,sha256=qnlVz6PuBqE39NfHWuUnHhNEA-EFgT2-WJNNNy9ttfk,4598
  docs/changelog.rst,sha256=S2jPASsWlQxSlAPqdvNrYvhk9k3FcFWNXFNDYXBSjl4,120
  docs/conf.py,sha256=7W2iHKs3J5RhAz0JZafC_UnfMvcpZN7j4LLUmQtk2D0,1891
@@ -128,7 +124,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
  docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
  docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
  licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
- dkist_processing_common-12.1.0rc1.dist-info/METADATA,sha256=Sv5XyrgF8y0Gx_38hkXIqvcBsPKip3ug5Lfl0mP6lUs,14214
- dkist_processing_common-12.1.0rc1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- dkist_processing_common-12.1.0rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
- dkist_processing_common-12.1.0rc1.dist-info/RECORD,,
+ dkist_processing_common-12.2.0.dist-info/METADATA,sha256=z2dP61XdZiapHC3zbjS9kVZncQsd9zkfHd43Aosw4aQ,14211
+ dkist_processing_common-12.2.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ dkist_processing_common-12.2.0.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+ dkist_processing_common-12.2.0.dist-info/RECORD,,
changelog/280.misc.rst DELETED
@@ -1 +0,0 @@
- Speed up the reading of INPUT files in Parse tasks by turning off image decompression and checksum checks.
changelog/282.feature.2.rst DELETED
@@ -1,2 +0,0 @@
- Add `SetStem` base class that has all the benefits of `ListStem` but also gets a speedup by storing values in a `set` for
- cases where repeated values don't need to be tracked.
changelog/282.feature.rst DELETED
@@ -1,2 +0,0 @@
- Add `ListStem` base class for huge speedup in cases where the keys don't matter and the `getter` logic only depends on the
- list of values computed by `setter`. This is the case for most (all?) "Buds".
changelog/284.feature.rst DELETED
@@ -1 +0,0 @@
- Speed up parsing of the `*CadenceBud`, `TaskDateBeginBud`, and `[Task]NearFloatBud` by basing these buds on `ListStem`.
changelog/285.feature.rst DELETED
@@ -1,2 +0,0 @@
- Speed up `NumCSStepBud`, `[Task]UniqueBud`, `[Task]ContributingIdsBud`, and `TaskRoundTimeBudBase` parsing by basing
- these buds on `SetStem`.
changelog/285.misc.rst DELETED
@@ -1,2 +0,0 @@
- Update `RetarderNameBud` to drop "clear" values (i.e., the retarder is out of the beam) in the `setter` instead of the `getter`.
- This brings it in line with standard Bud-practice.
changelog/286.feature.rst DELETED
@@ -1,2 +0,0 @@
- Speed up `CSStepFlower` parsing by using an internal set to keep track of the unique `CSStep` objects. This removes the
- need to compute the unique set when computing the tag for each file.
changelog/287.misc.rst DELETED
@@ -1 +0,0 @@
- Convert the TimeLookupBud to be a SetStem constant.