dkist-processing-common 10.5.4__py3-none-any.whl → 12.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. changelog/280.misc.rst +1 -0
  2. changelog/282.feature.2.rst +2 -0
  3. changelog/282.feature.rst +2 -0
  4. changelog/284.feature.rst +1 -0
  5. changelog/285.feature.rst +2 -0
  6. changelog/285.misc.rst +2 -0
  7. changelog/286.feature.rst +2 -0
  8. changelog/287.misc.rst +1 -0
  9. dkist_processing_common/__init__.py +1 -0
  10. dkist_processing_common/_util/constants.py +1 -0
  11. dkist_processing_common/_util/graphql.py +1 -0
  12. dkist_processing_common/_util/scratch.py +9 -9
  13. dkist_processing_common/_util/tags.py +1 -0
  14. dkist_processing_common/codecs/array.py +20 -0
  15. dkist_processing_common/codecs/asdf.py +9 -3
  16. dkist_processing_common/codecs/basemodel.py +22 -0
  17. dkist_processing_common/codecs/bytes.py +1 -0
  18. dkist_processing_common/codecs/fits.py +37 -9
  19. dkist_processing_common/codecs/iobase.py +1 -0
  20. dkist_processing_common/codecs/json.py +1 -0
  21. dkist_processing_common/codecs/path.py +1 -0
  22. dkist_processing_common/codecs/quality.py +1 -1
  23. dkist_processing_common/codecs/str.py +1 -0
  24. dkist_processing_common/config.py +64 -25
  25. dkist_processing_common/manual.py +6 -8
  26. dkist_processing_common/models/constants.py +373 -37
  27. dkist_processing_common/models/dkist_location.py +27 -0
  28. dkist_processing_common/models/fits_access.py +48 -0
  29. dkist_processing_common/models/flower_pot.py +231 -9
  30. dkist_processing_common/models/fried_parameter.py +41 -0
  31. dkist_processing_common/models/graphql.py +66 -75
  32. dkist_processing_common/models/input_dataset.py +117 -0
  33. dkist_processing_common/models/message.py +1 -1
  34. dkist_processing_common/models/message_queue_binding.py +1 -1
  35. dkist_processing_common/models/metric_code.py +2 -0
  36. dkist_processing_common/models/parameters.py +65 -28
  37. dkist_processing_common/models/quality.py +50 -5
  38. dkist_processing_common/models/tags.py +23 -21
  39. dkist_processing_common/models/task_name.py +3 -2
  40. dkist_processing_common/models/telemetry.py +28 -0
  41. dkist_processing_common/models/wavelength.py +3 -1
  42. dkist_processing_common/parsers/average_bud.py +46 -0
  43. dkist_processing_common/parsers/cs_step.py +13 -12
  44. dkist_processing_common/parsers/dsps_repeat.py +6 -4
  45. dkist_processing_common/parsers/experiment_id_bud.py +12 -4
  46. dkist_processing_common/parsers/id_bud.py +42 -27
  47. dkist_processing_common/parsers/l0_fits_access.py +5 -3
  48. dkist_processing_common/parsers/l1_fits_access.py +51 -23
  49. dkist_processing_common/parsers/lookup_bud.py +125 -0
  50. dkist_processing_common/parsers/near_bud.py +21 -20
  51. dkist_processing_common/parsers/observing_program_id_bud.py +24 -0
  52. dkist_processing_common/parsers/proposal_id_bud.py +13 -5
  53. dkist_processing_common/parsers/quality.py +2 -0
  54. dkist_processing_common/parsers/retarder.py +32 -0
  55. dkist_processing_common/parsers/single_value_single_key_flower.py +6 -1
  56. dkist_processing_common/parsers/task.py +8 -6
  57. dkist_processing_common/parsers/time.py +178 -72
  58. dkist_processing_common/parsers/unique_bud.py +21 -22
  59. dkist_processing_common/parsers/wavelength.py +5 -3
  60. dkist_processing_common/tasks/__init__.py +3 -2
  61. dkist_processing_common/tasks/assemble_movie.py +4 -3
  62. dkist_processing_common/tasks/base.py +59 -60
  63. dkist_processing_common/tasks/l1_output_data.py +54 -53
  64. dkist_processing_common/tasks/mixin/globus.py +24 -27
  65. dkist_processing_common/tasks/mixin/interservice_bus.py +1 -0
  66. dkist_processing_common/tasks/mixin/metadata_store.py +108 -243
  67. dkist_processing_common/tasks/mixin/object_store.py +22 -0
  68. dkist_processing_common/tasks/mixin/quality/__init__.py +1 -0
  69. dkist_processing_common/tasks/mixin/quality/_base.py +8 -1
  70. dkist_processing_common/tasks/mixin/quality/_metrics.py +166 -14
  71. dkist_processing_common/tasks/output_data_base.py +4 -3
  72. dkist_processing_common/tasks/parse_l0_input_data.py +277 -15
  73. dkist_processing_common/tasks/quality_metrics.py +9 -9
  74. dkist_processing_common/tasks/teardown.py +7 -7
  75. dkist_processing_common/tasks/transfer_input_data.py +67 -69
  76. dkist_processing_common/tasks/trial_catalog.py +77 -17
  77. dkist_processing_common/tasks/trial_output_data.py +16 -17
  78. dkist_processing_common/tasks/write_l1.py +102 -72
  79. dkist_processing_common/tests/conftest.py +32 -173
  80. dkist_processing_common/tests/mock_metadata_store.py +271 -0
  81. dkist_processing_common/tests/test_assemble_movie.py +4 -4
  82. dkist_processing_common/tests/test_assemble_quality.py +32 -4
  83. dkist_processing_common/tests/test_base.py +5 -19
  84. dkist_processing_common/tests/test_codecs.py +103 -12
  85. dkist_processing_common/tests/test_constants.py +15 -0
  86. dkist_processing_common/tests/test_dkist_location.py +15 -0
  87. dkist_processing_common/tests/test_fits_access.py +56 -19
  88. dkist_processing_common/tests/test_flower_pot.py +147 -5
  89. dkist_processing_common/tests/test_fried_parameter.py +27 -0
  90. dkist_processing_common/tests/test_input_dataset.py +78 -361
  91. dkist_processing_common/tests/test_interservice_bus.py +1 -0
  92. dkist_processing_common/tests/test_interservice_bus_mixin.py +1 -1
  93. dkist_processing_common/tests/test_manual_processing.py +33 -0
  94. dkist_processing_common/tests/test_output_data_base.py +5 -7
  95. dkist_processing_common/tests/test_parameters.py +71 -22
  96. dkist_processing_common/tests/test_parse_l0_input_data.py +115 -32
  97. dkist_processing_common/tests/test_publish_catalog_messages.py +2 -24
  98. dkist_processing_common/tests/test_quality.py +1 -0
  99. dkist_processing_common/tests/test_quality_mixin.py +255 -23
  100. dkist_processing_common/tests/test_scratch.py +2 -1
  101. dkist_processing_common/tests/test_stems.py +511 -168
  102. dkist_processing_common/tests/test_submit_dataset_metadata.py +3 -7
  103. dkist_processing_common/tests/test_tags.py +1 -0
  104. dkist_processing_common/tests/test_task_name.py +1 -1
  105. dkist_processing_common/tests/test_task_parsing.py +17 -7
  106. dkist_processing_common/tests/test_teardown.py +28 -24
  107. dkist_processing_common/tests/test_transfer_input_data.py +270 -125
  108. dkist_processing_common/tests/test_transfer_l1_output_data.py +2 -3
  109. dkist_processing_common/tests/test_trial_catalog.py +83 -8
  110. dkist_processing_common/tests/test_trial_output_data.py +46 -73
  111. dkist_processing_common/tests/test_workflow_task_base.py +8 -10
  112. dkist_processing_common/tests/test_write_l1.py +298 -76
  113. dkist_processing_common-12.1.0rc1.dist-info/METADATA +265 -0
  114. dkist_processing_common-12.1.0rc1.dist-info/RECORD +134 -0
  115. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/WHEEL +1 -1
  116. docs/conf.py +1 -0
  117. docs/index.rst +1 -1
  118. docs/landing_page.rst +13 -0
  119. dkist_processing_common/tasks/mixin/input_dataset.py +0 -166
  120. dkist_processing_common-10.5.4.dist-info/METADATA +0 -175
  121. dkist_processing_common-10.5.4.dist-info/RECORD +0 -112
  122. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
1
1
  """Task(s) for writing level 1 data as 214 compliant fits files."""
2
+
2
3
  import importlib
3
4
  import logging
4
5
  import uuid
@@ -6,27 +7,30 @@ from abc import ABC
6
7
  from abc import abstractmethod
7
8
  from functools import cached_property
8
9
  from pathlib import Path
10
+ from string import ascii_uppercase
9
11
  from typing import Literal
10
12
 
11
13
  import astropy.units as u
12
14
  import numpy as np
13
- from astropy.coordinates import EarthLocation
14
15
  from astropy.io import fits
15
16
  from astropy.time import Time
16
17
  from dkist_fits_specifications import __version__ as spec_version
17
18
  from dkist_fits_specifications.utils.formatter import reformat_spec214_header
18
19
  from dkist_header_validator import spec214_validator
19
20
  from dkist_header_validator.translator import remove_extra_axis_keys
20
- from dkist_header_validator.translator import sanitize_to_spec214_level1
21
+ from dkist_header_validator.translator import remove_spec_122_keys_and_spec_214_l0_keys
21
22
  from dkist_spectral_lines.search import get_closest_spectral_line
22
23
  from dkist_spectral_lines.search import get_spectral_lines
23
24
  from scipy.stats import kurtosis
24
25
  from scipy.stats import skew
26
+ from sqids import Sqids
25
27
  from sunpy.coordinates import HeliocentricInertial
26
28
  from sunpy.coordinates import Helioprojective
27
29
 
28
30
  from dkist_processing_common.codecs.fits import fits_access_decoder
29
31
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
32
+ from dkist_processing_common.models.dkist_location import location_of_dkist
33
+ from dkist_processing_common.models.fried_parameter import r0_valid
30
34
  from dkist_processing_common.models.tags import Tag
31
35
  from dkist_processing_common.models.wavelength import WavelengthRange
32
36
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
@@ -49,7 +53,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
49
53
  def run(self) -> None:
50
54
  """Run method for this task."""
51
55
  for stokes_param in self.constants.stokes_params:
52
- with self.apm_task_step(f"Get calibrated frames for stokes param {stokes_param}"):
56
+ with self.telemetry_span(f"Get calibrated frames for stokes param {stokes_param}"):
53
57
  tags = [Tag.frame(), Tag.calibrated(), Tag.stokes(stokes_param)]
54
58
  calibrated_fits_objects = self.read(
55
59
  tags=tags,
@@ -100,22 +104,24 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
100
104
 
101
105
  # Check that the written file passes spec 214 validation if requested
102
106
  if self.validate_l1_on_write:
103
- spec214_validator.validate(self.scratch.absolute_path(relative_path))
107
+ spec214_validator.validate(
108
+ self.scratch.absolute_path(relative_path), extra=False
109
+ )
104
110
 
105
111
  @cached_property
106
- def tile_size_param(self) -> int:
112
+ def tile_size_param(self) -> int | None:
107
113
  """Get the tile size parameter for compression."""
108
- return self.metadata_store_recipe_run_configuration().get("tile_size", None)
114
+ return self.metadata_store_recipe_run.configuration.tile_size
109
115
 
110
116
  @cached_property
111
117
  def validate_l1_on_write(self) -> bool:
112
118
  """Check for validate on write."""
113
- return self.metadata_store_recipe_run_configuration().get("validate_l1_on_write", True)
119
+ return self.metadata_store_recipe_run.configuration.validate_l1_on_write
114
120
 
115
121
  @cached_property
116
122
  def workflow_had_manual_intervention(self):
117
123
  """Indicate determining if any provenance capturing steps had manual intervention."""
118
- for provenance_record in self.metadata_store_recipe_run_provenance:
124
+ for provenance_record in self.metadata_store_recipe_run.recipeRunProvenances:
119
125
  if provenance_record.isTaskManual:
120
126
  return True
121
127
  return False
@@ -157,23 +163,32 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
157
163
  header["DATE-END"] = self.calculate_date_end(header=header)
158
164
  return header
159
165
 
166
+ @staticmethod
167
+ def compute_product_id(ids_obs_id: int, proc_type: str) -> str:
168
+ """Compute the productId from IDSOBSID and PROCTYPE."""
169
+ sqid_factory = Sqids(alphabet=ascii_uppercase, min_length=5)
170
+ sqid = sqid_factory.encode([ids_obs_id])
171
+ return f"{proc_type}-{sqid}"
172
+
160
173
  @staticmethod
161
174
  def add_stats_headers(header: fits.Header, data: np.ndarray) -> fits.Header:
162
175
  """Fill out the spec 214 statistics header table."""
163
176
  data = data.flatten()
164
- percentiles = np.nanpercentile(data, [1, 10, 25, 75, 90, 95, 98, 99])
177
+ percentiles = np.nanpercentile(data, [1, 2, 5, 10, 25, 75, 90, 95, 98, 99])
165
178
  header["DATAMIN"] = np.nanmin(data)
166
179
  header["DATAMAX"] = np.nanmax(data)
167
180
  header["DATAMEAN"] = np.nanmean(data)
168
181
  header["DATAMEDN"] = np.nanmedian(data)
169
- header["DATA01"] = percentiles[0]
170
- header["DATA10"] = percentiles[1]
171
- header["DATA25"] = percentiles[2]
172
- header["DATA75"] = percentiles[3]
173
- header["DATA90"] = percentiles[4]
174
- header["DATA95"] = percentiles[5]
175
- header["DATA98"] = percentiles[6]
176
- header["DATA99"] = percentiles[7]
182
+ header["DATAP01"] = percentiles[0]
183
+ header["DATAP02"] = percentiles[1]
184
+ header["DATAP05"] = percentiles[2]
185
+ header["DATAP10"] = percentiles[3]
186
+ header["DATAP25"] = percentiles[4]
187
+ header["DATAP75"] = percentiles[5]
188
+ header["DATAP90"] = percentiles[6]
189
+ header["DATAP95"] = percentiles[7]
190
+ header["DATAP98"] = percentiles[8]
191
+ header["DATAP99"] = percentiles[9]
177
192
  header["DATARMS"] = np.sqrt(np.nanmean(data**2))
178
193
  header["DATAKURT"] = kurtosis(data, nan_policy="omit")
179
194
  header["DATASKEW"] = skew(data, nan_policy="omit")
@@ -192,30 +207,45 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
192
207
  header["FRAMEVOL"] = -1.0
193
208
  header["PROCTYPE"] = "L1"
194
209
  header["RRUNID"] = self.recipe_run_id
195
- header["RECIPEID"] = self.metadata_store_recipe_id
196
- header["RINSTID"] = self.metadata_store_recipe_instance_id
210
+ header["RECIPEID"] = self.metadata_store_recipe_run.recipeInstance.recipeId
211
+ header["RINSTID"] = self.metadata_store_recipe_run.recipeInstanceId
197
212
  header["EXTNAME"] = "observation"
198
213
  header["SOLARNET"] = 1
199
214
  header["OBS_HDU"] = 1
200
215
  header["FILENAME"] = self.l1_filename(header=header, stokes=stokes)
201
216
  header["STOKES"] = stokes
202
- # Cadence keywords
203
- header["CADENCE"] = self.constants.average_cadence
204
- header["CADMIN"] = self.constants.minimum_cadence
205
- header["CADMAX"] = self.constants.maximum_cadence
206
- header["CADVAR"] = self.constants.variance_cadence
207
217
  # Keywords to support reprocessing
208
- if ids_par_id := self.metadata_store_input_dataset_parameters_part_id:
209
- header["IDSPARID"] = ids_par_id
210
- if ids_obs_id := self.metadata_store_input_dataset_observe_frames_part_id:
211
- header["IDSOBSID"] = ids_obs_id
212
- if ids_cal_id := self.metadata_store_input_dataset_calibration_frames_part_id:
213
- header["IDSCALID"] = ids_cal_id
218
+ if parameters := self.metadata_store_input_dataset_parameters:
219
+ header["IDSPARID"] = parameters.inputDatasetPartId
220
+ if observe_frames := self.metadata_store_input_dataset_observe_frames:
221
+ header["IDSOBSID"] = observe_frames.inputDatasetPartId
222
+ if calibration_frames := self.metadata_store_input_dataset_calibration_frames:
223
+ header["IDSCALID"] = calibration_frames.inputDatasetPartId
214
224
  header["WKFLNAME"] = self.workflow_name
215
225
  header["WKFLVERS"] = self.workflow_version
216
226
  header = self.add_contributing_id_headers(header=header)
217
227
  header["MANPROCD"] = self.workflow_had_manual_intervention
218
- # Spectral line keywords
228
+ header["PRODUCT"] = self.compute_product_id(header["IDSOBSID"], header["PROCTYPE"])
229
+ return header
230
+
231
+ def add_timing_headers(self, header: fits.Header) -> fits.Header:
232
+ """
233
+ Add timing headers to the FITS header.
234
+
235
+ This method adds or updates headers related to frame timings.
236
+ """
237
+ # Cadence keywords
238
+ header["CADENCE"] = self.constants.average_cadence
239
+ header["CADMIN"] = self.constants.minimum_cadence
240
+ header["CADMAX"] = self.constants.maximum_cadence
241
+ header["CADVAR"] = self.constants.variance_cadence
242
+ return header
243
+
244
+ def add_spectral_line_headers(
245
+ self,
246
+ header: fits.Header,
247
+ ) -> fits.Header:
248
+ """Add datacenter table keys relating to spectral lines."""
219
249
  wavelength_range = self.get_wavelength_range(header=header)
220
250
  spectral_lines = get_spectral_lines(
221
251
  wavelength_min=wavelength_range.min,
@@ -236,55 +266,31 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
236
266
  For spectrographs, this is the wavelengths covered by the spectral axis of the data.
237
267
  """
238
268
 
239
- @property
240
- def location_of_dkist(self) -> EarthLocation:
241
- """Return hard-coded EarthLocation of the DKIST.
242
-
243
- Cartesian geocentric coordinates of DKIST on Earth as retrieved from
244
- https://github.com/astropy/astropy-data/blob/gh-pages/coordinates/sites.json#L838
245
- """
246
- _dkist_site_info = {
247
- "aliases": ["DKIST", "ATST"],
248
- "name": "Daniel K. Inouye Solar Telescope",
249
- "elevation": 3067,
250
- "elevation_unit": "meter",
251
- "latitude": 20.7067,
252
- "latitude_unit": "degree",
253
- "longitude": 203.7436,
254
- "longitude_unit": "degree",
255
- "timezone": "US/Hawaii",
256
- "source": "DKIST website: https://www.nso.edu/telescopes/dki-solar-telescope/",
257
- }
258
- location_of_dkist = EarthLocation.from_geodetic(
259
- _dkist_site_info["longitude"] * u.Unit(_dkist_site_info["longitude_unit"]),
260
- _dkist_site_info["latitude"] * u.Unit(_dkist_site_info["latitude_unit"]),
261
- _dkist_site_info["elevation"] * u.Unit(_dkist_site_info["elevation_unit"]),
262
- )
263
-
264
- return location_of_dkist
265
-
266
269
  def add_solarnet_headers(self, header: fits.Header) -> fits.Header:
267
270
  """Add headers recommended by solarnet that haven't already been added."""
268
271
  header["DATE-AVG"] = self.calculate_date_avg(header=header)
269
272
  header["TELAPSE"] = self.calculate_telapse(header=header)
270
273
  header["DATEREF"] = header["DATE-BEG"]
271
- dkist_loc = self.location_of_dkist
272
- header["OBSGEO-X"] = dkist_loc.x.to_value(unit=u.m)
273
- header["OBSGEO-Y"] = dkist_loc.y.to_value(unit=u.m)
274
- header["OBSGEO-Z"] = dkist_loc.z.to_value(unit=u.m)
274
+ header["OBSGEO-X"] = location_of_dkist.x.to_value(unit=u.m)
275
+ header["OBSGEO-Y"] = location_of_dkist.y.to_value(unit=u.m)
276
+ header["OBSGEO-Z"] = location_of_dkist.z.to_value(unit=u.m)
275
277
  obstime = Time(header["DATE-AVG"])
276
278
  header["OBS_VR"] = (
277
- dkist_loc.get_gcrs(obstime=obstime)
279
+ location_of_dkist.get_gcrs(obstime=obstime)
278
280
  .transform_to(HeliocentricInertial(obstime=obstime))
279
281
  .d_distance.to_value(unit=u.m / u.s)
280
282
  ) # relative velocity of observer with respect to the sun in m/s
281
283
  header["SOLARRAD"] = self.calculate_solar_angular_radius(obstime=obstime)
282
284
  header["SPECSYS"] = "TOPOCENT" # no wavelength correction made due to doppler velocity
283
285
  header["VELOSYS"] = 0.0 # no wavelength correction made due to doppler velocity
284
- header["WAVEBAND"] = get_closest_spectral_line(wavelength=header["LINEWAV"] * u.nm).name
285
286
  wavelength_range = self.get_wavelength_range(header=header)
286
287
  header["WAVEMIN"] = wavelength_range.min.to_value(u.nm)
287
288
  header["WAVEMAX"] = wavelength_range.max.to_value(u.nm)
289
+ waveband: str | None = self.get_waveband(
290
+ wavelength=header["LINEWAV"] * u.nm, wavelength_range=wavelength_range
291
+ )
292
+ if waveband:
293
+ header["WAVEBAND"] = waveband
288
294
  return header
289
295
 
290
296
  def l1_filename(self, header: fits.Header, stokes: Literal["I", "Q", "U", "V"]):
@@ -355,8 +361,14 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
355
361
  """
356
362
  # Replace header values in place
357
363
  header = self.replace_header_values(header=header, data=data)
358
- # Remove r0 value if AO not locked
359
- header = self.remove_invalid_r0_values(header=header)
364
+ # Remove r0 value if r0 conditions are not met
365
+ r0_is_valid = r0_valid(
366
+ r0=header["ATMOS_R0"],
367
+ ao_lock=header.get("AO_LOCK", None),
368
+ num_out_of_bounds_ao_values=header.get("OOBSHIFT", None),
369
+ )
370
+ if not r0_is_valid:
371
+ header.pop("ATMOS_R0", None)
360
372
  # Add the stats table
361
373
  header = self.add_stats_headers(header=header, data=data)
362
374
  # Add the datacenter table
@@ -367,8 +379,12 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
367
379
  header = self.add_doc_headers(header=header)
368
380
  # Add the dataset headers (abstract - implement in instrument task)
369
381
  header = self.add_dataset_headers(header=header, stokes=stokes_param)
382
+ # Add the timing headers
383
+ header = self.add_timing_headers(header=header)
384
+ # Add the spectral line headers
385
+ header = self.add_spectral_line_headers(header=header)
370
386
  # Remove any headers not contained in spec 214
371
- header = sanitize_to_spec214_level1(input_headers=header)
387
+ header = remove_spec_122_keys_and_spec_214_l0_keys(input_headers=header)
372
388
  # Remove any keys referring to axes that don't exist
373
389
  header = remove_extra_axis_keys(input_headers=header)
374
390
  return header
@@ -409,9 +425,9 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
409
425
  inst_name = self.constants.instrument.lower()
410
426
  calvers = self.version_from_module_name()
411
427
  header["CALVERS"] = calvers
412
- header[
413
- "CAL_URL"
414
- ] = f"{self.docs_base_url}/projects/{inst_name}/en/v{calvers}/{self.workflow_name}.html"
428
+ header["CAL_URL"] = (
429
+ f"{self.docs_base_url}/projects/{inst_name}/en/v{calvers}/{self.workflow_name}.html"
430
+ )
415
431
  return header
416
432
 
417
433
  def version_from_module_name(self) -> str:
@@ -489,7 +505,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
489
505
  as seen by an observer located at the DKIST site at the given time of observation.
490
506
  """
491
507
  dummy_theta_coord = 0 * u.arcsec
492
- dkist_at_obstime = self.location_of_dkist.get_itrs(obstime=obstime)
508
+ dkist_at_obstime = location_of_dkist.get_itrs(obstime=obstime)
493
509
  sun_coordinate = Helioprojective(
494
510
  Tx=dummy_theta_coord, Ty=dummy_theta_coord, observer=dkist_at_obstime
495
511
  )
@@ -498,6 +514,20 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
498
514
  @staticmethod
499
515
  def remove_invalid_r0_values(header: fits.Header) -> fits.Header:
500
516
  """Remove the Fried parameter r0 from the header if the AO is not locked."""
501
- if not header["AO_LOCK"]:
517
+ if header.get("AO_LOCK") is not True:
502
518
  header.pop("ATMOS_R0", None)
503
519
  return header
520
+
521
+ @staticmethod
522
+ def get_waveband(wavelength: u.Quantity, wavelength_range: WavelengthRange) -> str | None:
523
+ """
524
+ Get the spectral line information of the closest spectral line to the wavelength argument.
525
+
526
+ If the spectral line rest wavelength in air does not fall in the wavelength range of the data,
527
+ do not populate the keyword.
528
+ """
529
+ closest_line = get_closest_spectral_line(wavelength=wavelength)
530
+ rest_wavelength = closest_line.rest_wavelength_in_air
531
+ if rest_wavelength < wavelength_range.min or rest_wavelength > wavelength_range.max:
532
+ return None
533
+ return closest_line.name
@@ -1,6 +1,7 @@
1
1
  """
2
2
  Global test fixtures
3
3
  """
4
+
4
5
  import json
5
6
  from collections import defaultdict
6
7
  from copy import deepcopy
@@ -32,22 +33,9 @@ from dkist_processing_pac.optics.telescope import Telescope
32
33
  from dkist_processing_common._util.constants import ConstantsDb
33
34
  from dkist_processing_common._util.scratch import WorkflowFileSystem
34
35
  from dkist_processing_common._util.tags import TagDB
35
- from dkist_processing_common.models.graphql import InputDatasetInputDatasetPartResponse
36
- from dkist_processing_common.models.graphql import InputDatasetPartResponse
37
- from dkist_processing_common.models.graphql import InputDatasetPartTypeResponse
38
- from dkist_processing_common.models.graphql import InputDatasetRecipeInstanceResponse
39
- from dkist_processing_common.models.graphql import InputDatasetRecipeRunResponse
40
- from dkist_processing_common.models.graphql import InputDatasetResponse
41
- from dkist_processing_common.models.graphql import RecipeInstanceResponse
42
- from dkist_processing_common.models.graphql import RecipeRunProvenanceResponse
43
- from dkist_processing_common.models.graphql import RecipeRunResponse
44
- from dkist_processing_common.models.graphql import RecipeRunStatusResponse
45
- from dkist_processing_common.models.tags import Tag
46
36
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
47
37
  from dkist_processing_common.tasks import WorkflowTaskBase
48
- from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetMixin
49
-
50
- TILE_SIZE = 64
38
+ from dkist_processing_common.tests.mock_metadata_store import fake_gql_client
51
39
 
52
40
 
53
41
  @pytest.fixture()
@@ -100,6 +88,21 @@ def constants_db(recipe_run_id) -> ConstantsDb:
100
88
  constants.close()
101
89
 
102
90
 
91
+ @pytest.fixture()
92
+ def fake_constants_db() -> dict:
93
+ """
94
+ A fake constants DB to prevent key errors.
95
+
96
+ Usage on a task: task.constants._update(fake_constants_db)
97
+ """
98
+ db = {
99
+ "PROPOSAL_ID": "PROPID",
100
+ "INSTRUMENT": "INSTRUMENT",
101
+ "OBS_IP_START_TIME": "20240416T160000",
102
+ }
103
+ return db
104
+
105
+
103
106
  class CommonDataset(Spec122Dataset):
104
107
  def __init__(self, polarimetric: bool = True):
105
108
  super().__init__(
@@ -113,7 +116,7 @@ class CommonDataset(Spec122Dataset):
113
116
  self.add_constant_key("TELEVATN", 6.28)
114
117
  self.add_constant_key("TAZIMUTH", 3.14)
115
118
  self.add_constant_key("TTBLANGL", 1.23)
116
- self.add_constant_key("INST_FOO", "bar")
119
+ self.add_constant_key("VISP_012", "bar")
117
120
  self.add_constant_key("DKIST004", "observe")
118
121
  self.add_constant_key("ID___005", "ip id")
119
122
  self.add_constant_key("PAC__004", "Sapphire Polarizer")
@@ -286,18 +289,18 @@ def cs_step_angle_round_ndigits() -> int:
286
289
 
287
290
 
288
291
  @pytest.fixture(scope="session")
289
- def angle_random_max_perturabtion(cs_step_angle_round_ndigits) -> float:
292
+ def angle_random_max_perturbation(cs_step_angle_round_ndigits) -> float:
290
293
  # Ensures that we always round down to zero.
291
294
  # E.g., if ndigits = 1 then this value will be 0.049.
292
295
  return 10**-cs_step_angle_round_ndigits / 2 - 10 ** -(cs_step_angle_round_ndigits + 2)
293
296
 
294
297
 
295
298
  @pytest.fixture(scope="session")
296
- def grouped_cal_sequence_headers(angle_random_max_perturabtion) -> dict[int, list[L0FitsAccess]]:
299
+ def grouped_cal_sequence_headers(angle_random_max_perturbation) -> dict[int, list[L0FitsAccess]]:
297
300
  ds = CalibrationSequenceDataset(
298
301
  array_shape=(1, 2, 2),
299
302
  time_delta=2.0,
300
- angle_max_random_perturbation=angle_random_max_perturabtion,
303
+ angle_max_random_perturbation=angle_random_max_perturbation,
301
304
  )
302
305
  header_list = [
303
306
  spec122_validator.validate_and_translate_to_214_l0(d.header(), return_type=fits.HDUList)[
@@ -332,99 +335,9 @@ def max_cs_step_time_sec() -> float:
332
335
  return 20.0
333
336
 
334
337
 
335
- class FakeGQLClient:
336
- def __init__(self, *args, **kwargs):
337
- pass
338
-
339
- def execute_gql_query(self, **kwargs):
340
- query_base = kwargs["query_base"]
341
- if query_base == "recipeRunStatuses":
342
- return [RecipeRunStatusResponse(recipeRunStatusId=1)]
343
- if query_base == "recipeRuns":
344
- if kwargs.get("query_response_cls") == InputDatasetRecipeRunResponse:
345
- return [
346
- InputDatasetRecipeRunResponse(
347
- recipeInstance=InputDatasetRecipeInstanceResponse(
348
- inputDataset=InputDatasetResponse(
349
- inputDatasetId=1,
350
- isActive=True,
351
- inputDatasetInputDatasetParts=[
352
- InputDatasetInputDatasetPartResponse(
353
- inputDatasetPart=InputDatasetPartResponse(
354
- inputDatasetPartId=1,
355
- inputDatasetPartDocument='[{"parameterName": "", "parameterValues": [{"parameterValueId": 1, "parameterValue": "[[1,2,3],[4,5,6],[7,8,9]]", "parameterValueStartDate": "1/1/2000"}]}]',
356
- inputDatasetPartType=InputDatasetPartTypeResponse(
357
- inputDatasetPartTypeName="parameters"
358
- ),
359
- )
360
- ),
361
- InputDatasetInputDatasetPartResponse(
362
- inputDatasetPart=InputDatasetPartResponse(
363
- inputDatasetPartId=2,
364
- inputDatasetPartDocument="""[
365
- {
366
- "bucket": "bucket_name",
367
- "object_keys": [
368
- "key1",
369
- "key2"
370
- ]
371
- },
372
- ]""",
373
- inputDatasetPartType=InputDatasetPartTypeResponse(
374
- inputDatasetPartTypeName="observe_frames"
375
- ),
376
- )
377
- ),
378
- InputDatasetInputDatasetPartResponse(
379
- inputDatasetPart=InputDatasetPartResponse(
380
- inputDatasetPartId=3,
381
- inputDatasetPartDocument="""[
382
- {
383
- "bucket": "bucket_name",
384
- "object_keys": [
385
- "key3",
386
- "key4"
387
- ]
388
- },
389
- ]""",
390
- inputDatasetPartType=InputDatasetPartTypeResponse(
391
- inputDatasetPartTypeName="calibration_frames"
392
- ),
393
- )
394
- ),
395
- ],
396
- ),
397
- ),
398
- ),
399
- ]
400
-
401
- return [
402
- RecipeRunResponse(
403
- recipeInstanceId=1,
404
- recipeInstance=RecipeInstanceResponse(
405
- recipeId=1,
406
- inputDatasetId=1,
407
- ),
408
- configuration=f'{{"tile_size": {TILE_SIZE}}}',
409
- recipeRunProvenances=[
410
- RecipeRunProvenanceResponse(recipeRunProvenanceId=1, isTaskManual=False),
411
- ],
412
- ),
413
- ]
414
-
415
- @staticmethod
416
- def execute_gql_mutation(**kwargs):
417
- ...
418
-
419
-
420
- class FakeGQLClientNoRecipeConfiguration(FakeGQLClient):
421
- def execute_gql_query(self, **kwargs):
422
- response = super().execute_gql_query(**kwargs)
423
- response[0].configuration = None
424
- return response
425
-
426
-
427
- # All the following stuff is copied from dkist-processing-pac
338
+ ####################################
339
+ # Copied from dkist-processing-pac #
340
+ ####################################
428
341
  def compute_telgeom(time_hst: Time):
429
342
  dkist_lon = (156 + 15 / 60.0 + 21.7 / 3600.0) * (-1)
430
343
  dkist_lat = 20 + 42 / 60.0 + 27.0 / 3600.0
@@ -488,7 +401,7 @@ class CalibrationSequenceStepDataset(Spec122Dataset):
488
401
  return "none" if self.pol_status == "clear" else str(self.pol_theta)
489
402
 
490
403
  @key_function("PAC__006")
491
- def retarter_status(self, key: str) -> str:
404
+ def retarder_status(self, key: str) -> str:
492
405
  return self.ret_status
493
406
 
494
407
  @key_function("PAC__007")
@@ -740,7 +653,12 @@ def post_fit_polcal_fitter(
740
653
  return fitter
741
654
 
742
655
 
743
- class InputDatasetTask(WorkflowTaskBase, InputDatasetMixin):
656
+ #################
657
+ # Input Dataset #
658
+ #################
659
+
660
+
661
+ class InputDatasetTask(WorkflowTaskBase):
744
662
  def run(self):
745
663
  pass
746
664
 
@@ -768,65 +686,6 @@ def task_with_input_dataset(
768
686
  task.scratch.workflow_base_path = tmp_path / str(recipe_run_id)
769
687
  for part, tag in input_dataset_parts:
770
688
  file_path = task.scratch.workflow_base_path / Path(f"{uuid4().hex[:6]}.ext")
771
- file_path.write_text(data=json.dumps(part))
689
+ file_path.write_text(data=json.dumps({"doc_list": part}))
772
690
  task.tag(path=file_path, tags=tag)
773
691
  yield task
774
-
775
-
776
- def create_parameter_files(task: WorkflowTaskBase, expected_parameters: dict):
777
- """
778
- Create the parameter files required by the task.
779
-
780
- Parameters
781
- ----------
782
- task
783
- The task associated with these parameters
784
-
785
- expected_parameters
786
- A dict of parameters with the format shown below
787
-
788
- Returns
789
- -------
790
- None
791
-
792
- expected_parameters is a dict with the parameter names as the keys
793
- and the values are a list of value dicts for each parameter:
794
- expected_parameters =
795
- { 'parameter_name_1': [param_dict_1, param_dict_2, ...],
796
- 'parameter_name_2': [param_dict_1, param_dict_2, ...],
797
- ...
798
- }
799
- where the param_dicts have the following format:
800
- sample_param_dict =
801
- { "parameterValueId": <param_id>,
802
- "parameterValue": <param_value>,
803
- "parameterValueStartDate": <start_date>
804
- }
805
- """
806
- # Loop over all the parameter values. Each value is a list of parameterValue dicts
807
- for expected_parameter_values in expected_parameters.values():
808
- for value_dict in expected_parameter_values:
809
- if "__file__" not in value_dict["parameterValue"]:
810
- continue
811
- value = json.loads(value_dict["parameterValue"])
812
- param_path = value["__file__"]["objectKey"]
813
- file_path = task.scratch.workflow_base_path / Path(param_path)
814
- if not file_path.parent.exists():
815
- file_path.parent.mkdir(parents=True, exist_ok=True)
816
- file_path.write_text(data="")
817
- task.tag(path=file_path, tags=Tag.parameter(param_path))
818
-
819
-
820
- @pytest.fixture()
821
- def fake_constants_db() -> dict:
822
- """
823
- A fake constants DB to prevent key errors.
824
-
825
- Usage on a task: task.constants._update(fake_constants_db)
826
- """
827
- db = {
828
- "PROPOSAL_ID": "PROPID",
829
- "INSTRUMENT": "INSTRUMENT",
830
- "OBS_IP_START_TIME": "20240416T160000",
831
- }
832
- return db