dkist-processing-common 10.5.4__py3-none-any.whl → 12.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. changelog/280.misc.rst +1 -0
  2. changelog/282.feature.2.rst +2 -0
  3. changelog/282.feature.rst +2 -0
  4. changelog/284.feature.rst +1 -0
  5. changelog/285.feature.rst +2 -0
  6. changelog/285.misc.rst +2 -0
  7. changelog/286.feature.rst +2 -0
  8. changelog/287.misc.rst +1 -0
  9. dkist_processing_common/__init__.py +1 -0
  10. dkist_processing_common/_util/constants.py +1 -0
  11. dkist_processing_common/_util/graphql.py +1 -0
  12. dkist_processing_common/_util/scratch.py +9 -9
  13. dkist_processing_common/_util/tags.py +1 -0
  14. dkist_processing_common/codecs/array.py +20 -0
  15. dkist_processing_common/codecs/asdf.py +9 -3
  16. dkist_processing_common/codecs/basemodel.py +22 -0
  17. dkist_processing_common/codecs/bytes.py +1 -0
  18. dkist_processing_common/codecs/fits.py +37 -9
  19. dkist_processing_common/codecs/iobase.py +1 -0
  20. dkist_processing_common/codecs/json.py +1 -0
  21. dkist_processing_common/codecs/path.py +1 -0
  22. dkist_processing_common/codecs/quality.py +1 -1
  23. dkist_processing_common/codecs/str.py +1 -0
  24. dkist_processing_common/config.py +64 -25
  25. dkist_processing_common/manual.py +6 -8
  26. dkist_processing_common/models/constants.py +373 -37
  27. dkist_processing_common/models/dkist_location.py +27 -0
  28. dkist_processing_common/models/fits_access.py +48 -0
  29. dkist_processing_common/models/flower_pot.py +231 -9
  30. dkist_processing_common/models/fried_parameter.py +41 -0
  31. dkist_processing_common/models/graphql.py +66 -75
  32. dkist_processing_common/models/input_dataset.py +117 -0
  33. dkist_processing_common/models/message.py +1 -1
  34. dkist_processing_common/models/message_queue_binding.py +1 -1
  35. dkist_processing_common/models/metric_code.py +2 -0
  36. dkist_processing_common/models/parameters.py +65 -28
  37. dkist_processing_common/models/quality.py +50 -5
  38. dkist_processing_common/models/tags.py +23 -21
  39. dkist_processing_common/models/task_name.py +3 -2
  40. dkist_processing_common/models/telemetry.py +28 -0
  41. dkist_processing_common/models/wavelength.py +3 -1
  42. dkist_processing_common/parsers/average_bud.py +46 -0
  43. dkist_processing_common/parsers/cs_step.py +13 -12
  44. dkist_processing_common/parsers/dsps_repeat.py +6 -4
  45. dkist_processing_common/parsers/experiment_id_bud.py +12 -4
  46. dkist_processing_common/parsers/id_bud.py +42 -27
  47. dkist_processing_common/parsers/l0_fits_access.py +5 -3
  48. dkist_processing_common/parsers/l1_fits_access.py +51 -23
  49. dkist_processing_common/parsers/lookup_bud.py +125 -0
  50. dkist_processing_common/parsers/near_bud.py +21 -20
  51. dkist_processing_common/parsers/observing_program_id_bud.py +24 -0
  52. dkist_processing_common/parsers/proposal_id_bud.py +13 -5
  53. dkist_processing_common/parsers/quality.py +2 -0
  54. dkist_processing_common/parsers/retarder.py +32 -0
  55. dkist_processing_common/parsers/single_value_single_key_flower.py +6 -1
  56. dkist_processing_common/parsers/task.py +8 -6
  57. dkist_processing_common/parsers/time.py +178 -72
  58. dkist_processing_common/parsers/unique_bud.py +21 -22
  59. dkist_processing_common/parsers/wavelength.py +5 -3
  60. dkist_processing_common/tasks/__init__.py +3 -2
  61. dkist_processing_common/tasks/assemble_movie.py +4 -3
  62. dkist_processing_common/tasks/base.py +59 -60
  63. dkist_processing_common/tasks/l1_output_data.py +54 -53
  64. dkist_processing_common/tasks/mixin/globus.py +24 -27
  65. dkist_processing_common/tasks/mixin/interservice_bus.py +1 -0
  66. dkist_processing_common/tasks/mixin/metadata_store.py +108 -243
  67. dkist_processing_common/tasks/mixin/object_store.py +22 -0
  68. dkist_processing_common/tasks/mixin/quality/__init__.py +1 -0
  69. dkist_processing_common/tasks/mixin/quality/_base.py +8 -1
  70. dkist_processing_common/tasks/mixin/quality/_metrics.py +166 -14
  71. dkist_processing_common/tasks/output_data_base.py +4 -3
  72. dkist_processing_common/tasks/parse_l0_input_data.py +277 -15
  73. dkist_processing_common/tasks/quality_metrics.py +9 -9
  74. dkist_processing_common/tasks/teardown.py +7 -7
  75. dkist_processing_common/tasks/transfer_input_data.py +67 -69
  76. dkist_processing_common/tasks/trial_catalog.py +77 -17
  77. dkist_processing_common/tasks/trial_output_data.py +16 -17
  78. dkist_processing_common/tasks/write_l1.py +102 -72
  79. dkist_processing_common/tests/conftest.py +32 -173
  80. dkist_processing_common/tests/mock_metadata_store.py +271 -0
  81. dkist_processing_common/tests/test_assemble_movie.py +4 -4
  82. dkist_processing_common/tests/test_assemble_quality.py +32 -4
  83. dkist_processing_common/tests/test_base.py +5 -19
  84. dkist_processing_common/tests/test_codecs.py +103 -12
  85. dkist_processing_common/tests/test_constants.py +15 -0
  86. dkist_processing_common/tests/test_dkist_location.py +15 -0
  87. dkist_processing_common/tests/test_fits_access.py +56 -19
  88. dkist_processing_common/tests/test_flower_pot.py +147 -5
  89. dkist_processing_common/tests/test_fried_parameter.py +27 -0
  90. dkist_processing_common/tests/test_input_dataset.py +78 -361
  91. dkist_processing_common/tests/test_interservice_bus.py +1 -0
  92. dkist_processing_common/tests/test_interservice_bus_mixin.py +1 -1
  93. dkist_processing_common/tests/test_manual_processing.py +33 -0
  94. dkist_processing_common/tests/test_output_data_base.py +5 -7
  95. dkist_processing_common/tests/test_parameters.py +71 -22
  96. dkist_processing_common/tests/test_parse_l0_input_data.py +115 -32
  97. dkist_processing_common/tests/test_publish_catalog_messages.py +2 -24
  98. dkist_processing_common/tests/test_quality.py +1 -0
  99. dkist_processing_common/tests/test_quality_mixin.py +255 -23
  100. dkist_processing_common/tests/test_scratch.py +2 -1
  101. dkist_processing_common/tests/test_stems.py +511 -168
  102. dkist_processing_common/tests/test_submit_dataset_metadata.py +3 -7
  103. dkist_processing_common/tests/test_tags.py +1 -0
  104. dkist_processing_common/tests/test_task_name.py +1 -1
  105. dkist_processing_common/tests/test_task_parsing.py +17 -7
  106. dkist_processing_common/tests/test_teardown.py +28 -24
  107. dkist_processing_common/tests/test_transfer_input_data.py +270 -125
  108. dkist_processing_common/tests/test_transfer_l1_output_data.py +2 -3
  109. dkist_processing_common/tests/test_trial_catalog.py +83 -8
  110. dkist_processing_common/tests/test_trial_output_data.py +46 -73
  111. dkist_processing_common/tests/test_workflow_task_base.py +8 -10
  112. dkist_processing_common/tests/test_write_l1.py +298 -76
  113. dkist_processing_common-12.1.0rc1.dist-info/METADATA +265 -0
  114. dkist_processing_common-12.1.0rc1.dist-info/RECORD +134 -0
  115. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/WHEEL +1 -1
  116. docs/conf.py +1 -0
  117. docs/index.rst +1 -1
  118. docs/landing_page.rst +13 -0
  119. dkist_processing_common/tasks/mixin/input_dataset.py +0 -166
  120. dkist_processing_common-10.5.4.dist-info/METADATA +0 -175
  121. dkist_processing_common-10.5.4.dist-info/RECORD +0 -112
  122. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/top_level.txt +0 -0
@@ -18,13 +18,70 @@ from dkist_processing_common._util.scratch import WorkflowFileSystem
18
18
  from dkist_processing_common.codecs.fits import fits_hdu_decoder
19
19
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
20
20
  from dkist_processing_common.models.graphql import RecipeRunProvenanceResponse
21
- from dkist_processing_common.models.graphql import RecipeRunResponse
22
21
  from dkist_processing_common.models.tags import Tag
23
22
  from dkist_processing_common.models.wavelength import WavelengthRange
24
23
  from dkist_processing_common.tasks.write_l1 import WriteL1Frame
25
- from dkist_processing_common.tests.conftest import FakeGQLClient
26
- from dkist_processing_common.tests.conftest import FakeGQLClientNoRecipeConfiguration
27
- from dkist_processing_common.tests.conftest import TILE_SIZE
24
+ from dkist_processing_common.tests.mock_metadata_store import TILE_SIZE
25
+ from dkist_processing_common.tests.mock_metadata_store import InputDatasetRecipeRunResponseMapping
26
+ from dkist_processing_common.tests.mock_metadata_store import RecipeRunResponseMapping
27
+ from dkist_processing_common.tests.mock_metadata_store import fake_gql_client_factory
28
+ from dkist_processing_common.tests.mock_metadata_store import (
29
+ make_default_input_dataset_recipe_run_response,
30
+ )
31
+ from dkist_processing_common.tests.mock_metadata_store import make_default_recipe_run_response
32
+
33
+
34
+ @pytest.fixture
35
+ def fake_gql_client_default_configuration():
36
+ """Create GraphQL client Mock that returns result without recipe run configuration."""
37
+ recipe_run_response = make_default_recipe_run_response()
38
+ recipe_run_response.configuration = None
39
+ new_response_mapping = RecipeRunResponseMapping(response=recipe_run_response)
40
+ FakeGQLClientDefaultConfiguration = fake_gql_client_factory(
41
+ response_mapping_override=new_response_mapping
42
+ )
43
+
44
+ return FakeGQLClientDefaultConfiguration
45
+
46
+
47
+ @pytest.fixture
48
+ def fake_gql_client_missing_calibration_part():
49
+ """Create GraphQL client Mock that returns result without calibration part."""
50
+ input_dataset_recipe_run_response = make_default_input_dataset_recipe_run_response()
51
+ dataset_parts = (
52
+ input_dataset_recipe_run_response.recipeInstance.inputDataset.inputDatasetInputDatasetParts
53
+ )
54
+ for index, part in enumerate(dataset_parts):
55
+ if (
56
+ part.inputDatasetPart.inputDatasetPartType.inputDatasetPartTypeName
57
+ == "calibration_frames"
58
+ ):
59
+ del dataset_parts[index]
60
+ new_response_mapping = InputDatasetRecipeRunResponseMapping(
61
+ response=input_dataset_recipe_run_response
62
+ )
63
+ FakeGQLClientMissingInputDatasetCalibrationPart = fake_gql_client_factory(
64
+ response_mapping_override=new_response_mapping
65
+ )
66
+
67
+ return FakeGQLClientMissingInputDatasetCalibrationPart
68
+
69
+
70
+ @pytest.fixture()
71
+ def make_fake_gql_client_with_provenance():
72
+ """Create GraphQL client Mocks that will return customizable provenance records."""
73
+
74
+ def class_generator(provenances: list[RecipeRunProvenanceResponse]):
75
+ recipe_run_response = make_default_recipe_run_response()
76
+ recipe_run_response.recipeRunProvenances = provenances
77
+ new_response_mapping = RecipeRunResponseMapping(response=recipe_run_response)
78
+ FakeGQLClientProvenances = fake_gql_client_factory(
79
+ response_mapping_override=new_response_mapping
80
+ )
81
+
82
+ return FakeGQLClientProvenances
83
+
84
+ return class_generator
28
85
 
29
86
 
30
87
  class CompleteWriteL1Frame(WriteL1Frame):
@@ -87,6 +144,12 @@ class CompleteWriteL1Frame(WriteL1Frame):
87
144
  return WavelengthRange(min=1075.0 * u.nm, max=1085.0 * u.nm)
88
145
 
89
146
 
147
+ class CompleteWriteL1FrameWithEmptyWaveband(CompleteWriteL1Frame):
148
+ def get_wavelength_range(self, header: fits.Header) -> WavelengthRange:
149
+ # Return an empty range to test the empty waveband case
150
+ return WavelengthRange(min=10000.0 * u.nm, max=10050.0 * u.nm)
151
+
152
+
90
153
  @dataclass
91
154
  class FakeConstantDb:
92
155
  INSTRUMENT: str = "TEST"
@@ -121,6 +184,45 @@ def write_l1_task(request, recipe_run_id, tmp_path):
121
184
  stokes_params = ["I", "Q", "U", "V"]
122
185
  used_stokes_params = []
123
186
  hdu = fits.PrimaryHDU(data=np.random.random(size=(1, 128, 128)) * 10, header=header)
187
+ hdu.header["IPTASK"] = "level0_only key to be removed"
188
+ hdul = fits.HDUList([hdu])
189
+ for i in range(num_of_stokes_params):
190
+ task.write(
191
+ data=hdul,
192
+ tags=[
193
+ Tag.calibrated(),
194
+ Tag.frame(),
195
+ Tag.stokes(stokes_params[i]),
196
+ Tag.dsps_repeat(i),
197
+ ],
198
+ encoder=fits_hdulist_encoder,
199
+ )
200
+ used_stokes_params.append(stokes_params[i])
201
+ task.constants._update(asdict(FakeConstantDb()))
202
+ yield task, used_stokes_params, header
203
+ task._purge()
204
+
205
+
206
+ @pytest.fixture(
207
+ scope="function",
208
+ params=[
209
+ pytest.param((1, "complete_common_header"), id="Intensity"),
210
+ pytest.param((4, "complete_polarimetric_header"), id="Polarimetric"),
211
+ ],
212
+ )
213
+ def write_l1_task_with_empty_waveband(recipe_run_id, tmp_path, request):
214
+ with CompleteWriteL1FrameWithEmptyWaveband(
215
+ recipe_run_id=recipe_run_id,
216
+ workflow_name="workflow_name",
217
+ workflow_version="workflow_version",
218
+ ) as task:
219
+ task.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, scratch_base_path=tmp_path)
220
+ num_of_stokes_params, header_fixture_name = request.param
221
+ header = request.getfixturevalue(header_fixture_name)
222
+ stokes_params = ["I", "Q", "U", "V"]
223
+ used_stokes_params = []
224
+ hdu = fits.PrimaryHDU(data=np.random.random(size=(1, 128, 128)) * 10, header=header)
225
+ hdu.header["IPTASK"] = "level0_only key to be removed"
124
226
  hdul = fits.HDUList([hdu])
125
227
  for i in range(num_of_stokes_params):
126
228
  task.write(
@@ -142,23 +244,46 @@ def write_l1_task(request, recipe_run_id, tmp_path):
142
244
  @pytest.fixture(
143
245
  scope="function",
144
246
  params=[
145
- pytest.param(True, id="AO_lock_on"),
146
- pytest.param(False, id="AO_lock_off"),
247
+ pytest.param(
248
+ {"AO_LOCK": True, "ATMOS_R0": 0.2, "OOBSHIFT": 17}, id="AO_LOCK_True_good_R0_good_oob"
249
+ ),
250
+ pytest.param(
251
+ {"AO_LOCK": True, "ATMOS_R0": 1, "OOBSHIFT": 17}, id="AO_LOCK_True_bad_R0_good_oob"
252
+ ),
253
+ pytest.param(
254
+ {"AO_LOCK": False, "ATMOS_R0": 0.2, "OOBSHIFT": 17}, id="AO_LOCK_False_good_R0_good_oob"
255
+ ),
256
+ pytest.param(
257
+ {"AO_LOCK": False, "ATMOS_R0": 1, "OOBSHIFT": 17}, id="AO_LOCK_False_bad_R0_good_oob"
258
+ ),
259
+ pytest.param(
260
+ {"AO_LOCK": True, "ATMOS_R0": 0.2, "OOBSHIFT": 150}, id="AO_LOCK_True_good_R0_bad_oob"
261
+ ),
262
+ pytest.param(
263
+ {"AO_LOCK": True, "ATMOS_R0": 1, "OOBSHIFT": 150}, id="AO_LOCK_True_bad_R0_bad_oob"
264
+ ),
265
+ pytest.param(
266
+ {"AO_LOCK": False, "ATMOS_R0": 0.2, "OOBSHIFT": 150}, id="AO_LOCK_False_good_R0_bad_oob"
267
+ ),
268
+ pytest.param(
269
+ {"AO_LOCK": False, "ATMOS_R0": 1, "OOBSHIFT": 150}, id="AO_LOCK_False_bad_R0_bad_oob"
270
+ ),
271
+ pytest.param({"ATMOS_R0": 0.2, "OOBSHIFT": 17}, id="AO_LOCK_missing"),
272
+ pytest.param({"ATMOS_R0": 0.2, "AO_LOCK": True}, id="OOBSHIFT_missing"),
147
273
  ],
148
274
  )
149
275
  def write_l1_task_no_data(request, recipe_run_id, tmp_path, complete_common_header):
150
- with (
151
- CompleteWriteL1Frame(
152
- recipe_run_id=recipe_run_id,
153
- workflow_name="workflow_name",
154
- workflow_version="workflow_version",
155
- ) as task
156
- ):
276
+ with CompleteWriteL1Frame(
277
+ recipe_run_id=recipe_run_id,
278
+ workflow_name="workflow_name",
279
+ workflow_version="workflow_version",
280
+ ) as task:
157
281
  task.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, scratch_base_path=tmp_path)
158
282
  header = complete_common_header
159
- header["AO_LOCK"] = request.param
160
- fried_parameter = 0.2
161
- header["ATMOS_R0"] = fried_parameter
283
+ header.pop("AO_LOCK", None)
284
+ header.pop("ATMOS_R0", None)
285
+ header.pop("OOBSHIFT", None)
286
+ header.update(request.param)
162
287
  hdu = fits.PrimaryHDU(data=np.random.random(size=(1, 1, 1)) * 1, header=header)
163
288
  hdul = fits.HDUList([hdu])
164
289
  task.write(
@@ -170,29 +295,12 @@ def write_l1_task_no_data(request, recipe_run_id, tmp_path, complete_common_head
170
295
  encoder=fits_hdulist_encoder,
171
296
  )
172
297
  task.constants._update(asdict(FakeConstantDb()))
173
- yield task, header, fried_parameter
298
+ fried_parameter = request.param["ATMOS_R0"]
299
+ oob_shift = request.param.get("OOBSHIFT")
300
+ yield task, header, fried_parameter, oob_shift
174
301
  task._purge()
175
302
 
176
303
 
177
- @pytest.fixture()
178
- def make_mock_gql_client_with_provenance():
179
- """Factory to create GraphQL client Mocks that will return customizable provenance records."""
180
-
181
- def factory(provenances: list[RecipeRunProvenanceResponse]):
182
- class WriteL1FakeGQLClient(FakeGQLClient):
183
- def execute_gql_query(self, **kwargs):
184
- response = super().execute_gql_query(**kwargs)
185
- if isinstance(response, list):
186
- if isinstance(response[0], RecipeRunResponse):
187
- response: list[RecipeRunResponse]
188
- response[0].recipeRunProvenances = provenances
189
- return response
190
-
191
- return WriteL1FakeGQLClient
192
-
193
- return factory
194
-
195
-
196
304
  @pytest.mark.parametrize(
197
305
  "provenances, is_manual",
198
306
  [
@@ -227,7 +335,7 @@ def make_mock_gql_client_with_provenance():
227
335
  def test_write_l1_frame(
228
336
  write_l1_task,
229
337
  mocker,
230
- make_mock_gql_client_with_provenance,
338
+ make_fake_gql_client_with_provenance,
231
339
  provenances: list[RecipeRunProvenanceResponse],
232
340
  is_manual,
233
341
  ):
@@ -236,7 +344,7 @@ def test_write_l1_frame(
236
344
  :When: running the task
237
345
  :Then: no errors are raised and the MANPROC and FRAMEVOL headers are correct
238
346
  """
239
- WriteL1GQLClient = make_mock_gql_client_with_provenance(provenances=provenances)
347
+ WriteL1GQLClient = make_fake_gql_client_with_provenance(provenances=provenances)
240
348
 
241
349
  mocker.patch(
242
350
  "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=WriteL1GQLClient
@@ -265,6 +373,9 @@ def test_write_l1_frame(
265
373
  # Test that FRAMEVOL still has its comment
266
374
  assert hdu.header.comments["FRAMEVOL"]
267
375
 
376
+ # Test that 'level0_only' keys are being removed
377
+ assert "IPTASK" not in hdu.header.keys()
378
+
268
379
 
269
380
  def test_replace_header_values(write_l1_task):
270
381
  """
@@ -316,14 +427,14 @@ def test_calculate_telapse(write_l1_task):
316
427
  assert task.calculate_telapse(header=header) == 86400
317
428
 
318
429
 
319
- def test_solarnet_keys(write_l1_task, mocker):
430
+ def test_solarnet_keys(write_l1_task, mocker, fake_gql_client):
320
431
  """
321
432
  :Given: files with headers converted to SPEC 214 L1
322
433
  :When: checking the solarnet extra headers
323
434
  :Then: the correct values are found
324
435
  """
325
436
  mocker.patch(
326
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
437
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
327
438
  )
328
439
  mocker.patch(
329
440
  "dkist_processing_common.tasks.write_l1.WriteL1Frame.version_from_module_name",
@@ -348,14 +459,14 @@ def test_solarnet_keys(write_l1_task, mocker):
348
459
  assert header["WAVEMAX"] == 1085.0
349
460
 
350
461
 
351
- def test_documentation_keys(write_l1_task, mocker):
462
+ def test_documentation_keys(write_l1_task, mocker, fake_gql_client):
352
463
  """
353
464
  :Given: files with headers converted to SPEC 214 L1
354
465
  :When: checking the documentation header URLs
355
466
  :Then: the correct values are found
356
467
  """
357
468
  mocker.patch(
358
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
469
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
359
470
  )
360
471
  mocker.patch(
361
472
  "dkist_processing_common.tasks.write_l1.WriteL1Frame.version_from_module_name",
@@ -386,9 +497,9 @@ def test_get_version_from_module(write_l1_task):
386
497
  assert task.version_from_module_name() == common_version
387
498
 
388
499
 
389
- def test_get_tile_size(write_l1_task, mocker):
500
+ def test_get_tile_size(write_l1_task, mocker, fake_gql_client):
390
501
  mocker.patch(
391
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
502
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
392
503
  )
393
504
  task, _, _ = write_l1_task
394
505
  test_array = np.zeros((1, TILE_SIZE // 2, TILE_SIZE * 2))
@@ -396,28 +507,31 @@ def test_get_tile_size(write_l1_task, mocker):
396
507
  assert tile_size == [1, TILE_SIZE // 2, TILE_SIZE]
397
508
 
398
509
 
399
- def test_rice_compression_with_specified_tile_size(write_l1_task, mocker):
510
+ def test_rice_compression_with_specified_tile_size(write_l1_task, mocker, fake_gql_client):
400
511
  """
401
512
  :Given: a write_L1 task with a specified tile size in the recipe configuration
402
513
  :When: running the task
403
514
  :Then: data is written with the compression tile size specified in the recipe configuration
404
515
  """
405
516
  mocker.patch(
406
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
517
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
407
518
  )
408
519
  task, _, _ = write_l1_task
409
520
  task()
410
521
  files = list(task.read(tags=[Tag.frame(), Tag.output()]))
411
522
  for file in files:
412
523
  hdul = fits.open(file)
413
- comp_header = hdul[1]._header
524
+ bintable = hdul[1]._get_bintable_without_data()
525
+ comp_header = bintable.header
414
526
  data_shape = list(hdul[1].data.shape)
415
527
  data_shape.reverse()
416
528
  for i, dim in enumerate(data_shape):
417
529
  assert comp_header["ZTILE" + str(i + 1)] == min(dim, TILE_SIZE)
418
530
 
419
531
 
420
- def test_rice_compression_with_default_tile_size(write_l1_task, mocker):
532
+ def test_rice_compression_with_default_tile_size(
533
+ write_l1_task, mocker, fake_gql_client_default_configuration
534
+ ):
421
535
  """
422
536
  :Given: a write_L1 task with no specified tile size in the recipe configuration
423
537
  :When: running the task
@@ -427,14 +541,16 @@ def test_rice_compression_with_default_tile_size(write_l1_task, mocker):
427
541
  """
428
542
  mocker.patch(
429
543
  "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
430
- new=FakeGQLClientNoRecipeConfiguration,
544
+ new=fake_gql_client_default_configuration,
431
545
  )
432
546
  task, _, _ = write_l1_task
433
547
  task()
548
+ assert task.tile_size_param == None
434
549
  files = list(task.read(tags=[Tag.frame(), Tag.output()]))
435
550
  for file in files:
436
551
  hdul = fits.open(file)
437
- comp_header = hdul[1]._header
552
+ bintable = hdul[1]._get_bintable_without_data()
553
+ comp_header = bintable.header
438
554
  data_shape = list(hdul[1].data.shape)
439
555
  data_shape.reverse()
440
556
  assert comp_header["ZTILE1"] == data_shape[0]
@@ -442,7 +558,7 @@ def test_rice_compression_with_default_tile_size(write_l1_task, mocker):
442
558
  assert comp_header["ZTILE3"] == 1
443
559
 
444
560
 
445
- def test_reprocessing_keys(write_l1_task, mocker):
561
+ def test_reprocessing_keys(write_l1_task, mocker, fake_gql_client):
446
562
  """
447
563
  :Given: a write_L1 task with reprocessing keys present
448
564
  :When: running the task
@@ -450,18 +566,73 @@ def test_reprocessing_keys(write_l1_task, mocker):
450
566
  """
451
567
  mocker.patch(
452
568
  "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
453
- new=FakeGQLClient,
569
+ new=fake_gql_client,
454
570
  )
455
571
  task, _, _ = write_l1_task
456
572
  task()
457
573
  files = list(task.read(tags=[Tag.frame(), Tag.output()]))
458
574
  for file in files:
459
575
  header = fits.open(file)[1].header
460
- assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters_part_id
461
- assert header["IDSOBSID"] == task.metadata_store_input_dataset_observe_frames_part_id
462
- assert header["IDSCALID"] == task.metadata_store_input_dataset_calibration_frames_part_id
576
+ assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters.inputDatasetPartId
577
+ assert (
578
+ header["IDSOBSID"]
579
+ == task.metadata_store_input_dataset_observe_frames.inputDatasetPartId
580
+ )
581
+ assert (
582
+ header["IDSCALID"]
583
+ == task.metadata_store_input_dataset_calibration_frames.inputDatasetPartId
584
+ )
463
585
  assert header["WKFLNAME"] == task.workflow_name
464
586
  assert header["WKFLVERS"] == task.workflow_version
587
+ assert header["PROCTYPE"] == "L1"
588
+ assert header["PRODUCT"] == task.compute_product_id(header["IDSOBSID"], header["PROCTYPE"])
589
+
590
+
591
+ def test_missing_input_dataset_part(
592
+ write_l1_task, mocker, fake_gql_client_missing_calibration_part
593
+ ):
594
+ """
595
+ :Given: a Write_L1 task with a missing calibration frames part
596
+ :When: running the task
597
+ :Then: the input dataset part keys are correctly written without throwing an exception
598
+ """
599
+ mocker.patch(
600
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
601
+ new=fake_gql_client_missing_calibration_part,
602
+ )
603
+ task, _, _ = write_l1_task
604
+ task()
605
+ files = list(task.read(tags=[Tag.frame(), Tag.output()]))
606
+ for file in files:
607
+ header = fits.open(file)[1].header
608
+ assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters.inputDatasetPartId
609
+ assert (
610
+ header["IDSOBSID"]
611
+ == task.metadata_store_input_dataset_observe_frames.inputDatasetPartId
612
+ )
613
+ assert "IDSCALID" not in header
614
+
615
+
616
+ @pytest.mark.parametrize(
617
+ "ids_obs_id, proc_type",
618
+ [
619
+ pytest.param(42, "alpha", id="42"),
620
+ pytest.param(1_000, "beta", id="thousand"),
621
+ pytest.param(1_000_000, "gamma", id="million"),
622
+ ],
623
+ )
624
+ def test_product_id_calculation(ids_obs_id: int, proc_type: str):
625
+ """
626
+ Given: integer IDSOBSID and string PROCTYPE
627
+ When: calculating the productId
628
+ Then: the productId is computed properly
629
+ """
630
+ product_id = WriteL1Frame.compute_product_id(ids_obs_id, proc_type)
631
+ assert isinstance(product_id, str)
632
+ assert product_id.startswith(f"{proc_type}-")
633
+ assert len(product_id) >= len(proc_type) + 6
634
+ # same result the second time around
635
+ assert product_id == WriteL1Frame.compute_product_id(ids_obs_id, proc_type)
465
636
 
466
637
 
467
638
  def test_calculate_date_end(write_l1_task):
@@ -493,7 +664,7 @@ def test_add_contributing_id_headers(write_l1_task):
493
664
  assert header["NEXPERS"] == 3
494
665
 
495
666
 
496
- def test_spectral_line_keys(write_l1_task, mocker):
667
+ def test_spectral_line_keys(write_l1_task, mocker, fake_gql_client):
497
668
  """
498
669
  :Given: a header
499
670
  :When: adding spectral line information to the headers
@@ -501,10 +672,10 @@ def test_spectral_line_keys(write_l1_task, mocker):
501
672
  """
502
673
  mocker.patch(
503
674
  "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
504
- new=FakeGQLClient,
675
+ new=fake_gql_client,
505
676
  )
506
677
  task, _, header = write_l1_task
507
- header = task.add_datacenter_headers(header=header, hdu_size=1024, stokes="I")
678
+ header = task.add_spectral_line_headers(header=header)
508
679
  assert header["SPECLN01"] == "Fe XIII (1079.8 nm)"
509
680
  assert header["SPECLN02"] == "He I (1083.0 nm)"
510
681
  assert header["NSPECLNS"] == 2
@@ -512,32 +683,83 @@ def test_spectral_line_keys(write_l1_task, mocker):
512
683
  assert header["SPECLN03"]
513
684
 
514
685
 
515
- @pytest.mark.flaky(max_reruns=10)
516
- def test_location_of_dkist(write_l1_task):
517
- """
518
- Given: function for retrieving the dkist location on earth
519
- When: Call function
520
- Then: result is the same as what is in the astropy online database
521
- """
522
- task, _, _ = write_l1_task
523
- itrs = task.location_of_dkist
524
- assert itrs == EarthLocation.of_site("dkist")
525
-
526
-
527
686
  def test_check_r0_ao_lock(write_l1_task_no_data):
528
687
  """
529
688
  :Given: a header
530
689
  :When: writing, check if the AO lock is on
531
690
  :Then: write the r0 value if AO lock on, don't write if AO lock off
532
691
  """
533
- task, header, r0 = write_l1_task_no_data
692
+ task, header, r0, _ = write_l1_task_no_data
534
693
  header_after_check = task.remove_invalid_r0_values(header=header)
535
- if header["AO_LOCK"]:
694
+ if header.get("AO_LOCK"):
536
695
  assert header_after_check["ATMOS_R0"] == header["ATMOS_R0"]
537
696
  assert header["ATMOS_R0"] == r0
538
697
  assert header["AO_LOCK"]
539
- if not header["AO_LOCK"]:
540
- with pytest.raises(KeyError) as ke:
698
+ else:
699
+ with pytest.raises(KeyError, match="Keyword 'ATMOS_R0' not found"):
541
700
  invalid_r0 = header_after_check["ATMOS_R0"]
542
- assert "Keyword 'ATMOS_R0' not found" in str(ke)
543
- assert not header["AO_LOCK"]
701
+ assert header.get("AO_LOCK") != True
702
+
703
+
704
+ @pytest.mark.parametrize(
705
+ "wavelength, wavemin, wavemax, expected",
706
+ [
707
+ pytest.param(
708
+ 617,
709
+ 615,
710
+ 619,
711
+ "Fe I (617.33 nm)",
712
+ id="line_is_between_wavemin_and_wavemax_and_exists",
713
+ ),
714
+ pytest.param(
715
+ 700,
716
+ 698,
717
+ 702,
718
+ None,
719
+ id="line_is_between_wavemin_and_wavemax_and_does_not_exist",
720
+ ),
721
+ pytest.param(
722
+ 617,
723
+ 698,
724
+ 702,
725
+ None,
726
+ id="line_is_not_between_wavemin_and_wavemax_and_exists",
727
+ ),
728
+ ],
729
+ )
730
+ def test_get_waveband(write_l1_task, wavelength, wavemin, wavemax, expected):
731
+ """
732
+ :Given: an input wavelength contribution
733
+ :When: determining the waveband
734
+ :Then: the correct waveband is returned
735
+ """
736
+ wavelength_range = WavelengthRange(min=wavemin * u.nm, max=wavemax * u.nm)
737
+ task, _, _ = write_l1_task
738
+ waveband = task.get_waveband(wavelength=wavelength * u.nm, wavelength_range=wavelength_range)
739
+ assert waveband == expected
740
+
741
+
742
+ def test_empty_waveband(write_l1_task_with_empty_waveband, mocker, fake_gql_client):
743
+ """
744
+ :Given: a header converted to SPEC 214 L1 and a wavelength range that has no listed spectral lines
745
+ :When: checking the waveband key
746
+ :Then: it does not exist
747
+ """
748
+ mocker.patch(
749
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
750
+ )
751
+ mocker.patch(
752
+ "dkist_processing_common.tasks.write_l1.WriteL1Frame.version_from_module_name",
753
+ new_callable=Mock,
754
+ return_value="fake_version_number",
755
+ )
756
+
757
+ task, _, _ = write_l1_task_with_empty_waveband
758
+ task()
759
+ files = list(task.read(tags=[Tag.frame(), Tag.output()]))
760
+ for file in files:
761
+ header = fits.open(file)[1].header
762
+ assert header["WAVEMIN"] == 10000
763
+ assert header["WAVEMAX"] == 10050
764
+ with pytest.raises(KeyError):
765
+ header["WAVEBAND"]