dkist-processing-common 10.8.1rc1__tar.gz → 10.8.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (135)
  1. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/CHANGELOG.rst +18 -0
  2. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/PKG-INFO +2 -2
  3. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/fits.py +6 -12
  4. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/manual.py +5 -3
  5. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/graphql.py +3 -13
  6. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/parameters.py +28 -65
  7. dkist_processing_common-10.8.2/dkist_processing_common/tasks/mixin/input_dataset.py +166 -0
  8. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/metadata_store.py +4 -7
  9. dkist_processing_common-10.8.2/dkist_processing_common/tasks/transfer_input_data.py +129 -0
  10. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/write_l1.py +20 -1
  11. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/conftest.py +7 -24
  12. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_codecs.py +0 -38
  13. dkist_processing_common-10.8.2/dkist_processing_common/tests/test_input_dataset.py +509 -0
  14. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_parameters.py +22 -71
  15. dkist_processing_common-10.8.2/dkist_processing_common/tests/test_transfer_input_data.py +170 -0
  16. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_write_l1.py +110 -2
  17. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common.egg-info/PKG-INFO +2 -2
  18. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common.egg-info/SOURCES.txt +1 -6
  19. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common.egg-info/requires.txt +1 -1
  20. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/pyproject.toml +1 -1
  21. dkist_processing_common-10.8.1rc1/changelog/235.feature.rst +0 -3
  22. dkist_processing_common-10.8.1rc1/changelog/235.misc.1.rst +0 -2
  23. dkist_processing_common-10.8.1rc1/changelog/235.misc.rst +0 -1
  24. dkist_processing_common-10.8.1rc1/dkist_processing_common/codecs/array.py +0 -19
  25. dkist_processing_common-10.8.1rc1/dkist_processing_common/codecs/basemodel.py +0 -21
  26. dkist_processing_common-10.8.1rc1/dkist_processing_common/models/input_dataset.py +0 -113
  27. dkist_processing_common-10.8.1rc1/dkist_processing_common/tasks/transfer_input_data.py +0 -120
  28. dkist_processing_common-10.8.1rc1/dkist_processing_common/tests/test_input_dataset.py +0 -280
  29. dkist_processing_common-10.8.1rc1/dkist_processing_common/tests/test_transfer_input_data.py +0 -256
  30. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/.gitignore +0 -0
  31. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/.pre-commit-config.yaml +0 -0
  32. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/.readthedocs.yml +0 -0
  33. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/.snyk +0 -0
  34. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/README.rst +0 -0
  35. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/bitbucket-pipelines.yml +0 -0
  36. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/changelog/.gitempty +0 -0
  37. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/__init__.py +0 -0
  38. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/_util/__init__.py +0 -0
  39. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/_util/constants.py +0 -0
  40. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/_util/graphql.py +0 -0
  41. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/_util/scratch.py +0 -0
  42. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/_util/tags.py +0 -0
  43. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/__init__.py +0 -0
  44. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/asdf.py +0 -0
  45. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/bytes.py +0 -0
  46. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/iobase.py +0 -0
  47. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/json.py +0 -0
  48. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/path.py +0 -0
  49. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/quality.py +0 -0
  50. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/codecs/str.py +0 -0
  51. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/config.py +0 -0
  52. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/fonts/Lato-Regular.ttf +0 -0
  53. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/fonts/__init__.py +0 -0
  54. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/__init__.py +0 -0
  55. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/constants.py +0 -0
  56. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/fits_access.py +0 -0
  57. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/flower_pot.py +0 -0
  58. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/message.py +0 -0
  59. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/message_queue_binding.py +0 -0
  60. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/metric_code.py +0 -0
  61. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/quality.py +0 -0
  62. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/tags.py +0 -0
  63. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/task_name.py +0 -0
  64. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/models/wavelength.py +0 -0
  65. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/__init__.py +0 -0
  66. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/cs_step.py +0 -0
  67. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/dsps_repeat.py +0 -0
  68. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/experiment_id_bud.py +0 -0
  69. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/id_bud.py +0 -0
  70. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/l0_fits_access.py +0 -0
  71. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/l1_fits_access.py +0 -0
  72. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/near_bud.py +0 -0
  73. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/proposal_id_bud.py +0 -0
  74. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/quality.py +0 -0
  75. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/retarder.py +0 -0
  76. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/single_value_single_key_flower.py +0 -0
  77. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/task.py +0 -0
  78. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/time.py +0 -0
  79. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/unique_bud.py +0 -0
  80. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/parsers/wavelength.py +0 -0
  81. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/__init__.py +0 -0
  82. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/assemble_movie.py +0 -0
  83. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/base.py +0 -0
  84. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/l1_output_data.py +0 -0
  85. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/__init__.py +0 -0
  86. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/globus.py +0 -0
  87. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/interservice_bus.py +0 -0
  88. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/object_store.py +0 -0
  89. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/quality/__init__.py +0 -0
  90. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/quality/_base.py +0 -0
  91. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/mixin/quality/_metrics.py +0 -0
  92. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/output_data_base.py +0 -0
  93. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/parse_l0_input_data.py +0 -0
  94. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/quality_metrics.py +0 -0
  95. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/teardown.py +0 -0
  96. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/trial_catalog.py +0 -0
  97. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tasks/trial_output_data.py +0 -0
  98. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/__init__.py +0 -0
  99. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_assemble_movie.py +0 -0
  100. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_assemble_quality.py +0 -0
  101. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_base.py +0 -0
  102. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_constants.py +0 -0
  103. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_cs_step.py +0 -0
  104. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_fits_access.py +0 -0
  105. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_flower_pot.py +0 -0
  106. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_interservice_bus.py +0 -0
  107. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_interservice_bus_mixin.py +0 -0
  108. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_manual_processing.py +0 -0
  109. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_output_data_base.py +0 -0
  110. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_parse_l0_input_data.py +0 -0
  111. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_publish_catalog_messages.py +0 -0
  112. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_quality.py +0 -0
  113. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_quality_mixin.py +0 -0
  114. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_scratch.py +0 -0
  115. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_stems.py +0 -0
  116. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_submit_dataset_metadata.py +0 -0
  117. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_tags.py +0 -0
  118. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_task_name.py +0 -0
  119. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_task_parsing.py +0 -0
  120. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_teardown.py +0 -0
  121. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_transfer_l1_output_data.py +0 -0
  122. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_trial_catalog.py +0 -0
  123. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_trial_output_data.py +0 -0
  124. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common/tests/test_workflow_task_base.py +0 -0
  125. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common.egg-info/dependency_links.txt +0 -0
  126. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/dkist_processing_common.egg-info/top_level.txt +0 -0
  127. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/Makefile +0 -0
  128. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/changelog.rst +0 -0
  129. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/conf.py +0 -0
  130. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/index.rst +0 -0
  131. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/landing_page.rst +0 -0
  132. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/make.bat +0 -0
  133. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/docs/requirements.txt +0 -0
  134. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/licenses/LICENSE.rst +0 -0
  135. {dkist_processing_common-10.8.1rc1 → dkist_processing_common-10.8.2}/setup.cfg +0 -0
@@ -1,3 +1,21 @@
+ v10.8.2 (2025-05-27)
+ ====================
+
+ Bugfixes
+ --------
+
+ - Prevent the `WAVEBAND` key from being populated with a spectral line that falls outside the wavelength bounds of the data. (`#251 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/251>`__)
+
+
+ v10.8.1 (2025-05-22)
+ ====================
+
+ Misc
+ ----
+
+ - Update `dkist-processing-core` to v5.1.1. (`#253 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/253>`__)
+
+
  v10.8.0 (2025-05-15)
  ====================

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-common
- Version: 10.8.1rc1
+ Version: 10.8.2
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -17,7 +17,7 @@ Requires-Dist: asdf<4.0.0,>=3.5.0
  Requires-Dist: astropy>=7.0.0
  Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
  Requires-Dist: dkist-header-validator<6.0,>=5.0.0
- Requires-Dist: dkist-processing-core==5.1.0
+ Requires-Dist: dkist-processing-core==5.1.1
  Requires-Dist: dkist-processing-pac<4.0,>=3.1
  Requires-Dist: dkist-service-configuration<3.0,>=2.0.2
  Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
@@ -30,15 +30,15 @@ def fits_hdulist_encoder(hdu_list: fits.HDUList) -> bytes:
      return iobase_encoder(file_obj)
 
 
- def fits_hdu_decoder(path: Path, hdu: int | None = None) -> fits.PrimaryHDU | fits.CompImageHDU:
+ def fits_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
      """Read a Path with `fits` to produce an `HDUList`."""
      hdu_list = fits.open(path, checksum=True)
-     return _extract_hdu(hdu_list, hdu)
+     return _extract_hdu(hdu_list)
 
 
- def fits_array_decoder(path: Path, hdu: int | None = None, auto_squeeze: bool = True) -> np.ndarray:
+ def fits_array_decoder(path: Path, auto_squeeze: bool = True) -> np.ndarray:
      """Read a Path with `fits` and return the `.data` property."""
-     hdu = fits_hdu_decoder(path, hdu=hdu)
+     hdu = fits_hdu_decoder(path)
      data = hdu.data
 
      # This conditional is explicitly to catch summit data with a dummy first axis for WCS
@@ -56,14 +56,8 @@ def fits_access_decoder(
      return fits_access_class(hdu=hdu, name=str(path), **fits_access_kwargs)
 
 
- def _extract_hdu(hdul: fits.HDUList, hdu: int | None = None) -> fits.PrimaryHDU | fits.CompImageHDU:
-     """
-     Return the fits hdu associated with the data in the hdu list.
-
-     Only search down the hdu index for the data if the hdu index is not explicitly provided.
-     """
-     if hdu is not None:
-         return hdul[hdu]
+ def _extract_hdu(hdul: fits.HDUList) -> fits.PrimaryHDU | fits.CompImageHDU:
+     """Return the fits hdu associated with the data in the hdu list."""
      if hdul[0].data is not None:
          return hdul[0]
      return hdul[1]
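For context on the simplified `_extract_hdu`, here is a small runnable sketch (file names invented) of the two layouts it distinguishes: uncompressed FITS files keep their data in the primary HDU, while compressed files have an empty primary HDU and carry the data in a `CompImageHDU` at index 1:

    from pathlib import Path

    import numpy as np
    from astropy.io import fits

    data = np.arange(4, dtype=np.float32).reshape(2, 2)

    # Uncompressed: the data lives in the primary HDU (index 0).
    fits.HDUList([fits.PrimaryHDU(data=data)]).writeto("plain.fits", overwrite=True)

    # Compressed: the primary HDU is empty; the data lives in a CompImageHDU at index 1.
    fits.HDUList([fits.PrimaryHDU(), fits.CompImageHDU(data=data)]).writeto(
        "compressed.fits", overwrite=True
    )

    for name in ("plain.fits", "compressed.fits"):
        hdul = fits.open(Path(name))
        hdu = hdul[0] if hdul[0].data is not None else hdul[1]  # same rule as _extract_hdu
        print(name, hdu.data.shape)  # (2, 2) in both cases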
@@ -2,13 +2,15 @@
  import json
  import logging
  import shutil
+ from dataclasses import asdict
+ from io import BytesIO
  from pathlib import Path
  from typing import Callable
  from unittest.mock import patch
 
  from dkist_processing_core.task import TaskBase
 
- from dkist_processing_common.codecs.basemodel import basemodel_encoder
+ from dkist_processing_common.codecs.json import json_encoder
  from dkist_processing_common.models.graphql import RecipeRunProvenanceMutation
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks.base import WorkflowTaskBase
@@ -180,8 +182,8 @@ def writing_metadata_store_record_provenance(self, is_task_manual: bool, library
              workflowVersion=self.workflow_version,
          )
          self.write(
-             data=params,
-             encoder=basemodel_encoder,
+             data=params.model_dump(),
+             encoder=json_encoder,
              tags=["PROVENANCE_RECORD"],
              relative_path=f"{self.task_name}_provenance.json",
              overwrite=True,
@@ -3,9 +3,6 @@ from pydantic import BaseModel
  from pydantic import field_validator
  from pydantic import Json
 
- from dkist_processing_common.models.input_dataset import InputDatasetBaseModel
- from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
-
 
  class RecipeRunMutation(BaseModel):
      """Recipe run mutation record."""
@@ -40,19 +37,13 @@ class InputDatasetPartTypeResponse(BaseModel):
      inputDatasetPartTypeName: str
 
 
- class InputDatasetPartResponse(InputDatasetBaseModel):
+ class InputDatasetPartResponse(BaseModel):
      """Response class for the input dataset part entity."""
 
      inputDatasetPartId: int
-     # inputDatasetPartDocument : Json[InputDatasetPartDocumentList] # will work in gqlclient v2
-     inputDatasetPartDocument: Json[list]
+     inputDatasetPartDocument: Json[list[dict]]
      inputDatasetPartType: InputDatasetPartTypeResponse
 
-     @field_validator("inputDatasetPartDocument", mode="after")
-     @classmethod
-     def _use_frame_or_parameter_model(cls, value_list): # not needed for gqlclient v2
-         return InputDatasetPartDocumentList(doc_list=value_list)
-
 
  class InputDatasetInputDatasetPartResponse(BaseModel):
      """Response class for the join entity between input datasets and input dataset parts."""
@@ -112,12 +103,11 @@ class RecipeRunResponse(BaseModel):
      recipeInstance: RecipeInstanceResponse
      recipeInstanceId: int
      recipeRunProvenances: list[RecipeRunProvenanceResponse]
-     # configuration: Json[RecipeRunConfiguration] | None # will work in gqlclient v2
      configuration: Json[dict] | None
 
      @field_validator("configuration", mode="after")
      @classmethod
-     def _use_recipe_run_configuration_model(cls, value): # not needed for gqlclient v2
+     def _use_recipe_run_configuration_model(cls, value):
          if value is None:
              return RecipeRunConfiguration()
          return RecipeRunConfiguration.model_validate(value)
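For readers unfamiliar with this pydantic pattern, a minimal self-contained sketch of how a `Json[...]` annotation plus an `after`-mode validator turns a raw JSON string into a model, as `configuration` does above (the `tile_size` field is a hypothetical stand-in for the real configuration contents):

    from pydantic import BaseModel, Json, field_validator

    class RecipeRunConfiguration(BaseModel):
        tile_size: int = 64  # hypothetical configuration field

    class RecipeRunResponse(BaseModel):
        configuration: Json[dict] | None

        @field_validator("configuration", mode="after")
        @classmethod
        def _use_recipe_run_configuration_model(cls, value):
            # Json[dict] has already parsed the string; promote the dict to the model.
            if value is None:
                return RecipeRunConfiguration()
            return RecipeRunConfiguration.model_validate(value)

    print(RecipeRunResponse(configuration='{"tile_size": 128}').configuration)  # tile_size=128
    print(RecipeRunResponse(configuration=None).configuration)  # tile_size=64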
@@ -1,23 +1,14 @@
  """Base class for parameter-parsing object."""
  import logging
- from contextlib import contextmanager
  from datetime import datetime
- from pathlib import Path
  from typing import Any
- from typing import Callable
  from typing import Literal
 
  import numpy as np
  import scipy.interpolate as spi
+ from astropy.io import fits
 
- from dkist_processing_common._util.scratch import WorkflowFileSystem
- from dkist_processing_common.codecs.array import array_decoder
- from dkist_processing_common.codecs.basemodel import basemodel_decoder
- from dkist_processing_common.codecs.fits import fits_array_decoder
- from dkist_processing_common.models.input_dataset import InputDatasetFilePointer
- from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
- from dkist_processing_common.models.tags import Tag
-
+ from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetParameterValue
 
  logger = logging.getLogger(__name__)
 
@@ -33,9 +24,9 @@ class ParameterBase:
 
      To use in an instrument pipeline a subclass is required. Here's a simple, but complete example::
 
-         class InstParameters(ParameterBase):
-             def __init__(self, scratch, some_other_parameters):
-                 super().__init__(scratch=scratch)
+         class InstParameters(ParameterBase):
+             def __init__(self, input_dataset_parameters, some_other_parameter):
+                 super().__init__(input_dataset_parameters)
                  self._thing = self._some_function(some_other_parameters)
 
              @property
@@ -43,7 +34,7 @@ class ParameterBase:
                  return self._find_most_recent_past_value("some_parameter_name")
 
              @property
-             def complicated_parameter(self):
+             def complicate_parameter(self):
                  return self._some_complicated_parsing_function("complicated_parameter_name", another_argument)
 
 
@@ -64,16 +55,15 @@ class ParameterBase:
                  workflow_version=workflow_version,
              )
 
-             self.parameters = InstParameters(scratch=self.scratch) #<------ This is the important line
+             self.parameters = InstParameters(self.input_dataset_parameters) #<------ This is the important line
 
-     ParameterBase needs the task scratch in order to read the parameters document written at input dataset
-     transfer. Note that the first argument to the ConstantsSubclass will *always* be scratch, but additional
-     arguments can be passed if the subclass requires them.
+     Note that the first argument to the ConstantsSubclass will *always* be self.input_dataset_parameters, but
+     additional arguments can be passed if the subclass requires them.
 
      Parameters
      ----------
-     scratch
-         The task scratch WorkflowFileSystem instance
+     input_dataset_parameters
+         The input parameters
 
      obs_ip_start_time
          A string containing the start date of the Observe IP task type frames. Must be in isoformat.
@@ -84,53 +74,25 @@ class ParameterBase:
 
      def __init__(
          self,
-         scratch: WorkflowFileSystem,
+         input_dataset_parameters: dict[str, list[InputDatasetParameterValue]],
          obs_ip_start_time: str | None = None,
          **kwargs,
      ):
-         self.scratch = scratch
-         input_dataset_parameter_model = self._get_parameters_doc_from_file()
-         input_dataset_parameters = {}
-         if input_dataset_parameter_model is not None:
-             input_dataset_parameters = {
-                 p.parameter_name: p.parameter_values for p in input_dataset_parameter_model.doc_list
-             }
          self.input_dataset_parameters = input_dataset_parameters
-
          if obs_ip_start_time is not None:
              # Specifically `not None` because we want to error normally on badly formatted strings (including "").
              self._obs_ip_start_datetime = datetime.fromisoformat(obs_ip_start_time)
          else:
              logger.info(
                  "WARNING: "
-                 "The task containing this parameters object did not provide an obs ip start time, "
-                 "which really only makes sense for Parsing tasks."
+                 "The task containing this parameters object did not provide an obs ip start time. "
+                 "This really only makes sense for Parsing tasks."
              )
 
          for parent_class in self.__class__.__bases__:
              if hasattr(parent_class, "is_param_mixin"):
                  parent_class.__init__(self, **kwargs)
 
-     def _read_parameter_file(
-         self, tag: str, decoder: Callable[[Path], Any], **decoder_kwargs
-     ) -> Any:
-         """Read any file in the task scratch instance."""
-         paths = list(self.scratch.find_all(tags=tag))
-         if len(paths) == 0:
-             logger.info(f"WARNING: There is no parameter file for {tag = }")
-         if len(paths) == 1:
-             return decoder(paths[0], **decoder_kwargs)
-         if len(paths) > 1:
-             raise ValueError(f"There is more than one parameter file for {tag = }: {paths}")
-
-     def _get_parameters_doc_from_file(self) -> InputDatasetPartDocumentList:
-         """Get parameters doc saved at the TransferL0Data task."""
-         tag = Tag.input_dataset_parameters()
-         parameters_from_file = self._read_parameter_file(
-             tag=tag, decoder=basemodel_decoder, model=InputDatasetPartDocumentList
-         )
-         return parameters_from_file
-
      def _find_most_recent_past_value(
          self,
          parameter_name: str,
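For orientation, a hedged sketch of the `input_dataset_parameters` mapping now handed to `ParameterBase.__init__` (the parameter name, values, and dates are invented; the dataclass mirrors `InputDatasetParameterValue` from the input_dataset mixin shown below):

    from dataclasses import dataclass
    from datetime import datetime
    from typing import Any

    @dataclass
    class InputDatasetParameterValue:
        # Mirrors dkist_processing_common.tasks.mixin.input_dataset.InputDatasetParameterValue
        parameter_value_id: int
        parameter_value: Any = None
        parameter_value_start_date: datetime | None = None

    # Each parameter name maps to a list of dated values; helpers such as
    # _find_most_recent_past_value presumably select the latest value whose
    # start date does not post-date the Observe IP start time.
    input_dataset_parameters = {
        "example_gain_threshold": [
            InputDatasetParameterValue(1, 0.1, datetime(2024, 1, 1)),
            InputDatasetParameterValue(2, 0.2, datetime(2025, 1, 1)),
        ],
    }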
@@ -151,19 +113,20 @@ class ParameterBase:
          )
          return result
 
-     def _load_param_value_from_fits(
-         self, param_obj: InputDatasetFilePointer, hdu: int = 0
-     ) -> np.ndarray:
-         """Return the data associated with a tagged parameter file saved in FITS format."""
-         tag = param_obj.file_pointer.tag
-         param_value = self._read_parameter_file(tag=tag, decoder=fits_array_decoder, hdu=hdu)
-         return param_value
-
-     def _load_param_value_from_numpy_save(self, param_obj: InputDatasetFilePointer) -> np.ndarray:
-         """Return the data associated with a tagged parameter file saved in numpy format."""
-         tag = param_obj.file_pointer.tag
-         param_value = self._read_parameter_file(tag=tag, decoder=array_decoder)
-         return param_value
+     @staticmethod
+     def _load_param_value_from_fits(param_dict: dict, hdu: int = 0) -> np.ndarray:
+         """Load a numpy array from a parameter pointing to a FITS file."""
+         file_path = param_dict["param_path"]
+
+         hdul = fits.open(file_path)
+         return hdul[hdu].data
+
+     @staticmethod
+     def _load_param_value_from_numpy_save(param_dict: dict) -> np.ndarray:
+         """Return the data associated with a parameter file saved in numpy format."""
+         file_path = param_dict["param_path"]
+         result = np.load(file_path)
+         return result
 
 
  class _ParamMixinBase:
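The replacement static loaders reduce to direct `astropy`/`numpy` reads keyed off the decoded `param_path`. A standalone sketch (file names are hypothetical; the `param_dict` shape follows the `__file__` decoding in the input_dataset mixin):

    import numpy as np
    from astropy.io import fits

    # Stand-ins for parameter files transferred to scratch by TransferL0Data.
    fits.PrimaryHDU(data=np.ones((3, 3))).writeto("gain_table.fits", overwrite=True)
    np.save("dark_frame.npy", np.zeros((3, 3)))

    fits_param = {"param_path": "gain_table.fits", "is_file": True}
    numpy_param = {"param_path": "dark_frame.npy", "is_file": True}

    # Equivalent to _load_param_value_from_fits(fits_param, hdu=0) and
    # _load_param_value_from_numpy_save(numpy_param).
    print(fits.open(fits_param["param_path"])[0].data.shape)  # (3, 3)
    print(np.load(numpy_param["param_path"]).shape)  # (3, 3)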
@@ -0,0 +1,166 @@
+ """Mixin for a WorkflowDataTaskBase subclass which implements input data set access functionality."""
+ import json
+ from dataclasses import dataclass
+ from datetime import datetime
+ from itertools import chain
+ from pathlib import Path
+ from typing import Any
+
+ from dkist_processing_common.models.tags import Tag
+ from dkist_processing_common.tasks.base import tag_type_hint
+
+
+ frames_part_type_hint = list[dict[str, str | list[str]]] | None
+
+
+ @dataclass
+ class InputDatasetParameterValue:
+     """Data structure for a de-serialized input dataset parameter value."""
+
+     parameter_value_id: int
+     parameter_value: Any = None
+     parameter_value_start_date: datetime | None = None
+
+
+ @dataclass
+ class InputDatasetObject:
+     """Data structure for a de-serialized input dataset frame."""
+
+     bucket: str
+     object_key: str
+
+
+ class InputDatasetMixin:
+     """Mixin for WorkflowDataTaskBase that accesses downloaded input dataset part documents."""
+
+     def _input_dataset_part_document(self, tags: tag_type_hint):
+         """Get the input dataset document part and deserialize it."""
+         paths: list[Path] = list(self.read(tags=tags))
+         if not paths:
+             return
+         if len(paths) > 1:
+             raise ValueError(
+                 f"There are more than one input dataset part documents to parse for {tags=}"
+             )
+         p = paths[0]
+         with p.open(mode="rb") as f:
+             return json.load(f)
+
+     @property
+     def input_dataset_observe_frames_part_document(self) -> frames_part_type_hint:
+         """Get the 'observe frames' part of the input dataset."""
+         return self._input_dataset_part_document(tags=Tag.input_dataset_observe_frames())
+
+     @property
+     def input_dataset_calibration_frames_part_document(self) -> frames_part_type_hint:
+         """Get the 'calibration frames' part of the input dataset."""
+         return self._input_dataset_part_document(tags=Tag.input_dataset_calibration_frames())
+
+     @property
+     def input_dataset_parameters_part_document(
+         self,
+     ) -> list[dict[str, str | list[dict[str, int | str]]]] | None:
+         """Get the 'parameters' part of the input dataset."""
+         return self._input_dataset_part_document(tags=Tag.input_dataset_parameters())
+
+     @property
+     def input_dataset_frames(self) -> list[InputDatasetObject]:
+         """Get the list of frames for this input dataset."""
+         result = []
+         observe_frames = self.input_dataset_observe_frames_part_document or []
+         calibration_frames = self.input_dataset_calibration_frames_part_document or []
+         for frame_set in chain(observe_frames, calibration_frames):
+             for key in frame_set.get("object_keys", list()):
+                 result.append(InputDatasetObject(bucket=frame_set["bucket"], object_key=key))
+         return result
+
+     @property
+     def input_dataset_parameters(self) -> dict[str, list[InputDatasetParameterValue]]:
+         """Get the input dataset parameters."""
+         parameters = self.input_dataset_parameters_part_document or []
+         result = dict()
+         for p in parameters:
+             result.update(self._input_dataset_parse_parameter(p))
+         return result
+
+     @property
+     def input_dataset_parameter_objects(self) -> list[InputDatasetObject]:
+         """Parse the parameter object locations out of the set of all parameters."""
+         result = []
+         for value_list in self.input_dataset_parameters.values():
+             for value in value_list:
+                 param_value = value.parameter_value
+                 if isinstance(param_value, dict) and param_value.get("is_file", False):
+                     result.append(
+                         InputDatasetObject(
+                             bucket=param_value["bucket"], object_key=param_value["objectKey"]
+                         )
+                     )
+         return result
+
+     def _input_dataset_parse_parameter(
+         self, parameter: dict
+     ) -> dict[str, list[InputDatasetParameterValue]]:
+         name: str = parameter["parameterName"]
+         raw_values: list[dict] = parameter["parameterValues"]
+         values = self._input_dataset_parse_parameter_values(raw_values=raw_values)
+         return {name: values}
+
+     def _input_dataset_parse_parameter_values(
+         self, raw_values: list[dict[str, Any]]
+     ) -> list[InputDatasetParameterValue]:
+         values = list()
+         for v in raw_values:
+             parsed_value = InputDatasetParameterValue(parameter_value_id=v["parameterValueId"])
+             parsed_value.parameter_value = self._input_dataset_parse_parameter_value(
+                 raw_parameter_value=v["parameterValue"]
+             )
+             if d := v.get("parameterValueStartDate"):
+                 parsed_value.parameter_value_start_date = datetime.fromisoformat(d)
+             else:
+                 parsed_value.parameter_value_start_date = datetime(1, 1, 1)
+             values.append(parsed_value)
+         return values
+
+     def _input_dataset_parse_parameter_value(self, raw_parameter_value: str) -> Any:
+         """Return the json decoding of the parameter value."""
+         return json.loads(raw_parameter_value, object_hook=self._decode_parameter_value)
+
+     def _decode_parameter_value(self, param_dict: dict):
+         """Decode a parameter value."""
+         if "__file__" in param_dict:
+             return self._convert_parameter_file_value_to_path(param_dict)
+         # Nothing to do here, so return control back to json.loads()
+         return param_dict
+
+     def _convert_parameter_file_value_to_path(self, param_dict: dict):
+         """
+         Decode a parameter file value.
+
+         Note: for parameters that are files, the string passed to json.loads() looks like this:
+         '{ "__file__":
+             {
+                 "bucket": "data",
+                 "objectKey": "parameters/parameter_name/uuid.dat"
+             }
+         }'
+
+         In this hook, we remove the outer __file__ dict and return the inner dict with the addition of
+         a key and value for the file path. Because the file path is dependent on the existence of a tag,
+         if no tag is found, None is returned for the path. This use case will occur when we construct
+         the list of files to be transferred and tagged by the TransferL0Data task.
+         """
+         file_dict = param_dict["__file__"]
+         object_key = file_dict["objectKey"]
+         parameter_tag = Tag.parameter(Path(object_key).name)
+         paths = list(self.read(tags=parameter_tag))
+         num_paths = len(paths)
+         if num_paths == 1:
+             param_path = paths[0]
+         elif num_paths == 0:
+             param_path = None
+         else:
+             raise ValueError(f"Found multiple paths for {parameter_tag = }.")
+         file_dict["param_path"] = param_path
+         file_dict["is_file"] = True
+         return file_dict
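To make the `object_hook` mechanics above concrete, a minimal standalone sketch (the object key is invented, and a stub replaces the mixin's tag-based `self.read` lookup). `json.loads` calls the hook on the innermost object first, so the `{"bucket": ..., "objectKey": ...}` dict passes through untouched and the outer `__file__` wrapper is then unwrapped and augmented:

    import json

    raw = '{"__file__": {"bucket": "data", "objectKey": "parameters/gain/abc123.dat"}}'

    def decode(param_dict: dict) -> dict:
        # Mirrors InputDatasetMixin._decode_parameter_value, with the path lookup stubbed out.
        if "__file__" in param_dict:
            file_dict = param_dict["__file__"]
            # The real mixin resolves this via self.read(tags=Tag.parameter(...));
            # None means the file has not yet been transferred and tagged.
            file_dict["param_path"] = None
            file_dict["is_file"] = True
            return file_dict
        return param_dict

    print(json.loads(raw, object_hook=decode))
    # {'bucket': 'data', 'objectKey': 'parameters/gain/abc123.dat', 'param_path': None, 'is_file': True}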
@@ -210,19 +210,16 @@ class MetadataStoreMixin:
          self, part_type: Literal["observe_frames", "calibration_frames", "parameters"]
      ) -> InputDatasetPartResponse:
          """Get the input dataset part by input dataset part type name."""
-         part_types_found = set()
-         input_dataset_part = None
+         part_type_dict = {}
          parts = (
              self.metadata_store_input_dataset_recipe_run.recipeInstance.inputDataset.inputDatasetInputDatasetParts
          )
          for part in parts:
              part_type_name = part.inputDatasetPart.inputDatasetPartType.inputDatasetPartTypeName
-             if part_type_name in part_types_found:
+             if part_type_name in part_type_dict.keys():
                  raise ValueError(f"Multiple input dataset parts found for {part_type_name=}.")
-             part_types_found.add(part_type_name)
-             if part_type_name == part_type:
-                 input_dataset_part = part.inputDatasetPart
-         return input_dataset_part
+             part_type_dict[part_type_name] = part.inputDatasetPart
+         return part_type_dict.get(part_type)
 
      @property
      def metadata_store_input_dataset_observe_frames(self) -> InputDatasetPartResponse:
@@ -0,0 +1,129 @@
+ """Task(s) for the transfer in of data sources for a processing pipeline."""
+ import logging
+ from pathlib import Path
+
+ from dkist_processing_common.codecs.json import json_encoder
+ from dkist_processing_common.models.tags import Tag
+ from dkist_processing_common.tasks.base import WorkflowTaskBase
+ from dkist_processing_common.tasks.mixin.globus import GlobusMixin
+ from dkist_processing_common.tasks.mixin.globus import GlobusTransferItem
+ from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetMixin
+ from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetObject
+
+ __all__ = ["TransferL0Data"]
+
+ logger = logging.getLogger(__name__)
+
+
+ class TransferL0Data(WorkflowTaskBase, GlobusMixin, InputDatasetMixin):
+     """Transfers Level 0 data and required parameter files to the scratch store."""
+
+     def download_input_dataset(self):
+         """Get the input dataset document parts and save it to scratch with the appropriate tags."""
+         if observe_frames := self.metadata_store_input_dataset_observe_frames:
+             observe_doc = observe_frames.inputDatasetPartDocument
+             self.write(observe_doc, tags=Tag.input_dataset_observe_frames(), encoder=json_encoder)
+         if calibration_frames := self.metadata_store_input_dataset_calibration_frames:
+             calibration_doc = calibration_frames.inputDatasetPartDocument
+             self.write(
+                 calibration_doc, tags=Tag.input_dataset_calibration_frames(), encoder=json_encoder
+             )
+         if parameters := self.metadata_store_input_dataset_parameters:
+             parameters_doc = parameters.inputDatasetPartDocument
+             self.write(parameters_doc, tags=Tag.input_dataset_parameters(), encoder=json_encoder)
+
+     def format_transfer_items(
+         self, input_dataset_objects: list[InputDatasetObject]
+     ) -> list[GlobusTransferItem]:
+         """Format a list of InputDatasetObject(s) as GlobusTransferItem(s)."""
+         transfer_items = []
+         for obj in input_dataset_objects:
+             source_path = Path("/", obj.bucket, obj.object_key)
+             destination_path = self.scratch.absolute_path(obj.object_key)
+             transfer_items.append(
+                 GlobusTransferItem(
+                     source_path=source_path,
+                     destination_path=destination_path,
+                     recursive=False,
+                 )
+             )
+         return transfer_items
+
+     def format_frame_transfer_items(self) -> list[GlobusTransferItem]:
+         """Format the list of frames as transfer items to be used by globus."""
+         return self.format_transfer_items(self.input_dataset_frames)
+
+     def format_parameter_transfer_items(self) -> list[GlobusTransferItem]:
+         """Format the list of parameter objects as transfer items to be used by globus."""
+         return self.format_transfer_items(self.input_dataset_parameter_objects)
+
+     def tag_input_frames(self, transfer_items: list[GlobusTransferItem]) -> None:
+         """
+         Tag all the input files with 'frame' and 'input' tags.
+
+         Parameters
+         ----------
+         transfer_items
+             List of items to be tagged
+
+         Returns
+         -------
+         None
+         """
+         scratch_items = [
+             self.scratch.scratch_base_path / ti.destination_path for ti in transfer_items
+         ]
+         for si in scratch_items:
+             self.tag(si, tags=[Tag.input(), Tag.frame()])
+
+     def tag_parameter_objects(self, transfer_items: list[GlobusTransferItem]) -> None:
+         """
+         Tag all the parameter files with 'parameter'.
+
+         Parameters
+         ----------
+         transfer_items
+             List of items to be tagged
+
+         Returns
+         -------
+         None
+         """
+         scratch_items = [
+             self.scratch.scratch_base_path / ti.destination_path for ti in transfer_items
+         ]
+         for si in scratch_items:
+             self.tag(si, tags=[Tag.parameter(si.name)])
+
+     def run(self) -> None:
+         """Execute the data transfer."""
+         with self.apm_task_step("Change Status to InProgress"):
+             self.metadata_store_change_recipe_run_to_inprogress()
+
+         with self.apm_task_step("Download Input Dataset"):
+             self.download_input_dataset()
+
+         with self.apm_task_step("Format Frame Transfer Items"):
+             frame_transfer_items = self.format_frame_transfer_items()
+             if not frame_transfer_items:
+                 raise ValueError("No input dataset frames found")
+
+         with self.apm_task_step("Format Parameter Transfer Items"):
+             parameter_transfer_items = self.format_parameter_transfer_items()
+
+         with self.apm_task_step("Transfer Input Frames and Parameter Files via Globus"):
+             self.globus_transfer_object_store_to_scratch(
+                 transfer_items=frame_transfer_items + parameter_transfer_items,
+                 label=f"Transfer Inputs for Recipe Run {self.recipe_run_id}",
+             )
+
+         with self.apm_processing_step("Tag Input Frames and Parameter Files"):
+             self.tag_input_frames(transfer_items=frame_transfer_items)
+             self.tag_parameter_objects(transfer_items=parameter_transfer_items)
+
+     def rollback(self):
+         """Warn that depending on the progress of the task all data may not be removed because it hadn't been tagged."""
+         super().rollback()
+         logger.warning(
+             f"Rolling back only removes data that has been tagged. The data persisted by this task may not have been tagged prior to rollback."
+         )
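As a rough illustration of the bucket/object-key to path mapping that `format_transfer_items` performs (a standalone sketch; the real task resolves destinations through `self.scratch.absolute_path`, so the scratch root here is hypothetical):

    from dataclasses import dataclass
    from pathlib import Path

    @dataclass
    class InputDatasetObject:
        bucket: str
        object_key: str

    scratch_root = Path("/scratch/recipe_run_42")  # hypothetical scratch base path
    obj = InputDatasetObject(bucket="data", object_key="VISP/2025/05/frame_0001.fits")

    source_path = Path("/", obj.bucket, obj.object_key)
    destination_path = scratch_root / obj.object_key

    print(source_path)       # /data/VISP/2025/05/frame_0001.fits
    print(destination_path)  # /scratch/recipe_run_42/VISP/2025/05/frame_0001.fits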
@@ -295,10 +295,14 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          header["SOLARRAD"] = self.calculate_solar_angular_radius(obstime=obstime)
          header["SPECSYS"] = "TOPOCENT"  # no wavelength correction made due to doppler velocity
          header["VELOSYS"] = 0.0  # no wavelength correction made due to doppler velocity
-         header["WAVEBAND"] = get_closest_spectral_line(wavelength=header["LINEWAV"] * u.nm).name
          wavelength_range = self.get_wavelength_range(header=header)
          header["WAVEMIN"] = wavelength_range.min.to_value(u.nm)
          header["WAVEMAX"] = wavelength_range.max.to_value(u.nm)
+         waveband: str | None = self.get_waveband(
+             wavelength=header["LINEWAV"] * u.nm, wavelength_range=wavelength_range
+         )
+         if waveband:
+             header["WAVEBAND"] = waveband
          return header
 
      def l1_filename(self, header: fits.Header, stokes: Literal["I", "Q", "U", "V"]):
@@ -515,3 +519,18 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          if header.get("AO_LOCK") is not True:
              header.pop("ATMOS_R0", None)
          return header
+
+     @staticmethod
+     def get_waveband(wavelength: u.Quantity, wavelength_range: WavelengthRange) -> str | None:
+         """
+         Get the spectral line information of the closest spectral line to the wavelength argument.
+
+         If the spectral line rest wavelength in air does not fall in the wavelength range of the data,
+         do not populate the keyword.
+         """
+         print(wavelength_range)
+         closest_line = get_closest_spectral_line(wavelength=wavelength)
+         rest_wavelength = closest_line.rest_wavelength_in_air
+         if rest_wavelength < wavelength_range.min or rest_wavelength > wavelength_range.max:
+             return None
+         return closest_line.name
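To see the new bounds check in action, a short hedged sketch (the line and wavelengths are invented; the real lookup goes through `dkist-spectral-lines`, and `WavelengthRange` here is a stand-in for the model used by `get_wavelength_range`):

    from dataclasses import dataclass

    import astropy.units as u

    @dataclass
    class WavelengthRange:  # stand-in for the real wavelength-range model
        min: u.Quantity
        max: u.Quantity

    # Hypothetical: the catalog line closest to LINEWAV sits at 630.25 nm,
    # but the data only cover 629.0-630.1 nm.
    rest_wavelength = 630.25 * u.nm
    wavelength_range = WavelengthRange(min=629.0 * u.nm, max=630.1 * u.nm)

    # Same test as WriteL1Frame.get_waveband: a line outside the observed range
    # leaves WAVEBAND unpopulated instead of claiming a line the data never saw.
    in_range = wavelength_range.min <= rest_wavelength <= wavelength_range.max
    print("WAVEBAND populated" if in_range else "WAVEBAND omitted")  # WAVEBAND omitted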