dkist-processing-common 10.6.1rc3__tar.gz → 10.6.1rc5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. dkist_processing_common-10.6.1rc5/PKG-INFO +175 -0
  2. dkist_processing_common-10.6.1rc5/changelog/236.misc.1.rst +2 -0
  3. dkist_processing_common-10.6.1rc5/changelog/236.misc.rst +3 -0
  4. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/graphql.py +21 -2
  5. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/metadata_store.py +5 -6
  6. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/output_data_base.py +1 -1
  7. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/teardown.py +1 -1
  8. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/transfer_input_data.py +4 -3
  9. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/trial_output_data.py +12 -11
  10. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/write_l1.py +3 -3
  11. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/conftest.py +94 -61
  12. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_input_dataset.py +2 -20
  13. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_teardown.py +16 -15
  14. dkist_processing_common-10.6.1rc5/dkist_processing_common/tests/test_transfer_input_data.py +123 -0
  15. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_trial_catalog.py +2 -2
  16. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_trial_output_data.py +9 -11
  17. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_write_l1.py +14 -2
  18. dkist_processing_common-10.6.1rc5/dkist_processing_common.egg-info/PKG-INFO +175 -0
  19. dkist_processing_common-10.6.1rc5/dkist_processing_common.egg-info/requires.txt +53 -0
  20. dkist_processing_common-10.6.1rc5/pyproject.toml +133 -0
  21. dkist_processing_common-10.6.1rc3/PKG-INFO +0 -398
  22. dkist_processing_common-10.6.1rc3/changelog/236.misc.1.rst +0 -1
  23. dkist_processing_common-10.6.1rc3/changelog/236.misc.rst +0 -1
  24. dkist_processing_common-10.6.1rc3/dkist_processing_common/tests/test_transfer_input_data.py +0 -167
  25. dkist_processing_common-10.6.1rc3/dkist_processing_common.egg-info/PKG-INFO +0 -398
  26. dkist_processing_common-10.6.1rc3/dkist_processing_common.egg-info/requires.txt +0 -277
  27. dkist_processing_common-10.6.1rc3/pyproject.toml +0 -362
  28. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/.gitignore +0 -0
  29. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/.pre-commit-config.yaml +0 -0
  30. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/.readthedocs.yml +0 -0
  31. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/.snyk +0 -0
  32. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/CHANGELOG.rst +0 -0
  33. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/README.rst +0 -0
  34. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/bitbucket-pipelines.yml +0 -0
  35. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/changelog/.gitempty +0 -0
  36. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/__init__.py +0 -0
  37. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/_util/__init__.py +0 -0
  38. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/_util/constants.py +0 -0
  39. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/_util/graphql.py +0 -0
  40. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/_util/scratch.py +0 -0
  41. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/_util/tags.py +0 -0
  42. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/__init__.py +0 -0
  43. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/asdf.py +0 -0
  44. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/bytes.py +0 -0
  45. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/fits.py +0 -0
  46. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/iobase.py +0 -0
  47. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/json.py +0 -0
  48. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/path.py +0 -0
  49. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/quality.py +0 -0
  50. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/codecs/str.py +0 -0
  51. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/config.py +0 -0
  52. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/fonts/Lato-Regular.ttf +0 -0
  53. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/fonts/__init__.py +0 -0
  54. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/manual.py +0 -0
  55. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/__init__.py +0 -0
  56. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/constants.py +0 -0
  57. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/fits_access.py +0 -0
  58. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/flower_pot.py +0 -0
  59. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/message.py +0 -0
  60. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/message_queue_binding.py +0 -0
  61. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/metric_code.py +0 -0
  62. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/parameters.py +0 -0
  63. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/quality.py +0 -0
  64. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/tags.py +0 -0
  65. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/task_name.py +0 -0
  66. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/models/wavelength.py +0 -0
  67. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/__init__.py +0 -0
  68. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/cs_step.py +0 -0
  69. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/dsps_repeat.py +0 -0
  70. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/experiment_id_bud.py +0 -0
  71. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/id_bud.py +0 -0
  72. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/l0_fits_access.py +0 -0
  73. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/l1_fits_access.py +0 -0
  74. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/near_bud.py +0 -0
  75. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/proposal_id_bud.py +0 -0
  76. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/quality.py +0 -0
  77. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/retarder.py +0 -0
  78. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/single_value_single_key_flower.py +0 -0
  79. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/task.py +0 -0
  80. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/time.py +0 -0
  81. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/unique_bud.py +0 -0
  82. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/parsers/wavelength.py +0 -0
  83. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/__init__.py +0 -0
  84. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/assemble_movie.py +0 -0
  85. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/base.py +0 -0
  86. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/l1_output_data.py +0 -0
  87. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/__init__.py +0 -0
  88. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/globus.py +0 -0
  89. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/input_dataset.py +0 -0
  90. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/interservice_bus.py +0 -0
  91. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/object_store.py +0 -0
  92. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/quality/__init__.py +0 -0
  93. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/quality/_base.py +0 -0
  94. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/mixin/quality/_metrics.py +0 -0
  95. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/parse_l0_input_data.py +0 -0
  96. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/quality_metrics.py +0 -0
  97. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tasks/trial_catalog.py +0 -0
  98. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/__init__.py +0 -0
  99. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_assemble_movie.py +0 -0
  100. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_assemble_quality.py +0 -0
  101. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_base.py +0 -0
  102. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_codecs.py +0 -0
  103. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_constants.py +0 -0
  104. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_cs_step.py +0 -0
  105. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_fits_access.py +0 -0
  106. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_flower_pot.py +0 -0
  107. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_interservice_bus.py +0 -0
  108. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_interservice_bus_mixin.py +0 -0
  109. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_output_data_base.py +0 -0
  110. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_parameters.py +0 -0
  111. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_parse_l0_input_data.py +0 -0
  112. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_publish_catalog_messages.py +0 -0
  113. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_quality.py +0 -0
  114. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_quality_mixin.py +0 -0
  115. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_scratch.py +0 -0
  116. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_stems.py +0 -0
  117. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_submit_dataset_metadata.py +0 -0
  118. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_tags.py +0 -0
  119. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_task_name.py +0 -0
  120. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_task_parsing.py +0 -0
  121. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_transfer_l1_output_data.py +0 -0
  122. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common/tests/test_workflow_task_base.py +0 -0
  123. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common.egg-info/SOURCES.txt +0 -0
  124. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common.egg-info/dependency_links.txt +0 -0
  125. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/dkist_processing_common.egg-info/top_level.txt +0 -0
  126. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/Makefile +0 -0
  127. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/changelog.rst +0 -0
  128. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/conf.py +0 -0
  129. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/index.rst +0 -0
  130. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/landing_page.rst +0 -0
  131. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/make.bat +0 -0
  132. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/docs/requirements.txt +0 -0
  133. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/licenses/LICENSE.rst +0 -0
  134. {dkist_processing_common-10.6.1rc3 → dkist_processing_common-10.6.1rc5}/setup.cfg +0 -0
@@ -0,0 +1,175 @@
+ Metadata-Version: 2.2
+ Name: dkist-processing-common
+ Version: 10.6.1rc5
+ Summary: Common task classes used by the DKIST science data processing pipelines
+ Author-email: NSO / AURA <dkistdc@nso.edu>
+ License: BSD-3-Clause
+ Project-URL: Homepage, https://nso.edu/dkist/data-center/
+ Project-URL: Repository, https://bitbucket.org/dkistdc/dkist-processing-common/
+ Project-URL: Documentation, https://docs.dkist.nso.edu/projects/common
+ Project-URL: Help, https://nso.atlassian.net/servicedesk/customer/portal/5
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.11
+ Requires-Python: >=3.11
+ Description-Content-Type: text/x-rst
+ Requires-Dist: asdf<4.0.0,>=3.5.0
+ Requires-Dist: astropy<7.0.0,>=6.0.0
+ Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
+ Requires-Dist: dkist-header-validator<6.0,>=5.0.0
+ Requires-Dist: dkist-processing-core==5.1.0
+ Requires-Dist: dkist-processing-pac<4.0,>=3.1
+ Requires-Dist: dkist-service-configuration<3.0,>=2.0.2
+ Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
+ Requires-Dist: globus-sdk>=3.12.0
+ Requires-Dist: gqlclient[pydantic]==1.2.3
+ Requires-Dist: sqids==0.5.1
+ Requires-Dist: matplotlib>=3.4
+ Requires-Dist: moviepy>=2.0.0
+ Requires-Dist: numpy>=1.20.2
+ Requires-Dist: object-clerk==0.1.1
+ Requires-Dist: pandas>=1.4.2
+ Requires-Dist: pillow>=10.2.0
+ Requires-Dist: pydantic>=2.0
+ Requires-Dist: redis==4.6.0
+ Requires-Dist: requests>=2.23
+ Requires-Dist: scipy>=1.15.1
+ Requires-Dist: sunpy>=3.0.0
+ Requires-Dist: talus==1.1.0
+ Provides-Extra: test
+ Requires-Dist: pytest; extra == "test"
+ Requires-Dist: pytest-xdist; extra == "test"
+ Requires-Dist: pytest-cov; extra == "test"
+ Requires-Dist: pytest-mock; extra == "test"
+ Requires-Dist: hypothesis; extra == "test"
+ Requires-Dist: towncrier; extra == "test"
+ Requires-Dist: dkist-data-simulator>=5.0.0; extra == "test"
+ Requires-Dist: dkist-processing-common[inventory]; extra == "test"
+ Requires-Dist: dkist-processing-common[asdf]; extra == "test"
+ Requires-Dist: dkist-processing-common[quality]; extra == "test"
+ Provides-Extra: docs
+ Requires-Dist: sphinx; extra == "docs"
+ Requires-Dist: sphinx-astropy; extra == "docs"
+ Requires-Dist: sphinx-changelog; extra == "docs"
+ Requires-Dist: sphinx-autoapi!=3.1.0; extra == "docs"
+ Requires-Dist: pytest; extra == "docs"
+ Requires-Dist: towncrier<22.12.0; extra == "docs"
+ Requires-Dist: dkist-sphinx-theme; extra == "docs"
+ Provides-Extra: inventory
+ Requires-Dist: dkist-inventory<2.0,>=1.6.0; extra == "inventory"
+ Provides-Extra: asdf
+ Requires-Dist: dkist-inventory[asdf]<2.0,>=1.6.0; extra == "asdf"
+ Provides-Extra: quality
+ Requires-Dist: dkist-quality<2.0,>=1.2.1; extra == "quality"
+
+ dkist-processing-common
+ =======================
+
+ This repository works in concert with `dkist-processing-core <https://pypi.org/project/dkist-processing-core/>`_ and `dkist-processing-*instrument*` to
+ form the DKIST calibration processing stack.
+
+ Usage
+ -----
+
+ The classes in this repository should be used as the base of any DKIST processing pipeline task. Science tasks should subclass `ScienceTaskL0ToL1Base`.
+
+ Each class is built on an abstract base class with the `run` method left for a developer to fill out with the required steps that the task should take.
+ This class is then used as the callable object for the workflow and scheduling engine.
+
+ Example
+ -------
+
+ .. code-block:: python
+
+     from dkist_processing_common.tasks.base import ScienceTaskL0ToL1Base
+
+
+     class RemoveArtifacts(ScienceTaskL0ToL1Base):
+         def run(self):
+             # task code here
+             total = 2 + 5
+
+ Deployment
+ ----------
+
+ dkist-processing-common is deployed to `PyPI <https://pypi.org/project/dkist-processing-common/>`_.
+
+ Development
+ -----------
+
+ There are two prerequisites for test execution on a local machine:
+
+
+ * Redis. A running instance of Redis on the local machine is required. The tests use the default host of localhost and port 6379 to connect to the database.
+
+ * RabbitMQ. A running instance of RabbitMQ on the local machine is required. The tests use the default host of localhost and port 5672 to connect to the interservice bus.
+
+
+ To run the tests locally, clone the repository and install the package in editable mode with the test extras.
+
+
+ .. code-block:: bash
+
+     git clone git@bitbucket.org:dkistdc/dkist-processing-common.git
+     cd dkist-processing-common
+     pre-commit install
+     pip install -e .[test]
+     # Redis and RabbitMQ must be running
+     pytest -v --cov dkist_processing_common
+
+ Changelog
+ #########
+
+ Any change you make to this repository **MUST** be accompanied by a changelog file.
+ The changelog for this repository uses the `towncrier <https://github.com/twisted/towncrier>`__ package.
+ Entries in the changelog for the next release are added as individual files (one per change) to the ``changelog/`` directory.
+
+ Writing a Changelog Entry
+ ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ A changelog entry accompanying a change should be added to the ``changelog/`` directory.
+ The name of a file in this directory follows a specific template::
+
+     <PULL REQUEST NUMBER>.<TYPE>[.<COUNTER>].rst
+
+ The fields have the following meanings:
+
+ * ``<PULL REQUEST NUMBER>``: This is the number of the pull request, so people can jump from the changelog entry to the diff on BitBucket.
+ * ``<TYPE>``: This is the type of the change and must be one of the values described below.
+ * ``<COUNTER>``: This is an optional field; if you make more than one change of the same type, append a counter to the subsequent changes, e.g. ``100.bugfix.rst`` and ``100.bugfix.1.rst`` for two bugfix changes in the same PR.
+
+ The list of possible types is defined in the towncrier section of ``pyproject.toml``; the types are:
+
+ * ``feature``: This change is a new code feature.
+ * ``bugfix``: This is a change which fixes a bug.
+ * ``doc``: A documentation change.
+ * ``removal``: A deprecation or removal of public API.
+ * ``misc``: Any small change which doesn't fit anywhere else, such as a change to the package infrastructure.
+
+
+ Rendering the Changelog at Release Time
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ When you are about to tag a release, you must first run ``towncrier`` to render the changelog.
+ The steps for this are as follows:
+
+ * Run ``towncrier build --version vx.y.z`` using the version number you want to tag.
+ * Agree to have towncrier remove the fragments.
+ * Add and commit your changes.
+ * Tag the release.
+
+ **NOTE:** If you forget to add a changelog entry to a tagged release (either manually or automatically with ``towncrier``)
+ then the Bitbucket pipeline will fail. To be able to use the same tag you must delete it locally and on the remote branch:
+
+ .. code-block:: bash
+
+     # First, actually update the CHANGELOG and commit the update
+     git commit
+
+     # Delete tags
+     git tag -d vWHATEVER.THE.VERSION
+     git push --delete origin vWHATEVER.THE.VERSION
+
+     # Re-tag with the same version
+     git tag vWHATEVER.THE.VERSION
+     git push --tags origin main
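
As a concrete illustration of the changelog workflow the README above describes (the PR number 236 matches the fragment files elsewhere in this diff; the fragment text here is hypothetical):

.. code-block:: bash

    # Two misc changes from the same PR use the optional counter suffix
    echo "Convert graphql dataclasses to Pydantic models." > changelog/236.misc.rst
    echo "Simplify metadata store response parsing." > changelog/236.misc.1.rst

    # At release time, render the fragments into CHANGELOG.rst and commit the result
    towncrier build --version v10.6.1rc5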
@@ -0,0 +1,2 @@
+ Change returns from the metadata store queries into Pydantic BaseModel instances. Remove unnecessary parsing
+ and error checking in the metadata store mixin.
@@ -0,0 +1,3 @@
+ Convert dataclasses in the graphql model to Pydantic BaseModels for additional validation. In the
+ RecipeRunResponse class, configuration is converted from a JSON dictionary to its own Pydantic BaseModel.
+ In the InputDatasetPartResponse class, the inputDatasetPartDocument is now returned as a list of dictionaries.
@@ -1,6 +1,6 @@
  """GraphQL Data models for the metadata store api."""
  from pydantic import BaseModel
- from pydantic import Field
+ from pydantic import field_validator
  from pydantic import Json


@@ -85,13 +85,32 @@ class RecipeRunProvenanceResponse(BaseModel):
      isTaskManual: bool


+ class RecipeRunConfiguration(BaseModel):
+     """Response class for a recipe run configuration dictionary."""
+
+     validate_l1_on_write: bool = True
+     destination_bucket: str = "data"
+     tile_size: int | None = None
+     trial_directory_name: str | None = None
+     trial_root_directory_name: str | None = None
+     teardown_enabled: bool = True
+     trial_exclusive_transfer_tag_lists: list[str] | None = None
+
+
  class RecipeRunResponse(BaseModel):
      """Recipe run query response."""

      recipeInstance: RecipeInstanceResponse
      recipeInstanceId: int
      recipeRunProvenances: list[RecipeRunProvenanceResponse]
-     configuration: Json[dict] | None = Field(default_factory=dict)
+     configuration: Json[dict] | None
+
+     @field_validator("configuration", mode="after")
+     @classmethod
+     def _use_recipe_run_configuration_model(cls, value):
+         if value is None:
+             return RecipeRunConfiguration()
+         return RecipeRunConfiguration.model_validate(value)


  class RecipeRunMutationResponse(BaseModel):
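
The effect of the hunk above is that ``configuration`` is always a ``RecipeRunConfiguration`` instance after validation, whether the metadata store returns a JSON string or null. A minimal standalone sketch of that behavior (plain Pydantic v2, not the package's actual test suite; the model fields are trimmed for brevity):

.. code-block:: python

    import json

    from pydantic import BaseModel, Json, field_validator


    class RecipeRunConfiguration(BaseModel):
        destination_bucket: str = "data"
        teardown_enabled: bool = True


    class RecipeRunResponse(BaseModel):
        configuration: Json[dict] | None

        @field_validator("configuration", mode="after")
        @classmethod
        def _use_config_model(cls, value):
            # None becomes an all-defaults config; a parsed dict is validated
            return RecipeRunConfiguration.model_validate(value or {})


    response = RecipeRunResponse(configuration=json.dumps({"teardown_enabled": False}))
    assert response.configuration.teardown_enabled is False
    assert RecipeRunResponse(configuration=None).configuration.destination_bucket == "data"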
@@ -4,6 +4,8 @@ import logging
  from functools import cached_property
  from typing import Literal

+ from pydantic import validate_call
+
  from dkist_processing_common._util.graphql import GraphQLClient
  from dkist_processing_common.codecs.quality import QualityDataEncoder
  from dkist_processing_common.config import common_configurations
@@ -61,6 +63,7 @@ class MetadataStoreMixin:
          if len(response) > 0:
              return response[0].recipeRunStatusId

+     @validate_call
      def _metadata_store_create_recipe_run_status(self, status: str, is_complete: bool) -> int:
          """
          Add a new recipe run status to the db.
@@ -75,10 +78,6 @@ class MetadataStoreMixin:
              "marked complete.",
          }

-         if not isinstance(status, str):
-             raise TypeError(f"status must be of type str: {status}")
-         if not isinstance(is_complete, bool):
-             raise TypeError(f"is_complete must be of type bool: {is_complete}")
          params = RecipeRunStatusMutation(
              recipeRunStatusName=status,
              isComplete=is_complete,
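
The deleted ``isinstance`` checks are now handled by the ``@validate_call`` decorator added above: Pydantic validates (and, where sensible, coerces) the arguments against the annotations at call time. A minimal sketch of the behavior this swaps in (illustrative function, not the mixin itself):

.. code-block:: python

    from pydantic import ValidationError, validate_call


    @validate_call
    def create_recipe_run_status(status: str, is_complete: bool) -> int:
        return 1


    create_recipe_run_status("INPROGRESS", False)  # passes validation

    try:
        create_recipe_run_status(status=42, is_complete="not a bool")
    except ValidationError as exc:
        # Replaces the hand-written TypeError checks removed in this hunk
        print(exc)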
@@ -197,7 +196,7 @@ class MetadataStoreMixin:
      # INPUT DATASET RECIPE RUN

      @cached_property
-     def metadata_store_input_dataset_recipe_run_response(self) -> InputDatasetRecipeRunResponse:
+     def metadata_store_input_dataset_recipe_run(self) -> InputDatasetRecipeRunResponse:
          """Get the input dataset recipe run response from the metadata store."""
          params = RecipeRunQuery(recipeRunId=self.recipe_run_id)
          response = self.metadata_store_client.execute_gql_query(
@@ -213,7 +212,7 @@ class MetadataStoreMixin:
          """Get the input dataset part by input dataset part type name."""
          part_type_dict = {}
          parts = (
-             self.metadata_store_input_dataset_recipe_run_response.recipeInstance.inputDataset.inputDatasetInputDatasetParts
+             self.metadata_store_input_dataset_recipe_run.recipeInstance.inputDataset.inputDatasetInputDatasetParts
          )
          for part in parts:
              part_type_name = part.inputDatasetPart.inputDatasetPartType.inputDatasetPartTypeName
@@ -19,7 +19,7 @@ class OutputDataBase(WorkflowTaskBase, ABC):
      @cached_property
      def destination_bucket(self) -> str:
          """Get the destination bucket."""
-         return self.metadata_store_recipe_run.configuration.get("destination_bucket", "data")
+         return self.metadata_store_recipe_run.configuration.destination_bucket

      def format_object_key(self, path: Path) -> str:
          """
@@ -22,7 +22,7 @@ class TeardownBase(WorkflowTaskBase, ABC):
      @property
      def teardown_enabled(self) -> bool:
          """Recipe run configuration indicating if data should be removed at the end of a run."""
-         return self.metadata_store_recipe_run.configuration.get("teardown_enabled", True)
+         return self.metadata_store_recipe_run.configuration.teardown_enabled

      def run(self) -> None:
          """Run method for Teardown class."""
@@ -2,6 +2,7 @@
  import logging
  from pathlib import Path

+ from dkist_processing_common.codecs.json import json_encoder
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks.base import WorkflowTaskBase
  from dkist_processing_common.tasks.mixin.globus import GlobusMixin
@@ -20,11 +21,11 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin, InputDatasetMixin):
      def download_input_dataset(self):
          """Get the input dataset document parts and save it to scratch with the appropriate tags."""
          if doc := self.metadata_store_input_dataset_observe_frames.inputDatasetPartDocument:
-             self.write(doc.encode("utf-8"), tags=Tag.input_dataset_observe_frames())
+             self.write(doc, tags=Tag.input_dataset_observe_frames(), encoder=json_encoder)
          if doc := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartDocument:
-             self.write(doc.encode("utf-8"), tags=Tag.input_dataset_calibration_frames())
+             self.write(doc, tags=Tag.input_dataset_calibration_frames(), encoder=json_encoder)
          if doc := self.metadata_store_input_dataset_parameters.inputDatasetPartDocument:
-             self.write(doc.encode("utf-8"), tags=Tag.input_dataset_parameters())
+             self.write(doc, tags=Tag.input_dataset_parameters(), encoder=json_encoder)

      def format_transfer_items(
          self, input_dataset_objects: list[InputDatasetObject]
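
Because the document parts now arrive as Python structures (lists of dicts) rather than pre-serialized strings, serialization moves into the ``write`` call via ``json_encoder``. The package's actual codec implementation is not shown in this diff; conceptually it is a hook of roughly this shape (hypothetical sketch):

.. code-block:: python

    import json
    from typing import Any


    def json_encoder(data: Any, **dumps_kwargs) -> bytes:
        # Turn any JSON-serializable object into the bytes that scratch storage expects
        return json.dumps(data, **dumps_kwargs).encode("utf-8")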
@@ -43,25 +43,23 @@ class TransferTrialData(TransferDataBase, GlobusMixin):

      @cached_property
      def destination_bucket(self) -> str:
-         """Get the destination bucket with a trial default."""
-         return self.metadata_store_recipe_run.configuration.get("destination_bucket", "etc")
+         """Get the destination bucket."""
+         return self.metadata_store_recipe_run.configuration.destination_bucket

      @property
      def destination_root_folder(self) -> Path:
          """Format the destination root folder with a value that can be set in the recipe run configuration."""
-         root_name_from_configuration = self.metadata_store_recipe_run.configuration.get(
-             "trial_root_directory_name"
+         root_name_from_config = (
+             self.metadata_store_recipe_run.configuration.trial_root_directory_name
          )
-         root_name = Path(root_name_from_configuration or super().destination_root_folder)
-
+         root_name = Path(root_name_from_config or super().destination_root_folder)
          return root_name

      @property
      def destination_folder(self) -> Path:
          """Format the destination folder with a parent that can be set by the recipe run configuration."""
-         dir_name = self.metadata_store_recipe_run.configuration.get("trial_directory_name") or Path(
-             self.constants.dataset_id
-         )
+         dir_name_from_config = self.metadata_store_recipe_run.configuration.trial_directory_name
+         dir_name = dir_name_from_config or Path(self.constants.dataset_id)
          return self.destination_root_folder / dir_name

      @property
@@ -71,9 +69,12 @@ class TransferTrialData(TransferDataBase, GlobusMixin):
          Defaults to transferring all product files. Setting `trial_exclusive_transfer_tag_lists` in the
          recipe run configuration to a list of tag lists will override the default.
          """
-         return self.metadata_store_recipe_run.configuration.get(
-             "trial_exclusive_transfer_tag_lists", self.default_transfer_tag_lists
+         tag_list_from_config = (
+             self.metadata_store_recipe_run.configuration.trial_exclusive_transfer_tag_lists
          )
+         if tag_list_from_config is not None:
+             return tag_list_from_config
+         return self.default_transfer_tag_lists

      @property
      def output_frame_tag_list(self) -> list[list[str]]:
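
The rewritten property makes the precedence explicit: only ``None`` (the ``RecipeRunConfiguration`` default) falls back to ``default_transfer_tag_lists``, so a configured empty list reads unambiguously as "transfer nothing". The fallback logic in isolation (names are illustrative, not the package's API):

.. code-block:: python

    def resolve_tag_lists(configured: list | None, default: list[list[str]]) -> list[list[str]]:
        # Test against None, not truthiness, so an explicit empty list is honored
        if configured is not None:
            return configured
        return default


    assert resolve_tag_lists(None, [["OUTPUT"]]) == [["OUTPUT"]]
    assert resolve_tag_lists([], [["OUTPUT"]]) == []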
@@ -105,14 +105,14 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
          spec214_validator.validate(self.scratch.absolute_path(relative_path))

      @cached_property
-     def tile_size_param(self) -> int:
+     def tile_size_param(self) -> int | None:
          """Get the tile size parameter for compression."""
-         return self.metadata_store_recipe_run.configuration.get("tile_size", None)
+         return self.metadata_store_recipe_run.configuration.tile_size

      @cached_property
      def validate_l1_on_write(self) -> bool:
          """Check for validate on write."""
-         return self.metadata_store_recipe_run.configuration.get("validate_l1_on_write", True)
+         return self.metadata_store_recipe_run.configuration.validate_l1_on_write

      @cached_property
      def workflow_had_manual_intervention(self):
@@ -333,6 +333,79 @@ def max_cs_step_time_sec() -> float:


  class FakeGQLClient:
+
+     observe_frames_doc_object = [
+         {
+             "bucket": uuid4().hex[:6],
+             "object_keys": [Path(uuid4().hex[:6]).as_posix() for _ in range(3)],
+         }
+     ]
+
+     calibration_frames_doc_object = [
+         {
+             "bucket": uuid4().hex[:6],
+             "object_keys": [Path(uuid4().hex[:6]).as_posix() for _ in range(3)],
+         },
+         {
+             "bucket": uuid4().hex[:6],
+             "object_keys": [Path(uuid4().hex[:6]).as_posix() for _ in range(3)],
+         },
+     ]
+
+     parameters_doc_object = [
+         {
+             "parameterName": "param_name_1",
+             "parameterValues": [
+                 {
+                     "parameterValueId": 1,
+                     "parameterValue": json.dumps([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
+                     "parameterValueStartDate": "2000-01-01",
+                 }
+             ],
+         },
+         {
+             "parameterName": "param_name_2",
+             "parameterValues": [
+                 {
+                     "parameterValueId": 2,
+                     "parameterValue": json.dumps(
+                         {
+                             "__file__": {
+                                 "bucket": "data",
+                                 "objectKey": f"parameters/param_name/{uuid4().hex}.dat",
+                             }
+                         }
+                     ),
+                     "parameterValueStartDate": "2000-01-01",
+                 },
+                 {
+                     "parameterValueId": 3,
+                     "parameterValue": json.dumps(
+                         {
+                             "__file__": {
+                                 "bucket": "data",
+                                 "objectKey": f"parameters/param_name/{uuid4().hex}.dat",
+                             }
+                         }
+                     ),
+                     "parameterValueStartDate": "2000-01-02",
+                 },
+             ],
+         },
+         {
+             "parameterName": "param_name_4",
+             "parameterValues": [
+                 {
+                     "parameterValueId": 4,
+                     "parameterValue": json.dumps(
+                         {"a": 1, "b": 3.14159, "c": "foo", "d": [1, 2, 3]}
+                     ),
+                     "parameterValueStartDate": "2000-01-01",
+                 }
+             ],
+         },
+     ]
+
      def __init__(self, *args, **kwargs):
          pass

@@ -352,7 +425,9 @@ class FakeGQLClient:
                  InputDatasetInputDatasetPartResponse(
                      inputDatasetPart=InputDatasetPartResponse(
                          inputDatasetPartId=1,
-                         inputDatasetPartDocument='[{"parameterName": "", "parameterValues": [{"parameterValueId": 1, "parameterValue": "[[1,2,3],[4,5,6],[7,8,9]]", "parameterValueStartDate": "1/1/2000"}]}]',
+                         inputDatasetPartDocument=json.dumps(
+                             self.parameters_doc_object
+                         ),
                          inputDatasetPartType=InputDatasetPartTypeResponse(
                              inputDatasetPartTypeName="parameters"
                          ),
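
Note the double encoding in these fixtures: ``parameters_doc_object`` is a Python list serialized with ``json.dumps`` for ``inputDatasetPartDocument``, and each ``parameterValue`` inside it is itself a JSON string. Unpacking one file-backed value therefore takes a second ``json.loads``, mirroring the ``create_parameter_files`` logic further down (the object key below is illustrative):

.. code-block:: python

    import json

    value = {
        "parameterValueId": 2,
        "parameterValue": json.dumps(
            {"__file__": {"bucket": "data", "objectKey": "parameters/param_name/abc123.dat"}}
        ),
        "parameterValueStartDate": "2000-01-01",
    }

    parameter_value = json.loads(value["parameterValue"])
    object_key = parameter_value["__file__"]["objectKey"]  # -> "parameters/param_name/abc123.dat"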
@@ -361,15 +436,9 @@ class FakeGQLClient:
                  InputDatasetInputDatasetPartResponse(
                      inputDatasetPart=InputDatasetPartResponse(
                          inputDatasetPartId=2,
-                         inputDatasetPartDocument="""[
-                             {
-                                 "bucket": "bucket_name",
-                                 "object_keys": [
-                                     "key1",
-                                     "key2"
-                                 ]
-                             }
-                         ]""",
+                         inputDatasetPartDocument=json.dumps(
+                             self.observe_frames_doc_object
+                         ),
                          inputDatasetPartType=InputDatasetPartTypeResponse(
                              inputDatasetPartTypeName="observe_frames"
                          ),
@@ -378,15 +447,9 @@ class FakeGQLClient:
                  InputDatasetInputDatasetPartResponse(
                      inputDatasetPart=InputDatasetPartResponse(
                          inputDatasetPartId=3,
-                         inputDatasetPartDocument="""[
-                             {
-                                 "bucket": "bucket_name",
-                                 "object_keys": [
-                                     "key3",
-                                     "key4"
-                                 ]
-                             }
-                         ]""",
+                         inputDatasetPartDocument=json.dumps(
+                             self.calibration_frames_doc_object
+                         ),
                          inputDatasetPartType=InputDatasetPartTypeResponse(
                              inputDatasetPartTypeName="calibration_frames"
                          ),
@@ -417,14 +480,6 @@ class FakeGQLClient:
          ...


- class FakeGQLClientNoRecipeConfiguration(FakeGQLClient):
-     def execute_gql_query(self, **kwargs):
-         response = super().execute_gql_query(**kwargs)
-         if type(response[0]) == RecipeRunResponse:
-             response[0].configuration = {}
-         return response
-
-
  # All the following stuff is copied from dkist-processing-pac
  def compute_telgeom(time_hst: Time):
      dkist_lon = (156 + 15 / 60.0 + 21.7 / 3600.0) * (-1)
@@ -774,43 +829,21 @@ def task_with_input_dataset(
      yield task


- def create_parameter_files(task: WorkflowTaskBase, expected_parameters: dict):
+ def create_parameter_files(
+     task: WorkflowTaskBase, parameters_doc: list[dict] = FakeGQLClient.parameters_doc_object
+ ):
      """
-     Create the parameter files required by the task.
-
-     Parameters
-     ----------
-     task
-         The task associated with these parameters
-
-     expected_parameters
-         A dict of parameters with the format shown below
-
-     Returns
-     -------
-     None
-
-     expected_parameters is a dict with the parameter names as the keys
-     and the values are a list of value dicts for each parameter:
-     expected_parameters =
-         { 'parameter_name_1': [param_dict_1, param_dict_2, ...],
-           'parameter_name_2': [param_dict_1, param_dict_2, ...],
-           ...
-         }
-     where the param_dicts have the following format:
-     sample_param_dict =
-         { "parameterValueId": <param_id>,
-           "parameterValue": <param_value>,
-           "parameterValueStartDate": <start_date>
-         }
+     Create the parameter files specified in the parameters document returned by the metadata store.
+
+     This fixture assumes that the JSON parameters document has already been loaded into a python
+     structure, but the parameter values themselves are still JSON.
      """
-     # Loop over all the parameter values. Each value is a list of parameterValue dicts
-     for expected_parameter_values in expected_parameters.values():
-         for value_dict in expected_parameter_values:
-             if "__file__" not in value_dict["parameterValue"]:
+     for parameter in parameters_doc:
+         for value in parameter["parameterValues"]:
+             if "__file__" not in value["parameterValue"]:
                  continue
-             value = json.loads(value_dict["parameterValue"])
-             param_path = value["__file__"]["objectKey"]
+             parameter_value = json.loads(value["parameterValue"])
+             param_path = parameter_value["__file__"]["objectKey"]
              file_path = task.scratch.workflow_base_path / Path(param_path)
              if not file_path.parent.exists():
                  file_path.parent.mkdir(parents=True, exist_ok=True)
@@ -308,26 +308,8 @@ def test_input_dataset_parameters(
      task = task_with_input_dataset
      doc_part, _ = input_dataset_parts
      doc_part = doc_part or []  # None case parsing of expected values
-     """
-     expected_parameters is a dict with the parameter names as the keys
-     and the values are a list of value dicts for each parameter:
-     expected_parameters =
-         { 'parameter_name_1': [param_dict_1, param_dict_2, ...],
-           'parameter_name_2': [param_dict_1, param_dict_2, ...],
-           ...
-         }
-     where the param_dicts have the following format:
-     sample_param_dict =
-         { "parameterValueId": <param_id>,
-           "parameterValue": <param_value>,
-           "parameterValueStartDate": <start_date>
-         }
-     """
-     expected_parameters = dict()
-     for item in doc_part:
-         expected_parameters[item["parameterName"]] = item["parameterValues"]
-     create_parameter_files(task, expected_parameters)
-     # key is param name, values is list of InputDatasetParameterValue objects
+     create_parameter_files(task, doc_part)
+     expected_parameters = {item["parameterName"]: item["parameterValues"] for item in doc_part}
      for key, values in task.input_dataset_parameters.items():
          assert key in expected_parameters
          expected_values = expected_parameters[key]