dkist-processing-common 10.6.0rc2.tar.gz → 10.6.1rc1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
Files changed (128)
  1. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/CHANGELOG.rst +10 -0
  2. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/PKG-INFO +1 -1
  3. dkist_processing_common-10.6.1rc1/changelog/236.misc.1.rst +1 -0
  4. dkist_processing_common-10.6.1rc1/changelog/236.misc.rst +1 -0
  5. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/graphql.py +25 -45
  6. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/metadata_store.py +112 -199
  7. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/output_data_base.py +1 -1
  8. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/teardown.py +1 -1
  9. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/transfer_input_data.py +3 -3
  10. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/trial_output_data.py +6 -6
  11. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/write_l1.py +8 -8
  12. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/conftest.py +4 -3
  13. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_teardown.py +1 -1
  14. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_trial_output_data.py +11 -15
  15. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_workflow_task_base.py +1 -1
  16. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_write_l1.py +9 -3
  17. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common.egg-info/PKG-INFO +1 -1
  18. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common.egg-info/SOURCES.txt +2 -1
  19. dkist_processing_common-10.6.0rc2/changelog/235.feature.rst +0 -2
  20. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/.gitignore +0 -0
  21. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/.pre-commit-config.yaml +0 -0
  22. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/.readthedocs.yml +0 -0
  23. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/.snyk +0 -0
  24. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/README.rst +0 -0
  25. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/bitbucket-pipelines.yml +0 -0
  26. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/changelog/.gitempty +0 -0
  27. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/__init__.py +0 -0
  28. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/_util/__init__.py +0 -0
  29. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/_util/constants.py +0 -0
  30. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/_util/graphql.py +0 -0
  31. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/_util/scratch.py +0 -0
  32. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/_util/tags.py +0 -0
  33. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/__init__.py +0 -0
  34. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/asdf.py +0 -0
  35. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/bytes.py +0 -0
  36. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/fits.py +0 -0
  37. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/iobase.py +0 -0
  38. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/json.py +0 -0
  39. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/path.py +0 -0
  40. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/quality.py +0 -0
  41. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/codecs/str.py +0 -0
  42. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/config.py +0 -0
  43. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/fonts/Lato-Regular.ttf +0 -0
  44. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/fonts/__init__.py +0 -0
  45. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/manual.py +0 -0
  46. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/__init__.py +0 -0
  47. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/constants.py +0 -0
  48. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/fits_access.py +0 -0
  49. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/flower_pot.py +0 -0
  50. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/message.py +0 -0
  51. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/message_queue_binding.py +0 -0
  52. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/metric_code.py +0 -0
  53. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/parameters.py +0 -0
  54. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/quality.py +0 -0
  55. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/tags.py +0 -0
  56. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/task_name.py +0 -0
  57. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/models/wavelength.py +0 -0
  58. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/__init__.py +0 -0
  59. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/cs_step.py +0 -0
  60. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/dsps_repeat.py +0 -0
  61. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/experiment_id_bud.py +0 -0
  62. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/id_bud.py +0 -0
  63. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/l0_fits_access.py +0 -0
  64. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/l1_fits_access.py +0 -0
  65. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/near_bud.py +0 -0
  66. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/proposal_id_bud.py +0 -0
  67. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/quality.py +0 -0
  68. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/retarder.py +0 -0
  69. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/single_value_single_key_flower.py +0 -0
  70. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/task.py +0 -0
  71. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/time.py +0 -0
  72. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/unique_bud.py +0 -0
  73. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/parsers/wavelength.py +0 -0
  74. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/__init__.py +0 -0
  75. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/assemble_movie.py +0 -0
  76. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/base.py +0 -0
  77. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/l1_output_data.py +0 -0
  78. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/__init__.py +0 -0
  79. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/globus.py +0 -0
  80. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/input_dataset.py +0 -0
  81. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/interservice_bus.py +0 -0
  82. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/object_store.py +0 -0
  83. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/quality/__init__.py +0 -0
  84. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/quality/_base.py +0 -0
  85. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/mixin/quality/_metrics.py +0 -0
  86. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/parse_l0_input_data.py +0 -0
  87. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/quality_metrics.py +0 -0
  88. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tasks/trial_catalog.py +0 -0
  89. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/__init__.py +0 -0
  90. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_assemble_movie.py +0 -0
  91. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_assemble_quality.py +0 -0
  92. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_base.py +0 -0
  93. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_codecs.py +0 -0
  94. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_constants.py +0 -0
  95. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_cs_step.py +0 -0
  96. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_fits_access.py +0 -0
  97. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_flower_pot.py +0 -0
  98. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_input_dataset.py +0 -0
  99. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_interservice_bus.py +0 -0
  100. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_interservice_bus_mixin.py +0 -0
  101. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_output_data_base.py +0 -0
  102. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_parameters.py +0 -0
  103. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_parse_l0_input_data.py +0 -0
  104. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_publish_catalog_messages.py +0 -0
  105. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_quality.py +0 -0
  106. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_quality_mixin.py +0 -0
  107. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_scratch.py +0 -0
  108. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_stems.py +0 -0
  109. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_submit_dataset_metadata.py +0 -0
  110. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_tags.py +0 -0
  111. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_task_name.py +0 -0
  112. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_task_parsing.py +0 -0
  113. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_transfer_input_data.py +0 -0
  114. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_transfer_l1_output_data.py +0 -0
  115. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common/tests/test_trial_catalog.py +0 -0
  116. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common.egg-info/dependency_links.txt +0 -0
  117. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common.egg-info/requires.txt +0 -0
  118. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/dkist_processing_common.egg-info/top_level.txt +0 -0
  119. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/Makefile +0 -0
  120. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/changelog.rst +0 -0
  121. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/conf.py +0 -0
  122. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/index.rst +0 -0
  123. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/landing_page.rst +0 -0
  124. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/make.bat +0 -0
  125. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/docs/requirements.txt +0 -0
  126. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/licenses/LICENSE.rst +0 -0
  127. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/pyproject.toml +0 -0
  128. {dkist_processing_common-10.6.0rc2 → dkist_processing_common-10.6.1rc1}/setup.cfg +0 -0
CHANGELOG.rst
@@ -1,3 +1,13 @@
+ v10.6.0 (2025-03-03)
+ ====================
+
+ Features
+ --------
+
+ - Add the `RetarderNameBud` that can parse the name of the GOS retarder and ensure that only a single retarder was used
+   for the given set of POLCAL input data. (`#235 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/235>`__)
+
+
  v10.5.15 (2025-02-24)
  =====================

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: dkist-processing-common
- Version: 10.6.0rc2
+ Version: 10.6.1rc1
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
changelog/236.misc.1.rst
@@ -0,0 +1 @@
+ Change returns from the metadata store queries into Pydantic BaseModel instances. Remove unnecessary parsing and error checking.
changelog/236.misc.rst
@@ -0,0 +1 @@
+ Convert dataclasses in the graphql model to Pydantic BaseModels for additional validation. In the RecipeRunResponse class, configuration is now returned as a dictionary. In the InputDatasetPartResponse class, inputDatasetPartDocument is now returned as a list of dictionaries.
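The two notes above are easiest to see with pydantic itself. The sketch below is illustrative only (the class names PartSketch and RunSketch and all field values are invented); it assumes pydantic v2, where a Json[...] annotation accepts a JSON-encoded string and parses it during validation, mirroring the new inputDatasetPartDocument and configuration fields.

from pydantic import BaseModel, Json


class PartSketch(BaseModel):
    # Mirrors InputDatasetPartResponse.inputDatasetPartDocument: Json[list[dict]]
    inputDatasetPartDocument: Json[list[dict]]


class RunSketch(BaseModel):
    # Mirrors RecipeRunResponse.configuration: Json[dict] | None = {}
    configuration: Json[dict] | None = {}


part = PartSketch(inputDatasetPartDocument='[{"bucket": "data", "object_keys": []}]')
assert isinstance(part.inputDatasetPartDocument, list)  # JSON string parsed into list[dict]

run = RunSketch(configuration='{"teardown_enabled": false}')
assert run.configuration == {"teardown_enabled": False}  # JSON string parsed into a dict
assert RunSketch().configuration == {}  # an absent configuration falls back to an empty dict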
dkist_processing_common/models/graphql.py
@@ -1,24 +1,22 @@
  """GraphQL Data models for the metadata store api."""
- from dataclasses import dataclass
+ from pydantic import BaseModel
+ from pydantic import Json


- @dataclass
- class RecipeRunMutation:
+ class RecipeRunMutation(BaseModel):
      """Recipe run mutation record."""

      recipeRunId: int
      recipeRunStatusId: int


- @dataclass
- class RecipeRunStatusQuery:
+ class RecipeRunStatusQuery(BaseModel):
      """Recipe run status query for the recipeRunStatuses endpoint."""

      recipeRunStatusName: str


- @dataclass
- class RecipeRunStatusMutation:
+ class RecipeRunStatusMutation(BaseModel):
      """Recipe run status mutation record."""

      recipeRunStatusName: str
@@ -26,38 +24,33 @@ class RecipeRunStatusMutation:
      recipeRunStatusDescription: str


- @dataclass
- class RecipeRunStatusResponse:
+ class RecipeRunStatusResponse(BaseModel):
      """Response to a recipe run status query."""

      recipeRunStatusId: int


- @dataclass
- class InputDatasetPartTypeResponse:
+ class InputDatasetPartTypeResponse(BaseModel):
      """Response class for the input dataset part type entity."""

      inputDatasetPartTypeName: str


- @dataclass
- class InputDatasetPartResponse:
+ class InputDatasetPartResponse(BaseModel):
      """Response class for the input dataset part entity."""

      inputDatasetPartId: int
-     inputDatasetPartDocument: str
+     inputDatasetPartDocument: Json[list[dict]]
      inputDatasetPartType: InputDatasetPartTypeResponse


- @dataclass
- class InputDatasetInputDatasetPartResponse:
+ class InputDatasetInputDatasetPartResponse(BaseModel):
      """Response class for the join entity between input datasets and input dataset parts."""

      inputDatasetPart: InputDatasetPartResponse


- @dataclass
- class InputDatasetResponse:
+ class InputDatasetResponse(BaseModel):
      """Input dataset query response."""

      inputDatasetId: int
@@ -65,62 +58,54 @@ class InputDatasetResponse:
      inputDatasetInputDatasetParts: list[InputDatasetInputDatasetPartResponse]


- @dataclass
- class InputDatasetRecipeInstanceResponse:
+ class InputDatasetRecipeInstanceResponse(BaseModel):
      """Recipe instance query response."""

      inputDataset: InputDatasetResponse


- @dataclass
- class InputDatasetRecipeRunResponse:
+ class InputDatasetRecipeRunResponse(BaseModel):
      """Recipe run query response."""

      recipeInstance: InputDatasetRecipeInstanceResponse


- @dataclass
- class RecipeInstanceResponse:
+ class RecipeInstanceResponse(BaseModel):
      """Recipe instance query response."""

      recipeId: int
      inputDatasetId: int


- @dataclass
- class RecipeRunProvenanceResponse:
+ class RecipeRunProvenanceResponse(BaseModel):
      """Response for the metadata store recipeRunProvenances and mutations endpoints."""

      recipeRunProvenanceId: int
      isTaskManual: bool


- @dataclass
- class RecipeRunResponse:
+ class RecipeRunResponse(BaseModel):
      """Recipe run query response."""

      recipeInstance: RecipeInstanceResponse
      recipeInstanceId: int
      recipeRunProvenances: list[RecipeRunProvenanceResponse]
-     configuration: str = None
+     configuration: Json[dict] | None = {}


- @dataclass
- class RecipeRunMutationResponse:
+ class RecipeRunMutationResponse(BaseModel):
      """Recipe run mutation response."""

      recipeRunId: int


- @dataclass
- class RecipeRunQuery:
+ class RecipeRunQuery(BaseModel):
      """Query parameters for the metadata store endpoint recipeRuns."""

      recipeRunId: int


- @dataclass
- class DatasetCatalogReceiptAccountMutation:
+ class DatasetCatalogReceiptAccountMutation(BaseModel):
      """
      Dataset catalog receipt account mutation record.

@@ -132,15 +117,13 @@ class DatasetCatalogReceiptAccountMutation:
      expectedObjectCount: int


- @dataclass
- class DatasetCatalogReceiptAccountResponse:
+ class DatasetCatalogReceiptAccountResponse(BaseModel):
      """Dataset catalog receipt account response for query and mutation endpoints."""

      datasetCatalogReceiptAccountId: int


- @dataclass
- class RecipeRunProvenanceMutation:
+ class RecipeRunProvenanceMutation(BaseModel):
      """Recipe run provenance mutation record."""

      inputDatasetId: int
@@ -152,8 +135,7 @@ class RecipeRunProvenanceMutation:
      codeVersion: str = None


- @dataclass
- class QualityCreation:
+ class QualityCreation(BaseModel):
      """Quality data creation record."""

      datasetId: str
@@ -173,15 +155,13 @@ class QualityCreation:
      efficiencyData: str | None = None


- @dataclass
- class QualitiesRequest:
+ class QualitiesRequest(BaseModel):
      """Query parameters for quality data."""

      datasetId: str


- @dataclass
- class QualityResponse:
+ class QualityResponse(BaseModel):
      """Query Response for quality data."""

      qualityId: int
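As a quick check on the graphql.py changes above, a raw recipeRuns payload can now be validated directly into the nested models; configuration arrives as a JSON string and comes out as a dict. This is an illustrative sketch only: the payload values are invented and pydantic v2 behaviour is assumed.

from dkist_processing_common.models.graphql import RecipeRunResponse

payload = {
    "recipeInstance": {"recipeId": 1, "inputDatasetId": 7},
    "recipeInstanceId": 42,
    "recipeRunProvenances": [{"recipeRunProvenanceId": 3, "isTaskManual": False}],
    "configuration": '{"destination_bucket": "data", "teardown_enabled": true}',
}

recipe_run = RecipeRunResponse.model_validate(payload)
assert recipe_run.recipeInstance.inputDatasetId == 7  # nested models validated
assert recipe_run.configuration["destination_bucket"] == "data"  # Json[dict] parsed the string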
dkist_processing_common/tasks/mixin/metadata_store.py
@@ -2,6 +2,7 @@
  import json
  import logging
  from functools import cached_property
+ from typing import Literal

  from dkist_processing_common._util.graphql import GraphQLClient
  from dkist_processing_common.codecs.quality import QualityDataEncoder
@@ -26,8 +27,6 @@ from dkist_processing_common.models.graphql import RecipeRunStatusResponse

  logger = logging.getLogger(__name__)

- input_dataset_part_document_type_hint = list | dict | str | int | float | None
-

  class MetadataStoreMixin:
      """Mixin for a WorkflowDataTaskBase which implements Metadata Store access functionality."""
@@ -37,6 +36,8 @@ class MetadataStoreMixin:
          """Get the graphql client."""
          return GraphQLClient(common_configurations.metadata_store_api_base)

+     # RECIPE RUN STATUS
+
      def metadata_store_change_recipe_run_to_inprogress(self):
          """Set the recipe run status to "INPROGRESS"."""
          self._metadata_store_change_status(status="INPROGRESS", is_complete=False)
@@ -49,6 +50,76 @@ class MetadataStoreMixin:
          """Set the recipe run status to "TRIALSUCCESS"."""
          self._metadata_store_change_status(status="TRIALSUCCESS", is_complete=False)

+     def _metadata_store_recipe_run_status_id(self, status: str) -> None | int:
+         """Find the id of a recipe run status."""
+         params = RecipeRunStatusQuery(recipeRunStatusName=status)
+         response = self.metadata_store_client.execute_gql_query(
+             query_base="recipeRunStatuses",
+             query_response_cls=RecipeRunStatusResponse,
+             query_parameters=params,
+         )
+         if len(response) > 0:
+             return response[0].recipeRunStatusId
+
+     def _metadata_store_create_recipe_run_status(self, status: str, is_complete: bool) -> int:
+         """
+         Add a new recipe run status to the db.
+
+         :param status: name of the status to add
+         :param is_complete: does the new status correspond to an accepted completion state
+         """
+         recipe_run_statuses = {
+             "INPROGRESS": "Recipe run is currently undergoing processing",
+             "COMPLETEDSUCCESSFULLY": "Recipe run processing completed with no errors",
+             "TRIALSUCCESS": "Recipe run trial processing completed with no errors. Recipe run not "
+             "marked complete.",
+         }
+
+         if not isinstance(status, str):
+             raise TypeError(f"status must be of type str: {status}")
+         if not isinstance(is_complete, bool):
+             raise TypeError(f"is_complete must be of type bool: {is_complete}")
+         params = RecipeRunStatusMutation(
+             recipeRunStatusName=status,
+             isComplete=is_complete,
+             recipeRunStatusDescription=recipe_run_statuses[status],
+         )
+         recipe_run_status_response = self.metadata_store_client.execute_gql_mutation(
+             mutation_base="createRecipeRunStatus",
+             mutation_response_cls=RecipeRunStatusResponse,
+             mutation_parameters=params,
+         )
+         return recipe_run_status_response.recipeRunStatus.recipeRunStatusId
+
+     def _metadata_store_change_status(self, status: str, is_complete: bool):
+         """Change the recipe run status of a recipe run to the given status."""
+         recipe_run_status_id = self._metadata_store_recipe_run_status_id(status=status)
+         if not recipe_run_status_id:
+             recipe_run_status_id = self._metadata_store_create_recipe_run_status(
+                 status=status, is_complete=is_complete
+             )
+         self._metadata_store_update_status(recipe_run_status_id=recipe_run_status_id)
+
+     def _metadata_store_update_status(
+         self,
+         recipe_run_status_id: int,
+     ):
+         """
+         Change the status of a given recipe run id.
+
+         :param recipe_run_status_id: the new status to use
+         """
+         params = RecipeRunMutation(
+             recipeRunId=self.recipe_run_id, recipeRunStatusId=recipe_run_status_id
+         )
+         self.metadata_store_client.execute_gql_mutation(
+             mutation_base="updateRecipeRun",
+             mutation_parameters=params,
+             mutation_response_cls=RecipeRunMutationResponse,
+         )
+
+     # RECEIPT
+
      def metadata_store_add_dataset_receipt_account(
          self, dataset_id: str, expected_object_count: int
      ):
@@ -62,10 +133,12 @@ class MetadataStoreMixin:
              mutation_response_cls=DatasetCatalogReceiptAccountResponse,
          )

+     # PROVENANCE
+
      def metadata_store_record_provenance(self, is_task_manual: bool, library_versions: str):
          """Record the provenance record in the metadata store."""
          params = RecipeRunProvenanceMutation(
-             inputDatasetId=self.metadata_store_input_dataset_id,
+             inputDatasetId=self.metadata_store_recipe_run.recipeInstance.inputDatasetId,
              isTaskManual=is_task_manual,
              recipeRunId=self.recipe_run_id,
              taskName=self.task_name,
@@ -78,6 +151,8 @@ class MetadataStoreMixin:
              mutation_response_cls=RecipeRunProvenanceResponse,
          )

+     # QUALITY
+
      def metadata_store_add_quality_data(self, dataset_id: str, quality_data: list[dict]):
          """Add the quality data to the metadata-store."""
          if self.metadata_store_quality_data_exists(dataset_id):
@@ -119,222 +194,60 @@ class MetadataStoreMixin:
          )
          return bool(response)

-     def metadata_store_recipe_run_configuration(self) -> dict:
-         """Get the recipe run configuration from the metadata store."""
-         configuration_json = self._metadata_store_recipe_run().configuration
-         if configuration_json is None:
-             return {}
-         try:
-             configuration = json.loads(configuration_json)
-             if not isinstance(configuration, dict):
-                 raise ValueError(
-                     f"Invalid recipe run configuration format. "
-                     f"Expected json encoded dictionary, received json encoded {type(configuration)}"
-                 )
-             return configuration
-         except (json.JSONDecodeError, ValueError, TypeError, UnicodeDecodeError) as e:
-             logger.error(f"Invalid recipe run configuration")
-             raise e
+     # INPUT DATASET RECIPE RUN

      @cached_property
-     def metadata_store_input_dataset_parts(self) -> list[InputDatasetPartResponse]:
-         """Get the input dataset parts from the metadata store."""
+     def metadata_store_input_dataset_recipe_run_response(self) -> InputDatasetRecipeRunResponse:
+         """Get the input dataset recipe run response from the metadata store."""
          params = RecipeRunQuery(recipeRunId=self.recipe_run_id)
          response = self.metadata_store_client.execute_gql_query(
              query_base="recipeRuns",
              query_response_cls=InputDatasetRecipeRunResponse,
              query_parameters=params,
-         )  # queried independently of other recipe run metadata for performance
-         recipe_run = response[0]
-         return [
-             part_link.inputDatasetPart
-             for part_link in recipe_run.recipeInstance.inputDataset.inputDatasetInputDatasetParts
-         ]
-
-     def _metadata_store_filter_input_dataset_parts(
-         self, input_dataset_part_type_name: str
-     ) -> InputDatasetPartResponse | None:
-         """Filter the input dataset parts based on the input dataset part type name."""
-         target_parts = [
-             part
-             for part in self.metadata_store_input_dataset_parts
-             if part.inputDatasetPartType.inputDatasetPartTypeName == input_dataset_part_type_name
-         ]
-         if not target_parts:
-             return
-         if len(target_parts) == 1:
-             return target_parts[0]
-         raise ValueError(
-             f"Multiple ({len(target_parts)}) input dataset parts found for "
-             f"{input_dataset_part_type_name=}."
-         )
-
-     @property
-     def _metadata_store_input_dataset_observe_frames_part(
-         self,
-     ) -> InputDatasetPartResponse | None:
-         """Get the input dataset part for observe frames."""
-         return self._metadata_store_filter_input_dataset_parts(
-             input_dataset_part_type_name="observe_frames",
-         )
-
-     @property
-     def metadata_store_input_dataset_observe_frames_part_id(self) -> int | None:
-         """Get the input dataset part id for observe frames."""
-         if part := self._metadata_store_input_dataset_observe_frames_part:
-             return part.inputDatasetPartId
-
-     @property
-     def metadata_store_input_dataset_observe_frames_part_document(
-         self,
-     ) -> input_dataset_part_document_type_hint:
-         """Get the input dataset part document for observe frames."""
-         if part := self._metadata_store_input_dataset_observe_frames_part:
-             return part.inputDatasetPartDocument
-
-     @property
-     def _metadata_store_input_dataset_calibration_frames_part(
-         self,
-     ) -> InputDatasetPartResponse | None:
-         """Get the input dataset part for calibration frames."""
-         return self._metadata_store_filter_input_dataset_parts(
-             input_dataset_part_type_name="calibration_frames"
+             response_encoder=InputDatasetRecipeRunResponse.model_validate,
          )
-
-     @property
-     def metadata_store_input_dataset_calibration_frames_part_id(self) -> int | None:
-         """Get the input dataset part id for calibration frames."""
-         if part := self._metadata_store_input_dataset_calibration_frames_part:
-             return part.inputDatasetPartId
-
-     @property
-     def metadata_store_input_dataset_calibration_frames_part_document(
-         self,
-     ) -> input_dataset_part_document_type_hint:
-         """Get the input dataset part document for calibration frames."""
-         if part := self._metadata_store_input_dataset_calibration_frames_part:
-             return part.inputDatasetPartDocument
-
-     @property
-     def _metadata_store_input_dataset_parameters_part(
-         self,
-     ) -> InputDatasetPartResponse | None:
-         """Get the input dataset part for parameters."""
-         return self._metadata_store_filter_input_dataset_parts(
-             input_dataset_part_type_name="parameters"
+         return response[0]
+
+     def _metadata_store_input_dataset_part(
+         self, part_type: Literal["observe_frames", "calibration_frames", "parameters"]
+     ) -> InputDatasetPartResponse:
+         """Get the input dataset part by input dataset part type name."""
+         part_type_dict = {}
+         parts = (
+             self.metadata_store_input_dataset_recipe_run_response.recipeInstance.inputDataset.inputDatasetInputDatasetParts
          )
+         for part in parts:
+             part_type_name = part.inputDatasetPart.inputDatasetPartType.inputDatasetPartTypeName
+             if part_type_name in part_type_dict.keys():
+                 raise ValueError(f"Multiple input dataset parts found for {part_type_name=}.")
+             part_type_dict[part_type_name] = part.inputDatasetPart
+         return part_type_dict.get(part_type)

      @property
-     def metadata_store_input_dataset_parameters_part_id(self) -> int | None:
-         """Get the input dataset part id for parameters."""
-         if part := self._metadata_store_input_dataset_parameters_part:
-             return part.inputDatasetPartId
+     def metadata_store_input_dataset_observe_frames(self) -> InputDatasetPartResponse:
+         """Get the input dataset part for the observe frames."""
+         return self._metadata_store_input_dataset_part(part_type="observe_frames")

      @property
-     def metadata_store_input_dataset_parameters_part_document(
-         self,
-     ) -> input_dataset_part_document_type_hint:
-         """Get the input dataset part document for parameters."""
-         if part := self._metadata_store_input_dataset_parameters_part:
-             return part.inputDatasetPartDocument
+     def metadata_store_input_dataset_calibration_frames(self) -> InputDatasetPartResponse:
+         """Get the input dataset part for the calibration frames."""
+         return self._metadata_store_input_dataset_part(part_type="calibration_frames")

      @property
-     def metadata_store_input_dataset_id(self) -> int:
-         """Get the input dataset id from the metadata store."""
-         return self._metadata_store_recipe_run().recipeInstance.inputDatasetId
+     def metadata_store_input_dataset_parameters(self) -> InputDatasetPartResponse:
+         """Get the input dataset part for the parameters."""
+         return self._metadata_store_input_dataset_part(part_type="parameters")

-     @property
-     def metadata_store_recipe_instance_id(self) -> int:
-         """Get the recipe instance id from the metadata store."""
-         return self._metadata_store_recipe_run().recipeInstanceId
-
-     @property
-     def metadata_store_recipe_id(self) -> int:
-         """Get the recipe id from the metadata store."""
-         return self._metadata_store_recipe_run().recipeInstance.recipeId
+     # RECIPE RUN

-     @property
-     def metadata_store_recipe_run_provenance(self) -> list[RecipeRunProvenanceResponse]:
-         """Get all the provenance records for the recipe run."""
-         return self._metadata_store_recipe_run().recipeRunProvenances
-
-     def _metadata_store_recipe_run(self, allow_cache: bool = True) -> RecipeRunResponse:
-         is_cached = bool(getattr(self, "_recipe_run_cache", False))
-         if is_cached and allow_cache:
-             return self._recipe_run_cache
+     @cached_property
+     def metadata_store_recipe_run(self) -> RecipeRunResponse:
+         """Get the recipe run response from the metadata store."""
          params = RecipeRunQuery(recipeRunId=self.recipe_run_id)
          response = self.metadata_store_client.execute_gql_query(
              query_base="recipeRuns",
              query_response_cls=RecipeRunResponse,
              query_parameters=params,
+             response_encoder=RecipeRunResponse.model_validate,
          )
-         self._recipe_run_cache = response[0]
-         return self._recipe_run_cache
-
-     def _metadata_store_change_status(self, status: str, is_complete: bool):
-         """Change the recipe run status of a recipe run to the given status."""
-         recipe_run_status_id = self._metadata_store_recipe_run_status_id(status=status)
-         if not recipe_run_status_id:
-             recipe_run_status_id = self._metadata_store_create_recipe_run_status(
-                 status=status, is_complete=is_complete
-             )
-         self._metadata_store_update_status(recipe_run_status_id=recipe_run_status_id)
-
-     def _metadata_store_recipe_run_status_id(self, status: str) -> None | int:
-         """Find the id of a recipe run status."""
-         params = RecipeRunStatusQuery(recipeRunStatusName=status)
-         response = self.metadata_store_client.execute_gql_query(
-             query_base="recipeRunStatuses",
-             query_response_cls=RecipeRunStatusResponse,
-             query_parameters=params,
-         )
-         if len(response) > 0:
-             return response[0].recipeRunStatusId
-
-     def _metadata_store_create_recipe_run_status(self, status: str, is_complete: bool) -> int:
-         """
-         Add a new recipe run status to the db.
-
-         :param status: name of the status to add
-         :param is_complete: does the new status correspond to an accepted completion state
-         """
-         recipe_run_statuses = {
-             "INPROGRESS": "Recipe run is currently undergoing processing",
-             "COMPLETEDSUCCESSFULLY": "Recipe run processing completed with no errors",
-             "TRIALSUCCESS": "Recipe run trial processing completed with no errors. Recipe run not "
-             "marked complete.",
-         }
-
-         if not isinstance(status, str):
-             raise TypeError(f"status must be of type str: {status}")
-         if not isinstance(is_complete, bool):
-             raise TypeError(f"is_complete must be of type bool: {is_complete}")
-         params = RecipeRunStatusMutation(
-             recipeRunStatusName=status,
-             isComplete=is_complete,
-             recipeRunStatusDescription=recipe_run_statuses[status],
-         )
-         recipe_run_status_response = self.metadata_store_client.execute_gql_mutation(
-             mutation_base="createRecipeRunStatus",
-             mutation_response_cls=RecipeRunStatusResponse,
-             mutation_parameters=params,
-         )
-         return recipe_run_status_response.recipeRunStatus.recipeRunStatusId
-
-     def _metadata_store_update_status(
-         self,
-         recipe_run_status_id: int,
-     ):
-         """
-         Change the status of a given recipe run id.
-
-         :param recipe_run_status_id: the new status to use
-         """
-         params = RecipeRunMutation(
-             recipeRunId=self.recipe_run_id, recipeRunStatusId=recipe_run_status_id
-         )
-         self.metadata_store_client.execute_gql_mutation(
-             mutation_base="updateRecipeRun",
-             mutation_parameters=params,
-             mutation_response_cls=RecipeRunMutationResponse,
-         )
+         return response[0]
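One side effect of the metadata_store.py rewrite above is that the hand-rolled _recipe_run_cache disappears in favour of functools.cached_property. The toy class below (Demo is hypothetical, not part of the package) only illustrates that semantics: the decorated body runs once per instance and the result is reused on later attribute access.

from functools import cached_property


class Demo:
    calls = 0

    @cached_property
    def recipe_run(self) -> str:
        # Stands in for the GraphQL round trip inside metadata_store_recipe_run.
        Demo.calls += 1
        return "RecipeRunResponse(...)"


d = Demo()
first = d.recipe_run
second = d.recipe_run
assert first == second
assert Demo.calls == 1  # the query body executed only once for this instance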
dkist_processing_common/tasks/output_data_base.py
@@ -19,7 +19,7 @@ class OutputDataBase(WorkflowTaskBase, ABC):
      @cached_property
      def destination_bucket(self) -> str:
          """Get the destination bucket."""
-         return self.metadata_store_recipe_run_configuration().get("destination_bucket", "data")
+         return self.metadata_store_recipe_run.configuration.get("destination_bucket", "data")

      def format_object_key(self, path: Path) -> str:
          """
dkist_processing_common/tasks/teardown.py
@@ -22,7 +22,7 @@ class TeardownBase(WorkflowTaskBase, ABC):
      @property
      def teardown_enabled(self) -> bool:
          """Recipe run configuration indicating if data should be removed at the end of a run."""
-         return self.metadata_store_recipe_run_configuration().get("teardown_enabled", True)
+         return self.metadata_store_recipe_run.configuration.get("teardown_enabled", True)

      def run(self) -> None:
          """Run method for Teardown class."""
dkist_processing_common/tasks/transfer_input_data.py
@@ -19,11 +19,11 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin, InputDatasetMixin):

      def download_input_dataset(self):
          """Get the input dataset document parts and save it to scratch with the appropriate tags."""
-         if doc := self.metadata_store_input_dataset_observe_frames_part_document:
+         if doc := self.metadata_store_input_dataset_observe_frames.inputDatasetPartDocument:
              self.write(doc.encode("utf-8"), tags=Tag.input_dataset_observe_frames())
-         if doc := self.metadata_store_input_dataset_calibration_frames_part_document:
+         if doc := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartDocument:
              self.write(doc.encode("utf-8"), tags=Tag.input_dataset_calibration_frames())
-         if doc := self.metadata_store_input_dataset_parameters_part_document:
+         if doc := self.metadata_store_input_dataset_parameters.inputDatasetPartDocument:
              self.write(doc.encode("utf-8"), tags=Tag.input_dataset_parameters())

      def format_transfer_items(