dkist-processing-common 10.6.0rc2__py3-none-any.whl → 10.6.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
changelog/236.misc.1.rst ADDED
@@ -0,0 +1 @@
+ Change returns from the metadata store queries into Pydantic BaseModel instances. Remove unnecessary parsing and error checking.
changelog/236.misc.rst ADDED
@@ -0,0 +1 @@
+ Convert dataclasses in the graphql model to Pydantic BaseModels for additional validation. In the RecipeRunResponse class, configuration is now returned as a dictionary. In the InputDatasetPartResponse class, inputDatasetPartDocument is now returned as a list of dictionaries.
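The two entries above boil down to Pydantic's Json type doing the parsing that the mixin previously did by hand. A minimal sketch (not part of the package, assuming Pydantic v2) of the behavior they describe: a Json-typed field accepts a JSON-encoded string and yields the parsed Python object after validation, so a malformed payload fails up front as a ValidationError instead of surfacing later from json.loads.

    from pydantic import BaseModel, Json

    class ResponseSketch(BaseModel):
        # Hypothetical stand-in for the RecipeRunResponse.configuration and
        # InputDatasetPartResponse.inputDatasetPartDocument fields changed below.
        configuration: Json[dict] | None = None
        inputDatasetPartDocument: Json[list[dict]] | None = None

    sketch = ResponseSketch(
        configuration='{"destination_bucket": "etc"}',
        inputDatasetPartDocument='[{"bucket": "data", "object_keys": ["key1"]}]',
    )
    assert sketch.configuration == {"destination_bucket": "etc"}  # parsed dict, not a str
    assert sketch.inputDatasetPartDocument[0]["bucket"] == "data"  # parsed list of dicts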
dkist_processing_common/models/graphql.py CHANGED
@@ -1,24 +1,22 @@
  """GraphQL Data models for the metadata store api."""
- from dataclasses import dataclass
+ from pydantic import BaseModel
+ from pydantic import Json


- @dataclass
- class RecipeRunMutation:
+ class RecipeRunMutation(BaseModel):
  """Recipe run mutation record."""

  recipeRunId: int
  recipeRunStatusId: int


- @dataclass
- class RecipeRunStatusQuery:
+ class RecipeRunStatusQuery(BaseModel):
  """Recipe run status query for the recipeRunStatuses endpoint."""

  recipeRunStatusName: str


- @dataclass
- class RecipeRunStatusMutation:
+ class RecipeRunStatusMutation(BaseModel):
  """Recipe run status mutation record."""

  recipeRunStatusName: str
@@ -26,38 +24,33 @@ class RecipeRunStatusMutation:
  recipeRunStatusDescription: str


- @dataclass
- class RecipeRunStatusResponse:
+ class RecipeRunStatusResponse(BaseModel):
  """Response to a recipe run status query."""

  recipeRunStatusId: int


- @dataclass
- class InputDatasetPartTypeResponse:
+ class InputDatasetPartTypeResponse(BaseModel):
  """Response class for the input dataset part type entity."""

  inputDatasetPartTypeName: str


- @dataclass
- class InputDatasetPartResponse:
+ class InputDatasetPartResponse(BaseModel):
  """Response class for the input dataset part entity."""

  inputDatasetPartId: int
- inputDatasetPartDocument: str
+ inputDatasetPartDocument: Json[list[dict]]
  inputDatasetPartType: InputDatasetPartTypeResponse


- @dataclass
- class InputDatasetInputDatasetPartResponse:
+ class InputDatasetInputDatasetPartResponse(BaseModel):
  """Response class for the join entity between input datasets and input dataset parts."""

  inputDatasetPart: InputDatasetPartResponse


- @dataclass
- class InputDatasetResponse:
+ class InputDatasetResponse(BaseModel):
  """Input dataset query response."""

  inputDatasetId: int
@@ -65,62 +58,54 @@ class InputDatasetResponse:
  inputDatasetInputDatasetParts: list[InputDatasetInputDatasetPartResponse]


- @dataclass
- class InputDatasetRecipeInstanceResponse:
+ class InputDatasetRecipeInstanceResponse(BaseModel):
  """Recipe instance query response."""

  inputDataset: InputDatasetResponse


- @dataclass
- class InputDatasetRecipeRunResponse:
+ class InputDatasetRecipeRunResponse(BaseModel):
  """Recipe run query response."""

  recipeInstance: InputDatasetRecipeInstanceResponse


- @dataclass
- class RecipeInstanceResponse:
+ class RecipeInstanceResponse(BaseModel):
  """Recipe instance query response."""

  recipeId: int
  inputDatasetId: int


- @dataclass
- class RecipeRunProvenanceResponse:
+ class RecipeRunProvenanceResponse(BaseModel):
  """Response for the metadata store recipeRunProvenances and mutations endpoints."""

  recipeRunProvenanceId: int
  isTaskManual: bool


- @dataclass
- class RecipeRunResponse:
+ class RecipeRunResponse(BaseModel):
  """Recipe run query response."""

  recipeInstance: RecipeInstanceResponse
  recipeInstanceId: int
  recipeRunProvenances: list[RecipeRunProvenanceResponse]
- configuration: str = None
+ configuration: Json[dict] | None = {}


- @dataclass
- class RecipeRunMutationResponse:
+ class RecipeRunMutationResponse(BaseModel):
  """Recipe run mutation response."""

  recipeRunId: int


- @dataclass
- class RecipeRunQuery:
+ class RecipeRunQuery(BaseModel):
  """Query parameters for the metadata store endpoint recipeRuns."""

  recipeRunId: int


- @dataclass
- class DatasetCatalogReceiptAccountMutation:
+ class DatasetCatalogReceiptAccountMutation(BaseModel):
  """
  Dataset catalog receipt account mutation record.

@@ -132,15 +117,13 @@ class DatasetCatalogReceiptAccountMutation:
  expectedObjectCount: int


- @dataclass
- class DatasetCatalogReceiptAccountResponse:
+ class DatasetCatalogReceiptAccountResponse(BaseModel):
  """Dataset catalog receipt account response for query and mutation endpoints."""

  datasetCatalogReceiptAccountId: int


- @dataclass
- class RecipeRunProvenanceMutation:
+ class RecipeRunProvenanceMutation(BaseModel):
  """Recipe run provenance mutation record."""

  inputDatasetId: int
@@ -152,8 +135,7 @@ class RecipeRunProvenanceMutation:
  codeVersion: str = None


- @dataclass
- class QualityCreation:
+ class QualityCreation(BaseModel):
  """Quality data creation record."""

  datasetId: str
@@ -173,15 +155,13 @@ class QualityCreation:
  efficiencyData: str | None = None


- @dataclass
- class QualitiesRequest:
+ class QualitiesRequest(BaseModel):
  """Query parameters for quality data."""

  datasetId: str


- @dataclass
- class QualityResponse:
+ class QualityResponse(BaseModel):
  """Query Response for quality data."""

  qualityId: int
dkist_processing_common/tasks/mixin/metadata_store.py CHANGED
@@ -2,6 +2,7 @@
  import json
  import logging
  from functools import cached_property
+ from typing import Literal

  from dkist_processing_common._util.graphql import GraphQLClient
  from dkist_processing_common.codecs.quality import QualityDataEncoder
@@ -26,8 +27,6 @@ from dkist_processing_common.models.graphql import RecipeRunStatusResponse

  logger = logging.getLogger(__name__)

- input_dataset_part_document_type_hint = list | dict | str | int | float | None
-

  class MetadataStoreMixin:
  """Mixin for a WorkflowDataTaskBase which implements Metadata Store access functionality."""
@@ -37,6 +36,8 @@ class MetadataStoreMixin:
  """Get the graphql client."""
  return GraphQLClient(common_configurations.metadata_store_api_base)

+ # RECIPE RUN STATUS
+
  def metadata_store_change_recipe_run_to_inprogress(self):
  """Set the recipe run status to "INPROGRESS"."""
  self._metadata_store_change_status(status="INPROGRESS", is_complete=False)
@@ -49,6 +50,76 @@ class MetadataStoreMixin:
  """Set the recipe run status to "TRIALSUCCESS"."""
  self._metadata_store_change_status(status="TRIALSUCCESS", is_complete=False)

+ def _metadata_store_recipe_run_status_id(self, status: str) -> None | int:
+ """Find the id of a recipe run status."""
+ params = RecipeRunStatusQuery(recipeRunStatusName=status)
+ response = self.metadata_store_client.execute_gql_query(
+ query_base="recipeRunStatuses",
+ query_response_cls=RecipeRunStatusResponse,
+ query_parameters=params,
+ )
+ if len(response) > 0:
+ return response[0].recipeRunStatusId
+
+ def _metadata_store_create_recipe_run_status(self, status: str, is_complete: bool) -> int:
+ """
+ Add a new recipe run status to the db.
+
+ :param status: name of the status to add
+ :param is_complete: does the new status correspond to an accepted completion state
+ """
+ recipe_run_statuses = {
+ "INPROGRESS": "Recipe run is currently undergoing processing",
+ "COMPLETEDSUCCESSFULLY": "Recipe run processing completed with no errors",
+ "TRIALSUCCESS": "Recipe run trial processing completed with no errors. Recipe run not "
+ "marked complete.",
+ }
+
+ if not isinstance(status, str):
+ raise TypeError(f"status must be of type str: {status}")
+ if not isinstance(is_complete, bool):
+ raise TypeError(f"is_complete must be of type bool: {is_complete}")
+ params = RecipeRunStatusMutation(
+ recipeRunStatusName=status,
+ isComplete=is_complete,
+ recipeRunStatusDescription=recipe_run_statuses[status],
+ )
+ recipe_run_status_response = self.metadata_store_client.execute_gql_mutation(
+ mutation_base="createRecipeRunStatus",
+ mutation_response_cls=RecipeRunStatusResponse,
+ mutation_parameters=params,
+ )
+ return recipe_run_status_response.recipeRunStatus.recipeRunStatusId
+
+ def _metadata_store_change_status(self, status: str, is_complete: bool):
+ """Change the recipe run status of a recipe run to the given status."""
+ recipe_run_status_id = self._metadata_store_recipe_run_status_id(status=status)
+ if not recipe_run_status_id:
+ recipe_run_status_id = self._metadata_store_create_recipe_run_status(
+ status=status, is_complete=is_complete
+ )
+ self._metadata_store_update_status(recipe_run_status_id=recipe_run_status_id)
+
+ def _metadata_store_update_status(
+ self,
+ recipe_run_status_id: int,
+ ):
+ """
+ Change the status of a given recipe run id.
+
+ :param recipe_run_status_id: the new status to use
+ """
+ params = RecipeRunMutation(
+ recipeRunId=self.recipe_run_id, recipeRunStatusId=recipe_run_status_id
+ )
+ self.metadata_store_client.execute_gql_mutation(
+ mutation_base="updateRecipeRun",
+ mutation_parameters=params,
+ mutation_response_cls=RecipeRunMutationResponse,
+ )
+
+ # RECEIPT
+
  def metadata_store_add_dataset_receipt_account(
  self, dataset_id: str, expected_object_count: int
  ):
@@ -62,10 +133,12 @@ class MetadataStoreMixin:
  mutation_response_cls=DatasetCatalogReceiptAccountResponse,
  )

+ # PROVENANCE
+
  def metadata_store_record_provenance(self, is_task_manual: bool, library_versions: str):
  """Record the provenance record in the metadata store."""
  params = RecipeRunProvenanceMutation(
- inputDatasetId=self.metadata_store_input_dataset_id,
+ inputDatasetId=self.metadata_store_recipe_run.recipeInstance.inputDatasetId,
  isTaskManual=is_task_manual,
  recipeRunId=self.recipe_run_id,
  taskName=self.task_name,
@@ -78,6 +151,8 @@ class MetadataStoreMixin:
  mutation_response_cls=RecipeRunProvenanceResponse,
  )

+ # QUALITY
+
  def metadata_store_add_quality_data(self, dataset_id: str, quality_data: list[dict]):
  """Add the quality data to the metadata-store."""
  if self.metadata_store_quality_data_exists(dataset_id):
@@ -119,222 +194,60 @@ class MetadataStoreMixin:
  )
  return bool(response)

- def metadata_store_recipe_run_configuration(self) -> dict:
- """Get the recipe run configuration from the metadata store."""
- configuration_json = self._metadata_store_recipe_run().configuration
- if configuration_json is None:
- return {}
- try:
- configuration = json.loads(configuration_json)
- if not isinstance(configuration, dict):
- raise ValueError(
- f"Invalid recipe run configuration format. "
- f"Expected json encoded dictionary, received json encoded {type(configuration)}"
- )
- return configuration
- except (json.JSONDecodeError, ValueError, TypeError, UnicodeDecodeError) as e:
- logger.error(f"Invalid recipe run configuration")
- raise e
+ # INPUT DATASET RECIPE RUN

  @cached_property
- def metadata_store_input_dataset_parts(self) -> list[InputDatasetPartResponse]:
- """Get the input dataset parts from the metadata store."""
+ def metadata_store_input_dataset_recipe_run_response(self) -> InputDatasetRecipeRunResponse:
+ """Get the input dataset recipe run response from the metadata store."""
  params = RecipeRunQuery(recipeRunId=self.recipe_run_id)
  response = self.metadata_store_client.execute_gql_query(
  query_base="recipeRuns",
  query_response_cls=InputDatasetRecipeRunResponse,
  query_parameters=params,
- ) # queried independently of other recipe run metadata for performance
- recipe_run = response[0]
- return [
- part_link.inputDatasetPart
- for part_link in recipe_run.recipeInstance.inputDataset.inputDatasetInputDatasetParts
- ]
-
- def _metadata_store_filter_input_dataset_parts(
- self, input_dataset_part_type_name: str
- ) -> InputDatasetPartResponse | None:
- """Filter the input dataset parts based on the input dataset part type name."""
- target_parts = [
- part
- for part in self.metadata_store_input_dataset_parts
- if part.inputDatasetPartType.inputDatasetPartTypeName == input_dataset_part_type_name
- ]
- if not target_parts:
- return
- if len(target_parts) == 1:
- return target_parts[0]
- raise ValueError(
- f"Multiple ({len(target_parts)}) input dataset parts found for "
- f"{input_dataset_part_type_name=}."
- )
-
- @property
- def _metadata_store_input_dataset_observe_frames_part(
- self,
- ) -> InputDatasetPartResponse | None:
- """Get the input dataset part for observe frames."""
- return self._metadata_store_filter_input_dataset_parts(
- input_dataset_part_type_name="observe_frames",
- )
-
- @property
- def metadata_store_input_dataset_observe_frames_part_id(self) -> int | None:
- """Get the input dataset part id for observe frames."""
- if part := self._metadata_store_input_dataset_observe_frames_part:
- return part.inputDatasetPartId
-
- @property
- def metadata_store_input_dataset_observe_frames_part_document(
- self,
- ) -> input_dataset_part_document_type_hint:
- """Get the input dataset part document for observe frames."""
- if part := self._metadata_store_input_dataset_observe_frames_part:
- return part.inputDatasetPartDocument
-
- @property
- def _metadata_store_input_dataset_calibration_frames_part(
- self,
- ) -> InputDatasetPartResponse | None:
- """Get the input dataset part for calibration frames."""
- return self._metadata_store_filter_input_dataset_parts(
- input_dataset_part_type_name="calibration_frames"
+ response_encoder=InputDatasetRecipeRunResponse.model_validate,
  )
-
- @property
- def metadata_store_input_dataset_calibration_frames_part_id(self) -> int | None:
- """Get the input dataset part id for calibration frames."""
- if part := self._metadata_store_input_dataset_calibration_frames_part:
- return part.inputDatasetPartId
-
- @property
- def metadata_store_input_dataset_calibration_frames_part_document(
- self,
- ) -> input_dataset_part_document_type_hint:
- """Get the input dataset part document for calibration frames."""
- if part := self._metadata_store_input_dataset_calibration_frames_part:
- return part.inputDatasetPartDocument
-
- @property
- def _metadata_store_input_dataset_parameters_part(
- self,
- ) -> InputDatasetPartResponse | None:
- """Get the input dataset part for parameters."""
- return self._metadata_store_filter_input_dataset_parts(
- input_dataset_part_type_name="parameters"
+ return response[0]
+
+ def _metadata_store_input_dataset_part(
+ self, part_type: Literal["observe_frames", "calibration_frames", "parameters"]
+ ) -> InputDatasetPartResponse:
+ """Get the input dataset part by input dataset part type name."""
+ part_type_dict = {}
+ parts = (
+ self.metadata_store_input_dataset_recipe_run_response.recipeInstance.inputDataset.inputDatasetInputDatasetParts
  )
+ for part in parts:
+ part_type_name = part.inputDatasetPart.inputDatasetPartType.inputDatasetPartTypeName
+ if part_type_name in part_type_dict.keys():
+ raise ValueError(f"Multiple input dataset parts found for {part_type_name=}.")
+ part_type_dict[part_type_name] = part.inputDatasetPart
+ return part_type_dict.get(part_type)

  @property
- def metadata_store_input_dataset_parameters_part_id(self) -> int | None:
- """Get the input dataset part id for parameters."""
- if part := self._metadata_store_input_dataset_parameters_part:
- return part.inputDatasetPartId
+ def metadata_store_input_dataset_observe_frames(self) -> InputDatasetPartResponse:
+ """Get the input dataset part for the observe frames."""
+ return self._metadata_store_input_dataset_part(part_type="observe_frames")

  @property
- def metadata_store_input_dataset_parameters_part_document(
- self,
- ) -> input_dataset_part_document_type_hint:
- """Get the input dataset part document for parameters."""
- if part := self._metadata_store_input_dataset_parameters_part:
- return part.inputDatasetPartDocument
+ def metadata_store_input_dataset_calibration_frames(self) -> InputDatasetPartResponse:
+ """Get the input dataset part for the calibration frames."""
+ return self._metadata_store_input_dataset_part(part_type="calibration_frames")

  @property
- def metadata_store_input_dataset_id(self) -> int:
- """Get the input dataset id from the metadata store."""
- return self._metadata_store_recipe_run().recipeInstance.inputDatasetId
+ def metadata_store_input_dataset_parameters(self) -> InputDatasetPartResponse:
+ """Get the input dataset part for the parameters."""
+ return self._metadata_store_input_dataset_part(part_type="parameters")

- @property
- def metadata_store_recipe_instance_id(self) -> int:
- """Get the recipe instance id from the metadata store."""
- return self._metadata_store_recipe_run().recipeInstanceId
-
- @property
- def metadata_store_recipe_id(self) -> int:
- """Get the recipe id from the metadata store."""
- return self._metadata_store_recipe_run().recipeInstance.recipeId
+ # RECIPE RUN

- @property
- def metadata_store_recipe_run_provenance(self) -> list[RecipeRunProvenanceResponse]:
- """Get all the provenance records for the recipe run."""
- return self._metadata_store_recipe_run().recipeRunProvenances
-
- def _metadata_store_recipe_run(self, allow_cache: bool = True) -> RecipeRunResponse:
- is_cached = bool(getattr(self, "_recipe_run_cache", False))
- if is_cached and allow_cache:
- return self._recipe_run_cache
+ @cached_property
+ def metadata_store_recipe_run(self) -> RecipeRunResponse:
+ """Get the recipe run response from the metadata store."""
  params = RecipeRunQuery(recipeRunId=self.recipe_run_id)
  response = self.metadata_store_client.execute_gql_query(
  query_base="recipeRuns",
  query_response_cls=RecipeRunResponse,
  query_parameters=params,
+ response_encoder=RecipeRunResponse.model_validate,
  )
- self._recipe_run_cache = response[0]
- return self._recipe_run_cache
-
- def _metadata_store_change_status(self, status: str, is_complete: bool):
- """Change the recipe run status of a recipe run to the given status."""
- recipe_run_status_id = self._metadata_store_recipe_run_status_id(status=status)
- if not recipe_run_status_id:
- recipe_run_status_id = self._metadata_store_create_recipe_run_status(
- status=status, is_complete=is_complete
- )
- self._metadata_store_update_status(recipe_run_status_id=recipe_run_status_id)
-
- def _metadata_store_recipe_run_status_id(self, status: str) -> None | int:
- """Find the id of a recipe run status."""
- params = RecipeRunStatusQuery(recipeRunStatusName=status)
- response = self.metadata_store_client.execute_gql_query(
- query_base="recipeRunStatuses",
- query_response_cls=RecipeRunStatusResponse,
- query_parameters=params,
- )
- if len(response) > 0:
- return response[0].recipeRunStatusId
-
- def _metadata_store_create_recipe_run_status(self, status: str, is_complete: bool) -> int:
- """
- Add a new recipe run status to the db.
-
- :param status: name of the status to add
- :param is_complete: does the new status correspond to an accepted completion state
- """
- recipe_run_statuses = {
- "INPROGRESS": "Recipe run is currently undergoing processing",
- "COMPLETEDSUCCESSFULLY": "Recipe run processing completed with no errors",
- "TRIALSUCCESS": "Recipe run trial processing completed with no errors. Recipe run not "
- "marked complete.",
- }
-
- if not isinstance(status, str):
- raise TypeError(f"status must be of type str: {status}")
- if not isinstance(is_complete, bool):
- raise TypeError(f"is_complete must be of type bool: {is_complete}")
- params = RecipeRunStatusMutation(
- recipeRunStatusName=status,
- isComplete=is_complete,
- recipeRunStatusDescription=recipe_run_statuses[status],
- )
- recipe_run_status_response = self.metadata_store_client.execute_gql_mutation(
- mutation_base="createRecipeRunStatus",
- mutation_response_cls=RecipeRunStatusResponse,
- mutation_parameters=params,
- )
- return recipe_run_status_response.recipeRunStatus.recipeRunStatusId
-
- def _metadata_store_update_status(
- self,
- recipe_run_status_id: int,
- ):
- """
- Change the status of a given recipe run id.
-
- :param recipe_run_status_id: the new status to use
- """
- params = RecipeRunMutation(
- recipeRunId=self.recipe_run_id, recipeRunStatusId=recipe_run_status_id
- )
- self.metadata_store_client.execute_gql_mutation(
- mutation_base="updateRecipeRun",
- mutation_parameters=params,
- mutation_response_cls=RecipeRunMutationResponse,
- )
+ return response[0]
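A hedged illustration of the response_encoder hook used in the queries above: model_validate is Pydantic v2's standard constructor from a mapping, and the sketch below assumes the GraphQL client simply applies whatever callable it is given to each raw record, which is what replaces the bespoke parsing and caching this diff deletes. The names execute_gql_query_sketch and StatusSketch are hypothetical stand-ins, not the package's API.

    from pydantic import BaseModel

    class StatusSketch(BaseModel):
        # Hypothetical stand-in for RecipeRunStatusResponse.
        recipeRunStatusId: int

    def execute_gql_query_sketch(raw_records: list[dict], response_encoder):
        # Assumed client behavior: encode every raw GraphQL record.
        return [response_encoder(record) for record in raw_records]

    records = execute_gql_query_sketch(
        raw_records=[{"recipeRunStatusId": 7}],
        response_encoder=StatusSketch.model_validate,
    )
    assert records[0].recipeRunStatusId == 7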
dkist_processing_common/tasks/output_data_base.py CHANGED
@@ -19,7 +19,7 @@ class OutputDataBase(WorkflowTaskBase, ABC):
  @cached_property
  def destination_bucket(self) -> str:
  """Get the destination bucket."""
- return self.metadata_store_recipe_run_configuration().get("destination_bucket", "data")
+ return self.metadata_store_recipe_run.configuration.get("destination_bucket", "data")

  def format_object_key(self, path: Path) -> str:
  """
dkist_processing_common/tasks/teardown.py CHANGED
@@ -22,7 +22,7 @@ class TeardownBase(WorkflowTaskBase, ABC):
  @property
  def teardown_enabled(self) -> bool:
  """Recipe run configuration indicating if data should be removed at the end of a run."""
- return self.metadata_store_recipe_run_configuration().get("teardown_enabled", True)
+ return self.metadata_store_recipe_run.configuration.get("teardown_enabled", True)

  def run(self) -> None:
  """Run method for Teardown class."""
dkist_processing_common/tasks/transfer_input_data.py CHANGED
@@ -19,11 +19,11 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin, InputDatasetMixin):

  def download_input_dataset(self):
  """Get the input dataset document parts and save it to scratch with the appropriate tags."""
- if doc := self.metadata_store_input_dataset_observe_frames_part_document:
+ if doc := self.metadata_store_input_dataset_observe_frames.inputDatasetPartDocument:
  self.write(doc.encode("utf-8"), tags=Tag.input_dataset_observe_frames())
- if doc := self.metadata_store_input_dataset_calibration_frames_part_document:
+ if doc := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartDocument:
  self.write(doc.encode("utf-8"), tags=Tag.input_dataset_calibration_frames())
- if doc := self.metadata_store_input_dataset_parameters_part_document:
+ if doc := self.metadata_store_input_dataset_parameters.inputDatasetPartDocument:
  self.write(doc.encode("utf-8"), tags=Tag.input_dataset_parameters())

  def format_transfer_items(
dkist_processing_common/tasks/trial_output_data.py CHANGED
@@ -44,12 +44,12 @@ class TransferTrialData(TransferDataBase, GlobusMixin):
  @cached_property
  def destination_bucket(self) -> str:
  """Get the destination bucket with a trial default."""
- return self.metadata_store_recipe_run_configuration().get("destination_bucket", "etc")
+ return self.metadata_store_recipe_run.configuration.get("destination_bucket", "etc")

  @property
  def destination_root_folder(self) -> Path:
  """Format the destination root folder with a value that can be set in the recipe run configuration."""
- root_name_from_configuration = self.metadata_store_recipe_run_configuration().get(
+ root_name_from_configuration = self.metadata_store_recipe_run.configuration.get(
  "trial_root_directory_name"
  )
  root_name = Path(root_name_from_configuration or super().destination_root_folder)
@@ -59,9 +59,9 @@ class TransferTrialData(TransferDataBase, GlobusMixin):
  @property
  def destination_folder(self) -> Path:
  """Format the destination folder with a parent that can be set by the recipe run configuration."""
- dir_name = self.metadata_store_recipe_run_configuration().get(
- "trial_directory_name"
- ) or Path(self.constants.dataset_id)
+ dir_name = self.metadata_store_recipe_run.configuration.get("trial_directory_name") or Path(
+ self.constants.dataset_id
+ )
  return self.destination_root_folder / dir_name

  @property
@@ -71,7 +71,7 @@ class TransferTrialData(TransferDataBase, GlobusMixin):
  Defaults to transferring all product files. Setting `trial_exclusive_transfer_tag_lists` in the
  recipe run configuration to a list of tag lists will override the default.
  """
- return self.metadata_store_recipe_run_configuration().get(
+ return self.metadata_store_recipe_run.configuration.get(
  "trial_exclusive_transfer_tag_lists", self.default_transfer_tag_lists
  )

dkist_processing_common/tasks/write_l1.py CHANGED
@@ -107,17 +107,17 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
  @cached_property
  def tile_size_param(self) -> int:
  """Get the tile size parameter for compression."""
- return self.metadata_store_recipe_run_configuration().get("tile_size", None)
+ return self.metadata_store_recipe_run.configuration.get("tile_size", None)

  @cached_property
  def validate_l1_on_write(self) -> bool:
  """Check for validate on write."""
- return self.metadata_store_recipe_run_configuration().get("validate_l1_on_write", True)
+ return self.metadata_store_recipe_run.configuration.get("validate_l1_on_write", True)

  @cached_property
  def workflow_had_manual_intervention(self):
  """Indicate determining if any provenance capturing steps had manual intervention."""
- for provenance_record in self.metadata_store_recipe_run_provenance:
+ for provenance_record in self.metadata_store_recipe_run.recipeRunProvenances:
  if provenance_record.isTaskManual:
  return True
  return False
@@ -201,8 +201,8 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
  header["FRAMEVOL"] = -1.0
  header["PROCTYPE"] = "L1"
  header["RRUNID"] = self.recipe_run_id
- header["RECIPEID"] = self.metadata_store_recipe_id
- header["RINSTID"] = self.metadata_store_recipe_instance_id
+ header["RECIPEID"] = self.metadata_store_recipe_run.recipeInstance.recipeId
+ header["RINSTID"] = self.metadata_store_recipe_run.recipeInstanceId
  header["EXTNAME"] = "observation"
  header["SOLARNET"] = 1
  header["OBS_HDU"] = 1
@@ -214,10 +214,10 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
  header["CADMAX"] = self.constants.maximum_cadence
  header["CADVAR"] = self.constants.variance_cadence
  # Keywords to support reprocessing
- if ids_par_id := self.metadata_store_input_dataset_parameters_part_id:
+ if ids_par_id := self.metadata_store_input_dataset_parameters.inputDatasetPartId:
  header["IDSPARID"] = ids_par_id
- header["IDSOBSID"] = self.metadata_store_input_dataset_observe_frames_part_id
- if ids_cal_id := self.metadata_store_input_dataset_calibration_frames_part_id:
+ header["IDSOBSID"] = self.metadata_store_input_dataset_observe_frames.inputDatasetPartId
+ if ids_cal_id := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartId:
  header["IDSCALID"] = ids_cal_id
  header["WKFLNAME"] = self.workflow_name
  header["WKFLVERS"] = self.workflow_version
dkist_processing_common/tests/conftest.py CHANGED
@@ -368,7 +368,7 @@ class FakeGQLClient:
  "key1",
  "key2"
  ]
- },
+ }
  ]""",
  inputDatasetPartType=InputDatasetPartTypeResponse(
  inputDatasetPartTypeName="observe_frames"
@@ -385,7 +385,7 @@ class FakeGQLClient:
  "key3",
  "key4"
  ]
- },
+ }
  ]""",
  inputDatasetPartType=InputDatasetPartTypeResponse(
  inputDatasetPartTypeName="calibration_frames"
@@ -420,7 +420,8 @@ class FakeGQLClient:
  class FakeGQLClientNoRecipeConfiguration(FakeGQLClient):
  def execute_gql_query(self, **kwargs):
  response = super().execute_gql_query(**kwargs)
- response[0].configuration = None
+ if type(response[0]) == RecipeRunResponse:
+ response[0].configuration = {}
  return response


dkist_processing_common/tests/test_teardown.py CHANGED
@@ -24,7 +24,7 @@ def make_mock_GQL_with_configuration():
  response = super().execute_gql_query(**kwargs)
  if isinstance(response, list):
  if isinstance(response[0], RecipeRunResponse):
- response[0].configuration = json.dumps(configuration)
+ response[0].configuration = configuration
  return response

  return TeardownFakeGQLClient
dkist_processing_common/tests/test_trial_output_data.py CHANGED
@@ -29,13 +29,11 @@ def recipe_run_configuration(
  response = super().execute_gql_query(**kwargs)
  if isinstance(response, list):
  if isinstance(response[0], RecipeRunResponse):
- response[0].configuration = json.dumps(
- {
- "trial_root_directory_name": custom_root_name,
- "trial_directory_name": custom_dir_name,
- "destination_bucket": destination_bucket,
- }
- )
+ response[0].configuration = {
+ "trial_root_directory_name": custom_root_name,
+ "trial_directory_name": custom_dir_name,
+ "destination_bucket": destination_bucket,
+ }
  return response

  return GQLClientWithConfiguration
@@ -50,14 +48,12 @@ def recipe_run_configuration_with_tag_lists(
  response = super().execute_gql_query(**kwargs)
  if isinstance(response, list):
  if isinstance(response[0], RecipeRunResponse):
- response[0].configuration = json.dumps(
- {
- "trial_root_directory_name": custom_root_name,
- "trial_directory_name": custom_dir_name,
- "destination_bucket": destination_bucket,
- "trial_exclusive_transfer_tag_lists": exclusive_tag_lists,
- }
- )
+ response[0].configuration = {
+ "trial_root_directory_name": custom_root_name,
+ "trial_directory_name": custom_dir_name,
+ "destination_bucket": destination_bucket,
+ "trial_exclusive_transfer_tag_lists": exclusive_tag_lists,
+ }
  return response

  return GQLClientWithConfiguration
dkist_processing_common/tests/test_workflow_task_base.py CHANGED
@@ -204,7 +204,7 @@ class ProvenanceTask(WorkflowTaskBase):
  libraryVersions=library_versions,
  workflowVersion=self.workflow_version,
  )
- self.write(data=bytes(json.dumps(asdict(params)), "utf-8"), tags=["TEST_PROVENANCE"])
+ self.write(data=bytes(params.model_dump_json(), "utf-8"), tags=["TEST_PROVENANCE"])


  @pytest.fixture(scope="function")
dkist_processing_common/tests/test_write_l1.py CHANGED
@@ -457,9 +457,15 @@ def test_reprocessing_keys(write_l1_task, mocker):
  files = list(task.read(tags=[Tag.frame(), Tag.output()]))
  for file in files:
  header = fits.open(file)[1].header
- assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters_part_id
- assert header["IDSOBSID"] == task.metadata_store_input_dataset_observe_frames_part_id
- assert header["IDSCALID"] == task.metadata_store_input_dataset_calibration_frames_part_id
+ assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters.inputDatasetPartId
+ assert (
+ header["IDSOBSID"]
+ == task.metadata_store_input_dataset_observe_frames.inputDatasetPartId
+ )
+ assert (
+ header["IDSCALID"]
+ == task.metadata_store_input_dataset_calibration_frames.inputDatasetPartId
+ )
  assert header["WKFLNAME"] == task.workflow_name
  assert header["WKFLVERS"] == task.workflow_version
  assert header["PROCTYPE"] == "L1"
dkist_processing_common-10.6.0rc2.dist-info/METADATA → dkist_processing_common-10.6.1rc1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: dkist-processing-common
- Version: 10.6.0rc2
+ Version: 10.6.1rc1
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
dkist_processing_common-10.6.0rc2.dist-info/RECORD → dkist_processing_common-10.6.1rc1.dist-info/RECORD RENAMED
@@ -1,5 +1,6 @@
  changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- changelog/235.feature.rst,sha256=gIY9mL5ji-K5fsOlMrai3nTLxTPmTlH20yrjjhzrT9Q,158
+ changelog/236.misc.1.rst,sha256=0agTibMnm1CvRDjoQPuLN_xJmGDzAUTJCbhwrCVe55o,130
+ changelog/236.misc.rst,sha256=IVs7M44t2WkDkHz292SjlIeRKLBX_yB7IkXMr5EMP3U,277
  dkist_processing_common/__init__.py,sha256=490Fwm_GgqpwriQlsYfKcLUZNhZ6GkINtJqcYSIEKoU,319
  dkist_processing_common/config.py,sha256=IcpaD_NvHZU-aLlUNOTdRC4V7ADIvVQwrZ2dHhIr4NY,4247
  dkist_processing_common/manual.py,sha256=4sGj3cChdnEpwzq2GSNSbTpUf2oS3YUWiXna6ALfMF4,7037
@@ -23,7 +24,7 @@ dkist_processing_common/models/__init__.py,sha256=6LMqemdzVZ87fRrpAsbEnTtWZ02_Gu
  dkist_processing_common/models/constants.py,sha256=1Eb8RDeuCr6brl237iGKxYLWCH49I6bOUEj_Tv-zFbQ,5441
  dkist_processing_common/models/fits_access.py,sha256=Au9JROwhVla9zb_u0dN8mIWiSJd_Pca0oOr4N1hN0HY,4113
  dkist_processing_common/models/flower_pot.py,sha256=59C5uGYKyMyncqQYxhzDZWl8k1DRZFB6s9RF-HFp9mY,5128
- dkist_processing_common/models/graphql.py,sha256=b8kI-Pq12gixOtotCWHVTfEBT7XKBlvV_IaCM9qkKvg,3992
+ dkist_processing_common/models/graphql.py,sha256=GBCRk6o9TGVCNy7hc9Yrmn5YZW76JBs9AY1Q5muvG1Y,4040
  dkist_processing_common/models/message.py,sha256=DRW7Qhl01dF5KagcqLta5U-uzdOMewrsHvMatDT6jnk,1684
  dkist_processing_common/models/message_queue_binding.py,sha256=ROQ2ZQE3TCr4gVbz4WggvUSExAiWP8SD_GjjQl482M8,1012
  dkist_processing_common/models/metric_code.py,sha256=McXAEF1Sa0_YlR1niXYLJWLFHhdLQhmYw9Xtpr5FGws,815
@@ -52,25 +53,25 @@ dkist_processing_common/tasks/__init__.py,sha256=uH8DTiQP-cx4vMK53S4LYGZGmbip5s0
  dkist_processing_common/tasks/assemble_movie.py,sha256=9K4sgXyRKaX7UsFBIs138pG3AtClwLLopYw3ZQY3ok4,12771
  dkist_processing_common/tasks/base.py,sha256=k_IJR5sVV6ennX0sbeb0C6dciqshdY7CKjtWHy_adm8,13143
  dkist_processing_common/tasks/l1_output_data.py,sha256=IM-nvGaTM5r-z-9vHr2wovPVUpuNCah-cWIFMO2fcII,10576
- dkist_processing_common/tasks/output_data_base.py,sha256=sbvTTBgqGvmsGmu3Svl4RSR1WSWejKdsF5_mhO8aRBo,3699
+ dkist_processing_common/tasks/output_data_base.py,sha256=hxhO06rAsJkPsuG-XyWa4PvCrFKrseIek2AOQPJSn5M,3697
  dkist_processing_common/tasks/parse_l0_input_data.py,sha256=iRMGdvhxBobNsTDQ0IEl0myDfB4P_xpxA00guuBWDj8,7986
  dkist_processing_common/tasks/quality_metrics.py,sha256=g6MUq8s8jELDinkn6o45rfONyODw92JyVMrzb7Dd7OI,12458
- dkist_processing_common/tasks/teardown.py,sha256=TNghJIBhLucxJsabuygDbU4oL0RqdTwq8DqOcP5tub8,2370
- dkist_processing_common/tasks/transfer_input_data.py,sha256=oR1Y-EEodH8uZT26j-7PfQ9uIMp96GOUemmLSCCb67w,5212
+ dkist_processing_common/tasks/teardown.py,sha256=zKMA5oZDs_p6Dr7IuMtmRgmHbeBPYkx1dv_-RPuChqY,2368
+ dkist_processing_common/tasks/transfer_input_data.py,sha256=Sc3wBh48F1LhghRZyR3E39NAnMGExI7liibCfgu3FcE,5245
  dkist_processing_common/tasks/trial_catalog.py,sha256=Y3DKstRfMS8nWWtJFMB0MUVPlZ1jWS_2jhJGMWwxy50,8748
- dkist_processing_common/tasks/trial_output_data.py,sha256=vE4nRdY3RP19MuKKPl9JKYYi81DtMbs3VghAZoVJ_2s,6890
- dkist_processing_common/tasks/write_l1.py,sha256=KJmfgOiH2R8-lXMIDqNqGkHFMnzAeb6JgF0m3zhj9wI,22375
+ dkist_processing_common/tasks/trial_output_data.py,sha256=HHORFHSEN3nGfGB5fIh5qeoAqyousMYTa_g4_R2uzEA,6882
+ dkist_processing_common/tasks/write_l1.py,sha256=Nh6d7pSRtvwEvlL5fyidytX4lYiwUEveUZPDxOpclzE,22448
  dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
  dkist_processing_common/tasks/mixin/globus.py,sha256=QAV8VElxMAqxJ2KSB_bJaraceovYfjHXjOdocrTCkIA,6592
  dkist_processing_common/tasks/mixin/input_dataset.py,sha256=dkW5vf_QPgWedHO_Lf9GjBxr1QrUCKs6gIXufUTi7GE,6813
  dkist_processing_common/tasks/mixin/interservice_bus.py,sha256=I7BUh0o8AEX-FZv7gxCts6is0uq9lycWjtTB2KqwBrU,1080
- dkist_processing_common/tasks/mixin/metadata_store.py,sha256=VmcKYnA0RBhhKZadpmUGtVciSMpIkKoMU81IR_SnB04,15619
+ dkist_processing_common/tasks/mixin/metadata_store.py,sha256=x77Kpdy7C59ZJnm7TAWsv8_JjWCb_SRylhdgx94zJsU,11846
  dkist_processing_common/tasks/mixin/object_store.py,sha256=Vn4l2XuCimii9Fc3gM-pQGIkTKMv_ldqljlxkLesZLU,3236
  dkist_processing_common/tasks/mixin/quality/__init__.py,sha256=Bgu-DHW7yXLiehglldOCWluEkAP5qh0Hp1F30rh5NFw,383
  dkist_processing_common/tasks/mixin/quality/_base.py,sha256=nZ9IC-O-hsLXa5-tk29B13CZyQIdhJCv0eO9cdkAhWc,8303
  dkist_processing_common/tasks/mixin/quality/_metrics.py,sha256=WenTfa12guIUfm0GzkrK2gduKaOHs03e6RhE6j37Les,54304
  dkist_processing_common/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dkist_processing_common/tests/conftest.py,sha256=0MN62OuP26cT48Xsrj2wOZ2MT66SaFHhj8RwShjjrkc,29272
+ dkist_processing_common/tests/conftest.py,sha256=ftvyKNSheU1wKXPsuX290tnMHKMhla2U4e4cgLnRa5k,29323
  dkist_processing_common/tests/test_assemble_movie.py,sha256=XY_ruXSYP5k6s2gUAwlFdnhJ81eyWLSd2O9IkX4RXeo,4165
  dkist_processing_common/tests/test_assemble_quality.py,sha256=fWSHK4UdVqgNjvxQuD40NBUnXrtmthUP7PUbISPV4MQ,16897
  dkist_processing_common/tests/test_base.py,sha256=4ST3__jEHitEQaQs9-0OcqtyEJfIjZsk_6PRYZFV2-U,7124
@@ -94,13 +95,13 @@ dkist_processing_common/tests/test_submit_dataset_metadata.py,sha256=F1IKBFWhjjM
  dkist_processing_common/tests/test_tags.py,sha256=UwlOJ45rkvbfbd5L5m5YltvOxQc8kGqJEn5V81H33U8,5023
  dkist_processing_common/tests/test_task_name.py,sha256=kqFr59XX2K87xzfTlClzDV4-Je1dx72LvdaJ22UE8UU,1233
  dkist_processing_common/tests/test_task_parsing.py,sha256=QXt1X6DTO3_liBD2c-t84DToLeEn7B3J-eteIyN4HEM,4027
- dkist_processing_common/tests/test_teardown.py,sha256=miAMXG5c3wZxFfwBke8F7PR_Ke8KZmbegRKK9oB0tvU,5432
+ dkist_processing_common/tests/test_teardown.py,sha256=bw_MTJ5G9fcpKP7z_hW1sE7geZ7pxUEn-9p0trizqYY,5420
  dkist_processing_common/tests/test_transfer_input_data.py,sha256=itmOFrK7x4G26cj0Xkf015Xn09G9YDdJunXybGgD8Ek,6658
  dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=27PifkyH3RZg0nsM-AjmrFJ-hbYuCk5Tt_0Zx8PJBfM,2109
  dkist_processing_common/tests/test_trial_catalog.py,sha256=YuZsGlm5WE_XClaqzBB9U-EzQF4fdju8ocrYY-MeXEk,6844
- dkist_processing_common/tests/test_trial_output_data.py,sha256=rXrf2oncBIytht3yKqA3yCDsqDLoYanD9_0H5GwXTzc,12900
- dkist_processing_common/tests/test_workflow_task_base.py,sha256=EVxlz_qOxFn2AdzpppLcW7HTcBj_QC4UPouGGfWHls4,10519
- dkist_processing_common/tests/test_write_l1.py,sha256=NmLhi9iyXf02ry_y1eavPjlD73EDiX1ZBUk4Zy2vqgQ,20246
+ dkist_processing_common/tests/test_trial_output_data.py,sha256=5tIn7lQ0L5oshsJvkZroOt-Cl02bigDl4DXNNfc5wFI,12748
+ dkist_processing_common/tests/test_workflow_task_base.py,sha256=Z5aPW5LQtS0UWJiYho4X0r-2gPLfzpkmMwfmaoFLjMg,10517
+ dkist_processing_common/tests/test_write_l1.py,sha256=Wr8UqYYxQk2N2mkITizFWz9W6VJjcuVom7vlpSuw3is,20351
  docs/Makefile,sha256=qnlVz6PuBqE39NfHWuUnHhNEA-EFgT2-WJNNNy9ttfk,4598
  docs/changelog.rst,sha256=S2jPASsWlQxSlAPqdvNrYvhk9k3FcFWNXFNDYXBSjl4,120
  docs/conf.py,sha256=FkX575cqTqZGCcLAjg2MlvE8Buj1Vt3CpHNgZxG256E,1890
@@ -109,7 +110,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
  docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
  docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
  licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
- dkist_processing_common-10.6.0rc2.dist-info/METADATA,sha256=SFlO2ZOzBfCo8zqDHAfIIthsqoX9EIDrhg4Czajxd0Y,6963
- dkist_processing_common-10.6.0rc2.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- dkist_processing_common-10.6.0rc2.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
- dkist_processing_common-10.6.0rc2.dist-info/RECORD,,
+ dkist_processing_common-10.6.1rc1.dist-info/METADATA,sha256=kc9wibJ7S38-AQeFbCGDXEQ00nkwvjOU7ITGUdWNK_E,6963
+ dkist_processing_common-10.6.1rc1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ dkist_processing_common-10.6.1rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+ dkist_processing_common-10.6.1rc1.dist-info/RECORD,,
changelog/235.feature.rst DELETED
@@ -1,2 +0,0 @@
- Add the `RetarderNameBud` that can parse the name of the GOS retarder and ensure that only a single retarder was used
- for the given set of POLCAL input data.