dkist-processing-common 11.8.1rc2__py3-none-any.whl → 11.8.2rc1__py3-none-any.whl

This diff shows the content differences between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
@@ -0,0 +1 @@
+Store quality data in object store
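The release note above captures the theme of this release: quality metrics are no longer pushed to the metadata store record by record, but are assembled into a single JSON file, tagged as pipeline output, and uploaded to the object store (see the hunks that follow). As a rough sketch of the payload involved — the field names come from the removed `metadata_store_add_quality_data` helper further down, while the values are invented for illustration:

```python
import json

# Hypothetical example of a single quality metric record. The pipeline assembles a list
# of such dicts and writes it as "<dataset_id>_quality_data.json", tagged OUTPUT + QUALITY_DATA.
example_metric = {
    "metric_code": "EXAMPLE_METRIC",  # invented value; real codes come from the quality mixin
    "name": "Example metric",
    "description": "Illustrative description of what the metric measures.",
    "statement": "Example summary statement.",
    "warnings": [],
    "plot_data": None,
}
print(json.dumps([example_metric], indent=2))
```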
@@ -176,36 +176,3 @@ class RecipeRunProvenanceMutation(GraphqlBaseModel):
     libraryVersions: str
     workflowVersion: str
     codeVersion: str | None = None
-
-
-class QualityCreation(GraphqlBaseModel):
-    """Quality data creation record."""
-
-    datasetId: str
-    metricCode: str
-    facet: str | None = None
-    name: str | None = None
-    description: str | None = None
-    statement: str | None = None
-    # JSON array
-    warnings: str | None = None
-    # JSON objects
-    plotData: str | None = None
-    multiPlotData: str | None = None
-    tableData: str | None = None
-    histogramData: str | None = None
-    modmatData: str | None = None
-    raincloudData: str | None = None
-    efficiencyData: str | None = None
-
-
-class QualitiesRequest(GraphqlBaseModel):
-    """Query parameters for quality data."""
-
-    datasetId: str
-
-
-class QualityResponse(GraphqlBaseModel):
-    """Query Response for quality data."""
-
-    qualityId: int
@@ -38,8 +38,9 @@ class L1OutputDataBase(OutputDataBase, ABC):
 
     @property
     def dataset_has_quality_data(self) -> bool:
-        """Return True if quality data has been persisted to the metadata-store."""
-        return self.metadata_store_quality_data_exists(dataset_id=self.constants.dataset_id)
+        """Return True if the dataset has quality data."""
+        paths = list(self.read(tags=[Tag.output(), Tag.quality_data()]))
+        return len(paths) > 0
 
     def rollback(self):
         """Warn that the metadata-store and the interservice bus retain the effect of this tasks execution. Rolling back this task may not be achievable without other action."""
@@ -58,6 +59,9 @@ class TransferL1Data(TransferDataBase, GlobusMixin):
         # Movie needs to be transferred separately as the movie headers need to go with it
         self.transfer_movie()
 
+        with self.telemetry_span("Upload quality data"):
+            self.transfer_quality_data()
+
         with self.telemetry_span("Upload science frames"):
             self.transfer_output_frames()
 
@@ -101,6 +105,33 @@ class TransferL1Data(TransferDataBase, GlobusMixin):
             content_type="video/mp4",
         )
 
+    def transfer_quality_data(self):
+        """Transfer quality data to the object store."""
+        paths = list(self.read(tags=[Tag.output(), Tag.quality_data()]))
+        if len(paths) == 0:
+            logger.warning(
+                f"No quality data found to upload for dataset. recipe_run_id={self.recipe_run_id}"
+            )
+            return
+
+        if (count := len(paths)) > 1:
+            # dataset inventory does not support multiple quality data object keys
+            raise RuntimeError(
+                "Found multiple quality data files to upload. Not supported. "
+                f"{count=}, recipe_run_id={self.recipe_run_id}"
+            )
+
+        with self.telemetry_span("Uploading the trial quality data"):
+            path = paths[0]
+            logger.info(f"Uploading quality data: recipe_run_id={self.recipe_run_id}, {path=}")
+            quality_data_object_key = self.format_object_key(path)
+            self.object_store_upload_quality_data(
+                quality_data=path,
+                bucket=self.destination_bucket,
+                object_key=quality_data_object_key,
+                content_type="application/json",
+            )
+
 
 class AssembleQualityData(L1OutputDataBase, QualityMixin):
     """
@@ -128,7 +159,7 @@ class AssembleQualityData(L1OutputDataBase, QualityMixin):
        ):
            self.write(
                quality_data,
-                tags=Tag.quality_data(),
+                tags=[Tag.output(), Tag.quality_data()],
                encoder=quality_data_encoder,
                relative_path=f"{self.constants.dataset_id}_quality_data.json",
            )
@@ -146,20 +177,10 @@ class SubmitDatasetMetadata(L1OutputDataBase):
 
     def run(self) -> None:
         """Run method for this task."""
-        with self.telemetry_span(f"Storing quality data to metadata store"):
-            # each quality_data file is a list - this will combine the elements of multiple lists into a single list
-            quality_data = list(
-                chain.from_iterable(
-                    self.read(tags=Tag.quality_data(), decoder=quality_data_decoder)
-                )
-            )
-            self.metadata_store_add_quality_data(
-                dataset_id=self.constants.dataset_id, quality_data=quality_data
-            )
         with self.telemetry_span("Count Expected Outputs"):
             dataset_id = self.constants.dataset_id
             expected_object_count = self.count(tags=Tag.output())
-            if quality_data:
+            if self.dataset_has_quality_data:
                 expected_object_count += 1
             logger.info(
                 f"Adding Dataset Receipt Account: "
@@ -230,24 +251,12 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
         messages = [CatalogObjectMessage(body=body) for body in message_bodies]
         return messages
 
-    @property
-    def quality_report_message(self) -> CreateQualityReportMessage:
-        """Create the Quality Report Message."""
-        file_name = Path(f"{self.constants.dataset_id}_quality_report.pdf")
-        body = CreateQualityReportMessageBody(
-            bucket=self.destination_bucket,
-            objectName=self.format_object_key(file_name),
-            conversationId=str(self.recipe_run_id),
-            datasetId=self.constants.dataset_id,
-            incrementDatasetCatalogReceiptCount=True,
-        )
-        return CreateQualityReportMessage(body=body)
-
     def run(self) -> None:
         """Run method for this task."""
         with self.telemetry_span("Gather output data"):
             frames = self.read(tags=self.output_frame_tags)
             movies = self.read(tags=[Tag.output(), Tag.movie()])
+            quality_data = self.read(tags=[Tag.output(), Tag.quality_data()])
         with self.telemetry_span("Create message objects"):
             messages = []
             messages += self.frame_messages(paths=frames)
@@ -256,7 +265,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
             object_message_count = len(messages) - frame_message_count
             dataset_has_quality_data = self.dataset_has_quality_data
             if dataset_has_quality_data:
-                messages.append(self.quality_report_message)
+                messages += self.object_messages(paths=quality_data, object_type="QDATA")
         with self.telemetry_span(
             f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
         ):
@@ -1,6 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements Metadata Store data access functionality."""
 
-import json
 import logging
 from functools import cached_property
 from typing import Literal
@@ -8,15 +7,11 @@ from typing import Literal
 from pydantic import validate_call
 
 from dkist_processing_common._util.graphql import GraphQLClient
-from dkist_processing_common.codecs.quality import QualityDataEncoder
 from dkist_processing_common.config import common_configurations
 from dkist_processing_common.models.graphql import DatasetCatalogReceiptAccountMutation
 from dkist_processing_common.models.graphql import DatasetCatalogReceiptAccountResponse
 from dkist_processing_common.models.graphql import InputDatasetPartResponse
 from dkist_processing_common.models.graphql import InputDatasetRecipeRunResponse
-from dkist_processing_common.models.graphql import QualitiesRequest
-from dkist_processing_common.models.graphql import QualityCreation
-from dkist_processing_common.models.graphql import QualityResponse
 from dkist_processing_common.models.graphql import RecipeRunMutation
 from dkist_processing_common.models.graphql import RecipeRunMutationResponse
 from dkist_processing_common.models.graphql import RecipeRunProvenanceMutation
@@ -150,50 +145,6 @@ class MetadataStoreMixin:
             mutation_response_cls=RecipeRunProvenanceResponse,
         )
 
-    # QUALITY
-
-    def metadata_store_add_quality_data(self, dataset_id: str, quality_data: list[dict]):
-        """Add the quality data to the metadata-store."""
-        if self.metadata_store_quality_data_exists(dataset_id):
-            raise RuntimeError(f"Quality data already persisted for dataset {dataset_id!r}")
-        for metric in quality_data:
-            if (metric_code := metric.get("metric_code")) is None:
-                name = metric.get("name")
-                raise ValueError(f"No metric_code for {name!r} in dataset {dataset_id!r}")
-            params = QualityCreation(
-                datasetId=dataset_id,
-                metricCode=metric_code,
-                facet=metric.get("facet"),
-                name=metric.get("name"),
-                description=metric.get("description"),
-                statement=metric.get("statement"),
-                # JSON array
-                warnings=json.dumps(metric.get("warnings")),
-                # JSON objects
-                plotData=json.dumps(metric.get("plot_data"), cls=QualityDataEncoder),
-                multiPlotData=json.dumps(metric.get("multi_plot_data"), cls=QualityDataEncoder),
-                tableData=json.dumps(metric.get("table_data"), cls=QualityDataEncoder),
-                histogramData=json.dumps(metric.get("histogram_data"), cls=QualityDataEncoder),
-                modmatData=json.dumps(metric.get("modmat_data"), cls=QualityDataEncoder),
-                raincloudData=json.dumps(metric.get("raincloud_data"), cls=QualityDataEncoder),
-                efficiencyData=json.dumps(metric.get("efficiency_data"), cls=QualityDataEncoder),
-            )
-            self.metadata_store_client.execute_gql_mutation(
-                mutation_base="createQuality",
-                mutation_parameters=params,
-                mutation_response_cls=QualityResponse,
-            )
-
-    def metadata_store_quality_data_exists(self, dataset_id: str) -> bool:
-        """Return True if quality data exists in the metadata-store for the given dataset id."""
-        params = QualitiesRequest(datasetId=dataset_id)
-        response = self.metadata_store_client.execute_gql_query(
-            query_base="qualities",
-            query_response_cls=QualityResponse,
-            query_parameters=params,
-        )
-        return bool(response)
-
     # INPUT DATASET RECIPE RUN
 
@@ -55,6 +55,27 @@ class ObjectStoreMixin:
             },
         )
 
+    def object_store_upload_quality_data(
+        self,
+        quality_data: Path | bytes,
+        bucket: str,
+        object_key: str,
+        content_type: str = "application/json",
+    ):
+        """Upload quality data to the object store."""
+        self.object_store_client.upload_object(
+            object_data=quality_data,
+            bucket=bucket,
+            object_key=object_key,
+            verify_checksum=True,
+            content_type=content_type,
+            metadata={
+                "groupname": "DATASET",
+                "groupid": self.constants.dataset_id,
+                "objecttype": "QDATA",
+            },
+        )
+
     def object_store_remove_folder_objects(self, bucket: str, path: Path | str) -> list[str]:
         """
         Remove folder objects (end with /) in the specified bucket and path.
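The new mixin method above is a thin wrapper around `upload_object`; the notable part is the object metadata, which marks the upload as a `QDATA` object belonging to the dataset. A stand-in sketch of the call shape only — the fake client below is illustrative, and the bucket, key, and dataset id are invented:

```python
from pathlib import Path


class FakeObjectStoreClient:
    """Illustrative stand-in that only mimics the upload_object call signature used above."""

    def upload_object(self, object_data, bucket, object_key, verify_checksum, content_type, metadata):
        print(f"PUT {bucket}/{object_key} type={content_type} checksum={verify_checksum} metadata={metadata}")


client = FakeObjectStoreClient()
client.upload_object(
    object_data=Path("DSETID_quality_data.json"),  # invented dataset id and file name
    bucket="example-bucket",                        # invented bucket
    object_key="DSETID_quality_data.json",          # the real key comes from format_object_key()
    verify_checksum=True,
    content_type="application/json",
    metadata={"groupname": "DATASET", "groupid": "DSETID", "objecttype": "QDATA"},
)
```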
@@ -1356,13 +1356,15 @@ class _WavecalQualityMixin:
         Note that the residuals are the *unweighed* residuals.
         """
         weight_data = np.ones(input_wavelength.size) if weights is None else weights
-        prepared_weights = fit_result.prepared_weights
+        prepared_weights = np.sqrt(weight_data / np.sum(weight_data))
         residuals = fit_result.minimizer_result.residual / prepared_weights
         residuals[~np.isfinite(residuals)] = 0.0
+        best_fit_atlas = input_spectrum - residuals
         normalized_residuals = residuals / input_spectrum
 
-        best_fit_atlas = fit_result.best_fit_atlas
-        best_fit_wavelength = fit_result.best_fit_wavelength_vector
+        best_fit_header = fit_result.wavelength_parameters.to_header(axis_num=1)
+        wcs = WCS(best_fit_header)
+        best_fit_wavelength = wcs.spectral.pixel_to_world(np.arange(input_spectrum.size))
 
         finite_idx = (
             np.isfinite(input_wavelength)
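For context on the replacement of `fit_result.prepared_weights` above: the minimizer's residual vector is assumed to carry the sqrt-normalized fit weights, so dividing by the same normalization recovers unweighted residuals (and, per the new line, the best-fit atlas as `input_spectrum - residuals`). A small self-contained numpy sketch of that unweighting:

```python
import numpy as np

weights = np.array([1.0, 2.0, 4.0])
prepared_weights = np.sqrt(weights / np.sum(weights))  # same normalization as the hunk above

true_residuals = np.array([0.1, -0.2, 0.05])
weighted_residuals = true_residuals * prepared_weights  # what a weighted minimizer would report

recovered = weighted_residuals / prepared_weights  # divide the weights back out
print(np.allclose(recovered, true_residuals))  # True
```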
@@ -1376,7 +1378,7 @@ class _WavecalQualityMixin:
         data = {
             "input_wavelength_nm": input_wavelength.to_value(u.nm)[finite_idx].tolist(),
             "input_spectrum": input_spectrum[finite_idx].tolist(),
-            "best_fit_wavelength_nm": best_fit_wavelength[finite_idx].tolist(),
+            "best_fit_wavelength_nm": best_fit_wavelength.to_value(u.nm)[finite_idx].tolist(),
             "best_fit_atlas": best_fit_atlas[finite_idx].tolist(),
             "normalized_residuals": normalized_residuals[finite_idx].tolist(),
             "weights": None if weights is None else weight_data[finite_idx].tolist(),
@@ -9,6 +9,8 @@ from typing import Any
 from typing import Generator
 from uuid import uuid4
 
+from dkist_inventory.inventory import generate_quality_report_filename
+
 from dkist_processing_common.codecs.asdf import asdf_fileobj_encoder
 from dkist_processing_common.codecs.basemodel import basemodel_decoder
 from dkist_processing_common.codecs.fits import fits_access_decoder
@@ -245,5 +247,7 @@ class CreateTrialQualityReport(OutputDataBase):
        self.write(
            quality_report,
            tags=[Tag.output(), Tag.quality_report()],
-            relative_path=f"{self.constants.dataset_id}_quality_report.pdf",
+            relative_path=generate_quality_report_filename(
+                dataset_id=self.constants.dataset_id
+            ),
        )
@@ -100,7 +100,7 @@ class TransferTrialData(TransferDataBase, GlobusMixin):
         tag_list = []
         tag_list += [[Tag.output(), Tag.dataset_inventory()]]
         tag_list += [[Tag.output(), Tag.asdf()]]
-        tag_list += [[Tag.quality_data()]] # quality data is not tagged as OUTPUT
+        tag_list += [[Tag.output(), Tag.quality_data()]]
         tag_list += [[Tag.output(), Tag.quality_report()]]
         tag_list += [[Tag.output(), Tag.movie()]]
         return tag_list
@@ -135,10 +135,6 @@ class InputDatasetRecipeRunResponseMapping(ResponseMapping):
         return Unset
 
 
-class QualityResponseMapping(ResponseMapping):
-    pass # TODO
-
-
 def make_default_recipe_run_status_response() -> RecipeRunStatusResponse:
     return RecipeRunStatusResponse(recipeRunStatusId=1)
 
@@ -57,24 +57,3 @@ def test_object_messages(publish_catalog_and_quality_messages_task):
         assert message.body.conversationId == str(task.recipe_run_id)
         assert message.body.objectType == object_type
         assert message.body.groupId == task.constants.dataset_id
-
-
-def test_quality_report_message(publish_catalog_and_quality_messages_task):
-    """
-    :Given: a PublishCatalogAndQualityMessages task
-    :When: creating quality report message
-    :Then: the attributes are correctly populated
-    """
-    # Given
-    task, proposal_id = publish_catalog_and_quality_messages_task
-    # When
-    message = task.quality_report_message
-    # Then
-    assert isinstance(message, CreateQualityReportMessage)
-    assert message.body.bucket == task.destination_bucket
-    # objectName exists and can be evaluated as a valid path
-    assert message.body.objectName
-    _ = Path(message.body.objectName)
-    assert message.body.datasetId == task.constants.dataset_id
-    assert message.body.conversationId == str(task.recipe_run_id)
-    assert message.body.incrementDatasetCatalogReceiptCount is True
@@ -1214,20 +1214,12 @@ def wavecal_weights(wavecal_input_wavelength) -> np.ndarray:
 
 
 @pytest.fixture(scope="session")
-def wavecal_fit_result(wavecal_input_wavelength, wavecal_input_spectrum) -> FitResult:
+def wavecal_fit_result(wavecal_input_wavelength) -> FitResult:
     wavelength_params = WavelengthParameters(
         crpix=1, crval=10.0, dispersion=1, grating_constant=1, order=1, incident_light_angle=0
     )
-
-    residuals = np.random.random(wavecal_input_wavelength.size)
-    residuals[-1] = np.nan
-    minimizer_result = MinimizerResult(residual=residuals)
-    return FitResult(
-        wavelength_parameters=wavelength_params,
-        minimizer_result=minimizer_result,
-        input_wavelength_vector=wavecal_input_wavelength,
-        input_spectrum=wavecal_input_spectrum,
-    )
+    minimizer_result = MinimizerResult(residual=np.random.random(wavecal_input_wavelength.size))
+    return FitResult(wavelength_parameters=wavelength_params, minimizer_result=minimizer_result)
 
 
 @pytest.mark.parametrize(
@@ -95,13 +95,10 @@ def test_submit_dataset_metadata(
    mocker.patch(
        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
    )
-    # intercept these two GraphQLClient calls so they can be confirmed
+    # intercept this GraphQLClient call so it can be confirmed
    mocked_metadata_store_add_dataset_receipt_account = mocker.patch.object(
        metadata_store.MetadataStoreMixin, "metadata_store_add_dataset_receipt_account"
    )
-    mocked_metadata_store_add_quality_data = mocker.patch.object(
-        metadata_store.MetadataStoreMixin, "metadata_store_add_quality_data"
-    )
    task = submit_dataset_metadata_task
 
    # When
@@ -109,4 +106,3 @@ def test_submit_dataset_metadata(
 
    # Then
    mocked_metadata_store_add_dataset_receipt_account.assert_called_once()
-    mocked_metadata_store_add_quality_data.assert_called_once()
@@ -158,13 +158,12 @@ def complete_trial_output_task(
    task.write(asdf_file_obj, relative_path=asdf_file_name, tags=[Tag.output(), Tag.asdf()])
 
    # Write quality data
-    # quality data is not tagged as OUTPUT
    quality_data_obj = uuid4().hex.encode("utf8")
    quality_data_name = "quality_data.json"
    task.write(
        quality_data_obj,
        relative_path=quality_data_name,
-        tags=Tag.quality_data(),
+        tags=[Tag.output(), Tag.quality_data()],
    )
 
    # Write a quality report file
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dkist-processing-common
-Version: 11.8.1rc2
+Version: 11.8.2rc1
 Summary: Common task classes used by the DKIST science data processing pipelines
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD-3-Clause
@@ -21,7 +21,7 @@ Requires-Dist: dkist-processing-core==6.0.0
 Requires-Dist: dkist-processing-pac<4.0,>=3.1
 Requires-Dist: dkist-service-configuration<5.0,>=4.1.7
 Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
-Requires-Dist: solar-wavelength-calibration<3.0,>=2.0.0rc3
+Requires-Dist: solar-wavelength-calibration<2.0,>=1.0
 Requires-Dist: globus-sdk<4.0.0,>=3.12.0
 Requires-Dist: gqlclient[pydantic]==1.2.3
 Requires-Dist: sqids==0.5.1
@@ -31,7 +31,7 @@ Requires-Dist: numpy>=1.26.4
 Requires-Dist: object-clerk==1.0.0
 Requires-Dist: pandas>=1.4.2
 Requires-Dist: pillow>=10.2.0
-Requires-Dist: pydantic>=2.7.2
+Requires-Dist: pydantic>=2.0
 Requires-Dist: redis==6.4.0
 Requires-Dist: requests>=2.23
 Requires-Dist: scipy>=1.15.1
@@ -57,9 +57,9 @@ Requires-Dist: pytest; extra == "docs"
 Requires-Dist: towncrier<22.12.0; extra == "docs"
 Requires-Dist: dkist-sphinx-theme; extra == "docs"
 Provides-Extra: inventory
-Requires-Dist: dkist-inventory<2.0,>=1.11.1; extra == "inventory"
+Requires-Dist: dkist-inventory==1.11.3rc2; extra == "inventory"
 Provides-Extra: asdf
-Requires-Dist: dkist-inventory[asdf]<2.0,>=1.11.1; extra == "asdf"
+Requires-Dist: dkist-inventory[asdf]==1.11.3rc2; extra == "asdf"
 Provides-Extra: quality
 Requires-Dist: dkist-quality<3.0,>=2.0.0; extra == "quality"
 
@@ -1,6 +1,5 @@
 changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-changelog/271.misc.2.rst,sha256=-nI7LQ8B_O6ZxNa0iBnfSbjIpo9uCvCMiGmKnwTigrA,99
-changelog/271.misc.rst,sha256=YxeCttyTuraW8dlIRP99Pf9XHQrjQMPdSO9bm8r2Kk0,104
+changelog/276.feature.rst,sha256=aqZfWU38aahIPMli0IqN0A9q_QK0zeeva3RkXYAYl3g,35
 dkist_processing_common/__init__.py,sha256=GQ9EBnYhkOnt-qODclAoLS_g5YVhurxfg1tjVtI9rDI,320
 dkist_processing_common/config.py,sha256=kkFm-Q6jGGoV3Zdf3PSfk3xNYd2vSH0J2R8dBBav3Og,5218
 dkist_processing_common/manual.py,sha256=bIVVyLsbXMh-g_2L3kGROL-1TtJe0_XviHsp7Br31x8,7023
@@ -28,7 +27,7 @@ dkist_processing_common/models/dkist_location.py,sha256=6Nk0wvv4R8ptlrV7BXon7abq
 dkist_processing_common/models/fits_access.py,sha256=imKqL4-_g6gTR-IeIjZ6qkMhQX3JujdrKFrTd9gOXnw,5605
 dkist_processing_common/models/flower_pot.py,sha256=_J7DwHM8u5kQfdPCpk5pUmALtLrM1L_h-x8JW5BSjXA,5129
 dkist_processing_common/models/fried_parameter.py,sha256=ro_H2Eo3I88lRf1wJjZfTc_XOjhgLt4whIQR_sjAFbM,1609
-dkist_processing_common/models/graphql.py,sha256=oSEcdVsVRytnIDEORLs4b6r3C2Lr7gqEEMHu82zpOOg,5912
+dkist_processing_common/models/graphql.py,sha256=QsKLbytpw_Qg9pJASscA7dZRfDbHLkpLZaWeqaHUDvo,5133
 dkist_processing_common/models/input_dataset.py,sha256=19w_ydrxdzjJgpnhFELqUomr7GixURjzLOaX41ipOKk,4173
 dkist_processing_common/models/message.py,sha256=ZEsPQalo5aKTOHfc5I15mNCe1KQcfJ3ivU7XBf8wnkM,1684
 dkist_processing_common/models/message_queue_binding.py,sha256=Y4otwkkePrLRSjlrya8nlEaBvCCUgfGZAWZF9XqCQ9Y,1012
@@ -60,26 +59,26 @@ dkist_processing_common/parsers/wavelength.py,sha256=P5C9mG8DAKK3GB3vWNRBI5l7pAW
 dkist_processing_common/tasks/__init__.py,sha256=l23ctjNsKJbHbbqaZBMeOPaOtw0hmITEljI_JJ-CVsU,627
 dkist_processing_common/tasks/assemble_movie.py,sha256=1ixDG-f4ODt0vywqVccG3aodLljVO5OGlvuMO9EEvcU,12767
 dkist_processing_common/tasks/base.py,sha256=itAHCvzcodo-q8_AjpWoRaM86BlcjWDpCIiUP7uwmP0,13236
-dkist_processing_common/tasks/l1_output_data.py,sha256=D4S3kH2uRKp1b8_xF2YiWO_mGj19UJS5wKQZ4OdatGs,10568
+dkist_processing_common/tasks/l1_output_data.py,sha256=8Yf69__tsNOMgN_dWk-QGGyJWA1jpQ02RkBBoXSYv-Y,10870
 dkist_processing_common/tasks/output_data_base.py,sha256=r1Bu3FX5zTVj66GTMWtaV_NdhxjyjSm661Bt2Mxmfi4,3685
 dkist_processing_common/tasks/parse_l0_input_data.py,sha256=KguXT0Xavynu7C8NFMjsV4628LRoTvfeSuApb6v4Neg,18835
 dkist_processing_common/tasks/quality_metrics.py,sha256=cvGF6tJ8yAvxOvkeG3tWxYwL885BrFW5X3V7_MSzL-A,12481
 dkist_processing_common/tasks/teardown.py,sha256=rwT9lWINVDF11-az_nx-Z5ykMTX_SJCchobpU6sErgk,2360
 dkist_processing_common/tasks/transfer_input_data.py,sha256=DAYfS-B1o-iBT9MXU-TiJG4Hv05Z0c_JzPrnFgvnK9g,5786
-dkist_processing_common/tasks/trial_catalog.py,sha256=43XN4Hz4uNBGEps3O5RLfFHRfyo9FZPzMwkcapniztQ,10049
-dkist_processing_common/tasks/trial_output_data.py,sha256=CPMXXODvN5RTcu9bTF8v6AXciCl212EWP6qTiARvUNk,6837
+dkist_processing_common/tasks/trial_catalog.py,sha256=9NLK5UBxBShW_ROpgZYtXeHObCQSlGJpI-mKmk4MjMs,10180
+dkist_processing_common/tasks/trial_output_data.py,sha256=pUdrNlAzuir4AUdfax5_MOplB-A9NrXErMJmAwtJmLA,6811
 dkist_processing_common/tasks/write_l1.py,sha256=Xy834RTp3F95kLcW4ba5gfHMUocfZd82ZQQKnvQcP2M,23204
 dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
 dkist_processing_common/tasks/mixin/globus.py,sha256=9ey_UCacqCfmxYZSgm6VDefdlm7dkNloC8G5DeVub8s,6592
 dkist_processing_common/tasks/mixin/interservice_bus.py,sha256=M6R922l7gJSmmU_vswUXxy-c5DWNrIRjQu9H9CSgGfU,1081
-dkist_processing_common/tasks/mixin/metadata_store.py,sha256=QeZV3BITUzCErUt7mbkJME5SQWi3UPp_tw3zJ2PWy-M,11715
-dkist_processing_common/tasks/mixin/object_store.py,sha256=zGAyRR1O6EN52p7sFQtDWxa7ASx3HJSsO4d4tJr-beE,3237
+dkist_processing_common/tasks/mixin/metadata_store.py,sha256=GLfh0a0ehuRC8Uml59PfLExuoxGRj6tzNMytlO5jZf4,9106
+dkist_processing_common/tasks/mixin/object_store.py,sha256=l2sPzolmKsuYvUocHayT7PScliiFRIV12qIFYtCLII8,3888
 dkist_processing_common/tasks/mixin/quality/__init__.py,sha256=GOI_PBUxTmYp5IIuYFbwpA5Vx0jUwpdBBYYrnZMTh0E,384
 dkist_processing_common/tasks/mixin/quality/_base.py,sha256=qt9TZZ140skFWFmabrjlGdm60OLWEfx_xZAaohr6dLM,8492
-dkist_processing_common/tasks/mixin/quality/_metrics.py,sha256=TtM6V7qy0U6ofFmjXNoJKkaECoPSfJsU4Ziu1UQgPZs,60457
+dkist_processing_common/tasks/mixin/quality/_metrics.py,sha256=k0hlthVbW7Jso9q_P0-hWGfp190kQO_oUQyGBRvxgqo,60626
 dkist_processing_common/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dkist_processing_common/tests/conftest.py,sha256=Tm-Yq956EAafpDtu1d7JjdVY0Unp9e4z9ak-yf4wnH4,22320
-dkist_processing_common/tests/mock_metadata_store.py,sha256=-uzwbLeRn19_Pq1hr97R4hpUD5K5sv8Zy1U8gG0GdSQ,9657
+dkist_processing_common/tests/mock_metadata_store.py,sha256=i41xu5AY4A566YT0qSzhP7qpR0wZp_EFH-OFcAsV4vQ,9591
 dkist_processing_common/tests/test_assemble_movie.py,sha256=dyVhowxB-Kc6GuxlDs74UrPtK9fwdUL7y5haA3Bidz0,4065
 dkist_processing_common/tests/test_assemble_quality.py,sha256=-F22jMY6mPy65VZ1TZY2r1vsxMXOPmZHArGx70OD3BA,17832
 dkist_processing_common/tests/test_base.py,sha256=gsyBG2R6Ufx7CzbHeGMagUwM9yCfpN4gCSZ6-aH2q48,6643
@@ -97,12 +96,12 @@ dkist_processing_common/tests/test_manual_processing.py,sha256=iHF7yQPlar9niYAGX
 dkist_processing_common/tests/test_output_data_base.py,sha256=D8b1XKvbE3C5cGOiHq58yJ2pzQL3iL0wLZy_AkDdB9Y,3085
 dkist_processing_common/tests/test_parameters.py,sha256=CUEUIGBPMCUXPll0G0UxFDbMXi8lmnjRwXBarGX1PAQ,14033
 dkist_processing_common/tests/test_parse_l0_input_data.py,sha256=9OOqeMX8ReQO67ldoMHOBKLQg7Nd5qWHOEoHygcN5Ic,11889
-dkist_processing_common/tests/test_publish_catalog_messages.py,sha256=l6Wga1s2wNBIf4wGZ78ZIO_rtqjdidmtvlN9nMnQUAs,3222
+dkist_processing_common/tests/test_publish_catalog_messages.py,sha256=7WRsEwoLHGeaCmLTAW4tU_BlZw0e3hwx65uWSGzfuYE,2393
 dkist_processing_common/tests/test_quality.py,sha256=IPz7liXcmoqWIsY78oX07Ui0nWHxoUH2FbKGEmMle7E,10258
-dkist_processing_common/tests/test_quality_mixin.py,sha256=L-_kSIKs8A48LGt9QaItZWZqIcRF0MhBCAZQZYdSflk,55575
+dkist_processing_common/tests/test_quality_mixin.py,sha256=qanm3SXEiLb0OJDawbh8ixVAG9uRglFMzwxTeYxkDsM,55369
 dkist_processing_common/tests/test_scratch.py,sha256=WO8C1VJlkcC5IzST9Hj08CyyrINwYcN8pyteD9x38xs,16482
 dkist_processing_common/tests/test_stems.py,sha256=p__51u-b8vfWLI71aLxF3w2tcWtv6M6DyHJ7_6FZMHI,38949
-dkist_processing_common/tests/test_submit_dataset_metadata.py,sha256=LHEyjoIxJHXXssqKkr8Qn1NzzHD1FLJiD3lP8yaLiXU,3764
+dkist_processing_common/tests/test_submit_dataset_metadata.py,sha256=-UicRcyRQAC9H3sbTYlJaH4-Yn6jKNyQEZhzZxojzqw,3543
 dkist_processing_common/tests/test_tags.py,sha256=w5gmVfp3Ck92KNV80lJQRMz0OYgTYzWtwVUFWv1b5i8,5024
 dkist_processing_common/tests/test_task_name.py,sha256=kqFr59XX2K87xzfTlClzDV4-Je1dx72LvdaJ22UE8UU,1233
 dkist_processing_common/tests/test_task_parsing.py,sha256=2_OOmeZQWD17XAd_ECYmodJzD_iRIBKjCYdGh38BOx4,4421
@@ -110,7 +109,7 @@ dkist_processing_common/tests/test_teardown.py,sha256=DaliHSGsiQBZaFkf5wb3XBo6rH
 dkist_processing_common/tests/test_transfer_input_data.py,sha256=eyAAWXpTHQ8aew87-MncWpYBn4DAZrTSOL3LvlQfR5Q,12611
 dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=PVGDJBEUk4kAeu8ivrhlCE7yd29R18t9kZLFx-mpBwY,2063
 dkist_processing_common/tests/test_trial_catalog.py,sha256=CxjtVABE5Fw2EvyXR56IJ3PPi9QvEOjccH0OzzRWk30,9424
-dkist_processing_common/tests/test_trial_output_data.py,sha256=YwmSfhNsSG-kdnvNlq7Ku0PNi_onTvU6uxn_nLiAKZw,12008
+dkist_processing_common/tests/test_trial_output_data.py,sha256=fu3iGNV_FI8LOacezyt4HvXnxY3g1_UiBuRI63yz5Oo,11977
 dkist_processing_common/tests/test_workflow_task_base.py,sha256=LTVusltNrsGUOvw9G323am4CXebgE4tJhP6gZCcS0CQ,10457
 dkist_processing_common/tests/test_write_l1.py,sha256=alN-lozKEm6vKNdhtvzjnuPqv-NjHyUg16Op7SkMH-c,27964
 docs/Makefile,sha256=qnlVz6PuBqE39NfHWuUnHhNEA-EFgT2-WJNNNy9ttfk,4598
@@ -121,7 +120,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
 docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
 docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
 licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
-dkist_processing_common-11.8.1rc2.dist-info/METADATA,sha256=rY6EYh18aCYbH9btRZoaY4N2vh5-SZP6d3KSJrJE0nU,13332
-dkist_processing_common-11.8.1rc2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-dkist_processing_common-11.8.1rc2.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
-dkist_processing_common-11.8.1rc2.dist-info/RECORD,,
+dkist_processing_common-11.8.2rc1.dist-info/METADATA,sha256=FJ8L1rwK_GRSv_8dPs5Em8XvcP3hDBdy26w0hz2FgRo,13321
+dkist_processing_common-11.8.2rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dkist_processing_common-11.8.2rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+dkist_processing_common-11.8.2rc1.dist-info/RECORD,,
changelog/271.misc.2.rst DELETED
@@ -1 +0,0 @@
-Bump minimum version of `pydantic` to 2.7.2 to avoid install failures for python versions >= 3.12.
changelog/271.misc.rst DELETED
@@ -1 +0,0 @@
-Move `solar-wavelength-calibration` dep to 2.0.0 and make use of new helper properties in that release.