dkist-processing-common 10.6.2__py3-none-any.whl → 10.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,3 +26,4 @@ class WavelengthRange(BaseModel):
         """Validate that the max wavelength is greater than the min wavelength."""
         if self.min > self.max:
             raise ValueError("min is greater than max. Values may be reversed.")
+        return self
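The only change in models/wavelength.py is the added `return self` at the end of the range check. A minimal, hypothetical sketch of why that matters, assuming the method is a pydantic v2 model_validator(mode="after") (consistent with the self-based check above; the field types and class name below are illustrative, not the packaged source) — pydantic expects an "after" model validator to hand the instance back:

    from pydantic import BaseModel, model_validator


    class WavelengthRangeSketch(BaseModel):
        """Hypothetical stand-in for WavelengthRange, using plain floats for min/max."""

        min: float
        max: float

        @model_validator(mode="after")
        def check_order(self):
            """Validate that the max wavelength is greater than the min wavelength."""
            if self.min > self.max:
                raise ValueError("min is greater than max. Values may be reversed.")
            return self  # "after" validators are expected to return the model instance


    # Construction runs the validator; a well-ordered range passes, a reversed one raises.
    ok = WavelengthRangeSketch(min=500.0, max=600.0)
    assert ok.max == 600.0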
@@ -20,12 +20,17 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin, InputDatasetMixin):
 
     def download_input_dataset(self):
         """Get the input dataset document parts and save it to scratch with the appropriate tags."""
-        if doc := self.metadata_store_input_dataset_observe_frames.inputDatasetPartDocument:
-            self.write(doc, tags=Tag.input_dataset_observe_frames(), encoder=json_encoder)
-        if doc := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartDocument:
-            self.write(doc, tags=Tag.input_dataset_calibration_frames(), encoder=json_encoder)
-        if doc := self.metadata_store_input_dataset_parameters.inputDatasetPartDocument:
-            self.write(doc, tags=Tag.input_dataset_parameters(), encoder=json_encoder)
+        if observe_frames := self.metadata_store_input_dataset_observe_frames:
+            observe_doc = observe_frames.inputDatasetPartDocument
+            self.write(observe_doc, tags=Tag.input_dataset_observe_frames(), encoder=json_encoder)
+        if calibration_frames := self.metadata_store_input_dataset_calibration_frames:
+            calibration_doc = calibration_frames.inputDatasetPartDocument
+            self.write(
+                calibration_doc, tags=Tag.input_dataset_calibration_frames(), encoder=json_encoder
+            )
+        if parameters := self.metadata_store_input_dataset_parameters:
+            parameters_doc = parameters.inputDatasetPartDocument
+            self.write(parameters_doc, tags=Tag.input_dataset_parameters(), encoder=json_encoder)
 
     def format_transfer_items(
         self, input_dataset_objects: list[InputDatasetObject]
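The rewrite above guards on the input dataset part object itself before touching inputDatasetPartDocument, so a recipe run that lacks one of the parts (for example calibration frames) is skipped rather than risking an AttributeError on None. A self-contained sketch of the pattern with illustrative names (PartSketch and save_part are not from the package):

    from typing import Any, Callable, Optional


    class PartSketch:
        """Illustrative stand-in for a metadata-store input dataset part."""

        def __init__(self, document: dict[str, Any]):
            self.inputDatasetPartDocument = document


    def save_part(part: Optional[PartSketch], write_doc: Callable[[dict[str, Any]], None]) -> bool:
        """Write the part document only when the part itself exists."""
        # Dereferencing part.inputDatasetPartDocument when part is None would raise
        # AttributeError; the guard skips the write instead.
        if part:  # walrus form in the real task: ``if observe_frames := ...``
            write_doc(part.inputDatasetPartDocument)
            return True
        return False


    written: list[dict[str, Any]] = []
    assert save_part(PartSketch({"bucket": "data"}), written.append) is True
    assert save_part(None, written.append) is False  # missing part is skipped, no exception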
@@ -214,11 +214,12 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
         header["CADMAX"] = self.constants.maximum_cadence
         header["CADVAR"] = self.constants.variance_cadence
         # Keywords to support reprocessing
-        if ids_par_id := self.metadata_store_input_dataset_parameters.inputDatasetPartId:
-            header["IDSPARID"] = ids_par_id
-        header["IDSOBSID"] = self.metadata_store_input_dataset_observe_frames.inputDatasetPartId
-        if ids_cal_id := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartId:
-            header["IDSCALID"] = ids_cal_id
+        if parameters := self.metadata_store_input_dataset_parameters:
+            header["IDSPARID"] = parameters.inputDatasetPartId
+        if observe_frames := self.metadata_store_input_dataset_observe_frames:
+            header["IDSOBSID"] = observe_frames.inputDatasetPartId
+        if calibration_frames := self.metadata_store_input_dataset_calibration_frames:
+            header["IDSCALID"] = calibration_frames.inputDatasetPartId
         header["WKFLNAME"] = self.workflow_name
         header["WKFLVERS"] = self.workflow_version
         header = self.add_contributing_id_headers(header=header)
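In write_l1.py the same guard makes each reprocessing keyword optional: IDSPARID, IDSOBSID, and IDSCALID are written only when the corresponding input dataset part exists, so a missing part leaves the keyword out of the L1 header instead of failing the task (this is exactly what the new test further down asserts). A hedged sketch of the resulting header behaviour using astropy; the `parts` mapping is illustrative:

    from astropy.io import fits

    header = fits.Header()
    parts = {"IDSPARID": 4, "IDSOBSID": 7, "IDSCALID": None}  # None stands in for a missing part

    for keyword, part_id in parts.items():
        if part_id is not None:  # mirrors the ``if part := ...`` guards in the task
            header[keyword] = part_id

    assert "IDSCALID" not in header  # the keyword is simply absent, as the new test expects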
@@ -6,6 +6,7 @@ import pytest
 
 from dkist_processing_common._util.scratch import WorkflowFileSystem
 from dkist_processing_common.codecs.json import json_decoder
+from dkist_processing_common.models.graphql import InputDatasetRecipeRunResponse
 from dkist_processing_common.models.tags import Tag
 from dkist_processing_common.tasks.transfer_input_data import TransferL0Data
 from dkist_processing_common.tests.conftest import create_parameter_files
@@ -17,10 +18,21 @@ class TransferL0DataTask(TransferL0Data):
     ...
 
 
-@pytest.fixture
-def transfer_l0_data_task(recipe_run_id, tmp_path, mocker):
+class FakeGQLClientMissingInputDatasetPart(FakeGQLClient):
+    """Same metadata mocker with calibration input dataset part missing."""
+
+    def execute_gql_query(self, **kwargs):
+        original_response = super().execute_gql_query(**kwargs)
+        # Remove calibration frames part if getting InputDatasetRecipeRunResponse:
+        if kwargs.get("query_response_cls") == InputDatasetRecipeRunResponse:
+            del original_response[0].recipeInstance.inputDataset.inputDatasetInputDatasetParts[2]
+        return original_response
+
+
+def _transfer_l0_data_task_with_client(recipe_run_id, tmp_path, mocker, client_cls):
     mocker.patch(
-        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
+        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
+        new=client_cls,
     )
     with TransferL0DataTask(
         recipe_run_id=recipe_run_id,
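The new FakeGQLClientMissingInputDatasetPart reuses the existing fake's canned response and deletes the calibration-frames part from it, giving the tests a metadata store that never returns that part. A minimal sketch of the subclass-and-trim idea with made-up names (FakeClient and FakeClientMissingPart are illustrative, and the canned response is deliberately simplified):

    class FakeClient:
        """Illustrative base fake returning a canned response."""

        def execute_gql_query(self, **kwargs):
            return {"parts": ["observe_frames", "calibration_frames", "parameters"]}


    class FakeClientMissingPart(FakeClient):
        """Same canned response, minus the calibration part."""

        def execute_gql_query(self, **kwargs):
            response = super().execute_gql_query(**kwargs)
            response["parts"].remove("calibration_frames")
            return response


    assert FakeClientMissingPart().execute_gql_query()["parts"] == ["observe_frames", "parameters"]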
@@ -35,6 +47,18 @@ def transfer_l0_data_task(recipe_run_id, tmp_path, mocker):
         task._purge()
 
 
+@pytest.fixture
+def transfer_l0_data_task(recipe_run_id, tmp_path, mocker):
+    yield from _transfer_l0_data_task_with_client(recipe_run_id, tmp_path, mocker, FakeGQLClient)
+
+
+@pytest.fixture
+def transfer_l0_data_task_missing_part(recipe_run_id, tmp_path, mocker):
+    yield from _transfer_l0_data_task_with_client(
+        recipe_run_id, tmp_path, mocker, FakeGQLClientMissingInputDatasetPart
+    )
+
+
 def test_download_dataset(transfer_l0_data_task):
     """
     :Given: a TransferL0Data task with a valid input dataset
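Both fixtures now delegate to the shared _transfer_l0_data_task_with_client helper; `yield from` forwards the task the helper yields and still runs the helper's cleanup (such as the task._purge() visible above) once the test finishes. A small hedged sketch of that pytest pattern with illustrative names:

    import pytest


    def _task_with_client(client_name):
        """Shared generator: set up, yield the object under test, then tear down."""
        resource = {"client": client_name, "purged": False}
        yield resource
        resource["purged"] = True  # teardown runs after the test body returns


    @pytest.fixture
    def task_default():
        # ``yield from`` forwards the helper's yielded value and its teardown
        yield from _task_with_client("FakeGQLClient")


    @pytest.fixture
    def task_missing_part():
        yield from _task_with_client("FakeGQLClientMissingInputDatasetPart")


    def test_default_client(task_default):
        assert task_default["client"] == "FakeGQLClient"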
@@ -63,6 +87,29 @@ def test_download_dataset(transfer_l0_data_task):
     assert parameters_doc_from_file == expected_parameters_doc
 
 
+def test_download_dataset_missing_part(transfer_l0_data_task_missing_part):
+    """
+    :Given: a TransferL0Data task with a valid input dataset without calibration frames
+    :When: downloading the dataset documents from the metadata store
+    :Then: the correct number of documents are written to disk
+    """
+    # Given
+    task = transfer_l0_data_task_missing_part
+    # When
+    task.download_input_dataset()
+    # Then
+    observe_doc_from_file = next(
+        task.read(tags=Tag.input_dataset_observe_frames(), decoder=json_decoder)
+    )
+    parameters_doc_from_file = next(
+        task.read(tags=Tag.input_dataset_parameters(), decoder=json_decoder)
+    )
+    with pytest.raises(StopIteration):
+        calibration_doc_from_file = next(
+            task.read(tags=Tag.input_dataset_calibration_frames(), decoder=json_decoder)
+        )
+
+
 def test_format_frame_transfer_items(transfer_l0_data_task):
     """
     :Given: a TransferL0Data task with a downloaded input dataset
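In the new test, task.read(...) returns a lazy generator, so the absence of the calibration document is asserted by expecting the very first next() call to raise StopIteration. A tiny sketch of that assertion style (read_tagged is an illustrative stand-in, not the task API):

    import pytest


    def read_tagged(tag):
        """Illustrative stand-in for task.read(): yields nothing for an untagged query."""
        yield from ()


    with pytest.raises(StopIteration):
        next(read_tagged("INPUT_DATASET_CALIBRATION_FRAMES"))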
@@ -24,6 +24,9 @@ from dkist_processing_common.models.wavelength import WavelengthRange
 from dkist_processing_common.tasks.write_l1 import WriteL1Frame
 from dkist_processing_common.tests.conftest import FakeGQLClient
 from dkist_processing_common.tests.conftest import TILE_SIZE
+from dkist_processing_common.tests.test_transfer_input_data import (
+    FakeGQLClientMissingInputDatasetPart,
+)
 
 
 class FakeGQLClientDefaultRecipeConfiguration(FakeGQLClient):
@@ -484,6 +487,29 @@ def test_reprocessing_keys(write_l1_task, mocker):
     assert header["PRODUCT"] == task.compute_product_id(header["IDSOBSID"], header["PROCTYPE"])
 
 
+def test_missing_input_dataset_part(write_l1_task, mocker):
+    """
+    :Given: a Write_L1 task with a missing calibration frames part
+    :When: running the task
+    :Then: the input dataset part keys are correctly written without throwing an exception
+    """
+    mocker.patch(
+        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
+        new=FakeGQLClientMissingInputDatasetPart,
+    )
+    task, _, _ = write_l1_task
+    task()
+    files = list(task.read(tags=[Tag.frame(), Tag.output()]))
+    for file in files:
+        header = fits.open(file)[1].header
+        assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters.inputDatasetPartId
+        assert (
+            header["IDSOBSID"]
+            == task.metadata_store_input_dataset_observe_frames.inputDatasetPartId
+        )
+        assert "IDSCALID" not in header
+
+
 @pytest.mark.parametrize(
     "ids_obs_id, proc_type",
     [
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: dkist-processing-common
-Version: 10.6.2
+Version: 10.6.4
 Summary: Common task classes used by the DKIST science data processing pipelines
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD-3-Clause
@@ -30,7 +30,7 @@ dkist_processing_common/models/parameters.py,sha256=Ymx-wvPVMkXg5emhOivv7NG0QsAt
 dkist_processing_common/models/quality.py,sha256=ONz1A6_qyEoZhQkVp9LChAgm93aGt1O5WSRneE3XCCA,2319
 dkist_processing_common/models/tags.py,sha256=ykOYqWMU7_ffvRCv84-avjXyty9pHBo7EXwsjIjStjs,12058
 dkist_processing_common/models/task_name.py,sha256=NL0n92A9vVYBV-yvh8d-qFOCxVy0X2GECDmLgIzrmOY,565
-dkist_processing_common/models/wavelength.py,sha256=mH4xkNdPU7kedJfQStI6A0WdkMLi61wfe47pJI9kHBI,946
+dkist_processing_common/models/wavelength.py,sha256=Wtmu5QhjPpsqIGfUQ0Wh-3PQlGeRdGV9BfFAy23HLGg,966
 dkist_processing_common/parsers/__init__.py,sha256=XJQzHtPb78F6-qXXKXjyztc0x-aHVlgv1C_l4dR88tI,67
 dkist_processing_common/parsers/cs_step.py,sha256=en1ovwy8H2jeUxZd0XDSV8Qv3ZawRm03q6wpJj4a3C8,6461
 dkist_processing_common/parsers/dsps_repeat.py,sha256=NiUMnfMYSn0qserHM735V1Z6BCfw4CAanlqtjfyRkos,1571
@@ -55,10 +55,10 @@ dkist_processing_common/tasks/output_data_base.py,sha256=CC1TnCrChi8_iuMymr425CJ
 dkist_processing_common/tasks/parse_l0_input_data.py,sha256=iRMGdvhxBobNsTDQ0IEl0myDfB4P_xpxA00guuBWDj8,7986
 dkist_processing_common/tasks/quality_metrics.py,sha256=g6MUq8s8jELDinkn6o45rfONyODw92JyVMrzb7Dd7OI,12458
 dkist_processing_common/tasks/teardown.py,sha256=e4LKnphJDYDVDAez2tH7MxpZgCmxYsKrq9Zk0qAkzzM,2355
-dkist_processing_common/tasks/transfer_input_data.py,sha256=bv2t0DN7nQ9opfl3VPdXG9m69zdzNIjordlCyZVNyBQ,5324
+dkist_processing_common/tasks/transfer_input_data.py,sha256=afEW0glpCFMZRj90nFtQo_4XOQ4CuoOh86jahP6a-a0,5548
 dkist_processing_common/tasks/trial_catalog.py,sha256=Y3DKstRfMS8nWWtJFMB0MUVPlZ1jWS_2jhJGMWwxy50,8748
 dkist_processing_common/tasks/trial_output_data.py,sha256=aI_aRuu0qVO8zFGrr_9baxx9i3jUEHZSmsmbO6ytlkE,6960
-dkist_processing_common/tasks/write_l1.py,sha256=C5IRUX1JO_Wa7suv_tgE4tH1E2eAUkro0rtj9EHduqw,22429
+dkist_processing_common/tasks/write_l1.py,sha256=XhWLOw4hmql3n01SU26B0McPfYD_7oRKHna0Tswn8Lk,22494
 dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
 dkist_processing_common/tasks/mixin/globus.py,sha256=QAV8VElxMAqxJ2KSB_bJaraceovYfjHXjOdocrTCkIA,6592
 dkist_processing_common/tasks/mixin/input_dataset.py,sha256=dkW5vf_QPgWedHO_Lf9GjBxr1QrUCKs6gIXufUTi7GE,6813
@@ -95,12 +95,12 @@ dkist_processing_common/tests/test_tags.py,sha256=UwlOJ45rkvbfbd5L5m5YltvOxQc8kG
 dkist_processing_common/tests/test_task_name.py,sha256=kqFr59XX2K87xzfTlClzDV4-Je1dx72LvdaJ22UE8UU,1233
 dkist_processing_common/tests/test_task_parsing.py,sha256=QXt1X6DTO3_liBD2c-t84DToLeEn7B3J-eteIyN4HEM,4027
 dkist_processing_common/tests/test_teardown.py,sha256=w2sATQHkg2lMLvm6VFZF1mNGFYHwWj_SxvF9RQu-tuY,5362
-dkist_processing_common/tests/test_transfer_input_data.py,sha256=MYPsZldzQ_j0AoGFqP6_ahn8Nr5mX8WAyG0wpcQReeI,4735
+dkist_processing_common/tests/test_transfer_input_data.py,sha256=kE-FQTcN9nft5bh2Rhtp-8ldCTvGXTvWFcsNm6DY7lk,6619
 dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=27PifkyH3RZg0nsM-AjmrFJ-hbYuCk5Tt_0Zx8PJBfM,2109
 dkist_processing_common/tests/test_trial_catalog.py,sha256=SZ-nyn0MXU9Lkg_94FbKER_cwiGoi06GYlzF_3AmvKg,6802
 dkist_processing_common/tests/test_trial_output_data.py,sha256=cBCj0kXyF5NEMzKh6zPVksdoXyE8ju1opJgWgjdcJWA,12790
 dkist_processing_common/tests/test_workflow_task_base.py,sha256=Z5aPW5LQtS0UWJiYho4X0r-2gPLfzpkmMwfmaoFLjMg,10517
-dkist_processing_common/tests/test_write_l1.py,sha256=bIQd95Dhb8aHxCtOXV7Az4Fy_OeEIlspoSktdUR5lRA,20794
+dkist_processing_common/tests/test_write_l1.py,sha256=P_ep0_T2ZDZXPzFQmN7GgNs-RK6c5G6_DssPUW_sW7Q,21773
 docs/Makefile,sha256=qnlVz6PuBqE39NfHWuUnHhNEA-EFgT2-WJNNNy9ttfk,4598
 docs/changelog.rst,sha256=S2jPASsWlQxSlAPqdvNrYvhk9k3FcFWNXFNDYXBSjl4,120
 docs/conf.py,sha256=FkX575cqTqZGCcLAjg2MlvE8Buj1Vt3CpHNgZxG256E,1890
@@ -109,7 +109,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
 docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
 docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
 licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
-dkist_processing_common-10.6.2.dist-info/METADATA,sha256=xBQvt5JjVSsGXLINVfL3RZKU4Cx8oyxiXh5uKxc9MHY,7154
-dkist_processing_common-10.6.2.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
-dkist_processing_common-10.6.2.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
-dkist_processing_common-10.6.2.dist-info/RECORD,,
+dkist_processing_common-10.6.4.dist-info/METADATA,sha256=xFPyZqrXkRO-vWgas9iUWZiduYlNn4qhPTOUCcfXigA,7154
+dkist_processing_common-10.6.4.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+dkist_processing_common-10.6.4.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+dkist_processing_common-10.6.4.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (76.1.0)
+Generator: setuptools (78.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 