dkist-processing-common 10.6.3__py3-none-any.whl → 10.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ from dkist_fits_specifications import __version__ as spec_version
 from dkist_fits_specifications.utils.formatter import reformat_spec214_header
 from dkist_header_validator import spec214_validator
 from dkist_header_validator.translator import remove_extra_axis_keys
-from dkist_header_validator.translator import sanitize_to_spec214_level1
+from dkist_header_validator.translator import remove_spec_122_keys_and_spec_214_l0_keys
 from dkist_spectral_lines.search import get_closest_spectral_line
 from dkist_spectral_lines.search import get_spectral_lines
 from scipy.stats import kurtosis
@@ -102,7 +102,9 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
 
         # Check that the written file passes spec 214 validation if requested
         if self.validate_l1_on_write:
-            spec214_validator.validate(self.scratch.absolute_path(relative_path))
+            spec214_validator.validate(
+                self.scratch.absolute_path(relative_path), extra=False
+            )
 
     @cached_property
     def tile_size_param(self) -> int | None:
@@ -170,19 +172,21 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
    def add_stats_headers(header: fits.Header, data: np.ndarray) -> fits.Header:
        """Fill out the spec 214 statistics header table."""
        data = data.flatten()
-       percentiles = np.nanpercentile(data, [1, 10, 25, 75, 90, 95, 98, 99])
+       percentiles = np.nanpercentile(data, [1, 2, 5, 10, 25, 75, 90, 95, 98, 99])
        header["DATAMIN"] = np.nanmin(data)
        header["DATAMAX"] = np.nanmax(data)
        header["DATAMEAN"] = np.nanmean(data)
        header["DATAMEDN"] = np.nanmedian(data)
-       header["DATA01"] = percentiles[0]
-       header["DATA10"] = percentiles[1]
-       header["DATA25"] = percentiles[2]
-       header["DATA75"] = percentiles[3]
-       header["DATA90"] = percentiles[4]
-       header["DATA95"] = percentiles[5]
-       header["DATA98"] = percentiles[6]
-       header["DATA99"] = percentiles[7]
+       header["DATAP01"] = percentiles[0]
+       header["DATAP02"] = percentiles[1]
+       header["DATAP05"] = percentiles[2]
+       header["DATAP10"] = percentiles[3]
+       header["DATAP25"] = percentiles[4]
+       header["DATAP75"] = percentiles[5]
+       header["DATAP90"] = percentiles[6]
+       header["DATAP95"] = percentiles[7]
+       header["DATAP98"] = percentiles[8]
+       header["DATAP99"] = percentiles[9]
        header["DATARMS"] = np.sqrt(np.nanmean(data**2))
        header["DATAKURT"] = kurtosis(data, nan_policy="omit")
        header["DATASKEW"] = skew(data, nan_policy="omit")
@@ -214,11 +218,12 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
        header["CADMAX"] = self.constants.maximum_cadence
        header["CADVAR"] = self.constants.variance_cadence
        # Keywords to support reprocessing
-       if ids_par_id := self.metadata_store_input_dataset_parameters.inputDatasetPartId:
-           header["IDSPARID"] = ids_par_id
-       header["IDSOBSID"] = self.metadata_store_input_dataset_observe_frames.inputDatasetPartId
-       if ids_cal_id := self.metadata_store_input_dataset_calibration_frames.inputDatasetPartId:
-           header["IDSCALID"] = ids_cal_id
+       if parameters := self.metadata_store_input_dataset_parameters:
+           header["IDSPARID"] = parameters.inputDatasetPartId
+       if observe_frames := self.metadata_store_input_dataset_observe_frames:
+           header["IDSOBSID"] = observe_frames.inputDatasetPartId
+       if calibration_frames := self.metadata_store_input_dataset_calibration_frames:
+           header["IDSCALID"] = calibration_frames.inputDatasetPartId
        header["WKFLNAME"] = self.workflow_name
        header["WKFLVERS"] = self.workflow_version
        header = self.add_contributing_id_headers(header=header)
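
With the change above, each input-dataset part is guarded as a whole object rather than through its part id, so an absent part (None) simply skips its keyword instead of failing on attribute access. A self-contained sketch of the guard, assuming placeholder objects with an inputDatasetPartId attribute (not the metadata-store API):

from types import SimpleNamespace

def add_part_id_keys(header: dict, parameters=None, observe_frames=None, calibration_frames=None) -> dict:
    # A keyword is written only when the corresponding part exists.
    if parameters:
        header["IDSPARID"] = parameters.inputDatasetPartId
    if observe_frames:
        header["IDSOBSID"] = observe_frames.inputDatasetPartId
    if calibration_frames:
        header["IDSCALID"] = calibration_frames.inputDatasetPartId
    return header

# A missing calibration part leaves IDSCALID out entirely, as the new test below checks:
header = add_part_id_keys({}, parameters=SimpleNamespace(inputDatasetPartId=1),
                          observe_frames=SimpleNamespace(inputDatasetPartId=2))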
@@ -377,7 +382,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
        # Add the dataset headers (abstract - implement in instrument task)
        header = self.add_dataset_headers(header=header, stokes=stokes_param)
        # Remove any headers not contained in spec 214
-       header = sanitize_to_spec214_level1(input_headers=header)
+       header = remove_spec_122_keys_and_spec_214_l0_keys(input_headers=header)
        # Remove any keys referring to axes that don't exist
        header = remove_extra_axis_keys(input_headers=header)
        return header
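
The sanitising helper is renamed to say what it removes: SPEC-122 keys and SPEC-214 level-0-only keys (the new tests below add an IPTASK keyword and check that it is gone from the written L1 header). A purely illustrative sketch of a filter of that kind; the real implementation lives in dkist_header_validator and is driven by the spec tables, and the key set here is a placeholder:

from astropy.io import fits

def drop_level0_only_keys(header: fits.Header, level0_only_keys: set[str]) -> fits.Header:
    # Delete any keyword that belongs only in level-0 headers.
    for key in level0_only_keys & set(header.keys()):
        del header[key]
    return header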
@@ -113,7 +113,7 @@ class CommonDataset(Spec122Dataset):
        self.add_constant_key("TELEVATN", 6.28)
        self.add_constant_key("TAZIMUTH", 3.14)
        self.add_constant_key("TTBLANGL", 1.23)
-       self.add_constant_key("INST_FOO", "bar")
+       self.add_constant_key("VISP_012", "bar")
        self.add_constant_key("DKIST004", "observe")
        self.add_constant_key("ID___005", "ip id")
        self.add_constant_key("PAC__004", "Sapphire Polarizer")
@@ -220,7 +220,7 @@ def test_as_subclass(hdu_with_complete_common_header):
    class InstFitsAccess(L0FitsAccess):
        def __init__(self, hdu, name):
            super().__init__(hdu, name)
-           self.foo: str = self.header["INST_FOO"]
+           self.foo: str = self.header["VISP_012"]
 
    fits_obj = InstFitsAccess(hdu_with_complete_common_header, name="foo")
    assert fits_obj.foo == "bar"
@@ -6,6 +6,7 @@ import pytest
 
 from dkist_processing_common._util.scratch import WorkflowFileSystem
 from dkist_processing_common.codecs.json import json_decoder
+from dkist_processing_common.models.graphql import InputDatasetRecipeRunResponse
 from dkist_processing_common.models.tags import Tag
 from dkist_processing_common.tasks.transfer_input_data import TransferL0Data
 from dkist_processing_common.tests.conftest import create_parameter_files
@@ -17,13 +18,14 @@ class TransferL0DataTask(TransferL0Data):
    ...
 
 
-class FakeGQLClientMissingPart(FakeGQLClient):
+class FakeGQLClientMissingInputDatasetPart(FakeGQLClient):
    """Same metadata mocker with calibration input dataset part missing."""
 
    def execute_gql_query(self, **kwargs):
        original_response = super().execute_gql_query(**kwargs)
-       # Remove calibration frames part
-       del original_response[0].recipeInstance.inputDataset.inputDatasetInputDatasetParts[2]
+       # Remove calibration frames part if getting InputDatasetRecipeRunResponse:
+       if kwargs.get("query_response_cls") == InputDatasetRecipeRunResponse:
+           del original_response[0].recipeInstance.inputDataset.inputDatasetInputDatasetParts[2]
        return original_response
 
 
@@ -53,7 +55,7 @@ def transfer_l0_data_task(recipe_run_id, tmp_path, mocker):
 @pytest.fixture
 def transfer_l0_data_task_missing_part(recipe_run_id, tmp_path, mocker):
    yield from _transfer_l0_data_task_with_client(
-       recipe_run_id, tmp_path, mocker, FakeGQLClientMissingPart
+       recipe_run_id, tmp_path, mocker, FakeGQLClientMissingInputDatasetPart
    )
 
 
@@ -24,6 +24,9 @@ from dkist_processing_common.models.wavelength import WavelengthRange
 from dkist_processing_common.tasks.write_l1 import WriteL1Frame
 from dkist_processing_common.tests.conftest import FakeGQLClient
 from dkist_processing_common.tests.conftest import TILE_SIZE
+from dkist_processing_common.tests.test_transfer_input_data import (
+    FakeGQLClientMissingInputDatasetPart,
+)
 
 
 class FakeGQLClientDefaultRecipeConfiguration(FakeGQLClient):
@@ -132,6 +135,7 @@ def write_l1_task(request, recipe_run_id, tmp_path):
    stokes_params = ["I", "Q", "U", "V"]
    used_stokes_params = []
    hdu = fits.PrimaryHDU(data=np.random.random(size=(1, 128, 128)) * 10, header=header)
+   hdu.header["IPTASK"] = "level0_only key to be removed"
    hdul = fits.HDUList([hdu])
    for i in range(num_of_stokes_params):
        task.write(
@@ -276,6 +280,9 @@ def test_write_l1_frame(
    # Test that FRAMEVOL still has its comment
    assert hdu.header.comments["FRAMEVOL"]
 
+   # Test that 'level0_only' keys are being removed
+   assert "IPTASK" not in hdu.header.keys()
+
 
 def test_replace_header_values(write_l1_task):
    """
@@ -484,6 +491,29 @@ def test_reprocessing_keys(write_l1_task, mocker):
    assert header["PRODUCT"] == task.compute_product_id(header["IDSOBSID"], header["PROCTYPE"])
 
 
+def test_missing_input_dataset_part(write_l1_task, mocker):
+    """
+    :Given: a Write_L1 task with a missing calibration frames part
+    :When: running the task
+    :Then: the input dataset part keys are correctly written without throwing an exception
+    """
+    mocker.patch(
+        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
+        new=FakeGQLClientMissingInputDatasetPart,
+    )
+    task, _, _ = write_l1_task
+    task()
+    files = list(task.read(tags=[Tag.frame(), Tag.output()]))
+    for file in files:
+        header = fits.open(file)[1].header
+        assert header["IDSPARID"] == task.metadata_store_input_dataset_parameters.inputDatasetPartId
+        assert (
+            header["IDSOBSID"]
+            == task.metadata_store_input_dataset_observe_frames.inputDatasetPartId
+        )
+        assert "IDSCALID" not in header
+
+
 @pytest.mark.parametrize(
    "ids_obs_id, proc_type",
    [
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dkist-processing-common
-Version: 10.6.3
+Version: 10.7.0
 Summary: Common task classes used by the DKIST science data processing pipelines
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD-3-Clause
@@ -58,7 +58,7 @@ dkist_processing_common/tasks/teardown.py,sha256=e4LKnphJDYDVDAez2tH7MxpZgCmxYsK
 dkist_processing_common/tasks/transfer_input_data.py,sha256=afEW0glpCFMZRj90nFtQo_4XOQ4CuoOh86jahP6a-a0,5548
 dkist_processing_common/tasks/trial_catalog.py,sha256=Y3DKstRfMS8nWWtJFMB0MUVPlZ1jWS_2jhJGMWwxy50,8748
 dkist_processing_common/tasks/trial_output_data.py,sha256=aI_aRuu0qVO8zFGrr_9baxx9i3jUEHZSmsmbO6ytlkE,6960
-dkist_processing_common/tasks/write_l1.py,sha256=C5IRUX1JO_Wa7suv_tgE4tH1E2eAUkro0rtj9EHduqw,22429
+dkist_processing_common/tasks/write_l1.py,sha256=Jw8zQjlYjBQru3m1SB_ZDmicMkDRiUihy9VdwehvZc0,22683
 dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
 dkist_processing_common/tasks/mixin/globus.py,sha256=QAV8VElxMAqxJ2KSB_bJaraceovYfjHXjOdocrTCkIA,6592
 dkist_processing_common/tasks/mixin/input_dataset.py,sha256=dkW5vf_QPgWedHO_Lf9GjBxr1QrUCKs6gIXufUTi7GE,6813
@@ -69,14 +69,14 @@ dkist_processing_common/tasks/mixin/quality/__init__.py,sha256=Bgu-DHW7yXLiehgll
 dkist_processing_common/tasks/mixin/quality/_base.py,sha256=nZ9IC-O-hsLXa5-tk29B13CZyQIdhJCv0eO9cdkAhWc,8303
 dkist_processing_common/tasks/mixin/quality/_metrics.py,sha256=WenTfa12guIUfm0GzkrK2gduKaOHs03e6RhE6j37Les,54304
 dkist_processing_common/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dkist_processing_common/tests/conftest.py,sha256=B4BtMYCStjy3zvqefwJB6-ntSNOicIyQwfE0YeapCEE,30160
+dkist_processing_common/tests/conftest.py,sha256=h_ObhpXb1S0-db0Je8XoHNLkrXxXW_0B0TMhbnbDjMo,30160
 dkist_processing_common/tests/test_assemble_movie.py,sha256=XY_ruXSYP5k6s2gUAwlFdnhJ81eyWLSd2O9IkX4RXeo,4165
 dkist_processing_common/tests/test_assemble_quality.py,sha256=fWSHK4UdVqgNjvxQuD40NBUnXrtmthUP7PUbISPV4MQ,16897
 dkist_processing_common/tests/test_base.py,sha256=4ST3__jEHitEQaQs9-0OcqtyEJfIjZsk_6PRYZFV2-U,7124
 dkist_processing_common/tests/test_codecs.py,sha256=9Ln8FJs319rbHpCukO9lKLk3aDrdyDREjA4nCHsxDCA,20796
 dkist_processing_common/tests/test_constants.py,sha256=Kc9k5TdYy5QkRRlGav6kfI2dy5HHKqtpf9qOuaAfDZU,5903
 dkist_processing_common/tests/test_cs_step.py,sha256=RA0QD3D8eaL3YSOL_gIJ9wkngy14RQ2jbD-05KAziW4,2408
-dkist_processing_common/tests/test_fits_access.py,sha256=e1B_N13vz6hFCBup6FlGItaH59dAmqXvox7jQuSyOB0,10946
+dkist_processing_common/tests/test_fits_access.py,sha256=aqJ2oBWxEP4PzKfe4fiaGxvfjB9Fvt2f0Owp9XFW-rw,10946
 dkist_processing_common/tests/test_flower_pot.py,sha256=X9_UI3maa3ZQncV3jYHgovWnawDsdEkEB5vw6EAB96o,3151
 dkist_processing_common/tests/test_input_dataset.py,sha256=AI5uqaDea4kOwpwAU5qQdzUbxMpBwD20YCAvB7nzD5o,18766
 dkist_processing_common/tests/test_interservice_bus.py,sha256=M_iv2CLmx5TnCB1VUN4YjkQ2LEUjfCKk7-ZlkV62XEQ,3000
@@ -95,12 +95,12 @@ dkist_processing_common/tests/test_tags.py,sha256=UwlOJ45rkvbfbd5L5m5YltvOxQc8kG
 dkist_processing_common/tests/test_task_name.py,sha256=kqFr59XX2K87xzfTlClzDV4-Je1dx72LvdaJ22UE8UU,1233
 dkist_processing_common/tests/test_task_parsing.py,sha256=QXt1X6DTO3_liBD2c-t84DToLeEn7B3J-eteIyN4HEM,4027
 dkist_processing_common/tests/test_teardown.py,sha256=w2sATQHkg2lMLvm6VFZF1mNGFYHwWj_SxvF9RQu-tuY,5362
-dkist_processing_common/tests/test_transfer_input_data.py,sha256=pys5JI-alVEsN4nFE6KDLrAfvLOAH5lSHHpkruLR6lE,6390
+dkist_processing_common/tests/test_transfer_input_data.py,sha256=kE-FQTcN9nft5bh2Rhtp-8ldCTvGXTvWFcsNm6DY7lk,6619
 dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=27PifkyH3RZg0nsM-AjmrFJ-hbYuCk5Tt_0Zx8PJBfM,2109
 dkist_processing_common/tests/test_trial_catalog.py,sha256=SZ-nyn0MXU9Lkg_94FbKER_cwiGoi06GYlzF_3AmvKg,6802
 dkist_processing_common/tests/test_trial_output_data.py,sha256=cBCj0kXyF5NEMzKh6zPVksdoXyE8ju1opJgWgjdcJWA,12790
 dkist_processing_common/tests/test_workflow_task_base.py,sha256=Z5aPW5LQtS0UWJiYho4X0r-2gPLfzpkmMwfmaoFLjMg,10517
-dkist_processing_common/tests/test_write_l1.py,sha256=bIQd95Dhb8aHxCtOXV7Az4Fy_OeEIlspoSktdUR5lRA,20794
+dkist_processing_common/tests/test_write_l1.py,sha256=R_ljJdSC4hCHS_mLV6rKE7PVfpDWCaMXoN6n45QJNWM,21951
 docs/Makefile,sha256=qnlVz6PuBqE39NfHWuUnHhNEA-EFgT2-WJNNNy9ttfk,4598
 docs/changelog.rst,sha256=S2jPASsWlQxSlAPqdvNrYvhk9k3FcFWNXFNDYXBSjl4,120
 docs/conf.py,sha256=FkX575cqTqZGCcLAjg2MlvE8Buj1Vt3CpHNgZxG256E,1890
@@ -109,7 +109,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
 docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
 docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
 licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
-dkist_processing_common-10.6.3.dist-info/METADATA,sha256=e_NApWqP_nEbasza1gENhO_v3h2w8yBNjcjbuk4HBWY,7154
-dkist_processing_common-10.6.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-dkist_processing_common-10.6.3.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
-dkist_processing_common-10.6.3.dist-info/RECORD,,
+dkist_processing_common-10.7.0.dist-info/METADATA,sha256=4YcNp2PrDZuC5FpxD_2Wr9Zo92EqvXbUYr6GC9t-QEY,7154
+dkist_processing_common-10.7.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+dkist_processing_common-10.7.0.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+dkist_processing_common-10.7.0.dist-info/RECORD,,