dkist-processing-common 10.8.1rc1__py3-none-any.whl → 10.8.2__py3-none-any.whl

This diff shows the changes between these two publicly released package versions as they appear in their public registry and is provided for informational purposes only.
@@ -295,10 +295,14 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
      header["SOLARRAD"] = self.calculate_solar_angular_radius(obstime=obstime)
      header["SPECSYS"] = "TOPOCENT" # no wavelength correction made due to doppler velocity
      header["VELOSYS"] = 0.0 # no wavelength correction made due to doppler velocity
-     header["WAVEBAND"] = get_closest_spectral_line(wavelength=header["LINEWAV"] * u.nm).name
      wavelength_range = self.get_wavelength_range(header=header)
      header["WAVEMIN"] = wavelength_range.min.to_value(u.nm)
      header["WAVEMAX"] = wavelength_range.max.to_value(u.nm)
+     waveband: str | None = self.get_waveband(
+         wavelength=header["LINEWAV"] * u.nm, wavelength_range=wavelength_range
+     )
+     if waveband:
+         header["WAVEBAND"] = waveband
      return header

  def l1_filename(self, header: fits.Header, stokes: Literal["I", "Q", "U", "V"]):
@@ -515,3 +519,18 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
      if header.get("AO_LOCK") is not True:
          header.pop("ATMOS_R0", None)
      return header
+
+ @staticmethod
+ def get_waveband(wavelength: u.Quantity, wavelength_range: WavelengthRange) -> str | None:
+     """
+     Get the spectral line information of the closest spectral line to the wavelength argument.
+
+     If the spectral line rest wavelength in air does not fall in the wavelength range of the data,
+     do not populate the keyword.
+     """
+     print(wavelength_range)
+     closest_line = get_closest_spectral_line(wavelength=wavelength)
+     rest_wavelength = closest_line.rest_wavelength_in_air
+     if rest_wavelength < wavelength_range.min or rest_wavelength > wavelength_range.max:
+         return None
+     return closest_line.name
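The new `get_waveband` helper only reports a waveband when the closest catalogued line's rest wavelength in air falls inside the data's [WAVEMIN, WAVEMAX] range; otherwise it returns None and the caller leaves WAVEBAND unset. Below is a minimal, self-contained sketch of that guard, using hypothetical stand-ins for the package's WavelengthRange and spectral-line objects (not the package's own classes); the real method differs only in deriving the closest line via get_closest_spectral_line.

    from collections import namedtuple

    import astropy.units as u

    # Hypothetical stand-ins for the package's WavelengthRange and spectral-line objects,
    # used here only to illustrate the guard above.
    WavelengthRange = namedtuple("WavelengthRange", ["min", "max"])
    SpectralLine = namedtuple("SpectralLine", ["name", "rest_wavelength_in_air"])

    def waveband_or_none(closest_line: SpectralLine, wavelength_range: WavelengthRange) -> str | None:
        # Mirror of the logic above: only return a line name when its air rest
        # wavelength lies inside the data's wavelength range.
        rest_wavelength = closest_line.rest_wavelength_in_air
        if rest_wavelength < wavelength_range.min or rest_wavelength > wavelength_range.max:
            return None
        return closest_line.name

    line = SpectralLine(name="Fe XIII", rest_wavelength_in_air=1074.7 * u.nm)
    print(waveband_or_none(line, WavelengthRange(min=1070 * u.nm, max=1080 * u.nm)))  # Fe XIII
    print(waveband_or_none(line, WavelengthRange(min=500 * u.nm, max=600 * u.nm)))    # None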
@@ -45,6 +45,7 @@ from dkist_processing_common.models.graphql import RecipeRunStatusResponse
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
  from dkist_processing_common.tasks import WorkflowTaskBase
+ from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetMixin

  TILE_SIZE = 64

@@ -358,7 +359,7 @@ class FakeGQLClient:
  {
      "parameterValueId": 1,
      "parameterValue": json.dumps([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
-     "parameterValueStartDate": datetime(2000, 1, 1).isoformat(),
+     "parameterValueStartDate": "2000-01-01",
  }
  ],
  },
@@ -375,7 +376,7 @@ class FakeGQLClient:
  }
  }
  ),
- "parameterValueStartDate": datetime(2000, 1, 1).isoformat(),
+ "parameterValueStartDate": "2000-01-01",
  },
  {
      "parameterValueId": 3,
@@ -387,7 +388,7 @@ class FakeGQLClient:
  }
  }
  ),
- "parameterValueStartDate": datetime(2000, 1, 2).isoformat(),
+ "parameterValueStartDate": "2000-01-02",
  },
  ],
  },
@@ -399,7 +400,7 @@ class FakeGQLClient:
  "parameterValue": json.dumps(
      {"a": 1, "b": 3.14159, "c": "foo", "d": [1, 2, 3]}
  ),
- "parameterValueStartDate": datetime(2000, 1, 1).isoformat(),
+ "parameterValueStartDate": "2000-01-01",
  }
  ],
  },
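For context on the fixture change repeated in the four hunks above: the old value was a full ISO 8601 timestamp, while the new value is a bare date string, so the fake metadata-store responses now carry date-only parameterValueStartDate values (the diff does not state the motivation). A quick illustration of the two forms:

    from datetime import datetime

    print(datetime(2000, 1, 1).isoformat())      # old fixture value: "2000-01-01T00:00:00"
    print("2000-01-01")                          # new fixture value: a date-only string
    print(datetime.fromisoformat("2000-01-01"))  # both forms still parse: 2000-01-01 00:00:00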
@@ -795,7 +796,7 @@ def post_fit_polcal_fitter(
      return fitter


- class InputDatasetTask(WorkflowTaskBase):
+ class InputDatasetTask(WorkflowTaskBase, InputDatasetMixin):
      def run(self):
          pass

@@ -823,7 +824,7 @@ def task_with_input_dataset(
      task.scratch.workflow_base_path = tmp_path / str(recipe_run_id)
      for part, tag in input_dataset_parts:
          file_path = task.scratch.workflow_base_path / Path(f"{uuid4().hex[:6]}.ext")
-         file_path.write_text(data=json.dumps({"doc_list": part}))
+         file_path.write_text(data=json.dumps(part))
          task.tag(path=file_path, tags=tag)
      yield task

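The `task_with_input_dataset` fixture above now writes each input-dataset part to disk as-is instead of wrapping it in a `doc_list` envelope. An illustration of the resulting file contents, using a hypothetical part document:

    import json

    part = [{"bucket": "bucket-name", "object_keys": ["key1", "key2"]}]  # hypothetical part document

    print(json.dumps({"doc_list": part}))  # old on-disk form: {"doc_list": [...]}
    print(json.dumps(part))                # new on-disk form: [...]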
@@ -850,24 +851,6 @@ def create_parameter_files(
      task.tag(path=file_path, tags=Tag.parameter(param_path))


- def create_input_frames(
-     task: WorkflowTaskBase,
-     input_frame_docs: list[dict] = FakeGQLClient.observe_frames_doc_object
-     + FakeGQLClient.calibration_frames_doc_object,
- ):
-     """
-     Create the observe and calibration frame files specified in the input dataset documents
-     returned by the metadata store.
-     """
-     for frame in input_frame_docs:
-         for object_key in frame["object_keys"]:
-             file_path = task.scratch.workflow_base_path / Path(object_key)
-             if not file_path.parent.exists():
-                 file_path.parent.mkdir(parents=True, exist_ok=True)
-             file_path.write_text(data="")
-             task.tag(path=file_path, tags=[Tag.frame(), Tag.input()])
-
-
  @pytest.fixture()
  def fake_constants_db() -> dict:
      """
@@ -19,15 +19,10 @@ from astropy.io.fits import CompImageHDU
  from astropy.io.fits import HDUList
  from astropy.io.fits import Header
  from astropy.io.fits import PrimaryHDU
- from pydantic import BaseModel
- from pydantic import create_model
- from pydantic import Field

  from dkist_processing_common.codecs.asdf import asdf_decoder
  from dkist_processing_common.codecs.asdf import asdf_encoder
  from dkist_processing_common.codecs.asdf import asdf_fileobj_encoder
- from dkist_processing_common.codecs.basemodel import basemodel_decoder
- from dkist_processing_common.codecs.basemodel import basemodel_encoder
  from dkist_processing_common.codecs.bytes import bytes_decoder
  from dkist_processing_common.codecs.bytes import bytes_encoder
  from dkist_processing_common.codecs.fits import fits_access_decoder
@@ -105,14 +100,6 @@ def path_to_json(dictionary, tmp_file) -> Path:
      return tmp_file


- @pytest.fixture
- def pydantic_basemodel() -> BaseModel:
-     class Foo(BaseModel):
-         bar: int
-
-     return Foo(bar=123)
-
-
  @pytest.fixture
  def string() -> str:
      return "string"
@@ -369,7 +356,6 @@ class DummyFitsAccess(FitsAccessBase):
  pytest.param("primary_hdu_list", fits_hdulist_encoder, id="fits uncompressed HDUList"),
  pytest.param("compressed_hdu_list", fits_hdulist_encoder, id="fits compressed HDUList"),
  pytest.param("dictionary", json_encoder, id="json"),
- pytest.param("pydantic_basemodel", basemodel_encoder, id="pydantic basemodel"),
  pytest.param("string", str_encoder, id="str"),
  pytest.param("asdf_tree", asdf_encoder, id="asdf"),
  pytest.param("asdf_obj", asdf_fileobj_encoder, id="asdf_obj"),
@@ -614,30 +600,6 @@ def test_json_encoder_invalid(python_object: Any, expected_exception_type: type[
      json_encoder(python_object)


- def test_basemodel_decoder(valid_json_codec, path_to_text_file):
-     """
-     Given: a python object that can be validated to a Pydantic BaseModel object is written to file as json
-     When: basemodel decoding is applied to the json file
-     Then: the string gets decoded to the correct Pydantic BaseModel object
-     """
-     # write python object to file as json string
-     python_object = valid_json_codec["python_object"]
-     path = path_to_text_file(json.dumps({"foo": python_object}))
-
-     # create basemodel on the fly
-     DynamicBaseModel = create_model(
-         "DynamicBaseModel", foo=(Any, Field(default_factory=type(python_object)))
-     )
-
-     # get the same object via the basemodel decoder
-     decoded_obj = basemodel_decoder(path, model=DynamicBaseModel)
-     if python_object is nan:
-         # By definition, nan != nan
-         assert isnan(decoded_obj.foo)
-     else:
-         assert decoded_obj.foo == python_object
-
-
  def test_quality_data_encoder_valid(valid_quality_codec):
      """
      Given: a python object that can be encoded as a json string