dkist-processing-vbi 1.21.1__py3-none-any.whl → 1.25.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. dkist_processing_vbi/__init__.py +1 -0
  2. dkist_processing_vbi/config.py +1 -0
  3. dkist_processing_vbi/models/constants.py +1 -0
  4. dkist_processing_vbi/models/filter.py +1 -0
  5. dkist_processing_vbi/models/parameters.py +1 -0
  6. dkist_processing_vbi/models/tags.py +1 -0
  7. dkist_processing_vbi/parsers/mosaic_repeats.py +1 -0
  8. dkist_processing_vbi/parsers/spatial_step_pattern.py +2 -1
  9. dkist_processing_vbi/parsers/vbi_l0_fits_access.py +1 -0
  10. dkist_processing_vbi/parsers/vbi_l1_fits_access.py +1 -0
  11. dkist_processing_vbi/tasks/__init__.py +1 -0
  12. dkist_processing_vbi/tasks/assemble_movie.py +5 -3
  13. dkist_processing_vbi/tasks/dark.py +5 -6
  14. dkist_processing_vbi/tasks/gain.py +5 -4
  15. dkist_processing_vbi/tasks/make_movie_frames.py +5 -6
  16. dkist_processing_vbi/tasks/parse.py +3 -2
  17. dkist_processing_vbi/tasks/process_summit_processed.py +2 -1
  18. dkist_processing_vbi/tasks/quality_metrics.py +3 -2
  19. dkist_processing_vbi/tasks/science.py +5 -4
  20. dkist_processing_vbi/tasks/vbi_base.py +2 -2
  21. dkist_processing_vbi/tasks/write_l1.py +6 -0
  22. dkist_processing_vbi/tests/conftest.py +24 -7
  23. dkist_processing_vbi/tests/local_trial_workflows/l0_to_l1.py +12 -25
  24. dkist_processing_vbi/tests/test_assemble_movie.py +6 -4
  25. dkist_processing_vbi/tests/test_dark.py +3 -4
  26. dkist_processing_vbi/tests/test_gain.py +4 -5
  27. dkist_processing_vbi/tests/test_make_movie_frames.py +4 -5
  28. dkist_processing_vbi/tests/test_parse_l0.py +19 -14
  29. dkist_processing_vbi/tests/test_parse_summit.py +8 -6
  30. dkist_processing_vbi/tests/test_process_summit.py +6 -7
  31. dkist_processing_vbi/tests/test_science.py +4 -5
  32. dkist_processing_vbi/tests/test_vbi_constants.py +1 -2
  33. dkist_processing_vbi/tests/test_workflows.py +1 -0
  34. dkist_processing_vbi/tests/test_write_l1.py +5 -4
  35. dkist_processing_vbi/workflows/__init__.py +1 -0
  36. dkist_processing_vbi/workflows/l0_processing.py +4 -1
  37. dkist_processing_vbi/workflows/summit_data_processing.py +2 -1
  38. dkist_processing_vbi/workflows/trial_workflows.py +1 -0
  39. dkist_processing_vbi-1.25.2.dist-info/METADATA +536 -0
  40. dkist_processing_vbi-1.25.2.dist-info/RECORD +64 -0
  41. {dkist_processing_vbi-1.21.1.dist-info → dkist_processing_vbi-1.25.2.dist-info}/WHEEL +1 -1
  42. docs/conf.py +1 -0
  43. dkist_processing_vbi-1.21.1.dist-info/METADATA +0 -446
  44. dkist_processing_vbi-1.21.1.dist-info/RECORD +0 -64
  45. {dkist_processing_vbi-1.21.1.dist-info → dkist_processing_vbi-1.25.2.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
1
1
  """Package providing support classes and methods used by all workflow tasks."""
2
+
2
3
  from importlib.metadata import PackageNotFoundError
3
4
  from importlib.metadata import version
4
5
 
@@ -1,4 +1,5 @@
1
1
  """Configuration for the dkist-processing-vbi package and the logging thereof."""
2
+
2
3
  from dkist_processing_common.config import DKISTProcessingCommonConfiguration
3
4
 
4
5
 
@@ -1,4 +1,5 @@
1
1
  """VBI additions to common constants."""
2
+
2
3
  from enum import Enum
3
4
 
4
5
  from dkist_processing_common.models.constants import ConstantsBase
@@ -1,4 +1,5 @@
1
1
  """VBI filter list and tooling."""
2
+
2
3
  import astropy.units as u
3
4
  from dkist_processing_common.models.wavelength import WavelengthRange
4
5
 
@@ -1,4 +1,5 @@
1
1
  """VBI calibration pipeline parameters."""
2
+
2
3
  from dkist_processing_common.models.parameters import ParameterBase
3
4
 
4
5
 
@@ -1,4 +1,5 @@
1
1
  """VBI tags."""
2
+
2
3
  from enum import Enum
3
4
 
4
5
  from dkist_processing_common.models.tags import Tag
@@ -1,4 +1,5 @@
1
1
  """Stems for organizing files based on their Mosaic repeat number."""
2
+
2
3
  from __future__ import annotations
3
4
 
4
5
  from abc import ABC
@@ -1,4 +1,5 @@
1
1
  """Bud for checking that the spatial step pattern (VBISTPAT) matches expectations."""
2
+
2
3
  from dkist_processing_common.models.task_name import TaskName
3
4
  from dkist_processing_common.parsers.unique_bud import TaskUniqueBud
4
5
 
@@ -23,7 +24,7 @@ class SpatialStepPatternBud(TaskUniqueBud):
23
24
  super().__init__(
24
25
  constant_name=VbiBudName.spatial_step_pattern.value,
25
26
  metadata_key="spatial_step_pattern",
26
- ip_task_type=TaskName.observe.value,
27
+ ip_task_types=TaskName.observe.value,
27
28
  )
28
29
 
29
30
  def getter(self, key):
@@ -1,4 +1,5 @@
1
1
  """VBI FITS access for L0 data."""
2
+
2
3
  from astropy.io import fits
3
4
  from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
4
5
 
@@ -1,4 +1,5 @@
1
1
  """VBI FITS access for L1 data."""
2
+
2
3
  from astropy.io import fits
3
4
  from dkist_processing_common.parsers.l1_fits_access import L1FitsAccess
4
5
 
@@ -1,4 +1,5 @@
1
1
  """init."""
2
+
2
3
  from dkist_processing_vbi.tasks.assemble_movie import *
3
4
  from dkist_processing_vbi.tasks.dark import *
4
5
  from dkist_processing_vbi.tasks.gain import *
@@ -1,11 +1,13 @@
1
1
  """VBI-specific assemble movie task subclass."""
2
+
2
3
  from typing import Type
3
4
 
4
5
  import numpy as np
5
6
  from astropy.visualization import ZScaleInterval
7
+ from dkist_processing_common.codecs.json import json_decoder
6
8
  from dkist_processing_common.models.constants import ConstantsBase
9
+ from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
7
10
  from dkist_processing_common.tasks import AssembleMovie
8
- from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetMixin
9
11
  from matplotlib import colormaps
10
12
  from PIL import ImageDraw
11
13
 
@@ -17,7 +19,7 @@ from dkist_processing_vbi.parsers.vbi_l1_fits_access import VbiL1FitsAccess
17
19
  __all__ = ["AssembleVbiMovie"]
18
20
 
19
21
 
20
- class AssembleVbiMovie(AssembleMovie, InputDatasetMixin):
22
+ class AssembleVbiMovie(AssembleMovie):
21
23
  """
22
24
  Class for assembling pre-made movie frames (as FITS/numpy) into an mp4 movie file.
23
25
 
@@ -45,7 +47,7 @@ class AssembleVbiMovie(AssembleMovie, InputDatasetMixin):
45
47
  workflow_version=workflow_version,
46
48
  )
47
49
  self.parameters = VbiParameters(
48
- self.input_dataset_parameters,
50
+ scratch=self.scratch,
49
51
  obs_ip_start_time=self.constants.obs_ip_start_time,
50
52
  )
51
53
 
@@ -1,4 +1,5 @@
1
1
  """VBI dark calibration task."""
2
+
2
3
  from dkist_processing_common.codecs.fits import fits_array_decoder
3
4
  from dkist_processing_common.codecs.fits import fits_array_encoder
4
5
  from dkist_processing_common.models.task_name import TaskName
@@ -46,7 +47,7 @@ class DarkCalibration(VbiTaskBase, QualityMixin):
46
47
  set(self.constants.gain_exposure_times + self.constants.observe_exposure_times)
47
48
  )
48
49
  logger.info(f"{target_exp_times = }")
49
- with self.apm_task_step(
50
+ with self.telemetry_span(
50
51
  f"Calculating dark frames for {self.constants.num_spatial_steps} steps and {len(target_exp_times)} exp times",
51
52
  ):
52
53
  total_dark_frames_used = 0
@@ -67,16 +68,14 @@ class DarkCalibration(VbiTaskBase, QualityMixin):
67
68
  auto_squeeze=False,
68
69
  )
69
70
 
70
- with self.apm_processing_step(
71
- f"Processing dark for {step = } and {exp_time = }"
72
- ):
71
+ with self.telemetry_span(f"Processing dark for {step = } and {exp_time = }"):
73
72
  logger.info(f"averaging arrays for step {step}")
74
73
  averaged_dark_array = average_numpy_arrays(input_dark_arrays)
75
74
  logger.info(
76
75
  f"average dark signal in step {step} = {averaged_dark_array.mean():.3e}"
77
76
  )
78
77
 
79
- with self.apm_writing_step(
78
+ with self.telemetry_span(
80
79
  f"Writing intermediate dark for {step = } and {exp_time = }",
81
80
  ):
82
81
  self.write(
@@ -88,7 +87,7 @@ class DarkCalibration(VbiTaskBase, QualityMixin):
88
87
  encoder=fits_array_encoder,
89
88
  )
90
89
 
91
- with self.apm_processing_step("Computing and logging quality metrics"):
90
+ with self.telemetry_span("Computing and logging quality metrics"):
92
91
  no_of_raw_dark_frames: int = self.count(tags=[VbiTag.input(), VbiTag.task_dark_frame()])
93
92
  unused_count = no_of_raw_dark_frames - total_dark_frames_used
94
93
  self.quality_store_task_type_counts(
@@ -1,4 +1,5 @@
1
1
  """VBI gain task."""
2
+
2
3
  import numpy as np
3
4
  from dkist_processing_common.codecs.fits import fits_array_decoder
4
5
  from dkist_processing_common.codecs.fits import fits_array_encoder
@@ -52,7 +53,7 @@ class GainCalibration(VbiTaskBase, QualityMixin):
52
53
  # This is OK (tm) because this will be, at most, 9 4k x 4k arrays. This is a lot (~1G), but not too much.
53
54
  step_gain_dict: dict = {}
54
55
 
55
- with self.apm_processing_step(
56
+ with self.telemetry_span(
56
57
  f"Collecting and reducing gain arrays from {self.constants.num_spatial_steps} steps and {len(self.constants.gain_exposure_times)} exp times",
57
58
  ):
58
59
  for exp_time in self.constants.gain_exposure_times:
@@ -99,13 +100,13 @@ class GainCalibration(VbiTaskBase, QualityMixin):
99
100
  self.total_counts += np.nansum(dark_subtracted_gain_array)
100
101
  step_gain_dict[step] = dark_subtracted_gain_array
101
102
 
102
- with self.apm_processing_step("normalizing gain arrays"):
103
+ with self.telemetry_span("normalizing gain arrays"):
103
104
  normalized_array_dict = self.normalize_fov(step_gain_dict)
104
105
 
105
- with self.apm_writing_step("writing gain arrays to disk"):
106
+ with self.telemetry_span("writing gain arrays to disk"):
106
107
  self.write_gain_calibration(normalized_array_dict)
107
108
 
108
- with self.apm_processing_step("Computing and logging quality metrics"):
109
+ with self.telemetry_span("Computing and logging quality metrics"):
109
110
  no_of_raw_gain_frames: int = self.count(
110
111
  tags=[
111
112
  VbiTag.input(),
@@ -1,4 +1,5 @@
1
1
  """VBI movie frame creation."""
2
+
2
3
  import numpy as np
3
4
  import scipy.ndimage as spnd
4
5
  from astropy.io import fits
@@ -43,15 +44,15 @@ class MakeVbiMovieFrames(VbiTaskBase):
43
44
  None
44
45
 
45
46
  """
46
- with self.apm_processing_step("averaging exposures"):
47
+ with self.telemetry_span("averaging exposures"):
47
48
  self.average_all_exposures()
48
49
 
49
- with self.apm_processing_step("stitching full FOV frames"):
50
+ with self.telemetry_span("stitching full FOV frames"):
50
51
  for mosaic in range(1, self.constants.num_mosaic_repeats + 1):
51
52
  logger.info(f"stitching full FOV for mosaic repeat {mosaic}")
52
53
  output_hdl = self.stitch_single_mosaic_repeat(mosaic)
53
54
 
54
- with self.apm_writing_step(
55
+ with self.telemetry_span(
55
56
  f"writing stitched movie frame for mosaic repeat {mosaic}"
56
57
  ):
57
58
  self.write(
@@ -144,9 +145,7 @@ class MakeVbiMovieFrames(VbiTaskBase):
144
145
  output = np.zeros((size_y, size_x))
145
146
  px_count = np.zeros((size_y, size_x))
146
147
 
147
- with self.apm_processing_step(
148
- f"stitching all camera positions together for {mosaic_repeat=}"
149
- ):
148
+ with self.telemetry_span(f"stitching all camera positions together for {mosaic_repeat=}"):
150
149
  for o in all_step_access:
151
150
  logger.info(f"Placing position {o.current_spatial_step} into full frame")
152
151
  self.place_pos_in_full_fov(o, ref_header, output, px_count)
@@ -1,4 +1,5 @@
1
1
  """VBI parse task."""
2
+
2
3
  from dkist_processing_common.models.tags import StemName
3
4
  from dkist_processing_common.models.task_name import TaskName
4
5
  from dkist_processing_common.parsers.single_value_single_key_flower import (
@@ -55,10 +56,10 @@ class ParseL0VbiInputData(ParseL0InputDataBase):
55
56
  ObsIpStartTimeBud(),
56
57
  TotalMosaicRepeatsBud(),
57
58
  TaskExposureTimesBud(
58
- VbiBudName.gain_exposure_times.value, ip_task_type=TaskName.gain.value
59
+ VbiBudName.gain_exposure_times.value, ip_task_types=TaskName.gain.value
59
60
  ),
60
61
  TaskExposureTimesBud(
61
- VbiBudName.observe_exposure_times.value, ip_task_type=TaskName.observe.value
62
+ VbiBudName.observe_exposure_times.value, ip_task_types=TaskName.observe.value
62
63
  ),
63
64
  ]
64
65
 
@@ -1,4 +1,5 @@
1
1
  """Repackage VBI data already calibrated before receipt at the Data Center."""
2
+
2
3
  from astropy.io import fits
3
4
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
4
5
 
@@ -33,7 +34,7 @@ class GenerateL1SummitData(VbiTaskBase):
33
34
  - Write out
34
35
  """
35
36
  # This loop is how we ensure that only completed mosaics get processed.
36
- with self.apm_task_step("Re-tagging INPUT observe frames as CALIBRATED"):
37
+ with self.telemetry_span("Re-tagging INPUT observe frames as CALIBRATED"):
37
38
  for mosaic in range(1, self.constants.num_mosaic_repeats + 1):
38
39
  for step in range(1, self.constants.num_spatial_steps + 1):
39
40
  for file_name in self.read(
@@ -1,4 +1,5 @@
1
1
  """VBI specific quality metrics."""
2
+
2
3
  from typing import Iterable
3
4
 
4
5
  from dkist_processing_common.codecs.fits import fits_access_decoder
@@ -61,10 +62,10 @@ class VbiQualityL1Metrics(VbiTaskBase, QualityMixin):
61
62
  )
62
63
  datetimes = []
63
64
  noise_values = []
64
- with self.apm_processing_step("Calculating VBI L1 quality metrics"):
65
+ with self.telemetry_span("Calculating VBI L1 quality metrics"):
65
66
  for frame in frames:
66
67
  datetimes.append(frame.time_obs)
67
68
  noise_values.append(self.avg_noise(frame.data))
68
69
 
69
- with self.apm_processing_step("Sending lists for storage"):
70
+ with self.telemetry_span("Sending lists for storage"):
70
71
  self.quality_store_noise(datetimes=datetimes, values=noise_values)
@@ -1,4 +1,5 @@
1
1
  """VBI science task."""
2
+
2
3
  from astropy.io import fits
3
4
  from dkist_processing_common.codecs.fits import fits_access_decoder
4
5
  from dkist_processing_common.codecs.fits import fits_array_decoder
@@ -48,7 +49,7 @@ class ScienceCalibration(VbiTaskBase, QualityMixin):
48
49
  logger.info(
49
50
  f"Starting science with {self.constants.num_spatial_steps} steps and {self.constants.num_mosaic_repeats} mosaic repeats"
50
51
  )
51
- with self.apm_task_step(
52
+ with self.telemetry_span(
52
53
  f"Reducing science frames from {self.constants.num_spatial_steps} steps and {self.constants.num_mosaic_repeats} mosaic repeats",
53
54
  ):
54
55
  for exp_time in self.constants.observe_exposure_times:
@@ -88,7 +89,7 @@ class ScienceCalibration(VbiTaskBase, QualityMixin):
88
89
  auto_squeeze=False,
89
90
  )
90
91
 
91
- with self.apm_processing_step("dark and gain corrections"):
92
+ with self.telemetry_span("dark and gain corrections"):
92
93
  logger.info(f"subtracting dark from {apm_str}")
93
94
  sci_access = subtract_array_from_fits_access(
94
95
  access_objs=sci_access, array_to_subtract=dark_calibration_array
@@ -99,13 +100,13 @@ class ScienceCalibration(VbiTaskBase, QualityMixin):
99
100
  access_objs=sci_access, array_to_divide_by=gain_calibration_array
100
101
  )
101
102
 
102
- with self.apm_writing_step("writing calibrated science frames"):
103
+ with self.telemetry_span("writing calibrated science frames"):
103
104
  for i, access_obj in enumerate(sci_access):
104
105
  exp_num = access_obj.current_mosaic_step_exp
105
106
  logger.info(f"Writing output for {apm_str} and {exp_num = }")
106
107
  self.write_calibrated_fits_obj(access_obj, mosaic, step)
107
108
 
108
- with self.apm_processing_step("Computing and logging quality metrics"):
109
+ with self.telemetry_span("Computing and logging quality metrics"):
109
110
  no_of_raw_obs_frames: int = self.count(
110
111
  tags=[
111
112
  VbiTag.input(),
@@ -1,13 +1,13 @@
1
1
  """VBI base task."""
2
+
2
3
  from abc import ABC
3
4
 
4
5
  from dkist_processing_common.tasks import WorkflowTaskBase
5
- from dkist_processing_common.tasks.mixin.input_dataset import InputDatasetMixin
6
6
 
7
7
  from dkist_processing_vbi.models.constants import VbiConstants
8
8
 
9
9
 
10
- class VbiTaskBase(WorkflowTaskBase, InputDatasetMixin, ABC):
10
+ class VbiTaskBase(WorkflowTaskBase, ABC):
11
11
  """
12
12
  Task class for base VBI tasks.
13
13
 
@@ -1,4 +1,5 @@
1
1
  """VBI Write L1 task."""
2
+
2
3
  from typing import Literal
3
4
  from typing import Type
4
5
 
@@ -13,6 +14,7 @@ from dkist_processing_common.tasks.write_l1 import WriteL1Frame
13
14
  from dkist_service_configuration.logging import logger
14
15
 
15
16
  from dkist_processing_vbi.models.constants import VbiConstants
17
+ from dkist_processing_vbi.models.filter import VBI_FILTERS
16
18
  from dkist_processing_vbi.models.filter import find_associated_filter
17
19
 
18
20
  __all__ = ["VbiWriteL1Frame"]
@@ -160,3 +162,7 @@ class VbiWriteL1Frame(WriteL1Frame):
160
162
  """
161
163
  vbi_filter = find_associated_filter(wavelength=header["LINEWAV"] * u.nm)
162
164
  return WavelengthRange(min=vbi_filter.min, max=vbi_filter.max)
165
+
166
+ def get_waveband(self, wavelength: u.Quantity, wavelength_range: WavelengthRange) -> str:
167
+ """Get the name of the filter that includes the given wavelength."""
168
+ return find_associated_filter(wavelength=wavelength).name
@@ -13,7 +13,10 @@ from dkist_data_simulator.dataset import key_function
13
13
  from dkist_data_simulator.spec122 import Spec122Dataset
14
14
  from dkist_header_validator.translator import sanitize_to_spec214_level1
15
15
  from dkist_header_validator.translator import translate_spec122_to_spec214_l0
16
+ from dkist_processing_common.codecs.basemodel import basemodel_encoder
17
+ from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
16
18
  from dkist_processing_common.models.task_name import TaskName
19
+ from dkist_processing_common.tests.mock_metadata_store import fake_gql_client
17
20
 
18
21
  from dkist_processing_vbi.models.constants import VbiConstants
19
22
  from dkist_processing_vbi.models.parameters import VbiParameters
@@ -93,6 +96,10 @@ class VbiS122Headers(Spec122Dataset):
93
96
  self.add_constant_key("VBI__003", num_steps)
94
97
  self.add_constant_key("VBI__007", num_exp_per_step)
95
98
  self.add_constant_key("DKIST008", DKIST008_value)
99
+ self.add_constant_key("ID___014", "v1") # hls_version
100
+ self.add_constant_key("TELTRACK", "Fixed Solar Rotation Tracking")
101
+ self.add_constant_key("TTBLTRCK", "fixed angle on sun")
102
+ self.add_constant_key("TELSCAN", "Raster")
96
103
 
97
104
  @key_function("VBI__004")
98
105
  def spatial_step(self, key: str) -> int:
@@ -120,6 +127,13 @@ class Vbi122DarkFrames(VbiS122Headers):
120
127
  array_shape, num_steps=num_steps, num_exp_per_step=num_exp_per_step, num_dsps_repeats=1
121
128
  )
122
129
  self.add_constant_key("DKIST004", TaskName.dark.value)
130
+ self.add_constant_key("PAC__002", "clear")
131
+ self.add_constant_key("PAC__003", "off")
132
+ self.add_constant_key("PAC__004", "clear")
133
+ self.add_constant_key("PAC__005", "10.")
134
+ self.add_constant_key("PAC__006", "clear")
135
+ self.add_constant_key("PAC__007", "20.")
136
+ self.add_constant_key("PAC__008", "FieldStop (5arcmin)")
123
137
 
124
138
 
125
139
  class Vbi122GainFrames(VbiS122Headers):
@@ -297,7 +311,7 @@ def testing_obs_ip_start_time() -> str:
297
311
  def input_dataset_document_simple_parameters_part():
298
312
  """Convert a dataclass of parameterValues into an actual input dataset parameters part."""
299
313
 
300
- def get_input_dataset_parameters_part(parameter_values: dataclass):
314
+ def make_input_dataset_parameters_part(parameter_values: dataclass):
301
315
  parameters_list = []
302
316
  value_id = randint(1000, 2000)
303
317
  for pn, pv in asdict(parameter_values).items():
@@ -312,7 +326,7 @@ def input_dataset_document_simple_parameters_part():
312
326
  parameters_list.append(parameter)
313
327
  return parameters_list
314
328
 
315
- return get_input_dataset_parameters_part
329
+ return make_input_dataset_parameters_part
316
330
 
317
331
 
318
332
  @pytest.fixture(scope="session")
@@ -325,12 +339,15 @@ def assign_input_dataset_doc_to_task(
325
339
  parameter_class=VbiParameters,
326
340
  obs_ip_start_time=testing_obs_ip_start_time,
327
341
  ):
328
- doc_path = task.scratch.workflow_base_path / "dataset_parameters.json"
329
- with open(doc_path, "w") as f:
330
- f.write(json.dumps(input_dataset_document_simple_parameters_part(parameter_values)))
331
- task.tag(doc_path, VbiTag.input_dataset_parameters())
342
+ task.write(
343
+ data=InputDatasetPartDocumentList(
344
+ doc_list=input_dataset_document_simple_parameters_part(parameter_values)
345
+ ),
346
+ tags=VbiTag.input_dataset_parameters(),
347
+ encoder=basemodel_encoder,
348
+ )
332
349
  task.parameters = parameter_class(
333
- task.input_dataset_parameters,
350
+ scratch=task.scratch,
334
351
  obs_ip_start_time=obs_ip_start_time,
335
352
  )
336
353
 
@@ -10,7 +10,9 @@ import numpy as np
10
10
  from astropy.io import fits
11
11
  from dkist_header_validator import spec122_validator
12
12
  from dkist_header_validator import spec214_validator
13
+ from dkist_processing_common.codecs.basemodel import basemodel_encoder
13
14
  from dkist_processing_common.manual import ManualProcessing
15
+ from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
14
16
  from dkist_processing_common.tasks import AssembleQualityData
15
17
  from dkist_processing_common.tasks import CreateTrialAsdf
16
18
  from dkist_processing_common.tasks import CreateTrialQualityReport
@@ -100,11 +102,16 @@ def translate_task(summit_processed: bool = False, suffix: str = "FITS"):
100
102
 
101
103
  class CreateInputDatasetParameterDocument(WorkflowTaskBase):
102
104
  def run(self) -> None:
103
- doc_path = self.scratch.workflow_base_path / "input_dataset_parameters.json"
104
- with open(doc_path, "w") as f:
105
- f.write(json.dumps(self.input_dataset_document_simple_parameters_part))
106
- self.tag(doc_path, VbiTag.input_dataset_parameters())
107
- logger.info(f"Wrote input dataset doc to {doc_path}")
105
+ relative_path = "input_dataset_parameters.json"
106
+ self.write(
107
+ data=InputDatasetPartDocumentList(
108
+ doc_list=self.input_dataset_document_simple_parameters_part
109
+ ),
110
+ relative_path=relative_path,
111
+ tags=VbiTag.input_dataset_parameters(),
112
+ encoder=basemodel_encoder,
113
+ )
114
+ logger.info(f"Wrote input dataset parameter doc to {relative_path}")
108
115
 
109
116
  @property
110
117
  def input_dataset_document_simple_parameters_part(self):
@@ -139,21 +146,6 @@ class ValidateL1Output(VbiTaskBase):
139
146
  spec214_validator.validate(f, extra=False)
140
147
 
141
148
 
142
- def setup_APM_config() -> None:
143
- mesh_config = {
144
- "system-monitoring-log-apm": {
145
- "mesh_address": "system-monitoring-log-apm.service.sim.consul",
146
- "mesh_port": 8200,
147
- },
148
- "automated-processing-scratch-inventory": {"mesh_address": "localhost", "mesh_port": 6379},
149
- "internal-api-gateway": {"mesh_address": "localhost", "mesh_port": 80},
150
- }
151
- apm_options = {"TRANSACTION_MAX_SPANS": 10000}
152
- os.environ["MESH_CONFIG"] = json.dumps(mesh_config)
153
- os.environ["ELASTIC_APM_ENABLED"] = "true"
154
- os.environ["ELASTIC_APM_OTHER_OPTIONS"] = json.dumps(apm_options)
155
-
156
-
157
149
  def l0_pipeline_workflow(manual_processing_run: ManualProcessing) -> None:
158
150
  manual_processing_run.run_task(task=ShowExposureTimes)
159
151
  manual_processing_run.run_task(task=VbiQualityL0Metrics)
@@ -174,10 +166,7 @@ def main(
174
166
  skip_movie: bool = False,
175
167
  only_translate: bool = False,
176
168
  science_workflow_name: str = "l0_processing",
177
- use_apm: bool = False,
178
169
  ):
179
- if use_apm:
180
- setup_APM_config()
181
170
  science_func_dict = {
182
171
  "l0_pipeline": l0_pipeline_workflow,
183
172
  "summit_data_processed": summit_data_processing_workflow,
@@ -258,7 +247,6 @@ if __name__ == "__main__":
258
247
  "-t", "--only-translate", help="Do ONLY the translation step", action="store_true"
259
248
  )
260
249
  parser.add_argument("-M", "--skip-movie", help="Skip making output movie", action="store_true")
261
- parser.add_argument("-A", "--use-apm", help="Send APM spans to SIM", action="store_true")
262
250
  args = parser.parse_args()
263
251
 
264
252
  sys.exit(
@@ -270,6 +258,5 @@ if __name__ == "__main__":
270
258
  only_translate=args.only_translate,
271
259
  skip_movie=args.skip_movie,
272
260
  science_workflow_name=args.workflow_name,
273
- use_apm=args.use_apm,
274
261
  )
275
262
  )
@@ -2,14 +2,13 @@ import numpy as np
2
2
  import pytest
3
3
  from dkist_processing_common._util.scratch import WorkflowFileSystem
4
4
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
5
- from dkist_processing_common.tests.conftest import FakeGQLClient
6
5
 
7
6
  from dkist_processing_vbi.models.tags import VbiTag
8
7
  from dkist_processing_vbi.tasks.assemble_movie import AssembleVbiMovie
9
- from dkist_processing_vbi.tests.conftest import generate_214_l1_fits_frame
10
8
  from dkist_processing_vbi.tests.conftest import Vbi122ObserveFrames
11
9
  from dkist_processing_vbi.tests.conftest import VbiConstantsDb
12
10
  from dkist_processing_vbi.tests.conftest import VbiInputDatasetParameterValues
11
+ from dkist_processing_vbi.tests.conftest import generate_214_l1_fits_frame
13
12
 
14
13
 
15
14
  @pytest.fixture(scope="function")
@@ -49,10 +48,13 @@ def assemble_task_with_tagged_movie_frames(tmp_path, recipe_run_id, init_vbi_con
49
48
 
50
49
 
51
50
  def test_assemble_movie(
52
- assemble_task_with_tagged_movie_frames, mocker, assign_input_dataset_doc_to_task
51
+ assemble_task_with_tagged_movie_frames,
52
+ mocker,
53
+ assign_input_dataset_doc_to_task,
54
+ fake_gql_client,
53
55
  ):
54
56
  mocker.patch(
55
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
57
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
56
58
  )
57
59
  task = assemble_task_with_tagged_movie_frames
58
60
  assign_input_dataset_doc_to_task(task, VbiInputDatasetParameterValues())
@@ -7,13 +7,12 @@ from dkist_processing_common._util.scratch import WorkflowFileSystem
7
7
  from dkist_processing_common.codecs.fits import fits_array_decoder
8
8
  from dkist_processing_common.codecs.fits import fits_array_encoder
9
9
  from dkist_processing_common.models.tags import Tag
10
- from dkist_processing_common.tests.conftest import FakeGQLClient
11
10
 
12
11
  from dkist_processing_vbi.models.tags import VbiTag
13
12
  from dkist_processing_vbi.tasks.dark import DarkCalibration
14
- from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
15
13
  from dkist_processing_vbi.tests.conftest import Vbi122DarkFrames
16
14
  from dkist_processing_vbi.tests.conftest import VbiConstantsDb
15
+ from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
17
16
 
18
17
 
19
18
  @pytest.fixture(scope="function")
@@ -65,14 +64,14 @@ def dark_calibration_task(tmp_path, recipe_run_id, init_vbi_constants_db):
65
64
  task._purge()
66
65
 
67
66
 
68
- def test_dark_calibration_task(dark_calibration_task, mocker):
67
+ def test_dark_calibration_task(dark_calibration_task, mocker, fake_gql_client):
69
68
  """
70
69
  Given: a set of parsed input dark frames and a DarkCalibration task
71
70
  When: running the task
72
71
  Then: a single output array is produced for each spatial step and the array values are correct
73
72
  """
74
73
  mocker.patch(
75
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
74
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
76
75
  )
77
76
  dark_calibration_task()
78
77
 
@@ -7,14 +7,13 @@ from dkist_processing_common.codecs.fits import fits_array_decoder
7
7
  from dkist_processing_common.codecs.fits import fits_array_encoder
8
8
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
9
9
  from dkist_processing_common.models.tags import Tag
10
- from dkist_processing_common.tests.conftest import FakeGQLClient
11
10
 
12
11
  from dkist_processing_vbi.models.tags import VbiTag
13
12
  from dkist_processing_vbi.tasks.gain import GainCalibration
14
- from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
15
- from dkist_processing_vbi.tests.conftest import generate_214_l0_fits_frame
16
13
  from dkist_processing_vbi.tests.conftest import Vbi122GainFrames
17
14
  from dkist_processing_vbi.tests.conftest import VbiConstantsDb
15
+ from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
16
+ from dkist_processing_vbi.tests.conftest import generate_214_l0_fits_frame
18
17
 
19
18
 
20
19
  @pytest.fixture(scope="function")
@@ -65,14 +64,14 @@ def gain_calibration_task(tmp_path, recipe_run_id, init_vbi_constants_db):
65
64
  task._purge()
66
65
 
67
66
 
68
- def test_gain_calibration(gain_calibration_task, mocker):
67
+ def test_gain_calibration(gain_calibration_task, mocker, fake_gql_client):
69
68
  """
70
69
  Given: a set of parsed input gain frames, dark calibration frames, and a GainCalibration task
71
70
  When: the task is run
72
71
  Then: a single array is produced for each step and the array values are correctly normalized
73
72
  """
74
73
  mocker.patch(
75
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
74
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
76
75
  )
77
76
  gain_calibration_task()
78
77
 
@@ -9,16 +9,15 @@ from dkist_processing_common.codecs.fits import fits_access_decoder
9
9
  from dkist_processing_common.codecs.fits import fits_array_decoder
10
10
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
11
11
  from dkist_processing_common.models.fits_access import FitsAccessBase
12
- from dkist_processing_common.tests.conftest import FakeGQLClient
13
12
 
14
13
  from dkist_processing_vbi.models.constants import VbiConstants
15
14
  from dkist_processing_vbi.models.tags import VbiTag
16
15
  from dkist_processing_vbi.parsers.vbi_l1_fits_access import VbiL1FitsAccess
17
16
  from dkist_processing_vbi.tasks.make_movie_frames import MakeVbiMovieFrames
18
- from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
19
- from dkist_processing_vbi.tests.conftest import generate_214_l1_fits_frame
20
17
  from dkist_processing_vbi.tests.conftest import Vbi122ObserveFrames
21
18
  from dkist_processing_vbi.tests.conftest import VbiConstantsDb
19
+ from dkist_processing_vbi.tests.conftest import ensure_all_inputs_used
20
+ from dkist_processing_vbi.tests.conftest import generate_214_l1_fits_frame
22
21
 
23
22
 
24
23
  @pytest.fixture(scope="function")
@@ -178,9 +177,9 @@ def compute_expected_mosiac(modification: int):
178
177
  return expected
179
178
 
180
179
 
181
- def test_make_movie_frames_task(raw_make_movie_frames_task, mocker):
180
+ def test_make_movie_frames_task(raw_make_movie_frames_task, mocker, fake_gql_client):
182
181
  mocker.patch(
183
- "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=FakeGQLClient
182
+ "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient", new=fake_gql_client
184
183
  )
185
184
 
186
185
  raw_make_movie_frames_task()