dkist-processing-common 11.2.1rc3__py3-none-any.whl → 11.3.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. changelog/265.feature.1.rst +3 -0
  2. changelog/265.feature.rst +4 -0
  3. changelog/265.misc.rst +2 -0
  4. dkist_processing_common/__init__.py +1 -0
  5. dkist_processing_common/_util/constants.py +1 -0
  6. dkist_processing_common/_util/graphql.py +1 -0
  7. dkist_processing_common/_util/scratch.py +1 -1
  8. dkist_processing_common/_util/tags.py +1 -0
  9. dkist_processing_common/codecs/array.py +1 -0
  10. dkist_processing_common/codecs/asdf.py +1 -0
  11. dkist_processing_common/codecs/basemodel.py +1 -0
  12. dkist_processing_common/codecs/bytes.py +1 -0
  13. dkist_processing_common/codecs/fits.py +1 -0
  14. dkist_processing_common/codecs/iobase.py +1 -0
  15. dkist_processing_common/codecs/json.py +1 -0
  16. dkist_processing_common/codecs/path.py +1 -0
  17. dkist_processing_common/codecs/quality.py +1 -1
  18. dkist_processing_common/codecs/str.py +1 -0
  19. dkist_processing_common/config.py +1 -0
  20. dkist_processing_common/manual.py +1 -1
  21. dkist_processing_common/models/constants.py +43 -28
  22. dkist_processing_common/models/dkist_location.py +1 -1
  23. dkist_processing_common/models/fits_access.py +57 -0
  24. dkist_processing_common/models/flower_pot.py +1 -0
  25. dkist_processing_common/models/graphql.py +2 -1
  26. dkist_processing_common/models/input_dataset.py +3 -2
  27. dkist_processing_common/models/message.py +1 -1
  28. dkist_processing_common/models/message_queue_binding.py +1 -1
  29. dkist_processing_common/models/metric_code.py +1 -0
  30. dkist_processing_common/models/parameters.py +1 -1
  31. dkist_processing_common/models/quality.py +1 -0
  32. dkist_processing_common/models/tags.py +3 -1
  33. dkist_processing_common/models/task_name.py +3 -2
  34. dkist_processing_common/models/wavelength.py +2 -1
  35. dkist_processing_common/parsers/cs_step.py +3 -2
  36. dkist_processing_common/parsers/dsps_repeat.py +6 -4
  37. dkist_processing_common/parsers/experiment_id_bud.py +6 -2
  38. dkist_processing_common/parsers/id_bud.py +7 -3
  39. dkist_processing_common/parsers/l0_fits_access.py +5 -3
  40. dkist_processing_common/parsers/l1_fits_access.py +23 -21
  41. dkist_processing_common/parsers/near_bud.py +6 -2
  42. dkist_processing_common/parsers/proposal_id_bud.py +4 -2
  43. dkist_processing_common/parsers/quality.py +1 -0
  44. dkist_processing_common/parsers/retarder.py +5 -3
  45. dkist_processing_common/parsers/single_value_single_key_flower.py +7 -1
  46. dkist_processing_common/parsers/task.py +8 -6
  47. dkist_processing_common/parsers/time.py +20 -15
  48. dkist_processing_common/parsers/unique_bud.py +6 -2
  49. dkist_processing_common/parsers/wavelength.py +5 -3
  50. dkist_processing_common/tasks/__init__.py +3 -2
  51. dkist_processing_common/tasks/assemble_movie.py +1 -0
  52. dkist_processing_common/tasks/base.py +1 -0
  53. dkist_processing_common/tasks/l1_output_data.py +1 -1
  54. dkist_processing_common/tasks/mixin/globus.py +1 -1
  55. dkist_processing_common/tasks/mixin/interservice_bus.py +1 -0
  56. dkist_processing_common/tasks/mixin/metadata_store.py +1 -1
  57. dkist_processing_common/tasks/mixin/object_store.py +1 -0
  58. dkist_processing_common/tasks/mixin/quality/__init__.py +1 -0
  59. dkist_processing_common/tasks/mixin/quality/_base.py +1 -0
  60. dkist_processing_common/tasks/mixin/quality/_metrics.py +1 -0
  61. dkist_processing_common/tasks/output_data_base.py +1 -0
  62. dkist_processing_common/tasks/parse_l0_input_data.py +6 -4
  63. dkist_processing_common/tasks/quality_metrics.py +1 -1
  64. dkist_processing_common/tasks/teardown.py +1 -1
  65. dkist_processing_common/tasks/transfer_input_data.py +1 -1
  66. dkist_processing_common/tasks/trial_catalog.py +3 -2
  67. dkist_processing_common/tasks/trial_output_data.py +1 -0
  68. dkist_processing_common/tasks/write_l1.py +19 -8
  69. dkist_processing_common/tests/conftest.py +1 -0
  70. dkist_processing_common/tests/mock_metadata_store.py +2 -3
  71. dkist_processing_common/tests/test_assemble_movie.py +0 -1
  72. dkist_processing_common/tests/test_codecs.py +2 -2
  73. dkist_processing_common/tests/test_constants.py +15 -0
  74. dkist_processing_common/tests/test_fits_access.py +62 -7
  75. dkist_processing_common/tests/test_interservice_bus.py +1 -0
  76. dkist_processing_common/tests/test_interservice_bus_mixin.py +1 -0
  77. dkist_processing_common/tests/test_manual_processing.py +1 -2
  78. dkist_processing_common/tests/test_output_data_base.py +1 -2
  79. dkist_processing_common/tests/test_parameters.py +1 -1
  80. dkist_processing_common/tests/test_parse_l0_input_data.py +23 -24
  81. dkist_processing_common/tests/test_quality.py +1 -0
  82. dkist_processing_common/tests/test_scratch.py +2 -1
  83. dkist_processing_common/tests/test_stems.py +31 -22
  84. dkist_processing_common/tests/test_tags.py +1 -0
  85. dkist_processing_common/tests/test_task_parsing.py +17 -7
  86. dkist_processing_common/tests/test_teardown.py +1 -1
  87. dkist_processing_common/tests/test_transfer_input_data.py +2 -3
  88. dkist_processing_common/tests/test_trial_catalog.py +1 -0
  89. dkist_processing_common/tests/test_trial_output_data.py +1 -1
  90. dkist_processing_common/tests/test_workflow_task_base.py +1 -2
  91. dkist_processing_common/tests/test_write_l1.py +8 -10
  92. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.1rc1.dist-info}/METADATA +2 -2
  93. dkist_processing_common-11.3.1rc1.dist-info/RECORD +125 -0
  94. docs/conf.py +1 -0
  95. changelog/262.misc.rst +0 -1
  96. dkist_processing_common-11.2.1rc3.dist-info/RECORD +0 -123
  97. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.1rc1.dist-info}/WHEEL +0 -0
  98. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.1rc1.dist-info}/top_level.txt +0 -0
dkist_processing_common/parsers/l1_fits_access.py
@@ -1,7 +1,9 @@
 """By-frame 214 L1 only header keywords that are not instrument specific."""
+
 from astropy.io import fits

 from dkist_processing_common.models.fits_access import FitsAccessBase
+from dkist_processing_common.models.fits_access import MetadataKey


 class L1FitsAccess(FitsAccessBase):
@@ -26,31 +28,31 @@ class L1FitsAccess(FitsAccessBase):
     ):
         super().__init__(hdu=hdu, name=name, auto_squeeze=auto_squeeze)

-        self.elevation: float = self.header["ELEV_ANG"]
-        self.azimuth: float = self.header["TAZIMUTH"]
-        self.table_angle: float = self.header["TTBLANGL"]
-        self.gos_level3_status: str = self.header["LVL3STAT"]
-        self.gos_level3_lamp_status: str = self.header["LAMPSTAT"]
-        self.gos_polarizer_status: str = self.header["LVL2STAT"]
-        self.gos_retarder_status: str = self.header["LVL1STAT"]
-        self.gos_level0_status: str = self.header["LVL0STAT"]
-        self.time_obs: str = self.header["DATE-BEG"]
-        self.ip_id: str = self.header["IP_ID"]
-        self.instrument: str = self.header["INSTRUME"]
-        self.wavelength: float = self.header["LINEWAV"]
-        self.proposal_id: str = self.header["PROP_ID"]
-        self.experiment_id: str = self.header["EXPER_ID"]
-        self.num_dsps_repeats: int = self.header["DSPSREPS"]
-        self.current_dsps_repeat: int = self.header["DSPSNUM"]
-        self.fpa_exposure_time_ms: float = self.header["XPOSURE"]
-        self.sensor_readout_exposure_time_ms: float = self.header["TEXPOSUR"]
-        self.num_raw_frames_per_fpa: int = self.header["NSUMEXP"]
+        self._set_metadata_key_value(MetadataKey.elevation)
+        self._set_metadata_key_value(MetadataKey.azimuth)
+        self._set_metadata_key_value(MetadataKey.table_angle)
+        self._set_metadata_key_value(MetadataKey.gos_level3_status)
+        self._set_metadata_key_value(MetadataKey.gos_level3_lamp_status)
+        self._set_metadata_key_value(MetadataKey.gos_polarizer_status)
+        self._set_metadata_key_value(MetadataKey.gos_retarder_status)
+        self._set_metadata_key_value(MetadataKey.gos_level0_status)
+        self._set_metadata_key_value(MetadataKey.time_obs)
+        self._set_metadata_key_value(MetadataKey.ip_id)
+        self._set_metadata_key_value(MetadataKey.instrument)
+        self._set_metadata_key_value(MetadataKey.wavelength)
+        self._set_metadata_key_value(MetadataKey.proposal_id)
+        self._set_metadata_key_value(MetadataKey.experiment_id)
+        self._set_metadata_key_value(MetadataKey.num_dsps_repeats)
+        self._set_metadata_key_value(MetadataKey.current_dsps_repeat)
+        self._set_metadata_key_value(MetadataKey.fpa_exposure_time_ms)
+        self._set_metadata_key_value(MetadataKey.sensor_readout_exposure_time_ms)
+        self._set_metadata_key_value(MetadataKey.num_raw_frames_per_fpa)

     @property
     def gos_polarizer_angle(self) -> float:
         """Convert the polarizer angle to a float if possible before returning."""
         try:
-            return float(self.header["POLANGLE"])
+            return float(self.header[MetadataKey.gos_polarizer_angle])
         except ValueError:
             return -999  # The angle is only used if the polarizer is in the beam

@@ -58,6 +60,6 @@ class L1FitsAccess(FitsAccessBase):
     def gos_retarder_angle(self) -> float:
         """Convert the retarder angle to a float if possible before returning."""
         try:
-            return float(self.header["RETANGLE"])
+            return float(self.header[MetadataKey.gos_retarder_angle])
         except ValueError:
             return -999  # The angle is only used if the retarder is in the beam
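The `_set_metadata_key_value` helper and the `MetadataKey` enum used above are introduced in dkist_processing_common/models/fits_access.py (+57 lines; that hunk is not shown in this excerpt). Based purely on the usage visible here, the pattern appears to map an enum member's name to an attribute on the access object and its value to the FITS keyword. A minimal, self-contained sketch of that idea follows; the member names, keyword values, class names, and helper signature are illustrative assumptions, not the package's actual definitions.

# Minimal sketch of the MetadataKey pattern inferred from the hunk above.
# The real definitions live in dkist_processing_common/models/fits_access.py;
# everything named *Sketch here is a hypothetical stand-in.
from enum import StrEnum

from astropy.io import fits


class MetadataKeySketch(StrEnum):
    """Member name = attribute on the access object, member value = FITS keyword."""

    elevation = "ELEV_ANG"
    wavelength = "LINEWAV"
    gos_retarder_angle = "RETANGLE"


class FitsAccessSketch:
    def __init__(self, hdu: fits.ImageHDU):
        self.header = hdu.header
        self._set_metadata_key_value(MetadataKeySketch.elevation)
        self._set_metadata_key_value(MetadataKeySketch.wavelength)

    def _set_metadata_key_value(self, key: MetadataKeySketch) -> None:
        # StrEnum members behave as their string values, so indexing the header
        # with the member reads the FITS keyword; .name becomes the attribute name.
        setattr(self, key.name, self.header[key])


header = fits.Header()
header["ELEV_ANG"] = 12.5
header["LINEWAV"] = 630.2
access = FitsAccessSketch(fits.ImageHDU(header=header))
print(access.elevation, access.wavelength)  # 12.5 630.2

Because the enum subclasses StrEnum, a member can be used directly as a header index (as in `self.header[MetadataKey.gos_retarder_angle]` above) while its `.name` doubles as the attribute name.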
dkist_processing_common/parsers/near_bud.py
@@ -1,4 +1,6 @@
 """Pre-made flower that reads a single header key from all files and raises a ValueError if the values are not in a supplied range."""
+
+from enum import StrEnum
 from statistics import mean
 from typing import Callable

@@ -29,10 +31,12 @@ class NearFloatBud(Stem):
     def __init__(
         self,
         constant_name: str,
-        metadata_key: str,
+        metadata_key: str | StrEnum,
         tolerance: float,
     ):
         super().__init__(stem_name=constant_name)
+        if isinstance(metadata_key, StrEnum):
+            metadata_key = metadata_key.name
         self.metadata_key = metadata_key
         self.tolerance = tolerance

@@ -97,7 +101,7 @@ class TaskNearFloatBud(NearFloatBud):
     def __init__(
         self,
         constant_name: str,
-        metadata_key: str,
+        metadata_key: str | StrEnum,
         ip_task_types: str | list[str],
         tolerance: float,
         task_type_parsing_function: Callable = passthrough_header_ip_task,
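As this hunk shows, the bud and flower constructors in this release widen `metadata_key` from `str` to `str | StrEnum` and immediately normalize enum members to their `.name`, so downstream attribute lookups on the FITS access objects keep receiving ordinary strings. A small sketch of that normalization, using a hypothetical enum member:

# Sketch of the str | StrEnum normalization added to the constructors above.
# MetadataKeySketch is a hypothetical stand-in for the package's MetadataKey.
from enum import StrEnum


class MetadataKeySketch(StrEnum):
    wavelength = "LINEWAV"


def normalize_metadata_key(metadata_key: str | StrEnum) -> str:
    # Enum members collapse to their .name so that attribute lookups such as
    # getattr(fits_obj, metadata_key) keep using plain attribute names.
    if isinstance(metadata_key, StrEnum):
        metadata_key = metadata_key.name
    return metadata_key


print(normalize_metadata_key("wavelength"))                  # wavelength
print(normalize_metadata_key(MetadataKeySketch.wavelength))  # wavelength

Callers that still pass bare strings therefore keep working unchanged.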
dkist_processing_common/parsers/proposal_id_bud.py
@@ -1,5 +1,7 @@
 """Proposal Id parser."""
+
 from dkist_processing_common.models.constants import BudName
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.parsers.id_bud import ContributingIdsBud
 from dkist_processing_common.parsers.id_bud import IdBud

@@ -8,7 +10,7 @@ class ProposalIdBud(IdBud):
     """Class to create a Bud for the proposal_id."""

     def __init__(self):
-        super().__init__(constant_name=BudName.proposal_id.value, metadata_key="proposal_id")
+        super().__init__(constant_name=BudName.proposal_id, metadata_key=MetadataKey.proposal_id)


 class ContributingProposalIdsBud(ContributingIdsBud):
@@ -16,5 +18,5 @@ class ContributingProposalIdsBud(ContributingIdsBud):

     def __init__(self):
         super().__init__(
-            stem_name=BudName.contributing_proposal_ids.value, metadata_key="proposal_id"
+            stem_name=BudName.contributing_proposal_ids, metadata_key=MetadataKey.proposal_id
         )
dkist_processing_common/parsers/quality.py
@@ -1,4 +1,5 @@
 """Support classes to define object attributes from header information."""
+
 from astropy.io import fits

 from dkist_processing_common.parsers.l0_fits_access import L1FitsAccess
dkist_processing_common/parsers/retarder.py
@@ -1,5 +1,7 @@
 """Bud that parses the name of the retarder used during POLCAL task observations."""
+
 from dkist_processing_common.models.constants import BudName
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.unique_bud import TaskUniqueBud

@@ -18,9 +20,9 @@ class RetarderNameBud(TaskUniqueBud):

     def __init__(self):
         super().__init__(
-            constant_name=BudName.retarder_name.value,
-            metadata_key="gos_retarder_status",
-            ip_task_types=TaskName.polcal.value,
+            constant_name=BudName.retarder_name,
+            metadata_key=MetadataKey.gos_retarder_status,
+            ip_task_types=TaskName.polcal,
         )

     def getter(self, key) -> str:
dkist_processing_common/parsers/single_value_single_key_flower.py
@@ -1,4 +1,8 @@
 """Pre-made flower that produces tag based on a single header key."""
+
+from enum import StrEnum
+
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess

@@ -15,8 +19,10 @@ class SingleValueSingleKeyFlower(Stem):
         The metadata key
     """

-    def __init__(self, tag_stem_name: str, metadata_key: str):
+    def __init__(self, tag_stem_name: str, metadata_key: str | StrEnum):
         super().__init__(stem_name=tag_stem_name)
+        if isinstance(metadata_key, StrEnum):
+            metadata_key = metadata_key.name
         self.metadata_key = metadata_key

     def setter(self, fits_obj: L0FitsAccess):
dkist_processing_common/parsers/task.py
@@ -1,8 +1,10 @@
 """Module for parsing IP task related things."""
+
 from typing import Callable
 from typing import Type

 from dkist_processing_common.models.fits_access import FitsAccessBase
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.models.tags import StemName
 from dkist_processing_common.models.task_name import TaskName
@@ -35,9 +37,9 @@ def parse_header_ip_task_with_gains(fits_obj: FitsAccessBase) -> str:
         and fits_obj.gos_level3_status == "lamp"
         and fits_obj.gos_level3_lamp_status == "on"
     ):
-        return TaskName.lamp_gain.value
+        return TaskName.lamp_gain
    if fits_obj.ip_task_type == "gain" and fits_obj.gos_level3_status == "clear":
-        return TaskName.solar_gain.value
+        return TaskName.solar_gain

    # Everything else is unchanged
    return passthrough_header_ip_task(fits_obj)
@@ -58,14 +60,14 @@ def parse_polcal_task_type(fits_obj: Type[FitsAccessBase]) -> str | Type[Spilled
         and fits_obj.gos_retarder_status == "clear"
         and fits_obj.gos_polarizer_status == "clear"
     ):
-        return TaskName.polcal_dark.value
+        return TaskName.polcal_dark

     elif (
         fits_obj.gos_level0_status.startswith("FieldStop")
         and fits_obj.gos_retarder_status == "clear"
         and fits_obj.gos_polarizer_status == "clear"
     ):
-        return TaskName.polcal_gain.value
+        return TaskName.polcal_gain

     # We don't care about a POLCAL frame that is neither dark nor clear
     return SpilledDirt
@@ -77,7 +79,7 @@ class TaskTypeFlower(SingleValueSingleKeyFlower):
     def __init__(
         self, header_task_parsing_func: Callable[[FitsAccessBase], str] = passthrough_header_ip_task
     ):
-        super().__init__(tag_stem_name=StemName.task.value, metadata_key="ip_task_type")
+        super().__init__(tag_stem_name=StemName.task, metadata_key=MetadataKey.ip_task_type)
         self.header_parsing_function = header_task_parsing_func

     def setter(self, fits_obj: FitsAccessBase):
@@ -102,7 +104,7 @@ class PolcalTaskFlower(SingleValueSingleKeyFlower):
     """

     def __init__(self):
-        super().__init__(tag_stem_name=StemName.task.value, metadata_key="ip_task_type")
+        super().__init__(tag_stem_name=StemName.task, metadata_key=MetadataKey.ip_task_type)

     def setter(self, fits_obj: FitsAccessBase):
         """
dkist_processing_common/parsers/time.py
@@ -1,6 +1,8 @@
 """Time parser."""
+
 from datetime import datetime
 from datetime import timezone
+from enum import StrEnum
 from typing import Callable
 from typing import Hashable
 from typing import Type
@@ -8,6 +10,7 @@ from typing import Type
 import numpy as np

 from dkist_processing_common.models.constants import BudName
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.models.tags import EXP_TIME_ROUND_DIGITS
@@ -27,9 +30,9 @@ class ObsIpStartTimeBud(TaskUniqueBud):

     def __init__(self):
         super().__init__(
-            constant_name=BudName.obs_ip_start_time.value,
-            metadata_key="ip_start_time",
-            ip_task_types=TaskName.observe.value,
+            constant_name=BudName.obs_ip_start_time,
+            metadata_key=MetadataKey.ip_start_time,
+            ip_task_types=TaskName.observe,
         )


@@ -37,7 +40,7 @@ class CadenceBudBase(UniqueBud):
     """Base class for all Cadence Buds."""

     def __init__(self, constant_name: str):
-        super().__init__(constant_name, metadata_key="time_obs")
+        super().__init__(constant_name, metadata_key=MetadataKey.time_obs)

     def setter(self, fits_obj: L0FitsAccess) -> float | Type[SpilledDirt]:
         """
@@ -64,7 +67,7 @@ class AverageCadenceBud(CadenceBudBase):
     """Class for the average cadence Bud."""

     def __init__(self):
-        super().__init__(constant_name=BudName.average_cadence.value)
+        super().__init__(constant_name=BudName.average_cadence)

     def getter(self, key) -> np.float64:
         """
@@ -86,7 +89,7 @@ class MaximumCadenceBud(CadenceBudBase):
     """Class for the maximum cadence bud."""

     def __init__(self):
-        super().__init__(constant_name=BudName.maximum_cadence.value)
+        super().__init__(constant_name=BudName.maximum_cadence)

     def getter(self, key) -> np.float64:
         """
@@ -108,7 +111,7 @@ class MinimumCadenceBud(CadenceBudBase):
     """Class for the minimum cadence bud."""

     def __init__(self):
-        super().__init__(constant_name=BudName.minimum_cadence.value)
+        super().__init__(constant_name=BudName.minimum_cadence)

     def getter(self, key) -> np.float64:
         """
@@ -130,7 +133,7 @@ class VarianceCadenceBud(CadenceBudBase):
     """Class for the variance cadence Bud."""

     def __init__(self):
-        super().__init__(constant_name=BudName.variance_cadence.value)
+        super().__init__(constant_name=BudName.variance_cadence)

     def getter(self, key) -> np.float64:
         """
@@ -171,7 +174,7 @@ class ExposureTimeFlower(TimeFlowerBase):

     def __init__(self):
         super().__init__(
-            tag_stem_name=StemName.exposure_time.value, metadata_key="fpa_exposure_time_ms"
+            tag_stem_name=StemName.exposure_time, metadata_key=MetadataKey.fpa_exposure_time_ms
         )


@@ -180,8 +183,8 @@ class ReadoutExpTimeFlower(TimeFlowerBase):

     def __init__(self):
         super().__init__(
-            tag_stem_name=StemName.readout_exp_time.value,
-            metadata_key="sensor_readout_exposure_time_ms",
+            tag_stem_name=StemName.readout_exp_time,
+            metadata_key=MetadataKey.sensor_readout_exposure_time_ms,
         )


@@ -212,15 +215,17 @@ class TaskTimeBudBase(Stem):
     def __init__(
         self,
         stem_name: str,
-        metadata_key: str,
+        metadata_key: str | StrEnum,
         ip_task_types: str | list[str],
         header_task_parsing_func: Callable = passthrough_header_ip_task,
     ):
         super().__init__(stem_name=stem_name)

+        if isinstance(metadata_key, StrEnum):
+            metadata_key = metadata_key.name
+        self.metadata_key = metadata_key
         if isinstance(ip_task_types, str):
             ip_task_types = [ip_task_types]
-        self.metadata_key = metadata_key
         self.ip_task_types = [task.casefold() for task in ip_task_types]
         self.header_parsing_function = header_task_parsing_func

@@ -251,7 +256,7 @@ class TaskExposureTimesBud(TaskTimeBudBase):
     ):
         super().__init__(
             stem_name=stem_name,
-            metadata_key="fpa_exposure_time_ms",
+            metadata_key=MetadataKey.fpa_exposure_time_ms,
             ip_task_types=ip_task_types,
             header_task_parsing_func=header_task_parsing_func,
         )
@@ -268,7 +273,7 @@ class TaskReadoutExpTimesBud(TaskTimeBudBase):
     ):
         super().__init__(
             stem_name=stem_name,
-            metadata_key="sensor_readout_exposure_time_ms",
+            metadata_key=MetadataKey.sensor_readout_exposure_time_ms,
             ip_task_types=ip_task_types,
             header_task_parsing_func=header_task_parsing_func,
         )
dkist_processing_common/parsers/unique_bud.py
@@ -1,4 +1,6 @@
 """Pre-made flower that reads a single header key from all files and raises a ValueError if it is not unique."""
+
+from enum import StrEnum
 from typing import Callable

 from dkist_processing_common.models.flower_pot import SpilledDirt
@@ -23,9 +25,11 @@ class UniqueBud(Stem):
     def __init__(
         self,
         constant_name: str,
-        metadata_key: str,
+        metadata_key: str | StrEnum,
     ):
         super().__init__(stem_name=constant_name)
+        if isinstance(metadata_key, StrEnum):
+            metadata_key = metadata_key.name
         self.metadata_key = metadata_key

     def setter(self, fits_obj: L0FitsAccess):
@@ -84,7 +88,7 @@ class TaskUniqueBud(UniqueBud):
     def __init__(
         self,
         constant_name: str,
-        metadata_key: str,
+        metadata_key: str | StrEnum,
         ip_task_types: str | list[str],
         task_type_parsing_function: Callable = passthrough_header_ip_task,
     ):
dkist_processing_common/parsers/wavelength.py
@@ -1,5 +1,7 @@
 """Bud to get the wavelength of observe frames."""
+
 from dkist_processing_common.models.constants import BudName
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.unique_bud import TaskUniqueBud

@@ -9,7 +11,7 @@ class ObserveWavelengthBud(TaskUniqueBud):

     def __init__(self):
         super().__init__(
-            constant_name=BudName.wavelength.value,
-            metadata_key="wavelength",
-            ip_task_types=TaskName.observe.value,
+            constant_name=BudName.wavelength,
+            metadata_key=MetadataKey.wavelength,
+            ip_task_types=TaskName.observe,
         )
dkist_processing_common/tasks/__init__.py
@@ -1,11 +1,12 @@
 """Common tasks and bases."""
-from dkist_processing_common.tasks.base import *  # noreorder
+
+from dkist_processing_common.tasks.base import *  # isort: skip
 from dkist_processing_common.tasks.assemble_movie import *
 from dkist_processing_common.tasks.l1_output_data import *
 from dkist_processing_common.tasks.parse_l0_input_data import *
 from dkist_processing_common.tasks.quality_metrics import *
 from dkist_processing_common.tasks.teardown import *
 from dkist_processing_common.tasks.transfer_input_data import *
-from dkist_processing_common.tasks.write_l1 import *
 from dkist_processing_common.tasks.trial_catalog import *
 from dkist_processing_common.tasks.trial_output_data import *
+from dkist_processing_common.tasks.write_l1 import *
dkist_processing_common/tasks/assemble_movie.py
@@ -1,4 +1,5 @@
 """Task(s) for assembling a browse movie."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod
dkist_processing_common/tasks/base.py
@@ -1,4 +1,5 @@
 """Wrappers for all workflow tasks."""
+
 import json
 import logging
 import re
dkist_processing_common/tasks/l1_output_data.py
@@ -1,4 +1,5 @@
 """Task(s) for the transfer and publishing of L1 data from a production run of a processing pipeline."""
+
 import logging
 from abc import ABC
 from itertools import chain
@@ -20,7 +21,6 @@ from dkist_processing_common.tasks.mixin.quality import QualityMixin
 from dkist_processing_common.tasks.output_data_base import OutputDataBase
 from dkist_processing_common.tasks.output_data_base import TransferDataBase

-
 __all__ = [
     "L1OutputDataBase",
     "TransferL1Data",
dkist_processing_common/tasks/mixin/globus.py
@@ -1,4 +1,5 @@
 """Mixin to add methods to a Task to support globus transfers."""
+
 import logging
 from dataclasses import dataclass
 from pathlib import Path
@@ -11,7 +12,6 @@ from globus_sdk import TransferData

 from dkist_processing_common.config import common_configurations

-
 logger = logging.getLogger(__name__)

dkist_processing_common/tasks/mixin/interservice_bus.py
@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements interservice bus access functionality."""
+
 from talus import DurableProducer
 from talus import PublishMessageBase

dkist_processing_common/tasks/mixin/metadata_store.py
@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements Metadata Store data access functionality."""
+
 import json
 import logging
 from functools import cached_property
@@ -26,7 +27,6 @@ from dkist_processing_common.models.graphql import RecipeRunStatusMutation
 from dkist_processing_common.models.graphql import RecipeRunStatusQuery
 from dkist_processing_common.models.graphql import RecipeRunStatusResponse

-
 logger = logging.getLogger(__name__)

dkist_processing_common/tasks/mixin/object_store.py
@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements Object Store data access functionality."""
+
 from pathlib import Path

 from object_clerk import ObjectClerk
dkist_processing_common/tasks/mixin/quality/__init__.py
@@ -4,4 +4,5 @@ To improve readability the top-level mixin, `QualityMixin`, contains only base f
 metrics are grouped into sub-mixins. To protect a user, this mixin-on-mixin stack is hidden in protected modules
 and only the top-level mixin (`QualityMixin`) is exposed.
 """
+
 from ._base import QualityMixin
dkist_processing_common/tasks/mixin/quality/_base.py
@@ -1,4 +1,5 @@
 """Base QualityMixin class that contains machinery common to all metric types."""
+
 from typing import Iterable

 import numpy as np
dkist_processing_common/tasks/mixin/quality/_metrics.py
@@ -2,6 +2,7 @@

 These classes should not be directly mixed in to anything. They are pre-mixed into the top-level QualityMixin
 """
+
 import copy
 import json
 import logging
dkist_processing_common/tasks/output_data_base.py
@@ -1,4 +1,5 @@
 """Base class that supports common output data methods and paths."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod
dkist_processing_common/tasks/parse_l0_input_data.py
@@ -20,6 +20,7 @@ that makes the rest of the pipeline easy to write.
 In other words, we can find exactly the frame we need (tags) and, once we have it, we never need to look
 at a different frame to get information (constants).
 """
+
 import logging
 from abc import ABC
 from abc import abstractmethod
@@ -27,10 +28,12 @@ from typing import TypeVar

 from dkist_processing_common.codecs.fits import fits_access_decoder
 from dkist_processing_common.models.constants import BudName
+from dkist_processing_common.models.fits_access import MetadataKey
 from dkist_processing_common.models.flower_pot import FlowerPot
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.models.flower_pot import Thorn
 from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.experiment_id_bud import ContributingExperimentIdsBud
 from dkist_processing_common.parsers.experiment_id_bud import ExperimentIdBud
 from dkist_processing_common.parsers.proposal_id_bud import ContributingProposalIdsBud
@@ -44,7 +47,6 @@ from dkist_processing_common.parsers.time import VarianceCadenceBud
 from dkist_processing_common.parsers.unique_bud import UniqueBud
 from dkist_processing_common.tasks.base import WorkflowTaskBase

-
 __all__ = [
     "ParseL0InputDataBase",
     "ParseDataBase",
@@ -60,7 +62,7 @@ S = TypeVar("S", bound=Stem)
 def default_constant_bud_factory() -> list[S]:
     """Provide default constant buds for use in common parsing tasks."""
     return [
-        UniqueBud(constant_name=BudName.instrument.value, metadata_key="instrument"),
+        UniqueBud(constant_name=BudName.instrument, metadata_key=MetadataKey.instrument),
         ProposalIdBud(),
         ContributingProposalIdsBud(),
         ExperimentIdBud(),
@@ -69,9 +71,9 @@ def default_constant_bud_factory() -> list[S]:
         MaximumCadenceBud(),
         MinimumCadenceBud(),
         VarianceCadenceBud(),
-        TaskExposureTimesBud(stem_name=BudName.dark_exposure_times.value, ip_task_types="dark"),
+        TaskExposureTimesBud(stem_name=BudName.dark_exposure_times, ip_task_types=TaskName.dark),
         TaskReadoutExpTimesBud(
-            stem_name=BudName.dark_readout_exp_times.value, ip_task_types="dark"
+            stem_name=BudName.dark_readout_exp_times, ip_task_types=TaskName.dark
         ),
     ]

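Note that `default_constant_bud_factory` now passes `TaskName.dark` where it previously passed the literal "dark". For the existing `ip_task_types` handling to keep working (it calls `task.casefold()` on each entry; see the parsers/time.py hunk above), `TaskName` members must behave like strings. A brief sketch of why that substitution is transparent, using a hypothetical stand-in enum:

# Why TaskName.dark can replace the literal "dark" in ip_task_types.
# TaskNameSketch is a hypothetical stand-in; the real TaskName lives in
# dkist_processing_common/models/task_name.py and its member values may differ.
from enum import StrEnum


class TaskNameSketch(StrEnum):
    dark = "DARK"


# StrEnum members are strings, so the existing casefold() normalization treats
# the member exactly like its literal value.
ip_task_types = [TaskNameSketch.dark]
print([task.casefold() for task in ip_task_types])  # ['dark']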
dkist_processing_common/tasks/quality_metrics.py
@@ -1,4 +1,5 @@
 """Classes to support the generation of quality metrics for the calibrated data."""
+
 import logging
 from dataclasses import dataclass
 from dataclasses import field
@@ -18,7 +19,6 @@ from dkist_processing_common.parsers.quality import L1QualityFitsAccess
 from dkist_processing_common.tasks.base import WorkflowTaskBase
 from dkist_processing_common.tasks.mixin.quality import QualityMixin

-
 __all__ = ["QualityL1Metrics", "QualityL0Metrics"]

dkist_processing_common/tasks/teardown.py
@@ -1,11 +1,11 @@
 """Task(s) for the clean up tasks at the conclusion of a processing pipeline."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod

 from dkist_processing_common.tasks.base import WorkflowTaskBase

-
 __all__ = ["Teardown", "TrialTeardown"]

dkist_processing_common/tasks/transfer_input_data.py
@@ -1,4 +1,5 @@
 """Task(s) for the transfer in of data sources for a processing pipeline."""
+
 import logging
 from pathlib import Path

@@ -11,7 +12,6 @@ from dkist_processing_common.tasks.base import WorkflowTaskBase
 from dkist_processing_common.tasks.mixin.globus import GlobusMixin
 from dkist_processing_common.tasks.mixin.globus import GlobusTransferItem

-
 __all__ = ["TransferL0Data"]

 logger = logging.getLogger(__name__)
dkist_processing_common/tasks/trial_catalog.py
@@ -1,4 +1,5 @@
 """Tasks to support the generation of downstream artifacts in a trial workflow that wouldn't otherwise produce them."""
+
 import importlib
 import logging
 from datetime import datetime
@@ -24,8 +25,8 @@ __all__ = ["CreateTrialDatasetInventory", "CreateTrialAsdf", "CreateTrialQuality
 # Capture condition of dkist-processing-common[inventory] install
 INVENTORY_EXTRA_INSTALLED = False
 try:
-    from dkist_inventory.inventory import generate_inventory_from_frame_inventory
     from dkist_inventory.inventory import generate_asdf_filename
+    from dkist_inventory.inventory import generate_inventory_from_frame_inventory

     INVENTORY_EXTRA_INSTALLED = True
 except ModuleNotFoundError:
@@ -45,8 +46,8 @@ except ModuleNotFoundError:
 # Verify dkist-quality is installed
 QUALITY_EXTRA_INSTALLED = False
 try:
-    from dkist_quality.report import format_report
     from dkist_quality.report import ReportFormattingException
+    from dkist_quality.report import format_report

     QUALITY_EXTRA_INSTALLED = True
 except ModuleNotFoundError:
dkist_processing_common/tasks/trial_output_data.py
@@ -1,4 +1,5 @@
 """Tasks to support transferring an arbitrary collection of files to a customizable post-run location."""
+
 import logging
 from functools import cached_property
 from pathlib import Path