dkist_processing_cryonirsp-1.3.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dkist-processing-cryonirsp might be problematic.

Files changed (111)
  1. changelog/.gitempty +0 -0
  2. dkist_processing_cryonirsp/__init__.py +11 -0
  3. dkist_processing_cryonirsp/config.py +12 -0
  4. dkist_processing_cryonirsp/models/__init__.py +1 -0
  5. dkist_processing_cryonirsp/models/constants.py +248 -0
  6. dkist_processing_cryonirsp/models/exposure_conditions.py +26 -0
  7. dkist_processing_cryonirsp/models/parameters.py +296 -0
  8. dkist_processing_cryonirsp/models/tags.py +168 -0
  9. dkist_processing_cryonirsp/models/task_name.py +14 -0
  10. dkist_processing_cryonirsp/parsers/__init__.py +1 -0
  11. dkist_processing_cryonirsp/parsers/cryonirsp_l0_fits_access.py +111 -0
  12. dkist_processing_cryonirsp/parsers/cryonirsp_l1_fits_access.py +30 -0
  13. dkist_processing_cryonirsp/parsers/exposure_conditions.py +163 -0
  14. dkist_processing_cryonirsp/parsers/map_repeats.py +40 -0
  15. dkist_processing_cryonirsp/parsers/measurements.py +55 -0
  16. dkist_processing_cryonirsp/parsers/modstates.py +31 -0
  17. dkist_processing_cryonirsp/parsers/optical_density_filters.py +40 -0
  18. dkist_processing_cryonirsp/parsers/polarimetric_check.py +120 -0
  19. dkist_processing_cryonirsp/parsers/scan_step.py +412 -0
  20. dkist_processing_cryonirsp/parsers/time.py +80 -0
  21. dkist_processing_cryonirsp/parsers/wavelength.py +26 -0
  22. dkist_processing_cryonirsp/tasks/__init__.py +19 -0
  23. dkist_processing_cryonirsp/tasks/assemble_movie.py +202 -0
  24. dkist_processing_cryonirsp/tasks/bad_pixel_map.py +96 -0
  25. dkist_processing_cryonirsp/tasks/beam_boundaries_base.py +279 -0
  26. dkist_processing_cryonirsp/tasks/ci_beam_boundaries.py +55 -0
  27. dkist_processing_cryonirsp/tasks/ci_science.py +169 -0
  28. dkist_processing_cryonirsp/tasks/cryonirsp_base.py +67 -0
  29. dkist_processing_cryonirsp/tasks/dark.py +98 -0
  30. dkist_processing_cryonirsp/tasks/gain.py +251 -0
  31. dkist_processing_cryonirsp/tasks/instrument_polarization.py +447 -0
  32. dkist_processing_cryonirsp/tasks/l1_output_data.py +44 -0
  33. dkist_processing_cryonirsp/tasks/linearity_correction.py +582 -0
  34. dkist_processing_cryonirsp/tasks/make_movie_frames.py +302 -0
  35. dkist_processing_cryonirsp/tasks/mixin/__init__.py +1 -0
  36. dkist_processing_cryonirsp/tasks/mixin/beam_access.py +52 -0
  37. dkist_processing_cryonirsp/tasks/mixin/corrections.py +177 -0
  38. dkist_processing_cryonirsp/tasks/mixin/intermediate_frame.py +193 -0
  39. dkist_processing_cryonirsp/tasks/mixin/linearized_frame.py +309 -0
  40. dkist_processing_cryonirsp/tasks/mixin/shift_measurements.py +297 -0
  41. dkist_processing_cryonirsp/tasks/parse.py +281 -0
  42. dkist_processing_cryonirsp/tasks/quality_metrics.py +271 -0
  43. dkist_processing_cryonirsp/tasks/science_base.py +511 -0
  44. dkist_processing_cryonirsp/tasks/sp_beam_boundaries.py +270 -0
  45. dkist_processing_cryonirsp/tasks/sp_dispersion_axis_correction.py +484 -0
  46. dkist_processing_cryonirsp/tasks/sp_geometric.py +585 -0
  47. dkist_processing_cryonirsp/tasks/sp_science.py +299 -0
  48. dkist_processing_cryonirsp/tasks/sp_solar_gain.py +475 -0
  49. dkist_processing_cryonirsp/tasks/trial_output_data.py +61 -0
  50. dkist_processing_cryonirsp/tasks/write_l1.py +1033 -0
  51. dkist_processing_cryonirsp/tests/__init__.py +1 -0
  52. dkist_processing_cryonirsp/tests/conftest.py +456 -0
  53. dkist_processing_cryonirsp/tests/header_models.py +592 -0
  54. dkist_processing_cryonirsp/tests/local_trial_workflows/__init__.py +0 -0
  55. dkist_processing_cryonirsp/tests/local_trial_workflows/l0_cals_only.py +541 -0
  56. dkist_processing_cryonirsp/tests/local_trial_workflows/l0_to_l1.py +615 -0
  57. dkist_processing_cryonirsp/tests/local_trial_workflows/linearize_only.py +96 -0
  58. dkist_processing_cryonirsp/tests/local_trial_workflows/local_trial_helpers.py +592 -0
  59. dkist_processing_cryonirsp/tests/test_assemble_movie.py +144 -0
  60. dkist_processing_cryonirsp/tests/test_assemble_qualilty.py +517 -0
  61. dkist_processing_cryonirsp/tests/test_bad_pixel_maps.py +115 -0
  62. dkist_processing_cryonirsp/tests/test_ci_beam_boundaries.py +106 -0
  63. dkist_processing_cryonirsp/tests/test_ci_science.py +355 -0
  64. dkist_processing_cryonirsp/tests/test_corrections.py +126 -0
  65. dkist_processing_cryonirsp/tests/test_cryo_base.py +202 -0
  66. dkist_processing_cryonirsp/tests/test_cryo_constants.py +76 -0
  67. dkist_processing_cryonirsp/tests/test_dark.py +287 -0
  68. dkist_processing_cryonirsp/tests/test_gain.py +278 -0
  69. dkist_processing_cryonirsp/tests/test_instrument_polarization.py +531 -0
  70. dkist_processing_cryonirsp/tests/test_linearity_correction.py +245 -0
  71. dkist_processing_cryonirsp/tests/test_make_movie_frames.py +111 -0
  72. dkist_processing_cryonirsp/tests/test_parameters.py +266 -0
  73. dkist_processing_cryonirsp/tests/test_parse.py +1439 -0
  74. dkist_processing_cryonirsp/tests/test_quality.py +203 -0
  75. dkist_processing_cryonirsp/tests/test_sp_beam_boundaries.py +112 -0
  76. dkist_processing_cryonirsp/tests/test_sp_dispersion_axis_correction.py +155 -0
  77. dkist_processing_cryonirsp/tests/test_sp_geometric.py +319 -0
  78. dkist_processing_cryonirsp/tests/test_sp_make_movie_frames.py +121 -0
  79. dkist_processing_cryonirsp/tests/test_sp_science.py +483 -0
  80. dkist_processing_cryonirsp/tests/test_sp_solar.py +198 -0
  81. dkist_processing_cryonirsp/tests/test_trial_create_quality_report.py +79 -0
  82. dkist_processing_cryonirsp/tests/test_trial_output_data.py +251 -0
  83. dkist_processing_cryonirsp/tests/test_workflows.py +9 -0
  84. dkist_processing_cryonirsp/tests/test_write_l1.py +436 -0
  85. dkist_processing_cryonirsp/workflows/__init__.py +2 -0
  86. dkist_processing_cryonirsp/workflows/ci_l0_processing.py +77 -0
  87. dkist_processing_cryonirsp/workflows/sp_l0_processing.py +84 -0
  88. dkist_processing_cryonirsp/workflows/trial_workflows.py +190 -0
  89. dkist_processing_cryonirsp-1.3.4.dist-info/METADATA +194 -0
  90. dkist_processing_cryonirsp-1.3.4.dist-info/RECORD +111 -0
  91. dkist_processing_cryonirsp-1.3.4.dist-info/WHEEL +5 -0
  92. dkist_processing_cryonirsp-1.3.4.dist-info/top_level.txt +4 -0
  93. docs/Makefile +134 -0
  94. docs/bad_pixel_calibration.rst +47 -0
  95. docs/beam_angle_calculation.rst +53 -0
  96. docs/beam_boundary_computation.rst +88 -0
  97. docs/changelog.rst +7 -0
  98. docs/ci_science_calibration.rst +33 -0
  99. docs/conf.py +52 -0
  100. docs/index.rst +21 -0
  101. docs/l0_to_l1_cryonirsp_ci-full-trial.rst +10 -0
  102. docs/l0_to_l1_cryonirsp_ci.rst +10 -0
  103. docs/l0_to_l1_cryonirsp_sp-full-trial.rst +10 -0
  104. docs/l0_to_l1_cryonirsp_sp.rst +10 -0
  105. docs/linearization.rst +43 -0
  106. docs/make.bat +170 -0
  107. docs/requirements.txt +1 -0
  108. docs/requirements_table.rst +8 -0
  109. docs/scientific_changelog.rst +10 -0
  110. docs/sp_science_calibration.rst +59 -0
  111. licenses/LICENSE.rst +11 -0
@@ -0,0 +1,120 @@
+ """Copy of UniqueBud from common that only activates if the frames are polarimetric "observe" or "polcal" task, or non-polarimetric "observe" task."""
+ from collections import namedtuple
+ from typing import Type
+
+ from dkist_processing_common.models.flower_pot import SpilledDirt
+ from dkist_processing_common.models.flower_pot import Stem
+ from dkist_processing_common.models.flower_pot import Thorn
+ from dkist_processing_common.models.task_name import TaskName
+
+ from dkist_processing_cryonirsp.models.constants import CryonirspBudName
+ from dkist_processing_cryonirsp.parsers.cryonirsp_l0_fits_access import CryonirspL0FitsAccess
+
+
+ class PolarimetricCheckingUniqueBud(Stem):
+     """Bud for checking if frames are polarimetric."""
+
+     PolarimetricValueData = namedtuple(
+         "PolarimetricValueData", ["task", "num_modstates", "spin_mode", "bud_value"]
+     )
+     observe_task_name = TaskName.observe.value.casefold()
+     polcal_task_name = TaskName.polcal.value.casefold()
+
+     def __init__(self, constant_name: str, metadata_key: str):
+         super().__init__(stem_name=constant_name)
+         self.metadata_key = metadata_key
+
+     @property
+     def observe_tuple_values(self) -> tuple:
+         """Return all ingested namedtuples from observe frames."""
+         return filter(lambda x: x.task == self.observe_task_name, self.key_to_petal_dict.values())
+
+     @property
+     def polcal_tuple_values(self) -> tuple:
+         """Return all ingested namedtuples from polcal frames."""
+         return filter(lambda x: x.task == self.polcal_task_name, self.key_to_petal_dict.values())
+
+     def is_polarimetric(self) -> bool:
+         """Check if data is polarimetric."""
+         obs_num_mod_set = set((o.num_modstates for o in self.observe_tuple_values))
+         obs_spin_mode_set = set((o.spin_mode for o in self.observe_tuple_values))
+
+         if len(obs_num_mod_set) > 1:
+             raise ValueError(
+                 f"Observe frames have more than one value of NUM_MODSTATES. Set is {obs_num_mod_set}"
+             )
+         if len(obs_spin_mode_set) > 1:
+             raise ValueError(
+                 f"Observe frames have more than one value of MODULATOR_SPIN_MODE. Set is {obs_spin_mode_set}"
+             )
+
+         num_mod = obs_num_mod_set.pop()
+         spin_mode = obs_spin_mode_set.pop()
+         if num_mod > 1 and spin_mode in [
+             "Continuous",
+             "Stepped",
+         ]:
+             return True
+         return False
+
+     def setter(self, fits_obj: CryonirspL0FitsAccess) -> Type[SpilledDirt] | tuple:
+         """
+         Return a `PolarimetricValueData` namedtuple only for OBSERVE and POLCAL frames.
+
+         Parameters
+         ----------
+         fits_obj:
+             A single FitsAccess object
+         """
+         task = fits_obj.ip_task_type.casefold()
+
+         # Some intensity mode data has the number of modulator states set to 0
+         num_modstates = fits_obj.number_of_modulator_states or 1
+
+         if self.metadata_key == "number_of_modulator_states":
+             bud_value = num_modstates
+         else:
+             bud_value = getattr(fits_obj, self.metadata_key)
+
+         if task in [self.observe_task_name, self.polcal_task_name]:
+             return self.PolarimetricValueData(
+                 task=task,
+                 num_modstates=num_modstates,
+                 spin_mode=fits_obj.modulator_spin_mode,
+                 bud_value=bud_value,
+             )
+         return SpilledDirt
+
+     def getter(self, key: str) -> int | str | Type[Thorn]:
+         """
+         Return the desired metadata key, with checks.
+
+         If data are from a polarimetric dataset then the values must match between observe and polcal frames.
+         In all cases the value returned must be the same across all observe (and potentially polcal) frames.
+         """
+         obs_value_set = set((o.bud_value for o in self.observe_tuple_values))
+         if len(obs_value_set) == 0:
+             # For the rare case where we have polcal frames but not observe frames that are being parsed. This only
+             # comes up in unit tests.
+             return Thorn
+
+         obs_value = obs_value_set.pop()
+
+         if self.is_polarimetric():
+             pol_value_set = set((o.bud_value for o in self.polcal_tuple_values))
+
+             if len(pol_value_set) > 1:
+                 raise ValueError(
+                     f"Polcal frames have more than one value of NUM_MODSTATES. Set is {pol_value_set}"
+                 )
+
+             pol_value = pol_value_set.pop()
+
+             if obs_value != pol_value:
+                 raise ValueError(
+                     f"Polcal and Observe frames have different values for {self.metadata_key}. ({obs_value = }, {pol_value = })"
+                 )
+
+             return obs_value
+
+         return obs_value
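
The polarimetric check above reduces to two header values: the number of modulator states (with 0 treated as 1 for intensity-mode data) and the modulator spin mode. A minimal standalone sketch of that rule, assuming a hypothetical FrameInfo stand-in for CryonirspL0FitsAccess and invented values (not part of the package):

from dataclasses import dataclass


@dataclass
class FrameInfo:
    number_of_modulator_states: int
    modulator_spin_mode: str


def is_polarimetric(frame: FrameInfo) -> bool:
    # Intensity-mode data sometimes reports 0 modulator states; treat that as 1.
    num_modstates = frame.number_of_modulator_states or 1
    return num_modstates > 1 and frame.modulator_spin_mode in ("Continuous", "Stepped")


print(is_polarimetric(FrameInfo(8, "Continuous")))  # True -> polarimetric dataset
print(is_polarimetric(FrameInfo(0, "Off")))         # False -> intensity-only dataset
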
@@ -0,0 +1,412 @@
+ """
+ Machinery for sorting files based on scan step.
+
+ Also includes base classes that can be used to sort files based on map scan.
+ """
+ ######################
+ # HOW THIS ALL WORKS #
+ ######################
+ #
+ # By analogy
+ #
+ # Something to know: Spruce, Pine, and Fir all have pretty similar physical properties (like density). This is why
+ # framing lumber is often sold as "SPF"; it could be any of these three woods because they're similar enough for
+ # building.
+ #
+ # Now, the analogy of how we sort and figure out how many map scans and scan steps there are...
+ #
+ # We will be sorting wooden blocks. These blocks have 2 characteristics that we can directly observe: shape and color.
+ # We know that the factory only makes two shapes of blocks: spheres and cubes.
+ # We know that the factory only uses three colors: red, green, and blue.
+ # The blocks can be made of different woods: spruce, pine, or fir. Unfortunately we *can't* tell which wood is which
+ # via direct observation (especially after they have been painted).
+ # Additionally, the factory stamps a serial number on every block. These numbers are continuously increasing and never
+ # repeat.
+ #
+ # The problem is that we need to sort blocks based on the type of wood used.
+ #
+ # Fortunately we know how the factory makes these blocks:
+ #
+ # o They make exactly 6 blocks (2 shapes * 3 colors) from each board of wood
+ # o They use exactly 3 boards of wood each day (it's not a very big factory)
+ # o They *always* start with a board of spruce, then a board of pine, and finally a board of fir
+ # o When processing a single board, the sequence is:
+ #   1. Make 3 spheres
+ #   2. Color a sphere red. This block is done so stamp it with a serial number.
+ #   3. Color a sphere blue. This block is done so stamp it with a serial number.
+ #   4. Color the last sphere green. This block is done so stamp it with a serial number.
+ #   5. Repeat steps 1-4 but with cubes instead of spheres.
+ #
+ # With all of this information we can easily sort the blocks by wood type. The algorithm works like this.
+ #
+ # 1. Choose a (shape, color) combination. For this example we'll use green spheres
+ # 2. Collect all green spheres
+ # 3. Sort the green spheres by their serial numbers. They won't be in sequence, but that doesn't matter.
+ # 4. The lowest serial number came from the first board of the day (spruce), the middle number comes from the second
+ #    (pine) board, and the largest number comes from the last (fir) board.
+ # 5. Start a new set of piles for spruce, pine, and fir, and place the sorted green spheres into these piles.
+ # 6. Repeat steps 1-5 for all other (shape, color) combinations.
+ #
+ # And there you go! We now have correctly sorted the blocks by type of wood.
+ # We have used our knowledge of the GENERATING PROCESS and OBSERVABLE PROPERTIES to sort by an UNOBSERVABLE PROPERTY.
+ #
+ # Analogy over.
+ #
+ # We want to sort frames by map_scan. This is the UNOBSERVABLE PROPERTY (like wood type).
+ # Each frame has many OBSERVABLE PROPERTIES (like color, shape, and serial #). In reality, we have 5 such properties:
+ #   1. Current scan step
+ #   2. Current measurement
+ #   3. Current modstate
+ #   4. Current sub repeat
+ #   5. Obs time
+ #
+ # Obs time is monotonically increasing and never repeats. It is the serial number in the analogy.
+ #
+ # A single frame gets ingested into a `SingleScanStep`. This is just a convenience container for the OBSERVABLE
+ # PROPERTIES we care about. A `SingleScanStep` is like a single block in the analogy.
+ #
+ # `MapScanStepStemBase.scan_step_dict` is how we sort frames based on their OBSERVABLE PROPERTIES. Instead of a pile
+ # of green spheres imagine a bag labeled "spheres" containing 3 sub-bags, each labeled by color. Each bag in that
+ # analogy is a level of the `scan_step_dict` dictionary; one for each of the OBSERVABLE PROPERTIES.
+ # If we go to the innermost level of that dictionary we will find a list of `SingleScanStep` objects. This is like
+ # going into the "sphere" bag, then the "green" bag and finding 3 blocks (for the three different woods)*.
+ #
+ # The number of `SingleScanStep` objects at the innermost level of `scan_step_dict` is the number of map scans.
+ #
+ # We can also use the `scan_step_dict` to figure out which map scan a frame comes from. This is what
+ # `MapScanStepStemBase.get_map_scan_for_key()` does. It sorts the innermost list by obs time and then figures out
+ # where in the resulting order the current frame belongs.
+ #
+ # Finally, once we can segregate frames (i.e., `SingleScanStep` objects) by map scan, we are able to figure out
+ # the number of scan steps in each map scan. This is what the methods in `NumberOfScanStepsBud` do and they allow us
+ # to account for aborts.
+ # Back to the analogy, we don't want different numbers of, e.g., green spheres and red cubes. If the factory stops after
+ # processing at least one full board (map scan) then we only accept blocks that come from complete boards because we
+ # have at least one full set of (color, shape) blocks. If, however, the factory aborts during the first board then we
+ # will be OK with a complete set of blocks of the same shape (scan step). So if the factory stops while making the
+ # green cube then we'll just take all the spheres and throw away all the cubes.
+ #
+ #########
+ #
+ # Hopefully this helps you understand how we are able to sort by map scan even though there is no header key for it.
+ # We use our knowledge of the loops that produce the data (GENERATING PROCESS) and the header values for scan step,
+ # measurement, modstate, sub repeat, and obs time (OBSERVABLE PROPERTIES) to infer the map scan (UNOBSERVABLE PROPERTY).
+ #
+ #
+ # *(We actually could make just single flat "piles" of blocks instead of nested bags/dictionaries. This would mean using
+ #   keys like `f"{obj.scan_step}_{obj.measurement}_{obj.modstate}_{obj.sub_repeat}"`, which is pretty weird from a
+ #   coding perspective).
+ from __future__ import annotations
+
+ from abc import ABC
+ from collections import defaultdict
+ from functools import cached_property
+ from typing import Type
+
+ from astropy.time import Time
+ from dkist_processing_common.models.flower_pot import SpilledDirt
+ from dkist_processing_common.models.flower_pot import Stem
+ from dkist_processing_common.parsers.single_value_single_key_flower import (
+     SingleValueSingleKeyFlower,
+ )
+
+ from dkist_processing_cryonirsp.models.constants import CryonirspBudName
+ from dkist_processing_cryonirsp.models.tags import CryonirspStemName
+ from dkist_processing_cryonirsp.parsers.cryonirsp_l0_fits_access import CryonirspL0FitsAccess
+
+
+ def zero_size_scan_step_test(fits_obj) -> bool:
+     """Test if the dual internal scan loop trick is being used."""
+     if fits_obj.num_cn2_scan_steps > 0 and fits_obj.cn2_step_size == 0:
+         return fits_obj.cn2_step_size == 0
+     return False
+
+
+ def single_scan_step_key(fits_obj: CryonirspL0FitsAccess):
+     """Return the single_step_key based on how the scanning is being done."""
+     if zero_size_scan_step_test(fits_obj):
+         return "cn1_scan_step"
+     return "scan_step"
+
+
+ def total_num_key(fits_obj: CryonirspL0FitsAccess):
+     """Return the total_num_key based on how the scanning is being done."""
+     if zero_size_scan_step_test(fits_obj):
+         return "num_cn1_scan_steps"
+     return "num_scan_steps"
+
+
+ class SingleScanStep:
+     """
+     An object that uniquely defines a (scan_step, meas_num, modstate, sub_rep, time_obs) tuple from any number of dsps repeat repeats.
+
+     This is just a fancy tuple.
+
+     Basically, it just hashes the (scan_step, meas_num, modstate, sub_rep, time_obs) tuple so these objects can easily be compared.
+     Also uses the time_obs property so that multiple dsps repeats of the same (scan_step, meas_num, modstate, sub_rep) can be sorted.
+     """
+
+     def __init__(self, fits_obj: CryonirspL0FitsAccess):
+         """Read relevant values from a FitsAccess object."""
+         self.num_scan_steps = self.get_num_scan_steps_value(fits_obj)
+         self.scan_step = self.get_scan_step_value(fits_obj)
+         self.meas_num = fits_obj.meas_num
+         self.modulator_state = fits_obj.modulator_state
+         self.sub_repeat_num = fits_obj.sub_repeat_num
+         self.date_obs = Time(fits_obj.time_obs)
+
+         # For __repr__
+         self._fits_obj = fits_obj
+
+     @staticmethod
+     def get_scan_step_value(fits_obj: CryonirspL0FitsAccess) -> int:
+         """Return the scan_step based on how the scanning is being done."""
+         return getattr(fits_obj, single_scan_step_key(fits_obj))
+
+     @staticmethod
+     def get_num_scan_steps_value(fits_obj: CryonirspL0FitsAccess) -> int:
+         """Return the header value for the total number of scan steps while accounting for different modes of scanning."""
+         return getattr(fits_obj, total_num_key(fits_obj))
+
+     def __repr__(self):
+         return f"{self.__class__.__name__}({self._fits_obj!r})"
+
+     def __str__(self):
+         return (
+             f"SingleScanStep with {self.date_obs = }, "
+             f"{self.num_scan_steps = }, "
+             f"{self.scan_step = }, "
+             f"{self.meas_num = }, "
+             f"{self.modulator_state = }, "
+             f"and {self.sub_repeat_num = }"
+         )
+
+     def __eq__(self, other: SingleScanStep) -> bool:
+         """
+         Two frames are equal if they have the same (scan_step, meas_num, modstate, sub_rep) tuple.
+
+         Doesn't account for num_scan_steps because it *should* be the same for all objects and the test of that
+         singularity exists elsewhere (i.e., we don't want a bad num_scan_steps value to affect comparison of these
+         objects).
+         """
+         if not isinstance(other, SingleScanStep):
+             raise TypeError(f"Cannot compare ScanStep with type {type(other)}")
+
+         for attr in ["scan_step", "modulator_state", "date_obs", "meas_num", "sub_repeat_num"]:
+             if getattr(self, attr) != getattr(other, attr):
+                 return False
+
+         return True
+
+     def __lt__(self, other: SingleScanStep) -> bool:
+         """Only sort on date_obs."""
+         return self.date_obs < other.date_obs
+
+     def __hash__(self) -> int:
+         """
+         Not strictly necessary, but does allow for using set() on these objects.
+
+         Doesn't account for num_scan_steps because it *should* be the same for all objects and the test of that
+         singularity exists elsewhere (i.e., we don't want a bad num_scan_steps value to affect comparison of these
+         objects).
+         """
+         return hash(
+             (
+                 self.scan_step,
+                 self.meas_num,
+                 self.modulator_state,
+                 self.sub_repeat_num,
+                 self.date_obs,
+             )
+         )
+
+
+ class MapScanStepStemBase(Stem, ABC):
+     """Base class for Stems that determine the sorting of map scans and scan steps."""
+
+     # This is only here so type-hinting of this complex dictionary will work.
+     key_to_petal_dict: dict[str, SingleScanStep]
+
+     def setter(self, fits_obj: CryonirspL0FitsAccess) -> SingleScanStep | Type[SpilledDirt]:
+         """Ingest observe frames as SingleScanStep objects."""
+         if fits_obj.ip_task_type != "observe":
+             return SpilledDirt
+         return SingleScanStep(fits_obj=fits_obj)
+
+     @cached_property
+     def scan_step_dict(self) -> dict[int, dict[int, dict[int, dict[int, list[SingleScanStep]]]]]:
+         """Nested dictionary that contains a SingleScanStep for each ingested frame.
+
+         Dictionary structure is::
+
+             {scan_step:
+                 {measurement:
+                     {modstate:
+                         {sub_repeat:
+                             [SingleScanStep1, SingleScanStep2, ...]
+                         }
+                     }
+                 }
+             }
+
+         """
+         scan_step_dict = defaultdict(
+             lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
+         )
+
+         for scan_step_obj in self.key_to_petal_dict.values():
+             scan_step_dict[scan_step_obj.scan_step][scan_step_obj.meas_num][
+                 scan_step_obj.modulator_state
+             ][scan_step_obj.sub_repeat_num].append(scan_step_obj)
+
+         return scan_step_dict
+
+     @cached_property
+     def number_of_complete_map_scans(self) -> int:
+         """Compute the total number of complete map scans.
+
+         In a multi-map dataset a complete map scan is defined as a map scan that contains a complete
+         set of scan steps. In other words, each scan step will be present in the same number of
+         *complete* map scans when the entire set of scan steps is considered.
+
+         The map scan in a dataset with a single map scan is considered complete by default. (If it is missing
+         scan steps it will simply be truncated).
+
+         Assumes the incomplete map_scan is always the last one due to summit abort or cancellation.
+         """
+         map_scans_per_scan_step = []
+         for meas_dict in self.scan_step_dict.values():
+             for mod_dict in meas_dict.values():
+                 for sub_repeat_dict in mod_dict.values():
+                     files_per_subrepeat = []
+                     for file_list in sub_repeat_dict.values():
+                         files_per_subrepeat.append(len(file_list))
+
+                     # Using `max` here makes us somewhat resistant to aborted-then-continued data
+                     # (or just missing frames).
+                     map_scans_per_scan_step.append(max(files_per_subrepeat))
+
+         if max(map_scans_per_scan_step) - min(map_scans_per_scan_step) > 1:
+             raise ValueError("More than one incomplete map exists in the data.")
+
+         return min(map_scans_per_scan_step)
+
+     def get_map_scan_for_key(self, key) -> int:
+         """Compute the map scan number for a single frame.
+
+         The frame implies a SingleScanStep. That object is then compared to the sorted list of objects for a single
+         (raster_step, meas_num, modstate, sub_repeat) tuple. The location within that sorted list is the map scan number.
+         """
+         scan_step_obj = self.key_to_petal_dict[key]
+         step_list: list[SingleScanStep] = sorted(
+             self.scan_step_dict[scan_step_obj.scan_step][scan_step_obj.meas_num][
+                 scan_step_obj.modulator_state
+             ][scan_step_obj.sub_repeat_num]
+         )
+
+         num_exp = step_list.count(scan_step_obj)
+         if num_exp > 1:
+             raise ValueError(
+                 f"More than one exposure detected for a single map scan of a single map step. (Randomly chosen step has {num_exp} exposures)."
+             )
+         return step_list.index(scan_step_obj) + 1  # Here we decide that map scan indices start at 1
+
+
+ class NumberOfScanStepsBud(MapScanStepStemBase):
+     """Bud for finding the total number of scan steps."""
+
+     def __init__(self):
+         super().__init__(stem_name=CryonirspBudName.num_scan_steps.value)
+
+     @cached_property
+     def map_scan_to_obj_dict(self) -> dict[int, list[SingleScanStep]]:
+         """Sort SingleScanStep objects by what map scan they belong to."""
+         map_scan_to_obj_dict = defaultdict(list)
+         for key, single_step_obj in self.key_to_petal_dict.items():
+             map_scan = self.get_map_scan_for_key(key)
+             map_scan_to_obj_dict[map_scan].append(single_step_obj)
+
+         return map_scan_to_obj_dict
+
+     def steps_in_map_scan(self, map_scan: int) -> int:
+         """
+         Compute the number of scan steps in the given map scan.
+
+         First we check how many files (i.e., `SingleScanStep` objects) belong to each scan step. The number of files
+         in a completed scan step is assumed to be the maximum of this set. Any scan steps with fewer than this number
+         are discarded, and finally we check that the remaining scan steps (i.e., those that have completed) are
+         continuous. This check ensures that any aborted scan steps were at the end of a sequence.
+         """
+         objs_in_map_scan_list = self.map_scan_to_obj_dict[map_scan]
+
+         scan_step_to_objs_dict = defaultdict(list)
+         for obj in objs_in_map_scan_list:
+             scan_step_to_objs_dict[obj.scan_step].append(obj)
+
+         # Using the dict keys ensures there are no repeats
+         sorted_steps = sorted(scan_step_to_objs_dict.keys())
+         files_per_step = [len(scan_step_to_objs_dict[step]) for step in sorted_steps]
+
+         completed_step_size = max(files_per_step)  # A relatively safe assumption
+         indices_of_completed_steps = [
+             idx for idx, num_files in enumerate(files_per_step) if num_files == completed_step_size
+         ]
+
+         # Now check that all the steps we expect are present
+         # Incomplete steps are allowed *only at the end of the sequence*. I.e., if an incomplete step is followed
+         # by a complete step then the ValueError will be triggered.
+         completed_steps = [sorted_steps[i] for i in indices_of_completed_steps]
+         if completed_steps != list(range(1, max(completed_steps) + 1)):
+             raise ValueError(f"Not all sequential steps could be found. Found {completed_steps}")
+
+         return len(completed_steps)
+
+     def getter(self, key):
+         """
+         Compute the number of complete scan steps.
+
+         In cases where there is only a single map scan the number of scan steps is equal to the number of completed
+         scan steps. In cases where there are multiple map scans, only completed map scans are considered and an error is
+         raised if they each have a different number of scan steps.
+
+         An error is also raised if the set of values of the "CNNUMSCN" header key (number of scan steps) contains more
+         than one value.
+         """
+         # We still want to check that all files have the same value for NUM_SCAN_STEPS
+         num_steps_set = set(v.num_scan_steps for v in self.key_to_petal_dict.values())
+         if len(num_steps_set) > 1:
+             raise ValueError(f"Multiple {self.stem_name} values found. Values: {num_steps_set}")
+
+         steps_per_map_scan_set = set(
+             self.steps_in_map_scan(map_scan)
+             for map_scan in range(1, self.number_of_complete_map_scans + 1)
+         )
+         if len(steps_per_map_scan_set) > 1:
+             raise ValueError(
+                 "The set of non-aborted maps have varying numbers of scan steps. This is very strange "
+                 "and likely indicates a failure to parse the aborted map scans."
+             )
+
+         return steps_per_map_scan_set.pop()
+
+
+ class ScanStepNumberFlower(SingleValueSingleKeyFlower):
+     """Flower for a scan step."""
+
+     def __init__(self):
+         super().__init__(tag_stem_name=CryonirspStemName.scan_step.value, metadata_key="")
+
+     def setter(self, fits_obj: CryonirspL0FitsAccess):
+         """
+         Setter for a flower.
+
+         Parameters
+         ----------
+         fits_obj:
+             A single FitsAccess object
+         """
+         if fits_obj.ip_task_type != "observe":
+             return SpilledDirt
+         # Set the meta-data key, which isn't known at object creation time.
+         self.metadata_key = single_scan_step_key(fits_obj)
+         return super().setter(fits_obj)
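
The map-scan inference described in the comments and in get_map_scan_for_key() can be summarized outside the Stem machinery: group frames by their observable (scan_step, meas_num, modstate, sub_repeat) tuple, sort each group by observation time, and a frame's 1-based position within its sorted group is its map scan number. A rough standalone sketch under those assumptions (the Frame tuple and timestamps below are invented for illustration and are not package API):

from collections import defaultdict
from typing import NamedTuple


class Frame(NamedTuple):
    scan_step: int
    meas_num: int
    modstate: int
    sub_repeat: int
    time_obs: str  # monotonically increasing, never repeats


frames = [
    Frame(1, 1, 1, 1, "2023-01-01T00:00:00"),  # map scan 1
    Frame(2, 1, 1, 1, "2023-01-01T00:00:10"),  # map scan 1
    Frame(1, 1, 1, 1, "2023-01-01T00:00:20"),  # map scan 2
    Frame(2, 1, 1, 1, "2023-01-01T00:00:30"),  # map scan 2
]

# Group by the observable tuple (the nested scan_step_dict flattened to a single key).
groups: dict[tuple, list[Frame]] = defaultdict(list)
for f in frames:
    groups[(f.scan_step, f.meas_num, f.modstate, f.sub_repeat)].append(f)

for f in frames:
    group = sorted(groups[(f.scan_step, f.meas_num, f.modstate, f.sub_repeat)],
                   key=lambda x: x.time_obs)
    map_scan = group.index(f) + 1  # map scan indices start at 1
    print(f"scan_step={f.scan_step} time={f.time_obs} -> map_scan={map_scan}")

Aborted maps then show up as groups that are shorter than the rest, which is what number_of_complete_map_scans and steps_in_map_scan guard against.
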
@@ -0,0 +1,80 @@
+ """Buds to parse exposure time."""
+ from typing import Hashable
+ from typing import Type
+
+ from dkist_processing_common.models.flower_pot import SpilledDirt
+ from dkist_processing_common.models.flower_pot import Stem
+ from dkist_processing_common.models.task_name import TaskName
+ from dkist_processing_common.parsers.task import parse_header_ip_task_with_gains
+
+ from dkist_processing_cryonirsp.models.constants import CryonirspBudName
+ from dkist_processing_cryonirsp.parsers.cryonirsp_l0_fits_access import CryonirspL0FitsAccess
+ from dkist_processing_cryonirsp.parsers.cryonirsp_l0_fits_access import CryonirspRampFitsAccess
+
+
+ class CryonirspSolarGainStartTimeBud(Stem):
+     """Bud for finding the start time of the solar gain."""
+
+     def __init__(self):
+         self.metadata_key = "time_obs"
+         super().__init__(stem_name=CryonirspBudName.solar_gain_ip_start_time.value)
+
+     def setter(self, fits_obj: CryonirspL0FitsAccess) -> Type[SpilledDirt] | int:
+         """
+         Setter for the bud.
+
+         Parameters
+         ----------
+         fits_obj:
+             A single FitsAccess object
+         """
+         if parse_header_ip_task_with_gains(fits_obj) != TaskName.solar_gain.value:
+             return SpilledDirt
+         return getattr(fits_obj, self.metadata_key)
+
+     def getter(self, key: Hashable):
+         """Return the first date-obs value."""
+         first_date_obs = min(list(self.key_to_petal_dict.values()))
+         return first_date_obs
+
+
+ class CryonirspTimeObsBud(Stem):
+     """
+     Produce a tuple of all time_obs values present in the dataset.
+
+     The time_obs is a unique identifier for all raw frames in a single ramp. Hence, this list identifies all
+     the ramps that must be processed in a data set.
+     """
+
+     def __init__(self):
+         super().__init__(stem_name=CryonirspBudName.time_obs_list.value)
+
+     def setter(self, fits_obj: CryonirspRampFitsAccess):
+         """
+         Set the time_obs for this fits object.
+
+         Parameters
+         ----------
+         fits_obj
+             The input fits object
+         Returns
+         -------
+         The time_obs value associated with this fits object
+         """
+         return fits_obj.time_obs
+
+     def getter(self, key: Hashable) -> tuple:
+         """
+         Get the sorted tuple of time_obs values.
+
+         Parameters
+         ----------
+         key
+             The input key
+
+         Returns
+         -------
+         A tuple of time_obs values
+         """
+         time_obs_tup = tuple(sorted(set(self.key_to_petal_dict.values())))
+         return time_obs_tup
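
The getter above amounts to de-duplicating and sorting the ingested time_obs values, giving one entry per ramp to be processed. A toy illustration with invented timestamps:

# One entry per ramp: de-duplicate and sort the observed time_obs values.
ramp_times = [
    "2023-01-01T00:00:02",
    "2023-01-01T00:00:00",
    "2023-01-01T00:00:00",  # a second frame from the same ramp
    "2023-01-01T00:00:01",
]
print(tuple(sorted(set(ramp_times))))
# ('2023-01-01T00:00:00', '2023-01-01T00:00:01', '2023-01-01T00:00:02')
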
@@ -0,0 +1,26 @@
+ """CryoNIRSP bud to get the wavelength."""
+ from dkist_processing_common.models.flower_pot import SpilledDirt
+ from dkist_processing_common.parsers.unique_bud import UniqueBud
+
+ from dkist_processing_cryonirsp.models.constants import CryonirspBudName
+ from dkist_processing_cryonirsp.parsers.cryonirsp_l0_fits_access import CryonirspL0FitsAccess
+
+
+ class ObserveWavelengthBud(UniqueBud):
+     """Bud to find the wavelength."""
+
+     def __init__(self):
+         super().__init__(constant_name=CryonirspBudName.wavelength.value, metadata_key="wavelength")
+
+     def setter(self, fits_obj: CryonirspL0FitsAccess):
+         """
+         Set the value of the bud.
+
+         Parameters
+         ----------
+         fits_obj:
+             A single FitsAccess object
+         """
+         if fits_obj.ip_task_type.lower() != "observe":
+             return SpilledDirt
+         return super().setter(fits_obj)
@@ -0,0 +1,19 @@
+ """Init."""
+ from dkist_processing_cryonirsp.tasks.assemble_movie import *
+ from dkist_processing_cryonirsp.tasks.bad_pixel_map import *
+ from dkist_processing_cryonirsp.tasks.ci_beam_boundaries import *
+ from dkist_processing_cryonirsp.tasks.ci_science import *
+ from dkist_processing_cryonirsp.tasks.dark import *
+ from dkist_processing_cryonirsp.tasks.gain import *
+ from dkist_processing_cryonirsp.tasks.instrument_polarization import *
+ from dkist_processing_cryonirsp.tasks.linearity_correction import *
+ from dkist_processing_cryonirsp.tasks.make_movie_frames import *
+ from dkist_processing_cryonirsp.tasks.parse import *
+ from dkist_processing_cryonirsp.tasks.quality_metrics import *
+ from dkist_processing_cryonirsp.tasks.sp_beam_boundaries import *
+ from dkist_processing_cryonirsp.tasks.sp_dispersion_axis_correction import *
+ from dkist_processing_cryonirsp.tasks.sp_geometric import *
+ from dkist_processing_cryonirsp.tasks.sp_science import *
+ from dkist_processing_cryonirsp.tasks.sp_solar_gain import *
+ from dkist_processing_cryonirsp.tasks.trial_output_data import *
+ from dkist_processing_cryonirsp.tasks.write_l1 import *