dkist-processing-common 12.0.0rc5__py3-none-any.whl → 12.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dkist_processing_common/codecs/fits.py +27 -6
- dkist_processing_common/models/constants.py +16 -10
- dkist_processing_common/models/extras.py +35 -0
- dkist_processing_common/models/flower_pot.py +230 -9
- dkist_processing_common/models/tags.py +13 -0
- dkist_processing_common/parsers/average_bud.py +0 -2
- dkist_processing_common/parsers/cs_step.py +10 -10
- dkist_processing_common/parsers/id_bud.py +8 -10
- dkist_processing_common/parsers/lookup_bud.py +7 -11
- dkist_processing_common/parsers/near_bud.py +7 -12
- dkist_processing_common/parsers/retarder.py +9 -13
- dkist_processing_common/parsers/time.py +19 -55
- dkist_processing_common/parsers/unique_bud.py +7 -14
- dkist_processing_common/tasks/l1_output_data.py +23 -14
- dkist_processing_common/tasks/output_data_base.py +25 -4
- dkist_processing_common/tasks/parse_l0_input_data.py +4 -2
- dkist_processing_common/tasks/transfer_input_data.py +1 -0
- dkist_processing_common/tasks/write_extra.py +333 -0
- dkist_processing_common/tasks/write_l1.py +2 -55
- dkist_processing_common/tasks/write_l1_base.py +67 -0
- dkist_processing_common/tests/test_codecs.py +57 -11
- dkist_processing_common/tests/test_construct_dataset_extras.py +224 -0
- dkist_processing_common/tests/test_flower_pot.py +147 -5
- dkist_processing_common/tests/test_output_data_base.py +24 -2
- dkist_processing_common/tests/test_parse_l0_input_data.py +28 -4
- dkist_processing_common/tests/test_stems.py +140 -193
- dkist_processing_common/tests/test_transfer_l1_output_data.py +1 -0
- dkist_processing_common/tests/test_trial_catalog.py +2 -0
- dkist_processing_common/tests/test_workflow_task_base.py +0 -11
- dkist_processing_common/tests/test_write_l1.py +0 -1
- {dkist_processing_common-12.0.0rc5.dist-info → dkist_processing_common-12.2.0.dist-info}/METADATA +4 -4
- {dkist_processing_common-12.0.0rc5.dist-info → dkist_processing_common-12.2.0.dist-info}/RECORD +34 -31
- {dkist_processing_common-12.0.0rc5.dist-info → dkist_processing_common-12.2.0.dist-info}/WHEEL +1 -1
- changelog/288.misc.rst +0 -1
- {dkist_processing_common-12.0.0rc5.dist-info → dkist_processing_common-12.2.0.dist-info}/top_level.txt +0 -0
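
Two of the larger changes in this release are the new `SetStem` and `ListStem` stem base classes in `models/flower_pot.py` (+230 -9) and the new `WriteL1DatasetExtras` task in `tasks/write_extra.py` (+333 -0); the test diffs reproduced below exercise both. As a quick orientation, here is a minimal sketch of the new bud interface, inferred from the test usage below (the class and stem names here are illustrative, not part of the package):

    from dkist_processing_common.models.flower_pot import SetStem


    class ModThreeBud(SetStem):  # illustrative name, not part of the package
        def setter(self, value: int) -> int:
            # Called for every ingested key/value pair; SetStem retains only unique returned values
            return value % 3

        def getter(self) -> int:
            # Unlike Stem.getter, takes no key; it works from the accumulated `value_set`
            return len(self.value_set)


    bud = ModThreeBud(stem_name="MOD_THREE_BUD")
    for i, value in enumerate([0, 1, 3, 4]):
        bud.update(f"key_{i}", value)
    assert bud.bud.value == 2  # two distinct values mod 3; a ListStem getter over `value_list` would see 4

SetStem and ListStem buds expose only `.bud`; reading `.petals` raises an AttributeError, as the updated test_flower_pot.py below asserts.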
dkist_processing_common/tests/test_construct_dataset_extras.py
@@ -0,0 +1,224 @@
+from dataclasses import asdict
+from dataclasses import dataclass
+from pathlib import Path
+
+import numpy as np
+import pytest
+from astropy.io import fits
+
+from dkist_processing_common._util.scratch import WorkflowFileSystem
+from dkist_processing_common.codecs.fits import fits_array_decoder
+from dkist_processing_common.codecs.fits import fits_array_encoder
+from dkist_processing_common.models.extras import DatasetExtraHeaderSection
+from dkist_processing_common.models.extras import DatasetExtraType
+from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.models.task_name import TaskName
+from dkist_processing_common.tasks.write_extra import WriteL1DatasetExtras
+from dkist_processing_common.tests.mock_metadata_store import RecipeRunResponseMapping
+from dkist_processing_common.tests.mock_metadata_store import fake_gql_client_factory
+from dkist_processing_common.tests.mock_metadata_store import make_default_recipe_run_response
+
+
+@dataclass
+class FakeConstantDb:
+    INSTRUMENT: str = "VBI"
+    DATASET_ID: str = "DATASETID"
+    AVERAGE_CADENCE: float = 10.0
+    MINIMUM_CADENCE: float = 10.0
+    MAXIMUM_CADENCE: float = 10.0
+    VARIANCE_CADENCE: float = 0.0
+    STOKES_PARAMS: tuple = ("I", "Q", "U", "V")
+    PROPOSAL_ID: str = "PROPID1"
+    EXPERIMENT_ID: str = "EXPERID1"
+    CAMERA_ID: str = "CAMERA1"
+    CAMERA_NAME: str = "Camera One"
+    CAMERA_BIT_DEPTH: int = 16
+    HARDWARE_BINNING_X: int = 1
+    HARDWARE_BINNING_Y: int = 1
+    SOFTWARE_BINNING_X: int = 1
+    SOFTWARE_BINNING_Y: int = 1
+    HLS_VERSION: str = "1.8"
+    WAVELENGTH: float = 854.2
+    # Dark
+    DARK_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+    DARK_DATE_BEGIN: str = "2023-01-01T00:00:00"
+    DARK_DATE_END: str = "2023-01-01T01:00:00"
+    DARK_TELESCOPE_TRACKING_MODE: str = "None"
+    DARK_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+    DARK_TELESCOPE_SCANNING_MODE: str = "None"
+    DARK_AVERAGE_LIGHT_LEVEL: float = 5.0
+    DARK_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+    DARK_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+    DARK_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+    DARK_GOS_LEVEL3_STATUS: str = "clear"
+    DARK_GOS_LEVEL3_LAMP_STATUS: str = "off"
+    DARK_GOS_POLARIZER_STATUS: str = "clear"
+    DARK_GOS_POLARIZER_ANGLE: str = "0.0"
+    DARK_GOS_RETARDER_STATUS: str = "clear"
+    DARK_GOS_RETARDER_ANGLE: str = "0.0"
+    DARK_GOS_LEVEL0_STATUS: str = "DarkShutter"
+    # Solar Gain
+    SOLAR_GAIN_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+    SOLAR_GAIN_DATE_BEGIN: str = "2023-01-01T00:00:00"
+    SOLAR_GAIN_DATE_END: str = "2023-01-01T01:00:00"
+    SOLAR_GAIN_NUM_RAW_FRAMES_PER_FPA: int = 1
+    SOLAR_GAIN_TELESCOPE_TRACKING_MODE: str = "None"
+    SOLAR_GAIN_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+    SOLAR_GAIN_TELESCOPE_SCANNING_MODE: str = "None"
+    SOLAR_GAIN_AVERAGE_LIGHT_LEVEL: float = 5.0
+    SOLAR_GAIN_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+    SOLAR_GAIN_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+    SOLAR_GAIN_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+    SOLAR_GAIN_GOS_LEVEL3_STATUS: str = "clear"
+    SOLAR_GAIN_GOS_LEVEL3_LAMP_STATUS: str = "off"
+    SOLAR_GAIN_GOS_POLARIZER_STATUS: str = "clear"
+    SOLAR_GAIN_GOS_POLARIZER_ANGLE: str = "0.0"
+    SOLAR_GAIN_GOS_RETARDER_STATUS: str = "clear"
+    SOLAR_GAIN_GOS_RETARDER_ANGLE: str = "0.0"
+    SOLAR_GAIN_GOS_LEVEL0_STATUS: str = "DarkShutter"
+    # Polcal
+    POLCAL_OBSERVING_PROGRAM_EXECUTION_IDS: tuple = ("OP1", "OP2", "OP3")
+    POLCAL_DATE_BEGIN: str = "2023-01-01T00:00:00"
+    POLCAL_DATE_END: str = "2023-01-01T01:00:00"
+    POLCAL_NUM_RAW_FRAMES_PER_FPA: int = 1
+    POLCAL_TELESCOPE_TRACKING_MODE: str = "None"
+    POLCAL_COUDE_TABLE_TRACKING_MODE: str = "fixed coude table angle"
+    POLCAL_TELESCOPE_SCANNING_MODE: str = "None"
+    POLCAL_AVERAGE_LIGHT_LEVEL: float = 5.0
+    POLCAL_AVERAGE_TELESCOPE_ELEVATION: float = 45.0
+    POLCAL_AVERAGE_COUDE_TABLE_ANGLE: float = 2.0
+    POLCAL_AVERAGE_TELESCOPE_AZIMUTH: float = 180.0
+
+
+class ConstructDatasetExtrasTest(WriteL1DatasetExtras):
+    def run(self):
+        # Make a dataset extra for each task type
+
+        for task_type in [
+            TaskName.dark,
+            TaskName.solar_gain,
+        ]:
+            filename = self.format_extra_filename(task_type, detail="BEAM1")
+            data = next(
+                self.read(
+                    tags=[Tag.task(task_type), Tag.intermediate()], decoder=fits_array_decoder
+                )
+            )
+            header = self.build_dataset_extra_header(
+                sections=[
+                    DatasetExtraHeaderSection.common,
+                    DatasetExtraHeaderSection.aggregate,
+                    DatasetExtraHeaderSection.iptask,
+                    DatasetExtraHeaderSection.gos,
+                ],
+                filename=filename,
+                task_type=task_type,
+                total_exposure=0.058,
+                readout_exposure=0.029,
+                extra_name=(
+                    DatasetExtraType.dark if task_type == "DARK" else DatasetExtraType.solar_gain
+                ),
+                end_time="2025-01-01T00:00:00",
+            )
+
+            self.assemble_and_write_dataset_extra(data=data, header=header, filename=filename)
+
+        task_type = TaskName.polcal
+        filename = self.format_extra_filename(task_type, detail="BEAM1")
+        data = next(
+            self.read(tags=[Tag.task(task_type), Tag.intermediate()], decoder=fits_array_decoder)
+        )
+        header = self.build_dataset_extra_header(
+            sections=[
+                DatasetExtraHeaderSection.common,
+                DatasetExtraHeaderSection.aggregate,
+                DatasetExtraHeaderSection.iptask,
+                DatasetExtraHeaderSection.gos,
+            ],
+            filename=filename,
+            task_type=task_type,
+            total_exposure=0.058,
+            readout_exposure=0.029,
+            extra_name=DatasetExtraType.demodulation_matrices,
+            end_time="2025-01-01T00:00:00",
+        )
+        self.assemble_and_write_dataset_extra(data=data, header=header, filename=filename)
+
+
+@pytest.fixture()
+def construct_dataset_extras_task(request, recipe_run_id, tmp_path):
+    with ConstructDatasetExtrasTest(
+        recipe_run_id=recipe_run_id,
+        workflow_name="workflow_name",
+        workflow_version="workflow_version",
+    ) as task:
+        task.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, scratch_base_path=tmp_path)
+        # Write an intermediate product to be used as the source for each dataset extra
+        for task_type in [
+            TaskName.dark,
+            TaskName.solar_gain,
+            TaskName.polcal,
+        ]:
+            task.write(
+                data=np.random.random(size=(1, 128, 128)),
+                tags=[Tag.task(task_type), Tag.intermediate()],
+                encoder=fits_array_encoder,
+            )
+        task.constants._update(asdict(FakeConstantDb()))
+        yield task
+        task._purge()
+
+
+@pytest.fixture
+def fake_gql_client_default_configuration():
+    """Create GraphQL client Mock that returns result without recipe run configuration."""
+    recipe_run_response = make_default_recipe_run_response()
+    recipe_run_response.configuration = None
+    new_response_mapping = RecipeRunResponseMapping(response=recipe_run_response)
+    FakeGQLClientDefaultConfiguration = fake_gql_client_factory(
+        response_mapping_override=new_response_mapping
+    )
+
+    return FakeGQLClientDefaultConfiguration
+
+
+def test_construct_dataset_extras(
+    construct_dataset_extras_task, mocker, fake_gql_client_default_configuration
+):
+    """
+    Given: A ConstructDatasetExtras task with source data
+    When: Running the ConstructDatasetExtras task
+    Then: A dataset extra files are produced with expected header values
+    """
+    mocker.patch(
+        "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
+        new=fake_gql_client_default_configuration,
+    )
+    task = construct_dataset_extras_task
+    task()
+    dataset_extra_files = list(task.read(tags=[Tag.output(), Tag.extra()]))
+    assert len(dataset_extra_files) == 3
+    for filename in dataset_extra_files:
+        split_filename = Path(filename).name.split("_")
+        assert split_filename[0] == "VBI"
+        assert split_filename[1] == task.constants.dataset_id
+        assert split_filename[-2] == "BEAM1"
+        assert split_filename[-1] == "1.fits"
+        hdul = fits.open(filename)
+        for i in range(1, len(hdul)):
+            assert isinstance(hdul[i], fits.CompImageHDU)
+            header = hdul[i].header
+            assert header["LINEWAV"] == 854.2
+            assert header["INSTRUME"] == "VBI"
+            assert header["ATAZIMUT"] == 180.0
+            assert header["FRAMEVOL"] is not None
+            assert header["IDSOBSID"] == 2
+            assert header["XPOSURE"] == 0.058
+            assert header["OBSPR_ID"] == "OP1"
+            assert header["EXTOBSID"] == "OP2,OP3"
+            assert header["EXTNAME"] in ["DARK", "SOLAR GAIN", "DEMODULATION MATRICES"]
+            if header["IPTASK"] == "POLCAL":
+                assert "POLANGLE" not in header
+            else:
+                assert header.get("POLANGLE") == "0.0"
+                assert header.get("RETANGLE") == "0.0"
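
The 224-line test above is also the clearest usage reference for the new task. Distilled down, the subclass contract it exercises is: pick a filename, read an intermediate array, build a header from the requested sections, and write the tagged extra. A condensed sketch of that contract follows (dark case only, using the same illustrative values as the test; `WriteDarkExtra` is a hypothetical subclass name, not part of the package):

    from dkist_processing_common.codecs.fits import fits_array_decoder
    from dkist_processing_common.models.extras import DatasetExtraHeaderSection
    from dkist_processing_common.models.extras import DatasetExtraType
    from dkist_processing_common.models.tags import Tag
    from dkist_processing_common.models.task_name import TaskName
    from dkist_processing_common.tasks.write_extra import WriteL1DatasetExtras


    class WriteDarkExtra(WriteL1DatasetExtras):  # illustrative subclass, not part of the package
        def run(self):
            task_type = TaskName.dark
            # 1. Name the extra file
            filename = self.format_extra_filename(task_type, detail="BEAM1")
            # 2. Read the intermediate array that becomes the extra's payload
            data = next(
                self.read(tags=[Tag.task(task_type), Tag.intermediate()], decoder=fits_array_decoder)
            )
            # 3. Build the header from the requested sections plus per-extra metadata
            header = self.build_dataset_extra_header(
                sections=[
                    DatasetExtraHeaderSection.common,
                    DatasetExtraHeaderSection.aggregate,
                    DatasetExtraHeaderSection.iptask,
                    DatasetExtraHeaderSection.gos,
                ],
                filename=filename,
                task_type=task_type,
                total_exposure=0.058,
                readout_exposure=0.029,
                extra_name=DatasetExtraType.dark,
                end_time="2025-01-01T00:00:00",
            )
            # 4. Assemble and write the tagged dataset-extra FITS file
            self.assemble_and_write_dataset_extra(data=data, header=header, filename=filename)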

dkist_processing_common/tests/test_flower_pot.py
@@ -4,11 +4,13 @@ from typing import Hashable
 import pytest
 
 from dkist_processing_common.models.flower_pot import FlowerPot
+from dkist_processing_common.models.flower_pot import ListStem
+from dkist_processing_common.models.flower_pot import SetStem
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.models.flower_pot import Stem
 
 
-@pytest.fixture
+@pytest.fixture
 def simple_flower():
     class Flower(Stem):
         def setter(self, value: Any) -> Any:
@@ -22,7 +24,7 @@ def simple_flower():
     return Flower(stem_name="simple_flower")
 
 
-@pytest.fixture
+@pytest.fixture
 def simple_flower_pot(simple_flower):
     flower_pot = FlowerPot()
     flower_pot.stems += [simple_flower]
@@ -30,16 +32,66 @@ def simple_flower_pot(simple_flower):
     return flower_pot
 
 
-@pytest.fixture
+@pytest.fixture
 def simple_key_values():
     return {f"thing{i}": i for i in range(5)}
 
 
+@pytest.fixture
+def stem_bud():
+    class Bud(Stem):
+        def setter(self, value: int) -> int:
+            return value % 3
+
+        def getter(self, key: str) -> int:
+            return len(set(self.key_to_petal_dict.values()))
+
+    return Bud(stem_name="StemBud")
+
+
+@pytest.fixture
+def setstem_bud():
+    # Computes the same result as `stem_bud`
+    class SetBud(SetStem):
+        def setter(self, value: int) -> int:
+            return value % 3
+
+        def getter(self) -> int:
+            return len(self.value_set)
+
+    return SetBud(stem_name="SetStemBud")
+
+
+@pytest.fixture
+def liststem_bud():
+    # Highlights the difference between using a `set` and a `list` in these more efficient buds
+    class ListBud(ListStem):
+        def setter(self, value: int) -> int:
+            return value % 3
+
+        def getter(self) -> int:
+            return len(self.value_list)
+
+    return ListBud(stem_name="ListStemBud")
+
+
+@pytest.fixture
+def simple_bud_pot(stem_bud, setstem_bud, liststem_bud):
+    bud_pot = FlowerPot()
+    bud_pot.stems += [stem_bud, liststem_bud, setstem_bud]
+    return bud_pot
+
+
+@pytest.fixture
+def bud_key_values():
+    return {f"DOESN'T_MATTER_THAT'S_THE_POINT_{i}": i for i in [0, 1, 3, 4]}
+
+
 def test_simple_flower_pot(simple_flower_pot, simple_key_values):
     """
     Given: A FlowerPot with a simple Flower
-    When: Updating
-    Then: The
+    When: Updating flower with key: value pairs
+    Then: The flower are correctly updated
     """
     assert len(simple_flower_pot) == 1
 
@@ -51,6 +103,8 @@ def test_simple_flower_pot(simple_flower_pot, simple_key_values):
 
     petals = sorted(list(flower.petals), key=lambda x: x.value)
     assert len(petals) == 2
+    assert flower.bud.value == petals[0].value
+    assert flower.bud.keys == petals[0].keys
     assert petals[0].value == 0
     assert petals[0].keys == ["thing0", "thing2", "thing4"]
     assert petals[1].value == 1
@@ -67,8 +121,12 @@ def test_cached_petal(simple_flower):
     value1 = 4
     simple_flower.update(key1, value1)
    assert len(simple_flower.petals) == 1
+
+    # Assert twice to hit the cache
     assert simple_flower.petals[0].value == value1 % 2  # % 2 because of simple_flower's `setter`
     assert simple_flower.petals[0].keys == [key1]
+    assert simple_flower.petals[0].value == value1 % 2
+    assert simple_flower.petals[0].keys == [key1]
 
     key2 = "thing2"
     value2 = 3
@@ -79,6 +137,10 @@ def test_cached_petal(simple_flower):
     assert sorted_petals[0].keys == [key1]
     assert sorted_petals[1].value == value2 % 2
     assert sorted_petals[1].keys == [key2]
+    assert sorted_petals[0].value == value1 % 2
+    assert sorted_petals[0].keys == [key1]
+    assert sorted_petals[1].value == value2 % 2
+    assert sorted_petals[1].keys == [key2]
 
 
 def test_spilled_dirt_flower(simple_flower):
@@ -103,3 +165,83 @@ def test_unhashable_dirt(simple_flower_pot):
     value = "never gonna get here"
     with pytest.raises(TypeError):
         simple_flower_pot.add_dirt(unhashable_key, value)
+
+
+def test_buds(simple_bud_pot, bud_key_values):
+    """
+    Given: A Flower pot with two Buds that compute the same thing; one a `Stem` and one a `SetStem`, and a `ListStem` bud
+    When: Updating the pot with key: value pairs
+    Then: The computed buds are correct and the `Stem` and `SetStem` buds match
+    """
+    assert len(simple_bud_pot) == 3
+
+    assert simple_bud_pot[0].stem_name == "StemBud"
+    assert simple_bud_pot[1].stem_name == "ListStemBud"
+    assert simple_bud_pot[2].stem_name == "SetStemBud"
+
+    for k, m in bud_key_values.items():
+        simple_bud_pot.add_dirt(k, m)
+
+    assert simple_bud_pot[0].bud.value == 2
+    assert simple_bud_pot[1].bud.value == 4
+    assert simple_bud_pot[2].bud.value == simple_bud_pot[0].bud.value
+
+    assert len(simple_bud_pot[0].petals) == 1
+    assert simple_bud_pot[0].petals[0].value == 2
+    assert simple_bud_pot[0].petals[0].keys == [
+        f"DOESN'T_MATTER_THAT'S_THE_POINT_{i}" for i in [0, 1, 3, 4]
+    ]
+
+    with pytest.raises(
+        AttributeError,
+        match="ListBud subclasses ListStem and therefore does not define the `petals` property",
+    ):
+        _ = simple_bud_pot[1].petals
+
+    with pytest.raises(
+        AttributeError,
+        match="SetBud subclasses SetStem and therefore does not define the `petals` property",
+    ):
+        _ = simple_bud_pot[2].petals
+
+
+def test_liststem_cached_bud(liststem_bud):
+    """
+    Given: A `ListStem` instance and some different input values
+    When: Computing the `bud` property after each value is ingested
+    Then: The `bud` value correctly updates based on the state of the `ListStem` object
+    """
+    key = "Who cares"
+    value1 = 3
+    liststem_bud.update(key, value1)
+
+    # Assert twice so we hit the cache
+    assert liststem_bud.bud.value == 1
+    assert liststem_bud.bud.value == 1
+
+    value2 = 1
+    liststem_bud.update(key, value2)
+
+    assert liststem_bud.bud.value == 2
+    assert liststem_bud.bud.value == 2
+
+
+def test_setstem_cached_bud(setstem_bud):
+    """
+    Given: A `SetStem` instance and some different input values
+    When: Computing the `bud` property after each value is ingested
+    Then: The `bud` value correctly updates based on the state of the `SetStem` object
+    """
+    key = "Who cares"
+    value1 = 3
+    setstem_bud.update(key, value1)
+
+    # Assert twice so we hit the cache
+    assert setstem_bud.bud.value == 1
+    assert setstem_bud.bud.value == 1
+
+    value2 = 1
+    setstem_bud.update(key, value2)
+
+    assert setstem_bud.bud.value == 2
+    assert setstem_bud.bud.value == 2

dkist_processing_common/tests/test_output_data_base.py
@@ -57,7 +57,11 @@ def transfer_data_task(recipe_run_id, tmp_path, mocker, fake_gql_client):
         unwanted_file_obj = uuid4().hex.encode("utf8")
         task.write(unwanted_file_obj, tags=[Tag.frame()])
 
-
+        # Write a dataset extra
+        extra_file_obj = uuid4().hex.encode("utf8")
+        task.write(extra_file_obj, tags=[Tag.output(), Tag.extra()])
+
+        yield task, output_file_obj, extra_file_obj
         task._purge()
 
 
@@ -81,7 +85,7 @@ def test_build_output_frame_transfer_list(transfer_data_task):
     When: Building a transfer list of all OUTPUT frames
     Then: All OUTPUT frames are listed and no non-OUTPUT frames are listed
     """
-    task, output_file_obj = transfer_data_task
+    task, output_file_obj, _ = transfer_data_task
 
     transfer_list = task.build_output_frame_transfer_list()
 
@@ -89,3 +93,21 @@ def test_build_output_frame_transfer_list(transfer_data_task):
     transfer_item = transfer_list[0]
     with transfer_item.source_path.open(mode="rb") as f:
         assert output_file_obj == f.read()
+
+
+def test_build_dataset_extra_transfer_list(transfer_data_task):
+    """
+    Given: A task based on TransferDataBase with some files, some of which are EXTRA_OUTPUT
+    When: Building a transfer list of all EXTRA_OUTPUT frames
+    Then: All EXTRA_OUTPUT frames are listed and no non-EXTRA_OUTPUT frames are listed
+    """
+    task, _, extra_file_obj = transfer_data_task
+
+    transfer_list = task.build_dataset_extra_transfer_list()
+
+    assert len(transfer_list) == 1
+    transfer_item = transfer_list[0]
+    assert "/extra/" not in str(transfer_item.source_path)
+    assert "/extra/" in str(transfer_item.destination_path)
+    with transfer_item.source_path.open(mode="rb") as f:
+        assert extra_file_obj == f.read()
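
The transfer test above pairs with the new `Tag.extra()` tag: anything a task writes with both `Tag.output()` and `Tag.extra()` is picked up by `build_dataset_extra_transfer_list()` and routed under an `/extra/` destination prefix. A minimal sketch of that flow, assuming `task` is an in-scope instance of a `TransferDataBase` subclass as in the fixture above:

    from dkist_processing_common.models.tags import Tag

    payload = b"dataset-extra bytes"  # any encoded extra product
    task.write(payload, tags=[Tag.output(), Tag.extra()])

    transfer_list = task.build_dataset_extra_transfer_list()
    item = transfer_list[0]
    # The source stays in scratch; only the destination gains the /extra/ prefix
    assert "/extra/" not in str(item.source_path)
    assert "/extra/" in str(item.destination_path)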

dkist_processing_common/tests/test_parse_l0_input_data.py
@@ -12,6 +12,8 @@ from dkist_processing_common._util.scratch import WorkflowFileSystem
 from dkist_processing_common.codecs.fits import fits_hdulist_encoder
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.models.fits_access import FitsAccessBase
+from dkist_processing_common.models.flower_pot import ListStem
+from dkist_processing_common.models.flower_pot import SetStem
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.models.flower_pot import Thorn
@@ -156,7 +158,27 @@ def empty_buds():
         def getter(self, key):
             pass  # We'll never get here because we spilled the dirt
 
-
+    class EmptyListBud(ListStem):
+        def __init__(self):
+            super().__init__(stem_name="EMPTY_LIST_BUD")
+
+        def setter(self, value):
+            return SpilledDirt
+
+        def getter(self):
+            pass
+
+    class EmptySetBud(SetStem):
+        def __init__(self):
+            super().__init__(stem_name="EMPTY_SET_BUD")
+
+        def setter(self, value):
+            return SpilledDirt
+
+        def getter(self):
+            pass
+
+    return [EmptyBud(), EmptyListBud(), EmptySetBud()]
 
 
 @pytest.fixture()
@@ -266,13 +288,15 @@ def test_make_flowerpots(parse_inputs_task):
     tag_pot, constant_pot = parse_inputs_task.make_flower_pots()
 
     assert len(tag_pot.stems) == 2
-    assert len(constant_pot.stems) ==
+    assert len(constant_pot.stems) == 6
     assert tag_pot.stems[0].stem_name == StemName.modstate
     assert tag_pot.stems[1].stem_name == "EMPTY_FLOWER"
     assert constant_pot.stems[0].stem_name == BudName.num_modstates
     assert constant_pot.stems[1].stem_name == "LOOKUP_BUD"
     assert constant_pot.stems[2].stem_name == "EMPTY_BUD"
-    assert constant_pot.stems[3].stem_name == "
+    assert constant_pot.stems[3].stem_name == "EMPTY_LIST_BUD"
+    assert constant_pot.stems[4].stem_name == "EMPTY_SET_BUD"
+    assert constant_pot.stems[5].stem_name == "PICKY_BUD"
 
 
 def test_subclass_flowers(visp_parse_inputs_task, max_cs_step_time_sec):
@@ -336,7 +360,7 @@ def test_constants_correct(parse_inputs_task):
     parse_inputs_task.update_constants(constant_pot)
     assert dict(parse_inputs_task.constants._db_dict) == {
         BudName.num_modstates.value: parse_inputs_task._num_mod,
-        "LOOKUP_BUD":
+        "LOOKUP_BUD": {str(parse_inputs_task._num_mod): [0, 1]},
     }
 
 