dkist-processing-common 11.7.0rc6__py3-none-any.whl → 11.8.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
changelog/245.feature.1.rst ADDED
@@ -0,0 +1 @@
+ Add parameters and their associated values relevant to a particular processing pipeline run to the metadata ASDF file generated in trial workflows.
dkist_processing_common/tasks/trial_catalog.py CHANGED
@@ -5,15 +5,18 @@ import logging
  from datetime import datetime
  from itertools import chain
  from pathlib import Path
+ from typing import Any
  from typing import Generator
  from uuid import uuid4

  from dkist_processing_common.codecs.asdf import asdf_fileobj_encoder
+ from dkist_processing_common.codecs.basemodel import basemodel_decoder
  from dkist_processing_common.codecs.fits import fits_access_decoder
  from dkist_processing_common.codecs.json import json_encoder
  from dkist_processing_common.codecs.path import path_decoder
  from dkist_processing_common.codecs.quality import quality_data_decoder
  from dkist_processing_common.models.fits_access import FitsAccessBase
+ from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks.output_data_base import OutputDataBase

@@ -138,11 +141,15 @@ class CreateTrialAsdf(OutputDataBase):

      def run(self) -> None:
          """Generate an ASDF file simulating the ASDF file that would be produced when cataloging the dataset."""
+         with self.telemetry_span("Collate input dataset parameters"):
+             parameters = self.parse_input_dataset_parameters()
+
          with self.telemetry_span("Generate ASDF tree"):
              tree = asdf_tree_from_filenames(
                  filenames=self.absolute_output_frame_paths,
                  hdu=1,  # compressed
                  relative_to=self.scratch.workflow_base_path,
+                 parameters=parameters,
              )

          trial_history = [
@@ -169,6 +176,37 @@
              ),
          )

+     def parse_input_dataset_parameters(self) -> list[dict[str, Any]]:
+         """
+         Return the parameters associated with the dataset.
+
+         Returns
+         -------
+         list[dict[str, Any]]
+             A list of dictionaries, each containing a parameter name and its values.
+
+         Raises
+         ------
+         ValueError
+             If there is not exactly one ``InputDatasetPartDocumentList`` found.
+         """
+         part_docs_iter = self.read(
+             tags=Tag.input_dataset_parameters(),
+             decoder=basemodel_decoder,
+             model=InputDatasetPartDocumentList,
+         )
+         docs = list(part_docs_iter)
+
+         if not docs:
+             logger.warning("No parameter list decoded from files")
+             return []
+
+         if len(docs) > 1:
+             raise ValueError(f"Expected 1 parameter list, found {len(docs)}")
+
+         parameters = docs[0].model_dump(by_alias=True).get("doc_list", [])
+         return parameters
+

  class CreateTrialQualityReport(OutputDataBase):
      """
dkist_processing_common/tests/mock_metadata_store.py CHANGED
@@ -6,6 +6,7 @@ import json
  from abc import ABC
  from abc import abstractmethod
  from datetime import datetime
+ from datetime import timedelta
  from pathlib import Path
  from uuid import uuid4

@@ -234,3 +235,41 @@ def fake_gql_client():
      Convenience fixture for default mock GQL client. To customize, use fake_gql_client_factory.
      """
      return fake_gql_client_factory()
+
+
+ def input_dataset_parameters_part_factory(
+     parameter_count: int = 1,
+     parameter_value_count: int = 1,
+     has_date: bool = False,
+     has_file: bool = False,
+ ) -> list[dict]:
+     """Create a mock InputDatasetPartDocumentList with parameters."""
+     result = [
+         {
+             "parameterName": uuid4().hex[:6],
+             "parameterValues": [
+                 {"parameterValueId": i, "parameterValue": json.dumps(uuid4().hex)}
+                 for i in range(parameter_value_count)
+             ],
+         }
+         for _ in range(parameter_count)
+     ]
+     if has_date:
+         base = datetime(2018, 9, 14, 0, 0, 0)  # This date is before any possible start dates
+         for parameter_index, data in enumerate(result):
+             for item in data["parameterValues"]:
+                 dt = base + timedelta(days=parameter_index)
+                 item["parameterValueStartDate"] = dt.isoformat()
+     if has_file:
+         for data in result:
+             param_list = data["parameterValues"]
+             for item in param_list:
+                 item["parameterValue"] = json.dumps(
+                     {
+                         "__file__": {
+                             "bucket": "data",
+                             "objectKey": f"parameters/{data['parameterName']}/{uuid4().hex}.dat",
+                         }
+                     }
+                 )
+     return result
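Relocating the factory here lets both test_input_dataset.py and test_trial_catalog.py share it. A short usage sketch, mirroring the `mock_input_dataset_parts` fixture added below:

from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
from dkist_processing_common.tests.mock_metadata_store import input_dataset_parameters_part_factory

# Two parameters, each with one dated value; parameter names are random hex strings.
raw = input_dataset_parameters_part_factory(parameter_count=2, has_date=True)
parts = InputDatasetPartDocumentList.model_validate({"doc_list": raw})
assert len(parts.doc_list) == 2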
dkist_processing_common/tests/test_input_dataset.py CHANGED
@@ -1,5 +1,4 @@
  import json
- from datetime import datetime
  from typing import Any
  from uuid import uuid4

@@ -8,6 +7,7 @@ import pytest
  from dkist_processing_common.codecs.basemodel import basemodel_decoder
  from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
  from dkist_processing_common.models.tags import Tag
+ from dkist_processing_common.tests.mock_metadata_store import input_dataset_parameters_part_factory


  def input_dataset_frames_part_factory(bucket_count: int = 1) -> list[dict]:
@@ -25,42 +25,6 @@ def flatten_frame_parts(frame_parts: list[dict]) -> list[tuple[str, str]]:
      return result


- def input_dataset_parameters_part_factory(
-     parameter_count: int = 1,
-     parameter_value_count: int = 1,
-     has_date: bool = False,
-     has_file: bool = False,
- ) -> list[dict]:
-     result = [
-         {
-             "parameterName": uuid4().hex[:6],
-             "parameterValues": [
-                 {"parameterValueId": i, "parameterValue": json.dumps(uuid4().hex)}
-                 for i in range(parameter_value_count)
-             ],
-         }
-         for _ in range(parameter_count)
-     ]
-     if has_date:
-         for data in result:
-             param_list = data["parameterValues"]
-             for item in param_list:
-                 item["parameterValueStartDate"] = datetime(2022, 9, 14).isoformat()
-     if has_file:
-         for data in result:
-             param_list = data["parameterValues"]
-             for item in param_list:
-                 item["parameterValue"] = json.dumps(
-                     {
-                         "__file__": {
-                             "bucket": "data",
-                             "objectKey": f"parameters/{data['parameterName']}/{uuid4().hex}.dat",
-                         }
-                     }
-                 )
-     return result
-
-
  @pytest.mark.parametrize(
      "input_dataset_parts",
      [
dkist_processing_common/tests/test_trial_catalog.py CHANGED
@@ -12,14 +12,30 @@ from sqids import Sqids

  from dkist_processing_common._util.scratch import WorkflowFileSystem
  from dkist_processing_common.codecs.asdf import asdf_decoder
+ from dkist_processing_common.codecs.basemodel import basemodel_encoder
  from dkist_processing_common.codecs.bytes import bytes_decoder
  from dkist_processing_common.codecs.fits import fits_hdulist_encoder
  from dkist_processing_common.codecs.json import json_decoder
  from dkist_processing_common.codecs.quality import quality_data_encoder
+ from dkist_processing_common.models.input_dataset import InputDatasetParameter
+ from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks import CreateTrialAsdf
  from dkist_processing_common.tasks import CreateTrialDatasetInventory
  from dkist_processing_common.tasks import CreateTrialQualityReport
+ from dkist_processing_common.tests.mock_metadata_store import input_dataset_parameters_part_factory
+
+
+ @pytest.fixture()
+ def mock_input_dataset_parts() -> InputDatasetPartDocumentList:
+     """An InputDatasetPartDocumentList with two parameters, each with one value and a date."""
+     raw = input_dataset_parameters_part_factory(
+         parameter_count=2,
+         parameter_value_count=1,
+         has_date=True,
+         has_file=False,
+     )
+     return InputDatasetPartDocumentList.model_validate({"doc_list": raw})


  @pytest.fixture()
@@ -41,6 +57,24 @@ def scratch_with_l1_frames(recipe_run_id, tmp_path) -> WorkflowFileSystem:
          scratch.write(
              file_obj, tags=[Tag.output(), Tag.frame()], relative_path=f"{uuid4().hex}.dat"
          )
+
+     return scratch
+
+
+ @pytest.fixture()
+ def scratch_with_l1_frames_and_parameters(
+     scratch_with_l1_frames, mock_input_dataset_parts
+ ) -> WorkflowFileSystem:
+     """Scratch instance for a recipe run id with tagged L1 frames and input parameters."""
+     scratch = scratch_with_l1_frames
+
+     # Write validated Pydantic model bytes expected by InputDatasetPartDocumentList
+     file_obj = basemodel_encoder(mock_input_dataset_parts)
+     scratch.write(
+         file_obj,
+         tags=Tag.input_dataset_parameters(),
+         relative_path=f"{uuid4().hex}.json",
+     )
      return scratch


@@ -85,6 +119,22 @@ def create_trial_asdf_task(
      task._purge()


+ @pytest.fixture(scope="function")
+ def create_trial_asdf_task_with_params(
+     recipe_run_id, tmp_path, scratch_with_l1_frames_and_parameters, fake_constants_db
+ ) -> CreateTrialAsdf:
+     """An instance of CreateTrialAsdf with L1 frames and input parameters tagged in scratch."""
+     task = CreateTrialAsdf(
+         recipe_run_id=recipe_run_id,
+         workflow_name="trial_asdf",
+         workflow_version="trial_asdf_version",
+     )
+     task.scratch = scratch_with_l1_frames_and_parameters
+     task.constants._update(fake_constants_db)
+     yield task
+     task._purge()
+
+
  @pytest.fixture()
  def create_trial_quality_report_task(
      recipe_run_id, tmp_path, fake_constants_db
@@ -143,25 +193,32 @@ def test_create_trial_dataset_inventory(create_trial_dataset_inventory_task):
      assert len(inventory) > 20  # a bunch


- def test_create_trial_asdf(create_trial_asdf_task, recipe_run_id):
+ @pytest.mark.parametrize("with_params", [False, True], ids=["no_params", "with_params"])
+ def test_create_trial_asdf(with_params, request, recipe_run_id, mock_input_dataset_parts):
      """
      :Given: An instance of CreateTrialAsdf with L1 frames tagged in scratch
      :When: CreateTrialAsdf is run
      :Then: An asdf file for the dataset is tagged in scratch
      """
-     task = create_trial_asdf_task
+     task = request.getfixturevalue(
+         "create_trial_asdf_task_with_params" if with_params else "create_trial_asdf_task"
+     )
      # When
      task()
+
      # Then
      asdf_tags = [Tag.output(), Tag.asdf()]
      filepaths = list(task.scratch.find_all(tags=asdf_tags))
      assert len(filepaths) == 1
      dataset_id = Sqids(min_length=6, alphabet=ascii_uppercase).encode([recipe_run_id])
      assert filepaths[0].name == f"INSTRUMENT_L1_20240416T160000_{dataset_id}_metadata.asdf"
+
      results = list(task.read(tags=asdf_tags, decoder=asdf_decoder))
      assert len(results) == 1
+
      tree = results[0]
      assert isinstance(tree, dict)
+
      for file_name in tree["dataset"].files.filenames:
          # This is a slightly better check than `not Path(file_name).is_absolute()` because it confirms
          # we've correctly stripped the path of *all* parents (not just those that start at root).
@@ -169,6 +226,19 @@ def test_create_trial_asdf(create_trial_asdf_task, recipe_run_id):
          # `scratch.workflow_base_path`
          assert Path(file_name).name == file_name

+     # Only check parameters when present
+     ds = tree["dataset"]
+     assert "parameters" in ds.meta
+     parameters = ds.meta["parameters"]
+     assert isinstance(parameters, list)
+     if with_params:
+         assert parameters, f"ASDF tree must include input parameters: {parameters}"
+         assert len(parameters) == len(mock_input_dataset_parts.doc_list)
+         for param in parameters:
+             assert InputDatasetParameter.model_validate(param) in mock_input_dataset_parts.doc_list
+     else:
+         assert ds.meta["parameters"] == []
+

  def test_create_trial_quality_report(create_trial_quality_report_task):
      """
{dkist_processing_common-11.7.0rc6.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-common
- Version: 11.7.0rc6
+ Version: 11.8.0rc1
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -57,9 +57,9 @@ Requires-Dist: pytest; extra == "docs"
  Requires-Dist: towncrier<22.12.0; extra == "docs"
  Requires-Dist: dkist-sphinx-theme; extra == "docs"
  Provides-Extra: inventory
- Requires-Dist: dkist-inventory<2.0,>=1.6.0; extra == "inventory"
+ Requires-Dist: dkist-inventory<2.0,>=1.11.0; extra == "inventory"
  Provides-Extra: asdf
- Requires-Dist: dkist-inventory[asdf]<2.0,>=1.6.0; extra == "asdf"
+ Requires-Dist: dkist-inventory[asdf]<2.0,>=1.11.0; extra == "asdf"
  Provides-Extra: quality
  Requires-Dist: dkist-quality<3.0,>=2.0.0; extra == "quality"

{dkist_processing_common-11.7.0rc6.dist-info → dkist_processing_common-11.8.0rc1.dist-info}/RECORD RENAMED
@@ -1,10 +1,5 @@
  changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- changelog/267.feature.1.rst,sha256=G5Xl7EwiRKAtftSUvalfF7iUL-WgqIvnQ3k6wyTYzaw,98
- changelog/267.feature.2.rst,sha256=1z0TqX3159m-wFvEgElBjUp0XPWQvmqMlVlf4BzO-J8,99
- changelog/267.feature.rst,sha256=g4bBK3EFQiQljW9lQrFh6rVOL8C6yxBYbXI4WOH8NE0,65
- changelog/267.misc.rst,sha256=ciUqUesBbdxh41euyLhyD3Bl6IVT2TSIGeOg3LFjIvg,105
- changelog/267.removal.1.rst,sha256=qiGl1FAy9ioLwfCh8EvM788wjFbOY3jM3eUJHPOzpu8,207
- changelog/267.removal.rst,sha256=ecpctwJ-AE59sfbsTy-j3GYaPaN8h6oWphXUMNQ42Yw,103
+ changelog/245.feature.1.rst,sha256=BBwCgALdFPqX7ZI0Uj-z9piaGplVKbgErZ6LuMH3qbA,148
  dkist_processing_common/__init__.py,sha256=GQ9EBnYhkOnt-qODclAoLS_g5YVhurxfg1tjVtI9rDI,320
  dkist_processing_common/config.py,sha256=kkFm-Q6jGGoV3Zdf3PSfk3xNYd2vSH0J2R8dBBav3Og,5218
  dkist_processing_common/manual.py,sha256=bIVVyLsbXMh-g_2L3kGROL-1TtJe0_XviHsp7Br31x8,7023
@@ -70,7 +65,7 @@ dkist_processing_common/tasks/parse_l0_input_data.py,sha256=KguXT0Xavynu7C8NFMjs
  dkist_processing_common/tasks/quality_metrics.py,sha256=cvGF6tJ8yAvxOvkeG3tWxYwL885BrFW5X3V7_MSzL-A,12481
  dkist_processing_common/tasks/teardown.py,sha256=rwT9lWINVDF11-az_nx-Z5ykMTX_SJCchobpU6sErgk,2360
  dkist_processing_common/tasks/transfer_input_data.py,sha256=DAYfS-B1o-iBT9MXU-TiJG4Hv05Z0c_JzPrnFgvnK9g,5786
- dkist_processing_common/tasks/trial_catalog.py,sha256=iAaMT_oLnupA1O3xAtqVjsqRY5f_hyvMps-fXg6KlHU,8729
+ dkist_processing_common/tasks/trial_catalog.py,sha256=43XN4Hz4uNBGEps3O5RLfFHRfyo9FZPzMwkcapniztQ,10049
  dkist_processing_common/tasks/trial_output_data.py,sha256=CPMXXODvN5RTcu9bTF8v6AXciCl212EWP6qTiARvUNk,6837
  dkist_processing_common/tasks/write_l1.py,sha256=Xy834RTp3F95kLcW4ba5gfHMUocfZd82ZQQKnvQcP2M,23204
  dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
@@ -83,7 +78,7 @@ dkist_processing_common/tasks/mixin/quality/_base.py,sha256=qt9TZZ140skFWFmabrjl
  dkist_processing_common/tasks/mixin/quality/_metrics.py,sha256=k0hlthVbW7Jso9q_P0-hWGfp190kQO_oUQyGBRvxgqo,60626
  dkist_processing_common/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dkist_processing_common/tests/conftest.py,sha256=Tm-Yq956EAafpDtu1d7JjdVY0Unp9e4z9ak-yf4wnH4,22320
- dkist_processing_common/tests/mock_metadata_store.py,sha256=fbCvSk1-s0ojN6l538RWodPW7dx6k4eXqipemnHKO0Y,8248
+ dkist_processing_common/tests/mock_metadata_store.py,sha256=-uzwbLeRn19_Pq1hr97R4hpUD5K5sv8Zy1U8gG0GdSQ,9657
  dkist_processing_common/tests/test_assemble_movie.py,sha256=dyVhowxB-Kc6GuxlDs74UrPtK9fwdUL7y5haA3Bidz0,4065
  dkist_processing_common/tests/test_assemble_quality.py,sha256=-F22jMY6mPy65VZ1TZY2r1vsxMXOPmZHArGx70OD3BA,17832
  dkist_processing_common/tests/test_base.py,sha256=gsyBG2R6Ufx7CzbHeGMagUwM9yCfpN4gCSZ6-aH2q48,6643
@@ -94,7 +89,7 @@ dkist_processing_common/tests/test_dkist_location.py,sha256=-_OoSw4SZDLFyIuOltHv
  dkist_processing_common/tests/test_fits_access.py,sha256=a50B4IAAH5NH5zeudTqyy0b5uWKJwJuzQLUdK1LoOHM,12832
  dkist_processing_common/tests/test_flower_pot.py,sha256=X9_UI3maa3ZQncV3jYHgovWnawDsdEkEB5vw6EAB96o,3151
  dkist_processing_common/tests/test_fried_parameter.py,sha256=iXtlQIifZ6cDOkEi-YDgP3oAlss2loq08Uohgvy1byQ,1295
- dkist_processing_common/tests/test_input_dataset.py,sha256=pQ01rWAkQ2XQojyHWzAqeOdrMXshNcgEVL5I_9bBTdo,9610
+ dkist_processing_common/tests/test_input_dataset.py,sha256=wnQbZxBYywG5CEXces2WWk6I0QA7HjStaYSTVVbe5r0,8499
  dkist_processing_common/tests/test_interservice_bus.py,sha256=QrBeZ8dh497h6nxA8-aVUIGDcSj8y9DIXIk9I_HkXr0,3001
  dkist_processing_common/tests/test_interservice_bus_mixin.py,sha256=IptJkW7Qeu2Y742NKXEgkok2VdS600keLgCD3Y9iw3A,4131
  dkist_processing_common/tests/test_manual_processing.py,sha256=iHF7yQPlar9niYAGXtFv28Gw3Undlds38yMfszk4ccY,1037
@@ -113,7 +108,7 @@ dkist_processing_common/tests/test_task_parsing.py,sha256=2_OOmeZQWD17XAd_ECYmod
  dkist_processing_common/tests/test_teardown.py,sha256=DaliHSGsiQBZaFkf5wb3XBo6rHNPmx2bmQtVymYeBN4,5601
  dkist_processing_common/tests/test_transfer_input_data.py,sha256=eyAAWXpTHQ8aew87-MncWpYBn4DAZrTSOL3LvlQfR5Q,12611
  dkist_processing_common/tests/test_transfer_l1_output_data.py,sha256=PVGDJBEUk4kAeu8ivrhlCE7yd29R18t9kZLFx-mpBwY,2063
- dkist_processing_common/tests/test_trial_catalog.py,sha256=Efq4yP1_0jccCFTi0_inyf4utC-oJVpGoAZ13fBP7I0,6778
+ dkist_processing_common/tests/test_trial_catalog.py,sha256=CxjtVABE5Fw2EvyXR56IJ3PPi9QvEOjccH0OzzRWk30,9424
  dkist_processing_common/tests/test_trial_output_data.py,sha256=YwmSfhNsSG-kdnvNlq7Ku0PNi_onTvU6uxn_nLiAKZw,12008
  dkist_processing_common/tests/test_workflow_task_base.py,sha256=LTVusltNrsGUOvw9G323am4CXebgE4tJhP6gZCcS0CQ,10457
  dkist_processing_common/tests/test_write_l1.py,sha256=alN-lozKEm6vKNdhtvzjnuPqv-NjHyUg16Op7SkMH-c,27964
@@ -125,7 +120,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
  docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
  docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
  licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
- dkist_processing_common-11.7.0rc6.dist-info/METADATA,sha256=6JeOGbgzvzWIZH1VFNxUDvdoxhCDulcTuAjq8aEIebQ,13323
- dkist_processing_common-11.7.0rc6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dkist_processing_common-11.7.0rc6.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
- dkist_processing_common-11.7.0rc6.dist-info/RECORD,,
+ dkist_processing_common-11.8.0rc1.dist-info/METADATA,sha256=k1cSXjEW_NNR3JwR7h55s9SFLH-H2k4XfT-RigHlAsQ,13325
+ dkist_processing_common-11.8.0rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dkist_processing_common-11.8.0rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+ dkist_processing_common-11.8.0rc1.dist-info/RECORD,,
changelog/267.feature.1.rst DELETED
@@ -1 +0,0 @@
- Add new bud types TaskAverageBud and TaskBeginDateBud, which are based on the new TaskDatetimeBudBase.
changelog/267.feature.2.rst DELETED
@@ -1 +0,0 @@
- Add new bud type TaskContributingIdsBud, based on ContributingIdsBud, for specific task types.
changelog/267.feature.rst DELETED
@@ -1 +0,0 @@
- Add new buds to parsing for what will become the dataset extras.
changelog/267.misc.rst DELETED
@@ -1 +0,0 @@
- Rename TimeFlowerBase and TaskTimeBudBase to RoundTimeFlowerBase and TaskRoundTimeBudBase, respectively.
changelog/267.removal.1.rst DELETED
@@ -1,2 +0,0 @@
- Remove the `_set_metadata_key_value` method from FitsAccessBase. Instead of using `setattr`, attributes
- are assigned explicitly in access classes using `MetadataKey` members in place of header key strings.
changelog/267.removal.rst DELETED
@@ -1 +0,0 @@
- Remove IdBud, which is just a TaskUniqueBud with the task set to observe, and therefore is not needed.