dkist-processing-common 11.5.0rc1__py3-none-any.whl → 11.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dkist_processing_common/config.py +45 -29
- dkist_processing_common/models/telemetry.py +28 -0
- dkist_processing_common/tasks/assemble_movie.py +1 -1
- dkist_processing_common/tasks/base.py +38 -46
- dkist_processing_common/tasks/l1_output_data.py +10 -10
- dkist_processing_common/tasks/output_data_base.py +2 -2
- dkist_processing_common/tasks/parse_l0_input_data.py +12 -7
- dkist_processing_common/tasks/quality_metrics.py +7 -7
- dkist_processing_common/tasks/teardown.py +5 -5
- dkist_processing_common/tasks/transfer_input_data.py +5 -5
- dkist_processing_common/tasks/trial_catalog.py +7 -7
- dkist_processing_common/tasks/trial_output_data.py +2 -2
- dkist_processing_common/tasks/write_l1.py +1 -1
- dkist_processing_common/tests/test_base.py +0 -14
- {dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/METADATA +82 -4
- {dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/RECORD +18 -18
- changelog/266.misc.rst +0 -1
- {dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/WHEEL +0 -0
- {dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/top_level.txt +0 -0
dkist_processing_common/config.py
CHANGED

@@ -3,48 +3,65 @@
 from dkist_processing_core.config import DKISTProcessingCoreConfiguration
 from dkist_service_configuration.settings import DEFAULT_MESH_SERVICE
 from dkist_service_configuration.settings import MeshService
-from pydantic import BaseModel
 from pydantic import Field
 from talus import ConnectionRetryerFactory
 from talus import ConsumerConnectionParameterFactory
 from talus import ProducerConnectionParameterFactory


-class RetryConfig(BaseModel):
-    """Retry metadata model."""
-
-    retry_delay: int = 1
-    retry_backoff: int = 2
-    retry_jitter: tuple[int, int] = (1, 10)
-    retry_max_delay: int = 300
-    retry_tries: int = -1
-
-
 class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
     """Common configurations."""

-    retry_config: RetryConfig = Field(default_factory=RetryConfig)
     # metadata-store-api
-    gql_auth_token: str | None =
+    gql_auth_token: str | None = Field(
+        default="dev", description="The auth token for the metadata-store-api."
+    )
     # object-store-api
-    object_store_access_key: str | None =
-
-
-
-
-
-
+    object_store_access_key: str | None = Field(
+        default=None, description="The access key for the object store."
+    )
+    object_store_secret_key: str | None = Field(
+        default=None, description="The secret key for the object store."
+    )
+    object_store_use_ssl: bool = Field(
+        default=False, description="Whether to use SSL for the object store connection."
+    )
+    # start object-clerk library
+    multipart_threshold: int | None = Field(
+        default=None, description="Multipart threshold for the object store."
+    )
+    s3_client_config: dict | None = Field(
+        default=None, description="S3 client configuration for the object store."
+    )
+    s3_upload_config: dict | None = Field(
+        default=None, description="S3 upload configuration for the object store."
+    )
+    s3_download_config: dict | None = Field(
+        default=None, description="S3 download configuration for the object store."
+    )
     # globus
-    globus_transport_params: dict = Field(
-
-
-
-
+    globus_transport_params: dict = Field(
+        default_factory=dict, description="Globus transfer parameters."
+    )
+    globus_client_id: str | None = Field(
+        default=None, description="Globus client ID for inbound/outbound transfers."
+    )
+    globus_client_secret: str | None = Field(
+        default=None, description="Globus client secret for inbound/outbound transfers."
+    )
+    object_store_endpoint: str | None = Field(
+        default=None, description="Object store Globus Endpoint ID."
+    )
+    scratch_endpoint: str | None = Field(default=None, description="Scratch Globus Endpoint ID.")
     # scratch
-    scratch_base_path: str = Field(default="scratch/")
-    scratch_inventory_db_count: int =
+    scratch_base_path: str = Field(default="scratch/", description="Base path for scratch storage.")
+    scratch_inventory_db_count: int = Field(
+        default=16, description="Number of databases in the scratch inventory (redis)."
+    )
     # docs
-    docs_base_url: str = Field(
+    docs_base_url: str = Field(
+        default="my_test_url", description="Base URL for the documentation site."
+    )

     @property
     def metadata_store_api_base(self) -> str:

@@ -106,4 +123,3 @@ class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):


 common_configurations = DKISTProcessingCommonConfiguration()
-common_configurations.log_configurations()
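Three things happen in this file: the local RetryConfig model and its retry_config field are dropped (a RETRY_CONFIG setting is still inherited from the upstream configuration base class; see the METADATA table below), the import-time log_configurations() call is removed, and every remaining field gains a Field(...) with a description. Those descriptions feed the auto-generated "Environment Variables" table added to METADATA in this release. A minimal sketch of how such a field resolves from the environment, assuming (as that table implies) the configuration class ultimately derives from pydantic-settings' BaseSettings::

    # Sketch only; DemoConfiguration is hypothetical, not the real class.
    import os

    from pydantic import Field
    from pydantic_settings import BaseSettings


    class DemoConfiguration(BaseSettings):
        gql_auth_token: str | None = Field(
            default="dev", description="The auth token for the metadata-store-api."
        )


    os.environ["GQL_AUTH_TOKEN"] = "s3cr3t"  # field names map to env vars, case-insensitively
    assert DemoConfiguration().gql_auth_token == "s3cr3t"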
dkist_processing_common/models/telemetry.py
ADDED

@@ -0,0 +1,28 @@
+"""Models to support telemetry data."""
+
+from pydantic import BaseModel
+
+
+class ObservableProgress(BaseModel, validate_assignment=True):
+    """Container for tracking progress for a metering instrument e.g. task progress."""
+
+    current: int = 0
+    total: int = 0
+
+    def increment(self, step: int = 1) -> None:
+        """Increment the current progress by the given step."""
+        self.current += step
+
+    @property
+    def percent_complete(self) -> float:
+        """Return the percent complete as a float between 0 and 100."""
+        if self.total > 0:
+            return (self.current / self.total) * 100
+        return 0.0
+
+    def set_complete(self):
+        """Set the current progress to the total."""
+        if self.total == 0:
+            self.total = self.current = 1
+        else:
+            self.current = self.total
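ObservableProgress is a small pydantic model; the validate_assignment=True class argument makes every assignment to current or total re-validate, so the fields stay well-typed under repeated mutation. How the pieces interact, with illustrative values::

    from dkist_processing_common.models.telemetry import ObservableProgress

    progress = ObservableProgress(total=4)
    progress.increment()                  # current: 0 -> 1
    progress.increment(step=2)            # current: 1 -> 3
    print(progress.percent_complete)      # 75.0
    progress.set_complete()               # current = total = 4
    print(progress.percent_complete)      # 100.0

Note the set_complete() edge case: a tracker whose total was never set reports 0.0 while running, but is forced to 1/1 (100%) on completion rather than staying at zero.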
dkist_processing_common/tasks/assemble_movie.py
CHANGED

@@ -163,7 +163,7 @@ class AssembleMovie(WorkflowTaskBase, ABC):
         relative_movie_path = f"{self.constants.dataset_id}_browse_movie.mp4"
         absolute_movie_path = str(self.scratch.absolute_path(relative_movie_path))

-        with self.
+        with self.telemetry_span("Assembling movie frames"):
             clip.write_videofile(absolute_movie_path, fps=self.FPS, codec="libx264", audio=False)

         self.tag(path=absolute_movie_path, tags=[Tag.movie(), Tag.output()])
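The telemetry_span context manager used here (and throughout the hunks below) is not defined in this diff; it evidently replaces the removed apm_*_step helpers and presumably comes from the dkist-processing-core TaskBase, whose pin moves to 6.0.0 in this release. Assuming it is a thin wrapper over the OpenTelemetry tracer, it behaves roughly like this sketch::

    # Hypothetical stand-in for the real implementation in dkist-processing-core.
    from contextlib import contextmanager

    from opentelemetry import trace

    tracer = trace.get_tracer("dkist-processing-common")


    @contextmanager
    def telemetry_span(name: str):
        # Wrap the block in a named OTel span so the work (here, encoding the
        # movie) shows up as a timed unit in distributed traces.
        with tracer.start_as_current_span(name) as span:
            yield span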
dkist_processing_common/tasks/base.py
CHANGED

@@ -12,6 +12,10 @@ from typing import Iterable
 from typing import Type

 from dkist_processing_core import TaskBase
+from opentelemetry.metrics import CallbackOptions
+from opentelemetry.metrics import Counter
+from opentelemetry.metrics import ObservableGauge
+from opentelemetry.metrics import Observation

 from dkist_processing_common._util.scratch import WorkflowFileSystem
 from dkist_processing_common._util.tags import TagDB

@@ -21,6 +25,7 @@ from dkist_processing_common.config import common_configurations
 from dkist_processing_common.models.constants import ConstantsBase
 from dkist_processing_common.models.tags import StemName
 from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.models.telemetry import ObservableProgress
 from dkist_processing_common.tasks.mixin.metadata_store import MetadataStoreMixin

 __all__ = ["WorkflowTaskBase", "tag_type_hint"]

@@ -66,7 +71,6 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
             workflow_name=workflow_name,
             workflow_version=workflow_version,
         )
-        self.task_name = self.__class__.__name__
         self.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, task_name=self.task_name)
         self.constants = self.constants_model_class(
             recipe_run_id=recipe_run_id, task_name=self.task_name

@@ -76,50 +80,30 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
             recipe_run_id=recipe_run_id, task_name=self.task_name, namespace="counter"
         )

-
-
-
-
-
-        arg_span_type: str = None,
-        arg_labels: dict[str, str] = None,
-        **kwargs,
-    ):
-        """Groom inputs to apm_step to handle various kwarg collisions."""
-        if "span_type" in kwargs:
-            raise RuntimeError(
-                f"Cannot specify 'span_type' {kwargs['span_type']} in step that forces is it to be {arg_span_type}"
-            )
-
-        if "labels" in kwargs:
-            arg_labels.update(kwargs["labels"])
-            del kwargs["labels"]
-        logger.info(
-            f"Recording APM span: {name = }, {arg_span_type = }, {arg_labels = }, "
-            f"recipe_run_id = {self.recipe_run_id}"
+        # meter instruments
+        self.read_counter: Counter = self.meter.create_counter(
+            name=self.format_metric_name("tasks.reads"),
+            unit="1",
+            description="The number of reads executed in the processing stack.",
         )
-
-
-
-
-        return self.apm_type_base(
-            name, *args, arg_span_type="code.task", arg_labels={"type": "task"}, **kwargs
+        self.write_counter: Counter = self.meter.create_counter(
+            name=self.format_metric_name("tasks.writes"),
+            unit="1",
+            description="The number of writes executed in the processing stack.",
         )
-
-
-
-
-
-        *args,
-        arg_span_type="code.processing",
-        arg_labels={"type": "processing"},
-        **kwargs,
+        self.outer_loop_progress = ObservableProgress()
+        self.outer_loop_progress_gauge: ObservableGauge = self.meter.create_observable_gauge(
+            name=self.format_metric_name("tasks.outer.loop.progress"),
+            description="The progress of a task through the main processing loop.",
+            callbacks=[lambda options: self.outer_loop_run_progress(options)],
         )

-    def
-
-
-
+    def outer_loop_run_progress(
+        self, options: CallbackOptions
+    ) -> Generator[Observation, None, None]:
+        """Observe the progress of the current task as a percentage."""
+        yield Observation(
+            self.outer_loop_progress.percent_complete, attributes=self.base_telemetry_attributes
         )

     @property

@@ -161,9 +145,14 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
         """Execute any pre-task setup required."""
         super().pre_run()
         if self.record_provenance or self.is_task_manual:
-            with self.
+            with self.telemetry_span("Record Provenance"):
                 self._record_provenance()

+    def post_run(self) -> None:
+        """Execute and post-task bookkeeping required."""
+        super().post_run()
+        self.outer_loop_progress.set_complete()
+
     def read(
         self, tags: tag_type_hint, decoder: callable = path_decoder, **decoder_kwargs
     ) -> Generator[Any, None, None]:

@@ -183,7 +172,9 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
         **decoder_kwargs
             Additional arguments to pass to the `decoder` function.
         """
-
+        for p in self.scratch.find_all(tags=tags):
+            self.read_counter.add(amount=1, attributes=self.base_telemetry_attributes)
+            yield decoder(p, **decoder_kwargs)

     def write(
         self,

@@ -221,6 +212,7 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
         -------
         The path for the written file
         """
+        self.write_counter.add(amount=1, attributes=self.base_telemetry_attributes)
         file_obj = encoder(data, **encoder_kwargs)
         if isinstance(tags, str):
             tags = [tags]

@@ -372,11 +364,11 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
        Filename Counter: not rolled back but its purpose of preventing file name collisions is not impacted
        """
        super().rollback()
-        with self.
+        with self.telemetry_span("Rollback Scratch"):
            self.scratch.rollback()
-        with self.
+        with self.telemetry_span("Rollback Constants"):
            self.constants._rollback()
-        with self.
+        with self.telemetry_span("Change Recipe Run to Inprogress"):
            self.metadata_store_change_recipe_run_to_inprogress()

    def __exit__(self, exc_type, exc_val, exc_tb):
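The two counters are push-style instruments: read() and write() call .add() on every operation. The progress gauge is pull-style: the OpenTelemetry SDK invokes the registered callback at each metric-export interval and records whatever Observations it yields, which is why the callback is a lambda closing over self — every collection produces a fresh generator over the live progress value rather than a number captured at construction time. The same pattern in a self-contained sketch, with hypothetical names::

    from opentelemetry import metrics
    from opentelemetry.metrics import CallbackOptions, Observation

    percent_complete = 42.0  # stand-in for ObservableProgress.percent_complete


    def observe_progress(options: CallbackOptions):
        # Invoked by the SDK at each export; yields the value current at that moment.
        yield Observation(percent_complete, attributes={"task": "DemoTask"})


    meter = metrics.get_meter("demo")
    progress_gauge = meter.create_observable_gauge(
        name="tasks.outer.loop.progress",
        description="The progress of a task through the main processing loop.",
        callbacks=[observe_progress],
    )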
dkist_processing_common/tasks/l1_output_data.py
CHANGED

@@ -54,11 +54,11 @@ class TransferL1Data(TransferDataBase, GlobusMixin):

     def transfer_objects(self):
         """Transfer movie and L1 output frames."""
-        with self.
+        with self.telemetry_span("Upload movie"):
             # Movie needs to be transferred separately as the movie headers need to go with it
             self.transfer_movie()

-        with self.
+        with self.telemetry_span("Upload science frames"):
             self.transfer_output_frames()

     def transfer_output_frames(self):

@@ -120,10 +120,10 @@ class AssembleQualityData(L1OutputDataBase, QualityMixin):

     def run(self):
         """Run method for the task."""
-        with self.
+        with self.telemetry_span("Assembling quality data"):
             quality_data = self.quality_assemble_data(polcal_label_list=self.polcal_label_list)

-        with self.
+        with self.telemetry_span(
             f"Saving quality data with {len(quality_data)} metrics to the file system"
         ):
             self.write(

@@ -146,7 +146,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):

     def run(self) -> None:
         """Run method for this task."""
-        with self.
+        with self.telemetry_span(f"Storing quality data to metadata store"):
             # each quality_data file is a list - this will combine the elements of multiple lists into a single list
             quality_data = list(
                 chain.from_iterable(

@@ -156,7 +156,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
             self.metadata_store_add_quality_data(
                 dataset_id=self.constants.dataset_id, quality_data=quality_data
             )
-        with self.
+        with self.telemetry_span("Count Expected Outputs"):
             dataset_id = self.constants.dataset_id
             expected_object_count = self.count(tags=Tag.output())
             if quality_data:

@@ -165,7 +165,7 @@
                 f"Adding Dataset Receipt Account: "
                 f"{dataset_id=}, {expected_object_count=}, recipe_run_id={self.recipe_run_id}"
             )
-        with self.
+        with self.telemetry_span(
             f"Add Dataset Receipt Account: {dataset_id = }, {expected_object_count = }"
         ):
             self.metadata_store_add_dataset_receipt_account(

@@ -245,10 +245,10 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):

     def run(self) -> None:
         """Run method for this task."""
-        with self.
+        with self.telemetry_span("Gather output data"):
             frames = self.read(tags=self.output_frame_tags)
             movies = self.read(tags=[Tag.output(), Tag.movie()])
-        with self.
+        with self.telemetry_span("Create message objects"):
             messages = []
             messages += self.frame_messages(paths=frames)
             frame_message_count = len(messages)

@@ -257,7 +257,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
         dataset_has_quality_data = self.dataset_has_quality_data
         if dataset_has_quality_data:
             messages.append(self.quality_report_message)
-        with self.
+        with self.telemetry_span(
             f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
         ):
             self.interservice_bus_publish(messages=messages)
dkist_processing_common/tasks/output_data_base.py
CHANGED

@@ -58,10 +58,10 @@ class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):

     def run(self) -> None:
         """Transfer the data and cleanup any folders."""
-        with self.
+        with self.telemetry_span("Transfer objects"):
             self.transfer_objects()

-        with self.
+        with self.telemetry_span("Remove folder objects"):
             self.remove_folder_objects()

     @abstractmethod
dkist_processing_common/tasks/parse_l0_input_data.py
CHANGED

@@ -103,21 +103,25 @@ class ParseDataBase(WorkflowTaskBase, ABC):

     @property
     @abstractmethod
-    def tags_for_input_frames(self) -> list[
+    def tags_for_input_frames(self) -> list[str]:
         """Define the tags for the data that will be parsed."""

+    def pre_run(self) -> None:
+        """Execute pre-task setup."""
+        self.outer_loop_progress.total = self.scratch.count_all(tags=self.tags_for_input_frames)
+
     def run(self) -> None:
         """Run method for this task."""
-        with self.
+        with self.telemetry_span("Check that input frames exist"):
             self.check_input_frames()

-        with self.
+        with self.telemetry_span("Ingest all input files"):
             tag_pot, constant_pot = self.make_flower_pots()

-        with self.
+        with self.telemetry_span("Update constants"):
             self.update_constants(constant_pot)

-        with self.
+        with self.telemetry_span("Tag files"):
             self.tag_petals(tag_pot)

     def make_flower_pots(self) -> tuple[FlowerPot, FlowerPot]:

@@ -128,6 +132,7 @@ class ParseDataBase(WorkflowTaskBase, ABC):
         constant_pot.stems += self.constant_buds

         for fits_obj in self.input_frames:
+            self.outer_loop_progress.increment()
             filepath = fits_obj.name
             tag_pot.add_dirt(filepath, fits_obj)
             constant_pot.add_dirt(filepath, fits_obj)

@@ -161,7 +166,7 @@
         None
         """
         for stem in constant_pot:
-            with self.
+            with self.telemetry_span(f"Setting value of constant {stem.stem_name}"):
                 if len(stem.petals) == 0:
                     # There are no petals so nothing to do
                     continue

@@ -186,7 +191,7 @@
         None
         """
         for stem in tag_pot:
-            with self.
+            with self.telemetry_span(f"Applying {stem.stem_name} tag to files"):
                 for petal in stem.petals:
                     tag = Tag.format_tag(stem.stem_name, petal.value)
                     for path in petal.keys:
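Together with the new post_run() in base.py, this file shows the complete progress contract a task follows: size the denominator in pre_run(), tick once per unit of work in the main loop, and rely on post_run() to pin the gauge at 100% when the task finishes. Condensed into one hypothetical subclass::

    class DemoParseTask(ParseDataBase):
        # Hypothetical; buds, flowers, and the tags_for_input_frames property elided.

        def pre_run(self) -> None:
            # Denominator: how many frames this run will touch.
            self.outer_loop_progress.total = self.scratch.count_all(
                tags=self.tags_for_input_frames
            )

        def run(self) -> None:
            for fits_obj in self.input_frames:
                self.outer_loop_progress.increment()  # one tick per ingested frame
                ...  # parse the frame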
dkist_processing_common/tasks/quality_metrics.py
CHANGED

@@ -100,10 +100,10 @@ class QualityL0Metrics(WorkflowTaskBase, QualityMixin):
         `quality_task_types` properties, respectively.
         """
         modstate_list = self.modstate_list if self.modstate_list is not None else [None]
-        with self.
+        with self.telemetry_span("Computing L0 Quality Metrics"):
             quality_data_list = []
             for task_type in self.quality_task_types:
-                with self.
+                with self.telemetry_span(f"Working on {task_type = }"):
                     for modstate in modstate_list:
                         paths = self.get_paths_for_modstate_and_task(modstate, task_type)
                         quality_data = self.calculate_l0_metrics(

@@ -112,7 +112,7 @@
                         quality_data.modstate = modstate
                         quality_data_list.append(quality_data)

-        with self.
+        with self.telemetry_span("Saving metrics to disk"):
             for quality_data in quality_data_list:
                 if quality_data.has_values:
                     self.save_quality_data(quality_data, modstate=quality_data.modstate)

@@ -300,17 +300,17 @@ class QualityL1Metrics(WorkflowTaskBase, QualityMixin):
             ),
         ]

-        with self.
+        with self.telemetry_span("Reading L1 frames"):
             paths = list(self.read(tags=[Tag.calibrated(), Tag.frame()]))

-        with self.
+        with self.telemetry_span("Calculating L1 quality metrics"):
             for metric in metrics:
-                with self.
+                with self.telemetry_span(f"Calculating L1 metric {metric.value_source}"):
                     for path in paths:
                         frame = L1QualityFitsAccess.from_path(path)
                         metric.append_value(frame=frame)

-        with self.
+        with self.telemetry_span("Sending lists for storage"):
             for metric in metrics:
                 if metric.has_values:
                     metric.store_metric()
dkist_processing_common/tasks/teardown.py
CHANGED

@@ -26,11 +26,11 @@ class TeardownBase(WorkflowTaskBase, ABC):

     def run(self) -> None:
         """Run method for Teardown class."""
-        with self.
+        with self.telemetry_span("Change recipe run status"):
             self.change_recipe_run_status_to_success()

         if not self.teardown_enabled:
-            with self.
+            with self.telemetry_span(f"Skip Teardown"):
                 return

         logger.info(f"Removing data and tags for recipe run {self.recipe_run_id}")

@@ -43,13 +43,13 @@

     def teardown(self):
         """Purge all constants and files/tags in scratch."""
-        with self.
+        with self.telemetry_span("Remove Data and Tags"):
             self.scratch.purge()

-        with self.
+        with self.telemetry_span("Remove File Counters"):
             self.filename_counter.purge()

-        with self.
+        with self.telemetry_span("Remove Constants"):
             self.constants._purge()

dkist_processing_common/tasks/transfer_input_data.py
CHANGED

@@ -81,13 +81,13 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin):

     def run(self) -> None:
         """Execute the data transfer."""
-        with self.
+        with self.telemetry_span("Change Status to InProgress"):
             self.metadata_store_change_recipe_run_to_inprogress()

-        with self.
+        with self.telemetry_span("Download Input Dataset Documents"):
             self.download_input_dataset()

-        with self.
+        with self.telemetry_span("Build Input Dataset Transfer List"):
             observe_transfer_objects = self.build_transfer_list(
                 doc_tag=Tag.input_dataset_observe_frames()
             )

@@ -103,13 +103,13 @@
         if len(observe_transfer_objects + calibration_transfer_objects) == 0:
             raise ValueError("No input dataset frames found to transfer")

-        with self.
+        with self.telemetry_span("Transfer Input Frames and Parameter Files via Globus"):
             self.globus_transfer_object_store_to_scratch(
                 transfer_items=self.format_transfer_items(input_dataset_objects=transfer_objects),
                 label=f"Transfer Input Objects for Recipe Run {self.recipe_run_id}",
             )

-        with self.
+        with self.telemetry_span("Tag Input Frames and Parameter Files"):
             self.tag_transfer_objects(input_dataset_objects=transfer_objects)

     def rollback(self):
dkist_processing_common/tasks/trial_catalog.py
CHANGED

@@ -97,13 +97,13 @@ class CreateTrialDatasetInventory(OutputDataBase):

     def run(self) -> None:
         """Generate a json file simulating the dataset inventory record that would be produced when cataloging the dataset."""
-        with self.
+        with self.telemetry_span("Retrieve output frame headers"):
             json_headers = list(self.frame_inventories)
-        with self.
+        with self.telemetry_span("Generate dataset inventory"):
             inventory: dict = generate_inventory_from_frame_inventory(
                 bucket=self.destination_bucket, json_headers=json_headers
             )
-        with self.
+        with self.telemetry_span("Save dataset inventory file"):
             self.write(
                 inventory,
                 tags=[Tag.output(), Tag.dataset_inventory()],

@@ -138,7 +138,7 @@ class CreateTrialAsdf(OutputDataBase):

     def run(self) -> None:
         """Generate an ASDF file simulating the ASDF file that would be produced when cataloging the dataset."""
-        with self.
+        with self.telemetry_span("Generate ASDF tree"):
             tree = asdf_tree_from_filenames(
                 filenames=self.absolute_output_frame_paths,
                 hdu=1,  # compressed

@@ -156,7 +156,7 @@
                 },
             )
         ]
-        with self.
+        with self.telemetry_span("Save ASDF file"):
             with make_asdf_file_object(tree, extra_history=trial_history) as asdf_obj:
                 self.write(
                     asdf_obj,

@@ -192,7 +192,7 @@ class CreateTrialQualityReport(OutputDataBase):

     def create_trial_quality_report(self) -> None:
         """Generate a trial quality report in pdf format and save to the file system for future upload."""
-        with self.
+        with self.telemetry_span(f"Building the trial quality report"):
             # each quality_data file is a list - this will combine the elements of multiple lists into a single list
             quality_data = list(
                 chain.from_iterable(

@@ -203,7 +203,7 @@
                 report_data=quality_data, dataset_id=self.constants.dataset_id
             )

-        with self.
+        with self.telemetry_span(f"Saving the trial quality report to the file system"):
             self.write(
                 quality_report,
                 tags=[Tag.output(), Tag.quality_report()],
dkist_processing_common/tasks/trial_output_data.py
CHANGED

@@ -36,10 +36,10 @@ class TransferTrialData(TransferDataBase, GlobusMixin):

     def transfer_objects(self) -> None:
         """Collect transfer items and send them to Globus for transfer."""
-        with self.
+        with self.telemetry_span("Build transfer list"):
             transfer_manifest = self.build_transfer_list()

-        with self.
+        with self.telemetry_span("Send transfer manifest to globus"):
             self.transfer_all_trial_frames(transfer_manifest)

     @cached_property
dkist_processing_common/tasks/write_l1.py
CHANGED

@@ -53,7 +53,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
     def run(self) -> None:
         """Run method for this task."""
         for stokes_param in self.constants.stokes_params:
-            with self.
+            with self.telemetry_span(f"Get calibrated frames for stokes param {stokes_param}"):
                 tags = [Tag.frame(), Tag.calibrated(), Tag.stokes(stokes_param)]
                 calibrated_fits_objects = self.read(
                     tags=tags,
dkist_processing_common/tests/test_base.py
CHANGED

@@ -66,20 +66,6 @@ def tags_and_expected_generic_name() -> (list[str], str):
     return tags, expected_base_name


-def test_apm_spans(base_task):
-    """
-    Given: A WorkflowTaskBase task
-    When: Calling the task-specific apm_steps with weird inputs
-    Then: Errors happen when they're supposed to and not when they're not supposed to
-    """
-    with pytest.raises(RuntimeError):
-        with base_task.apm_processing_step("foo", span_type="bar"):
-            pass
-
-    with base_task.apm_task_step("foo", labels={"foo": "bar"}):
-        pass
-
-
 def test_tags(base_task):
     """
     Given: A WorkflowTaskBase task
{dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dkist-processing-common
-Version: 11.5.0rc1
+Version: 11.6.0
 Summary: Common task classes used by the DKIST science data processing pipelines
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD-3-Clause

@@ -17,9 +17,9 @@ Requires-Dist: asdf<4.0.0,>=3.5.0
 Requires-Dist: astropy>=7.0.0
 Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
 Requires-Dist: dkist-header-validator<6.0,>=5.0.0
-Requires-Dist: dkist-processing-core==
+Requires-Dist: dkist-processing-core==6.0.0
 Requires-Dist: dkist-processing-pac<4.0,>=3.1
-Requires-Dist: dkist-service-configuration<
+Requires-Dist: dkist-service-configuration<5.0,>=4.1.7
 Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
 Requires-Dist: solar-wavelength-calibration<2.0,>=1.0
 Requires-Dist: globus-sdk>=3.12.0

@@ -32,7 +32,7 @@ Requires-Dist: object-clerk==1.0.0
 Requires-Dist: pandas>=1.4.2
 Requires-Dist: pillow>=10.2.0
 Requires-Dist: pydantic>=2.0
-Requires-Dist: redis==4.
+Requires-Dist: redis==6.4.0
 Requires-Dist: requests>=2.23
 Requires-Dist: scipy>=1.15.1
 Requires-Dist: sunpy>=3.0.0

@@ -97,6 +97,84 @@ Deployment

 dkist-processing-common is deployed to `PyPI <https://pypi.org/project/dkist-processing-common/>`_

+Environment Variables
+---------------------
+
+.. list-table::
+   :widths: 10 90
+   :header-rows: 1
+
+   * - Variable
+     - Field Info
+   * - LOGURU_LEVEL
+     - annotation=str required=False default='INFO' alias_priority=2 validation_alias='LOGURU_LEVEL' description='Log level for the application'
+   * - MESH_CONFIG
+     - annotation=dict[str, MeshService] required=False default_factory=dict alias_priority=2 validation_alias='MESH_CONFIG' description='Service mesh configuration' examples=[{'upstream_service_name': {'mesh_address': 'localhost', 'mesh_port': 6742}}]
+   * - RETRY_CONFIG
+     - annotation=RetryConfig required=False default_factory=RetryConfig description='Retry configuration for the service'
+   * - OTEL_SERVICE_NAME
+     - annotation=str required=False default='unknown-service-name' alias_priority=2 validation_alias='OTEL_SERVICE_NAME' description='Service name for OpenTelemetry'
+   * - DKIST_SERVICE_VERSION
+     - annotation=str required=False default='unknown-service-version' alias_priority=2 validation_alias='DKIST_SERVICE_VERSION' description='Service version for OpenTelemetry'
+   * - NOMAD_ALLOC_ID
+     - annotation=str required=False default='unknown-allocation-id' alias_priority=2 validation_alias='NOMAD_ALLOC_ID' description='Nomad allocation ID for OpenTelemetry'
+   * - OTEL_EXPORTER_OTLP_TRACES_INSECURE
+     - annotation=bool required=False default=True description='Use insecure connection for OTLP traces'
+   * - OTEL_EXPORTER_OTLP_METRICS_INSECURE
+     - annotation=bool required=False default=True description='Use insecure connection for OTLP metrics'
+   * - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
+     - annotation=Union[str, NoneType] required=False default=None description='OTLP traces endpoint. Overrides mesh configuration' examples=['localhost:4317']
+   * - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT
+     - annotation=Union[str, NoneType] required=False default=None description='OTLP metrics endpoint. Overrides mesh configuration' examples=['localhost:4317']
+   * - OTEL_PYTHON_DISABLED_INSTRUMENTATIONS
+     - annotation=list[str] required=False default_factory=list description='List of instrumentations to disable. https://opentelemetry.io/docs/zero-code/python/configuration/' examples=[['pika', 'requests']]
+   * - OTEL_PYTHON_FASTAPI_EXCLUDED_URLS
+     - annotation=str required=False default='health' description='Comma separated list of URLs to exclude from OpenTelemetry instrumentation in FastAPI.' examples=['client/.*/info,healthcheck']
+   * - SYSTEM_METRIC_INSTRUMENTATION_CONFIG
+     - annotation=Union[dict[str, bool], NoneType] required=False default=None description='Configuration for system metric instrumentation. https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/system_metrics/system_metrics.html' examples=[{'system.memory.usage': ['used', 'free', 'cached'], 'system.cpu.time': ['idle', 'user', 'system', 'irq'], 'system.network.io': ['transmit', 'receive'], 'process.runtime.memory': ['rss', 'vms'], 'process.runtime.cpu.time': ['user', 'system'], 'process.runtime.context_switches': ['involuntary', 'voluntary']}]
+   * - ISB_USERNAME
+     - annotation=str required=False default='guest' description='Username for the interservice-bus.'
+   * - ISB_PASSWORD
+     - annotation=str required=False default='guest' description='Password for the interservice-bus.'
+   * - ISB_EXCHANGE
+     - annotation=str required=False default='master.direct.x' description='Exchange for the interservice-bus.'
+   * - ISB_QUEUE_TYPE
+     - annotation=str required=False default='classic' description='Queue type for the interservice-bus.' examples=['quorum', 'classic']
+   * - BUILD_VERSION
+     - annotation=str required=False default='dev' description='Fallback build version for workflow tasks.'
+   * - GQL_AUTH_TOKEN
+     - annotation=Union[str, NoneType] required=False default='dev' description='The auth token for the metadata-store-api.'
+   * - OBJECT_STORE_ACCESS_KEY
+     - annotation=Union[str, NoneType] required=False default=None description='The access key for the object store.'
+   * - OBJECT_STORE_SECRET_KEY
+     - annotation=Union[str, NoneType] required=False default=None description='The secret key for the object store.'
+   * - OBJECT_STORE_USE_SSL
+     - annotation=bool required=False default=False description='Whether to use SSL for the object store connection.'
+   * - MULTIPART_THRESHOLD
+     - annotation=Union[int, NoneType] required=False default=None description='Multipart threshold for the object store.'
+   * - S3_CLIENT_CONFIG
+     - annotation=Union[dict, NoneType] required=False default=None description='S3 client configuration for the object store.'
+   * - S3_UPLOAD_CONFIG
+     - annotation=Union[dict, NoneType] required=False default=None description='S3 upload configuration for the object store.'
+   * - S3_DOWNLOAD_CONFIG
+     - annotation=Union[dict, NoneType] required=False default=None description='S3 download configuration for the object store.'
+   * - GLOBUS_TRANSPORT_PARAMS
+     - annotation=dict required=False default_factory=dict description='Globus transfer parameters.'
+   * - GLOBUS_CLIENT_ID
+     - annotation=Union[str, NoneType] required=False default=None description='Globus client ID for inbound/outbound transfers.'
+   * - GLOBUS_CLIENT_SECRET
+     - annotation=Union[str, NoneType] required=False default=None description='Globus client secret for inbound/outbound transfers.'
+   * - OBJECT_STORE_ENDPOINT
+     - annotation=Union[str, NoneType] required=False default=None description='Object store Globus Endpoint ID.'
+   * - SCRATCH_ENDPOINT
+     - annotation=Union[str, NoneType] required=False default=None description='Scratch Globus Endpoint ID.'
+   * - SCRATCH_BASE_PATH
+     - annotation=str required=False default='scratch/' description='Base path for scratch storage.'
+   * - SCRATCH_INVENTORY_DB_COUNT
+     - annotation=int required=False default=16 description='Number of databases in the scratch inventory (redis).'
+   * - DOCS_BASE_URL
+     - annotation=str required=False default='my_test_url' description='Base URL for the documentation site.'
+
 Development
 -----------
{dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/RECORD
RENAMED

@@ -1,7 +1,6 @@
 changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-changelog/266.misc.rst,sha256=5yvMMAn3qAkY4NgLSgc-7dFzJdBTfWTu1J_TwCzd_eM,107
 dkist_processing_common/__init__.py,sha256=GQ9EBnYhkOnt-qODclAoLS_g5YVhurxfg1tjVtI9rDI,320
-dkist_processing_common/config.py,sha256=
+dkist_processing_common/config.py,sha256=kkFm-Q6jGGoV3Zdf3PSfk3xNYd2vSH0J2R8dBBav3Og,5218
 dkist_processing_common/manual.py,sha256=bIVVyLsbXMh-g_2L3kGROL-1TtJe0_XviHsp7Br31x8,7023
 dkist_processing_common/_util/__init__.py,sha256=xf6JNpMKQgbhE2Jivymt-WO0WF6PpGt9rl604YpuTWk,92
 dkist_processing_common/_util/constants.py,sha256=0_bWLsvusHD8GrTx4B6V7AieKAaFbN9crcAALaS8x5Q,3245

@@ -36,6 +35,7 @@ dkist_processing_common/models/parameters.py,sha256=9An3SxUEBI-oYHjICQ_q-IIScTfp
 dkist_processing_common/models/quality.py,sha256=TmDVbvPbfl5CIIs1ioD5guLUoEOFTfiJESvDjLTLl5s,3981
 dkist_processing_common/models/tags.py,sha256=0YqiDrismOSds_3XtFBb2dfv0gjMs6CgRv2dJKsSthI,12082
 dkist_processing_common/models/task_name.py,sha256=uAl7qTK4Xx1nqPAhNAe5nAXqxwPwQzAq58YmoccX6xQ,567
+dkist_processing_common/models/telemetry.py,sha256=XVcLNgHCZsP9L7oYiklyLUoqQtWt_xjEkuf70Kbudz4,839
 dkist_processing_common/models/wavelength.py,sha256=4UhRVoNvCHZitXo5S1oRdewadbmGfmDK6wetMV06POA,967
 dkist_processing_common/parsers/__init__.py,sha256=XJQzHtPb78F6-qXXKXjyztc0x-aHVlgv1C_l4dR88tI,67
 dkist_processing_common/parsers/cs_step.py,sha256=rL2gdstKEV5aqdPDs3a5EuUaOT_6YXDJVqIPIVKSw8M,6450

@@ -54,17 +54,17 @@ dkist_processing_common/parsers/time.py,sha256=z9zHV3Fz6ebEDgiPhv6H-aAS8e-sSW3Ek
 dkist_processing_common/parsers/unique_bud.py,sha256=IkS2zZkVzn3PRsYF2ksBkUxl_HJ4TxCqBKJUs1WdL54,3310
 dkist_processing_common/parsers/wavelength.py,sha256=P5C9mG8DAKK3GB3vWNRBI5l7pAW68lJK-kw-4eqERuQ,612
 dkist_processing_common/tasks/__init__.py,sha256=l23ctjNsKJbHbbqaZBMeOPaOtw0hmITEljI_JJ-CVsU,627
-dkist_processing_common/tasks/assemble_movie.py,sha256=
-dkist_processing_common/tasks/base.py,sha256=
-dkist_processing_common/tasks/l1_output_data.py,sha256=
-dkist_processing_common/tasks/output_data_base.py,sha256=
-dkist_processing_common/tasks/parse_l0_input_data.py,sha256=
-dkist_processing_common/tasks/quality_metrics.py,sha256=
-dkist_processing_common/tasks/teardown.py,sha256=
-dkist_processing_common/tasks/transfer_input_data.py,sha256=
-dkist_processing_common/tasks/trial_catalog.py,sha256=
-dkist_processing_common/tasks/trial_output_data.py,sha256=
-dkist_processing_common/tasks/write_l1.py,sha256=
+dkist_processing_common/tasks/assemble_movie.py,sha256=1ixDG-f4ODt0vywqVccG3aodLljVO5OGlvuMO9EEvcU,12767
+dkist_processing_common/tasks/base.py,sha256=itAHCvzcodo-q8_AjpWoRaM86BlcjWDpCIiUP7uwmP0,13236
+dkist_processing_common/tasks/l1_output_data.py,sha256=D4S3kH2uRKp1b8_xF2YiWO_mGj19UJS5wKQZ4OdatGs,10568
+dkist_processing_common/tasks/output_data_base.py,sha256=r1Bu3FX5zTVj66GTMWtaV_NdhxjyjSm661Bt2Mxmfi4,3685
+dkist_processing_common/tasks/parse_l0_input_data.py,sha256=Gf8tA0aKqKuhkuxJqtblLL9y_ARCKoMb1cztSt_2hSU,8343
+dkist_processing_common/tasks/quality_metrics.py,sha256=cvGF6tJ8yAvxOvkeG3tWxYwL885BrFW5X3V7_MSzL-A,12481
+dkist_processing_common/tasks/teardown.py,sha256=rwT9lWINVDF11-az_nx-Z5ykMTX_SJCchobpU6sErgk,2360
+dkist_processing_common/tasks/transfer_input_data.py,sha256=DAYfS-B1o-iBT9MXU-TiJG4Hv05Z0c_JzPrnFgvnK9g,5786
+dkist_processing_common/tasks/trial_catalog.py,sha256=iAaMT_oLnupA1O3xAtqVjsqRY5f_hyvMps-fXg6KlHU,8729
+dkist_processing_common/tasks/trial_output_data.py,sha256=CPMXXODvN5RTcu9bTF8v6AXciCl212EWP6qTiARvUNk,6837
+dkist_processing_common/tasks/write_l1.py,sha256=Xy834RTp3F95kLcW4ba5gfHMUocfZd82ZQQKnvQcP2M,23204
 dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
 dkist_processing_common/tasks/mixin/globus.py,sha256=9ey_UCacqCfmxYZSgm6VDefdlm7dkNloC8G5DeVub8s,6592
 dkist_processing_common/tasks/mixin/interservice_bus.py,sha256=M6R922l7gJSmmU_vswUXxy-c5DWNrIRjQu9H9CSgGfU,1081

@@ -78,7 +78,7 @@ dkist_processing_common/tests/conftest.py,sha256=Tm-Yq956EAafpDtu1d7JjdVY0Unp9e4
 dkist_processing_common/tests/mock_metadata_store.py,sha256=fbCvSk1-s0ojN6l538RWodPW7dx6k4eXqipemnHKO0Y,8248
 dkist_processing_common/tests/test_assemble_movie.py,sha256=dyVhowxB-Kc6GuxlDs74UrPtK9fwdUL7y5haA3Bidz0,4065
 dkist_processing_common/tests/test_assemble_quality.py,sha256=-F22jMY6mPy65VZ1TZY2r1vsxMXOPmZHArGx70OD3BA,17832
-dkist_processing_common/tests/test_base.py,sha256=
+dkist_processing_common/tests/test_base.py,sha256=gsyBG2R6Ufx7CzbHeGMagUwM9yCfpN4gCSZ6-aH2q48,6643
 dkist_processing_common/tests/test_codecs.py,sha256=XuvG1sG8DECMPmxtDEi98TxlvTSAy0vrtUUFLrwnHlA,22173
 dkist_processing_common/tests/test_constants.py,sha256=I_KcJs7ScCn53GYhEO6qjWrrnfZuyC1IVYOy87Pjlg4,6565
 dkist_processing_common/tests/test_cs_step.py,sha256=RA0QD3D8eaL3YSOL_gIJ9wkngy14RQ2jbD-05KAziW4,2408

@@ -117,7 +117,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
 docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
 docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
 licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
-dkist_processing_common-11.
-dkist_processing_common-11.
-dkist_processing_common-11.
-dkist_processing_common-11.
+dkist_processing_common-11.6.0.dist-info/METADATA,sha256=AQxL1zjp7254K1ZpFGOic8SHuO6bbexvQQ0KwHQGHOY,13313
+dkist_processing_common-11.6.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dkist_processing_common-11.6.0.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+dkist_processing_common-11.6.0.dist-info/RECORD,,
changelog/266.misc.rst
DELETED

@@ -1 +0,0 @@
-Update dkist-processing-core to 5.2.0 which includes upgrades to airflow 2.11.0 and requires Python 3.12+.
{dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/WHEEL
RENAMED
    File without changes

{dkist_processing_common-11.5.0rc1.dist-info → dkist_processing_common-11.6.0.dist-info}/top_level.txt
RENAMED
    File without changes