dkist-processing-common 11.5.0rc1__py3-none-any.whl → 11.6.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
changelog/268.misc.rst ADDED
@@ -0,0 +1 @@
+ Integrate dkist-processing-core 6.0.0, which replaces Elastic APM with OpenTelemetry for metrics and tracing.
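To make the migration concrete, here is a minimal sketch of the call-site change repeated throughout the hunks below. The span names and the `telemetry_span` context manager come from this diff; the OpenTelemetry tracer wiring shown here is an illustrative assumption, not the dkist-processing-core implementation.

```python
# Hedged sketch of the APM -> OpenTelemetry call-site change in this release.
from contextlib import contextmanager

from opentelemetry import trace

tracer = trace.get_tracer("dkist-processing-common-example")


@contextmanager
def telemetry_span(name: str):
    # Stand-in for the telemetry_span provided by dkist-processing-core 6.0.0.
    with tracer.start_as_current_span(name) as span:
        yield span


# Old call sites used typed APM spans (apm_task_step / apm_processing_step /
# apm_writing_step); in 6.0.0 they all collapse to a single span type:
with telemetry_span("Assembling movie frames"):
    pass  # processing work goes here
```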
@@ -3,48 +3,65 @@
  from dkist_processing_core.config import DKISTProcessingCoreConfiguration
  from dkist_service_configuration.settings import DEFAULT_MESH_SERVICE
  from dkist_service_configuration.settings import MeshService
- from pydantic import BaseModel
  from pydantic import Field
  from talus import ConnectionRetryerFactory
  from talus import ConsumerConnectionParameterFactory
  from talus import ProducerConnectionParameterFactory


- class RetryConfig(BaseModel):
-     """Retry metadata model."""
-
-     retry_delay: int = 1
-     retry_backoff: int = 2
-     retry_jitter: tuple[int, int] = (1, 10)
-     retry_max_delay: int = 300
-     retry_tries: int = -1
-
-
  class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
      """Common configurations."""

-     retry_config: RetryConfig = Field(default_factory=RetryConfig)
      # metadata-store-api
-     gql_auth_token: str | None = None
+     gql_auth_token: str | None = Field(
+         default="dev", description="The auth token for the metadata-store-api."
+     )
      # object-store-api
-     object_store_access_key: str | None = None
-     object_store_secret_key: str | None = None
-     object_store_use_ssl: bool = False
-     multipart_threshold: int | None = None
-     s3_client_config: dict | None = None
-     s3_upload_config: dict | None = None
-     s3_download_config: dict | None = None
+     object_store_access_key: str | None = Field(
+         default=None, description="The access key for the object store."
+     )
+     object_store_secret_key: str | None = Field(
+         default=None, description="The secret key for the object store."
+     )
+     object_store_use_ssl: bool = Field(
+         default=False, description="Whether to use SSL for the object store connection."
+     )
+     # start object-clerk library
+     multipart_threshold: int | None = Field(
+         default=None, description="Multipart threshold for the object store."
+     )
+     s3_client_config: dict | None = Field(
+         default=None, description="S3 client configuration for the object store."
+     )
+     s3_upload_config: dict | None = Field(
+         default=None, description="S3 upload configuration for the object store."
+     )
+     s3_download_config: dict | None = Field(
+         default=None, description="S3 download configuration for the object store."
+     )
      # globus
-     globus_transport_params: dict = Field(default_factory=dict)
-     globus_client_id: str | None = None
-     globus_client_secret: str | None = None
-     object_store_endpoint: str | None = None
-     scratch_endpoint: str | None = None
+     globus_transport_params: dict = Field(
+         default_factory=dict, description="Globus transfer parameters."
+     )
+     globus_client_id: str | None = Field(
+         default=None, description="Globus client ID for inbound/outbound transfers."
+     )
+     globus_client_secret: str | None = Field(
+         default=None, description="Globus client secret for inbound/outbound transfers."
+     )
+     object_store_endpoint: str | None = Field(
+         default=None, description="Object store Globus Endpoint ID."
+     )
+     scratch_endpoint: str | None = Field(default=None, description="Scratch Globus Endpoint ID.")
      # scratch
-     scratch_base_path: str = Field(default="scratch/")
-     scratch_inventory_db_count: int = 16
+     scratch_base_path: str = Field(default="scratch/", description="Base path for scratch storage.")
+     scratch_inventory_db_count: int = Field(
+         default=16, description="Number of databases in the scratch inventory (redis)."
+     )
      # docs
-     docs_base_url: str = Field(default="my_test_url")
+     docs_base_url: str = Field(
+         default="my_test_url", description="Base URL for the documentation site."
+     )

      @property
      def metadata_store_api_base(self) -> str:
@@ -106,4 +123,3 @@ class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):


  common_configurations = DKISTProcessingCommonConfiguration()
- common_configurations.log_configurations()
@@ -0,0 +1,21 @@
+ """Models to support telemetry data."""
+
+ from pydantic import BaseModel
+
+
+ class Progress(BaseModel, validate_assignment=True):
+     """Container for tracking progress for a metering instrument."""
+
+     current: int = 0
+     total: int = 0
+
+     def increment(self, step: int = 1) -> None:
+         """Increment the current progress by the given step."""
+         self.current += step
+
+     @property
+     def percent_complete(self) -> float:
+         """Return the percent complete as a float between 0 and 100."""
+         if self.total > 0:
+             return (self.current / self.total) * 100
+         return 0.0
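A quick usage sketch of the new `Progress` model added above (the module path is confirmed by the RECORD changes later in this diff; because the model is declared with `validate_assignment=True`, assignments like `progress.total = 10` are re-validated by pydantic):

```python
# Usage sketch for the Progress model added in this release.
from dkist_processing_common.models.telemetry import Progress

progress = Progress()
progress.total = 10  # validate_assignment=True re-validates this assignment
for _ in range(4):
    progress.increment()  # current: 0 -> 4
print(progress.percent_complete)  # 40.0
```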
@@ -163,7 +163,7 @@ class AssembleMovie(WorkflowTaskBase, ABC):
          relative_movie_path = f"{self.constants.dataset_id}_browse_movie.mp4"
          absolute_movie_path = str(self.scratch.absolute_path(relative_movie_path))

-         with self.apm_processing_step("Assembling movie frames"):
+         with self.telemetry_span("Assembling movie frames"):
              clip.write_videofile(absolute_movie_path, fps=self.FPS, codec="libx264", audio=False)

          self.tag(path=absolute_movie_path, tags=[Tag.movie(), Tag.output()])
@@ -10,8 +10,14 @@ from typing import Any
  from typing import Generator
  from typing import Iterable
  from typing import Type
+ from typing import TypeAlias

  from dkist_processing_core import TaskBase
+ from opentelemetry.metrics import CallbackOptions
+ from opentelemetry.metrics import Counter
+ from opentelemetry.metrics import ObservableGauge
+ from opentelemetry.metrics import Observation
+ from pydantic import BaseModel

  from dkist_processing_common._util.scratch import WorkflowFileSystem
  from dkist_processing_common._util.tags import TagDB
@@ -27,7 +33,7 @@ __all__ = ["WorkflowTaskBase", "tag_type_hint"]

  logger = logging.getLogger(__name__)

- tag_type_hint = Iterable[str] | str
+ tag_type_hint: TypeAlias = Iterable[str] | str


  class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
@@ -66,7 +72,6 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
              workflow_name=workflow_name,
              workflow_version=workflow_version,
          )
-         self.task_name = self.__class__.__name__
          self.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, task_name=self.task_name)
          self.constants = self.constants_model_class(
              recipe_run_id=recipe_run_id, task_name=self.task_name
@@ -76,50 +81,16 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
              recipe_run_id=recipe_run_id, task_name=self.task_name, namespace="counter"
          )

-     # These apm* functions provide tagged spans for APM bliss
-     def apm_type_base(
-         self,
-         name: str,
-         *args,
-         arg_span_type: str = None,
-         arg_labels: dict[str, str] = None,
-         **kwargs,
-     ):
-         """Groom inputs to apm_step to handle various kwarg collisions."""
-         if "span_type" in kwargs:
-             raise RuntimeError(
-                 f"Cannot specify 'span_type' {kwargs['span_type']} in step that forces is it to be {arg_span_type}"
-             )
-
-         if "labels" in kwargs:
-             arg_labels.update(kwargs["labels"])
-             del kwargs["labels"]
-         logger.info(
-             f"Recording APM span: {name = }, {arg_span_type = }, {arg_labels = }, "
-             f"recipe_run_id = {self.recipe_run_id}"
-         )
-         return self.apm_step(name, *args, span_type=arg_span_type, labels=arg_labels, **kwargs)
-
-     def apm_task_step(self, name: str, *args, **kwargs):
-         """Span for management/organizational/info type stuff."""
-         return self.apm_type_base(
-             name, *args, arg_span_type="code.task", arg_labels={"type": "task"}, **kwargs
-         )
-
-     def apm_processing_step(self, name: str, *args, **kwargs):
-         """Span for computations."""
-         return self.apm_type_base(
-             name,
-             *args,
-             arg_span_type="code.processing",
-             arg_labels={"type": "processing"},
-             **kwargs,
+         # meter instruments
+         self.read_counter: Counter = self.meter.create_counter(
+             name=self.format_metric_name("tasks.reads"),
+             unit="1",
+             description="The number of reads executed in the processing stack.",
          )
-
-     def apm_writing_step(self, name: str, *args, **kwargs):
-         """Span for writing to disk."""
-         return self.apm_type_base(
-             name, *args, arg_span_type="code.writing", arg_labels={"type": "writing"}, **kwargs
+         self.write_counter: Counter = self.meter.create_counter(
+             name=self.format_metric_name("tasks.writes"),
+             unit="1",
+             description="The number of writes executed in the processing stack.",
          )

      @property
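The read/write counters above use the standard OpenTelemetry metrics API (the `Counter` import appears earlier in this diff). Here is a minimal self-contained sketch of the same pattern; the meter name and the attribute values are illustrative assumptions, not values from this package:

```python
# Minimal OpenTelemetry Counter sketch mirroring the read/write counters above.
from opentelemetry import metrics

meter = metrics.get_meter("dkist-processing-common-example")  # assumed name

read_counter = meter.create_counter(
    name="tasks.reads",
    unit="1",
    description="The number of reads executed in the processing stack.",
)

# Each read increments the counter with task-identifying attributes,
# as the read() hunk further down this diff does with base_telemetry_attributes.
read_counter.add(1, attributes={"recipe_run_id": "123", "task_name": "ExampleTask"})
```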
@@ -161,7 +132,7 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
          """Execute any pre-task setup required."""
          super().pre_run()
          if self.record_provenance or self.is_task_manual:
-             with self.apm_task_step("Record Provenance"):
+             with self.telemetry_span("Record Provenance"):
                  self._record_provenance()

      def read(
@@ -183,7 +154,9 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
          **decoder_kwargs
              Additional arguments to pass to the `decoder` function.
          """
-         return (decoder(p, **decoder_kwargs) for p in self.scratch.find_all(tags=tags))
+         for p in self.scratch.find_all(tags=tags):
+             self.read_counter.add(amount=1, attributes=self.base_telemetry_attributes)
+             yield decoder(p, **decoder_kwargs)

      def write(
          self,
@@ -221,6 +194,7 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
          -------
          The path for the written file
          """
+         self.write_counter.add(amount=1, attributes=self.base_telemetry_attributes)
          file_obj = encoder(data, **encoder_kwargs)
          if isinstance(tags, str):
              tags = [tags]
@@ -372,11 +346,11 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
          Filename Counter: not rolled back but its purpose of preventing file name collisions is not impacted
          """
          super().rollback()
-         with self.apm_writing_step("Rollback Scratch"):
+         with self.telemetry_span("Rollback Scratch"):
              self.scratch.rollback()
-         with self.apm_writing_step("Rollback Constants"):
+         with self.telemetry_span("Rollback Constants"):
              self.constants._rollback()
-         with self.apm_task_step("Change Recipe Run to Inprogress"):
+         with self.telemetry_span("Change Recipe Run to Inprogress"):
              self.metadata_store_change_recipe_run_to_inprogress()

      def __exit__(self, exc_type, exc_val, exc_tb):
@@ -54,11 +54,11 @@ class TransferL1Data(TransferDataBase, GlobusMixin):

      def transfer_objects(self):
          """Transfer movie and L1 output frames."""
-         with self.apm_task_step("Upload movie"):
+         with self.telemetry_span("Upload movie"):
              # Movie needs to be transferred separately as the movie headers need to go with it
              self.transfer_movie()

-         with self.apm_task_step("Upload science frames"):
+         with self.telemetry_span("Upload science frames"):
              self.transfer_output_frames()

      def transfer_output_frames(self):
@@ -120,10 +120,10 @@ class AssembleQualityData(L1OutputDataBase, QualityMixin):

      def run(self):
          """Run method for the task."""
-         with self.apm_processing_step("Assembling quality data"):
+         with self.telemetry_span("Assembling quality data"):
              quality_data = self.quality_assemble_data(polcal_label_list=self.polcal_label_list)

-         with self.apm_writing_step(
+         with self.telemetry_span(
              f"Saving quality data with {len(quality_data)} metrics to the file system"
          ):
              self.write(
@@ -146,7 +146,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):

      def run(self) -> None:
          """Run method for this task."""
-         with self.apm_writing_step(f"Storing quality data to metadata store"):
+         with self.telemetry_span(f"Storing quality data to metadata store"):
              # each quality_data file is a list - this will combine the elements of multiple lists into a single list
              quality_data = list(
                  chain.from_iterable(
@@ -156,7 +156,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
              self.metadata_store_add_quality_data(
                  dataset_id=self.constants.dataset_id, quality_data=quality_data
              )
-         with self.apm_processing_step("Count Expected Outputs"):
+         with self.telemetry_span("Count Expected Outputs"):
              dataset_id = self.constants.dataset_id
              expected_object_count = self.count(tags=Tag.output())
              if quality_data:
@@ -165,7 +165,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
              f"Adding Dataset Receipt Account: "
              f"{dataset_id=}, {expected_object_count=}, recipe_run_id={self.recipe_run_id}"
          )
-         with self.apm_task_step(
+         with self.telemetry_span(
              f"Add Dataset Receipt Account: {dataset_id = }, {expected_object_count = }"
          ):
              self.metadata_store_add_dataset_receipt_account(
@@ -245,10 +245,10 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):

      def run(self) -> None:
          """Run method for this task."""
-         with self.apm_task_step("Gather output data"):
+         with self.telemetry_span("Gather output data"):
              frames = self.read(tags=self.output_frame_tags)
              movies = self.read(tags=[Tag.output(), Tag.movie()])
-         with self.apm_task_step("Create message objects"):
+         with self.telemetry_span("Create message objects"):
              messages = []
              messages += self.frame_messages(paths=frames)
              frame_message_count = len(messages)
@@ -257,7 +257,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
          dataset_has_quality_data = self.dataset_has_quality_data
          if dataset_has_quality_data:
              messages.append(self.quality_report_message)
-         with self.apm_task_step(
+         with self.telemetry_span(
              f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
          ):
              self.interservice_bus_publish(messages=messages)
@@ -58,10 +58,10 @@ class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):

      def run(self) -> None:
          """Transfer the data and cleanup any folders."""
-         with self.apm_task_step("Transfer objects"):
+         with self.telemetry_span("Transfer objects"):
              self.transfer_objects()

-         with self.apm_task_step("Remove folder objects"):
+         with self.telemetry_span("Remove folder objects"):
              self.remove_folder_objects()

      @abstractmethod
@@ -24,8 +24,13 @@ at a different frame to get information (constants).
  import logging
  from abc import ABC
  from abc import abstractmethod
+ from typing import Generator
  from typing import TypeVar

+ from opentelemetry.metrics import CallbackOptions
+ from opentelemetry.metrics import ObservableGauge
+ from opentelemetry.metrics import Observation
+
  from dkist_processing_common.codecs.fits import fits_access_decoder
  from dkist_processing_common.models.constants import BudName
  from dkist_processing_common.models.fits_access import MetadataKey
@@ -34,6 +39,7 @@ from dkist_processing_common.models.flower_pot import Stem
  from dkist_processing_common.models.flower_pot import Thorn
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.models.task_name import TaskName
+ from dkist_processing_common.models.telemetry import Progress
  from dkist_processing_common.parsers.experiment_id_bud import ContributingExperimentIdsBud
  from dkist_processing_common.parsers.experiment_id_bud import ExperimentIdBud
  from dkist_processing_common.parsers.proposal_id_bud import ContributingProposalIdsBud
@@ -86,6 +92,33 @@ def default_tag_flower_factory() -> list[S]:
  class ParseDataBase(WorkflowTaskBase, ABC):
      """Base class for tasks which need to parse some already tagged data and set constants and/or add additional tags to them."""

+     def __init__(
+         self,
+         recipe_run_id: int,
+         workflow_name: str,
+         workflow_version: str,
+     ):
+         super().__init__(
+             recipe_run_id=recipe_run_id,
+             workflow_name=workflow_name,
+             workflow_version=workflow_version,
+         )
+         # task specific metering instrumentation
+         self.flower_pot_progress = Progress()
+         self.flower_pot_progress_gauge: ObservableGauge = self.meter.create_observable_gauge(
+             name=self.format_metric_name("tasks.flower.pot.progress"),
+             description="The progress of loading input frames for parsing as a percentage.",
+             callbacks=[lambda options: self.flower_pot_run_progress(options)],
+         )
+
+     def flower_pot_run_progress(
+         self, options: CallbackOptions
+     ) -> Generator[Observation, None, None]:
+         """Observe the progress of the current task as a percentage."""
+         yield Observation(
+             self.flower_pot_progress.percent_complete, attributes=self.base_telemetry_attributes
+         )
+
      @property
      @abstractmethod
      def constant_buds(self) -> list[S]:
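The progress gauge above uses OpenTelemetry's callback-based observable gauge: the SDK invokes the callback at each metric collection interval and records whatever `Observation`s it yields. A standalone sketch of that pattern, reusing the `Progress` model from this diff (meter name, metric name, and attributes here are illustrative assumptions):

```python
# Standalone sketch of the observable-gauge pattern used by ParseDataBase above.
from typing import Generator

from opentelemetry import metrics
from opentelemetry.metrics import CallbackOptions, Observation

from dkist_processing_common.models.telemetry import Progress

progress = Progress(total=100)


def observe_progress(options: CallbackOptions) -> Generator[Observation, None, None]:
    # Called by the SDK at each collection; reports the latest percentage.
    yield Observation(progress.percent_complete, attributes={"task_name": "ParseExample"})


meter = metrics.get_meter("dkist-processing-common-example")  # assumed name
gauge = meter.create_observable_gauge(
    name="tasks.flower.pot.progress",
    description="The progress of loading input frames for parsing as a percentage.",
    callbacks=[observe_progress],
)
```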
@@ -103,21 +136,22 @@ class ParseDataBase(WorkflowTaskBase, ABC):

      @property
      @abstractmethod
-     def tags_for_input_frames(self) -> list[Tag]:
+     def tags_for_input_frames(self) -> list[str]:
          """Define the tags for the data that will be parsed."""

      def run(self) -> None:
          """Run method for this task."""
-         with self.apm_task_step("Check that input frames exist"):
+         with self.telemetry_span("Check that input frames exist"):
              self.check_input_frames()

-         with self.apm_task_step("Ingest all input files"):
+         with self.telemetry_span("Ingest all input files"):
+             self.flower_pot_progress.total = self.scratch.count_all(tags=self.tags_for_input_frames)
              tag_pot, constant_pot = self.make_flower_pots()

-         with self.apm_task_step("Update constants"):
+         with self.telemetry_span("Update constants"):
              self.update_constants(constant_pot)

-         with self.apm_task_step("Tag files"):
+         with self.telemetry_span("Tag files"):
              self.tag_petals(tag_pot)

      def make_flower_pots(self) -> tuple[FlowerPot, FlowerPot]:
@@ -128,6 +162,7 @@ class ParseDataBase(WorkflowTaskBase, ABC):
          constant_pot.stems += self.constant_buds

          for fits_obj in self.input_frames:
+             self.flower_pot_progress.increment()
              filepath = fits_obj.name
              tag_pot.add_dirt(filepath, fits_obj)
              constant_pot.add_dirt(filepath, fits_obj)
@@ -161,7 +196,7 @@ class ParseDataBase(WorkflowTaskBase, ABC):
          None
          """
          for stem in constant_pot:
-             with self.apm_processing_step(f"Setting value of constant {stem.stem_name}"):
+             with self.telemetry_span(f"Setting value of constant {stem.stem_name}"):
                  if len(stem.petals) == 0:
                      # There are no petals so nothing to do
                      continue
@@ -186,7 +221,7 @@ class ParseDataBase(WorkflowTaskBase, ABC):
          None
          """
          for stem in tag_pot:
-             with self.apm_processing_step(f"Applying {stem.stem_name} tag to files"):
+             with self.telemetry_span(f"Applying {stem.stem_name} tag to files"):
                  for petal in stem.petals:
                      tag = Tag.format_tag(stem.stem_name, petal.value)
                      for path in petal.keys:
@@ -100,10 +100,10 @@ class QualityL0Metrics(WorkflowTaskBase, QualityMixin):
          `quality_task_types` properties, respectively.
          """
          modstate_list = self.modstate_list if self.modstate_list is not None else [None]
-         with self.apm_task_step("Computing L0 Quality Metrics"):
+         with self.telemetry_span("Computing L0 Quality Metrics"):
              quality_data_list = []
              for task_type in self.quality_task_types:
-                 with self.apm_processing_step(f"Working on {task_type = }"):
+                 with self.telemetry_span(f"Working on {task_type = }"):
                      for modstate in modstate_list:
                          paths = self.get_paths_for_modstate_and_task(modstate, task_type)
                          quality_data = self.calculate_l0_metrics(
@@ -112,7 +112,7 @@ class QualityL0Metrics(WorkflowTaskBase, QualityMixin):
                          quality_data.modstate = modstate
                          quality_data_list.append(quality_data)

-         with self.apm_writing_step("Saving metrics to disk"):
+         with self.telemetry_span("Saving metrics to disk"):
              for quality_data in quality_data_list:
                  if quality_data.has_values:
                      self.save_quality_data(quality_data, modstate=quality_data.modstate)
@@ -300,17 +300,17 @@ class QualityL1Metrics(WorkflowTaskBase, QualityMixin):
              ),
          ]

-         with self.apm_task_step("Reading L1 frames"):
+         with self.telemetry_span("Reading L1 frames"):
              paths = list(self.read(tags=[Tag.calibrated(), Tag.frame()]))

-         with self.apm_task_step("Calculating L1 quality metrics"):
+         with self.telemetry_span("Calculating L1 quality metrics"):
              for metric in metrics:
-                 with self.apm_processing_step(f"Calculating L1 metric {metric.value_source}"):
+                 with self.telemetry_span(f"Calculating L1 metric {metric.value_source}"):
                      for path in paths:
                          frame = L1QualityFitsAccess.from_path(path)
                          metric.append_value(frame=frame)

-         with self.apm_task_step("Sending lists for storage"):
+         with self.telemetry_span("Sending lists for storage"):
              for metric in metrics:
                  if metric.has_values:
                      metric.store_metric()
@@ -26,11 +26,11 @@ class TeardownBase(WorkflowTaskBase, ABC):

      def run(self) -> None:
          """Run method for Teardown class."""
-         with self.apm_task_step("Change recipe run status"):
+         with self.telemetry_span("Change recipe run status"):
              self.change_recipe_run_status_to_success()

          if not self.teardown_enabled:
-             with self.apm_task_step(f"Skip Teardown"):
+             with self.telemetry_span(f"Skip Teardown"):
                  return

          logger.info(f"Removing data and tags for recipe run {self.recipe_run_id}")
@@ -43,13 +43,13 @@ class TeardownBase(WorkflowTaskBase, ABC):

      def teardown(self):
          """Purge all constants and files/tags in scratch."""
-         with self.apm_task_step("Remove Data and Tags"):
+         with self.telemetry_span("Remove Data and Tags"):
              self.scratch.purge()

-         with self.apm_task_step("Remove File Counters"):
+         with self.telemetry_span("Remove File Counters"):
              self.filename_counter.purge()

-         with self.apm_task_step("Remove Constants"):
+         with self.telemetry_span("Remove Constants"):
              self.constants._purge()

@@ -81,13 +81,13 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin):

      def run(self) -> None:
          """Execute the data transfer."""
-         with self.apm_task_step("Change Status to InProgress"):
+         with self.telemetry_span("Change Status to InProgress"):
              self.metadata_store_change_recipe_run_to_inprogress()

-         with self.apm_task_step("Download Input Dataset Documents"):
+         with self.telemetry_span("Download Input Dataset Documents"):
              self.download_input_dataset()

-         with self.apm_task_step("Build Input Dataset Transfer List"):
+         with self.telemetry_span("Build Input Dataset Transfer List"):
              observe_transfer_objects = self.build_transfer_list(
                  doc_tag=Tag.input_dataset_observe_frames()
              )
@@ -103,13 +103,13 @@ class TransferL0Data(WorkflowTaskBase, GlobusMixin):
          if len(observe_transfer_objects + calibration_transfer_objects) == 0:
              raise ValueError("No input dataset frames found to transfer")

-         with self.apm_task_step("Transfer Input Frames and Parameter Files via Globus"):
+         with self.telemetry_span("Transfer Input Frames and Parameter Files via Globus"):
              self.globus_transfer_object_store_to_scratch(
                  transfer_items=self.format_transfer_items(input_dataset_objects=transfer_objects),
                  label=f"Transfer Input Objects for Recipe Run {self.recipe_run_id}",
              )

-         with self.apm_processing_step("Tag Input Frames and Parameter Files"):
+         with self.telemetry_span("Tag Input Frames and Parameter Files"):
              self.tag_transfer_objects(input_dataset_objects=transfer_objects)

      def rollback(self):
@@ -97,13 +97,13 @@ class CreateTrialDatasetInventory(OutputDataBase):

      def run(self) -> None:
          """Generate a json file simulating the dataset inventory record that would be produced when cataloging the dataset."""
-         with self.apm_task_step("Retrieve output frame headers"):
+         with self.telemetry_span("Retrieve output frame headers"):
              json_headers = list(self.frame_inventories)
-         with self.apm_processing_step("Generate dataset inventory"):
+         with self.telemetry_span("Generate dataset inventory"):
              inventory: dict = generate_inventory_from_frame_inventory(
                  bucket=self.destination_bucket, json_headers=json_headers
              )
-         with self.apm_writing_step("Save dataset inventory file"):
+         with self.telemetry_span("Save dataset inventory file"):
              self.write(
                  inventory,
                  tags=[Tag.output(), Tag.dataset_inventory()],
@@ -138,7 +138,7 @@ class CreateTrialAsdf(OutputDataBase):

      def run(self) -> None:
          """Generate an ASDF file simulating the ASDF file that would be produced when cataloging the dataset."""
-         with self.apm_processing_step("Generate ASDF tree"):
+         with self.telemetry_span("Generate ASDF tree"):
              tree = asdf_tree_from_filenames(
                  filenames=self.absolute_output_frame_paths,
                  hdu=1,  # compressed
@@ -156,7 +156,7 @@ class CreateTrialAsdf(OutputDataBase):
                  },
              )
          ]
-         with self.apm_writing_step("Save ASDF file"):
+         with self.telemetry_span("Save ASDF file"):
              with make_asdf_file_object(tree, extra_history=trial_history) as asdf_obj:
                  self.write(
                      asdf_obj,
@@ -192,7 +192,7 @@ class CreateTrialQualityReport(OutputDataBase):

      def create_trial_quality_report(self) -> None:
          """Generate a trial quality report in pdf format and save to the file system for future upload."""
-         with self.apm_processing_step(f"Building the trial quality report"):
+         with self.telemetry_span(f"Building the trial quality report"):
              # each quality_data file is a list - this will combine the elements of multiple lists into a single list
              quality_data = list(
                  chain.from_iterable(
@@ -203,7 +203,7 @@ class CreateTrialQualityReport(OutputDataBase):
              report_data=quality_data, dataset_id=self.constants.dataset_id
          )

-         with self.apm_writing_step(f"Saving the trial quality report to the file system"):
+         with self.telemetry_span(f"Saving the trial quality report to the file system"):
              self.write(
                  quality_report,
                  tags=[Tag.output(), Tag.quality_report()],
@@ -36,10 +36,10 @@ class TransferTrialData(TransferDataBase, GlobusMixin):

      def transfer_objects(self) -> None:
          """Collect transfer items and send them to Globus for transfer."""
-         with self.apm_task_step("Build transfer list"):
+         with self.telemetry_span("Build transfer list"):
              transfer_manifest = self.build_transfer_list()

-         with self.apm_task_step("Send transfer manifest to globus"):
+         with self.telemetry_span("Send transfer manifest to globus"):
              self.transfer_all_trial_frames(transfer_manifest)

      @cached_property
@@ -53,7 +53,7 @@ class WriteL1Frame(WorkflowTaskBase, MetadataStoreMixin, ABC):
      def run(self) -> None:
          """Run method for this task."""
          for stokes_param in self.constants.stokes_params:
-             with self.apm_task_step(f"Get calibrated frames for stokes param {stokes_param}"):
+             with self.telemetry_span(f"Get calibrated frames for stokes param {stokes_param}"):
                  tags = [Tag.frame(), Tag.calibrated(), Tag.stokes(stokes_param)]
                  calibrated_fits_objects = self.read(
                      tags=tags,
@@ -66,20 +66,6 @@ def tags_and_expected_generic_name() -> (list[str], str):
      return tags, expected_base_name


- def test_apm_spans(base_task):
-     """
-     Given: A WorkflowTaskBase task
-     When: Calling the task-specific apm_steps with weird inputs
-     Then: Errors happen when they're supposed to and not when they're not supposed to
-     """
-     with pytest.raises(RuntimeError):
-         with base_task.apm_processing_step("foo", span_type="bar"):
-             pass
-
-     with base_task.apm_task_step("foo", labels={"foo": "bar"}):
-         pass
-
-
  def test_tags(base_task):
      """
      Given: A WorkflowTaskBase task
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-common
- Version: 11.5.0rc1
+ Version: 11.6.0rc1
  Summary: Common task classes used by the DKIST science data processing pipelines
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -17,9 +17,9 @@ Requires-Dist: asdf<4.0.0,>=3.5.0
  Requires-Dist: astropy>=7.0.0
  Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
  Requires-Dist: dkist-header-validator<6.0,>=5.0.0
- Requires-Dist: dkist-processing-core==5.2.0rc1
+ Requires-Dist: dkist-processing-core==6.0.0rc3
  Requires-Dist: dkist-processing-pac<4.0,>=3.1
- Requires-Dist: dkist-service-configuration<3.0,>=2.0.2
+ Requires-Dist: dkist-service-configuration<5.0,>=4.1.7
  Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
  Requires-Dist: solar-wavelength-calibration<2.0,>=1.0
  Requires-Dist: globus-sdk>=3.12.0
@@ -97,6 +97,84 @@ Deployment

  dkist-processing-common is deployed to `PyPI <https://pypi.org/project/dkist-processing-common/>`_

+ Environment Variables
+ ---------------------
+
+ .. list-table::
+    :widths: 10 90
+    :header-rows: 1
+
+    * - Variable
+      - Field Info
+    * - LOGURU_LEVEL
+      - annotation=str required=False default='INFO' alias_priority=2 validation_alias='LOGURU_LEVEL' description='Log level for the application'
+    * - MESH_CONFIG
+      - annotation=dict[str, MeshService] required=False default_factory=dict alias_priority=2 validation_alias='MESH_CONFIG' description='Service mesh configuration' examples=[{'upstream_service_name': {'mesh_address': 'localhost', 'mesh_port': 6742}}]
+    * - RETRY_CONFIG
+      - annotation=RetryConfig required=False default_factory=RetryConfig description='Retry configuration for the service'
+    * - OTEL_SERVICE_NAME
+      - annotation=str required=False default='unknown-service-name' alias_priority=2 validation_alias='OTEL_SERVICE_NAME' description='Service name for OpenTelemetry'
+    * - DKIST_SERVICE_VERSION
+      - annotation=str required=False default='unknown-service-version' alias_priority=2 validation_alias='DKIST_SERVICE_VERSION' description='Service version for OpenTelemetry'
+    * - NOMAD_ALLOC_ID
+      - annotation=str required=False default='unknown-allocation-id' alias_priority=2 validation_alias='NOMAD_ALLOC_ID' description='Nomad allocation ID for OpenTelemetry'
+    * - OTEL_EXPORTER_OTLP_TRACES_INSECURE
+      - annotation=bool required=False default=True description='Use insecure connection for OTLP traces'
+    * - OTEL_EXPORTER_OTLP_METRICS_INSECURE
+      - annotation=bool required=False default=True description='Use insecure connection for OTLP metrics'
+    * - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
+      - annotation=Union[str, NoneType] required=False default=None description='OTLP traces endpoint. Overrides mesh configuration' examples=['localhost:4317']
+    * - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT
+      - annotation=Union[str, NoneType] required=False default=None description='OTLP metrics endpoint. Overrides mesh configuration' examples=['localhost:4317']
+    * - OTEL_PYTHON_DISABLED_INSTRUMENTATIONS
+      - annotation=list[str] required=False default_factory=list description='List of instrumentations to disable. https://opentelemetry.io/docs/zero-code/python/configuration/' examples=[['pika', 'requests']]
+    * - OTEL_PYTHON_FASTAPI_EXCLUDED_URLS
+      - annotation=str required=False default='health' description='Comma separated list of URLs to exclude from OpenTelemetry instrumentation in FastAPI.' examples=['client/.*/info,healthcheck']
+    * - SYSTEM_METRIC_INSTRUMENTATION_CONFIG
+      - annotation=Union[dict[str, bool], NoneType] required=False default=None description='Configuration for system metric instrumentation. https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/system_metrics/system_metrics.html' examples=[{'system.memory.usage': ['used', 'free', 'cached'], 'system.cpu.time': ['idle', 'user', 'system', 'irq'], 'system.network.io': ['transmit', 'receive'], 'process.runtime.memory': ['rss', 'vms'], 'process.runtime.cpu.time': ['user', 'system'], 'process.runtime.context_switches': ['involuntary', 'voluntary']}]
+    * - ISB_USERNAME
+      - annotation=str required=False default='guest' description='Username for the interservice-bus.'
+    * - ISB_PASSWORD
+      - annotation=str required=False default='guest' description='Password for the interservice-bus.'
+    * - ISB_EXCHANGE
+      - annotation=str required=False default='master.direct.x' description='Exchange for the interservice-bus.'
+    * - ISB_QUEUE_TYPE
+      - annotation=str required=False default='classic' description='Queue type for the interservice-bus.' examples=['quorum', 'classic']
+    * - BUILD_VERSION
+      - annotation=str required=False default='dev' description='Fallback build version for workflow tasks.'
+    * - GQL_AUTH_TOKEN
+      - annotation=Union[str, NoneType] required=False default='dev' description='The auth token for the metadata-store-api.'
+    * - OBJECT_STORE_ACCESS_KEY
+      - annotation=Union[str, NoneType] required=False default=None description='The access key for the object store.'
+    * - OBJECT_STORE_SECRET_KEY
+      - annotation=Union[str, NoneType] required=False default=None description='The secret key for the object store.'
+    * - OBJECT_STORE_USE_SSL
+      - annotation=bool required=False default=False description='Whether to use SSL for the object store connection.'
+    * - MULTIPART_THRESHOLD
+      - annotation=Union[int, NoneType] required=False default=None description='Multipart threshold for the object store.'
+    * - S3_CLIENT_CONFIG
+      - annotation=Union[dict, NoneType] required=False default=None description='S3 client configuration for the object store.'
+    * - S3_UPLOAD_CONFIG
+      - annotation=Union[dict, NoneType] required=False default=None description='S3 upload configuration for the object store.'
+    * - S3_DOWNLOAD_CONFIG
+      - annotation=Union[dict, NoneType] required=False default=None description='S3 download configuration for the object store.'
+    * - GLOBUS_TRANSPORT_PARAMS
+      - annotation=dict required=False default_factory=dict description='Globus transfer parameters.'
+    * - GLOBUS_CLIENT_ID
+      - annotation=Union[str, NoneType] required=False default=None description='Globus client ID for inbound/outbound transfers.'
+    * - GLOBUS_CLIENT_SECRET
+      - annotation=Union[str, NoneType] required=False default=None description='Globus client secret for inbound/outbound transfers.'
+    * - OBJECT_STORE_ENDPOINT
+      - annotation=Union[str, NoneType] required=False default=None description='Object store Globus Endpoint ID.'
+    * - SCRATCH_ENDPOINT
+      - annotation=Union[str, NoneType] required=False default=None description='Scratch Globus Endpoint ID.'
+    * - SCRATCH_BASE_PATH
+      - annotation=str required=False default='scratch/' description='Base path for scratch storage.'
+    * - SCRATCH_INVENTORY_DB_COUNT
+      - annotation=int required=False default=16 description='Number of databases in the scratch inventory (redis).'
+    * - DOCS_BASE_URL
+      - annotation=str required=False default='my_test_url' description='Base URL for the documentation site.'
+
  Development
  -----------

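The new README section above documents the pydantic-settings fields added in `config.py`; each field can be overridden through its environment variable before the configuration object is constructed. A hedged sketch (values are illustrative; the env-var-to-field mapping follows the table above):

```python
# Sketch: overriding configuration via the environment variables documented above.
import os

os.environ["SCRATCH_BASE_PATH"] = "/data/scratch/"
os.environ["OBJECT_STORE_USE_SSL"] = "true"

from dkist_processing_common.config import DKISTProcessingCommonConfiguration

# Env vars are read when the settings object is instantiated.
config = DKISTProcessingCommonConfiguration()
print(config.scratch_base_path)     # "/data/scratch/"
print(config.object_store_use_ssl)  # True
```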
@@ -1,7 +1,7 @@
  changelog/.gitempty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- changelog/266.misc.rst,sha256=5yvMMAn3qAkY4NgLSgc-7dFzJdBTfWTu1J_TwCzd_eM,107
+ changelog/268.misc.rst,sha256=radOBJqs--v8J03OdpzBpv79gFMMr4lUSNihKEmiieQ,115
  dkist_processing_common/__init__.py,sha256=GQ9EBnYhkOnt-qODclAoLS_g5YVhurxfg1tjVtI9rDI,320
- dkist_processing_common/config.py,sha256=OCqMjIV9mDvsWX_nvwS_v1W3chAhfA52wQVVXjCQwe8,4248
+ dkist_processing_common/config.py,sha256=kkFm-Q6jGGoV3Zdf3PSfk3xNYd2vSH0J2R8dBBav3Og,5218
  dkist_processing_common/manual.py,sha256=bIVVyLsbXMh-g_2L3kGROL-1TtJe0_XviHsp7Br31x8,7023
  dkist_processing_common/_util/__init__.py,sha256=xf6JNpMKQgbhE2Jivymt-WO0WF6PpGt9rl604YpuTWk,92
  dkist_processing_common/_util/constants.py,sha256=0_bWLsvusHD8GrTx4B6V7AieKAaFbN9crcAALaS8x5Q,3245
@@ -36,6 +36,7 @@ dkist_processing_common/models/parameters.py,sha256=9An3SxUEBI-oYHjICQ_q-IIScTfp
  dkist_processing_common/models/quality.py,sha256=TmDVbvPbfl5CIIs1ioD5guLUoEOFTfiJESvDjLTLl5s,3981
  dkist_processing_common/models/tags.py,sha256=0YqiDrismOSds_3XtFBb2dfv0gjMs6CgRv2dJKsSthI,12082
  dkist_processing_common/models/task_name.py,sha256=uAl7qTK4Xx1nqPAhNAe5nAXqxwPwQzAq58YmoccX6xQ,567
+ dkist_processing_common/models/telemetry.py,sha256=LrALTxF1KNlX2ANsGEx2e0a5EP5eobwUGfgLLNu43r8,606
  dkist_processing_common/models/wavelength.py,sha256=4UhRVoNvCHZitXo5S1oRdewadbmGfmDK6wetMV06POA,967
  dkist_processing_common/parsers/__init__.py,sha256=XJQzHtPb78F6-qXXKXjyztc0x-aHVlgv1C_l4dR88tI,67
  dkist_processing_common/parsers/cs_step.py,sha256=rL2gdstKEV5aqdPDs3a5EuUaOT_6YXDJVqIPIVKSw8M,6450
@@ -54,17 +55,17 @@ dkist_processing_common/parsers/time.py,sha256=z9zHV3Fz6ebEDgiPhv6H-aAS8e-sSW3Ek
  dkist_processing_common/parsers/unique_bud.py,sha256=IkS2zZkVzn3PRsYF2ksBkUxl_HJ4TxCqBKJUs1WdL54,3310
  dkist_processing_common/parsers/wavelength.py,sha256=P5C9mG8DAKK3GB3vWNRBI5l7pAW68lJK-kw-4eqERuQ,612
  dkist_processing_common/tasks/__init__.py,sha256=l23ctjNsKJbHbbqaZBMeOPaOtw0hmITEljI_JJ-CVsU,627
- dkist_processing_common/tasks/assemble_movie.py,sha256=m8O1psUBn8bRPj0AwMd1K5wNlcAtlxBQb1PZeSKne6o,12772
- dkist_processing_common/tasks/base.py,sha256=mebbG-VZp9Iu-J7skE9S5qMCUENZy48ySJkaGh8weC0,13144
- dkist_processing_common/tasks/l1_output_data.py,sha256=jjJao1YO88sgsKfl6Lg11yurlM1bcljOZpK1-ykFWw4,10576
- dkist_processing_common/tasks/output_data_base.py,sha256=ZWdxkXKzrQdc1PN97b8cEcFBckraOJkwrBoMOPWii8o,3683
- dkist_processing_common/tasks/parse_l0_input_data.py,sha256=Apa1qG6TAVll_oCGp5aSZb_j_ywNnu1GmYNnpjvHqH4,8133
- dkist_processing_common/tasks/quality_metrics.py,sha256=5rLCkGyE6F76bJp71y2lmKbol5FJ_ynbZbkhX8IBPKA,12489
- dkist_processing_common/tasks/teardown.py,sha256=eHyOJbtu8w51wFSTVFSt8iVVOkSODqHLvrcP6W-Szcg,2355
- dkist_processing_common/tasks/transfer_input_data.py,sha256=hjJzgwCVqltF87IMa2Oi9hmKyT0mFqwvPCI8xTV2B44,5787
- dkist_processing_common/tasks/trial_catalog.py,sha256=rGIlckrWYqx7Div5dOTQr_hcKSWQfR_i5DyzoHVhE9A,8749
- dkist_processing_common/tasks/trial_output_data.py,sha256=5WwFuShXNIFO7fSh6nLAkpHS8NbZeW7AYsjRxzqRhNI,6835
- dkist_processing_common/tasks/write_l1.py,sha256=rki0og7B10LooAFHABMz64NtwCo54CSwy3qOcOsve_g,23203
+ dkist_processing_common/tasks/assemble_movie.py,sha256=1ixDG-f4ODt0vywqVccG3aodLljVO5OGlvuMO9EEvcU,12767
+ dkist_processing_common/tasks/base.py,sha256=MseRBypflKFTSRjkNzrumbc6OXA8f_gWy17odWlfEhk,12351
+ dkist_processing_common/tasks/l1_output_data.py,sha256=D4S3kH2uRKp1b8_xF2YiWO_mGj19UJS5wKQZ4OdatGs,10568
+ dkist_processing_common/tasks/output_data_base.py,sha256=r1Bu3FX5zTVj66GTMWtaV_NdhxjyjSm661Bt2Mxmfi4,3685
+ dkist_processing_common/tasks/parse_l0_input_data.py,sha256=DV38chKJRxxXy0Q4FkZIomWic6Q8UW-XV8nO8LLafO4,9573
+ dkist_processing_common/tasks/quality_metrics.py,sha256=cvGF6tJ8yAvxOvkeG3tWxYwL885BrFW5X3V7_MSzL-A,12481
+ dkist_processing_common/tasks/teardown.py,sha256=rwT9lWINVDF11-az_nx-Z5ykMTX_SJCchobpU6sErgk,2360
+ dkist_processing_common/tasks/transfer_input_data.py,sha256=DAYfS-B1o-iBT9MXU-TiJG4Hv05Z0c_JzPrnFgvnK9g,5786
+ dkist_processing_common/tasks/trial_catalog.py,sha256=iAaMT_oLnupA1O3xAtqVjsqRY5f_hyvMps-fXg6KlHU,8729
+ dkist_processing_common/tasks/trial_output_data.py,sha256=CPMXXODvN5RTcu9bTF8v6AXciCl212EWP6qTiARvUNk,6837
+ dkist_processing_common/tasks/write_l1.py,sha256=Xy834RTp3F95kLcW4ba5gfHMUocfZd82ZQQKnvQcP2M,23204
  dkist_processing_common/tasks/mixin/__init__.py,sha256=-g-DQbU7m1bclJYuFe3Yh757V-35GIDTbstardKQ7nU,68
  dkist_processing_common/tasks/mixin/globus.py,sha256=9ey_UCacqCfmxYZSgm6VDefdlm7dkNloC8G5DeVub8s,6592
  dkist_processing_common/tasks/mixin/interservice_bus.py,sha256=M6R922l7gJSmmU_vswUXxy-c5DWNrIRjQu9H9CSgGfU,1081
@@ -78,7 +79,7 @@ dkist_processing_common/tests/conftest.py,sha256=Tm-Yq956EAafpDtu1d7JjdVY0Unp9e4
  dkist_processing_common/tests/mock_metadata_store.py,sha256=fbCvSk1-s0ojN6l538RWodPW7dx6k4eXqipemnHKO0Y,8248
  dkist_processing_common/tests/test_assemble_movie.py,sha256=dyVhowxB-Kc6GuxlDs74UrPtK9fwdUL7y5haA3Bidz0,4065
  dkist_processing_common/tests/test_assemble_quality.py,sha256=-F22jMY6mPy65VZ1TZY2r1vsxMXOPmZHArGx70OD3BA,17832
- dkist_processing_common/tests/test_base.py,sha256=EQsIkeWoOtjk0yxr_oPkhW3Uc0p8cMsknSMwKgrJI9E,7078
+ dkist_processing_common/tests/test_base.py,sha256=gsyBG2R6Ufx7CzbHeGMagUwM9yCfpN4gCSZ6-aH2q48,6643
  dkist_processing_common/tests/test_codecs.py,sha256=XuvG1sG8DECMPmxtDEi98TxlvTSAy0vrtUUFLrwnHlA,22173
  dkist_processing_common/tests/test_constants.py,sha256=I_KcJs7ScCn53GYhEO6qjWrrnfZuyC1IVYOy87Pjlg4,6565
  dkist_processing_common/tests/test_cs_step.py,sha256=RA0QD3D8eaL3YSOL_gIJ9wkngy14RQ2jbD-05KAziW4,2408
@@ -117,7 +118,7 @@ docs/landing_page.rst,sha256=aPAuXFhBx73lEZ59B6E6JXxkK0LlxzD0n-HXqHrfumQ,746
  docs/make.bat,sha256=mBAhtURwhQ7yc95pqwJzlhqBSvRknr1aqZ5s8NKvdKs,4513
  docs/requirements.txt,sha256=Kbl_X4c7RQZw035YTeNB63We6I7pvXFU4T0Uflp2yDY,29
  licenses/LICENSE.rst,sha256=piZaQplkzOMmH1NXg6QIdo9wwo9pPCoHkvm2-DmH76E,1462
- dkist_processing_common-11.5.0rc1.dist-info/METADATA,sha256=8FTiTtNr9N0SSWAkAuQTWRChbk0UUh-jW0KoO1xWGTQ,7207
- dkist_processing_common-11.5.0rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dkist_processing_common-11.5.0rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
- dkist_processing_common-11.5.0rc1.dist-info/RECORD,,
+ dkist_processing_common-11.6.0rc1.dist-info/METADATA,sha256=xEnugllrpPzQIuFwW_mcD052MQ_KW6HMlYCyf9w9K3k,13319
+ dkist_processing_common-11.6.0rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dkist_processing_common-11.6.0rc1.dist-info/top_level.txt,sha256=LJhd1W-Vn90K8HnQDIE4r52YDpUjjMWDnllAWHBByW0,48
+ dkist_processing_common-11.6.0rc1.dist-info/RECORD,,
changelog/266.misc.rst DELETED
@@ -1 +0,0 @@
- Update dkist-processing-core to 5.2.0 which includes upgrades to airflow 2.11.0 and requires Python 3.12+.