dkist-processing-common 10.5.4__py3-none-any.whl → 12.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. changelog/280.misc.rst +1 -0
  2. changelog/282.feature.2.rst +2 -0
  3. changelog/282.feature.rst +2 -0
  4. changelog/284.feature.rst +1 -0
  5. changelog/285.feature.rst +2 -0
  6. changelog/285.misc.rst +2 -0
  7. changelog/286.feature.rst +2 -0
  8. changelog/287.misc.rst +1 -0
  9. dkist_processing_common/__init__.py +1 -0
  10. dkist_processing_common/_util/constants.py +1 -0
  11. dkist_processing_common/_util/graphql.py +1 -0
  12. dkist_processing_common/_util/scratch.py +9 -9
  13. dkist_processing_common/_util/tags.py +1 -0
  14. dkist_processing_common/codecs/array.py +20 -0
  15. dkist_processing_common/codecs/asdf.py +9 -3
  16. dkist_processing_common/codecs/basemodel.py +22 -0
  17. dkist_processing_common/codecs/bytes.py +1 -0
  18. dkist_processing_common/codecs/fits.py +37 -9
  19. dkist_processing_common/codecs/iobase.py +1 -0
  20. dkist_processing_common/codecs/json.py +1 -0
  21. dkist_processing_common/codecs/path.py +1 -0
  22. dkist_processing_common/codecs/quality.py +1 -1
  23. dkist_processing_common/codecs/str.py +1 -0
  24. dkist_processing_common/config.py +64 -25
  25. dkist_processing_common/manual.py +6 -8
  26. dkist_processing_common/models/constants.py +373 -37
  27. dkist_processing_common/models/dkist_location.py +27 -0
  28. dkist_processing_common/models/fits_access.py +48 -0
  29. dkist_processing_common/models/flower_pot.py +231 -9
  30. dkist_processing_common/models/fried_parameter.py +41 -0
  31. dkist_processing_common/models/graphql.py +66 -75
  32. dkist_processing_common/models/input_dataset.py +117 -0
  33. dkist_processing_common/models/message.py +1 -1
  34. dkist_processing_common/models/message_queue_binding.py +1 -1
  35. dkist_processing_common/models/metric_code.py +2 -0
  36. dkist_processing_common/models/parameters.py +65 -28
  37. dkist_processing_common/models/quality.py +50 -5
  38. dkist_processing_common/models/tags.py +23 -21
  39. dkist_processing_common/models/task_name.py +3 -2
  40. dkist_processing_common/models/telemetry.py +28 -0
  41. dkist_processing_common/models/wavelength.py +3 -1
  42. dkist_processing_common/parsers/average_bud.py +46 -0
  43. dkist_processing_common/parsers/cs_step.py +13 -12
  44. dkist_processing_common/parsers/dsps_repeat.py +6 -4
  45. dkist_processing_common/parsers/experiment_id_bud.py +12 -4
  46. dkist_processing_common/parsers/id_bud.py +42 -27
  47. dkist_processing_common/parsers/l0_fits_access.py +5 -3
  48. dkist_processing_common/parsers/l1_fits_access.py +51 -23
  49. dkist_processing_common/parsers/lookup_bud.py +125 -0
  50. dkist_processing_common/parsers/near_bud.py +21 -20
  51. dkist_processing_common/parsers/observing_program_id_bud.py +24 -0
  52. dkist_processing_common/parsers/proposal_id_bud.py +13 -5
  53. dkist_processing_common/parsers/quality.py +2 -0
  54. dkist_processing_common/parsers/retarder.py +32 -0
  55. dkist_processing_common/parsers/single_value_single_key_flower.py +6 -1
  56. dkist_processing_common/parsers/task.py +8 -6
  57. dkist_processing_common/parsers/time.py +178 -72
  58. dkist_processing_common/parsers/unique_bud.py +21 -22
  59. dkist_processing_common/parsers/wavelength.py +5 -3
  60. dkist_processing_common/tasks/__init__.py +3 -2
  61. dkist_processing_common/tasks/assemble_movie.py +4 -3
  62. dkist_processing_common/tasks/base.py +59 -60
  63. dkist_processing_common/tasks/l1_output_data.py +54 -53
  64. dkist_processing_common/tasks/mixin/globus.py +24 -27
  65. dkist_processing_common/tasks/mixin/interservice_bus.py +1 -0
  66. dkist_processing_common/tasks/mixin/metadata_store.py +108 -243
  67. dkist_processing_common/tasks/mixin/object_store.py +22 -0
  68. dkist_processing_common/tasks/mixin/quality/__init__.py +1 -0
  69. dkist_processing_common/tasks/mixin/quality/_base.py +8 -1
  70. dkist_processing_common/tasks/mixin/quality/_metrics.py +166 -14
  71. dkist_processing_common/tasks/output_data_base.py +4 -3
  72. dkist_processing_common/tasks/parse_l0_input_data.py +277 -15
  73. dkist_processing_common/tasks/quality_metrics.py +9 -9
  74. dkist_processing_common/tasks/teardown.py +7 -7
  75. dkist_processing_common/tasks/transfer_input_data.py +67 -69
  76. dkist_processing_common/tasks/trial_catalog.py +77 -17
  77. dkist_processing_common/tasks/trial_output_data.py +16 -17
  78. dkist_processing_common/tasks/write_l1.py +102 -72
  79. dkist_processing_common/tests/conftest.py +32 -173
  80. dkist_processing_common/tests/mock_metadata_store.py +271 -0
  81. dkist_processing_common/tests/test_assemble_movie.py +4 -4
  82. dkist_processing_common/tests/test_assemble_quality.py +32 -4
  83. dkist_processing_common/tests/test_base.py +5 -19
  84. dkist_processing_common/tests/test_codecs.py +103 -12
  85. dkist_processing_common/tests/test_constants.py +15 -0
  86. dkist_processing_common/tests/test_dkist_location.py +15 -0
  87. dkist_processing_common/tests/test_fits_access.py +56 -19
  88. dkist_processing_common/tests/test_flower_pot.py +147 -5
  89. dkist_processing_common/tests/test_fried_parameter.py +27 -0
  90. dkist_processing_common/tests/test_input_dataset.py +78 -361
  91. dkist_processing_common/tests/test_interservice_bus.py +1 -0
  92. dkist_processing_common/tests/test_interservice_bus_mixin.py +1 -1
  93. dkist_processing_common/tests/test_manual_processing.py +33 -0
  94. dkist_processing_common/tests/test_output_data_base.py +5 -7
  95. dkist_processing_common/tests/test_parameters.py +71 -22
  96. dkist_processing_common/tests/test_parse_l0_input_data.py +115 -32
  97. dkist_processing_common/tests/test_publish_catalog_messages.py +2 -24
  98. dkist_processing_common/tests/test_quality.py +1 -0
  99. dkist_processing_common/tests/test_quality_mixin.py +255 -23
  100. dkist_processing_common/tests/test_scratch.py +2 -1
  101. dkist_processing_common/tests/test_stems.py +511 -168
  102. dkist_processing_common/tests/test_submit_dataset_metadata.py +3 -7
  103. dkist_processing_common/tests/test_tags.py +1 -0
  104. dkist_processing_common/tests/test_task_name.py +1 -1
  105. dkist_processing_common/tests/test_task_parsing.py +17 -7
  106. dkist_processing_common/tests/test_teardown.py +28 -24
  107. dkist_processing_common/tests/test_transfer_input_data.py +270 -125
  108. dkist_processing_common/tests/test_transfer_l1_output_data.py +2 -3
  109. dkist_processing_common/tests/test_trial_catalog.py +83 -8
  110. dkist_processing_common/tests/test_trial_output_data.py +46 -73
  111. dkist_processing_common/tests/test_workflow_task_base.py +8 -10
  112. dkist_processing_common/tests/test_write_l1.py +298 -76
  113. dkist_processing_common-12.1.0rc1.dist-info/METADATA +265 -0
  114. dkist_processing_common-12.1.0rc1.dist-info/RECORD +134 -0
  115. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/WHEEL +1 -1
  116. docs/conf.py +1 -0
  117. docs/index.rst +1 -1
  118. docs/landing_page.rst +13 -0
  119. dkist_processing_common/tasks/mixin/input_dataset.py +0 -166
  120. dkist_processing_common-10.5.4.dist-info/METADATA +0 -175
  121. dkist_processing_common-10.5.4.dist-info/RECORD +0 -112
  122. {dkist_processing_common-10.5.4.dist-info → dkist_processing_common-12.1.0rc1.dist-info}/top_level.txt +0 -0
changelog/280.misc.rst ADDED
@@ -0,0 +1 @@
+ Speed up the reading of INPUT files in Parse tasks by turning off image decompression and checksum checks.
changelog/282.feature.2.rst ADDED
@@ -0,0 +1,2 @@
+ Add `SetStem` base class that has all the benefits of `ListStem` but also gets a speedup by storing values in a `set` for
+ cases where repeated values don't need to be tracked.
changelog/282.feature.rst ADDED
@@ -0,0 +1,2 @@
+ Add `ListStem` base class for huge speedup in cases where the keys don't matter and the `getter` logic only depends on the
+ list of values computed by `setter`. This is the case for most (all?) "Buds".
changelog/284.feature.rst ADDED
@@ -0,0 +1 @@
+ Speed up parsing of the `*CadenceBud`, `TaskDateBeginBud`, and `[Task]NearFloatBud` by basing these buds on `ListStem`.
changelog/285.feature.rst ADDED
@@ -0,0 +1,2 @@
+ Speed up `NumCSStepBud`, `[Task]UniqueBud`, `[Task]ContributingIdsBud`, and `TaskRoundTimeBudBase` parsing by basing
+ these buds on `SetStem`.
changelog/285.misc.rst ADDED
@@ -0,0 +1,2 @@
+ Update `RetarderNameBud` to drop "clear" values (i.e., the retarder is out of the beam) in the `setter` instead of the `getter`.
+ This brings it in line with standard Bud-practice.
changelog/286.feature.rst ADDED
@@ -0,0 +1,2 @@
+ Speed up `CSStepFlower` parsing by using an internal set to keep track of the unique `CSStep` objects. This removes the
+ need to compute the unique set when computing the tag for each file.
changelog/287.misc.rst ADDED
@@ -0,0 +1 @@
+ Convert the TimeLookupBud to be a SetStem constant.
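Several of these entries describe the same optimization: replace per-key bookkeeping in a Stem with a flat list or set of values and defer all logic to a single `getter` call. Below is a minimal sketch of the idea only; the class and method names are illustrative and are not the real flower-pot API in `dkist_processing_common/models/flower_pot.py`.

from abc import ABC, abstractmethod
from typing import Any, Hashable


class SetStemSketch(ABC):
    """Illustrative only: accumulate one hashable value per file in a set.

    Storing values in a set (rather than a per-file mapping) makes repeated
    values free and leaves the getter a single aggregate to inspect.
    """

    def __init__(self, stem_name: str):
        self.stem_name = stem_name
        self.values: set[Hashable] = set()

    @abstractmethod
    def setter(self, fits_obj: Any) -> Hashable:
        """Extract the value of interest from one file's headers."""

    def update(self, fits_obj: Any) -> None:
        # Repeated values collapse automatically; no keys are tracked.
        self.values.add(self.setter(fits_obj))

    def getter(self) -> Hashable:
        """Demand a single unique value, as a UniqueBud-style constant would."""
        if len(self.values) != 1:
            raise ValueError(f"{self.stem_name} expected one unique value, got {self.values}")
        return next(iter(self.values))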
dkist_processing_common/__init__.py CHANGED
@@ -1,4 +1,5 @@
  """Package providing support classes and methods used by all workflow tasks."""
+
  from importlib.metadata import PackageNotFoundError
  from importlib.metadata import version
 
dkist_processing_common/_util/constants.py CHANGED
@@ -1,4 +1,5 @@
  """Wrapper for interactions with shared database that holds arbitrary data that persists across the entire recipe run."""
+
  import json
  from collections.abc import MutableMapping
  from enum import Enum
dkist_processing_common/_util/graphql.py CHANGED
@@ -1,4 +1,5 @@
  """Extension of the GraphQL supporting retries for data processing use cases."""
+
  import logging
  from typing import Any
  from typing import Callable
dkist_processing_common/_util/scratch.py CHANGED
@@ -1,4 +1,5 @@
  """Scratch file system api."""
+
  import logging
  from contextlib import contextmanager
  from os import umask
@@ -9,7 +10,6 @@ from typing import Generator
  from dkist_processing_common._util.tags import TagDB
  from dkist_processing_common.config import common_configurations
 
-
  logger = logging.getLogger(__name__)
 
 
@@ -82,7 +82,7 @@ class WorkflowFileSystem:
          return self.workflow_base_path / relative_path
 
      @staticmethod
-     def _parse_tags(tags: str | list | None) -> list:
+     def parse_tags(tags: str | list | None) -> list:
          """Parse tags to support an individual tag in the form of a string or an arbitrarily nested list of strings."""
          if tags is None:
              return []
@@ -115,7 +115,7 @@ class WorkflowFileSystem:
          -------
          None
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          path = self.absolute_path(relative_path)
          # audit the path that was written to scratch
          self._audit_db.add(tag=self._audit_write_tag, value=str(path))
@@ -163,7 +163,7 @@ class WorkflowFileSystem:
          -------
          None
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          path = Path(path)
          if not (self.workflow_base_path in path.parents):
              raise ValueError(
@@ -201,7 +201,7 @@ class WorkflowFileSystem:
 
      def remove_tags(self, path: Path | str, tags: list | str) -> None:
          """Remove a tag or tags from a given path."""
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          for tag in tags:
              self._tag_db.remove(tag, str(path))
 
@@ -218,7 +218,7 @@ class WorkflowFileSystem:
          -------
          A generator of path objects matching the union of the desired tags
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          paths = self._tag_db.any(tags)
          logger.debug(f"Found {len(paths)} files containing the set of {tags=}")
          for path in paths:
@@ -237,7 +237,7 @@ class WorkflowFileSystem:
          -------
          A generator of path objects matching the intersection of the desired tags
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          paths = self._tag_db.all(tags)
          logger.debug(f"Found {len(paths)} files containing the set of {tags=}")
          for path in paths:
@@ -256,7 +256,7 @@ class WorkflowFileSystem:
          -------
          The number of objects tagged with the union of the input tags.
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          return len(self._tag_db.any(tags))
 
      def count_all(self, tags: str | list) -> int:
@@ -273,7 +273,7 @@ class WorkflowFileSystem:
          The number of objects tagged with the intersection of the input tags.
 
          """
-         tags = self._parse_tags(tags)
+         tags = self.parse_tags(tags)
          return len(self._tag_db.all(tags))
 
      def close(self):
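With the rename from `_parse_tags` to `parse_tags`, tag normalization becomes public API. A small sketch of the behavior its docstring promises; the expected outputs for the non-None cases are inferred from the docstring, and the flattening order of nested lists is an assumption here, not verified against the implementation.

from dkist_processing_common._util.scratch import WorkflowFileSystem

# A static method, so no WorkflowFileSystem instance is needed.
print(WorkflowFileSystem.parse_tags(None))     # [] per the None short-circuit above
print(WorkflowFileSystem.parse_tags("INPUT"))  # expected: ["INPUT"]
# An arbitrarily nested list should flatten, e.g. ["INPUT", ["FRAME", "TASK_DARK"]]
# is expected to yield ["INPUT", "FRAME", "TASK_DARK"] (ordering assumed).
print(WorkflowFileSystem.parse_tags(["INPUT", ["FRAME", "TASK_DARK"]]))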
dkist_processing_common/_util/tags.py CHANGED
@@ -1,4 +1,5 @@
  """Tag cloud manager."""
+
  from pathlib import Path
  from typing import Iterable
 
dkist_processing_common/codecs/array.py ADDED
@@ -0,0 +1,20 @@
+ """Encoder/decoder for writing/reading numpy arrays."""
+
+ import io
+ from pathlib import Path
+
+ import numpy as np
+
+ from dkist_processing_common.codecs.iobase import iobase_encoder
+
+
+ def array_encoder(data: np.ndarray, **np_kwargs) -> bytes:
+     """Convert a numpy array to bytes compatible with np.load()."""
+     buffer = io.BytesIO()
+     np.save(buffer, data, **np_kwargs)
+     return iobase_encoder(buffer)
+
+
+ def array_decoder(path: Path, **np_kwargs) -> np.ndarray:
+     """Return the data in the file as a numpy array using np.load()."""
+     return np.load(path, **np_kwargs)
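A round-trip sketch for the new array codec. The temporary-file plumbing below stands in for the scratch `write`/`read` helpers that tasks would normally pass these functions to via their `encoder=`/`decoder=` arguments.

import tempfile
from pathlib import Path

import numpy as np

from dkist_processing_common.codecs.array import array_decoder, array_encoder

data = np.arange(12.0).reshape(3, 4)
raw = array_encoder(data)  # .npy-format bytes via np.save into a BytesIO

with tempfile.NamedTemporaryFile(suffix=".npy", delete=False) as f:
    f.write(raw)
    tmp_path = Path(f.name)

restored = array_decoder(tmp_path)  # np.load under the hood
assert np.array_equal(data, restored)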
dkist_processing_common/codecs/asdf.py CHANGED
@@ -1,4 +1,5 @@
  """Encoders and decoders for writing and reading ASDF files."""
+
  from io import BytesIO
  from pathlib import Path
 
@@ -7,14 +8,19 @@ import asdf
  from dkist_processing_common.codecs.iobase import iobase_encoder
 
 
- def asdf_encoder(tree: dict, custom_schema=None, **asdf_write_kwargs) -> bytes:
-     """Convert a dict to raw bytes representing an ASDF file for writing to a file."""
-     asdf_obj = asdf.AsdfFile(tree, custom_schema=custom_schema)
+ def asdf_fileobj_encoder(asdf_obj: asdf.AsdfFile, custom_schema=None, **asdf_write_kwargs) -> bytes:
+     """Save an `asdf.AsdfFile` object."""
      file_obj = BytesIO()
      asdf_obj.write_to(file_obj, **asdf_write_kwargs)
      return iobase_encoder(file_obj)
 
 
+ def asdf_encoder(tree: dict, custom_schema=None, **asdf_write_kwargs) -> bytes:
+     """Convert a dict to raw bytes representing an ASDF file for writing to a file."""
+     asdf_obj = asdf.AsdfFile(tree, custom_schema=custom_schema)
+     return asdf_fileobj_encoder(asdf_obj, custom_schema=custom_schema, **asdf_write_kwargs)
+
+
  def asdf_decoder(
      path: Path, lazy_load: bool = False, memmap: bool = False, **asdf_read_kwargs
  ) -> dict:
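The refactor splits encoding in two: `asdf_encoder` keeps its dict-in, bytes-out contract, while the new `asdf_fileobj_encoder` accepts a pre-built `asdf.AsdfFile` so callers can manipulate the object before serialization. A brief sketch; the tree contents are illustrative.

import asdf

from dkist_processing_common.codecs.asdf import asdf_encoder, asdf_fileobj_encoder

tree = {"wavelength_nm": 630.2, "frame_count": 120}

raw_from_tree = asdf_encoder(tree)             # builds the AsdfFile internally

asdf_obj = asdf.AsdfFile(tree)                 # or build (and mutate) it yourself,
raw_from_obj = asdf_fileobj_encoder(asdf_obj)  # then serialize to bytes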
dkist_processing_common/codecs/basemodel.py ADDED
@@ -0,0 +1,22 @@
+ """Encoder/decoder for writing and reading Pydantic BaseModel objects."""
+
+ from pathlib import Path
+ from typing import Type
+
+ from pydantic import BaseModel
+
+ from dkist_processing_common.codecs.bytes import bytes_decoder
+ from dkist_processing_common.codecs.str import str_encoder
+
+
+ def basemodel_encoder(data: BaseModel, **basemodel_kwargs) -> bytes:
+     """Convert a Pydantic BaseModel object into bytes for writing to file."""
+     data_dump = data.model_dump_json(**basemodel_kwargs)
+     return str_encoder(data_dump)
+
+
+ def basemodel_decoder(path: Path, model: Type[BaseModel], **basemodel_kwargs) -> BaseModel:
+     """Return the data in the file as a Pydantic BaseModel object."""
+     data = bytes_decoder(path)
+     model_validated = model.model_validate_json(data, **basemodel_kwargs)
+     return model_validated
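A round-trip sketch for the Pydantic codec. `DemoModel` is a made-up model and the temp file stands in for a scratch path.

import tempfile
from pathlib import Path

from pydantic import BaseModel

from dkist_processing_common.codecs.basemodel import basemodel_decoder, basemodel_encoder


class DemoModel(BaseModel):
    dataset_id: str
    frame_count: int


model = DemoModel(dataset_id="ABCDE", frame_count=42)
raw = basemodel_encoder(model)  # JSON text from model_dump_json, encoded to bytes

with tempfile.NamedTemporaryFile(suffix=".json", delete=False) as f:
    f.write(raw)
    tmp_path = Path(f.name)

restored = basemodel_decoder(tmp_path, model=DemoModel)
assert restored == model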
dkist_processing_common/codecs/bytes.py CHANGED
@@ -1,4 +1,5 @@
  """Encoder/decoder for writing and reading bytes objects."""
+
  from pathlib import Path
 
 
dkist_processing_common/codecs/fits.py CHANGED
@@ -1,4 +1,5 @@
  """Encoders and decoders for writing and reading FITS files."""
+
  from io import BytesIO
  from pathlib import Path
  from typing import Type
@@ -30,15 +31,30 @@ def fits_hdulist_encoder(hdu_list: fits.HDUList) -> bytes:
      return iobase_encoder(file_obj)
 
 
- def fits_hdu_decoder(path: Path) -> fits.PrimaryHDU | fits.CompImageHDU:
+ def fits_hdu_decoder(
+     path: Path,
+     hdu: int | None = None,
+     checksum: bool = True,
+     disable_image_compression: bool = False,
+ ) -> fits.PrimaryHDU | fits.CompImageHDU:
      """Read a Path with `fits` to produce an `HDUList`."""
-     hdu_list = fits.open(path)
-     return _extract_hdu(hdu_list)
+     hdu_list = fits.open(
+         path, checksum=checksum, disable_image_compression=disable_image_compression
+     )
+     return _extract_hdu(hdu_list, hdu)
 
 
- def fits_array_decoder(path: Path, auto_squeeze: bool = True) -> np.ndarray:
+ def fits_array_decoder(
+     path: Path,
+     hdu: int | None = None,
+     auto_squeeze: bool = True,
+     checksum: bool = True,
+     disable_image_compression: bool = False,
+ ) -> np.ndarray:
      """Read a Path with `fits` and return the `.data` property."""
-     hdu = fits_hdu_decoder(path)
+     hdu = fits_hdu_decoder(
+         path, hdu=hdu, checksum=checksum, disable_image_compression=disable_image_compression
+     )
      data = hdu.data
 
      # This conditional is explicitly to catch summit data with a dummy first axis for WCS
@@ -49,15 +65,27 @@ def fits_array_decoder(path: Path, auto_squeeze: bool = True) -> np.ndarray:
 
 
  def fits_access_decoder(
-     path: Path, fits_access_class: Type[FitsAccessBase], **fits_access_kwargs
+     path: Path,
+     fits_access_class: Type[FitsAccessBase],
+     checksum: bool = True,
+     disable_image_compression: bool = False,
+     **fits_access_kwargs,
  ) -> FitsAccessBase:
      """Read a Path with `fits` and ingest into a `FitsAccessBase`-type object."""
-     hdu = fits_hdu_decoder(path)
+     hdu = fits_hdu_decoder(
+         path, checksum=checksum, disable_image_compression=disable_image_compression
+     )
      return fits_access_class(hdu=hdu, name=str(path), **fits_access_kwargs)
 
 
- def _extract_hdu(hdul: fits.HDUList) -> fits.PrimaryHDU | fits.CompImageHDU:
-     """Return the fits hdu associated with the data in the hdu list."""
+ def _extract_hdu(hdul: fits.HDUList, hdu: int | None = None) -> fits.PrimaryHDU | fits.CompImageHDU:
+     """
+     Return the fits hdu associated with the data in the hdu list.
+
+     Only search down the hdu index for the data if the hdu index is not explicitly provided.
+     """
+     if hdu is not None:
+         return hdul[hdu]
      if hdul[0].data is not None:
          return hdul[0]
      return hdul[1]
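These new keyword arguments are what the changelog/280.misc.rst entry leans on: parse-time reads can skip checksum verification and tile decompression when only headers are needed. A sketch of such a call; `L0FitsAccess` is the access class this package's L0 parsers use, though any `FitsAccessBase` subclass works.

from pathlib import Path

from dkist_processing_common.codecs.fits import fits_access_decoder
from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess


def fast_parse_read(path: Path) -> L0FitsAccess:
    """Read headers quickly by skipping work that parsing doesn't need."""
    return fits_access_decoder(
        path,
        fits_access_class=L0FitsAccess,
        checksum=False,                  # skip FITS checksum verification
        disable_image_compression=True,  # leave tiles compressed; headers remain readable
    )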
dkist_processing_common/codecs/iobase.py CHANGED
@@ -1,4 +1,5 @@
  """Encoder/decoder for writing and reading IOBase binary (i.e., not Text) objects."""
+
  from io import BytesIO
  from io import IOBase
  from io import TextIOBase
dkist_processing_common/codecs/json.py CHANGED
@@ -1,4 +1,5 @@
  """Encoder/decoders for writing and reading JSON files."""
+
  import json
  from pathlib import Path
  from typing import Any
dkist_processing_common/codecs/path.py CHANGED
@@ -1,4 +1,5 @@
  """Default decoder to pass through paths from `read`."""
+
  from pathlib import Path
 
 
dkist_processing_common/codecs/quality.py CHANGED
@@ -1,4 +1,5 @@
  """Encoder/decoders for writing and reading quality data."""
+
  import json
  import logging
  from datetime import datetime
@@ -10,7 +11,6 @@ import numpy as np
  from dkist_processing_common.codecs.json import json_decoder
  from dkist_processing_common.codecs.json import json_encoder
 
-
  logger = logging.getLogger(__name__)
 
 
dkist_processing_common/codecs/str.py CHANGED
@@ -1,4 +1,5 @@
  """Encoder/decoder for writing and reading str to files."""
+
  from pathlib import Path
 
  from dkist_processing_common.codecs.bytes import bytes_decoder
dkist_processing_common/config.py CHANGED
@@ -1,4 +1,5 @@
  """Common configurations."""
+
  from dkist_processing_core.config import DKISTProcessingCoreConfiguration
  from dkist_service_configuration.settings import DEFAULT_MESH_SERVICE
  from dkist_service_configuration.settings import MeshService
@@ -9,41 +10,80 @@ from talus import ConsumerConnectionParameterFactory
  from talus import ProducerConnectionParameterFactory
 
 
- class RetryConfig(BaseModel):
-     """Retry metadata model."""
+ class GlobusClientCredential(BaseModel):
+     """Globus client credential."""
 
-     retry_delay: int = 1
-     retry_backoff: int = 2
-     retry_jitter: tuple[int, int] = (1, 10)
-     retry_max_delay: int = 300
-     retry_tries: int = -1
+     client_id: str = Field(..., description="Globus client ID for transfers.")
+     client_secret: str = Field(..., description="Globus client secret for transfers.")
 
 
  class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
      """Common configurations."""
 
-     retry_config: RetryConfig = Field(default_factory=RetryConfig)
      # metadata-store-api
-     gql_auth_token: str | None = None
+     gql_auth_token: str | None = Field(
+         default="dev", description="The auth token for the metadata-store-api."
+     )
      # object-store-api
-     object_store_access_key: str | None = None
-     object_store_secret_key: str | None = None
-     object_store_use_ssl: bool = False
-     multipart_threshold: int | None = None
-     s3_client_config: dict | None = None
-     s3_upload_config: dict | None = None
-     s3_download_config: dict | None = None
+     object_store_access_key: str | None = Field(
+         default=None, description="The access key for the object store."
+     )
+     object_store_secret_key: str | None = Field(
+         default=None, description="The secret key for the object store."
+     )
+     object_store_use_ssl: bool = Field(
+         default=False, description="Whether to use SSL for the object store connection."
+     )
+     # start object-clerk library
+     multipart_threshold: int | None = Field(
+         default=None, description="Multipart threshold for the object store."
+     )
+     s3_client_config: dict | None = Field(
+         default=None, description="S3 client configuration for the object store."
+     )
+     s3_upload_config: dict | None = Field(
+         default=None, description="S3 upload configuration for the object store."
+     )
+     s3_download_config: dict | None = Field(
+         default=None, description="S3 download configuration for the object store."
+     )
      # globus
-     globus_transport_params: dict = Field(default_factory=dict)
-     globus_client_id: str | None = None
-     globus_client_secret: str | None = None
-     object_store_endpoint: str | None = None
-     scratch_endpoint: str | None = None
+     globus_max_retries: int = Field(
+         default=5, description="Max retries for transient errors on calls to the globus api."
+     )
+     globus_inbound_client_credentials: list[GlobusClientCredential] = Field(
+         default_factory=list,
+         description="Globus client credentials for inbound transfers.",
+         examples=[
+             [
+                 {"client_id": "id1", "client_secret": "secret1"},
+                 {"client_id": "id2", "client_secret": "secret2"},
+             ],
+         ],
+     )
+     globus_outbound_client_credentials: list[GlobusClientCredential] = Field(
+         default_factory=list,
+         description="Globus client credentials for outbound transfers.",
+         examples=[
+             [
+                 {"client_id": "id3", "client_secret": "secret3"},
+                 {"client_id": "id4", "client_secret": "secret4"},
+             ],
+         ],
+     )
+     object_store_endpoint: str | None = Field(
+         default=None, description="Object store Globus Endpoint ID."
+     )
+     scratch_endpoint: str | None = Field(default=None, description="Scratch Globus Endpoint ID.")
      # scratch
-     scratch_base_path: str = Field(default="scratch/")
-     scratch_inventory_db_count: int = 16
+     scratch_base_path: str = Field(default="scratch/", description="Base path for scratch storage.")
+     scratch_inventory_db_count: int = Field(
+         default=16, description="Number of databases in the scratch inventory (redis)."
+     )
      # docs
-     docs_base_url: str = Field(default="my_test_url")
+     docs_base_url: str = Field(
+         default="my_test_url", description="Base URL for the documentation site."
+     )
 
      @property
      def metadata_store_api_base(self) -> str:
@@ -105,4 +145,3 @@ class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
 
 
  common_configurations = DKISTProcessingCommonConfiguration()
- common_configurations.log_configurations()
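The two credential lists replace the single `globus_client_id`/`globus_client_secret` pair, letting inbound and outbound transfers draw from separate client pools. A minimal construction sketch; in deployments these values normally arrive through the service-configuration environment rather than direct instantiation.

from dkist_processing_common.config import DKISTProcessingCommonConfiguration

cfg = DKISTProcessingCommonConfiguration(
    globus_inbound_client_credentials=[
        {"client_id": "id1", "client_secret": "secret1"},
    ],
    globus_outbound_client_credentials=[
        {"client_id": "id2", "client_secret": "secret2"},
    ],
)
# Pydantic validates each dict into a GlobusClientCredential instance.
assert cfg.globus_inbound_client_credentials[0].client_id == "id1"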
dkist_processing_common/manual.py CHANGED
@@ -1,21 +1,19 @@
  """Task wrapper for manual execution outside the workflow engine."""
+
  import json
  import logging
  import shutil
- from dataclasses import asdict
- from io import BytesIO
  from pathlib import Path
  from typing import Callable
  from unittest.mock import patch
 
  from dkist_processing_core.task import TaskBase
 
- from dkist_processing_common.codecs.json import json_encoder
+ from dkist_processing_common.codecs.basemodel import basemodel_encoder
  from dkist_processing_common.models.graphql import RecipeRunProvenanceMutation
  from dkist_processing_common.models.tags import Tag
  from dkist_processing_common.tasks.base import WorkflowTaskBase
- from dkist_processing_common.tests.conftest import FakeGQLClient
-
+ from dkist_processing_common.tests.mock_metadata_store import fake_gql_client_factory
 
  logger = logging.getLogger(__name__)
 
@@ -69,7 +67,7 @@ class ManualProcessing:
 
          with patch(
              "dkist_processing_common.tasks.mixin.metadata_store.GraphQLClient",
-             new=FakeGQLClient,
+             new=fake_gql_client_factory(),
          ) as foo:
              # Run the task with a FakeGQLClient. This will handle pre_run(), run(), and post_run()
              with patch(
@@ -182,8 +180,8 @@ def writing_metadata_store_record_provenance(self, is_task_manual: bool, library
          workflowVersion=self.workflow_version,
      )
      self.write(
-         data=asdict(params),
-         encoder=json_encoder,
+         data=params,
+         encoder=basemodel_encoder,
          tags=["PROVENANCE_RECORD"],
          relative_path=f"{self.task_name}_provenance.json",
          overwrite=True,