dkist-processing-common 11.2.1rc3__py3-none-any.whl → 11.3.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (91)
  1. dkist_processing_common/__init__.py +1 -0
  2. dkist_processing_common/_util/constants.py +1 -0
  3. dkist_processing_common/_util/graphql.py +1 -0
  4. dkist_processing_common/_util/scratch.py +1 -1
  5. dkist_processing_common/_util/tags.py +1 -0
  6. dkist_processing_common/codecs/array.py +1 -0
  7. dkist_processing_common/codecs/asdf.py +1 -0
  8. dkist_processing_common/codecs/basemodel.py +1 -0
  9. dkist_processing_common/codecs/bytes.py +1 -0
  10. dkist_processing_common/codecs/fits.py +1 -0
  11. dkist_processing_common/codecs/iobase.py +1 -0
  12. dkist_processing_common/codecs/json.py +1 -0
  13. dkist_processing_common/codecs/path.py +1 -0
  14. dkist_processing_common/codecs/quality.py +1 -1
  15. dkist_processing_common/codecs/str.py +1 -0
  16. dkist_processing_common/config.py +1 -0
  17. dkist_processing_common/manual.py +1 -1
  18. dkist_processing_common/models/constants.py +1 -0
  19. dkist_processing_common/models/dkist_location.py +1 -1
  20. dkist_processing_common/models/fits_access.py +1 -0
  21. dkist_processing_common/models/flower_pot.py +1 -0
  22. dkist_processing_common/models/graphql.py +2 -1
  23. dkist_processing_common/models/input_dataset.py +3 -2
  24. dkist_processing_common/models/message.py +1 -1
  25. dkist_processing_common/models/message_queue_binding.py +1 -1
  26. dkist_processing_common/models/metric_code.py +1 -0
  27. dkist_processing_common/models/parameters.py +1 -1
  28. dkist_processing_common/models/quality.py +1 -0
  29. dkist_processing_common/models/tags.py +1 -0
  30. dkist_processing_common/models/task_name.py +1 -0
  31. dkist_processing_common/models/wavelength.py +2 -1
  32. dkist_processing_common/parsers/cs_step.py +1 -0
  33. dkist_processing_common/parsers/dsps_repeat.py +1 -0
  34. dkist_processing_common/parsers/experiment_id_bud.py +1 -0
  35. dkist_processing_common/parsers/id_bud.py +1 -0
  36. dkist_processing_common/parsers/l0_fits_access.py +1 -0
  37. dkist_processing_common/parsers/l1_fits_access.py +1 -0
  38. dkist_processing_common/parsers/near_bud.py +1 -0
  39. dkist_processing_common/parsers/proposal_id_bud.py +1 -0
  40. dkist_processing_common/parsers/quality.py +1 -0
  41. dkist_processing_common/parsers/retarder.py +1 -0
  42. dkist_processing_common/parsers/single_value_single_key_flower.py +1 -0
  43. dkist_processing_common/parsers/task.py +1 -0
  44. dkist_processing_common/parsers/time.py +1 -0
  45. dkist_processing_common/parsers/unique_bud.py +1 -0
  46. dkist_processing_common/parsers/wavelength.py +1 -0
  47. dkist_processing_common/tasks/__init__.py +3 -2
  48. dkist_processing_common/tasks/assemble_movie.py +1 -0
  49. dkist_processing_common/tasks/base.py +1 -0
  50. dkist_processing_common/tasks/l1_output_data.py +1 -1
  51. dkist_processing_common/tasks/mixin/globus.py +1 -1
  52. dkist_processing_common/tasks/mixin/interservice_bus.py +1 -0
  53. dkist_processing_common/tasks/mixin/metadata_store.py +1 -1
  54. dkist_processing_common/tasks/mixin/object_store.py +1 -0
  55. dkist_processing_common/tasks/mixin/quality/__init__.py +1 -0
  56. dkist_processing_common/tasks/mixin/quality/_base.py +1 -0
  57. dkist_processing_common/tasks/mixin/quality/_metrics.py +1 -0
  58. dkist_processing_common/tasks/output_data_base.py +1 -0
  59. dkist_processing_common/tasks/parse_l0_input_data.py +1 -1
  60. dkist_processing_common/tasks/quality_metrics.py +1 -1
  61. dkist_processing_common/tasks/teardown.py +1 -1
  62. dkist_processing_common/tasks/transfer_input_data.py +1 -1
  63. dkist_processing_common/tasks/trial_catalog.py +3 -2
  64. dkist_processing_common/tasks/trial_output_data.py +1 -0
  65. dkist_processing_common/tasks/write_l1.py +19 -8
  66. dkist_processing_common/tests/conftest.py +1 -0
  67. dkist_processing_common/tests/mock_metadata_store.py +2 -3
  68. dkist_processing_common/tests/test_codecs.py +2 -2
  69. dkist_processing_common/tests/test_interservice_bus.py +1 -0
  70. dkist_processing_common/tests/test_interservice_bus_mixin.py +1 -0
  71. dkist_processing_common/tests/test_manual_processing.py +1 -2
  72. dkist_processing_common/tests/test_output_data_base.py +1 -2
  73. dkist_processing_common/tests/test_parameters.py +1 -1
  74. dkist_processing_common/tests/test_parse_l0_input_data.py +1 -0
  75. dkist_processing_common/tests/test_quality.py +1 -0
  76. dkist_processing_common/tests/test_scratch.py +2 -1
  77. dkist_processing_common/tests/test_stems.py +1 -1
  78. dkist_processing_common/tests/test_tags.py +1 -0
  79. dkist_processing_common/tests/test_teardown.py +1 -1
  80. dkist_processing_common/tests/test_transfer_input_data.py +2 -3
  81. dkist_processing_common/tests/test_trial_catalog.py +1 -0
  82. dkist_processing_common/tests/test_trial_output_data.py +1 -1
  83. dkist_processing_common/tests/test_workflow_task_base.py +1 -2
  84. dkist_processing_common/tests/test_write_l1.py +8 -10
  85. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.0.dist-info}/METADATA +2 -2
  86. dkist_processing_common-11.3.0.dist-info/RECORD +122 -0
  87. docs/conf.py +1 -0
  88. changelog/262.misc.rst +0 -1
  89. dkist_processing_common-11.2.1rc3.dist-info/RECORD +0 -123
  90. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.0.dist-info}/WHEEL +0 -0
  91. {dkist_processing_common-11.2.1rc3.dist-info → dkist_processing_common-11.3.0.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
 """Package providing support classes and methods used by all workflow tasks."""
+
 from importlib.metadata import PackageNotFoundError
 from importlib.metadata import version
 

@@ -1,4 +1,5 @@
 """Wrapper for interactions with shared database that holds arbitrary data that persists across the entire recipe run."""
+
 import json
 from collections.abc import MutableMapping
 from enum import Enum

@@ -1,4 +1,5 @@
 """Extension of the GraphQL supporting retries for data processing use cases."""
+
 import logging
 from typing import Any
 from typing import Callable

@@ -1,4 +1,5 @@
 """Scratch file system api."""
+
 import logging
 from contextlib import contextmanager
 from os import umask

@@ -9,7 +10,6 @@ from typing import Generator
 from dkist_processing_common._util.tags import TagDB
 from dkist_processing_common.config import common_configurations
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -1,4 +1,5 @@
 """Tag cloud manager."""
+
 from pathlib import Path
 from typing import Iterable
 

@@ -1,4 +1,5 @@
 """Encoder/decoder for writing/reading numpy arrays."""
+
 import io
 from pathlib import Path
 

@@ -1,4 +1,5 @@
 """Encoders and decoders for writing and reading ASDF files."""
+
 from io import BytesIO
 from pathlib import Path
 

@@ -1,4 +1,5 @@
 """Encoder/decoder for writing and reading Pydantic BaseModel objects."""
+
 from pathlib import Path
 from typing import Type
 

@@ -1,4 +1,5 @@
 """Encoder/decoder for writing and reading bytes objects."""
+
 from pathlib import Path
 
 

@@ -1,4 +1,5 @@
 """Encoders and decoders for writing and reading FITS files."""
+
 from io import BytesIO
 from pathlib import Path
 from typing import Type

@@ -1,4 +1,5 @@
 """Encoder/decoder for writing and reading IOBase binary (i.e., not Text) objects."""
+
 from io import BytesIO
 from io import IOBase
 from io import TextIOBase

@@ -1,4 +1,5 @@
 """Encoder/decoders for writing and reading JSON files."""
+
 import json
 from pathlib import Path
 from typing import Any

@@ -1,4 +1,5 @@
 """Default decoder to pass through paths from `read`."""
+
 from pathlib import Path
 
 

@@ -1,4 +1,5 @@
 """Encoder/decoders for writing and reading quality data."""
+
 import json
 import logging
 from datetime import datetime

@@ -10,7 +11,6 @@ import numpy as np
 from dkist_processing_common.codecs.json import json_decoder
 from dkist_processing_common.codecs.json import json_encoder
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -1,4 +1,5 @@
 """Encoder/decoder for writing and reading str to files."""
+
 from pathlib import Path
 
 from dkist_processing_common.codecs.bytes import bytes_decoder

@@ -1,4 +1,5 @@
 """Common configurations."""
+
 from dkist_processing_core.config import DKISTProcessingCoreConfiguration
 from dkist_service_configuration.settings import DEFAULT_MESH_SERVICE
 from dkist_service_configuration.settings import MeshService

@@ -1,4 +1,5 @@
 """Task wrapper for manual execution outside the workflow engine."""
+
 import json
 import logging
 import shutil

@@ -14,7 +15,6 @@ from dkist_processing_common.models.tags import Tag
 from dkist_processing_common.tasks.base import WorkflowTaskBase
 from dkist_processing_common.tests.mock_metadata_store import fake_gql_client_factory
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -4,6 +4,7 @@ Components of the Constant model.
 Contains names of database entries and Base class for an object that simplifies
 accessing the database (tab completion, etc.)
 """
+
 from enum import Enum
 from string import ascii_uppercase
 

@@ -3,10 +3,10 @@
 Cartesian geocentric coordinates of DKIST on Earth as retrieved from
 https://github.com/astropy/astropy-data/blob/gh-pages/coordinates/sites.json#L838
 """
+
 import astropy.units as u
 from astropy.coordinates import EarthLocation
 
-
 _dkist_site_info = {
     "aliases": ["DKIST", "ATST"],
     "name": "Daniel K. Inouye Solar Telescope",

@@ -1,4 +1,5 @@
 """Abstraction layer for accessing fits data via class attributes."""
+
 from __future__ import annotations
 
 from pathlib import Path

@@ -6,6 +6,7 @@ Defines:
 
 FlowerPot -> Container for Stem children (Flowers)
 """
+
 from __future__ import annotations
 
 from abc import ABC

@@ -1,9 +1,10 @@
 """GraphQL Data models for the metadata store api."""
+
 from pydantic import BaseModel
 from pydantic import ConfigDict
+from pydantic import Json
 from pydantic import field_serializer
 from pydantic import field_validator
-from pydantic import Json
 
 from dkist_processing_common.models.input_dataset import InputDatasetBaseModel
 from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList

@@ -1,4 +1,5 @@
 """Input dataset models for the inputDatasetPartDocument from the metadata store api."""
+
 import json
 from datetime import datetime
 from typing import Any

@@ -6,10 +7,10 @@ from typing import Any
 from pydantic import BaseModel
 from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import field_serializer
-from pydantic import field_validator
 from pydantic import Json
 from pydantic import PlainSerializer
+from pydantic import field_serializer
+from pydantic import field_validator
 from pydantic.alias_generators import to_camel
 from typing_extensions import Annotated
 

@@ -1,11 +1,11 @@
 """Data structures for messages placed on the interservice bus."""
+
 from typing import Type
 
 from pydantic import Field
 from talus import MessageBodyBase
 from talus import PublishMessageBase
 
-
 ########################
 # Message Body Schemas #
 ########################

@@ -1,4 +1,5 @@
 """Binding between a queue and a message to be published."""
+
 from talus import Binding
 from talus import Queue
 

@@ -7,7 +8,6 @@ from dkist_processing_common.models.message import CatalogFrameMessage
 from dkist_processing_common.models.message import CatalogObjectMessage
 from dkist_processing_common.models.message import CreateQualityReportMessage
 
-
 catalog_frame_queue = Queue(
     name="catalog.frame.q", arguments=common_configurations.isb_queue_arguments
 )

@@ -1,4 +1,5 @@
 """Controlled list of quality metric codes."""
+
 from enum import StrEnum
 
 

@@ -1,4 +1,5 @@
 """Base class for parameter-parsing object."""
+
 import logging
 from contextlib import contextmanager
 from datetime import datetime

@@ -18,7 +19,6 @@ from dkist_processing_common.models.input_dataset import InputDatasetFilePointer
 from dkist_processing_common.models.input_dataset import InputDatasetPartDocumentList
 from dkist_processing_common.models.tags import Tag
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -1,4 +1,5 @@
 """Support classes used to create a quality report."""
+
 from typing import Any
 
 from pydantic import BaseModel

@@ -1,4 +1,5 @@
 """Components of the Tag model. Stem + Optional Suffix = Tag."""
+
 from enum import Enum
 
 from dkist_processing_common.models.task_name import TaskName

@@ -1,4 +1,5 @@
 """Controlled list of common IP task tag names."""
+
 from enum import Enum
 
 

@@ -1,10 +1,11 @@
 """Support classes for manipulating wavelengths."""
+
 import astropy.units as u
 from pydantic import BaseModel
 from pydantic import ConfigDict
+from pydantic import ValidationInfo
 from pydantic import field_validator
 from pydantic import model_validator
-from pydantic import ValidationInfo
 
 
 class WavelengthRange(BaseModel):

@@ -1,4 +1,5 @@
 """Classes supporting Calibration Sequence steps."""
+
 from __future__ import annotations
 
 from datetime import datetime

@@ -1,4 +1,5 @@
 """Classes supporting the Data Set Parameters Set (DSPS) Repeat parameter."""
+
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.models.tags import StemName

@@ -1,4 +1,5 @@
 """Experiment Id parser."""
+
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.parsers.id_bud import ContributingIdsBud
 from dkist_processing_common.parsers.id_bud import IdBud

@@ -1,4 +1,5 @@
 """Base classes for ID bud parsing."""
+
 from typing import Type
 
 from dkist_processing_common.models.flower_pot import SpilledDirt

@@ -1,4 +1,5 @@
 """By-frame 214 L0 header keywords that are not instrument specific."""
+
 from astropy.io import fits
 
 from dkist_processing_common.parsers.l1_fits_access import L1FitsAccess

@@ -1,4 +1,5 @@
 """By-frame 214 L1 only header keywords that are not instrument specific."""
+
 from astropy.io import fits
 
 from dkist_processing_common.models.fits_access import FitsAccessBase

@@ -1,4 +1,5 @@
 """Pre-made flower that reads a single header key from all files and raises a ValueError if the values are not in a supplied range."""
+
 from statistics import mean
 from typing import Callable
 

@@ -1,4 +1,5 @@
 """Proposal Id parser."""
+
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.parsers.id_bud import ContributingIdsBud
 from dkist_processing_common.parsers.id_bud import IdBud

@@ -1,4 +1,5 @@
 """Support classes to define object attributes from header information."""
+
 from astropy.io import fits
 
 from dkist_processing_common.parsers.l0_fits_access import L1FitsAccess

@@ -1,4 +1,5 @@
 """Bud that parses the name of the retarder used during POLCAL task observations."""
+
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.unique_bud import TaskUniqueBud

@@ -1,4 +1,5 @@
 """Pre-made flower that produces tag based on a single header key."""
+
 from dkist_processing_common.models.flower_pot import Stem
 from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
 

@@ -1,4 +1,5 @@
 """Module for parsing IP task related things."""
+
 from typing import Callable
 from typing import Type
 

@@ -1,4 +1,5 @@
 """Time parser."""
+
 from datetime import datetime
 from datetime import timezone
 from typing import Callable

@@ -1,4 +1,5 @@
 """Pre-made flower that reads a single header key from all files and raises a ValueError if it is not unique."""
+
 from typing import Callable
 
 from dkist_processing_common.models.flower_pot import SpilledDirt

@@ -1,4 +1,5 @@
 """Bud to get the wavelength of observe frames."""
+
 from dkist_processing_common.models.constants import BudName
 from dkist_processing_common.models.task_name import TaskName
 from dkist_processing_common.parsers.unique_bud import TaskUniqueBud

@@ -1,11 +1,12 @@
 """Common tasks and bases."""
-from dkist_processing_common.tasks.base import *  # noreorder
+
+from dkist_processing_common.tasks.base import *  # isort: skip
 from dkist_processing_common.tasks.assemble_movie import *
 from dkist_processing_common.tasks.l1_output_data import *
 from dkist_processing_common.tasks.parse_l0_input_data import *
 from dkist_processing_common.tasks.quality_metrics import *
 from dkist_processing_common.tasks.teardown import *
 from dkist_processing_common.tasks.transfer_input_data import *
-from dkist_processing_common.tasks.write_l1 import *
 from dkist_processing_common.tasks.trial_catalog import *
 from dkist_processing_common.tasks.trial_output_data import *
+from dkist_processing_common.tasks.write_l1 import *

@@ -1,4 +1,5 @@
 """Task(s) for assembling a browse movie."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod

@@ -1,4 +1,5 @@
 """Wrappers for all workflow tasks."""
+
 import json
 import logging
 import re

@@ -1,4 +1,5 @@
 """Task(s) for the transfer and publishing of L1 data from a production run of a processing pipeline."""
+
 import logging
 from abc import ABC
 from itertools import chain

@@ -20,7 +21,6 @@ from dkist_processing_common.tasks.mixin.quality import QualityMixin
 from dkist_processing_common.tasks.output_data_base import OutputDataBase
 from dkist_processing_common.tasks.output_data_base import TransferDataBase
 
-
 __all__ = [
     "L1OutputDataBase",
     "TransferL1Data",

@@ -1,4 +1,5 @@
 """Mixin to add methods to a Task to support globus transfers."""
+
 import logging
 from dataclasses import dataclass
 from pathlib import Path

@@ -11,7 +12,6 @@ from globus_sdk import TransferData
 
 from dkist_processing_common.config import common_configurations
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements interservice bus access functionality."""
+
 from talus import DurableProducer
 from talus import PublishMessageBase
 

@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements Metadata Store data access functionality."""
+
 import json
 import logging
 from functools import cached_property

@@ -26,7 +27,6 @@ from dkist_processing_common.models.graphql import RecipeRunStatusMutation
 from dkist_processing_common.models.graphql import RecipeRunStatusQuery
 from dkist_processing_common.models.graphql import RecipeRunStatusResponse
 
-
 logger = logging.getLogger(__name__)
 
 

@@ -1,4 +1,5 @@
 """Mixin for a WorkflowDataTaskBase subclass which implements Object Store data access functionality."""
+
 from pathlib import Path
 
 from object_clerk import ObjectClerk

@@ -4,4 +4,5 @@ To improve readability the top-level mixin, `QualityMixin`, contains only base f
 metrics are grouped into sub-mixins. To protect a user, this mixin-on-mixin stack is hidden in protected modules
 and only the top-level mixin (`QualityMixin`) is exposed.
 """
+
 from ._base import QualityMixin

@@ -1,4 +1,5 @@
 """Base QualityMixin class that contains machinery common to all metric types."""
+
 from typing import Iterable
 
 import numpy as np

@@ -2,6 +2,7 @@
 
 These classes should not be directly mixed in to anything. They are pre-mixed into the top-level QualityMixin
 """
+
 import copy
 import json
 import logging

@@ -1,4 +1,5 @@
 """Base class that supports common output data methods and paths."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod

@@ -20,6 +20,7 @@ that makes the rest of the pipeline easy to write.
 In other words, we can find exactly the frame we need (tags) and, once we have it, we never need to look
 at a different frame to get information (constants).
 """
+
 import logging
 from abc import ABC
 from abc import abstractmethod

@@ -44,7 +45,6 @@ from dkist_processing_common.parsers.time import VarianceCadenceBud
 from dkist_processing_common.parsers.unique_bud import UniqueBud
 from dkist_processing_common.tasks.base import WorkflowTaskBase
 
-
 __all__ = [
     "ParseL0InputDataBase",
     "ParseDataBase",

@@ -1,4 +1,5 @@
 """Classes to support the generation of quality metrics for the calibrated data."""
+
 import logging
 from dataclasses import dataclass
 from dataclasses import field

@@ -18,7 +19,6 @@ from dkist_processing_common.parsers.quality import L1QualityFitsAccess
 from dkist_processing_common.tasks.base import WorkflowTaskBase
 from dkist_processing_common.tasks.mixin.quality import QualityMixin
 
-
 __all__ = ["QualityL1Metrics", "QualityL0Metrics"]
 
 

@@ -1,11 +1,11 @@
 """Task(s) for the clean up tasks at the conclusion of a processing pipeline."""
+
 import logging
 from abc import ABC
 from abc import abstractmethod
 
 from dkist_processing_common.tasks.base import WorkflowTaskBase
 
-
 __all__ = ["Teardown", "TrialTeardown"]
 
 

@@ -1,4 +1,5 @@
 """Task(s) for the transfer in of data sources for a processing pipeline."""
+
 import logging
 from pathlib import Path
 

@@ -11,7 +12,6 @@ from dkist_processing_common.tasks.base import WorkflowTaskBase
 from dkist_processing_common.tasks.mixin.globus import GlobusMixin
 from dkist_processing_common.tasks.mixin.globus import GlobusTransferItem
 
-
 __all__ = ["TransferL0Data"]
 
 logger = logging.getLogger(__name__)

@@ -1,4 +1,5 @@
 """Tasks to support the generation of downstream artifacts in a trial workflow that wouldn't otherwise produce them."""
+
 import importlib
 import logging
 from datetime import datetime

@@ -24,8 +25,8 @@ __all__ = ["CreateTrialDatasetInventory", "CreateTrialAsdf", "CreateTrialQuality
 # Capture condition of dkist-processing-common[inventory] install
 INVENTORY_EXTRA_INSTALLED = False
 try:
-    from dkist_inventory.inventory import generate_inventory_from_frame_inventory
     from dkist_inventory.inventory import generate_asdf_filename
+    from dkist_inventory.inventory import generate_inventory_from_frame_inventory
 
     INVENTORY_EXTRA_INSTALLED = True
 except ModuleNotFoundError:

@@ -45,8 +46,8 @@ except ModuleNotFoundError:
 # Verify dkist-quality is installed
 QUALITY_EXTRA_INSTALLED = False
 try:
-    from dkist_quality.report import format_report
     from dkist_quality.report import ReportFormattingException
+    from dkist_quality.report import format_report
 
     QUALITY_EXTRA_INSTALLED = True
 except ModuleNotFoundError:

@@ -1,4 +1,5 @@
 """Tasks to support transferring an arbitrary collection of files to a customizable post-run location."""
+
 import logging
 from functools import cached_property
 from pathlib import Path