dkist-processing-common 11.5.0__tar.gz → 11.6.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/PKG-INFO +81 -3
- dkist_processing_common-11.6.0rc1/README.rst +194 -0
- dkist_processing_common-11.6.0rc1/changelog/268.misc.rst +1 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/config.py +45 -29
- dkist_processing_common-11.6.0rc1/dkist_processing_common/models/telemetry.py +21 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/assemble_movie.py +1 -1
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/base.py +24 -50
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/l1_output_data.py +10 -10
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/output_data_base.py +2 -2
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/parse_l0_input_data.py +42 -7
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/quality_metrics.py +7 -7
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/teardown.py +5 -5
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/transfer_input_data.py +5 -5
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/trial_catalog.py +7 -7
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/trial_output_data.py +2 -2
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/write_l1.py +1 -1
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_base.py +0 -14
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common.egg-info/PKG-INFO +81 -3
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common.egg-info/SOURCES.txt +2 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common.egg-info/requires.txt +2 -2
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/pyproject.toml +2 -2
- dkist_processing_common-11.5.0/README.rst +0 -116
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/.gitignore +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/.pre-commit-config.yaml +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/.readthedocs.yml +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/.snyk +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/CHANGELOG.rst +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/bitbucket-pipelines.yml +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/changelog/.gitempty +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/_util/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/_util/constants.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/_util/graphql.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/_util/scratch.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/_util/tags.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/array.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/asdf.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/basemodel.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/bytes.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/fits.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/iobase.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/json.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/path.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/quality.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/codecs/str.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/fonts/Lato-Regular.ttf +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/fonts/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/manual.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/constants.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/dkist_location.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/fits_access.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/flower_pot.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/fried_parameter.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/graphql.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/input_dataset.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/message.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/message_queue_binding.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/metric_code.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/parameters.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/quality.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/tags.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/task_name.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/models/wavelength.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/cs_step.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/dsps_repeat.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/experiment_id_bud.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/id_bud.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/l0_fits_access.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/l1_fits_access.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/near_bud.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/proposal_id_bud.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/quality.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/retarder.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/single_value_single_key_flower.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/task.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/time.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/unique_bud.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/parsers/wavelength.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/globus.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/interservice_bus.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/metadata_store.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/object_store.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/quality/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/quality/_base.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tasks/mixin/quality/_metrics.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/__init__.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/conftest.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/mock_metadata_store.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_assemble_movie.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_assemble_quality.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_codecs.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_constants.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_cs_step.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_dkist_location.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_fits_access.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_flower_pot.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_fried_parameter.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_input_dataset.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_interservice_bus.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_interservice_bus_mixin.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_manual_processing.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_output_data_base.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_parameters.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_parse_l0_input_data.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_publish_catalog_messages.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_quality.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_quality_mixin.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_scratch.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_stems.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_submit_dataset_metadata.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_tags.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_task_name.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_task_parsing.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_teardown.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_transfer_input_data.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_transfer_l1_output_data.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_trial_catalog.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_trial_output_data.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_workflow_task_base.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common/tests/test_write_l1.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common.egg-info/dependency_links.txt +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/dkist_processing_common.egg-info/top_level.txt +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/Makefile +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/changelog.rst +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/conf.py +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/index.rst +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/landing_page.rst +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/make.bat +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/docs/requirements.txt +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/licenses/LICENSE.rst +0 -0
- {dkist_processing_common-11.5.0 → dkist_processing_common-11.6.0rc1}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: dkist-processing-common
|
|
3
|
-
Version: 11.5.0
|
|
3
|
+
Version: 11.6.0rc1
|
|
4
4
|
Summary: Common task classes used by the DKIST science data processing pipelines
|
|
5
5
|
Author-email: NSO / AURA <dkistdc@nso.edu>
|
|
6
6
|
License: BSD-3-Clause
|
|
@@ -17,9 +17,9 @@ Requires-Dist: asdf<4.0.0,>=3.5.0
|
|
|
17
17
|
Requires-Dist: astropy>=7.0.0
|
|
18
18
|
Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
|
|
19
19
|
Requires-Dist: dkist-header-validator<6.0,>=5.0.0
|
|
20
|
-
Requires-Dist: dkist-processing-core==
|
|
20
|
+
Requires-Dist: dkist-processing-core==6.0.0rc3
|
|
21
21
|
Requires-Dist: dkist-processing-pac<4.0,>=3.1
|
|
22
|
-
Requires-Dist: dkist-service-configuration<
|
|
22
|
+
Requires-Dist: dkist-service-configuration<5.0,>=4.1.7
|
|
23
23
|
Requires-Dist: dkist-spectral-lines<4.0,>=3.0.0
|
|
24
24
|
Requires-Dist: solar-wavelength-calibration<2.0,>=1.0
|
|
25
25
|
Requires-Dist: globus-sdk>=3.12.0
|
|
@@ -97,6 +97,84 @@ Deployment
|
|
|
97
97
|
|
|
98
98
|
dkist-processing-common is deployed to `PyPI <https://pypi.org/project/dkist-processing-common/>`_
|
|
99
99
|
|
|
100
|
+
Environment Variables
|
|
101
|
+
---------------------
|
|
102
|
+
|
|
103
|
+
.. list-table::
|
|
104
|
+
:widths: 10 90
|
|
105
|
+
:header-rows: 1
|
|
106
|
+
|
|
107
|
+
* - Variable
|
|
108
|
+
- Field Info
|
|
109
|
+
* - LOGURU_LEVEL
|
|
110
|
+
- annotation=str required=False default='INFO' alias_priority=2 validation_alias='LOGURU_LEVEL' description='Log level for the application'
|
|
111
|
+
* - MESH_CONFIG
|
|
112
|
+
- annotation=dict[str, MeshService] required=False default_factory=dict alias_priority=2 validation_alias='MESH_CONFIG' description='Service mesh configuration' examples=[{'upstream_service_name': {'mesh_address': 'localhost', 'mesh_port': 6742}}]
|
|
113
|
+
* - RETRY_CONFIG
|
|
114
|
+
- annotation=RetryConfig required=False default_factory=RetryConfig description='Retry configuration for the service'
|
|
115
|
+
* - OTEL_SERVICE_NAME
|
|
116
|
+
- annotation=str required=False default='unknown-service-name' alias_priority=2 validation_alias='OTEL_SERVICE_NAME' description='Service name for OpenTelemetry'
|
|
117
|
+
* - DKIST_SERVICE_VERSION
|
|
118
|
+
- annotation=str required=False default='unknown-service-version' alias_priority=2 validation_alias='DKIST_SERVICE_VERSION' description='Service version for OpenTelemetry'
|
|
119
|
+
* - NOMAD_ALLOC_ID
|
|
120
|
+
- annotation=str required=False default='unknown-allocation-id' alias_priority=2 validation_alias='NOMAD_ALLOC_ID' description='Nomad allocation ID for OpenTelemetry'
|
|
121
|
+
* - OTEL_EXPORTER_OTLP_TRACES_INSECURE
|
|
122
|
+
- annotation=bool required=False default=True description='Use insecure connection for OTLP traces'
|
|
123
|
+
* - OTEL_EXPORTER_OTLP_METRICS_INSECURE
|
|
124
|
+
- annotation=bool required=False default=True description='Use insecure connection for OTLP metrics'
|
|
125
|
+
* - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
|
|
126
|
+
- annotation=Union[str, NoneType] required=False default=None description='OTLP traces endpoint. Overrides mesh configuration' examples=['localhost:4317']
|
|
127
|
+
* - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT
|
|
128
|
+
- annotation=Union[str, NoneType] required=False default=None description='OTLP metrics endpoint. Overrides mesh configuration' examples=['localhost:4317']
|
|
129
|
+
* - OTEL_PYTHON_DISABLED_INSTRUMENTATIONS
|
|
130
|
+
- annotation=list[str] required=False default_factory=list description='List of instrumentations to disable. https://opentelemetry.io/docs/zero-code/python/configuration/' examples=[['pika', 'requests']]
|
|
131
|
+
* - OTEL_PYTHON_FASTAPI_EXCLUDED_URLS
|
|
132
|
+
- annotation=str required=False default='health' description='Comma separated list of URLs to exclude from OpenTelemetry instrumentation in FastAPI.' examples=['client/.*/info,healthcheck']
|
|
133
|
+
* - SYSTEM_METRIC_INSTRUMENTATION_CONFIG
|
|
134
|
+
- annotation=Union[dict[str, bool], NoneType] required=False default=None description='Configuration for system metric instrumentation. https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/system_metrics/system_metrics.html' examples=[{'system.memory.usage': ['used', 'free', 'cached'], 'system.cpu.time': ['idle', 'user', 'system', 'irq'], 'system.network.io': ['transmit', 'receive'], 'process.runtime.memory': ['rss', 'vms'], 'process.runtime.cpu.time': ['user', 'system'], 'process.runtime.context_switches': ['involuntary', 'voluntary']}]
|
|
135
|
+
* - ISB_USERNAME
|
|
136
|
+
- annotation=str required=False default='guest' description='Username for the interservice-bus.'
|
|
137
|
+
* - ISB_PASSWORD
|
|
138
|
+
- annotation=str required=False default='guest' description='Password for the interservice-bus.'
|
|
139
|
+
* - ISB_EXCHANGE
|
|
140
|
+
- annotation=str required=False default='master.direct.x' description='Exchange for the interservice-bus.'
|
|
141
|
+
* - ISB_QUEUE_TYPE
|
|
142
|
+
- annotation=str required=False default='classic' description='Queue type for the interservice-bus.' examples=['quorum', 'classic']
|
|
143
|
+
* - BUILD_VERSION
|
|
144
|
+
- annotation=str required=False default='dev' description='Fallback build version for workflow tasks.'
|
|
145
|
+
* - GQL_AUTH_TOKEN
|
|
146
|
+
- annotation=Union[str, NoneType] required=False default='dev' description='The auth token for the metadata-store-api.'
|
|
147
|
+
* - OBJECT_STORE_ACCESS_KEY
|
|
148
|
+
- annotation=Union[str, NoneType] required=False default=None description='The access key for the object store.'
|
|
149
|
+
* - OBJECT_STORE_SECRET_KEY
|
|
150
|
+
- annotation=Union[str, NoneType] required=False default=None description='The secret key for the object store.'
|
|
151
|
+
* - OBJECT_STORE_USE_SSL
|
|
152
|
+
- annotation=bool required=False default=False description='Whether to use SSL for the object store connection.'
|
|
153
|
+
* - MULTIPART_THRESHOLD
|
|
154
|
+
- annotation=Union[int, NoneType] required=False default=None description='Multipart threshold for the object store.'
|
|
155
|
+
* - S3_CLIENT_CONFIG
|
|
156
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 client configuration for the object store.'
|
|
157
|
+
* - S3_UPLOAD_CONFIG
|
|
158
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 upload configuration for the object store.'
|
|
159
|
+
* - S3_DOWNLOAD_CONFIG
|
|
160
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 download configuration for the object store.'
|
|
161
|
+
* - GLOBUS_TRANSPORT_PARAMS
|
|
162
|
+
- annotation=dict required=False default_factory=dict description='Globus transfer parameters.'
|
|
163
|
+
* - GLOBUS_CLIENT_ID
|
|
164
|
+
- annotation=Union[str, NoneType] required=False default=None description='Globus client ID for inbound/outbound transfers.'
|
|
165
|
+
* - GLOBUS_CLIENT_SECRET
|
|
166
|
+
- annotation=Union[str, NoneType] required=False default=None description='Globus client secret for inbound/outbound transfers.'
|
|
167
|
+
* - OBJECT_STORE_ENDPOINT
|
|
168
|
+
- annotation=Union[str, NoneType] required=False default=None description='Object store Globus Endpoint ID.'
|
|
169
|
+
* - SCRATCH_ENDPOINT
|
|
170
|
+
- annotation=Union[str, NoneType] required=False default=None description='Scratch Globus Endpoint ID.'
|
|
171
|
+
* - SCRATCH_BASE_PATH
|
|
172
|
+
- annotation=str required=False default='scratch/' description='Base path for scratch storage.'
|
|
173
|
+
* - SCRATCH_INVENTORY_DB_COUNT
|
|
174
|
+
- annotation=int required=False default=16 description='Number of databases in the scratch inventory (redis).'
|
|
175
|
+
* - DOCS_BASE_URL
|
|
176
|
+
- annotation=str required=False default='my_test_url' description='Base URL for the documentation site.'
|
|
177
|
+
|
|
100
178
|
Development
|
|
101
179
|
-----------
|
|
102
180
|
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
dkist-processing-common
|
|
2
|
+
=======================
|
|
3
|
+
|
|
4
|
+
|codecov|
|
|
5
|
+
|
|
6
|
+
This repository works in concert with `dkist-processing-core <https://pypi.org/project/dkist-processing-core/>`_ and `dkist-processing-*instrument*` to
|
|
7
|
+
form the DKIST calibration processing stack.
|
|
8
|
+
|
|
9
|
+
Usage
|
|
10
|
+
-----
|
|
11
|
+
|
|
12
|
+
The classes in this repository should be used as the base of any DKIST processing pipeline tasks. Science tasks should subclass `ScienceTaskL0ToL1Base`.
|
|
13
|
+
|
|
14
|
+
Each class is built on an abstract base class with the `run` method left for a developer to fill out with the required steps that the task should take.
|
|
15
|
+
This class is then used as the callable object for the workflow and scheduling engine.
|
|
16
|
+
|
|
17
|
+
Example
|
|
18
|
+
-------
|
|
19
|
+
|
|
20
|
+
.. code-block:: python
|
|
21
|
+
|
|
22
|
+
from dkist_processing_common.tasks.base import ScienceTaskL0ToL1Base
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class RemoveArtifacts(ScienceTaskL0ToL1Base):
|
|
26
|
+
def run(self):
|
|
27
|
+
# task code here
|
|
28
|
+
total = 2 + 5
|
|
29
|
+
|
|
30
|
+
Deployment
|
|
31
|
+
----------
|
|
32
|
+
|
|
33
|
+
dkist-processing-common is deployed to `PyPI <https://pypi.org/project/dkist-processing-common/>`_
|
|
34
|
+
|
|
35
|
+
Environment Variables
|
|
36
|
+
---------------------
|
|
37
|
+
|
|
38
|
+
.. list-table::
|
|
39
|
+
:widths: 10 90
|
|
40
|
+
:header-rows: 1
|
|
41
|
+
|
|
42
|
+
* - Variable
|
|
43
|
+
- Field Info
|
|
44
|
+
* - LOGURU_LEVEL
|
|
45
|
+
- annotation=str required=False default='INFO' alias_priority=2 validation_alias='LOGURU_LEVEL' description='Log level for the application'
|
|
46
|
+
* - MESH_CONFIG
|
|
47
|
+
- annotation=dict[str, MeshService] required=False default_factory=dict alias_priority=2 validation_alias='MESH_CONFIG' description='Service mesh configuration' examples=[{'upstream_service_name': {'mesh_address': 'localhost', 'mesh_port': 6742}}]
|
|
48
|
+
* - RETRY_CONFIG
|
|
49
|
+
- annotation=RetryConfig required=False default_factory=RetryConfig description='Retry configuration for the service'
|
|
50
|
+
* - OTEL_SERVICE_NAME
|
|
51
|
+
- annotation=str required=False default='unknown-service-name' alias_priority=2 validation_alias='OTEL_SERVICE_NAME' description='Service name for OpenTelemetry'
|
|
52
|
+
* - DKIST_SERVICE_VERSION
|
|
53
|
+
- annotation=str required=False default='unknown-service-version' alias_priority=2 validation_alias='DKIST_SERVICE_VERSION' description='Service version for OpenTelemetry'
|
|
54
|
+
* - NOMAD_ALLOC_ID
|
|
55
|
+
- annotation=str required=False default='unknown-allocation-id' alias_priority=2 validation_alias='NOMAD_ALLOC_ID' description='Nomad allocation ID for OpenTelemetry'
|
|
56
|
+
* - OTEL_EXPORTER_OTLP_TRACES_INSECURE
|
|
57
|
+
- annotation=bool required=False default=True description='Use insecure connection for OTLP traces'
|
|
58
|
+
* - OTEL_EXPORTER_OTLP_METRICS_INSECURE
|
|
59
|
+
- annotation=bool required=False default=True description='Use insecure connection for OTLP metrics'
|
|
60
|
+
* - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
|
|
61
|
+
- annotation=Union[str, NoneType] required=False default=None description='OTLP traces endpoint. Overrides mesh configuration' examples=['localhost:4317']
|
|
62
|
+
* - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT
|
|
63
|
+
- annotation=Union[str, NoneType] required=False default=None description='OTLP metrics endpoint. Overrides mesh configuration' examples=['localhost:4317']
|
|
64
|
+
* - OTEL_PYTHON_DISABLED_INSTRUMENTATIONS
|
|
65
|
+
- annotation=list[str] required=False default_factory=list description='List of instrumentations to disable. https://opentelemetry.io/docs/zero-code/python/configuration/' examples=[['pika', 'requests']]
|
|
66
|
+
* - OTEL_PYTHON_FASTAPI_EXCLUDED_URLS
|
|
67
|
+
- annotation=str required=False default='health' description='Comma separated list of URLs to exclude from OpenTelemetry instrumentation in FastAPI.' examples=['client/.*/info,healthcheck']
|
|
68
|
+
* - SYSTEM_METRIC_INSTRUMENTATION_CONFIG
|
|
69
|
+
- annotation=Union[dict[str, bool], NoneType] required=False default=None description='Configuration for system metric instrumentation. https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/system_metrics/system_metrics.html' examples=[{'system.memory.usage': ['used', 'free', 'cached'], 'system.cpu.time': ['idle', 'user', 'system', 'irq'], 'system.network.io': ['transmit', 'receive'], 'process.runtime.memory': ['rss', 'vms'], 'process.runtime.cpu.time': ['user', 'system'], 'process.runtime.context_switches': ['involuntary', 'voluntary']}]
|
|
70
|
+
* - ISB_USERNAME
|
|
71
|
+
- annotation=str required=False default='guest' description='Username for the interservice-bus.'
|
|
72
|
+
* - ISB_PASSWORD
|
|
73
|
+
- annotation=str required=False default='guest' description='Password for the interservice-bus.'
|
|
74
|
+
* - ISB_EXCHANGE
|
|
75
|
+
- annotation=str required=False default='master.direct.x' description='Exchange for the interservice-bus.'
|
|
76
|
+
* - ISB_QUEUE_TYPE
|
|
77
|
+
- annotation=str required=False default='classic' description='Queue type for the interservice-bus.' examples=['quorum', 'classic']
|
|
78
|
+
* - BUILD_VERSION
|
|
79
|
+
- annotation=str required=False default='dev' description='Fallback build version for workflow tasks.'
|
|
80
|
+
* - GQL_AUTH_TOKEN
|
|
81
|
+
- annotation=Union[str, NoneType] required=False default='dev' description='The auth token for the metadata-store-api.'
|
|
82
|
+
* - OBJECT_STORE_ACCESS_KEY
|
|
83
|
+
- annotation=Union[str, NoneType] required=False default=None description='The access key for the object store.'
|
|
84
|
+
* - OBJECT_STORE_SECRET_KEY
|
|
85
|
+
- annotation=Union[str, NoneType] required=False default=None description='The secret key for the object store.'
|
|
86
|
+
* - OBJECT_STORE_USE_SSL
|
|
87
|
+
- annotation=bool required=False default=False description='Whether to use SSL for the object store connection.'
|
|
88
|
+
* - MULTIPART_THRESHOLD
|
|
89
|
+
- annotation=Union[int, NoneType] required=False default=None description='Multipart threshold for the object store.'
|
|
90
|
+
* - S3_CLIENT_CONFIG
|
|
91
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 client configuration for the object store.'
|
|
92
|
+
* - S3_UPLOAD_CONFIG
|
|
93
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 upload configuration for the object store.'
|
|
94
|
+
* - S3_DOWNLOAD_CONFIG
|
|
95
|
+
- annotation=Union[dict, NoneType] required=False default=None description='S3 download configuration for the object store.'
|
|
96
|
+
* - GLOBUS_TRANSPORT_PARAMS
|
|
97
|
+
- annotation=dict required=False default_factory=dict description='Globus transfer parameters.'
|
|
98
|
+
* - GLOBUS_CLIENT_ID
|
|
99
|
+
- annotation=Union[str, NoneType] required=False default=None description='Globus client ID for inbound/outbound transfers.'
|
|
100
|
+
* - GLOBUS_CLIENT_SECRET
|
|
101
|
+
- annotation=Union[str, NoneType] required=False default=None description='Globus client secret for inbound/outbound transfers.'
|
|
102
|
+
* - OBJECT_STORE_ENDPOINT
|
|
103
|
+
- annotation=Union[str, NoneType] required=False default=None description='Object store Globus Endpoint ID.'
|
|
104
|
+
* - SCRATCH_ENDPOINT
|
|
105
|
+
- annotation=Union[str, NoneType] required=False default=None description='Scratch Globus Endpoint ID.'
|
|
106
|
+
* - SCRATCH_BASE_PATH
|
|
107
|
+
- annotation=str required=False default='scratch/' description='Base path for scratch storage.'
|
|
108
|
+
* - SCRATCH_INVENTORY_DB_COUNT
|
|
109
|
+
- annotation=int required=False default=16 description='Number of databases in the scratch inventory (redis).'
|
|
110
|
+
* - DOCS_BASE_URL
|
|
111
|
+
- annotation=str required=False default='my_test_url' description='Base URL for the documentation site.'
|
|
112
|
+
|
|
113
|
+
Development
|
|
114
|
+
-----------
|
|
115
|
+
|
|
116
|
+
There are two prerequisites for test execution on a local machine:
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
* Redis. A running instance of redis on the local machine is required. The tests will use the default host ip of localhost and port of 6379 to connect to the database.
|
|
120
|
+
|
|
121
|
+
* RabbitMQ. A running instance of rabbitmq on the local machine is required. The tests will use the default host of localhost and a port of 5672 to connect to the interservice bus.
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
To run the tests locally, clone the repository and install the package in editable mode with the test extras.
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
.. code-block:: bash
|
|
128
|
+
|
|
129
|
+
git clone git@bitbucket.org:dkistdc/dkist-processing-common.git
|
|
130
|
+
cd dkist-processing-common
|
|
131
|
+
pre-commit install
|
|
132
|
+
pip install -e .[test]
|
|
133
|
+
# Redis must be running
|
|
134
|
+
pytest -v --cov dkist_processing_common
|
|
135
|
+
|
|
136
|
+
Changelog
|
|
137
|
+
#########
|
|
138
|
+
|
|
139
|
+
When you make **any** change to this repository it **MUST** be accompanied by a changelog file.
|
|
140
|
+
The changelog for this repository uses the `towncrier <https://github.com/twisted/towncrier>`__ package.
|
|
141
|
+
Entries in the changelog for the next release are added as individual files (one per change) to the ``changelog/`` directory.
|
|
142
|
+
|
|
143
|
+
Writing a Changelog Entry
|
|
144
|
+
^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
145
|
+
|
|
146
|
+
A changelog entry accompanying a change should be added to the ``changelog/`` directory.
|
|
147
|
+
The name of a file in this directory follows a specific template::
|
|
148
|
+
|
|
149
|
+
<PULL REQUEST NUMBER>.<TYPE>[.<COUNTER>].rst
|
|
150
|
+
|
|
151
|
+
The fields have the following meanings:
|
|
152
|
+
|
|
153
|
+
* ``<PULL REQUEST NUMBER>``: This is the number of the pull request, so people can jump from the changelog entry to the diff on BitBucket.
|
|
154
|
+
* ``<TYPE>``: This is the type of the change and must be one of the values described below.
|
|
155
|
+
* ``<COUNTER>``: This is an optional field, if you make more than one change of the same type you can append a counter to the subsequent changes, i.e. ``100.bugfix.rst`` and ``100.bugfix.1.rst`` for two bugfix changes in the same PR.
|
|
156
|
+
|
|
157
|
+
The list of possible types is defined in the towncrier section of ``pyproject.toml``; the types are:
|
|
158
|
+
|
|
159
|
+
* ``feature``: This change is a new code feature.
|
|
160
|
+
* ``bugfix``: This is a change which fixes a bug.
|
|
161
|
+
* ``doc``: A documentation change.
|
|
162
|
+
* ``removal``: A deprecation or removal of public API.
|
|
163
|
+
* ``misc``: Any small change which doesn't fit anywhere else, such as a change to the package infrastructure.
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
Rendering the Changelog at Release Time
|
|
167
|
+
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
168
|
+
|
|
169
|
+
When you are about to tag a release first you must run ``towncrier`` to render the changelog.
|
|
170
|
+
The steps for this are as follows:
|
|
171
|
+
|
|
172
|
+
* Run ``towncrier build --version vx.y.z`` using the version number you want to tag.
|
|
173
|
+
* Agree to have towncrier remove the fragments.
|
|
174
|
+
* Add and commit your changes.
|
|
175
|
+
* Tag the release.
|
|
176
|
+
|
|
177
|
+
**NOTE:** If you forget to add a Changelog entry to a tagged release (either manually or automatically with ``towncrier``)
|
|
178
|
+
then the Bitbucket pipeline will fail. To be able to use the same tag you must delete it locally and on the remote branch:
|
|
179
|
+
|
|
180
|
+
.. code-block:: bash
|
|
181
|
+
|
|
182
|
+
# First, actually update the CHANGELOG and commit the update
|
|
183
|
+
git commit
|
|
184
|
+
|
|
185
|
+
# Delete tags
|
|
186
|
+
git tag -d vWHATEVER.THE.VERSION
|
|
187
|
+
git push --delete origin vWHATEVER.THE.VERSION
|
|
188
|
+
|
|
189
|
+
# Re-tag with the same version
|
|
190
|
+
git tag vWHATEVER.THE.VERSION
|
|
191
|
+
git push --tags origin main
|
|
192
|
+
|
|
193
|
+
.. |codecov| image:: https://codecov.io/bb/dkistdc/dkist-processing-common/graph/badge.svg?token=3QSLGSEF3O
|
|
194
|
+
:target: https://codecov.io/bb/dkistdc/dkist-processing-common
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
Integrate dkist-processing-core 6.0.0 which brings a swap of Elastic APM to OpenTelemetry for metrics and tracing.
|
|
@@ -3,48 +3,65 @@
|
|
|
3
3
|
from dkist_processing_core.config import DKISTProcessingCoreConfiguration
|
|
4
4
|
from dkist_service_configuration.settings import DEFAULT_MESH_SERVICE
|
|
5
5
|
from dkist_service_configuration.settings import MeshService
|
|
6
|
-
from pydantic import BaseModel
|
|
7
6
|
from pydantic import Field
|
|
8
7
|
from talus import ConnectionRetryerFactory
|
|
9
8
|
from talus import ConsumerConnectionParameterFactory
|
|
10
9
|
from talus import ProducerConnectionParameterFactory
|
|
11
10
|
|
|
12
11
|
|
|
13
|
-
class RetryConfig(BaseModel):
|
|
14
|
-
"""Retry metadata model."""
|
|
15
|
-
|
|
16
|
-
retry_delay: int = 1
|
|
17
|
-
retry_backoff: int = 2
|
|
18
|
-
retry_jitter: tuple[int, int] = (1, 10)
|
|
19
|
-
retry_max_delay: int = 300
|
|
20
|
-
retry_tries: int = -1
|
|
21
|
-
|
|
22
|
-
|
|
23
12
|
class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
|
|
24
13
|
"""Common configurations."""
|
|
25
14
|
|
|
26
|
-
retry_config: RetryConfig = Field(default_factory=RetryConfig)
|
|
27
15
|
# metadata-store-api
|
|
28
|
-
gql_auth_token: str | None =
|
|
16
|
+
gql_auth_token: str | None = Field(
|
|
17
|
+
default="dev", description="The auth token for the metadata-store-api."
|
|
18
|
+
)
|
|
29
19
|
# object-store-api
|
|
30
|
-
object_store_access_key: str | None =
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
20
|
+
object_store_access_key: str | None = Field(
|
|
21
|
+
default=None, description="The access key for the object store."
|
|
22
|
+
)
|
|
23
|
+
object_store_secret_key: str | None = Field(
|
|
24
|
+
default=None, description="The secret key for the object store."
|
|
25
|
+
)
|
|
26
|
+
object_store_use_ssl: bool = Field(
|
|
27
|
+
default=False, description="Whether to use SSL for the object store connection."
|
|
28
|
+
)
|
|
29
|
+
# start object-clerk library
|
|
30
|
+
multipart_threshold: int | None = Field(
|
|
31
|
+
default=None, description="Multipart threshold for the object store."
|
|
32
|
+
)
|
|
33
|
+
s3_client_config: dict | None = Field(
|
|
34
|
+
default=None, description="S3 client configuration for the object store."
|
|
35
|
+
)
|
|
36
|
+
s3_upload_config: dict | None = Field(
|
|
37
|
+
default=None, description="S3 upload configuration for the object store."
|
|
38
|
+
)
|
|
39
|
+
s3_download_config: dict | None = Field(
|
|
40
|
+
default=None, description="S3 download configuration for the object store."
|
|
41
|
+
)
|
|
37
42
|
# globus
|
|
38
|
-
globus_transport_params: dict = Field(
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
+
globus_transport_params: dict = Field(
|
|
44
|
+
default_factory=dict, description="Globus transfer parameters."
|
|
45
|
+
)
|
|
46
|
+
globus_client_id: str | None = Field(
|
|
47
|
+
default=None, description="Globus client ID for inbound/outbound transfers."
|
|
48
|
+
)
|
|
49
|
+
globus_client_secret: str | None = Field(
|
|
50
|
+
default=None, description="Globus client secret for inbound/outbound transfers."
|
|
51
|
+
)
|
|
52
|
+
object_store_endpoint: str | None = Field(
|
|
53
|
+
default=None, description="Object store Globus Endpoint ID."
|
|
54
|
+
)
|
|
55
|
+
scratch_endpoint: str | None = Field(default=None, description="Scratch Globus Endpoint ID.")
|
|
43
56
|
# scratch
|
|
44
|
-
scratch_base_path: str = Field(default="scratch/")
|
|
45
|
-
scratch_inventory_db_count: int =
|
|
57
|
+
scratch_base_path: str = Field(default="scratch/", description="Base path for scratch storage.")
|
|
58
|
+
scratch_inventory_db_count: int = Field(
|
|
59
|
+
default=16, description="Number of databases in the scratch inventory (redis)."
|
|
60
|
+
)
|
|
46
61
|
# docs
|
|
47
|
-
docs_base_url: str = Field(
|
|
62
|
+
docs_base_url: str = Field(
|
|
63
|
+
default="my_test_url", description="Base URL for the documentation site."
|
|
64
|
+
)
|
|
48
65
|
|
|
49
66
|
@property
|
|
50
67
|
def metadata_store_api_base(self) -> str:
|
|
@@ -106,4 +123,3 @@ class DKISTProcessingCommonConfiguration(DKISTProcessingCoreConfiguration):
|
|
|
106
123
|
|
|
107
124
|
|
|
108
125
|
common_configurations = DKISTProcessingCommonConfiguration()
|
|
109
|
-
common_configurations.log_configurations()
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Models to support telemetry data."""
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class Progress(BaseModel, validate_assignment=True):
    """Container for tracking progress for a metering instrument."""

    # Number of units completed so far.
    current: int = 0
    # Total number of units expected; 0 means the total has not been set.
    total: int = 0

    def increment(self, step: int = 1) -> None:
        """Increment the current progress by the given step."""
        # Plain assignment (rather than +=) still triggers pydantic's
        # validate_assignment revalidation of the field.
        self.current = self.current + step

    @property
    def percent_complete(self) -> float:
        """Return the percent complete as a float between 0 and 100."""
        # Guard clause: an unset (or non-positive) total yields 0% rather
        # than a ZeroDivisionError.
        if self.total <= 0:
            return 0.0
        return (self.current / self.total) * 100
|
@@ -163,7 +163,7 @@ class AssembleMovie(WorkflowTaskBase, ABC):
|
|
|
163
163
|
relative_movie_path = f"{self.constants.dataset_id}_browse_movie.mp4"
|
|
164
164
|
absolute_movie_path = str(self.scratch.absolute_path(relative_movie_path))
|
|
165
165
|
|
|
166
|
-
with self.
|
|
166
|
+
with self.telemetry_span("Assembling movie frames"):
|
|
167
167
|
clip.write_videofile(absolute_movie_path, fps=self.FPS, codec="libx264", audio=False)
|
|
168
168
|
|
|
169
169
|
self.tag(path=absolute_movie_path, tags=[Tag.movie(), Tag.output()])
|
|
@@ -10,8 +10,14 @@ from typing import Any
|
|
|
10
10
|
from typing import Generator
|
|
11
11
|
from typing import Iterable
|
|
12
12
|
from typing import Type
|
|
13
|
+
from typing import TypeAlias
|
|
13
14
|
|
|
14
15
|
from dkist_processing_core import TaskBase
|
|
16
|
+
from opentelemetry.metrics import CallbackOptions
|
|
17
|
+
from opentelemetry.metrics import Counter
|
|
18
|
+
from opentelemetry.metrics import ObservableGauge
|
|
19
|
+
from opentelemetry.metrics import Observation
|
|
20
|
+
from pydantic import BaseModel
|
|
15
21
|
|
|
16
22
|
from dkist_processing_common._util.scratch import WorkflowFileSystem
|
|
17
23
|
from dkist_processing_common._util.tags import TagDB
|
|
@@ -27,7 +33,7 @@ __all__ = ["WorkflowTaskBase", "tag_type_hint"]
|
|
|
27
33
|
|
|
28
34
|
logger = logging.getLogger(__name__)
|
|
29
35
|
|
|
30
|
-
tag_type_hint = Iterable[str] | str
|
|
36
|
+
tag_type_hint: TypeAlias = Iterable[str] | str
|
|
31
37
|
|
|
32
38
|
|
|
33
39
|
class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
@@ -66,7 +72,6 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
66
72
|
workflow_name=workflow_name,
|
|
67
73
|
workflow_version=workflow_version,
|
|
68
74
|
)
|
|
69
|
-
self.task_name = self.__class__.__name__
|
|
70
75
|
self.scratch = WorkflowFileSystem(recipe_run_id=recipe_run_id, task_name=self.task_name)
|
|
71
76
|
self.constants = self.constants_model_class(
|
|
72
77
|
recipe_run_id=recipe_run_id, task_name=self.task_name
|
|
@@ -76,50 +81,16 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
76
81
|
recipe_run_id=recipe_run_id, task_name=self.task_name, namespace="counter"
|
|
77
82
|
)
|
|
78
83
|
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
arg_span_type: str = None,
|
|
85
|
-
arg_labels: dict[str, str] = None,
|
|
86
|
-
**kwargs,
|
|
87
|
-
):
|
|
88
|
-
"""Groom inputs to apm_step to handle various kwarg collisions."""
|
|
89
|
-
if "span_type" in kwargs:
|
|
90
|
-
raise RuntimeError(
|
|
91
|
-
f"Cannot specify 'span_type' {kwargs['span_type']} in step that forces is it to be {arg_span_type}"
|
|
92
|
-
)
|
|
93
|
-
|
|
94
|
-
if "labels" in kwargs:
|
|
95
|
-
arg_labels.update(kwargs["labels"])
|
|
96
|
-
del kwargs["labels"]
|
|
97
|
-
logger.info(
|
|
98
|
-
f"Recording APM span: {name = }, {arg_span_type = }, {arg_labels = }, "
|
|
99
|
-
f"recipe_run_id = {self.recipe_run_id}"
|
|
100
|
-
)
|
|
101
|
-
return self.apm_step(name, *args, span_type=arg_span_type, labels=arg_labels, **kwargs)
|
|
102
|
-
|
|
103
|
-
def apm_task_step(self, name: str, *args, **kwargs):
|
|
104
|
-
"""Span for management/organizational/info type stuff."""
|
|
105
|
-
return self.apm_type_base(
|
|
106
|
-
name, *args, arg_span_type="code.task", arg_labels={"type": "task"}, **kwargs
|
|
107
|
-
)
|
|
108
|
-
|
|
109
|
-
def apm_processing_step(self, name: str, *args, **kwargs):
|
|
110
|
-
"""Span for computations."""
|
|
111
|
-
return self.apm_type_base(
|
|
112
|
-
name,
|
|
113
|
-
*args,
|
|
114
|
-
arg_span_type="code.processing",
|
|
115
|
-
arg_labels={"type": "processing"},
|
|
116
|
-
**kwargs,
|
|
84
|
+
# meter instruments
|
|
85
|
+
self.read_counter: Counter = self.meter.create_counter(
|
|
86
|
+
name=self.format_metric_name("tasks.reads"),
|
|
87
|
+
unit="1",
|
|
88
|
+
description="The number of reads executed in the processing stack.",
|
|
117
89
|
)
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
name, *args, arg_span_type="code.writing", arg_labels={"type": "writing"}, **kwargs
|
|
90
|
+
self.write_counter: Counter = self.meter.create_counter(
|
|
91
|
+
name=self.format_metric_name("tasks.writes"),
|
|
92
|
+
unit="1",
|
|
93
|
+
description="The number of writes executed in the processing stack.",
|
|
123
94
|
)
|
|
124
95
|
|
|
125
96
|
@property
|
|
@@ -161,7 +132,7 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
161
132
|
"""Execute any pre-task setup required."""
|
|
162
133
|
super().pre_run()
|
|
163
134
|
if self.record_provenance or self.is_task_manual:
|
|
164
|
-
with self.
|
|
135
|
+
with self.telemetry_span("Record Provenance"):
|
|
165
136
|
self._record_provenance()
|
|
166
137
|
|
|
167
138
|
def read(
|
|
@@ -183,7 +154,9 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
183
154
|
**decoder_kwargs
|
|
184
155
|
Additional arguments to pass to the `decoder` function.
|
|
185
156
|
"""
|
|
186
|
-
|
|
157
|
+
for p in self.scratch.find_all(tags=tags):
|
|
158
|
+
self.read_counter.add(amount=1, attributes=self.base_telemetry_attributes)
|
|
159
|
+
yield decoder(p, **decoder_kwargs)
|
|
187
160
|
|
|
188
161
|
def write(
|
|
189
162
|
self,
|
|
@@ -221,6 +194,7 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
221
194
|
-------
|
|
222
195
|
The path for the written file
|
|
223
196
|
"""
|
|
197
|
+
self.write_counter.add(amount=1, attributes=self.base_telemetry_attributes)
|
|
224
198
|
file_obj = encoder(data, **encoder_kwargs)
|
|
225
199
|
if isinstance(tags, str):
|
|
226
200
|
tags = [tags]
|
|
@@ -372,11 +346,11 @@ class WorkflowTaskBase(TaskBase, MetadataStoreMixin, ABC):
|
|
|
372
346
|
Filename Counter: not rolled back but its purpose of preventing file name collisions is not impacted
|
|
373
347
|
"""
|
|
374
348
|
super().rollback()
|
|
375
|
-
with self.
|
|
349
|
+
with self.telemetry_span("Rollback Scratch"):
|
|
376
350
|
self.scratch.rollback()
|
|
377
|
-
with self.
|
|
351
|
+
with self.telemetry_span("Rollback Constants"):
|
|
378
352
|
self.constants._rollback()
|
|
379
|
-
with self.
|
|
353
|
+
with self.telemetry_span("Change Recipe Run to Inprogress"):
|
|
380
354
|
self.metadata_store_change_recipe_run_to_inprogress()
|
|
381
355
|
|
|
382
356
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
|
@@ -54,11 +54,11 @@ class TransferL1Data(TransferDataBase, GlobusMixin):
|
|
|
54
54
|
|
|
55
55
|
def transfer_objects(self):
|
|
56
56
|
"""Transfer movie and L1 output frames."""
|
|
57
|
-
with self.
|
|
57
|
+
with self.telemetry_span("Upload movie"):
|
|
58
58
|
# Movie needs to be transferred separately as the movie headers need to go with it
|
|
59
59
|
self.transfer_movie()
|
|
60
60
|
|
|
61
|
-
with self.
|
|
61
|
+
with self.telemetry_span("Upload science frames"):
|
|
62
62
|
self.transfer_output_frames()
|
|
63
63
|
|
|
64
64
|
def transfer_output_frames(self):
|
|
@@ -120,10 +120,10 @@ class AssembleQualityData(L1OutputDataBase, QualityMixin):
|
|
|
120
120
|
|
|
121
121
|
def run(self):
|
|
122
122
|
"""Run method for the task."""
|
|
123
|
-
with self.
|
|
123
|
+
with self.telemetry_span("Assembling quality data"):
|
|
124
124
|
quality_data = self.quality_assemble_data(polcal_label_list=self.polcal_label_list)
|
|
125
125
|
|
|
126
|
-
with self.
|
|
126
|
+
with self.telemetry_span(
|
|
127
127
|
f"Saving quality data with {len(quality_data)} metrics to the file system"
|
|
128
128
|
):
|
|
129
129
|
self.write(
|
|
@@ -146,7 +146,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
|
|
|
146
146
|
|
|
147
147
|
def run(self) -> None:
|
|
148
148
|
"""Run method for this task."""
|
|
149
|
-
with self.
|
|
149
|
+
with self.telemetry_span(f"Storing quality data to metadata store"):
|
|
150
150
|
# each quality_data file is a list - this will combine the elements of multiple lists into a single list
|
|
151
151
|
quality_data = list(
|
|
152
152
|
chain.from_iterable(
|
|
@@ -156,7 +156,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
|
|
|
156
156
|
self.metadata_store_add_quality_data(
|
|
157
157
|
dataset_id=self.constants.dataset_id, quality_data=quality_data
|
|
158
158
|
)
|
|
159
|
-
with self.
|
|
159
|
+
with self.telemetry_span("Count Expected Outputs"):
|
|
160
160
|
dataset_id = self.constants.dataset_id
|
|
161
161
|
expected_object_count = self.count(tags=Tag.output())
|
|
162
162
|
if quality_data:
|
|
@@ -165,7 +165,7 @@ class SubmitDatasetMetadata(L1OutputDataBase):
|
|
|
165
165
|
f"Adding Dataset Receipt Account: "
|
|
166
166
|
f"{dataset_id=}, {expected_object_count=}, recipe_run_id={self.recipe_run_id}"
|
|
167
167
|
)
|
|
168
|
-
with self.
|
|
168
|
+
with self.telemetry_span(
|
|
169
169
|
f"Add Dataset Receipt Account: {dataset_id = }, {expected_object_count = }"
|
|
170
170
|
):
|
|
171
171
|
self.metadata_store_add_dataset_receipt_account(
|
|
@@ -245,10 +245,10 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
|
|
|
245
245
|
|
|
246
246
|
def run(self) -> None:
|
|
247
247
|
"""Run method for this task."""
|
|
248
|
-
with self.
|
|
248
|
+
with self.telemetry_span("Gather output data"):
|
|
249
249
|
frames = self.read(tags=self.output_frame_tags)
|
|
250
250
|
movies = self.read(tags=[Tag.output(), Tag.movie()])
|
|
251
|
-
with self.
|
|
251
|
+
with self.telemetry_span("Create message objects"):
|
|
252
252
|
messages = []
|
|
253
253
|
messages += self.frame_messages(paths=frames)
|
|
254
254
|
frame_message_count = len(messages)
|
|
@@ -257,7 +257,7 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
|
|
|
257
257
|
dataset_has_quality_data = self.dataset_has_quality_data
|
|
258
258
|
if dataset_has_quality_data:
|
|
259
259
|
messages.append(self.quality_report_message)
|
|
260
|
-
with self.
|
|
260
|
+
with self.telemetry_span(
|
|
261
261
|
f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
|
|
262
262
|
):
|
|
263
263
|
self.interservice_bus_publish(messages=messages)
|
|
@@ -58,10 +58,10 @@ class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):
|
|
|
58
58
|
|
|
59
59
|
def run(self) -> None:
|
|
60
60
|
"""Transfer the data and cleanup any folders."""
|
|
61
|
-
with self.
|
|
61
|
+
with self.telemetry_span("Transfer objects"):
|
|
62
62
|
self.transfer_objects()
|
|
63
63
|
|
|
64
|
-
with self.
|
|
64
|
+
with self.telemetry_span("Remove folder objects"):
|
|
65
65
|
self.remove_folder_objects()
|
|
66
66
|
|
|
67
67
|
@abstractmethod
|