dkist-processing-common 12.1.0rc1__tar.gz → 12.2.0rc2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/CHANGELOG.rst +26 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/PKG-INFO +2 -2
- dkist_processing_common-12.2.0rc2/changelog/272.feature.rst +1 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/constants.py +6 -0
- dkist_processing_common-12.2.0rc2/dkist_processing_common/models/extras.py +35 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/tags.py +13 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/id_bud.py +7 -4
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/l1_output_data.py +16 -11
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/output_data_base.py +25 -4
- dkist_processing_common-12.2.0rc2/dkist_processing_common/tasks/write_extra.py +333 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/write_l1.py +2 -55
- dkist_processing_common-12.2.0rc2/dkist_processing_common/tasks/write_l1_base.py +67 -0
- dkist_processing_common-12.2.0rc2/dkist_processing_common/tests/test_construct_dataset_extras.py +219 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_output_data_base.py +24 -2
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_transfer_l1_output_data.py +1 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_write_l1.py +0 -1
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common.egg-info/PKG-INFO +2 -2
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common.egg-info/SOURCES.txt +5 -8
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common.egg-info/requires.txt +1 -1
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/pyproject.toml +1 -1
- dkist_processing_common-12.1.0rc1/changelog/280.misc.rst +0 -1
- dkist_processing_common-12.1.0rc1/changelog/282.feature.2.rst +0 -2
- dkist_processing_common-12.1.0rc1/changelog/282.feature.rst +0 -2
- dkist_processing_common-12.1.0rc1/changelog/284.feature.rst +0 -1
- dkist_processing_common-12.1.0rc1/changelog/285.feature.rst +0 -2
- dkist_processing_common-12.1.0rc1/changelog/285.misc.rst +0 -2
- dkist_processing_common-12.1.0rc1/changelog/286.feature.rst +0 -2
- dkist_processing_common-12.1.0rc1/changelog/287.misc.rst +0 -1
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/.gitignore +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/.pre-commit-config.yaml +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/.readthedocs.yml +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/.snyk +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/README.rst +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/bitbucket-pipelines.yml +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/changelog/.gitempty +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/_util/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/_util/constants.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/_util/graphql.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/_util/scratch.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/_util/tags.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/array.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/asdf.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/basemodel.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/bytes.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/fits.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/iobase.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/json.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/path.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/quality.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/codecs/str.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/config.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/fonts/Lato-Regular.ttf +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/fonts/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/manual.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/dkist_location.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/fits_access.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/flower_pot.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/fried_parameter.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/graphql.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/input_dataset.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/message.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/message_queue_binding.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/metric_code.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/parameters.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/quality.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/task_name.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/telemetry.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/models/wavelength.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/average_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/cs_step.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/dsps_repeat.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/experiment_id_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/l0_fits_access.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/l1_fits_access.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/lookup_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/near_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/observing_program_id_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/proposal_id_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/quality.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/retarder.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/single_value_single_key_flower.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/task.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/time.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/unique_bud.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/parsers/wavelength.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/assemble_movie.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/base.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/globus.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/interservice_bus.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/metadata_store.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/object_store.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/quality/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/quality/_base.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/mixin/quality/_metrics.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/parse_l0_input_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/quality_metrics.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/teardown.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/transfer_input_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/trial_catalog.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tasks/trial_output_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/__init__.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/conftest.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/mock_metadata_store.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_assemble_movie.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_assemble_quality.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_base.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_codecs.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_constants.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_cs_step.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_dkist_location.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_fits_access.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_flower_pot.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_fried_parameter.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_input_dataset.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_interservice_bus.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_interservice_bus_mixin.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_manual_processing.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_parameters.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_parse_l0_input_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_publish_catalog_messages.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_quality.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_quality_mixin.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_scratch.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_stems.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_submit_dataset_metadata.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_tags.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_task_name.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_task_parsing.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_teardown.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_transfer_input_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_trial_catalog.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_trial_output_data.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common/tests/test_workflow_task_base.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common.egg-info/dependency_links.txt +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/dkist_processing_common.egg-info/top_level.txt +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/Makefile +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/changelog.rst +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/conf.py +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/index.rst +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/landing_page.rst +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/make.bat +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/docs/requirements.txt +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/licenses/LICENSE.rst +0 -0
- {dkist_processing_common-12.1.0rc1 → dkist_processing_common-12.2.0rc2}/setup.cfg +0 -0

CHANGELOG.rst
@@ -1,3 +1,29 @@
+v12.1.0 (2026-01-26)
+====================
+
+Features
+--------
+
+- Add `ListStem` base class for huge speedup in cases where the keys don't matter and the `getter` logic only depends on the
+  list of values computed by `setter`. This is the case for most (all?) "Buds". (`#282 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/282>`__)
+- Add `SetStem` base class that has all the benefits of `ListStem` but also gets a speedup by storing values in a `set` for
+  cases where repeated values don't need to be tracked. (`#282 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/282>`__)
+- Speed up parsing of the `*CadenceBud`, `TaskDateBeginBud`, and `[Task]NearFloatBud` by basing these buds on `ListStem`. (`#284 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/284>`__)
+- Speed up `NumCSStepBud`, `[Task]UniqueBud`, `[Task]ContributingIdsBud`, and `TaskRoundTimeBudBase` parsing by basing
+  these buds on `SetStem`. (`#285 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/285>`__)
+- Speed up `CSStepFlower` parsing by using an internal set to keep track of the unique `CSStep` objects. This removes the
+  need to compute the unique set when computing the tag for each file. (`#286 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/286>`__)
+
+
+Misc
+----
+
+- Speed up the reading of INPUT files in Parse tasks by turning off image decompression and checksum checks. (`#280 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/280>`__)
+- Update `RetarderNameBud` to drop "clear" values (i.e., the retarder is out of the beam) in the `setter` instead of the `getter`.
+  This brings it in line with standard Bud-practice. (`#285 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/285>`__)
+- Convert the TimeLookupBud to be a SetStem constant. (`#287 <https://bitbucket.org/dkistdc/dkist-processing-common/pull-requests/287>`__)
+
+
 v12.0.0 (2026-01-22)
 ====================
 
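
The `SetStem` entries above describe buds whose `getter` only needs the unique values collected so far, with deduplication happening at insert time. The snippet below is a hypothetical, simplified illustration of that pattern; the real `SetStem` class lives in `dkist_processing_common.models.flower_pot` and its interface may differ.

```python
# Hypothetical sketch only; not the package's SetStem implementation.
class UniqueValueBud:
    """Toy bud: values are accumulated in a set because repeats don't matter."""

    def __init__(self, metadata_key: str):
        self.metadata_key = metadata_key
        self.value_set: set = set()  # deduplication happens as values arrive

    def setter(self, fits_obj):
        # One cheap value per frame; no per-key bookkeeping required.
        return getattr(fits_obj, self.metadata_key)

    def update(self, fits_obj) -> None:
        self.value_set.add(self.setter(fits_obj))

    def getter(self):
        # Computed once over the collected unique values.
        if len(self.value_set) > 1:
            raise ValueError(f"Multiple values found for {self.metadata_key}: {self.value_set}")
        return next(iter(self.value_set))
```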

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dkist-processing-common
-Version: 12.1.0rc1
+Version: 12.2.0rc2
 Summary: Common task classes used by the DKIST science data processing pipelines
 Author-email: NSO / AURA <dkistdc@nso.edu>
 License: BSD-3-Clause
@@ -16,7 +16,7 @@ Description-Content-Type: text/x-rst
 Requires-Dist: asdf<4.0.0,>=3.5.0
 Requires-Dist: astropy>=7.0.0
 Requires-Dist: dkist-fits-specifications<5.0,>=4.0.0
-Requires-Dist: dkist-header-validator<6.0,>=5.
+Requires-Dist: dkist-header-validator<6.0,>=5.3.0
 Requires-Dist: dkist-processing-core==7.0.1
 Requires-Dist: dkist-processing-pac<4.0,>=3.1
 Requires-Dist: dkist-service-configuration<5.0,>=4.2.0

changelog/272.feature.rst (new file)
@@ -0,0 +1 @@
+Add the framework for using Dataset Extras, that is, other data that can be included with the L1 FITS files generated by the regular pipeline. A new abstract class, WriteL1DatasetExtras, provides helper functionality for use in the instrument pipelines.

dkist_processing_common/models/constants.py
@@ -285,18 +285,24 @@ class ConstantsBase:
     def dark_observing_program_execution_ids(self) -> list[str]:
         """Return the observing program execution ids constant for the dark task."""
         observing_programs = self._db_dict[BudName.dark_observing_program_execution_ids]
+        if isinstance(observing_programs, str):
+            observing_programs = [observing_programs]
         return list(observing_programs)
 
     @property
     def solar_gain_observing_program_execution_ids(self) -> list[str]:
         """Return the observing program execution ids constant for the solar_gain task."""
         observing_programs = self._db_dict[BudName.solar_gain_observing_program_execution_ids]
+        if isinstance(observing_programs, str):
+            observing_programs = [observing_programs]
         return list(observing_programs)
 
     @property
     def polcal_observing_program_execution_ids(self) -> list[str]:
         """Return the observing program execution ids constant."""
         observing_programs = self._db_dict[BudName.polcal_observing_program_execution_ids]
+        if isinstance(observing_programs, str):
+            observing_programs = [observing_programs]
         return list(observing_programs)
 
     @property
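
A brief note on why the `isinstance` guard above matters: `list()` applied to a bare string splits it into characters, so a single observing program ID stored as a plain string would otherwise come back as a list of letters. A minimal illustration of the two cases:

```python
>>> list("OP-123")        # without the guard: one ID becomes its characters
['O', 'P', '-', '1', '2', '3']
>>> list(["OP-123"])      # with the guard: the ID is wrapped in a list first
['OP-123']
```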

dkist_processing_common/models/extras.py (new file)
@@ -0,0 +1,35 @@
+"""Autocomplete access to dataset extra header sections."""
+
+from enum import StrEnum
+
+
+class DatasetExtraHeaderSection(StrEnum):
+    """Enum defining the possible header sections for dataset extras."""
+
+    common = "common"
+    aggregate = "aggregate"
+    iptask = "iptask"
+    gos = "gos"
+    wavecal = "wavecal"
+    atlas = "atlas"
+    test = "test"
+
+
+class DatasetExtraType(StrEnum):
+    """Enum defining options for dataset extra names."""
+
+    dark = "DARK"
+    background_light = "BACKGROUND LIGHT"
+    solar_gain = "SOLAR GAIN"
+    characteristic_spectra = "CHARACTERISTIC SPECTRA"
+    modulation_state_offsets = "MODULATION STATE OFFSETS"
+    beam_angles = "BEAM ANGLES"
+    spectral_curvature_shifts = "SPECTRAL CURVATURE SHIFTS"
+    wavelength_calibration_input_spectrum = "WAVELENGTH CALIBRATION INPUT SPECTRUM"
+    wavelength_calibration_reference_spectrum = "WAVELENGTH CALIBRATION REFERENCE SPECTRUM"
+    reference_wavelength_vector = "REFERENCE WAVELENGTH VECTOR"
+    demodulation_matrices = "DEMODULATION MATRICES"
+    polcal_as_science = "POLCAL AS SCIENCE"
+    bad_pixel_map = "BAD PIXEL MAP"
+    beam_offsets = "BEAM OFFSETS"
+    spectral_curvature_scales = "SPECTRAL CURVATURE SCALES"
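
Because both enums derive from `StrEnum`, their members behave as plain strings, which is what lets later code in this release drop them straight into FITS headers and do substring checks such as `"GAIN" in task_type`. A quick interactive illustration (re-declaring a trimmed copy of the enum just for the example):

```python
>>> from enum import StrEnum
>>> class DatasetExtraType(StrEnum):
...     solar_gain = "SOLAR GAIN"
...
>>> DatasetExtraType.solar_gain == "SOLAR GAIN"
True
>>> "GAIN" in DatasetExtraType.solar_gain
True
>>> DatasetExtraType.solar_gain.replace(" ", "-")
'SOLAR-GAIN'
```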

dkist_processing_common/models/tags.py
@@ -38,6 +38,8 @@ class StemName(StrEnum):
     dataset_inventory = "DATASET_INVENTORY"
     asdf = "ASDF"
     quality_report = "QUALITY_REPORT"
+    # Dataset extras
+    extra = "EXTRA"
 
 
 class Tag:
@@ -450,3 +452,14 @@ class Tag:
         An asdf tag
         """
         return cls.format_tag(StemName.asdf)
+
+    @classmethod
+    def extra(cls) -> str:
+        """
+        Return a dataset extra tag.
+
+        Returns
+        -------
+        A dataset extra tag
+        """
+        return cls.format_tag(StemName.extra)
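
As a quick orientation (a hypothetical snippet, not taken from the package): tasks tag dataset extra files on write and select them again by the same tag pair, mirroring how `write_extra.py` and `output_data_base.py` later in this diff use `Tag.extra()`. The filename below is illustrative only.

```python
# Hypothetical usage inside a task; write()/read() calls mirror those shown elsewhere in this diff.
self.write(
    data=hdu_list,                       # a fits.HDUList assembled by the task
    tags=[Tag.extra(), Tag.output()],    # marks the file as an output dataset extra
    encoder=fits_hdulist_encoder,
    relative_path="VISP_DSETID_DARK_1.fits",
)
extra_paths = list(self.read(tags=[Tag.output(), Tag.extra()]))
```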

dkist_processing_common/parsers/id_bud.py
@@ -1,16 +1,17 @@
 """Base classes for ID bud parsing."""
 
+from collections import Counter
 from enum import StrEnum
 from typing import Callable
 from typing import Type
 
-from dkist_processing_common.models.flower_pot import SetStem
+from dkist_processing_common.models.flower_pot import ListStem
 from dkist_processing_common.models.flower_pot import SpilledDirt
 from dkist_processing_common.parsers.l0_fits_access import L0FitsAccess
 from dkist_processing_common.parsers.task import passthrough_header_ip_task
 
 
-class ContributingIdsBud(SetStem):
+class ContributingIdsBud(ListStem):
     """Base class for contributing ID buds."""
 
     def __init__(self, constant_name: str, metadata_key: str | StrEnum):
@@ -35,13 +36,15 @@ class ContributingIdsBud(SetStem):
 
     def getter(self) -> tuple[str, ...]:
         """
-        Get all ids seen for any type of frame.
+        Get all ids seen for any type of frame, sorted by the number of appearances of that ID.
 
         Returns
         -------
         IDs from all types of frames
         """
-
+        counts = Counter(self.value_list)  # Count the number of appearances of each ID
+        sorted_ids = tuple(str(item) for item, count in counts.most_common())
+        return sorted_ids
 
 
 class TaskContributingIdsBud(ContributingIdsBud):
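
The new `getter` relies on `collections.Counter.most_common()`, which orders items by descending count, so the first ID returned is the one that appeared in the most frames. A quick illustration of that ordering:

```python
>>> from collections import Counter
>>> Counter(["OP1", "OP2", "OP1", "OP1", "OP2", "OP3"]).most_common()
[('OP1', 3), ('OP2', 2), ('OP3', 1)]
>>> tuple(item for item, count in Counter(["OP1", "OP2", "OP1"]).most_common())
('OP1', 'OP2')
```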

dkist_processing_common/tasks/l1_output_data.py
@@ -2,18 +2,14 @@
 
 import logging
 from abc import ABC
-from itertools import chain
 from pathlib import Path
 from typing import Iterable
 
-from dkist_processing_common.codecs.quality import quality_data_decoder
 from dkist_processing_common.codecs.quality import quality_data_encoder
 from dkist_processing_common.models.message import CatalogFrameMessage
 from dkist_processing_common.models.message import CatalogFrameMessageBody
 from dkist_processing_common.models.message import CatalogObjectMessage
 from dkist_processing_common.models.message import CatalogObjectMessageBody
-from dkist_processing_common.models.message import CreateQualityReportMessage
-from dkist_processing_common.models.message import CreateQualityReportMessageBody
 from dkist_processing_common.models.tags import Tag
 from dkist_processing_common.tasks.mixin.globus import GlobusMixin
 from dkist_processing_common.tasks.mixin.interservice_bus import InterserviceBusMixin
@@ -62,15 +58,19 @@ class TransferL1Data(TransferDataBase, GlobusMixin):
         with self.telemetry_span("Upload quality data"):
             self.transfer_quality_data()
 
-        with self.telemetry_span("Upload
+        with self.telemetry_span("Upload output frames"):
             self.transfer_output_frames()
 
     def transfer_output_frames(self):
-        """Create a Globus transfer for all output data."""
-
+        """Create a Globus transfer for all output data, as well as any available dataset extras."""
+        output_transfer_items = self.build_output_frame_transfer_list()
+        dataset_extra_transfer_items = self.build_dataset_extra_transfer_list()
+        transfer_items = output_transfer_items + dataset_extra_transfer_items
 
         logger.info(
             f"Preparing globus transfer {len(transfer_items)} items: "
+            f"{len(output_transfer_items)} output frames. "
+            f"{len(dataset_extra_transfer_items)} dataset extras. "
             f"recipe_run_id={self.recipe_run_id}. "
             f"transfer_items={transfer_items[:3]}..."
         )
@@ -246,19 +246,24 @@ class PublishCatalogAndQualityMessages(L1OutputDataBase, InterserviceBusMixin):
     def run(self) -> None:
         """Run method for this task."""
         with self.telemetry_span("Gather output data"):
-            frames = self.read(
-
+            frames = self.read(
+                tags=self.output_frame_tags
+            )  # frames is kept as a generator as it is much longer than the other file categories
+            extras = list(self.read(tags=self.extra_frame_tags))
+            movies = list(self.read(tags=[Tag.output(), Tag.movie()]))
             quality_data = self.read(tags=[Tag.output(), Tag.quality_data()])
         with self.telemetry_span("Create message objects"):
             messages = []
             messages += self.frame_messages(paths=frames)
             frame_message_count = len(messages)
+            messages += self.frame_messages(paths=extras)
+            extra_message_count = len(extras)
             messages += self.object_messages(paths=movies, object_type="MOVIE")
-            object_message_count = len(
+            object_message_count = len(movies)
             dataset_has_quality_data = self.dataset_has_quality_data
             if dataset_has_quality_data:
                 messages += self.object_messages(paths=quality_data, object_type="QDATA")
         with self.telemetry_span(
-            f"Publish messages: {frame_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
+            f"Publish messages: {frame_message_count = }, {extra_message_count = }, {object_message_count = }, {dataset_has_quality_data = }"
         ):
             self.interservice_bus_publish(messages=messages)

dkist_processing_common/tasks/output_data_base.py
@@ -22,19 +22,23 @@ class OutputDataBase(WorkflowTaskBase, ABC):
         """Get the destination bucket."""
         return self.metadata_store_recipe_run.configuration.destination_bucket
 
-    def format_object_key(self, path: Path) -> str:
+    def format_object_key(self, path: Path, folder_modifier: str | None = None) -> str:
         """
         Convert output paths into object store keys.
 
         Parameters
         ----------
         path: the Path to convert
+        folder_modifier: optional folder name to insert into the path
 
         Returns
        -------
         formatted path in the object store
         """
-
+        if folder_modifier:
+            object_key = self.destination_folder / Path(folder_modifier) / Path(path.name)
+        else:
+            object_key = self.destination_folder / Path(path.name)
         return str(object_key)
 
     @property
@@ -52,6 +56,11 @@ class OutputDataBase(WorkflowTaskBase, ABC):
         """Tags that uniquely identify L1 fits frames i.e. the dataset-inventory-able frames."""
         return [Tag.output(), Tag.frame()]
 
+    @property
+    def extra_frame_tags(self) -> list[str]:
+        """Tags that uniquely identify dataset extra fits frames."""
+        return [Tag.output(), Tag.extra()]
+
 
 class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):
     """Base class for transferring data from scratch to somewhere else."""
@@ -73,9 +82,21 @@ class TransferDataBase(OutputDataBase, ObjectStoreMixin, ABC):
         """Build a list of GlobusTransfer items corresponding to all OUTPUT (i.e., L1) frames."""
         science_frame_paths: list[Path] = list(self.read(tags=self.output_frame_tags))
 
+        return self.build_transfer_list(science_frame_paths)
+
+    def build_dataset_extra_transfer_list(self) -> list[GlobusTransferItem]:
+        """Build a list of GlobusTransfer items corresponding to all extra dataset files."""
+        extra_paths: list[Path] = list(self.read(tags=self.extra_frame_tags))
+
+        return self.build_transfer_list(paths=extra_paths, destination_folder_modifier="extra")
+
+    def build_transfer_list(
+        self, paths: list[Path], destination_folder_modifier: str | None = None
+    ) -> list[GlobusTransferItem]:
+        """Given a list of paths, build a list of GlobusTransfer items."""
         transfer_items = []
-        for p in
-            object_key = self.format_object_key(p)
+        for p in paths:
+            object_key = self.format_object_key(path=p, folder_modifier=destination_folder_modifier)
             destination_path = Path(self.destination_bucket, object_key)
             item = GlobusTransferItem(
                 source_path=p,
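
To see what the new `folder_modifier` argument does to an object key, here is a hypothetical illustration; the actual `destination_folder` value depends on the recipe run configuration and is assumed below.

```python
from pathlib import Path

destination_folder = Path("pid_1_118/AAAAA")             # hypothetical destination folder
path = Path("/scratch/123/VISP_AAAAA_DARK_1.fits")

# Without a modifier (L1 output frames):
str(destination_folder / Path(path.name))
# -> 'pid_1_118/AAAAA/VISP_AAAAA_DARK_1.fits'

# With folder_modifier="extra" (dataset extras):
str(destination_folder / Path("extra") / Path(path.name))
# -> 'pid_1_118/AAAAA/extra/VISP_AAAAA_DARK_1.fits'
```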

dkist_processing_common/tasks/write_extra.py (new file)
@@ -0,0 +1,333 @@
+"""Classes to support the generation of dataset extras."""
+
+import uuid
+from abc import ABC
+from abc import abstractmethod
+from datetime import datetime
+
+import numpy as np
+from astropy.io import fits
+from astropy.time import Time
+from dkist_fits_specifications.utils.formatter import reformat_dataset_extra_header
+from dkist_header_validator.spec_validators import spec_extras_validator
+
+from dkist_processing_common.codecs.fits import fits_hdulist_encoder
+from dkist_processing_common.models.extras import DatasetExtraHeaderSection
+from dkist_processing_common.models.extras import DatasetExtraType
+from dkist_processing_common.models.tags import Tag
+from dkist_processing_common.models.task_name import TaskName
+from dkist_processing_common.tasks.write_l1_base import WriteL1Base
+
+
+class WriteL1DatasetExtras(WriteL1Base, ABC):
+    """Class supporting the construction of dataset extras."""
+
+    def dataset_extra_headers(
+        self,
+        filename: str,
+        task_type: TaskName,
+        extra_name: DatasetExtraType,
+        total_exposure: float | None = None,
+        readout_exposure: float | None = None,
+    ) -> dict:
+        """Provide common FITS header keys for dataset extras."""
+        # Build task specific header values
+        match task_type:
+            case TaskName.dark:
+                task_specific_observing_program_execution_id = (
+                    self.constants.dark_observing_program_execution_ids
+                )
+                task_specific_date_begin = self.constants.dark_date_begin
+                task_specific_date_end = self.constants.dark_date_end
+                task_specific_raw_frames_per_fpa = (
+                    0  # can be updated in construction of dataset extra if required
+                )
+                task_specific_telescope_tracking_mode = (
+                    "None"  # can be updated in construction of dataset extra if required
+                )
+                task_specific_coude_table_tracking_mode = (
+                    "None"  # can be updated in construction of dataset extra if required
+                )
+                task_specific_telescope_scanning_mode = (
+                    "None"  # can be updated in construction of dataset extra if required
+                )
+                task_specific_average_light_level = self.constants.dark_average_light_level
+                task_specific_average_telescope_elevation = (
+                    self.constants.dark_average_telescope_elevation
+                )
+                task_specific_average_coude_table_angle = (
+                    self.constants.dark_average_coude_table_angle
+                )
+                task_specific_average_telescope_azimuth = (
+                    self.constants.dark_average_telescope_azimuth
+                )
+                task_specific_gos_level3_status = self.constants.dark_gos_level3_status
+                task_specific_gos_level3_lamp_status = self.constants.dark_gos_level3_lamp_status
+                task_specific_gos_polarizer_status = self.constants.dark_gos_polarizer_status
+                task_specific_gos_polarizer_angle = self.constants.dark_gos_polarizer_angle
+                task_specific_gos_retarder_status = self.constants.dark_gos_retarder_status
+                task_specific_gos_retarder_angle = self.constants.dark_gos_retarder_angle
+                task_specific_gos_level0_status = self.constants.dark_gos_level0_status
+            case TaskName.solar_gain:
+                task_specific_observing_program_execution_id = (
+                    self.constants.solar_gain_observing_program_execution_ids
+                )
+
+                task_specific_date_begin = self.constants.solar_gain_date_begin
+                task_specific_date_end = self.constants.solar_gain_date_end
+                task_specific_raw_frames_per_fpa = self.constants.solar_gain_num_raw_frames_per_fpa
+                task_specific_telescope_tracking_mode = (
+                    self.constants.solar_gain_telescope_tracking_mode
+                )
+                task_specific_coude_table_tracking_mode = (
+                    self.constants.solar_gain_coude_table_tracking_mode
+                )
+                task_specific_telescope_scanning_mode = (
+                    self.constants.solar_gain_telescope_scanning_mode
+                )
+                task_specific_average_light_level = self.constants.solar_gain_average_light_level
+                task_specific_average_telescope_elevation = (
+                    self.constants.solar_gain_average_telescope_elevation
+                )
+                task_specific_average_coude_table_angle = (
+                    self.constants.solar_gain_average_coude_table_angle
+                )
+                task_specific_average_telescope_azimuth = (
+                    self.constants.solar_gain_average_telescope_azimuth
+                )
+                task_specific_gos_level3_status = self.constants.solar_gain_gos_level3_status
+                task_specific_gos_level3_lamp_status = (
+                    self.constants.solar_gain_gos_level3_lamp_status
+                )
+                task_specific_gos_polarizer_status = self.constants.solar_gain_gos_polarizer_status
+                task_specific_gos_polarizer_angle = self.constants.solar_gain_gos_polarizer_angle
+                task_specific_gos_retarder_status = self.constants.solar_gain_gos_retarder_status
+                task_specific_gos_retarder_angle = self.constants.solar_gain_gos_retarder_angle
+                task_specific_gos_level0_status = self.constants.solar_gain_gos_level0_status
+            case TaskName.polcal:
+                task_specific_observing_program_execution_id = (
+                    self.constants.polcal_observing_program_execution_ids
+                )
+
+                task_specific_date_begin = self.constants.polcal_date_begin
+                task_specific_date_end = self.constants.polcal_date_end
+                task_specific_raw_frames_per_fpa = self.constants.polcal_num_raw_frames_per_fpa
+                task_specific_telescope_tracking_mode = (
+                    self.constants.polcal_telescope_tracking_mode
+                )
+                task_specific_coude_table_tracking_mode = (
+                    self.constants.polcal_coude_table_tracking_mode
+                )
+                task_specific_telescope_scanning_mode = (
+                    self.constants.polcal_telescope_scanning_mode
+                )
+                task_specific_average_light_level = self.constants.polcal_average_light_level
+                task_specific_average_telescope_elevation = (
+                    self.constants.polcal_average_telescope_elevation
+                )
+                task_specific_average_coude_table_angle = (
+                    self.constants.polcal_average_coude_table_angle
+                )
+                task_specific_average_telescope_azimuth = (
+                    self.constants.polcal_average_telescope_azimuth
+                )
+                task_specific_gos_level3_status = None
+                task_specific_gos_level3_lamp_status = None
+                task_specific_gos_polarizer_status = None
+                task_specific_gos_polarizer_angle = None
+                task_specific_gos_retarder_status = None
+                task_specific_gos_retarder_angle = None
+                task_specific_gos_level0_status = None
+            case _:
+                raise ValueError(f"Unsupported task type {task_type}")
+
+        start_datetime = datetime.fromisoformat(task_specific_date_begin)
+        end_datetime = datetime.fromisoformat(task_specific_date_end)
+
+        dataset_extra_header = {
+            DatasetExtraHeaderSection.common: {
+                "BUNIT": "count",
+                "DATE": Time.now().fits,
+                "DATE-BEG": task_specific_date_begin,
+                "DATE-END": task_specific_date_end,
+                "TELAPSE": (end_datetime - start_datetime).total_seconds(),
+                "DATE-AVG": (start_datetime + (end_datetime - start_datetime) / 2).isoformat(),
+                "TIMESYS": "UTC",
+                "ORIGIN": "National Solar Observatory",
+                "TELESCOP": "Daniel K. Inouye Solar Telescope",
+                "OBSRVTRY": "Haleakala High Altitude Observatory Site",
+                "NETWORK": "NSF-DKIST",
+                "INSTRUME": self.constants.instrument,
+                "OBJECT": "unknown",
+                "CAM_ID": self.constants.camera_id,
+                "CAMERA": self.constants.camera_name,
+                "BITDEPTH": self.constants.camera_bit_depth,
+                "XPOSURE": total_exposure,
+                "TEXPOSUR": readout_exposure,
+                "HWBIN1": self.constants.hardware_binning_x,
+                "HWBIN2": self.constants.hardware_binning_y,
+                "SWBIN1": self.constants.software_binning_x,
+                "SWBIN2": self.constants.software_binning_y,
+                "NSUMEXP": task_specific_raw_frames_per_fpa,
+                "DSETID": self.constants.dataset_id,
+                "PROCTYPE": "L1_EXTRA",
+                "RRUNID": self.recipe_run_id,
+                "RECIPEID": self.metadata_store_recipe_run.recipeInstance.recipeId,
+                "RINSTID": self.metadata_store_recipe_run.recipeInstanceId,
+                "FILENAME": filename,
+                "HEAD_URL": "",
+                "INFO_URL": self.docs_base_url,
+                "CAL_URL": "",
+                "CALVERS": self.version_from_module_name(),
+                "IDSPARID": (
+                    parameters.inputDatasetPartId
+                    if (parameters := self.metadata_store_input_dataset_parameters)
+                    else None
+                ),
+                "IDSOBSID": (
+                    observe_frames.inputDatasetPartId
+                    if (observe_frames := self.metadata_store_input_dataset_observe_frames)
+                    else None
+                ),
+                "IDSCALID": (
+                    calibration_frames.inputDatasetPartId
+                    if (calibration_frames := self.metadata_store_input_dataset_calibration_frames)
+                    else None
+                ),
+                "WKFLVERS": self.workflow_version,
+                "WKFLNAME": self.workflow_name,
+                "MANPROCD": self.workflow_had_manual_intervention,
+                "FILE_ID": uuid.uuid4().hex,
+                "OBSPR_ID": task_specific_observing_program_execution_id[
+                    0
+                ],  # The OP IDs are stored sorted by number of appearances of each OP ID in the source task type frames
+                "EXTOBSID": ",".join(task_specific_observing_program_execution_id[1:]),
+                "EXPER_ID": self.constants.experiment_id,
+                "PROP_ID": self.constants.proposal_id,
+                "HLSVERS": self.constants.hls_version,
+                "LINEWAV": self.constants.wavelength,
+                "TELTRACK": (
+                    task_specific_telescope_tracking_mode if task_type != TaskName.dark else None
+                ),
+                "TTBLTRCK": (
+                    task_specific_coude_table_tracking_mode if task_type != TaskName.dark else None
+                ),
+                "TELSCAN": (
+                    task_specific_telescope_scanning_mode if task_type != TaskName.dark else None
+                ),
+                "EXTNAME": extra_name,
+            },
+            DatasetExtraHeaderSection.aggregate: {
+                "AVGLLVL": task_specific_average_light_level,
+                "ATELEVAT": task_specific_average_telescope_elevation,
+                "ATTBLANG": task_specific_average_coude_table_angle,
+                "ATAZIMUT": task_specific_average_telescope_azimuth,
+            },
+            DatasetExtraHeaderSection.iptask: {
+                "IPTASK": "GAIN" if "GAIN" in task_type else task_type,
+            },
+            DatasetExtraHeaderSection.gos: {
+                "LVL3STAT": task_specific_gos_level3_status,
+                "LAMPSTAT": task_specific_gos_level3_lamp_status,
+                "LVL2STAT": task_specific_gos_polarizer_status,
+                "POLANGLE": task_specific_gos_polarizer_angle,
+                "LVL1STAT": task_specific_gos_retarder_status,
+                "RETANGLE": task_specific_gos_retarder_angle,
+                "LVL0STAT": task_specific_gos_level0_status,
+            },
+        }
+
+        # Remove specific headers from dark frames as they don't constants to fill them
+        if task_type == TaskName.dark:
+            for key in ["TELTRACK", "TTBLTRCK", "TELSCAN"]:
+                del dataset_extra_header[DatasetExtraHeaderSection.common][key]
+
+        # Remove specific headers from polcal frames as they don't have constants to fill them
+        if task_type == TaskName.polcal:
+            for key in [
+                "LVL3STAT",
+                "LAMPSTAT",
+                "LVL2STAT",
+                "POLANGLE",
+                "LVL1STAT",
+                "RETANGLE",
+                "LVL0STAT",
+            ]:
+                del dataset_extra_header[DatasetExtraHeaderSection.gos][key]
+
+        return dataset_extra_header
+
+    def build_dataset_extra_header(
+        self,
+        sections: list[DatasetExtraHeaderSection],
+        filename: str,
+        task_type: TaskName,
+        extra_name: DatasetExtraType,
+        total_exposure: float | None = None,
+        readout_exposure: float | None = None,
+    ) -> fits.Header:
+        """Build FITS header for dataset extra file."""
+        header = fits.Header()
+        all_section_headers = self.dataset_extra_headers(
+            filename=filename,
+            task_type=task_type,
+            total_exposure=total_exposure,
+            readout_exposure=readout_exposure,
+            extra_name=extra_name,
+        )
+        for section in sections:
+            header.update(all_section_headers[section].items())
+        return header
+
+    def format_extra_filename(self, extra_name: DatasetExtraType | str, detail: str | None = None):
+        """Format the filename of dataset extras for consistency."""
+        base_filename = f"{self.constants.instrument}_{self.constants.dataset_id}_{extra_name.replace(' ', '-')}"
+        if detail:
+            base_filename += "_" + detail
+        filename_counter = str(self.filename_counter.increment(base_filename))
+        return f"{base_filename}_{filename_counter}.fits"
+
+    def assemble_and_write_dataset_extra(
+        self,
+        data: np.ndarray | list[np.ndarray],
+        header: fits.Header | list[fits.Header],
+        filename: str,
+    ):
+        """Given the data and header information, write the dataset extra."""
+        if isinstance(data, list) and isinstance(header, list):
+            if len(data) != len(header):
+                raise ValueError(
+                    f"{len(data)} data arrays were provided with {len(header)} headers. These must be equal."
+                )
+        if isinstance(data, np.ndarray):
+            data = [data]
+        if isinstance(header, fits.Header):
+            header = [header]
+        hdus = [fits.PrimaryHDU()]  # The first HDU in the list is an empty PrimaryHDU
+        for i, data_array in enumerate(data):
+            tile_size = self.compute_tile_size_for_array(data_array)
+            hdu = fits.CompImageHDU(header=header[i], data=data_array, tile_shape=tile_size)
+            formatted_header = reformat_dataset_extra_header(hdu.header)
+            hdu = fits.CompImageHDU(header=formatted_header, data=hdu.data, tile_shape=tile_size)
+            hdus.append(hdu)
+        self.write(
+            data=fits.HDUList(hdus),
+            tags=[Tag.extra(), Tag.output()],
+            encoder=fits_hdulist_encoder,
+            relative_path=filename,
+        )
+        self.update_framevol(relative_path=filename)
+
+        # Check that the written file passes spec 214 validation if requested
+        if self.validate_l1_on_write:
+            spec_extras_validator.validate(self.scratch.absolute_path(filename), extra=False)
+
+    @abstractmethod
+    def run(self) -> None:
+        """
+        For each dataset extra.
+
+        * Gather the source data in whatever manner is necessary
+        * Build a header using the `build_dataset_extra_header` method to help with header construction
+        * Write the dataset extra using `assemble_and_write_dataset_extra()`
+        """