lsst-pipe-base 30.0.0rc3__tar.gz → 30.0.1rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lsst_pipe_base-30.0.0rc3/python/lsst_pipe_base.egg-info → lsst_pipe_base-30.0.1rc1}/PKG-INFO +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/CHANGES.rst +42 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-pipeline.rst +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-pipelinetask.rst +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-task.rst +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/index.rst +12 -0
- lsst_pipe_base-30.0.1rc1/doc/lsst.pipe.base/recording-provenance.rst +108 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/task-framework-overview.rst +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/working-with-pipeline-graphs.rst +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/pyproject.toml +4 -12
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_instrument.py +25 -15
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_quantumContext.py +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_status.py +43 -10
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_task_metadata.py +2 -2
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/all_dimensions_quantum_graph_builder.py +8 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/automatic_connection_constants.py +20 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/cmd/__init__.py +18 -2
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/cmd/commands.py +149 -4
- lsst_pipe_base-30.0.1rc1/python/lsst/pipe/base/connectionTypes.py +279 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/connections.py +6 -9
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/execution_reports.py +0 -5
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/graph.py +11 -10
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/quantumNode.py +4 -4
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph_walker.py +8 -10
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/log_capture.py +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/log_on_close.py +4 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline.py +5 -6
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipelineIR.py +2 -8
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipelineTask.py +5 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_dataset_types.py +2 -2
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_edges.py +32 -22
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_mapping_views.py +4 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py +14 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/expressions.py +2 -2
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/io.py +7 -10
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_dot.py +13 -12
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_layout.py +16 -18
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_merge.py +4 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_printer.py +10 -10
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +7 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/prerequisite_helpers.py +2 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/_common.py +15 -17
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/_multiblock.py +36 -20
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/_predicted.py +7 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/_provenance.py +501 -61
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/__init__.py +0 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_communicators.py +187 -240
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_config.py +87 -9
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_ingester.py +13 -12
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_scanner.py +15 -7
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_structs.py +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_supervisor.py +19 -34
- lsst_pipe_base-30.0.1rc1/python/lsst/pipe/base/quantum_graph/aggregator/_workers.py +303 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_writer.py +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/formatter.py +74 -4
- lsst_pipe_base-30.0.1rc1/python/lsst/pipe/base/quantum_graph/ingest_graph.py +413 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/visualization.py +5 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph_builder.py +21 -8
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph_skeleton.py +31 -29
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_provenance_graph.py +29 -12
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/separable_pipeline_executor.py +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/single_quantum_executor.py +15 -8
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/struct.py +4 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/testUtils.py +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/mocks/_storage_class.py +2 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/version.py +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1/python/lsst_pipe_base.egg-info}/PKG-INFO +3 -3
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/SOURCES.txt +3 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_aggregator.py +478 -176
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_configOverrides.py +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_init_output_run.py +1 -1
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_separable_pipeline_executor.py +72 -2
- lsst_pipe_base-30.0.0rc3/python/lsst/pipe/base/connectionTypes.py +0 -367
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/COPYRIGHT +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/LICENSE +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/MANIFEST.in +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/README.md +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/bsd_license.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/task-retargeting-howto.rst +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/testing-a-pipeline-task.rst +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/testing-pipelines-with-mocks.rst +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/gpl-v3.0.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_datasetQueryConstraints.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_dataset_handle.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_observation_dimension_packer.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/caching_limited_butler.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/_get_cli_subcommands.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/opt/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/opt/arguments.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/cli/opt/options.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/config.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/configOverrides.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/dot_tools.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/exec_fixup_data_id.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/execution_graph_fixup.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/formatters/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/formatters/pexConfig.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/_implDetails.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/_loadHelpers.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/_versionDeserializers.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/graph/graphSummary.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/mermaid_tools.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/mp_graph_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/__main__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_exceptions.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_nodes.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_task_subsets.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/_tasks.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_formatting.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_mermaid.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_options.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/pipeline_graph/visualization/_show.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/py.typed +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph/aggregator/_progress.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_graph_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/quantum_reports.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/resource_usage.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/register_instrument.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/retrieve_artifacts_for_quanta.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/transfer_from_graph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/utils.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/script/zip_from_graph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/simple_pipeline_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/task.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/taskFactory.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/in_memory_limited_butler.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/mocks/__init__.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/mocks/_data_id_match.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/mocks/_pipeline_task.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/mocks/_repo.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/no_dimensions.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/pipelineStepTester.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/simpleQGraph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/tests/util.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/utils.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/dependency_links.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/entry_points.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/requires.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/top_level.txt +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst_pipe_base.egg-info/zip-safe +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/setup.cfg +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_adjust_all_quanta.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_caching_limited_butler.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_cliCmdRegisterInstrument.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_config_formatter.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_connections.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_dataid_match.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_dataset_handle.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_deferredDatasetRef.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_dot_tools.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_dynamic_connections.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_execution_reports.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_execution_storage_class_conversion.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_graphBuilder.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_graph_walker.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_instrument.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_mermaid.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_mp_graph_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipeline.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipelineIR.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipelineLoadSubset.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipelineTask.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipeline_graph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_pipeline_graph_expressions.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_predicted_qg.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_qg_builder_dimensions.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_quantumGraph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_quantum_provenance_graph.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_quantum_reports.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_quantum_success_caveats.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_script_utils.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_simple_pipeline_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_single_quantum_executor.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_struct.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_task.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_task_factory.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_taskmetadata.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_testUtils.py +0 -0
- {lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/tests/test_utils.py +0 -0
{lsst_pipe_base-30.0.0rc3/python/lsst_pipe_base.egg-info → lsst_pipe_base-30.0.1rc1}/PKG-INFO
RENAMED
@@ -1,20 +1,20 @@
 Metadata-Version: 2.4
 Name: lsst-pipe-base
-Version: 30.0.0rc3
+Version: 30.0.1rc1
 Summary: Pipeline infrastructure for the Rubin Science Pipelines.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License-Expression: BSD-3-Clause OR GPL-3.0-or-later
 Project-URL: Homepage, https://github.com/lsst/pipe_base
+Project-URL: Source, https://github.com/lsst/pipe_base
 Keywords: lsst
 Classifier: Intended Audience :: Science/Research
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: 3.14
 Classifier: Topic :: Scientific/Engineering :: Astronomy
-Requires-Python: >=3.11.0
+Requires-Python: >=3.12.0
 Description-Content-Type: text/markdown
 License-File: COPYRIGHT
 License-File: LICENSE
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/CHANGES.rst
RENAMED
@@ -1,3 +1,42 @@
+lsst-pipe-base v30.0.1 (2026-02-03)
+===================================
+
+Dropped support for Python 3.11.
+Tested on Python 3.14.
+
+New Features
+------------
+
+- Added the ``butler ingest-provenance`` command to ingest the provenance quantum graphs written by ``butler aggregate-graph``. (`DM-52738 <https://rubinobs.atlassian.net/browse/DM-52738>`_)
+- Add report tooling (similar to ``pipetask report``, but faster) to the ``ProvenanceQuantumGraph`` class and the ``butler`` command line (as the new ``provenance-report`` subcommand).
+
+  Updatec ``butler ingest-graph`` to delete now-empty config/metadata/log directories.
+
+  Fix a bug that causes provenance quantum graphs to fail to be written if the address file grew to larger than 2G. (`DM-53851 <https://rubinobs.atlassian.net/browse/DM-53851>`_)
+
+
+API Changes
+-----------
+
+- Modified ``Instrument.importAll`` so that it now returns information on the instrument classes that were loaded successfully. (`DM-53882 <https://rubinobs.atlassian.net/browse/DM-53882>`_)
+
+
+Bug Fixes
+---------
+
+- Fixed ``aggregator-graph`` handling of killed subprocesses to avoid hangs.
+
+  Fixed a race condition in ``aggregate-graph`` that could cause hanging at shutdown on small graphs. (`DM-53913 <https://rubinobs.atlassian.net/browse/DM-53913>`_)
+- Fixed a bug that caused provenance recording to fail in ``SeparablePipelineExecutor`` when ``NoWorkFound`` chaining occurs. (`DM-53977 <https://rubinobs.atlassian.net/browse/DM-53977>`_)
+
+
+Other Changes and Additions
+---------------------------
+
+- Now guard against quantum graph builds in which the initial query produces many more result rows than end up in the final graph, which can lead to catastrophically large dimension data storage in the QG file. (`DM-53773 <https://rubinobs.atlassian.net/browse/DM-53773>`_)
+- Bumped minimum Python version to 3.12. (`DM-53998 <https://rubinobs.atlassian.net/browse/DM-53998>`_)
+
+
 lsst-pipe-base v30.0.0 (2026-01-16)
 ===================================
 
@@ -131,7 +170,7 @@ New Features
   Exceptions that lead to task failures are not yet tracked, because we do not write task metadata for failures and hence have nowhere to put the information. (`DM-48536 <https://rubinobs.atlassian.net/browse/DM-48536>`_)
 - Swapped to the new butler query system in ``QuantumGraph`` generation.
 
-  This change should be mostly transparent to users, aside from small changes in speed (typically faster, but not always). (`DM-45896 <https://rubinobs.atlassian.net/browse/DM-45896>`)
+  This change should be mostly transparent to users, aside from small changes in speed (typically faster, but not always). (`DM-45896 <https://rubinobs.atlassian.net/browse/DM-45896>`_)
 
 Bug Fixes
 ---------
@@ -175,7 +214,7 @@ New Features
   For each graph/attempt, the status of each quantum and dataset is recorded in ``QuantumProvenanceGraph.add_new_graph`` and outcomes of quanta over multiple runs are resolved in ``QuantumProvenanceGraph.resolve_duplicates``.
   At the end of this process, we can combine all attempts into a summary.
   This serves to answer the question "What happened to this data ID?" in a holistic sense. (`DM-41711 <https://rubinobs.atlassian.net/browse/DM-41711>`_)
-- Included the number of expected instances in ``pipetask report`` task-level summary for the
+- Included the number of expected instances in ``pipetask report`` task-level summary for the ``QuantumGraphExecutionReport``. (`DM-44368 <https://rubinobs.atlassian.net/browse/DM-44368>`_)
 - Added mocking support for tasks that write regular datasets with config, log, or metadata storage classes. (`DM-44583 <https://rubinobs.atlassian.net/browse/DM-44583>`_)
 - Added new ``show_dot`` functionality.
 
@@ -251,7 +290,7 @@ New Features
   This interface is available through YAML pipeline specification by specifying the ``labeledSubsetModifyMode`` key when writing YAML import defectives.
 
   New Python interfaces were added for manipulating labeled subsets in a pipeline.
-  These include; ``Pipeline.subsets`` which is a property returning a `dict
+  These include; ``Pipeline.subsets`` which is a property returning a `dict` of subset labels to sets of task labels, ``Pipeline.addLabeledSubset`` to add a new labeled subset to a ``Pipeline``, and ``Pipeline.removeLabeledSubset`` to remove a labeled subset from a pipeline. (`DM-41203 <https://rubinobs.atlassian.net/browse/DM-41203>`_)
 - Added ``QuantumGraph`` summary. (`DM-41542 <https://rubinobs.atlassian.net/browse/DM-41542>`_)
 - Added human-readable option to report summary dictionaries. (`DM-41606 <https://rubinobs.atlassian.net/browse/DM-41606>`_)
 - Added a section to pipelines which allows the explicit declaration of which susbsets correspond to steps and the dimensions the step's quanta can be sharded with. (`DM-41650 <https://rubinobs.atlassian.net/browse/DM-41650>`_)
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-pipeline.rst
RENAMED
@@ -150,12 +150,12 @@ associated with ``class`` keyword instead of the label directly. The
 the configuration appropriate for this `Pipeline` specified as an additional
 yaml mapping.
 
-The complete complexity of
+The complete complexity of `lsst.pex.config` can't be represented with simple
 yaml mapping syntax. To account for this, ``config`` blocks in `Pipeline`\ s
 support two special fields: ``file`` and ``python``.
 
 The ``file`` key may be associated with either a single value pointing to a
-filesystem path where a
+filesystem path where a `lsst.pex.config` file can be found, or a yaml list
 of such paths. The file paths can contain environment variables that will be
 expanded prior to loading the file(s). These files will then be applied to
 the task during configuration time to override any default values.
@@ -477,7 +477,7 @@ desired camera, or can serve as a base for further `Pipeline`\ s to import.
 Command line options for running Pipelines
 ------------------------------------------
 This section is not intended to serve as a tutorial for processing data from
-the command line, for that refer to
+the command line, for that refer to `lsst.ctrl.mpexec` or `lsst.ctrl.bps`.
 However, both of these tools accept URI pointers to a `Pipeline`. These URIs
 can be altered with a specific syntax which will control how the `Pipeline`
 is loaded.
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-pipelinetask.rst
RENAMED
@@ -142,7 +142,7 @@ not tied to the exact band passes of an individual telescope filter).
 
 Next, take a look at the fields defined on your new connection class. These
 are defined in a similar way as defining a configuration class, but instead
-of using `~lsst.pex.config.Field` types from
+of using `~lsst.pex.config.Field` types from `lsst.pex.config`,
 connection classes make use of connection types defined in
 :py:mod:`lsst.pipe.base.connectionTypes`. These connections define the inputs and outputs that
 a |PipelineTask| will expect to make use of. Each of these connections documents
@@ -471,9 +471,9 @@ connection had never existed.
 Run-time optional inputs
 ------------------------
 
-A separate mechanism exists that allows an
+A separate mechanism exists that allows an ``Input`` connection to be made
 run-time optional.
-If the
+If the ``Input.minimum`` attribute is initialized to zero for a connection,
 graph-building will still generate a quantum, and the `PipelineTask` will
 be run, even if no dataset for that input can be found.
 
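As a concrete illustration of the run-time optional input mechanism described in the hunk above, here is a minimal sketch (not taken from the package; the dataset type name, storage class, and dimensions are illustrative assumptions):

    import lsst.pipe.base.connectionTypes as cT
    from lsst.pipe.base import PipelineTaskConnections


    class ExampleConnections(PipelineTaskConnections, dimensions=("visit", "detector")):
        # With minimum=0 the quantum is still generated and the task is still
        # run even if no dataset for this input can be found.
        background = cT.Input(
            doc="Optional background model to subtract if available.",
            name="calexpBackground",
            storageClass="Background",
            dimensions=("visit", "detector"),
            minimum=0,
        )

The task's ``run`` method then has to tolerate receiving an empty list (or `None`, depending on multiplicity) for that connection.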
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/creating-a-task.rst
RENAMED
@@ -145,7 +145,7 @@ Use the ``__init__`` method (task constructor) to do the following:
 
 - Call the parent task's ``__init__`` method
 - Make subtasks by calling ``self.makeSubtask(name)``, where ``name`` is the name of a field of type `lsst.pex.config.ConfigurableField` in your :ref:`task's configuration <creating-a-task-configuration>`.
-- Make a schema if your task uses an
+- Make a schema if your task uses an `lsst.afw.table`.
   For an example of such a task `lsst.pipe.tasks.calibrate.CalibrateTask`.
 - Initialize any other instance variables your task needs.
 
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/index.rst
RENAMED
@@ -61,6 +61,14 @@ Developing Pipelines
    testing-pipelines-with-mocks.rst
    working-with-pipeline-graphs.rst
 
+Running Pipelines
+-----------------
+
+.. toctree::
+   :maxdepth: 1
+
+   recording-provenance.rst
+
 .. _lsst.pipe.base-contributing:
 
 Contributing
@@ -102,6 +110,10 @@ Python API reference
 
 .. automodapi:: lsst.pipe.base.quantum_graph
 
+.. automodapi:: lsst.pipe.base.quantum_graph.aggregator
+
+.. automodapi:: lsst.pipe.base.quantum_graph.ingest_graph
+
 .. automodapi:: lsst.pipe.base.quantum_graph.visualization
 
 QuantumGraph generation API reference
lsst_pipe_base-30.0.1rc1/doc/lsst.pipe.base/recording-provenance.rst
ADDED
@@ -0,0 +1,108 @@
+.. _pipe_base_provenance:
+
+.. py:currentmodule:: lsst.pipe.base.quantum_graph
+
+####################
+Recording Provenance
+####################
+
+The `PredictedQuantumGraph` that is used to predict and control processing also contains a wealth of provenance information, including task configuration and the complete input-output relationships between all datasets.
+Instead of storing these graphs directly in a `~lsst.daf.butler.Butler` repository, however, it is better to first augment them with additional provenance information that is only available after execution has completed, producing a `ProvenanceQuantumGraph` that is ingested instead.
+We store provenance in a ``run_provenance`` dataset type with empty dimensions, which means there is exactly one for each `~lsst.daf.butler.CollectionType.RUN` collection.
+In addition to the input-output graph itself and full configuration for all tasks, `ProvenanceQuantumGraph` stores status information for each attempt to run a quantum, including exception information and caveats on any successes.
+It can also store the full logs and task metadata for each quantum, allowing repositories to store many fewer small files (it is possible to continue to have per-quantum butler datasets for these, all backed by the same file).
+
+The pipeline system has many different execution contexts, and provenance recording is not supported in all of them at this time.
+
+Batch Execution / Quantum-Backed Butler
+=======================================
+
+Provenance recording is fully supported in batch workflows that use the `~lsst.daf.butler.QuantumBackedButler` class (e.g. ``pipetask run-qbb``, as run by the ``bps`` tool) to avoid database writes during execution.
+This involves the following steps:
+
+- A `PredictedQuantumGraph` is generated as usual (e.g. via ``pipetask qgraph``, as run by ``bps submit``) and saved to a known location.
+- All quanta are executed via ``pipetask run-qbb``, writing their outputs to butler-managed storage without updating the butler database.
+- When all quanta have been attempted, the ``butler aggregate-graph`` tool is run (e.g. in the BPS ``finalJob``) to ingest output datasets into the butler database, and the ``--output`` option is used to save a `ProvenanceQuantumGraph` to a known location.
+  This step and the previous one may be run multiple times (e.g. via ``bps restart``) to retry some failures, and it is only necessary to pass ``--output`` the last time (though usually the user does not know which attempt will be the last one).
+- When all processing attempts are complete, the ``butler ingest-graph`` tool is used to ingest the graph into the butler database and rewrite all metadata, log, and config datasets to also be backed by the same graph file (deleting the original files).
+  This step should not be included in the BPS ``finalJob`` (see below).
+
+All of the above happens in a single `~lsst.daf.butler.CollectionType.RUN` collection.
+Reference documentation for ``butler aggregate-graph`` and ``butler ingest-graph`` can be found in the `aggregator` and `ingest_graph` modules that implement them (respectively); in both cases there are Python interfaces that closely mirror the command-line ones.
+
+Parallelization
+---------------
+
+Aggregating and ingesting a large batch run is expensive, and both tools use parallelism whenever possible to improve performance.
+
+The aggregator in particular is explicitly parallel, with separate workers (usually subprocesses) assigned to scan and read metadata and log files (any number of workers), ingest datasets (a single worker), write the provenance graph file (a single worker), and coordinate all of these operations.
+Since all information must be passed from the scanners to the ingestion and writer workers, additional parallelism can help when all operations are running at around the same speed (as reported in the logs), but not when ingestion or writing lags significantly behind.
+The writer process has substantial startup overhead and will typically lag the others at the beginning before it catches up later.
+
+The `ingest_graph` tool mostly performs database write operations, which do not benefit from parallelism, but it also deletes the original metadata, log, and config files as the new graph-backed variants of those datasets are ingested.
+These deletes are delegated to `lsst.resources.ResourcePath.mremove`, which refers to the ``LSST_RESOURCES_NUM_WORKERS``, ``LSST_RESOURCES_EXECUTOR``, and ``LSST_S3_USE_THREADS`` environment variables to control parallelism.
+As with other butler bulk-delete operations, the default parallelism is usually fine.
+
+.. note::
+
+   Earlier versions of the `aggregator` would run catastrophically slowly when ``LSST_RESOURCES_EXECUTOR=process``, as this made each scanner process spawn multiple subprocesses constantly.
+   In recent versions all parallelism environment variables are ignored by the aggregator so this should not occur.
+
+Ingesting Outputs Early
+-----------------------
+
+The `aggregator` may be run with `~aggregator.AggregatorConfig.incomplete` set to `True` (``--incomplete`` on the command line) to allow it to be safely run before the graph has finished executing.
+Note that while ingestion always picks up where it left off, scanning always has to start at the beginning, and provenance graph writing is disabled when running in ``incomplete`` mode, so while this allows output datasets be be available via the `~lsst.daf.butler.Butler` sooner, it does not generally make the final complete `aggregator` call substantially faster.
+
+Promising Graph Ingestion
+-------------------------
+
+By default, the `aggregator` ingests all metadata, log, and config outputs into the butler database in the usual way, i.e. backed by their original individual files.
+The `ingest_graph` tool then has to delete these datasets from the butler database before it can ingest new ones and delete the original files.
+When it is known in advance that `ingest_graph` will be run later, the `~aggregator.AggregatorConfig.promise_ingest_graph` (``--promise-ingest-graph``) option can be used to tell the `aggregator` *not* to ingest these, saving time for both commands.
+This option must be used with care, however: if `ingest_graph` isn't run later, the original files will be orphaned in a butler-managed location without any record in the database, which generally means they'll quietly take up space.
+In addition, because the metadata datasets are used by the middleware system as the indicator of a quantum's success, their absence will make any downstream quantum graphs built using ``--skip-existing-in`` incorrect.
+And of course any downstream quantum graph builds that actually use those datasets as input (only metadata should be) will not see them as available.
+
+Deferring Graph Ingestion
+-------------------------
+
+Ingesting the provenance graph is not generally necessary to kick off downstream processing by building new quantum graphs for later pipeline steps, and it is always safe to build downstream quantum graphs if `~aggregator.AggregatorConfig.promise_ingest_graph` is left `False`.
+It can also be done safely if `~aggregator.AggregatorConfig.promise_ingest_graph` is `True` and:
+
+- ``--skip-existing-in`` is not used;
+- the downstream processing does not use metadata, log, or config datasets as an overall input (``pipetask build ... --show inputs`` can be used to check for this).
+
+These conditions also must be met in order for `ingest_graph` to be safely run *while* a downstream quantum graph is being executed.
+Both of these conditions are *usually* met, and deferring and promising graph ingest each provide significant wall-clock savings, so we recommend the following approach for very large BPS campaigns:
+
+- Submit ``step(N)`` to BPS with ``--promise-ingest-graph`` in the ``finalJob`` invocation of ``aggregate-graph``.
+- When ready to move on to ``step(N+1)``, run ``pipetask build ... --show inputs`` (on ``step(N+1)``) to scan for metadata, log, and config inputs that may be needed from the previous step.
+- If there are no such inputs, immediately submit that step to BPS, and run `ingest_graph` on ``step(N)`` as soon as the quantum graph for ``step(N+1)`` is built (it could be built at the same time, but waiting a bit may help spread out database load).
+- If there are metadata, log, or config inputs, run `ingest_graph` on ``step(N)`` and wait for it to finish before submitting ``step(N+1)``.
+
+Note that *independent* quantum graph builds (e.g. same tasks, disjoint data IDs) can always be built before or while `ingest_graph` runs.
+
+Recovering from Interruptions
+-----------------------------
+
+If the `aggregator` is interrupted it can simply be started again.
+Database ingestion will pick up where it left off, while scanning and provenance-graph writing will start over from the beginning.
+
+If `ingest_graph` is interrupted, it can also be started again, and everything will pick up where it left off.
+To guarantee this it always modifies the repository in the following order:
+
+- if the ``run_provenance`` dataset does not exist in the collection, all existing metadata/log/config datasets are assumed to be backed by their original files and are deleted from the butler database (without deleting the files);
+- the ``run_provenance`` dataset itself is ingested (this ensures the metadata/log/config *content* is safe inside the butler, even if it's not fully accessible);
+- in batches, metadata/log/config datasets are reingested into the butler backed by the graph file, and then the corresponding original files are deleted.
+
+This means we can use the existence of ``run_provenance`` and any particular metadata/log/config dataset in the butler database to infer the status of the original files.
+
+In fact, if `ingest_graph` is interrupted at any point, it *must* be tried again until it succeeds, since not doing so can leave metadata/log/config files orphaned, just like when `~aggregator.AggregatorConfig.promise_ingest_graph` is `True`.
+
+.. note::
+
+   After the ``run_provenance`` dataset is ingested, it is *not* safe to run the `aggregator`: the `aggregator` reads the original metadata and log files to gather provenance information, and will infer the wrong states for quanta if those are missing because `ingest_graph` has deleted them.
+
+   This is why it is not safe to run ``bps restart`` after `ingest_graph`, and why we do not recommend adding `ingest_graph` to the BPS ``finalJob``, even if the user is willing to forgo using ``bps restart``: by default, the ``finalJob`` will be retried on failure, causing the `aggregator` to run again when it may not be safe to do so.
+   And if ``finalJob`` retries are disabled, it is too easy for the repository to end up in a state that would require manual `ingest_graph` runs to prevent orphan datasets.
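The recovery ordering described in the new document implies that the presence of the ``run_provenance`` dataset can be used to decide whether re-running the aggregator is still safe. A hedged sketch of that check (the repository path and run-collection name are assumptions, and it presumes the ``run_provenance`` dataset type has already been registered in the repository):

    from lsst.daf.butler import Butler

    butler = Butler("/path/to/repo")  # assumed repository path
    run = "u/example/step1/20260203T000000Z"  # assumed RUN collection name

    # Per the recovery rules above, once run_provenance exists in the RUN
    # collection, ingest-graph has started and the aggregator must not be re-run.
    refs = list(butler.registry.queryDatasets("run_provenance", collections=run))
    if refs:
        print("run_provenance present: do not run aggregate-graph or bps restart again")
    else:
        print("run_provenance absent: aggregate-graph / bps restart are still safe")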
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/task-framework-overview.rst
RENAMED
@@ -25,14 +25,14 @@ A common use for this is to provide a camera-specific variant of a particular ta
 Tasks may process multiple items of data in parallel, using the ``ctrl_mpexec`` package and its ``pipetask`` command.
 
 Most tasks have a ``run`` method that performs the primary data processing.
-Each task's
+Each task's `~lsst.pipe.base.Task.run` method should return a `~lsst.pipe.base.Struct`.
 This allows named access to returned data, which provides safer evolution than relying on the order of returned values.
 All task methods that return more than one or two items of data should return the data in a `~lsst.pipe.base.Struct`.
 
 Many tasks are found in the ``pipe_tasks`` package, especially tasks that use many different packages and don't seem to belong in any one of them.
-Tasks that are associated with a particular package should be in that package; for example the instrument signature removal task ``ip.isr.isrTask.IsrTask`` is in the ``ip_isr`` package.
+Tasks that are associated with a particular package should be in that package; for example the instrument signature removal task ``lsst.ip.isr.isrTask.IsrTask`` is in the ``ip_isr`` package.
 
-
+``pipe_base`` is written purely in Python. The most important contents are:
 
 - `~lsst.pipe.base.PipelineTask`: base class for pipeline tasks that can be run from pipelines.
 - `~lsst.pipe.base.Task`: base class for subtasks that are not meant to be run from the
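A minimal example of the `~lsst.pipe.base.Struct` return convention the hunk above refers to (the field names are arbitrary):

    from lsst.pipe.base import Struct

    # run() methods return named fields rather than a tuple, so callers are
    # insulated from changes in the number or order of returned values.
    result = Struct(total=10.0, mean=2.5)
    print(result.total, result.mean)
    print(result.getDict())  # plain dict view of the same fields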
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/doc/lsst.pipe.base/working-with-pipeline-graphs.rst
RENAMED
@@ -64,7 +64,7 @@ In some cases it may be more convenient to add tasks to an existing `PipelineGra
 
 **The most important thing to remember when modifying `PipelineGraph` objects is that modifications typically reset some or all of the graph to an unresolved state.**
 
-The reference documentation for these methods describes exactly what guarantees they make about existing resolutions in detail, and what operations are still supported on unresolved or partially-resolved graphs, but it is easiest to just ensure
+The reference documentation for these methods describes exactly what guarantees they make about existing resolutions in detail, and what operations are still supported on unresolved or partially-resolved graphs, but it is easiest to just ensure ``resolve`` is called after any modifications are complete.
 
 `PipelineGraph` mutator methods provide strong exception safety (the graph is left unchanged when an exception is raised and caught by calling code) unless the exception type raised is `PipelineGraphExceptionSafetyError`.
 
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/pyproject.toml
RENAMED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "lsst-pipe-base"
-requires-python = ">=3.11.0"
+requires-python = ">=3.12.0"
 description = "Pipeline infrastructure for the Rubin Science Pipelines."
 license = "BSD-3-Clause OR GPL-3.0-or-later"
 license-files = ["COPYRIGHT", "LICENSE", "bsd_license.txt", "gpl-v3.0.txt"]
@@ -16,7 +16,6 @@ classifiers = [
     "Intended Audience :: Science/Research",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
     "Programming Language :: Python :: 3.14",
@@ -45,6 +44,7 @@ pipe_base = "lsst.pipe.base.cli:get_cli_subcommands"
 
 [project.urls]
 "Homepage" = "https://github.com/lsst/pipe_base"
+"Source" = "https://github.com/lsst/pipe_base"
 
 [project.optional-dependencies]
 test = ["pytest >= 3.2"]
@@ -101,15 +101,6 @@ version = { attr = "lsst_versions.get_lsst_version" }
 name = "An API Removal or Deprecation"
 showcontent = true
 
-[tool.black]
-line-length = 110
-target-version = ["py311"]
-
-[tool.isort]
-profile = "black"
-line_length = 110
-known_first_party = ["lsst"]
-
 [tool.lsst_versions]
 write_to = "python/lsst/pipe/base/version.py"
 
@@ -140,7 +131,7 @@ add-ignore = ["D107", "D105", "D102", "D100", "D200", "D205", "D400", "D104"]
 
 [tool.ruff]
 line-length = 110
-target-version = "py311"
+target-version = "py312"
 exclude = [
     "__init__.py",
 ]
@@ -204,6 +195,7 @@ checks = [
     "RT01",  # Unfortunately our @property trigger this.
     "RT02",  # Does not want named return value. DM style says we do.
     "SS05",  # pydocstyle is better at finding infinitive verb.
+    "RT03",  # sphinx bug requiring empty Returns.
 ]
 exclude = [
     '^__init__$',
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_instrument.py
RENAMED
@@ -35,7 +35,7 @@ from abc import ABCMeta, abstractmethod
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, Self, cast, final
 
-from lsst.daf.butler import DataCoordinate, DataId, DimensionPacker, DimensionRecord, Formatter
+from lsst.daf.butler import DataCoordinate, DataId, DimensionPacker, DimensionRecord, Formatter, FormatterV2
 from lsst.daf.butler.registry import DataIdError
 from lsst.pex.config import Config, RegistryField
 from lsst.resources import ResourcePath, ResourcePathExpression
@@ -68,7 +68,7 @@ class Instrument(metaclass=ABCMeta):
     configPaths: Sequence[ResourcePathExpression] = ()
     """Paths to config files to read for specific Tasks.
 
-    The paths in this list should contain files of the form
+    The paths in this list should contain files of the form ``task.py``, for
     each of the Tasks that requires special configuration.
     """
 
@@ -99,7 +99,8 @@ class Instrument(metaclass=ABCMeta):
 
     @abstractmethod
     def register(self, registry: Registry, *, update: bool = False) -> None:
-        """Insert instrument, and other relevant records into
+        """Insert instrument, and other relevant records into a butler
+        registry.
 
         Parameters
         ----------
@@ -109,6 +110,10 @@ class Instrument(metaclass=ABCMeta):
             If `True` (`False` is default), update existing records if they
             differ from the new ones.
 
+        Returns
+        -------
+        None
+
         Raises
         ------
         lsst.daf.butler.registry.ConflictingDefinitionError
@@ -127,13 +132,6 @@ class Instrument(metaclass=ABCMeta):
         the level of individual dimension entries; new detectors and filters
         should be added, but changes to any existing record should not be.
         This can generally be achieved via a block like
-
-        .. code-block:: python
-
-            with registry.transaction():
-                registry.syncDimensionData("instrument", ...)
-                registry.syncDimensionData("detector", ...)
-                self.registerFilters(registry)
         """
         raise NotImplementedError()
 
@@ -314,7 +312,7 @@ class Instrument(metaclass=ABCMeta):
         return instrument_cls(collection_prefix=collection_prefix)
 
     @staticmethod
-    def importAll(registry: Registry) -> None:
+    def importAll(registry: Registry) -> dict[str, type[Instrument]]:
         """Import all the instruments known to this registry.
 
         This will ensure that all metadata translators have been registered.
@@ -324,31 +322,43 @@ class Instrument(metaclass=ABCMeta):
         registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.
 
+        Returns
+        -------
+        imported : `dict` [`str`, `type` [`Instrument`]]
+            A mapping containing all the instrument classes that were loaded
+            successfully, keyed by their butler names.
+
         Notes
        -----
        It is allowed for a particular instrument class to fail on import.
        This might simply indicate that a particular obs package has
        not been setup.
        """
+        imported: dict[str, type[Instrument]] = {}
        records = list(registry.queryDimensionRecords("instrument"))
        for record in records:
            cls = record.class_name
+            instrument_name: str = cast(str, record.name)
            with contextlib.suppress(Exception):
-                doImportType(cls)
+                instr = doImportType(cls)
+                assert issubclass(instr, Instrument)
+                imported[instrument_name] = instr
+        return imported
 
     @abstractmethod
-    def getRawFormatter(self, dataId: DataId) -> type[Formatter]:
+    def getRawFormatter(self, dataId: DataId) -> type[Formatter | FormatterV2]:
        """Return the Formatter class that should be used to read a particular
        raw file.
 
        Parameters
        ----------
-        dataId : `DataId`
+        dataId : `lsst.daf.butler.DataId`
            Dimension-based ID for the raw file or files being ingested.
 
        Returns
        -------
-        formatter : `
+        formatter : `type` \
+            [`lsst.daf.butler.Formatter` | `lsst.daf.butler.FormatterV2` ]
            Class to be used that reads the file into the correct
            Python object for the raw data.
        """
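The new ``Instrument.importAll`` return value shown in the hunk above can be consumed like this (a minimal sketch; the repository path is an assumption):

    from lsst.daf.butler import Butler
    from lsst.pipe.base import Instrument

    butler = Butler("/path/to/repo")  # assumed repository path

    # importAll still suppresses import errors; the returned mapping only
    # contains the instrument classes that actually loaded.
    imported = Instrument.importAll(butler.registry)
    for name, cls in sorted(imported.items()):
        print(f"{name}: {cls.__module__}.{cls.__qualname__}")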
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_quantumContext.py
RENAMED
@@ -380,8 +380,8 @@ class QuantumContext:
            if dataset is directly a `list` of `~lsst.daf.butler.DatasetRef`
            or a single `~lsst.daf.butler.DatasetRef`. If ``values.NAME`` is
            None, no output is written.
-        dataset : `OutputQuantizedConnection` or `list`
-            or `DatasetRef`
+        dataset : `OutputQuantizedConnection` or `list` \
+            [`lsst.daf.butler.DatasetRef`] or `lsst.daf.butler.DatasetRef`
            This argument may either be an `InputQuantizedConnection` which
            describes all the inputs of a quantum, a list of
            `lsst.daf.butler.DatasetRef`, or a single
@@ -460,7 +460,7 @@ class QuantumContext:
 
        Parameters
        ----------
-        ref : `DatasetRef`
+        ref : `lsst.daf.butler.DatasetRef`
            The dataset to attach provenance to. This dataset must have been
            retrieved by this quantum context.
        extra : `dict` [ `str`, `int` | `float` | `str` | `bool` ]
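The docstrings touched here describe the `QuantumContext` get/put pattern used in ``PipelineTask.runQuantum``; a schematic sketch of that pattern (shown as a bare method for brevity, it would normally live on a `PipelineTask` subclass):

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # butlerQC is the QuantumContext; get() accepts an InputQuantizedConnection,
        # a list of DatasetRefs, or a single DatasetRef, as documented above.
        inputs = butlerQC.get(inputRefs)
        outputs = self.run(**inputs)  # a Struct whose fields match the output connections
        butlerQC.put(outputs, outputRefs)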
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_status.py
RENAMED
@@ -275,15 +275,23 @@ class ExceptionInfo(pydantic.BaseModel):
 class QuantumAttemptStatus(enum.Enum):
     """Enum summarizing an attempt to run a quantum."""
 
+    ABORTED = -4
+    """The quantum failed with a hard error that prevented both logs and
+    metadata from being written.
+
+    This state is only set if information from higher-level tooling (e.g. BPS)
+    is available to distinguish it from ``UNKNOWN``.
+    """
+
     UNKNOWN = -3
     """The status of this attempt is unknown.
 
-    This
-
-
+    This means no logs or metadata were written, and it at least could not be
+    determined whether the quantum was blocked by an upstream failure (if it
+    was definitely blocked, `BLOCKED` is set instead).
     """
 
-
+    ABORTED_SUCCESS = -2
     """Task metadata was written for this attempt but logs were not.
 
     This is a rare condition that requires a hard failure (i.e. the kind that
@@ -292,20 +300,21 @@ class QuantumAttemptStatus(enum.Enum):
     """
 
     FAILED = -1
-    """Execution of the quantum failed.
+    """Execution of the quantum failed gracefully.
 
     This is always set if the task metadata dataset was not written but logs
     were, as is the case when a Python exception is caught and handled by the
-    execution system.
-
-
+    execution system.
+
+    This status guarantees that the task log dataset was produced but the
+    metadata dataset was not.
     """
 
     BLOCKED = 0
     """This quantum was not executed because an upstream quantum failed.
 
-    Upstream quanta with status `UNKNOWN` or `
-    `
+    Upstream quanta with status `UNKNOWN`, `FAILED`, or `ABORTED` are
+    considered blockers; `ABORTED_SUCCESS` is not.
     """
 
     SUCCESSFUL = 1
@@ -319,6 +328,30 @@ class QuantumAttemptStatus(enum.Enum):
     these "successes with caveats" are reported.
     """
 
+    @property
+    def has_metadata(self) -> bool:
+        """Whether the task metadata dataset was produced."""
+        return self is self.SUCCESSFUL or self is self.ABORTED_SUCCESS
+
+    @property
+    def has_log(self) -> bool:
+        """Whether the log dataset was produced."""
+        return self is self.SUCCESSFUL or self is self.FAILED
+
+    @property
+    def title(self) -> str:
+        """A version of this status' name suitable for use as a title in a plot
+        or table.
+        """
+        return self.name.capitalize().replace("_", " ")
+
+    @property
+    def is_rare(self) -> bool:
+        """Whether this status is rare enough that it should only be listed
+        when it actually occurs.
+        """
+        return self in (self.ABORTED, self.ABORTED_SUCCESS, self.UNKNOWN)
+
 
 class GetSetDictMetadataHolder(Protocol):
     """Protocol for objects that have a ``metadata`` attribute that satisfies
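A small sketch of the new ``QuantumAttemptStatus`` helper properties added in this hunk (importing from the private module path shown in the diff):

    from lsst.pipe.base._status import QuantumAttemptStatus

    status = QuantumAttemptStatus.FAILED
    # A graceful failure writes logs but no task metadata.
    assert status.has_log and not status.has_metadata
    print(status.title)    # "Failed"
    print(status.is_rare)  # False: only ABORTED, ABORTED_SUCCESS, and UNKNOWN are "rare"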
{lsst_pipe_base-30.0.0rc3 → lsst_pipe_base-30.0.1rc1}/python/lsst/pipe/base/_task_metadata.py
RENAMED
@@ -37,7 +37,7 @@ import itertools
 import numbers
 import sys
 from collections.abc import Collection, Iterator, Mapping, Sequence
-from typing import Any, Protocol
+from typing import Any, Protocol
 
 from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictFloat, StrictInt, StrictStr
 
@@ -47,7 +47,7 @@ _ALLOWED_PRIMITIVE_TYPES = (str, float, int, bool)
 
 # Note that '|' syntax for unions doesn't work when we have to use a string
 # literal (and we do since it's recursive and not an annotation).
-NestedMetadataDict
+type NestedMetadataDict = Mapping[str, str | float | int | bool | "NestedMetadataDict"]
 
 
 class PropertySetLike(Protocol):
|