lsst-pipe-base 29.2025.3100.tar.gz → 29.2025.3300.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lsst_pipe_base-29.2025.3100/python/lsst_pipe_base.egg-info → lsst_pipe_base-29.2025.3300}/PKG-INFO +1 -1
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/pyproject.toml +1 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/__init__.py +0 -1
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/all_dimensions_quantum_graph_builder.py +4 -42
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/caching_limited_butler.py +8 -4
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/graphSummary.py +4 -4
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/mp_graph_executor.py +21 -9
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py +40 -10
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_tasks.py +106 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/io.py +1 -1
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_graph_builder.py +42 -16
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_graph_skeleton.py +60 -1
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/single_quantum_executor.py +10 -11
- lsst_pipe_base-29.2025.3300/python/lsst/pipe/base/tests/in_memory_limited_butler.py +223 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/mocks/__init__.py +1 -0
- lsst_pipe_base-29.2025.3300/python/lsst/pipe/base/tests/mocks/_in_memory_repo.py +357 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/mocks/_pipeline_task.py +19 -2
- lsst_pipe_base-29.2025.3300/python/lsst/pipe/base/version.py +2 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300/python/lsst_pipe_base.egg-info}/PKG-INFO +1 -1
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/SOURCES.txt +4 -3
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_caching_limited_butler.py +10 -12
- lsst_pipe_base-29.2025.3300/tests/test_mp_graph_executor.py +407 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipeline_graph.py +8 -1
- lsst_pipe_base-29.2025.3300/tests/test_single_quantum_executor.py +133 -0
- lsst_pipe_base-29.2025.3100/python/lsst/pipe/base/executionButlerBuilder.py +0 -493
- lsst_pipe_base-29.2025.3100/python/lsst/pipe/base/version.py +0 -2
- lsst_pipe_base-29.2025.3100/tests/test_executionButler.py +0 -69
- lsst_pipe_base-29.2025.3100/tests/test_executors.py +0 -808
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/COPYRIGHT +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/LICENSE +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/MANIFEST.in +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/README.md +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/bsd_license.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/CHANGES.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/creating-a-pipeline.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/creating-a-pipelinetask.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/creating-a-task.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/index.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/task-framework-overview.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/task-retargeting-howto.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/testing-a-pipeline-task.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/testing-pipelines-with-mocks.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/doc/lsst.pipe.base/working-with-pipeline-graphs.rst +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/gpl-v3.0.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_datasetQueryConstraints.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_dataset_handle.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_observation_dimension_packer.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_quantumContext.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_status.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/_task_metadata.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/automatic_connection_constants.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/_get_cli_subcommands.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/cmd/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/cmd/commands.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/opt/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/opt/arguments.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/cli/opt/options.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/config.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/configOverrides.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/connectionTypes.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/connections.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/dot_tools.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/exec_fixup_data_id.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/execution_graph_fixup.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/execution_reports.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/formatters/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/formatters/pexConfig.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/_implDetails.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/_loadHelpers.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/_versionDeserializers.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/graph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/quantumNode.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/log_capture.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/mermaid_tools.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipelineIR.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipelineTask.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/__main__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_dataset_types.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_edges.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_exceptions.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_mapping_views.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_nodes.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_task_subsets.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/expressions.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_dot.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_formatting.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_layout.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_merge.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_mermaid.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_options.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_printer.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_show.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/prerequisite_helpers.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/py.typed +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_graph_executor.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_provenance_graph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_reports.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/register_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/retrieve_artifacts_for_quanta.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/transfer_from_graph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/utils.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/script/zip_from_graph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/separable_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/simple_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/struct.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/task.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/taskFactory.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/testUtils.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/mocks/_data_id_match.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/mocks/_storage_class.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/no_dimensions.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/pipelineStepTester.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/simpleQGraph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/tests/util.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/utils.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/dependency_links.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/entry_points.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/requires.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/top_level.txt +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst_pipe_base.egg-info/zip-safe +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/setup.cfg +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_adjust_all_quanta.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_cliCmdRegisterInstrument.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_configOverrides.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_config_formatter.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_connections.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_dataid_match.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_dataset_handle.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_dot_tools.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_dynamic_connections.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_execution_reports.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_execution_storage_class_conversion.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_graphBuilder.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_init_output_run.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_mermaid.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipeline.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipelineIR.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipelineLoadSubset.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipelineTask.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_pipeline_graph_expressions.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_qg_builder_dimensions.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_quantumGraph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_quantum_provenance_graph.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_quantum_reports.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_quantum_success_caveats.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_script_utils.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_separable_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_simple_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_struct.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_task.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_task_factory.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_taskmetadata.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_testUtils.py +0 -0
- {lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/tests/test_utils.py +0 -0
{lsst_pipe_base-29.2025.3100/python/lsst_pipe_base.egg-info → lsst_pipe_base-29.2025.3300}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-pipe-base
-Version: 29.2025.3100
+Version: 29.2025.3300
 Summary: Pipeline infrastructure for the Rubin Science Pipelines.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: BSD 3-Clause License
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/pyproject.toml

@@ -195,6 +195,7 @@ checks = [
     "EX01",  # Example section.
     "SS06",  # Summary can go into second line.
     "GL01",  # Summary text can start on same line as """
+    "GL03",  # Double line breaks (fights with ruff-fmt on examples in docs).
     "GL08",  # Do not require docstring.
     "ES01",  # No extended summary required.
     "RT01",  # Unfortunately our @property trigger this.
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/all_dimensions_quantum_graph_builder.py

@@ -44,7 +44,6 @@ import astropy.table
 from lsst.daf.butler import (
     Butler,
     DataCoordinate,
-    DimensionDataAttacher,
     DimensionElement,
     DimensionGroup,
     DimensionRecordSet,
@@ -57,7 +56,7 @@ from lsst.utils.timer import timeMethod
 
 from ._datasetQueryConstraints import DatasetQueryConstraintVariant
 from .quantum_graph_builder import QuantumGraphBuilder, QuantumGraphBuilderError
-from .quantum_graph_skeleton import DatasetKey,
+from .quantum_graph_skeleton import DatasetKey, PrerequisiteDatasetKey, QuantumGraphSkeleton, QuantumKey
 
 if TYPE_CHECKING:
     from .pipeline_graph import DatasetTypeNode, PipelineGraph, TaskNode
@@ -143,13 +142,14 @@ class AllDimensionsQuantumGraphBuilder(QuantumGraphBuilder):
         self._query_for_data_ids(tree)
         dimension_records = self._fetch_most_dimension_records(tree)
         tree.generate_data_ids(self.log)
-        skeleton = self._make_subgraph_skeleton(tree)
+        skeleton: QuantumGraphSkeleton = self._make_subgraph_skeleton(tree)
         if not skeleton.has_any_quanta:
             # QG is going to be empty; exit early not just for efficiency, but
             # also so downstream code doesn't have to guard against this case.
             return skeleton
         self._find_followup_datasets(tree, skeleton)
-
+        all_data_id_dimensions = subgraph.get_all_dimensions()
+        skeleton.attach_dimension_records(self.butler, all_data_id_dimensions, dimension_records)
         return skeleton
 
     def _query_for_data_ids(self, tree: _DimensionGroupTree) -> None:
@@ -486,44 +486,6 @@ class AllDimensionsQuantumGraphBuilder(QuantumGraphBuilder):
             result.append(record_set)
         return result
 
-    @timeMethod
-    def _attach_dimension_records(
-        self, skeleton: QuantumGraphSkeleton, dimension_records: Iterable[DimensionRecordSet]
-    ) -> None:
-        """Attach dimension records to most data IDs in the in-progress graph,
-        and return a data structure that records the rest.
-
-        Parameters
-        ----------
-        skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
-            In-progress quantum graph to modify in place.
-        dimension_records : `~collections.abc.Iterable` [ \
-            `lsst.daf.butler.DimensionRecordSet` ]
-            Iterable of sets of dimension records.
-        """
-        # Group all nodes by data ID (and dimensions of data ID).
-        data_ids_to_expand: defaultdict[DimensionGroup, defaultdict[DataCoordinate, list[Key]]] = defaultdict(
-            lambda: defaultdict(list)
-        )
-        data_id: DataCoordinate | None
-        for node_key in skeleton:
-            if data_id := skeleton[node_key].get("data_id"):
-                data_ids_to_expand[data_id.dimensions][data_id].append(node_key)
-        attacher = DimensionDataAttacher(
-            records=dimension_records,
-            dimensions=DimensionGroup.union(*data_ids_to_expand.keys(), universe=self.universe),
-        )
-        for dimensions, data_ids in data_ids_to_expand.items():
-            with self.butler.query() as query:
-                # Butler query will be used as-needed to get dimension records
-                # (from prerequisites) we didn't fetch in advance. These are
-                # cached in the attacher so we don't look them up multiple
-                # times.
-                expanded_data_ids = attacher.attach(dimensions, data_ids.keys(), query=query)
-                for expanded_data_id, node_keys in zip(expanded_data_ids, data_ids.values()):
-                    for node_key in node_keys:
-                        skeleton.set_data_id(node_key, expanded_data_id)
-
 
 @dataclasses.dataclass(eq=False, repr=False, slots=True)
 class _DimensionGroupTwig:
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/caching_limited_butler.py

@@ -84,7 +84,6 @@ class CachingLimitedButler(LimitedButler):
         no_copy_on_cache: Set[str] = frozenset(),
     ):
         self._wrapped = wrapped
-        self._datastore = self._wrapped._datastore
         self.storageClasses = self._wrapped.storageClasses
         self._cache_on_put = cache_on_put
         self._cache_on_get = cache_on_get
@@ -148,9 +147,6 @@ class CachingLimitedButler(LimitedButler):
         # note that this does not use the cache at all
         return self._wrapped.getDeferred(ref, parameters=parameters, storageClass=storageClass)
 
-    def stored(self, ref: DatasetRef) -> bool:
-        return self.stored_many([ref])[ref]  # TODO: remove this once DM-43086 is done.
-
     def stored_many(self, refs: Iterable[DatasetRef]) -> dict[DatasetRef, bool]:
         result = {}
         unknown_refs = []
@@ -205,3 +201,11 @@ class CachingLimitedButler(LimitedButler):
     @property
     def dimensions(self) -> DimensionUniverse:
         return self._wrapped.dimensions
+
+    @property
+    def _datastore(self) -> Any:
+        return self._wrapped._datastore
+
+    @_datastore.setter  # demanded by MyPy since we declare it to be an instance attribute in LimitedButler.
+    def _datastore(self, value: Any) -> None:
+        self._wrapped._datastore = value
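The removed constructor line had snapshotted the wrapped butler's `_datastore` at construction time; the new property delegates every access instead, so a datastore replaced on either object stays visible to both (the removed `stored()` override had merely forwarded to `stored_many()`, per its DM-43086 TODO). A minimal sketch of the delegating-property pattern, with generic names that are not part of the package:

```python
class Wrapper:
    """Sketch: forward an attribute to a wrapped object instead of copying it."""

    def __init__(self, wrapped):
        self._wrapped = wrapped  # no snapshot of wrapped._datastore here

    @property
    def _datastore(self):
        # Reads always go through to the wrapped object.
        return self._wrapped._datastore

    @_datastore.setter
    def _datastore(self, value):
        # Writes propagate too, keeping both objects in sync.
        self._wrapped._datastore = value
```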
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/graph/graphSummary.py

@@ -39,10 +39,10 @@ class QgraphTaskSummary(pydantic.BaseModel):
     numQuanta: int = 0
     """Number of Quanta for this PipelineTask in this QuantumGraph."""
 
-    numInputs: dict[str, int] = Counter
+    numInputs: dict[str, int] = pydantic.Field(default_factory=Counter)
     """Total number of inputs per dataset type name for this PipelineTask."""
 
-    numOutputs: dict[str, int] = Counter
+    numOutputs: dict[str, int] = pydantic.Field(default_factory=Counter)
     """Total number of outputs per dataset type name for this PipelineTask."""
 
     # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
@@ -79,7 +79,7 @@ class QgraphTaskSummary(pydantic.BaseModel):
 class QgraphSummary(pydantic.BaseModel):
     """Report for the QuantumGraph creation or reading."""
 
-    graphID: BuildId
+    graphID: BuildId | None = None
     """QuantumGraph ID."""
 
     cmdLine: str | None = None
@@ -97,7 +97,7 @@ class QgraphSummary(pydantic.BaseModel):
     outputRun: str | None = None
     """Output run collection."""
 
-    qgraphTaskSummaries: dict[str, QgraphTaskSummary] =
+    qgraphTaskSummaries: dict[str, QgraphTaskSummary] = pydantic.Field(default_factory=dict)
     """Quanta information summarized per PipelineTask."""
 
     # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
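The old `numInputs`/`numOutputs` defaults assigned the `Counter` class itself as the field default; wrapping it in `pydantic.Field(default_factory=Counter)` makes pydantic call the factory once per instance. A self-contained sketch (the model name is illustrative):

```python
from collections import Counter

import pydantic


class TaskSummary(pydantic.BaseModel):
    # default_factory is invoked per instance, so no two models share
    # (or accidentally mutate) the same default mapping.
    num_inputs: dict[str, int] = pydantic.Field(default_factory=Counter)


a, b = TaskSummary(), TaskSummary()
a.num_inputs["calexp"] = 3
assert b.num_inputs == {}  # b received its own fresh Counter
```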
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/mp_graph_executor.py

@@ -29,6 +29,7 @@ from __future__ import annotations
 
 __all__ = ["MPGraphExecutor", "MPGraphExecutorError", "MPTimeoutError"]
 
+import enum
 import importlib
 import logging
 import multiprocessing
@@ -39,7 +40,6 @@ import threading
 import time
 import uuid
 from collections.abc import Iterable
-from enum import Enum
 from typing import Literal
 
 from lsst.daf.butler.cli.cliLog import CliLog
@@ -55,14 +55,26 @@ from .quantum_reports import ExecutionStatus, QuantumReport, Report
 _LOG = logging.getLogger(__name__)
 
 
-
-
-
-
-
-
-
+class JobState(enum.Enum):
+    """Possible state for an executing task."""
+
+    PENDING = enum.auto()
+    """The job has not started yet."""
+
+    RUNNING = enum.auto()
+    """The job is currently executing."""
+
+    FINISHED = enum.auto()
+    """The job finished successfully."""
+
+    FAILED = enum.auto()
+    """The job execution failed (process returned non-zero status)."""
+
+    TIMED_OUT = enum.auto()
+    """The job was killed due to too long execution time."""
+
+    FAILED_DEP = enum.auto()
+    """One of the dependencies of this job failed or timed out."""
 
 
 class _Job:
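The diff viewer does not show the body of the definition these lines replace, only that eight lines were removed where the module-level `JobState` enum now lives. The sketch below restates the new enum and adds an illustrative helper (not part of the module) for the kind of terminal-state check an executor needs when propagating `FAILED_DEP`:

```python
import enum


class JobState(enum.Enum):
    PENDING = enum.auto()
    RUNNING = enum.auto()
    FINISHED = enum.auto()
    FAILED = enum.auto()
    TIMED_OUT = enum.auto()
    FAILED_DEP = enum.auto()


# Illustrative helper: once a job reaches one of these states it will never
# run, so quanta depending on it can be marked FAILED_DEP immediately.
TERMINAL_STATES = frozenset(
    {JobState.FINISHED, JobState.FAILED, JobState.TIMED_OUT, JobState.FAILED_DEP}
)

assert JobState.RUNNING not in TERMINAL_STATES
```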
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py

@@ -150,8 +150,10 @@ class PipelineGraph:
         self._description = value
 
     @property
-    def universe(self) -> DimensionUniverse
+    def universe(self) -> DimensionUniverse:
         """Definitions for all butler dimensions."""
+        if self._universe is None:
+            raise UnresolvedGraphError("Pipeline graph is not resolved.")
         return self._universe
 
     @property
@@ -159,7 +161,7 @@ class PipelineGraph:
         """Data ID that represents a constraint on all quanta generated from
         this pipeline.
 
-        This is may not be available unless
+        This is may not be available unless the graph is resolved.
         """
         return DataCoordinate.standardize(self._raw_data_id, universe=self.universe)
 
@@ -305,7 +307,7 @@ class PipelineGraph:
                 for k, v in self._task_subsets.items()
             },
             description=self._description,
-            universe=self.
+            universe=self._universe,
             data_id=self._raw_data_id,
             step_definitions=step_definitions,
         )
@@ -774,7 +776,7 @@ class PipelineGraph:
             key=NodeKey(NodeType.TASK, label),
             init_key=NodeKey(NodeType.TASK_INIT, label),
             data=_TaskNodeImportedData.configure(label, task_class, config, connections),
-            universe=self.
+            universe=self._universe,
         )
         self.add_task_nodes([task_node])
         return task_node
@@ -1571,9 +1573,9 @@ class PipelineGraph:
 
         Returns
         -------
-        groups : `dict` [
-            A dictionary of groups keyed by
-            value is a tuple of:
+        groups : `dict` [ `~lsst.daf.butler.DimensionGroup`, `tuple` ]
+            A dictionary of groups keyed by `~lsst.daf.butler.DimensionGroup`,
+            in which each value is a tuple of:
 
             - a `dict` of `TaskNode` instances, keyed by task label
             - a `dict` of `DatasetTypeNode` instances, keyed by
@@ -1605,6 +1607,25 @@ class PipelineGraph:
                 group[1][dataset_type_node.name] = dataset_type_node
         return result
 
+    def get_all_dimensions(self, prerequisites: bool = True) -> DimensionGroup:
+        """Return all dimensions used in this graph's tasks and dataset types.
+
+        Parameters
+        ----------
+        prerequisites : `bool`, optional
+            If `False`, do not include the dimensions that are only used by
+            prerequisite input dataset types.
+
+        Returns
+        -------
+        dimensions : `~lsst.daf.butler.DimensionGroup`.
+            All dimensions in this pipeline.
+        """
+        return DimensionGroup.union(
+            *self.group_by_dimensions(prerequisites=prerequisites).keys(),
+            universe=self.universe,
+        )
+
     def split_independent(self) -> Iterable[PipelineGraph]:
         """Iterate over independent subgraphs that together comprise this
         pipeline graph.
@@ -1668,11 +1689,13 @@ class PipelineGraph:
         not considered part of the pipeline graph in other respects, but it
         does get written with other provenance datasets.
         """
-        if self.
+        if self._universe is None:
             raise UnresolvedGraphError(
                 "PipelineGraph must be resolved in order to get the packages dataset type."
             )
-        return DatasetType(
+        return DatasetType(
+            PACKAGES_INIT_OUTPUT_NAME, self._universe.empty, PACKAGES_INIT_OUTPUT_STORAGE_CLASS
+        )
 
 
     def register_dataset_types(self, butler: Butler, include_packages: bool = True) -> None:
         """Register all dataset types in a data repository.
@@ -1767,6 +1790,7 @@ class PipelineGraph:
         self,
         get_init_input: Callable[[DatasetType], Any] | None = None,
         init_outputs: list[tuple[Any, DatasetType]] | None = None,
+        labels: Iterable[str] | None = None,
     ) -> list[PipelineTask]:
         """Instantiate all tasks in the pipeline.
 
@@ -1785,6 +1809,9 @@ class PipelineGraph:
             correspond to the storage class of the output connection, which
             may not be the same as the storage class on the graph's dataset
             type node.
+        labels : `~collections.abc.Iterable` [ `str` ], optional
+            The labels of tasks to instantiate. If not provided, all tasks in
+            the graph will be instantiated.
 
         Returns
         -------
@@ -1793,10 +1820,13 @@ class PipelineGraph:
         """
         if not self.is_fully_resolved:
             raise UnresolvedGraphError("Pipeline graph must be fully resolved before instantiating tasks.")
-        empty_data_id = DataCoordinate.make_empty(
+        empty_data_id = DataCoordinate.make_empty(self.universe)
+        labels = set(labels) if labels is not None else self.tasks.keys()
         handles: dict[str, InMemoryDatasetHandle] = {}
         tasks: list[PipelineTask] = []
         for task_node in self.tasks.values():
+            if task_node.label not in labels:
+                continue
             task_init_inputs: dict[str, Any] = {}
             for read_edge in task_node.init.inputs.values():
                 if (handle := handles.get(read_edge.dataset_type_name)) is not None:
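Besides the hardened `universe` property and the docstring repairs, this file gains one new public method, `get_all_dimensions()`, plus a `labels` filter that lets callers instantiate only a subset of tasks. A hedged usage sketch of the new method (the helper name is illustrative; the graph must already be resolved):

```python
from lsst.daf.butler import DimensionGroup
from lsst.pipe.base.pipeline_graph import PipelineGraph


def dimension_summary(graph: PipelineGraph) -> tuple[DimensionGroup, DimensionGroup]:
    """Return the union of the graph's dimensions, with and without those
    contributed only by prerequisite input dataset types."""
    return (
        graph.get_all_dimensions(),
        graph.get_all_dimensions(prerequisites=False),
    )
```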
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/_tasks.py

@@ -360,6 +360,57 @@ class TaskInitNode:
         yield from self.outputs.values()
         yield self.config_output
 
+    def get_input_edge(self, connection_name: str) -> ReadEdge:
+        """Look up an input edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `ReadEdge`
+            Input edge.
+        """
+        return self.inputs[connection_name]
+
+    def get_output_edge(self, connection_name: str) -> WriteEdge:
+        """Look up an output edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `WriteEdge`
+            Output edge.
+        """
+        if connection_name == acc.CONFIG_INIT_OUTPUT_CONNECTION_NAME:
+            return self.config_output
+        return self.outputs[connection_name]
+
+    def get_edge(self, connection_name: str) -> Edge:
+        """Look up an edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `Edge`
+            Edge.
+        """
+        try:
+            return self.get_input_edge(connection_name)
+        except KeyError:
+            pass
+        return self.get_output_edge(connection_name)
+
     def diff_edges(self, other: TaskInitNode) -> list[str]:
         """Compare the edges of this task initialization node to those from the
         same task label in a different pipeline.
@@ -742,6 +793,61 @@ class TaskNode:
         if self.log_output is not None:
             yield self.log_output
 
+    def get_input_edge(self, connection_name: str) -> ReadEdge:
+        """Look up an input edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `ReadEdge`
+            Input edge.
+        """
+        return self.inputs[connection_name]
+
+    def get_output_edge(self, connection_name: str) -> WriteEdge:
+        """Look up an output edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `WriteEdge`
+            Output edge.
+        """
+        if connection_name == acc.METADATA_OUTPUT_CONNECTION_NAME:
+            return self.metadata_output
+        if connection_name == acc.LOG_OUTPUT_CONNECTION_NAME:
+            if self.log_output is None:
+                raise KeyError(connection_name)
+            return self.log_output
+        return self.outputs[connection_name]
+
+    def get_edge(self, connection_name: str) -> Edge:
+        """Look up an edge by connection name.
+
+        Parameters
+        ----------
+        connection_name : `str`
+            Name of the connection.
+
+        Returns
+        -------
+        edge : `Edge`
+            Edge.
+        """
+        try:
+            return self.get_input_edge(connection_name)
+        except KeyError:
+            pass
+        return self.get_output_edge(connection_name)
+
     def diff_edges(self, other: TaskNode) -> list[str]:
         """Compare the edges of this task node to those from the same task
         label in a different pipeline.
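`TaskInitNode` and `TaskNode` gain the same three accessors: `get_input_edge` raises `KeyError` for unknown names, `get_output_edge` special-cases the automatic config/metadata/log connections, and `get_edge` tries inputs before falling back to outputs. A small sketch built only on the new API (the helper itself is illustrative):

```python
from lsst.pipe.base.pipeline_graph import TaskNode


def has_connection(task_node: TaskNode, connection_name: str) -> bool:
    """Return whether the task has an input or output connection with this
    name, including automatic ones such as the metadata output."""
    try:
        task_node.get_edge(connection_name)
    except KeyError:
        return False
    return True
```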
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/pipeline_graph/io.py

@@ -713,7 +713,7 @@ class SerializedPipelineGraph(pydantic.BaseModel):
             },
             step_labels=list(target.steps),
             steps_verified=target.steps.verified,
-            dimensions=target.
+            dimensions=target._universe.dimensionConfig.toDict() if target._universe is not None else None,
             data_id=target._raw_data_id,
         )
         if target._sorted_keys:
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_graph_builder.py

@@ -55,6 +55,7 @@ from lsst.daf.butler import (
     NamedKeyMapping,
     Quantum,
 )
+from lsst.daf.butler.datastore.record_data import DatastoreRecordData
 from lsst.daf.butler.registry import MissingCollectionError, MissingDatasetTypeError
 from lsst.utils.logging import LsstLogAdapter, getLogger
 from lsst.utils.timer import timeMethod
@@ -1000,19 +1001,35 @@ class QuantumGraphBuilder(ABC):
         """
         overall_inputs = skeleton.extract_overall_inputs()
         exported_records = self.butler._datastore.export_records(overall_inputs.values())
-        for
-
-
+        for task_label in self._pipeline_graph.tasks:
+            if not skeleton.has_task(task_label):
+                continue
+            task_init_key = skeleton.get_task_init_node(task_label)
             init_input_ids = {
                 ref.id
-                for dataset_key in skeleton.iter_inputs_of(
+                for dataset_key in skeleton.iter_inputs_of(task_init_key)
                 if (ref := overall_inputs.get(dataset_key)) is not None
             }
-
+            init_records = {}
+            if init_input_ids:
                 for datastore_name, records in exported_records.items():
-                    matching_records = records.subset(
+                    matching_records = records.subset(init_input_ids)
                     if matching_records is not None:
-
-                        skeleton[
+                        init_records[datastore_name] = matching_records
+            skeleton[task_init_key]["datastore_records"] = init_records
+            for quantum_key in skeleton.get_quanta(task_label):
+                quantum_records = {}
+                input_ids = {
+                    ref.id
+                    for dataset_key in skeleton.iter_inputs_of(quantum_key)
+                    if (ref := overall_inputs.get(dataset_key)) is not None
+                }
+                if input_ids:
+                    for datastore_name, records in exported_records.items():
+                        matching_records = records.subset(input_ids)
+                        if matching_records is not None:
+                            quantum_records[datastore_name] = matching_records
+                skeleton[quantum_key]["datastore_records"] = quantum_records
 
     @final
     @timeMethod
@@ -1045,20 +1062,29 @@ class QuantumGraphBuilder(ABC):
                 continue
             task_node = self._pipeline_graph.tasks[task_def.label]
             task_init_key = skeleton.get_task_init_node(task_def.label)
-
-
+            task_init_state = skeleton[task_init_key]
+            init_datastore_records: dict[str, DatastoreRecordData] = task_init_state.get(
+                "datastore_records", {}
+            )
+            init_inputs[task_def] = task_init_state["inputs"].values()
+            init_outputs[task_def] = task_init_state["outputs"].values()
             quanta_for_task: set[Quantum] = set()
             for quantum_key in skeleton.get_quanta(task_node.label):
-
+                quantum_state = skeleton[quantum_key]
+                quantum_datastore_records: dict[str, DatastoreRecordData] = quantum_state.get(
+                    "datastore_records", {}
+                )
                 quanta_for_task.add(
                     Quantum(
                         taskName=task_node.task_class_name,
                         taskClass=task_node.task_class,
-                        dataId=
-                        initInputs=
-                        inputs=
-                        outputs=
-                        datastore_records=
+                        dataId=quantum_state["data_id"],
+                        initInputs=quantum_state["init_inputs"],
+                        inputs=quantum_state["inputs"],
+                        outputs=quantum_state["outputs"],
+                        datastore_records=DatastoreRecordData.merge_mappings(
+                            quantum_datastore_records, init_datastore_records
+                        ),
                     )
                 )
             quanta[task_def] = quanta_for_task
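The rewritten loop attaches datastore records at two granularities, once on each task-init node and once per quantum, and merges the two mappings with `DatastoreRecordData.merge_mappings` when the `Quantum` is assembled. The per-node filtering step reduces to a helper like this sketch (the function is illustrative; `exported_records` maps datastore name to `DatastoreRecordData` as returned by `export_records`):

```python
import uuid

from lsst.daf.butler.datastore.record_data import DatastoreRecordData


def records_for(
    exported_records: dict[str, DatastoreRecordData], input_ids: set[uuid.UUID]
) -> dict[str, DatastoreRecordData]:
    """Keep only the records covering the given dataset IDs, per datastore."""
    matching = {}
    for datastore_name, records in exported_records.items():
        subset = records.subset(input_ids)  # None when nothing matches
        if subset is not None:
            matching[datastore_name] = subset
    return matching
```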
{lsst_pipe_base-29.2025.3100 → lsst_pipe_base-29.2025.3300}/python/lsst/pipe/base/quantum_graph_skeleton.py

@@ -40,12 +40,21 @@ __all__ = (
 )
 
 import dataclasses
+from collections import defaultdict
 from collections.abc import Iterable, Iterator, MutableMapping, Set
 from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias
 
 import networkx
 
-from lsst.daf.butler import
+from lsst.daf.butler import (
+    Butler,
+    DataCoordinate,
+    DataIdValue,
+    DatasetRef,
+    DimensionDataAttacher,
+    DimensionGroup,
+    DimensionRecordSet,
+)
 from lsst.utils.logging import getLogger
 
 if TYPE_CHECKING:
@@ -170,6 +179,7 @@ class QuantumGraphSkeleton:
         self._tasks: dict[str, tuple[TaskInitKey, set[QuantumKey]]] = {}
         self._xgraph: networkx.DiGraph = networkx.DiGraph()
         self._global_init_outputs: set[DatasetKey] = set()
+        self._dimension_data: dict[str, DimensionRecordSet] = {}
         for task_label in task_labels:
             task_init_key = TaskInitKey(task_label)
             self._tasks[task_label] = (task_init_key, set())
@@ -310,6 +320,10 @@ class QuantumGraphSkeleton:
         for task_label, (_, quanta) in other._tasks.items():
             self._tasks[task_label][1].update(quanta)
         self._xgraph.update(other._xgraph)
+        for record_set in other._dimension_data.values():
+            self._dimension_data.setdefault(
+                record_set.element.name, DimensionRecordSet(record_set.element)
+            ).update(record_set)
 
     def add_quantum_node(self, task_label: str, data_id: DataCoordinate, **attrs: Any) -> QuantumKey:
         """Add a new node representing a quantum.
@@ -710,3 +724,48 @@ class QuantumGraphSkeleton:
             Raised if this node does not have an expanded data ID.
         """
         return self._xgraph.nodes[key]["data_id"]
+
+    def attach_dimension_records(
+        self, butler: Butler, dimensions: DimensionGroup, dimension_records: Iterable[DimensionRecordSet]
+    ) -> None:
+        """Attach dimension records to the data IDs in the skeleton.
+
+        Parameters
+        ----------
+        butler : `lsst.daf.butler.Butler`
+            Butler to use to query for missing dimension records.
+        dimensions : `lsst.daf.butler.DimensionGroup`
+            Superset of all of the dimensions of all data IDs.
+        dimension_records : `~collections.abc.Iterable` [ \
+            `lsst.daf.butler.DimensionRecordSet` ]
+            Iterable of sets of dimension records to attach.
+        """
+        for record_set in dimension_records:
+            self._dimension_data.setdefault(
+                record_set.element.name, DimensionRecordSet(record_set.element)
+            ).update(record_set)
+        # Group all nodes by data ID (and dimensions of data ID).
+        data_ids_to_expand: defaultdict[DimensionGroup, defaultdict[DataCoordinate, list[Key]]] = defaultdict(
+            lambda: defaultdict(list)
+        )
+        data_id: DataCoordinate | None
+        for node_key in self:
+            if data_id := self[node_key].get("data_id"):
+                data_ids_to_expand[data_id.dimensions][data_id].append(node_key)
+        attacher = DimensionDataAttacher(records=self._dimension_data.values(), dimensions=dimensions)
+        for dimensions, data_ids in data_ids_to_expand.items():
+            with butler.query() as query:
+                # Butler query will be used as-needed to get dimension records
+                # (from prerequisites) we didn't fetch in advance. These are
+                # cached in the attacher so we don't look them up multiple
+                # times.
+                expanded_data_ids = attacher.attach(dimensions, data_ids.keys(), query=query)
+                for expanded_data_id, node_keys in zip(expanded_data_ids, data_ids.values()):
+                    for node_key in node_keys:
+                        self.set_data_id(node_key, expanded_data_id)
+        # Hold on to any records that we had to query for.
+        self._dimension_data = attacher.records
+
+    def get_dimension_data(self) -> list[DimensionRecordSet]:
+        """Return the dimension records attached to data IDs."""
+        return list(self._dimension_data.values())