lsst-pipe-base 29.2025.3400.tar.gz → 29.2025.3500.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lsst_pipe_base-29.2025.3400/python/lsst_pipe_base.egg-info → lsst_pipe_base-29.2025.3500}/PKG-INFO +1 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/dot_tools.py +14 -99
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/graph.py +4 -4
- lsst_pipe_base-29.2025.3500/python/lsst/pipe/base/mermaid_tools.py +213 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/mp_graph_executor.py +10 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_edges.py +17 -3
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_nodes.py +30 -3
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_tasks.py +3 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_dot.py +16 -6
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_graph_builder.py +4 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_graph_skeleton.py +23 -4
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_reports.py +16 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/simple_pipeline_executor.py +2 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/single_quantum_executor.py +1 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/mocks/_in_memory_repo.py +1 -1
- lsst_pipe_base-29.2025.3500/python/lsst/pipe/base/version.py +2 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500/python/lsst_pipe_base.egg-info}/PKG-INFO +1 -1
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_dot_tools.py +4 -4
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_mermaid.py +7 -24
- lsst_pipe_base-29.2025.3400/python/lsst/pipe/base/mermaid_tools.py +0 -494
- lsst_pipe_base-29.2025.3400/python/lsst/pipe/base/version.py +0 -2
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/COPYRIGHT +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/LICENSE +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/MANIFEST.in +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/README.md +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/bsd_license.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/CHANGES.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/creating-a-pipeline.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/creating-a-pipelinetask.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/creating-a-task.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/index.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/task-framework-overview.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/task-retargeting-howto.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/testing-a-pipeline-task.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/testing-pipelines-with-mocks.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/doc/lsst.pipe.base/working-with-pipeline-graphs.rst +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/gpl-v3.0.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/pyproject.toml +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_datasetQueryConstraints.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_dataset_handle.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_observation_dimension_packer.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_quantumContext.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_status.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/_task_metadata.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/all_dimensions_quantum_graph_builder.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/automatic_connection_constants.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/caching_limited_butler.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/_get_cli_subcommands.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/cmd/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/cmd/commands.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/opt/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/opt/arguments.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/cli/opt/options.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/config.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/configOverrides.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/connectionTypes.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/connections.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/exec_fixup_data_id.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/execution_graph_fixup.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/execution_reports.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/formatters/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/formatters/pexConfig.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/_implDetails.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/_loadHelpers.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/_versionDeserializers.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/graphSummary.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/quantumNode.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/log_capture.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipelineIR.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipelineTask.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/__main__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_dataset_types.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_exceptions.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_mapping_views.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_task_subsets.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/expressions.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/io.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_formatting.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_layout.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_merge.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_mermaid.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_options.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_printer.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_show.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/prerequisite_helpers.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/py.typed +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_graph_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_provenance_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/register_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/retrieve_artifacts_for_quanta.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/transfer_from_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/utils.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/script/zip_from_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/separable_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/struct.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/task.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/taskFactory.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/testUtils.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/in_memory_limited_butler.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/mocks/__init__.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/mocks/_data_id_match.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/mocks/_pipeline_task.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/mocks/_storage_class.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/no_dimensions.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/pipelineStepTester.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/simpleQGraph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/tests/util.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/utils.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/SOURCES.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/dependency_links.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/entry_points.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/requires.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/top_level.txt +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst_pipe_base.egg-info/zip-safe +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/setup.cfg +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_adjust_all_quanta.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_caching_limited_butler.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_cliCmdRegisterInstrument.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_configOverrides.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_config_formatter.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_connections.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_dataid_match.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_dataset_handle.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_dynamic_connections.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_execution_reports.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_execution_storage_class_conversion.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_graphBuilder.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_init_output_run.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_instrument.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_mp_graph_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipeline.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipelineIR.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipelineLoadSubset.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipelineTask.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipeline_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_pipeline_graph_expressions.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_qg_builder_dimensions.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_quantumGraph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_quantum_provenance_graph.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_quantum_reports.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_quantum_success_caveats.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_script_utils.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_separable_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_simple_pipeline_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_single_quantum_executor.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_struct.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_task.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_task_factory.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_taskmetadata.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_testUtils.py +0 -0
- {lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/tests/test_utils.py +0 -0
{lsst_pipe_base-29.2025.3400/python/lsst_pipe_base.egg-info → lsst_pipe_base-29.2025.3500}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-pipe-base
-Version: 29.2025.3400
+Version: 29.2025.3500
 Summary: Pipeline infrastructure for the Rubin Science Pipelines.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: BSD 3-Clause License
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/dot_tools.py
RENAMED
@@ -38,17 +38,12 @@ __all__ = ["graph2dot", "pipeline2dot"]
 # -------------------------------
 import html
 import io
-import re
 from collections.abc import Iterable
 from typing import TYPE_CHECKING, Any
 
 # -----------------------------
 # Imports for other modules --
 # -----------------------------
-from lsst.daf.butler import DatasetType, DimensionUniverse
-
-from . import connectionTypes
-from .connections import iterConnections
 from .pipeline import Pipeline
 
 if TYPE_CHECKING:
@@ -234,7 +229,7 @@ def pipeline2dot(pipeline: Pipeline | Iterable[TaskDef], file: Any) -> None:
 
     Parameters
     ----------
-    pipeline : `
+    pipeline : `.Pipeline` or `~collections.abc.Iterable` [ `.TaskDef` ]
         Pipeline description.
     file : `str` or file object
         File where GraphViz graph (DOT language) is written, can be a file name
@@ -247,30 +242,7 @@ def pipeline2dot(pipeline: Pipeline | Iterable[TaskDef], file: Any) -> None:
     ImportError
         Raised if the task class cannot be imported.
     """
-
-
-    def expand_dimensions(connection: connectionTypes.BaseConnection) -> list[str]:
-        """Return expanded list of dimensions, with special skypix treatment.
-
-        Parameters
-        ----------
-        connection : `list` [`str`]
-            Connection to examine.
-
-        Returns
-        -------
-        dimensions : `list` [`str`]
-            Expanded list of dimensions.
-        """
-        dimension_set = set()
-        if isinstance(connection, connectionTypes.DimensionedConnection):
-            dimension_set = set(connection.dimensions)
-        skypix_dim = []
-        if "skypix" in dimension_set:
-            dimension_set.remove("skypix")
-            skypix_dim = ["skypix"]
-        dimensions = universe.conform(dimension_set)
-        return list(dimensions.names) + skypix_dim
+    from .pipeline_graph import PipelineGraph, visualization
 
     # open a file if needed
     close = False
@@ -278,76 +250,19 @@ def pipeline2dot(pipeline: Pipeline | Iterable[TaskDef], file: Any) -> None:
         file = open(file, "w")
         close = True
 
-    print("digraph Pipeline {", file=file)
-    _renderDefault("graph", _ATTRIBS["defaultGraph"], file)
-    _renderDefault("node", _ATTRIBS["defaultNode"], file)
-    _renderDefault("edge", _ATTRIBS["defaultEdge"], file)
-
-    allDatasets: set[str | tuple[str, str]] = set()
     if isinstance(pipeline, Pipeline):
-    [13 removed lines (old 288-300) not preserved in this extract]
-            # next line is workaround until DM-29658
-            labelToTaskName[taskDef.label] = taskNodeName
-
-            _renderTaskNode(taskNodeName, taskDef, file, None)
-
-            metadataRePattern = re.compile("^(.*)_metadata$")
-            for attr in sorted(iterConnections(taskDef.connections, "inputs"), key=lambda x: x.name):
-                if attr.name not in allDatasets:
-                    dimensions = expand_dimensions(attr)
-                    _renderDSTypeNode(attr.name, dimensions, file)
-                    allDatasets.add(attr.name)
-                nodeName, component = DatasetType.splitDatasetTypeName(attr.name)
-                _renderEdge(attr.name, taskNodeName, file)
-                # connect component dataset types to the composite type that
-                # produced it
-                if component is not None and (nodeName, attr.name) not in allDatasets:
-                    _renderEdge(nodeName, attr.name, file)
-                    allDatasets.add((nodeName, attr.name))
-                if nodeName not in allDatasets:
-                    dimensions = expand_dimensions(attr)
-                    _renderDSTypeNode(nodeName, dimensions, file)
-                # The next if block is a workaround until DM-29658 at which time
-                # metadata connections should start working with the above code
-                if (match := metadataRePattern.match(attr.name)) is not None:
-                    matchTaskLabel = match.group(1)
-                    metadataNodesToLink.add((matchTaskLabel, attr.name))
-
-            for attr in sorted(iterConnections(taskDef.connections, "prerequisiteInputs"), key=lambda x: x.name):
-                if attr.name not in allDatasets:
-                    dimensions = expand_dimensions(attr)
-                    _renderDSTypeNode(attr.name, dimensions, file)
-                    allDatasets.add(attr.name)
-                # use dashed line for prerequisite edges to distinguish them
-                _renderEdge(attr.name, taskNodeName, file, style="dashed")
-
-            for attr in sorted(iterConnections(taskDef.connections, "outputs"), key=lambda x: x.name):
-                if attr.name not in allDatasets:
-                    dimensions = expand_dimensions(attr)
-                    _renderDSTypeNode(attr.name, dimensions, file)
-                    allDatasets.add(attr.name)
-                _renderEdge(taskNodeName, attr.name, file)
-
-    # This for loop is a workaround until DM-29658 at which time metadata
-    # connections should start working with the above code
-    for matchLabel, dsTypeName in metadataNodesToLink:
-        # only render an edge to metadata if the label is part of the current
-        # graph
-        if (result := labelToTaskName.get(matchLabel)) is not None:
-            _renderEdge(result, dsTypeName, file)
+        pg = pipeline.to_graph(visualization_only=True)
+    else:
+        pg = PipelineGraph()
+        for task_def in pipeline:
+            pg.add_task(
+                task_def.label,
+                task_class=task_def.taskClass,
+                config=task_def.config,
+                connections=task_def.connections,
+            )
+        pg.resolve(visualization_only=True)
+    visualization.show_dot(pg, stream=file, dataset_types=True)
 
-    print("}", file=file)
     if close:
         file.close()
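
The rewritten `pipeline2dot` above delegates all rendering to `pipeline_graph.visualization.show_dot` instead of emitting DOT nodes and edges itself. A minimal sketch of the unchanged public entry point, assuming a pipeline definition file (file names are illustrative):

```python
from lsst.pipe.base import Pipeline
from lsst.pipe.base.dot_tools import pipeline2dot

# Hypothetical pipeline definition; any valid pipeline YAML works the same way.
pipeline = Pipeline.fromFile("my_pipeline.yaml")

# `file` may be a path or an open file object; the resulting DOT text can be
# rendered with GraphViz, e.g. `dot -Tpng pipeline.dot -o pipeline.png`.
pipeline2dot(pipeline, "pipeline.dot")
```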
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/graph/graph.py
RENAMED
@@ -191,10 +191,10 @@ class QuantumGraph:
         """
         # Save packages to metadata
         self._metadata = dict(metadata) if metadata is not None else {}
-        self._metadata
-        self._metadata
-        self._metadata
-        self._metadata
+        self._metadata.setdefault("packages", Packages.fromSystem())
+        self._metadata.setdefault("user", getpass.getuser())
+        self._metadata.setdefault("time", f"{datetime.datetime.now()}")
+        self._metadata.setdefault("full_command", " ".join(sys.argv))
 
         self._buildId = _buildId if _buildId is not None else BuildId(f"{time.time()}-{os.getpid()}")
         # Data structure used to identify relations between
lsst_pipe_base-29.2025.3500/python/lsst/pipe/base/mermaid_tools.py
ADDED
@@ -0,0 +1,213 @@
+# This file is part of pipe_base.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (http://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This software is dual licensed under the GNU General Public License and also
+# under a 3-clause BSD license. Recipients may choose which of these licenses
+# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
+# respectively. If you choose the GPL option then the following text applies
+# (but note that there is still no warranty even if you opt for BSD instead):
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Module defining few methods to generate Mermaid charts from pipelines or
+quantum graphs.
+"""
+
+from __future__ import annotations
+
+__all__ = ["graph2mermaid", "pipeline2mermaid"]
+
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, Literal
+
+from .pipeline import Pipeline
+
+if TYPE_CHECKING:
+    from lsst.daf.butler import DatasetRef
+    from lsst.pipe.base import QuantumGraph, TaskDef
+
+
+def _datasetRefId(dsRef: DatasetRef) -> str:
+    """Make a unique identifier string for a dataset ref based on its name and
+    dataId.
+    """
+    dsIdParts = [dsRef.datasetType.name]
+    dsIdParts.extend(f"{key}_{dsRef.dataId[key]}" for key in sorted(dsRef.dataId.required.keys()))
+    return "_".join(dsIdParts)
+
+
+def _makeDatasetNode(dsRef: DatasetRef, allDatasetRefs: dict[str, str], file: Any) -> str:
+    """Create a Mermaid node for a dataset if it doesn't exist, and return its
+    node ID.
+    """
+    dsId = _datasetRefId(dsRef)
+    nodeName = allDatasetRefs.get(dsId)
+    if nodeName is None:
+        nodeName = f"DATASET_{len(allDatasetRefs)}"
+        allDatasetRefs[dsId] = nodeName
+        # Simple label: datasetType name and run.
+        label_lines = [f"**{dsRef.datasetType.name}**", f"run: {dsRef.run}"]
+        # Add dataId info.
+        for k in sorted(dsRef.dataId.required.keys()):
+            label_lines.append(f"{k}={dsRef.dataId[k]}")
+        label = "<br>".join(label_lines)
+        print(f'{nodeName}["{label}"]', file=file)
+    return nodeName
+
+
+def graph2mermaid(qgraph: QuantumGraph, file: Any) -> None:
+    """Convert QuantumGraph into a Mermaid flowchart (top-down).
+
+    This method is mostly for documentation/presentation purposes.
+
+    Parameters
+    ----------
+    qgraph : `~lsst.pipe.base.QuantumGraph`
+        QuantumGraph instance.
+    file : `str` or file object
+        File where Mermaid flowchart is written, can be a file name or file
+        object.
+
+    Raises
+    ------
+    OSError
+        Raised if the output file cannot be opened.
+    ImportError
+        Raised if the task class cannot be imported.
+    """
+    # Open a file if needed.
+    close = False
+    if not hasattr(file, "write"):
+        file = open(file, "w")
+        close = True
+
+    # Start Mermaid code block with flowchart.
+    print("flowchart TD", file=file)
+
+    # To avoid duplicating dataset nodes, we track them.
+    allDatasetRefs: dict[str, str] = {}
+
+    # Process each task/quantum.
+    for taskId, taskDef in enumerate(qgraph.taskGraph):
+        quanta = qgraph.getNodesForTask(taskDef)
+        for qId, quantumNode in enumerate(quanta):
+            # Create quantum node.
+            taskNodeName = f"TASK_{taskId}_{qId}"
+            taskLabelLines = [f"**{taskDef.label}**", f"Node ID: {quantumNode.nodeId}"]
+            dataId = quantumNode.quantum.dataId
+            if dataId is not None:
+                for k in sorted(dataId.required.keys()):
+                    taskLabelLines.append(f"{k}={dataId[k]}")
+            else:
+                raise ValueError("Quantum DataId cannot be None")
+            taskLabel = "<br>".join(taskLabelLines)
+            print(f'{taskNodeName}["{taskLabel}"]', file=file)
+
+            # Quantum inputs: datasets --> tasks
+            for dsRefs in quantumNode.quantum.inputs.values():
+                for dsRef in dsRefs:
+                    dsNode = _makeDatasetNode(dsRef, allDatasetRefs, file)
+                    print(f"{dsNode} --> {taskNodeName}", file=file)
+
+            # Quantum outputs: tasks --> datasets
+            for dsRefs in quantumNode.quantum.outputs.values():
+                for dsRef in dsRefs:
+                    dsNode = _makeDatasetNode(dsRef, allDatasetRefs, file)
+                    print(f"{taskNodeName} --> {dsNode}", file=file)
+
+    if close:
+        file.close()
+
+
+def pipeline2mermaid(
+    pipeline: Pipeline | Iterable[TaskDef],
+    file: Any,
+    show_dimensions: bool = True,
+    expand_dimensions: bool = False,
+    show_storage: bool = True,
+) -> None:
+    """Convert a Pipeline into a Mermaid flowchart diagram.
+
+    This function produces a Mermaid flowchart, representing tasks and their
+    inputs/outputs as dataset nodes. It uses a top-down layout.
+
+    This method is mostly for documentation/presentation purposes.
+
+    Parameters
+    ----------
+    pipeline : Pipeline or Iterable[TaskDef]
+        The pipeline or collection of tasks to represent.
+    file : str or file-like
+        The output file or file-like object into which the Mermaid code is
+        written.
+    show_dimensions : bool, optional
+        If True, display dimension information for tasks and datasets.
+        Default is True.
+    expand_dimensions : bool, optional
+        If True, expand dimension names to include all components. Default is
+        False.
+    show_storage : bool, optional
+        If True, display storage class information for datasets. Default is
+        True.
+
+    Raises
+    ------
+    OSError
+        Raised if the output file cannot be opened.
+    ImportError
+        Raised if the task class cannot be imported.
+    """
+    from .pipeline_graph import PipelineGraph, visualization
+
+    # Ensure that pipeline is iterable of task definitions.
+    if isinstance(pipeline, Pipeline):
+        pipeline = pipeline.to_graph()._iter_task_defs()
+
+    # Open file if needed.
+    close = False
+    if not hasattr(file, "write"):
+        file = open(file, "w")
+        close = True
+
+    if isinstance(pipeline, Pipeline):
+        pg = pipeline.to_graph(visualization_only=True)
+    else:
+        pg = PipelineGraph()
+        for task_def in pipeline:
+            pg.add_task(
+                task_def.label,
+                task_class=task_def.taskClass,
+                config=task_def.config,
+                connections=task_def.connections,
+            )
+        pg.resolve(visualization_only=True)
+
+    dimensions: Literal["full", "concise"] | None = None
+    if show_dimensions:
+        if expand_dimensions:
+            dimensions = "full"
+        else:
+            dimensions = "concise"
+
+    visualization.show_mermaid(
+        pg, stream=file, dataset_types=True, dimensions=dimensions, storage_classes=show_storage
+    )
+
+    if close:
+        file.close()
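
The new `mermaid_tools` module mirrors `dot_tools` but emits Mermaid flowchart text by delegating to `pipeline_graph.visualization.show_mermaid`. A short usage sketch based on the signature above (file names are illustrative); the resulting text can be pasted into any Mermaid renderer:

```python
from lsst.pipe.base import Pipeline
from lsst.pipe.base.mermaid_tools import pipeline2mermaid

pipeline = Pipeline.fromFile("my_pipeline.yaml")  # hypothetical pipeline file

# Writes a top-down flowchart; dimensions are rendered concisely unless
# expand_dimensions=True, and storage classes are shown unless show_storage=False.
pipeline2mermaid(pipeline, "pipeline.mmd", show_dimensions=True, show_storage=True)
```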
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/mp_graph_executor.py
RENAMED
@@ -159,7 +159,7 @@ class _Job:
         quantumExecutor_pickle: bytes,
         task_node_pickle: bytes,
         quantum_pickle: bytes,
-        quantum_id: uuid.UUID
+        quantum_id: uuid.UUID,
         logConfigState: list,
         snd_conn: multiprocessing.connection.Connection,
         fail_fast: bool,
@@ -174,6 +174,8 @@ class _Job:
             Task definition structure, pickled.
         quantum_pickle : `bytes`
             Quantum for this task execution in pickled form.
+        quantum_id : `uuid.UUID`
+            Unique ID for the quantum.
         logConfigState : `list`
             Logging state from parent process.
         snd_conn : `multiprocessing.Connection`
@@ -205,6 +207,7 @@ class _Job:
             _, report = quantumExecutor.execute(task_node, quantum, quantum_id=quantum_id)
         except RepeatableQuantumError as exc:
             report = QuantumReport.from_exception(
+                quantumId=quantum_id,
                 exception=exc,
                 dataId=quantum.dataId,
                 taskLabel=task_node.label,
@@ -220,6 +223,7 @@ class _Job:
             _LOG.fatal("Invalid quantum error for %s (%s): %s", task_node.label, quantum.dataId)
             _LOG.fatal(exc, exc_info=True)
             report = QuantumReport.from_exception(
+                quantumId=quantum_id,
                 exception=exc,
                 dataId=quantum.dataId,
                 taskLabel=task_node.label,
@@ -229,6 +233,7 @@ class _Job:
         except Exception as exc:
             _LOG.debug("exception from task %s dataId %s: %s", task_node.label, quantum.dataId, exc)
             report = QuantumReport.from_exception(
+                quantumId=quantum_id,
                 exception=exc,
                 dataId=quantum.dataId,
                 taskLabel=task_node.label,
@@ -282,6 +287,7 @@ class _Job:
             exitcode = self.process.exitcode if self.process.exitcode is not None else -1
             assert self.qnode.quantum.dataId is not None, "Quantum DataId cannot be None"
             report = QuantumReport.from_exit_code(
+                quantumId=self.qnode.nodeId,
                 exitCode=exitcode,
                 dataId=self.qnode.quantum.dataId,
                 taskLabel=self.qnode.task_node.label,
@@ -539,6 +545,7 @@ class MPGraphExecutor(QuantumGraphExecutor):
                     )
                     failedNodes.add(qnode)
                     failed_quantum_report = QuantumReport(
+                        quantumId=qnode.nodeId,
                         status=ExecutionStatus.SKIPPED,
                         dataId=qnode.quantum.dataId,
                         taskLabel=task_node.label,
@@ -576,6 +583,7 @@ class MPGraphExecutor(QuantumGraphExecutor):
                     raise
                 except Exception as exc:
                     quantum_report = QuantumReport.from_exception(
+                        quantumId=qnode.nodeId,
                         exception=exc,
                         dataId=qnode.quantum.dataId,
                         taskLabel=task_node.label,
@@ -722,6 +730,7 @@ class MPGraphExecutor(QuantumGraphExecutor):
                 assert job.qnode.quantum.dataId is not None, "Quantum DataId cannot be None"
                 if jobInputNodes & jobs.failedNodes:
                     quantum_report = QuantumReport(
+                        quantumId=job.qnode.nodeId,
                         status=ExecutionStatus.SKIPPED,
                         dataId=job.qnode.quantum.dataId,
                         taskLabel=job.qnode.task_node.label,
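
The executor changes above thread the quantum's unique ID into every `QuantumReport` it creates (the corresponding field lands in `quantum_reports.py`, +16 lines in the file list). A hedged sketch of the new keyword at a call site, using only the argument names visible in the hunks; the data ID value is illustrative:

```python
import uuid

from lsst.pipe.base.quantum_reports import ExecutionStatus, QuantumReport

quantum_id = uuid.uuid4()  # in the executor this is the quantum graph node ID

report = QuantumReport(
    quantumId=quantum_id,  # new in this release
    status=ExecutionStatus.SKIPPED,
    dataId={"instrument": "TestCam", "visit": 1},  # illustrative data ID
    taskLabel="isr",
)
```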
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_edges.py
RENAMED
@@ -258,6 +258,7 @@ class Edge(ABC):
         in exported networkx graphs.
         """
         return {
+            "connection_name": self.connection_name,
             "parent_dataset_type_name": self.parent_dataset_type_name,
             "storage_class_name": self.storage_class_name,
             "is_init": bool,
@@ -606,7 +607,18 @@ class ReadEdge(Edge):
                     "type is registered."
                 )
             else:
-                all_current_components = current.storageClass.allComponents()
+                try:
+                    all_current_components = current.storageClass.allComponents()
+                except (KeyError, ImportError):
+                    if visualization_only:
+                        current = DatasetType(
+                            self.parent_dataset_type_name,
+                            dimensions,
+                            storageClass="<UNKNOWN>",
+                            isCalibration=self.is_calibration,
+                        )
+                        return current, is_initial_query_constraint, is_prerequisite
+                    raise
                 if self.component not in all_current_components:
                     raise IncompatibleDatasetTypeError(
                         f"Dataset type {self.parent_dataset_type_name!r} has storage class "
@@ -618,8 +630,10 @@ class ReadEdge(Edge):
                 # for the component the task wants, because we don't have the
                 # parent storage class.
                 current_component = all_current_components[self.component]
+
                 if (
-                    current_component.name != self.storage_class_name
+                    not visualization_only
+                    and current_component.name != self.storage_class_name
                     and not StorageClassFactory()
                     .getStorageClass(self.storage_class_name)
                     .can_convert(current_component)
@@ -652,7 +666,7 @@ class ReadEdge(Edge):
                 "compatible but different, registering the dataset type in the data repository "
                 "in advance will avoid this error."
             )
-        elif not dataset_type.is_compatible_with(current):
+        elif not visualization_only and not dataset_type.is_compatible_with(current):
             raise IncompatibleDatasetTypeError(
                 f"Incompatible definition for input dataset type {self.parent_dataset_type_name!r}; "
                 f"task {self.task_label!r} has {dataset_type}, but the definition "
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_nodes.py
RENAMED
@@ -27,12 +27,39 @@
 from __future__ import annotations
 
 __all__ = (
+    "NodeBipartite",
     "NodeKey",
     "NodeType",
 )
 
 import enum
-
+import sys
+from typing import Any, NamedTuple
+
+
+class NodeBipartite(enum.IntEnum):
+    """Constants for the 'bipartite' key in NetworkX graph views."""
+
+    DATASET_OR_TYPE = 0
+    """Value for nodes that represent dataset types (in pipeline graphs)
+    or datasets (in quantum graphs).
+    """
+
+    TASK_OR_QUANTUM = 1
+    """Value for nodes that represent tasks (in pipeline graphs) or quanta
+    (in quantum graphs).
+    """
+
+    if "sphinx" in sys.modules:
+
+        @classmethod
+        def from_bytes(cls, *args: Any, **kwargs: Any) -> Any:  # pragma: no cover
+            """See `IntEnum.from_bytes`."""
+            return super().from_bytes(*args, **kwargs)
+
+        def to_bytes(self, *args: Any, **kwargs: Any) -> Any:  # pragma: no cover
+            """See `IntEnum.to_bytes`."""
+            return super().to_bytes(self, *args, **kwargs)
 
 
 class NodeType(enum.Enum):
@@ -43,13 +70,13 @@ class NodeType(enum.Enum):
     TASK = 2
 
     @property
-    def bipartite(self) ->
+    def bipartite(self) -> NodeBipartite:
         """The integer used as the "bipartite" key in networkx exports of a
         `PipelineGraph`.
 
         This key is used by the `networkx.algorithms.bipartite` module.
         """
-        return
+        return NodeBipartite(self is not NodeType.DATASET_TYPE)
 
     def __lt__(self, other: NodeType) -> bool:
         # We define __lt__ only to be able to provide deterministic tiebreaking
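
The new `NodeBipartite` enum gives symbolic names to the `bipartite` node attribute used by `networkx.algorithms.bipartite`. A sketch of filtering a bipartite NetworkX export by that key; it assumes `NodeBipartite` is re-exported from the `pipeline_graph` package and that `make_bipartite_xgraph` (the pre-existing export method, not part of this diff) is used to build the graph:

```python
from lsst.pipe.base.pipeline_graph import NodeBipartite, PipelineGraph


def split_bipartite_nodes(pipeline_graph: PipelineGraph) -> tuple[list, list]:
    """Separate task nodes from dataset-type nodes via the 'bipartite' key."""
    xgraph = pipeline_graph.make_bipartite_xgraph()
    tasks = [n for n, d in xgraph.nodes(data=True) if d["bipartite"] == NodeBipartite.TASK_OR_QUANTUM]
    dataset_types = [n for n, d in xgraph.nodes(data=True) if d["bipartite"] == NodeBipartite.DATASET_OR_TYPE]
    return tasks, dataset_types
```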
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/_tasks.py
RENAMED
@@ -806,7 +806,9 @@ class TaskNode:
         edge : `ReadEdge`
             Input edge.
         """
-
+        if (edge := self.inputs.get(connection_name)) is not None:
+            return edge
+        return self.prerequisite_inputs[connection_name]
 
     def get_output_edge(self, connection_name: str) -> WriteEdge:
         """Look up an output edge by connection name.
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/pipeline_graph/visualization/_dot.py
RENAMED
@@ -57,6 +57,7 @@ _OVERFLOW_MAX_LINES = 20
 def show_dot(
     pipeline_graph: PipelineGraph,
     stream: TextIO = sys.stdout,
+    label_edge_connections: bool = False,
     **kwargs: Any,
 ) -> None:
     """Write a DOT representation of the pipeline graph to a stream.
@@ -67,6 +68,8 @@ def show_dot(
         Pipeline graph to show.
     stream : `TextIO`, optional
         Stream to write the DOT representation to.
+    label_edge_connections : `bool`, optional
+        If `True`, label edges with their connection names.
     **kwargs
         Additional keyword arguments to pass to `parse_display_args`.
     """
@@ -96,12 +99,19 @@ def show_dot(
         formatted_overflow_ids = [f'"{overflow_id}"' for overflow_id in overflow_ids]
         print(f"{{rank=sink; {'; '.join(formatted_overflow_ids)};}}", file=stream)
 
-    for from_node, to_node,
-
-
-
-
-
+    for from_node, to_node, edge_data in xgraph.edges(data=True):
+        edge_kwargs = {}
+        if edge_data.get("is_prerequisite", False):
+            edge_kwargs["style"] = "dashed"
+        if (connection_name := edge_data.get("connection_name", None)) is not None:
+            if (component := edge_data.get("component", None)) is not None:
+                if label_edge_connections:
+                    edge_kwargs["xlabel"] = f"{connection_name} (.{component})"
+                else:
+                    edge_kwargs["xlabel"] = f".{component}"
+            elif label_edge_connections:
+                edge_kwargs["xlabel"] = connection_name
+        _render_edge(from_node.node_id, to_node.node_id, stream, **edge_kwargs)
 
     print("}", file=stream)
 
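
`show_dot` gains a `label_edge_connections` option: each edge can be labelled with its connection name, and component reads keep a `.component` suffix whether or not the full connection name is shown. A hedged sketch, reusing the `to_graph(visualization_only=True)` call that `dot_tools` itself now uses (the pipeline file and connection names are illustrative):

```python
import sys

from lsst.pipe.base import Pipeline
from lsst.pipe.base.pipeline_graph import visualization

# Build a graph resolved only as far as visualization requires.
pg = Pipeline.fromFile("my_pipeline.yaml").to_graph(visualization_only=True)

# With label_edge_connections=True every edge gets an xlabel such as
# "inputExposure (.wcs)" for a component read, or the bare connection name.
visualization.show_dot(pg, stream=sys.stdout, dataset_types=True, label_edge_connections=True)
```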
{lsst_pipe_base-29.2025.3400 → lsst_pipe_base-29.2025.3500}/python/lsst/pipe/base/quantum_graph_builder.py
RENAMED
@@ -364,6 +364,9 @@ class QuantumGraphBuilder(ABC):
             # with the quanta because no quantum knows if its the only
             # consumer).
             full_skeleton.remove_orphan_datasets()
+            # Add any dimension records not handled by the subclass, and
+            # aggregate any that were added directly to data IDs.
+            full_skeleton.attach_dimension_records(self.butler, self._pipeline_graph.get_all_dimensions())
             if attach_datastore_records:
                 self._attach_datastore_records(full_skeleton)
             # TODO initialize most metadata here instead of in ctrl_mpexec.
@@ -939,7 +942,7 @@ class QuantumGraphBuilder(ABC):
         inputs: dict[DatasetKey | PrerequisiteDatasetKey, DatasetRef] = {}
         outputs_for_skip: dict[DatasetKey, DatasetRef] = {}
         outputs_in_the_way: dict[DatasetKey, DatasetRef] = {}
-        _, dataset_type_nodes = self._pipeline_graph.group_by_dimensions()
+        _, dataset_type_nodes = self._pipeline_graph.group_by_dimensions().get(self.universe.empty, ({}, {}))
         dataset_types = [node.dataset_type for node in dataset_type_nodes.values()]
        dataset_types.extend(self._global_init_output_types.values())
         for dataset_type in dataset_types: