lsst-pipe-base 29.2025.1100__tar.gz → 29.2025.1300__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. {lsst_pipe_base-29.2025.1100/python/lsst_pipe_base.egg-info → lsst_pipe_base-29.2025.1300}/PKG-INFO +4 -3
  2. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/CHANGES.rst +53 -0
  3. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/pyproject.toml +1 -1
  4. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/all_dimensions_quantum_graph_builder.py +4 -0
  5. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/connections.py +179 -2
  6. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_mermaid.py +10 -4
  7. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/quantum_graph_builder.py +91 -60
  8. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/quantum_graph_skeleton.py +20 -0
  9. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/quantum_provenance_graph.py +790 -421
  10. lsst_pipe_base-29.2025.1300/python/lsst/pipe/base/version.py +2 -0
  11. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300/python/lsst_pipe_base.egg-info}/PKG-INFO +4 -3
  12. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/SOURCES.txt +2 -1
  13. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/requires.txt +1 -1
  14. lsst_pipe_base-29.2025.1300/tests/test_adjust_all_quanta.py +145 -0
  15. lsst_pipe_base-29.2025.1300/tests/test_mermaid.py +344 -0
  16. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_quantum_provenance_graph.py +2 -4
  17. lsst_pipe_base-29.2025.1100/python/lsst/pipe/base/version.py +0 -2
  18. lsst_pipe_base-29.2025.1100/tests/test_mermaid_tools.py +0 -208
  19. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/COPYRIGHT +0 -0
  20. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/LICENSE +0 -0
  21. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/MANIFEST.in +0 -0
  22. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/README.md +0 -0
  23. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/bsd_license.txt +0 -0
  24. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/creating-a-pipeline.rst +0 -0
  25. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/creating-a-pipelinetask.rst +0 -0
  26. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/creating-a-task.rst +0 -0
  27. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/index.rst +0 -0
  28. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/task-framework-overview.rst +0 -0
  29. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/task-retargeting-howto.rst +0 -0
  30. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/testing-a-pipeline-task.rst +0 -0
  31. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/testing-pipelines-with-mocks.rst +0 -0
  32. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/doc/lsst.pipe.base/working-with-pipeline-graphs.rst +0 -0
  33. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/gpl-v3.0.txt +0 -0
  34. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/__init__.py +0 -0
  35. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/__init__.py +0 -0
  36. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/__init__.py +0 -0
  37. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_datasetQueryConstraints.py +0 -0
  38. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_dataset_handle.py +0 -0
  39. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_instrument.py +0 -0
  40. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_observation_dimension_packer.py +0 -0
  41. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_quantumContext.py +0 -0
  42. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_status.py +0 -0
  43. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/_task_metadata.py +0 -0
  44. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/automatic_connection_constants.py +0 -0
  45. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/caching_limited_butler.py +0 -0
  46. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/__init__.py +0 -0
  47. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/_get_cli_subcommands.py +0 -0
  48. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/cmd/__init__.py +0 -0
  49. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/cmd/commands.py +0 -0
  50. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/opt/__init__.py +0 -0
  51. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/opt/arguments.py +0 -0
  52. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/cli/opt/options.py +0 -0
  53. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/config.py +0 -0
  54. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/configOverrides.py +0 -0
  55. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/connectionTypes.py +0 -0
  56. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/dot_tools.py +0 -0
  57. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/executionButlerBuilder.py +0 -0
  58. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/execution_reports.py +0 -0
  59. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/formatters/__init__.py +0 -0
  60. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/formatters/pexConfig.py +0 -0
  61. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/__init__.py +0 -0
  62. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/_implDetails.py +0 -0
  63. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/_loadHelpers.py +0 -0
  64. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/_versionDeserializers.py +0 -0
  65. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/graph.py +0 -0
  66. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/graphSummary.py +0 -0
  67. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/graph/quantumNode.py +0 -0
  68. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/mermaid_tools.py +0 -0
  69. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline.py +0 -0
  70. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipelineIR.py +0 -0
  71. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipelineTask.py +0 -0
  72. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/__init__.py +0 -0
  73. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/__main__.py +0 -0
  74. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_dataset_types.py +0 -0
  75. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_edges.py +0 -0
  76. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_exceptions.py +0 -0
  77. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_mapping_views.py +0 -0
  78. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_nodes.py +0 -0
  79. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py +0 -0
  80. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_task_subsets.py +0 -0
  81. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/_tasks.py +0 -0
  82. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/io.py +0 -0
  83. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/__init__.py +0 -0
  84. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_dot.py +0 -0
  85. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_formatting.py +0 -0
  86. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_layout.py +0 -0
  87. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_merge.py +0 -0
  88. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_options.py +0 -0
  89. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_printer.py +0 -0
  90. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/pipeline_graph/visualization/_show.py +0 -0
  91. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/prerequisite_helpers.py +0 -0
  92. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/py.typed +0 -0
  93. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/script/__init__.py +0 -0
  94. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/script/register_instrument.py +0 -0
  95. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/script/retrieve_artifacts_for_quanta.py +0 -0
  96. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/script/transfer_from_graph.py +0 -0
  97. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/script/zip_from_graph.py +0 -0
  98. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/struct.py +0 -0
  99. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/task.py +0 -0
  100. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/taskFactory.py +0 -0
  101. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/testUtils.py +0 -0
  102. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/__init__.py +0 -0
  103. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/mocks/__init__.py +0 -0
  104. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/mocks/_data_id_match.py +0 -0
  105. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/mocks/_pipeline_task.py +0 -0
  106. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/mocks/_storage_class.py +0 -0
  107. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/no_dimensions.py +0 -0
  108. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/pipelineStepTester.py +0 -0
  109. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/simpleQGraph.py +0 -0
  110. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/tests/util.py +0 -0
  111. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst/pipe/base/utils.py +0 -0
  112. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/dependency_links.txt +0 -0
  113. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/entry_points.txt +0 -0
  114. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/top_level.txt +0 -0
  115. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/python/lsst_pipe_base.egg-info/zip-safe +0 -0
  116. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/setup.cfg +0 -0
  117. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_caching_limited_butler.py +0 -0
  118. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_cliCmdRegisterInstrument.py +0 -0
  119. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_configOverrides.py +0 -0
  120. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_config_formatter.py +0 -0
  121. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_connections.py +0 -0
  122. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_dataid_match.py +0 -0
  123. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_dataset_handle.py +0 -0
  124. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_dot_tools.py +0 -0
  125. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_dynamic_connections.py +0 -0
  126. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_executionButler.py +0 -0
  127. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_execution_reports.py +0 -0
  128. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_graphBuilder.py +0 -0
  129. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_init_output_run.py +0 -0
  130. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_instrument.py +0 -0
  131. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_pipeline.py +0 -0
  132. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_pipelineIR.py +0 -0
  133. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_pipelineLoadSubset.py +0 -0
  134. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_pipelineTask.py +0 -0
  135. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_pipeline_graph.py +0 -0
  136. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_quantumGraph.py +0 -0
  137. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_quantum_success_caveats.py +0 -0
  138. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_struct.py +0 -0
  139. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_task.py +0 -0
  140. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_taskmetadata.py +0 -0
  141. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_testUtils.py +0 -0
  142. {lsst_pipe_base-29.2025.1100 → lsst_pipe_base-29.2025.1300}/tests/test_utils.py +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: lsst-pipe-base
3
- Version: 29.2025.1100
3
+ Version: 29.2025.1300
4
4
  Summary: Pipeline infrastructure for the Rubin Science Pipelines.
5
5
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
6
6
  License: BSD 3-Clause License
@@ -33,7 +33,8 @@ Requires-Dist: frozendict
33
33
  Provides-Extra: test
34
34
  Requires-Dist: pytest>=3.2; extra == "test"
35
35
  Provides-Extra: mermaid
36
- Requires-Dist: mermaid; extra == "mermaid"
36
+ Requires-Dist: mermaid-py>=0.7.1; extra == "mermaid"
37
+ Dynamic: license-file
37
38
 
38
39
  # lsst-pipe-base
39
40
 
@@ -1,3 +1,56 @@
1
+ lsst-pipe-base v29.0.0 (2025-03-25)
2
+ ===================================
3
+
4
+ New Features
5
+ ------------
6
+
7
+ - * Modified ``QuantumContext`` such that it now tracks all datasets that are retrieved and records them in ``dataset_provenance``.
8
+ This provenance is then passed to Butler on ``put()``.
9
+ * Added ``QuantumContext.add_additional_provenance()`` to allow a pipeline task author to attach additional provenance information to be recorded and associated with a particular input dataset. (`DM-35396 <https://rubinobs.atlassian.net/browse/DM-35396>`_)
10
+ - Finished support for "steps" in pipelines and pipeline graphs.
11
+
12
+ Steps are an ordered sequence of special pipeline subsets that provide extra information and validation about how pipelines should actually be run. (`DM-46023 <https://rubinobs.atlassian.net/browse/DM-46023>`_)
13
+ - Added Mermaid for pipeline and quantum graph visualization with tools for documentation/presentation purposes. (`DM-46503 <https://rubinobs.atlassian.net/browse/DM-46503>`_)
14
+ - Added ``zip-from-graph`` subcommand for ``butler`` command-line to enable output artifacts associated with a graph to be combined into a Zip archive. (`DM-46776 <https://rubinobs.atlassian.net/browse/DM-46776>`_)
15
+ - Added ``UpstreamFailureNoWorkFound``, an exception that is handled the same way as ``NoWorkFound`` that indicates that the root problem is probably in an upstream ``PipelineTask``. (`DM-46948 <https://rubinobs.atlassian.net/browse/DM-46948>`_)
16
+ - Plugin discovery is now automated through Python entry points when using ``pip``.
17
+ It is now an error if the ``DAF_BUTLER_PLUGINS`` environment variable is set for this package. (`DM-47143 <https://rubinobs.atlassian.net/browse/DM-47143>`_)
18
+ - * Added new command-line ``butler retrieve-artifacts-for-quanta`` which can be used to retrieve input or output datasets associated with a graph or specific quanta.
19
+ * Added new ``QuantumGraph.get_refs()`` method to retrieve dataset refs from a graph. (`DM-47328 <https://rubinobs.atlassian.net/browse/DM-47328>`_)
20
+ - Add the ``QuantumSuccessCaveats`` flag enum, which can be used to report on ``NoWorkFound`` and other qualified successes in execution.
21
+
22
+ This adds the flag enum itself and functionality in ``QuantumProvenanceGraph`` (which backs ``pipetask report --force-v2``) to include it in reports.
23
+ It relies on additional changes in ``lsst.ctrl.mpexec.SingleQuantumExecutor`` to write the caveat flags into task metadata. (`DM-47730 <https://rubinobs.atlassian.net/browse/DM-47730>`_)
24
+ - ``QuantumProvenanceGraph`` and ``pipetask report --force-v2`` can now report on exceptions raised and then ignored via the ``--no-raise-on-partial-outputs`` option.
25
+
26
+ Exceptions that lead to task failures are not yet tracked, because we do not write task metadata for failures and hence have nowhere to put the information. (`DM-48536 <https://rubinobs.atlassian.net/browse/DM-48536>`_)
27
+ - Swapped to the new butler query system in ``QuantumGraph`` generation.
28
+
29
+ This change should be mostly transparent to users, aside from small changes in speed (typically faster, but not always). (`DM-45896 <https://rubinobs.atlassian.net/browse/DM-45896>`_)
30
+
31
+ Bug Fixes
32
+ ---------
33
+
34
+ - Fixed a bug in QG generation that could lead to an error when using ``--skip-existing-in`` on a collection that had a successful quantum that did not write all of its predicted outputs. (`DM-49266 <https://rubinobs.atlassian.net/browse/DM-49266>`_)
35
+
36
+
37
+ Performance Enhancement
38
+ -----------------------
39
+
40
+ - ``QuantumGraph`` generation has been partially rewritten to support building larger graphs and build all graphs faster.
41
+
42
+ With this change, ``QuantumGraph`` generation no longer uses a long-lived temporary table in the butler database for followup queries, and instead uploads a set of data IDs to the database for each query.
43
+ In addition, the algorithm for adding nodes and edges from the data ID query results has been reworked to de-duplicate irrelevant dimensions earlier, making it much faster. (`DM-49296 <https://rubinobs.atlassian.net/browse/DM-49296>`_)
44
+
45
+
46
+ Other Changes and Additions
47
+ ---------------------------
48
+
49
+ - Modified ``TaskMetadata`` such that it can now be assigned an empty list.
50
+ This list can be retrieved with ``getArray`` but if an attempt is made to get a scalar `KeyError` will be raised. (`DM-35396 <https://rubinobs.atlassian.net/browse/DM-35396>`_)
51
+ - ``QuantumGraph`` generation will no longer fail when the ``--dataset-query-constraint`` argument includes a dataset type that is not relevant for one or more pipeline subgraphs. (`DM-47505 <https://rubinobs.atlassian.net/browse/DM-47505>`_)
52
+
53
+
1
54
  lsst-pipe-base v28.0.0 (2024-11-21)
2
55
  ===================================
3
56
 
@@ -45,7 +45,7 @@ pipe_base = "lsst.pipe.base.cli:get_cli_subcommands"
45
45
 
46
46
  [project.optional-dependencies]
47
47
  test = ["pytest >= 3.2"]
48
- mermaid = ["mermaid"]
48
+ mermaid = ["mermaid-py >= 0.7.1"]
49
49
 
50
50
  [tool.setuptools.packages.find]
51
51
  where = ["python"]
@@ -213,7 +213,11 @@ class AllDimensionsQuantumGraphBuilder(QuantumGraphBuilder):
213
213
  lines.extend(query.explain_no_results())
214
214
  finally:
215
215
  lines.append("To reproduce this query for debugging purposes, run:")
216
+ lines.append("")
216
217
  lines.extend(query_cmd)
218
+ lines.append(" print(query.any())")
219
+ lines.append("")
220
+ lines.append("And then try removing various constraints until query.any() returns True.")
217
221
  # If an exception was raised, write a partial.
218
222
  self.log.error("\n".join(lines))
219
223
  return
@@ -35,6 +35,7 @@ __all__ = [
35
35
  "InputQuantizedConnection",
36
36
  "OutputQuantizedConnection",
37
37
  "PipelineTaskConnections",
38
+ "QuantaAdjuster",
38
39
  "QuantizedConnection",
39
40
  "ScalarError",
40
41
  "ScalarError",
@@ -45,8 +46,8 @@ import dataclasses
45
46
  import itertools
46
47
  import string
47
48
  import warnings
48
- from collections import UserDict
49
- from collections.abc import Collection, Generator, Iterable, Mapping, Sequence, Set
49
+ from collections import UserDict, defaultdict
50
+ from collections.abc import Collection, Generator, Iterable, Iterator, Mapping, Sequence, Set
50
51
  from dataclasses import dataclass
51
52
  from types import MappingProxyType, SimpleNamespace
52
53
  from typing import TYPE_CHECKING, Any
@@ -58,6 +59,8 @@ from .connectionTypes import BaseConnection, BaseInput, Output, PrerequisiteInpu
58
59
 
59
60
  if TYPE_CHECKING:
60
61
  from .config import PipelineTaskConfig
62
+ from .pipeline_graph import PipelineGraph, TaskNode
63
+ from .quantum_graph_skeleton import QuantumGraphSkeleton
61
64
 
62
65
 
63
66
  class ScalarError(TypeError):
@@ -999,6 +1002,25 @@ class PipelineTaskConnections(metaclass=PipelineTaskConnectionsMetaclass):
999
1002
  """
1000
1003
  return ()
1001
1004
 
1005
+ def adjust_all_quanta(self, adjuster: QuantaAdjuster) -> None:
1006
+ """Customize the set of quanta predicted for this task during quantum
1007
+ graph generation.
1008
+
1009
+ Parameters
1010
+ ----------
1011
+ adjuster : `QuantaAdjuster`
1012
+ A helper object that implementations can use to modify the
1013
+ under-construction quantum graph.
1014
+
1015
+ Notes
1016
+ -----
1017
+ This hook is called before `adjustQuantum`, which is where built-in
1018
+ checks for `NoWorkFound` cases and missing prerequisites are handled.
1019
+ This means that the set of preliminary quanta seen by this method could
1020
+ include some that would normally be dropped later.
1021
+ """
1022
+ pass
1023
+
1002
1024
 
1003
1025
  def iterConnections(
1004
1026
  connections: PipelineTaskConnections, connectionType: str | Iterable[str]
@@ -1130,3 +1152,158 @@ class AdjustQuantumHelper:
1130
1152
  self.outputs_adjusted = True
1131
1153
  else:
1132
1154
  self.outputs_adjusted = False
1155
+
1156
+
1157
+ class QuantaAdjuster:
1158
+ """A helper class for the `PipelineTaskConnections.adjust_all_quanta` hook.
1159
+
1160
+ Parameters
1161
+ ----------
1162
+ task_label : `str`
1163
+ Label of the task whose quanta are being adjusted.
1164
+ pipeline_graph : `pipeline_graph.PipelineGraph`
1165
+ Pipeline graph the quantum graph is being built from.
1166
+ skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
1167
+ Under-construction quantum graph that will be modified in place.
1168
+ """
1169
+
1170
+ def __init__(self, task_label: str, pipeline_graph: PipelineGraph, skeleton: QuantumGraphSkeleton):
1171
+ self._task_node = pipeline_graph.tasks[task_label]
1172
+ self._pipeline_graph = pipeline_graph
1173
+ self._skeleton = skeleton
1174
+ self._n_removed = 0
1175
+
1176
+ @property
1177
+ def task_label(self) -> str:
1178
+ """The label this task has been configured with."""
1179
+ return self._task_node.label
1180
+
1181
+ @property
1182
+ def task_node(self) -> TaskNode:
1183
+ """The node for this task in the pipeline graph."""
1184
+ return self._task_node
1185
+
1186
+ def iter_data_ids(self) -> Iterator[DataCoordinate]:
1187
+ """Iterate over the data IDs of all quanta for this task.
1188
+
1189
+ Returns
1190
+ -------
1191
+ data_ids : `~collections.abc.Iterator` [ \
1192
+ `~lsst.daf.butler.DataCoordinate` ]
1193
+ Data IDs. These are minimal data IDs without dimension records or
1194
+ implied values; use `expand_quantum_data_id` to get a full data ID
1195
+ when needed.
1196
+ """
1197
+ for key in self._skeleton.get_quanta(self._task_node.label):
1198
+ yield DataCoordinate.from_required_values(self._task_node.dimensions, key.data_id_values)
1199
+
1200
+ def remove_quantum(self, data_id: DataCoordinate) -> None:
1201
+ """Remove a quantum from the graph.
1202
+
1203
+ Parameters
1204
+ ----------
1205
+ data_id : `~lsst.daf.butler.DataCoordinate`
1206
+ Data ID of the quantum to remove. All outputs will be removed as
1207
+ well.
1208
+ """
1209
+ from .quantum_graph_skeleton import QuantumKey
1210
+
1211
+ self._skeleton.remove_quantum_node(
1212
+ QuantumKey(self._task_node.label, data_id.required_values), remove_outputs=True
1213
+ )
1214
+ self._n_removed += 1
1215
+
1216
+ def get_inputs(self, quantum_data_id: DataCoordinate) -> dict[str, list[DataCoordinate]]:
1217
+ """Return the data IDs of all regular inputs to a quantum.
1218
+
1219
+ Parameters
1220
+ ----------
1221
+ quantum_data_id : `~lsst.daf.butler.DataCoordinate`
1222
+ Data ID of the quantum to get the inputs of.
1223
+
1224
+ Returns
1225
+ -------
1226
+ inputs : `dict` [ `str`, `list` [ `~lsst.daf.butler.DataCoordinate` ] ]
1227
+ Data IDs of inputs, keyed by the connection name (the internal task
1228
+ name, not the dataset type name). This only contains regular
1229
+ inputs, not init-inputs or prerequisite inputs.
1230
+
1231
+ Notes
1232
+ -----
1233
+ If two connections have the same dataset type, the current
1234
+ implementation assumes the set of datasets is the same for the two
1235
+ connections. This limitation may be removed in the future.
1236
+ """
1237
+ from .quantum_graph_skeleton import DatasetKey, QuantumKey
1238
+
1239
+ by_dataset_type_name: defaultdict[str, list[DataCoordinate]] = defaultdict(list)
1240
+ quantum_key = QuantumKey(self._task_node.label, quantum_data_id.required_values)
1241
+ for dataset_key in self._skeleton.iter_inputs_of(quantum_key):
1242
+ if not isinstance(dataset_key, DatasetKey):
1243
+ continue
1244
+ dataset_type_node = self._pipeline_graph.dataset_types[dataset_key.parent_dataset_type_name]
1245
+ by_dataset_type_name[dataset_key.parent_dataset_type_name].append(
1246
+ DataCoordinate.from_required_values(dataset_type_node.dimensions, dataset_key.data_id_values)
1247
+ )
1248
+ return {
1249
+ edge.connection_name: by_dataset_type_name[edge.parent_dataset_type_name]
1250
+ for edge in self._task_node.iter_all_inputs()
1251
+ }
1252
+
1253
+ def add_input(
1254
+ self, quantum_data_id: DataCoordinate, connection_name: str, dataset_data_id: DataCoordinate
1255
+ ) -> None:
1256
+ """Add a new input to a quantum.
1257
+
1258
+ Parameters
1259
+ ----------
1260
+ quantum_data_id : `~lsst.daf.butler.DataCoordinate`
1261
+ Data ID of the quantum to add an input to.
1262
+ connection_name : `str`
1263
+ Name of the connection (the task-internal name, not the butler
1264
+ dataset type name).
1265
+ dataset_data_id : `~lsst.daf.butler.DataCoordinate`
1266
+ Data ID of the input dataset. Must already exist in the graph
1267
+ as an input to a different quantum of this task, and must be a
1268
+ regular input, not a prerequisite input or init-input.
1269
+
1270
+ Notes
1271
+ -----
1272
+ If two connections have the same dataset type, the current
1273
+ implementation assumes the set of datasets is the same for the two
1274
+ connections. This limitation may be removed in the future.
1275
+ """
1276
+ from .quantum_graph_skeleton import DatasetKey, QuantumKey
1277
+
1278
+ quantum_key = QuantumKey(self._task_node.label, quantum_data_id.required_values)
1279
+ read_edge = self._task_node.inputs[connection_name]
1280
+ dataset_key = DatasetKey(read_edge.parent_dataset_type_name, dataset_data_id.required_values)
1281
+ if dataset_key not in self._skeleton:
1282
+ raise LookupError(
1283
+ f"Dataset {read_edge.parent_dataset_type_name}@{dataset_data_id} is not already in the graph."
1284
+ )
1285
+ self._skeleton.add_input_edge(quantum_key, dataset_key)
1286
+
1287
+ def expand_quantum_data_id(self, data_id: DataCoordinate) -> DataCoordinate:
1288
+ """Expand a quantum data ID to include implied values and dimension
1289
+ records.
1290
+
1291
+ Parameters
1292
+ ----------
1293
+ data_id : `~lsst.daf.butler.DataCoordinate`
1294
+ A data ID of a quantum already in the graph.
1295
+
1296
+ Returns
1297
+ -------
1298
+ expanded_data_id : `~lsst.daf.butler.DataCoordinate`
1299
+ The same data ID, with implied values included and dimension
1300
+ records attached.
1301
+ """
1302
+ from .quantum_graph_skeleton import QuantumKey
1303
+
1304
+ return self._skeleton.get_data_id(QuantumKey(self._task_node.label, data_id.required_values))
1305
+
1306
+ @property
1307
+ def n_removed(self) -> int:
1308
+ """The number of quanta that have been removed by this helper."""
1309
+ return self._n_removed
@@ -29,7 +29,6 @@ from __future__ import annotations
29
29
  __all__ = ("show_mermaid",)
30
30
 
31
31
  import html
32
- import importlib.util
33
32
  import os
34
33
  import sys
35
34
  from collections.abc import Mapping
@@ -42,12 +41,14 @@ from ._formatting import NodeKey, format_dimensions, format_task_class
42
41
  from ._options import NodeAttributeOptions
43
42
  from ._show import parse_display_args
44
43
 
45
- MERMAID_AVAILABLE = importlib.util.find_spec("mermaid") is not None
46
-
47
- if MERMAID_AVAILABLE:
44
+ try:
48
45
  from mermaid import Mermaid # type: ignore
49
46
  from mermaid.graph import Graph # type: ignore
50
47
 
48
+ MERMAID_AVAILABLE = True
49
+ except ImportError:
50
+ MERMAID_AVAILABLE = False
51
+
51
52
  # Configuration constants for label formatting and overflow handling.
52
53
  _LABEL_PX_SIZE = 18
53
54
  _LABEL_MAX_LINES_SOFT = 10
@@ -237,6 +238,8 @@ def _render_mermaid_image(
237
238
 
238
239
  Raises
239
240
  ------
241
+ ImportError
242
+ If `mermaid-py` is not installed.
240
243
  ValueError
241
244
  If the requested ``output_format`` is not supported.
242
245
  RuntimeError
@@ -245,6 +248,9 @@ def _render_mermaid_image(
245
248
  if output_format.lower() not in {"svg", "png"}:
246
249
  raise ValueError(f"Unsupported format: {output_format}. Use 'svg' or 'png'.")
247
250
 
251
+ if not MERMAID_AVAILABLE:
252
+ raise ImportError("The `mermaid-py` package is required for rendering images but is not installed.")
253
+
248
254
  # Generate Mermaid graph object.
249
255
  graph = Graph(title="Mermaid Diagram", script=mermaid_source)
250
256
  diagram = Mermaid(graph, width=width, height=height, scale=scale)
@@ -62,7 +62,7 @@ from lsst.utils.timer import timeMethod
62
62
  from . import automatic_connection_constants as acc
63
63
  from ._status import NoWorkFound
64
64
  from ._task_metadata import TaskMetadata
65
- from .connections import AdjustQuantumHelper
65
+ from .connections import AdjustQuantumHelper, QuantaAdjuster
66
66
  from .graph import QuantumGraph
67
67
  from .pipeline_graph import PipelineGraph, TaskNode
68
68
  from .prerequisite_helpers import PrerequisiteInfo, SkyPixBoundsBuilder, TimespanBuilder
@@ -475,7 +475,6 @@ class QuantumGraphBuilder(ABC):
475
475
  # Loop over all quanta for this task, remembering the ones we've
476
476
  # gotten rid of.
477
477
  skipped_quanta = []
478
- no_work_quanta = []
479
478
  for quantum_key in skeleton.get_quanta(task_node.label):
480
479
  if self._skip_quantum_if_metadata_exists(task_node, quantum_key, skeleton):
481
480
  skipped_quanta.append(quantum_key)
@@ -483,17 +482,26 @@ class QuantumGraphBuilder(ABC):
483
482
  quantum_data_id = skeleton[quantum_key]["data_id"]
484
483
  skypix_bounds_builder = task_prerequisite_info.bounds.make_skypix_bounds_builder(quantum_data_id)
485
484
  timespan_builder = task_prerequisite_info.bounds.make_timespan_builder(quantum_data_id)
486
- adjusted_outputs = self._gather_quantum_outputs(
487
- task_node, quantum_key, skeleton, skypix_bounds_builder, timespan_builder
488
- )
489
- adjusted_inputs = self._gather_quantum_inputs(
490
- task_node,
485
+ self._update_quantum_for_adjust(
491
486
  quantum_key,
492
487
  skeleton,
493
488
  task_prerequisite_info,
494
489
  skypix_bounds_builder,
495
490
  timespan_builder,
496
491
  )
492
+ for skipped_quantum in skipped_quanta:
493
+ skeleton.remove_quantum_node(skipped_quantum, remove_outputs=False)
494
+ # Give the task a chance to adjust all quanta together. This
495
+ # operates directly on the skeleton (via the 'adjuster', which
496
+ # is just an interface adapter).
497
+ adjuster = QuantaAdjuster(task_node.label, self._pipeline_graph, skeleton)
498
+ task_node.get_connections().adjust_all_quanta(adjuster)
499
+ # Loop over all quanta again, remembering those we get rid of in other
500
+ # ways.
501
+ no_work_quanta = []
502
+ for quantum_key in skeleton.get_quanta(task_node.label):
503
+ adjusted_outputs = self._adapt_quantum_outputs(task_node, quantum_key, skeleton)
504
+ adjusted_inputs = self._adapt_quantum_inputs(task_node, quantum_key, skeleton)
497
505
  # Give the task's Connections class an opportunity to remove
498
506
  # some inputs, or complain if they are unacceptable. This will
499
507
  # raise if one of the check conditions is not met, which is the
@@ -552,8 +560,6 @@ class QuantumGraphBuilder(ABC):
552
560
  skeleton[quantum_key]["outputs"] = helper.outputs
553
561
  for no_work_quantum in no_work_quanta:
554
562
  skeleton.remove_quantum_node(no_work_quantum, remove_outputs=True)
555
- for skipped_quantum in skipped_quanta:
556
- skeleton.remove_quantum_node(skipped_quantum, remove_outputs=False)
557
563
  remaining_quanta = skeleton.get_quanta(task_node.label)
558
564
  self._resolve_task_init(task_node, skeleton, bool(skipped_quanta))
559
565
  message_terms = []
@@ -561,6 +567,8 @@ class QuantumGraphBuilder(ABC):
561
567
  message_terms.append(f"{len(no_work_quanta)} had no work to do")
562
568
  if skipped_quanta:
563
569
  message_terms.append(f"{len(skipped_quanta)} previously succeeded")
570
+ if adjuster.n_removed:
571
+ message_terms.append(f"{adjuster.n_removed} removed by adjust_all_quanta")
564
572
  message_parenthetical = f" ({', '.join(message_terms)})" if message_terms else ""
565
573
  if remaining_quanta:
566
574
  self.log.info(
@@ -634,40 +642,32 @@ class QuantumGraphBuilder(ABC):
634
642
  return False
635
643
 
636
644
  @final
637
- def _gather_quantum_outputs(
645
+ def _update_quantum_for_adjust(
638
646
  self,
639
- task_node: TaskNode,
640
647
  quantum_key: QuantumKey,
641
648
  skeleton: QuantumGraphSkeleton,
649
+ task_prerequisite_info: PrerequisiteInfo,
642
650
  skypix_bounds_builder: SkyPixBoundsBuilder,
643
651
  timespan_builder: TimespanBuilder,
644
- ) -> NamedKeyDict[DatasetType, list[DatasetRef]]:
645
- """Collect outputs or generate datasets for a preliminary quantum and
646
- put them in the form used by `~lsst.daf.butler.Quantum` and
647
- `~PipelineTaskConnections.adjustQuantum`.
652
+ ) -> None:
653
+ """Update the quantum node in the skeleton by finding remaining
654
+ prerequisite inputs and dropping regular inputs that we now know will
655
+ not be produced.
648
656
 
649
657
  Parameters
650
658
  ----------
651
- task_node : `pipeline_graph.TaskNode`
652
- Node for this task in the pipeline graph.
653
659
  quantum_key : `QuantumKey`
654
660
  Identifier for this quantum in the graph.
655
661
  skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
656
662
  Preliminary quantum graph, to be modified in-place.
663
+ task_prerequisite_info : `~prerequisite_helpers.PrerequisiteInfo`
664
+ Information about the prerequisite inputs to this task.
657
665
  skypix_bounds_builder : `~prerequisite_helpers.SkyPixBoundsBuilder`
658
666
  An object that accumulates the appropriate spatial bounds for a
659
667
  quantum.
660
668
  timespan_builder : `~prerequisite_helpers.TimespanBuilder`
661
669
  An object that accumulates the appropriate timespan for a quantum.
662
670
 
663
- Returns
664
- -------
665
- outputs : `~lsst.daf.butler.NamedKeyDict` [ \
666
- `~lsst.daf.butler.DatasetType`, `list` [ \
667
- `~lsst.daf.butler.DatasetRef` ] ]
668
- All outputs to the task, using the storage class and components
669
- defined by the task's own connections.
670
-
671
671
  Notes
672
672
  -----
673
673
  This first looks for outputs already present in the `output_run` (i.e.
@@ -680,8 +680,7 @@ class QuantumGraphBuilder(ABC):
680
680
  UUID is generated. In all cases the dataset node in the skeleton is
681
681
  associated with a `DatasetRef`.
682
682
  """
683
- outputs_by_type: dict[str, list[DatasetRef]] = {}
684
- dataset_key: DatasetKey
683
+ dataset_key: DatasetKey | PrerequisiteDatasetKey
685
684
  for dataset_key in skeleton.iter_outputs_of(quantum_key):
686
685
  dataset_data_id = skeleton[dataset_key]["data_id"]
687
686
  dataset_type_node = self._pipeline_graph.dataset_types[dataset_key.parent_dataset_type_name]
@@ -699,6 +698,66 @@ class QuantumGraphBuilder(ABC):
699
698
  skypix_bounds_builder.handle_dataset(dataset_key.parent_dataset_type_name, dataset_data_id)
700
699
  timespan_builder.handle_dataset(dataset_key.parent_dataset_type_name, dataset_data_id)
701
700
  skeleton.set_dataset_ref(ref, dataset_key)
701
+ quantum_data_id = skeleton[quantum_key]["data_id"]
702
+ # Process inputs already present in the skeleton - this should include
703
+ # all regular inputs (including intermediates) and may include some
704
+ # prerequisites.
705
+ for dataset_key in list(skeleton.iter_inputs_of(quantum_key)):
706
+ if (ref := skeleton.get_dataset_ref(dataset_key)) is None:
707
+ # If the dataset ref hasn't been set either as an existing
708
+ # input or as an output of an already-processed upstream
709
+ # quantum, it's not going to be produced; remove it.
710
+ skeleton.remove_dataset_nodes([dataset_key])
711
+ continue
712
+ skypix_bounds_builder.handle_dataset(dataset_key.parent_dataset_type_name, ref.dataId)
713
+ timespan_builder.handle_dataset(dataset_key.parent_dataset_type_name, ref.dataId)
714
+ # Query for any prerequisites not handled by process_subgraph. Note
715
+ # that these were not already in the skeleton graph, so we add them
716
+ # now.
717
+ skypix_bounds = skypix_bounds_builder.finish()
718
+ timespan = timespan_builder.finish()
719
+ for finder in task_prerequisite_info.finders.values():
720
+ dataset_keys = []
721
+ for ref in finder.find(
722
+ self.butler, self.input_collections, quantum_data_id, skypix_bounds, timespan
723
+ ):
724
+ dataset_key = skeleton.add_prerequisite_node(ref)
725
+ dataset_keys.append(dataset_key)
726
+ skeleton.add_input_edges(quantum_key, dataset_keys)
727
+
728
+ @final
729
+ def _adapt_quantum_outputs(
730
+ self,
731
+ task_node: TaskNode,
732
+ quantum_key: QuantumKey,
733
+ skeleton: QuantumGraphSkeleton,
734
+ ) -> NamedKeyDict[DatasetType, list[DatasetRef]]:
735
+ """Adapt outputs for a preliminary quantum and put them into the form
736
+ used by `~lsst.daf.butler.Quantum` and
737
+ `~PipelineTaskConnections.adjustQuantum`.
738
+
739
+ Parameters
740
+ ----------
741
+ task_node : `pipeline_graph.TaskNode`
742
+ Node for this task in the pipeline graph.
743
+ quantum_key : `QuantumKey`
744
+ Identifier for this quantum in the graph.
745
+ skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
746
+ Preliminary quantum graph, to be modified in-place.
747
+
748
+ Returns
749
+ -------
750
+ outputs : `~lsst.daf.butler.NamedKeyDict` [ \
751
+ `~lsst.daf.butler.DatasetType`, `list` [ \
752
+ `~lsst.daf.butler.DatasetRef` ] ]
753
+ All outputs to the task, using the storage class and components
754
+ defined by the task's own connections.
755
+ """
756
+ outputs_by_type: dict[str, list[DatasetRef]] = {}
757
+ dataset_key: DatasetKey
758
+ for dataset_key in skeleton.iter_outputs_of(quantum_key):
759
+ ref = skeleton.get_dataset_ref(dataset_key)
760
+ assert ref is not None, "Should have been added (or the node removed) in a previous pass."
702
761
  outputs_by_type.setdefault(dataset_key.parent_dataset_type_name, []).append(ref)
703
762
  adapted_outputs: NamedKeyDict[DatasetType, list[DatasetRef]] = NamedKeyDict()
704
763
  for write_edge in task_node.iter_all_outputs():
@@ -711,17 +770,14 @@ class QuantumGraphBuilder(ABC):
711
770
  return adapted_outputs
712
771
 
713
772
  @final
714
- def _gather_quantum_inputs(
773
+ def _adapt_quantum_inputs(
715
774
  self,
716
775
  task_node: TaskNode,
717
776
  quantum_key: QuantumKey,
718
777
  skeleton: QuantumGraphSkeleton,
719
- task_prerequisite_info: PrerequisiteInfo,
720
- skypix_bounds_builder: SkyPixBoundsBuilder,
721
- timespan_builder: TimespanBuilder,
722
778
  ) -> NamedKeyDict[DatasetType, list[DatasetRef]]:
723
- """Collect input datasets for a preliminary quantum and put them in the
724
- form used by `~lsst.daf.butler.Quantum` and
779
+ """Adapt input datasets for a preliminary quantum into the form used by
780
+ `~lsst.daf.butler.Quantum` and
725
781
  `~PipelineTaskConnections.adjustQuantum`.
726
782
 
727
783
  Parameters
@@ -752,37 +808,12 @@ class QuantumGraphBuilder(ABC):
752
808
  with a `DatasetRef`, and queries for prerequisite input nodes that do
753
809
  not exist.
754
810
  """
755
- quantum_data_id = skeleton[quantum_key]["data_id"]
756
811
  inputs_by_type: dict[str, set[DatasetRef]] = {}
757
812
  dataset_key: DatasetKey | PrerequisiteDatasetKey
758
- # Process inputs already present in the skeleton - this should include
759
- # all regular inputs (including intermediates) and may include some
760
- # prerequisites.
761
813
  for dataset_key in list(skeleton.iter_inputs_of(quantum_key)):
762
- if (ref := skeleton.get_dataset_ref(dataset_key)) is None:
763
- # If the dataset ref hasn't been set either as an existing
764
- # input or as an output of an already-processed upstream
765
- # quantum, it's not going to be produced; remove it.
766
- skeleton.remove_dataset_nodes([dataset_key])
767
- continue
814
+ ref = skeleton.get_dataset_ref(dataset_key)
815
+ assert ref is not None, "Should have been added (or the node removed) in a previous pass."
768
816
  inputs_by_type.setdefault(dataset_key.parent_dataset_type_name, set()).add(ref)
769
- skypix_bounds_builder.handle_dataset(dataset_key.parent_dataset_type_name, ref.dataId)
770
- timespan_builder.handle_dataset(dataset_key.parent_dataset_type_name, ref.dataId)
771
- # Query for any prerequisites not handled by process_subgraph. Note
772
- # that these were not already in the skeleton graph, so we add them
773
- # now.
774
- skypix_bounds = skypix_bounds_builder.finish()
775
- timespan = timespan_builder.finish()
776
- for finder in task_prerequisite_info.finders.values():
777
- inputs_for_type = inputs_by_type.setdefault(finder.dataset_type_node.name, set())
778
- dataset_keys = []
779
- for ref in finder.find(
780
- self.butler, self.input_collections, quantum_data_id, skypix_bounds, timespan
781
- ):
782
- dataset_key = skeleton.add_prerequisite_node(ref)
783
- dataset_keys.append(dataset_key)
784
- inputs_for_type.add(ref)
785
- skeleton.add_input_edges(quantum_key, dataset_keys)
786
817
  adapted_inputs: NamedKeyDict[DatasetType, list[DatasetRef]] = NamedKeyDict()
787
818
  for read_edge in task_node.iter_all_inputs():
788
819
  dataset_type_node = self._pipeline_graph.dataset_types[read_edge.parent_dataset_type_name]
@@ -669,3 +669,23 @@ class QuantumGraphSkeleton:
669
669
  output_in_the_way: DatasetRef | None
670
670
  if (output_in_the_way := state.get("output_in_the_way")) is not None:
671
671
  state["output_in_the_way"] = output_in_the_way.expanded(data_id)
672
+
673
+ def get_data_id(self, key: Key) -> DataCoordinate:
674
+ """Return the full data ID for a quantum or dataset, if available.
675
+
676
+ Parameters
677
+ ----------
678
+ key : `Key`
679
+ Identifier for the graph node.
680
+
681
+ Returns
682
+ -------
683
+ data_id : `DataCoordinate`
684
+ Expanded data ID for the node, if one is available.
685
+
686
+ Raises
687
+ ------
688
+ KeyError
689
+ Raised if this node does not have an expanded data ID.
690
+ """
691
+ return self._xgraph.nodes[key]["data_id"]