lsst-pipe-base 29.2025.1400__py3-none-any.whl → 29.2025.1600__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. lsst/pipe/base/all_dimensions_quantum_graph_builder.py +17 -0
  2. lsst/pipe/base/graph/_loadHelpers.py +4 -0
  3. lsst/pipe/base/graph/graph.py +2 -2
  4. lsst/pipe/base/pipeline.py +1 -1
  5. lsst/pipe/base/pipelineIR.py +10 -1
  6. lsst/pipe/base/pipeline_graph/__main__.py +1 -0
  7. lsst/pipe/base/pipeline_graph/_exceptions.py +7 -0
  8. lsst/pipe/base/pipeline_graph/_pipeline_graph.py +360 -11
  9. lsst/pipe/base/pipeline_graph/expressions.py +271 -0
  10. lsst/pipe/base/pipeline_graph/visualization/__init__.py +1 -0
  11. lsst/pipe/base/pipeline_graph/visualization/_formatting.py +300 -5
  12. lsst/pipe/base/pipeline_graph/visualization/_mermaid.py +17 -25
  13. lsst/pipe/base/pipeline_graph/visualization/_options.py +11 -3
  14. lsst/pipe/base/pipeline_graph/visualization/_show.py +23 -3
  15. lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +250 -0
  16. lsst/pipe/base/quantum_provenance_graph.py +28 -0
  17. lsst/pipe/base/version.py +1 -1
  18. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/METADATA +2 -1
  19. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/RECORD +27 -25
  20. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/WHEEL +0 -0
  21. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/entry_points.txt +0 -0
  22. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/licenses/COPYRIGHT +0 -0
  23. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/licenses/LICENSE +0 -0
  24. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/licenses/bsd_license.txt +0 -0
  25. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/licenses/gpl-v3.0.txt +0 -0
  26. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/top_level.txt +0 -0
  27. {lsst_pipe_base-29.2025.1400.dist-info → lsst_pipe_base-29.2025.1600.dist-info}/zip-safe +0 -0
@@ -32,8 +32,8 @@ import html
32
32
  import os
33
33
  import sys
34
34
  from collections.abc import Mapping
35
- from io import BufferedIOBase, BytesIO, StringIO, TextIOBase
36
- from typing import Any, TextIO
35
+ from io import StringIO
36
+ from typing import IO, Any
37
37
 
38
38
  from .._nodes import NodeType
39
39
  from .._pipeline_graph import PipelineGraph
@@ -58,7 +58,7 @@ _OVERFLOW_MAX_LINES = 20
58
58
 
59
59
  def show_mermaid(
60
60
  pipeline_graph: PipelineGraph,
61
- stream: TextIO | BytesIO = sys.stdout,
61
+ stream: IO[Any] = sys.stdout,
62
62
  output_format: str = "mmd",
63
63
  width: int | None = None,
64
64
  height: int | None = None,
@@ -78,7 +78,7 @@ def show_mermaid(
78
78
  ----------
79
79
  pipeline_graph : `PipelineGraph`
80
80
  The pipeline graph to visualize.
81
- stream : `TextIO` or `BytesIO`, optional
81
+ stream : `typing.IO`, optional
82
82
  The output stream where Mermaid code is written. Defaults to
83
83
  `sys.stdout`.
84
84
  output_format : str, optional
@@ -113,19 +113,11 @@ def show_mermaid(
113
113
  mermaid_source = _generate_mermaid_source(pipeline_graph, **kwargs)
114
114
 
115
115
  if output_format == "mmd":
116
- if isinstance(stream, TextIOBase):
117
- # Write Mermaid source as a string.
118
- stream.write(mermaid_source)
119
- else:
120
- raise TypeError(f"Expected a text stream, but got {type(stream)}.")
116
+ # Write Mermaid source as a string.
117
+ stream.write(mermaid_source)
121
118
  else:
122
- if isinstance(stream, BufferedIOBase):
123
- # Render Mermaid source as an image and write to binary stream.
124
- _render_mermaid_image(
125
- mermaid_source, stream, output_format, width=width, height=height, scale=scale
126
- )
127
- else:
128
- raise ValueError(f"Expected a binary stream, but got {type(stream)}.")
119
+ # Render Mermaid source as an image and write to binary stream.
120
+ _render_mermaid_image(mermaid_source, stream, output_format, width=width, height=height, scale=scale)
129
121
 
130
122
 
131
123
  def _generate_mermaid_source(pipeline_graph: PipelineGraph, **kwargs: Any) -> str:
@@ -210,7 +202,7 @@ def _generate_mermaid_source(pipeline_graph: PipelineGraph, **kwargs: Any) -> st
210
202
 
211
203
  def _render_mermaid_image(
212
204
  mermaid_source: str,
213
- binary_stream: BytesIO,
205
+ binary_stream: IO[bytes],
214
206
  output_format: str,
215
207
  width: int | None = None,
216
208
  height: int | None = None,
@@ -287,7 +279,7 @@ def _render_task_node(
287
279
  node_key: NodeKey,
288
280
  node_data: Mapping[str, Any],
289
281
  options: NodeAttributeOptions,
290
- stream: TextIO,
282
+ stream: IO[str],
291
283
  ) -> None:
292
284
  """Render a Mermaid node for a task or task-init node.
293
285
 
@@ -301,7 +293,7 @@ def _render_task_node(
301
293
  options : NodeAttributeOptions
302
294
  Rendering options controlling whether to show dimensions, storage
303
295
  classes, etc.
304
- stream : TextIO
296
+ stream : `typing.IO` [ `str` ]
305
297
  The output stream for Mermaid syntax.
306
298
  """
307
299
  # Convert node_key into a label, handling line splitting and prefix
@@ -337,7 +329,7 @@ def _render_dataset_type_node(
337
329
  node_key: NodeKey,
338
330
  node_data: Mapping[str, Any],
339
331
  options: NodeAttributeOptions,
340
- stream: TextIO,
332
+ stream: IO[str],
341
333
  overflow_ref: int,
342
334
  ) -> tuple[int, list[str]]:
343
335
  """Render a Mermaid node for a dataset-type node, handling overflow lines
@@ -355,7 +347,7 @@ def _render_dataset_type_node(
355
347
  options : NodeAttributeOptions
356
348
  Rendering options controlling whether to show dimensions and storage
357
349
  classes.
358
- stream : TextIO
350
+ stream : `typing.IO` [ `str` ]
359
351
  The output stream for Mermaid syntax.
360
352
  overflow_ref : int
361
353
  The current reference number for overflow nodes. If overflow occurs,
@@ -414,7 +406,7 @@ def _render_dataset_type_node(
414
406
  return overflow_ref, overflow_ids
415
407
 
416
408
 
417
- def _render_simple_node(node_id: str, lines: list[str], node_class: str, stream: TextIO) -> None:
409
+ def _render_simple_node(node_id: str, lines: list[str], node_class: str, stream: IO[str]) -> None:
418
410
  """Render a simple Mermaid node with given lines and a class.
419
411
 
420
412
  This helper function is used for both primary nodes and overflow nodes once
@@ -429,7 +421,7 @@ def _render_simple_node(node_id: str, lines: list[str], node_class: str, stream:
429
421
  node_class : str
430
422
  Mermaid class name to style the node (e.g., 'dsType', 'task',
431
423
  'taskInit').
432
- stream : TextIO
424
+ stream : `typing.IO` [ `str` ]
433
425
  The output stream.
434
426
  """
435
427
  label = "<br>".join(lines)
@@ -437,7 +429,7 @@ def _render_simple_node(node_id: str, lines: list[str], node_class: str, stream:
437
429
  print(f"class {node_id} {node_class};", file=stream)
438
430
 
439
431
 
440
- def _render_edge(from_node_id: str, to_node_id: str, is_prerequisite: bool, stream: TextIO) -> None:
432
+ def _render_edge(from_node_id: str, to_node_id: str, is_prerequisite: bool, stream: IO[str]) -> None:
441
433
  """Render a Mermaid edge from one node to another.
442
434
 
443
435
  Edges in Mermaid are normally specified as `A --> B`. Prerequisite edges
@@ -453,7 +445,7 @@ def _render_edge(from_node_id: str, to_node_id: str, is_prerequisite: bool, stre
453
445
  is_prerequisite : bool
454
446
  If True, this edge represents a prerequisite connection and will be
455
447
  styled as dashed.
456
- stream : TextIO
448
+ stream : `typing.IO` [ `str` ]
457
449
  The output stream for Mermaid syntax.
458
450
  """
459
451
  # At this stage, we simply print the edge. The styling (dashed) for
@@ -32,6 +32,7 @@ import dataclasses
32
32
  from typing import Literal
33
33
 
34
34
  from .._nodes import NodeType
35
+ from ._status_annotator import NodeStatusOptions
35
36
 
36
37
 
37
38
  @dataclasses.dataclass
@@ -71,8 +72,8 @@ class NodeAttributeOptions:
71
72
  - `None`: context-dependent default behavior.
72
73
  """
73
74
 
74
- def __bool__(self) -> bool:
75
- return bool(self.dimensions or self.storage_classes or self.task_classes)
75
+ status: NodeStatusOptions | None
76
+ """Options for displaying execution status."""
76
77
 
77
78
  def has_details(self, node_type: NodeType) -> bool:
78
79
  """Check whether there is any information beyond the node name for a
@@ -93,7 +94,10 @@ class NodeAttributeOptions:
93
94
  else:
94
95
  return bool(self.dimensions or self.task_classes)
95
96
 
96
- def checked(self, is_resolved: bool) -> NodeAttributeOptions:
97
+ def __bool__(self) -> bool:
98
+ return bool(self.dimensions or self.storage_classes or self.task_classes or self.status)
99
+
100
+ def checked(self, is_resolved: bool, has_status: bool = False) -> NodeAttributeOptions:
97
101
  """Check these options against a pipeline graph's resolution status and
98
102
  fill in defaults.
99
103
 
@@ -102,6 +106,9 @@ class NodeAttributeOptions:
102
106
  is_resolved : `bool`
103
107
  Whether the pipeline graph to be displayed is resolved
104
108
  (`PipelineGraph.is_fully_resolved`).
109
+ has_status : `bool`
110
+ Whether the pipeline graph to be displayed has status information.
111
+ Defaults to `False`.
105
112
 
106
113
  Returns
107
114
  -------
@@ -127,4 +134,5 @@ class NodeAttributeOptions:
127
134
  self.task_classes if self.task_classes is not None else ("concise" if is_resolved else False)
128
135
  ),
129
136
  storage_classes=(self.storage_classes if self.storage_classes is not None else is_resolved),
137
+ status=self.status if has_status else None,
130
138
  )
@@ -48,6 +48,7 @@ from ._merge import (
48
48
  )
49
49
  from ._options import NodeAttributeOptions
50
50
  from ._printer import make_default_printer
51
+ from ._status_annotator import NodeStatusAnnotator, NodeStatusOptions
51
52
 
52
53
  DisplayNodeKey = NodeKey | MergedNodeKey
53
54
 
@@ -64,6 +65,8 @@ def parse_display_args(
64
65
  merge_output_trees: int = 4,
65
66
  merge_intermediates: bool = True,
66
67
  include_automatic_connections: bool = False,
68
+ status_annotator: NodeStatusAnnotator | None = None,
69
+ status_options: NodeStatusOptions | None = None,
67
70
  ) -> tuple[networkx.DiGraph | networkx.MultiDiGraph, NodeAttributeOptions]:
68
71
  """Print a text-based ~.PipelineGraph` visualization.
69
72
 
@@ -126,21 +129,34 @@ def parse_display_args(
126
129
  include_automatic_connections : `bool`, optional
127
130
  Whether to include automatically-added connections like the config,
128
131
  log, and metadata dataset types for each task. Default is `False`.
132
+ status_annotator : `NodeStatusAnnotator`, optional
133
+ Annotator to add status information to the graph. Default is `None`.
134
+ status_options : `NodeStatusOptions`, optional
135
+ Options for displaying execution status. Default is `None`.
129
136
  """
130
137
  if init is None:
131
138
  if not dataset_types:
132
139
  raise ValueError("Cannot show init and runtime graphs unless dataset types are shown.")
133
140
  xgraph = pipeline_graph.make_xgraph()
141
+ if status_annotator is not None:
142
+ raise ValueError("Cannot show status with both init and runtime graphs.")
134
143
  elif dataset_types:
135
144
  xgraph = pipeline_graph.make_bipartite_xgraph(init)
145
+ if status_annotator is not None:
146
+ status_annotator(xgraph, dataset_types=True)
136
147
  else:
137
148
  xgraph = pipeline_graph.make_task_xgraph(init)
138
149
  storage_classes = False
150
+ if status_annotator is not None:
151
+ status_annotator(xgraph, dataset_types=False)
139
152
 
140
153
  options = NodeAttributeOptions(
141
- dimensions=dimensions, storage_classes=storage_classes, task_classes=task_classes
154
+ dimensions=dimensions,
155
+ storage_classes=storage_classes,
156
+ task_classes=task_classes,
157
+ status=status_options,
142
158
  )
143
- options = options.checked(pipeline_graph.is_fully_resolved)
159
+ options = options.checked(pipeline_graph.is_fully_resolved, has_status=status_annotator is not None)
144
160
 
145
161
  if dataset_types and not include_automatic_connections:
146
162
  taskish_nodes: list[TaskNode | TaskInitNode] = []
@@ -219,10 +235,14 @@ def show(
219
235
  if width < 0:
220
236
  width, _ = get_terminal_size()
221
237
 
238
+ # Number of columns used for padding after a symbol.
239
+ # Must match the padding added by the `Printer` class.
240
+ symbol_padding = 2
241
+
222
242
  printer = make_default_printer(layout.width, color, stream)
223
243
  printer.get_symbol = get_node_symbol
224
244
 
225
- get_text = GetNodeText(xgraph, options, (width - printer.width) if width else 0)
245
+ get_text = GetNodeText(xgraph, options, (width - printer.width - symbol_padding) if width else 0)
226
246
  printer.get_text = get_text
227
247
 
228
248
  printer.print(stream, layout)
@@ -0,0 +1,250 @@
1
+ # This file is part of pipe_base.
2
+ #
3
+ # Developed for the LSST Data Management System.
4
+ # This product includes software developed by the LSST Project
5
+ # (http://www.lsst.org).
6
+ # See the COPYRIGHT file at the top-level directory of this distribution
7
+ # for details of code ownership.
8
+ #
9
+ # This software is dual licensed under the GNU General Public License and also
10
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
11
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12
+ # respectively. If you choose the GPL option then the following text applies
13
+ # (but note that there is still no warranty even if you opt for BSD instead):
14
+ #
15
+ # This program is free software: you can redistribute it and/or modify
16
+ # it under the terms of the GNU General Public License as published by
17
+ # the Free Software Foundation, either version 3 of the License, or
18
+ # (at your option) any later version.
19
+ #
20
+ # This program is distributed in the hope that it will be useful,
21
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
22
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23
+ # GNU General Public License for more details.
24
+ #
25
+ # You should have received a copy of the GNU General Public License
26
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
27
+ from __future__ import annotations
28
+
29
+ __all__ = (
30
+ "QuantumGraphExecutionStatusAnnotator",
31
+ "QuantumGraphExecutionStatusOptions",
32
+ "QuantumProvenanceGraphStatusAnnotator",
33
+ "QuantumProvenanceGraphStatusOptions",
34
+ )
35
+
36
+ import dataclasses
37
+ from typing import TYPE_CHECKING, Any, Literal, Protocol, overload
38
+
39
+ import networkx
40
+
41
+ from .._nodes import NodeKey, NodeType
42
+
43
+ if TYPE_CHECKING:
44
+ from ... import quantum_provenance_graph as qpg
45
+
46
+ # ANSI color codes.
47
+ GREEN = "\033[32m"
48
+ RED = "\033[31m"
49
+ YELLOW = "\033[33m"
50
+ CYAN = "\033[36m"
51
+ WHITE = "\033[37m"
52
+ GRAY = "\033[90m"
53
+ MAGENTA = "\033[35m"
54
+ BROWN = "\u001b[38;5;130m"
55
+ RESET = "\033[0m"
56
+
57
+
58
+ @dataclasses.dataclass
59
+ class TaskStatusInfo:
60
+ """Holds status information for a task."""
61
+
62
+ expected: int
63
+ succeeded: int
64
+ failed: int
65
+ blocked: int
66
+ ready: int | None = None
67
+ running: int | None = None
68
+ wonky: int | None = None
69
+ unknown: int | None = None
70
+
71
+
72
+ @dataclasses.dataclass
73
+ class DatasetTypeStatusInfo:
74
+ """Holds status information for a dataset type."""
75
+
76
+ expected: int
77
+ produced: int
78
+
79
+
80
+ @dataclasses.dataclass
81
+ class StatusColors:
82
+ """Base class for holding ANSI color codes for different progress segments
83
+ or statuses.
84
+ """
85
+
86
+ # Base task status colors.
87
+ expected: str = WHITE
88
+ succeeded: str = GREEN
89
+ failed: str = RED
90
+
91
+ # Base dataset type status colors.
92
+ produced: str = GREEN
93
+
94
+ # Reset to default color.
95
+ reset: str = RESET
96
+
97
+
98
+ @dataclasses.dataclass
99
+ class QuantumGraphExecutionStatusColors(StatusColors):
100
+ """Holds ANSI color codes for different progress segments or statuses for
101
+ quantum graph execution reports.
102
+
103
+ Status colors for both task and dataset type nodes are included.
104
+ """
105
+
106
+ def __post_init__(self) -> None:
107
+ raise NotImplementedError("`QuantumGraphExecutionStatusColors` is not implemented yet.")
108
+
109
+
110
+ @dataclasses.dataclass
111
+ class QuantumProvenanceGraphStatusColors(StatusColors):
112
+ """Holds ANSI color codes for different progress segments or statuses for
113
+ quantum provenance graph reports.
114
+
115
+ Status colors for both task and dataset type nodes are included.
116
+ """
117
+
118
+ # Additional task status colors.
119
+ blocked: str = YELLOW
120
+ ready: str = GRAY
121
+ running: str = MAGENTA
122
+ wonky: str = CYAN
123
+ unknown: str = BROWN
124
+
125
+
126
+ @dataclasses.dataclass
127
+ class NodeStatusOptions:
128
+ """Base options for node status visualization.
129
+
130
+ Attributes
131
+ ----------
132
+ colors : `StatusColors`
133
+ A dataclass specifying ANSI color codes for distinct progress segments
134
+ or statuses.
135
+ display_percent : `bool`
136
+ Whether to show percentage of progress.
137
+ display_counts : `bool`
138
+ Whether to show numeric counts (e.g., succeeded/expected).
139
+ visualize : `bool`
140
+ If `True`, status information for task or dataset type nodes will be
141
+ visually indicated by segmented fills in text-based bars or flowchart
142
+ nodes.
143
+ min_bar_width : `int`
144
+ Minimum width of the visualized progress bar in characters. Only counts
145
+ the width of the bar itself, not any surrounding text. Only relevant if
146
+ `visualize` is `True` and it's a text-based visualization.
147
+ abbreviate : `bool`
148
+ If `True`, status labels will be abbreviated to save space. For
149
+ example, 'expected' will be abbreviated to 'exp' and 'blocked' to
150
+ 'blk'.
151
+ """
152
+
153
+ colors: QuantumGraphExecutionStatusColors | QuantumProvenanceGraphStatusColors
154
+ display_percent: bool = True
155
+ display_counts: bool = True
156
+ visualize: bool = True
157
+ min_bar_width: int = 15
158
+ abbreviate: bool = True
159
+
160
+ def __post_init__(self) -> None:
161
+ if not (self.display_percent or self.display_counts or self.visualize):
162
+ raise ValueError(
163
+ "At least one of 'display_percent', 'display_counts', or 'visualize' must be True."
164
+ )
165
+
166
+
167
+ @dataclasses.dataclass
168
+ class QuantumGraphExecutionStatusOptions(NodeStatusOptions):
169
+ """Specialized status options for quantum graph execution reports."""
170
+
171
+ colors: QuantumGraphExecutionStatusColors = dataclasses.field(
172
+ default_factory=QuantumGraphExecutionStatusColors
173
+ )
174
+
175
+
176
+ @dataclasses.dataclass
177
+ class QuantumProvenanceGraphStatusOptions(NodeStatusOptions):
178
+ """Specialized status options for quantum provenance graph reports."""
179
+
180
+ colors: QuantumProvenanceGraphStatusColors = dataclasses.field(
181
+ default_factory=QuantumProvenanceGraphStatusColors
182
+ )
183
+
184
+
185
+ class NodeStatusAnnotator(Protocol):
186
+ """Protocol for annotating a networkx graph with task and dataset type
187
+ status information.
188
+ """
189
+
190
+ @overload
191
+ def __call__(self, xgraph: networkx.DiGraph, dataset_types: Literal[False]) -> None: ...
192
+
193
+ @overload
194
+ def __call__(self, xgraph: networkx.MultiDiGraph, dataset_types: Literal[True]) -> None: ...
195
+
196
+ def __call__(self, xgraph: networkx.DiGraph | networkx.MultiDiGraph, dataset_types: bool) -> None: ...
197
+
198
+
199
+ class QuantumGraphExecutionStatusAnnotator:
200
+ """Annotates a networkx graph with task and dataset status information from
201
+ a quantum graph execution summary, implementing the StatusAnnotator
202
+ protocol to update the graph with status data.
203
+ """
204
+
205
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
206
+ raise NotImplementedError("`QuantumGraphExecutionStatusAnnotator` is not implemented yet.")
207
+
208
+
209
+ class QuantumProvenanceGraphStatusAnnotator:
210
+ """Annotates a networkx graph with task and dataset status information from
211
+ a quantum provenance summary, implementing the StatusAnnotator protocol to
212
+ update the graph with status data.
213
+
214
+ Parameters
215
+ ----------
216
+ qpg_summary : `~lsst.pipe.base.quantum_provenance_graph.Summary`
217
+ The quantum provenance summary to use for status information.
218
+ """
219
+
220
+ def __init__(self, qpg_summary: qpg.Summary) -> None:
221
+ self.qpg_summary = qpg_summary
222
+
223
+ @overload
224
+ def __call__(self, xgraph: networkx.DiGraph, dataset_types: Literal[False]) -> None: ...
225
+
226
+ @overload
227
+ def __call__(self, xgraph: networkx.MultiDiGraph, dataset_types: Literal[True]) -> None: ...
228
+
229
+ def __call__(self, xgraph: networkx.DiGraph | networkx.MultiDiGraph, dataset_types: bool) -> None:
230
+ for task_label, task_summary in self.qpg_summary.tasks.items():
231
+ fields = {
232
+ name.replace("n_", "").replace("successful", "succeeded"): getattr(task_summary, name)
233
+ for name in dir(task_summary)
234
+ if name.startswith("n_")
235
+ }
236
+ assert sum(fields.values()) == 2 * task_summary.n_expected, f"Incosistent status counts: {fields}"
237
+ task_status_info = TaskStatusInfo(**fields)
238
+
239
+ key = NodeKey(NodeType.TASK, task_label)
240
+ xgraph.nodes[key]["status"] = task_status_info
241
+
242
+ if dataset_types:
243
+ for dataset_type_name, dataset_type_summary in self.qpg_summary.datasets.items():
244
+ expected = dataset_type_summary.n_expected
245
+ produced = dataset_type_summary.n_visible + dataset_type_summary.n_shadowed
246
+ assert produced <= expected, f"Dataset types produced ({produced}) > expected ({expected})"
247
+ dataset_type_status_info = DatasetTypeStatusInfo(expected=expected, produced=produced)
248
+
249
+ key = NodeKey(NodeType.DATASET_TYPE, dataset_type_name)
250
+ xgraph.nodes[key]["status"] = dataset_type_status_info
@@ -2106,6 +2106,12 @@ class _QuantumBackedButlerFactory:
2106
2106
  def _cli() -> None:
2107
2107
  import argparse
2108
2108
 
2109
+ from .pipeline_graph.visualization import (
2110
+ QuantumProvenanceGraphStatusAnnotator,
2111
+ QuantumProvenanceGraphStatusOptions,
2112
+ show,
2113
+ )
2114
+
2109
2115
  parser = argparse.ArgumentParser(
2110
2116
  "QuantumProvenanceGraph command-line utilities.",
2111
2117
  description=(
@@ -2114,6 +2120,7 @@ def _cli() -> None:
2114
2120
  ),
2115
2121
  )
2116
2122
  subparsers = parser.add_subparsers(dest="cmd")
2123
+
2117
2124
  pprint_parser = subparsers.add_parser("pprint", help="Print a saved summary as a series of tables.")
2118
2125
  pprint_parser.add_argument("file", type=argparse.FileType("r"), help="Saved summary JSON file.")
2119
2126
  pprint_parser.add_argument(
@@ -2127,12 +2134,33 @@ def _cli() -> None:
2127
2134
  action=argparse.BooleanOptionalAction,
2128
2135
  default=True,
2129
2136
  )
2137
+
2138
+ xgraph_parser = subparsers.add_parser("xgraph", help="Print a visual representation of a saved xgraph.")
2139
+ xgraph_parser.add_argument("file", type=argparse.FileType("r"), help="Saved summary JSON file.")
2140
+ xgraph_parser.add_argument("qgraph", type=str, help="Saved quantum graph file.")
2141
+
2130
2142
  args = parser.parse_args()
2143
+
2131
2144
  match args.cmd:
2132
2145
  case "pprint":
2133
2146
  summary = Summary.model_validate_json(args.file.read())
2134
2147
  args.file.close()
2135
2148
  summary.pprint(brief=args.brief, datasets=args.datasets)
2149
+ case "xgraph":
2150
+ summary = Summary.model_validate_json(args.file.read())
2151
+ args.file.close()
2152
+ status_annotator = QuantumProvenanceGraphStatusAnnotator(summary)
2153
+ status_options = QuantumProvenanceGraphStatusOptions(
2154
+ display_percent=True, display_counts=True, abbreviate=True, visualize=True
2155
+ )
2156
+ qgraph = QuantumGraph.loadUri(args.qgraph)
2157
+ pipeline_graph = qgraph.pipeline_graph
2158
+ show(
2159
+ pipeline_graph,
2160
+ dataset_types=True,
2161
+ status_annotator=status_annotator,
2162
+ status_options=status_options,
2163
+ )
2136
2164
  case _:
2137
2165
  raise AssertionError(f"Unhandled subcommand {args.dest}.")
2138
2166
 
lsst/pipe/base/version.py CHANGED
@@ -1,2 +1,2 @@
1
1
  __all__ = ["__version__"]
2
- __version__ = "29.2025.1400"
2
+ __version__ = "29.2025.1600"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lsst-pipe-base
3
- Version: 29.2025.1400
3
+ Version: 29.2025.1600
4
4
  Summary: Pipeline infrastructure for the Rubin Science Pipelines.
5
5
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
6
6
  License: BSD 3-Clause License
@@ -27,6 +27,7 @@ Requires-Dist: lsst-pex-config
27
27
  Requires-Dist: astropy
28
28
  Requires-Dist: pydantic<3.0,>=2
29
29
  Requires-Dist: networkx
30
+ Requires-Dist: wcwidth
30
31
  Requires-Dist: pyyaml>=5.1
31
32
  Requires-Dist: numpy>=1.17
32
33
  Requires-Dist: frozendict