lsst-pipe-base 29.2025.1300__py3-none-any.whl → 29.2025.1500__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. lsst/pipe/base/all_dimensions_quantum_graph_builder.py +4 -0
  2. lsst/pipe/base/pipeline_graph/__main__.py +1 -0
  3. lsst/pipe/base/pipeline_graph/_exceptions.py +7 -0
  4. lsst/pipe/base/pipeline_graph/_pipeline_graph.py +360 -11
  5. lsst/pipe/base/pipeline_graph/expressions.py +271 -0
  6. lsst/pipe/base/pipeline_graph/visualization/__init__.py +1 -0
  7. lsst/pipe/base/pipeline_graph/visualization/_formatting.py +300 -5
  8. lsst/pipe/base/pipeline_graph/visualization/_options.py +11 -3
  9. lsst/pipe/base/pipeline_graph/visualization/_show.py +23 -3
  10. lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +250 -0
  11. lsst/pipe/base/quantum_graph_skeleton.py +7 -0
  12. lsst/pipe/base/quantum_provenance_graph.py +28 -0
  13. lsst/pipe/base/version.py +1 -1
  14. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/METADATA +2 -1
  15. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/RECORD +23 -21
  16. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/WHEEL +0 -0
  17. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/entry_points.txt +0 -0
  18. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/licenses/COPYRIGHT +0 -0
  19. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/licenses/LICENSE +0 -0
  20. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/licenses/bsd_license.txt +0 -0
  21. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/licenses/gpl-v3.0.txt +0 -0
  22. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/top_level.txt +0 -0
  23. {lsst_pipe_base-29.2025.1300.dist-info → lsst_pipe_base-29.2025.1500.dist-info}/zip-safe +0 -0
lsst/pipe/base/pipeline_graph/expressions.py
@@ -0,0 +1,271 @@
+ # This file is part of pipe_base.
+ #
+ # Developed for the LSST Data Management System.
+ # This product includes software developed by the LSST Project
+ # (http://www.lsst.org).
+ # See the COPYRIGHT file at the top-level directory of this distribution
+ # for details of code ownership.
+ #
+ # This software is dual licensed under the GNU General Public License and also
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
+ # respectively. If you choose the GPL option then the following text applies
+ # (but note that there is still no warranty even if you opt for BSD instead):
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
+ """Expressions that resolve to subsets of pipelines.
+
+ See :ref:`pipeline-graph-subset-expressions`.
+ """
+
+ from __future__ import annotations
+
+ __all__ = (
+     "DirectionNode",
+     "IdentifierNode",
+     "IntersectionNode",
+     "Node",
+     "NotNode",
+     "UnionNode",
+     "parse",
+ )
+
+ import dataclasses
+ import functools
+ from typing import TYPE_CHECKING, Any, Literal, TypeAlias
+
+ from lsst.daf.butler.registry.queries.expressions.parser.ply import lex, yacc
+
+ from ._exceptions import InvalidExpressionError
+
+ if TYPE_CHECKING:
+     from lsst.daf.butler.registry.queries.expressions.parser.parserLex import LexToken
+     from lsst.daf.butler.registry.queries.expressions.parser.parserYacc import YaccProduction
+
+
+ class _ParserLex:
+     @classmethod
+     def make_lexer(cls) -> Any:  # unspecified PLY type.
+         return lex.lex(object=cls())
+
+     tokens = (
+         "IDENTIFIER",
+         "LPAREN",
+         "RPAREN",
+         "NOT",
+         "UNION",
+         "INTERSECTION",
+         "LT",
+         "LE",
+         "GT",
+         "GE",
+     )
+
+     t_LPAREN = r"\("
+     t_RPAREN = r"\)"
+     t_NOT = "~"
+     t_UNION = r"\|"
+     t_INTERSECTION = "&"
+     t_LT = "<"
+     t_LE = "<="
+     t_GT = ">"
+     t_GE = ">="
+
+     # Identifiers are alphanumeric, and may have a T:, D:, or S: prefix.
+     def t_IDENTIFIER(self, t: LexToken) -> LexToken:
+         r"""([TDS]:)?\w+"""
+         t.type = "IDENTIFIER"
+         return t
+
+     # Ignore spaces and tabs.
+     t_ignore = " \t"
+
+     def t_error(self, t: LexToken) -> LexToken:
+         raise InvalidExpressionError(
+             f"invalid token in expression near character {t.lexer.lexpos}: {t.value[0]!r}"
+         )
+
+
+ class _ParserYacc:
+     def __init__(self) -> None:
+         self.parser = self._parser_factory()
+
+     @staticmethod
+     @functools.cache
+     def _parser_factory() -> Any:  # unspecified PLY type.
+         return yacc.yacc(module=_ParserYacc, write_tables=False, debug=False)
+
+     def parse(self, input: str) -> Node:
+         """Parse input expression and return the parsed tree object.
+
+         Parameters
+         ----------
+         input : `str`
+             Expression to parse.
+
+         Returns
+         -------
+         node : `Node`
+             Root of the parsed expression tree.
+         """
+         lexer = _ParserLex.make_lexer()
+         tree = self.parser.parse(input=input, lexer=lexer)
+         return tree
+
+     tokens = _ParserLex.tokens[:]
+
+     start = "expr"
+
+     precedence = (
+         ("left", "UNION"),
+         ("left", "INTERSECTION"),
+         ("right", "NOT", "LT", "LE", "GT", "GE"),
+     )
+
+     # Ruff wants 'noqa' on the doc line, pydocstyle wants it on the function.
+
+     @classmethod
+     def p_expr_union(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : expr UNION expr"""  # noqa: D403
+         p[0] = UnionNode(lhs=p[1], rhs=p[3])
+
+     @classmethod
+     def p_expr_intersection(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : expr INTERSECTION expr"""  # noqa: D403
+         p[0] = IntersectionNode(lhs=p[1], rhs=p[3])
+
+     @classmethod
+     def p_expr_not(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : NOT expr"""  # noqa: D403
+         p[0] = NotNode(operand=p[2])
+
+     @classmethod
+     def p_expr_parens(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : LPAREN expr RPAREN"""  # noqa: D403
+         p[0] = p[2]
+
+     @classmethod
+     def p_expr_inequality(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : LT identifier
+         | LE identifier
+         | GT identifier
+         | GE identifier
+         """  # noqa: D403
+         p[0] = DirectionNode(operator=p[1], start=p[2])
+
+     @classmethod
+     def p_expr_identifier(cls, p: YaccProduction) -> None:  # noqa: D403
+         """expr : identifier"""  # noqa: D403
+         p[0] = p[1]
+
+     @classmethod
+     def p_identifier_qualified(cls, p: YaccProduction) -> None:  # noqa: D403, D401
+         """identifier : IDENTIFIER"""  # noqa: D403, D401
+         match p[1].split(":"):
+             case [qualifier, label]:
+                 p[0] = IdentifierNode(qualifier=qualifier, label=label)
+             case [label]:
+                 p[0] = IdentifierNode(qualifier=None, label=label)
+             case _:  # pragma: no cover
+                 raise AssertionError("Unexpected identifier form.")
+
+     @classmethod
+     def p_error(cls, p: YaccProduction | None) -> None:
+         if p is None:
+             raise InvalidExpressionError("Expression ended unexpectedly.")
+         else:
+             raise InvalidExpressionError(f"Syntax error near character {p.lexpos}: {p.value!r}")
+
+
+ @dataclasses.dataclass
+ class IdentifierNode:
+     """A node that corresponds to a task label, dataset type name, or labeled
+     subset.
+     """
+
+     qualifier: Literal["T", "D", "S"] | None
+     """Qualifier that indicates whether this is a task (T), dataset type (D),
+     or labeled subset (S).
+
+     Unqualified identifiers (`None`) must resolve unambiguously.
+     """
+
+     label: str
+     """Task label, dataset type name, or subset label."""
+
+
+ @dataclasses.dataclass
+ class DirectionNode:
+     """A node that represents the ancestors or descendants of a task label or
+     dataset type.
+     """
+
+     operator: Literal["<", ">", "<=", ">="]
+     """Which direction to traverse the graph ('>' for descendants, '<' for
+     ancestors), and whether to include the operand ('=') or not.
+     """
+
+     start: IdentifierNode
+     """Node at which to start the DAG traversal."""
+
+
+ @dataclasses.dataclass
+ class NotNode:
+     """A node that represents set inversion (including all elements not in the
+     operand).
+     """
+
+     operand: Node
+     """Node representing the set to invert."""
+
+
+ @dataclasses.dataclass
+ class UnionNode:
+     """Node representing a set union."""
+
+     lhs: Node
+     rhs: Node
+
+
+ @dataclasses.dataclass
+ class IntersectionNode:
+     """Node representing a set intersection."""
+
+     lhs: Node
+     rhs: Node
+
+
+ def parse(expression: str) -> Node:
+     """Parse an expression into a `Node` tree.
+
+     Parameters
+     ----------
+     expression : `str`
+         String expression to parse. See
+         :ref:`pipeline-graph-subset-expressions`.
+
+     Returns
+     -------
+     node
+         Root node of the parsed expression tree.
+
+     Raises
+     ------
+     InvalidExpressionError
+         Raised if the expression could not be parsed.
+     """
+     return _ParserYacc().parse(expression)
+
+
+ Node: TypeAlias = IdentifierNode | DirectionNode | NotNode | UnionNode | IntersectionNode
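
For orientation, a minimal sketch (not part of the diff) of how the new parser module might be exercised. The import path simply mirrors the new file's location, and the task and dataset type labels ("isr", "calexp") are hypothetical placeholders:

# Minimal sketch, assuming the wheel is installed; labels are illustrative only.
from lsst.pipe.base.pipeline_graph.expressions import (
    DirectionNode,
    IdentifierNode,
    UnionNode,
    parse,
)

# "Descendants of task isr, including isr itself, plus dataset type calexp":
# identifiers may carry a T:/D:/S: qualifier, and <, <=, >, >= select
# ancestors or descendants of the operand (per the DirectionNode docstring).
tree = parse("(>= T:isr) | D:calexp")

assert isinstance(tree, UnionNode)
assert isinstance(tree.lhs, DirectionNode) and tree.lhs.operator == ">="
assert isinstance(tree.rhs, IdentifierNode) and tree.rhs.qualifier == "D"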
lsst/pipe/base/pipeline_graph/visualization/__init__.py
@@ -34,3 +34,4 @@ from ._mermaid import *
  from ._options import *
  from ._printer import *
  from ._show import *
+ from ._status_annotator import *
lsst/pipe/base/pipeline_graph/visualization/_formatting.py
@@ -29,17 +29,20 @@ from __future__ import annotations
  __all__ = ("GetNodeText", "format_dimensions", "format_task_class", "get_node_symbol")

  import itertools
+ import re
  import textwrap
  from collections.abc import Iterator

  import networkx
  import networkx.algorithms.community
+ from wcwidth import wcswidth  # type: ignore

  from lsst.daf.butler import DimensionGroup

  from .._nodes import NodeKey, NodeType
  from ._merge import MergedNodeKey
  from ._options import NodeAttributeOptions
+ from ._status_annotator import DatasetTypeStatusInfo, NodeStatusOptions, StatusColors, TaskStatusInfo

  DisplayNodeKey = NodeKey | MergedNodeKey
  """Type alias for graph keys that may be original task, task init, or dataset
@@ -47,6 +50,42 @@ type keys, or a merge of several keys for display purposes.
  """


+ def strip_ansi(s: str) -> str:
+     """Remove ANSI escape codes from a string, so that `wcswidth()` measures
+     the real visible width of the string.
+
+     Parameters
+     ----------
+     s : `str`
+         String to strip of ANSI escape codes.
+
+     Returns
+     -------
+     stripped : `str`
+         String with ANSI escape codes removed.
+     """
+     # ANSI escape sequence remover
+     ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
+     return ansi_escape.sub("", s)
+
+
+ def render_segment(f: float) -> str:
+     """Convert a float into a string of blocks, rounding to the nearest whole
+     number of columns.
+
+     Parameters
+     ----------
+     f : `float`
+         Number of columns to fill.
+
+     Returns
+     -------
+     blocks : `str`
+         String of blocks filling the specified number of columns.
+     """
+     return "█" * round(f)
+
+
  def get_node_symbol(node: DisplayNodeKey, x: int | None = None) -> str:
      """Return a single-character symbol for a particular node type.

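
As an aside (not part of the diff), this is why the new helper strips escape codes before measuring: `len()` counts the escape characters, while `wcswidth()` on the stripped string gives the visible width. A small standalone check using the same regular expression as `strip_ansi` above:

import re

from wcwidth import wcswidth

ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")  # same pattern as strip_ansi
colored = "\x1b[32msucceeded\x1b[0m"  # green SGR codes around 9 visible characters

print(len(colored))                            # 18: raw length includes the escape codes
print(wcswidth(ansi_escape.sub("", colored)))  # 9: visible width only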
@@ -107,23 +146,134 @@ class GetNodeText:
          self.deferred: list[tuple[str, tuple[str, str], list[str]]] = []

      def __call__(self, node: DisplayNodeKey, x: int, style: tuple[str, str]) -> str:
+         """Return a line of text describing a node.
+
+         Parameters
+         ----------
+         node : `DisplayNodeKey`
+             Named tuple used as the node key.
+         x : `int`
+             Ignored; may be passed for compatibility with the `Printer` class's
+             ``get_text`` callback.
+         style : `tuple` [`str`, `str`]
+             Tuple of ANSI color codes for overflow markers.
+         """
          state = self.xgraph.nodes[node]
-         terms: list[str] = [f"{node}:" if self.options.has_details(node.node_type) else str(node)]
+         has_status = "status" in state
+
+         # Build description.
+         description = self._build_description(node, state)
+
+         # Possibly build progress bar to append to description.
+         progress_portion = ""
+         if has_status:
+             progress_portion = self.format_node_status(description, state["status"])
+
+         # Stitch together the final line, handling overflow if needed.
+         final_line = self._stitch_node_text(description, progress_portion, style)
+         return final_line
+
+     def _build_description(self, node: DisplayNodeKey, state: dict) -> str:
+         """Build the node description, possibly with additional details.
+
+         Parameters
+         ----------
+         node : `DisplayNodeKey`
+             Named tuple used as the node key.
+         state : `dict`
+             Node attributes.
+
+         Returns
+         -------
+         description : `str`
+             The node description.
+         """
+         terms = [f"{node}:" if self.options.has_details(node.node_type) else str(node)]
+         # Optionally append dimension info.
          if self.options.dimensions and node.node_type != NodeType.TASK_INIT:
              terms.append(self.format_dimensions(state["dimensions"]))
+
+         # Optionally append task class name.
          if self.options.task_classes and (
              node.node_type is NodeType.TASK or node.node_type is NodeType.TASK_INIT
          ):
              terms.append(self.format_task_class(state["task_class_name"]))
+
+         # Optionally append storage class name.
          if self.options.storage_classes and node.node_type is NodeType.DATASET_TYPE:
              terms.append(state["storage_class_name"])
+
          description = " ".join(terms)
-         if self.width and len(description) > self.width:
-             index = f"[{len(self.deferred) + 1}]"
-             self.deferred.append((index, style, terms))
-             return f"{description[: self.width - len(index) - 6]}...{style[0]}{index}{style[1]} "
+
          return description

+     def _stitch_node_text(self, description: str, progress_portion: str, style: tuple[str, str]) -> str:
+         """Make the final line of node text to display given the description
+         and possibly a progress portion, and handle overflow.
+
+         It measures the total width of the description and progress portion,
+         and if it exceeds the screen width, it truncates the description and
+         appends a footnote.
+
+         Parameters
+         ----------
+         description : `str`
+             The node description.
+         progress_portion : `str`
+             The progress portion of the node.
+         style : `tuple` [`str`, `str`]
+             Tuple of ANSI color codes for overflow markers.
+
+         Returns
+         -------
+         final_line : `str`
+             The final line of text to display.
+         """
+         final_line = f"{description}{progress_portion}" if progress_portion else description
+         total_len = wcswidth(strip_ansi(final_line))
+
+         if self.width and total_len > self.width:
+             overflow_index = f"[{len(self.deferred) + 1}]"
+             overflow_marker = f"...{style[0]}{overflow_index}{style[1]}"
+
+             avail_desc_width = (
+                 self.width - wcswidth(strip_ansi(progress_portion)) - wcswidth(strip_ansi(overflow_marker))
+             )
+             if avail_desc_width < 0:
+                 avail_desc_width = 0
+
+             truncated_desc = description[:avail_desc_width] + overflow_marker
+             self.deferred.append((overflow_index, style, [description]))
+
+             return f"{truncated_desc}{progress_portion}"
+         else:
+             return final_line
+
+     def format_node_status(self, description: str, status: TaskStatusInfo | DatasetTypeStatusInfo) -> str:
+         """Format the status of a task or dataset type node.
+
+         Parameters
+         ----------
+         description : `str`
+             The node description.
+         status : `TaskStatusInfo` or `DatasetTypeStatusInfo`
+             Holds status information for a task or dataset type.
+
+         Returns
+         -------
+         formatted : `str`
+             The formatted status string.
+         """
+         if not isinstance(self.options.status, NodeStatusOptions):
+             raise ValueError(f"Invalid node status options: {self.options.status!r}.")
+
+         return format_node_status(
+             description,
+             self.options.status,
+             status,
+             self.width,
+         )
+
      def format_dimensions(self, dimensions: DimensionGroup) -> str:
          """Format the dimensions of a task or dataset type node.

@@ -232,3 +382,148 @@ def format_task_class(options: NodeAttributeOptions, task_class_name: str) -> st
          case False:
              return ""
      raise ValueError(f"Invalid display option for task_classes: {options.task_classes!r}.")
+
+
+ def _build_progress_bar(
+     description: str,
+     prefix: str,
+     suffix: str,
+     segments: list[tuple[str, float]],
+     width: int | None,
+     colors: StatusColors,
+     min_bar_width: int,
+ ) -> str:
+     """Shared helper that constructs a multi-segment progress bar.
+
+     Parameters
+     ----------
+     description : `str`
+         Main node description (used for measuring available space).
+     prefix : `str`
+         Text before the bar (e.g. ' · 42%▕').
+     suffix : `str`
+         Text after the bar (e.g. '▏exp: 107 ...').
+     segments : `list` of `tuple` [`str`, `float`]
+         Each tuple is (color_code, fraction_of_bar_width). The segments are
+         rendered in sequence.
+     width : `int` or `None`
+         Overall maximum line width; `None` for unlimited. The minimum bar
+         width is still respected.
+     colors : `StatusColors`
+         An instance containing .reset and color fields.
+     min_bar_width : `int`
+         Minimum number of display columns for the bar.
+
+     Returns
+     -------
+     formatted : `str`
+         The assembled prefix + bar + suffix, sized with respect to the width.
+     """
+     used_len = wcswidth(strip_ansi(prefix)) + wcswidth(strip_ansi(suffix))
+     if width is not None:
+         bar_width = max(width - wcswidth(strip_ansi(description)) - used_len, min_bar_width)
+     else:
+         bar_width = min_bar_width
+
+     # Build bar from fractional segments.
+     bar_str = ""
+     total_cols = 0
+     for ansi_color, fraction in segments:
+         cols = round(bar_width * fraction)
+         bar_str += f"{ansi_color}{render_segment(cols)}{colors.reset}"
+         total_cols += cols
+
+     # Pad the bar to the full width.
+     if total_cols < bar_width:
+         bar_str += " " * (bar_width - total_cols)
+
+     return prefix + bar_str + suffix
+
+
+ def format_node_status(
+     description: str,
+     status_options: NodeStatusOptions,
+     status: TaskStatusInfo | DatasetTypeStatusInfo,
+     width: int | None,
+ ) -> str:
+     """Build a progress bar for a task or dataset type node.
+
+     Parameters
+     ----------
+     description : `str`
+         Node description for measuring leftover columns.
+     status_options : `NodeStatusOptions`
+         Options for node status visualization.
+     status : `TaskStatusInfo` or `DatasetTypeStatusInfo`
+         Holds status information for a task or dataset type.
+     width : `int` or `None`
+         Overall width limit (`None` => unlimited).
+
+     Returns
+     -------
+     formatted : `str`
+         The final prefix + bar + suffix line.
+     """
+     import dataclasses
+
+     status_abbreviations = {
+         "expected": "exp",
+         "succeeded": "suc",
+         "failed": "fail",
+         "blocked": "blk",
+         "ready": "rdy",
+         "running": "run",
+         "wonky": "wnk",
+         "unknown": "unk",
+         "produced": "prd",
+     }
+
+     colors = status_options.colors
+     expected = status.expected
+     done = status.succeeded if isinstance(status, TaskStatusInfo) else status.produced
+     full_success = done == expected
+     status_lookup = dataclasses.asdict(status)
+
+     percent = 100.0 * done / expected if expected else 0.0
+     total = float(expected) if expected else 1.0
+     prefix = ""
+
+     if status_options.display_percent or status_options.display_counts:
+         if not status_options.visualize or (status_options.visualize and status_options.display_percent):
+             full_success_color = colors.succeeded if isinstance(status, TaskStatusInfo) else colors.produced
+             color_code = full_success_color if full_success else colors.failed
+             prefix += f"{color_code} ▶ {colors.reset}"
+         if status_options.display_percent:
+             pct = round(percent)
+             if pct == 100 and not full_success:
+                 pct = 99  # Avoid showing 100% if not fully successful.
+             prefix += f"{pct}%"
+         if not status_options.visualize and status_options.display_percent and status_options.display_counts:
+             prefix += " | "
+
+     if status_options.visualize:
+         prefix += "▕"
+
+     suffix_parts = []
+     segments = []
+
+     for key, value in status_lookup.items():
+         if value is not None:
+             color_code = getattr(colors, key)
+             if status_options.display_counts:
+                 label = status_abbreviations[key] if status_options.abbreviate else key
+                 suffix_parts.append(f"{label}:{color_code}{value}{colors.reset}")
+             if key != "expected":
+                 # Build a progress bar segment.
+                 segments.append((color_code, value / total))
+
+     # Produce suffix from the parts.
+     suffix = "▏" if status_options.visualize else ""
+     suffix += " | ".join(suffix_parts)
+
+     if status_options.visualize:
+         return _build_progress_bar(
+             description, prefix, suffix, segments, width, colors, status_options.min_bar_width
+         )
+     else:
+         return f"{prefix}{suffix}"
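
A standalone sketch (not part of the diff) of the segment arithmetic that `_build_progress_bar` performs: each per-state count becomes a fraction of the bar width, rounded to whole columns and padded to the full width. The counts below are made up:

expected = 100
counts = {"succeeded": 62, "failed": 5, "blocked": 3}  # hypothetical tallies
bar_width = 20

bar = ""
filled = 0
for value in counts.values():
    cols = round(bar_width * (value / expected))  # same rounding as the helper
    bar += "█" * cols
    filled += cols
bar += " " * (bar_width - filled)  # pad to the full bar width

print(f"▕{bar}▏ suc:{counts['succeeded']} fail:{counts['failed']} blk:{counts['blocked']}")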
lsst/pipe/base/pipeline_graph/visualization/_options.py
@@ -32,6 +32,7 @@ import dataclasses
  from typing import Literal

  from .._nodes import NodeType
+ from ._status_annotator import NodeStatusOptions


  @dataclasses.dataclass
@@ -71,8 +72,8 @@ class NodeAttributeOptions:
      - `None`: context-dependent default behavior.
      """

-     def __bool__(self) -> bool:
-         return bool(self.dimensions or self.storage_classes or self.task_classes)
+     status: NodeStatusOptions | None
+     """Options for displaying execution status."""

      def has_details(self, node_type: NodeType) -> bool:
          """Check whether there is any information beyond the node name for a
@@ -93,7 +94,10 @@ class NodeAttributeOptions:
          else:
              return bool(self.dimensions or self.task_classes)

-     def checked(self, is_resolved: bool) -> NodeAttributeOptions:
+     def __bool__(self) -> bool:
+         return bool(self.dimensions or self.storage_classes or self.task_classes or self.status)
+
+     def checked(self, is_resolved: bool, has_status: bool = False) -> NodeAttributeOptions:
          """Check these options against a pipeline graph's resolution status and
          fill in defaults.

@@ -102,6 +106,9 @@ class NodeAttributeOptions:
          is_resolved : `bool`
              Whether the pipeline graph to be displayed is resolved
              (`PipelineGraph.is_fully_resolved`).
+         has_status : `bool`
+             Whether the pipeline graph to be displayed has status information.
+             Defaults to `False`.

          Returns
          -------
@@ -127,4 +134,5 @@ class NodeAttributeOptions:
                  self.task_classes if self.task_classes is not None else ("concise" if is_resolved else False)
              ),
              storage_classes=(self.storage_classes if self.storage_classes is not None else is_resolved),
+             status=self.status if has_status else None,
          )
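
Finally, a stand-in sketch (not the real `NodeAttributeOptions`, whose other fields and defaults are omitted here) of the two behavioral changes in this file: `status` now counts toward the options' truthiness, and `checked(..., has_status=...)` only keeps status options when the graph actually carries status information:

import dataclasses


@dataclasses.dataclass
class OptionsSketch:
    # Hypothetical stand-in; field defaults are illustrative only.
    dimensions: bool = False
    task_classes: bool = False
    storage_classes: bool = False
    status: object | None = None

    def __bool__(self) -> bool:
        return bool(self.dimensions or self.storage_classes or self.task_classes or self.status)

    def checked(self, is_resolved: bool, has_status: bool = False) -> "OptionsSketch":
        # Mirrors the new default: drop status display when there is no status info.
        return dataclasses.replace(self, status=self.status if has_status else None)


opts = OptionsSketch(status="pretend NodeStatusOptions")
print(bool(opts))                             # True: status alone now enables display
print(opts.checked(is_resolved=True).status)  # None: graph without status information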