lsst-pipe-base 29.2025.3000__py3-none-any.whl → 29.2025.3100__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. lsst/pipe/base/_datasetQueryConstraints.py +1 -1
  2. lsst/pipe/base/all_dimensions_quantum_graph_builder.py +6 -4
  3. lsst/pipe/base/connectionTypes.py +19 -19
  4. lsst/pipe/base/connections.py +2 -2
  5. lsst/pipe/base/exec_fixup_data_id.py +131 -0
  6. lsst/pipe/base/execution_graph_fixup.py +69 -0
  7. lsst/pipe/base/log_capture.py +227 -0
  8. lsst/pipe/base/mp_graph_executor.py +774 -0
  9. lsst/pipe/base/quantum_graph_builder.py +43 -42
  10. lsst/pipe/base/quantum_graph_executor.py +125 -0
  11. lsst/pipe/base/quantum_reports.py +334 -0
  12. lsst/pipe/base/script/transfer_from_graph.py +4 -1
  13. lsst/pipe/base/separable_pipeline_executor.py +296 -0
  14. lsst/pipe/base/simple_pipeline_executor.py +674 -0
  15. lsst/pipe/base/single_quantum_executor.py +636 -0
  16. lsst/pipe/base/taskFactory.py +18 -12
  17. lsst/pipe/base/version.py +1 -1
  18. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/METADATA +1 -1
  19. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/RECORD +27 -18
  20. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/WHEEL +0 -0
  21. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/entry_points.txt +0 -0
  22. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/licenses/COPYRIGHT +0 -0
  23. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/licenses/LICENSE +0 -0
  24. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/licenses/bsd_license.txt +0 -0
  25. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/licenses/gpl-v3.0.txt +0 -0
  26. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/top_level.txt +0 -0
  27. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3100.dist-info}/zip-safe +0 -0
@@ -103,13 +103,13 @@ class InitInputMissingError(QuantumGraphBuilderError):
103
103
 
104
104
 
105
105
  class QuantumGraphBuilder(ABC):
106
- """An abstract base class for building `QuantumGraph` objects from a
106
+ """An abstract base class for building `.QuantumGraph` objects from a
107
107
  pipeline.
108
108
 
109
109
  Parameters
110
110
  ----------
111
111
  pipeline_graph : `.pipeline_graph.PipelineGraph`
112
- Pipeline to build a `QuantumGraph` from, as a graph. Will be resolved
112
+ Pipeline to build a `.QuantumGraph` from, as a graph. Will be resolved
113
113
  in-place with the given butler (any existing resolution is ignored).
114
114
  butler : `lsst.daf.butler.Butler`
115
115
  Client for the data repository. Should be read-only.
@@ -139,7 +139,7 @@ class QuantumGraphBuilder(ABC):
139
139
  The `build` method splits the pipeline graph into independent subgraphs,
140
140
  then calls the abstract method `process_subgraph` on each, to allow
141
141
  concrete implementations to populate the rough graph structure (the
142
- `~quantum_graph_skeleton.QuantumGraphSkeleton` class), including searching
142
+ `~.quantum_graph_skeleton.QuantumGraphSkeleton` class), including searching
143
143
  for existing datasets. The `build` method then:
144
144
 
145
145
  - assembles `lsst.daf.butler.Quantum` instances from all data IDs in the
@@ -321,7 +321,7 @@ class QuantumGraphBuilder(ABC):
321
321
 
322
322
  Returns
323
323
  -------
324
- quantum_graph : `QuantumGraph`
324
+ quantum_graph : `.QuantumGraph`
325
325
  DAG describing processing to be performed.
326
326
 
327
327
  Notes
@@ -373,7 +373,7 @@ class QuantumGraphBuilder(ABC):
373
373
  @abstractmethod
374
374
  def process_subgraph(self, subgraph: PipelineGraph) -> QuantumGraphSkeleton:
375
375
  """Build the rough structure for an independent subset of the
376
- `QuantumGraph` and query for relevant existing datasets.
376
+ `.QuantumGraph` and query for relevant existing datasets.
377
377
 
378
378
  Parameters
379
379
  ----------
@@ -384,39 +384,38 @@ class QuantumGraphBuilder(ABC):
384
384
 
385
385
  Returns
386
386
  -------
387
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
387
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
388
388
  Class representing an initial quantum graph. See
389
- `quantum_graph_skeleton.QuantumGraphSkeleton` docs for details.
389
+ `.quantum_graph_skeleton.QuantumGraphSkeleton` docs for details.
390
390
  After this is returned, the object may be modified in-place in
391
391
  unspecified ways.
392
392
 
393
393
  Notes
394
394
  -----
395
- The `quantum_graph_skeleton.QuantumGraphSkeleton` should associate
396
- `DatasetRef` objects with nodes for existing datasets. In
397
- particular:
395
+ The `.quantum_graph_skeleton.QuantumGraphSkeleton` should associate
396
+ `lsst.daf.butler.DatasetRef` objects with nodes for existing datasets.
397
+ In particular:
398
398
 
399
- - `quantum_graph_skeleton.QuantumGraphSkeleton.set_dataset_ref` must be
400
- used to associate existing datasets with all overall-input dataset
399
+ - `.quantum_graph_skeleton.QuantumGraphSkeleton.set_dataset_ref` must
400
+ be used to associate existing datasets with all overall-input dataset
401
401
  nodes in the skeleton by querying `input_collections`. This includes
402
402
  all standard input nodes and any prerequisite nodes added by the
403
403
  method (prerequisite nodes may also be left out entirely, as the base
404
404
  class can add them later, albeit possibly less efficiently).
405
- - `quantum_graph_skeleton.QuantumGraphSkeleton.set_output_for_skip`
405
+ - `.quantum_graph_skeleton.QuantumGraphSkeleton.set_output_for_skip`
406
406
  must be used to associate existing datasets with output dataset nodes
407
407
  by querying `skip_existing_in`.
408
- - `quantum_graph_skeleton.QuantumGraphSkeleton.add_output_in_the_way`
408
+ - `.quantum_graph_skeleton.QuantumGraphSkeleton.add_output_in_the_way`
409
409
  must be used to associate existing outputs with output dataset nodes
410
- by querying `output_run` if `output_run_exists` is `True`.
411
- Note that the presence of such datasets is not automatically an
412
- error, even if `clobber` is `False`, as these may be quanta that will
413
- be skipped.
410
+ by querying `output_run` if `output_run_exists` is `True`. Note that
411
+ the presence of such datasets is not automatically an error, even if
412
+ `clobber` is `False`, as these may be quanta that will be skipped.
414
413
 
415
- `DatasetRef` objects for existing datasets with empty data IDs in all
416
- of the above categories may be found in the `empty_dimensions_datasets`
417
- attribute, as these are queried for prior to this call by the base
418
- class, but associating them with graph nodes is still this method's
419
- responsibility.
414
+ `lsst.daf.butler.DatasetRef` objects for existing datasets with empty
415
+ data IDs in all of the above categories may be found in the
416
+ `empty_dimensions_datasets` attribute, as these are queried for prior
417
+ to this call by the base class, but associating them with graph nodes
418
+ is still this method's responsibility.
420
419
 
421
420
  Dataset types should never be components and should always use the
422
421
  "common" storage class definition in `pipeline_graph.DatasetTypeNode`
@@ -435,16 +434,17 @@ class QuantumGraphBuilder(ABC):
435
434
  ----------
436
435
  task_node : `pipeline_graph.TaskNode`
437
436
  Node for this task in the pipeline graph.
438
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
437
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
439
438
  Preliminary quantum graph, to be modified in-place.
440
439
 
441
440
  Notes
442
441
  -----
443
442
  This method modifies ``skeleton`` in-place in several ways:
444
443
 
445
- - It associates a `DatasetRef` with all output datasets and drops input
446
- dataset nodes that do not have a `DatasetRef` already. This ensures
447
- producing and consuming tasks start from the same `DatasetRef`.
444
+ - It associates a `lsst.daf.butler.DatasetRef` with all output datasets
445
+ and drops input dataset nodes that do not have a
446
+ `lsst.daf.butler.DatasetRef` already. This ensures producing and
447
+ consuming tasks start from the same `lsst.daf.butler.DatasetRef`.
448
448
  - It adds "inputs", "outputs", and "init_inputs" attributes to the
449
449
  quantum nodes, holding the same `NamedValueMapping` objects needed to
450
450
  construct actual `Quantum` instances.
@@ -596,7 +596,7 @@ class QuantumGraphBuilder(ABC):
596
596
  Node for this task in the pipeline graph.
597
597
  quantum_key : `QuantumKey`
598
598
  Identifier for this quantum in the graph.
599
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
599
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
600
600
  Preliminary quantum graph, to be modified in-place.
601
601
 
602
602
  Returns
@@ -611,9 +611,10 @@ class QuantumGraphBuilder(ABC):
611
611
  `skip_existing_in` collections, the quantum will be skipped. This
612
612
  causes the quantum node to be removed from the graph. Dataset nodes
613
613
  that were previously the outputs of this quantum will be associated
614
- with `DatasetRef` objects that were found in ``skip_existing_in``, or
615
- will be removed if there is no such dataset there. Any output dataset
616
- in `output_run` will be removed from the "output in the way" category.
614
+ with `lsst.daf.butler.DatasetRef` objects that were found in
615
+ ``skip_existing_in``, or will be removed if there is no such dataset
616
+ there. Any output dataset in `output_run` will be removed from the
617
+ "output in the way" category.
617
618
  """
618
619
  metadata_dataset_key = DatasetKey(
619
620
  task_node.metadata_output.parent_dataset_type_name, quantum_key.data_id_values
@@ -659,7 +660,7 @@ class QuantumGraphBuilder(ABC):
659
660
  ----------
660
661
  quantum_key : `QuantumKey`
661
662
  Identifier for this quantum in the graph.
662
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
663
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
663
664
  Preliminary quantum graph, to be modified in-place.
664
665
  task_prerequisite_info : `~prerequisite_helpers.PrerequisiteInfo`
665
666
  Information about the prerequisite inputs to this task.
@@ -679,7 +680,7 @@ class QuantumGraphBuilder(ABC):
679
680
  the original there). If `clobber` is `False`, `RuntimeError` is
680
681
  raised. If there is no output already present, a new one with a random
681
682
  UUID is generated. In all cases the dataset node in the skeleton is
682
- associated with a `DatasetRef`.
683
+ associated with a `lsst.daf.butler.DatasetRef`.
683
684
  """
684
685
  dataset_key: DatasetKey | PrerequisiteDatasetKey
685
686
  for dataset_key in skeleton.iter_outputs_of(quantum_key):
@@ -743,7 +744,7 @@ class QuantumGraphBuilder(ABC):
743
744
  Node for this task in the pipeline graph.
744
745
  quantum_key : `QuantumKey`
745
746
  Identifier for this quantum in the graph.
746
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
747
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
747
748
  Preliminary quantum graph, to be modified in-place.
748
749
 
749
750
  Returns
@@ -787,7 +788,7 @@ class QuantumGraphBuilder(ABC):
787
788
  Node for this task in the pipeline graph.
788
789
  quantum_key : `QuantumKey`
789
790
  Identifier for this quantum in the graph.
790
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
791
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
791
792
  Preliminary quantum graph, to be modified in-place.
792
793
  skypix_bounds_builder : `~prerequisite_helpers.SkyPixBoundsBuilder`
793
794
  An object that accumulates the appropriate spatial bounds for a
@@ -806,8 +807,8 @@ class QuantumGraphBuilder(ABC):
806
807
  Notes
807
808
  -----
808
809
  This method trims input dataset nodes that are not already associated
809
- with a `DatasetRef`, and queries for prerequisite input nodes that do
810
- not exist.
810
+ with a `lsst.daf.butler.DatasetRef`, and queries for prerequisite input
811
+ nodes that do not exist.
811
812
  """
812
813
  inputs_by_type: dict[str, set[DatasetRef]] = {}
813
814
  dataset_key: DatasetKey | PrerequisiteDatasetKey
@@ -987,7 +988,7 @@ class QuantumGraphBuilder(ABC):
987
988
 
988
989
  Parameters
989
990
  ----------
990
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
991
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
991
992
  Preliminary quantum graph to update in place.
992
993
 
993
994
  Notes
@@ -1018,12 +1019,12 @@ class QuantumGraphBuilder(ABC):
1018
1019
  def _construct_quantum_graph(
1019
1020
  self, skeleton: QuantumGraphSkeleton, metadata: Mapping[str, Any]
1020
1021
  ) -> QuantumGraph:
1021
- """Construct a `QuantumGraph` object from the contents of a
1022
- fully-processed `quantum_graph_skeleton.QuantumGraphSkeleton`.
1022
+ """Construct a `.QuantumGraph` object from the contents of a
1023
+ fully-processed `.quantum_graph_skeleton.QuantumGraphSkeleton`.
1023
1024
 
1024
1025
  Parameters
1025
1026
  ----------
1026
- skeleton : `quantum_graph_skeleton.QuantumGraphSkeleton`
1027
+ skeleton : `.quantum_graph_skeleton.QuantumGraphSkeleton`
1027
1028
  Preliminary quantum graph. Must have "init_inputs", "inputs", and
1028
1029
  "outputs" attributes on all quantum nodes, as added by
1029
1030
  `_resolve_task_quanta`, as well as a "datastore_records" attribute
@@ -1033,7 +1034,7 @@ class QuantumGraphBuilder(ABC):
1033
1034
 
1034
1035
  Returns
1035
1036
  -------
1036
- quantum_graph : `QuantumGraph`
1037
+ quantum_graph : `.QuantumGraph`
1037
1038
  DAG describing processing to be performed.
1038
1039
  """
1039
1040
  quanta: dict[TaskDef, set[Quantum]] = {}
@@ -0,0 +1,125 @@
1
+ # This file is part of pipe_base.
2
+ #
3
+ # Developed for the LSST Data Management System.
4
+ # This product includes software developed by the LSST Project
5
+ # (http://www.lsst.org).
6
+ # See the COPYRIGHT file at the top-level directory of this distribution
7
+ # for details of code ownership.
8
+ #
9
+ # This software is dual licensed under the GNU General Public License and also
10
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
11
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12
+ # respectively. If you choose the GPL option then the following text applies
13
+ # (but note that there is still no warranty even if you opt for BSD instead):
14
+ #
15
+ # This program is free software: you can redistribute it and/or modify
16
+ # it under the terms of the GNU General Public License as published by
17
+ # the Free Software Foundation, either version 3 of the License, or
18
+ # (at your option) any later version.
19
+ #
20
+ # This program is distributed in the hope that it will be useful,
21
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
22
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23
+ # GNU General Public License for more details.
24
+ #
25
+ # You should have received a copy of the GNU General Public License
26
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
27
+
28
+ from __future__ import annotations
29
+
30
+ __all__ = ["QuantumExecutor", "QuantumGraphExecutor"]
31
+
32
+ from abc import ABC, abstractmethod
33
+ from typing import TYPE_CHECKING
34
+
35
+ from .quantum_reports import QuantumReport, Report
36
+
37
+ if TYPE_CHECKING:
38
+ import uuid
39
+
40
+ from lsst.daf.butler import Quantum
41
+
42
+ from .graph import QuantumGraph
43
+ from .pipeline_graph import TaskNode
44
+
45
+
46
class QuantumExecutor(ABC):
    """Abstract interface for executing a single Quantum.

    Implementations should not depend on the overall execution model and
    should always run the quantum in-process.  The primary reason this
    abstraction exists is to allow a do-nothing implementation for use in
    unit tests.
    """

    @abstractmethod
    def execute(
        self, task_node: TaskNode, /, quantum: Quantum, quantum_id: uuid.UUID | None = None
    ) -> tuple[Quantum, QuantumReport | None]:
        """Execute a single quantum.

        Parameters
        ----------
        task_node : `~.pipeline_graph.TaskNode`
            Task definition structure.
        quantum : `~lsst.daf.butler.Quantum`
            Quantum for this execution.
        quantum_id : `uuid.UUID` or `None`, optional
            The ID of the quantum to be executed.

        Returns
        -------
        quantum : `~lsst.daf.butler.Quantum`
            The quantum actually executed.
        report : `~.quantum_reports.QuantumReport`
            Structure describing the status of the execution of a quantum.
            `None` is returned if the implementation does not support this
            feature.

        Notes
        -----
        Any exception raised by the task, or by code wrapping task
        execution, is propagated to the caller of this method.
        """
        raise NotImplementedError()
85
+
86
+
87
class QuantumGraphExecutor(ABC):
    """Abstract interface for executing a whole QuantumGraph.

    Each concrete execution model is supplied by a subclass overriding the
    `execute` method.
    """

    @abstractmethod
    def execute(self, graph: QuantumGraph) -> None:
        """Execute the whole graph.

        The implementation of this method is execution-model specific and
        must be provided by a subclass.  The execution model determines
        what happens here; it may be actual task execution or, for
        example, generation of scripts for delayed batch execution.

        Parameters
        ----------
        graph : `.QuantumGraph`
            Execution graph.
        """
        raise NotImplementedError()

    def getReport(self) -> Report | None:
        """Return execution report from last call to `execute`.

        Returns
        -------
        report : `~.quantum_reports.Report`, optional
            Structure describing the status of the execution of a quantum
            graph.  `None` is returned if the implementation does not
            support this feature.

        Raises
        ------
        RuntimeError
            Raised if this method is called before `execute`.
        """
        # Base-class default: reporting is not supported.
        return None
@@ -0,0 +1,334 @@
1
+ # This file is part of pipe_base.
2
+ #
3
+ # Developed for the LSST Data Management System.
4
+ # This product includes software developed by the LSST Project
5
+ # (http://www.lsst.org).
6
+ # See the COPYRIGHT file at the top-level directory of this distribution
7
+ # for details of code ownership.
8
+ #
9
+ # This software is dual licensed under the GNU General Public License and also
10
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
11
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12
+ # respectively. If you choose the GPL option then the following text applies
13
+ # (but note that there is still no warranty even if you opt for BSD instead):
14
+ #
15
+ # This program is free software: you can redistribute it and/or modify
16
+ # it under the terms of the GNU General Public License as published by
17
+ # the Free Software Foundation, either version 3 of the License, or
18
+ # (at your option) any later version.
19
+ #
20
+ # This program is distributed in the hope that it will be useful,
21
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
22
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23
+ # GNU General Public License for more details.
24
+ #
25
+ # You should have received a copy of the GNU General Public License
26
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
27
+
28
+ from __future__ import annotations
29
+
30
+ __all__ = ["ExceptionInfo", "ExecutionStatus", "QuantumReport", "Report"]
31
+
32
+ import enum
33
+ import sys
34
+ from typing import Any
35
+
36
+ import pydantic
37
+
38
+ from lsst.daf.butler import DataCoordinate, DataId, DataIdValue
39
+ from lsst.utils.introspection import get_full_type_name
40
+
41
+ from .graph import QgraphSummary
42
+
43
+
44
def _serializeDataId(dataId: DataId) -> dict[str, DataIdValue]:
    # DataCoordinate needs an explicit conversion to a plain mapping of its
    # required values; anything else is assumed to already be dict-like.
    if not isinstance(dataId, DataCoordinate):
        return dataId  # type: ignore
    return dict(dataId.required)
49
+
50
+
51
class ExecutionStatus(enum.Enum):
    """Possible values for job execution status.

    `FAILURE` means one or more tasks failed.  `TIMEOUT` means there were
    no failures but one or more tasks timed out; timeouts can only be
    detected in multi-process mode, where the child task is killed on
    timeout and usually exits with a non-zero code.
    """

    SUCCESS = "success"
    FAILURE = "failure"
    TIMEOUT = "timeout"
    SKIPPED = "skipped"
64
+
65
+
66
class ExceptionInfo(pydantic.BaseModel):
    """Information about an exception raised during execution."""

    className: str
    """Name of the exception class if exception was raised."""

    message: str
    """Exception message (the text of ``str(exception)``)."""

    @classmethod
    def from_exception(cls, exception: Exception) -> ExceptionInfo:
        """Construct instance from an exception.

        Parameters
        ----------
        exception : `Exception`
            Exception to wrap.

        Returns
        -------
        info : `ExceptionInfo`
            Information about the exception.
        """
        return cls(className=get_full_type_name(exception), message=str(exception))

    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
    # when we inherit those docstrings in our public classes.
    if "sphinx" in sys.modules:

        def copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.copy`."""
            return super().copy(*args, **kwargs)

        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump`."""
            return super().model_dump(*args, **kwargs)

        def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump_json`."""
            # Fixed: previously delegated to ``model_dump`` by mistake,
            # which returns a dict rather than a JSON string.
            return super().model_dump_json(*args, **kwargs)

        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_copy`."""
            return super().model_copy(*args, **kwargs)

        @classmethod
        def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
            """See `pydantic.BaseModel.model_construct`."""
            return super().model_construct(*args, **kwargs)

        @classmethod
        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_json_schema`."""
            return super().model_json_schema(*args, **kwargs)
122
+
123
+
124
class QuantumReport(pydantic.BaseModel):
    """Task execution report for a single Quantum.

    Parameters
    ----------
    dataId : `~lsst.daf.butler.DataId`
        Quantum data ID.
    taskLabel : `str`
        Label for task executing this Quantum.
    status : `ExecutionStatus`
        Status of this quantum execution.
    exitCode : `int` or `None`, optional
        Exit code for sub-process executing this Quantum.  `None` for
        in-process execution.  Negative if process was killed by a signal.
    exceptionInfo : `ExceptionInfo` or `None`, optional
        Exception information if an exception was raised.
    """

    status: ExecutionStatus = ExecutionStatus.SUCCESS
    """Execution status, one of the values in `ExecutionStatus` enum."""

    dataId: dict[str, DataIdValue]
    """Quantum DataId."""

    taskLabel: str | None
    """Label for a task executing this Quantum."""

    exitCode: int | None = None
    """Exit code for a sub-process executing Quantum, None for in-process
    Quantum execution.  Negative if process was killed by a signal.
    """

    exceptionInfo: ExceptionInfo | None = None
    """Exception information if exception was raised."""

    def __init__(
        self,
        dataId: DataId,
        taskLabel: str,
        status: ExecutionStatus = ExecutionStatus.SUCCESS,
        exitCode: int | None = None,
        exceptionInfo: ExceptionInfo | None = None,
    ):
        # Custom __init__ so callers can pass a DataCoordinate directly;
        # it is normalized to a plain dict before pydantic validation.
        super().__init__(
            status=status,
            dataId=_serializeDataId(dataId),
            taskLabel=taskLabel,
            exitCode=exitCode,
            exceptionInfo=exceptionInfo,
        )

    @classmethod
    def from_exception(
        cls,
        exception: Exception,
        dataId: DataId,
        taskLabel: str,
        *,
        exitCode: int | None = None,
    ) -> QuantumReport:
        """Construct report instance from an exception and other pieces of
        data.

        Parameters
        ----------
        exception : `Exception`
            Exception caught from processing quantum.
        dataId : `~lsst.daf.butler.DataId`
            Data ID of quantum.
        taskLabel : `str`
            Label of task.
        exitCode : `int`, optional
            Exit code for the process, used when it is known that the process
            will exit with that exit code.
        """
        return cls(
            status=ExecutionStatus.FAILURE,
            dataId=dataId,
            taskLabel=taskLabel,
            exitCode=exitCode,
            exceptionInfo=ExceptionInfo.from_exception(exception),
        )

    @classmethod
    def from_exit_code(
        cls,
        exitCode: int,
        dataId: DataId,
        taskLabel: str,
    ) -> QuantumReport:
        """Construct report instance from an exit code and other pieces of
        data.

        Parameters
        ----------
        exitCode : `int`
            The exit code of the subprocess.
        dataId : `~lsst.daf.butler.DataId`
            The quantum Data ID.
        taskLabel : `str`
            The task label.
        """
        return cls(
            status=ExecutionStatus.SUCCESS if exitCode == 0 else ExecutionStatus.FAILURE,
            dataId=dataId,
            taskLabel=taskLabel,
            exitCode=exitCode,
        )

    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
    # when we inherit those docstrings in our public classes.
    if "sphinx" in sys.modules:

        def copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.copy`."""
            return super().copy(*args, **kwargs)

        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump`."""
            return super().model_dump(*args, **kwargs)

        def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump_json`."""
            # Fixed: previously delegated to ``model_dump`` by mistake,
            # which returns a dict rather than a JSON string.
            return super().model_dump_json(*args, **kwargs)

        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_copy`."""
            return super().model_copy(*args, **kwargs)

        @classmethod
        def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
            """See `pydantic.BaseModel.model_construct`."""
            return super().model_construct(*args, **kwargs)

        @classmethod
        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_json_schema`."""
            return super().model_json_schema(*args, **kwargs)
262
+
263
+
264
class Report(pydantic.BaseModel):
    """Execution report for the whole job with one or more quanta."""

    qgraphSummary: QgraphSummary
    """Summary report about QuantumGraph."""

    status: ExecutionStatus = ExecutionStatus.SUCCESS
    """Job status."""

    cmdLine: list[str] | None = None
    """Command line for the whole job."""

    exitCode: int | None = None
    """Job exit code, this obviously cannot be set in pipetask."""

    exceptionInfo: ExceptionInfo | None = None
    """Exception information if exception was raised."""

    quantaReports: list[QuantumReport] = []
    """List of per-quantum reports, ordering is not specified.  Some or all
    quanta may not produce a report.
    """

    # Always want to validate the default value for cmdLine so
    # use a model_validator.
    @pydantic.model_validator(mode="before")
    @classmethod
    def _set_cmdLine(cls, data: Any) -> Any:
        # NOTE(review): assumes ``data`` is a mutable mapping; a "before"
        # validator can in principle receive other input forms — confirm
        # callers only construct this model from keyword args / dicts.
        if data.get("cmdLine") is None:
            data["cmdLine"] = sys.argv
        return data

    def set_exception(self, exception: Exception) -> None:
        """Update exception information from an exception object.

        Parameters
        ----------
        exception : `Exception`
            Exception to use to extract information from.
        """
        self.exceptionInfo = ExceptionInfo.from_exception(exception)

    # Work around the fact that Sphinx chokes on Pydantic docstring formatting,
    # when we inherit those docstrings in our public classes.
    if "sphinx" in sys.modules:

        def copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.copy`."""
            return super().copy(*args, **kwargs)

        def model_dump(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump`."""
            return super().model_dump(*args, **kwargs)

        def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_dump_json`."""
            # Fixed: previously delegated to ``model_dump`` by mistake,
            # which returns a dict rather than a JSON string.
            return super().model_dump_json(*args, **kwargs)

        def model_copy(self, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_copy`."""
            return super().model_copy(*args, **kwargs)

        @classmethod
        def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
            """See `pydantic.BaseModel.model_construct`."""
            return super().model_construct(*args, **kwargs)

        @classmethod
        def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
            """See `pydantic.BaseModel.model_json_schema`."""
            return super().model_json_schema(*args, **kwargs)