lsst-pipe-base 29.2025.3000-py3-none-any.whl → 29.2025.3200-py3-none-any.whl

This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (39)
  1. lsst/pipe/base/__init__.py +0 -1
  2. lsst/pipe/base/_datasetQueryConstraints.py +1 -1
  3. lsst/pipe/base/all_dimensions_quantum_graph_builder.py +10 -46
  4. lsst/pipe/base/caching_limited_butler.py +8 -4
  5. lsst/pipe/base/connectionTypes.py +19 -19
  6. lsst/pipe/base/connections.py +2 -2
  7. lsst/pipe/base/exec_fixup_data_id.py +131 -0
  8. lsst/pipe/base/execution_graph_fixup.py +69 -0
  9. lsst/pipe/base/graph/graphSummary.py +4 -4
  10. lsst/pipe/base/log_capture.py +227 -0
  11. lsst/pipe/base/mp_graph_executor.py +786 -0
  12. lsst/pipe/base/pipeline_graph/_pipeline_graph.py +40 -10
  13. lsst/pipe/base/pipeline_graph/_tasks.py +106 -0
  14. lsst/pipe/base/pipeline_graph/io.py +1 -1
  15. lsst/pipe/base/quantum_graph_builder.py +85 -58
  16. lsst/pipe/base/quantum_graph_executor.py +125 -0
  17. lsst/pipe/base/quantum_graph_skeleton.py +60 -1
  18. lsst/pipe/base/quantum_reports.py +334 -0
  19. lsst/pipe/base/script/transfer_from_graph.py +4 -1
  20. lsst/pipe/base/separable_pipeline_executor.py +296 -0
  21. lsst/pipe/base/simple_pipeline_executor.py +674 -0
  22. lsst/pipe/base/single_quantum_executor.py +635 -0
  23. lsst/pipe/base/taskFactory.py +18 -12
  24. lsst/pipe/base/tests/in_memory_limited_butler.py +223 -0
  25. lsst/pipe/base/tests/mocks/__init__.py +1 -0
  26. lsst/pipe/base/tests/mocks/_in_memory_repo.py +357 -0
  27. lsst/pipe/base/tests/mocks/_pipeline_task.py +19 -2
  28. lsst/pipe/base/version.py +1 -1
  29. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/METADATA +1 -1
  30. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/RECORD +38 -28
  31. lsst/pipe/base/executionButlerBuilder.py +0 -493
  32. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/WHEEL +0 -0
  33. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/entry_points.txt +0 -0
  34. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/licenses/COPYRIGHT +0 -0
  35. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/licenses/LICENSE +0 -0
  36. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/licenses/bsd_license.txt +0 -0
  37. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/licenses/gpl-v3.0.txt +0 -0
  38. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/top_level.txt +0 -0
  39. {lsst_pipe_base-29.2025.3000.dist-info → lsst_pipe_base-29.2025.3200.dist-info}/zip-safe +0 -0
lsst/pipe/base/quantum_graph_executor.py (new file)
@@ -0,0 +1,125 @@
+ # This file is part of pipe_base.
+ #
+ # Developed for the LSST Data Management System.
+ # This product includes software developed by the LSST Project
+ # (http://www.lsst.org).
+ # See the COPYRIGHT file at the top-level directory of this distribution
+ # for details of code ownership.
+ #
+ # This software is dual licensed under the GNU General Public License and also
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
+ # respectively. If you choose the GPL option then the following text applies
+ # (but note that there is still no warranty even if you opt for BSD instead):
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+ from __future__ import annotations
+
+ __all__ = ["QuantumExecutor", "QuantumGraphExecutor"]
+
+ from abc import ABC, abstractmethod
+ from typing import TYPE_CHECKING
+
+ from .quantum_reports import QuantumReport, Report
+
+ if TYPE_CHECKING:
+     import uuid
+
+     from lsst.daf.butler import Quantum
+
+     from .graph import QuantumGraph
+     from .pipeline_graph import TaskNode
+
+
+ class QuantumExecutor(ABC):
+     """Class which abstracts execution of a single Quantum.
+
+     In general the implementation should not depend on the execution model,
+     and execution should always happen in-process. The main reason this
+     class exists is to provide a do-nothing implementation that can be used
+     in unit tests.
+     """
+
+     @abstractmethod
+     def execute(
+         self, task_node: TaskNode, /, quantum: Quantum, quantum_id: uuid.UUID | None = None
+     ) -> tuple[Quantum, QuantumReport | None]:
+         """Execute a single quantum.
+
+         Parameters
+         ----------
+         task_node : `~.pipeline_graph.TaskNode`
+             Task definition structure.
+         quantum : `~lsst.daf.butler.Quantum`
+             Quantum for this execution.
+         quantum_id : `uuid.UUID` or `None`, optional
+             The ID of the quantum to be executed.
+
+         Returns
+         -------
+         quantum : `~lsst.daf.butler.Quantum`
+             The quantum actually executed.
+         report : `~.quantum_reports.QuantumReport`
+             Structure describing the status of the execution of a quantum.
+             `None` is returned if the implementation does not support this
+             feature.
+
+         Notes
+         -----
+         Any exception raised by the task or by the code that wraps task
+         execution is propagated to the caller of this method.
+         """
+         raise NotImplementedError()
+
+
+ class QuantumGraphExecutor(ABC):
+     """Class which abstracts QuantumGraph execution.
+
+     Any specific execution model is implemented in a subclass by overriding
+     the `execute` method.
+     """
+
+     @abstractmethod
+     def execute(self, graph: QuantumGraph) -> None:
+         """Execute the whole graph.
+
+         The implementation of this method depends on the particular
+         execution model and has to be provided by a subclass. The execution
+         model determines what happens here; it can be actual running of the
+         task or, for example, generation of scripts for delayed batch
+         execution.
+
+         Parameters
+         ----------
+         graph : `.QuantumGraph`
+             Execution graph.
+         """
+         raise NotImplementedError()
+
+     def getReport(self) -> Report | None:
+         """Return the execution report from the last call to `execute`.
+
+         Returns
+         -------
+         report : `~.quantum_reports.Report`, optional
+             Structure describing the status of the execution of a quantum
+             graph. `None` is returned if the implementation does not
+             support this feature.
+
+         Raises
+         ------
+         RuntimeError
+             Raised if this method is called before `execute`.
+         """
+         return None
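
As a minimal sketch (not part of the diff) of how the new QuantumExecutor ABC is meant to be subclassed, here is a hypothetical do-nothing executor of the kind the class docstring says the ABC exists to support in unit tests. The class name NoOpQuantumExecutor and its bookkeeping attribute are made up for illustration; the import paths come from this diff's file list.

    import uuid

    from lsst.daf.butler import Quantum
    from lsst.pipe.base.pipeline_graph import TaskNode
    from lsst.pipe.base.quantum_graph_executor import QuantumExecutor
    from lsst.pipe.base.quantum_reports import QuantumReport


    class NoOpQuantumExecutor(QuantumExecutor):
        """Hypothetical executor that records quanta instead of running them."""

        def __init__(self) -> None:
            self.executed: list[uuid.UUID | None] = []

        def execute(
            self, task_node: TaskNode, /, quantum: Quantum, quantum_id: uuid.UUID | None = None
        ) -> tuple[Quantum, QuantumReport | None]:
            # Record the call and return the quantum unchanged; returning
            # None for the report signals that per-quantum reporting is
            # not supported by this implementation.
            self.executed.append(quantum_id)
            return quantum, None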
lsst/pipe/base/quantum_graph_skeleton.py
@@ -40,12 +40,21 @@ __all__ = (
  )
 
  import dataclasses
+ from collections import defaultdict
  from collections.abc import Iterable, Iterator, MutableMapping, Set
  from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias
 
  import networkx
 
- from lsst.daf.butler import DataCoordinate, DataIdValue, DatasetRef
+ from lsst.daf.butler import (
+     Butler,
+     DataCoordinate,
+     DataIdValue,
+     DatasetRef,
+     DimensionDataAttacher,
+     DimensionGroup,
+     DimensionRecordSet,
+ )
  from lsst.utils.logging import getLogger
 
  if TYPE_CHECKING:
@@ -170,6 +179,7 @@ class QuantumGraphSkeleton:
          self._tasks: dict[str, tuple[TaskInitKey, set[QuantumKey]]] = {}
          self._xgraph: networkx.DiGraph = networkx.DiGraph()
          self._global_init_outputs: set[DatasetKey] = set()
+         self._dimension_data: dict[str, DimensionRecordSet] = {}
          for task_label in task_labels:
              task_init_key = TaskInitKey(task_label)
              self._tasks[task_label] = (task_init_key, set())
@@ -310,6 +320,10 @@ class QuantumGraphSkeleton:
          for task_label, (_, quanta) in other._tasks.items():
              self._tasks[task_label][1].update(quanta)
          self._xgraph.update(other._xgraph)
+         for record_set in other._dimension_data.values():
+             self._dimension_data.setdefault(
+                 record_set.element.name, DimensionRecordSet(record_set.element)
+             ).update(record_set)
 
      def add_quantum_node(self, task_label: str, data_id: DataCoordinate, **attrs: Any) -> QuantumKey:
          """Add a new node representing a quantum.
@@ -710,3 +724,48 @@ class QuantumGraphSkeleton:
              Raised if this node does not have an expanded data ID.
          """
          return self._xgraph.nodes[key]["data_id"]
+
+     def attach_dimension_records(
+         self, butler: Butler, dimensions: DimensionGroup, dimension_records: Iterable[DimensionRecordSet]
+     ) -> None:
+         """Attach dimension records to the data IDs in the skeleton.
+
+         Parameters
+         ----------
+         butler : `lsst.daf.butler.Butler`
+             Butler to use to query for missing dimension records.
+         dimensions : `lsst.daf.butler.DimensionGroup`
+             Superset of all of the dimensions of all data IDs.
+         dimension_records : `~collections.abc.Iterable` [ \
+                 `lsst.daf.butler.DimensionRecordSet` ]
+             Iterable of sets of dimension records to attach.
+         """
+         for record_set in dimension_records:
+             self._dimension_data.setdefault(
+                 record_set.element.name, DimensionRecordSet(record_set.element)
+             ).update(record_set)
+         # Group all nodes by data ID (and dimensions of data ID).
+         data_ids_to_expand: defaultdict[DimensionGroup, defaultdict[DataCoordinate, list[Key]]] = defaultdict(
+             lambda: defaultdict(list)
+         )
+         data_id: DataCoordinate | None
+         for node_key in self:
+             if data_id := self[node_key].get("data_id"):
+                 data_ids_to_expand[data_id.dimensions][data_id].append(node_key)
+         attacher = DimensionDataAttacher(records=self._dimension_data.values(), dimensions=dimensions)
+         for dimensions, data_ids in data_ids_to_expand.items():
+             with butler.query() as query:
+                 # Butler query will be used as-needed to get dimension records
+                 # (from prerequisites) we didn't fetch in advance. These are
+                 # cached in the attacher so we don't look them up multiple
+                 # times.
+                 expanded_data_ids = attacher.attach(dimensions, data_ids.keys(), query=query)
+             for expanded_data_id, node_keys in zip(expanded_data_ids, data_ids.values()):
+                 for node_key in node_keys:
+                     self.set_data_id(node_key, expanded_data_id)
+         # Hold on to any records that we had to query for.
+         self._dimension_data = attacher.records
+
+     def get_dimension_data(self) -> list[DimensionRecordSet]:
+         """Return the dimension records attached to data IDs."""
+         return list(self._dimension_data.values())
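
The core of attach_dimension_records is its grouping step: nodes are bucketed first by the dimensions of their data ID and then by the data ID itself, so each distinct data ID is expanded exactly once by the DimensionDataAttacher and the result is fanned back out to every node that shares it. A standalone sketch of that idiom, using toy stand-ins rather than real butler data IDs:

    from collections import defaultdict

    # Toy stand-ins: a "data ID" is a frozenset of (dimension, value) pairs,
    # and its "dimensions" are the sorted dimension names.
    nodes = {
        "quantum_a": frozenset({("visit", 1), ("detector", 10)}),
        "quantum_b": frozenset({("visit", 1), ("detector", 10)}),
        "dataset_c": frozenset({("tract", 42)}),
    }

    # Bucket nodes first by dimensions, then by data ID, analogous to
    # data_ids_to_expand in attach_dimension_records.
    grouped: defaultdict[tuple[str, ...], defaultdict[frozenset, list[str]]] = defaultdict(
        lambda: defaultdict(list)
    )
    for node_key, data_id in nodes.items():
        dims = tuple(sorted(dim for dim, _ in data_id))
        grouped[dims][data_id].append(node_key)

    # Each unique data ID is "expanded" once and fanned out to its nodes.
    for dims, data_ids in grouped.items():
        for data_id, node_keys in data_ids.items():
            expanded = dict(data_id)  # stand-in for attacher.attach(...)
            for node_key in node_keys:
                print(node_key, "->", expanded)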
lsst/pipe/base/quantum_reports.py (new file)
@@ -0,0 +1,334 @@
+ # This file is part of pipe_base.
+ #
+ # Developed for the LSST Data Management System.
+ # This product includes software developed by the LSST Project
+ # (http://www.lsst.org).
+ # See the COPYRIGHT file at the top-level directory of this distribution
+ # for details of code ownership.
+ #
+ # This software is dual licensed under the GNU General Public License and also
+ # under a 3-clause BSD license. Recipients may choose which of these licenses
+ # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
+ # respectively. If you choose the GPL option then the following text applies
+ # (but note that there is still no warranty even if you opt for BSD instead):
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+ from __future__ import annotations
+
+ __all__ = ["ExceptionInfo", "ExecutionStatus", "QuantumReport", "Report"]
+
+ import enum
+ import sys
+ from typing import Any
+
+ import pydantic
+
+ from lsst.daf.butler import DataCoordinate, DataId, DataIdValue
+ from lsst.utils.introspection import get_full_type_name
+
+ from .graph import QgraphSummary
+
+
+ def _serializeDataId(dataId: DataId) -> dict[str, DataIdValue]:
+     if isinstance(dataId, DataCoordinate):
+         return dict(dataId.required)
+     else:
+         return dataId  # type: ignore
+
+
+ class ExecutionStatus(enum.Enum):
+     """Possible values for job execution status.
+
+     Status `FAILURE` is set if one or more tasks failed. Status `TIMEOUT`
+     is set if there are no failures but one or more tasks timed out.
+     Timeouts can only be detected in multi-process mode; the child task is
+     killed on timeout and usually has a non-zero exit code.
+     """
+
+     SUCCESS = "success"
+     FAILURE = "failure"
+     TIMEOUT = "timeout"
+     SKIPPED = "skipped"
+
+
+ class ExceptionInfo(pydantic.BaseModel):
+     """Information about an exception."""
+
+     className: str
+     """Name of the exception class if an exception was raised."""
+
+     message: str
+     """Exception message for in-process quantum execution; `None` if the
+     quantum was executed in a sub-process.
+     """
+
+     @classmethod
+     def from_exception(cls, exception: Exception) -> ExceptionInfo:
+         """Construct an instance from an exception.
+
+         Parameters
+         ----------
+         exception : `Exception`
+             Exception to wrap.
+
+         Returns
+         -------
+         info : `ExceptionInfo`
+             Information about the exception.
+         """
+         return cls(className=get_full_type_name(exception), message=str(exception))
+
+     # Work around the fact that Sphinx chokes on Pydantic docstring
+     # formatting, when we inherit those docstrings in our public classes.
+     if "sphinx" in sys.modules:
+
+         def copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.copy`."""
+             return super().copy(*args, **kwargs)
+
+         def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump`."""
+             return super().model_dump(*args, **kwargs)
+
+         def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump_json`."""
+             return super().model_dump_json(*args, **kwargs)
+
+         def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_copy`."""
+             return super().model_copy(*args, **kwargs)
+
+         @classmethod
+         def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
+             """See `pydantic.BaseModel.model_construct`."""
+             return super().model_construct(*args, **kwargs)
+
+         @classmethod
+         def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_json_schema`."""
+             return super().model_json_schema(*args, **kwargs)
+
+
+ class QuantumReport(pydantic.BaseModel):
+     """Task execution report for a single Quantum.
+
+     Parameters
+     ----------
+     dataId : `~lsst.daf.butler.DataId`
+         Quantum data ID.
+     taskLabel : `str`
+         Label for the task executing this Quantum.
+     status : `ExecutionStatus`
+         Status of this quantum execution.
+     exitCode : `int` or `None`, optional
+         Exit code for the sub-process executing this Quantum. `None` for
+         in-process execution. Negative if the process was killed by a
+         signal.
+     exceptionInfo : `ExceptionInfo` or `None`, optional
+         Exception information if an exception was raised.
+     """
+
+     status: ExecutionStatus = ExecutionStatus.SUCCESS
+     """Execution status, one of the values in the `ExecutionStatus` enum."""
+
+     dataId: dict[str, DataIdValue]
+     """Quantum DataId."""
+
+     taskLabel: str | None
+     """Label for the task executing this Quantum."""
+
+     exitCode: int | None = None
+     """Exit code for a sub-process executing the Quantum; `None` for
+     in-process Quantum execution. Negative if the process was killed by a
+     signal.
+     """
+
+     exceptionInfo: ExceptionInfo | None = None
+     """Exception information if an exception was raised."""
+
+     def __init__(
+         self,
+         dataId: DataId,
+         taskLabel: str,
+         status: ExecutionStatus = ExecutionStatus.SUCCESS,
+         exitCode: int | None = None,
+         exceptionInfo: ExceptionInfo | None = None,
+     ):
+         super().__init__(
+             status=status,
+             dataId=_serializeDataId(dataId),
+             taskLabel=taskLabel,
+             exitCode=exitCode,
+             exceptionInfo=exceptionInfo,
+         )
+
+     @classmethod
+     def from_exception(
+         cls,
+         exception: Exception,
+         dataId: DataId,
+         taskLabel: str,
+         *,
+         exitCode: int | None = None,
+     ) -> QuantumReport:
+         """Construct a report instance from an exception and other pieces
+         of data.
+
+         Parameters
+         ----------
+         exception : `Exception`
+             Exception caught while processing the quantum.
+         dataId : `~lsst.daf.butler.DataId`
+             Data ID of the quantum.
+         taskLabel : `str`
+             Label of the task.
+         exitCode : `int`, optional
+             Exit code for the process, used when it is known that the
+             process will exit with that exit code.
+         """
+         return cls(
+             status=ExecutionStatus.FAILURE,
+             dataId=dataId,
+             taskLabel=taskLabel,
+             exitCode=exitCode,
+             exceptionInfo=ExceptionInfo.from_exception(exception),
+         )
+
+     @classmethod
+     def from_exit_code(
+         cls,
+         exitCode: int,
+         dataId: DataId,
+         taskLabel: str,
+     ) -> QuantumReport:
+         """Construct a report instance from an exit code and other pieces
+         of data.
+
+         Parameters
+         ----------
+         exitCode : `int`
+             The exit code of the subprocess.
+         dataId : `~lsst.daf.butler.DataId`
+             The quantum Data ID.
+         taskLabel : `str`
+             The task label.
+         """
+         return cls(
+             status=ExecutionStatus.SUCCESS if exitCode == 0 else ExecutionStatus.FAILURE,
+             dataId=dataId,
+             taskLabel=taskLabel,
+             exitCode=exitCode,
+         )
+
+     # Work around the fact that Sphinx chokes on Pydantic docstring
+     # formatting, when we inherit those docstrings in our public classes.
+     if "sphinx" in sys.modules:
+
+         def copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.copy`."""
+             return super().copy(*args, **kwargs)
+
+         def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump`."""
+             return super().model_dump(*args, **kwargs)
+
+         def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump_json`."""
+             return super().model_dump_json(*args, **kwargs)
+
+         def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_copy`."""
+             return super().model_copy(*args, **kwargs)
+
+         @classmethod
+         def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
+             """See `pydantic.BaseModel.model_construct`."""
+             return super().model_construct(*args, **kwargs)
+
+         @classmethod
+         def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_json_schema`."""
+             return super().model_json_schema(*args, **kwargs)
+
+
+ class Report(pydantic.BaseModel):
+     """Execution report for the whole job with one or a few quanta."""
+
+     qgraphSummary: QgraphSummary
+     """Summary report about the QuantumGraph."""
+
+     status: ExecutionStatus = ExecutionStatus.SUCCESS
+     """Job status."""
+
+     cmdLine: list[str] | None = None
+     """Command line for the whole job."""
+
+     exitCode: int | None = None
+     """Job exit code; this obviously cannot be set in pipetask."""
+
+     exceptionInfo: ExceptionInfo | None = None
+     """Exception information if an exception was raised."""
+
+     quantaReports: list[QuantumReport] = []
+     """List of per-quantum reports; ordering is not specified. Some or all
+     quanta may not produce a report.
+     """
+
+     # We always want to validate the default value for cmdLine, so
+     # use a model_validator.
+     @pydantic.model_validator(mode="before")
+     @classmethod
+     def _set_cmdLine(cls, data: Any) -> Any:
+         if data.get("cmdLine") is None:
+             data["cmdLine"] = sys.argv
+         return data
+
+     def set_exception(self, exception: Exception) -> None:
+         """Update exception information from an exception object.
+
+         Parameters
+         ----------
+         exception : `Exception`
+             Exception to extract information from.
+         """
+         self.exceptionInfo = ExceptionInfo.from_exception(exception)
+
+     # Work around the fact that Sphinx chokes on Pydantic docstring
+     # formatting, when we inherit those docstrings in our public classes.
+     if "sphinx" in sys.modules:
+
+         def copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.copy`."""
+             return super().copy(*args, **kwargs)
+
+         def model_dump(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump`."""
+             return super().model_dump(*args, **kwargs)
+
+         def model_dump_json(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_dump_json`."""
+             return super().model_dump_json(*args, **kwargs)
+
+         def model_copy(self, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_copy`."""
+             return super().model_copy(*args, **kwargs)
+
+         @classmethod
+         def model_construct(cls, *args: Any, **kwargs: Any) -> Any:  # type: ignore[misc, override]
+             """See `pydantic.BaseModel.model_construct`."""
+             return super().model_construct(*args, **kwargs)
+
+         @classmethod
+         def model_json_schema(cls, *args: Any, **kwargs: Any) -> Any:
+             """See `pydantic.BaseModel.model_json_schema`."""
+             return super().model_json_schema(*args, **kwargs)
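
A short sketch (not part of the diff) of the two report constructors the new quantum_reports.py module provides. The data ID values and the task label "isr" are made-up examples; the signatures are those defined above.

    from lsst.pipe.base.quantum_reports import ExecutionStatus, QuantumReport

    # A failure report: the exception is wrapped in an ExceptionInfo and
    # the status is forced to FAILURE.
    try:
        raise ValueError("calibration frame missing")
    except ValueError as err:
        report = QuantumReport.from_exception(
            err, dataId={"visit": 1, "detector": 10}, taskLabel="isr"
        )
    assert report.status is ExecutionStatus.FAILURE
    assert report.exceptionInfo is not None

    # A report from a subprocess exit code: 0 maps to SUCCESS, anything
    # else to FAILURE.
    ok = QuantumReport.from_exit_code(0, dataId={"visit": 1, "detector": 10}, taskLabel="isr")
    assert ok.status is ExecutionStatus.SUCCESS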
lsst/pipe/base/script/transfer_from_graph.py
@@ -167,10 +167,13 @@ def _update_chain(butler: Butler, output_chain: str, output_run: str, inputs: li
      if created_now:
          _LOG.verbose("Registered chain collection: %s", output_chain)
      if inputs:
+         # First we must flatten any input chains.
+         flattened = butler.collections.query(inputs, flatten_chains=True)
+
          # Add input collections to the chain collection just made. Using
          # extend instead of prepend in case of a race condition where
          # another execution adds a run before this adds the inputs to the
          # chain.
-         butler.collections.extend_chain(output_chain, inputs)
+         butler.collections.extend_chain(output_chain, flattened)
      _LOG.verbose(
          "Prepending output chain collection (%s) with output RUN collection (%s)", output_chain, output_run
      )
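
The change resolves chained input collections to their member collections before extending the output chain, rather than adding the chains themselves. A standalone sketch of that flattening behavior, with a hypothetical chain layout standing in for what butler.collections.query(..., flatten_chains=True) does against a real registry:

    from collections.abc import Iterable, Iterator

    # Hypothetical registry mapping CHAINED collections to their members.
    chains: dict[str, list[str]] = {
        "defaults": ["calib", "raws"],
        "calib": ["calib/run1", "calib/run2"],
    }


    def flatten_chains(collections: Iterable[str]) -> Iterator[str]:
        """Yield leaf collections, expanding any chain recursively."""
        for name in collections:
            if name in chains:
                yield from flatten_chains(chains[name])
            else:
                yield name


    # Prints ['calib/run1', 'calib/run2', 'raws', 'u/someone/run1']
    print(list(flatten_chains(["defaults", "u/someone/run1"])))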