lsst-pipe-base 30.0.1rc1__py3-none-any.whl → 30.2025.5100__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (69)
  1. lsst/pipe/base/_instrument.py +20 -31
  2. lsst/pipe/base/_quantumContext.py +3 -3
  3. lsst/pipe/base/_status.py +10 -43
  4. lsst/pipe/base/_task_metadata.py +2 -2
  5. lsst/pipe/base/all_dimensions_quantum_graph_builder.py +3 -8
  6. lsst/pipe/base/automatic_connection_constants.py +1 -20
  7. lsst/pipe/base/cli/cmd/__init__.py +2 -18
  8. lsst/pipe/base/cli/cmd/commands.py +4 -149
  9. lsst/pipe/base/connectionTypes.py +160 -72
  10. lsst/pipe/base/connections.py +9 -6
  11. lsst/pipe/base/execution_reports.py +5 -0
  12. lsst/pipe/base/graph/graph.py +10 -11
  13. lsst/pipe/base/graph/quantumNode.py +4 -4
  14. lsst/pipe/base/graph_walker.py +10 -8
  15. lsst/pipe/base/log_capture.py +80 -40
  16. lsst/pipe/base/mp_graph_executor.py +15 -51
  17. lsst/pipe/base/pipeline.py +6 -5
  18. lsst/pipe/base/pipelineIR.py +8 -2
  19. lsst/pipe/base/pipelineTask.py +7 -5
  20. lsst/pipe/base/pipeline_graph/_dataset_types.py +2 -2
  21. lsst/pipe/base/pipeline_graph/_edges.py +22 -32
  22. lsst/pipe/base/pipeline_graph/_mapping_views.py +7 -4
  23. lsst/pipe/base/pipeline_graph/_pipeline_graph.py +7 -14
  24. lsst/pipe/base/pipeline_graph/expressions.py +2 -2
  25. lsst/pipe/base/pipeline_graph/io.py +10 -7
  26. lsst/pipe/base/pipeline_graph/visualization/_dot.py +12 -13
  27. lsst/pipe/base/pipeline_graph/visualization/_layout.py +18 -16
  28. lsst/pipe/base/pipeline_graph/visualization/_merge.py +7 -4
  29. lsst/pipe/base/pipeline_graph/visualization/_printer.py +10 -10
  30. lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py +0 -7
  31. lsst/pipe/base/prerequisite_helpers.py +1 -2
  32. lsst/pipe/base/quantum_graph/_common.py +20 -19
  33. lsst/pipe/base/quantum_graph/_multiblock.py +31 -37
  34. lsst/pipe/base/quantum_graph/_predicted.py +13 -111
  35. lsst/pipe/base/quantum_graph/_provenance.py +45 -1136
  36. lsst/pipe/base/quantum_graph/aggregator/__init__.py +1 -0
  37. lsst/pipe/base/quantum_graph/aggregator/_communicators.py +289 -204
  38. lsst/pipe/base/quantum_graph/aggregator/_config.py +9 -87
  39. lsst/pipe/base/quantum_graph/aggregator/_ingester.py +12 -13
  40. lsst/pipe/base/quantum_graph/aggregator/_scanner.py +235 -49
  41. lsst/pipe/base/quantum_graph/aggregator/_structs.py +116 -6
  42. lsst/pipe/base/quantum_graph/aggregator/_supervisor.py +39 -29
  43. lsst/pipe/base/quantum_graph/aggregator/_writer.py +351 -34
  44. lsst/pipe/base/quantum_graph/visualization.py +1 -5
  45. lsst/pipe/base/quantum_graph_builder.py +8 -21
  46. lsst/pipe/base/quantum_graph_executor.py +13 -116
  47. lsst/pipe/base/quantum_graph_skeleton.py +29 -31
  48. lsst/pipe/base/quantum_provenance_graph.py +12 -29
  49. lsst/pipe/base/separable_pipeline_executor.py +3 -19
  50. lsst/pipe/base/single_quantum_executor.py +42 -67
  51. lsst/pipe/base/struct.py +0 -4
  52. lsst/pipe/base/testUtils.py +3 -3
  53. lsst/pipe/base/tests/mocks/_storage_class.py +1 -2
  54. lsst/pipe/base/version.py +1 -1
  55. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/METADATA +3 -3
  56. lsst_pipe_base-30.2025.5100.dist-info/RECORD +125 -0
  57. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/WHEEL +1 -1
  58. lsst/pipe/base/log_on_close.py +0 -76
  59. lsst/pipe/base/quantum_graph/aggregator/_workers.py +0 -303
  60. lsst/pipe/base/quantum_graph/formatter.py +0 -171
  61. lsst/pipe/base/quantum_graph/ingest_graph.py +0 -413
  62. lsst_pipe_base-30.0.1rc1.dist-info/RECORD +0 -129
  63. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/entry_points.txt +0 -0
  64. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/licenses/COPYRIGHT +0 -0
  65. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/licenses/LICENSE +0 -0
  66. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/licenses/bsd_license.txt +0 -0
  67. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/licenses/gpl-v3.0.txt +0 -0
  68. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/top_level.txt +0 -0
  69. {lsst_pipe_base-30.0.1rc1.dist-info → lsst_pipe_base-30.2025.5100.dist-info}/zip-safe +0 -0
lsst/pipe/base/quantum_graph/ingest_graph.py
@@ -1,413 +0,0 @@
- # This file is part of pipe_base.
- #
- # Developed for the LSST Data Management System.
- # This product includes software developed by the LSST Project
- # (http://www.lsst.org).
- # See the COPYRIGHT file at the top-level directory of this distribution
- # for details of code ownership.
- #
- # This software is dual licensed under the GNU General Public License and also
- # under a 3-clause BSD license. Recipients may choose which of these licenses
- # to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
- # respectively. If you choose the GPL option then the following text applies
- # (but note that there is still no warranty even if you opt for BSD instead):
- #
- # This program is free software: you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation, either version 3 of the License, or
- # (at your option) any later version.
- #
- # This program is distributed in the hope that it will be useful,
- # but WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- # GNU General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- """A tool for ingesting provenance quantum graphs (written by the `aggregator`
- module) and [re-]ingesting other datasets (metadata/logs/configs) backed by the
- same file. This "finalizes" the RUN collection, prohibiting (at least
- conceptually) further processing.
-
- This always proceeds in three steps, so we can resume efficiently:
-
- 1. First we ask the butler to "forget" any metadata/log/config datasets that
-    exist in the output RUN collection, removing any record of them from the
-    butler database while preserving their files.
-
- 2. Next we ingest the ``run_provenance`` graph dataset itself.
-
- 3. Finally, in batches of quanta, we use a
-    `~lsst.daf.butler.QuantumBackedButler` to delete the original
-    metadata/log/config files and ingest new versions of those datasets into the
-    butler.
-
- Thus, at any point, if the ``run_provenance`` dataset has not been ingested,
- we know any metadata/log/config datasets that have been ingested are backed by
- the original files.
-
- Moreover, if the ``run_provenance`` dataset has been ingested, any existing
- metadata/log/config datasets must be backed by the graph file, and the original
- files for those datasets will have been deleted.
-
- We also know that at all times the metadata/log/config *content* is safely
- present in either the original files in the butler storage or in an
- already-ingested ``run_provenance`` dataset.
- """
-
- from __future__ import annotations
-
- __all__ = ("ingest_graph",)
-
- import dataclasses
- import itertools
- import os
- import uuid
- from collections.abc import Iterator
- from contextlib import contextmanager
-
- from lsst.daf.butler import (
-     Butler,
-     Config,
-     DataCoordinate,
-     DatasetRef,
-     DatasetType,
-     FileDataset,
-     QuantumBackedButler,
- )
- from lsst.daf.butler.registry.sql_registry import SqlRegistry
- from lsst.resources import ResourcePath, ResourcePathExpression
- from lsst.utils.logging import getLogger
-
- from ..automatic_connection_constants import PROVENANCE_DATASET_TYPE_NAME, PROVENANCE_STORAGE_CLASS
- from ._provenance import (
-     ProvenanceDatasetInfo,
-     ProvenanceInitQuantumInfo,
-     ProvenanceQuantumGraph,
-     ProvenanceQuantumGraphReader,
-     ProvenanceQuantumInfo,
- )
- from .formatter import ProvenanceFormatter
-
- _LOG = getLogger(__name__)
-
-
- def ingest_graph(
-     butler_config: str | Config,
-     uri: ResourcePathExpression | None = None,
-     *,
-     transfer: str | None = "move",
-     batch_size: int = 10000,
-     output_run: str | None = None,
- ) -> None:
-     """Ingest a provenance graph into a butler repository.
-
-     Parameters
-     ----------
-     butler_config : `str`
-         Path or alias for the butler repository, or a butler repository config
-         object.
-     uri : `lsst.resources.ResourcePathExpression` or `None`, optional
-         Location of the provenance quantum graph to ingest. `None` indicates
-         that the quantum graph has already been ingested, but other ingests
-         and/or deletions failed and need to be resumed.
-     transfer : `str` or `None`, optional
-         Transfer mode to use when ingesting graph. Matches those supported
-         by `lsst.resources.ResourcePath.transfer_from`.
-     batch_size : `int`, optional
-         Number of datasets to process in each transaction.
-     output_run : `str`, optional
-         Output `~lsst.daf.butler.CollectionType.RUN` collection name. Only
-         needs to be provided if ``uri`` is `None`. If it is provided the
-         output run in the graph is checked against it.
-
-     Notes
-     -----
-     After this operation, any further processing done in the
-     `~lsst.daf.butler.CollectionType.RUN` collection will not be included in
-     the provenance.
-
-     If this process is interrupted, it can pick up where it left off if run
-     again (at the cost of some duplicate work to figure out how much progress
-     it had made).
-     """
-     with _GraphIngester.open(butler_config, uri, output_run) as helper:
-         helper.fetch_already_ingested_datasets()
-         if not helper.graph_already_ingested:
-             assert uri is not None
-             helper.forget_ingested_datasets(batch_size=batch_size)
-             helper.ingest_graph_dataset(uri, transfer=transfer)
-         helper.clean_and_reingest_datasets(batch_size=batch_size)
-         if helper.directories_to_delete:
-             _LOG.info(
-                 "Deleting %d directories after checking that they are empty.",
-                 len(helper.directories_to_delete),
-             )
-             n_deleted: int = 0
-             for top in sorted(helper.directories_to_delete):
-                 nonempty: set[str] = set()
-                 for root, dirnames, filenames in os.walk(top, topdown=False):
-                     if filenames:
-                         nonempty.add(root)
-                     for dirname in dirnames:
-                         dirpath = os.path.join(root, dirname)
-                         if dirpath in nonempty:
-                             nonempty.add(root)
-                         else:
-                             os.rmdir(dirpath)
-                 if nonempty:
-                     _LOG.warning(
-                         "Directory %r was not deleted because it unexpectedly still had files in it.",
-                         top,
-                     )
-                 else:
-                     os.rmdir(root)
-                     n_deleted += 1
-             _LOG.info("Deleted %d directories.", n_deleted)
-
-
- @dataclasses.dataclass
- class _GraphIngester:
-     butler_config: str | Config
-     butler: Butler
-     graph: ProvenanceQuantumGraph
-     graph_already_ingested: bool
-     n_datasets: int
-     datasets_already_ingested: set[uuid.UUID] = dataclasses.field(default_factory=set)
-     directories_to_delete: set[str] = dataclasses.field(default_factory=set)
-
-     @property
-     def output_run(self) -> str:
-         return self.graph.header.output_run
-
-     @classmethod
-     @contextmanager
-     def open(
-         cls,
-         butler_config: str | Config,
-         uri: ResourcePathExpression | None,
-         output_run: str | None,
-     ) -> Iterator[_GraphIngester]:
-         with Butler.from_config(butler_config, collections=output_run, writeable=True) as butler:
-             butler.registry.registerDatasetType(
-                 DatasetType(PROVENANCE_DATASET_TYPE_NAME, butler.dimensions.empty, PROVENANCE_STORAGE_CLASS)
-             )
-             graph, graph_already_ingested = cls.read_graph(butler, uri)
-             if output_run is not None and graph.header.output_run != output_run:
-                 raise ValueError(
-                     f"Given output run {output_run!r} does not match the graph "
-                     f"header {graph.header.output_run!r}."
-                 )
-             n_datasets = 2 * len(graph.quantum_only_xgraph) + len(graph.init_quanta)
-             yield cls(
-                 butler_config=butler_config,
-                 butler=butler,
-                 graph=graph,
-                 graph_already_ingested=graph_already_ingested,
-                 n_datasets=n_datasets,
-             )
-
-     @staticmethod
-     def read_graph(
-         butler: Butler,
-         uri: ResourcePathExpression | None,
-     ) -> tuple[ProvenanceQuantumGraph, bool]:
-         if uri is not None:
-             _LOG.info("Reading the pre-ingest provenance graph.")
-             with ProvenanceQuantumGraphReader.open(uri) as reader:
-                 reader.read_quanta()
-                 reader.read_init_quanta()
-                 graph = reader.graph
-             already_ingested = (
-                 butler.find_dataset(PROVENANCE_DATASET_TYPE_NAME, collections=[graph.header.output_run])
-                 is not None
-             )
-             return graph, already_ingested
-         else:
-             _LOG.info("Reading the already-ingested provenance graph.")
-             parameters = {"datasets": [], "read_init_quanta": True}
-             return butler.get(PROVENANCE_DATASET_TYPE_NAME, parameters=parameters), True
-
-     def fetch_already_ingested_datasets(self) -> None:
-         _LOG.info("Querying for existing datasets in %r.", self.output_run)
-         self.datasets_already_ingested.update(self.butler.registry._fetch_run_dataset_ids(self.output_run))
-
-     def iter_datasets(self) -> Iterator[tuple[uuid.UUID, ProvenanceDatasetInfo]]:
-         xgraph = self.graph.bipartite_xgraph
-         for task_label, quanta_for_task in self.graph.quanta_by_task.items():
-             _LOG.verbose(
-                 "Batching up metadata and log datasets from %d %s quanta.", len(quanta_for_task), task_label
-             )
-             for quantum_id in quanta_for_task.values():
-                 quantum_info: ProvenanceQuantumInfo = xgraph.nodes[quantum_id]
-                 metadata_id = quantum_info["metadata_id"]
-                 yield metadata_id, xgraph.nodes[metadata_id]
-                 log_id = quantum_info["log_id"]
-                 yield log_id, xgraph.nodes[log_id]
-         _LOG.verbose("Batching up config datasets from %d tasks.", len(self.graph.init_quanta))
-         for task_label, quantum_id in self.graph.init_quanta.items():
-             init_quantum_info: ProvenanceInitQuantumInfo = xgraph.nodes[quantum_id]
-             config_id = init_quantum_info["config_id"]
-             yield config_id, xgraph.nodes[config_id]
-
-     def forget_ingested_datasets(self, batch_size: int) -> None:
-         _LOG.info(
-             "Dropping database records for metadata/log/config datasets backed by their original files."
-         )
-         to_forget: list[DatasetRef] = []
-         n_forgotten: int = 0
-         n_skipped: int = 0
-         for dataset_id, dataset_info in self.iter_datasets():
-             if dataset_info["produced"] and dataset_id in self.datasets_already_ingested:
-                 to_forget.append(self._make_ref_from_info(dataset_id, dataset_info))
-                 self.datasets_already_ingested.remove(dataset_id)
-                 if len(to_forget) >= batch_size:
-                     n_forgotten += self._run_forget(to_forget, n_forgotten + n_skipped)
-             else:
-                 n_skipped += 1
-         n_forgotten += self._run_forget(to_forget, n_forgotten + n_skipped)
-         _LOG.info(
-             "Removed database records for %d metadata/log/config datasets, while %d were already absent.",
-             n_forgotten,
-             n_skipped,
-         )
-
-     def _run_forget(self, to_forget: list[DatasetRef], n_current: int) -> int:
-         if to_forget:
-             _LOG.verbose(
-                 "Forgetting a %d-dataset batch; %d/%d forgotten so far or already absent.",
-                 len(to_forget),
-                 n_current,
-                 self.n_datasets,
-             )
-             with self.butler.registry.transaction():
-                 self.butler._datastore.forget(to_forget)
-                 self.butler.registry.removeDatasets(to_forget)
-         n = len(to_forget)
-         to_forget.clear()
-         return n
-
-     def ingest_graph_dataset(self, uri: ResourcePathExpression, transfer: str | None) -> None:
-         _LOG.info("Ingesting the provenance quantum graph.")
-         dataset_type = DatasetType(
-             PROVENANCE_DATASET_TYPE_NAME, self.butler.dimensions.empty, PROVENANCE_STORAGE_CLASS
-         )
-         self.butler.registry.registerDatasetType(dataset_type)
-         ref = DatasetRef(dataset_type, DataCoordinate.make_empty(self.butler.dimensions), run=self.output_run)
-         uri = ResourcePath(uri)
-         self.butler.ingest(
-             # We use .abspath() since butler assumes paths are relative to the
-             # repo root, while users expects them to be relative to the CWD in
-             # this context.
-             FileDataset(refs=[ref], path=uri.abspath(), formatter=ProvenanceFormatter),
-             transfer=transfer,
-         )
-
-     def clean_and_reingest_datasets(self, batch_size: int) -> None:
-         _LOG.info(
-             "Deleting original metadata/log/config files and re-ingesting them with provenance graph backing."
-         )
-         direct_uri = self.butler.getURI(PROVENANCE_DATASET_TYPE_NAME, collections=[self.output_run])
-         qbb = self.make_qbb()
-         to_process: list[DatasetRef] = []
-         n_processed: int = 0
-         n_skipped: int = 0
-         n_not_produced: int = 0
-         for dataset_id, dataset_info in self.iter_datasets():
-             if not dataset_info["produced"]:
-                 n_not_produced += 1
-             elif dataset_id not in self.datasets_already_ingested:
-                 to_process.append(self._make_ref_from_info(dataset_id, dataset_info))
-                 if len(to_process) >= batch_size:
-                     n_processed += self._run_clean_and_ingest(
-                         qbb, direct_uri, to_process, n_processed + n_skipped
-                     )
-             else:
-                 n_skipped += 1
-         n_processed += self._run_clean_and_ingest(qbb, direct_uri, to_process, n_processed + n_skipped)
-         _LOG.info(
-             "Deleted and re-ingested %d metadata/log/config datasets "
-             "(%d had already been processed, %d were not produced).",
-             n_processed,
-             n_skipped,
-             n_not_produced,
-         )
-
-     def _run_clean_and_ingest(
-         self, qbb: QuantumBackedButler, direct_uri: ResourcePath, to_process: list[DatasetRef], n_current: int
-     ) -> int:
-         if not to_process:
-             return 0
-         _LOG.verbose(
-             "Deleting and re-ingesting a %d-dataset batch; %d/%d complete.",
-             len(to_process),
-             n_current,
-             self.n_datasets,
-         )
-         sql_registry: SqlRegistry = self.butler._registry  # type: ignore[attr-defined]
-         expanded_refs = sql_registry.expand_refs(to_process)
-         # We need to pass predict=True to keep QBB/FileDatastore from wasting
-         # time doing existence checks, since ResourcePath.mremove will ignore
-         # nonexistent files anyway.
-         original_uris = list(
-             itertools.chain.from_iterable(
-                 ref_uris.iter_all() for ref_uris in qbb.get_many_uris(expanded_refs, predict=True).values()
-             )
-         )
-         removal_status = ResourcePath.mremove(original_uris, do_raise=False)
-         for path, status in removal_status.items():
-             if not status.success and not isinstance(status.exception, FileNotFoundError):
-                 assert status.exception is not None, "Exception should be set if success=False."
-                 status.exception.add_note(f"Attempting to delete original file at {path}.")
-                 raise status.exception
-         file_dataset = FileDataset(refs=expanded_refs, path=direct_uri, formatter=ProvenanceFormatter)
-         self.butler.ingest(file_dataset, transfer=None)
-         if len(original_uris) == len(expanded_refs):
-             for uri, ref in zip(original_uris, expanded_refs):
-                 if uri.isLocal:
-                     if (
-                         parent_dir := self.find_dataset_type_directory(uri.ospath, ref.datasetType.name)
-                     ) is not None:
-                         self.directories_to_delete.add(parent_dir)
-         elif any(uri.isLocal for uri in original_uris):
-             _LOG.warning(
-                 "Not attempting to delete empty metadata/log/config directories because the number "
-                 "of paths (%s) did not match the number of datasets (%s).",
-                 len(original_uris),
-                 len(expanded_refs),
-             )
-         n = len(to_process)
-         to_process.clear()
-         return n
-
-     @staticmethod
-     def _make_ref_from_info(dataset_id: uuid.UUID, dataset_info: ProvenanceDatasetInfo) -> DatasetRef:
-         return DatasetRef(
-             dataset_info["pipeline_node"].dataset_type,
-             dataset_info["data_id"],
-             run=dataset_info["run"],
-             id=dataset_id,
-         )
-
-     def make_qbb(self) -> QuantumBackedButler:
-         dataset_types = {d.name: d.dataset_type for d in self.graph.pipeline_graph.dataset_types.values()}
-         return QuantumBackedButler.from_predicted(
-             config=self.butler_config,
-             predicted_inputs=(),
-             predicted_outputs=(),
-             dimensions=self.butler.dimensions,
-             datastore_records={},
-             dataset_types=dataset_types,
-         )
-
-     def find_dataset_type_directory(self, ospath: str, dataset_type: str) -> str | None:
-         dir_components: list[str] = []
-         for component in os.path.dirname(ospath).split(os.path.sep):
-             dir_components.append(component)
-             # If the full dataset type name is in a single directory path
-             # component, we guess that directory can only have datasets of
-             # that type.
-             if dataset_type in component:
-                 return os.path.sep.join(dir_components)
-         return None
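
For reference: the docstring of the removed module above describes a three-step, resumable flow (forget existing metadata/log/config records, ingest the ``run_provenance`` graph dataset, then delete the original files and re-ingest those datasets backed by the graph file, in batches). The sketch below shows how the removed ``ingest_graph`` entry point could have been called, based only on the signature in the deleted source; the repository path, graph file name, and run name are placeholders, not values taken from this package.

# Illustrative sketch only; paths and collection names are placeholders.
from lsst.pipe.base.quantum_graph.ingest_graph import ingest_graph

# Normal case: ingest a provenance graph written by the aggregator module.
ingest_graph(
    "/repo/main",               # butler repo path, alias, or Config
    "run_provenance.qgraph",    # provenance quantum graph file to ingest
    transfer="move",            # default; move the graph file into the datastore
    batch_size=10000,           # datasets handled per database transaction
)

# Resume case: the graph dataset is already ingested but later batches failed;
# pass uri=None and name the output RUN collection explicitly.
ingest_graph("/repo/main", None, output_run="u/someone/my_run")
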
lsst_pipe_base-30.0.1rc1.dist-info/RECORD
@@ -1,129 +0,0 @@
- lsst/__init__.py,sha256=_2bZAHuDVAx7MM7KA7pt3DYp641NY4RzSoRAwesWKfU,67
- lsst/pipe/__init__.py,sha256=_2bZAHuDVAx7MM7KA7pt3DYp641NY4RzSoRAwesWKfU,67
- lsst/pipe/base/__init__.py,sha256=qBLN0yYQjIcLBLb4jFKM_ppopuqTnCehcUdFcEe69Js,970
- lsst/pipe/base/_datasetQueryConstraints.py,sha256=bFH0_lVc49NS2_4v_i6r9POr500c0K-OHLMhMX5FjkQ,6373
- lsst/pipe/base/_dataset_handle.py,sha256=ft_ke1LbhLLndDPARsHSQJUA05LgUFnfWOq2vbwH3wI,11353
- lsst/pipe/base/_instrument.py,sha256=T7cLScH1I0sVDpPdW2LpXdTRP4n2EDpiSD4SKdy2zzc,30621
- lsst/pipe/base/_observation_dimension_packer.py,sha256=78Jg2OVFOdXIK62TS2Y3X4095xqCzmiIx9o4TXyADYA,8027
- lsst/pipe/base/_quantumContext.py,sha256=b6gQV0z38HhDK6yZbPT3m5a2aIyO9HwixQxeJEs8xPc,19394
- lsst/pipe/base/_status.py,sha256=ak8Cpj87-8uVp4ssfuHY6vcE5xzjIYVYHDZ_tA8zySA,22220
- lsst/pipe/base/_task_metadata.py,sha256=ak-EZFs0DmcRA-9Onk1UuoA_zkvdVPniYPNS1Ts0uQ4,25611
- lsst/pipe/base/all_dimensions_quantum_graph_builder.py,sha256=v04C4aKy9ctN1PXgfsloaOWOZnyArISb9AmDXyETT6E,71188
- lsst/pipe/base/automatic_connection_constants.py,sha256=i_V6FCsP8cORlsoH8gpDYnWiPVvjY2tbfGxREgne34w,3972
- lsst/pipe/base/caching_limited_butler.py,sha256=FEQK_QesUWG9sK0w9m6_SA5OPcrd6vqyq8oDKE9Wu6I,7858
- lsst/pipe/base/config.py,sha256=yNipVEc6awwhU_O9I01g20OnvQrs28dAwkXuI1hrlYE,11982
- lsst/pipe/base/configOverrides.py,sha256=B0An8EaX76VzWnC5dJxvyZ2AhVzawMtq7qlE9ma5lkc,14661
- lsst/pipe/base/connectionTypes.py,sha256=RbhGQpEdwpnDlexV_FoWoYBwcayHXsW0TZevtvK1s5c,11251
- lsst/pipe/base/connections.py,sha256=cQcHSxL59-3EnUM4JF-G_paHKDY-d45gWXWGKXsmUZ0,67068
- lsst/pipe/base/dot_tools.py,sha256=vriWMaB8YTEKKvhJE5KYdVGE4gB5XmiYfD2f18Fue-c,4285
- lsst/pipe/base/exec_fixup_data_id.py,sha256=9OjOcH-6AHZ1JnD_CemieI0wWX90J_VdaY9v1oXwMdQ,4187
- lsst/pipe/base/execution_graph_fixup.py,sha256=ND0x4hlpeEW-gudo-i2K7HT7MoM5sp_mcoqRMCopSqQ,3815
- lsst/pipe/base/execution_reports.py,sha256=62pY4sBQyHPCFceQzKLYQ0eQixNcmjs5m4DJAFAsPiA,17160
- lsst/pipe/base/graph_walker.py,sha256=JbPv4uTwQ8fQ8D67mPkCRCJg6l8FXuIBZb9_BIstDcA,4639
- lsst/pipe/base/log_capture.py,sha256=JnGFxyTp2ZDiiJ2-Zlj09hjCTWC86fCrj7NAOgdTl9I,11088
- lsst/pipe/base/log_on_close.py,sha256=JnmponL16Jr2afOxCSQb6BfAxXOdnXjIA0_YWSGB0OE,2619
- lsst/pipe/base/mermaid_tools.py,sha256=cdlDJQ1x8k7-VvCLEUqvSC3GR1zCsB-aUTxOjYejNWc,5216
- lsst/pipe/base/mp_graph_executor.py,sha256=bbsJD0i-WpW1Qc6lAvagIIx__jZfeOqN8dyt3IsyEq0,37350
- lsst/pipe/base/pipeline.py,sha256=bL5a4QQy35K8Efphy9ks-0-8VK-zyI_D9CF5Zrz2gbA,37505
- lsst/pipe/base/pipelineIR.py,sha256=2z8mdTxQS7etxQ3bSI3e0gdR5OiCyCzylPHoakDzRFM,45649
- lsst/pipe/base/pipelineTask.py,sha256=jCyT6F3oevQ66I33Ys6g20x0F2QxSVh5qFlDE_PMfE0,8245
- lsst/pipe/base/prerequisite_helpers.py,sha256=p2VaThE-zUcM9vzc_kcgEbEMzXVRGpJQH6KLcQ-QXDU,28479
- lsst/pipe/base/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lsst/pipe/base/quantum_graph_builder.py,sha256=sic8D2OzXtccwjyHDjOSHER1ahykjUd24pmOUmv8NYs,69274
- lsst/pipe/base/quantum_graph_executor.py,sha256=-bbeR9wZ5_Etcqpx6cDS_R-oEvfUVv9gmW0Wc4QuQNc,8127
- lsst/pipe/base/quantum_graph_skeleton.py,sha256=872ypxlrt2mTsWLGWxyMCIiPv8dbfyAdMuQi9LQtRRY,28734
- lsst/pipe/base/quantum_provenance_graph.py,sha256=LRgFmgkBj_YQop-Zfj_rsWBtQ7mlmEIi8J-wJXb_A5I,92793
- lsst/pipe/base/quantum_reports.py,sha256=ut235L88v7SXaeVUvMA9qFl7tpeMwGnzob3X0QoOI_s,14210
- lsst/pipe/base/resource_usage.py,sha256=LfH7Qf6taI3lxw0aB90riRMn1UxUTMBSqtBjKPJ-XuY,6759
- lsst/pipe/base/separable_pipeline_executor.py,sha256=Yh0EXqiBfXBkA9WJYC54sqobg6_VaM_Y4q0MW1TkF-Q,17736
- lsst/pipe/base/simple_pipeline_executor.py,sha256=_HGeAOgHUiRdQwsmD_qira-vcHZXlLbf_LmhLJffh2U,29563
- lsst/pipe/base/single_quantum_executor.py,sha256=daQcgOp1mZhu4GvtOti5WpkJCbXewaRIW1_P2m4Q_8U,29485
- lsst/pipe/base/struct.py,sha256=fHO9QzFtzL5-jxqhcBWbjyrdbiWO-Pt4Kx1X5bxN3SU,5094
- lsst/pipe/base/task.py,sha256=XHBd-7m1a4-6LgobBYA1DgY4H7EV-_RWKfxbhZbMmD4,15145
- lsst/pipe/base/taskFactory.py,sha256=MsDGECJqZLSZk8SGhpuVhNaP32UWuNvxZiDcZExPFG8,3412
- lsst/pipe/base/testUtils.py,sha256=2C34i7FeZF9hJzWLKgvEAJMMzrx_F2-BNJ3yGlekUIo,18474
- lsst/pipe/base/utils.py,sha256=JmEt3l0xrh9uayKrSXuQEq12aXOhDr2YXmbYduaxCko,1940
- lsst/pipe/base/version.py,sha256=-auEgPcWk4qAWrWVMJY7Of7dAJ0dNQHoqQraCNdAEN4,52
- lsst/pipe/base/cli/__init__.py,sha256=861tXIAW7SqtqNUYkjbeEdfg8lDswXsjJQca0gVCFz4,54
- lsst/pipe/base/cli/_get_cli_subcommands.py,sha256=g_af64klRybBGKAg7fmBSZBdw2LYBAsFON_yQIMZON0,1289
- lsst/pipe/base/cli/cmd/__init__.py,sha256=AOKtbbpWFb-EXRNOlUkWOsySiN0L_ZJWsaY6z264Dx8,1689
- lsst/pipe/base/cli/cmd/commands.py,sha256=TrGmou_ZSpPYxmfm0d2vEOzUynu9qdp7svqsYz2kG2A,14531
- lsst/pipe/base/cli/opt/__init__.py,sha256=DN17wUbMwNIgbDBfF35sdyGfaMT81f3b_CA5Pp8POdk,1347
- lsst/pipe/base/cli/opt/arguments.py,sha256=9LhDnsM98_2zOqqm-eyv_nnZmAQcBG5OpHzeJYw_eTw,1484
- lsst/pipe/base/cli/opt/options.py,sha256=d5mC2WXZJiUbkdaIo_VUsvNxShD3GmredbEN5jQD64Q,1900
- lsst/pipe/base/formatters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lsst/pipe/base/formatters/pexConfig.py,sha256=MA-08FIDV7PmpcV-VtDaBegR6YO6_pEhMB37WKMwup8,2163
- lsst/pipe/base/graph/__init__.py,sha256=Zs2vwSFNiu1bYDsgrWQZ0qegG5F6PIjiQ5ZGT3EqcfA,118
- lsst/pipe/base/graph/_implDetails.py,sha256=QQHVnCW78UnIbALXX_v7EW7g6MTUTuuR1Q_Ss_squUw,6784
- lsst/pipe/base/graph/_loadHelpers.py,sha256=qUfjIgFezaXZRCFV7PFzmz1SSKFjRWOMWJePuyKiD24,12064
- lsst/pipe/base/graph/_versionDeserializers.py,sha256=Xwq-MHfxaml2bL5cxSF8qmb6rjEHHZBuSengX8iggRg,28011
- lsst/pipe/base/graph/graph.py,sha256=RUnqfkpuiTUWBYF9N0OzEtnSqc0jg0uJWlxbrS1Ezdg,75250
- lsst/pipe/base/graph/graphSummary.py,sha256=F0ET5H4cBFYNPXvHuUBa3nTCj99rpf0JwxPG5Kfi7iw,6352
- lsst/pipe/base/graph/quantumNode.py,sha256=at7UuzMmco1eh-XraEnRih6aLScNcxGU9nY376D-wBk,7199
- lsst/pipe/base/pipeline_graph/__init__.py,sha256=yTEuvlzbeKIHIm7GeRmGSsma1wpZFNv8j12WfSH-deY,1516
- lsst/pipe/base/pipeline_graph/__main__.py,sha256=E6ugEwJbds22wjgcfcgzeyO04JofQwVhn_Y8kZYY1lQ,20769
- lsst/pipe/base/pipeline_graph/_dataset_types.py,sha256=0Y5V-apEfMkswvmgIKSs0QSHjBXvij2Eg8nFdJNmbIU,11350
- lsst/pipe/base/pipeline_graph/_edges.py,sha256=TOLGiNiKWzUpw99xQuujkUNsi1-nMalTqhe6Glafqgs,36096
- lsst/pipe/base/pipeline_graph/_exceptions.py,sha256=3jvCXms0_5ThLGtsOlKxsI1vWiq3gY4hba8fRBW0tgI,3943
- lsst/pipe/base/pipeline_graph/_mapping_views.py,sha256=H7IPiVwwgLfgFPnv-hEi2TqH-DqcQy-nve-IF08LuUs,9132
- lsst/pipe/base/pipeline_graph/_nodes.py,sha256=GGXfzXvrjNbwPt-0w8cC0l_I6CCNskoDNjA8Ds4ILS0,4236
- lsst/pipe/base/pipeline_graph/_pipeline_graph.py,sha256=UpzW3U884Isa6URW9FYo47TU7RdgZYE_zkkD_lkVXww,122641
- lsst/pipe/base/pipeline_graph/_task_subsets.py,sha256=lLvcndSGcZigteWd4eeAM8LxQ1lHPBoysY8PjJTxx1c,13244
- lsst/pipe/base/pipeline_graph/_tasks.py,sha256=jTLpm5dZMXRNrGi3L45-3DtF95PGwhmejWLZ-zcSTzo,42802
- lsst/pipe/base/pipeline_graph/expressions.py,sha256=vvyUsK0fwV0bOv_NnmUYgn92E_FYGHxVGnP9zQgwLmE,7673
- lsst/pipe/base/pipeline_graph/io.py,sha256=__yRbbvX4FQBCUnlgyTLQzHyfSHLXDigqlMyV4rdly8,30898
- lsst/pipe/base/pipeline_graph/visualization/__init__.py,sha256=qQctfWuFpcmgRdgu8Y6OsJ_pXpLKrCK-alqfVtIecls,1551
- lsst/pipe/base/pipeline_graph/visualization/_dot.py,sha256=e03GTkSAKIM2z9b9c80MJwrms12I8MJ3YPJIWuVSRf0,13621
- lsst/pipe/base/pipeline_graph/visualization/_formatting.py,sha256=NsBxXwdmISitr8_4wPc-T8CqVB-Mq4pv7DmUefFm3JU,17845
- lsst/pipe/base/pipeline_graph/visualization/_layout.py,sha256=rvsadgasgGZKB625LmRVnEfB6r4u3Y8YrOQTdw6m1Ms,17088
- lsst/pipe/base/pipeline_graph/visualization/_merge.py,sha256=6ml-4zkamy-q47hqIhXgmWsKn9jjqoykDW9T0qlHc5c,15391
- lsst/pipe/base/pipeline_graph/visualization/_mermaid.py,sha256=V_LesIauJStjrxLO-cbvrhaZo5kDYG-JIMuErVu0EPk,20255
- lsst/pipe/base/pipeline_graph/visualization/_options.py,sha256=vOIp2T7DLA48lTm5mTyCakIByb_wM21U_Crz_83MjoM,5237
- lsst/pipe/base/pipeline_graph/visualization/_printer.py,sha256=-HEmoNT4z8_ouLdU5NuFzr3haf6pDb8TdM6alnRGKN4,16516
- lsst/pipe/base/pipeline_graph/visualization/_show.py,sha256=lPRjO1To2n5r3f_Wgcwy-7TmyJ7UszGGFXAlOtN1wDs,10510
- lsst/pipe/base/pipeline_graph/visualization/_status_annotator.py,sha256=qX-j0Azys3fU1kHDl-G60PmYt-QOPzZ3b1bxBKf6ILg,8829
- lsst/pipe/base/quantum_graph/__init__.py,sha256=-Gp3LihB0AXCvhG387wKAEpHRM-NrHGSXMti8cHee90,1437
- lsst/pipe/base/quantum_graph/_common.py,sha256=lHUILG8qhHibjUp4zwiWAMSpKe8nMKxvivXiJ3YA3wA,22868
- lsst/pipe/base/quantum_graph/_multiblock.py,sha256=wzTteI0D9iEtVjJulN0V8h1dmo9gzDQvJJEbRAEKfS0,28180
- lsst/pipe/base/quantum_graph/_predicted.py,sha256=ioDaBsm3juzHBTE7-YxLEREuibgcq759uGdd1j3_h2w,91362
- lsst/pipe/base/quantum_graph/_provenance.py,sha256=mV7hTHWbDQRAyI4X2JuoOXjuVez8y7NfotnfiSSVg-s,104563
- lsst/pipe/base/quantum_graph/formatter.py,sha256=EjwcgBkPqUxULU3yOstLOjg8MKlZozebGS8uzReDRBQ,7315
- lsst/pipe/base/quantum_graph/ingest_graph.py,sha256=Q0CMkHYpSAfjRUiJkW9nM3yE9Auy7neMtscebyUXM4o,17934
- lsst/pipe/base/quantum_graph/visualization.py,sha256=scDtzcG537WEbhfUE3BtAVjPuTuZTXRiLjJvxm_-Jcs,12548
- lsst/pipe/base/quantum_graph/aggregator/__init__.py,sha256=pjLjg1wm0fWihTN5_CCHg2sO2rIa_t0Fd8uuF1kk91A,7435
- lsst/pipe/base/quantum_graph/aggregator/_communicators.py,sha256=eA3HHAVZr4Dnqa704txsuHytVMV1kkpsowZhUyPpEGY,34375
- lsst/pipe/base/quantum_graph/aggregator/_config.py,sha256=S8xJKE2xC-VzSfpaFB2G_lOTO7F0kqxNvT9FV8vzEAs,8556
- lsst/pipe/base/quantum_graph/aggregator/_ingester.py,sha256=IY-5x-PXRYzheGpZrHCxKBKVRCeGUb9EJXjP3kpu1m4,14093
- lsst/pipe/base/quantum_graph/aggregator/_progress.py,sha256=jiz9Np73uUQ03CtH7pI6TXxXrwFUChh5hSj_gbMGHr0,7207
- lsst/pipe/base/quantum_graph/aggregator/_scanner.py,sha256=VijYSKYzQ0io8zZKLmg8TlMPwYG4fAYD3ob9nI-ciGE,13398
- lsst/pipe/base/quantum_graph/aggregator/_structs.py,sha256=MUBk8wrThFz4CuoIFUlm4t5K3QOBbye9uI1CAAdUIoA,2374
- lsst/pipe/base/quantum_graph/aggregator/_supervisor.py,sha256=J-17NGFCfb8uRmfDv6ivaqkB_lbRK-x5etf9IYS9T38,9251
- lsst/pipe/base/quantum_graph/aggregator/_workers.py,sha256=E7A6mrmIjSPa5-FKDDHof_bKX6jlZldJ2lqdc8dBpxU,9356
- lsst/pipe/base/quantum_graph/aggregator/_writer.py,sha256=Xrha3Y4PGNv_8rUrSMgCIUsx6TssvK4C_5N70p3Rmcg,7959
- lsst/pipe/base/script/__init__.py,sha256=cLEXE7aq5UZ0juL_ScmRw0weFgp4tDgwEX_ts-NEYic,1522
- lsst/pipe/base/script/register_instrument.py,sha256=neQ2MTPtAiV_Hl2yatQ8-vQC24xHjhpI7VJUHf5kPX4,2445
- lsst/pipe/base/script/retrieve_artifacts_for_quanta.py,sha256=Cr0HpzXm_C3LnIOQg5tNJht02O6xoqtWWIphjugasMA,3957
- lsst/pipe/base/script/transfer_from_graph.py,sha256=NelxNU_aNEwPHDZnDGXA07oeuSkeXqEZ6KG0ax6VE8c,7254
- lsst/pipe/base/script/utils.py,sha256=zNqpHG3kXA8OaNXnwYIo0Hu_LCie1qoBAARAME3WEjs,3739
- lsst/pipe/base/script/zip_from_graph.py,sha256=dMyyTGzEBCMMmR6Ts8R7QXUJrbtdrac_i-iy87lFDSI,3242
- lsst/pipe/base/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lsst/pipe/base/tests/in_memory_limited_butler.py,sha256=UzLh416H67nCUhD9y3cniAAjY7VojvhOLjF3gHHgjA4,8679
- lsst/pipe/base/tests/no_dimensions.py,sha256=58UpyRN8cLAMZtkOmjTm3dJZyRFRekotQ-7-OgEfiAI,4710
- lsst/pipe/base/tests/pipelineStepTester.py,sha256=KGxdB8gdVpSey2RUGURDIzIfPL-4qvQCsBpMrhG4Z2M,7208
- lsst/pipe/base/tests/simpleQGraph.py,sha256=V_h1SifQ27vnjz5Fax1KZYyn6yPUDNfc8gRjc7SFbhE,20197
- lsst/pipe/base/tests/util.py,sha256=IXpZOC58fdRnurB5lPcNX-xRgKEV-cPNkWKJDFIr1gs,4772
- lsst/pipe/base/tests/mocks/__init__.py,sha256=fDy9H9vRAIBpKDJEXNZuDWJMzWZfpcBT4TmyOw4o-RY,1572
- lsst/pipe/base/tests/mocks/_data_id_match.py,sha256=jVekStcrItC0tqOCc01VjYaiE9exYm3MRkwB0Gh_3J0,7465
- lsst/pipe/base/tests/mocks/_pipeline_task.py,sha256=N3fC4OMAMWWnYtyLkVdMfb9ZiFse39HniRDvlAOofOY,30691
- lsst/pipe/base/tests/mocks/_repo.py,sha256=SH-jzynS-H2xc_3GLjF7ln-kHdRoSeVVaal5qLd2hXI,28359
- lsst/pipe/base/tests/mocks/_storage_class.py,sha256=YD9iyOSs_JqfZYOejHy1AQkbfM2wJFT5zkscjrYU0Rc,27457
- lsst_pipe_base-30.0.1rc1.dist-info/licenses/COPYRIGHT,sha256=kB3Z9_f6a6uFLGpEmNJT_n186CE65H6wHu4F6BNt_zA,368
- lsst_pipe_base-30.0.1rc1.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
- lsst_pipe_base-30.0.1rc1.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
- lsst_pipe_base-30.0.1rc1.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- lsst_pipe_base-30.0.1rc1.dist-info/METADATA,sha256=8f_z1jwcLGZF5GGwIZHJm1uPZxwrQ6wK7rCT1SK-9kU,2258
- lsst_pipe_base-30.0.1rc1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- lsst_pipe_base-30.0.1rc1.dist-info/entry_points.txt,sha256=bnmUhJBsChxMdqST9VmFBYYKxLQoToOfqW1wjW7khjk,64
- lsst_pipe_base-30.0.1rc1.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
- lsst_pipe_base-30.0.1rc1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- lsst_pipe_base-30.0.1rc1.dist-info/RECORD,,