lsst-ctrl-mpexec 29.2025.3100-py3-none-any.whl → 29.2025.3300-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
```diff
@@ -31,13 +31,9 @@ from __future__ import annotations

 __all__ = ["CmdLineFwk"]

-import atexit
-import contextlib
-import copy
 import logging
 import pickle
-import shutil
-from collections.abc import Mapping, Sequence
+from collections.abc import Mapping
 from types import SimpleNamespace

 import astropy.units as u
```
```diff
@@ -46,7 +42,6 @@ from astropy.table import Table
 import lsst.utils.timer
 from lsst.daf.butler import (
     Butler,
-    CollectionType,
     Config,
     DatasetType,
     DimensionConfig,
```
```diff
@@ -54,27 +49,13 @@ from lsst.daf.butler import (
     LimitedButler,
     Quantum,
     QuantumBackedButler,
-    Registry,
-)
-from lsst.daf.butler.datastore.cache_manager import DatastoreCacheManager
-from lsst.daf.butler.direct_butler import DirectButler
-from lsst.daf.butler.registry import MissingCollectionError, RegistryDefaults
-from lsst.daf.butler.registry.wildcards import CollectionWildcard
-from lsst.pipe.base import (
-    ExecutionResources,
-    Instrument,
-    Pipeline,
-    PipelineGraph,
-    QuantumGraph,
-    TaskFactory,
-    buildExecutionButler,
 )
+from lsst.pipe.base import ExecutionResources, QuantumGraph, TaskFactory
 from lsst.pipe.base.all_dimensions_quantum_graph_builder import AllDimensionsQuantumGraphBuilder
 from lsst.pipe.base.dot_tools import graph2dot
 from lsst.pipe.base.execution_graph_fixup import ExecutionGraphFixup
 from lsst.pipe.base.mermaid_tools import graph2mermaid
 from lsst.pipe.base.mp_graph_executor import MPGraphExecutor
-from lsst.pipe.base.pipeline_graph import NodeType
 from lsst.pipe.base.quantum_reports import Report
 from lsst.pipe.base.single_quantum_executor import SingleQuantumExecutor
 from lsst.resources import ResourcePath
```
```diff
@@ -83,425 +64,12 @@ from lsst.utils.logging import VERBOSE, getLogger
 from lsst.utils.threads import disable_implicit_threading

 from ._pipeline_graph_factory import PipelineGraphFactory
+from .cli.butler_factory import ButlerFactory
 from .preExecInit import PreExecInit, PreExecInitLimited

-# ----------------------------------
-# Local non-exported definitions --
-# ----------------------------------
-
 _LOG = getLogger(__name__)


-class _OutputChainedCollectionInfo:
-    """A helper class for handling command-line arguments related to an output
-    `~lsst.daf.butler.CollectionType.CHAINED` collection.
-
-    Parameters
-    ----------
-    registry : `lsst.daf.butler.Registry`
-        Butler registry that collections will be added to and/or queried from.
-    name : `str`
-        Name of the collection given on the command line.
-    """
-
-    def __init__(self, registry: Registry, name: str):
-        self.name = name
-        try:
-            self.chain = tuple(registry.getCollectionChain(name))
-            self.exists = True
-        except MissingCollectionError:
-            self.chain = ()
-            self.exists = False
-
-    def __str__(self) -> str:
-        return self.name
-
-    name: str
-    """Name of the collection provided on the command line (`str`).
-    """
-
-    exists: bool
-    """Whether this collection already exists in the registry (`bool`).
-    """
-
-    chain: tuple[str, ...]
-    """The definition of the collection, if it already exists (`tuple`[`str`]).
-
-    Empty if the collection does not already exist.
-    """
-
-
-class _OutputRunCollectionInfo:
-    """A helper class for handling command-line arguments related to an output
-    `~lsst.daf.butler.CollectionType.RUN` collection.
-
-    Parameters
-    ----------
-    registry : `lsst.daf.butler.Registry`
-        Butler registry that collections will be added to and/or queried from.
-    name : `str`
-        Name of the collection given on the command line.
-    """
-
-    def __init__(self, registry: Registry, name: str):
-        self.name = name
-        try:
-            actualType = registry.getCollectionType(name)
-            if actualType is not CollectionType.RUN:
-                raise TypeError(f"Collection '{name}' exists but has type {actualType.name}, not RUN.")
-            self.exists = True
-        except MissingCollectionError:
-            self.exists = False
-
-    name: str
-    """Name of the collection provided on the command line (`str`).
-    """
-
-    exists: bool
-    """Whether this collection already exists in the registry (`bool`).
-    """
-
-
-class _ButlerFactory:
-    """A helper class for processing command-line arguments related to input
-    and output collections.
-
-    Parameters
-    ----------
-    registry : `lsst.daf.butler.Registry`
-        Butler registry that collections will be added to and/or queried from.
-
-    args : `types.SimpleNamespace`
-        Parsed command-line arguments. The following attributes are used,
-        either at construction or in later methods.
-
-        ``output``
-            The name of a `~lsst.daf.butler.CollectionType.CHAINED`
-            input/output collection.
-
-        ``output_run``
-            The name of a `~lsst.daf.butler.CollectionType.RUN` input/output
-            collection.
-
-        ``extend_run``
-            A boolean indicating whether ``output_run`` should already exist
-            and be extended.
-
-        ``replace_run``
-            A boolean indicating that (if `True`) ``output_run`` should already
-            exist but will be removed from the output chained collection and
-            replaced with a new one.
-
-        ``prune_replaced``
-            A boolean indicating whether to prune the replaced run (requires
-            ``replace_run``).
-
-        ``rebase``
-            A boolean indicating whether to force the ``output`` collection
-            to be consistent with ``inputs`` and ``output`` run such that the
-            ``output`` collection has output run collections first (i.e. those
-            that start with the same prefix), then the new inputs, then any
-            original inputs not included in the new inputs.
-
-        ``inputs``
-            Input collections of any type; see
-            :ref:`daf_butler_ordered_collection_searches` for details.
-
-        ``butler_config``
-            Path to a data repository root or configuration file.
-
-    writeable : `bool`
-        If `True`, a `~lsst.daf.butler.Butler` is being initialized in a
-        context where actual writes should happens, and hence no output run
-        is necessary.
-
-    Raises
-    ------
-    ValueError
-        Raised if ``writeable is True`` but there are no output collections.
-    """
-
-    def __init__(self, registry: Registry, args: SimpleNamespace, writeable: bool):
-        if args.output is not None:
-            self.output = _OutputChainedCollectionInfo(registry, args.output)
-        else:
-            self.output = None
-        if args.output_run is not None:
-            if args.rebase and self.output and not args.output_run.startswith(self.output.name):
-                raise ValueError("Cannot rebase if output run does not start with output collection name.")
-            self.outputRun = _OutputRunCollectionInfo(registry, args.output_run)
-        elif self.output is not None:
-            if args.extend_run:
-                if not self.output.chain:
-                    raise ValueError("Cannot use --extend-run option with non-existing or empty output chain")
-                runName = self.output.chain[0]
-            else:
-                runName = f"{self.output}/{Instrument.makeCollectionTimestamp()}"
-            self.outputRun = _OutputRunCollectionInfo(registry, runName)
-        elif not writeable:
-            # If we're not writing yet, ok to have no output run.
-            self.outputRun = None
-        else:
-            raise ValueError("Cannot write without at least one of (--output, --output-run).")
-        # Recursively flatten any input CHAINED collections. We do this up
-        # front so we can tell if the user passes the same inputs on subsequent
-        # calls, even though we also flatten when we define the output CHAINED
-        # collection.
-        self.inputs = tuple(registry.queryCollections(args.input, flattenChains=True)) if args.input else ()
-
-        # If things are inconsistent and user has asked for a rebase then
-        # construct the new output chain.
-        if args.rebase and self._checkOutputInputConsistency():
-            assert self.output is not None
-            newOutputChain = [item for item in self.output.chain if item.startswith(self.output.name)]
-            newOutputChain.extend([item for item in self.inputs if item not in newOutputChain])
-            newOutputChain.extend([item for item in self.output.chain if item not in newOutputChain])
-            self.output.chain = tuple(newOutputChain)
-
-    def check(self, args: SimpleNamespace) -> None:
-        """Check command-line options for consistency with each other and the
-        data repository.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command-line arguments. See class documentation for the
-            construction parameter of the same name.
-        """
-        assert not (args.extend_run and args.replace_run), "In mutually-exclusive group in ArgumentParser."
-        if consistencyError := self._checkOutputInputConsistency():
-            raise ValueError(consistencyError)
-
-        if args.extend_run:
-            if self.outputRun is None:
-                raise ValueError("Cannot --extend-run when no output collection is given.")
-            elif not self.outputRun.exists:
-                raise ValueError(
-                    f"Cannot --extend-run; output collection '{self.outputRun.name}' does not exist."
-                )
-        if not args.extend_run and self.outputRun is not None and self.outputRun.exists:
-            raise ValueError(
-                f"Output run '{self.outputRun.name}' already exists, but --extend-run was not given."
-            )
-        if args.prune_replaced and not args.replace_run:
-            raise ValueError("--prune-replaced requires --replace-run.")
-        if args.replace_run and (self.output is None or not self.output.exists):
-            raise ValueError("--output must point to an existing CHAINED collection for --replace-run.")
-
-    def _checkOutputInputConsistency(self) -> str | None:
-        if self.inputs and self.output is not None and self.output.exists:
-            # Passing the same inputs that were used to initialize the output
-            # collection is allowed; this means the inputs must appear as a
-            # contiguous subsequence of outputs (normally they're also at the
-            # end, but --rebase will in general put them in the middle).
-            for n in reversed(range(1 + len(self.output.chain) - len(self.inputs))):
-                if self.inputs == self.output.chain[n : n + len(self.inputs)]:
-                    return None
-            return (
-                f"Output CHAINED collection {self.output.name!r} exists and does not include the "
-                f"same sequence of (flattened) input collections {self.inputs} as a contiguous "
-                "subsequence. "
-                "Use --rebase to ignore this problem and reset the output collection, but note that "
-                "this may obfuscate what inputs were actually used to produce these outputs."
-            )
-        return None
-
-    @classmethod
-    def _makeReadParts(cls, args: SimpleNamespace) -> tuple[Butler, Sequence[str], _ButlerFactory]:
-        """Parse arguments to support implementations of `makeReadButler` and
-        `makeButlerAndCollections`.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command-line arguments. See class documentation for the
-            construction parameter of the same name.
-
-        Returns
-        -------
-        butler : `lsst.daf.butler.Butler`
-            A read-only butler constructed from the repo at
-            ``args.butler_config``, but with no default collections.
-        inputs : `~collections.abc.Sequence` [ `str` ]
-            A collection search path constructed according to ``args``.
-        self : `_ButlerFactory`
-            A new `_ButlerFactory` instance representing the processed version
-            of ``args``.
-        """
-        butler = Butler.from_config(args.butler_config, writeable=False)
-        self = cls(butler.registry, args, writeable=False)
-        self.check(args)
-        if self.output and self.output.exists:
-            if args.replace_run:
-                replaced = self.output.chain[0]
-                inputs = list(self.output.chain[1:])
-                _LOG.debug(
-                    "Simulating collection search in '%s' after removing '%s'.", self.output.name, replaced
-                )
-            else:
-                inputs = [self.output.name]
-        else:
-            inputs = list(self.inputs)
-        if args.extend_run:
-            assert self.outputRun is not None, "Output collection has to be specified."
-            inputs.insert(0, self.outputRun.name)
-        collSearch = CollectionWildcard.from_expression(inputs).require_ordered()
-        return butler, collSearch, self
-
-    @classmethod
-    def makeReadButler(cls, args: SimpleNamespace) -> Butler:
-        """Construct a read-only butler according to the given command-line
-        arguments.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command-line arguments. See class documentation for the
-            construction parameter of the same name.
-
-        Returns
-        -------
-        butler : `lsst.daf.butler.Butler`
-            A read-only butler initialized with the collections specified by
-            ``args``.
-        """
-        cls.defineDatastoreCache()  # Ensure that this butler can use a shared cache.
-        butler, inputs, _ = cls._makeReadParts(args)
-        _LOG.debug("Preparing butler to read from %s.", inputs)
-        return Butler.from_config(butler=butler, collections=inputs)
-
-    @classmethod
-    def makeButlerAndCollections(cls, args: SimpleNamespace) -> tuple[Butler, Sequence[str], str | None]:
-        """Return a read-only registry, a collection search path, and the name
-        of the run to be used for future writes.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command-line arguments. See class documentation for the
-            construction parameter of the same name.
-
-        Returns
-        -------
-        butler : `lsst.daf.butler.Butler`
-            A read-only butler that collections will be added to and/or queried
-            from.
-        inputs : `Sequence` [ `str` ]
-            Collections to search for datasets.
-        run : `str` or `None`
-            Name of the output `~lsst.daf.butler.CollectionType.RUN` collection
-            if it already exists, or `None` if it does not.
-        """
-        butler, inputs, self = cls._makeReadParts(args)
-        run: str | None = None
-        if args.extend_run:
-            assert self.outputRun is not None, "Output collection has to be specified."
-        if self.outputRun is not None:
-            run = self.outputRun.name
-        _LOG.debug("Preparing registry to read from %s and expect future writes to '%s'.", inputs, run)
-        return butler, inputs, run
-
-    @staticmethod
-    def defineDatastoreCache() -> None:
-        """Define where datastore cache directories should be found.
-
-        Notes
-        -----
-        All the jobs should share a datastore cache if applicable. This
-        method asks for a shared fallback cache to be defined and then
-        configures an exit handler to clean it up.
-        """
-        defined, cache_dir = DatastoreCacheManager.set_fallback_cache_directory_if_unset()
-        if defined:
-            atexit.register(shutil.rmtree, cache_dir, ignore_errors=True)
-            _LOG.debug("Defining shared datastore cache directory to %s", cache_dir)
-
-    @classmethod
-    def makeWriteButler(cls, args: SimpleNamespace, pipeline_graph: PipelineGraph | None = None) -> Butler:
-        """Return a read-write butler initialized to write to and read from
-        the collections specified by the given command-line arguments.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command-line arguments. See class documentation for the
-            construction parameter of the same name.
-        pipeline_graph : `lsst.pipe.base.PipelineGraph`, optional
-            Definitions for tasks in a pipeline. This argument is only needed
-            if ``args.replace_run`` is `True` and ``args.prune_replaced`` is
-            "unstore".
-
-        Returns
-        -------
-        butler : `lsst.daf.butler.Butler`
-            A read-write butler initialized according to the given arguments.
-        """
-        cls.defineDatastoreCache()  # Ensure that this butler can use a shared cache.
-        butler = Butler.from_config(args.butler_config, writeable=True)
-        self = cls(butler.registry, args, writeable=True)
-        self.check(args)
-        assert self.outputRun is not None, "Output collection has to be specified."  # for mypy
-        if self.output is not None:
-            chainDefinition = list(self.output.chain if self.output.exists else self.inputs)
-            if args.replace_run:
-                replaced = chainDefinition.pop(0)
-                if args.prune_replaced == "unstore":
-                    # Remove datasets from datastore
-                    with butler.transaction():
-                        # we want to remove regular outputs from this pipeline,
-                        # but keep initOutputs, configs, and versions.
-                        if pipeline_graph is not None:
-                            refs = [
-                                ref
-                                for ref in butler.registry.queryDatasets(..., collections=replaced)
-                                if (
-                                    (producer := pipeline_graph.producer_of(ref.datasetType.name)) is not None
-                                    and producer.key.node_type is NodeType.TASK  # i.e. not TASK_INIT
-                                )
-                            ]
-                            butler.pruneDatasets(refs, unstore=True, disassociate=False)
-                elif args.prune_replaced == "purge":
-                    # Erase entire collection and all datasets, need to remove
-                    # collection from its chain collection first.
-                    with butler.transaction():
-                        butler.registry.setCollectionChain(self.output.name, chainDefinition, flatten=True)
-                        butler.removeRuns([replaced], unstore=True)
-                elif args.prune_replaced is not None:
-                    raise NotImplementedError(f"Unsupported --prune-replaced option '{args.prune_replaced}'.")
-            if not self.output.exists:
-                butler.registry.registerCollection(self.output.name, CollectionType.CHAINED)
-            if not args.extend_run:
-                butler.registry.registerCollection(self.outputRun.name, CollectionType.RUN)
-                chainDefinition.insert(0, self.outputRun.name)
-                butler.registry.setCollectionChain(self.output.name, chainDefinition, flatten=True)
-            _LOG.debug(
-                "Preparing butler to write to '%s' and read from '%s'=%s",
-                self.outputRun.name,
-                self.output.name,
-                chainDefinition,
-            )
-            butler.registry.defaults = RegistryDefaults(run=self.outputRun.name, collections=self.output.name)
-        else:
-            inputs = (self.outputRun.name,) + self.inputs
-            _LOG.debug("Preparing butler to write to '%s' and read from %s.", self.outputRun.name, inputs)
-            butler.registry.defaults = RegistryDefaults(run=self.outputRun.name, collections=inputs)
-        return butler
-
-    output: _OutputChainedCollectionInfo | None
-    """Information about the output chained collection, if there is or will be
-    one (`_OutputChainedCollectionInfo` or `None`).
-    """
-
-    outputRun: _OutputRunCollectionInfo | None
-    """Information about the output run collection, if there is or will be
-    one (`_OutputRunCollectionInfo` or `None`).
-    """
-
-    inputs: tuple[str, ...]
-    """Input collections provided directly by the user (`tuple` [ `str` ]).
-    """
-
-
 class _QBBFactory:
     """Class which is a callable for making QBB instances.

```
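The removed `_checkOutputInputConsistency` above encodes the one non-obvious rule in this helper: the flattened input collections must appear as a contiguous subsequence of the existing output chain. A standalone sketch of that check (function and collection names here are illustrative, not package API):

```python
def inputs_are_contiguous(inputs: tuple[str, ...], chain: tuple[str, ...]) -> bool:
    """Return True if ``inputs`` occurs as a contiguous run inside ``chain``.

    Standalone sketch of the subsequence scan in the removed
    _checkOutputInputConsistency; not part of lsst-ctrl-mpexec.
    """
    if not inputs:
        return True
    return any(inputs == chain[n : n + len(inputs)] for n in range(1 + len(chain) - len(inputs)))


# A chain whose run precedes the inputs still passes; a reordered chain does not.
assert inputs_are_contiguous(("raw", "calib"), ("u/demo/run1", "raw", "calib"))
assert not inputs_are_contiguous(("calib", "raw"), ("u/demo/run1", "raw", "calib"))
```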
```diff
@@ -555,11 +123,6 @@ class _QBBFactory:
         return (self._unpickle, (self.butler_config, dimension_config, dataset_types_pickle))


-# ------------------------
-# Exported definitions --
-# ------------------------
-
-
 class CmdLineFwk:
     """PipelineTask framework which executes tasks from command line.

```
```diff
@@ -569,54 +132,6 @@ class CmdLineFwk:

     MP_TIMEOUT = 3600 * 24 * 30  # Default timeout (sec) for multiprocessing

-    def __init__(self) -> None:
-        pass
-
-    def makePipeline(self, args: SimpleNamespace) -> Pipeline:
-        """Build a pipeline from command line arguments.
-
-        Parameters
-        ----------
-        args : `types.SimpleNamespace`
-            Parsed command line.
-
-        Returns
-        -------
-        pipeline : `~lsst.pipe.base.Pipeline`
-            Newly-constructed pipeline.
-        """
-        if args.pipeline:
-            pipeline = Pipeline.from_uri(args.pipeline)
-        else:
-            pipeline = Pipeline("anonymous")
-
-        # loop over all pipeline actions and apply them in order
-        for action in args.pipeline_actions:
-            if action.action == "add_instrument":
-                pipeline.addInstrument(action.value)
-
-            elif action.action == "new_task":
-                pipeline.addTask(action.value, action.label)
-
-            elif action.action == "delete_task":
-                pipeline.removeTask(action.label)
-
-            elif action.action == "config":
-                # action value string is "field=value", split it at '='
-                field, _, value = action.value.partition("=")
-                pipeline.addConfigOverride(action.label, field, value)
-
-            elif action.action == "configfile":
-                pipeline.addConfigFile(action.label, action.value)
-
-            else:
-                raise ValueError(f"Unexpected pipeline action: {action.action}")
-
-        if args.save_pipeline:
-            pipeline.write_to_uri(args.save_pipeline)
-
-        return pipeline
-
     def makeGraph(
         self, pipeline_graph_factory: PipelineGraphFactory | None, args: SimpleNamespace
     ) -> QuantumGraph | None:
```
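The removed `makePipeline` maps each command-line pipeline action onto a `lsst.pipe.base.Pipeline` call. A minimal sketch of driving those same calls directly, with a hypothetical pipeline file and override values:

```python
from lsst.pipe.base import Pipeline

# Hand-rolled equivalent of the removed action loop; every call below appears
# in the removed method, but the file names and values are hypothetical.
pipeline = Pipeline.from_uri("my_pipeline.yaml")           # load a pipeline definition
pipeline.addInstrument("lsst.obs.subaru.HyperSuprimeCam")  # "add_instrument" action
pipeline.addConfigOverride("isr", "doFlat", "False")       # "config" action: label, field, value
pipeline.addConfigFile("isr", "isr_overrides.py")          # "configfile" action
pipeline.write_to_uri("expanded_pipeline.yaml")            # what --save-pipeline did
```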
```diff
@@ -639,7 +154,7 @@ class CmdLineFwk:
         if args.extend_run:
             args.skip_existing = True

-        butler, collections, run = _ButlerFactory.makeButlerAndCollections(args)
+        butler, collections, run = ButlerFactory.make_butler_and_collections(args)

         if args.skip_existing and run:
             args.skip_existing_in += (run,)
```
```diff
@@ -717,12 +232,6 @@ class CmdLineFwk:
             _LOG.verbose("Writing QuantumGraph to %r.", args.save_qgraph)
             qgraph.saveUri(args.save_qgraph)

-        if args.save_single_quanta:
-            for quantumNode in qgraph:
-                sqgraph = qgraph.subset(quantumNode)
-                uri = args.save_single_quanta.format(quantumNode)
-                sqgraph.saveUri(uri)
-
         if args.qgraph_dot:
             _LOG.verbose("Writing quantum graph DOT visualization to %r.", args.qgraph_dot)
             graph2dot(qgraph, args.qgraph_dot)
```
```diff
@@ -731,41 +240,6 @@ class CmdLineFwk:
             _LOG.verbose("Writing quantum graph Mermaid visualization to %r.", args.qgraph_mermaid)
             graph2mermaid(qgraph, args.qgraph_mermaid)

-        if args.execution_butler_location:
-            _LOG.verbose("Writing execution butler to %r.", args.execution_butler_location)
-            butler = Butler.from_config(args.butler_config)
-            assert isinstance(butler, DirectButler), "Execution butler needs DirectButler"
-            newArgs = copy.deepcopy(args)
-
-            def builderShim(butler: Butler) -> Butler:
-                assert isinstance(butler, DirectButler), "Execution butler needs DirectButler"
-                newArgs.butler_config = butler._config
-                # Calling makeWriteButler is done for the side effects of
-                # calling that method, maining parsing all the args into
-                # collection names, creating collections, etc.
-                newButler = _ButlerFactory.makeWriteButler(newArgs)
-                return newButler
-
-            # Include output collection in collections for input
-            # files if it exists in the repo.
-            all_inputs = args.input
-            if args.output is not None:
-                with contextlib.suppress(MissingCollectionError):
-                    all_inputs += (next(iter(butler.registry.queryCollections(args.output))),)
-
-            _LOG.debug("Calling buildExecutionButler with collections=%s", all_inputs)
-            buildExecutionButler(
-                butler,
-                qgraph,
-                args.execution_butler_location,
-                run,
-                butlerModifier=builderShim,
-                collections=all_inputs,
-                clobber=args.clobber_execution_butler,
-                datastoreRoot=args.target_datastore_root,
-                transfer=args.transfer,
-            )
-
         return qgraph

     def _make_execution_resources(self, args: SimpleNamespace) -> ExecutionResources:
```
```diff
@@ -830,7 +304,7 @@ class CmdLineFwk:
         # Make butler instance. QuantumGraph should have an output run defined,
         # but we ignore it here and let command line decide actual output run.
         if butler is None:
-            butler = _ButlerFactory.makeWriteButler(args, graph.pipeline_graph)
+            butler = ButlerFactory.make_write_butler(args, graph.pipeline_graph)

         if args.skip_existing:
             args.skip_existing_in += (butler.run,)
```
```diff
@@ -984,7 +458,7 @@ class CmdLineFwk:
         qgraph = QuantumGraph.loadUri(args.qgraph, graphID=args.qgraph_id)

         # Ensure that QBB uses shared datastore cache for writes.
-        _ButlerFactory.defineDatastoreCache()
+        ButlerFactory.define_datastore_cache()

         # Make QBB.
         _LOG.verbose("Initializing quantum-backed butler.")
```
```diff
@@ -1016,7 +490,7 @@ class CmdLineFwk:
         dataset_types = {dstype.name: dstype for dstype in qgraph.registryDatasetTypes()}

         # Ensure that QBB uses shared datastore cache.
-        _ButlerFactory.defineDatastoreCache()
+        ButlerFactory.define_datastore_cache()

         _butler_factory = _QBBFactory(
             butler_config=args.butler_config,
```
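Both QBB call sites above now go through `ButlerFactory.define_datastore_cache`, whose removed predecessor shows the underlying pattern: define a shared fallback cache directory once per process, then register an `atexit` handler to delete it. A generic sketch of that pattern (the helper below is illustrative, not package API):

```python
import atexit
import shutil
import tempfile

_shared_cache_dir: str | None = None


def define_shared_cache() -> str:
    """Create a process-wide cache directory once and remove it at exit.

    Mirrors the atexit + shutil.rmtree cleanup in the removed
    defineDatastoreCache; illustrative only.
    """
    global _shared_cache_dir
    if _shared_cache_dir is None:
        _shared_cache_dir = tempfile.mkdtemp(prefix="qbb-cache-")
        # ignore_errors=True keeps shutdown quiet if the directory is already gone.
        atexit.register(shutil.rmtree, _shared_cache_dir, ignore_errors=True)
    return _shared_cache_dir
```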
```diff
@@ -46,7 +46,7 @@ from lsst.pipe.base.pipeline_graph import visualization

 from . import util
 from ._pipeline_graph_factory import PipelineGraphFactory
-from .cmdLineFwk import _ButlerFactory
+from .cli.butler_factory import ButlerFactory


 class _FilteredStream:
```
```diff
@@ -385,7 +385,7 @@ class ShowInfo:
             for compName, compUri in components.items():
                 print(f"  {compName}: {compUri}", file=self.stream)

-        butler = _ButlerFactory.makeReadButler(args)
+        butler = ButlerFactory.make_read_butler(args)
         for node in graph:
             print(f"Quantum {node.nodeId}: {node.taskDef.taskName}", file=self.stream)
             print("  inputs:", file=self.stream)
```
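For downstream code, the net effect of these hunks is that the private `_ButlerFactory` in `cmdLineFwk` is gone and the same functionality is exposed as `ButlerFactory` in `lsst.ctrl.mpexec.cli.butler_factory`, with the class methods renamed to snake_case: `makeReadButler` → `make_read_butler`, `makeButlerAndCollections` → `make_butler_and_collections`, `makeWriteButler` → `make_write_butler`, and `defineDatastoreCache` → `define_datastore_cache`. A hedged migration sketch, with an argument namespace shaped like the one the removed docstrings describe (the repository path and collection names are hypothetical, and a real butler repository is required for this to run):

```python
from types import SimpleNamespace

from lsst.ctrl.mpexec.cli.butler_factory import ButlerFactory  # new public location

# Attribute names follow the removed _ButlerFactory docstring; values are made up.
args = SimpleNamespace(
    butler_config="/path/to/repo",
    input=("HSC/raw/all",),
    output="u/someone/demo",
    output_run=None,
    extend_run=False,
    replace_run=False,
    prune_replaced=None,
    rebase=False,
)

# Before (<= 29.2025.3100): from lsst.ctrl.mpexec.cmdLineFwk import _ButlerFactory
#                           butler = _ButlerFactory.makeReadButler(args)
butler = ButlerFactory.make_read_butler(args)
```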
```diff
@@ -1,2 +1,2 @@
 __all__ = ["__version__"]
-__version__ = "29.2025.3100"
+__version__ = "29.2025.3300"
```
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-ctrl-mpexec
-Version: 29.2025.3100
+Version: 29.2025.3300
 Summary: Pipeline execution infrastructure for the Rubin Observatory LSST Science Pipelines.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: BSD 3-Clause License
```