lsst-daf-butler 30.0.1rc1__py3-none-any.whl → 30.2025.5100__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. lsst/daf/butler/_butler.py +8 -27
  2. lsst/daf/butler/_butler_collections.py +4 -4
  3. lsst/daf/butler/_butler_metrics.py +2 -51
  4. lsst/daf/butler/_dataset_provenance.py +1 -1
  5. lsst/daf/butler/_dataset_ref.py +1 -1
  6. lsst/daf/butler/_exceptions.py +2 -2
  7. lsst/daf/butler/_file_dataset.py +1 -2
  8. lsst/daf/butler/_formatter.py +9 -14
  9. lsst/daf/butler/_labeled_butler_factory.py +8 -28
  10. lsst/daf/butler/_query_all_datasets.py +0 -2
  11. lsst/daf/butler/cli/cmd/_remove_runs.py +12 -1
  12. lsst/daf/butler/column_spec.py +4 -4
  13. lsst/daf/butler/configs/datastores/formatters.yaml +0 -1
  14. lsst/daf/butler/configs/storageClasses.yaml +0 -15
  15. lsst/daf/butler/datastore/_datastore.py +1 -21
  16. lsst/daf/butler/datastore/stored_file_info.py +2 -2
  17. lsst/daf/butler/datastores/chainedDatastore.py +0 -4
  18. lsst/daf/butler/datastores/fileDatastore.py +1 -11
  19. lsst/daf/butler/datastores/file_datastore/get.py +4 -4
  20. lsst/daf/butler/datastores/file_datastore/retrieve_artifacts.py +1 -5
  21. lsst/daf/butler/datastores/file_datastore/transfer.py +2 -2
  22. lsst/daf/butler/datastores/inMemoryDatastore.py +0 -8
  23. lsst/daf/butler/ddl.py +2 -2
  24. lsst/daf/butler/dimensions/_coordinate.py +8 -11
  25. lsst/daf/butler/dimensions/_record_set.py +1 -1
  26. lsst/daf/butler/dimensions/_records.py +3 -9
  27. lsst/daf/butler/direct_butler/_direct_butler.py +51 -85
  28. lsst/daf/butler/direct_query_driver/_driver.py +4 -5
  29. lsst/daf/butler/direct_query_driver/_result_page_converter.py +1 -1
  30. lsst/daf/butler/formatters/parquet.py +6 -6
  31. lsst/daf/butler/logging.py +3 -9
  32. lsst/daf/butler/nonempty_mapping.py +1 -1
  33. lsst/daf/butler/persistence_context.py +5 -8
  34. lsst/daf/butler/queries/_general_query_results.py +1 -1
  35. lsst/daf/butler/queries/driver.py +1 -1
  36. lsst/daf/butler/queries/expression_factory.py +2 -2
  37. lsst/daf/butler/queries/expressions/parser/exprTree.py +1 -1
  38. lsst/daf/butler/queries/expressions/parser/parserYacc.py +1 -1
  39. lsst/daf/butler/queries/overlaps.py +2 -2
  40. lsst/daf/butler/queries/tree/_column_set.py +1 -1
  41. lsst/daf/butler/registry/_collection_record_cache.py +1 -1
  42. lsst/daf/butler/registry/_collection_summary_cache.py +4 -5
  43. lsst/daf/butler/registry/_registry.py +0 -4
  44. lsst/daf/butler/registry/databases/postgresql.py +1 -2
  45. lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py +1 -1
  46. lsst/daf/butler/registry/datasets/byDimensions/_manager.py +2 -4
  47. lsst/daf/butler/registry/datasets/byDimensions/summaries.py +2 -3
  48. lsst/daf/butler/registry/interfaces/_datasets.py +1 -2
  49. lsst/daf/butler/registry/interfaces/_obscore.py +1 -1
  50. lsst/daf/butler/registry/obscore/_records.py +1 -1
  51. lsst/daf/butler/registry/obscore/_spatial.py +2 -2
  52. lsst/daf/butler/registry/queries/_results.py +2 -2
  53. lsst/daf/butler/registry/sql_registry.py +25 -3
  54. lsst/daf/butler/registry/wildcards.py +5 -5
  55. lsst/daf/butler/remote_butler/_get.py +1 -1
  56. lsst/daf/butler/remote_butler/_remote_butler.py +1 -6
  57. lsst/daf/butler/remote_butler/_remote_file_transfer_source.py +0 -4
  58. lsst/daf/butler/remote_butler/authentication/cadc.py +3 -4
  59. lsst/daf/butler/script/_pruneDatasets.py +2 -4
  60. lsst/daf/butler/script/configValidate.py +2 -2
  61. lsst/daf/butler/script/queryCollections.py +2 -2
  62. lsst/daf/butler/script/removeCollections.py +0 -2
  63. lsst/daf/butler/script/removeRuns.py +0 -2
  64. lsst/daf/butler/tests/cliCmdTestBase.py +0 -2
  65. lsst/daf/butler/tests/cliLogTestBase.py +0 -2
  66. lsst/daf/butler/tests/hybrid_butler.py +2 -10
  67. lsst/daf/butler/tests/registry_data/spatial.py +2 -4
  68. lsst/daf/butler/tests/utils.py +1 -1
  69. lsst/daf/butler/timespan_database_representation.py +3 -3
  70. lsst/daf/butler/transfers/_context.py +6 -7
  71. lsst/daf/butler/version.py +1 -1
  72. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/METADATA +2 -3
  73. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/RECORD +81 -84
  74. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/WHEEL +1 -1
  75. lsst/daf/butler/_rubin/temporary_for_ingest.py +0 -207
  76. lsst/daf/butler/registry/expand_data_ids.py +0 -93
  77. lsst/daf/butler/tests/registry_data/lsstcam-subset.yaml +0 -191
  78. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/entry_points.txt +0 -0
  79. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/licenses/COPYRIGHT +0 -0
  80. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/licenses/LICENSE +0 -0
  81. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/licenses/bsd_license.txt +0 -0
  82. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/licenses/gpl-v3.0.txt +0 -0
  83. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/top_level.txt +0 -0
  84. {lsst_daf_butler-30.0.1rc1.dist-info → lsst_daf_butler-30.2025.5100.dist-info}/zip-safe +0 -0
lsst/daf/butler/_butler.py
@@ -138,10 +138,7 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  without_datastore : `bool`, optional
  If `True` do not attach a datastore to this butler. Any attempts
  to use a datastore will fail.
- metrics : `ButlerMetrics` or `None`
- External metrics object to be used for tracking butler usage. If `None`
- a new metrics object is created.
- **kwargs : `typing.Any`
+ **kwargs : `Any`
  Additional keyword arguments passed to a constructor of actual butler
  class.

@@ -243,7 +240,7 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  to use a datastore will fail.
  metrics : `ButlerMetrics` or `None`, optional
  Metrics object to record butler usage statistics.
- **kwargs : `typing.Any`
+ **kwargs : `Any`
  Default data ID key-value pairs. These may only identify
  "governor" dimensions like ``instrument`` and ``skymap``.

@@ -1393,10 +1390,6 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  raised if any datasets with the same dataset ID already exist
  in the datastore.

- Returns
- -------
- None
-
  Raises
  ------
  TypeError
@@ -1436,7 +1429,6 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  *,
  transfer_dimensions: bool = False,
  dry_run: bool = False,
- skip_existing: bool = False,
  ) -> None:
  """Ingest a Zip file into this butler.

@@ -1455,14 +1447,6 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  If `True` the ingest will be processed without any modifications
  made to the target butler and as if the target butler did not
  have any of the datasets.
- skip_existing : `bool`, optional
- If `True`, a zip will not be ingested if the dataset entries listed
- in the index with the same dataset ID already exists in the butler.
- If `False` (the default), a `ConflictingDefinitionError` will be
- raised if any datasets with the same dataset ID already exist
- in the repository. If, somehow, some datasets are known to the
- butler and some are not, this is currently treated as an error
- rather than attempting to do a partial ingest.

  Notes
  -----
@@ -1582,7 +1566,7 @@ class Butler(LimitedButler): # numpydoc ignore=PR02

  @abstractmethod
  def transfer_dimension_records_from(
- self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef | DataCoordinate]
+ self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef]
  ) -> None:
  """Transfer dimension records to this Butler from another Butler.

@@ -1594,9 +1578,10 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  `Butler` whose registry will be used to expand data IDs. If the
  source refs contain coordinates that are used to populate other
  records then this will also need to be a full `Butler`.
- source_refs : iterable of `DatasetRef` or `DataCoordinate`
- Datasets or data IDs defined in the source butler whose dimension
- records should be transferred to this butler.
+ source_refs : iterable of `DatasetRef`
+ Datasets defined in the source butler whose dimension records
+ should be transferred to this butler. In most circumstances.
+ transfer is faster if the dataset refs are expanded.
  """
  raise NotImplementedError()

@@ -2040,7 +2025,7 @@ class Butler(LimitedButler): # numpydoc ignore=PR02

  Returns
  -------
- records : `list` [`DimensionRecord`]
+ records : `list`[`DimensionRecord`]
  Dimension records matching the given query parameters.

  Raises
@@ -2242,7 +2227,3 @@ class Butler(LimitedButler): # numpydoc ignore=PR02
  @abstractmethod
  def close(self) -> None:
  raise NotImplementedError()
-
- @abstractmethod
- def _expand_data_ids(self, data_ids: Iterable[DataCoordinate]) -> list[DataCoordinate]:
- raise NotImplementedError()
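
For orientation, a minimal usage sketch of the narrowed transfer_dimension_records_from signature above. The repository paths, dataset type, and collection name are placeholders, and query_datasets is used here only as one convenient way to obtain DatasetRef objects; none of these values come from this diff.

    from lsst.daf.butler import Butler

    # Placeholder repositories; any pair of compatible repos would do.
    source = Butler.from_config("/path/to/source_repo")
    target = Butler.from_config("/path/to/target_repo", writeable=True)

    # Only DatasetRef objects are accepted now; bare DataCoordinate data IDs
    # are no longer supported by this method.
    refs = source.query_datasets("calexp", collections="placeholder/run")
    target.transfer_dimension_records_from(source, refs)
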

lsst/daf/butler/_butler_collections.py
@@ -360,10 +360,10 @@ class ButlerCollections(ABC, Sequence):
  name : `str`
  The name of the collection of interest.
  include_parents : `bool`, optional
- If `True` any parents of this collection will be included.
+ If `True` any parents of this collection will be included.
  include_summary : `bool`, optional
- If `True` dataset type names and governor dimensions of datasets
- stored in this collection will be included in the result.
+ If `True` dataset type names and governor dimensions of datasets
+ stored in this collection will be included in the result.

  Returns
  -------
@@ -464,7 +464,7 @@ class ButlerCollections(ABC, Sequence):

  Returns
  -------
- filtered : `~collections.abc.Mapping` [`str`, `list` [`str`]]
+ filtered : `~collections.abc.Mapping` [`str`, `list`[`str`]]
  Mapping of the dataset type name to its corresponding list of
  collection names.
  """

lsst/daf/butler/_butler_metrics.py
@@ -27,19 +27,14 @@

  from __future__ import annotations

- __all__ = ["ButlerMetrics"]
-
  from collections.abc import Callable, Iterator
  from contextlib import contextmanager
- from typing import Concatenate, ParamSpec

  from pydantic import BaseModel

  from lsst.utils.logging import LsstLoggers
  from lsst.utils.timer import time_this

- P = ParamSpec("P")
-

  class ButlerMetrics(BaseModel):
  """Metrics collected during Butler operations."""
@@ -50,26 +45,18 @@ class ButlerMetrics(BaseModel):
  time_in_get: float = 0.0
  """Wall-clock time, in seconds, spent in get()."""

- time_in_ingest: float = 0.0
- """Wall-clock time, in seconds, spent in ingest()."""
-
  n_get: int = 0
  """Number of datasets retrieved with get()."""

  n_put: int = 0
  """Number of datasets stored with put()."""

- n_ingest: int = 0
- """Number of datasets ingested."""
-
  def reset(self) -> None:
  """Reset all metrics."""
  self.time_in_put = 0.0
  self.time_in_get = 0.0
- self.time_in_ingest = 0.0
  self.n_get = 0
  self.n_put = 0
- self.n_ingest = 0

  def increment_get(self, duration: float) -> None:
  """Increment time for get().
@@ -93,31 +80,13 @@ class ButlerMetrics(BaseModel):
  self.time_in_put += duration
  self.n_put += 1

- def increment_ingest(self, duration: float, n_datasets: int) -> None:
- """Increment time and datasets for ingest().
-
- Parameters
- ----------
- duration : `float`
- Duration to add to the ingest() statistics.
- n_datasets : `int`
- Number of datasets to be ingested for this call.
- """
- self.time_in_ingest += duration
- self.n_ingest += n_datasets
-
  @contextmanager
  def _timer(
- self,
- handler: Callable[Concatenate[float, P], None],
- log: LsstLoggers | None = None,
- msg: str | None = None,
- *args: P.args,
- **kwargs: P.kwargs,
+ self, handler: Callable[[float], None], log: LsstLoggers | None = None, msg: str | None = None
  ) -> Iterator[None]:
  with time_this(log=log, msg=msg) as timer:
  yield
- handler(timer.duration, *args, **kwargs)
+ handler(timer.duration)

  @contextmanager
  def instrument_get(self, log: LsstLoggers | None = None, msg: str | None = None) -> Iterator[None]:
@@ -146,21 +115,3 @@ class ButlerMetrics(BaseModel):
  """
  with self._timer(self.increment_put, log=log, msg=msg):
  yield
-
- @contextmanager
- def instrument_ingest(
- self, n_datasets: int, log: LsstLoggers | None = None, msg: str | None = None
- ) -> Iterator[None]:
- """Run code and increment ingest statistics.
-
- Parameters
- ----------
- n_datasets : `int`
- Number of datasets being ingested.
- log : `logging.Logger` or `None`
- Logger to use for any timing information.
- msg : `str` or `None`
- Any message to be included in log output.
- """
- with self._timer(self.increment_ingest, n_datasets=n_datasets, log=log, msg=msg):
- yield
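
The simplified _timer above reduces to a common pattern: time a block and hand the elapsed seconds to a callback such as increment_get or increment_put. A standalone sketch of that pattern, using time.perf_counter instead of lsst.utils.timer.time_this so it runs without the LSST stack:

    import time
    from collections.abc import Callable, Iterator
    from contextlib import contextmanager

    @contextmanager
    def timed(handler: Callable[[float], None]) -> Iterator[None]:
        # Time the enclosed block and report the duration (in seconds) to the
        # handler after the block completes, mirroring the handler-after-yield
        # style used above.
        start = time.perf_counter()
        yield
        handler(time.perf_counter() - start)

    durations: list[float] = []
    with timed(durations.append):
        sum(range(1_000_000))  # stand-in for a butler get()/put()
    print(f"block took {durations[0]:.6f} s")
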

lsst/daf/butler/_dataset_provenance.py
@@ -267,7 +267,7 @@ class DatasetProvenance(pydantic.BaseModel):
  use_upper : `bool` or `None`
  If `True` use upper case for provenance keys, if `False` use lower
  case, if `None` match the case of the prefix.
- *keys : `tuple` of `str` | `int`
+ keys : `tuple` of `str` | `int`
  Components of key to combine with prefix and separator.

  Returns

lsst/daf/butler/_dataset_ref.py
@@ -479,7 +479,7 @@ class DatasetRef:

  Parameters
  ----------
- simple : `dict` of [`str`, `typing.Any`]
+ simple : `dict` of [`str`, `Any`]
  The value returned by `to_simple()`.
  universe : `DimensionUniverse`
  The special graph of all known dimensions.

lsst/daf/butler/_exceptions.py
@@ -196,8 +196,8 @@ class ValidationError(RuntimeError):


  class EmptyQueryResultError(Exception):
- """Exception raised when query methods return an empty result and
- ``explain`` flag is set.
+ """Exception raised when query methods return an empty result and `explain`
+ flag is set.

  Parameters
  ----------

lsst/daf/butler/_file_dataset.py
@@ -129,8 +129,7 @@ class FileDataset:
  ----------
  dataset : `SerializedFileDataset`
  Object to deserialize.
- dataset_type_loader : `~collections.abc.Callable` \
- [[ `str` ], `DatasetType` ]
+ dataset_type_loader : `Callable` [[ `str` ], `DatasetType` ]
  Function that takes a string dataset type name as its
  only parameter, and returns an instance of `DatasetType`.
  Used to deserialize the `DatasetRef` instances contained

lsst/daf/butler/_formatter.py
@@ -54,7 +54,6 @@ from ._config import Config
  from ._config_support import LookupKey, processLookupConfigs
  from ._file_descriptor import FileDescriptor
  from ._location import Location
- from ._rubin.temporary_for_ingest import TemporaryForIngest
  from .dimensions import DataCoordinate, DimensionUniverse
  from .mapping_factory import MappingFactory

@@ -910,10 +909,6 @@ class FormatterV2:
  provenance : `DatasetProvenance` | `None`, optional
  Provenance to attach to the file being written.

- Returns
- -------
- None
-
  Raises
  ------
  FormatterNotImplementedError
@@ -1036,7 +1031,15 @@ class FormatterV2:
  """
  cache_manager = self._ensure_cache(cache_manager)

- with TemporaryForIngest.make_path(uri) as temporary_uri:
+ # Always write to a temporary even if
+ # using a local file system -- that gives us atomic writes.
+ # If a process is killed as the file is being written we do not
+ # want it to remain in the correct place but in corrupt state.
+ # For local files write to the output directory not temporary dir.
+ prefix = uri.dirname() if uri.isLocal else None
+ if prefix is not None:
+ prefix.mkdir()
+ with ResourcePath.temporary_uri(suffix=uri.getExtension(), prefix=prefix) as temporary_uri:
  # Need to configure the formatter to write to a different
  # location and that needs us to overwrite internals
  log.debug("Writing dataset to temporary location at %s", temporary_uri)
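
The replacement block above spells out the usual write-to-temporary-then-move approach: the temporary file lives next to the destination so the final move cannot leave a half-written file at the real path. A generic standard-library sketch of the same idea (paths are placeholders; this is not the butler implementation):

    import os
    import tempfile
    from pathlib import Path

    def atomic_write_bytes(destination: Path, data: bytes) -> None:
        """Write data so that destination is either absent or complete."""
        destination.parent.mkdir(parents=True, exist_ok=True)
        # Create the temporary in the destination directory so the final
        # os.replace() is a same-filesystem rename, which is atomic on POSIX.
        fd, tmp_name = tempfile.mkstemp(dir=destination.parent, suffix=destination.suffix)
        try:
            with os.fdopen(fd, "wb") as stream:
                stream.write(data)
                stream.flush()
                os.fsync(stream.fileno())
            os.replace(tmp_name, destination)
        except BaseException:
            # Remove the partial temporary if anything went wrong.
            Path(tmp_name).unlink(missing_ok=True)
            raise

    atomic_write_bytes(Path("/tmp/example/output.json"), b"{}")
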
@@ -1141,10 +1144,6 @@ class FormatterV2:
  location : `Location`
  Location from which to extract a file extension.

- Returns
- -------
- None
-
  Raises
  ------
  ValueError
@@ -1591,10 +1590,6 @@ class Formatter(metaclass=ABCMeta):
  location : `Location`
  Location from which to extract a file extension.

- Returns
- -------
- None
-
  Raises
  ------
  NotImplementedError

lsst/daf/butler/_labeled_butler_factory.py
@@ -30,9 +30,7 @@ from __future__ import annotations
  __all__ = ("LabeledButlerFactory", "LabeledButlerFactoryProtocol")

  from collections.abc import Mapping
- from contextlib import AbstractContextManager
- from logging import getLogger
- from typing import Any, Literal, Protocol, Self
+ from typing import Protocol

  from lsst.resources import ResourcePathExpression

@@ -42,8 +40,6 @@ from ._butler_repo_index import ButlerRepoIndex
  from ._utilities.named_locks import NamedLocks
  from ._utilities.thread_safe_cache import ThreadSafeCache

- _LOG = getLogger(__name__)
-

  class LabeledButlerFactoryProtocol(Protocol):
  """Callable to retrieve a butler from a label."""
@@ -51,7 +47,7 @@ class LabeledButlerFactoryProtocol(Protocol):
  def __call__(self, label: str) -> Butler: ...


- class LabeledButlerFactory(AbstractContextManager):
+ class LabeledButlerFactory:
  """Factory for efficiently instantiating Butler instances from the
  repository index file. This is intended for use from long-lived services
  that want to instantiate a separate Butler instance for each end user
@@ -64,9 +60,6 @@ class LabeledButlerFactory(AbstractContextManager):
  files. If not provided, defaults to the global repository index
  configured by the ``DAF_BUTLER_REPOSITORY_INDEX`` environment variable
  -- see `ButlerRepoIndex`.
- writeable : `bool`, optional
- If `True`, Butler instances created by this factory will be writeable.
- If `False` (the default), instances will be read-only.

  Notes
  -----
@@ -83,12 +76,11 @@ class LabeledButlerFactory(AbstractContextManager):
  safely be used by separate threads.
  """

- def __init__(self, repositories: Mapping[str, str] | None = None, writeable: bool = False) -> None:
+ def __init__(self, repositories: Mapping[str, str] | None = None) -> None:
  if repositories is None:
  self._repositories = None
  else:
  self._repositories = dict(repositories)
- self._writeable = writeable

  self._factories = ThreadSafeCache[str, _ButlerFactory]()
  self._initialization_locks = NamedLocks()
@@ -96,16 +88,6 @@ class LabeledButlerFactory(AbstractContextManager):
  # This may be overridden by unit tests.
  self._preload_unsafe_direct_butler_caches = True

- def __enter__(self) -> Self:
- return self
-
- def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Literal[False]:
- try:
- self.close()
- except Exception:
- _LOG.exception("An exception occurred during LabeledButlerFactory.close()")
- return False
-
  def bind(self, access_token: str | None) -> LabeledButlerFactoryProtocol:
  """Create a callable factory function for generating Butler instances
  with out needing to specify access tokans again.
@@ -127,7 +109,7 @@ class LabeledButlerFactory(AbstractContextManager):

  return create

- def create_butler(self, label: str, *, access_token: str | None = None) -> Butler:
+ def create_butler(self, *, label: str, access_token: str | None) -> Butler:
  """Create a Butler instance.

  Parameters
@@ -136,7 +118,7 @@ class LabeledButlerFactory(AbstractContextManager):
  Label of the repository to instantiate, from the ``repositories``
  parameter to the `LabeledButlerFactory` constructor or the global
  repository index file.
- access_token : `str` | `None`, optional
+ access_token : `str` | `None`
  Gafaelfawr access token used to authenticate to a Butler server.
  This is required for any repositories configured to use
  `RemoteButler`. If you only use `DirectButler`, this may be
@@ -185,9 +167,7 @@ class LabeledButlerFactory(AbstractContextManager):

  match butler_type:
  case ButlerType.DIRECT:
- return _DirectButlerFactory(
- config, self._preload_unsafe_direct_butler_caches, self._writeable
- )
+ return _DirectButlerFactory(config, self._preload_unsafe_direct_butler_caches)
  case ButlerType.REMOTE:
  return _RemoteButlerFactory(config)
  case _:
@@ -209,12 +189,12 @@ class _ButlerFactory(Protocol):


  class _DirectButlerFactory(_ButlerFactory):
- def __init__(self, config: ButlerConfig, preload_unsafe_caches: bool, writeable: bool) -> None:
+ def __init__(self, config: ButlerConfig, preload_unsafe_caches: bool) -> None:
  import lsst.daf.butler.direct_butler

  # Create a 'template' Butler that will be cloned when callers request
  # an instance.
- self._butler = Butler.from_config(config, writeable=writeable)
+ self._butler = Butler.from_config(config)
  assert isinstance(self._butler, lsst.daf.butler.direct_butler.DirectButler)

  # Load caches so that data is available in cloned instances without
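
A usage sketch against the new keyword-only create_butler signature above; the label and config path are invented placeholders, and the top-level import is assumed from the module's __all__ export rather than taken from this diff.

    from lsst.daf.butler import Butler, LabeledButlerFactory

    # Placeholder label -> config mapping; a long-lived service would more
    # typically rely on the DAF_BUTLER_REPOSITORY_INDEX environment variable.
    factory = LabeledButlerFactory({"example": "/path/to/butler.yaml"})

    # Both arguments are keyword-only now, and the factory no longer takes a
    # writeable flag; access_token may be None for DirectButler repositories.
    butler: Butler = factory.create_butler(label="example", access_token=None)

    # Or bind a token once and create per-label butlers from the callable.
    make_butler = factory.bind(access_token=None)
    butler2 = make_butler("example")
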

lsst/daf/butler/_query_all_datasets.py
@@ -151,8 +151,6 @@ def _filter_collections_and_dataset_types(

  Parameters
  ----------
- butler
- Butler repository to use.
  collections
  List of collection names or collection search globs.
  dataset_type_query

lsst/daf/butler/cli/cmd/_remove_runs.py
@@ -114,7 +114,18 @@ def remove_runs(context: click.Context, confirm: bool, force: bool, **kwargs: An

  This command can be used to remove RUN collections and the datasets within
  them.
- """ # numpydoc ignore=PR01
+
+ Parameters
+ ----------
+ context : `click.Context`
+ Context provided by Click.
+ confirm : `bool`
+ Confirmation for removal of the run.
+ force : `bool`
+ Force removal.
+ **kwargs : `dict` [`str`, `str`]
+ The parameters to pass to `~lsst.daf.butler.script.removeRuns`.
+ """
  result = script.removeRuns(**kwargs)
  canRemoveRuns = len(result.runs)
  if not canRemoveRuns:

lsst/daf/butler/column_spec.py
@@ -109,12 +109,12 @@ class ColumnValueSerializer(ABC):

  Parameters
  ----------
- value : `typing.Any`
+ value : `Any`
  Column value to be serialized.

  Returns
  -------
- value : `typing.Any`
+ value : `Any`
  Column value in serializable format.
  """
  raise NotImplementedError
@@ -125,12 +125,12 @@ class ColumnValueSerializer(ABC):

  Parameters
  ----------
- value : `typing.Any`
+ value : `Any`
  Serialized column value.

  Returns
  -------
- value : `typing.Any`
+ value : `Any`
  Deserialized column value.
  """
  raise NotImplementedError

lsst/daf/butler/configs/datastores/formatters.yaml
@@ -100,4 +100,3 @@ VisitBackgroundModel: lsst.daf.butler.formatters.json.JsonFormatter
  VignettingCorrection: lsst.ts.observatory.control.utils.extras.vignetting_storage.VignettingCorrectionFormatter
  SSPAuxiliaryFile: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFileFormatter
  VisitGeometry: lsst.daf.butler.formatters.json.JsonFormatter
- ProvenanceQuantumGraph: lsst.pipe.base.quantum_graph.formatter.ProvenanceFormatter

lsst/daf/butler/configs/storageClasses.yaml
@@ -443,18 +443,3 @@ storageClasses:
  pytype: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFile
  VisitGeometry:
  pytype: lsst.obs.base.visit_geometry.VisitGeometry
- ProvenanceQuantumGraph:
- pytype: lsst.pipe.base.quantum_graph.ProvenanceQuantumGraph
- parameters:
- - import_mode # lsst.pipe.base.pipeline_graph.TaskImportMode
- - quanta # iterable of uuid.UUID; quanta to read
- - datasets # iterable of uuid.UUID; datasets to read
- - read_init_quanta # bool, defaults to True; whether to read pre-exec-init info
- derivedComponents:
- packages: Packages # ignores node parameters
-
- # UUID keys can be quantum or data IDs (whichever is passed in via
- # parameters). Nested lists are attempts to run the quantum (last is
- # most recent).
- logs: StructuredDataDict # dict[uuid.UUID, list[ButlerLogRecords]]
- metadata: StructuredDataDict # dict[uuid.UUID, list[TaskMetadata]]

lsst/daf/butler/datastore/_datastore.py
@@ -284,14 +284,6 @@ class DatasetRefURIs(abc.Sequence):
  def __repr__(self) -> str:
  return f"DatasetRefURIs({repr(self.primaryURI)}, {repr(self.componentURIs)})"

- def iter_all(self) -> Iterator[ResourcePath]:
- """Iterate over all URIs without regard to whether they are primary
- or component.
- """
- if self.primaryURI is not None:
- yield self.primaryURI
- yield from self.componentURIs.values()
-

  class Datastore(FileTransferSource, metaclass=ABCMeta):
  """Datastore interface.
@@ -544,7 +536,7 @@ class Datastore(FileTransferSource, metaclass=ABCMeta):

  Returns
  -------
- exists : `dict` [`DatasetRef`, `bool`]
+ exists : `dict`[`DatasetRef`, `bool`]
  Mapping of dataset to boolean indicating whether the dataset
  is known to the datastore.
  """
@@ -833,10 +825,6 @@ class Datastore(FileTransferSource, metaclass=ABCMeta):
  in an external system or if the file is to be compressed in place.
  It is up to the datastore whether this parameter is relevant.

- Returns
- -------
- None
-
  Raises
  ------
  NotImplementedError
@@ -1155,10 +1143,6 @@ class Datastore(FileTransferSource, metaclass=ABCMeta):
  Determine whether errors should be ignored. When multiple
  refs are being trashed there will be no per-ref check.

- Returns
- -------
- None
-
  Raises
  ------
  FileNotFoundError
@@ -1294,10 +1278,6 @@ class Datastore(FileTransferSource, metaclass=ABCMeta):
  Entity to compare with configuration retrieved using the
  specified lookup key.

- Returns
- -------
- None
-
  Raises
  ------
  DatastoreValidationError

lsst/daf/butler/datastore/stored_file_info.py
@@ -423,8 +423,8 @@ def make_datastore_path_relative(path: str) -> str:
  path : `str`
  The file path from a `StoredFileInfo`.

- Returns
- -------
+ Return
+ ------
  normalized_path : `str`
  The original path, if it was relative. Otherwise, a version of it that
  was converted to a relative path, stripping URI scheme and netloc from

lsst/daf/butler/datastores/chainedDatastore.py
@@ -1077,10 +1077,6 @@ class ChainedDatastore(Datastore):
  If `True`, output a log message for every validation error
  detected.

- Returns
- -------
- None
-
  Raises
  ------
  DatastoreValidationError

lsst/daf/butler/datastores/fileDatastore.py
@@ -2152,13 +2152,7 @@ class FileDatastore(GenericBaseDatastore[StoredFileInfo]):

  return artifact_map

- def ingest_zip(
- self,
- zip_path: ResourcePath,
- transfer: str | None,
- *,
- dry_run: bool = False,
- ) -> None:
+ def ingest_zip(self, zip_path: ResourcePath, transfer: str | None, *, dry_run: bool = False) -> None:
  """Ingest an indexed Zip file and contents.

  The Zip file must have an index file as created by `retrieveArtifacts`.
@@ -2982,10 +2976,6 @@ class FileDatastore(GenericBaseDatastore[StoredFileInfo]):
  If `True`, output a log message for every validation error
  detected.

- Returns
- -------
- None
-
  Raises
  ------
  DatastoreValidationError

lsst/daf/butler/datastores/file_datastore/get.py
@@ -97,12 +97,12 @@ def generate_datastore_get_information(

  Parameters
  ----------
- fileLocations : `list` [`DatasetLocationInformation`]
+ fileLocations : `list`[`DatasetLocationInformation`]
  List of file locations for this artifact and their associated datastore
  records.
  ref : `DatasetRef`
  The registry information associated with this artifact.
- parameters : `~collections.abc.Mapping` [`str`, `typing.Any`]
+ parameters : `Mapping`[`str`, `Any`]
  `StorageClass` and `Formatter` parameters.
  readStorageClass : `StorageClass` | `None`, optional
  The StorageClass to use when ultimately returning the resulting object
@@ -255,12 +255,12 @@ def get_dataset_as_python_object_from_get_info(

  Parameters
  ----------
- allGetInfo : `list` [`DatastoreFileGetInformation`]
+ allGetInfo : `list`[`DatastoreFileGetInformation`]
  Pre-processed information about each file associated with this
  artifact.
  ref : `DatasetRef`
  The registry information associated with this artifact.
- parameters : `~collections.abc.Mapping` [`str`, `typing.Any`]
+ parameters : `Mapping`[`str`, `Any`]
  `StorageClass` and `Formatter` parameters.
  cache_manager : `AbstractDatastoreCacheManager`
  The cache manager to use for caching retrieved files.

lsst/daf/butler/datastores/file_datastore/retrieve_artifacts.py
@@ -274,11 +274,7 @@ class ZipIndex(BaseModel):
  Path to the Zip file.
  """
  with zip_path.open("rb") as fd, zipfile.ZipFile(fd) as zf:
- return cls.from_open_zip(zf)
-
- @classmethod
- def from_open_zip(cls, zf: zipfile.ZipFile) -> Self:
- json_data = zf.read(cls.index_name)
+ json_data = zf.read(cls.index_name)
  return cls.model_validate_json(json_data)
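
The inlined body above is an instance of a small, general pattern: read one JSON member out of a Zip archive and validate it with a Pydantic model. A self-contained sketch of that pattern (the index member name and the model are hypothetical, not the real ZipIndex schema):

    import io
    import json
    import zipfile

    from pydantic import BaseModel

    class ToyIndex(BaseModel):
        """Hypothetical stand-in for an archive index model."""

        files: list[str]

    INDEX_NAME = "_index.json"  # hypothetical member name

    def read_index(zip_bytes: bytes) -> ToyIndex:
        # Open the archive from memory and validate the JSON index member.
        with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
            return ToyIndex.model_validate_json(zf.read(INDEX_NAME))

    # Build a tiny archive in memory and read its index back.
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w") as zf:
        zf.writestr(INDEX_NAME, json.dumps({"files": ["a.fits", "b.fits"]}))
    print(read_index(buffer.getvalue()))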