lsst-daf-butler 29.2025.4100-py3-none-any.whl → 29.2025.4200-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. lsst/daf/butler/_dataset_association.py +19 -3
  2. lsst/daf/butler/_registry_shim.py +68 -2
  3. lsst/daf/butler/cli/cmd/_remove_collections.py +15 -0
  4. lsst/daf/butler/direct_butler/_direct_butler.py +1 -1
  5. lsst/daf/butler/queries/_expression_strings.py +1 -1
  6. lsst/daf/butler/registry/_registry_base.py +13 -5
  7. lsst/daf/butler/registry/datasets/byDimensions/_manager.py +1 -1
  8. lsst/daf/butler/registry/sql_registry.py +2 -172
  9. lsst/daf/butler/registry/tests/_registry.py +93 -0
  10. lsst/daf/butler/remote_butler/_factory.py +2 -2
  11. lsst/daf/butler/remote_butler/_remote_butler.py +3 -1
  12. lsst/daf/butler/remote_butler/authentication/cadc.py +63 -11
  13. lsst/daf/butler/script/removeCollections.py +46 -13
  14. lsst/daf/butler/tests/butler_queries.py +40 -1
  15. lsst/daf/butler/tests/server_available.py +53 -0
  16. lsst/daf/butler/timespan_database_representation.py +8 -0
  17. lsst/daf/butler/transfers/_context.py +5 -16
  18. lsst/daf/butler/version.py +1 -1
  19. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/METADATA +1 -1
  20. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/RECORD +28 -27
  21. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/WHEEL +0 -0
  22. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/entry_points.txt +0 -0
  23. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/licenses/COPYRIGHT +0 -0
  24. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/licenses/LICENSE +0 -0
  25. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/licenses/bsd_license.txt +0 -0
  26. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/licenses/gpl-v3.0.txt +0 -0
  27. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/top_level.txt +0 -0
  28. {lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/zip-safe +0 -0

lsst/daf/butler/_dataset_association.py

@@ -29,15 +29,17 @@ from __future__ import annotations
 
 __all__ = ("DatasetAssociation",)
 
-from collections.abc import Iterator
+from collections.abc import Iterator, Mapping
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any
 
+from ._collection_type import CollectionType
 from ._dataset_ref import DatasetRef
 from ._dataset_type import DatasetType
 from ._timespan import Timespan
 
 if TYPE_CHECKING:
+    from ._butler_collections import CollectionInfo
     from .queries._general_query_results import GeneralQueryResults
 
 
@@ -66,7 +68,10 @@ class DatasetAssociation:
 
     @classmethod
     def from_query_result(
-        cls, result: GeneralQueryResults, dataset_type: DatasetType
+        cls,
+        result: GeneralQueryResults,
+        dataset_type: DatasetType,
+        collection_info: Mapping[str, CollectionInfo],
     ) -> Iterator[DatasetAssociation]:
         """Construct dataset associations from the result of general query.
 
@@ -79,11 +84,22 @@ class DatasetAssociation:
             "timespan" dataset fields for ``dataset_type``.
         dataset_type : `DatasetType`
            Dataset type, query has to include this dataset type.
+        collection_info : `~collections.abc.Mapping` [`str`, `CollectionInfo`]
+            Mapping from collection name to information about it for all
+            collections that may appear in the query results.
         """
         timespan_key = f"{dataset_type.name}.timespan"
         collection_key = f"{dataset_type.name}.collection"
         for _, refs, row_dict in result.iter_tuples(dataset_type):
-            yield DatasetAssociation(refs[0], row_dict[collection_key], row_dict[timespan_key])
+            collection = row_dict[collection_key]
+            timespan = row_dict[timespan_key]
+            if collection_info[collection].type is not CollectionType.CALIBRATION:
+                # This behavior is for backwards compatibility only; in most
+                # contexts it makes sense to consider the timespan of a RUN
+                # or TAGGED collection to be unbounded, not None, and that's
+                # what the query results we're iterating over do.
+                timespan = None
+            yield DatasetAssociation(refs[0], collection, timespan)
 
     def __lt__(self, other: Any) -> bool:
         # Allow sorting of associations
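
Callers of DatasetAssociation.from_query_result must now supply the
collection-information mapping alongside the query result. A minimal sketch of
the new calling convention, assuming `butler` is an existing Butler and
`dataset_type` a registered calibration DatasetType (the collection name is
hypothetical):

    from lsst.daf.butler import DatasetAssociation

    collections = ["Cam1/calib"]
    with butler.query() as query:
        query = query.join_dataset_search(dataset_type, collections)
        result = query.general(
            dataset_type.dimensions,
            dataset_fields={dataset_type.name: {"dataset_id", "run", "collection", "timespan"}},
            find_first=False,
        )
        # Collection types let from_query_result map non-CALIBRATION
        # timespans back to None for backwards compatibility.
        collection_info = {info.name: info for info in butler.collections.query_info(collections)}
        for association in DatasetAssociation.from_query_result(result, dataset_type, collection_info):
            print(association.collection, association.timespan)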

lsst/daf/butler/_registry_shim.py

@@ -36,6 +36,7 @@ from typing import TYPE_CHECKING, Any
 from ._collection_type import CollectionType
 from ._dataset_ref import DatasetId, DatasetIdGenEnum, DatasetRef
 from ._dataset_type import DatasetType
+from ._exceptions import CalibrationLookupError
 from ._storage_class import StorageClassFactory
 from ._timespan import Timespan
 from .dimensions import (
@@ -48,7 +49,9 @@ from .dimensions import (
 )
 from .registry._collection_summary import CollectionSummary
 from .registry._defaults import RegistryDefaults
+from .registry._exceptions import NoDefaultCollectionError
 from .registry._registry_base import RegistryBase
+from .registry.queries._query_common import resolve_collections
 
 if TYPE_CHECKING:
     from .direct_butler import DirectButler
@@ -182,13 +185,76 @@ class RegistryShim(RegistryBase):
         *,
         collections: CollectionArgType | None = None,
         timespan: Timespan | None = None,
+        datastore_records: bool = False,
         **kwargs: Any,
     ) -> DatasetRef | None:
         # Docstring inherited from a base class.
-        return self._registry.findDataset(
-            datasetType, dataId, collections=collections, timespan=timespan, **kwargs
+        if not isinstance(datasetType, DatasetType):
+            datasetType = self.getDatasetType(datasetType)
+
+        dataId = DataCoordinate.standardize(
+            dataId,
+            dimensions=datasetType.dimensions,
+            universe=self.dimensions,
+            defaults=self.defaults.dataId,
+            **kwargs,
         )
 
+        with self._butler.query() as query:
+            resolved_collections = resolve_collections(self._butler, collections)
+            if not resolved_collections:
+                if collections is None:
+                    raise NoDefaultCollectionError("No collections provided, and no default collections set")
+                else:
+                    return None
+
+            if datasetType.isCalibration() and timespan is None:
+                # Filter out calibration collections, because with no timespan
+                # we have no way of selecting a dataset from them.
+                collection_info = self._butler.collections.query_info(
+                    resolved_collections, flatten_chains=True
+                )
+                resolved_collections = [
+                    info.name for info in collection_info if info.type != CollectionType.CALIBRATION
+                ]
+                if not resolved_collections:
+                    return None
+
+            result = query.datasets(datasetType, resolved_collections, find_first=True).limit(2)
+            dataset_type_name = result.dataset_type.name
+            # Search only on the 'required' dimensions for the dataset type.
+            # Any extra values provided by the user are ignored.
+            minimal_data_id = DataCoordinate.standardize(
+                dataId.subset(datasetType.dimensions.required).required, universe=self.dimensions
+            )
+            result = result.where(minimal_data_id)
+            if (
+                datasetType.isCalibration()
+                and timespan is not None
+                and (timespan.begin is not None or timespan.end is not None)
+            ):
+                timespan_column = query.expression_factory[dataset_type_name].timespan
+                result = result.where(timespan_column.overlaps(timespan))
+
+            datasets = list(result)
+            if len(datasets) == 1:
+                ref = datasets[0]
+                if dataId.hasRecords():
+                    ref = ref.expanded(dataId)
+                # Propagate storage class from user-provided DatasetType, which
+                # may not match the definition in the database.
+                ref = ref.overrideStorageClass(datasetType.storageClass_name)
+                if datastore_records:
+                    ref = self._registry.get_datastore_records(ref)
+                return ref
+            elif len(datasets) == 0:
+                return None
+            else:
+                raise CalibrationLookupError(
+                    f"Ambiguous calibration lookup for {datasetType} with timespan {timespan}"
+                    f" in collections {resolved_collections}."
+                )
+
     def insertDatasets(
         self,
         datasetType: DatasetType | str,
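
From the caller's side the shim preserves the documented findDataset
semantics. A hedged sketch based on the tests added in this release (`butler`
is an existing Butler with the test repository loaded; `timespan` is a
Timespan overlapping the certified validity range):

    registry = butler.registry
    # Without a timespan, CALIBRATION collections are skipped, so this search
    # finds nothing.
    assert registry.findDataset("bias", instrument="Cam1", detector=2, collections=["Cam1/calib"]) is None
    # With a timespan, the dataset whose validity range overlaps it is
    # returned; two matches at the same rank raise CalibrationLookupError.
    ref = registry.findDataset(
        "bias", instrument="Cam1", detector=2, collections=["Cam1/calib"], timespan=timespan
    )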

lsst/daf/butler/cli/cmd/_remove_collections.py

@@ -41,6 +41,8 @@ from ..utils import ButlerCommand
 noNonRunCollectionsMsg = "No non-RUN collections were found."
 willRemoveCollectionMsg = "The following collections will be removed:"
 removedCollectionsMsg = "Removed collections"
+willRemoveCollectionChainsMsg = "Collections to be removed from their parent collection chains:"
+removedCollectionChainsMsg = "Removed collections from their parent collection chains:"
 canNotRemoveFoundRuns = "The following RUN collections were found but can NOT be removed by this command:"
 didNotRemoveFoundRuns = "Found RUN collections but they can NOT be removed by this command:"
 abortedMsg = "Aborted."
@@ -53,6 +55,11 @@ abortedMsg = "Aborted."
 )
 @confirm_option()
 @options_file_option()
+@click.option(
+    "--remove-from-parents",
+    is_flag=True,
+    help="Forcibly remove the collection even if it is still referenced from collection chains.",
+)
 def remove_collections(**kwargs: Any) -> None:  # numpydoc ignore=PR01
     """Remove one or more non-RUN collections.
 
@@ -73,6 +80,10 @@ def remove_collections(**kwargs: Any) -> None:  # numpydoc ignore=PR01
         result.removeCollectionsTable.pprint_all(align="<")
     else:
         print("\n" + noNonRunCollectionsMsg)
+    if len(result.removeChainsTable):
+        print("\n" + willRemoveCollectionChainsMsg)
+        result.removeChainsTable.pprint_all(align="<")
+        print()
     if len(result.runsTable):
         print("\n" + canNotRemoveFoundRuns)
         result.runsTable.pprint_all(align="<")
@@ -86,6 +97,10 @@ def remove_collections(**kwargs: Any) -> None:  # numpydoc ignore=PR01
     else:
         print("\n" + removedCollectionsMsg + ":\n")
         result.removeCollectionsTable.pprint_all(align="<")
+    if len(result.removeChainsTable):
+        print("\n" + removedCollectionChainsMsg)
+        result.removeChainsTable.pprint_all(align="<")
+        print()
     if len(result.runsTable):
         print("\n" + didNotRemoveFoundRuns)
         result.runsTable.pprint_all(align="<")
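
For example, a collection still referenced by one or more chains can now be
removed in a single step (repository path and collection name are
hypothetical):

    butler remove-collections REPO u/someone/old-collection --remove-from-parents

Without the flag, the removal fails with an OrphanedRecordError carrying a
note that suggests --remove-from-parents; see script/removeCollections.py
below.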

lsst/daf/butler/direct_butler/_direct_butler.py

@@ -1302,7 +1302,7 @@ class DirectButler(Butler):  # numpydoc ignore=PR02
 
         data_id, kwargs = self._rewrite_data_id(data_id, parent_type, **kwargs)
 
-        ref = self._registry.findDataset(
+        ref = self.registry.findDataset(
             parent_type,
             data_id,
             collections=collections,

lsst/daf/butler/queries/_expression_strings.py

@@ -241,7 +241,7 @@ class _ConversionVisitor(TreeVisitor[_VisitorResult]):
 
     def visitBind(self, name: str, node: Node) -> _VisitorResult:
         if name not in self.context.bind:
-            raise InvalidQueryError("Name {name!r} is not in the bind map.")
+            raise InvalidQueryError(f"Name {name!r} is not in the bind map.")
         # Logic in visitIdentifier handles binds.
         return self.visitIdentifier(name, node)
 

lsst/daf/butler/registry/_registry_base.py

@@ -231,20 +231,28 @@ class RegistryBase(Registry):
         collectionTypes: Iterable[CollectionType] = CollectionType.all(),
         flattenChains: bool = False,
     ) -> Iterator[DatasetAssociation]:
-        # queryCollections only accepts DatasetType.
         if isinstance(datasetType, str):
             datasetType = self.getDatasetType(datasetType)
-        resolved_collections = self.queryCollections(
-            collections, datasetType=datasetType, collectionTypes=collectionTypes, flattenChains=flattenChains
-        )
         with self._butler.query() as query:
+            resolved_collections = self.queryCollections(
+                collections,
+                datasetType=datasetType,
+                collectionTypes=collectionTypes,
+                flattenChains=flattenChains,
+            )
+            # It's annoyingly difficult to just do the collection query once,
+            # since query_info doesn't accept all the expression types that
+            # queryCollections does. But it's all cached anyway.
+            collection_info = {
+                info.name: info for info in self._butler.collections.query_info(resolved_collections)
+            }
             query = query.join_dataset_search(datasetType, resolved_collections)
             result = query.general(
                 datasetType.dimensions,
                 dataset_fields={datasetType.name: {"dataset_id", "run", "collection", "timespan"}},
                 find_first=False,
             )
-            yield from DatasetAssociation.from_query_result(result, datasetType)
+            yield from DatasetAssociation.from_query_result(result, datasetType, collection_info)
 
     def _resolve_dataset_types(self, dataset_types: object | None) -> list[str]:
         if dataset_types is None:
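
A hedged usage sketch of the public entry point this rewires (`butler` is an
existing Butler; the dataset-type name is hypothetical, and `collections=...`
is the registry expression for "all collections"):

    for assoc in butler.registry.queryDatasetAssociations("bias", collections=..., flattenChains=True):
        # assoc.timespan is a validity range only for CALIBRATION
        # collections; for RUN and TAGGED collections it remains None,
        # matching the backwards-compatible behavior implemented above.
        print(assoc.ref, assoc.collection, assoc.timespan)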

lsst/daf/butler/registry/datasets/byDimensions/_manager.py

@@ -1425,7 +1425,7 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
             )
             if "timespan" in fields:
                 tags_builder.joins.timespans[fields_key] = self._db.getTimespanRepresentation().fromLiteral(
-                    None
+                    Timespan(None, None)
                 )
         calibs_builder: SqlSelectBuilder | None = None
         if CollectionType.CALIBRATION in collection_types:
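
The distinction matters because Timespan(None, None) is the unbounded
timespan, which overlaps every non-empty timespan, whereas a bare None becomes
SQL NULL and can never satisfy an overlap constraint. A quick illustration
with the Python-side Timespan (no database involved; the times are arbitrary):

    from astropy.time import Time
    from lsst.daf.butler import Timespan

    t1 = Time("2020-01-01T00:00:00", scale="tai")
    t2 = Time("2020-06-01T00:00:00", scale="tai")

    unbounded = Timespan(None, None)
    assert unbounded.overlaps(Timespan(t1, t2))  # unbounded overlaps anything...
    assert not unbounded.overlaps(Timespan.makeEmpty())  # ...except the empty timespan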

lsst/daf/butler/registry/sql_registry.py

@@ -36,7 +36,7 @@ import logging
 import warnings
 from collections import defaultdict
 from collections.abc import Iterable, Iterator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any
 
 import sqlalchemy
 
@@ -44,16 +44,10 @@ from lsst.resources import ResourcePathExpression
 from lsst.utils.iteration import ensure_iterable
 
 from .._collection_type import CollectionType
-from .._column_tags import DatasetColumnTag
 from .._config import Config
 from .._dataset_ref import DatasetId, DatasetIdGenEnum, DatasetRef
 from .._dataset_type import DatasetType
-from .._exceptions import (
-    CalibrationLookupError,
-    DataIdValueError,
-    DimensionNameError,
-    InconsistentDataIdError,
-)
+from .._exceptions import DataIdValueError, DimensionNameError, InconsistentDataIdError
 from .._storage_class import StorageClassFactory
 from .._timespan import Timespan
 from ..dimensions import (
@@ -90,7 +84,6 @@ if TYPE_CHECKING:
     from .._butler_config import ButlerConfig
     from ..datastore._datastore import DatastoreOpaqueTable
     from ..datastore.stored_file_info import StoredDatastoreItemInfo
-    from ..registry._registry import CollectionArgType
     from ..registry.interfaces import (
         CollectionRecord,
         Database,
@@ -830,169 +823,6 @@ class SqlRegistry:
         """
         return True
 
-    def findDataset(
-        self,
-        datasetType: DatasetType | str,
-        dataId: DataId | None = None,
-        *,
-        collections: CollectionArgType | None = None,
-        timespan: Timespan | None = None,
-        datastore_records: bool = False,
-        **kwargs: Any,
-    ) -> DatasetRef | None:
-        """Find a dataset given its `DatasetType` and data ID.
-
-        This can be used to obtain a `DatasetRef` that permits the dataset to
-        be read from a `Datastore`. If the dataset is a component and can not
-        be found using the provided dataset type, a dataset ref for the parent
-        will be returned instead but with the correct dataset type.
-
-        Parameters
-        ----------
-        datasetType : `DatasetType` or `str`
-            A `DatasetType` or the name of one. If this is a `DatasetType`
-            instance, its storage class will be respected and propagated to
-            the output, even if it differs from the dataset type definition
-            in the registry, as long as the storage classes are convertible.
-        dataId : `dict` or `DataCoordinate`, optional
-            A `dict`-like object containing the `Dimension` links that identify
-            the dataset within a collection.
-        collections : collection expression, optional
-            An expression that fully or partially identifies the collections to
-            search for the dataset; see
-            :ref:`daf_butler_collection_expressions` for more information.
-            Defaults to ``self.defaults.collections``.
-        timespan : `Timespan`, optional
-            A timespan that the validity range of the dataset must overlap.
-            If not provided, any `~CollectionType.CALIBRATION` collections
-            matched by the ``collections`` argument will not be searched.
-        datastore_records : `bool`, optional
-            Whether to attach datastore records to the `DatasetRef`.
-        **kwargs
-            Additional keyword arguments passed to
-            `DataCoordinate.standardize` to convert ``dataId`` to a true
-            `DataCoordinate` or augment an existing one.
-
-        Returns
-        -------
-        ref : `DatasetRef`
-            A reference to the dataset, or `None` if no matching Dataset
-            was found.
-
-        Raises
-        ------
-        lsst.daf.butler.registry.NoDefaultCollectionError
-            Raised if ``collections`` is `None` and
-            ``self.defaults.collections`` is `None`.
-        LookupError
-            Raised if one or more data ID keys are missing.
-        lsst.daf.butler.registry.MissingDatasetTypeError
-            Raised if the dataset type does not exist.
-        lsst.daf.butler.registry.MissingCollectionError
-            Raised if any of ``collections`` does not exist in the registry.
-
-        Notes
-        -----
-        This method simply returns `None` and does not raise an exception even
-        when the set of collections searched is intrinsically incompatible with
-        the dataset type, e.g. if ``datasetType.isCalibration() is False``, but
-        only `~CollectionType.CALIBRATION` collections are being searched.
-        This may make it harder to debug some lookup failures, but the behavior
-        is intentional; we consider it more important that failed searches are
-        reported consistently, regardless of the reason, and that adding
-        additional collections that do not contain a match to the search path
-        never changes the behavior.
-
-        This method handles component dataset types automatically, though most
-        other registry operations do not.
-        """
-        if collections is None:
-            if not self.defaults.collections:
-                raise NoDefaultCollectionError(
-                    "No collections provided to findDataset, and no defaults from registry construction."
-                )
-            collections = self.defaults.collections
-        backend = queries.SqlQueryBackend(self._db, self._managers, self.dimension_record_cache)
-        with backend.caching_context():
-            collection_wildcard = CollectionWildcard.from_expression(collections, require_ordered=True)
-            if collection_wildcard.empty():
-                return None
-            matched_collections = backend.resolve_collection_wildcard(collection_wildcard)
-            resolved_dataset_type = backend.resolve_single_dataset_type_wildcard(datasetType)
-            dataId = DataCoordinate.standardize(
-                dataId,
-                dimensions=resolved_dataset_type.dimensions,
-                universe=self.dimensions,
-                defaults=self.defaults.dataId,
-                **kwargs,
-            )
-            governor_constraints = {name: {cast(str, dataId[name])} for name in dataId.dimensions.governors}
-            (filtered_collections,) = backend.filter_dataset_collections(
-                [resolved_dataset_type],
-                matched_collections,
-                governor_constraints=governor_constraints,
-            ).values()
-            if not filtered_collections:
-                return None
-            if timespan is None:
-                filtered_collections = [
-                    collection_record
-                    for collection_record in filtered_collections
-                    if collection_record.type is not CollectionType.CALIBRATION
-                ]
-            if filtered_collections:
-                requested_columns = {"dataset_id", "run", "collection"}
-                with backend.context() as context:
-                    predicate = context.make_data_coordinate_predicate(
-                        dataId.subset(resolved_dataset_type.dimensions), full=False
-                    )
-                    if timespan is not None:
-                        requested_columns.add("timespan")
-                        predicate = predicate.logical_and(
-                            context.make_timespan_overlap_predicate(
-                                DatasetColumnTag(resolved_dataset_type.name, "timespan"), timespan
-                            )
-                        )
-                    relation = backend.make_dataset_query_relation(
-                        resolved_dataset_type, filtered_collections, requested_columns, context
-                    ).with_rows_satisfying(predicate)
-                    rows = list(context.fetch_iterable(relation))
-            else:
-                rows = []
-            if not rows:
-                return None
-            elif len(rows) == 1:
-                best_row = rows[0]
-            else:
-                rank_by_collection_key = {record.key: n for n, record in enumerate(filtered_collections)}
-                collection_tag = DatasetColumnTag(resolved_dataset_type.name, "collection")
-                row_iter = iter(rows)
-                best_row = next(row_iter)
-                best_rank = rank_by_collection_key[best_row[collection_tag]]
-                have_tie = False
-                for row in row_iter:
-                    if (rank := rank_by_collection_key[row[collection_tag]]) < best_rank:
-                        best_row = row
-                        best_rank = rank
-                        have_tie = False
-                    elif rank == best_rank:
-                        have_tie = True
-                        assert timespan is not None, "Rank ties should be impossible given DB constraints."
-                if have_tie:
-                    raise CalibrationLookupError(
-                        f"Ambiguous calibration lookup for {resolved_dataset_type.name} in collections "
-                        f"{collection_wildcard.strings} with timespan {timespan}."
-                    )
-            reader = queries.DatasetRefReader(
-                resolved_dataset_type,
-                translate_collection=lambda k: self._managers.collections[k].name,
-            )
-            ref = reader.read(best_row, data_id=dataId)
-            if datastore_records:
-                ref = self.get_datastore_records(ref)
-
-            return ref
-
     @transactional
     def insertDatasets(
         self,

lsst/daf/butler/registry/tests/_registry.py

@@ -550,6 +550,11 @@ class RegistryTests(ABC):
                 "bias", instrument="Cam1", detector=2, collections=["empty", "imported_r", "imported_g"]
             ),
         )
+        # If the input data ID was an expanded DataCoordinate with records,
+        # then the output ref has records, too.
+        expanded_id = registry.expandDataId({"instrument": "Cam1", "detector": 2})
+        expanded_ref = registry.findDataset("bias", expanded_id, collections=["imported_r"])
+        self.assertTrue(expanded_ref.dataId.hasRecords())
         # Search more than one collection, with one of them a CALIBRATION
         # collection.
         registry.registerCollection("Cam1/calib", CollectionType.CALIBRATION)
@@ -568,6 +573,18 @@ class RegistryTests(ABC):
                 timespan=timespan,
             ),
         )
+        self.assertEqual(
+            bias1,
+            registry.findDataset(
+                "bias",
+                instrument="Cam1",
+                detector=2,
+                # Calibration dataset type, with no calibration collection, but
+                # a timespan was provided.
+                collections=["imported_g"],
+                timespan=timespan,
+            ),
+        )
         self.assertEqual(
             bias2,
             registry.findDataset(
@@ -592,6 +609,82 @@ class RegistryTests(ABC):
                 "bias", instrument="Cam1", detector=2, collections=["empty", "Cam1/calib", "imported_g"]
             ),
         )
+        self.assertIsNone(
+            registry.findDataset("bias", instrument="Cam1", detector=2, collections=["Cam1/calib"])
+        )
+        # Test non-calibration dataset type.
+        registry.registerDatasetType(
+            DatasetType("noncalibration", ["instrument", "detector"], "int", universe=butler.dimensions)
+        )
+        (non_calibration_ref,) = registry.insertDatasets("noncalibration", dataIds=[dataId2], run=run)
+        self.assertIsNone(
+            registry.findDataset("noncalibration", instrument="Cam1", detector=2, collections=["imported_g"])
+        )
+        self.assertEqual(
+            non_calibration_ref,
+            registry.findDataset("noncalibration", instrument="Cam1", detector=2, collections=[run]),
+        )
+        # Timespan parameter is ignored for non-calibration dataset types.
+        self.assertIsNone(
+            registry.findDataset(
+                "noncalibration", instrument="Cam1", detector=2, collections=["imported_g"], timespan=timespan
+            )
+        )
+        self.assertEqual(
+            non_calibration_ref,
+            registry.findDataset(
+                "noncalibration", instrument="Cam1", detector=2, collections=[run], timespan=timespan
+            ),
+        )
+        self.assertEqual(
+            non_calibration_ref,
+            registry.findDataset(
+                "noncalibration",
+                instrument="Cam1",
+                detector=2,
+                collections=["Cam1/calib", run],
+                timespan=timespan,
+            ),
+        )
+        # Add a dataset type whose dimension group involves an "implied"
+        # dimension. ("physical_filter" implies "band".)
+        registry.registerDatasetType(
+            DatasetType(
+                "dt_with_implied",
+                [
+                    "instrument",
+                    "physical_filter",
+                ],
+                "int",
+                universe=butler.dimensions,
+            )
+        )
+        data_id = {"instrument": "Cam1", "physical_filter": "Cam1-G"}
+        (implied_ref,) = registry.insertDatasets("dt_with_implied", dataIds=[data_id], run=run)
+        found_ref = registry.findDataset("dt_with_implied", data_id, collections=[run])
+        self.assertEqual(implied_ref, found_ref)
+        # The "full" data ID with implied values is looked up, even though we
+        # provided only the "required" values.
+        self.assertTrue(found_ref.dataId.hasFull())
+        # The search ignores excess data ID values beyond the 'required' set.
+        # This is not the correct band value for this physical_filter, but
+        # the mismatch is ignored.
+        self.assertEqual(
+            implied_ref,
+            registry.findDataset(
+                "dt_with_implied",
+                {"instrument": "Cam1", "physical_filter": "Cam1-G", "band": "r"},
+                collections=[run],
+            ),
+        )
+        # Correct band value, wrong physical_filter.
+        self.assertIsNone(
+            registry.findDataset(
+                "dt_with_implied",
+                {"instrument": "Cam1", "physical_filter": "Cam1-R1", "band": "g"},
+                collections=[run],
+            ),
+        )
 
     def testRemoveDatasetTypeSuccess(self):
         """Test that SqlRegistry.removeDatasetType works when there are no

lsst/daf/butler/remote_butler/_factory.py

@@ -136,7 +136,7 @@ class RemoteButlerFactory:
         if self._config.authentication == "rubin_science_platform":
             auth = RubinAuthenticationProvider(access_token)
         elif self._config.authentication == "cadc":
-            auth = CadcAuthenticationProvider()
+            auth = CadcAuthenticationProvider(access_token)
         return self._create_butler(
             auth=auth, butler_options=butler_options, enable_datastore_cache=enable_datastore_cache
         )
@@ -151,7 +151,7 @@ class RemoteButlerFactory:
         if self._config.authentication == "rubin_science_platform":
             auth = RubinAuthenticationProvider.create_from_environment(self.server_url)
         elif self._config.authentication == "cadc":
-            auth = CadcAuthenticationProvider()
+            auth = CadcAuthenticationProvider.create_from_environment(self.server_url)
 
         return self._create_butler(
             auth=auth, butler_options=butler_options, enable_datastore_cache=enable_datastore_cache

lsst/daf/butler/remote_butler/_remote_butler.py

@@ -60,7 +60,7 @@ from .._storage_class import StorageClass, StorageClassFactory
 from .._utilities.locked_object import LockedObject
 from ..datastore import DatasetRefURIs, DatastoreConfig
 from ..datastore.cache_manager import AbstractDatastoreCacheManager, DatastoreCacheManager
-from ..dimensions import DataIdValue, DimensionConfig, DimensionUniverse, SerializedDataId
+from ..dimensions import DataCoordinate, DataIdValue, DimensionConfig, DimensionUniverse, SerializedDataId
 from ..queries import Query
 from ..queries.tree import make_column_literal
 from ..registry import CollectionArgType, NoDefaultCollectionError, Registry, RegistryDefaults
@@ -435,6 +435,8 @@ class RemoteButler(Butler):  # numpydoc ignore=PR02
             return None
 
         ref = DatasetRef.from_simple(model.dataset_ref, universe=self.dimensions)
+        if isinstance(data_id, DataCoordinate) and data_id.hasRecords():
+            ref = ref.expanded(data_id)
         return apply_storage_class_override(ref, dataset_type, storage_class)
 
     def _retrieve_artifacts(
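
A hedged sketch of the behavior this enables, mirroring the new DirectButler
test above (`butler` may now be a RemoteButler):

    data_id = butler.registry.expandDataId({"instrument": "Cam1", "detector": 2})
    ref = butler.find_dataset("bias", data_id, collections="imported_r")
    # Records attached to the input data ID are preserved on the output ref.
    assert ref.dataId.hasRecords()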

lsst/daf/butler/remote_butler/authentication/cadc.py

@@ -27,28 +27,80 @@
 
 from __future__ import annotations
 
+import os
+from fnmatch import fnmatchcase
+from urllib.parse import urlparse
+
 from .interface import RemoteButlerAuthenticationProvider
 
 
 class CadcAuthenticationProvider(RemoteButlerAuthenticationProvider):
-    """Provide HTTP headers required for authenticating the user at the
-    Canadian Astronomy Data Centre.
+    """
+    Represents an authentication provider for remote Butler services specific
+    to CADC connection requirements.
+
+    This class handles the creation and management of authentication headers
+    required for interaction with remote Butler services by handling bearer
+    tokens. It ensures that the object is pickleable as it may need to be
+    serialized and transferred between processes for file transfer operations.
+
+    Parameters
+    ----------
+    access_token : `str`
+        The bearer token used for authentication with CADC StorageInventory.
     """
 
     # NOTE -- This object needs to be pickleable. It will sometimes be
     # serialized and transferred to another process to execute file transfers.
 
-    def __init__(self) -> None:
-        # TODO: Load authentication information somehow
-        pass
+    def __init__(self, access_token: str):
+        # Access tokens are opaque bearer tokens. See https://sqr-069.lsst.io/
+        self._headers = {"Authorization": f"Bearer {access_token}"}
+
+    @staticmethod
+    def create_from_environment(server_url: str) -> CadcAuthenticationProvider:
+        access_token = _get_authentication_token_from_environment(server_url)
+        if access_token is None:
+            raise RuntimeError(
+                "Attempting to connect to Butler server,"
+                " but no access credentials were found in the environment."
+            )
+        return CadcAuthenticationProvider(access_token)
 
     def get_server_headers(self) -> dict[str, str]:
-        # TODO: I think you mentioned that you might not require
-        # authentication for the Butler server REST API initially --
-        # if so, you can leave this blank.
         return {}
 
     def get_datastore_headers(self) -> dict[str, str]:
-        # TODO: Supply the headers needed to access the Storage Inventory
-        # system.
-        return {"Authorization": "Bearer stub"}
+        return self._headers
+
+
+_SERVER_WHITELIST = ["*.cadc-ccda.hia-hia.nrc-cnrc.gc.ca", "*.canfar.net", "host.docker.internal"]
+_CADC_TOKEN_ENVIRONMENT_KEY = "CADC_TOKEN"
+
+
+def _get_authentication_token_from_environment(server_url: str) -> str | None:
+    """
+    Retrieve an authentication token from the environment.
+
+    This function checks if the provided server URL's hostname matches any
+    pattern in the server whitelist and if a valid token is available in
+    the environment variable. If both conditions are satisfied, the token is
+    returned; otherwise, None is returned.
+
+    Parameters
+    ----------
+    server_url (str): The URL of the server for which an authentication
+        token is being retrieved.
+
+    Returns
+    -------
+    str | None: The authentication token if available and hostname matches
+        the whitelist; otherwise, None.
+    """
+    hostname = urlparse(server_url.lower()).hostname
+    hostname_in_whitelist = any(hostname and fnmatchcase(hostname, pattern) for pattern in _SERVER_WHITELIST)
+    notebook_token = os.getenv(_CADC_TOKEN_ENVIRONMENT_KEY)
+    if hostname_in_whitelist and notebook_token:
+        return notebook_token
+
+    return None
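
A minimal sketch of the environment-based flow (server URL and token value are
hypothetical; the hostname must match one of the whitelist patterns above):

    import os

    from lsst.daf.butler.remote_butler.authentication.cadc import CadcAuthenticationProvider

    os.environ["CADC_TOKEN"] = "example-opaque-token"
    auth = CadcAuthenticationProvider.create_from_environment("https://butler.canfar.net/api/butler")
    # Only datastore requests carry the bearer token; the server REST API
    # currently gets no extra headers.
    assert auth.get_datastore_headers() == {"Authorization": "Bearer example-opaque-token"}
    assert auth.get_server_headers() == {}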

lsst/daf/butler/script/removeCollections.py

@@ -34,7 +34,7 @@ from astropy.table import Table
 
 from .._butler import Butler
 from .._collection_type import CollectionType
-from ..registry import MissingCollectionError
+from ..registry import MissingCollectionError, OrphanedRecordError
 
 
 @dataclass
@@ -49,6 +49,9 @@ class RemoveCollectionResult:
     onConfirmation: Callable[[], None]
     # astropy table describing data that will be removed.
     removeCollectionsTable: Table
+    # astropy table describing collection chain references that will be
+    # removed.
+    removeChainsTable: Table
     # astropy table describing any run collections that will NOT be removed.
     runsTable: Table
 
@@ -61,12 +64,13 @@ class CollectionInfo:
 
     nonRunCollections: Table
     runCollections: Table
+    parentCollections: dict[str, tuple[str, ...]]
+    """Mapping from child collection name to the list of chained collections
+    names that contain it.
+    """
 
 
-def _getCollectionInfo(
-    repo: str,
-    collection: str,
-) -> CollectionInfo:
+def _getCollectionInfo(repo: str, collection: str, include_parents: bool) -> CollectionInfo:
     """Get the names and types of collections that match the collection
     string.
 
@@ -77,6 +81,9 @@ def _getCollectionInfo(
     collection : `str`
         The collection string to search for. Same as the `expression`
         argument to `registry.queryCollections`.
+    include_parents : `bool`
+        If `True`, will fetch the list of parent chained collections containing
+        the given collections.
 
     Returns
     -------
@@ -85,25 +92,27 @@ def _getCollectionInfo(
     """
     butler = Butler.from_config(repo, without_datastore=True)
     try:
-        collections_info = sorted(butler.collections.query_info(collection, include_chains=True))
+        collections_info = sorted(
+            butler.collections.query_info(collection, include_chains=True, include_parents=include_parents)
+        )
     except MissingCollectionError:
         # Hide the error and act like no collections should be removed.
         collections_info = []
     collections = Table(names=("Collection", "Collection Type"), dtype=(str, str))
     runCollections = Table(names=("Collection",), dtype=(str,))
+    parents: dict[str, tuple[str, ...]] = {}
     for collection_info in collections_info:
         if collection_info.type == CollectionType.RUN:
             runCollections.add_row((collection_info.name,))
         else:
             collections.add_row((collection_info.name, collection_info.type.name))
+            if include_parents and collection_info.parents is not None and len(collection_info.parents) > 0:
+                parents[collection_info.name] = tuple(collection_info.parents)
 
-    return CollectionInfo(collections, runCollections)
+    return CollectionInfo(collections, runCollections, parents)
 
 
-def removeCollections(
-    repo: str,
-    collection: str,
-) -> Table:
+def removeCollections(repo: str, collection: str, remove_from_parents: bool) -> RemoveCollectionResult:
     """Remove collections.
 
     Parameters
@@ -112,6 +121,10 @@ def removeCollections(
         Same as the ``config`` argument to ``Butler.__init__``.
     collection : `str`
         Same as the ``name`` argument to ``Registry.removeCollection``.
+    remove_from_parents : `bool`
+        If `True`, will remove the given collections from any chained
+        collections they belong to before removing the collection
+        itself.
 
     Returns
     -------
@@ -119,17 +132,37 @@ def removeCollections(
         Contains tables describing what will be removed, and
        run collections that *will not* be removed.
     """
-    collectionInfo = _getCollectionInfo(repo, collection)
+    collectionInfo = _getCollectionInfo(repo, collection, remove_from_parents)
 
     def _doRemove(collections: Table) -> None:
         """Perform the prune collection step."""
         butler = Butler.from_config(repo, writeable=True, without_datastore=True)
+
        for name in collections["Collection"]:
-            butler.collections.x_remove(name)
+            with butler.transaction():
+                for parent in collectionInfo.parentCollections.get(name, []):
+                    butler.collections.remove_from_chain(parent, name)
+                try:
+                    butler.collections.x_remove(name)
+                except OrphanedRecordError as e:
+                    e.add_note(
+                        "Add the --remove-from-parents flag to this command"
+                        " if you are sure this collection is no longer needed."
+                    )
+                    raise
+
+    remove_chains_table = Table(names=("Child Collection", "Parent Collection"), dtype=(str, str))
+    for child in sorted(collectionInfo.parentCollections.keys()):
+        parents = collectionInfo.parentCollections[child]
+        key = child
+        for parent in sorted(parents):
+            remove_chains_table.add_row((key, parent))
+            key = ""
 
     result = RemoveCollectionResult(
         onConfirmation=partial(_doRemove, collectionInfo.nonRunCollections),
         removeCollectionsTable=collectionInfo.nonRunCollections,
         runsTable=collectionInfo.runCollections,
+        removeChainsTable=remove_chains_table,
    )
    return result
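
A hedged sketch of driving this script layer directly (repository path and
collection name are hypothetical):

    from lsst.daf.butler.script.removeCollections import removeCollections

    result = removeCollections("my_repo", "u/someone/chain-member", remove_from_parents=True)
    # Review what will be removed, including chain memberships, then confirm.
    print(result.removeCollectionsTable)
    print(result.removeChainsTable)
    result.onConfirmation()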

lsst/daf/butler/tests/butler_queries.py

@@ -517,7 +517,13 @@ class ButlerQueryTests(ABC, TestCaseMixin):
         )
         self.assertEqual(
             {row_tuple.raw_row["flat.timespan"] for row_tuple in row_tuples},
-            {Timespan(t1, t2), Timespan(t2, t3), Timespan(t3, None), Timespan.makeEmpty(), None},
+            {
+                Timespan(t1, t2),
+                Timespan(t2, t3),
+                Timespan(t3, None),
+                Timespan.makeEmpty(),
+                Timespan(None, None),
+            },
         )
 
         dimensions = butler.dimensions["detector"].minimal_group
@@ -2101,6 +2107,39 @@ class ButlerQueryTests(ABC, TestCaseMixin):
                 (DataCoordinate.standardize(base_data_id, detector=3, exposure=3), bias3b),
             ],
         )
+        # Query with an explicit timespan, but no calibration collections.
+        # This should succeed because the timespan for the dataset_tags tables
+        # are logically unbounded, not Null.
+        with butler.query() as query:
+            timespan_column = query.expression_factory["bias"].timespan
+            result = (
+                query.datasets("bias", collections=["imported_g"])
+                .where(instrument="Cam1", detector=2)
+                .where(
+                    timespan_column.overlaps(
+                        Timespan(begin=t1, end=t2),
+                    )
+                )
+            )
+            refs = list(result)
+            self.assertEqual([ref.id for ref in refs], [bias2a.id])
+
+        # Query with an explicit timespan and a RUN collection ahead of
+        # a CALIBRATION collection that would also match; the RUN collection
+        # should win.
+        with butler.query() as query:
+            timespan_column = query.expression_factory["bias"].timespan
+            result = (
+                query.datasets("bias", collections=["imported_g", collection])
+                .where(instrument="Cam1", detector=2)
+                .where(
+                    timespan_column.overlaps(
+                        Timespan(begin=t1, end=t2),
+                    )
+                )
+            )
+            refs = list(result)
+            self.assertEqual([ref.id for ref in refs], [bias2a.id])
 
         # Query in multiple collections, with one of the collections being a
         # calibration collection. This triggers special cases related to

lsst/daf/butler/tests/server_available.py (new file)

@@ -0,0 +1,53 @@
+# This file is part of daf_butler.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (http://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This software is dual licensed under the GNU General Public License and also
+# under a 3-clause BSD license. Recipients may choose which of these licenses
+# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
+# respectively. If you choose the GPL option then the following text applies
+# (but note that there is still no warranty even if you opt for BSD instead):
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import annotations
+
+__all__ = (
+    "butler_server_import_error",
+    "butler_server_is_available",
+)
+
+butler_server_is_available = True
+"""`True` if all dependencies required to use Butler server and RemoteButler
+are installed.
+"""
+
+butler_server_import_error = ""
+"""String containing a human-readable error message explaining why the server
+is not available, if ``butler_server_is_available`` is `False`.
+"""
+
+try:
+    # Dependencies required by Butler server and RemoteButler, but not
+    # available in LSST Pipelines Stack.
+    import fastapi  # noqa: F401
+    import httpx  # noqa: F401
+    import safir  # noqa: F401
+except ImportError as e:
+    butler_server_is_available = False
+    butler_server_import_error = f"Server libraries could not be loaded: {str(e)}"
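
These module-level flags are presumably intended for guarding server-dependent
tests; a hedged sketch (the test-case name is hypothetical):

    import unittest

    from lsst.daf.butler.tests.server_available import (
        butler_server_import_error,
        butler_server_is_available,
    )

    @unittest.skipUnless(butler_server_is_available, butler_server_import_error)
    class RemoteButlerTestCase(unittest.TestCase):
        def test_round_trip(self) -> None:
            ...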

lsst/daf/butler/timespan_database_representation.py

@@ -615,6 +615,10 @@ class _CompoundTimespanDatabaseRepresentation(TimespanDatabaseRepresentation):
         # Docstring inherited.
         if isinstance(other, sqlalchemy.sql.ColumnElement):
             return self.contains(other)
+
+        if self._is_null_literal() or other._is_null_literal():
+            return sqlalchemy.sql.null()
+
         return sqlalchemy.sql.and_(self._nsec[1] > other._nsec[0], other._nsec[1] > self._nsec[0])
 
     def contains(
@@ -653,5 +657,9 @@ class _CompoundTimespanDatabaseRepresentation(TimespanDatabaseRepresentation):
             nsec=(func(self._nsec[0]), func(self._nsec[1])), name=self._name
         )
 
+    def _is_null_literal(self) -> bool:
+        null = sqlalchemy.sql.null()
+        return self._nsec[0] is null and self._nsec[1] is null
+
 
 TimespanDatabaseRepresentation.Compound = _CompoundTimespanDatabaseRepresentation
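
A small illustration of why short-circuiting to null() is sound: in SQL's
three-valued logic any comparison with NULL yields NULL, so the AND-ed bound
comparisons could never evaluate to TRUE anyway (sqlalchemy-only sketch, no
database required):

    import sqlalchemy

    # Renders as "NULL > :param_1"; in three-valued logic this is NULL, never
    # TRUE, so an overlap test against a NULL timespan matches no rows.
    print(sqlalchemy.null() > sqlalchemy.literal(5))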

lsst/daf/butler/transfers/_context.py

@@ -354,20 +354,9 @@ class RepoExportContext:
         collectionTypes = {CollectionType.TAGGED}
         if datasetType.isCalibration():
             collectionTypes.add(CollectionType.CALIBRATION)
-        resolved_collections = self._butler.collections.query(
-            self._collections.keys(),
-            collection_types=collectionTypes,
-            flatten_chains=False,
-        )
-        with self._butler.query() as query:
-            query = query.join_dataset_search(datasetType, resolved_collections)
-            result = query.general(
-                datasetType.dimensions,
-                dataset_fields={datasetType.name: {"dataset_id", "run", "collection", "timespan"}},
-                find_first=False,
-            )
-            for association in DatasetAssociation.from_query_result(result, datasetType):
-                if association.ref.id in self._dataset_ids:
-                    results[association.collection].append(association)
-
+        for association in self._butler.registry.queryDatasetAssociations(
+            datasetType, self._collections.keys(), collectionTypes=collectionTypes, flattenChains=False
+        ):
+            if association.ref.id in self._dataset_ids:
+                results[association.collection].append(association)
         return results

lsst/daf/butler/version.py

@@ -1,2 +1,2 @@
 __all__ = ["__version__"]
-__version__ = "29.2025.4100"
+__version__ = "29.2025.4200"

{lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-daf-butler
-Version: 29.2025.4100
+Version: 29.2025.4200
 Summary: An abstraction layer for reading and writing astronomical data to datastores.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License-Expression: BSD-3-Clause OR GPL-3.0-or-later

{lsst_daf_butler-29.2025.4100.dist-info → lsst_daf_butler-29.2025.4200.dist-info}/RECORD

@@ -13,7 +13,7 @@ lsst/daf/butler/_column_tags.py,sha256=gwYOCYEVybICztwCxkZG3ely9ctK5UCBnn38am_ju
 lsst/daf/butler/_column_type_info.py,sha256=LpF3YmAwIBGqak51M7c-sDaGOWJInk9W3SKni4XGRm8,7163
 lsst/daf/butler/_config.py,sha256=urs7_ngiDXLCEs3ghOvwn7ob612c2O03KxafkRUERQ0,52100
 lsst/daf/butler/_config_support.py,sha256=scAJ8SWJnn0czerfSWX5bXMr0lwIyvKHDk4Pr81spQ0,14135
-lsst/daf/butler/_dataset_association.py,sha256=M1QNdpIc_Nm1cQqdSscYs4txnEwZFSiKqs8_xVE3NVo,3519
+lsst/daf/butler/_dataset_association.py,sha256=8GBIsTMz-XpL1LNiJs8xYIdQ53tmB6yz_m1TmADHxyw,4390
 lsst/daf/butler/_dataset_existence.py,sha256=iGWyQSav0KqyZWCNfug2vWUYYS3My2Br4AAKXtZ-_LU,3570
 lsst/daf/butler/_dataset_provenance.py,sha256=xqNOwVGDMtdPb6xE1cD4lNs9y5-ihLFx5bD8YWqmDs8,19201
 lsst/daf/butler/_dataset_ref.py,sha256=qN-eTAuPKWelt_CESWPucO4d5FC3XOal0EDAR0ZJLkw,40929
@@ -31,7 +31,7 @@ lsst/daf/butler/_named.py,sha256=-AvkTP5JIsCwj5tAmZO8epoRoF1_dGuz2aC0ItLg7-M,195
 lsst/daf/butler/_quantum.py,sha256=xqWk2mHLydf4VWpRTCR74eBjApESuaTb46_iOdLKFDo,25575
 lsst/daf/butler/_quantum_backed.py,sha256=PgrOmnUGvnRUSmwSU-dOzdgTahtmJZHh6gnW5oUnIZA,35583
 lsst/daf/butler/_query_all_datasets.py,sha256=Ev5LHI3Mq9a3XEB9bccX4n70EDQ6OVHhKqeHoThIkz8,7135
-lsst/daf/butler/_registry_shim.py,sha256=H4rhzuLp788LH3izFP7qRd2-D-FwHonH01kQmJNuKak,11730
+lsst/daf/butler/_registry_shim.py,sha256=8SSAXLBL2IAEJe8S9ktjaFZRs-8JaOs57n-rBk94uyQ,14839
 lsst/daf/butler/_standalone_datastore.py,sha256=ImgzPFAeG716zIzhnS-pPFMT0Hil7CxfIx9rELlfmRA,5149
 lsst/daf/butler/_storage_class.py,sha256=p74C-_QjYbcVFm6SdS0RYa_Gjmz9sJJ3JAA98llisdc,32997
 lsst/daf/butler/_storage_class_delegate.py,sha256=0fpDwQILeP0TcXP1W147Va4rX3qZhAyFpFkq6vrf6fo,16324
@@ -52,9 +52,9 @@ lsst/daf/butler/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lsst/daf/butler/pydantic_utils.py,sha256=2B0CfPxL8BHFn0ZXhVhqMGRob6eTSIDW-tQa3fe1jaA,13755
 lsst/daf/butler/repo_relocation.py,sha256=Ivhx2xU4slc53Z6RExhNnquMr2Hx-S8h62emmlXxyy8,3658
 lsst/daf/butler/time_utils.py,sha256=MVTfOFI2xt3IeA46pa-fWY2kJRwSzaQyq1uzeUABcTM,11805
-lsst/daf/butler/timespan_database_representation.py,sha256=MWDusjIQIL2RH1CDpWSW5sYvdHCJKzAfpg1rm1DfgEU,24302
+lsst/daf/butler/timespan_database_representation.py,sha256=rYeQ_vp6gneRjboqV-gvNW0DWhm1QJM-KnVzFTDVZ0I,24550
 lsst/daf/butler/utils.py,sha256=5u50COK5z4u31grOhmQF7mFz55biNLOvSMRdQjEdsjo,5140
-lsst/daf/butler/version.py,sha256=imQpc8l8n1Xj3guWKoFaXrBR3HnAY8jkMITBuZx-5DA,55
+lsst/daf/butler/version.py,sha256=loN_SewEappJtRr1bfMkUAloXYiZtM4w0O03FF5yeZQ,55
 lsst/daf/butler/_rubin/__init__.py,sha256=9z5kmc6LJ3C_iPFV46cvdlQ2qOGJbZh-2Ft5Z-rbE28,1569
 lsst/daf/butler/_rubin/file_datasets.py,sha256=P5_BIhxpVj9qfLuLiI2_dClMHsjO5Qm5oDXVr3WntNU,3607
 lsst/daf/butler/_utilities/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
@@ -67,7 +67,7 @@ lsst/daf/butler/cli/cliLog.py,sha256=9W8TVd6Lx0WkKoAU599jZlpDu8ACoikPwdjEndX1_g4
 lsst/daf/butler/cli/progress.py,sha256=doJ0g7HfB1goUFk8H3_tliSchzmieufME0nIpHp4QSA,3600
 lsst/daf/butler/cli/utils.py,sha256=-aKqerxPPpPLf8bggV-9ISQNHbMX3-ZNx3nnIJB5GVA,47578
 lsst/daf/butler/cli/cmd/__init__.py,sha256=LPxSRh0zUcHFuoQSpA5RpUBAhMkQ56Zm64gHbXhI2QI,2358
-lsst/daf/butler/cli/cmd/_remove_collections.py,sha256=pHFXaiw8CJ8hMSsVvnvvpVx7OnFEeOiFNKN7v2sG-EE,3925
+lsst/daf/butler/cli/cmd/_remove_collections.py,sha256=El6Ug1HbcYNaDOvsO6WSH_RutIojHSwMOaX6XO42KDY,4639
 lsst/daf/butler/cli/cmd/_remove_runs.py,sha256=j40vvI8eAadRXtuBKuuLHPERnml_CXPlvmCTx6Y705Q,5756
 lsst/daf/butler/cli/cmd/commands.py,sha256=Co10QOnlMdSrPepaIABLCYaedx7SnsZ91zJTO9iIIUA,31872
 lsst/daf/butler/cli/opt/__init__.py,sha256=VUts6eyMPoxcaKSXv5wD4XISG3Igj625JK3AmSdTBtI,1376
@@ -129,7 +129,7 @@ lsst/daf/butler/dimensions/_universe.py,sha256=HYWtfdxu09aPaZV9AJb6qLvzF0ObVq903
 lsst/daf/butler/dimensions/construction.py,sha256=KKRBO_Nk6QvSMPlWGId0XHJw-EAzogwrWbwk5Yk7F3Q,6905
 lsst/daf/butler/dimensions/record_cache.py,sha256=9BIt4Sag4ERlz95RRxDseyIw17NjopoCPrgRpxi5MCk,3944
 lsst/daf/butler/direct_butler/__init__.py,sha256=8uPQWbFoKDpP2T2fvjV794JUITtdcDH9Zu6ZXq9cRjE,1329
-lsst/daf/butler/direct_butler/_direct_butler.py,sha256=o_Q5us_cPB75Ip4-lFPcmzeGMFdBq458wNmEyDdLCJs,116088
+lsst/daf/butler/direct_butler/_direct_butler.py,sha256=lzT-UcrlxnkqssZFjHdM-OuTMn7rbEBJ8I50OexPJlk,116087
 lsst/daf/butler/direct_butler/_direct_butler_collections.py,sha256=6XvdvYqNSPAwva8vgR1bEqF8vYTfaNXUW3sb03NRwOM,8143
 lsst/daf/butler/direct_query_driver/__init__.py,sha256=hurmYGcTsnlqR4Hkh2xO_M544eEpCGeIJte0GXY3RFQ,1443
 lsst/daf/butler/direct_query_driver/_driver.py,sha256=-6f77noT5M-foo0DanukpARrbKmsKbIkj3vyGhUkm1s,76269
@@ -155,7 +155,7 @@ lsst/daf/butler/queries/_base.py,sha256=lk_xLoN08nmxj0N5aKCm-qI5gWjq8lY3UaLuCWbd
 lsst/daf/butler/queries/_data_coordinate_query_results.py,sha256=S_81y4EkSJw34ekuiHW7RSl0LKNWoV-ZM-C5FLIM-nk,3913
 lsst/daf/butler/queries/_dataset_query_results.py,sha256=SAJsI-KQsEziaNtfu3Zdp0iX4zF2BoDIF4mtb-xvtxQ,5113
 lsst/daf/butler/queries/_dimension_record_query_results.py,sha256=N89s23jBhSMEbr0uxlhMZykiw91O1bLTbdzwv-0Az14,4405
-lsst/daf/butler/queries/_expression_strings.py,sha256=zRgKveojeAeLD-hwwD8qULwgNF27g_XCVK5MdMNfY7w,18486
+lsst/daf/butler/queries/_expression_strings.py,sha256=IDVN2Tcoh6LhfoDlEDvysQ1jF_SDDRxt0vOWwuSm3dQ,18487
 lsst/daf/butler/queries/_general_query_results.py,sha256=Q1-BQFS6V_Uihwdv1t77GOV1M8idZr63WpZMOOmDXT8,9780
 lsst/daf/butler/queries/_identifiers.py,sha256=f_GqZw87IsRsefb2Et0oNormV2t4VMsR9V79gNV7IlU,7913
 lsst/daf/butler/queries/_query.py,sha256=rYWzQyfLPOXvwtSNuug0eKAkD5guoW__aIocRwOPZWg,38830
@@ -183,14 +183,14 @@ lsst/daf/butler/registry/_config.py,sha256=ZFBf7TkRaCiysaaSQMswsPjcNlSyLz9T3fk8H
 lsst/daf/butler/registry/_defaults.py,sha256=PZqgwZbUw4egyWBRSuFbXUPo_U2Xm2X6uWWCDRTOUok,10848
 lsst/daf/butler/registry/_exceptions.py,sha256=R6CN8jRWyiy8uN3UMwSTRS0zTw77MyrE9R61v1M-dBE,3400
 lsst/daf/butler/registry/_registry.py,sha256=OoOT1B5VrxM9XJcjPA2kEhlsFgIW04jjoOQYHWdDI28,59617
-lsst/daf/butler/registry/_registry_base.py,sha256=ED0CyXcNNrhETlOXxALaha3IdqnQHSNvHxOUi8gkLMs,10102
+lsst/daf/butler/registry/_registry_base.py,sha256=z3GY5wU6F_gw_Kc3RkxguZ3_Pc2MIb2biqL3RlLFp7M,10495
 lsst/daf/butler/registry/_registry_factory.py,sha256=meBBGzmXMCp-xCPar1oAzFxLB8FjIKW_wE9Bsic_z4o,5418
 lsst/daf/butler/registry/attributes.py,sha256=YldNjsjVHFAy-pp--hAnRfN3yilnHKGaMfCCxd0Z4o0,5921
 lsst/daf/butler/registry/connectionString.py,sha256=MgZ_jdUAL9_8l7UFeYF0Fhvo6TBdVS1i67I939Kgd3U,5394
 lsst/daf/butler/registry/managers.py,sha256=gal-O39ViqsvrOJNWtFjdtWT2BCUaLYBvzf67blkPp8,21175
 lsst/daf/butler/registry/nameShrinker.py,sha256=7gnlYnRdeYc3XXSfrMnSyQrIZerDYVZeU8x4eppWDXI,1464
 lsst/daf/butler/registry/opaque.py,sha256=xbdOogqabRH3YaevIZ7_hbbTb54rCOGyC7Uq7SZNH-Q,9882
-lsst/daf/butler/registry/sql_registry.py,sha256=aNPHX-z0q3hkQEG4JDIvWSBozm3iU6Qmp1ClDi6YR2Y,78816
+lsst/daf/butler/registry/sql_registry.py,sha256=ORo1-rdAoICSCkhEpz_Sz1_9OnWxMmAJCRfyuGk0-0o,70629
 lsst/daf/butler/registry/versions.py,sha256=egvrctt_1wBzZgh8iSfySaQJQ9bkx_9bUJWkvvDZONQ,9331
 lsst/daf/butler/registry/wildcards.py,sha256=akMGgqDkVM0mQ9RAFENv0IrnoUyMP3mhODYXDaWIQ8o,20277
 lsst/daf/butler/registry/bridge/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
@@ -206,7 +206,7 @@ lsst/daf/butler/registry/databases/sqlite.py,sha256=xW82mdbOoOOTBeuSYDQ9DiDPhnMG
 lsst/daf/butler/registry/datasets/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
 lsst/daf/butler/registry/datasets/byDimensions/__init__.py,sha256=BG4C7mhKFbCzvfQSI31CIV_iTMc1gYL_LT4Plyu6LdE,1323
 lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py,sha256=WdbU7ZTAqvpjyouIaKsEEi62P_0Y-zQZjdvzDahKLuw,9653
-lsst/daf/butler/registry/datasets/byDimensions/_manager.py,sha256=oGvCNB27Ahneu-83eDb2Djy3NFcAPhUNUkbollQOPNc,80781
+lsst/daf/butler/registry/datasets/byDimensions/_manager.py,sha256=WUKB2fGnM_zxjxAqRw2HCITmPvCULAd33lA70uIDdKQ,80797
 lsst/daf/butler/registry/datasets/byDimensions/summaries.py,sha256=MuRk2p6fAKhvjId3jWnuFzhqMnKNF31ugExD7a2g48k,18534
 lsst/daf/butler/registry/datasets/byDimensions/tables.py,sha256=40Z-kWTHAZNdxO-PxByidOtpEpTvnAtjsZLgChSI4SU,25537
 lsst/daf/butler/registry/dimensions/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
@@ -259,25 +259,25 @@ lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py,sha256=Y9SoJv-_0X
 lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py,sha256=aGhbTj66OEYILEApQtwyfDunWoAPJ3QWZu6mD8uEB3s,137077
 lsst/daf/butler/registry/tests/__init__.py,sha256=uWJM-8Yv52Ox6e9rKGTaTVRdJc3fIfAyI5WnZQp81EA,1349
 lsst/daf/butler/registry/tests/_database.py,sha256=0lbzjDUj3n2UPvAktiKcc3G1TE0TK867cASezUISN6I,65218
-lsst/daf/butler/registry/tests/_registry.py,sha256=uHdaEz8Wc9mhovN8T7MoHomrzTHiTwt7JjqTNwTjhfg,187719
+lsst/daf/butler/registry/tests/_registry.py,sha256=7s3x64ab6vIv_USJhtxmuaRSxfFDNqzJl1weOoMPneI,191668
 lsst/daf/butler/remote_butler/__init__.py,sha256=Q5TZrGeqEGspRyQYi4X2HX1PtkD7JyHe_metcjaSuAo,1377
 lsst/daf/butler/remote_butler/_collection_args.py,sha256=_ToE6jplUCzmmqwWvkl1cwrXRJkNU8Qrl2hqwIqHibw,4969
 lsst/daf/butler/remote_butler/_config.py,sha256=GIMYT-XW2AeGU0Ej_cHNFxeFzIGN0HqKxCwm_IkBoXM,1825
 lsst/daf/butler/remote_butler/_defaults.py,sha256=lja5yZVZKzzeEQeEkAQwtw0VDTpy5kQfAk5LmGhKlC4,2213
 lsst/daf/butler/remote_butler/_errors.py,sha256=zwXOhYBRkCXX7WMUdYDJTbnX-8cDNI4zBZxVoSY1YBs,1994
-lsst/daf/butler/remote_butler/_factory.py,sha256=IkSYL666XTKsA4IyHO-buOmn6srtFN7WoMyNZm57fmA,6786
+lsst/daf/butler/remote_butler/_factory.py,sha256=C5-PwIXBEhRl5pp9YI2Y9aO6ovz96cU_2p1STY3WXhE,6837
 lsst/daf/butler/remote_butler/_get.py,sha256=1yOJew2bbT8NEtWKvI7iJB4b1BPnijN2LwXckOLGunY,3553
 lsst/daf/butler/remote_butler/_http_connection.py,sha256=k1iCLa1jkfXAVg1FAo2rDwODMPn7HZEpbsdj5JnHC4M,12713
 lsst/daf/butler/remote_butler/_query_driver.py,sha256=ceVyFMcsPrdj_lZVqxCYbHEsv-zw5OjxEETS_s3Mexc,11030
 lsst/daf/butler/remote_butler/_query_results.py,sha256=mPiKb0OYDVL5GbiL-5iVb8JPhqGBNCzMyVrP8O-u1r4,3385
 lsst/daf/butler/remote_butler/_ref_utils.py,sha256=rVesx319JsTPIYCg59ZMT-thHlVrnPAqY0y8CJNo11Y,4958
 lsst/daf/butler/remote_butler/_registry.py,sha256=CXCthpDDxxGO1fHQTjCJOMNwUWvpJweWYe3jdlPbQbM,12914
-lsst/daf/butler/remote_butler/_remote_butler.py,sha256=dr_Rl-7KNyaBAA_Hx04vLQqe9nLn4ERnCRHTiULOzr0,30336
+lsst/daf/butler/remote_butler/_remote_butler.py,sha256=yg-TuWH1P7jRcJBqTbRzVjy6VZbJDVUHxbxxeoQEkn0,30465
 lsst/daf/butler/remote_butler/_remote_butler_collections.py,sha256=ZycaPncojNjzJEyHdPIlt8H7r-IxfJ_ipKWPhLx4Xfo,5561
 lsst/daf/butler/remote_butler/_remote_file_transfer_source.py,sha256=EdHJRFe0LHv7R1eT44ytlROTHXyWSQfT1shrUBVVVMg,4887
 lsst/daf/butler/remote_butler/server_models.py,sha256=CTiyXtSEw8AbmZiPMNs6qCHEkmiv9BRmRbVTWgeUfaA,14463
 lsst/daf/butler/remote_butler/authentication/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-lsst/daf/butler/remote_butler/authentication/cadc.py,sha256=22U3W_U3CXW2TfLEDxWoTt-CfAfTibVIugnbx8Mq8AY,2278
+lsst/daf/butler/remote_butler/authentication/cadc.py,sha256=5v6aX1I91rTnA6wmdth5fD0W0mOD_1mfV4_iQ1tVKXk,4234
 lsst/daf/butler/remote_butler/authentication/interface.py,sha256=inGPIq7pDLH8HRvlCIHiFGOwTSyNIethrTMDhPUBxYo,2000
 lsst/daf/butler/remote_butler/authentication/rubin.py,sha256=3Aw5M3CVG7uVfmaYgF4vM4aNiqoHcVGdS0_VTgroBx8,4560
 lsst/daf/butler/remote_butler/registry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -314,7 +314,7 @@ lsst/daf/butler/script/queryDatasetTypes.py,sha256=aHXCSgb2UxRh82jAKaVJYUuwC4vrf
 lsst/daf/butler/script/queryDatasets.py,sha256=nMgo_az4lfA7oqu3MKelFtbG8RF9k-9tmz6z-c57jrQ,14153
 lsst/daf/butler/script/queryDimensionRecords.py,sha256=9xjxEvrjycZE63m6v3ZzF0w2J329GIPip2GiqTGhwUs,4899
 lsst/daf/butler/script/register_dataset_type.py,sha256=FYxGYdvceF7_0Tg1ftR2Vr29OJ98snd7AFnqUr7fNk8,3123
-lsst/daf/butler/script/removeCollections.py,sha256=zCNnoiBBxG0_nRR55uQhaKxglRhTM7JL3uJ9lSMdopc,4789
+lsst/daf/butler/script/removeCollections.py,sha256=_RtmblbSUa_lTsDInHeIEpseBkPhnBGphoWrLv8ssMQ,6686
 lsst/daf/butler/script/removeDatasetType.py,sha256=aG4W8Nvne5TFPFtEUxpQ4KJ1C_pL_0BOxXDexhlr_BY,1910
 lsst/daf/butler/script/removeRuns.py,sha256=5rZeJ7O-ZAYNrh_a7FkDm84pn6Bs0VArjp0MiLSFFyg,5202
 lsst/daf/butler/script/retrieveArtifacts.py,sha256=7cT6yHEMQfuziGcqoOXFBH6cOJ7_3x4kLJldsFxIm3w,4783
@@ -324,7 +324,7 @@ lsst/daf/butler/tests/_datasetsHelper.py,sha256=LH1ZPuzSpRijAPtAiwBoSZZPzI-aSaHN
 lsst/daf/butler/tests/_dummyRegistry.py,sha256=CowulEeirnWwSJf-AnutvOLvGSr3ITgShlYj9f_IuYk,9509
 lsst/daf/butler/tests/_examplePythonTypes.py,sha256=oKuKhFkyipD7XCojlQR7D9xPyq6YS9nkB-U4_l07BIM,13281
 lsst/daf/butler/tests/_testRepo.py,sha256=MVkQwXJ5vI86--Npa7SDHRblpJC5pig6vpGxv-dgGvE,23755
-lsst/daf/butler/tests/butler_queries.py,sha256=4fTO5mHM3DwN_K1nYdcTNifUZM-DOs3nILBsQ-Z3hvg,121612
+lsst/daf/butler/tests/butler_queries.py,sha256=cw7WIDIhcxhvtKTXPW6HdY7X9TqCJbXTA9rfhXkpLeM,123190
 lsst/daf/butler/tests/cliCmdTestBase.py,sha256=6SsDIIttbvwi9TX7jkaPa8VuTvzO17hL6WGAm_T6uxM,6994
 lsst/daf/butler/tests/cliLogTestBase.py,sha256=sgpOMQV21ItF4ElE7Z1rMAi0rzAcBEOfe2Yu62bujBU,18325
 lsst/daf/butler/tests/deferredFormatter.py,sha256=EtUnxNy90n5ouESBbcFBLOt4mrHhmxvHS2vP6Y3dF0w,1920
@@ -334,6 +334,7 @@ lsst/daf/butler/tests/hybrid_butler_collections.py,sha256=XzuZzqKLKkVVY6Y96Rdrwk
 lsst/daf/butler/tests/hybrid_butler_registry.py,sha256=-xR5BuLIl3HA_bLVDMBZFFF0tPRHthttODbcfGAivMo,13393
 lsst/daf/butler/tests/postgresql.py,sha256=6-84OPoysRtsborTdsyu5HcVfVWZqHaU8azFPSxu0Wc,4331
 lsst/daf/butler/tests/server.py,sha256=crFS4weG5TEkf4N5m7ATxr7gRfTNeNzL3lXkyKyqdi8,9551
+lsst/daf/butler/tests/server_available.py,sha256=NC_G-vgrp8abDkXhoBmOBD-V_JutS12_gYBwqf0qgQY,2087
 lsst/daf/butler/tests/server_utils.py,sha256=sneYxThLhvc7KaFdHJXV15j-R3JtC6yOjDfVptJw6x4,2833
 lsst/daf/butler/tests/testFormatters.py,sha256=2q7t1t6G6aFjyiTPTEsXeHHdIMVH_tRWxHFpK3x_s1w,9575
 lsst/daf/butler/tests/utils.py,sha256=KRQT6wFYWrLdpI0UEiGq9T9LBZ5bEvo8Y12ocMxTTfE,15622
@@ -346,16 +347,16 @@ lsst/daf/butler/tests/registry_data/hsc-rc2-subset-v0.yaml,sha256=Y_Ihkoa2uZ2QSW
 lsst/daf/butler/tests/registry_data/spatial.py,sha256=KVh7b-iuJGdXloVLIGsR15P2jK87rcRJw7TyLVzH2j0,22787
 lsst/daf/butler/tests/registry_data/spatial.yaml,sha256=33X7e4XZ84jmSSYBvCKw6CxTPplL-83Bnk0vZi9PPZk,11262
 lsst/daf/butler/transfers/__init__.py,sha256=M1YcFszSkNB5hB2pZwwGXqbJE2dKt4YXDinW4s1iHI8,1371
-lsst/daf/butler/transfers/_context.py,sha256=1oOlKWj53bjcVioOIzRNPZSC_Q0dLDI_R4syI9gMOkg,16938
+lsst/daf/butler/transfers/_context.py,sha256=Ro_nf9NDw9IAr-Pw_NtcdotQKx34RbBbNubt20zwRXU,16449
 lsst/daf/butler/transfers/_interfaces.py,sha256=Ia1NqcFR5E-Ik4zsXEe2fuMtNCJj5Yfe_gVHLTBtJDw,7490
 lsst/daf/butler/transfers/_yaml.py,sha256=w_0GmrueuHVLfOfAXGHFBbWAl18tX6eSElbTC-2jRoc,32632
-lsst_daf_butler-29.2025.4100.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
-lsst_daf_butler-29.2025.4100.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
-lsst_daf_butler-29.2025.4100.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
-lsst_daf_butler-29.2025.4100.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-lsst_daf_butler-29.2025.4100.dist-info/METADATA,sha256=JpxDaRocREFqVfxiPesMBeyGnQV00ftHxiircXn8UYs,3846
-lsst_daf_butler-29.2025.4100.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-lsst_daf_butler-29.2025.4100.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
-lsst_daf_butler-29.2025.4100.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
-lsst_daf_butler-29.2025.4100.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-lsst_daf_butler-29.2025.4100.dist-info/RECORD,,
+lsst_daf_butler-29.2025.4200.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
+lsst_daf_butler-29.2025.4200.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
+lsst_daf_butler-29.2025.4200.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
+lsst_daf_butler-29.2025.4200.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+lsst_daf_butler-29.2025.4200.dist-info/METADATA,sha256=DkM04zD7-teIjvNEJkwxPBS70lH1n1BLRGBdP4B2PJE,3846
+lsst_daf_butler-29.2025.4200.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lsst_daf_butler-29.2025.4200.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
+lsst_daf_butler-29.2025.4200.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
+lsst_daf_butler-29.2025.4200.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+lsst_daf_butler-29.2025.4200.dist-info/RECORD,,