lsst-daf-butler 29.2025.2600-py3-none-any.whl → 29.2025.2700-py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
lsst/daf/butler/dimensions/_record_set.py

@@ -809,8 +809,7 @@ class DimensionDataAttacher:
             been referenced in at least one of the constructor arguments.
         data_ids : `~collections.abc.Iterable` [ `DataCoordinate` ]
             Data IDs to attach dimension records to (not in place; data
-            coordinates are immutable). Must have full values (i.e. implied
-            as well as required dimensions).
+            coordinates are immutable).
         query : `.queries.Query`, optional
             A butler query that can be used to look up missing dimension
             records. Records fetched via query are cached in the `records`
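
Note: per the revised docstring above, input data IDs no longer need full values. A minimal usage sketch of `attach`, modeled on the `expand_data_ids` call site added later in this diff; the `registry` and `data_id` setup here is illustrative:

    # Hypothetical setup: `registry` is a SqlRegistry and `data_id` carries
    # only required values (implied values are looked up as needed).
    attacher = DimensionDataAttacher(
        cache=registry.dimension_record_cache,
        dimensions=data_id.dimensions,
    )
    with registry._query() as query:
        (expanded,) = attacher.attach(data_id.dimensions, [data_id], query)
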
@@ -822,49 +821,41 @@ class DimensionDataAttacher:
         Data IDs with dimension records attached, in the same order as the
         original iterable.
         """
-        incomplete: list[_InProgressRecordDicts] = []
         lookup_helpers = [
             _DimensionRecordLookupHelper.build(dimensions, element_name, self)
-            for element_name in dimensions.elements
+            for element_name in dimensions.lookup_order
         ]
-        result: list[DataCoordinate] = []
-        for n, data_id in enumerate(data_ids):
-            records = _InProgressRecordDicts(n)
-            for lookup_helper in lookup_helpers:
-                lookup_helper.lookup(data_id.full_values, records)
-            if records.missing:
-                incomplete.append(records)
-            else:
-                data_id = data_id.expanded(records.done)
-                result.append(data_id)
-        if query is not None:
-            for lookup_helper in lookup_helpers:
-                lookup_helper.fetch_missing(query)
-            for records in incomplete:
-                for element, required_values in records.missing.items():
-                    records.done[element] = self.records[element].find_with_required_values(required_values)
-                result[records.index] = result[records.index].expanded(records.done)
-        else:
-            # Other logic branches also raise LookupError internally (e.g. in
-            # find_with_required_values); this one has a better message (i.e.
-            # about all failures, not just the first one) just because we
-            # happen to have any information at hand, while we don't in other
-            # branches.
-            for records in incomplete:
-                raise LookupError(
-                    f"No dimension record for element(s) {list(records.missing.keys())} "
-                    f"for data ID {result[records.index]}. "
-                    f"{len(incomplete)} data ID{' was' if len(incomplete) == 1 else 's were'} "
-                    "missing at least one record."
-                )
-        return result
+        records = [_InProgressRecordDicts(data_id) for data_id in data_ids]
+        for lookup_helper in lookup_helpers:
+            for r in records:
+                lookup_helper.lookup(r)
+            incomplete = lookup_helper.incomplete_records
+            if incomplete:
+                if query is not None:
+                    lookup_helper.fetch_missing(query)
+                    # We may still be missing records at this point, if they
+                    # were not available in the database.
+                    # This is intentional, because in existing Butler
+                    # repositories dimension records are not always fully
+                    # populated. (For example, it is common for a visit to
+                    # exist without corresponding visit_detector_region
+                    # records, since these are populated at different times
+                    # by different processes.)
+                else:
+                    raise LookupError(
+                        f"No dimension record for element '{lookup_helper.element}' "
+                        f"for data ID {incomplete[0].data_id}. "
+                        f"{len(incomplete)} data ID{' was' if len(incomplete) == 1 else 's were'} "
+                        "missing at least one record."
+                    )
+
+        return [r.data_id.expanded(r.done) for r in records]


 @dataclasses.dataclass
 class _InProgressRecordDicts:
-    index: int  # Index of the data ID these are for in the result list.
+    data_id: DataCoordinate
     done: dict[str, DimensionRecord] = dataclasses.field(default_factory=dict)
-    missing: dict[str, tuple[DataIdValue, ...]] = dataclasses.field(default_factory=dict)


 @dataclasses.dataclass
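
Note: when a query is supplied, elements whose records simply do not exist in the repository are now tolerated rather than treated as errors (see the comment above about visit_detector_region), so callers should expect `None` entries, just as the RepoExportContext change later in this diff does. A hedged sketch:

    # An unpopulated element surfaces as None rather than raising
    # (element name illustrative; mirrors the `record is not None`
    # check in the transfers/_context.py hunk below).
    record = expanded.records["visit_detector_region"]
    if record is None:
        pass  # the repository never had this record; handle accordingly
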
@@ -873,7 +864,7 @@ class _DimensionRecordLookupHelper:
     # tuple in the to-be-expanded data ID's full-values tuple.
     indices: list[int]
     record_set: DimensionRecordSet
-    missing: set[tuple[DataIdValue, ...]] = dataclasses.field(default_factory=set)
+    incomplete_records: list[_InProgressRecordDicts] = dataclasses.field(default_factory=list)

     @property
     def element(self) -> str:
@@ -902,33 +893,62 @@ class _DimensionRecordLookupHelper:
         else:
             return _DimensionRecordLookupHelper(indices, attacher.records[element])

-    def lookup(self, full_data_id_values: tuple[DataIdValue, ...], records: _InProgressRecordDicts) -> None:
-        required_values = tuple([full_data_id_values[i] for i in self.indices])
+    def lookup(self, records: _InProgressRecordDicts) -> None:
+        required_values = self._get_required_values(records)
         if (result := self.record_set._by_required_values.get(required_values)) is None:
             result = self.fallback(required_values)
             if result is not None:
                 self.record_set.add(result)
                 records.done[self.element] = result
             else:
-                records.missing[self.element] = required_values
-                self.missing.add(required_values)
+                self.incomplete_records.append(records)
         else:
             records.done[self.element] = result

+    def _get_required_values(self, records: _InProgressRecordDicts) -> tuple[DataIdValue, ...]:
+        if records.data_id.hasFull():
+            full_values = records.data_id.full_values
+            return tuple([full_values[i] for i in self.indices])
+        else:
+            values = []
+            dimensions = self.record_set.element.minimal_group.required
+            for dimension in dimensions:
+                value = records.data_id.get(dimension)
+                if value is None:
+                    value = self._find_implied_value(dimension, records)
+                values.append(value)
+            return tuple(values)
+
+    def _find_implied_value(self, implied_dimension: str, records: _InProgressRecordDicts) -> DataIdValue:
+        for rec in records.done.values():
+            if implied_dimension in rec.definition.implied:
+                return rec.get(implied_dimension)
+
+        raise LookupError(
+            f"Implied value for dimension '{implied_dimension}' not found in records for"
+            f" {list(records.done.keys())}"
+        )
+
     def fallback(self, required_values: tuple[DataIdValue, ...]) -> DimensionRecord | None:
         return None

     def fetch_missing(self, query: Query) -> None:
-        if self.missing:
+        if self.incomplete_records:
+            missing_values = set(self._get_required_values(r) for r in self.incomplete_records)
             self.record_set.update(
                 query.join_data_coordinates(
                     [
-                        DataCoordinate.from_required_values(self.record_set.element.minimal_group, row)
-                        for row in self.missing
+                        DataCoordinate.from_required_values(self.record_set.element.minimal_group, values)
+                        for values in missing_values
                     ]
                 ).dimension_records(self.record_set.element.name)
             )

+            missing = self.incomplete_records
+            self.incomplete_records = list()
+            for record in missing:
+                self.lookup(record)
+

 @dataclasses.dataclass
 class _DeserializingDimensionRecordLookupHelper(_DimensionRecordLookupHelper):
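
Note: `_find_implied_value` is what makes required-only data IDs workable; a value missing from the data ID is recovered from a record already looked up for an earlier element in `lookup_order`. A worked illustration, assuming the default dimension universe (where `visit` implies `physical_filter`):

    # data_id = {instrument: "HSC", visit: 903342}  -- required values only.
    # 1. The "visit" element is looked up first; its record lands in
    #    records.done and carries the implied physical_filter value.
    # 2. The "physical_filter" element requires (instrument, physical_filter).
    #    data_id.get("physical_filter") is None, so _find_implied_value scans
    #    records.done, finds "physical_filter" in the visit record's
    #    definition.implied, and returns visit_record.get("physical_filter").
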
lsst/daf/butler/dimensions/_records.py

@@ -34,15 +34,7 @@ from collections.abc import Hashable
 from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias

 import pydantic
-from pydantic import (
-    BaseModel,
-    Field,
-    StrictBool,
-    StrictFloat,
-    StrictInt,
-    StrictStr,
-    create_model,
-)
+from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr, create_model

 import lsst.sphgeom
 from lsst.utils.classes import immutable
@@ -533,6 +525,29 @@ class DimensionRecord:
         results["datetime_end"] = timespan.end
         return results

+    def get(self, name: str) -> Any:
+        """Return a single metadata value associated with this record.
+
+        Parameters
+        ----------
+        name : `str`
+            Key of the metadata value to be retrieved.
+
+        Returns
+        -------
+        value
+            The metadata value.
+
+        Raises
+        ------
+        KeyError
+            If the given name is not a valid key in this dimension record.
+        """
+        if name not in self.__slots__:
+            raise KeyError(f"'{name}' is not a valid record key for dimension '{self.definition.name}'")
+
+        return getattr(self, name)
+
     def serialize_key_value(self) -> SerializedKeyValueDimensionRecord:
         """Serialize this record to a `list` that can be sliced into a key
         (data ID values) / value (everything else) pair.
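
Note: a brief usage sketch of the new `DimensionRecord.get` accessor; the record lookup and field names here are illustrative, using the standard `queryDimensionRecords` API:

    (record,) = butler.registry.queryDimensionRecords(
        "visit", instrument="HSC", visit=903342
    )
    filt = record.get("physical_filter")  # equivalent to record.physical_filter
    # record.get("no_such_field") raises KeyError instead of AttributeError,
    # matching dict-style expectations.
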
lsst/daf/butler/registry/sql_registry.py

@@ -34,6 +34,7 @@ __all__ = ("SqlRegistry",)
 import contextlib
 import logging
 import warnings
+from collections import defaultdict
 from collections.abc import Iterable, Iterator, Mapping, Sequence
 from typing import TYPE_CHECKING, Any, Literal, cast

@@ -61,6 +62,7 @@ from ..dimensions import (
     DataCoordinate,
     DataId,
     DimensionConfig,
+    DimensionDataAttacher,
     DimensionElement,
     DimensionGroup,
     DimensionRecord,
@@ -1060,16 +1062,15 @@ class SqlRegistry:
                 f"Given collection is of type {runRecord.type.name}; RUN collection required."
             )
         assert isinstance(runRecord, RunRecord)
-        progress = Progress("daf.butler.Registry.insertDatasets", level=logging.DEBUG)
+
+        expandedDataIds = [
+            DataCoordinate.standardize(dataId, dimensions=datasetType.dimensions) for dataId in dataIds
+        ]
         if expand:
-            expandedDataIds = [
-                self.expandDataId(dataId, dimensions=datasetType.dimensions)
-                for dataId in progress.wrap(dataIds, f"Expanding {datasetType.name} data IDs")
-            ]
-        else:
-            expandedDataIds = [
-                DataCoordinate.standardize(dataId, dimensions=datasetType.dimensions) for dataId in dataIds
-            ]
+            _LOG.debug("Expanding %d data IDs", len(expandedDataIds))
+            expandedDataIds = self.expand_data_ids(expandedDataIds)
+            _LOG.debug("Finished expanding data IDs")
+
         try:
             refs = list(
                 self._managers.datasets.insert(datasetType.name, runRecord, expandedDataIds, idGenerationMode)
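
Note: nothing changes for callers of `insertDatasets`; plain data-ID mappings are still accepted, now standardized up front and expanded in one batch rather than one query per data ID. A hedged usage sketch (dataset type, values, and run name are illustrative):

    refs = registry.insertDatasets(
        "raw",
        dataIds=[{"instrument": "HSC", "exposure": 903342, "detector": 10}],
        run="HSC/runs/example",
    )
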
@@ -1161,12 +1162,10 @@ class SqlRegistry:
             )
         assert isinstance(runRecord, RunRecord)

-        progress = Progress("daf.butler.Registry.importDatasets", level=logging.DEBUG)
         if expand:
-            datasets = [
-                dataset.expanded(self.expandDataId(dataset.dataId, dimensions=dataset.datasetType.dimensions))
-                for dataset in progress.wrap(datasets, "Expanding data IDs")
-            ]
+            _LOG.debug("Expanding %d data IDs", len(datasets))
+            datasets = self.expand_refs(datasets)
+            _LOG.debug("Finished expanding data IDs")

         try:
             self._managers.datasets.import_(runRecord, datasets)
@@ -1545,6 +1544,34 @@ class SqlRegistry:
         )
         return DataCoordinate.standardize(keys, dimensions=standardized.dimensions).expanded(records=records)

+    def expand_data_ids(self, data_ids: Iterable[DataCoordinate]) -> list[DataCoordinate]:
+        output = list(data_ids)
+
+        grouped_by_dimensions: defaultdict[DimensionGroup, list[int]] = defaultdict(list)
+        for i, data_id in enumerate(data_ids):
+            if not data_id.hasRecords():
+                grouped_by_dimensions[data_id.dimensions].append(i)
+
+        if not grouped_by_dimensions:
+            # All given DataCoordinate values are already expanded.
+            return output
+
+        attacher = DimensionDataAttacher(
+            cache=self.dimension_record_cache,
+            dimensions=DimensionGroup.union(*grouped_by_dimensions.keys(), universe=self.dimensions),
+        )
+        with self._query() as query:
+            for dimensions, indexes in grouped_by_dimensions.items():
+                expanded = attacher.attach(dimensions, (output[index] for index in indexes), query)
+                for index, data_id in zip(indexes, expanded):
+                    output[index] = data_id
+
+        return output
+
+    def expand_refs(self, dataset_refs: list[DatasetRef]) -> list[DatasetRef]:
+        expanded_ids = self.expand_data_ids([ref.dataId for ref in dataset_refs])
+        return [ref.expanded(data_id) for ref, data_id in zip(dataset_refs, expanded_ids)]
+
     def insertDimensionData(
         self,
         element: DimensionElement | str,
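
Note: together these helpers are a batched replacement for calling `expandDataId` in a loop; grouping by dimension group lets each group be expanded against a single query. A minimal sketch, assuming `registry` is a `SqlRegistry` and the values exist in the repository:

    data_ids = [
        DataCoordinate.standardize(
            {"instrument": "HSC", "visit": 903342, "detector": 10},
            universe=registry.dimensions,
        )
    ]
    expanded = registry.expand_data_ids(data_ids)
    assert all(data_id.hasRecords() for data_id in expanded)
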
lsst/daf/butler/transfers/_context.py

@@ -174,13 +174,9 @@ class RepoExportContext:
             element = self._butler.dimensions[element]
             if element.has_own_table:
                 standardized_elements.add(element)
-        for dataId in dataIds:
-            # This is potentially quite slow, because it's approximately
-            # len(dataId.graph.elements) queries per data ID. But it's a no-op
-            # if the data ID is already expanded, and DM-26692 will add (or at
-            # least start to add / unblock) query functionality that should
-            # let us speed this up internally as well.
-            dataId = self._butler.registry.expandDataId(dataId)
+
+        expanded_data_ids = self._butler._registry.expand_data_ids(dataIds)
+        for dataId in expanded_data_ids:
             for element_name in dataId.dimensions.elements:
                 record = dataId.records[element_name]
                 if record is not None and record.definition in standardized_elements:
lsst/daf/butler/version.py

@@ -1,2 +1,2 @@
 __all__ = ["__version__"]
-__version__ = "29.2025.2600"
+__version__ = "29.2025.2700"
lsst_daf_butler-29.2025.2700.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-daf-butler
-Version: 29.2025.2600
+Version: 29.2025.2700
 Summary: An abstraction layer for reading and writing astronomical data to datastores.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: BSD 3-Clause License
lsst_daf_butler-29.2025.2700.dist-info/RECORD

@@ -52,7 +52,7 @@ lsst/daf/butler/repo_relocation.py,sha256=Ivhx2xU4slc53Z6RExhNnquMr2Hx-S8h62emml
 lsst/daf/butler/time_utils.py,sha256=MVTfOFI2xt3IeA46pa-fWY2kJRwSzaQyq1uzeUABcTM,11805
 lsst/daf/butler/timespan_database_representation.py,sha256=MWDusjIQIL2RH1CDpWSW5sYvdHCJKzAfpg1rm1DfgEU,24302
 lsst/daf/butler/utils.py,sha256=5u50COK5z4u31grOhmQF7mFz55biNLOvSMRdQjEdsjo,5140
-lsst/daf/butler/version.py,sha256=eKEngRNTsqzq5yv4faNqXQ7KB1ya5wGTLTxJ1qbWG50,55
+lsst/daf/butler/version.py,sha256=ijvsqTn7akfUc9cY8XFVpeeoU8nGM3T0Tr7idt4aIaw,55
 lsst/daf/butler/_utilities/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
 lsst/daf/butler/_utilities/locked_object.py,sha256=3RQf0Ish55mfQAfBy3V4Tfnfq5Q7-cxrwTlQMUhrIno,1931
 lsst/daf/butler/_utilities/named_locks.py,sha256=Zj_u1rZELaiWec3wJfkgmGD_YiZMLVxbMQmdbbVgk5E,2286
@@ -117,9 +117,9 @@ lsst/daf/butler/dimensions/_elements.py,sha256=3Hn884mHbxz6H2KkMm-SFitEG_eHVRivU
 lsst/daf/butler/dimensions/_governor.py,sha256=En0Ar_uBuUnuKG1IACbyaZ7FRwf234zI5IEjwIQmMHw,5963
 lsst/daf/butler/dimensions/_group.py,sha256=1X9PbAqyLWt-MxucVaTNFgBPzMS6migbFptqvQNSboE,21953
 lsst/daf/butler/dimensions/_packer.py,sha256=yfgw7gXMSkPC59Kr51X6Isi8j6ihxqoR1OO2xPnd8FY,6461
-lsst/daf/butler/dimensions/_record_set.py,sha256=4AGb53ECbAbEKrefJLqoJMfAWOgDCylx1jSDVHWzc7U,37417
+lsst/daf/butler/dimensions/_record_set.py,sha256=agiAZcAxueb1gqOf4723xXrQlXJ-MZI8-mh8gzvslJ8,38107
 lsst/daf/butler/dimensions/_record_table.py,sha256=b1ijXQlnXH1N8rdiAV4C4GlDmOsBbWGuzob8Gv0mtII,9733
-lsst/daf/butler/dimensions/_records.py,sha256=7MBeUESXbopEkrm4Nxq0Bin_0PlJ4z8Au9vGWA8XByg,24092
+lsst/daf/butler/dimensions/_records.py,sha256=QX8XxiHRhRKFkUO3CvoDpe_8KGGyt3nyl08ifxUWwVs,24677
 lsst/daf/butler/dimensions/_schema.py,sha256=DOuhmI98f3yuvasFFATJCFrx6cjQSx4o2kyLioMbHlU,18507
 lsst/daf/butler/dimensions/_skypix.py,sha256=Tn60mvl_n7uWcip6eltfGzMB__98zubY3m9gHbzIwKc,6853
 lsst/daf/butler/dimensions/_universe.py,sha256=HYWtfdxu09aPaZV9AJb6qLvzF0ObVq903zLAVIsx2jw,21191
@@ -186,7 +186,7 @@ lsst/daf/butler/registry/connectionString.py,sha256=MgZ_jdUAL9_8l7UFeYF0Fhvo6TBd
 lsst/daf/butler/registry/managers.py,sha256=gal-O39ViqsvrOJNWtFjdtWT2BCUaLYBvzf67blkPp8,21175
 lsst/daf/butler/registry/nameShrinker.py,sha256=7gnlYnRdeYc3XXSfrMnSyQrIZerDYVZeU8x4eppWDXI,1464
 lsst/daf/butler/registry/opaque.py,sha256=xbdOogqabRH3YaevIZ7_hbbTb54rCOGyC7Uq7SZNH-Q,9882
-lsst/daf/butler/registry/sql_registry.py,sha256=FohIriwaDu9WC5sj3FK0whOEVOIbIomRAw3s9vGDVKc,107398
+lsst/daf/butler/registry/sql_registry.py,sha256=0sgrI_w9vrmN28ybR-lMUUz1xmeMW0ofKTFidaAQkj4,108472
 lsst/daf/butler/registry/versions.py,sha256=egvrctt_1wBzZgh8iSfySaQJQ9bkx_9bUJWkvvDZONQ,9331
 lsst/daf/butler/registry/wildcards.py,sha256=akMGgqDkVM0mQ9RAFENv0IrnoUyMP3mhODYXDaWIQ8o,20277
 lsst/daf/butler/registry/bridge/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
@@ -340,16 +340,16 @@ lsst/daf/butler/tests/registry_data/hsc-rc2-subset-v0.yaml,sha256=Y_Ihkoa2uZ2QSW
 lsst/daf/butler/tests/registry_data/spatial.py,sha256=X8kq0_MzlWxtAA8wjWGFPkmyOaWHP1XjRsSLZNyZaFU,21302
 lsst/daf/butler/tests/registry_data/spatial.yaml,sha256=B8nPSgBnVUx7USQPdIpTND-0gW1njcrXQ-0c3jVrv-8,11262
 lsst/daf/butler/transfers/__init__.py,sha256=M1YcFszSkNB5hB2pZwwGXqbJE2dKt4YXDinW4s1iHI8,1371
-lsst/daf/butler/transfers/_context.py,sha256=h1XDJpdg64R7DRHo7mb9xgaLiHDs_AIJmZbyo66qSSw,17278
+lsst/daf/butler/transfers/_context.py,sha256=1oOlKWj53bjcVioOIzRNPZSC_Q0dLDI_R4syI9gMOkg,16938
 lsst/daf/butler/transfers/_interfaces.py,sha256=Ia1NqcFR5E-Ik4zsXEe2fuMtNCJj5Yfe_gVHLTBtJDw,7490
 lsst/daf/butler/transfers/_yaml.py,sha256=w_0GmrueuHVLfOfAXGHFBbWAl18tX6eSElbTC-2jRoc,32632
-lsst_daf_butler-29.2025.2600.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
-lsst_daf_butler-29.2025.2600.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
-lsst_daf_butler-29.2025.2600.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
-lsst_daf_butler-29.2025.2600.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-lsst_daf_butler-29.2025.2600.dist-info/METADATA,sha256=cmlhIrx73oYFr2nqz5rguNpe5QIZRf949UPtZpzmcJQ,3265
-lsst_daf_butler-29.2025.2600.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-lsst_daf_butler-29.2025.2600.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
-lsst_daf_butler-29.2025.2600.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
-lsst_daf_butler-29.2025.2600.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-lsst_daf_butler-29.2025.2600.dist-info/RECORD,,
+lsst_daf_butler-29.2025.2700.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
+lsst_daf_butler-29.2025.2700.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
+lsst_daf_butler-29.2025.2700.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
+lsst_daf_butler-29.2025.2700.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+lsst_daf_butler-29.2025.2700.dist-info/METADATA,sha256=fXwwFAdYJ-5kzkL2Dnk9EPZXtgxjhe9O6pAx45PPrNc,3265
+lsst_daf_butler-29.2025.2700.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lsst_daf_butler-29.2025.2700.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
+lsst_daf_butler-29.2025.2700.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
+lsst_daf_butler-29.2025.2700.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+lsst_daf_butler-29.2025.2700.dist-info/RECORD,,