lsst-daf-butler 29.2025.4800__py3-none-any.whl → 30.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/lsst/daf/butler/_butler.py
+++ b/lsst/daf/butler/_butler.py
@@ -36,7 +36,7 @@ from abc import abstractmethod
  from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence
  from contextlib import AbstractContextManager
  from types import EllipsisType
- from typing import TYPE_CHECKING, Any, Literal, Self, TextIO
+ from typing import TYPE_CHECKING, Any, TextIO

  from lsst.resources import ResourcePath, ResourcePathExpression
  from lsst.utils import doImportType
@@ -94,7 +94,7 @@ class _DeprecatedDefault:
  """Default value for a deprecated parameter."""


- class Butler(LimitedButler, AbstractContextManager): # numpydoc ignore=PR02
+ class Butler(LimitedButler): # numpydoc ignore=PR02
  """Interface for data butler and factory for Butler instances.

  Parameters
@@ -358,16 +358,6 @@ class Butler(LimitedButler, AbstractContextManager): # numpydoc ignore=PR02
  case _:
  raise TypeError(f"Unknown Butler type '{butler_type}'")

- def __enter__(self) -> Self:
- return self
-
- def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Literal[False]:
- try:
- self.close()
- except Exception:
- _LOG.exception("An exception occured during Butler.close()")
- return False
-
  @staticmethod
  def makeRepo(
  root: ResourcePathExpression,
@@ -2236,15 +2226,4 @@ class Butler(LimitedButler, AbstractContextManager): # numpydoc ignore=PR02

  @abstractmethod
  def close(self) -> None:
- """Release all resources associated with this Butler instance. The
- instance may no longer be used after this is called.
-
- Notes
- -----
- Instead of calling ``close()``directly, you can use the Butler object
- as a context manager. For example::
- with Butler(...) as butler:
- butler.get(...)
- # butler is closed after exiting the block.
- """
  raise NotImplementedError()
--- a/lsst/daf/butler/_limited_butler.py
+++ b/lsst/daf/butler/_limited_butler.py
@@ -30,10 +30,10 @@ from __future__ import annotations
  __all__ = ("LimitedButler",)

  import logging
- from abc import ABC, abstractmethod
+ from abc import abstractmethod
  from collections.abc import Iterable, Iterator
- from contextlib import contextmanager
- from typing import Any, ClassVar
+ from contextlib import AbstractContextManager, contextmanager
+ from typing import Any, ClassVar, Literal, Self

  from lsst.resources import ResourcePath

@@ -48,7 +48,7 @@ from .dimensions import DimensionUniverse
  log = logging.getLogger(__name__)


- class LimitedButler(ABC):
+ class LimitedButler(AbstractContextManager):
  """A minimal butler interface that is sufficient to back
  `~lsst.pipe.base.PipelineTask` execution.
  """
@@ -100,6 +100,31 @@ class LimitedButler(ABC):
  """
  raise NotImplementedError()

+ def __enter__(self) -> Self:
+ return self
+
+ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Literal[False]:
+ try:
+ self.close()
+ except Exception:
+ log.exception("An exception occurred during Butler.close()")
+ return False
+
+ def close(self) -> None:
+ """Release all resources associated with this Butler instance. The
+ instance may no longer be used after this is called.
+
+ Notes
+ -----
+ Instead of calling ``close()`` directly, you can use the Butler object
+ as a context manager. For example::
+
+ with Butler(...) as butler:
+ butler.get(...)
+ # butler is closed after exiting the block.
+ """
+ pass
+
  def get(
  self,
  ref: DatasetRef,
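
The change above moves the context-manager protocol (__enter__/__exit__) and a default no-op close() onto LimitedButler, so Butler and every other subclass now inherits them. A minimal usage sketch; the repository path and the call inside the block are placeholders, not taken from this diff:

    from lsst.daf.butler import Butler

    with Butler.from_config("/path/to/repo") as butler:  # placeholder repo path
        print(butler.dimensions)  # any ordinary butler call goes here
    # Leaving the block runs LimitedButler.__exit__, which calls close(). An
    # exception raised by close() is logged rather than re-raised, and because
    # __exit__ returns False, an exception raised inside the block still propagates.
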
--- a/lsst/daf/butler/formatters/parquet.py
+++ b/lsst/daf/butler/formatters/parquet.py
@@ -49,13 +49,14 @@ __all__ = (
  )

  import collections.abc
+ import contextlib
  import itertools
  import json
  import logging
  import re
- from collections.abc import Iterable, Sequence
+ from collections.abc import Generator, Iterable, Sequence
  from fnmatch import fnmatchcase
- from typing import TYPE_CHECKING, Any, cast
+ from typing import IO, TYPE_CHECKING, Any, cast

  import pyarrow as pa
  import pyarrow.parquet as pq
@@ -84,6 +85,16 @@ TARGET_ROW_GROUP_BYTES = 1_000_000_000
  ASTROPY_PANDAS_INDEX_KEY = "lsst::arrow::astropy_pandas_index"


+ @contextlib.contextmanager
+ def generic_open(path: str, fs: AbstractFileSystem | None) -> Generator[IO]:
+ if fs is None:
+ with open(path, "rb") as fh:
+ yield fh
+ else:
+ with fs.open(path) as fh:
+ yield fh
+
+
  class ParquetFormatter(FormatterV2):
  """Interface for reading and writing Arrow Table objects to and from
  Parquet files.
@@ -120,7 +131,8 @@ class ParquetFormatter(FormatterV2):
  component: str | None = None,
  expected_size: int = -1,
  ) -> Any:
- schema = pq.read_schema(path, filesystem=fs)
+ with generic_open(path, fs) as handle:
+ schema = pq.read_schema(handle)

  schema_names = ["ArrowSchema", "DataFrameSchema", "ArrowAstropySchema", "ArrowNumpySchema"]

@@ -133,13 +145,13 @@ class ParquetFormatter(FormatterV2):
  if b"lsst::arrow::rowcount" in schema.metadata:
  return int(schema.metadata[b"lsst::arrow::rowcount"])

- temp_table = pq.read_table(
- path,
- filesystem=fs,
- columns=[schema.names[0]],
- use_threads=False,
- use_pandas_metadata=False,
- )
+ with generic_open(path, fs) as handle:
+ temp_table = pq.read_table(
+ handle,
+ columns=[schema.names[0]],
+ use_threads=False,
+ use_pandas_metadata=False,
+ )

  return len(temp_table[schema.names[0]])

@@ -148,7 +160,7 @@ class ParquetFormatter(FormatterV2):
  par_columns = self.file_descriptor.parameters.pop("columns", None)
  if par_columns:
  has_pandas_multi_index = False
- if b"pandas" in schema.metadata:
+ if schema.metadata and b"pandas" in schema.metadata:
  md = json.loads(schema.metadata[b"pandas"])
  if len(md["column_indexes"]) > 1:
  has_pandas_multi_index = True
@@ -184,13 +196,13 @@ class ParquetFormatter(FormatterV2):
  )

  metadata = schema.metadata if schema.metadata is not None else {}
- arrow_table = pq.read_table(
- path,
- filesystem=fs,
- columns=par_columns,
- use_threads=False,
- use_pandas_metadata=(b"pandas" in metadata),
- )
+ with generic_open(path, fs) as handle:
+ arrow_table = pq.read_table(
+ handle,
+ columns=par_columns,
+ use_threads=False,
+ use_pandas_metadata=(b"pandas" in metadata),
+ )

  return arrow_table

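
The new generic_open helper above dispatches on whether a filesystem object is supplied: with fs=None it falls back to the built-in open(path, "rb"), otherwise it uses fs.open(path), and the resulting handle is what the pq.read_schema/pq.read_table calls now receive. A rough usage sketch; the local path is a placeholder and the commented-out branch assumes an fsspec-style AbstractFileSystem (for example from s3fs):

    import pyarrow.parquet as pq

    from lsst.daf.butler.formatters.parquet import generic_open

    # Local file: no filesystem object, so generic_open uses open(path, "rb").
    with generic_open("/tmp/example.parq", None) as handle:  # placeholder path
        schema = pq.read_schema(handle)

    # Remote file: pass an fsspec filesystem and generic_open uses fs.open(path).
    # with generic_open("bucket/example.parq", fs) as handle:
    #     table = pq.read_table(handle, use_threads=False)
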
--- a/lsst/daf/butler/logging.py
+++ b/lsst/daf/butler/logging.py
@@ -537,6 +537,9 @@ class ButlerLogRecords(MutableSequence[ButlerLogRecord]):
  if cls._generic_startswith(startdata, "["):
  # This is a JSON array of records.
  return _ButlerLogRecordsModelV1
+ elif cls._generic_startswith(startdata, cls.STREAMING_EXTRA_DELIMITER):
+ # This is an empty log file with a log record per line format.
+ return None
  elif not cls._generic_startswith(startdata, "{"):
  # Limit the length of string reported in error message in case
  # this is an enormous file.
@@ -570,9 +573,12 @@ class ButlerLogRecords(MutableSequence[ButlerLogRecord]):
  """
  first_line = stream.readline()

+ empty_stream = False
  if not first_line:
  # Empty file, return zero records.
  return cls.from_records([])
+ elif cls._generic_startswith(first_line, cls.STREAMING_EXTRA_DELIMITER):
+ empty_stream = True

  model_type = cls._detect_model(first_line)

@@ -583,13 +589,17 @@ class ButlerLogRecords(MutableSequence[ButlerLogRecord]):
  return model_type.model_validate_json(all).wrap(cls)

  # A stream of records with one record per line.
- records = [ButlerLogRecord.model_validate_json(first_line)]
- for line in stream:
- line = line.rstrip()
- if cls._generic_startswith(line, "###EXTRA###"):
- break
- elif line: # skip blank lines
- records.append(ButlerLogRecord.model_validate_json(line))
+ if not empty_stream:
+ records = [ButlerLogRecord.model_validate_json(first_line)]
+ for line in stream:
+ line = line.rstrip()
+ if cls._generic_startswith(line, cls.STREAMING_EXTRA_DELIMITER):
+ break
+ elif line: # skip blank lines
+ records.append(ButlerLogRecord.model_validate_json(line))
+ else:
+ # No records but might have extra metadata.
+ records = []
  extra_data = stream.read()
  if extra_data:
  extra = pydantic_core.from_json(extra_data)
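
The logging changes above teach the record-per-line ("streaming") parser to accept a stream whose first line is already the extra-data delimiter (the marker the replaced code spelled as ###EXTRA###, now referenced via STREAMING_EXTRA_DELIMITER): such a stream now yields zero records instead of being rejected by _detect_model. A sketch of that case, assuming the classmethod shown here is ButlerLogRecords.from_stream and using a placeholder metadata blob:

    import io

    from lsst.daf.butler.logging import ButlerLogRecords

    # A streaming-format log with zero records: the first line is the delimiter,
    # followed by an arbitrary JSON blob of extra metadata.
    stream = io.StringIO('###EXTRA###\n{"anything": "extra metadata"}\n')
    records = ButlerLogRecords.from_stream(stream)
    assert len(records) == 0  # previously this input would fail model detection
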
--- a/lsst/daf/butler/script/_pruneDatasets.py
+++ b/lsst/daf/butler/script/_pruneDatasets.py
@@ -243,6 +243,7 @@ def pruneDatasets(
  find_first=not find_all,
  show_uri=False,
  )
+ dataset_refs = list(itertools.chain.from_iterable(datasets_found.getDatasets()))

  result = PruneDatasetsResult(list(datasets_found.getTables()))

@@ -258,7 +259,7 @@ def pruneDatasets(
  def doPruneDatasets() -> PruneDatasetsResult:
  with Butler.from_config(repo, writeable=True) as butler:
  butler.pruneDatasets(
- refs=list(itertools.chain(*datasets_found.getDatasets())),
+ refs=dataset_refs,
  disassociate=disassociate,
  tags=disassociate_tags or (),
  purge=purge,
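
The pruning change above flattens the grouped query results once, right after the query, and reuses that list inside doPruneDatasets. itertools.chain.from_iterable(groups) produces the same flat sequence as the old itertools.chain(*groups) spelling without unpacking the outer sequence into call arguments; a tiny equivalence check with placeholder values:

    import itertools

    groups = [["ref_a", "ref_b"], ["ref_c"]]  # placeholder stand-ins for lists of DatasetRef
    flat_star = list(itertools.chain(*groups))
    flat_from_iterable = list(itertools.chain.from_iterable(groups))
    assert flat_star == flat_from_iterable == ["ref_a", "ref_b", "ref_c"]
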
--- a/lsst/daf/butler/version.py
+++ b/lsst/daf/butler/version.py
@@ -1,2 +1,2 @@
  __all__ = ["__version__"]
- __version__ = "29.2025.4800"
+ __version__ = "30.0.0rc1"
--- a/lsst_daf_butler-29.2025.4800.dist-info/METADATA
+++ b/lsst_daf_butler-30.0.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lsst-daf-butler
- Version: 29.2025.4800
+ Version: 30.0.0rc1
  Summary: An abstraction layer for reading and writing astronomical data to datastores.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License-Expression: BSD-3-Clause OR GPL-3.0-or-later
--- a/lsst_daf_butler-29.2025.4800.dist-info/RECORD
+++ b/lsst_daf_butler-30.0.0rc1.dist-info/RECORD
@@ -1,7 +1,7 @@
  lsst/__init__.py,sha256=_2bZAHuDVAx7MM7KA7pt3DYp641NY4RzSoRAwesWKfU,67
  lsst/daf/__init__.py,sha256=_2bZAHuDVAx7MM7KA7pt3DYp641NY4RzSoRAwesWKfU,67
  lsst/daf/butler/__init__.py,sha256=Ku-Gzm95RVSJtlvA3Ptr050S8JFM_QFZVwnxpkC136o,3519
- lsst/daf/butler/_butler.py,sha256=0Tr9E28uV43M5aGhlkxpwOd_QLyTJgfNRXNtdvuUMNE,96780
+ lsst/daf/butler/_butler.py,sha256=Unvzmm2BzJy1J50F9G5M63ZJ-MJIqtIVNp_TGczXx3g,96021
  lsst/daf/butler/_butler_collections.py,sha256=tRZaFjPyQzKz--9v7HNmJNjDwB4hi9LJUIyvx1adVFc,19384
  lsst/daf/butler/_butler_config.py,sha256=q_iJf4rk-L5tMdvIZwEqOvQrt5VEMvDsVzoWmvoxx2A,9619
  lsst/daf/butler/_butler_instance_options.py,sha256=6l4hF7nfxb77Rf0r2PAmiaSid7kM1z6kQPW4VzRUjYs,2046
@@ -22,7 +22,7 @@ lsst/daf/butler/_file_dataset.py,sha256=KF_o5zi01L0gK4t_Ncb3BSbCTiyf9zmwwTpWoNga
  lsst/daf/butler/_file_descriptor.py,sha256=PIYT9O2NRbNndtI2rMXfx7tP6nUcTnd9fxFVsq-Hu1s,3841
  lsst/daf/butler/_formatter.py,sha256=BZTpbAwATykj6Omz4pl5f5SdI_Qa0rWxhSUk1G4_UIc,83156
  lsst/daf/butler/_labeled_butler_factory.py,sha256=Ly1oaSBDTNDaDtEJFEEys4KRc359Gcr9KLLwi-ggZHE,9052
- lsst/daf/butler/_limited_butler.py,sha256=85na5IWR3nRRWEgMcuy-lY46FPrxBRqsvgEMEnZoOkQ,16696
+ lsst/daf/butler/_limited_butler.py,sha256=GTsAzymQf11daCtDhI4WBqee8zyBzKN9CD85gOeG-WU,17514
  lsst/daf/butler/_location.py,sha256=oX-AKZeSdToeKBxvB6y-pxLMiFwF-3nz3YleH5SiA3A,10451
  lsst/daf/butler/_named.py,sha256=-AvkTP5JIsCwj5tAmZO8epoRoF1_dGuz2aC0ItLg7-M,19541
  lsst/daf/butler/_quantum.py,sha256=xqWk2mHLydf4VWpRTCR74eBjApESuaTb46_iOdLKFDo,25575
@@ -39,7 +39,7 @@ lsst/daf/butler/arrow_utils.py,sha256=_vADFsvZZD-vFRYOZ_cZ9FkUkbKnJxD4KaiUmrQCGD
  lsst/daf/butler/column_spec.py,sha256=XFijHMSv2Gwa2olqDjCEF5mELr7IhGJK6npNwZ5iMRE,14639
  lsst/daf/butler/ddl.py,sha256=Yi8_8IgSh7omR1oW1M2kOTw83zyIqC0E041FShSYFBw,23376
  lsst/daf/butler/json.py,sha256=hOCbNsVBXc9q9ZksKP_-5Zh5-lSGXZqjBFaCGbobBcI,5374
- lsst/daf/butler/logging.py,sha256=Bfcmw5lk0UInO_TyQez250O4YkAHAq3vCjYuxaTDAvg,25942
+ lsst/daf/butler/logging.py,sha256=SO6aq8vgnEYuMqNI-TOBf370x9Eaa9dZp_FkXBR9iGo,26431
  lsst/daf/butler/mapping_factory.py,sha256=tLMTD03Rgz3xUwGs2vLysyahpEgt3Ob7F5zrXfJHNv4,11504
  lsst/daf/butler/name_shrinker.py,sha256=Pc5S8cbauT4KJ7ALKurEO-kaVR6Ih5AC5lF3NOZqMRg,4269
  lsst/daf/butler/nonempty_mapping.py,sha256=YnIowyh4v-bBf5Q032egN7DUOgv1Ip0uS6C80GIEJTU,4356
@@ -51,7 +51,7 @@ lsst/daf/butler/repo_relocation.py,sha256=Ivhx2xU4slc53Z6RExhNnquMr2Hx-S8h62emml
  lsst/daf/butler/time_utils.py,sha256=MVTfOFI2xt3IeA46pa-fWY2kJRwSzaQyq1uzeUABcTM,11805
  lsst/daf/butler/timespan_database_representation.py,sha256=rYeQ_vp6gneRjboqV-gvNW0DWhm1QJM-KnVzFTDVZ0I,24550
  lsst/daf/butler/utils.py,sha256=5u50COK5z4u31grOhmQF7mFz55biNLOvSMRdQjEdsjo,5140
- lsst/daf/butler/version.py,sha256=MHZJr_Yoevn1SeqerO6dmfTP3xtJLGxPmEi-9Jpl380,55
+ lsst/daf/butler/version.py,sha256=vSgpHsF0PG5ox9cJDNTmDJ8a4r5c8EKOc2gxM3iszgs,52
  lsst/daf/butler/_rubin/__init__.py,sha256=9z5kmc6LJ3C_iPFV46cvdlQ2qOGJbZh-2Ft5Z-rbE28,1569
  lsst/daf/butler/_rubin/file_datasets.py,sha256=P5_BIhxpVj9qfLuLiI2_dClMHsjO5Qm5oDXVr3WntNU,3607
  lsst/daf/butler/_utilities/__init__.py,sha256=vLzPZYAJ-9r1cnqsP64MVpFgSw2166yOpq0iPMSdAvw,1298
@@ -143,7 +143,7 @@ lsst/daf/butler/formatters/json.py,sha256=n8IDbIH4yAsbppXVOtMar3WvMaZm4chfXQmm9D
  lsst/daf/butler/formatters/logs.py,sha256=p_J2v9VFpUIRzCqtttP_QdgiX0FepTG4C4f7EQCLJoE,3050
  lsst/daf/butler/formatters/matplotlib.py,sha256=nEqFJOab970b5EeEjSm-T1fzM9M7cHKGmTubjr4bgNA,2159
  lsst/daf/butler/formatters/packages.py,sha256=XzOUptSjXX-NJ2CUfYkTdI09M1m6hj-mdy4SZMCyey8,3409
- lsst/daf/butler/formatters/parquet.py,sha256=fGNoBMoixA3UxHo3f5rseCIgeUi4Uozo524Bwm2356I,48618
+ lsst/daf/butler/formatters/parquet.py,sha256=sPZG1sid0qpWquC9d5CL9hIimozXUL1IkiO_WWAoVw8,49049
  lsst/daf/butler/formatters/pickle.py,sha256=gxZ9VumvJm1EGQiPtwC8D3x5CPjihfUzE4GDVrv7YRQ,2218
  lsst/daf/butler/formatters/typeless.py,sha256=Q_Nh5bt0zrCExA19XA_VdAVvVUPlGwCCQ463RWGArkM,9158
  lsst/daf/butler/formatters/yaml.py,sha256=-zqKTioad3wPNc1Xp6GYgx_TRhQNaxydGQ4VOCzfEZU,4764
@@ -285,7 +285,7 @@ lsst/daf/butler/remote_butler/server/handlers/_query_streaming.py,sha256=uSWaJ8A
  lsst/daf/butler/remote_butler/server/handlers/_utils.py,sha256=ALCX9Hi59izfEGifHBs9lP7Udjptv0lXpf4mGpvorCA,2332
  lsst/daf/butler/script/__init__.py,sha256=mbYs1becM_mW1ejNDDePqWyt47yBPARL2pO9Z7i0DC0,2326
  lsst/daf/butler/script/_associate.py,sha256=Uxarvv5cHQmDRIpu4CC4EY2JN3suXOvga8-AktWXw60,2930
- lsst/daf/butler/script/_pruneDatasets.py,sha256=0uKuDUxSNBIsr82ixasG9IvUyUl6WPfbJeWKAZVK810,10461
+ lsst/daf/butler/script/_pruneDatasets.py,sha256=VtpKCjC7-qaXwVdDwpfZgwNhTFy9UW-4zjncwWg4OrA,10510
  lsst/daf/butler/script/butlerImport.py,sha256=j4dfnYSJ6KlRKLT6V2ZjugamJq8DhpI0PjEnSLBBOrk,3132
  lsst/daf/butler/script/certifyCalibrations.py,sha256=kDtdeO9HKTFzd68eWGn_meNQnqnrA2JG7f9du_xrYxg,4026
  lsst/daf/butler/script/collectionChain.py,sha256=4SeGm1CyWk6_qAvTua9yWyQSY9NlTCMQUuX3xPCu1HQ,6164
@@ -337,13 +337,13 @@ lsst/daf/butler/transfers/__init__.py,sha256=M1YcFszSkNB5hB2pZwwGXqbJE2dKt4YXDin
  lsst/daf/butler/transfers/_context.py,sha256=Ro_nf9NDw9IAr-Pw_NtcdotQKx34RbBbNubt20zwRXU,16449
  lsst/daf/butler/transfers/_interfaces.py,sha256=Ia1NqcFR5E-Ik4zsXEe2fuMtNCJj5Yfe_gVHLTBtJDw,7490
  lsst/daf/butler/transfers/_yaml.py,sha256=w_0GmrueuHVLfOfAXGHFBbWAl18tX6eSElbTC-2jRoc,32632
- lsst_daf_butler-29.2025.4800.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
- lsst_daf_butler-29.2025.4800.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
- lsst_daf_butler-29.2025.4800.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
- lsst_daf_butler-29.2025.4800.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- lsst_daf_butler-29.2025.4800.dist-info/METADATA,sha256=lHXvwM7V9rhuuLWAqceEi7LucKY8jZ3runxAfycxoUw,3813
- lsst_daf_butler-29.2025.4800.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- lsst_daf_butler-29.2025.4800.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
- lsst_daf_butler-29.2025.4800.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
- lsst_daf_butler-29.2025.4800.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- lsst_daf_butler-29.2025.4800.dist-info/RECORD,,
+ lsst_daf_butler-30.0.0rc1.dist-info/licenses/COPYRIGHT,sha256=k1Vq0-Be_K-puaeW4UZnckPjksEL-MJh4XKiWcjMxJE,312
+ lsst_daf_butler-30.0.0rc1.dist-info/licenses/LICENSE,sha256=pRExkS03v0MQW-neNfIcaSL6aiAnoLxYgtZoFzQ6zkM,232
+ lsst_daf_butler-30.0.0rc1.dist-info/licenses/bsd_license.txt,sha256=7MIcv8QRX9guUtqPSBDMPz2SnZ5swI-xZMqm_VDSfxY,1606
+ lsst_daf_butler-30.0.0rc1.dist-info/licenses/gpl-v3.0.txt,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ lsst_daf_butler-30.0.0rc1.dist-info/METADATA,sha256=jMWZliOVZ-7FuZBEA5n0FNiRsm531aSDDPQdkSAQ87k,3810
+ lsst_daf_butler-30.0.0rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ lsst_daf_butler-30.0.0rc1.dist-info/entry_points.txt,sha256=XsRxyTK3c-jGlKVuVnbpch3gtaO0lAA_fS3i2NGS5rw,59
+ lsst_daf_butler-30.0.0rc1.dist-info/top_level.txt,sha256=eUWiOuVVm9wwTrnAgiJT6tp6HQHXxIhj2QSZ7NYZH80,5
+ lsst_daf_butler-30.0.0rc1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ lsst_daf_butler-30.0.0rc1.dist-info/RECORD,,