lsst-daf-butler 30.0.0rc3__py3-none-any.whl → 30.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. lsst/daf/butler/_butler.py +19 -3
  2. lsst/daf/butler/_butler_collections.py +4 -4
  3. lsst/daf/butler/_butler_metrics.py +2 -0
  4. lsst/daf/butler/_dataset_provenance.py +1 -1
  5. lsst/daf/butler/_dataset_ref.py +1 -1
  6. lsst/daf/butler/_exceptions.py +2 -2
  7. lsst/daf/butler/_file_dataset.py +2 -1
  8. lsst/daf/butler/_formatter.py +12 -0
  9. lsst/daf/butler/_query_all_datasets.py +2 -0
  10. lsst/daf/butler/cli/cmd/_remove_runs.py +1 -12
  11. lsst/daf/butler/column_spec.py +4 -4
  12. lsst/daf/butler/datastore/_datastore.py +21 -1
  13. lsst/daf/butler/datastore/stored_file_info.py +2 -2
  14. lsst/daf/butler/datastores/chainedDatastore.py +4 -0
  15. lsst/daf/butler/datastores/fileDatastore.py +11 -1
  16. lsst/daf/butler/datastores/file_datastore/get.py +4 -4
  17. lsst/daf/butler/datastores/file_datastore/retrieve_artifacts.py +5 -1
  18. lsst/daf/butler/datastores/file_datastore/transfer.py +2 -2
  19. lsst/daf/butler/datastores/inMemoryDatastore.py +8 -0
  20. lsst/daf/butler/ddl.py +2 -2
  21. lsst/daf/butler/dimensions/_coordinate.py +6 -8
  22. lsst/daf/butler/dimensions/_record_set.py +1 -1
  23. lsst/daf/butler/dimensions/_records.py +9 -3
  24. lsst/daf/butler/direct_butler/_direct_butler.py +40 -23
  25. lsst/daf/butler/direct_query_driver/_driver.py +5 -4
  26. lsst/daf/butler/direct_query_driver/_result_page_converter.py +1 -1
  27. lsst/daf/butler/formatters/parquet.py +6 -6
  28. lsst/daf/butler/nonempty_mapping.py +1 -1
  29. lsst/daf/butler/persistence_context.py +8 -5
  30. lsst/daf/butler/queries/_general_query_results.py +1 -1
  31. lsst/daf/butler/queries/driver.py +1 -1
  32. lsst/daf/butler/queries/expression_factory.py +2 -2
  33. lsst/daf/butler/queries/expressions/parser/exprTree.py +1 -1
  34. lsst/daf/butler/queries/expressions/parser/parserYacc.py +1 -1
  35. lsst/daf/butler/queries/overlaps.py +2 -2
  36. lsst/daf/butler/queries/tree/_column_set.py +1 -1
  37. lsst/daf/butler/registry/_collection_record_cache.py +1 -1
  38. lsst/daf/butler/registry/_collection_summary_cache.py +5 -4
  39. lsst/daf/butler/registry/_registry.py +4 -0
  40. lsst/daf/butler/registry/databases/postgresql.py +2 -1
  41. lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py +1 -1
  42. lsst/daf/butler/registry/datasets/byDimensions/_manager.py +4 -2
  43. lsst/daf/butler/registry/datasets/byDimensions/summaries.py +3 -2
  44. lsst/daf/butler/registry/interfaces/_datasets.py +2 -1
  45. lsst/daf/butler/registry/interfaces/_obscore.py +1 -1
  46. lsst/daf/butler/registry/obscore/_records.py +1 -1
  47. lsst/daf/butler/registry/obscore/_spatial.py +2 -2
  48. lsst/daf/butler/registry/queries/_results.py +2 -2
  49. lsst/daf/butler/registry/sql_registry.py +1 -1
  50. lsst/daf/butler/registry/wildcards.py +5 -5
  51. lsst/daf/butler/remote_butler/_get.py +1 -1
  52. lsst/daf/butler/remote_butler/_remote_butler.py +1 -0
  53. lsst/daf/butler/remote_butler/_remote_file_transfer_source.py +4 -0
  54. lsst/daf/butler/remote_butler/authentication/cadc.py +4 -3
  55. lsst/daf/butler/script/_pruneDatasets.py +4 -2
  56. lsst/daf/butler/script/configValidate.py +2 -2
  57. lsst/daf/butler/script/queryCollections.py +2 -2
  58. lsst/daf/butler/script/removeCollections.py +2 -0
  59. lsst/daf/butler/script/removeRuns.py +2 -0
  60. lsst/daf/butler/tests/cliCmdTestBase.py +2 -0
  61. lsst/daf/butler/tests/cliLogTestBase.py +2 -0
  62. lsst/daf/butler/tests/hybrid_butler.py +6 -1
  63. lsst/daf/butler/tests/registry_data/spatial.py +4 -2
  64. lsst/daf/butler/tests/utils.py +1 -1
  65. lsst/daf/butler/timespan_database_representation.py +3 -3
  66. lsst/daf/butler/version.py +1 -1
  67. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/METADATA +3 -2
  68. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/RECORD +76 -76
  69. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/WHEEL +1 -1
  70. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/entry_points.txt +0 -0
  71. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/COPYRIGHT +0 -0
  72. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/LICENSE +0 -0
  73. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/bsd_license.txt +0 -0
  74. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/gpl-v3.0.txt +0 -0
  75. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/top_level.txt +0 -0
  76. {lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/zip-safe +0 -0
lsst/daf/butler/formatters/parquet.py
@@ -270,18 +270,18 @@ def arrow_to_pandas(arrow_table: pa.Table) -> pd.DataFrame:


  def arrow_to_astropy(arrow_table: pa.Table) -> atable.Table:
- """Convert a pyarrow table to an `astropy.Table`.
+ """Convert a pyarrow table to an `astropy.table.Table`.

  Parameters
  ----------
  arrow_table : `pyarrow.Table`
  Input arrow table to convert. If the table has astropy unit
  metadata in the schema it will be used in the construction
- of the ``astropy.Table``.
+ of the ``astropy.table.Table``.

  Returns
  -------
- table : `astropy.Table`
+ table : `astropy.table.Table`
  Converted astropy table.
  """
  from astropy.table import Table
@@ -520,7 +520,7 @@ def astropy_to_arrow(astropy_table: atable.Table) -> pa.Table:

  Parameters
  ----------
- astropy_table : `astropy.Table`
+ astropy_table : `astropy.table.Table`
  Input astropy table.

  Returns
@@ -584,7 +584,7 @@ def astropy_to_pandas(astropy_table: atable.Table, index: str | None = None) ->

  Parameters
  ----------
- astropy_table : `astropy.Table`
+ astropy_table : `astropy.table.Table`
  Input astropy table.
  index : `str`, optional
  Name of column to set as index.
@@ -640,7 +640,7 @@ def _astropy_to_numpy_dict(astropy_table: atable.Table) -> dict[str, np.ndarray]

  Parameters
  ----------
- astropy_table : `astropy.Table`
+ astropy_table : `astropy.table.Table`
  Input astropy table.

  Returns
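
The hunks above touch the pyarrow/astropy conversion helpers in lsst/daf/butler/formatters/parquet.py. A minimal round-trip sketch, assuming only the function names and signatures shown in the hunks (the column names and units below are illustrative):

    import astropy.table as atable
    import astropy.units as u
    from lsst.daf.butler.formatters.parquet import arrow_to_astropy, astropy_to_arrow

    # Build a small astropy table; per the docstring above, the unit on
    # "flux" should travel through the arrow schema metadata.
    table = atable.Table({"id": [1, 2, 3], "flux": [1.0, 2.0, 3.0] * u.Jy})
    arrow_table = astropy_to_arrow(table)          # pyarrow.Table
    round_tripped = arrow_to_astropy(arrow_table)  # astropy.table.Table again
    print(round_tripped)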
lsst/daf/butler/nonempty_mapping.py
@@ -43,7 +43,7 @@ _V = TypeVar("_V", bound=Copyable, covariant=True)


  class NonemptyMapping(Mapping[_K, _V]):
- """A `Mapping` that implicitly adds values (like
+ """A `~collections.abc.Mapping` that implicitly adds values (like
  `~collections.defaultdict`) but treats any that evaluate to `False` as not
  present.

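
The NonemptyMapping docstring above describes defaultdict-like implicit value creation combined with hiding of falsy values. The snippet below is a conceptual sketch of that behaviour only, not the library's implementation (the real class is generic over a Copyable value type and its constructor may differ):

    from collections.abc import Iterator, Mapping

    class FalsyHidingMapping(Mapping):
        """Toy stand-in illustrating the documented behaviour."""

        def __init__(self, default_factory):
            self._data = {}
            self._default_factory = default_factory

        def __getitem__(self, key):
            # Implicitly create a value on first access, like defaultdict.
            return self._data.setdefault(key, self._default_factory())

        def __iter__(self) -> Iterator:
            # Keys whose values evaluate to False are reported as absent.
            return (k for k, v in self._data.items() if v)

        def __len__(self) -> int:
            return sum(1 for _ in self)

    m = FalsyHidingMapping(set)
    m["a"].add(1)   # created and populated
    _ = m["b"]      # created but left empty, so it stays hidden
    print(list(m))  # ['a']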
lsst/daf/butler/persistence_context.py
@@ -180,17 +180,20 @@ class PersistenceContextVars:

  Parameters
  ----------
- function : `Callable`
+ function : `collections.abc.Callable`
  A callable which is to be executed inside a specific context.
  *args : tuple
- Positional arguments which are to be passed to the `Callable`.
+ Positional arguments which are to be passed to the
+ `~collections.abc.Callable`.
  **kwargs : dict, optional
- Extra key word arguments which are to be passed to the `Callable`.
+ Extra key word arguments which are to be passed to the
+ `~collections.abc.Callable`.

  Returns
  -------
- result : `Any`
- The result returned by executing the supplied `Callable`.
+ result : `typing.Any`
+ The result returned by executing the supplied
+ `~collections.abc.Callable`.
  """
  self._ctx = copy_context()
  # Type checkers seem to have trouble with a second layer nesting of
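
The method documented above executes the supplied callable inside a copied contextvars context (the hunk shows copy_context() being captured). A standalone sketch of that standard-library mechanism, with illustrative names that are not part of the butler API:

    from contextvars import ContextVar, copy_context

    cache_enabled = ContextVar("cache_enabled", default=False)

    def do_work() -> bool:
        # Sees whatever value the surrounding context holds.
        return cache_enabled.get()

    ctx = copy_context()
    ctx.run(cache_enabled.set, True)  # mutate only the copied context
    print(ctx.run(do_work))           # True inside the copy
    print(do_work())                  # False in the original context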
lsst/daf/butler/queries/_general_query_results.py
@@ -93,7 +93,7 @@ class GeneralQueryResults(QueryResultsBase):

  Yields
  ------
- row_dict : `dict` [`str`, `Any`]
+ row_dict : `dict` [`str`, `typing.Any`]
  Result row as dictionary, the keys are the names of the dimensions,
  dimension fields (separated from dimension by dot) or dataset type
  fields (separated from dataset type name by dot).
lsst/daf/butler/queries/driver.py
@@ -245,7 +245,7 @@ class QueryDriver(AbstractContextManager[None]):
  ----------
  dimensions : `DimensionGroup`
  Dimensions of the data coordinates.
- rows : `Iterable` [ `tuple` ]
+ rows : `~collections.abc.Iterable` [ `tuple` ]
  Tuples of data coordinate values, covering just the "required"
  subset of ``dimensions``.

lsst/daf/butler/queries/expression_factory.py
@@ -557,7 +557,7 @@ class ExpressionFactory:
  -------
  logical_and : `tree.Predicate`
  A boolean expression that evaluates to `True` only if all operands
- evaluate to `True.
+ evaluate to `True`.
  """
  return first.logical_and(*args)

@@ -575,7 +575,7 @@ class ExpressionFactory:
  -------
  logical_or : `tree.Predicate`
  A boolean expression that evaluates to `True` if any operand
- evaluates to `True.
+ evaluates to `True`.
  """
  return first.logical_or(*args)

lsst/daf/butler/queries/expressions/parser/exprTree.py
@@ -561,7 +561,7 @@ class PolygonNode(Node):

  Parameters
  ----------
- vertices : `list`[`tuple`[`Node`, `Node`]]
+ vertices : `list` [`tuple` [`Node`, `Node`]]
  Node representing vertices of polygon.
  """

lsst/daf/butler/queries/expressions/parser/parserYacc.py
@@ -109,7 +109,7 @@ def _parseTimeString(time_str: str) -> astropy.time.Time:
  Returns
  -------
  time : `astropy.time.Time`
- The parsed time.
+ The parsed time.

  Raises
  ------
lsst/daf/butler/queries/overlaps.py
@@ -69,9 +69,9 @@ class _NaiveDisjointSet(Generic[_T]):

  Parameters
  ----------
- a :
+ a
  Element whose subset should be merged.
- b :
+ b
  Element whose subset should be merged.

  Returns
lsst/daf/butler/queries/tree/_column_set.py
@@ -429,7 +429,7 @@ class ColumnOrder:

  Parameters
  ----------
- row : `Sequence` [ `DataIdValue` ]
+ row : `~collections.abc.Sequence` [ `DataIdValue` ]
  A row output by the SQL query associated with these columns.
  """
  return row[: len(self._dimension_keys)]
lsst/daf/butler/registry/_collection_record_cache.py
@@ -135,7 +135,7 @@ class CollectionRecordCache:

  Parameters
  ----------
- key : `Any`
+ key : `typing.Any`
  Collection key.

  Returns
lsst/daf/butler/registry/_collection_summary_cache.py
@@ -54,7 +54,8 @@ class CollectionSummaryCache:

  Parameters
  ----------
- summaries : `~collections.abc.Mapping` [`Any`, `CollectionSummary`]
+ summaries : `~collections.abc.Mapping` [`typing.Any`, \
+ `CollectionSummary`]
  Summary records indexed by collection key, records must include all
  dataset types.
  """
@@ -65,15 +66,15 @@ class CollectionSummaryCache:

  Parameters
  ----------
- keys : `~collections.abc.Iterable` [`Any`]
+ keys : `~collections.abc.Iterable` [`typing.Any`]
  Sequence of collection keys.

  Returns
  -------
- summaries : `dict` [`Any`, `CollectionSummary`]
+ summaries : `dict` [`typing.Any`, `CollectionSummary`]
  Dictionary of summaries indexed by collection keys, includes
  records found in the cache.
- missing_keys : `set` [`Any`]
+ missing_keys : `set` [`typing.Any`]
  Collection keys that are not present in the cache.
  """
  found = {}
lsst/daf/butler/registry/_registry.py
@@ -437,6 +437,10 @@ class Registry(ABC):
  Name of the type to be removed or tuple containing a list of type
  names to be removed. Wildcards are allowed.

+ Returns
+ -------
+ None
+
  Raises
  ------
  lsst.daf.butler.registry.OrphanedRecordError
lsst/daf/butler/registry/databases/postgresql.py
@@ -92,7 +92,8 @@ class PostgresqlDatabase(Database):
  allow_temporary_tables: bool = True,
  ):
  with engine.connect() as connection:
- # `Any` to make mypy ignore the line below, can't use type: ignore
+ # `typing.Any` to make mypy ignore the line below, can't
+ # use type: ignore
  dbapi: Any = connection.connection
  try:
  dsn = dbapi.get_dsn_parameters()
lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py
@@ -155,7 +155,7 @@ class DatasetTypeCache:
  dataset_type : `DatasetType` or `None`
  Cached dataset type, `None` is returned if the name is not in the
  cache.
- extra : `Any` or `None`
+ extra : `typing.Any` or `None`
  Cached opaque data, `None` is returned if the name is not in the
  cache.
  """
lsst/daf/butler/registry/datasets/byDimensions/_manager.py
@@ -822,8 +822,10 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):

  Parameters
  ----------
- storage : `_DatasetREcordStorage`
- Struct that holds the tables and ID for a dataset type.
+ dimensions : `DimensionGroup`
+ Dimensions to validate.
+ tags : `sqlalchemy.schema.Table`
+ ???
  tmp_tags : `sqlalchemy.schema.Table`
  Temporary table with new datasets and the same schema as tags
  table.
lsst/daf/butler/registry/datasets/byDimensions/summaries.py
@@ -304,13 +304,14 @@ class CollectionSummaryManager:
  dataset_type_names : `~collections.abc.Iterable` [`str`]
  Names of dataset types to include into returned summaries. If
  `None` then all dataset types will be included.
- dataset_type_factory : `Callable`
+ dataset_type_factory : `~collections.abc.Callable`
  Method that takes a table row and make `DatasetType` instance out
  of it.

  Returns
  -------
- summaries : `~collections.abc.Mapping` [`Any`, `CollectionSummary`]
+ summaries : `~collections.abc.Mapping` [`typing.Any`, \
+ `CollectionSummary`]
  Collection summaries indexed by collection record key. This mapping
  will also contain all nested non-chained collections of the chained
  collections.
lsst/daf/butler/registry/interfaces/_datasets.py
@@ -378,7 +378,8 @@ class DatasetRecordStorageManager(VersionedExtension):

  Returns
  -------
- summaries : `~collections.abc.Mapping` [`Any`, `CollectionSummary`]
+ summaries : `~collections.abc.Mapping` [`typing.Any`, \
+ `CollectionSummary`]
  Collection summaries indexed by collection record key. This mapping
  will also contain all nested non-chained collections of the chained
  collections.
lsst/daf/butler/registry/interfaces/_obscore.py
@@ -115,7 +115,7 @@ class ObsCoreTableManager(VersionedExtension):
  implemented with this manager.
  universe : `DimensionUniverse`
  All dimensions known to the registry.
- config : `dict` [ `str`, `Any` ]
+ config : `dict` [ `str`, `typing.Any` ]
  Configuration of the obscore manager.
  datasets : `type`
  Type of dataset manager.
lsst/daf/butler/registry/obscore/_records.py
@@ -256,7 +256,7 @@ class RecordFactory:

  Returns
  -------
- record : `dict` [ `str`, `Any` ] or `None`
+ record : `dict` [ `str`, `typing.Any` ] or `None`
  ObsCore record represented as a dictionary. `None` is returned if
  dataset does not need to be stored in the obscore table, e.g. when
  dataset type is not in obscore configuration.
lsst/daf/butler/registry/obscore/_spatial.py
@@ -72,7 +72,7 @@ class SpatialObsCorePlugin(ABC):
  name : `str`
  Arbitrary name given to this plugin (usually key in
  configuration).
- config : `dict` [ `str`, `Any` ]
+ config : `dict` [ `str`, `typing.Any` ]
  Plugin configuration dictionary.
  db : `Database`, optional
  Interface to the underlying database engine and namespace. In some
@@ -120,7 +120,7 @@ class SpatialObsCorePlugin(ABC):

  Returns
  -------
- record : `dict` [ `str`, `Any` ] or `None`
+ record : `dict` [ `str`, `typing.Any` ] or `None`
  Data to store in the main obscore table with column values
  corresponding to a region or `None` if there is nothing to store.

lsst/daf/butler/registry/queries/_results.py
@@ -319,7 +319,7 @@ class DataCoordinateQueryResults(QueryResultsBase, DataCoordinateIterable):
  datasetType : `DatasetType` or `str`
  Dataset type or the name of one to search for. Must have
  dimensions that are a subset of ``self.graph``.
- collections : `Any`
+ collections : `typing.Any`
  An expression that fully or partially identifies the collections
  to search for the dataset, such as a `str`, `re.Pattern`, or
  iterable thereof. ``...`` can be used to return all collections.
@@ -368,7 +368,7 @@ class DataCoordinateQueryResults(QueryResultsBase, DataCoordinateIterable):
  datasetType : `DatasetType` or `str`
  Dataset type or the name of one to search for. Must have
  dimensions that are a subset of ``self.graph``.
- collections : `Any`
+ collections : `typing.Any`
  An expression that fully or partially identifies the collections
  to search for the dataset, such as a `str`, `re.Pattern`, or
  iterable thereof. ``...`` can be used to return all collections.
lsst/daf/butler/registry/sql_registry.py
@@ -1375,7 +1375,7 @@ class SqlRegistry:
  records = {}
  else:
  records = dict(records)
- if isinstance(dataId, DataCoordinate) and dataId.hasRecords():
+ if isinstance(dataId, DataCoordinate) and dataId.hasRecords() and not kwargs:
  for element_name in dataId.dimensions.elements:
  records[element_name] = dataId.records[element_name]
  keys: dict[str, str | int] = dict(standardized.mapping)
lsst/daf/butler/registry/wildcards.py
@@ -97,14 +97,14 @@ class CategorizedWildcard:
  coerceUnrecognized : `~collections.abc.Callable`, optional
  A callback that takes a single argument of arbitrary type and
  returns either a `str` - appended to `strings` - or a `tuple` of
- (`str`, `Any`) to be appended to `items`. This will be called on
- objects of unrecognized type. Exceptions will be reraised as
- `TypeError` (and chained).
+ (`str`, `typing.Any`) to be appended to `items`. This will be
+ called on objects of unrecognized type. Exceptions will be reraised
+ as `TypeError` (and chained).
  coerceItemValue : `~collections.abc.Callable`, optional
  If provided, ``expression`` may be a mapping from `str` to any
  type that can be passed to this function; the result of that call
  will be stored instead as the value in ``self.items``.
- defaultItemValue : `Any`, optional
+ defaultItemValue : `typing.Any`, optional
  If provided, combine this value with any string values encountered
  (including any returned by ``coerceUnrecognized``) to form a
  `tuple` and add it to `items`, guaranteeing that `strings` will be
@@ -267,7 +267,7 @@ class CategorizedWildcard:

  items: list[tuple[str, Any]]
  """Two-item tuples that relate string values to other objects
- (`list` [ `tuple` [ `str`, `Any` ] ]).
+ (`list` [ `tuple` [ `str`, `typing.Any` ] ]).
  """


lsst/daf/butler/remote_butler/_get.py
@@ -39,7 +39,7 @@ def get_dataset_as_python_object(
  auth : `RemoteButlerAuthenticationProvider`
  Provides authentication headers for HTTP service hosting the artifact
  files.
- parameters : `Mapping`[`str`, `typing.Any`]
+ parameters : `~collections.abc.Mapping` [`str`, `typing.Any`]
  `StorageClass` and `Formatter` parameters to be used when converting
  the artifact to a Python object.
  cache_manager : `AbstractDatastoreCacheManager` or `None`, optional
lsst/daf/butler/remote_butler/_remote_butler.py
@@ -604,6 +604,7 @@ class RemoteButler(Butler):  # numpydoc ignore=PR02
  *,
  transfer_dimensions: bool = False,
  dry_run: bool = False,
+ skip_existing: bool = False,
  ) -> None:
  # Docstring inherited.
  raise NotImplementedError()
lsst/daf/butler/remote_butler/_remote_file_transfer_source.py
@@ -25,6 +25,10 @@
  # You should have received a copy of the GNU General Public License
  # along with this program. If not, see <http://www.gnu.org/licenses/>.

+ from __future__ import annotations
+
+ __all__ = ["RemoteFileTransferSource"]
+
  from collections.abc import Callable, Iterable, Iterator
  from contextlib import contextmanager
  from typing import Any, cast
lsst/daf/butler/remote_butler/authentication/cadc.py
@@ -89,12 +89,13 @@ def _get_authentication_token_from_environment(server_url: str) -> str | None:

  Parameters
  ----------
- server_url (str): The URL of the server for which an authentication
- token is being retrieved.
+ server_url : The URL of the server for which an authentication
+ token is being retrieved.

  Returns
  -------
- str | None: The authentication token if available and hostname matches
+ str | None
+ The authentication token if available and hostname matches
  the whitelist; otherwise, None.
  """
  hostname = urlparse(server_url.lower()).hostname
lsst/daf/butler/script/_pruneDatasets.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["pruneDatasets"]
+
  import itertools
  from collections.abc import Callable, Iterable
  from enum import Enum, auto
@@ -57,11 +59,11 @@ class PruneDatasetsResult:

  Attributes
  ----------
- tables
+ tables : `list` [`astropy.table.Table`]
  Same as in Parameters.
  state : ``PruneDatasetsResult.State``
  The current state of the action.
- onConfirmation : `Callable[None, None]`
+ onConfirmation : `~collections.abc.Callable` [`None`, `None`]`
  The function to call to perform the action if the caller wants to
  confirm the tables before performing the action.
  """
lsst/daf/butler/script/configValidate.py
@@ -40,9 +40,9 @@ def configValidate(repo: str, quiet: bool, dataset_type: list[str], ignore: list
  URI to the location to create the repo.
  quiet : `bool`
  Do not report individual failures if True.
- dataset_type : `list`[`str`]
+ dataset_type : `list` [`str`]
  Specific DatasetTypes to validate.
- ignore : `list`[`str`]
+ ignore : `list` [`str`]
  DatasetTypes to ignore for validation.

  Returns
lsst/daf/butler/script/queryCollections.py
@@ -43,13 +43,13 @@ def _parseDatasetTypes(dataset_types: frozenset[str] | list[str] | None) -> list

  Parameters
  ----------
- dataset_types : `frozenset`[`str`] | `list`[`str`] | `None`
+ dataset_types : `frozenset` [`str`] | `list` [`str`] | `None`
  The dataset types to parse. If `None`, an empty list is returned.
  If a `frozenset` or `list` is provided, it is returned as a list.

  Returns
  -------
- dataset_types : `list`[`str`]
+ dataset_types : `list` [`str`]
  The parsed dataset types.
  """
  return [""] if not dataset_types else list(dataset_types)
lsst/daf/butler/script/removeCollections.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["removeCollections"]
+
  from collections.abc import Callable
  from dataclasses import dataclass
  from functools import partial
lsst/daf/butler/script/removeRuns.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["removeRuns"]
+
  from collections import defaultdict
  from collections.abc import Callable, Mapping, Sequence
  from dataclasses import dataclass
lsst/daf/butler/tests/cliCmdTestBase.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["CliCmdTestBase"]
+
  import abc
  import copy
  import os
lsst/daf/butler/tests/cliLogTestBase.py
@@ -35,6 +35,8 @@ lsst.log, and only uses it if it has been setup by another package.

  from __future__ import annotations

+ __all__ = ["CliLogTestBase"]
+
  import logging
  import os
  import re
lsst/daf/butler/tests/hybrid_butler.py
@@ -284,10 +284,15 @@ class HybridButler(Butler):
  *,
  transfer_dimensions: bool = False,
  dry_run: bool = False,
+ skip_existing: bool = False,
  ) -> None:
  # Docstring inherited.
  return self._direct_butler.ingest_zip(
- zip_file, transfer=transfer, transfer_dimensions=transfer_dimensions, dry_run=dry_run
+ zip_file,
+ transfer=transfer,
+ transfer_dimensions=transfer_dimensions,
+ dry_run=dry_run,
+ skip_existing=skip_existing,
  )

  def ingest(
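
The RemoteButler and HybridButler hunks add a skip_existing keyword to Butler.ingest_zip (RemoteButler still raises NotImplementedError). A hedged usage sketch against a direct butler, where the repo path, zip file name, transfer mode, and the skip-existing semantics are assumptions; only the keyword names come from the diff above:

    from lsst.daf.butler import Butler

    butler = Butler("/repo/main", writeable=True)   # illustrative repo path
    butler.ingest_zip(
        "exported_datasets.zip",
        transfer="auto",      # let the datastore choose a transfer mechanism
        dry_run=False,
        skip_existing=True,   # added in this diff; assumed to tolerate already-ingested datasets
    )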
lsst/daf/butler/tests/registry_data/spatial.py
@@ -44,6 +44,8 @@ in the future.

  from __future__ import annotations

+ __all__ = []
+
  import argparse
  import os.path
  from collections.abc import Callable, Iterable, Iterator
@@ -526,7 +528,7 @@ def plot_pixels(
  Pixelization that interprets ``indices``.
  wcs : `WCS`
  Tangent plane to project spherical polygons onto.
- indices : `Iterable` [ `int` ]
+ indices : `~collections.abc.Iterable` [ `int` ]
  Pixel indices to plot.
  *callbacks
  Callbacks to call for each pixel, passing the pixel index, the
@@ -555,7 +557,7 @@ def plot_hull(
  Pixelization that interprets ``indices``.
  wcs : `WCS`
  Tangent plane to project spherical polygons onto.
- indices : `Iterable` [ `int` ]
+ indices : `~collections.abc.Iterable` [ `int` ]
  Pixel indices to plot.
  *callbacks
  Callbacks to call passing the list of pixel indices, the
lsst/daf/butler/tests/utils.py
@@ -432,7 +432,7 @@ def mock_env(new_environment: dict[str, str]) -> Iterator[None]:

  Parameters
  ----------
- new_environment : `dict`[`str`, `str`]
+ new_environment : `dict` [`str`, `str`]
  New environment variable values.
  """
  with patch.dict(os.environ, new_environment, clear=True):
lsst/daf/butler/timespan_database_representation.py
@@ -188,13 +188,13 @@ class TimespanDatabaseRepresentation(ABC):
  name : `str`, optional
  Name for the logical column; a part of the name for multi-column
  representations. Defaults to ``cls.NAME``.
- result : `dict` [ `str`, `Any` ], optional
+ result : `dict` [ `str`, `typing.Any` ], optional
  A dictionary representing a database row that fields should be
  added to, or `None` to create and return a new one.

  Returns
  -------
- result : `dict` [ `str`, `Any` ]
+ result : `dict` [ `str`, `typing.Any` ]
  A dictionary containing this representation of a timespan. Exactly
  the `dict` passed as ``result`` if that is not `None`.
  """
@@ -207,7 +207,7 @@ class TimespanDatabaseRepresentation(ABC):

  Parameters
  ----------
- mapping : `~collections.abc.Mapping` [ `Any`, `Any` ]
+ mapping : `~collections.abc.Mapping` [ `typing.Any`, `typing.Any` ]
  A dictionary representing a database row containing a `Timespan`
  in this representation. Should have key(s) equal to the return
  value of `getFieldNames`.
lsst/daf/butler/version.py
@@ -1,2 +1,2 @@
  __all__ = ["__version__"]
- __version__ = "30.0.0rc3"
+ __version__ = "30.0.1"
{lsst_daf_butler-30.0.0rc3.dist-info → lsst_daf_butler-30.0.1.dist-info}/METADATA
@@ -1,10 +1,11 @@
  Metadata-Version: 2.4
  Name: lsst-daf-butler
- Version: 30.0.0rc3
+ Version: 30.0.1
  Summary: An abstraction layer for reading and writing astronomical data to datastores.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License-Expression: BSD-3-Clause OR GPL-3.0-or-later
  Project-URL: Homepage, https://github.com/lsst/daf_butler
+ Project-URL: Source, https://github.com/lsst/daf_butler
  Keywords: lsst
  Classifier: Intended Audience :: Science/Research
  Classifier: Operating System :: OS Independent
@@ -42,7 +43,7 @@ Provides-Extra: test
  Requires-Dist: pytest>=3.2; extra == "test"
  Requires-Dist: numpy>=1.17; extra == "test"
  Requires-Dist: matplotlib>=3.0.3; extra == "test"
- Requires-Dist: pandas>=1.0; extra == "test"
+ Requires-Dist: pandas<3.0,>=1.0; extra == "test"
  Provides-Extra: s3
  Requires-Dist: lsst-resources[s3]; extra == "s3"
  Provides-Extra: https