lsst-daf-butler 30.0.0rc2__py3-none-any.whl → 30.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. lsst/daf/butler/_butler.py +27 -8
  2. lsst/daf/butler/_butler_collections.py +4 -4
  3. lsst/daf/butler/_butler_metrics.py +51 -2
  4. lsst/daf/butler/_dataset_provenance.py +1 -1
  5. lsst/daf/butler/_dataset_ref.py +1 -1
  6. lsst/daf/butler/_exceptions.py +2 -2
  7. lsst/daf/butler/_file_dataset.py +2 -1
  8. lsst/daf/butler/_formatter.py +14 -7
  9. lsst/daf/butler/_labeled_butler_factory.py +28 -8
  10. lsst/daf/butler/_query_all_datasets.py +2 -0
  11. lsst/daf/butler/_rubin/temporary_for_ingest.py +207 -0
  12. lsst/daf/butler/cli/cmd/_remove_runs.py +1 -12
  13. lsst/daf/butler/column_spec.py +4 -4
  14. lsst/daf/butler/configs/datastores/formatters.yaml +1 -0
  15. lsst/daf/butler/configs/storageClasses.yaml +15 -0
  16. lsst/daf/butler/datastore/_datastore.py +21 -1
  17. lsst/daf/butler/datastore/record_data.py +1 -1
  18. lsst/daf/butler/datastore/stored_file_info.py +2 -2
  19. lsst/daf/butler/datastores/chainedDatastore.py +4 -0
  20. lsst/daf/butler/datastores/fileDatastore.py +26 -13
  21. lsst/daf/butler/datastores/file_datastore/get.py +4 -4
  22. lsst/daf/butler/datastores/file_datastore/retrieve_artifacts.py +5 -1
  23. lsst/daf/butler/datastores/file_datastore/transfer.py +2 -2
  24. lsst/daf/butler/datastores/inMemoryDatastore.py +8 -0
  25. lsst/daf/butler/ddl.py +2 -2
  26. lsst/daf/butler/dimensions/_coordinate.py +11 -8
  27. lsst/daf/butler/dimensions/_record_set.py +1 -1
  28. lsst/daf/butler/dimensions/_records.py +9 -3
  29. lsst/daf/butler/direct_butler/_direct_butler.py +85 -51
  30. lsst/daf/butler/direct_query_driver/_driver.py +5 -4
  31. lsst/daf/butler/direct_query_driver/_result_page_converter.py +1 -1
  32. lsst/daf/butler/formatters/parquet.py +6 -6
  33. lsst/daf/butler/logging.py +9 -3
  34. lsst/daf/butler/nonempty_mapping.py +1 -1
  35. lsst/daf/butler/persistence_context.py +8 -5
  36. lsst/daf/butler/queries/_general_query_results.py +1 -1
  37. lsst/daf/butler/queries/driver.py +1 -1
  38. lsst/daf/butler/queries/expression_factory.py +2 -2
  39. lsst/daf/butler/queries/expressions/parser/exprTree.py +1 -1
  40. lsst/daf/butler/queries/expressions/parser/parserYacc.py +1 -1
  41. lsst/daf/butler/queries/overlaps.py +2 -2
  42. lsst/daf/butler/queries/tree/_column_set.py +1 -1
  43. lsst/daf/butler/registry/_collection_record_cache.py +1 -1
  44. lsst/daf/butler/registry/_collection_summary_cache.py +5 -4
  45. lsst/daf/butler/registry/_registry.py +4 -0
  46. lsst/daf/butler/registry/bridge/monolithic.py +17 -13
  47. lsst/daf/butler/registry/databases/postgresql.py +2 -1
  48. lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py +1 -1
  49. lsst/daf/butler/registry/datasets/byDimensions/_manager.py +53 -47
  50. lsst/daf/butler/registry/datasets/byDimensions/summaries.py +3 -2
  51. lsst/daf/butler/registry/expand_data_ids.py +93 -0
  52. lsst/daf/butler/registry/interfaces/_database.py +6 -1
  53. lsst/daf/butler/registry/interfaces/_datasets.py +2 -1
  54. lsst/daf/butler/registry/interfaces/_obscore.py +1 -1
  55. lsst/daf/butler/registry/obscore/_records.py +1 -1
  56. lsst/daf/butler/registry/obscore/_spatial.py +2 -2
  57. lsst/daf/butler/registry/queries/_results.py +2 -2
  58. lsst/daf/butler/registry/sql_registry.py +3 -25
  59. lsst/daf/butler/registry/wildcards.py +5 -5
  60. lsst/daf/butler/remote_butler/_get.py +1 -1
  61. lsst/daf/butler/remote_butler/_remote_butler.py +6 -1
  62. lsst/daf/butler/remote_butler/_remote_file_transfer_source.py +4 -0
  63. lsst/daf/butler/remote_butler/authentication/cadc.py +4 -3
  64. lsst/daf/butler/script/_pruneDatasets.py +4 -2
  65. lsst/daf/butler/script/configValidate.py +2 -2
  66. lsst/daf/butler/script/queryCollections.py +2 -2
  67. lsst/daf/butler/script/removeCollections.py +2 -0
  68. lsst/daf/butler/script/removeRuns.py +2 -0
  69. lsst/daf/butler/tests/cliCmdTestBase.py +2 -0
  70. lsst/daf/butler/tests/cliLogTestBase.py +2 -0
  71. lsst/daf/butler/tests/hybrid_butler.py +10 -2
  72. lsst/daf/butler/tests/registry_data/lsstcam-subset.yaml +191 -0
  73. lsst/daf/butler/tests/registry_data/spatial.py +4 -2
  74. lsst/daf/butler/tests/testFormatters.py +2 -2
  75. lsst/daf/butler/tests/utils.py +1 -1
  76. lsst/daf/butler/timespan_database_representation.py +3 -3
  77. lsst/daf/butler/transfers/_context.py +7 -6
  78. lsst/daf/butler/version.py +1 -1
  79. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/METADATA +3 -2
  80. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/RECORD +88 -85
  81. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/WHEEL +1 -1
  82. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/entry_points.txt +0 -0
  83. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/COPYRIGHT +0 -0
  84. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/LICENSE +0 -0
  85. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/bsd_license.txt +0 -0
  86. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/licenses/gpl-v3.0.txt +0 -0
  87. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/top_level.txt +0 -0
  88. {lsst_daf_butler-30.0.0rc2.dist-info → lsst_daf_butler-30.0.1.dist-info}/zip-safe +0 -0
lsst/daf/butler/registry/queries/_results.py
@@ -319,7 +319,7 @@ class DataCoordinateQueryResults(QueryResultsBase, DataCoordinateIterable):
  datasetType : `DatasetType` or `str`
  Dataset type or the name of one to search for. Must have
  dimensions that are a subset of ``self.graph``.
- collections : `Any`
+ collections : `typing.Any`
  An expression that fully or partially identifies the collections
  to search for the dataset, such as a `str`, `re.Pattern`, or
  iterable thereof. ``...`` can be used to return all collections.
@@ -368,7 +368,7 @@ class DataCoordinateQueryResults(QueryResultsBase, DataCoordinateIterable):
  datasetType : `DatasetType` or `str`
  Dataset type or the name of one to search for. Must have
  dimensions that are a subset of ``self.graph``.
- collections : `Any`
+ collections : `typing.Any`
  An expression that fully or partially identifies the collections
  to search for the dataset, such as a `str`, `re.Pattern`, or
  iterable thereof. ``...`` can be used to return all collections.
lsst/daf/butler/registry/sql_registry.py
@@ -34,7 +34,6 @@ __all__ = ("SqlRegistry",)
  import contextlib
  import logging
  import warnings
- from collections import defaultdict
  from collections.abc import Iterable, Iterator, Mapping, Sequence
  from typing import TYPE_CHECKING, Any
@@ -54,7 +53,6 @@ from ..dimensions import (
  DataCoordinate,
  DataId,
  DimensionConfig,
- DimensionDataAttacher,
  DimensionElement,
  DimensionGroup,
  DimensionRecord,
@@ -78,6 +76,7 @@ from ..registry.interfaces import ChainedCollectionRecord, ReadOnlyDatabaseError
  from ..registry.managers import RegistryManagerInstances, RegistryManagerTypes
  from ..registry.wildcards import CollectionWildcard, DatasetTypeWildcard
  from ..utils import transactional
+ from .expand_data_ids import expand_data_ids

  if TYPE_CHECKING:
  from .._butler_config import ButlerConfig
@@ -1376,7 +1375,7 @@ class SqlRegistry:
  records = {}
  else:
  records = dict(records)
- if isinstance(dataId, DataCoordinate) and dataId.hasRecords():
+ if isinstance(dataId, DataCoordinate) and dataId.hasRecords() and not kwargs:
  for element_name in dataId.dimensions.elements:
  records[element_name] = dataId.records[element_name]
  keys: dict[str, str | int] = dict(standardized.mapping)
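Note: the `and not kwargs` guard closes a record-reuse hole in `expandDataId`. When a caller passes an already-expanded `DataCoordinate` together with keyword overrides, the attached records describe the original data ID rather than the overridden one, so they must be looked up again instead of copied. A minimal sketch of the situation, assuming an existing `SqlRegistry` named `registry`:

```python
# Hedged sketch; `registry` is assumed to be a SqlRegistry for a repository
# that knows the LSSTCam detectors used in this diff's test data.
expanded = registry.expandDataId({"instrument": "LSSTCam", "detector": 10})
assert expanded.hasRecords()  # detector 10 records are now attached

# The keyword override changes the data ID, so carrying the attached
# detector 10 records forward would be wrong; with the new guard the
# detector 11 records are fetched fresh instead.
shifted = registry.expandDataId(expanded, detector=11)
```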
@@ -1415,28 +1414,7 @@ class SqlRegistry:
  return DataCoordinate.standardize(keys, dimensions=standardized.dimensions).expanded(records=records)

  def expand_data_ids(self, data_ids: Iterable[DataCoordinate]) -> list[DataCoordinate]:
- output = list(data_ids)
-
- grouped_by_dimensions: defaultdict[DimensionGroup, list[int]] = defaultdict(list)
- for i, data_id in enumerate(data_ids):
- if not data_id.hasRecords():
- grouped_by_dimensions[data_id.dimensions].append(i)
-
- if not grouped_by_dimensions:
- # All given DataCoordinate values are already expanded.
- return output
-
- attacher = DimensionDataAttacher(
- cache=self.dimension_record_cache,
- dimensions=DimensionGroup.union(*grouped_by_dimensions.keys(), universe=self.dimensions),
- )
- with self._query() as query:
- for dimensions, indexes in grouped_by_dimensions.items():
- expanded = attacher.attach(dimensions, (output[index] for index in indexes), query)
- for index, data_id in zip(indexes, expanded):
- output[index] = data_id
-
- return output
+ return expand_data_ids(data_ids, self.dimensions, self._query, self.dimension_record_cache)

  def expand_refs(self, dataset_refs: list[DatasetRef]) -> list[DatasetRef]:
  expanded_ids = self.expand_data_ids([ref.dataId for ref in dataset_refs])
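Note: the inline implementation deleted above now lives in the new lsst/daf/butler/registry/expand_data_ids.py module (item 51 in the file list) so that `RemoteButler` can share it. A minimal sketch of the extracted helper, reconstructed from the deleted body and the two call sites visible in this diff; the shipped module may differ in details such as type annotations and cache handling:

```python
from collections import defaultdict
from collections.abc import Callable, Iterable
from contextlib import AbstractContextManager
from typing import Any

from lsst.daf.butler.dimensions import (
    DataCoordinate,
    DimensionDataAttacher,
    DimensionGroup,
    DimensionUniverse,
)


def expand_data_ids(
    data_ids: Iterable[DataCoordinate],
    universe: DimensionUniverse,
    query_factory: Callable[[], AbstractContextManager[Any]],
    cache: Any = None,
) -> list[DataCoordinate]:
    output = list(data_ids)

    # Group the not-yet-expanded data IDs by their dimensions so that each
    # group can be filled in with a single query.
    grouped: defaultdict[DimensionGroup, list[int]] = defaultdict(list)
    for i, data_id in enumerate(output):
        if not data_id.hasRecords():
            grouped[data_id.dimensions].append(i)
    if not grouped:
        # All given DataCoordinate values are already expanded.
        return output

    attacher = DimensionDataAttacher(
        cache=cache,
        dimensions=DimensionGroup.union(*grouped.keys(), universe=universe),
    )
    with query_factory() as query:
        for dimensions, indexes in grouped.items():
            expanded = attacher.attach(dimensions, (output[i] for i in indexes), query)
            for index, data_id in zip(indexes, expanded):
                output[index] = data_id
    return output
```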
lsst/daf/butler/registry/wildcards.py
@@ -97,14 +97,14 @@ class CategorizedWildcard:
  coerceUnrecognized : `~collections.abc.Callable`, optional
  A callback that takes a single argument of arbitrary type and
  returns either a `str` - appended to `strings` - or a `tuple` of
- (`str`, `Any`) to be appended to `items`. This will be called on
- objects of unrecognized type. Exceptions will be reraised as
- `TypeError` (and chained).
+ (`str`, `typing.Any`) to be appended to `items`. This will be
+ called on objects of unrecognized type. Exceptions will be reraised
+ as `TypeError` (and chained).
  coerceItemValue : `~collections.abc.Callable`, optional
  If provided, ``expression`` may be a mapping from `str` to any
  type that can be passed to this function; the result of that call
  will be stored instead as the value in ``self.items``.
- defaultItemValue : `Any`, optional
+ defaultItemValue : `typing.Any`, optional
  If provided, combine this value with any string values encountered
  (including any returned by ``coerceUnrecognized``) to form a
  `tuple` and add it to `items`, guaranteeing that `strings` will be
@@ -267,7 +267,7 @@ class CategorizedWildcard:

  items: list[tuple[str, Any]]
  """Two-item tuples that relate string values to other objects
- (`list` [ `tuple` [ `str`, `Any` ] ]).
+ (`list` [ `tuple` [ `str`, `typing.Any` ] ]).
  """
lsst/daf/butler/remote_butler/_get.py
@@ -39,7 +39,7 @@ def get_dataset_as_python_object(
  auth : `RemoteButlerAuthenticationProvider`
  Provides authentication headers for HTTP service hosting the artifact
  files.
- parameters : `Mapping`[`str`, `typing.Any`]
+ parameters : `~collections.abc.Mapping` [`str`, `typing.Any`]
  `StorageClass` and `Formatter` parameters to be used when converting
  the artifact to a Python object.
  cache_manager : `AbstractDatastoreCacheManager` or `None`, optional
lsst/daf/butler/remote_butler/_remote_butler.py
@@ -65,6 +65,7 @@ from ..dimensions import DataCoordinate, DataIdValue, DimensionConfig, Dimension
  from ..queries import Query
  from ..queries.tree import make_column_literal
  from ..registry import CollectionArgType, NoDefaultCollectionError, Registry, RegistryDefaults
+ from ..registry.expand_data_ids import expand_data_ids
  from ._collection_args import convert_collection_arg_to_glob_string_list
  from ._defaults import DefaultsHolder
  from ._get import convert_http_url_to_resource_path, get_dataset_as_python_object
@@ -603,6 +604,7 @@ class RemoteButler(Butler): # numpydoc ignore=PR02
  *,
  transfer_dimensions: bool = False,
  dry_run: bool = False,
+ skip_existing: bool = False,
  ) -> None:
  # Docstring inherited.
  raise NotImplementedError()
@@ -633,7 +635,7 @@ class RemoteButler(Butler): # numpydoc ignore=PR02
  raise NotImplementedError()

  def transfer_dimension_records_from(
- self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef]
+ self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef | DataCoordinate]
  ) -> None:
  # Docstring inherited.
  raise NotImplementedError()
@@ -738,6 +740,9 @@ class RemoteButler(Butler): # numpydoc ignore=PR02
  def close(self) -> None:
  pass

+ def _expand_data_ids(self, data_ids: Iterable[DataCoordinate]) -> list[DataCoordinate]:
+ return expand_data_ids(data_ids, self.dimensions, self.query, None)
+
  @property
  def _file_transfer_source(self) -> RemoteFileTransferSource:
  return RemoteFileTransferSource(self._connection)
lsst/daf/butler/remote_butler/_remote_file_transfer_source.py
@@ -25,6 +25,10 @@
  # You should have received a copy of the GNU General Public License
  # along with this program. If not, see <http://www.gnu.org/licenses/>.

+ from __future__ import annotations
+
+ __all__ = ["RemoteFileTransferSource"]
+
  from collections.abc import Callable, Iterable, Iterator
  from contextlib import contextmanager
  from typing import Any, cast
lsst/daf/butler/remote_butler/authentication/cadc.py
@@ -89,12 +89,13 @@ def _get_authentication_token_from_environment(server_url: str) -> str | None:

  Parameters
  ----------
- server_url (str): The URL of the server for which an authentication
- token is being retrieved.
+ server_url : The URL of the server for which an authentication
+ token is being retrieved.

  Returns
  -------
- str | None: The authentication token if available and hostname matches
+ str | None
+ The authentication token if available and hostname matches
  the whitelist; otherwise, None.
  """
  hostname = urlparse(server_url.lower()).hostname
lsst/daf/butler/script/_pruneDatasets.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["pruneDatasets"]
+
  import itertools
  from collections.abc import Callable, Iterable
  from enum import Enum, auto
@@ -57,11 +59,11 @@ class PruneDatasetsResult:

  Attributes
  ----------
- tables
+ tables : `list` [`astropy.table.Table`]
  Same as in Parameters.
  state : ``PruneDatasetsResult.State``
  The current state of the action.
- onConfirmation : `Callable[None, None]`
+ onConfirmation : `~collections.abc.Callable` [`None`, `None`]`
  The function to call to perform the action if the caller wants to
  confirm the tables before performing the action.
  """
lsst/daf/butler/script/configValidate.py
@@ -40,9 +40,9 @@ def configValidate(repo: str, quiet: bool, dataset_type: list[str], ignore: list
  URI to the location to create the repo.
  quiet : `bool`
  Do not report individual failures if True.
- dataset_type : `list`[`str`]
+ dataset_type : `list` [`str`]
  Specific DatasetTypes to validate.
- ignore : `list`[`str`]
+ ignore : `list` [`str`]
  DatasetTypes to ignore for validation.

  Returns
lsst/daf/butler/script/queryCollections.py
@@ -43,13 +43,13 @@ def _parseDatasetTypes(dataset_types: frozenset[str] | list[str] | None) -> list

  Parameters
  ----------
- dataset_types : `frozenset`[`str`] | `list`[`str`] | `None`
+ dataset_types : `frozenset` [`str`] | `list` [`str`] | `None`
  The dataset types to parse. If `None`, an empty list is returned.
  If a `frozenset` or `list` is provided, it is returned as a list.

  Returns
  -------
- dataset_types : `list`[`str`]
+ dataset_types : `list` [`str`]
  The parsed dataset types.
  """
  return [""] if not dataset_types else list(dataset_types)
lsst/daf/butler/script/removeCollections.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["removeCollections"]
+
  from collections.abc import Callable
  from dataclasses import dataclass
  from functools import partial
lsst/daf/butler/script/removeRuns.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["removeRuns"]
+
  from collections import defaultdict
  from collections.abc import Callable, Mapping, Sequence
  from dataclasses import dataclass
lsst/daf/butler/tests/cliCmdTestBase.py
@@ -26,6 +26,8 @@
  # along with this program. If not, see <http://www.gnu.org/licenses/>.
  from __future__ import annotations

+ __all__ = ["CliCmdTestBase"]
+
  import abc
  import copy
  import os
lsst/daf/butler/tests/cliLogTestBase.py
@@ -35,6 +35,8 @@ lsst.log, and only uses it if it has been setup by another package.

  from __future__ import annotations

+ __all__ = ["CliLogTestBase"]
+
  import logging
  import os
  import re
lsst/daf/butler/tests/hybrid_butler.py
@@ -284,10 +284,15 @@ class HybridButler(Butler):
  *,
  transfer_dimensions: bool = False,
  dry_run: bool = False,
+ skip_existing: bool = False,
  ) -> None:
  # Docstring inherited.
  return self._direct_butler.ingest_zip(
- zip_file, transfer=transfer, transfer_dimensions=transfer_dimensions, dry_run=dry_run
+ zip_file,
+ transfer=transfer,
+ transfer_dimensions=transfer_dimensions,
+ dry_run=dry_run,
+ skip_existing=skip_existing,
  )

  def ingest(
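Note: `ingest_zip` gains a `skip_existing` keyword across the Butler variants in this release (`HybridButler` forwards it, `RemoteButler` still raises `NotImplementedError`, and the working implementation is in _direct_butler.py, item 29 in the file list). A hedged usage sketch; the name suggests that datasets already present in the repository are skipped rather than treated as conflicts, but the `DirectButler` docstring is authoritative:

```python
from lsst.daf.butler import Butler

# Assumes an existing repository at "repo" and a previously produced
# butler Zip export named "exported_datasets.zip".
butler = Butler("repo", writeable=True)
butler.ingest_zip("exported_datasets.zip", transfer="copy", skip_existing=True)
```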
@@ -338,7 +343,7 @@ class HybridButler(Butler):
  )

  def transfer_dimension_records_from(
- self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef]
+ self, source_butler: LimitedButler | Butler, source_refs: Iterable[DatasetRef | DataCoordinate]
  ) -> None:
  return self._direct_butler.transfer_dimension_records_from(source_butler, source_refs)
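Note: `transfer_dimension_records_from` now accepts bare data IDs as well as dataset refs (the `RemoteButler` stub changes identically above). A short sketch under the assumption of two already-constructed butlers named `source_butler` and `target_butler`:

```python
# Any DataCoordinate can now seed the record transfer; expansion attaches
# the dimension records that will be copied to the target repository.
data_id = source_butler.registry.expandDataId(
    {"instrument": "LSSTCam", "exposure": 2025120200439}
)
target_butler.transfer_dimension_records_from(source_butler, [data_id])
```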
@@ -425,6 +430,9 @@ class HybridButler(Butler):
  source_butler, data_ids, allowed_elements
  )

+ def _expand_data_ids(self, data_ids: Iterable[DataCoordinate]) -> list[DataCoordinate]:
+ return self._remote_butler._expand_data_ids(data_ids)
+
  @property
  def collection_chains(self) -> ButlerCollections:
  return HybridButlerCollections(self)
lsst/daf/butler/tests/registry_data/lsstcam-subset.yaml
@@ -0,0 +1,191 @@
+ description: Butler Data Repository Export
+ version: 1.0.2
+ universe_version: 7
+ universe_namespace: daf_butler
+ data:
+ - type: dimension
+   element: instrument
+   records:
+   - name: LSSTCam
+     visit_max: 6050123199999
+     visit_system: 2
+     exposure_max: 6050123199999
+     detector_max: 1000
+     class_name: lsst.obs.lsst.LsstCam
+ - type: dimension
+   element: day_obs
+   records:
+   - instrument: LSSTCam
+     id: 20251202
+     datetime_begin: !butler_time/tai/iso '2025-12-02 12:00:00.000000000'
+     datetime_end: !butler_time/tai/iso '2025-12-03 12:00:00.000000000'
+ - type: dimension
+   element: detector
+   records:
+   - instrument: LSSTCam
+     id: 10
+     full_name: R02_S01
+     name_in_raft: S01
+     raft: R02
+     purpose: SCIENCE
+   - instrument: LSSTCam
+     id: 11
+     full_name: R02_S02
+     name_in_raft: S02
+     raft: R02
+     purpose: SCIENCE
+ - type: dimension
+   element: group
+   records:
+   - instrument: LSSTCam
+     name: '2025-12-03T07:58:10.858'
+   - instrument: LSSTCam
+     name: '2025-12-03T07:58:25.583'
+ - type: dimension
+   element: physical_filter
+   records:
+   - instrument: LSSTCam
+     name: z_20
+     band: z
+ - type: dimension
+   element: visit_system
+   records:
+   - instrument: LSSTCam
+     id: 0
+     name: one-to-one
+   - instrument: LSSTCam
+     id: 2
+     name: by-seq-start-end
+ - type: dimension
+   element: exposure
+   records:
+   - instrument: LSSTCam
+     id: 2025120200439
+     day_obs: 20251202
+     group: '2025-12-03T07:58:10.858'
+     physical_filter: z_20
+     obs_id: MC_O_20251202_000439
+     exposure_time: 30.0
+     dark_time: 30.9296
+     observation_type: science
+     observation_reason: singles_z
+     seq_num: 439
+     seq_start: 439
+     seq_end: 439
+     target_name: lowdust
+     science_program: BLOCK-407
+     tracking_ra: 104.55395819494994
+     tracking_dec: -42.69572623114717
+     sky_angle: 314.98128613456913
+     azimuth: 222.066471281557
+     zenith_angle: 18.114896072334602
+     has_simulated: false
+     can_see_sky: true
+     datetime_begin: !butler_time/tai/iso '2025-12-03 07:59:07.425363958'
+     datetime_end: !butler_time/tai/iso '2025-12-03 07:59:38.355000000'
+   - instrument: LSSTCam
+     id: 2025120200440
+     day_obs: 20251202
+     group: '2025-12-03T07:58:25.583'
+     physical_filter: z_20
+     obs_id: MC_O_20251202_000440
+     exposure_time: 30.0
+     dark_time: 30.9303
+     observation_type: science
+     observation_reason: singles_z
+     seq_num: 440
+     seq_start: 440
+     seq_end: 440
+     target_name: dusty_plane
+     science_program: BLOCK-407
+     tracking_ra: 125.6049150178604
+     tracking_dec: -49.58422076567679
+     sky_angle: 250.7803018484177
+     azimuth: 171.977440002199
+     zenith_angle: 19.626699659649404
+     has_simulated: false
+     can_see_sky: true
+     datetime_begin: !butler_time/tai/iso '2025-12-03 08:00:33.880859613'
+     datetime_end: !butler_time/tai/iso '2025-12-03 08:01:04.811000000'
+ - type: dimension
+   element: visit
+   records:
+   - instrument: LSSTCam
+     id: 2025120200439
+     day_obs: 20251202
+     physical_filter: z_20
+     name: MC_O_20251202_000439
+     seq_num: 439
+     exposure_time: 30.0
+     target_name: lowdust
+     observation_reason: singles_z
+     science_program: BLOCK-407
+     azimuth: 222.066471281557
+     zenith_angle: 18.114896072334602
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 709f2616dbe950cbbfd1695e3d4c3fe63fec1e82d2d2f6e5bfd2b5f450bb59cbbfc947227f0f40e63f0132ed675df5e5bf0e2bc2a2ffc4cbbf810ce8d04b49e63f79c1356b8ee3e5bf55733b8d76efcbbf55bb2b0eff65e63f00319fe5c9c2e5bf5293160b3013ccbfe00ef1748288e63f20e6025c269ce5bf9472e2dae725ccbfd6bf6c4ecca6e63fd893dbc5dc7ae5bf059a6d18e7dacbbffbc59646b1bce63f3cd6c18ed069e5bfe3682cba03d6cbbf7f2a664f1dbee63f4cdc89a5b368e5bf8f31700ad460cabfad34b2ed0629e73fce2af391f212e5bfb93570b2ea14cabfbb3cbfc3553ee73f65cda55c5d01e5bfaa95285be60fcabf44c68b02bd3fe73f725765643300e5bfd596a360e4c8c9bf1aa7c5df8853e73fe49dfa9eb3efe4bfc3bf8f38dec3c9bfdcc9f542ee54e73f8ad7cc5188eee4bf52449e14bc77c9bffd8e0bf9026ae73fc28a1e74c9dce4bfaa653f44f1cac8bfb4471a597377e73f23f99718b2dae4bfcfca446b61ffc7bf205883b3a484e73ffac60081b7dae4bf1b7f26ebfc4fc7bf107e39bbbd8de73fe8b6a209e2dce4bf8c96310e80e4c6bf3dfb007d9184e73fe8547f02a4eee4bf2d91e593aadbc6bfb878a25fcf83e73f25065d8018f0e4bff8203259e278c6bf2a924e25497be73f16ae16315200e5bfc705f6580a70c6bfc58bd9bc847ae73fd6abe006c501e5bf98864073840cc6bfc66970b9d671e73ff758c57c0612e5bfb05a4f834d04c6bf5070965b1e71e73f2c52e1025d13e5bf3c02ecaab0a0c5bf453834035768e73f2b811be68a23e5bf50d30993d497c5bfb61542168e67e73f0191603efa24e5bff63e7ed0c734c5bf40444235bc5ee73fbf4b6f7ef934e5bff0a3512aea2bc5bf6968a312f15de73ffaf8b3046736e5bfa540b9392ac8c4bf54da11e0f754e73f2b2d1ad76b46e5bfef445af3eebfc4bfd51888583954e73fbfbdad49bd47e5bf8d81e404235cc4bf3722beca274be73fbcb56ac4ac57e5bfd8746d3b4353c4bfb0fd2f50584ae73f1edb5a7c1659e5bf1f7d83b312f0c3bf76c248b33d41e73f09c34601d668e5bf76ddad5832e7c3bf98b9361a6c40e73f66e74ac03d6ae5bf126d08c51c7bc3bf4bc9844c6636e73fe7fcf5ad4c7be5bff79a2522cb50c3bf1912e0baed19e73fd89d11c4479ce5bf796b0bce112dc3bf31834f7c6af7e63f23360174ebc2e5bf9da79734d019c3bfac3b3634e0d8e63fae06ac0707e4e5bf37d035bea165c3bfae03c050c4c3e63f0e8ce6e9d2f5e5bfdbb43449a36ac3bf739a64845ec2e63fcc7ed4f5fef6e5bfa68433606db1c3bfd37828ad8daee63f7cfdfbc18707e6bfd8bafc6c6eb6c3bfcaeb562326ade63f829a4d42b208e6bfe7c6c2eca2fdc3bfc929ecac1c99e63fe8f5377f3f19e6bfeb14be8f3002c4bf258586cfd397e63fabc8abcc4d1ae6bf523822005a49c4bf5c9555a7b283e63fb7ed541bc42ae6bf246f067e594ec4bf311eacc64782e63fc3065e60eb2be6bf461132970295c4bf33eda92b306ee63f932f23b12f3ce6bffcaf3307019ac4bfa81b84b4c36ce63feb3d5e49553de6bf71e1f9600ce1c4bf4a9842387558e63f8e794ec69b4de6bf6ecaeb2197e5c4bf12c2b7022857e63fbbc9727ea54ee6bf4e987c9b8f2cc5bfe98f9e0dc442e63f45eb8440d35ee6bf893587628b31c5bf71a0d58f5441e63f9fe5875cf55fe6bff1b4af1efc77c5bf289d5cd6fc2ce63f66f284d0ef6fe6bf9e46544df67cc5bfbff75aeb8b2be63f0e0af51f1071e6bf5d01f29e47c8c5bf120a4e20ae15e63f9b669d500f82e6bfa7a8216b5075c6bfba85f16f7908e63fb5d518915f84e6bf0134dd6de040c7bff039bd3d48fbe53f4a55034f5a84e6bf29eb578f1ff0c7bfdfddfc60f4f1e53ff1ba880ff981e6bf12b7fda96d5cc8bf857c661beafbe53ff688b825f770e6bfbbd9e9085265c8bf5ecd1890bafce53fada16b53906fe6bfb05d8e01a9c8c8bf3e19603eca05e63fdcbb5fa8d95fe6bfcf3f55a88bd1c8bf266160e89806e63f93764a99705ee6bf495d8b527135c9bf3e0c8624a30fe63fa0078f0b874ee6bf5a6c6d30ae3dc9bf03ca26386110e63fb9eed4f6354de6bfaec07aff7aa1c9bfa3fd86c15619e63fbc3e491b343de6bf49d4893759aac9bf8ef5a0ba211ae63f8c02a4b6c63be6bfc27a35f8650dcabf3ee6bf9bf322e63f794f9576c72be6bf044adf7d4116cabfc67e3cb2bc23e63ffccfb0fc572ae6bf6ca90674d179cabfd3c60eb4872ce63f3a2c9c22271ae6bf9be124cc0682cabf05f2d685402de63f9dbca63ed018e6bfe80d50f272e5cabfe14523dbf535e63ff795c0db8808e6bf4ce6e51448eecabfaac32b14bb36e63f7fb91b5d1507e6bf
+     datetime_begin: !butler_time/tai/iso '2025-12-03 07:59:07.425363958'
+     datetime_end: !butler_time/tai/iso '2025-12-03 07:59:38.355000000'
+   - instrument: LSSTCam
+     id: 2025120200440
+     day_obs: 20251202
+     physical_filter: z_20
+     name: MC_O_20251202_000440
+     seq_num: 440
+     exposure_time: 30.0
+     target_name: dusty_plane
+     observation_reason: singles_z
+     science_program: BLOCK-407
+     azimuth: 171.977440002199
+     zenith_angle: 19.626699659649404
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 70e7009720b815dabfc5f19be05fdee03f0374b5cc65dce7bf848c54708316dabf6a50767e7de0e03f4e321a2dafdae7bf770d59cb1c20dabf5fa1dcbc34fae03f79f51125c6c5e7bf918adb0e9ef4d9bf22cdc451b71ee13f04bb62207cb7e7bf75a90e30e4bbd9bf01f6718e6d48e13f59f4b8ebaaa8e7bf77139cd67486d9bfda7ca3cf3a6be13f45adb02e9b9de7bf9bf1e32fb349d9bf2fd044bb2477e13f174697a624a5e7bf51667792bd45d9bfc5230595ea77e13f91e0f46aa1a5e7bfae0420495617d8bf7b5c2fb3beb1e13f55ebed4c70c9e7bfd357b434e2d9d7bff11617e52dbde13fa62d199266d0e7bfe61dae89d2d5d7bf63f6676deebde13f12f5675fdbd0e7bf823d9b54589cd7bf829a6e3087c8e13fc498877443d7e7bfb8bdd04d4798d7bf8ec0c54246c9e13fd311bd92b6d7e7bf3f7d34bca85ad7bf4ed448c087d4e13f0796b12978dee7bf385b6bf38013d7bfca7dab7b42c9e13f8ad4b73520f8e7bf6b3751165ec4d6bf4922fe1c42b9e13ff508713ed216e8bf58256fe33a84d6bffc1eb55e20a9e13f74974b82ae31e8bf8efcaec0737ad6bf4b21ecca718fe13fda1767e39c46e8bfd99e4a30a579d6bfcb1cb1ca548de13f93ba47e45348e8bfae71c0a09670d6bfb6050655ac75e13fa84e46b0725be8bf5e705845c66fd6bfcfed627e8d73e13f3ef030a3275de8bfae239bad9466d6bfa51cdfb2a95be13fcc8c68714e70e8bf4c44f6afd165d6bfdd4fe054b059e13fe29357f0e171e8bfd6269f5d8c5cd6bf0695e92cb941e13f2a63cb8bf084e8bf2664977db85bd6bf43e75ce2963fe13f1837032ea186e8bf1a232b076f52d6bfe4361e9ab427e13fe0d841b87799e8bf0dcd2d719951d6bfda19cab49025e13fdac9b21b269be8bff62870872d48d6bfa5345059750de13fe5fddf0f02aee8bf093aabc46547d6bfc5c7a881770be13f7fa9cd4b8fafe8bf5cfe4d27e73dd6bf6e620f6e4bf3e03f10a4c5f750c2e8bf14af9a3b0e3dd6bfe276788d24f1e03f8e3d5aabfac3e8bfc0bc9bf08c33d6bfcdffe15810d9e03f9257d6b882d6e8bf579fd368b232d6bffcc5961ae8d6e03fcf2641ff29d8e8bf90c89a3c4328d6bf275a748f9bbce03fb4c634643bece8bfae79141af653d6bf37bf50a54198e03fbfc59102c6fae8bf38dd8b23b08cd6bfa5676e868b6ee03f447143629709e9bf2c0dc12bd6c1d6bf9e40c6b6814be03fa7df62af7514e9bfcc24ad2e6dffd6bfed0cb3c92c40e03ff340d16cc30de9bf2774bac67d03d7bfea73909a6c3fe03f2f3a3c2f510de9bf09ae51bef23cd7bf4b368f5dc634e03f168838bbf306e9bf0dc1591a0241d7bf0db8170a0534e03f520b258e7f06e9bfb1d5b28bc17ad7bf4d4786693d29e03f2cd4c842fcffe8bfe525ac81727ed7bf1d19e7638c28e03fe098eebd90ffe8bf216448241db8d7bf25794811b51de03f8279353df2f8e8bf6003ad9129bcd7bf39769287f11ce03f8208803a7af8e8bf8cf9e36860f5d7bf53bfad6b1c12e03f68422983cbf1e8bfa517ce266bf9d7bfd34c26d25711e03f76f3839d51f1e8bf9384dfabe532d8bfa9d338306206e03f53fea9417deae8bff43652059236d8bffb16fa42ae05e03f1d2961930ceae8bf17669267f16fd8bff4be754354f5df3f30fd62bf1de3e8bfe13a7452f873d8bfe1cd65fac6f3df3f55bfc81ea0e2e8bf8f6eac1cdeacd8bf3ee84f2fc6dddf3f25eff559a2dbe8bf559d0be7e2b0d8bf8a781bf436dcdf3f851f8de422dbe8bfb7292e16acedd8bf64e874f989c4df3f304414188ad3e8bf05a984351335d9bf231dd4f66cdbdf3f38ac3ced21bae8bf434f493d3684d9bfb876c4ef6dfbdf3fa85c8b0f709be8bfce58ca2b25c4d9bf1b0714efaf0de03fae3b29045380e8bf2e7a211ec2ced9bf6f9945cff327e03f69a3c2463c6ce8bfa49a6ee69fcfd9bf43a4ea6f1b2ae03f678f1f9f946ae8bf1d1d505240d9d9bfb0d471bf2942e03fc7e813ef0858e8bf4dced98f1bdad9bfbd57932f5044e03fe5c127f65e56e8bff1092604afe3d9bffae8ce50785ce03f211345db9a43e8bfa4b6f21078e4d9bfc2f3f3e9755ee03f3e81cf780d42e8bf2cc00567eeedd9bfc23df04b8f76e03faa6d2c4d302fe8bf54cb3473c4eed9bf3d0add1ab378e03f24eff1db812de8bfbd6fe6e80df8d9bfb205f6629590e03f8aaafb51ab1ae8bf8a169b52e1f8d9bf3844e3c3b792e03ffd579abdfa18e8bf5bcb24371c02dabf23a00be5b0aae03f7c372e5aed05e8bffadda2eadd02dabf72888681aaace03fe621d3015a04e8bfce02f8a9fa0bdabf6ef81b3f92c4e03f497a18a335f1e7bf2e0768b3c80cdabf7bae2d86b1c6e03f2c6e92f580efe7bf
+     datetime_begin: !butler_time/tai/iso '2025-12-03 08:00:33.880859613'
+     datetime_end: !butler_time/tai/iso '2025-12-03 08:01:04.811000000'
+ - type: dimension
+   element: visit_definition
+   records:
+   - instrument: LSSTCam
+     exposure: 2025120200439
+     visit: 2025120200439
+   - instrument: LSSTCam
+     exposure: 2025120200440
+     visit: 2025120200440
+ - type: dimension
+   element: visit_detector_region
+   records:
+   - instrument: LSSTCam
+     detector: 10
+     visit: 2025120200439
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 70aec07aff7aa1c9bfa3fd86c15619e63fbc3e491b343de6bf044adf7d4116cabfc67e3cb2bc23e63ffccfb0fc572ae6bfcc8d75f09fc6c9bf028d3797543be63faedbfdf98518e6bf0d0ea2dbce51c9bf8d5d2fb5ed30e63fc2eb4fce632be6bf
+   - instrument: LSSTCam
+     detector: 10
+     visit: 2025120200440
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 702cc00567eeedd9bfc23df04b8f76e03faa6d2c4d302fe8bf8a169b52e1f8d9bf3844e3c3b792e03ffd579abdfa18e8bf7bc4b8488cb8d9bf5b9fabb9969fe03fa081fed15b21e8bf6801f65e98add9bf492e2eb46b83e03f74180d659337e8bf
+   - instrument: LSSTCam
+     detector: 11
+     visit: 2025120200439
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 70495d8b527135c9bf3e0c8624a30fe63fa0078f0b874ee6bf49d4893759aac9bf8ef5a0ba211ae63f8c02a4b6c63be6bfcb9e2ad7ad5ac9bf110bf8c3b831e63fcde15146f629e6bffd38b867bce5c8bf7e0f9ee33827e63ff8f84889b83ce6bf
+   - instrument: LSSTCam
+     detector: 11
+     visit: 2025120200440
+     region: !<lsst.sphgeom.ConvexPolygon>
+ encoded: 70f1092604afe3d9bffae8ce50785ce03f211345db9a43e8bf54cb3473c4eed9bf3d0add1ab378e03f24eff1db812de8bf71c632766eaed9bfc1f498b68f85e03fab7510cee435e8bf1a15eee158a3d9bfb69bb42c5269e03f5fa03eadff4be8bf
+ - type: dimension
+   element: visit_system_membership
+   records:
+   - instrument: LSSTCam
+     visit_system: 0
+     visit: 2025120200439
+   - instrument: LSSTCam
+     visit_system: 0
+     visit: 2025120200440
+   - instrument: LSSTCam
+     visit_system: 2
+     visit: 2025120200439
+   - instrument: LSSTCam
+     visit_system: 2
+     visit: 2025120200440
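Note: the new lsstcam-subset.yaml above is a standard Butler registry export (two LSSTCam science exposures with their visits, detectors, and spatial regions) intended as test data. Test code would typically load it with `Butler.import_`, roughly as follows; the repository path is a placeholder:

```python
from lsst.daf.butler import Butler

# Sketch only: assumes a freshly created test repository at "test_repo".
butler = Butler("test_repo", writeable=True)
butler.import_(filename="lsst/daf/butler/tests/registry_data/lsstcam-subset.yaml")
```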
lsst/daf/butler/tests/registry_data/spatial.py
@@ -44,6 +44,8 @@ in the future.

  from __future__ import annotations

+ __all__ = []
+
  import argparse
  import os.path
  from collections.abc import Callable, Iterable, Iterator
@@ -526,7 +528,7 @@ def plot_pixels(
  Pixelization that interprets ``indices``.
  wcs : `WCS`
  Tangent plane to project spherical polygons onto.
- indices : `Iterable` [ `int` ]
+ indices : `~collections.abc.Iterable` [ `int` ]
  Pixel indices to plot.
  *callbacks
  Callbacks to call for each pixel, passing the pixel index, the
@@ -555,7 +557,7 @@ def plot_hull(
  Pixelization that interprets ``indices``.
  wcs : `WCS`
  Tangent plane to project spherical polygons onto.
- indices : `Iterable` [ `int` ]
+ indices : `~collections.abc.Iterable` [ `int` ]
  Pixel indices to plot.
  *callbacks
  Callbacks to call passing the list of pixel indices, the
lsst/daf/butler/tests/testFormatters.py
@@ -272,8 +272,8 @@ class MetricsExampleDataFormatter(Formatter):
  # Update the location with the formatter-preferred file extension
  fileDescriptor.location.updateExtension(self.extension)

- with open(fileDescriptor.location.path, "w") as fd:
- yaml.dump(inMemoryDataset, fd)
+ data = yaml.dump(inMemoryDataset)
+ fileDescriptor.location.uri.write(data.encode("utf-8"))

  class MetricsExampleModelProvenanceFormatter(JsonFormatter):
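Note: replacing the local `open()` call with `ResourcePath.write` lets this test formatter write to any URI scheme supported by lsst-resources (local files, S3, WebDAV, and so on) instead of POSIX paths only. The same pattern in isolation, as a sketch:

```python
import tempfile

import yaml
from lsst.resources import ResourcePath

with tempfile.TemporaryDirectory() as tmp:
    # ResourcePath.write takes bytes, so serialize to YAML first and encode.
    uri = ResourcePath(tmp, forceDirectory=True).join("data.yaml")
    uri.write(yaml.dump({"a": 1}).encode("utf-8"))
```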
lsst/daf/butler/tests/utils.py
@@ -432,7 +432,7 @@ def mock_env(new_environment: dict[str, str]) -> Iterator[None]:

  Parameters
  ----------
- new_environment : `dict`[`str`, `str`]
+ new_environment : `dict` [`str`, `str`]
  New environment variable values.
  """
  with patch.dict(os.environ, new_environment, clear=True):
lsst/daf/butler/timespan_database_representation.py
@@ -188,13 +188,13 @@ class TimespanDatabaseRepresentation(ABC):
  name : `str`, optional
  Name for the logical column; a part of the name for multi-column
  representations. Defaults to ``cls.NAME``.
- result : `dict` [ `str`, `Any` ], optional
+ result : `dict` [ `str`, `typing.Any` ], optional
  A dictionary representing a database row that fields should be
  added to, or `None` to create and return a new one.

  Returns
  -------
- result : `dict` [ `str`, `Any` ]
+ result : `dict` [ `str`, `typing.Any` ]
  A dictionary containing this representation of a timespan. Exactly
  the `dict` passed as ``result`` if that is not `None`.
  """
@@ -207,7 +207,7 @@ class TimespanDatabaseRepresentation(ABC):

  Parameters
  ----------
- mapping : `~collections.abc.Mapping` [ `Any`, `Any` ]
+ mapping : `~collections.abc.Mapping` [ `typing.Any`, `typing.Any` ]
  A dictionary representing a database row containing a `Timespan`
  in this representation. Should have key(s) equal to the return
  value of `getFieldNames`.
lsst/daf/butler/transfers/_context.py
@@ -175,12 +175,13 @@ class RepoExportContext:
  if element.has_own_table:
  standardized_elements.add(element)

- expanded_data_ids = self._butler._registry.expand_data_ids(dataIds)
- for dataId in expanded_data_ids:
- for element_name in dataId.dimensions.elements:
- record = dataId.records[element_name]
- if record is not None and record.definition in standardized_elements:
- self._records[record.definition].setdefault(record.dataId, record)
+ dimension_records = self._butler._extract_all_dimension_records_from_data_ids(
+ self._butler, set(dataIds), frozenset(standardized_elements)
+ )
+ for element, record_mapping in dimension_records.items():
+ if element in standardized_elements:
+ for record in record_mapping.values():
+ self._records[element].setdefault(record.dataId, record)

  def saveDatasets(
  self,
lsst/daf/butler/version.py
@@ -1,2 +1,2 @@
  __all__ = ["__version__"]
- __version__ = "30.0.0rc2"
+ __version__ = "30.0.1"
lsst_daf_butler-30.0.1.dist-info/METADATA
@@ -1,10 +1,11 @@
  Metadata-Version: 2.4
  Name: lsst-daf-butler
- Version: 30.0.0rc2
+ Version: 30.0.1
  Summary: An abstraction layer for reading and writing astronomical data to datastores.
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
  License-Expression: BSD-3-Clause OR GPL-3.0-or-later
  Project-URL: Homepage, https://github.com/lsst/daf_butler
+ Project-URL: Source, https://github.com/lsst/daf_butler
  Keywords: lsst
  Classifier: Intended Audience :: Science/Research
  Classifier: Operating System :: OS Independent
@@ -42,7 +43,7 @@ Provides-Extra: test
  Requires-Dist: pytest>=3.2; extra == "test"
  Requires-Dist: numpy>=1.17; extra == "test"
  Requires-Dist: matplotlib>=3.0.3; extra == "test"
- Requires-Dist: pandas>=1.0; extra == "test"
+ Requires-Dist: pandas<3.0,>=1.0; extra == "test"
  Provides-Extra: s3
  Requires-Dist: lsst-resources[s3]; extra == "s3"
  Provides-Extra: https