lsst-daf-butler 29.2025.4400__tar.gz → 29.2025.4500__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lsst_daf_butler-29.2025.4400/python/lsst_daf_butler.egg-info → lsst_daf_butler-29.2025.4500}/PKG-INFO +1 -1
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/datastores/formatters.yaml +1 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/datastores/writeRecipes.yaml +8 -21
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/storageClasses.yaml +2 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_driver.py +3 -3
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_query_builder.py +1 -1
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_general_query_results.py +9 -1
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_query.py +11 -3
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/result_specs.py +2 -2
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_base.py +3 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_column_set.py +3 -3
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/byDimensions/_manager.py +48 -279
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/dimensions/static.py +1 -72
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_datasets.py +13 -51
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_dimensions.py +0 -42
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_obscore.py +15 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/_manager.py +50 -33
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_backend.py +0 -182
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_sql_query_backend.py +1 -34
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/sql_registry.py +12 -17
- lsst_daf_butler-29.2025.4500/python/lsst/daf/butler/version.py +2 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500/python/lsst_daf_butler.egg-info}/PKG-INFO +1 -1
- lsst_daf_butler-29.2025.4400/python/lsst/daf/butler/version.py +0 -2
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/COPYRIGHT +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/LICENSE +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/MANIFEST.in +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/README.md +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/bsd_license.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/CHANGES.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/concreteStorageClasses.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/configuring.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/datastores.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/dimensions.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/formatters.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/index.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/organizing.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/queries.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/use-in-tests.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/doc/lsst.daf.butler/writing-subcommands.rst +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/gpl-v3.0.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/pyproject.toml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler_instance_options.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler_metrics.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_butler_repo_index.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_collection_type.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_column_categorization.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_column_tags.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_column_type_info.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_config_support.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_dataset_association.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_dataset_existence.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_dataset_provenance.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_dataset_ref.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_dataset_type.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_deferredDatasetHandle.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_exceptions.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_exceptions_legacy.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_file_dataset.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_file_descriptor.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_formatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_labeled_butler_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_limited_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_location.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_named.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_quantum.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_quantum_backed.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_query_all_datasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_registry_shim.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_rubin/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_rubin/file_datasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_standalone_datastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_storage_class.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_storage_class_delegate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_timespan.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_topology.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_utilities/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_utilities/locked_object.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_utilities/named_locks.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_utilities/thread_safe_cache.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/_uuid.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/arrow_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/cliLog.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/cmd/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/cmd/_remove_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/cmd/_remove_runs.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/cmd/commands.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/opt/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/opt/arguments.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/opt/optionGroups.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/opt/options.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/progress.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/cli/utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/column_spec.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/datastore.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/datastores/composites.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/datastores/fileDatastore.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/dimensions.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe0.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe1.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe2.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe3.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe4.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe5.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe6.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/old_dimensions/daf_butler_universe7.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/registry.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/configs/repo_transfer_formats.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/_datastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/_transfer.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/cache_manager.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/composites.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/constraints.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/file_templates.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/generic_base.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/record_data.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastore/stored_file_info.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/chainedDatastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/fileDatastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/file_datastore/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/file_datastore/get.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/file_datastore/retrieve_artifacts.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/file_datastore/transfer.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/datastores/inMemoryDatastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/ddl.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/delegates/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/delegates/arrowtable.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_coordinate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_data_coordinate_iterable.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_database.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_elements.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_governor.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_group.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_packer.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_record_set.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_record_table.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_records.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_schema.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_skypix.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/_universe.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/construction.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/dimensions/record_cache.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_butler/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_butler/_direct_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_butler/_direct_butler_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_postprocessing.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_query_analysis.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_result_page_converter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_sql_builders.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/direct_query_driver/_sql_column_visitor.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/astropyTable.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/file.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/json.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/logs.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/matplotlib.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/packages.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/parquet.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/pickle.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/typeless.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/formatters/yaml.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/json.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/logging.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/mapping_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/name_shrinker.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/nonempty_mapping.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/persistence_context.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/progress.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/py.typed +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/pydantic_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_base.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_data_coordinate_query_results.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_dataset_query_results.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_dimension_record_query_results.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_expression_strings.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/_identifiers.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/convert_args.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/driver.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/expression_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/overlaps.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/predicate_constraints_summary.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_column_expression.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_column_literal.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_column_reference.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_predicate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/tree/_query_tree.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/queries/visitors.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_caching_context.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_collection_record_cache.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_collection_summary.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_collection_summary_cache.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_defaults.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_exceptions.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_registry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_registry_base.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/_registry_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/attributes.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/bridge/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/bridge/ephemeral.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/bridge/monolithic.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/collections/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/collections/_base.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/collections/nameKey.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/collections/synthIntKey.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/connectionString.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/databases/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/databases/postgresql.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/databases/sqlite.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/byDimensions/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/byDimensions/_dataset_type_cache.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/byDimensions/summaries.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/datasets/byDimensions/tables.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/dimensions/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_attributes.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_bridge.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_database.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_database_explain.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_opaque.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/interfaces/_versioning.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/managers.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/nameShrinker.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/_records.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/_schema.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/_spatial.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/default_spatial.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/obscore/pgsphere.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/opaque.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_common.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_context.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_data_coordinates.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_datasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_query_dimension_records.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_readers.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_results.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/_sql_query_context.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/butler_sql_engine.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/_predicate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/categorize.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/check.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/normalForm.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/exprTree.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/parser.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/parserLex.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/parserYacc.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/ply/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/ply/lex.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/ply/yacc.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/expressions/parser/treeVisitor.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/queries/find_first_dataset.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/tests/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/tests/_database.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/tests/_registry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/versions.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/registry/wildcards.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_collection_args.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_defaults.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_errors.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_get.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_http_connection.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_query_driver.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_query_results.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_ref_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_registry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_remote_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_remote_butler_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/_remote_file_transfer_source.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/authentication/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/authentication/cadc.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/authentication/interface.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/authentication/rubin.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/registry/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_dependencies.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_gafaelfawr.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_server.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/_telemetry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_external.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_external_query.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_file_info.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_internal.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_query_limits.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_query_serialization.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_query_streaming.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server/handlers/_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/remote_butler/server_models.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/repo_relocation.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/_associate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/_pruneDatasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/butlerImport.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/certifyCalibrations.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/collectionChain.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/configDump.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/configValidate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/createRepo.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/exportCalibs.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/ingest_files.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/ingest_zip.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/queryCollections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/queryDataIds.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/queryDatasetTypes.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/queryDatasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/queryDimensionRecords.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/register_dataset_type.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/removeCollections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/removeDatasetType.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/removeRuns.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/retrieveArtifacts.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/script/transferDatasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/_datasetsHelper.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/_dummyRegistry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/_examplePythonTypes.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/_testRepo.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/butler_queries.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/cliCmdTestBase.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/cliLogTestBase.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/deferredFormatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/dict_convertible_model.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/hybrid_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/hybrid_butler_collections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/hybrid_butler_registry.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/postgresql.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/base.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/ci_hsc-subset-skymap.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/ci_hsc-subset.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/datasets.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/hsc-rc2-subset-v0.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/spatial.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/registry_data/spatial.yaml +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/server.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/server_available.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/server_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/testFormatters.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/tests/utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/time_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/timespan_database_representation.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/transfers/__init__.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/transfers/_context.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/transfers/_interfaces.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/transfers/_yaml.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst/daf/butler/utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/SOURCES.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/dependency_links.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/entry_points.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/requires.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/top_level.txt +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/python/lsst_daf_butler.egg-info/zip-safe +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/setup.cfg +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_astropyTableFormatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_authentication.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_butler_factory.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdAssociate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdConfigDump.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdConfigValidate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdCreate.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdImport.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdIngestFiles.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdPruneDatasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdQueryCollections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdQueryDataIds.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdQueryDatasetTypes.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdQueryDatasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdQueryDimensionRecords.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdRemoveCollections.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdRemoveRuns.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliCmdRetrieveArtifacts.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliLog.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliPluginLoader.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliUtilSplitCommas.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliUtilSplitKv.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliUtilToUpper.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_cliUtils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_column_spec.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_composites.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_config.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_connectionString.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_constraints.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_datasets.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_datastore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_ddl.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_dimension_record_containers.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_dimensions.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_exprParserLex.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_exprParserYacc.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_expressions.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_formatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_gafaelfawr.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_location.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_logFormatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_logging.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_matplotlibFormatter.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_nonempty_mapping.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_normalFormExpression.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_obscore.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_packages.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_parquet.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_postgresql.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_progress.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_pydantic_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_quantum.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_quantumBackedButler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_query_direct_postgresql.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_query_direct_sqlite.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_query_interface.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_query_remote.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_query_utilities.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_remote_butler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_server.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_simpleButler.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_sqlite.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_storageClass.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_templates.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_testRepo.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_thread_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_time_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_timespan.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_utils.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_uuid.py +0 -0
- {lsst_daf_butler-29.2025.4400 → lsst_daf_butler-29.2025.4500}/tests/test_versioning.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lsst-daf-butler
-Version: 29.2025.4400
+Version: 29.2025.4500
 Summary: An abstraction layer for reading and writing astronomical data to datastores.
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License-Expression: BSD-3-Clause OR GPL-3.0-or-later
python/lsst/daf/butler/configs/datastores/formatters.yaml
@@ -97,3 +97,4 @@ QPEnsemble: lsst.meas.pz.qp_formatter.QPFormatter
 PZModel: lsst.meas.pz.model_formatter.ModelFormatter
 VisitBackgroundModel: lsst.daf.butler.formatters.json.JsonFormatter
 VignettingCorrection: lsst.ts.observatory.control.utils.extras.vignetting_storage.VignettingCorrectionFormatter
+SSPAuxiliaryFile: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFileFormatter
python/lsst/daf/butler/configs/datastores/writeRecipes.yaml
@@ -1,23 +1,12 @@
 lsst.obs.base.formatters.fitsExposure.StandardFitsImageFormatterBase: &StandardFitsImageFormatterBase
   # No compression
   noCompression: &noCompression
-
-      compression:
-        algorithm: NONE
-      scaling:
-        algorithm: NONE
-    mask:
-      <<: *noCompressionOptions
-    variance:
-      <<: *noCompressionOptions
+    null
 
   # Lossless compression
   lossless: &lossless
     image: &losslessOptions
-
-        algorithm: GZIP_SHUFFLE
-      scaling:
-        algorithm: NONE
+      algorithm: GZIP_2
     mask:
       <<: *losslessOptions
     variance:
@@ -26,14 +15,12 @@ lsst.obs.base.formatters.fitsExposure.StandardFitsImageFormatterBase: &StandardF
   # Basic lossy (quantizing) compression
   lossyBasic: &lossyBasic
     image: &lossyBasicOptions
-
-
-
-
-
-
-      quantizeLevel: 10.0
-      quantizePad: 10.0
+      algorithm: RICE_1
+      quantization:
+        dither: SUBTRACTIVE_DITHER_2
+        scaling: STDEV_MASKED
+        mask_planes: ["NO_DATA"]
+        level: 10.0
     mask:
       <<: *losslessOptions
     variance:
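The write-recipe change above replaces the old `quantizeLevel`/`quantizePad` keys with an explicit `algorithm` plus a `quantization` block. Below is a minimal sketch of reading that new structure with PyYAML; the fragment is assembled from the added lines only, and its indentation and surrounding keys are assumptions rather than a copy of the shipped file.

```python
# Minimal sketch: parse a fragment shaped like the new lossy write recipe.
# The keys/values come from the "+" lines above; indentation and the rest of
# the recipe file are assumed, not copied from the package.
import yaml  # PyYAML

RECIPE_FRAGMENT = """
lossyBasic:
  image:
    algorithm: RICE_1
    quantization:
      dither: SUBTRACTIVE_DITHER_2
      scaling: STDEV_MASKED
      mask_planes: ["NO_DATA"]
      level: 10.0
"""

image_opts = yaml.safe_load(RECIPE_FRAGMENT)["lossyBasic"]["image"]
print(image_opts["algorithm"])               # RICE_1
print(image_opts["quantization"]["level"])   # 10.0
```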
python/lsst/daf/butler/configs/storageClasses.yaml
@@ -437,3 +437,5 @@ storageClasses:
     pytype: lsst.drp.tasks.fit_visit_background.VisitBackgroundModel
   VignettingCorrection:
     pytype: lsst.ts.observatory.control.utils.extras.vignetting_correction.VignettingCorrection
+  SSPAuxiliaryFile:
+    pytype: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFile
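The new SSPAuxiliaryFile support touches two configuration files: `formatters.yaml` maps the storage class name to a formatter class, and `storageClasses.yaml` (above) declares the Python type it carries. The sketch below only illustrates how the two added entries pair up when loaded as plain YAML mappings; it is not how the butler itself reads these files.

```python
# Sketch: the two entries added for SSPAuxiliaryFile, loaded side by side.
# Values are copied from the "+" lines in the two YAML diffs above.
import yaml

formatters = yaml.safe_load(
    "SSPAuxiliaryFile: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFileFormatter"
)
storage_classes = yaml.safe_load(
    "SSPAuxiliaryFile:\n  pytype: lsst.pipe.tasks.sspAuxiliaryFile.SSPAuxiliaryFile"
)

name = "SSPAuxiliaryFile"
print(storage_classes[name]["pytype"])  # Python type the storage class carries
print(formatters[name])                 # formatter that reads/writes it on disk
```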
python/lsst/daf/butler/direct_query_driver/_driver.py
@@ -1437,7 +1437,7 @@ class DirectQueryDriver(QueryDriver):
         self,
         joins_builder: SqlJoinsBuilder,
         resolved_search: ResolvedDatasetSearch[list[str]],
-        fields: Set[
+        fields: Set[qt.AnyDatasetFieldName],
         union_dataset_type_name: str,
     ) -> None: ...
 
@@ -1446,14 +1446,14 @@ class DirectQueryDriver(QueryDriver):
         self,
         joins_builder: SqlJoinsBuilder,
         resolved_search: ResolvedDatasetSearch[str],
-        fields: Set[
+        fields: Set[qt.AnyDatasetFieldName],
     ) -> None: ...
 
     def join_dataset_search(
         self,
         joins_builder: SqlJoinsBuilder,
         resolved_search: ResolvedDatasetSearch[Any],
-        fields: Set[
+        fields: Set[qt.AnyDatasetFieldName],
         union_dataset_type_name: str | None = None,
     ) -> None:
         """Join a dataset search into an under-construction query.
python/lsst/daf/butler/direct_query_driver/_query_builder.py
@@ -284,7 +284,7 @@ class QueryBuilder(ABC):
         raise NotImplementedError()
 
     def _needs_collection_key_field(
-        self, dataset_search: ResolvedDatasetSearch, fields_for_dataset: set[
+        self, dataset_search: ResolvedDatasetSearch, fields_for_dataset: set[qt.AnyDatasetFieldName]
     ) -> bool:
         """Return `True` if the ``collection_key`` dataset field is needed to
         provide uniqueness for rows.
@@ -39,7 +39,7 @@ from ..dimensions import DataCoordinate, DimensionElement, DimensionGroup, Dimen
 from ._base import QueryResultsBase
 from .driver import QueryDriver
 from .result_specs import GeneralResultSpec
-from .tree import QueryTree, ResultColumn
+from .tree import AnyDatasetFieldName, QueryTree, ResultColumn


 class GeneralResultTuple(NamedTuple):
@@ -173,6 +173,14 @@ class GeneralQueryResults(QueryResultsBase):
         # Docstring inherited.
         return GeneralQueryResults(self._driver, tree, self._spec.model_copy(update=kwargs))

+    def _with_added_dataset_field(self, dataset_type: str, field: AnyDatasetFieldName) -> GeneralQueryResults:
+        dataset_fields = dict(self._spec.dataset_fields)
+        field_set = set(dataset_fields.get(dataset_type, set()))
+        field_set.add(field)
+        dataset_fields[dataset_type] = field_set
+
+        return self._copy(self._tree, dataset_fields=dataset_fields)
+
     def _get_datasets(self) -> frozenset[str]:
         # Docstring inherited.
         return frozenset(self._spec.dataset_fields)
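The new `_with_added_dataset_field` helper copies the spec's per-dataset-type field mapping and adds one field, leaving the original results object untouched; this is what later lets the datasets manager request the internal `calib_pkey` field. A minimal standalone sketch of the same copy-and-extend logic, with a plain dict standing in for `GeneralResultSpec.dataset_fields` (the function and variable names here are illustrative, not package API):

```python
def with_added_dataset_field(
    dataset_fields: dict[str, set[str]], dataset_type: str, field: str
) -> dict[str, set[str]]:
    """Return a new mapping with ``field`` added for ``dataset_type``,
    leaving the input mapping and its sets untouched.
    """
    updated = dict(dataset_fields)
    field_set = set(updated.get(dataset_type, set()))
    field_set.add(field)
    updated[dataset_type] = field_set
    return updated


spec = {"bias": {"dataset_id", "timespan"}}
new_spec = with_added_dataset_field(spec, "bias", "calib_pkey")

assert spec["bias"] == {"dataset_id", "timespan"}  # original unchanged
assert new_spec["bias"] == {"dataset_id", "timespan", "calib_pkey"}
```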
@@ -27,11 +27,12 @@

 from __future__ import annotations

-__all__ = ("Query",)
+__all__ = ("Query", "QueryFactoryFunction")

-from collections.abc import Iterable, Mapping, Set
+from collections.abc import Callable, Iterable, Mapping, Set
+from contextlib import AbstractContextManager
 from types import EllipsisType
-from typing import Any, final
+from typing import Any, TypeAlias, final

 import astropy.table

@@ -831,3 +832,10 @@ class Query(QueryBase):
             storage_class_name,
             Query(self._driver, self._tree.join_dataset(dataset_type_name, dataset_search)),
         )
+
+
+QueryFactoryFunction: TypeAlias = Callable[[], AbstractContextManager[Query]]
+"""
+Type signature for a function returning a context manager that sets up a
+`Query` object. (That is, a function equivalent to ``Butler.query()``).
+"""
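`QueryFactoryFunction` is just an alias for a zero-argument callable that returns a context manager yielding a `Query`; per the docstring above, a bound `Butler.query` already has this shape. A minimal, self-contained sketch of something satisfying the alias (the `FakeQuery` and `fake_query_factory` names are made up for illustration):

```python
from contextlib import contextmanager
from typing import Iterator


class FakeQuery:
    """Stand-in for lsst.daf.butler.queries.Query in this sketch."""


@contextmanager
def fake_query_factory() -> Iterator[FakeQuery]:
    # Set up whatever state the query needs, yield it, then clean up;
    # a real implementation would manage database connections here.
    query = FakeQuery()
    try:
        yield query
    finally:
        pass  # release connections, temporary tables, etc.


# Usage mirrors how the dataset-record manager consumes query_func below.
with fake_query_factory() as query:
    assert isinstance(query, FakeQuery)
```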
@@ -43,7 +43,7 @@ import pydantic
 from .._exceptions import InvalidQueryError
 from ..dimensions import DimensionElement, DimensionGroup, DimensionUniverse
 from ..pydantic_utils import DeferredValidation
-from .tree import AnyDatasetType, ColumnSet,
+from .tree import AnyDatasetFieldName, AnyDatasetType, ColumnSet, OrderExpression, QueryTree


 class ResultSpecBase(pydantic.BaseModel, ABC):
@@ -215,7 +215,7 @@ class GeneralResultSpec(ResultSpecBase):
     dimension_fields: Mapping[str, set[str]]
     """Dimension record fields included in this query."""

-    dataset_fields: Mapping[str, set[
+    dataset_fields: Mapping[str, set[AnyDatasetFieldName]]
     """Dataset fields included in this query."""

     include_dimension_records: bool = False
@@ -30,6 +30,7 @@ from __future__ import annotations
 __all__ = (
     "ANY_DATASET",
     "DATASET_FIELD_NAMES",
+    "AnyDatasetFieldName",
     "AnyDatasetType",
     "ColumnExpressionBase",
     "DatasetFieldName",
@@ -57,6 +58,8 @@ if TYPE_CHECKING:
 # collection primary key values) and hence should use `str` rather than this
 # type.
 DatasetFieldName: TypeAlias = Literal["dataset_id", "ingest_date", "run", "collection", "timespan"]
+InternalDatasetFieldName: TypeAlias = Literal["calib_pkey", "collection_key"]
+AnyDatasetFieldName: TypeAlias = DatasetFieldName | InternalDatasetFieldName

 # Tuple of the strings that can be use as dataset fields in public APIs.
 DATASET_FIELD_NAMES: tuple[DatasetFieldName, ...] = tuple(get_args(DatasetFieldName))
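The split between the public `DatasetFieldName` literal and the new internal names matters at runtime too, because `DATASET_FIELD_NAMES` is built only from the public alias. A standalone illustration of that distinction, re-declaring the aliases locally rather than importing them from the package:

```python
from typing import Literal, TypeAlias, get_args

# Local copies of the aliases shown in the hunk above, for illustration only.
DatasetFieldName: TypeAlias = Literal["dataset_id", "ingest_date", "run", "collection", "timespan"]
InternalDatasetFieldName: TypeAlias = Literal["calib_pkey", "collection_key"]
AnyDatasetFieldName: TypeAlias = DatasetFieldName | InternalDatasetFieldName

DATASET_FIELD_NAMES = tuple(get_args(DatasetFieldName))


def is_public_field(field: AnyDatasetFieldName) -> bool:
    # Internal fields type-check as AnyDatasetFieldName, but public APIs that
    # validate against DATASET_FIELD_NAMES reject them.
    return field in DATASET_FIELD_NAMES


assert is_public_field("timespan")
assert not is_public_field("calib_pkey")
```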
@@ -35,7 +35,7 @@ from typing import NamedTuple, cast
 from ... import column_spec
 from ...dimensions import DataIdValue, DimensionGroup
 from ...nonempty_mapping import NonemptyMapping
-from ._base import ANY_DATASET, AnyDatasetType
+from ._base import ANY_DATASET, AnyDatasetFieldName, AnyDatasetType


 class ColumnSet:
@@ -65,7 +65,7 @@ class ColumnSet:
         self._dimensions = dimensions
         self._removed_dimension_keys: set[str] = set()
         self._dimension_fields: dict[str, set[str]] = {name: set() for name in dimensions.elements}
-        self._dataset_fields = NonemptyMapping[str | AnyDatasetType, set[
+        self._dataset_fields = NonemptyMapping[str | AnyDatasetType, set[AnyDatasetFieldName]](set)

     @property
     def dimensions(self) -> DimensionGroup:
@@ -83,7 +83,7 @@ class ColumnSet:
         return self._dimension_fields

     @property
-    def dataset_fields(self) -> NonemptyMapping[str | AnyDatasetType, set[
+    def dataset_fields(self) -> NonemptyMapping[str | AnyDatasetType, set[AnyDatasetFieldName]]:
         """Dataset fields included in the set, grouped by dataset type name.

         The keys of this mapping are just those that actually have nonempty
@@ -11,12 +11,8 @@ from typing import TYPE_CHECKING, Any, ClassVar
 import astropy.time
 import sqlalchemy

-from lsst.daf.relation import Relation, sql
-
 from .... import ddl
 from ...._collection_type import CollectionType
-from ...._column_tags import DatasetColumnTag, DimensionKeyColumnTag
-from ...._column_type_info import LogicalColumn
 from ...._dataset_ref import DatasetId, DatasetIdFactory, DatasetIdGenEnum, DatasetRef
 from ...._dataset_type import DatasetType, get_dataset_type_name
 from ...._exceptions import CollectionTypeError, MissingDatasetTypeError
@@ -24,12 +20,12 @@ from ...._exceptions_legacy import DatasetTypeError
 from ...._timespan import Timespan
 from ....dimensions import DataCoordinate, DimensionGroup, DimensionUniverse
 from ....direct_query_driver import SqlJoinsBuilder, SqlSelectBuilder  # new query system, server+direct only
+from ....queries import QueryFactoryFunction
 from ....queries import tree as qt  # new query system, both clients + server
 from ..._caching_context import CachingContext
 from ..._collection_summary import CollectionSummary
 from ..._exceptions import ConflictingDefinitionError, DatasetTypeExpressionError, OrphanedRecordError
 from ...interfaces import DatasetRecordStorageManager, RunRecord, VersionTuple
-from ...queries import SqlQueryContext  # old registry query system
 from ...wildcards import DatasetTypeWildcard
 from ._dataset_type_cache import DatasetTypeCache
 from .summaries import CollectionSummaryManager
@@ -1015,7 +1011,7 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
         collection: CollectionRecord,
         datasets: Iterable[DatasetRef],
         timespan: Timespan,
-
+        query_func: QueryFactoryFunction,
     ) -> None:
         # Docstring inherited from DatasetRecordStorageManager.
         if (storage := self._find_storage(dataset_type.name)) is None:
@@ -1068,23 +1064,25 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
                 ) from err
         else:
             # Have to implement exclusion constraint ourselves.
-            # Start by building a SELECT query for any rows that would overlap
-            # this one.
-            relation = self._build_calib_overlap_query(dataset_type, collection, data_ids, timespan, context)
             # Acquire a table lock to ensure there are no concurrent writes
             # could invalidate our checking before we finish the inserts. We
             # use a SAVEPOINT in case there is an outer transaction that a
             # failure here should not roll back.
-            with self._db.transaction(
-
-
-                #
-
-
-                #
-            with
-
-
+            with self._db.transaction(
+                lock=[calibs_table],
+                savepoint=True,
+                # join_data_coordinates sometimes requires a temp table
+                for_temp_tables=True,
+            ):
+                # Query for any rows that would overlap this one.
+                with query_func() as query:
+                    if data_ids is not None:
+                        query = query.join_data_coordinates(data_ids)
+                    timespan_column = query.expression_factory[dataset_type.name].timespan
+                    result = query.datasets(dataset_type, collection.name, find_first=False).where(
+                        timespan_column.overlaps(timespan)
+                    )
+                    conflicting = result.count()
                 if conflicting > 0:
                     raise ConflictingDefinitionError(
                         f"{conflicting} validity range conflicts certifying datasets of type "
@@ -1101,7 +1099,7 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
         timespan: Timespan,
         *,
         data_ids: Iterable[DataCoordinate] | None = None,
-
+        query_func: QueryFactoryFunction,
     ) -> None:
         # Docstring inherited from DatasetRecordStorageManager.
         if (storage := self._find_storage(dataset_type.name)) is None:
@@ -1117,17 +1115,12 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
                 f"of type {collection.type.name}; must be CALIBRATION."
             )
         TimespanReprClass = self._db.getTimespanRepresentation()
-        # Construct a SELECT query to find all rows that overlap our inputs.
         data_id_set: set[DataCoordinate] | None
         if data_ids is not None:
             data_id_set = set(data_ids)
         else:
             data_id_set = None
-
-        calib_pkey_tag = DatasetColumnTag(dataset_type.name, "calib_pkey")
-        dataset_id_tag = DatasetColumnTag(dataset_type.name, "dataset_id")
-        timespan_tag = DatasetColumnTag(dataset_type.name, "timespan")
-        data_id_tags = [(name, DimensionKeyColumnTag(name)) for name in dataset_type.dimensions.required]
+
         # Set up collections to populate with the rows we'll want to modify.
         # The insert rows will have the same values for collection and
         # dataset type.
@@ -1141,21 +1134,33 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
         # between the SELECT and the DELETE and INSERT queries based on it.
         calibs_table = self._get_calibs_table(storage.dynamic_tables)
         with self._db.transaction(lock=[calibs_table], savepoint=True):
-            #
-
-
-
-
-
+            # Find rows overlapping our inputs.
+            with query_func() as query:
+                query = query.join_dataset_search(dataset_type, [collection.name])
+                if data_id_set is not None:
+                    query = query.join_data_coordinates(data_id_set)
+                timespan_column = query.expression_factory[dataset_type.name].timespan
+                query = query.where(timespan_column.overlaps(timespan))
+                result = query.general(
+                    dataset_type.dimensions,
+                    dataset_fields={dataset_type.name: {"dataset_id", "timespan"}},
+                    find_first=False,
+                )._with_added_dataset_field(dataset_type.name, "calib_pkey")
+
+                calib_pkey_key = f"{dataset_type.name}.calib_pkey"
+                dataset_id_key = f"{dataset_type.name}.dataset_id"
+                timespan_key = f"{dataset_type.name}.timespan"
+                for row in result.iter_tuples():
+                    rows_to_delete.append({"id": row.raw_row[calib_pkey_key]})
                     # Construct the insert row(s) by copying the prototype row,
                     # then adding the dimension column values, then adding
                     # what's left of the timespan from that row after we
                     # subtract the given timespan.
                     new_insert_row = proto_insert_row.copy()
-                    new_insert_row["dataset_id"] = row[
-                    for name,
-                    new_insert_row[name] =
-                    row_timespan = row[
+                    new_insert_row["dataset_id"] = row.raw_row[dataset_id_key]
+                    for name, value in row.data_id.required.items():
+                        new_insert_row[name] = value
+                    row_timespan = row.raw_row[timespan_key]
                     assert row_timespan is not None, "Field should have a NOT NULL constraint."
                     for diff_timespan in row_timespan.difference(timespan):
                         rows_to_insert.append(
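In the rewritten loop above, `row.raw_row` is indexed with fully qualified `"<dataset type>.<field>"` keys, which is why the code builds `calib_pkey_key` and friends with f-strings. A toy illustration with a plain dict standing in for `raw_row` (the dataset type name and values here are invented):

```python
# Fake stand-in for GeneralResultTuple.raw_row, keyed by "<dataset type>.<field>".
dataset_type_name = "bias"

calib_pkey_key = f"{dataset_type_name}.calib_pkey"
dataset_id_key = f"{dataset_type_name}.dataset_id"

fake_raw_row = {
    f"{dataset_type_name}.calib_pkey": 42,
    f"{dataset_type_name}.dataset_id": "a1b2c3",
    f"{dataset_type_name}.timespan": None,
}

# Mirrors how decertify collects the calibs-table primary keys to delete.
rows_to_delete = [{"id": fake_raw_row[calib_pkey_key]}]
assert rows_to_delete == [{"id": 42}]
```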
@@ -1165,252 +1170,11 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
             self._db.delete(calibs_table, ["id"], *rows_to_delete)
             self._db.insert(calibs_table, *rows_to_insert)

-    def _build_calib_overlap_query(
-        self,
-        dataset_type: DatasetType,
-        collection: CollectionRecord,
-        data_ids: set[DataCoordinate] | None,
-        timespan: Timespan,
-        context: SqlQueryContext,
-    ) -> Relation:
-        relation = self.make_relation(
-            dataset_type, collection, columns={"timespan", "dataset_id", "calib_pkey"}, context=context
-        ).with_rows_satisfying(
-            context.make_timespan_overlap_predicate(
-                DatasetColumnTag(dataset_type.name, "timespan"), timespan
-            ),
-        )
-        if data_ids is not None:
-            relation = relation.join(
-                context.make_data_id_relation(data_ids, dataset_type.dimensions.required).transferred_to(
-                    context.sql_engine
-                ),
-            )
-        return relation
-
-    def make_relation(
-        self,
-        dataset_type: DatasetType,
-        *collections: CollectionRecord,
-        columns: Set[str],
-        context: SqlQueryContext,
-    ) -> Relation:
-        # Docstring inherited from DatasetRecordStorageManager.
-        if (storage := self._find_storage(dataset_type.name)) is None:
-            raise MissingDatasetTypeError(f"Dataset type {dataset_type.name!r} has not been registered.")
-        collection_types = {collection.type for collection in collections}
-        assert CollectionType.CHAINED not in collection_types, "CHAINED collections must be flattened."
-        TimespanReprClass = self._db.getTimespanRepresentation()
-        #
-        # There are two kinds of table in play here:
-        #
-        # - the static dataset table (with the dataset ID, dataset type ID,
-        #   run ID/name, and ingest date);
-        #
-        # - the dynamic tags/calibs table (with the dataset ID, dataset type
-        #   type ID, collection ID/name, data ID, and possibly validity
-        #   range).
-        #
-        # That means that we might want to return a query against either table
-        # or a JOIN of both, depending on which quantities the caller wants.
-        # But the data ID is always included, which means we'll always include
-        # the tags/calibs table and join in the static dataset table only if we
-        # need things from it that we can't get from the tags/calibs table.
-        #
-        # Note that it's important that we include a WHERE constraint on both
-        # tables for any column (e.g. dataset_type_id) that is in both when
-        # it's given explicitly; not doing can prevent the query planner from
-        # using very important indexes. At present, we don't include those
-        # redundant columns in the JOIN ON expression, however, because the
-        # FOREIGN KEY (and its index) are defined only on dataset_id.
-        tag_relation: Relation | None = None
-        calib_relation: Relation | None = None
-        if collection_types != {CollectionType.CALIBRATION}:
-            tags_table = self._get_tags_table(storage.dynamic_tables)
-            # We'll need a subquery for the tags table if any of the given
-            # collections are not a CALIBRATION collection. This intentionally
-            # also fires when the list of collections is empty as a way to
-            # create a dummy subquery that we know will fail.
-            # We give the table an alias because it might appear multiple times
-            # in the same query, for different dataset types.
-            tags_parts = sql.Payload[LogicalColumn](tags_table.alias(f"{dataset_type.name}_tags"))
-            if "timespan" in columns:
-                tags_parts.columns_available[DatasetColumnTag(dataset_type.name, "timespan")] = (
-                    TimespanReprClass.fromLiteral(Timespan(None, None))
-                )
-            tag_relation = self._finish_single_relation(
-                storage,
-                tags_parts,
-                columns,
-                [
-                    (record, rank)
-                    for rank, record in enumerate(collections)
-                    if record.type is not CollectionType.CALIBRATION
-                ],
-                context,
-            )
-            assert "calib_pkey" not in columns, "For internal use only, and only for pure-calib queries."
-        if CollectionType.CALIBRATION in collection_types:
-            # If at least one collection is a CALIBRATION collection, we'll
-            # need a subquery for the calibs table, and could include the
-            # timespan as a result or constraint.
-            calibs_table = self._get_calibs_table(storage.dynamic_tables)
-            calibs_parts = sql.Payload[LogicalColumn](calibs_table.alias(f"{dataset_type.name}_calibs"))
-            if "timespan" in columns:
-                calibs_parts.columns_available[DatasetColumnTag(dataset_type.name, "timespan")] = (
-                    TimespanReprClass.from_columns(calibs_parts.from_clause.columns)
-                )
-            if "calib_pkey" in columns:
-                # This is a private extension not included in the base class
-                # interface, for internal use only in _buildCalibOverlapQuery,
-                # which needs access to the autoincrement primary key for the
-                # calib association table.
-                calibs_parts.columns_available[DatasetColumnTag(dataset_type.name, "calib_pkey")] = (
-                    calibs_parts.from_clause.columns.id
-                )
-            calib_relation = self._finish_single_relation(
-                storage,
-                calibs_parts,
-                columns,
-                [
-                    (record, rank)
-                    for rank, record in enumerate(collections)
-                    if record.type is CollectionType.CALIBRATION
-                ],
-                context,
-            )
-        if tag_relation is not None:
-            if calib_relation is not None:
-                # daf_relation's chain operation does not automatically
-                # deduplicate; it's more like SQL's UNION ALL. To get UNION
-                # in SQL here, we add an explicit deduplication.
-                return tag_relation.chain(calib_relation).without_duplicates()
-            else:
-                return tag_relation
-        elif calib_relation is not None:
-            return calib_relation
-        else:
-            raise AssertionError("Branch should be unreachable.")
-
-    def _finish_single_relation(
-        self,
-        storage: _DatasetRecordStorage,
-        payload: sql.Payload[LogicalColumn],
-        requested_columns: Set[str],
-        collections: Sequence[tuple[CollectionRecord, int]],
-        context: SqlQueryContext,
-    ) -> Relation:
-        """Handle adding columns and WHERE terms that are not specific to
-        either the tags or calibs tables.
-
-        Helper method for `make_relation`.
-
-        Parameters
-        ----------
-        storage : `ByDimensionsDatasetRecordStorageUUID`
-            Struct that holds the tables and ID for the dataset type.
-        payload : `lsst.daf.relation.sql.Payload`
-            SQL query parts under construction, to be modified in-place and
-            used to construct the new relation.
-        requested_columns : `~collections.abc.Set` [ `str` ]
-            Columns the relation should include.
-        collections : `~collections.abc.Sequence` [ `tuple` \
-            [ `CollectionRecord`, `int` ] ]
-            Collections to search for the dataset and their ranks.
-        context : `SqlQueryContext`
-            Context that manages engines and state for the query.
-
-        Returns
-        -------
-        relation : `lsst.daf.relation.Relation`
-            New dataset query relation.
-        """
-        payload.where.append(payload.from_clause.columns.dataset_type_id == storage.dataset_type_id)
-        dataset_id_col = payload.from_clause.columns.dataset_id
-        collection_col = payload.from_clause.columns[self._collections.getCollectionForeignKeyName()]
-        # We always constrain and optionally retrieve the collection(s) via the
-        # tags/calibs table.
-        if len(collections) == 1:
-            payload.where.append(collection_col == collections[0][0].key)
-            if "collection" in requested_columns:
-                payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "collection")] = (
-                    sqlalchemy.sql.literal(collections[0][0].key)
-                )
-        else:
-            assert collections, "The no-collections case should be in calling code for better diagnostics."
-            payload.where.append(collection_col.in_([collection.key for collection, _ in collections]))
-            if "collection" in requested_columns:
-                payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "collection")] = (
-                    collection_col
-                )
-        # Add rank if requested as a CASE-based calculation the collection
-        # column.
-        if "rank" in requested_columns:
-            payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "rank")] = (
-                sqlalchemy.sql.case(
-                    {record.key: rank for record, rank in collections},
-                    value=collection_col,
-                )
-            )
-        # Add more column definitions, starting with the data ID.
-        for dimension_name in storage.dataset_type.dimensions.required:
-            payload.columns_available[DimensionKeyColumnTag(dimension_name)] = payload.from_clause.columns[
-                dimension_name
-            ]
-        # We can always get the dataset_id from the tags/calibs table.
-        if "dataset_id" in requested_columns:
-            payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "dataset_id")] = (
-                dataset_id_col
-            )
-        # It's possible we now have everything we need, from just the
-        # tags/calibs table. The things we might need to get from the static
-        # dataset table are the run key and the ingest date.
-        need_static_table = False
-        if "run" in requested_columns:
-            if len(collections) == 1 and collections[0][0].type is CollectionType.RUN:
-                # If we are searching exactly one RUN collection, we
-                # know that if we find the dataset in that collection,
-                # then that's the datasets's run; we don't need to
-                # query for it.
-                payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "run")] = (
-                    sqlalchemy.sql.literal(collections[0][0].key)
-                )
-            else:
-                payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "run")] = (
-                    self._static.dataset.columns[self._run_key_column]
-                )
-                need_static_table = True
-        # Ingest date can only come from the static table.
-        if "ingest_date" in requested_columns:
-            need_static_table = True
-            payload.columns_available[DatasetColumnTag(storage.dataset_type.name, "ingest_date")] = (
-                self._static.dataset.columns.ingest_date
-            )
-        # If we need the static table, join it in via dataset_id and
-        # dataset_type_id
-        if need_static_table:
-            payload.from_clause = payload.from_clause.join(
-                self._static.dataset, onclause=(dataset_id_col == self._static.dataset.columns.id)
-            )
-            # Also constrain dataset_type_id in static table in case that helps
-            # generate a better plan.
-            # We could also include this in the JOIN ON clause, but my guess is
-            # that that's a good idea IFF it's in the foreign key, and right
-            # now it isn't.
-            payload.where.append(self._static.dataset.columns.dataset_type_id == storage.dataset_type_id)
-        leaf = context.sql_engine.make_leaf(
-            payload.columns_available.keys(),
-            payload=payload,
-            name=storage.dataset_type.name,
-            parameters={record.name: rank for record, rank in collections},
-        )
-        return leaf
-
     def make_joins_builder(
         self,
         dataset_type: DatasetType,
         collections: Sequence[CollectionRecord],
-        fields: Set[
+        fields: Set[qt.AnyDatasetFieldName],
         is_union: bool = False,
     ) -> SqlJoinsBuilder:
         if (storage := self._find_storage(dataset_type.name)) is None:
@@ -1476,6 +1240,9 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
                 tags_builder.joins.timespans[fields_key] = self._db.getTimespanRepresentation().fromLiteral(
                     Timespan(None, None)
                 )
+            assert "calib_pkey" not in fields, (
+                "Calibration primary key for internal use only on calibration collections."
+            )
         calibs_builder: SqlSelectBuilder | None = None
         if CollectionType.CALIBRATION in collection_types:
             # If at least one collection is a CALIBRATION collection, we'll
@@ -1495,6 +1262,8 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
                 calibs_builder.joins.timespans[fields_key] = (
                     self._db.getTimespanRepresentation().from_columns(calibs_table.columns)
                 )
+            if "calib_pkey" in fields:
+                calibs_builder.joins.fields[fields_key]["calib_pkey"] = calibs_table.columns["id"]

             # In calibration collections, we need timespan as well as data ID
             # to ensure unique rows.
@@ -1515,7 +1284,7 @@ class ByDimensionsDatasetRecordStorageManagerUUID(DatasetRecordStorageManager):
         storage: _DatasetRecordStorage,
         sql_projection: SqlSelectBuilder,
         collections: Sequence[CollectionRecord],
-        fields: Set[
+        fields: Set[qt.AnyDatasetFieldName],
         fields_key: str | qt.AnyDatasetType,
     ) -> SqlSelectBuilder:
         # This method plays the same role as _finish_single_relation in the new