ctao-calibpipe 0.1.0rc7 (ctao_calibpipe-0.1.0rc7-py3-none-any.whl)
This diff represents the content of a publicly available package version as released to its public registry. It is provided for informational purposes only and reflects the package contents as they appear in that registry.
Potentially problematic release: this version of ctao-calibpipe has been flagged as potentially problematic.
- calibpipe/__init__.py +5 -0
- calibpipe/_dev_version/__init__.py +9 -0
- calibpipe/_version.py +21 -0
- calibpipe/atmosphere/__init__.py +1 -0
- calibpipe/atmosphere/atmosphere_containers.py +109 -0
- calibpipe/atmosphere/meteo_data_handlers.py +485 -0
- calibpipe/atmosphere/models/README.md +14 -0
- calibpipe/atmosphere/models/__init__.py +1 -0
- calibpipe/atmosphere/models/macobac.ecsv +23 -0
- calibpipe/atmosphere/models/reference_MDPs/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_intermediate.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_atmospheres/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_intermediate.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_intermediate.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_winter.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_winter.ecsv +857 -0
- calibpipe/atmosphere/templates/request_templates/__init__.py +1 -0
- calibpipe/atmosphere/templates/request_templates/copernicus.json +11 -0
- calibpipe/atmosphere/templates/request_templates/gdas.json +12 -0
- calibpipe/core/__init__.py +39 -0
- calibpipe/core/common_metadata_containers.py +195 -0
- calibpipe/core/exceptions.py +87 -0
- calibpipe/database/__init__.py +24 -0
- calibpipe/database/adapter/__init__.py +23 -0
- calibpipe/database/adapter/adapter.py +80 -0
- calibpipe/database/adapter/database_containers/__init__.py +61 -0
- calibpipe/database/adapter/database_containers/atmosphere.py +199 -0
- calibpipe/database/adapter/database_containers/common_metadata.py +148 -0
- calibpipe/database/adapter/database_containers/container_map.py +59 -0
- calibpipe/database/adapter/database_containers/observatory.py +61 -0
- calibpipe/database/adapter/database_containers/table_version_manager.py +39 -0
- calibpipe/database/adapter/database_containers/version_control.py +17 -0
- calibpipe/database/connections/__init__.py +28 -0
- calibpipe/database/connections/calibpipe_database.py +60 -0
- calibpipe/database/connections/postgres_utils.py +97 -0
- calibpipe/database/connections/sql_connection.py +103 -0
- calibpipe/database/connections/user_confirmation.py +19 -0
- calibpipe/database/interfaces/__init__.py +71 -0
- calibpipe/database/interfaces/hashable_row_data.py +54 -0
- calibpipe/database/interfaces/queries.py +180 -0
- calibpipe/database/interfaces/sql_column_info.py +67 -0
- calibpipe/database/interfaces/sql_metadata.py +6 -0
- calibpipe/database/interfaces/sql_table_info.py +131 -0
- calibpipe/database/interfaces/table_handler.py +351 -0
- calibpipe/database/interfaces/types.py +96 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/__init__.py +0 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/contemporary_MDP.ecsv +34 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.csv +852 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.ecsv +23 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/merged_file.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_copernicus.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_gdas.ecsv +66 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/observatory_configurations.json +71 -0
- calibpipe/tests/data/utils/__init__.py +0 -0
- calibpipe/tests/data/utils/meteo_data_winter_and_summer.ecsv +12992 -0
- calibpipe/tests/unittests/atmosphere/astral_testing.py +107 -0
- calibpipe/tests/unittests/atmosphere/test_meteo_data_handler.py +775 -0
- calibpipe/tests/unittests/atmosphere/test_molecular_atmosphere.py +327 -0
- calibpipe/tests/unittests/database/test_table_handler.py +66 -0
- calibpipe/tests/unittests/database/test_types.py +38 -0
- calibpipe/tests/unittests/test_bootstrap_db.py +79 -0
- calibpipe/tests/unittests/utils/test_observatory.py +309 -0
- calibpipe/tools/atmospheric_base_tool.py +78 -0
- calibpipe/tools/atmospheric_model_db_loader.py +181 -0
- calibpipe/tools/basic_tool_with_db.py +38 -0
- calibpipe/tools/contemporary_mdp_producer.py +87 -0
- calibpipe/tools/init_db.py +37 -0
- calibpipe/tools/macobac_calculator.py +82 -0
- calibpipe/tools/molecular_atmospheric_model_producer.py +197 -0
- calibpipe/tools/observatory_data_db_loader.py +71 -0
- calibpipe/tools/reference_atmospheric_model_selector.py +201 -0
- calibpipe/utils/__init__.py +10 -0
- calibpipe/utils/observatory.py +486 -0
- calibpipe/utils/observatory_containers.py +26 -0
- calibpipe/version.py +24 -0
- ctao_calibpipe-0.1.0rc7.dist-info/METADATA +86 -0
- ctao_calibpipe-0.1.0rc7.dist-info/RECORD +93 -0
- ctao_calibpipe-0.1.0rc7.dist-info/WHEEL +5 -0
- ctao_calibpipe-0.1.0rc7.dist-info/entry_points.txt +8 -0
- ctao_calibpipe-0.1.0rc7.dist-info/licenses/AUTHORS.md +13 -0
- ctao_calibpipe-0.1.0rc7.dist-info/licenses/LICENSE +21 -0
- ctao_calibpipe-0.1.0rc7.dist-info/top_level.txt +1 -0
calibpipe/database/connections/postgres_utils.py
@@ -0,0 +1,97 @@
+"""Adapter for psycopg types and database uri."""
+
+from typing import Any
+
+import numpy as np
+from psycopg import adapters
+from psycopg.adapt import Buffer, Dumper
+from psycopg.errors import DataError
+from psycopg.postgres import types as _types
+from psycopg.types.bool import BoolDumper
+from psycopg.types.numeric import Float4Dumper, FloatDumper
+
+
+def get_postgres_uri(
+    user: str,
+    database: str,
+    passwd: str,
+    host: str = "postgres",
+    port: str | None = None,
+) -> str:
+    """Generate a valid uri to connect to the postgres+psycopg database."""
+    port_str = f":{port}" if port else ""
+    return f"postgresql+psycopg://{user}:{passwd}@{host}{port_str}/{database}"
+
+
+# np.int dumpers
+class _NPIntDumper(Dumper):
+    def dump(self, obj: Any) -> Buffer:
+        t = type(obj)
+        allowed_types = [
+            np.int8,
+            np.int16,
+            np.int32,
+            np.int64,
+            np.longlong,
+            np.uint8,
+            np.uint16,
+            np.uint32,
+            np.uint64,
+            np.ulonglong,
+        ]
+        if t not in allowed_types:
+            raise DataError(f"Numpy integer expected, got {type(obj).__name__!r}")
+        return str(obj).encode()
+
+
+class NPInt16Dumper(_NPIntDumper):
+    """Numpy int16 dumper."""
+
+    oid = _types["int2"].oid
+
+
+class NPInt32Dumper(_NPIntDumper):
+    """Numpy int32 dumper."""
+
+    oid = _types["int4"].oid
+
+
+class NPInt64Dumper(_NPIntDumper):
+    """Numpy int64 dumper."""
+
+    oid = _types["int8"].oid
+
+
+class NPNumericDumper(_NPIntDumper):
+    """Numpy numeric dumper."""
+
+    oid = _types["numeric"].oid
+
+
+def adapt_psycopg() -> None:
+    # pylint: disable=line-too-long
+    """
+    Adapt numpy numerical types for psycopg3.
+
+    .. note::
+        Required for psycopg3 < 3.2. Until the pyscopg-3.2.0 is released, we borrow their code.
+        See `this PR <https://github.com/psycopg/psycopg/pull/332/files#diff-6d04f11a711cbef8ea32bd1479af4a79b402e213559d8b66359a6b871c5bdd28>`_ for details
+    """
+    # pylint: enable=line-too-long
+    adapters.register_dumper("numpy.int8", NPInt16Dumper)
+    adapters.register_dumper("numpy.int16", NPInt16Dumper)
+    adapters.register_dumper("numpy.int32", NPInt32Dumper)
+    adapters.register_dumper("numpy.int64", NPInt64Dumper)
+    adapters.register_dumper("numpy.longlong", NPInt64Dumper)
+    adapters.register_dumper("numpy.bool_", BoolDumper)
+    adapters.register_dumper("numpy.uint8", NPInt16Dumper)
+    adapters.register_dumper("numpy.uint16", NPInt32Dumper)
+    adapters.register_dumper("numpy.uint32", NPInt64Dumper)
+    adapters.register_dumper("numpy.uint64", NPNumericDumper)
+    adapters.register_dumper("numpy.ulonglong", NPNumericDumper)
+    adapters.register_dumper("numpy.float16", Float4Dumper)
+    adapters.register_dumper("numpy.float32", Float4Dumper)
+    adapters.register_dumper("numpy.float64", FloatDumper)
+
+
+adapt_psycopg()

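Usage sketch (illustrative, not part of the package): importing this module registers the numpy dumpers as a side effect of the module-level adapt_psycopg() call, so the helper below only assembles the connection string. The credentials, host and port are placeholders.

from calibpipe.database.connections.postgres_utils import get_postgres_uri

# Placeholder credentials/host; the import above has already registered
# the numpy dumpers via the module-level adapt_psycopg() call.
uri = get_postgres_uri(
    user="calib",
    database="calibration",
    passwd="secret",
    host="localhost",
    port="5432",
)
assert uri == "postgresql+psycopg://calib:secret@localhost:5432/calibration"
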
calibpipe/database/connections/sql_connection.py
@@ -0,0 +1,103 @@
+"""Interface to connect to the calibration DB stored in a SQL DB."""
+
+import sqlalchemy as sa
+from sqlalchemy.engine import Engine, Result
+from sqlalchemy.orm import Session
+
+
+class SQLConnection:
+    """
+    Interface to communicate with a SQL database.
+
+    Once an uri (`str`) has been generated, the connection can be
+    open in a context to ensure proper closing (and commit if
+    required)::
+
+        uri: str = get_postgres_uri(user='api-owner', database='calibration')
+        with SQLConnection(uri=uri, autocommit=True) as connection:
+            # e.g.
+            # connection.execute(...)
+
+    Attributes
+    ----------
+    autocommit: bool
+        Tell if the database changes must be committed automatically
+        when closing the connection (can be done manually by calling
+        the :meth:`commit` method).
+
+    uri: str
+        Uri used to connect to the database. This attribute is not
+        used anymore once the connection is open.
+
+    engine: sqlalchemy.engine.Engine
+        Engine used for the database connection. It can be of several
+        kinds, the default one is `postgres + psycopg`. The engine
+        is automatically connected at the initialization step.
+
+    session: sqlalchemy.orm.Session
+        Session (use the :attr:`engine`) used to execute
+        queries to the database.
+    """
+
+    def __init__(self, uri: str, autocommit: bool = False) -> None:
+        """
+        Initialize the session and engine, connect to the database.
+
+        Parameters
+        ----------
+        uri: str
+            uri to connect to the database. See the
+            :func:`calibpipe.database.connections.get_postgres_uri`
+            to generate the uri connecting to a Postgres database.
+
+        autocommit: bool, optional (default=False)
+            Determines if the connection commits changes when the
+            :meth:`__exit__`
+            method is called. If set to `False` (default), changes will not be
+            committed and it is necessary to call :meth:`commit` after
+            modifications have been done.
+        """
+        self.autocommit = autocommit
+        self.uri: str = uri
+        self.engine: Engine = sa.create_engine(self.uri, echo=True, future=True)
+        self.engine.connect()
+        self.session: Session = Session(self.engine)
+
+    def __enter__(self) -> "SQLConnection":
+        """Enter a new context."""
+        return self
+
+    def __exit__(self, *args) -> None:
+        """
+        Exit the context and close the connection.
+
+        This method simply call `close()`.
+        """
+        self.close()
+
+    def close(self) -> None:
+        """
+        Close the session.
+
+        If the autocommit attribute is True, changes are committed before
+        closing the connection.
+        """
+        if self.session:
+            if self.autocommit:
+                self.commit()
+            self.session.close()
+
+    def commit(self) -> None:
+        """Commit changes to the database."""
+        self.session.commit()
+
+    def execute(self, *args) -> Result:
+        """
+        Execute a query in the SQL session.
+
+        This methods forwards the arguments to the
+        :meth:`sqlalchemy.orm.Session.execute` method of
+        :attr:`session`.
+        Refer to the documentation of `sqlalchemy` to use queries.
+        """
+        return self.session.execute(*args)

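A slightly fuller version of the docstring example above, as an illustrative sketch only; the URI values are placeholders and the imports assume the module layout listed at the top of this diff.

import sqlalchemy as sa

from calibpipe.database.connections.postgres_utils import get_postgres_uri
from calibpipe.database.connections.sql_connection import SQLConnection

uri = get_postgres_uri(user="calib", database="calibration", passwd="secret", host="localhost")
with SQLConnection(uri=uri, autocommit=True) as connection:
    # Any sqlalchemy statement can be passed through; the session is
    # committed (autocommit=True) and closed when the context exits.
    result = connection.execute(sa.text("SELECT 1"))
    print(result.scalar())
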
calibpipe/database/connections/user_confirmation.py
@@ -0,0 +1,19 @@
+"""Function to ask for a user confirmation."""
+
+
+def get_user_confirmation(prompt: str) -> bool:
+    """Ask a confirmation from the user by displaying a prompt and asking yes or no.
+
+    Parameters
+    ----------
+    prompt: str
+        Prompt to display
+
+    Returns
+    -------
+    bool
+        Answer from the user
+    """
+    print(f"{prompt} [y/n, default: no]")
+    user_input = input()
+    return user_input.lower() == "y"

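Illustrative sketch only: the helper is typically used to guard a destructive operation; anything other than an explicit "y" counts as "no". The prompt text below is a placeholder.

from calibpipe.database.connections.user_confirmation import get_user_confirmation

if get_user_confirmation("Drop and re-create the calibration tables?"):
    ...  # proceed only after an explicit "y"
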
calibpipe/database/interfaces/__init__.py
@@ -0,0 +1,71 @@
+"""
+Various interfaces for database access.
+
+This module contains field, column, row and table interfaces
+to interact with a database. In general the module is tightly
+coupled to sqlalchemy though some parts have been preserved
+from an explicit dependency.
+
+Module content:
+- `types.py`: Type definition for DB fields using sqlalchemy
+  types and defining the specific case of the `numpy.ndarray`.
+- `sql_metadata.py`: Simple metadata variable definition for
+  sqlalchemy.
+- `sql_column_info.py`: Container for column information, useful
+  to e.g. define the corresponding LST fields at the DB level.
+- `sql_table_info.py`: Container for table information, used in
+  particular to create sqlalchemy table objects from a list
+  of column information.
+- `hashable_row_data.py`: Hashable container of a (single) row
+  information (table_name + primary_key value). This can be
+  (and is) used to index data using the row to which they belong
+  and create a cache for data retrieved from the database.
+- `table_handler.py`: Container for functions to handle
+  tables in the DB.
+- `queries.py`: Built-in queries that can be used to retrieve camera calibration data.
+
+"""
+
+from .hashable_row_data import HashableRowData
+from .queries import query_from_date, query_from_run, query_full_table
+from .sql_column_info import SQLColumnInfo
+from .sql_metadata import sql_metadata
+from .sql_table_info import SQLTableInfo
+from .table_handler import TableHandler
+from .types import (
+    BigInteger,
+    Boolean,
+    Date,
+    DateTime,
+    Double,
+    Float,
+    Integer,
+    NDArray,
+    Numeric,
+    SmallInteger,
+    String,
+    Time,
+)
+
+__all__ = [
+    "Boolean",
+    "SmallInteger",
+    "Integer",
+    "BigInteger",
+    "Float",
+    "Double",
+    "Numeric",
+    "String",
+    "Date",
+    "Time",
+    "DateTime",
+    "NDArray",
+    "sql_metadata",
+    "SQLTableInfo",
+    "SQLColumnInfo",
+    "HashableRowData",
+    "TableHandler",
+    "query_full_table",
+    "query_from_date",
+    "query_from_run",
+]

calibpipe/database/interfaces/hashable_row_data.py
@@ -0,0 +1,54 @@
+"""HashableRowData class."""
+
+from collections.abc import Hashable
+
+
+class HashableRowData:
+    """
+    Contain hashable row information (table and primary key).
+
+    A table name and a primary key **value** is enough information
+    to uniquely identify a row inside the entire database. This
+    information can therefore be hashed to create a map indexed
+    by a row.
+
+    Attributes
+    ----------
+    table_name: str
+        Name of the table in which the row is stored.
+
+    primary_key: Hashable
+        Any python object that can be hashed, must contain the primary
+        key value of the row.
+    """
+
+    table_name: str
+    primary_key: Hashable  # Any hashable value
+
+    def __init__(self, table_name: str, primary_key: Hashable) -> None:
+        """Initialize hashable table."""
+        self.table_name = table_name
+        self.primary_key = primary_key
+
+    def __eq__(self, object_: object) -> bool:
+        """Compare two HashableRowData objects."""
+        if not isinstance(object_, HashableRowData):
+            return False
+        return (
+            self.table_name == object_.table_name
+            and self.primary_key == object_.primary_key
+        )
+
+    def __hash__(self) -> int:
+        """
+        Hash function.
+
+        The xor (operator ^) seems ok because the table name and primary_key
+        will in general be different, or at least it should be extremely
+        rare and performance should not be affected.
+        """
+        return hash(self.table_name) ^ hash(self.primary_key)
+
+    def __str__(self) -> str:
+        """Generate a string representation for HashableRowData object. Unicity is guaranteed."""
+        return f"{self.primary_key}_{self.table_name}"

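Illustrative sketch only (the table name and key below are hypothetical): because __eq__ and __hash__ are defined on the (table_name, primary_key) pair, two instances describing the same row can index the same cache entry.

from calibpipe.database.interfaces import HashableRowData

cache: dict[HashableRowData, dict] = {}
cache[HashableRowData("atmospheric_model", 42)] = {"density": 1.2e-4}

# A fresh object describing the same row hashes and compares equal,
# so it finds the cached entry.
assert HashableRowData("atmospheric_model", 42) in cache
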
calibpipe/database/interfaces/queries.py
@@ -0,0 +1,180 @@
+"""
+Built-in queries for camera calibration data.
+
+All built-in queries are obtained by calling functions that
+return a tuple of query and a list of strings, e.g.::
+
+    light_query, deferred_column_names = some_builtin_query()
+
+Each time, the query object can be directly sent to an `execute()`
+SQLConnection method and it will retrieve from the DB the primary
+key and all light fields (by default non-array types, see the
+deferred property of `SQLColumnInfo`). This can be done e.g. using::
+
+    db.execute(light_query)
+
+The list of strings returned in the
+tuple is the list of deferred fields, that must be queried
+separately later on, e.g. using::
+
+    query = sa.select(deferred_column_names)
+    res = db.execute(query)
+
+The built-in queries are:
+
+- `query_full_table()`: Return the query utilities
+  to retrieve a full table from the database
+  (used e.g. to retrieve the run metadata table).
+- `query_from_run()`: Return the query utilities
+  to retrieve data of a given run in a given table.
+- `query_from_date()`: Return the query utilities
+  to retrieve data at a given date in a given table.
+
+The queries have an undefined type (internal to sqlalchemy and
+not fully defined), an alias to `Any` is used in this file to express
+what objects are SQL queries.
+"""
+
+import datetime
+from typing import Any
+
+import sqlalchemy as sa
+
+from ..adapter.database_containers.table_version_manager import (
+    TableVersionManager,
+)
+from .sql_table_info import SQLTableInfo
+
+Query = Any
+""" Alias to express which objects are queries as `sqlalchemy` does not define it. """
+
+
+def _get_deferred_column(table: sa.Table, info: SQLTableInfo) -> list[str]:
+    """Return the names of deferred columns to use in a select statement."""
+    return [getattr(table.c, column.name) for column in info.get_deferred_columns()]
+
+
+def _get_undeferred_column(table: sa.Table, info: SQLTableInfo) -> list[str]:
+    """Return the names of undeferred columns to use in a select statement."""
+    return [getattr(table.c, column.name) for column in info.get_undeferred_columns()]
+
+
+def _process_table_info(
+    table_info: SQLTableInfo, version: str
+) -> tuple[sa.Table, list[str], list[str]]:
+    """Return objects necessary to build queries from a table info."""
+    table = TableVersionManager.apply_version(table_info=table_info, version=version)
+    deferred_columns = _get_deferred_column(table=table, info=table_info)
+    undeferred_columns = _get_undeferred_column(table=table, info=table_info)
+    return table, deferred_columns, undeferred_columns
+
+
+def _select_and_filter(column: list[Any], condition: Any) -> Query | None:
+    """Return a select clause on several columns using a simple filter."""
+    if not column:
+        return None
+    return sa.select(*column).filter(condition)
+
+
+def _select_by_date(
+    table: sa.Table, column: list[str], date: datetime.date
+) -> Query | None:
+    """Return a query selecting a list of columns from a date."""
+    return _select_and_filter(
+        column=column,
+        condition=(table.c.date == date),  # pylint: disable=superfluous-parens
+    )
+
+
+def _select_by_run(table: sa.Table, column: list[str], run: int) -> Query | None:
+    """Return a query selecting a list of columns from a run."""
+    return _select_and_filter(
+        column=column,
+        condition=(table.c.run == run),  # pylint: disable=superfluous-parens
+    )
+
+
+def query_full_table(
+    table_info: SQLTableInfo, version: str | None = None
+) -> tuple[Query, list[str]]:
+    """
+    Return a query for a complete table.
+
+    Parameters
+    ----------
+    table_info: SQLTableInfo
+        Table to which the query must be built.
+
+    version: Optional[str], default=None
+        Software version of the data to retrieve. If `None` is given, the
+        `_pro` version will be used i.e. the latest available.
+
+    Returns
+    -------
+    tuple[Query, list[str]]
+        A tuple containing the light query to retrieve small fields and the list
+        of field names for deferred fields (cached and loaded later).
+    """
+    _unused_table, deferred_columns, undeferred_columns = _process_table_info(
+        table_info, version=version
+    )
+    light_query = sa.select(undeferred_columns)
+    return light_query, deferred_columns
+
+
+def query_from_date(
+    table_info: SQLTableInfo, version: str, date: datetime.date
+) -> tuple[Query, list[str]]:
+    """
+    Return a query from a date.
+
+    Parameters
+    ----------
+    table_info: SQLTableInfo
+        Table to which the query must be built.
+
+    version: Optional[str], default=None
+        Software version of the data to retrieve. If `None` is given, the
+        `_pro` version will be used i.e. the latest available.
+
+    Returns
+    -------
+    tuple[Query, list[str]]
+        A tuple containing the light query to retrieve small fields and the list
+        of field names for deferred fields (cached and loaded later).
+    """
+    table, deferred_columns, undeferred_columns = _process_table_info(
+        table_info, version=version
+    )
+    light_query = _select_by_date(table, undeferred_columns, date)
+
+    return light_query, deferred_columns
+
+
+def query_from_run(
+    table_info: SQLTableInfo, version: str, run: int
+) -> tuple[Query, list[str]]:
+    """
+    Return a query from a run.
+
+    Parameters
+    ----------
+    table_info: SQLTableInfo
+        Table to which the query must be built.
+
+    version: Optional[str], default=None
+        Software version of the data to retrieve. If `None` is given, the
+        `_pro` version will be used i.e. the latest available.
+
+    Returns
+    -------
+    tuple[Query, list[str]]
+        A tuple containing the light query to retrieve small fields and the list
+        of field names for deferred fields (cached and loaded later).
+    """
+    table, deferred_columns, undeferred_columns = _process_table_info(
+        table_info, version=version
+    )
+    light_query = _select_by_run(table, undeferred_columns, run)
+
+    return light_query, deferred_columns

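Illustrative sketch only, mirroring the module docstring above: run_summary_info and uri are hypothetical placeholders for an existing SQLTableInfo and connection string. The light query returns the primary key and small fields; the deferred column list is meant for a separate, later select as the docstring describes.

from calibpipe.database.connections.sql_connection import SQLConnection
from calibpipe.database.interfaces import query_full_table

# run_summary_info: a hypothetical SQLTableInfo; uri: a hypothetical DB uri.
light_query, deferred_columns = query_full_table(table_info=run_summary_info)

with SQLConnection(uri=uri) as db:
    rows = db.execute(light_query).all()  # primary key + undeferred (small) fields
    # deferred_columns can be fetched later, e.g. with sa.select as shown in
    # the module docstring, once the heavy (array) fields are actually needed.
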
calibpipe/database/interfaces/sql_column_info.py
@@ -0,0 +1,67 @@
+"""SQLColumnInfo class."""
+
+import astropy.units as u
+import sqlalchemy as sa
+from astropy.units.cds import ppm
+
+from .types import ColumnType, NDArray
+
+u.add_enabled_units([ppm])
+
+
+class SQLColumnInfo:
+    """
+    Contain info required to create a `sa.Column` object.
+
+    The data representing the column is system-independent,
+    using in particular the generic types in `.types`,
+    only the `generate_column()` method is specialized for
+    `sqlalchemy` (returning a `sa.Column` object).
+
+    Attributes
+    ----------
+    name: str
+        Field name
+
+    field_type: ColumnType
+        Field `type.` See the `.types` import for possible types
+
+    is_deferred: bool (optional, default=None)
+        If given, tell if the field must be deferred i.e. loaded
+        only later (when queried) if a cache system is in place.
+        If not given, only `NDArray` objects are deferred.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        field_type: ColumnType,
+        unit: str | None = "",
+        is_deferred: bool | None = None,
+        **kwargs,
+    ) -> None:
+        """
+        Initialize the column data.
+
+        Any keyword argument required to build the final Column object
+        (here sa.Column for sqlachemy) in the generate_column() method
+        can be given to the initializer.
+        """
+        self.name = name
+        self.type = field_type
+        self.unit = u.Unit(unit)
+        if is_deferred is not None:
+            self.is_deferred = is_deferred
+        else:
+            # If not specified defer automatically arrays and arrays only
+            self.is_deferred = field_type == NDArray
+        self.kwargs = {**kwargs}
+
+    def generate_column(self) -> sa.Column:
+        """Generate a new corresponding sa.Column object."""
+        column = sa.Column(self.name, self.type, comment=str(self.unit), **self.kwargs)
+        return column
+
+    def is_primary_key(self) -> bool:
+        """Check if the column is a primary key."""
+        return "primary_key" in self.kwargs and self.kwargs["primary_key"]

|