ctao-calibpipe 0.3.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- calibpipe/__init__.py +5 -0
- calibpipe/_dev_version/__init__.py +9 -0
- calibpipe/_version.py +34 -0
- calibpipe/atmosphere/__init__.py +1 -0
- calibpipe/atmosphere/atmosphere_containers.py +109 -0
- calibpipe/atmosphere/meteo_data_handlers.py +485 -0
- calibpipe/atmosphere/models/README.md +14 -0
- calibpipe/atmosphere/models/__init__.py +1 -0
- calibpipe/atmosphere/models/macobac.ecsv +23 -0
- calibpipe/atmosphere/models/reference_MDPs/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_intermediate.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_atmospheres/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_intermediate.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_intermediate.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_winter.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_winter.ecsv +857 -0
- calibpipe/atmosphere/templates/request_templates/__init__.py +1 -0
- calibpipe/atmosphere/templates/request_templates/copernicus.json +11 -0
- calibpipe/atmosphere/templates/request_templates/gdas.json +12 -0
- calibpipe/core/__init__.py +39 -0
- calibpipe/core/common_metadata_containers.py +198 -0
- calibpipe/core/exceptions.py +87 -0
- calibpipe/database/__init__.py +24 -0
- calibpipe/database/adapter/__init__.py +23 -0
- calibpipe/database/adapter/adapter.py +80 -0
- calibpipe/database/adapter/database_containers/__init__.py +63 -0
- calibpipe/database/adapter/database_containers/atmosphere.py +199 -0
- calibpipe/database/adapter/database_containers/common_metadata.py +150 -0
- calibpipe/database/adapter/database_containers/container_map.py +59 -0
- calibpipe/database/adapter/database_containers/observatory.py +61 -0
- calibpipe/database/adapter/database_containers/table_version_manager.py +39 -0
- calibpipe/database/adapter/database_containers/throughput.py +30 -0
- calibpipe/database/adapter/database_containers/version_control.py +17 -0
- calibpipe/database/connections/__init__.py +28 -0
- calibpipe/database/connections/calibpipe_database.py +60 -0
- calibpipe/database/connections/postgres_utils.py +97 -0
- calibpipe/database/connections/sql_connection.py +103 -0
- calibpipe/database/connections/user_confirmation.py +19 -0
- calibpipe/database/interfaces/__init__.py +71 -0
- calibpipe/database/interfaces/hashable_row_data.py +54 -0
- calibpipe/database/interfaces/queries.py +180 -0
- calibpipe/database/interfaces/sql_column_info.py +67 -0
- calibpipe/database/interfaces/sql_metadata.py +6 -0
- calibpipe/database/interfaces/sql_table_info.py +131 -0
- calibpipe/database/interfaces/table_handler.py +333 -0
- calibpipe/database/interfaces/types.py +96 -0
- calibpipe/telescope/throughput/containers.py +66 -0
- calibpipe/tests/conftest.py +274 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/__init__.py +0 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/contemporary_MDP.ecsv +34 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.csv +852 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.ecsv +23 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/merged_file.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_copernicus.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_gdas.ecsv +66 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/observatory_configurations.json +71 -0
- calibpipe/tests/data/utils/__init__.py +0 -0
- calibpipe/tests/data/utils/meteo_data_winter_and_summer.ecsv +12992 -0
- calibpipe/tests/test_conftest_data.py +200 -0
- calibpipe/tests/unittests/array/test_cross_calibration.py +412 -0
- calibpipe/tests/unittests/atmosphere/astral_testing.py +107 -0
- calibpipe/tests/unittests/atmosphere/test_meteo_data_handler.py +775 -0
- calibpipe/tests/unittests/atmosphere/test_molecular_atmosphere.py +327 -0
- calibpipe/tests/unittests/database/test_table_handler.py +163 -0
- calibpipe/tests/unittests/database/test_types.py +38 -0
- calibpipe/tests/unittests/telescope/camera/test_calculate_camcalib_coefficients.py +456 -0
- calibpipe/tests/unittests/telescope/camera/test_produce_camcalib_test_data.py +37 -0
- calibpipe/tests/unittests/telescope/throughput/test_muon_throughput_calibrator.py +693 -0
- calibpipe/tests/unittests/test_bootstrap_db.py +79 -0
- calibpipe/tests/unittests/utils/test_observatory.py +309 -0
- calibpipe/tools/atmospheric_base_tool.py +78 -0
- calibpipe/tools/atmospheric_model_db_loader.py +181 -0
- calibpipe/tools/basic_tool_with_db.py +38 -0
- calibpipe/tools/camcalib_test_data.py +374 -0
- calibpipe/tools/camera_calibrator.py +462 -0
- calibpipe/tools/contemporary_mdp_producer.py +87 -0
- calibpipe/tools/init_db.py +37 -0
- calibpipe/tools/macobac_calculator.py +82 -0
- calibpipe/tools/molecular_atmospheric_model_producer.py +197 -0
- calibpipe/tools/muon_throughput_calculator.py +219 -0
- calibpipe/tools/observatory_data_db_loader.py +71 -0
- calibpipe/tools/reference_atmospheric_model_selector.py +201 -0
- calibpipe/tools/telescope_cross_calibration_calculator.py +721 -0
- calibpipe/utils/__init__.py +10 -0
- calibpipe/utils/observatory.py +486 -0
- calibpipe/utils/observatory_containers.py +26 -0
- calibpipe/version.py +24 -0
- ctao_calibpipe-0.3.0rc2.dist-info/METADATA +92 -0
- ctao_calibpipe-0.3.0rc2.dist-info/RECORD +105 -0
- ctao_calibpipe-0.3.0rc2.dist-info/WHEEL +5 -0
- ctao_calibpipe-0.3.0rc2.dist-info/entry_points.txt +12 -0
- ctao_calibpipe-0.3.0rc2.dist-info/licenses/AUTHORS.md +13 -0
- ctao_calibpipe-0.3.0rc2.dist-info/licenses/LICENSE +21 -0
- ctao_calibpipe-0.3.0rc2.dist-info/top_level.txt +1 -0
calibpipe/database/interfaces/sql_table_info.py
@@ -0,0 +1,131 @@
"""SQLTableInfo class."""

from __future__ import annotations

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base
from sqlalchemy.schema import (
    CheckConstraint,
    ForeignKeyConstraint,
    PrimaryKeyConstraint,
    UniqueConstraint,
)

from ..interfaces import sql_metadata
from .sql_column_info import SQLColumnInfo


class InvalidTableError(Exception):
    """Raised when a table is invalid, e.g. has no primary key."""


class SQLTableInfo:
    """
    Collection of attributes defining a Table's columns.

    The class contains the column information (`SQLColumnInfo`)
    and additional arguments required to build the sqlalchemy
    table when the `get_table()` method is called.

    This class can provide useful information on the corresponding
    table: for example the primary key, or the lists of undeferred
    and deferred columns, i.e. columns that must be loaded directly
    or looked up in a cache system (if implemented), respectively.
    Note that no cache implementation lives here, only the information
    that some columns should be deferred if possible.

    The `SQLTableInfo` can also manage several tables of the same type
    (e.g. for versioning, table_A_v1 and table_A_v2). When calling
    the `get_table()` method, a custom table name can be given. The
    object will ensure that only one table is created for a given
    name (otherwise `sqlalchemy` cannot work properly).
    """

    table_base_class = declarative_base()

    def __init__(
        self,
        table_name: str,
        metadata: sql_metadata,
        columns: list[SQLColumnInfo],
        constraints: list[
            ForeignKeyConstraint
            | UniqueConstraint
            | CheckConstraint
            | PrimaryKeyConstraint
        ]
        | None = None,
    ) -> None:
        """Initialize the table data and sqlalchemy metadata."""
        self.table_name = table_name
        self.metadata = metadata
        self.columns = columns
        self.constraints = constraints if constraints else []
        self._table_instances: dict[str, sa.Table] = {}

    def get_primary_keys(self) -> list[SQLColumnInfo]:
        """Get the list of primary keys for the table.

        Returns
        -------
        list
            list with the SQLColumnInfo objects that are the primary keys

        Raises
        ------
        InvalidTableError
            If there is no primary key in the table
        """
        pk_columns = []
        for column in self.columns:
            if column.is_primary_key():
                pk_columns.append(column)
        if pk_columns:
            return pk_columns
        raise InvalidTableError(f"Table {self.table_name!r} has no primary key.")

    def get_deferred_columns(self) -> list[SQLColumnInfo]:
        """
        Return the columns that must be deferred.

        Deferred columns won't be loaded directly when queried.
        """
        return [column for column in self.columns if column.is_deferred]

    def get_undeferred_columns(self) -> list[SQLColumnInfo]:
        """Return the columns that must not be deferred.

        These columns are loaded directly when queried.
        """
        return [column for column in self.columns if not column.is_deferred]

    def get_table(self, table_name: str | None = None) -> sa.Table:
        """
        Return a table with a given name, creating it if necessary.

        Parameters
        ----------
        table_name: str (optional, default=None)
            Name of the table to create. If not given, the `table_name`
            attribute is used. If the table with the given name has
            already been created it is returned and no new table
            is generated.
        """
        table_name = table_name or self.table_name
        if table_name not in self._table_instances:
            if table_name in self.metadata.tables:
                self._table_instances[table_name] = sa.Table(table_name, self.metadata)
            else:
                self._table_instances[table_name] = self._generate_table(
                    table_name=table_name
                )
        return self._table_instances[table_name]

    def _generate_table(self, table_name: str) -> sa.Table:
        """Generate a table corresponding to the info with a specific name."""
        return sa.Table(
            table_name,
            self.metadata,
            *[col.generate_column() for col in self.columns],
            *self.constraints,
        )
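The one-Table-per-name guarantee that `get_table()` gives is easy to illustrate. Below is a minimal sketch of the caching pattern, using plain SQLAlchemy because `SQLColumnInfo`'s constructor is not shown in this diff; the table and column names are illustrative only.

import sqlalchemy as sa

metadata = sa.MetaData()
_instances: dict[str, sa.Table] = {}

def get_table(name: str) -> sa.Table:
    # Create the Table on first request, then return the cached object,
    # so SQLAlchemy never sees two definitions of the same name.
    if name not in _instances:
        _instances[name] = sa.Table(
            name, metadata, sa.Column("id", sa.Integer, primary_key=True)
        )
    return _instances[name]

# Versioned siblings (table_A_v1, table_A_v2) get distinct Table objects,
# while repeated calls with the same name return the same object.
assert get_table("table_A_v1") is get_table("table_A_v1")
assert get_table("table_A_v1") is not get_table("table_A_v2")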
calibpipe/database/interfaces/table_handler.py
@@ -0,0 +1,333 @@
"""Utilities for CalibPipe data."""

from datetime import datetime, timezone
from typing import Any

import astropy.units as u
import numpy as np  # noqa: F401
import sqlalchemy as sa
from astropy.table import QTable
from ctapipe.core import Container

from ...core.exceptions import DBStorageError
from ..adapter.adapter import Adapter
from ..adapter.database_containers.container_map import ContainerMap
from ..adapter.database_containers.table_version_manager import TableVersionManager
from ..connections import CalibPipeDatabase
from ..interfaces import sql_metadata


class TableHandler:
    """
    Handles tables in the CalibPipe database.

    The first method returns a valid insertion for a DB, made of the table
    instance and the values to be inserted. The second method simply inserts
    values into a DB, given the DB connection, the table and the values.
    """

    @staticmethod
    def get_database_table_insertion(
        container: Container,
        version: str | None = None,
    ) -> tuple[sa.Table, dict[str, Any]]:
        """Return a valid insertion for a DB made of the table instance, and the values to insert."""
        table, kwargs = Adapter.to_postgres(container, version=version)
        if table is None:
            raise TypeError(f"Table cannot be created for {type(container)}.")
        return table, kwargs

    @staticmethod
    def insert_row_in_database(
        table: sa.Table,
        kwargs: dict[str, Any],
        connection: CalibPipeDatabase,
    ) -> None:
        """Insert values in a DB table as a row."""
        connection.execute(sa.insert(table).values(**kwargs))

    @staticmethod
    def read_table_from_database(
        container: Container,
        connection: CalibPipeDatabase,
        condition: str | None = None,
    ) -> QTable:
        """
        Read a table from the DB and return it as a QTable object.

        The optional argument `condition` shall have the following form:
        `c.<column_name> <operator> <value>`
        or a combination thereof using the `&` and `|` operators.
        In case of a compound condition, every singleton must be enclosed
        in parentheses.
        """
        table = ContainerMap.map_to_db_container(container).get_table()
        if condition:
            query = table.select().where(
                eval(condition.replace("c.", "table.c."))  # pylint: disable=eval-used
            )
        else:
            query = table.select()
        rows = connection.execute(query).fetchall()
        if not rows:
            return QTable(
                names=table.columns.keys(),
                units=[
                    1 * u.Unit(c.comment) if c.comment else None for c in table.columns
                ],
            )
        return QTable(
            rows=rows,
            names=table.columns.keys(),
            units=[1 * u.Unit(c.comment) if c.comment else None for c in table.columns],
        )

    @staticmethod
    def get_compatible_version(
        version_table: sa.Table,
        table_name: str,
        version: str,
        connection: CalibPipeDatabase,
    ) -> str:
        """
        Get a compatible version for a certain table from the version table.

        If no compatible version of the table is available, the new version
        of the table will be added to the version table.
        """
        version_major = version.split(".")[0]
        query = sa.select(version_table.c.version).where(
            version_table.c.version.like(f"{version_major}%"),
            version_table.c.name == table_name,
        )
        query_results = connection.execute(query).first()
        if query_results is None:
            vals = {
                "name": table_name,
                "version": version,
                "validity_start": datetime(2023, 1, 1, 0, 0, 1, tzinfo=timezone.utc),
                "validity_end": datetime(2023, 1, 1, 0, 0, 2, tzinfo=timezone.utc),
            }
            TableHandler.insert_row_in_database(
                version_table,
                vals,
                connection=connection,
            )
            return version
        comp_version = query_results[0]
        return comp_version

    @staticmethod
    def update_tables_info(
        table: sa.Table,
        version_table: sa.Table,
        table_name: str,
        comp_version: str,
        table_version: str,
        connection: CalibPipeDatabase,
    ) -> str:
        """
        Update the tables' info.

        Updated min and max timestamps are taken from the data table,
        and a check on the version is performed to update the version table.
        Also, the name of the table is updated accordingly if the version
        has changed.
        """
        msg = "DB tables have been updated successfully."
        query = sa.select(
            sa.func.min(table.c.validity_start).label("min_time"),
            sa.func.max(table.c.validity_end).label("max_time"),
        )
        results = connection.execute(query).first()

        if float(table_version.split(".")[1]) > float(comp_version.split(".")[1]):
            TableHandler.update_version_table(
                version_table,
                table_name,
                comp_version,
                table_version,
                results.min_time,
                results.max_time,
                connection,
            )
            TableHandler.update_table_name(table, table_version, connection)
            return (
                msg
                + f" Version has been updated from v{comp_version} to v{table_version}."
            )
        TableHandler.update_version_table(
            version_table,
            table_name,
            comp_version,
            comp_version,
            results.min_time,
            results.max_time,
            connection,
        )
        return msg

    @staticmethod
    def update_version_table(
        version_table: sa.Table,
        table_name: str,
        old_version: str,
        new_version: str,
        min_time: datetime,
        max_time: datetime,
        connection: CalibPipeDatabase,
    ) -> None:
        """Update the version of a table with the new version in the version table of the DB."""
        stmt = (
            sa.update(version_table)
            .where(
                version_table.c.name == table_name,
                version_table.c.version == old_version,
            )
            .values(version=new_version, validity_start=min_time, validity_end=max_time)
        )
        connection.execute(stmt)

    @staticmethod
    def update_table_name(
        table: sa.Table,
        version: str,
        connection: CalibPipeDatabase,
    ) -> None:
        """Update the name of a table with the new version."""
        new_table_name = TableVersionManager.update_version(table.name, version)
        stmt = sa.text(f"ALTER TABLE {table} RENAME TO {new_table_name};")
        connection.execute(stmt)

    @staticmethod
    def prepare_db_tables(containers, db_config):
        """
        Create and upload to the CalibPipe DB empty tables for selected calibration containers.

        Parameters
        ----------
        containers : list[Container]
            list of calibpipe containers or ContainerMeta instances
            that will be created as empty tables in the DB

        db_config : dict
            Calibpipe configuration with database connection configuration
        """
        try:
            with CalibPipeDatabase(**db_config) as connection:
                sql_metadata.reflect(bind=connection.engine, extend_existing=True)

                # Create empty main data tables
                for cp_container in containers:
                    if isinstance(cp_container, Container):
                        db_container = ContainerMap.map_to_db_container(
                            type(cp_container)
                        )
                    else:
                        db_container = ContainerMap.map_to_db_container(cp_container)
                    if not sa.inspect(connection.engine).has_table(
                        db_container.table_name
                    ):
                        db_container.get_table()
                sql_metadata.create_all(bind=connection.engine)
        except sa.exc.DatabaseError:
            raise DBStorageError("Issues with connection to the CalibPipe DB")

    @staticmethod
    def upload_data(
        calibpipe_data_container: Container,
        metadata: list[Container] | None,
        connection: CalibPipeDatabase,
    ) -> None:
        """
        Upload data and optional metadata to the database.

        This method uploads the provided data to the main database table and,
        if provided, associates the metadata with the inserted data row.

        Parameters
        ----------
        calibpipe_data_container : ctapipe.core.Container
            The data container with the data to be uploaded to the main table of the database.

        metadata : list[Container] or None
            Optional list of metadata containers to be uploaded. Should include
            a "ReferenceMetadataContainer" if metadata is provided.

        connection : CalibPipeDatabase
            An active database connection to the CalibPipe database.

        Raises
        ------
        DBStorageError
            If there are issues with the database connection.

        ValueError
            If the main table does not contain a single autoincremented primary key
            or if ReferenceMetadataContainer is missing when metadata is provided.
        """
        data_db_container = ContainerMap.map_to_db_container(
            type(calibpipe_data_container)
        )
        has_autoincrement_pk = any(
            col.autoincrement for col in data_db_container.get_table().c
        )
        is_single_pk = len(data_db_container.get_primary_keys()) == 1

        if not (has_autoincrement_pk and is_single_pk):
            raise ValueError(
                f"Table '{data_db_container.table_name}' "
                "doesn't contain a single autoincremented primary key."
            )

        pk_name = data_db_container.get_primary_keys()[0].name

        try:
            # Insert main data
            table, values = TableHandler.get_database_table_insertion(
                calibpipe_data_container
            )
            TableHandler.insert_row_in_database(table, values, connection)

            # No metadata to upload
            if not metadata:
                return

            # Get the last inserted primary key
            stmt = sa.select(table).order_by(sa.desc(table.c[pk_name])).limit(1)
            last_db_record = connection.execute(stmt).fetchone()
            data_pk_value = last_db_record._asdict()[pk_name]

            # Handle ReferenceMetadataContainer
            reference_meta_container = next(
                (
                    c
                    for c in metadata
                    if c.__class__.__name__ == "ReferenceMetadataContainer"
                ),
                None,
            )
            if reference_meta_container is None:
                raise ValueError("ReferenceMetadataContainer is required in metadata.")

            reference_meta_container.ID_optical_throughput = data_pk_value
            ref_table, ref_values = TableHandler.get_database_table_insertion(
                reference_meta_container
            )
            TableHandler.insert_row_in_database(ref_table, ref_values, connection)

            # Get ReferenceMetadata ID to link to other metadata
            stmt = sa.select(ref_table).order_by(sa.desc(ref_table.c.ID)).limit(1)
            metadata_id = connection.execute(stmt).fetchone().ID

            # Upload other metadata
            for container in metadata:
                if container.__class__.__name__ == "ReferenceMetadataContainer":
                    continue
                container.ID = metadata_id
                meta_table, meta_values = TableHandler.get_database_table_insertion(
                    container
                )
                TableHandler.insert_row_in_database(meta_table, meta_values, connection)

        except sa.exc.DatabaseError:
            raise DBStorageError("Issues with connection to the CalibPipe DB")
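The `condition` string accepted by `read_table_from_database()` deserves a concrete illustration: `c.<column>` placeholders are rewritten to `table.c.<column>` and the result is passed to `eval()`, yielding a SQLAlchemy boolean clause. A runnable sketch against a throwaway table (the table and column names are illustrative only):

import sqlalchemy as sa

metadata = sa.MetaData()
table = sa.Table(
    "demo",
    metadata,
    sa.Column("version", sa.String),
    sa.Column("validity_start", sa.DateTime),
)

# Compound conditions combine parenthesized singletons with & and |.
condition = "(c.version == '1.0.0') & (c.validity_start != None)"
clause = eval(condition.replace("c.", "table.c."))  # mirrors the handler
print(clause)
# demo.version = :version_1 AND demo.validity_start IS NOT NULL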
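The version bookkeeping above hinges on one comparison: `get_compatible_version()` matches on the major component via `LIKE`, while `update_tables_info()` rewrites the stored row (and renames the table) only when the incoming minor component is larger. A small pure-Python sketch of that rule:

def needs_version_bump(stored: str, incoming: str) -> bool:
    # Same comparison as update_tables_info: the minor component of
    # "major.minor.patch" strings, compared numerically.
    return float(incoming.split(".")[1]) > float(stored.split(".")[1])

assert needs_version_bump("1.0.0", "1.2.0")
assert not needs_version_bump("1.2.0", "1.2.5")  # patch changes don't rename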
calibpipe/database/interfaces/types.py
@@ -0,0 +1,96 @@
"""
Type definitions for SQLAlchemy.

These type definitions allow us to define database fields and
containers while staying almost completely decoupled from SQLAlchemy
(no direct coupling).

In particular, SQLColumnInfo and SQLTableInfo use these generic
types and not the sqlalchemy types directly.

The NDArray type is defined explicitly to implement the
serialization/deserialization np.ndarray <-> bytes and the
(optional) zlib compression/decompression of the byte data.
"""

import pickle
import zlib

import numpy as np
import sqlalchemy as sa
import sqlalchemy.sql.sqltypes
from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION

ColumnType = sqlalchemy.sql.sqltypes.TypeEngine

Boolean: ColumnType = sa.Boolean

SmallInteger: ColumnType = sa.SmallInteger
Integer: ColumnType = sa.Integer
BigInteger: ColumnType = sa.BigInteger
Float: ColumnType = sa.Float
Double: ColumnType = DOUBLE_PRECISION
Numeric: ColumnType = sa.Numeric
Binary: ColumnType = sa.types.LargeBinary
String: ColumnType = sa.String

ArrayF1D: ColumnType = ARRAY(Float, dimensions=1)
ArrayF2D: ColumnType = ARRAY(Float, dimensions=2)
ArrayF3D: ColumnType = ARRAY(Float, dimensions=3)

Date: ColumnType = sa.Date
Time: ColumnType = sa.Time
DateTime: ColumnType = sa.DateTime


class NDArray(sa.types.TypeDecorator):  # pylint: disable=too-many-ancestors
    """
    Type for numpy.ndarray binding, including data compression.

    The array is stored as a compressed byte string in the database.
    The class implements the binding between the `np.ndarray` in the
    program memory and the byte string stored in the DB.

    Compression can be removed or modified, but the two process methods
    should be the inverse of each other for the binding to work.
    Ignoring the dialect parameter, which is not used anyway, this means
    that the following assertion should always pass::

        db_arr: NDArray
        arr: np.ndarray
        arr_bytes: bytes = db_arr.process_bind_param(arr)
        recov_arr: np.ndarray = db_arr.process_result_value(arr_bytes)
        assert np.array_equal(arr, recov_arr)

    """

    impl = sa.types.LargeBinary  # Byte storage in the DB
    cache_ok: bool = True  # Results of process methods can be cached

    def process_bind_param(self, value: np.ndarray, dialect) -> bytes:
        """
        Serialize a np.ndarray into a byte object to store in the DB.

        The array is first serialized into bytes and compressed using
        the default zlib compression algorithm.
        """
        return zlib.compress(pickle.dumps(value))

    def process_result_value(self, value: bytes, dialect) -> np.ndarray:
        """
        Deserialize a np.ndarray from bytes read in the DB.

        The bytes are first decompressed and the array is loaded from
        the decompressed byte string.
        """
        return pickle.loads(zlib.decompress(value))

    def process_literal_param(self, value: np.ndarray, dialect) -> str:
        """Representation of the NDArray object."""
        return f"NDArray(shape={value.shape}, dtype={value.dtype})"

    @property
    def python_type(self) -> type:
        """Return the python type of the underlying object represented by the byte string."""
        return np.ndarray
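Because the two process methods of NDArray must invert each other, the round trip can be checked directly, as the class docstring suggests. A runnable sketch, assuming the wheel is installed; the dialect argument is unused, so None is passed:

import numpy as np
from calibpipe.database.interfaces.types import NDArray

nd = NDArray()
arr = np.arange(6, dtype=np.float64).reshape(2, 3)
blob = nd.process_bind_param(arr, None)         # pickle, then zlib-compress
restored = nd.process_result_value(blob, None)  # decompress, then unpickle
assert np.array_equal(arr, restored)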
calibpipe/telescope/throughput/containers.py
@@ -0,0 +1,66 @@
"""Containers to keep optical throughput data and metadata."""

import numpy as np
from astropy.time import Time
from ctapipe.core import Container, Field

NAN_TIME = Time(0, format="mjd", scale="tai")


class OpticalThoughtputContainer(Container):
    """Optical throughput calibration coefficient and analysis results for a single telescope."""

    mean = Field(
        np.nan,
        "Mean optical throughput from the selected calibration method",
        type=np.float64,
        allow_none=False,
    )
    median = Field(
        np.nan,
        "Median optical throughput from the selected calibration method",
        type=np.float64,
        allow_none=False,
    )
    std = Field(
        np.nan,
        "Standard deviation of the optical throughput from the selected calibration method",
        type=np.float64,
        allow_none=False,
    )
    sem = Field(
        np.nan,
        "Standard error of the mean optical throughput from the selected calibration method",
        type=np.float64,
        allow_none=False,
    )
    method = Field(
        "None",
        "Calibration method used",
        type=str,
        allow_none=False,
    )
    time_start = Field(
        NAN_TIME,
        description="Starting timestamp of validity for the selected throughput.",
        type=Time,
        allow_none=False,
    )
    time_end = Field(
        NAN_TIME,
        description="Ending timestamp of validity for the selected throughput.",
        type=Time,
        allow_none=False,
    )
    obs_id = Field(
        -1,
        description="ID of the observation block for validity",
        type=np.int32,
        allow_none=False,
    )
    n_events = Field(
        0,
        description="Number of muon rings used to calculate the throughput",
        type=np.int64,
        allow_none=False,
    )
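For orientation, a hypothetical construction of the container; every value below is made up, and the "muon" method label is an assumption rather than a value mandated by the package:

import numpy as np
from astropy.time import Time
from calibpipe.telescope.throughput.containers import OpticalThoughtputContainer

tput = OpticalThoughtputContainer(
    mean=np.float64(0.82),
    median=np.float64(0.80),
    std=np.float64(0.05),
    sem=np.float64(0.004),
    method="muon",  # hypothetical label for the calibration method
    time_start=Time("2023-01-01T00:00:00", scale="tai"),
    time_end=Time("2023-01-01T08:00:00", scale="tai"),
    obs_id=np.int32(101),
    n_events=np.int64(150),
)
tput.validate()  # enforces the declared Field types and allow_none=False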