ctao_calibpipe-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- calibpipe/__init__.py +5 -0
- calibpipe/_dev_version/__init__.py +9 -0
- calibpipe/_version.py +21 -0
- calibpipe/atmosphere/__init__.py +1 -0
- calibpipe/atmosphere/atmosphere_containers.py +109 -0
- calibpipe/atmosphere/meteo_data_handlers.py +485 -0
- calibpipe/atmosphere/models/README.md +14 -0
- calibpipe/atmosphere/models/__init__.py +1 -0
- calibpipe/atmosphere/models/macobac.ecsv +23 -0
- calibpipe/atmosphere/models/reference_MDPs/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_intermediate.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_atmospheres/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_intermediate.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_intermediate.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_winter.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_winter.ecsv +857 -0
- calibpipe/atmosphere/templates/request_templates/__init__.py +1 -0
- calibpipe/atmosphere/templates/request_templates/copernicus.json +11 -0
- calibpipe/atmosphere/templates/request_templates/gdas.json +12 -0
- calibpipe/core/__init__.py +39 -0
- calibpipe/core/common_metadata_containers.py +195 -0
- calibpipe/core/exceptions.py +87 -0
- calibpipe/database/__init__.py +24 -0
- calibpipe/database/adapter/__init__.py +23 -0
- calibpipe/database/adapter/adapter.py +80 -0
- calibpipe/database/adapter/database_containers/__init__.py +61 -0
- calibpipe/database/adapter/database_containers/atmosphere.py +199 -0
- calibpipe/database/adapter/database_containers/common_metadata.py +148 -0
- calibpipe/database/adapter/database_containers/container_map.py +59 -0
- calibpipe/database/adapter/database_containers/observatory.py +61 -0
- calibpipe/database/adapter/database_containers/table_version_manager.py +39 -0
- calibpipe/database/adapter/database_containers/version_control.py +17 -0
- calibpipe/database/connections/__init__.py +28 -0
- calibpipe/database/connections/calibpipe_database.py +60 -0
- calibpipe/database/connections/postgres_utils.py +97 -0
- calibpipe/database/connections/sql_connection.py +103 -0
- calibpipe/database/connections/user_confirmation.py +19 -0
- calibpipe/database/interfaces/__init__.py +71 -0
- calibpipe/database/interfaces/hashable_row_data.py +54 -0
- calibpipe/database/interfaces/queries.py +180 -0
- calibpipe/database/interfaces/sql_column_info.py +67 -0
- calibpipe/database/interfaces/sql_metadata.py +6 -0
- calibpipe/database/interfaces/sql_table_info.py +131 -0
- calibpipe/database/interfaces/table_handler.py +351 -0
- calibpipe/database/interfaces/types.py +96 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/__init__.py +0 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/contemporary_MDP.ecsv +34 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.csv +852 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.ecsv +23 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/merged_file.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_copernicus.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_gdas.ecsv +66 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/observatory_configurations.json +71 -0
- calibpipe/tests/data/utils/__init__.py +0 -0
- calibpipe/tests/data/utils/meteo_data_winter_and_summer.ecsv +12992 -0
- calibpipe/tests/unittests/atmosphere/astral_testing.py +107 -0
- calibpipe/tests/unittests/atmosphere/test_meteo_data_handler.py +775 -0
- calibpipe/tests/unittests/atmosphere/test_molecular_atmosphere.py +327 -0
- calibpipe/tests/unittests/database/test_table_handler.py +66 -0
- calibpipe/tests/unittests/database/test_types.py +38 -0
- calibpipe/tests/unittests/test_bootstrap_db.py +79 -0
- calibpipe/tests/unittests/utils/test_observatory.py +309 -0
- calibpipe/tools/atmospheric_base_tool.py +78 -0
- calibpipe/tools/atmospheric_model_db_loader.py +181 -0
- calibpipe/tools/basic_tool_with_db.py +38 -0
- calibpipe/tools/contemporary_mdp_producer.py +87 -0
- calibpipe/tools/init_db.py +37 -0
- calibpipe/tools/macobac_calculator.py +82 -0
- calibpipe/tools/molecular_atmospheric_model_producer.py +197 -0
- calibpipe/tools/observatory_data_db_loader.py +71 -0
- calibpipe/tools/reference_atmospheric_model_selector.py +201 -0
- calibpipe/utils/__init__.py +10 -0
- calibpipe/utils/observatory.py +486 -0
- calibpipe/utils/observatory_containers.py +26 -0
- calibpipe/version.py +24 -0
- ctao_calibpipe-0.1.0.dist-info/METADATA +86 -0
- ctao_calibpipe-0.1.0.dist-info/RECORD +93 -0
- ctao_calibpipe-0.1.0.dist-info/WHEEL +5 -0
- ctao_calibpipe-0.1.0.dist-info/entry_points.txt +8 -0
- ctao_calibpipe-0.1.0.dist-info/licenses/AUTHORS.md +13 -0
- ctao_calibpipe-0.1.0.dist-info/licenses/LICENSE +21 -0
- ctao_calibpipe-0.1.0.dist-info/top_level.txt +1 -0
calibpipe/__init__.py
ADDED
@@ -0,0 +1,9 @@
+# Try to use setuptools_scm to get the current version; this is only used
+# in development installations from the git repository.
+# see ../version.py for details
+try:
+    from setuptools_scm import get_version
+
+    version = get_version(root="../../..", relative_to=__file__)
+except Exception as e:
+    raise ImportError(f"setuptools_scm broken or not installed: {e}")

calibpipe/_version.py
ADDED
@@ -0,0 +1,21 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple
+    from typing import Union
+
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '0.1.0'
+__version_tuple__ = version_tuple = (0, 1, 0)

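Example (editor's illustration, not part of the package): since `_version.py` pins the release to 0.1.0, the installed version can also be queried at runtime through the standard library, using the distribution name from the wheel metadata.

```python
# Minimal sketch: read the installed ctao-calibpipe version at runtime.
# Uses only the standard library; shown for illustration, not shipped code.
from importlib.metadata import version

print(version("ctao-calibpipe"))  # expected to print "0.1.0" for this release
```
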
calibpipe/atmosphere/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Sub-package containing classes responsible for the atmosphere calibration."""

calibpipe/atmosphere/atmosphere_containers.py
ADDED
@@ -0,0 +1,109 @@
+"""Containers to keep atmospheric data and metadata."""
+
+# Python built-in imports
+import datetime
+
+# Third-party imports
+import astropy.units as u
+import numpy as np
+from astropy.units.cds import ppm
+
+# CTA-related imports
+from ctapipe.core import Container, Field
+
+REFERENCE_ATMOSPHERIC_MODEL_VER_DESC = "Atmospheric model version"
+
+
+class AtmosphericModelContainer(Container):
+    """Container for the atmospheric models."""
+
+    start = Field(None, "Start of use timestamp")
+    stop = Field(None, "End of use timestamp")
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+    current = Field(
+        True, "Boolean flag showing whether a given model is currently in use"
+    )
+    season = Field(None, "Season alias")
+    name_Observatory = Field(None, "Reference observatory name")  # noqa: N815
+    version_Observatory = Field(None, "Reference observatory configuration version")  # noqa: N815
+
+
+class MacobacContainer(Container):
+    """Container for 12 months average CO2 background concentration."""
+
+    co2_concentration = Field(
+        np.nan * ppm, "12 months average CO2 background concentration", unit=ppm
+    )
+    estimation_date = Field(
+        None, "Date of MACOBAC estimation", type=datetime.date, allow_none=False
+    )
+    version = Field("0.0.0", REFERENCE_ATMOSPHERIC_MODEL_VER_DESC)
+
+
+class MolecularAtmosphericProfileMetaContainer(Container):
+    """
+    Container for molecular atmospheric metadata.
+
+    Container that stores the metadata associated
+    to the molecular atmospheric part of the model.
+    """
+
+    data_assimilation_system = Field("", "Data assimilation system")
+    dataset = Field("", "Dataset of the given data assimilation system")
+    description = Field("", "Optional description field")
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+
+
+class MolecularAtmosphericProfileContainer(Container):
+    """Container for molecular atmospheric profile."""
+
+    altitude = Field(None, "Altitude", unit=u.km, ndim=1)
+    pressure = Field(None, "Pressure", unit=u.hPa, ndim=1)
+    temperature = Field(None, "Temperature", unit=u.K, ndim=1)
+    partial_water_pressure = Field(
+        None,
+        "Partial water vapor pressure, expressed as a fraction of the total pressure",
+        ndim=1,
+    )
+    refractive_index_m_1 = Field(None, "Refractive index N-1", ndim=1)
+    atmospheric_density = Field(
+        None, "Atmospheric density", unit=u.g / (u.cm**3), ndim=1
+    )
+    atmospheric_thickness = Field(
+        None, "Atmospheric thickness", unit=u.g / (u.cm**2), ndim=1
+    )
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+
+
+class MolecularDensityContainer(Container):
+    """Container for molecular density profile."""
+
+    season = Field(None, "Atmospheric model season alias.", ndim=1)
+    density = Field(None, "Molecular number density", unit=1 / (u.cm**3))
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+
+
+class RayleighExtinctionContainer(Container):
+    """Container for Rayleigh extinction profile."""
+
+    wavelength = Field(None, "Wavelength", unit=u.nm, ndim=1, allow_none=False)
+    altitude = Field(None, "Altitude", unit=u.km, ndim=2, allow_none=False)
+    AOD = Field(None, "Absolute Optical Depth (AOD)", ndim=2, allow_none=False)
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+
+
+class SelectedAtmosphericModelContainer(Container):
+    """Container for atmosphere model selection."""
+
+    date = Field(
+        None, "Date of reference model selection.", type=datetime.date, allow_none=False
+    )
+    version = Field(None, REFERENCE_ATMOSPHERIC_MODEL_VER_DESC, allow_none=False)
+    season = Field(None, "Atmospheric model season alias.", ndim=1)
+    site = Field(None, "Observation site name", type=str, allow_none=False)
+    provenance = Field(
+        None,
+        "Model data provenance. Can be `timestamp`, `GDAS` or `ECMWF`.",
+        type=str,
+        allow_none=False,
+    )

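Example (editor's illustration, not part of the package): the containers above are plain `ctapipe.core.Container` subclasses, so fields are set by passing keyword arguments at construction and read back with `as_dict()`. A minimal sketch, assuming the field defaults shown in the diff:

```python
# Minimal usage sketch for the containers defined above (illustration only).
import datetime

from astropy.units.cds import ppm
from calibpipe.atmosphere.atmosphere_containers import MacobacContainer

# Fields are set via keyword arguments; unset fields keep their defaults.
macobac = MacobacContainer(
    co2_concentration=419.3125 * ppm,
    estimation_date=datetime.date(2023, 7, 10),
    version="0.0.0",
)
print(macobac.as_dict())  # e.g. {'co2_concentration': <Quantity 419.3125 ppm>, ...}
```
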
calibpipe/atmosphere/meteo_data_handlers.py
ADDED
@@ -0,0 +1,485 @@
+"""Meteorological data handling module."""
+
+# Python built-in imports
+import copy
+import errno
+import glob
+import importlib.resources
+import json
+import math
+import os
+import shutil
+import tarfile
+import time
+from datetime import datetime, timedelta, timezone
+from zoneinfo import ZoneInfo
+
+# Third-party imports
+import astropy.units as u
+import cdsapi
+import molecularprofiles.utils.grib_utils as gu
+import numpy as np
+import rdams_client as rc
+import requests
+from astropy.coordinates import Latitude, Longitude, angular_separation
+
+# CTA-related imports
+from ctapipe.core.component import Component
+from ctapipe.core.traits import (
+    Float,
+    Int,
+    Path,
+    Unicode,
+)
+
+from ..core.exceptions import IntermittentError
+
+# Internal imports
+from .templates import request_templates
+
+
+class MeteoDataHandler(Component):
+    """Abstract class for meteo data handling."""
+
+    dataset = Unicode(
+        default_value="ds083.2",
+        help="Meteorological dataset name",
+        allow_none=False,
+    ).tag(config=True)
+    gridstep = Float(default_value=1.0, help="Meteo data grid step in degrees").tag(
+        config=True
+    )
+    update_frequency = Int(
+        default_value=6,
+        help="Frequency at which new meteorological data is available in hours",
+    ).tag(config=True)
+    update_tzinfo = Unicode(
+        default_value="UTC",
+        help="IANA-compliant time zone base for the meteo data updates",
+    ).tag(config=True)
+    data_path = Path(
+        default_value="/tmp/meteo_data/",
+        file_ok=False,
+        help="Path where the meteorological data shall be stored",
+    ).tag(config=True)
+    timeout = Int(default_value=600, help="Request timeout limit in seconds").tag(
+        config=True
+    )
+
+    def __init__(self, config=None, parent=None, das=None, **kwargs):
+        super().__init__(config, parent, **kwargs)
+        try:
+            os.makedirs(self.data_path, exist_ok=True)
+        except OSError as error:
+            if error.errno != errno.EEXIST:
+                raise
+        if das is not None:
+            self.request = json.loads(
+                importlib.resources.files(request_templates)
+                .joinpath(f"{das}.json")
+                .read_text()
+            )
+        self.start = None
+        self.stop = None
+
+    def _untar(self):
+        """Unpack compressed grib2 files."""
+        tar_files = list(glob.glob(os.path.join(self.data_path, "*.tar")))
+        for compressed_file in tar_files:
+            with tarfile.open(compressed_file) as my_tar:
+                my_tar.extractall(self.data_path)
+            os.remove(compressed_file)
+
+    def time_rounder(self, timestamp, up=None):
+        """
+        Round a given timestamp to the nearest DAS update timestamp.
+
+        This function rounds the given timestamp to the nearest multiple of the DAS update frequency.
+        The rounding is done with respect to a fixed epoch (2000-01-01 00:00:00 UTC).
+
+        Parameters
+        ----------
+        timestamp : datetime.datetime
+            The timestamp to be rounded.
+        up : bool, optional
+            If True, round up to the next nearest DAS update timestamp.
+            If False, round down to the previous nearest DAS update timestamp.
+
+        Returns
+        -------
+        datetime.datetime
+            The rounded timestamp.
+        """
+        epoch = datetime(
+            2000, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo(self.update_tzinfo)
+        ).astimezone(timezone.utc)
+        multiple = timedelta(hours=self.update_frequency)
+        low = ((timestamp - epoch) // multiple) * multiple
+        high = low + multiple
+        if up is True:
+            return high + epoch
+        if up is False:
+            return low + epoch
+        if abs((high + epoch) - timestamp) < abs(timestamp - (low + epoch)):
+            return high + epoch
+        return low + epoch
+
+    def get_near_gridpoints(self, latitude, longitude):
+        """
+        Get closest meteorological data point and a grid box, surrounding the observatory.
+
+        The interpolation grids of the meteorological systems
+        is assumed to start at (0,0) and be defined w.r.t. WGS84.
+
+        Parameters
+        ----------
+        latitude : astropy.coordinates.Latitude
+            Latitude of the observatory location.
+        longitude : astropy.coordinates.Longitude
+            Longitude of the observatory location.
+
+        Returns
+        -------
+        nearest_grid_point : tuple(float, float)
+            Longitude and latitude of the nearest grid point.
+        box_coordinates : list(tuple(float, float))
+            List of coordinates (longitude, latitude) of four grid points forming a box
+            around the observatory location.
+        """
+        lon = longitude.to_value(u.deg)
+        lat = latitude.to_value(u.deg)
+        box_coordinates = [
+            (
+                Longitude(
+                    angle=math.floor(lon / self.gridstep) * self.gridstep,
+                    unit=u.deg,
+                    wrap_angle=180 * u.deg,
+                ),
+                Latitude(
+                    angle=math.floor(lat / self.gridstep) * self.gridstep, unit=u.deg
+                ),
+            ),
+            (
+                Longitude(
+                    angle=(math.floor(lon / self.gridstep) + 1) * self.gridstep,
+                    unit=u.deg,
+                    wrap_angle=180 * u.deg,
+                ),
+                Latitude(
+                    angle=math.floor(lat / self.gridstep) * self.gridstep, unit=u.deg
+                ),
+            ),
+            (
+                Longitude(
+                    angle=math.floor(lon / self.gridstep) * self.gridstep,
+                    unit=u.deg,
+                    wrap_angle=180 * u.deg,
+                ),
+                Latitude(
+                    angle=(math.floor(lat / self.gridstep) + 1) * self.gridstep,
+                    unit=u.deg,
+                ),
+            ),
+            (
+                Longitude(
+                    angle=(math.floor(lon / self.gridstep) + 1) * self.gridstep,
+                    unit=u.deg,
+                    wrap_angle=180 * u.deg,
+                ),
+                Latitude(
+                    angle=(math.floor(lat / self.gridstep) + 1) * self.gridstep,
+                    unit=u.deg,
+                ),
+            ),
+        ]
+
+        distances = [
+            angular_separation(longitude, latitude, *grid_point).to_value()
+            for grid_point in box_coordinates
+        ]
+
+        nearest_grid_point = box_coordinates[np.argmin(distances)]
+
+        return nearest_grid_point, box_coordinates
+
+    def create_request(self, start, stop, latitude, longitude, nearest_point=True):
+        """To be implemented in the child classes."""
+
+    def request_data(self):
+        """To be implemented in the child classes."""
+
+    def merge_data(self):
+        """Merge meteo data.
+
+        Creates an ecsv file that contains an astropy.core.Table
+        with the meteorological data from grib files,
+        downloaded from DAS.
+        """
+        gu.convert_to_text(self.data_path)
+        gu.merge_ecsv_files(self.data_path)
+
+    def cleanup(self):
+        """Remove temporary files."""
+        shutil.rmtree(self.data_path)
+
+
+class GDASDataHandler(MeteoDataHandler):
+    """GDAS/NCAR meteorological data handler."""
+
+    dataset = Unicode("ds083.2").tag(config=True)
+    gridstep = Float(1.0).tag(config=True)
+    update_frequency = Int(6).tag(config=True)
+
+    def __init__(self, config=None, parent=None, **kwargs):
+        super().__init__(config, parent, das="gdas", **kwargs)
+        rc.setup_logging(self.log.level)
+
+    def create_request(self, start, stop, latitude, longitude, nearest_point=True):
+        """
+        Create a request for GDAS data.
+
+        Parameters
+        ----------
+        start : datetime.datetime
+            The start time for the data request.
+        stop : datetime.datetime
+            The stop time for the data request.
+        latitude : astropy.coordinates.Latitude
+            Latitude of the location for which data is requested.
+        longitude : astropy.coordinates.Longitude
+            Longitude of the location for which data is requested.
+        nearest_point : bool, optional
+            If True, request data for the nearest grid point.
+            If False, request data for a grid box surrounding the location.
+            Default is True.
+        """
+        self.start = self.time_rounder(start, up=True)
+        self.stop = self.time_rounder(stop, up=False)
+        point, box = self.get_near_gridpoints(latitude, longitude)
+        timeseries = (
+            f"{self.start.strftime('%Y%m%d%H%M')}/to/{self.stop.strftime('%Y%m%d%H%M')}"
+        )
+        self.log.debug(timeseries)
+        self.request.update({"dataset": self.dataset})
+        self.request.update({"date": timeseries})
+        if nearest_point:
+            lon = point[0].to_value(unit=u.deg)
+            lat = point[1].to_value(unit=u.deg)
+            self.request.update({"nlat": lat})
+            self.request.update({"slat": lat})
+            self.request.update({"elon": lon})
+            self.request.update({"wlon": lon})
+        else:
+            self.request.update({"nlat": box[3][1].to_value(unit=u.deg)})
+            self.request.update({"slat": box[0][1].to_value(unit=u.deg)})
+            self.request.update({"elon": box[3][0].to_value(unit=u.deg)})
+            self.request.update({"wlon": box[0][0].to_value(unit=u.deg)})
+        self.log.debug("Created DAS request:\n%s", json.dumps(self.request, indent=4))
+
+    def _is_request_ready(self, request_id):
+        """Check if a request is ready.
+
+        Parameters
+        ----------
+        request_id: int
+            Unique identification number of the request for GDAS data.
+
+        Returns
+        -------
+        Boolean
+        """
+        start = time.time()
+        while True:
+            now = time.time()
+            if (now - start) > self.timeout:
+                self.log.error(
+                    "Maximum waiting time for the request exceeded. Exiting..."
+                )
+                return False
+
+            res = rc.get_status(request_id)
+            try:
+                request_status = res["data"]["status"]
+            except KeyError as err:
+                self.log.error("Can't get request status: %s", err)
+                self.log.error("Response content: \n%s", res)
+                rc.purge_request(request_id)
+                return False
+            if request_status == "Completed":
+                return True
+            time.sleep(10)  # Sleep ten seconds before retry
+
+        return False
+
+    def request_data(self):
+        """Request GDAS data."""
+        response = rc.submit_json(self.request)
+        request_id = response.get("data", {}).get("request_id")
+        if request_id is None:
+            self.log.critical(
+                "Request ID can't be retrieved, request can't be purged."
+                "Manual intervention is required to purge the request!\n"
+                "Response content:\n%s",
+                json.dumps(response, indent=4),
+            )
+            self.log.warning("Activating exception scenario")
+            return 1
+        if response.get("http_response") != 200:
+            self.log.error(
+                "Request to GDAS failed with response code %s\nResponse content:\n%s\nRequest content:\n%s\n",
+                response.get("http_response"),
+                json.dumps(response, indent=4),
+                json.dumps(self.request, indent=4),
+            )
+            self.log.error("Purging request")
+            rc.purge_request(request_id)
+            self.log.warning("Activating exception scenario")
+            return 1
+
+        self.log.debug("Response content:\n%s", json.dumps(response, indent=4))
+        if self._is_request_ready(request_id):
+            rc.download(request_id, f"{self.data_path}/")
+            rc.purge_request(request_id)
+            self._untar()
+            self.merge_data()
+            return 0
+        rc.purge_request(request_id)
+        self.log.warning("Activating exception scenario")
+        return 1
+
+
+class ECMWFDataHandler(MeteoDataHandler):
+    """ECMWF/Copernicus meteorological data handler."""
+
+    dataset = Unicode("reanalysis-era5-pressure-levels").tag(config=True)
+    gridstep = Float(0.25).tag(config=True)
+    update_frequency = Int(1).tag(config=True)
+
+    def __init__(self, config=None, parent=None, **kwargs):
+        super().__init__(config, parent, das="copernicus", **kwargs)
+        self.requests = []
+
+    def create_request(self, start, stop, latitude, longitude, nearest_point=False):
+        """
+        Create a request for ECMWF/Copernicus meteorological data.
+
+        This method prepares a request for meteorological data from the ECMWF/Copernicus dataset
+        for a specified time range and location. The request can be for the nearest grid point
+        or a grid box surrounding the specified location.
+
+        Parameters
+        ----------
+        start : datetime.datetime
+            The start time for the data request.
+        stop : datetime.datetime
+            The stop time for the data request.
+        latitude : astropy.coordinates.Latitude
+            Latitude of the location for which data is requested.
+        longitude : astropy.coordinates.Longitude
+            Longitude of the location for which data is requested.
+        nearest_point : bool, optional
+            If True, request data for the nearest grid point.
+            If False, request data for a grid box surrounding the location.
+            Default is False.
+
+        Returns
+        -------
+        None
+        """
+        self.start = self.time_rounder(start, up=True)
+        self.stop = self.time_rounder(stop, up=False)
+        point, box = self.get_near_gridpoints(latitude, longitude)
+        area_of_interest = []
+        if nearest_point:
+            lon = point[0].to_value(unit=u.deg)
+            lat = point[1].to_value(unit=u.deg)
+            area_of_interest = [lat, lon, lat, lon]
+        else:
+            area_of_interest = [
+                box[0][1].to_value(unit=u.deg),
+                box[0][0].to_value(unit=u.deg),
+                box[3][1].to_value(unit=u.deg),
+                box[3][0].to_value(unit=u.deg),
+            ]
+        self.request.update({"area": area_of_interest})
+        years = [str(self.start.year), str(self.stop.year)]
+        months = [str(self.start.month), str(self.stop.month)]
+        days = [str(self.start.day), str(self.stop.day)]
+        if self.start.day == self.stop.day:
+            hours = [
+                [
+                    f"{h:02d}:00"
+                    for h in range(
+                        self.start.hour, self.stop.hour + 1, self.update_frequency
+                    )
+                ]
+            ]
+        else:
+            hours = [
+                [
+                    f"{h:02d}:00"
+                    for h in range(self.start.hour, 24, self.update_frequency)
+                ],
+                [
+                    f"{h:02d}:00"
+                    for h in range(0, self.stop.hour + 1, self.update_frequency)
+                ],
+            ]
+        for i in range(len(hours)):
+            self.request.update({"year": years[i]})
+            self.request.update({"month": months[i]})
+            self.request.update({"day": days[i]})
+            self.request.update({"time": hours[i]})
+            self.requests.append(copy.deepcopy(self.request))
+
+    def request_data(self):
+        """Request ECMWF data.
+
+        The data is requested from the ECMWF/Copernicus server using the cdsapi library.
+
+        """
+        client = cdsapi.Client()
+        for i, request in enumerate(self.requests):
+            self.log.debug(request)
+            client.retrieve(
+                self.dataset, request, f"{self.data_path}/copernicus_{i}.grib"
+            )
+        self.merge_data()
+        return 0
+
+
+class CO2DataHandler(MeteoDataHandler):
+    """Mauna Loa CO2 data handler."""
+
+    dataset = Unicode(
+        "https://scrippsco2.ucsd.edu/assets/data/atmospheric/stations/in_situ_co2/monthly/monthly_in_situ_co2_mlo.csv"  # pylint: disable=line-too-long
+    ).tag(config=True)
+
+    def __init__(self, config=None, parent=None, **kwargs):
+        super().__init__(config, parent, **kwargs)
+
+    def request_data(self):
+        """
+        Request CO2 data from the Mauna Loa Observatory.
+
+        Returns
+        -------
+        int
+            Returns 0 on successful data retrieval.
+
+        Raises
+        ------
+        IntermittentError
+            If the request to the dataset URL times out.
+        """
+        try:
+            response = requests.get(
+                self.dataset, allow_redirects=True, timeout=self.timeout
+            )
+        except requests.exceptions.Timeout:
+            raise IntermittentError(
+                f"Keeling curve server {self.dataset} is not accessible"
+            )
+        with open(f"{self.data_path}/macobac.csv", "wb") as keeling_curve_file:
+            keeling_curve_file.write(response.content)
+        return 0

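Example (editor's illustration, not part of the package): a rough sketch of how one of the handlers above might be driven directly, assuming valid Copernicus CDS credentials in `~/.cdsapirc`; the coordinates are placeholders, not official CTAO site values.

```python
# Illustration only: driving ECMWFDataHandler outside the calibpipe tools.
from datetime import datetime, timezone

import astropy.units as u
from astropy.coordinates import Latitude, Longitude

from calibpipe.atmosphere.meteo_data_handlers import ECMWFDataHandler

handler = ECMWFDataHandler(data_path="/tmp/meteo_data/")
handler.create_request(
    start=datetime(2022, 1, 1, 20, 0, tzinfo=timezone.utc),
    stop=datetime(2022, 1, 2, 6, 0, tzinfo=timezone.utc),
    latitude=Latitude(28.76 * u.deg),     # placeholder latitude
    longitude=Longitude(-17.89 * u.deg),  # placeholder longitude
)
handler.request_data()  # downloads grib files and writes a merged ECSV under data_path
handler.cleanup()       # removes the temporary data directory
```
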
calibpipe/atmosphere/models/README.md
ADDED
@@ -0,0 +1,14 @@
+In this directory we store (mockup) reference atmospheric models. These models are:
+- reference MDPs
+- reference atmospheric models (Corsika simulation inputs)
+- reference ozone profiles
+
+For the moment we have created only the MDPs (the most urgent item for DPPS release 0). The models were created using the dataset "ERA5 monthly averaged data on pressure levels from 1959 to present" provided by the Copernicus service. This choice should be confirmed.
+We provide an example script, Reference_MDP_calculator.py, that produces an MDP for La Palma, intermediate season. The script is meant to be run in the Climate Data Store (CDS) toolbox. It loads the data into the CDS cache, analyses it, and produces a list of scaled molecular number densities per altitude level.
+
+Some notes concerning the mockup reference MDPs:
+1. How many years' worth of data do we need to process? Currently, we process only one year (2022). The processing time in the CDS toolbox is a few minutes. The queue time varies and is independent of the requested data, but usually does not exceed a few minutes either. However, the climate normals are 30 years long.
+2. The current requests download and average the data from a few grid points, e.g. 5 grid points for La Palma. Shall we restrict this to the closest one, as stated in the requirements support document? However, that document was written with different datasets in mind.
+3. The MDPs have been created with nighttime data; however, we considered the same nighttime window for the whole year. That is wrong, but probably good enough for mockup MDPs, and it can be corrected rather easily.
+4. Season definitions: as the requirements support document states, we considered two seasons for Atacama and three for La Palma. Their definition is somewhat reasonable, but of course it needs to be verified.
+5. Atmospheric scale height: the issue is described [here](https://gitlab.cta-observatory.org/cta-array-elements/ccf/mdps/-/issues/24); to resolve it, we need to find the mean atmospheric temperature over the two observatories for every season in question, or simply calculate it using climatological data.

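Example (editor's illustration, not taken from the package): the "scaled molecular number densities" mentioned in the README above are derived from pressure and temperature via the ideal gas law, n = P / (k_B T), and multiplied by an exponential altitude factor so that values at different heights become comparable. The 9.2 km scale height below is an assumed illustrative value, not a number defined by this package.

```python
# Ideal-gas number density scaled by exp(h / H_s); H_s = 9.2 km is assumed here.
import astropy.units as u
import numpy as np
from astropy.constants import k_B


def scaled_number_density(pressure, temperature, altitude, scale_height=9.2 * u.km):
    """Return n(h) * exp(h / H_s) in cm^-3."""
    n = (pressure / (k_B * temperature)).to(u.cm**-3)
    return n * np.exp((altitude / scale_height).to_value(u.dimensionless_unscaled))


# Roughly standard-atmosphere values near 15 km, for illustration only.
print(scaled_number_density(121.1 * u.hPa, 216.0 * u.K, 15 * u.km))
```
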
calibpipe/atmosphere/models/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Collection of reference files for atmospheric models."""

calibpipe/atmosphere/models/macobac.ecsv
ADDED
@@ -0,0 +1,23 @@
+# %ECSV 1.0
+# ---
+# datatype:
+# - {name: co2_concentration, unit: ppm, datatype: float64}
+# - {name: estimation_date, datatype: string}
+# - {name: version, datatype: int64}
+# meta: !!omap
+# - __serialized_columns__:
+#     co2_concentration:
+#       __class__: astropy.units.quantity.Quantity
+#       unit: !astropy.units.Unit {unit: ppm}
+#       value: !astropy.table.SerializedColumn {name: co2_concentration}
+#     estimation_date:
+#       __class__: astropy.time.core.Time
+#       format: iso
+#       in_subfmt: '*'
+#       out_subfmt: '*'
+#       precision: 3
+#       scale: utc
+#       value: !astropy.table.SerializedColumn {name: estimation_date}
+# schema: astropy-2.0
+co2_concentration estimation_date version
+419.3125 "2023-07-10 00:00:00.000" -1

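Example (editor's illustration, not part of the package): `macobac.ecsv` is a standard astropy ECSV table, so it can be read back with the CO2 quantity and the serialized `Time` column reconstructed.

```python
# Illustration only: reading the macobac.ecsv table shown above with astropy.
from astropy.table import Table

table = Table.read("macobac.ecsv", format="ascii.ecsv")
co2 = table["co2_concentration"][0]    # Quantity in ppm
when = table["estimation_date"][0]     # astropy Time (ISO format)
print(co2, when, table["version"][0])  # e.g. 419.3125 ppm 2023-07-10 00:00:00.000 -1
```
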
calibpipe/atmosphere/models/reference_MDPs/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Reference molecular density profiles."""
|