ctao-calibpipe 0.3.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- calibpipe/__init__.py +5 -0
- calibpipe/_dev_version/__init__.py +9 -0
- calibpipe/_version.py +34 -0
- calibpipe/atmosphere/__init__.py +1 -0
- calibpipe/atmosphere/atmosphere_containers.py +109 -0
- calibpipe/atmosphere/meteo_data_handlers.py +485 -0
- calibpipe/atmosphere/models/README.md +14 -0
- calibpipe/atmosphere/models/__init__.py +1 -0
- calibpipe/atmosphere/models/macobac.ecsv +23 -0
- calibpipe/atmosphere/models/reference_MDPs/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_intermediate.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-north_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_summer.ecsv +8 -0
- calibpipe/atmosphere/models/reference_MDPs/ref_density_at_15km_ctao-south_winter.ecsv +8 -0
- calibpipe/atmosphere/models/reference_atmospheres/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_intermediate.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-north_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_summer.ecsv +73 -0
- calibpipe/atmosphere/models/reference_atmospheres/reference_atmo_model_v0_ctao-south_winter.ecsv +73 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/__init__.py +1 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_intermediate.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-north_winter.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_summer.ecsv +857 -0
- calibpipe/atmosphere/models/reference_rayleigh_scattering_profiles/reference_rayleigh_extinction_profile_v0_ctao-south_winter.ecsv +857 -0
- calibpipe/atmosphere/templates/request_templates/__init__.py +1 -0
- calibpipe/atmosphere/templates/request_templates/copernicus.json +11 -0
- calibpipe/atmosphere/templates/request_templates/gdas.json +12 -0
- calibpipe/core/__init__.py +39 -0
- calibpipe/core/common_metadata_containers.py +198 -0
- calibpipe/core/exceptions.py +87 -0
- calibpipe/database/__init__.py +24 -0
- calibpipe/database/adapter/__init__.py +23 -0
- calibpipe/database/adapter/adapter.py +80 -0
- calibpipe/database/adapter/database_containers/__init__.py +63 -0
- calibpipe/database/adapter/database_containers/atmosphere.py +199 -0
- calibpipe/database/adapter/database_containers/common_metadata.py +150 -0
- calibpipe/database/adapter/database_containers/container_map.py +59 -0
- calibpipe/database/adapter/database_containers/observatory.py +61 -0
- calibpipe/database/adapter/database_containers/table_version_manager.py +39 -0
- calibpipe/database/adapter/database_containers/throughput.py +30 -0
- calibpipe/database/adapter/database_containers/version_control.py +17 -0
- calibpipe/database/connections/__init__.py +28 -0
- calibpipe/database/connections/calibpipe_database.py +60 -0
- calibpipe/database/connections/postgres_utils.py +97 -0
- calibpipe/database/connections/sql_connection.py +103 -0
- calibpipe/database/connections/user_confirmation.py +19 -0
- calibpipe/database/interfaces/__init__.py +71 -0
- calibpipe/database/interfaces/hashable_row_data.py +54 -0
- calibpipe/database/interfaces/queries.py +180 -0
- calibpipe/database/interfaces/sql_column_info.py +67 -0
- calibpipe/database/interfaces/sql_metadata.py +6 -0
- calibpipe/database/interfaces/sql_table_info.py +131 -0
- calibpipe/database/interfaces/table_handler.py +333 -0
- calibpipe/database/interfaces/types.py +96 -0
- calibpipe/telescope/throughput/containers.py +66 -0
- calibpipe/tests/conftest.py +274 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/__init__.py +0 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/contemporary_MDP.ecsv +34 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.csv +852 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/macobac.ecsv +23 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/merged_file.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_copernicus.ecsv +1082 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/meteo_data_gdas.ecsv +66 -0
- calibpipe/tests/data/atmosphere/molecular_atmosphere/observatory_configurations.json +71 -0
- calibpipe/tests/data/utils/__init__.py +0 -0
- calibpipe/tests/data/utils/meteo_data_winter_and_summer.ecsv +12992 -0
- calibpipe/tests/test_conftest_data.py +200 -0
- calibpipe/tests/unittests/array/test_cross_calibration.py +412 -0
- calibpipe/tests/unittests/atmosphere/astral_testing.py +107 -0
- calibpipe/tests/unittests/atmosphere/test_meteo_data_handler.py +775 -0
- calibpipe/tests/unittests/atmosphere/test_molecular_atmosphere.py +327 -0
- calibpipe/tests/unittests/database/test_table_handler.py +163 -0
- calibpipe/tests/unittests/database/test_types.py +38 -0
- calibpipe/tests/unittests/telescope/camera/test_calculate_camcalib_coefficients.py +456 -0
- calibpipe/tests/unittests/telescope/camera/test_produce_camcalib_test_data.py +37 -0
- calibpipe/tests/unittests/telescope/throughput/test_muon_throughput_calibrator.py +693 -0
- calibpipe/tests/unittests/test_bootstrap_db.py +79 -0
- calibpipe/tests/unittests/utils/test_observatory.py +309 -0
- calibpipe/tools/atmospheric_base_tool.py +78 -0
- calibpipe/tools/atmospheric_model_db_loader.py +181 -0
- calibpipe/tools/basic_tool_with_db.py +38 -0
- calibpipe/tools/camcalib_test_data.py +374 -0
- calibpipe/tools/camera_calibrator.py +462 -0
- calibpipe/tools/contemporary_mdp_producer.py +87 -0
- calibpipe/tools/init_db.py +37 -0
- calibpipe/tools/macobac_calculator.py +82 -0
- calibpipe/tools/molecular_atmospheric_model_producer.py +197 -0
- calibpipe/tools/muon_throughput_calculator.py +219 -0
- calibpipe/tools/observatory_data_db_loader.py +71 -0
- calibpipe/tools/reference_atmospheric_model_selector.py +201 -0
- calibpipe/tools/telescope_cross_calibration_calculator.py +721 -0
- calibpipe/utils/__init__.py +10 -0
- calibpipe/utils/observatory.py +486 -0
- calibpipe/utils/observatory_containers.py +26 -0
- calibpipe/version.py +24 -0
- ctao_calibpipe-0.3.0rc2.dist-info/METADATA +92 -0
- ctao_calibpipe-0.3.0rc2.dist-info/RECORD +105 -0
- ctao_calibpipe-0.3.0rc2.dist-info/WHEEL +5 -0
- ctao_calibpipe-0.3.0rc2.dist-info/entry_points.txt +12 -0
- ctao_calibpipe-0.3.0rc2.dist-info/licenses/AUTHORS.md +13 -0
- ctao_calibpipe-0.3.0rc2.dist-info/licenses/LICENSE +21 -0
- ctao_calibpipe-0.3.0rc2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
"""Calculate camera calibration coefficients using the FFactor method."""
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
import astropy.units as u
|
|
5
|
+
import h5py
|
|
6
|
+
import numpy as np
|
|
7
|
+
from astropy.table import Column, Table
|
|
8
|
+
from astropy.time import Time
|
|
9
|
+
from ctapipe.core import Tool, ToolConfigurationError
|
|
10
|
+
from ctapipe.core.traits import (
|
|
11
|
+
AstroQuantity,
|
|
12
|
+
Bool,
|
|
13
|
+
Float,
|
|
14
|
+
Int,
|
|
15
|
+
List,
|
|
16
|
+
Path,
|
|
17
|
+
classes_with_traits,
|
|
18
|
+
)
|
|
19
|
+
from ctapipe.io import HDF5MonitoringSource, write_table
|
|
20
|
+
from ctapipe.io.hdf5dataformat import (
|
|
21
|
+
DL1_CAMERA_COEFFICIENTS_GROUP,
|
|
22
|
+
)
|
|
23
|
+
from ctapipe.monitoring import (
|
|
24
|
+
StdOutlierDetector,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
__all__ = [
|
|
28
|
+
"CameraCalibratorTool",
|
|
29
|
+
"NpeStdOutlierDetector",
|
|
30
|
+
]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class NpeStdOutlierDetector(StdOutlierDetector):
    """
    Detect outliers based on the deviation from the expected standard deviation of the number of photoelectrons.

    The clipping interval to set the thresholds for detecting outliers is computed by multiplying
    the configurable factors and the expected standard deviation of the number of photoelectrons. The
    expected standard deviation of the number of photoelectrons is calculated based on the median number
    of photoelectrons and the number of events.
    """

    # Sample size of each statistics chunk; enters the expected-std formula as 1/sqrt(n).
    n_events = Int(
        default_value=2500,
        help="Number of events used for the chunk-wise aggregation of the statistic values of the calibration data.",
    ).tag(config=True)

    relative_qe_dispersion = Float(
        0.07,
        help="Relative (effective) quantum efficiency dispersion of PMs over the camera",
    ).tag(config=True)

    linear_noise_coeff = List(
        trait=Float(),
        default_value=[1.79717813, 1.72458305],
        minlen=1,
        maxlen=2,
        help=(
            "Linear noise coefficients [high gain, low gain] or [single gain] obtained with a fit of the std of the "
            "LST-1 filter scan taken on 2023/05/10."
        ),
    ).tag(config=True)

    linear_noise_offset = List(
        trait=Float(),
        default_value=[0.0231544, -0.00162036639],
        minlen=1,
        maxlen=2,
        help=(
            "Linear noise offsets [high gain, low gain] or [single gain] obtained with a fit of the std of the "
            "LST-1 filter scan taken on 2023/05/10."
        ),
    ).tag(config=True)

    quadratic_noise_coeff = List(
        trait=Float(),
        default_value=[0.000499670969, 0.00142218],
        minlen=1,
        maxlen=2,
        help=(
            "Quadratic noise coefficients [high gain, low gain] or [single gain] obtained with a fit of the std of the "
            "LST-1 filter scan taken on 2023/05/10."
        ),
    ).tag(config=True)

    quadratic_noise_offset = List(
        trait=Float(),
        default_value=[0.0000249034290, 0.0001207],
        minlen=1,
        maxlen=2,
        help=(
            "Quadratic noise offsets [high gain, low gain] or [single gain] obtained with a fit of the std of the "
            "LST-1 filter scan taken on 2023/05/10."
        ),
    ).tag(config=True)

    def __call__(self, column):
        r"""
        Detect outliers based on the deviation from the expected standard deviation of the number of photoelectrons.

        The clipping interval to set the thresholds for detecting outliers is computed by multiplying
        the configurable factors and the expected standard deviation of the number of photoelectrons
        (npe) over the camera. The expected standard deviation of the estimated npe is given by
        ``std_pe_mean = \frac{std_npe}{\sqrt{n_events + (relative_qe_dispersion \cdot npe)^2}}`` where the
        relative_qe_dispersion is mainly due to different detection QE among PMs. However, due to
        the systematics correction associated to the B term, a linear and quadratic noise component
        must be added, these components depend on the sample statistics (n_events).

        Parameters
        ----------
        column : astropy.table.Column
            Column of the calculated the number of photoelectrons using the chunk-wise aggregated statistic values
            of the calibration data of shape (n_entries, n_channels, n_pixels).

        Returns
        -------
        outliers : np.ndarray of bool
            The mask of outliers of shape (n_entries, n_channels, n_pixels) based on the deviation
            from the expected standard deviation of the number of photoelectrons.
        """
        # Median npe per (entry, channel), aggregated over pixels.
        npe_median = np.nanmedian(column, axis=2)
        # Poisson term plus quantum-efficiency dispersion term.
        basic_variance = (
            npe_median / self.n_events + (self.relative_qe_dispersion * npe_median) ** 2
        )
        # The noise coefficients are traitlets Lists (plain Python lists); convert them
        # explicitly to arrays so the per-channel broadcast does not depend on numpy's
        # implicit coercion of list operands by numpy scalars.
        # Linear noise term (per gain channel).
        linear_term = (
            np.asarray(self.linear_noise_coeff) / np.sqrt(self.n_events)
            + np.asarray(self.linear_noise_offset)
        )
        # Quadratic noise term (per gain channel).
        quadratic_term = (
            np.asarray(self.quadratic_noise_coeff) / np.sqrt(self.n_events)
            + np.asarray(self.quadratic_noise_offset)
        )
        # Additional variance from the B-term systematics correction.
        added_variance = (linear_term * npe_median) ** 2 + (
            quadratic_term * npe_median
        ) ** 2
        # Total expected standard deviation of the number of photoelectrons.
        npe_std = np.sqrt(basic_variance + added_variance)
        # Flag pixels whose deviation from the camera median falls outside the
        # configured clipping interval (std_range_factors from the base class).
        deviation = column - npe_median[:, :, np.newaxis]
        outliers = np.logical_or(
            deviation < self.std_range_factors[0] * npe_std[:, :, np.newaxis],
            deviation > self.std_range_factors[1] * npe_std[:, :, np.newaxis],
        )
        return outliers
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class CameraCalibratorTool(Tool):
    """Calculate camera calibration coefficients using the FFactor method."""

    name = "calibpipe-calculate-camcalib-coefficients"
    description = "Calculate camera calibration coefficients using the FFactor method"

    examples = """
    To calculate camera calibration coefficients using the FFactor method, run:

    > calibpipe-calculate-camcalib-coefficients --input_url monitoring.h5 --overwrite
    """

    # Two chunk timestamps closer than this are treated as the same instant.
    timestamp_tolerance = AstroQuantity(
        default_value=u.Quantity(1.0, u.second),
        physical_type=u.physical.time,
        help="Time difference in seconds to consider two timestamps equal.",
    ).tag(config=True)

    faulty_pixels_fraction = Float(
        default_value=0.1,
        allow_none=True,
        help="Minimum fraction of faulty camera pixels to identify regions of trouble.",
    ).tag(config=True)

    # TODO These parameters are temporary and should be read from the metadata
    systematic_correction_path = Path(
        default_value=None,
        allow_none=True,
        exists=True,
        directory_ok=False,
        help=(
            "Temp Fix: Path to systematic correction file "
            "for additional noise component that is proportional to the signal amplitude "
        ),
    ).tag(config=True)

    # TODO These parameters are temporary and should be read from the metadata
    squared_excess_noise_factor = Float(
        1.222, help="Temp Fix: Excess noise factor squared: 1+ Var(gain)/Mean(Gain)**2"
    ).tag(config=True)

    # TODO These parameters are temporary and should be read from the metadata
    window_width = Int(
        12,
        help="Temp Fix: Width of the window used for the image extraction",
    ).tag(config=True)

    overwrite = Bool(
        help="Overwrite the tables of the camera calibration coefficients if they exist"
    ).tag(config=True)

    aliases = {
        ("i", "input_url"): "HDF5MonitoringSource.input_files",
    }

    flags = {
        "overwrite": (
            {"CameraCalibratorTool": {"overwrite": True}},
            "Overwrite existing tables of the camera calibration coefficients",
        ),
    }

    classes = classes_with_traits(HDF5MonitoringSource) + classes_with_traits(
        NpeStdOutlierDetector
    )

    def setup(self):
        """Set up the tool.

        - Set up the monitoring source.
        - Load the systematic correction term B.
        - Configure the outlier detector for the expected standard deviation of the number of photoelectrons.
        """
        # Set up the MonitoringSource
        self.mon_source = self.enter_context(HDF5MonitoringSource(parent=self))
        # Enforce only one input file
        if len(self.mon_source.input_files) != 1:
            raise ToolConfigurationError(
                "CameraCalibratorTool requires exactly one input file."
            )
        # Check if the monitoring source has aggregated pixel statistics
        if not self.mon_source.has_pixel_statistics:
            raise OSError(
                f"Monitoring source '{self.mon_source.input_files[0]}' does not have required pixel statistics."
            )
        # Check if camera calibration coefficients are available in the monitoring source
        # and break if the overwrite is not set. Better than letting the tool run till the end
        # and then break while it tries to write the table.
        if self.mon_source.has_camera_coefficients and self.overwrite is False:
            raise ToolConfigurationError(
                "CameraCalibratorTool: Camera calibration coefficients are already "
                f"available in the monitoring source '{self.mon_source.input_files[0]}'. "
                "Use --overwrite to overwrite the existing tables."
            )
        # Load systematic correction term B; defaults to 0 (no correction)
        # when no correction file is configured.
        self.quadratic_term = 0
        if self.systematic_correction_path is not None:
            with h5py.File(self.systematic_correction_path, "r") as hf:
                self.quadratic_term = np.array(hf["B_term"])
        # Load the outlier detector for the expected standard deviation of the number of photoelectrons.
        # Presence of the detector's section in the config enables it.
        if "NpeStdOutlierDetector" in self.config:
            self.log.info(
                "Applying outlier detection 'NpeStdOutlierDetector' "
                "based on the deviation from the expected standard "
                "deviation of the number of photoelectrons."
            )
            self.outlier_detector = NpeStdOutlierDetector(
                parent=self, subarray=self.mon_source.subarray
            )
        else:
            self.log.info(
                "No outlier detection applied. 'NpeStdOutlierDetector' not in config."
            )
            self.outlier_detector = None

    def start(self):
        """Iterate over the telescope IDs and calculate the camera calibration coefficients."""
        # One result table per telescope, keyed by tel_id; written in finish().
        self.camcalib_table = {}
        # Iterate over the telescope IDs and calculate the camera calibration coefficients
        for tel_id in self.mon_source.subarray.tel_ids:
            # Get the unique timestamp(s) from the tables
            unique_timestamps = self._get_unique_timestamps(
                *self.mon_source.pixel_statistics[tel_id].values()
            )
            # Get the camera monitoring container from the monitoring source
            if self.mon_source.is_simulation:
                cam_mon_con = self.mon_source.get_camera_monitoring_container(tel_id)
            else:
                cam_mon_con = self.mon_source.get_camera_monitoring_container(
                    tel_id=tel_id,
                    time=unique_timestamps,
                    timestamp_tolerance=self.timestamp_tolerance,
                )
            # Concatenate the outlier masks: a pixel is faulty if any statistic's
            # median is NaN in any of the pixel-statistics tables.
            outlier_mask = np.logical_or.reduce(
                [
                    np.isnan(cam_mon_con.pixel_statistics[name]["median"])
                    for name in cam_mon_con.pixel_statistics.keys()
                ]
            )

            # Extract calibration coefficients with F-factor method
            # Calculate the signal (flat-field minus pedestal baseline)
            signal = np.array(
                cam_mon_con.pixel_statistics.flatfield_image["median"]
            ) - np.array(cam_mon_con.pixel_statistics.pedestal_image["median"])
            # Calculate the gain with the excess noise factor must be known from elsewhere
            gain = (
                np.divide(
                    np.array(cam_mon_con.pixel_statistics.flatfield_image["std"]) ** 2
                    - np.array(cam_mon_con.pixel_statistics.pedestal_image["std"]) ** 2,
                    self.squared_excess_noise_factor * signal,
                )
                # Subtract the B-term systematics contribution (0 if unconfigured)
                - self.quadratic_term**2 * signal / self.squared_excess_noise_factor
            )

            # Calculate the number of photoelectrons
            n_pe = np.divide(signal, gain)
            # Absolute gain calibration: camera-median npe per gain channel
            npe_median = np.nanmedian(n_pe, axis=-1, keepdims=True)

            data, units = {}, {}
            # Set the time column to the unique timestamps
            data["time"] = unique_timestamps
            # Conversion factor from signal amplitude to photoelectrons
            data["factor"] = np.divide(npe_median, signal)
            # Pedestal offset
            # TODO: read window_width from metadata
            data["pedestal_offset"] = (
                np.array(cam_mon_con.pixel_statistics.pedestal_image["median"])
                / self.window_width
            )
            # Relative time calibration: per-pixel shift w.r.t. the camera median peak time
            data["time_shift"] = np.array(
                cam_mon_con.pixel_statistics.flatfield_peak_time["median"]
            ) - np.nanmedian(
                np.array(cam_mon_con.pixel_statistics.flatfield_peak_time["median"]),
                axis=-1,
                keepdims=True,
            )
            # Add a new axis if needed so a single chunk has an explicit entry axis
            if unique_timestamps.isscalar:
                outlier_mask = outlier_mask[np.newaxis, ...]
                for key in data.keys():
                    data[key] = data[key][np.newaxis, ...]

            # Apply outlier detection if selected
            if self.outlier_detector is not None:
                # Add a new axis if needed
                if n_pe.ndim == 2:
                    n_pe = n_pe[np.newaxis, ...]

                npe_outliers = self.outlier_detector(Column(data=n_pe, name="n_pe"))
                # Stack the outlier masks with the npe outlier mask
                outlier_mask = np.logical_or(
                    outlier_mask,
                    npe_outliers,
                )
            # Append the column of the new outlier mask
            data["outlier_mask"] = outlier_mask
            # Check if the camera has two gain channels
            if outlier_mask.shape[1] == 2:
                # Combine the outlier mask of both gain channels
                outlier_mask = np.logical_or.reduce(outlier_mask, axis=1)
            # Calculate the fraction of faulty pixels over the camera
            faulty_pixels = (
                np.count_nonzero(outlier_mask, axis=-1) / np.shape(outlier_mask)[-1]
            )
            # Check for valid chunks if the predefined threshold ``faulty_pixels_fraction``
            # is not exceeded and append the is_valid column
            data["is_valid"] = faulty_pixels < self.faulty_pixels_fraction

            # Create the table for the camera calibration coefficients
            self.camcalib_table[tel_id] = Table(data, units=units)

    def finish(self):
        """Write the camera calibration coefficients to the output file."""
        # Write the camera calibration coefficients and their outlier mask
        # to the output file for each telescope. Note: tables are appended to
        # the INPUT monitoring file, not a separate output file.
        for tel_id in self.mon_source.subarray.tel_ids:
            write_table(
                self.camcalib_table[tel_id],
                self.mon_source.input_files[0],
                f"{DL1_CAMERA_COEFFICIENTS_GROUP}/tel_{tel_id:03d}",
                overwrite=self.overwrite,
            )
            self.log.info(
                "DL1 monitoring data was stored in '%s' under '%s'",
                self.mon_source.input_files[0],
                f"{DL1_CAMERA_COEFFICIENTS_GROUP}/tel_{tel_id:03d}",
            )
        self.log.info("Tool is shutting down")

    def _get_unique_timestamps(
        self, pedestal_image_table, flatfield_image_table, flatfield_peak_time_table
    ):
        """
        Extract unique timestamps from the given tables.

        This method collects the start and end timestamps from the provided
        chunks in the pedestal_image, flatfield_image, and flatfield_peak_time
        tables. It then sorts the timestamps and filters them based on the
        specified timestamp tolerance.

        Parameters
        ----------
        pedestal_image_table : astropy.table.Table
            Table containing pedestal image data.
        flatfield_image_table : astropy.table.Table
            Table containing flatfield image data.
        flatfield_peak_time_table : astropy.table.Table
            Table containing flatfield peak time data.

        Returns
        -------
        unique_timestamps : astropy.time.Time
            Unique timestamps sorted and filtered based on the timestamp tolerance.
        """
        # Check if there is a single chunk for all the tables
        if (
            all(
                len(table) == 1
                for table in (
                    pedestal_image_table,
                    flatfield_image_table,
                    flatfield_peak_time_table,
                )
            )
            or self.mon_source.is_simulation
        ):
            # If there is only a single chunk, return the unique timestamp(s) to the start time
            # NOTE(review): only the pedestal and flatfield image start times enter this
            # min(); flatfield_peak_time is not considered here — confirm this is intended.
            return Time(
                min(
                    pedestal_image_table["time_start"][0],
                    flatfield_image_table["time_start"][0],
                )
            )
        # Collect all start and end times in MJD (days)
        timestamps = []
        for mon_table in (
            pedestal_image_table,
            flatfield_image_table,
            flatfield_peak_time_table,
        ):
            # Append timestamps from the start and end of chunks
            timestamps.append(mon_table["time_start"])
            timestamps.append(mon_table["time_end"])
        # Sort the timestamps
        timestamps = np.concatenate(timestamps)
        timestamps.sort()
        # Filter the timestamps based on the timestamp tolerance, walking backwards
        # from the latest timestamp and keeping only points separated by more than
        # the tolerance.
        unique_timestamps = [timestamps[-1]]
        for t in reversed(timestamps[:-1]):
            if (unique_timestamps[-1] - t) > self.timestamp_tolerance:
                unique_timestamps.append(t)
        unique_timestamps.reverse()
        # Ensure that the first unique timestamp is set to the first timestamp of the provided
        # tables if within the timestamp tolerance. It might be that the first chunk starts
        # before the first unique timestamp if they are in the timestamp tolerance.
        # NOTE(review): min(timestamps) is never later than unique_timestamps[0], so this
        # difference is <= 0 and the condition appears to always hold — confirm intent.
        if (min(timestamps) - unique_timestamps[0]) < self.timestamp_tolerance:
            unique_timestamps[0] = min(timestamps)
        return Time(unique_timestamps)
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
def main():
    """Instantiate the camera calibrator tool and run it."""
    tool = CameraCalibratorTool()
    tool.run()


# BUG FIX: the guard previously compared against "main", which is never the
# value of __name__, so the entry point was unreachable when running the file
# as a script. The correct sentinel is "__main__".
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
# noqa: D100
|
|
2
|
+
from ctapipe.core.traits import (
|
|
3
|
+
Dict,
|
|
4
|
+
Unicode,
|
|
5
|
+
)
|
|
6
|
+
from molecularprofiles.molecularprofiles import MolecularProfile
|
|
7
|
+
|
|
8
|
+
from ..core.exceptions import MissingInputDataError
|
|
9
|
+
from ..utils.observatory import Observatory
|
|
10
|
+
from .atmospheric_base_tool import AtmosphericBaseTool
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class CreateMolecularDensityProfile(AtmosphericBaseTool):
    """
    Tool for creating a contemporary Molecular Density Profile (MDP).

    This tool downloads and processes meteorological data from a specified data assimilation system
    for a night, corresponding to the provided timestamp, and produces a molecular density profile.
    This implementation follows the specifications outlined in UC-DPPS-CP-115.
    """

    name = Unicode("CreateMDP")
    description = "Create a contemporary MDP"
    aliases = Dict(
        {
            "timestamp": "CreateMDP.timestamp",
            "output_path": "CreateMDP.output_path",
        }
    )

    def setup(self):
        """Parse configuration and setup the database connection and MeteoDataHandler."""
        super().setup()
        # Result table; filled by start(), written out in finish().
        self.mdp_table = None

    def start(self):
        """
        Download meteorological data and create a molecular density profile.

        This method performs the following operations:
        1. Retrieves the observatory data from the database.
        2. Calculates the astronomical night based on the observatory's coordinates and the provided timestamp.
        3. Creates a data request for the calculated time frame and coordinates.
        4. Attempts to fetch the meteorological data; raises an exception if unavailable.
        5. Generates and saves the molecular density profile to the specified output path.

        Raises
        ------
        MissingInputDataError: If the required meteorological data is not available.
        """
        observatory = Observatory.from_db(
            self.database_configuration,
            site=self.observatory["name"].upper(),
            version=self.observatory["version"],
        )
        latitude, longitude = observatory.coordinates
        # Restrict the meteo-data request to the astronomical night around the timestamp.
        dusk, dawn = observatory.get_astronomical_night(self._timestamp)
        self.data_handler.create_request(
            start=dusk, stop=dawn, latitude=latitude, longitude=longitude
        )
        # request_data() returns a truthy status on failure.
        data_status = self.data_handler.request_data()
        if data_status:
            # BUG FIX: corrected typo "Meteorologocal" in the error message.
            raise MissingInputDataError(
                f"Meteorological data from {self.meteo_data_handler} is not available."
            )

        molecular_profile = MolecularProfile(
            f"{self.data_handler.data_path}/merged_file.ecsv",
            stat_columns=self.DEFAULT_METEO_COLUMNS,
        )
        molecular_profile.get_data()
        self.mdp_table = molecular_profile.create_molecular_density_profile()

    def finish(self):
        """Store the molecular density profile in the output file and perform cleanup."""
        self.mdp_table.write(
            f"{self.output_path}/contemporary_molecular_density_profile.{self.output_format}",
            format=f"{self.output_format}",
        )
        self.log.info("Shutting down.")
        # Remove temporary downloaded meteo data.
        self.data_handler.cleanup()
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def main():
    """Entry point: build the MDP tool and execute its run cycle."""
    CreateMolecularDensityProfile().run()
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# CTA-related imports # noqa: D100
|
|
2
|
+
from ctapipe.core.traits import Unicode
|
|
3
|
+
|
|
4
|
+
from ..database.adapter.database_containers import ContainerMap
|
|
5
|
+
from ..database.interfaces import TableHandler
|
|
6
|
+
|
|
7
|
+
# Internal imports
|
|
8
|
+
from .basic_tool_with_db import BasicToolWithDB
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class CalibPipeDatabaseInitialization(BasicToolWithDB):
    """Tool to create empty data and metadata tables in the CalibPipe DB."""

    name = Unicode("CalibPipeDatabaseInitialization")
    # Fixed typo in the user-facing description ("databased" -> "database").
    description = "Populate an empty database with empty tables."

    def setup(self):
        """Parse configuration, setup the database connection and fetch CalibPipe containers."""
        super().setup()
        # Container classes describe the schema of every CalibPipe table to create.
        self.containers = ContainerMap.get_cp_containers()

    def start(self):
        """Create tables in the database."""
        TableHandler.prepare_db_tables(self.containers, self.database_configuration)

    def finish(self):
        """Log created tables."""
        # Grammar fix in the log message ("was" -> "were"); lazy %-formatting
        # keeps the list comprehension out of the hot path when INFO is disabled.
        self.log.info(
            "Data tables for %s were created and uploaded to CalibPipe DB",
            [container.__name__ for container in self.containers],
        )
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def main():
    """Entry point: instantiate the tool and execute it."""
    CalibPipeDatabaseInitialization().run()
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"""Calculate the average CO2 concentration of the last 12 months (12-MACOBAC)."""
|
|
2
|
+
|
|
3
|
+
# Python built-in imports
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
|
|
6
|
+
# Third-party imports
|
|
7
|
+
import astropy.units as u
|
|
8
|
+
import numpy as np
|
|
9
|
+
from astropy.table import QTable, Table
|
|
10
|
+
from astropy.time import Time
|
|
11
|
+
from astropy.units.cds import ppm
|
|
12
|
+
|
|
13
|
+
# CTA-related imports
|
|
14
|
+
from ctapipe.core import Tool
|
|
15
|
+
from ctapipe.core.traits import Path, Unicode
|
|
16
|
+
|
|
17
|
+
from ..atmosphere.atmosphere_containers import MacobacContainer
|
|
18
|
+
|
|
19
|
+
# Internal imports
|
|
20
|
+
from ..atmosphere.meteo_data_handlers import CO2DataHandler
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class CalculateMACOBAC(Tool):
    """Download Keeling curve data and calculate the average CO2 concentration of the past 12 months."""

    name = Unicode("CalculateMACOBAC")
    description = "Download Keeling curve data and calculate average CO2 concentration of the past 12 months."

    # Destination for the single-row MACOBAC result table (ECSV format).
    output_file = Path(
        "macobac.ecsv", help="Output ecsv file where macobac container will be written"
    ).tag(config=True)

    classes = [CO2DataHandler]

    def setup(self):
        """Create CO2DataHandler."""
        # Register the CDS "ppm" unit so astropy can attach it to the result.
        u.add_enabled_units([ppm])
        self.data_handler = CO2DataHandler(parent=self)
        # Filled in start(); written to disk in finish().
        self.macobac12_table = None

    def start(self):
        """Request meteorological data from Scripps server and compute 12-MACOBAC."""
        self.data_handler.request_data()
        # Parse the downloaded CSV with the pandas backend; lines starting
        # with '"' are treated as comments (header/preamble of the file).
        macobac_table = Table.read(
            f"{self.data_handler.data_path}/macobac.csv",
            comment='"',
            skipinitialspace=True,
            format="pandas.csv",
        )
        # "-99.99" marks missing monthly values in the source file; drop them.
        mask = macobac_table["CO2"].value != "-99.99"
        # Reverse so newest entries come first, then keep the 12 most recent
        # valid months (assumes the file is in chronological order —
        # TODO confirm against the Scripps data layout).
        co2_values = macobac_table[mask][::-1][0:12]["CO2"].data
        # NOTE(review): the second ".data" suggests co2_values is expected to
        # be a masked array whose underlying ndarray is extracted here —
        # confirm the CO2 column is read as a MaskedColumn.
        macobac12 = np.mean(co2_values.data.astype(float)) * ppm
        self.log.debug(
            "CO2 average atmospheric concentration for the previous 12 months: %f",
            macobac12,
        )
        # Wrap the scalar result plus today's date (UTC, date-only precision)
        # into the container that defines the output schema.
        macobac12_container = MacobacContainer(
            co2_concentration=macobac12,
            estimation_date=Time(
                str(datetime.now(timezone.utc).date()), out_subfmt="date"
            ),
        )
        # Single-row QTable so units (ppm) survive the ECSV round-trip.
        self.macobac12_table = QTable(
            names=macobac12_container.keys(),
            rows=[macobac12_container.values()],
        )

    def finish(self):
        """Store results and perform the cleanup."""
        self.log.info("Storing the results and performing the cleanup.")
        # serialize_method keeps the date as its formatted string rather than
        # the full astropy Time representation.
        self.macobac12_table.write(
            self.output_file,
            format="ascii.ecsv",
            serialize_method={"estimation_date": "formatted_value"},
        )
        self.data_handler.cleanup()
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def main():
    """Entry point: instantiate the tool and execute it."""
    CalculateMACOBAC().run()
|