gammasimtools 0.5.1__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/METADATA +80 -28
- gammasimtools-0.6.1.dist-info/RECORD +91 -0
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/WHEEL +1 -1
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/entry_points.txt +4 -2
- simtools/_version.py +14 -2
- simtools/applications/add_file_to_db.py +2 -1
- simtools/applications/compare_cumulative_psf.py +10 -15
- simtools/applications/db_development_tools/add_new_parameter_to_db.py +12 -6
- simtools/applications/derive_mirror_rnda.py +95 -71
- simtools/applications/generate_corsika_histograms.py +216 -131
- simtools/applications/generate_default_metadata.py +110 -0
- simtools/applications/generate_simtel_array_histograms.py +192 -0
- simtools/applications/get_file_from_db.py +1 -1
- simtools/applications/get_parameter.py +3 -3
- simtools/applications/make_regular_arrays.py +89 -93
- simtools/applications/{plot_layout_array.py → plot_array_layout.py} +15 -14
- simtools/applications/print_array_elements.py +81 -34
- simtools/applications/produce_array_config.py +2 -2
- simtools/applications/production.py +39 -5
- simtools/applications/sim_showers_for_trigger_rates.py +26 -30
- simtools/applications/simulate_prod.py +49 -107
- simtools/applications/submit_data_from_external.py +8 -10
- simtools/applications/tune_psf.py +16 -18
- simtools/applications/validate_camera_efficiency.py +63 -9
- simtools/applications/validate_camera_fov.py +9 -13
- simtools/applications/validate_file_using_schema.py +127 -0
- simtools/applications/validate_optics.py +13 -15
- simtools/camera_efficiency.py +73 -80
- simtools/configuration/commandline_parser.py +52 -22
- simtools/configuration/configurator.py +98 -33
- simtools/constants.py +9 -0
- simtools/corsika/corsika_config.py +28 -22
- simtools/corsika/corsika_default_config.py +282 -0
- simtools/corsika/corsika_histograms.py +328 -282
- simtools/corsika/corsika_histograms_visualize.py +162 -163
- simtools/corsika/corsika_runner.py +8 -4
- simtools/corsika_simtel/corsika_simtel_runner.py +18 -23
- simtools/data_model/data_reader.py +129 -0
- simtools/data_model/metadata_collector.py +346 -118
- simtools/data_model/metadata_model.py +123 -218
- simtools/data_model/model_data_writer.py +79 -22
- simtools/data_model/validate_data.py +96 -46
- simtools/db_handler.py +67 -42
- simtools/io_operations/__init__.py +0 -0
- simtools/io_operations/hdf5_handler.py +112 -0
- simtools/{io_handler.py → io_operations/io_handler.py} +51 -22
- simtools/job_execution/job_manager.py +1 -1
- simtools/layout/{layout_array.py → array_layout.py} +168 -199
- simtools/layout/geo_coordinates.py +196 -0
- simtools/layout/telescope_position.py +12 -12
- simtools/model/array_model.py +16 -14
- simtools/model/camera.py +5 -8
- simtools/model/mirrors.py +136 -73
- simtools/model/model_utils.py +1 -69
- simtools/model/telescope_model.py +32 -25
- simtools/psf_analysis.py +26 -19
- simtools/ray_tracing.py +54 -26
- simtools/schemas/data.metaschema.yml +400 -0
- simtools/schemas/metadata.metaschema.yml +566 -0
- simtools/simtel/simtel_config_writer.py +14 -5
- simtools/simtel/simtel_histograms.py +266 -83
- simtools/simtel/simtel_runner.py +8 -7
- simtools/simtel/simtel_runner_array.py +7 -8
- simtools/simtel/simtel_runner_camera_efficiency.py +48 -2
- simtools/simtel/simtel_runner_ray_tracing.py +61 -25
- simtools/simulator.py +43 -50
- simtools/utils/general.py +232 -286
- simtools/utils/geometry.py +163 -0
- simtools/utils/names.py +294 -142
- simtools/visualization/legend_handlers.py +115 -9
- simtools/visualization/visualize.py +13 -13
- gammasimtools-0.5.1.dist-info/RECORD +0 -83
- simtools/applications/plot_simtel_histograms.py +0 -120
- simtools/applications/validate_schema_files.py +0 -135
- simtools/corsika/corsika_output_visualize.py +0 -345
- simtools/data_model/validate_schema.py +0 -285
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/LICENSE +0 -0
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/top_level.txt +0 -0
simtools/data_model/validate_data.py
CHANGED
@@ -30,7 +30,7 @@ class DataValidator:

     """

-    def __init__(self, schema_file=None, data_file=None):
+    def __init__(self, schema_file=None, data_file=None, data_table=None):
         """
         Initialize validation class and read required reference data columns

@@ -38,11 +38,11 @@ class DataValidator:

         self._logger = logging.getLogger(__name__)

-        self.
-        self.
+        self.data_file_name = data_file
+        self.schema_file_name = schema_file
         self._reference_data_columns = None
         self.data = None
-        self.data_table =
+        self.data_table = data_table

     def validate_and_transform(self):
         """
@@ -53,13 +53,22 @@ class DataValidator:
         data: dict or astropy.table
             Data dict or table

+        Raises
+        ------
+        TypeError
+            if no data or data table is available
+
         """

-        self.
+        if self.data_file_name:
+            self.validate_data_file()
         if isinstance(self.data, dict):
             self._validate_data_dict()
-
+        elif isinstance(self.data_table, Table):
             self._validate_data_table()
+        else:
+            self._logger.error("No data or data table to validate")
+            raise TypeError

         return self.data_table

@@ -72,12 +81,12 @@ class DataValidator:
         """

         try:
-            if Path(self.
-                self.data = gen.
-                self._logger.info(f"
+            if Path(self.data_file_name).suffix in (".yml", ".yaml"):
+                self.data = gen.collect_data_from_file_or_dict(self.data_file_name, None)
+                self._logger.info(f"Validating data from: {self.data_file_name}")
             else:
-                self.data_table = Table.read(self.
-                self._logger.info(f"
+                self.data_table = Table.read(self.data_file_name, guess=True, delimiter=r"\s")
+                self._logger.info(f"Validating tabled data from: {self.data_file_name}")
         except (AttributeError, TypeError):
             pass

@@ -90,7 +99,7 @@ class DataValidator:

         try:
             self._reference_data_columns = self._read_validation_schema(
-                self.
+                self.schema_file_name, self.data["name"]
             )
             _quantities = []
             for value, unit in zip(self.data["value"], self.data["units"]):
@@ -113,11 +122,11 @@ class DataValidator:
         """

         try:
-            self._reference_data_columns = self._read_validation_schema(self.
+            self._reference_data_columns = self._read_validation_schema(self.schema_file_name)[
                 0
             ].get("table_columns", None)
         except IndexError:
-            self._logger.error(f"Error reading validation schema from {self.
+            self._logger.error(f"Error reading validation schema from {self.schema_file_name}")
             raise

         if self._reference_data_columns is not None:
@@ -129,8 +138,8 @@ class DataValidator:
         """
         Validate that
         - required data columns are available
-        -
-        -
+        - columns are in the correct units (if necessary apply a unit conversion)
+        - ranges (minimum, maximum) are correct.

         This is not applied to columns of type 'string'.

@@ -138,15 +147,17 @@ class DataValidator:

         self._check_required_columns()

-        for
-
+        for col_name in self.data_table.colnames:
+            col = self.data_table[col_name]
+            if not self._get_reference_data_column(col_name, status_test=True):
                 continue
             if not np.issubdtype(col.dtype, np.number):
                 continue
-            self._check_for_not_a_number(col)
-            self.
-
-            self._check_range(
+            self._check_for_not_a_number(col, col_name)
+            self._check_data_type(col, col_name)
+            col = self._check_and_convert_units(col, col_name)
+            self._check_range(col_name, np.nanmin(col.data), np.nanmax(col.data), "allowed_range")
+            self._check_range(col_name, np.nanmin(col.data), np.nanmax(col.data), "required_range")

     def _check_required_columns(self):
         """
@@ -160,7 +171,7 @@ class DataValidator:
         """

         for entry in self._reference_data_columns:
-            if entry.get("
+            if entry.get("required", False):
                 if entry["name"] in self.data_table.columns:
                     self._logger.debug(f"Found required data column {entry['name']}")
                 else:
@@ -231,7 +242,7 @@ class DataValidator:
             else:
                 self._logger.error(
                     "Failed removal of duplication for column "
-                    f"{_column_with_unique_requirement}, values are not
+                    f"{_column_with_unique_requirement}, values are not unique"
                 )
                 raise ValueError

@@ -283,14 +294,45 @@ class DataValidator:

         return u.Unit(reference_unit)

-    def
+    def _check_data_type(self, col, column_name):
+        """
+        Check column data type.
+
+        Parameters
+        ----------
+        col: astropy.column or Quantity
+            data column to be converted
+        column_name: str
+            column name
+
+        Raises
+        ------
+        TypeError
+            if data type is not correct
+
+        """
+
+        reference_dtype = self._get_reference_data_column(column_name).get("type", None)
+
+        if not np.issubdtype(col.dtype, reference_dtype):
+            self._logger.error(
+                f"Invalid data type in column '{column_name}'. "
+                f"Expected type '{reference_dtype}', found '{col.dtype}'"
+            )
+            raise TypeError
+
+        self._logger.debug(f"Data column '{column_name}' has correct data type")
+
+    def _check_for_not_a_number(self, col, col_name):
         """
         Check that column values are finite and not NaN.

         Parameters
         ----------
-        col: astropy.column
+        col: astropy.column or Quantity
             data column to be converted
+        col_name: str
+            column name

         Returns
         -------
@@ -305,11 +347,11 @@ class DataValidator:
         """

         if np.isnan(col.data).any():
-            self._logger.info(f"Column {
+            self._logger.info(f"Column {col_name} contains NaN.")
         if np.isinf(col.data).any():
-            self._logger.info(f"Column {
+            self._logger.info(f"Column {col_name} contains infinite value.")

-        entry = self._get_reference_data_column(
+        entry = self._get_reference_data_column(col_name)
         if "allow_nan" in entry.get("input_processing", {}):
             return np.isnan(col.data).any() or np.isinf(col.data).any()

@@ -319,7 +361,7 @@ class DataValidator:

         return False

-    def _check_and_convert_units(self, col):
+    def _check_and_convert_units(self, col, col_name):
         """
         Check that all columns have an allowed unit. Convert to reference unit (e.g., Angstrom to
         nm).
@@ -332,8 +374,10 @@ class DataValidator:

         Parameters
         ----------
-        col: astropy.column
+        col: astropy.column or Quantity
             data column to be converted
+        col_name: str
+            column name

         Returns
         -------
@@ -347,30 +391,26 @@ class DataValidator:

         """

-        self._logger.debug(f"Checking data column '{
+        self._logger.debug(f"Checking data column '{col_name}'")

         try:
-            reference_unit = self._get_reference_unit(
+            reference_unit = self._get_reference_unit(col_name)
             if col.unit is None or col.unit == "dimensionless":
                 col.unit = u.dimensionless_unscaled
                 return col

             self._logger.debug(
-                f"Data column '{
+                f"Data column '{col_name}' with reference unit "
                 f"'{reference_unit}' and data unit '{col.unit}'"
             )
-
-            col.convert_unit_to(reference_unit)
-
+            return u.Quantity(col).to(reference_unit)
         except u.core.UnitConversionError:
             self._logger.error(
-                f"Invalid unit in data column '{
+                f"Invalid unit in data column '{col_name}'. "
                 f"Expected type '{reference_unit}', found '{col.unit}'"
            )
             raise

-        return col
-
     def _check_range(self, col_name, col_min, col_max, range_type="allowed_range"):
         """
         Check that column data is within allowed range or required range. Assumes that column and
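The `_check_and_convert_units` rewrite above no longer mutates the column in place via `col.convert_unit_to(...)`; it wraps the column in an astropy quantity and returns the converted result. A minimal sketch of the new behavior using plain astropy (the values and units here are illustrative, not simtools code):

```python
import astropy.units as u
import numpy as np

# A column-like quantity, e.g. wavelengths stored in Angstrom.
col = u.Quantity(np.array([3000.0, 4000.0]), unit=u.AA)

# New pattern from the diff: build a Quantity and convert, returning a
# new object instead of changing `col` in place.
converted = u.Quantity(col).to(u.nm)
print(converted)  # [300. 400.] nm

# An incompatible reference unit raises u.UnitConversionError,
# which the validator logs and re-raises.
```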
@@ -434,7 +474,7 @@ class DataValidator:
     def _interval_check(data, axis_range, range_type):
         """
         Check that values are inside allowed range (range_type='allowed_range') or span at least
-        the given
+        the given interval (range_type='required_range').

         Parameters
         ----------
@@ -480,15 +520,20 @@ class DataValidator:
         dict
             validation schema

+        Raises
+        ------
+        KeyError
+            if 'data' can not be read from dict in schema file
+
         """

         try:
             if Path(schema_file).is_dir():
-                return gen.
-
-
+                return gen.collect_data_from_file_or_dict(
+                    file_name=Path(schema_file) / (parameter + ".schema.yml"),
+                    in_dict=None,
                 )["data"]
-            return gen.
+            return gen.collect_data_from_file_or_dict(file_name=schema_file, in_dict=None)["data"]
         except KeyError:
             self._logger.error(f"Error reading validation schema from {schema_file}")
             raise
@@ -502,11 +547,16 @@ class DataValidator:
         ----------
         column_name: str
             Column name.
+        status_test: bool
+            Test if reference column exists.

         Returns
         -------
         dict
-            Reference schema column.
+            Reference schema column (for status_test==False).
+        bool
+            True if reference column exists (for status_test==True).
+

         Raises
         ------
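Taken together, these changes let callers hand `DataValidator` an in-memory table instead of a file path. A minimal usage sketch based only on the signatures shown in this diff; the schema and data file names are hypothetical:

```python
from astropy.table import Table

from simtools.data_model.validate_data import DataValidator

# Validate a file on disk (YAML vs. tabular input is dispatched on the file suffix).
validator = DataValidator(
    schema_file="mirror_list.schema.yml",  # hypothetical schema file
    data_file="mirror_list.ecsv",          # hypothetical data file
)
validated_table = validator.validate_and_transform()

# New in 0.6.1: pass an in-memory astropy Table directly via data_table.
table = Table({"wavelength": [300.0, 400.0]})
validator = DataValidator(schema_file="mirror_list.schema.yml", data_table=table)
validated_table = validator.validate_and_transform()

# With neither data nor data_table available, validate_and_transform raises TypeError.
```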
simtools/db_handler.py
CHANGED
@@ -12,8 +12,7 @@ from pymongo import MongoClient
 from pymongo.errors import BulkWriteError

 import simtools.utils.general as gen
-from simtools import io_handler
-from simtools.model.model_utils import get_telescope_class
+from simtools.io_operations import io_handler
 from simtools.utils import names

 __all__ = ["DatabaseHandler"]
@@ -46,7 +45,7 @@ class DatabaseHandler:
     DB_REFERENCE_DATA = "CTA-Reference-Data"
     DB_DERIVED_VALUES = "CTA-Simulation-Model-Derived-Values"

-    ALLOWED_FILE_EXTENSIONS = [".dat", ".txt", ".lis", ".cfg", ".yml", ".ecsv"]
+    ALLOWED_FILE_EXTENSIONS = [".dat", ".txt", ".lis", ".cfg", ".yml", ".yaml", ".ecsv"]

     db_client = None

@@ -69,32 +68,14 @@ class DatabaseHandler:

         self._set_up_connection()

-    def set_mongo_db_config(self, mongo_db_config):
-        """
-        Set the MongoDB config and open the connection to the DB.
-
-        Parameters
-        ----------
-        mongo_db_config: dict
-            Dictionary with the MongoDB configuration.
-
-        Raises
-        ------
-        KeyError
-            if there is non-valid key in the db_config.
-
-        """
-
-        self.mongo_db_config = mongo_db_config
-        self._set_up_connection()
-
     def _set_up_connection(self):
         """
         Open the connection to MongoDB.
         """
         if self.mongo_db_config:
             if DatabaseHandler.db_client is None:
-
+                lock = Lock()
+                with lock:
                     DatabaseHandler.db_client = self._open_mongo_db()

     def _open_mongo_db(self):
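The `_set_up_connection` change guards the one-time creation of the class-level `db_client` with a lock (presumably `threading.Lock`; the import is not shown in this hunk). A minimal standalone sketch of the usual double-checked form of this pattern, not simtools code:

```python
from threading import Lock


class Client:
    _instance = None
    _lock = Lock()  # one lock shared by all callers

    @classmethod
    def get(cls):
        # Cheap unlocked check first; take the lock only for initialization.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:  # re-check so only one thread initializes
                    cls._instance = object()  # stand-in for an expensive DB connection
        return cls._instance
```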
@@ -237,6 +218,9 @@ class DatabaseHandler:
         for info in parameters.values():
             if not info["File"]:
                 continue
+            if Path(dest).joinpath(info["Value"]).exists():
+                self._logger.debug(f"File {info['Value']} already exists in {dest}")
+                continue
             file = self._get_file_mongo_db(
                 DatabaseHandler.DB_CTA_SIMULATION_MODEL, info["Value"]
             )
@@ -279,7 +263,7 @@ class DatabaseHandler:

             raise

-        dest_file.write_text(file.read_text())
+        dest_file.write_text(file.read_text(encoding="utf-8"), encoding="utf-8")

     def _get_model_parameters_yaml(
         self, telescope_model_name, model_version, only_applicable=False
@@ -302,8 +286,8 @@ class DatabaseHandler:

         """

-        _tel_class = get_telescope_class(telescope_model_name)
-        _tel_name_converted = names.
+        _tel_class = names.get_telescope_class(telescope_model_name)
+        _tel_name_converted = names.convert_telescope_model_name_to_yaml_name(telescope_model_name)

         if _tel_class == "MST":
             # MST-FlashCam or MST-NectarCam
@@ -361,7 +345,7 @@ class DatabaseHandler:

         _site_validated = names.validate_site_name(site)
         _tel_name_db = self._get_telescope_model_name_for_db(_site_validated, telescope_model_name)
-        _tel_class = get_telescope_class(telescope_model_name)
+        _tel_class = names.get_telescope_class(telescope_model_name)

         self._logger.debug(f"Tel_name_db: {_tel_name_db}")
         self._logger.debug(f"Tel_class: {_tel_class}")
@@ -881,7 +865,7 @@ class DatabaseHandler:
         try:
             collection.insert_many(db_entries)
         except BulkWriteError as exc:
-            raise BulkWriteError.details from exc
+            raise BulkWriteError(str(exc.details)) from exc

     def copy_documents(self, db_name, collection, query, db_to_copy_to, collection_to_copy_to=None):
         """
@@ -931,7 +915,7 @@ class DatabaseHandler:
         try:
             _collection.insert_many(db_entries)
         except BulkWriteError as exc:
-            raise BulkWriteError.details from exc
+            raise BulkWriteError(str(exc.details)) from exc

     def delete_query(self, db_name, collection, query):
         """
@@ -1281,8 +1265,12 @@ class DatabaseHandler:

         db_entry["Version"] = version
         db_entry["Parameter"] = parameter
-
-
+
+        _base_value, _base_unit, _base_type = gen.get_value_unit_type(value)
+        db_entry["Value"] = _base_value
+        if _base_unit is not None:
+            db_entry["units"] = _base_unit
+        db_entry["Type"] = kwargs["Type"] if "Type" in kwargs else _base_type

         files_to_add_to_db = set()
         db_entry["File"] = False
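The block above now derives the stored value, unit, and type via `gen.get_value_unit_type`. Its implementation lives in `simtools/utils/general.py` and is not part of this diff; the following is a hypothetical sketch of what such a helper could return, matching only the three-tuple unpacking shown above:

```python
import astropy.units as u


def get_value_unit_type(value):
    """Hypothetical: split a value into (bare value, unit string or None, type name)."""
    if isinstance(value, u.Quantity):
        return value.value, str(value.unit), type(value.value).__name__
    return value, None, type(value).__name__


print(get_value_unit_type(5.0 * u.m))  # (5.0, 'm', 'float')
print(get_value_unit_type(42))         # (42, None, 'int')
```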
@@ -1308,24 +1296,24 @@ class DatabaseHandler:

     def _convert_version_to_tagged(self, model_version, db_name):
         """Convert to tagged version, if needed."""
-        if model_version in ["
+        if model_version in ["Released", "Latest"]:
             return self._get_tagged_version(db_name, model_version)

         return model_version

     @staticmethod
-    def _get_tagged_version(db_name, version="
+    def _get_tagged_version(db_name, version="Released"):
         """
-        Get the tag of the "
-        The "
-        the latest is the latest tag (not necessarily stable, but can be equivalent to "
+        Get the tag of the "Released" or "Latest" version of the MC Model.
+        The "Released" is the latest stable MC Model,
+        the latest is the latest tag (not necessarily stable, but can be equivalent to "Released").

         Parameters
         ----------
         db_name: str
             the name of the DB
         version: str
-            Can be "
+            Can be "Released" or "Latest" (default: "Released").

         Returns
         -------
@@ -1335,12 +1323,12 @@ class DatabaseHandler:
         Raises
         ------
         ValueError
-            if version not valid. Valid versions are: '
+            if version not valid. Valid versions are: 'Released' and 'Latest'.

         """

-        if version not in ["
-            raise ValueError('The only default versions are "
+        if version not in ["Released", "Latest"]:
+            raise ValueError('The only default versions are "Released" or "Latest"')

         collection = DatabaseHandler.db_client[db_name].metadata
         query = {"Entry": "Simulation-Model-Tags"}
@@ -1385,9 +1373,9 @@ class DatabaseHandler:
         self._logger.warning(
             f"The file {kwargs['filename']} exists in the DB. Returning its ID"
         )
-        return file_system.find_one(
+        return file_system.find_one(  # pylint: disable=protected-access
             {"filename": kwargs["filename"]}
-        )._id
+        )._id
         with open(file_name, "rb") as data_file:
             return file_system.put(data_file, **kwargs)

@@ -1453,3 +1441,40 @@ class DatabaseHandler:
         self._logger.warning(f"The query {query} did not return any results. No versions found")

         return _all_versions
+
+    def get_all_available_telescopes(
+        self,
+        db_name=DB_CTA_SIMULATION_MODEL,
+        model_version="Released",
+    ):
+        """
+        Get all available telescope names in the collection "telescopes" in the DB.
+
+        Parameters
+        ----------
+        db_name: str
+            the name of the DB
+        model_version: str
+            Which version to get the telescopes of (default: "Released").
+
+        Returns
+        -------
+        all_available_telescopes: list
+            List of all telescope names found
+
+        """
+
+        collection = DatabaseHandler.db_client[db_name]["telescopes"]
+
+        _model_version = self._convert_version_to_tagged(
+            names.validate_model_version_name(model_version),
+            DatabaseHandler.DB_CTA_SIMULATION_MODEL,
+        )
+
+        query = {
+            "Version": _model_version,
+        }
+
+        _all_available_telescopes = collection.find(query).distinct("Telescope")
+
+        return _all_available_telescopes
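A minimal usage sketch for the new `get_all_available_telescopes` method, assuming the constructor accepts a `mongo_db_config` dict as the attribute usage in `_set_up_connection` suggests; the config contents are deployment-specific and omitted here:

```python
from simtools.db_handler import DatabaseHandler

db_config = {}  # deployment-specific MongoDB settings (hypothetical contents)
db = DatabaseHandler(mongo_db_config=db_config)

# Distinct telescope names found under the tagged "Released" model version.
telescopes = db.get_all_available_telescopes(model_version="Released")
print(telescopes)
```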
simtools/io_operations/hdf5_handler.py
ADDED
@@ -0,0 +1,112 @@
+import logging
+from pathlib import PosixPath
+
+import astropy.units as u
+import tables
+from astropy.table import Table
+from ctapipe.io import read_table
+
+from simtools.utils.names import sanitize_name
+
+__all__ = [
+    "fill_hdf5_table",
+    "read_hdf5",
+]
+
+_logger = logging.getLogger(__name__)
+
+
+def fill_hdf5_table(hist, x_bin_edges, y_bin_edges, x_label, y_label, meta_data):
+    """
+    Create and fill an hdf5 table with the histogram information.
+    It works for both 1D and 2D distributions.
+
+    Parameters
+    ----------
+    hist: numpy.ndarray
+        The counts of the histograms.
+    x_bin_edges: numpy.array
+        The x bin edges of the histograms.
+    y_bin_edges: numpy.array
+        The y bin edges of the histograms.
+        Use None for 1D histograms.
+    x_label: str
+        X bin edges label.
+    y_label: str
+        Y bin edges label.
+        Use None for 1D histograms.
+    meta_data: dict
+        Dictionary with the histogram metadata.
+    """
+
+    # Complement metadata
+    if x_label is not None:
+        meta_data["x_bin_edges"] = sanitize_name(x_label)
+    meta_data["x_bin_edges_unit"] = (
+        x_bin_edges.unit if isinstance(x_bin_edges, u.Quantity) else u.dimensionless_unscaled
+    )
+
+    if y_bin_edges is not None:
+        if y_label is not None:
+            meta_data["y_bin_edges"] = sanitize_name(y_label)
+            names = [
+                f"{meta_data['y_bin_edges'].split('__')[0]}_{i}"
+                for i in range(len(y_bin_edges[:-1]))
+            ]
+        else:
+            names = [
+                f"{meta_data['Title'].split('__')[0]}_{i}" for i in range(len(y_bin_edges[:-1]))
+            ]
+        meta_data["y_bin_edges_unit"] = (
+            y_bin_edges.unit if isinstance(y_bin_edges, u.Quantity) else u.dimensionless_unscaled
+        )
+
+        table = Table(
+            [hist[i, :] for i in range(len(y_bin_edges[:-1]))],
+            names=names,
+            meta=meta_data,
+        )
+
+    else:
+        if x_label is not None:
+            meta_data["x_bin_edges"] = sanitize_name(x_label)
+            names = meta_data["x_bin_edges"]
+        else:
+            names = meta_data["Title"]
+        table = Table(
+            [
+                x_bin_edges[:-1],
+                hist,
+            ],
+            names=(names, sanitize_name("Values")),
+            meta=meta_data,
+        )
+    return table
+
+
+def read_hdf5(hdf5_file_name):
+    """
+    Read a hdf5 output file.
+
+    Parameters
+    ----------
+    hdf5_file_name: str or Path
+        Name or Path of the hdf5 file to read from.
+
+    Returns
+    -------
+    list
+        The list with the astropy.Table instances for the various 1D and 2D histograms saved
+        in the hdf5 file.
+    """
+    if isinstance(hdf5_file_name, PosixPath):
+        hdf5_file_name = hdf5_file_name.absolute().as_posix()
+
+    tables_list = []
+
+    with tables.open_file(hdf5_file_name, mode="r") as file:
+        for node in file.walk_nodes("/", "Table"):
+            table_path = node._v_pathname  # pylint: disable=protected-access
+            table = read_table(hdf5_file_name, table_path)
+            tables_list.append(table)
+    return tables_list
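A minimal usage sketch for the two new helpers, with made-up histogram data; the output file name and the `Table.write` call are illustrative, not part of this diff:

```python
import numpy as np

from simtools.io_operations.hdf5_handler import fill_hdf5_table, read_hdf5

# Build a table for a 1D histogram (bin edges, counts and labels are made up here).
counts = np.array([5, 9, 2])
x_edges = np.array([0.0, 1.0, 2.0, 3.0])
table = fill_hdf5_table(
    hist=counts,
    x_bin_edges=x_edges,
    y_bin_edges=None,  # None selects the 1D branch
    x_label="impact distance",
    y_label=None,
    meta_data={"Title": "impact_distance"},
)
table.write("histograms.hdf5", path="/hist_1d", append=True, serialize_meta=True)

# Later, read every histogram table stored in the file back as astropy Tables.
for hist_table in read_hdf5("histograms.hdf5"):
    print(hist_table.meta.get("Title"))
```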
|