gammasimtools 0.24.0__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/METADATA +1 -1
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/RECORD +58 -55
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/entry_points.txt +1 -0
- simtools/_version.py +2 -2
- simtools/application_control.py +50 -0
- simtools/applications/derive_psf_parameters.py +5 -0
- simtools/applications/derive_pulse_shape_parameters.py +195 -0
- simtools/applications/plot_array_layout.py +63 -1
- simtools/applications/simulate_flasher.py +3 -2
- simtools/applications/simulate_pedestals.py +1 -1
- simtools/applications/simulate_prod.py +8 -23
- simtools/applications/simulate_prod_htcondor_generator.py +7 -0
- simtools/applications/submit_array_layouts.py +5 -3
- simtools/applications/validate_file_using_schema.py +49 -123
- simtools/configuration/commandline_parser.py +8 -6
- simtools/corsika/corsika_config.py +197 -87
- simtools/data_model/model_data_writer.py +14 -2
- simtools/data_model/schema.py +112 -5
- simtools/data_model/validate_data.py +82 -48
- simtools/db/db_model_upload.py +2 -1
- simtools/db/mongo_db.py +133 -42
- simtools/dependencies.py +5 -9
- simtools/io/eventio_handler.py +128 -0
- simtools/job_execution/htcondor_script_generator.py +0 -2
- simtools/layout/array_layout_utils.py +1 -1
- simtools/model/array_model.py +36 -5
- simtools/model/model_parameter.py +0 -1
- simtools/model/model_repository.py +18 -5
- simtools/ray_tracing/psf_analysis.py +11 -8
- simtools/ray_tracing/psf_parameter_optimisation.py +822 -679
- simtools/reporting/docs_read_parameters.py +69 -9
- simtools/runners/corsika_runner.py +12 -3
- simtools/runners/corsika_simtel_runner.py +6 -0
- simtools/runners/runner_services.py +17 -7
- simtools/runners/simtel_runner.py +12 -54
- simtools/schemas/model_parameters/flasher_pulse_exp_decay.schema.yml +2 -0
- simtools/schemas/model_parameters/flasher_pulse_shape.schema.yml +50 -0
- simtools/schemas/model_parameters/flasher_pulse_width.schema.yml +2 -0
- simtools/schemas/simulation_models_info.schema.yml +2 -0
- simtools/simtel/pulse_shapes.py +268 -0
- simtools/simtel/simtel_config_writer.py +82 -1
- simtools/simtel/simtel_io_event_writer.py +2 -2
- simtools/simtel/simulator_array.py +58 -12
- simtools/simtel/simulator_light_emission.py +45 -8
- simtools/simulator.py +361 -347
- simtools/testing/assertions.py +62 -6
- simtools/testing/configuration.py +1 -1
- simtools/testing/log_inspector.py +4 -1
- simtools/testing/sim_telarray_metadata.py +1 -1
- simtools/testing/validate_output.py +44 -9
- simtools/utils/names.py +2 -4
- simtools/version.py +37 -0
- simtools/visualization/legend_handlers.py +14 -4
- simtools/visualization/plot_array_layout.py +229 -33
- simtools/visualization/plot_mirrors.py +837 -0
- simtools/simtel/simtel_io_file_info.py +0 -62
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/WHEEL +0 -0
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/licenses/LICENSE +0 -0
- {gammasimtools-0.24.0.dist-info → gammasimtools-0.25.0.dist-info}/top_level.txt +0 -0
simtools/data_model/validate_data.py
CHANGED

@@ -14,6 +14,7 @@ import simtools.utils.general as gen
 from simtools.data_model import schema
 from simtools.io import ascii_handler
 from simtools.utils import names, value_conversion
+from simtools.version import is_valid_semantic_version


 class DataValidator:
@@ -46,7 +47,7 @@ class DataValidator:
         check_exact_data_type=True,
     ):
         """Initialize validation class and read required reference data columns."""
-        self._logger = logging.getLogger(__name__)
+        self.logger = logging.getLogger(__name__)

         self.data_file_name = data_file
         self.schema_file_name = schema_file
@@ -83,7 +84,7 @@ class DataValidator:
             return self._validate_data_dict(is_model_parameter, lists_as_strings)
         if isinstance(self.data_table, Table):
             return self._validate_data_table()
-        self._logger.error("No data or data table to validate")
+        self.logger.error("No data or data table to validate")
         raise TypeError

     def validate_data_file(self, is_model_parameter=None):
@@ -100,10 +101,10 @@
         try:
             if Path(self.data_file_name).suffix in (".yml", ".yaml", ".json"):
                 self.data_dict = ascii_handler.collect_data_from_file(self.data_file_name)
-                self._logger.info(f"Validating data from: {self.data_file_name}")
+                self.logger.info(f"Validating data from: {self.data_file_name}")
             else:
                 self.data_table = Table.read(self.data_file_name, guess=True, delimiter=r"\s")
-                self._logger.info(f"Validating tabled data from: {self.data_file_name}")
+                self.logger.info(f"Validating tabled data from: {self.data_file_name}")
         except (AttributeError, TypeError):
             pass
         if is_model_parameter:
@@ -129,7 +130,7 @@
             raise ValueError(f"Mismatch: version '{param_version}' vs. file '{file_stem}'.")

         if param_version is None:
-            self._logger.warning(f"File '{file_stem}' has no parameter version defined.")
+            self.logger.warning(f"File '{file_stem}' has no parameter version defined.")

     @staticmethod
     def validate_model_parameter(par_dict):
@@ -153,6 +154,46 @@
         )
         return data_validator.validate_and_transform(is_model_parameter=True)

+    @staticmethod
+    def validate_data_files(
+        file_directory=None,
+        file_name=None,
+        is_model_parameter=True,
+        check_exact_data_type=False,
+        schema_file=None,
+    ):
+        """
+        Validate data or model parameters in files in a directory or a single file.
+
+        Parameters
+        ----------
+        file_directory: str or Path
+            Directory with files to be validated.
+        file_name: str or Path
+            Name of the file to be validated.
+        is_model_parameter: bool
+            This is a model parameter (add some data preparation).
+        check_exact_data_type: bool
+            Require exact data type for validation.
+        """
+        if file_directory:
+            file_list = sorted(Path(file_directory).rglob("*.json"))
+        elif file_name:
+            file_list = [Path(file_name)]
+        else:
+            return
+
+        for data_file in file_list:
+            parameter_name = re.sub(r"-\d+\.\d+\.\d+", "", data_file.stem)
+            schema_path = schema_file or schema.get_model_parameter_schema_file(f"{parameter_name}")
+            data_validator = DataValidator(
+                schema_file=schema_path,
+                data_file=data_file,
+                check_exact_data_type=check_exact_data_type,
+            )
+            data_validator.validate_and_transform(is_model_parameter)
+            data_validator.logger.info(f"Validated data file {data_file} with schema {schema_path}")
+
     def _validate_data_dict(self, is_model_parameter=False, lists_as_strings=False):
         """
         Validate values in a dictionary.
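For illustration, the new `validate_data_files` entry point can be driven like this (a minimal usage sketch; the directory path is a placeholder):

    from simtools.data_model.validate_data import DataValidator

    # Validate every '*.json' model-parameter file found below a directory;
    # the schema for each file is resolved from the parameter name in its stem.
    DataValidator.validate_data_files(
        file_directory="simulation-models/model_parameters",
        is_model_parameter=True,
    )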
@@ -208,8 +249,10 @@
             self.data_dict.get("instrument"), self.data_dict.get("site")
         )

-        for version_type in ("version", "parameter_version", "model_version"):
-            self._check_version_string(self.data_dict.get(version_type))
+        for version_type in ("version", "parameter_version", "model_version"):
+            version_string = self.data_dict.get(version_type, "0.0.0")
+            if not is_valid_semantic_version(version_string):
+                raise ValueError(f"Invalid version string '{version_string}'")

         if lists_as_strings:
             self._convert_results_to_model_format()
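The per-field check formerly done by the private `_check_version_string` helper (removed further down) now goes through `simtools.version.is_valid_semantic_version`. A minimal sketch of such a predicate, assuming it mirrors the regex of the removed helper:

    import re

    def is_valid_semantic_version(version):
        """Return True if 'version' is a semantic version string such as '1.2.3'."""
        semver_regex = r"^\d+\.\d+\.\d+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$"
        return isinstance(version, str) and re.match(semver_regex, version) is not None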
@@ -278,7 +321,7 @@
                 "table_columns", None
             )
         except IndexError:
-            self._logger.error(f"Error reading validation schema from {self.schema_file_name}")
+            self.logger.error(f"Error reading validation schema from {self.schema_file_name}")
             raise

         if self._data_description is not None:
@@ -327,7 +370,7 @@
         for entry in self._data_description:
             if entry.get("required", False):
                 if entry["name"] in self.data_table.columns:
-                    self._logger.debug(f"Found required data column {entry['name']}")
+                    self.logger.debug(f"Found required data column {entry['name']}")
                 else:
                     raise KeyError(f"Missing required column {entry['name']}")

@@ -353,18 +396,18 @@
             _columns_by_which_to_reverse_sort.append(entry["name"])

         if len(_columns_by_which_to_sort) > 0:
-            self._logger.debug(f"Sorting data columns: {_columns_by_which_to_sort}")
+            self.logger.debug(f"Sorting data columns: {_columns_by_which_to_sort}")
             try:
                 self.data_table.sort(_columns_by_which_to_sort)
             except AttributeError:
-                self._logger.error("No data table defined for sorting")
+                self.logger.error("No data table defined for sorting")
                 raise
         elif len(_columns_by_which_to_reverse_sort) > 0:
-            self._logger.debug(f"Reverse sorting data columns: {_columns_by_which_to_reverse_sort}")
+            self.logger.debug(f"Reverse sorting data columns: {_columns_by_which_to_reverse_sort}")
             try:
                 self.data_table.sort(_columns_by_which_to_reverse_sort, reverse=True)
             except AttributeError:
-                self._logger.error("No data table defined for reverse sorting")
+                self.logger.error("No data table defined for reverse sorting")
                 raise

     def _check_data_for_duplicates(self):
@@ -379,7 +422,7 @@
         """
         _column_with_unique_requirement = self._get_unique_column_requirement()
         if len(_column_with_unique_requirement) == 0:
-            self._logger.debug("No data columns with unique value requirement")
+            self.logger.debug("No data columns with unique value requirement")
             return
         _data_table_unique_for_key_column = unique(
             self.data_table, keys=_column_with_unique_requirement
@@ -412,10 +455,10 @@

         for entry in self._data_description:
             if "input_processing" in entry and "remove_duplicates" in entry["input_processing"]:
-                self._logger.debug(f"Removing duplicates for column {entry['name']}")
+                self.logger.debug(f"Removing duplicates for column {entry['name']}")
                 _unique_required_column.append(entry["name"])

-        self._logger.debug(f"Unique required columns: {_unique_required_column}")
+        self.logger.debug(f"Unique required columns: {_unique_required_column}")
         return _unique_required_column

     def _get_reference_unit(self, column_name):
@@ -470,7 +513,7 @@
             dtype=dtype,
             allow_subtypes=(not self.check_exact_data_type),
         ):
-            self._logger.error(
+            self.logger.error(
                 f"Invalid data type in column '{column_name}'. "
                 f"Expected type '{reference_dtype}', found '{dtype}' "
                 f"(exact type: {self.check_exact_data_type})"
@@ -505,9 +548,9 @@
         data = np.array(data)

         if np.isnan(data).any():
-            self._logger.info(f"Column {col_name} contains NaN.")
+            self.logger.info(f"Column {col_name} contains NaN.")
         if np.isinf(data).any():
-            self._logger.info(f"Column {col_name} contains infinite value.")
+            self.logger.info(f"Column {col_name} contains infinite value.")

         entry = self._get_data_description(col_name)
         if "allow_nan" in entry.get("input_processing", {}):
@@ -593,7 +636,7 @@
             # ensure that the data type is preserved (e.g., integers)
             return (type(data)(u.Unit(column_unit).to(reference_unit) * data), reference_unit)
         except (u.core.UnitConversionError, ValueError) as exc:
-            self._logger.error(
+            self.logger.error(
                 f"Invalid unit in data column '{col_name}'. "
                 f"Expected type '{reference_unit}', found '{column_unit}'"
             )
@@ -696,9 +739,9 @@
         try:
             col_index = int(col_name)
             if col_index < max_logs:
-                self._logger.debug(message)
+                self.logger.debug(message)
         except (ValueError, TypeError):
-            self._logger.debug(message)
+            self.logger.debug(message)

     @staticmethod
     def _interval_check(data, axis_range, range_type):
@@ -817,7 +860,7 @@
         except IndexError as exc:
             if len(self._data_description) == 1:  # all columns are described by the same schema
                 return self._data_description[0]
-            self._logger.error(
+            self.logger.error(
                 f"Data column '{column_name}' not found in reference column definition"
             )
             raise exc
@@ -835,7 +878,7 @@
         try:
             return _entry[_index]
         except IndexError:
-            self._logger.error(
+            self.logger.error(
                 f"Data column '{column_name}' not found in reference column definition"
             )
             raise
@@ -868,42 +911,33 @@
         if isinstance(self.data_dict["unit"], list):
             self.data_dict["unit"] = gen.convert_list_to_string(self.data_dict["unit"])

-    def _check_version_string(self, version):
-        """
-        Check that version string follows semantic versioning.
-
-        Parameters
-        ----------
-        version: str
-            version string
-
-        Raises
-        ------
-        ValueError
-            if version string does not follow semantic versioning
-
-        """
-        if version is None:
-            return
-        semver_regex = r"^\d+\.\d+\.\d+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$"
-        if not re.match(semver_regex, version):
-            raise ValueError(f"Invalid version string '{version}'")
-        self._logger.debug(f"Valid version string '{version}'")
-
     def _check_site_and_array_element_consistency(self, instrument, site):
         """
         Check that site and array element names are consistent.

         An example for an inconsistency is 'LSTN' at site 'South'
         """
-        if not
+        if not (instrument and site):
+            return
+
+        instruments = [instrument] if isinstance(instrument, str) else instrument
+        if any(inst.startswith("OBS") for inst in instruments):
             return

         def to_sorted_list(value):
             """Return value as sorted list."""
             return [value] if isinstance(value, str) else sorted(value)

-
+        instrument_sites = []
+        for inst in instruments:
+            instrument_sites.append(names.get_site_from_array_element_name(inst))
+        # names.get_site_from_array_element_name might return a list
+        flat_sites = [
+            s
+            for sublist in instrument_sites
+            for s in (sublist if isinstance(sublist, list) else [sublist])
+        ]
+        instrument_site = to_sorted_list(set(flat_sites))
         site = to_sorted_list(site)

         if instrument_site != site:
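The flattening above accounts for `names.get_site_from_array_element_name` returning either a single site or a list of sites per instrument. A self-contained sketch with hypothetical lookup results:

    instrument_sites = ["North", ["North", "South"]]  # hypothetical per-instrument lookups
    flat_sites = [
        s
        for sublist in instrument_sites
        for s in (sublist if isinstance(sublist, list) else [sublist])
    ]
    print(sorted(set(flat_sites)))  # ['North', 'South']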
simtools/db/db_model_upload.py
CHANGED

@@ -186,7 +186,8 @@ def _read_production_tables(model_path):
     models = [model_path.name]
     if (model_path / "info.yml").exists():
         info = ascii_handler.collect_data_from_file(file_name=model_path / "info.yml")
-
+        if info.get("model_update") == "patch_update":
+            models.extend(info.get("model_version_history", []))
     # sort oldest --> newest
     models = sorted(set(models), key=Version, reverse=False)
     for model in models:
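The effect of the new condition, sketched with hypothetical versions (assuming `Version` comes from `packaging.version`, which this hunk does not show):

    from packaging.version import Version

    info = {"model_update": "patch_update", "model_version_history": ["6.0.0", "6.0.1"]}
    models = ["6.0.2"]
    if info.get("model_update") == "patch_update":
        models.extend(info.get("model_version_history", []))
    print(sorted(set(models), key=Version))  # ['6.0.0', '6.0.1', '6.0.2']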
simtools/db/mongo_db.py
CHANGED

@@ -10,7 +10,7 @@ import gridfs
 import jsonschema
 from astropy.table import Table
 from bson.objectid import ObjectId
-from pymongo import MongoClient
+from pymongo import MongoClient, monitoring

 from simtools.io import ascii_handler

@@ -57,6 +57,61 @@ jsonschema_db_dict = {
 }


+class IdleConnectionMonitor(monitoring.ConnectionPoolListener):
+    """
+    A listener to track MongoDB connection pool activity.
+
+    Used to monitor idle connections and log connection events.
+    Switched on in debug mode.
+    """
+
+    def __init__(self):
+        self._logger = logging.getLogger("IdleConnectionMonitor")
+        self.open_connections = 0
+
+    def connection_created(self, event):
+        """Handle connection creation event."""
+        self.open_connections += 1
+        self._logger.debug(
+            f"MongoDB connection Created: {event.address}. Total in Pool: {self.open_connections}"
+        )
+
+    def connection_closed(self, event):
+        """Handle connection closure event."""
+        self.open_connections -= 1
+        self._logger.debug(
+            f"MongoDB connection Closed: {event.address}. Reason: {event.reason}. "
+            f"Total in Pool: {self.open_connections}"
+        )
+
+    def connection_check_out_started(self, event):
+        """Handle connection check out started event."""
+
+    def connection_check_out_failed(self, event):
+        """Handle connection check out failure event."""
+
+    def connection_checked_out(self, event):
+        """Handle connection checked out event."""
+
+    def connection_checked_in(self, event):
+        """Handle connection checked in event."""
+
+    def connection_ready(self, event):
+        """Handle connection ready event."""
+
+    def pool_created(self, event):
+        """Handle connection pool creation event."""
+
+    def pool_ready(self, event):
+        """Handle connection pool ready event."""
+
+    def pool_cleared(self, event):
+        """Handle connection pool cleared event."""
+
+    def pool_closed(self, event):
+        """Handle connection pool closure event."""
+
+
 class MongoDBHandler:  # pylint: disable=unsubscriptable-object
     """
     MongoDBHandler provides low-level interface to MongoDB operations.
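The listener plugs into pymongo's standard monitoring API. A minimal standalone sketch of attaching it (the URI is a placeholder; in the handler below this happens automatically when DEBUG logging is enabled):

    import logging

    from pymongo import MongoClient

    from simtools.db.mongo_db import IdleConnectionMonitor

    logging.basicConfig(level=logging.DEBUG)  # listener output goes to the debug log
    client = MongoClient(
        "mongodb://localhost:27017/",  # placeholder URI
        event_listeners=[IdleConnectionMonitor()],
    )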
@@ -70,19 +125,90 @@ class MongoDBHandler: # pylint: disable=unsubscriptable-object
         Dictionary with the MongoDB configuration (see jsonschema_db_dict for details).
     """

-    db_client: MongoClient
+    db_client: MongoClient = None
     _lock = Lock()
+    _logger = logging.getLogger(__name__)

     def __init__(self, db_config=None):
         """Initialize the MongoDBHandler class."""
-        self._logger = logging.getLogger(__name__)
         self.db_config = MongoDBHandler.validate_db_config(db_config)
         self.list_of_collections = {}

-        if self.db_config
-
-
-
+        if self.db_config:
+            self._initialize_client(self.db_config)
+
+    @classmethod
+    def _initialize_client(cls, db_config):
+        """
+        Initialize the MongoDB client in a thread-safe manner.
+
+        Only initializes if it hasn't been done yet. Uses double-checked locking
+        to ensure thread safety.
+
+        Parameters
+        ----------
+        db_config: dict
+            Dictionary with the MongoDB configuration.
+        """
+        if cls.db_client is not None:
+            return
+        with cls._lock:
+            if cls.db_client is None:
+                try:
+                    uri = cls._build_uri(db_config)
+                    client_kwargs = {"maxIdleTimeMS": 10000}
+
+                    if cls._logger.isEnabledFor(logging.DEBUG):
+                        client_kwargs["event_listeners"] = [IdleConnectionMonitor()]
+
+                    cls.db_client = MongoClient(uri, **client_kwargs)
+                    cls._logger.debug("MongoDB client initialized successfully.")
+                except Exception as e:
+                    cls._logger.error(f"Failed to initialize MongoDB client: {e}")
+                    raise
+
+    @staticmethod
+    def _build_uri(db_config):
+        """
+        Build MongoDB URI from configuration.
+
+        Parameters
+        ----------
+        db_config: dict
+            Dictionary with the MongoDB configuration.
+
+        Returns
+        -------
+        str
+            MongoDB connection URI.
+        """
+        direct_connection = db_config["db_server"] in (
+            "localhost",
+            "simtools-mongodb",
+            "mongodb",
+        )
+        auth_source = (
+            db_config.get("db_api_authentication_database")
+            if db_config.get("db_api_authentication_database")
+            else "admin"
+        )
+
+        username = db_config["db_api_user"]
+        password = db_config["db_api_pw"]
+        server = db_config["db_server"]
+        port = db_config["db_api_port"]
+
+        uri_base = f"mongodb://{username}:{password}@{server}:{port}/"
+        params = [f"authSource={auth_source}"]
+
+        if direct_connection:
+            params.append("directConnection=true")
+        else:
+            params.append("ssl=true")
+            params.append("tlsAllowInvalidHostnames=true")
+            params.append("tlsAllowInvalidCertificates=true")
+
+        return f"{uri_base}?{'&'.join(params)}"

     @staticmethod
     def validate_db_config(db_config):
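Given the branches in `_build_uri`, a local server yields a direct-connection URI and anything else gets the TLS options appended. For example (placeholder credentials):

    db_config = {
        "db_server": "localhost",
        "db_api_port": 27017,
        "db_api_user": "api",
        "db_api_pw": "secret",
        "db_api_authentication_database": "admin",
    }
    # MongoDBHandler._build_uri(db_config) returns:
    # "mongodb://api:secret@localhost:27017/?authSource=admin&directConnection=true"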
@@ -112,41 +238,6 @@ class MongoDBHandler: # pylint: disable=unsubscriptable-object
         except jsonschema.exceptions.ValidationError as err:
             raise ValueError("Invalid MongoDB configuration") from err

-    def _open_db(self):
-        """
-        Open a connection to MongoDB and return the client.
-
-        Returns
-        -------
-        MongoClient
-            A PyMongo DB client
-
-        Raises
-        ------
-        KeyError
-            If the DB configuration is invalid
-        """
-        direct_connection = self.db_config["db_server"] in (
-            "localhost",
-            "simtools-mongodb",
-            "mongodb",
-        )
-        return MongoClient(
-            self.db_config["db_server"],
-            port=self.db_config["db_api_port"],
-            username=self.db_config["db_api_user"],
-            password=self.db_config["db_api_pw"],
-            authSource=(
-                self.db_config.get("db_api_authentication_database")
-                if self.db_config.get("db_api_authentication_database")
-                else "admin"
-            ),
-            directConnection=direct_connection,
-            ssl=not direct_connection,
-            tlsallowinvalidhostnames=True,
-            tlsallowinvalidcertificates=True,
-        )
-
     @staticmethod
     def get_db_name(db_name=None, db_simulation_model_version=None, model_name=None):
         """
simtools/dependencies.py
CHANGED

@@ -151,12 +151,11 @@ def get_corsika_version(run_time=None):
     str
         Version of the CORSIKA package.
    """
-
-
-    if sim_telarray_path is None:
+    corsika_path = os.getenv("SIMTOOLS_SIMTEL_PATH")
+    if corsika_path is None:
         _logger.warning("Environment variable SIMTOOLS_SIMTEL_PATH is not set.")
         return None
-    corsika_command = Path(
+    corsika_command = Path(corsika_path) / "corsika-run" / "corsika"

     if run_time is None:
         command = [str(corsika_command)]
@@ -173,11 +172,8 @@ def get_corsika_version(run_time=None):
         text=True,
     )

-
-
-    line = process.stdout.readline()
-    if not line:
-        break
+    version = None
+    for line in process.stdout:
         # Extract the version from the line "NUMBER OF VERSION : 7.7550"
         if "NUMBER OF VERSION" in line:
             version = line.split(":")[1].strip()
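The banner parsing reduces to a single split on the colon, e.g.:

    line = " NUMBER OF VERSION :   7.7550  "  # example CORSIKA banner line
    version = line.split(":")[1].strip()  # -> "7.7550"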
simtools/io/eventio_handler.py
ADDED

@@ -0,0 +1,128 @@
+#!/usr/bin/python3
+"""Read file info and run headers from eventio (CORSIKA IACT, sim_telarray) files."""
+
+import warnings
+
+from eventio import EventIOFile, iact
+from eventio.simtel import MCRunHeader, MCShower, RunHeader
+
+# Suppress all UserWarnings from corsikaio - no CORSIKA versions <7.7 are supported anyway
+warnings.filterwarnings("ignore", category=UserWarning, module=r"corsikaio\.subblocks\..*")
+
+
+def get_corsika_run_number(file):
+    """
+    Return the CORSIKA run number from an eventio (CORSIKA IACT or sim_telarray) file.
+
+    Parameters
+    ----------
+    file: str
+        Path to the eventio file.
+
+    Returns
+    -------
+    int, None
+        CORSIKA run number. Returns None if not found.
+    """
+    run_header = get_combined_corsika_run_header(file)
+    if run_header and "run" in run_header:
+        return run_header["run"]
+    run_header, _ = get_corsika_run_and_event_headers(file)
+    try:
+        return int(run_header["run_number"])
+    except (TypeError, KeyError, ValueError):
+        return None
+
+
+def get_combined_corsika_run_header(sim_telarray_file):
+    """
+    Return the CORSIKA run header information from a sim_telarray file.
+
+    Reads both RunHeader and MCRunHeader objects from file and returns a merged dictionary.
+    Adds primary id from the first event.
+
+    Parameters
+    ----------
+    sim_telarray_file: str
+        Path to the sim_telarray file.
+
+    Returns
+    -------
+    dict, None
+        CORSIKA run header. Returns None if not found.
+    """
+    run_header = mc_run_header = None
+    primary_id = None
+
+    with EventIOFile(sim_telarray_file) as f:
+        for o in f:
+            if isinstance(o, RunHeader) and run_header is None:
+                run_header = o.parse()
+            elif isinstance(o, MCRunHeader) and mc_run_header is None:
+                mc_run_header = o.parse()
+            elif isinstance(o, MCShower):  # get primary_id from first MCShower
+                primary_id = o.parse().get("primary_id")
+            if run_header and mc_run_header and primary_id is not None:
+                break
+
+    run_header = run_header or {}
+    mc_run_header = mc_run_header or {}
+    if primary_id is not None:
+        mc_run_header["primary_id"] = primary_id
+    return run_header | mc_run_header or None
+
+
+def get_corsika_run_and_event_headers(corsika_iact_file):
+    """
+    Return the CORSIKA run and event headers from a CORSIKA IACT eventio file.
+
+    Parameters
+    ----------
+    corsika_iact_file: str, Path
+        Path to the CORSIKA IACT eventio file.
+
+    Returns
+    -------
+    tuple
+        CORSIKA run header and event header as dictionaries.
+    """
+    run_header = event_header = None
+
+    with EventIOFile(corsika_iact_file) as f:
+        for o in f:
+            if isinstance(o, iact.RunHeader) and run_header is None:
+                run_header = o.parse()
+            elif isinstance(o, iact.EventHeader) and event_header is None:
+                event_header = o.parse()
+            if run_header and event_header:
+                break
+
+    return run_header, event_header
+
+
+def get_simulated_events(event_io_file):
+    """
+    Return the number of shower and MC events from a simulation (eventio) file.
+
+    For a sim_telarray file, the number of simulated showers and MC events is
+    determined by counting the number of MCShower (type id 2020) and MCEvent
+    objects (type id 2021). For a CORSIKA IACT file, the number of simulated
+    showers is determined by counting the number of IACTShower (type id 1202).
+
+    Parameters
+    ----------
+    event_io_file: str, Path
+        Path to the eventio file.
+
+    Returns
+    -------
+    tuple
+        Number of showers and number of MC events (MC events for sim_telarray files only).
+    """
+    counts = {1202: 0, 2020: 0, 2021: 0}
+    with EventIOFile(event_io_file) as f:
+        for o in f:
+            t = o.header.type
+            if t in counts:
+                counts[t] += 1
+    return counts[2020] if counts[2020] else counts[1202], counts[2021]
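Typical use of the new module (the file name is a placeholder):

    from simtools.io import eventio_handler

    run_number = eventio_handler.get_corsika_run_number("run000001.simtel.zst")
    n_showers, n_mc_events = eventio_handler.get_simulated_events("run000001.simtel.zst")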
simtools/job_execution/htcondor_script_generator.py
CHANGED

@@ -136,8 +136,6 @@ simtools-simulate-prod \\
     --view_cone {view_cone_string} \\
     --run_number $((process_id)) \\
     --run_number_offset {run_number_offset} \\
-    --number_of_runs 1 \\
-    --data_directory /tmp/simtools-data \\
     --output_path /tmp/simtools-output \\
     --log_level {args_dict["log_level"]} \\
     --pack_for_grid_register simtools-output
simtools/layout/array_layout_utils.py
CHANGED

@@ -162,7 +162,7 @@ def write_array_layouts(array_layouts, args_dict, db_config):
     ModelDataWriter.dump_model_parameter(
         parameter_name="array_layouts",
         value=array_layouts["value"],
-        instrument=site,
+        instrument=f"OBS-{site}",
         parameter_version=args_dict.get("updated_parameter_version"),
         output_file=output_file,
         db_config=db_config,