gammasimtools 0.8.2__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in the public registry, and is provided for informational purposes only.
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/METADATA +4 -4
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/RECORD +119 -105
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/WHEEL +1 -1
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/entry_points.txt +4 -1
- simtools/_version.py +2 -2
- simtools/applications/calculate_trigger_rate.py +15 -38
- simtools/applications/convert_all_model_parameters_from_simtel.py +9 -28
- simtools/applications/convert_geo_coordinates_of_array_elements.py +54 -53
- simtools/applications/convert_model_parameter_from_simtel.py +2 -2
- simtools/applications/db_add_file_to_db.py +1 -2
- simtools/applications/db_add_simulation_model_from_repository_to_db.py +110 -0
- simtools/applications/db_add_value_from_json_to_db.py +2 -11
- simtools/applications/db_development_tools/write_array_elements_positions_to_repository.py +6 -6
- simtools/applications/db_get_array_layouts_from_db.py +3 -1
- simtools/applications/db_get_file_from_db.py +11 -12
- simtools/applications/db_get_parameter_from_db.py +44 -32
- simtools/applications/derive_mirror_rnda.py +10 -1
- simtools/applications/derive_photon_electron_spectrum.py +99 -0
- simtools/applications/derive_psf_parameters.py +1 -1
- simtools/applications/generate_array_config.py +18 -22
- simtools/applications/generate_regular_arrays.py +24 -21
- simtools/applications/generate_simtel_array_histograms.py +11 -48
- simtools/applications/plot_array_layout.py +3 -1
- simtools/applications/plot_tabular_data.py +84 -0
- simtools/applications/production_generate_simulation_config.py +25 -7
- simtools/applications/production_scale_events.py +3 -4
- simtools/applications/simulate_light_emission.py +2 -2
- simtools/applications/simulate_prod.py +25 -60
- simtools/applications/simulate_prod_htcondor_generator.py +95 -0
- simtools/applications/submit_data_from_external.py +12 -4
- simtools/applications/submit_model_parameter_from_external.py +8 -6
- simtools/applications/validate_camera_efficiency.py +3 -3
- simtools/applications/validate_camera_fov.py +3 -7
- simtools/applications/validate_cumulative_psf.py +3 -7
- simtools/applications/validate_file_using_schema.py +38 -24
- simtools/applications/validate_optics.py +3 -4
- simtools/{camera_efficiency.py → camera/camera_efficiency.py} +1 -4
- simtools/camera/single_photon_electron_spectrum.py +168 -0
- simtools/configuration/commandline_parser.py +14 -13
- simtools/configuration/configurator.py +6 -19
- simtools/constants.py +10 -3
- simtools/corsika/corsika_config.py +8 -7
- simtools/corsika/corsika_histograms.py +1 -1
- simtools/data_model/data_reader.py +0 -3
- simtools/data_model/metadata_collector.py +21 -4
- simtools/data_model/metadata_model.py +8 -111
- simtools/data_model/model_data_writer.py +18 -64
- simtools/data_model/schema.py +213 -0
- simtools/data_model/validate_data.py +73 -51
- simtools/db/db_handler.py +395 -790
- simtools/db/db_model_upload.py +139 -0
- simtools/io_operations/hdf5_handler.py +54 -24
- simtools/io_operations/legacy_data_handler.py +61 -0
- simtools/job_execution/htcondor_script_generator.py +133 -0
- simtools/job_execution/job_manager.py +77 -50
- simtools/layout/array_layout.py +33 -28
- simtools/model/array_model.py +13 -7
- simtools/model/camera.py +4 -2
- simtools/model/model_parameter.py +61 -63
- simtools/model/site_model.py +3 -3
- simtools/production_configuration/calculate_statistical_errors_grid_point.py +119 -144
- simtools/production_configuration/event_scaler.py +7 -17
- simtools/production_configuration/generate_simulation_config.py +5 -32
- simtools/production_configuration/interpolation_handler.py +8 -11
- simtools/ray_tracing/mirror_panel_psf.py +47 -27
- simtools/runners/corsika_runner.py +14 -3
- simtools/runners/corsika_simtel_runner.py +3 -1
- simtools/runners/runner_services.py +3 -3
- simtools/runners/simtel_runner.py +27 -8
- simtools/schemas/input/MST_mirror_2f_measurements.schema.yml +39 -0
- simtools/schemas/input/single_pe_spectrum.schema.yml +38 -0
- simtools/schemas/integration_tests_config.metaschema.yml +23 -3
- simtools/schemas/model_parameter.metaschema.yml +95 -2
- simtools/schemas/model_parameter_and_data_schema.metaschema.yml +2 -0
- simtools/schemas/model_parameters/array_element_position_utm.schema.yml +1 -1
- simtools/schemas/model_parameters/array_window.schema.yml +37 -0
- simtools/schemas/model_parameters/asum_clipping.schema.yml +0 -4
- simtools/schemas/model_parameters/channels_per_chip.schema.yml +1 -1
- simtools/schemas/model_parameters/corsika_iact_io_buffer.schema.yml +2 -2
- simtools/schemas/model_parameters/dsum_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_ignore_below.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_offset.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pedsub.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pre_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_prescale.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_max.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_shift.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping_renormalize.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_threshold.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_zero_clip.schema.yml +0 -2
- simtools/schemas/model_parameters/effective_focal_length.schema.yml +31 -1
- simtools/schemas/model_parameters/fadc_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_lg_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_noise.schema.yml +3 -3
- simtools/schemas/model_parameters/fake_mirror_list.schema.yml +33 -0
- simtools/schemas/model_parameters/laser_photons.schema.yml +2 -2
- simtools/schemas/model_parameters/secondary_mirror_degraded_reflection.schema.yml +1 -1
- simtools/schemas/production_configuration_metrics.schema.yml +68 -0
- simtools/schemas/production_tables.schema.yml +41 -0
- simtools/simtel/simtel_config_writer.py +5 -6
- simtools/simtel/simtel_io_histogram.py +32 -67
- simtools/simtel/simtel_io_histograms.py +15 -30
- simtools/simtel/simtel_table_reader.py +410 -0
- simtools/simtel/simulator_array.py +2 -1
- simtools/simtel/simulator_camera_efficiency.py +11 -4
- simtools/simtel/simulator_light_emission.py +5 -3
- simtools/simtel/simulator_ray_tracing.py +2 -2
- simtools/simulator.py +80 -33
- simtools/testing/configuration.py +12 -8
- simtools/testing/helpers.py +9 -16
- simtools/testing/validate_output.py +152 -68
- simtools/utils/general.py +149 -12
- simtools/utils/names.py +25 -21
- simtools/utils/value_conversion.py +9 -1
- simtools/visualization/plot_tables.py +106 -0
- simtools/visualization/visualize.py +43 -5
- simtools/applications/db_add_model_parameters_from_repository_to_db.py +0 -184
- simtools/db/db_array_elements.py +0 -130
- simtools/db/db_from_repo_handler.py +0 -106
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/LICENSE +0 -0
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/top_level.txt +0 -0

simtools/camera/single_photon_electron_spectrum.py ADDED
@@ -0,0 +1,168 @@
+"""Single photon electron spectral analysis."""
+
+import logging
+import re
+import subprocess
+import tempfile
+from io import BytesIO
+from pathlib import Path
+
+from astropy.table import Table
+
+import simtools.data_model.model_data_writer as writer
+from simtools.constants import SCHEMA_PATH
+from simtools.data_model import validate_data
+from simtools.data_model.metadata_collector import MetadataCollector
+from simtools.io_operations import io_handler
+
+
+class SinglePhotonElectronSpectrum:
+    """
+    Single photon electron spectral analysis.
+
+    Parameters
+    ----------
+    args_dict: dict
+        Dictionary with input arguments.
+    """
+
+    prompt_column = "frequency (prompt)"
+    prompt_plus_afterpulse_column = "frequency (prompt+afterpulsing)"
+    afterpulse_column = "frequency (afterpulsing)"
+
+    input_schema = SCHEMA_PATH / "input" / "single_pe_spectrum.schema.yml"
+
+    def __init__(self, args_dict):
+        """Initialize SinglePhotonElectronSpectrum class."""
+        self._logger = logging.getLogger(__name__)
+        self._logger.debug("Initialize SinglePhotonElectronSpectrum class.")
+
+        self.args_dict = args_dict
+        # default output is of ecsv format
+        self.args_dict["output_file"] = str(
+            Path(self.args_dict["output_file"]).with_suffix(".ecsv")
+        )
+        self.io_handler = io_handler.IOHandler()
+        self.data = ""  # Single photon electron spectrum data (as string)
+        self.metadata = MetadataCollector(args_dict=self.args_dict)
+
+    def derive_single_pe_spectrum(self):
+        """Derive single photon electron spectrum."""
+        if self.args_dict.get("use_norm_spe"):
+            return self._derive_spectrum_norm_spe()
+
+        raise NotImplementedError(
+            "Derivation of single photon electron spectrum using a simtool is not yet implemented."
+        )
+
+    def write_single_pe_spectrum(self):
+        """
+        Write single photon electron spectrum plus metadata to disk.
+
+        Includes writing in simtel and simtools (ecsv) formats.
+
+        """
+        simtel_file = self.io_handler.get_output_directory() / Path(
+            self.args_dict["output_file"]
+        ).with_suffix(".dat")
+        self._logger.debug(f"norm_spe output file: {simtel_file}")
+        with open(simtel_file, "w", encoding="utf-8") as simtel:
+            simtel.write(self.data)
+
+        cleaned_data = re.sub(r"%%%.+", "", self.data)  # remove norm_spe row metadata
+        table = Table.read(
+            BytesIO(cleaned_data.encode("utf-8")),
+            format="ascii.no_header",
+            comment="#",
+            delimiter="\t",
+        )
+        table.rename_columns(
+            ["col1", "col2", "col3"],
+            ["amplitude", self.prompt_column, self.prompt_plus_afterpulse_column],
+        )
+
+        writer.ModelDataWriter.dump(
+            args_dict=self.args_dict,
+            metadata=self.metadata.top_level_meta,
+            product_data=table,
+            validate_schema_file=None,
+        )
+
+    def _derive_spectrum_norm_spe(self):
+        """
+        Derive single photon electron spectrum using sim_telarray tool 'norm_spe'.
+
+        Returns
+        -------
+        int
+            Return code of the executed command
+
+        Raises
+        ------
+        subprocess.CalledProcessError
+            If the command execution fails.
+        """
+        tmp_input_file = self._get_input_data(
+            input_file=self.args_dict["input_spectrum"],
+            frequency_column=self.prompt_column,
+        )
+        tmp_ap_file = self._get_input_data(
+            input_file=self.args_dict.get("afterpulse_spectrum"),
+            frequency_column=self.afterpulse_column,
+        )
+
+        command = [
+            f"{self.args_dict['simtel_path']}/sim_telarray/bin/norm_spe",
+            "-r",
+            f"{self.args_dict['step_size']},{self.args_dict['max_amplitude']}",
+            tmp_input_file.name,
+        ]
+        if tmp_ap_file:
+            command.insert(1, "-a")
+            command.insert(2, f"{tmp_ap_file.name}")
+
+        self._logger.debug(f"Running norm_spe command: {' '.join(command)}")
+        try:
+            result = subprocess.run(command, capture_output=True, text=True, check=True)
+        except subprocess.CalledProcessError as exc:
+            self._logger.error(f"Error running norm_spe: {exc}")
+            self._logger.error(f"stderr: {exc.stderr}")
+            raise exc
+        finally:
+            for tmp_file in [tmp_input_file, tmp_ap_file]:
+                try:
+                    Path(tmp_file.name).unlink()
+                except (AttributeError, FileNotFoundError):
+                    pass
+
+        self.data = result.stdout
+        return result.returncode
+
+    def _get_input_data(self, input_file, frequency_column):
+        """
+        Return input data for norm_spe command.
+
+        Input data need to be space separated values of the amplitude spectrum.
+        """
+        input_data = ""
+        if not input_file:
+            return None
+        input_file = Path(input_file)
+
+        if input_file.suffix == ".ecsv":
+            data_validator = validate_data.DataValidator(
+                schema_file=self.input_schema, data_file=input_file
+            )
+            table = data_validator.validate_and_transform()
+            input_data = "\n".join(f"{row['amplitude']} {row[frequency_column]}" for row in table)
+        else:
+            with open(input_file, encoding="utf-8") as f:
+                input_data = (
+                    f.read().replace(",", " ")
+                    if frequency_column == self.prompt_column
+                    else f.read()
+                )
+
+        with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf-8") as tmpfile:
+            tmpfile.write(input_data)
+            return tmpfile
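
For orientation, a minimal usage sketch of the new class follows. The argument keys (use_norm_spe, simtel_path, input_spectrum, afterpulse_spectrum, step_size, max_amplitude, output_file) are the ones read by the class above; the concrete values and the hand-built dictionary are illustrative assumptions. In the package this wiring is done by the new derive_photon_electron_spectrum.py application.

    from simtools.camera.single_photon_electron_spectrum import SinglePhotonElectronSpectrum

    # Illustrative argument dictionary; keys mirror those read by the class above.
    args_dict = {
        "use_norm_spe": True,                # run sim_telarray's norm_spe tool
        "simtel_path": "/opt/sim_telarray",  # assumed installation prefix
        "input_spectrum": "prompt_spectrum.ecsv",
        "afterpulse_spectrum": None,         # optional afterpulsing spectrum
        "step_size": 0.02,
        "max_amplitude": 30.0,
        "output_file": "single_pe_spectrum",
    }

    spectrum = SinglePhotonElectronSpectrum(args_dict)
    if spectrum.derive_single_pe_spectrum() == 0:  # runs norm_spe, keeps its stdout in .data
        spectrum.write_single_pe_spectrum()        # writes .dat (simtel) and .ecsv outputs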

simtools/configuration/commandline_parser.py CHANGED
@@ -198,13 +198,6 @@ class CommandLineParser(argparse.ArgumentParser):
             required=False,
             default=None,
         )
-        _job_group.add_argument(
-            "--db_simulation_model_url",
-            help="simulation model repository URL",
-            type=str,
-            required=False,
-            default=None,
-        )

     def initialize_job_submission_arguments(self):
         """Initialize job submission arguments for simulator."""
@@ -243,12 +236,20 @@ class CommandLineParser(argparse.ArgumentParser):
             return

         _job_group = self.add_argument_group("simulation model")
-
-
-
-
-
-
+        if "model_version" in model_options:
+            _job_group.add_argument(
+                "--model_version",
+                help="production model version",
+                type=str,
+                default=None,
+            )
+        if "parameter_version" in model_options:
+            _job_group.add_argument(
+                "--parameter_version",
+                help="model parameter version",
+                type=str,
+                default=None,
+            )
         if any(
             option in model_options for option in ["site", "telescope", "layout", "layout_file"]
         ):
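
With this change, --model_version and --parameter_version are registered only for applications that request them through model_options. A standalone sketch of that conditional-registration pattern using plain argparse (not the simtools CommandLineParser API):

    import argparse

    def add_model_arguments(parser, model_options):
        """Register simulation-model options only when an application asks for them."""
        group = parser.add_argument_group("simulation model")
        if "model_version" in model_options:
            group.add_argument("--model_version", help="production model version", type=str, default=None)
        if "parameter_version" in model_options:
            group.add_argument("--parameter_version", help="model parameter version", type=str, default=None)
        return parser

    parser = add_model_arguments(argparse.ArgumentParser(), ["model_version"])
    print(parser.parse_args(["--model_version", "6.0.0"]).model_version)  # prints: 6.0.0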

simtools/configuration/configurator.py CHANGED
@@ -10,6 +10,7 @@ import astropy.units as u
 from dotenv import load_dotenv

 import simtools.configuration.commandline_parser as argparser
+from simtools.db.db_handler import jsonschema_db_dict
 from simtools.io_operations import io_handler
 from simtools.utils import general as gen

@@ -438,24 +439,10 @@ class Configurator:
         """
         Return parameters for DB configuration.

-
-
+        Returns
+        -------
         dict
-            Dictionary with DB parameters
+            Dictionary with DB parameters.
         """
-
-
-            "db_api_user",
-            "db_api_pw",
-            "db_api_port",
-            "db_server",
-            "db_simulation_model",
-            "db_simulation_model_url",
-        )
-        try:
-            for _para in _db_para:
-                _db_dict[_para] = self.config[_para]
-        except KeyError:
-            pass
-
-        return _db_dict
+        db_params = jsonschema_db_dict["properties"].keys()
+        return {param: self.config.get(param) for param in db_params if param in self.config}

simtools/constants.py CHANGED
@@ -2,8 +2,15 @@

 from importlib.resources import files

+# Schema path
+SCHEMA_PATH = files("simtools") / "schemas"
 # Path to metadata jsonschema
-METADATA_JSON_SCHEMA =
-
+METADATA_JSON_SCHEMA = SCHEMA_PATH / "metadata.metaschema.yml"
+# Path to model parameter metaschema
+MODEL_PARAMETER_METASCHEMA = SCHEMA_PATH / "model_parameter.metaschema.yml"
+# Path to model parameter description metaschema
+MODEL_PARAMETER_DESCRIPTION_METASCHEMA = (
+    SCHEMA_PATH / "model_parameter_and_data_schema.metaschema.yml"
+)
 # Path to model parameter schema files
-MODEL_PARAMETER_SCHEMA_PATH =
+MODEL_PARAMETER_SCHEMA_PATH = SCHEMA_PATH / "model_parameters"
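
All schema locations are now derived from a single SCHEMA_PATH inside the installed package. A short, purely illustrative sketch of using the new constants to locate schema files:

    from simtools.constants import METADATA_JSON_SCHEMA, MODEL_PARAMETER_SCHEMA_PATH

    print(METADATA_JSON_SCHEMA)  # .../simtools/schemas/metadata.metaschema.yml
    # List the per-parameter schema files shipped with the package.
    for entry in sorted(p.name for p in MODEL_PARAMETER_SCHEMA_PATH.iterdir()):
        print(entry)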

simtools/corsika/corsika_config.py CHANGED
@@ -6,7 +6,6 @@ from pathlib import Path
 import numpy as np
 from astropy import units as u

-import simtools.utils.general as gen
 from simtools.corsika.primary_particle import PrimaryParticle
 from simtools.io_operations import io_handler
 from simtools.model.model_parameter import ModelParameter
@@ -111,8 +110,6 @@ class CorsikaConfig:
         if args_dict is None:
             return {}

-        self._logger.debug("Setting CORSIKA parameters ")
-
         self._is_file_updated = False
         self.azimuth_angle = int(args_dict["azimuth_angle"].to("deg").value)
         self.zenith_angle = args_dict["zenith_angle"].to("deg").value
@@ -243,7 +240,7 @@ class CorsikaConfig:

     def _input_config_corsika_particle_kinetic_energy_cutoff(self, entry):
         """Return ECUTS parameter CORSIKA format."""
-        e_cuts =
+        e_cuts = entry["value"]
         return [
             f"{e_cuts[0]*u.Unit(entry['unit']).to('GeV')} "
             f"{e_cuts[1]*u.Unit(entry['unit']).to('GeV')} "
@@ -280,7 +277,7 @@ class CorsikaConfig:

     def _input_config_corsika_cherenkov_wavelength(self, entry):
         """Return CWAVLG parameter CORSIKA format."""
-        wavelength_range =
+        wavelength_range = entry["value"]
         return [
             f"{wavelength_range[0]*u.Unit(entry['unit']).to('nm')}",
             f"{wavelength_range[1]*u.Unit(entry['unit']).to('nm')}",
@@ -318,8 +315,12 @@ class CorsikaConfig:
         }

     def _input_config_io_buff(self, entry):
-        """Return IO_BUFFER parameter CORSIKA format."""
-
+        """Return IO_BUFFER parameter CORSIKA format (Byte or MB required)."""
+        value = entry["value"] * u.Unit(entry["unit"]).to("Mbyte")
+        # check if value is integer-like
+        if value.is_integer():
+            return f"{int(value)}MB"
+        return f"{int(entry['value'] * u.Unit(entry['unit']).to('byte'))}"

     def _rotate_azimuth_by_180deg(self, az, correct_for_geomagnetic_field_alignment=True):
         """
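
The new _input_config_io_buff prints the buffer size as "<n>MB" when the value converts to a whole number of megabytes and falls back to plain bytes otherwise. A plain-Python mirror of that formatting logic (an illustrative helper, not part of simtools):

    def io_buffer_string(value, unit_to_byte):
        """Format an IO_BUFFER value, given the factor converting its unit to bytes."""
        n_bytes = value * unit_to_byte
        mbytes = n_bytes / 1.0e6
        if float(mbytes).is_integer():
            return f"{int(mbytes)}MB"
        return f"{int(n_bytes)}"

    print(io_buffer_string(800, 1.0e6))  # value given in Mbyte -> '800MB'
    print(io_buffer_string(250000, 1))   # value given in byte  -> '250000'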

simtools/corsika/corsika_histograms.py CHANGED
@@ -678,7 +678,7 @@ class CorsikaHistograms:
         ----------
         new_individual_telescopes: bool
             if False, the histograms are supposed to be filled for all telescopes.
-            if True, one histogram is set for each telescope
+            if True, one histogram is set for each telescope separately.
         """
         if new_individual_telescopes is None:
             self._individual_telescopes = False

simtools/data_model/data_reader.py CHANGED
@@ -112,9 +112,6 @@ def read_value_from_file(file_name, schema_file=None, validate=False):
     _logger.info("Reading data from %s", file_name)

     if validate:
-        if schema_file is None and "meta_schema_url" in data:
-            schema_file = data["meta_schema_url"]
-            _logger.debug(f"Using schema from meta_schema_url: {schema_file}")
         if schema_file is None:
             _collector = MetadataCollector(None, metadata_file_name=file_name)
             schema_file = _collector.get_data_model_schema_file_name()

simtools/data_model/metadata_collector.py CHANGED
@@ -10,13 +10,12 @@ import datetime
 import getpass
 import logging
 import uuid
-from importlib.resources import files
 from pathlib import Path

 import simtools.constants
 import simtools.utils.general as gen
 import simtools.version
-from simtools.data_model import metadata_model
+from simtools.data_model import metadata_model, schema
 from simtools.io_operations import io_handler
 from simtools.utils import names

@@ -86,6 +85,24 @@ class MetadataCollector:
         except AttributeError:
             self._logger.debug(f"Method _fill_{meta_type}_meta not implemented")

+    def get_top_level_metadata(self):
+        """
+        Return top level metadata dictionary (with updated activity end time).
+
+        Returns
+        -------
+        dict
+            Top level metadata dictionary.
+
+        """
+        try:
+            self.top_level_meta[self.observatory]["activity"][
+                "end"
+            ] = datetime.datetime.now().isoformat(timespec="seconds")
+        except KeyError:
+            pass
+        return self.top_level_meta
+
     def get_data_model_schema_file_name(self):
         """
         Return data model schema file name.
@@ -117,7 +134,7 @@ class MetadataCollector:
         # from data model name
         if self.data_model_name:
             self._logger.debug(f"Schema file from data model name: {self.data_model_name}")
-            return
+            return str(schema.get_model_parameter_schema_file(self.data_model_name))

         # from input metadata
         try:
@@ -249,7 +266,7 @@ class MetadataCollector:
             self._logger.error("Unknown metadata file format: %s", metadata_file_name)
             raise gen.InvalidConfigDataError

-
+        schema.validate_dict_using_schema(_input_metadata, None)

         return gen.change_dict_keys_case(
             self._process_metadata_from_file(_input_metadata),
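
get_top_level_metadata refreshes the activity end time just before the metadata dictionary is handed to a writer. A standalone illustration of that update (the "cta" observatory key and the dictionary layout are assumptions for the example):

    import datetime

    top_level_meta = {"cta": {"activity": {"start": "2024-01-01T00:00:00", "end": None}}}
    try:
        top_level_meta["cta"]["activity"]["end"] = datetime.datetime.now().isoformat(timespec="seconds")
    except KeyError:
        pass  # leave the metadata untouched if the expected keys are missing
    print(top_level_meta["cta"]["activity"]["end"])  # e.g. 2025-01-01T12:00:00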

simtools/data_model/metadata_model.py CHANGED
@@ -9,45 +9,12 @@ Follows CTAO top-level data model definition.
 """

 import logging
-from importlib.resources import files

-import
-
-import simtools.constants
-import simtools.utils.general as gen
-from simtools.data_model import format_checkers
-from simtools.utils import names
+import simtools.data_model.schema

 _logger = logging.getLogger(__name__)


-def validate_schema(data, schema_file):
-    """
-    Validate dictionary against schema.
-
-    Parameters
-    ----------
-    data
-        dictionary to be validated
-    schema_file (dict)
-        schema used for validation
-
-    Raises
-    ------
-    jsonschema.exceptions.ValidationError
-        if validation fails
-
-    """
-    schema, schema_file = _load_schema(schema_file)
-
-    try:
-        jsonschema.validate(data, schema=schema, format_checker=format_checkers.format_checker)
-    except jsonschema.exceptions.ValidationError:
-        _logger.error(f"Failed using {schema}")
-        raise
-    _logger.debug(f"Successful validation of data using schema from {schema_file}")
-
-
 def get_default_metadata_dict(schema_file=None, observatory="CTA"):
     """
     Return metadata schema with default values.
@@ -68,80 +35,10 @@ def get_default_metadata_dict(schema_file=None, observatory="CTA"):


     """
-    schema
+    schema = simtools.data_model.schema.load_schema(schema_file)
     return _fill_defaults(schema["definitions"], observatory)


-def _load_schema(schema_file=None):
-    """
-    Load parameter schema from file from simpipe metadata schema.
-
-    Returns
-    -------
-    schema_file dict
-        Schema used for validation.
-    schema_file str
-        File name schema is loaded from. If schema_file is not given,
-        the default schema file name is returned.
-
-    Raises
-    ------
-    FileNotFoundError
-        if schema file is not found
-
-    """
-    if schema_file is None:
-        schema_file = files("simtools").joinpath(simtools.constants.METADATA_JSON_SCHEMA)
-
-    try:
-        schema = gen.collect_data_from_file(file_name=schema_file)
-    except FileNotFoundError:
-        schema_file = files("simtools").joinpath("schemas") / schema_file
-        schema = gen.collect_data_from_file(file_name=schema_file)
-    _logger.debug(f"Loading schema from {schema_file}")
-    _add_array_elements("InstrumentTypeElement", schema)
-
-    return schema, schema_file
-
-
-def _add_array_elements(key, schema):
-    """
-    Add list of array elements to schema.
-
-    This assumes an element [key]['enum'] is a list of elements.
-
-    Parameters
-    ----------
-    key: str
-        Key in schema dictionary
-    schema: dict
-        Schema dictionary
-
-    Returns
-    -------
-    dict
-        Schema dictionary with added array elements.
-
-    """
-    _list_of_array_elements = sorted(names.array_elements().keys())
-
-    def recursive_search(sub_schema, key):
-        if key in sub_schema:
-            if "enum" in sub_schema[key] and isinstance(sub_schema[key]["enum"], list):
-                sub_schema[key]["enum"] = list(
-                    set(sub_schema[key]["enum"] + _list_of_array_elements)
-                )
-            else:
-                sub_schema[key]["enum"] = _list_of_array_elements
-        else:
-            for _, v in sub_schema.items():
-                if isinstance(v, dict):
-                    recursive_search(v, key)
-
-    recursive_search(schema, key)
-    return schema
-
-
 def _resolve_references(yaml_data, observatory="CTA"):
     """
     Resolve references in yaml data and expand the received dictionary accordingly.
@@ -214,21 +111,21 @@ def _fill_defaults(schema, observatory="CTA"):
     return defaults


-def _fill_defaults_recursive(
+def _fill_defaults_recursive(sub_schema, current_dict):
     """
-    Recursively fill default values from the
+    Recursively fill default values from the sub_schema into the current dictionary.

     Parameters
     ----------
-
-
+    sub_schema: dict
+        Sub schema describing part of the input data.
     current_dict: dict
         Current dictionary to fill with default values.
     """
-    if "properties" not in
+    if "properties" not in sub_schema:
         _raise_missing_properties_error()

-    for prop, prop_schema in
+    for prop, prop_schema in sub_schema["properties"].items():
         _process_property(prop, prop_schema, current_dict)

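
_fill_defaults_recursive walks a schema's "properties" and fills default values into the output dictionary. A self-contained toy illustration of that recursive pattern (simplified; the real implementation delegates per-property handling to _process_property):

    def fill_defaults(sub_schema, current_dict):
        """Toy recursive default filler for a JSON-schema-like dictionary."""
        for prop, prop_schema in sub_schema.get("properties", {}).items():
            if "properties" in prop_schema:  # nested object: recurse
                current_dict[prop] = {}
                fill_defaults(prop_schema, current_dict[prop])
            else:
                current_dict[prop] = prop_schema.get("default")

    schema = {
        "properties": {
            "contact": {"properties": {"name": {"default": "UNKNOWN"}, "email": {"default": None}}},
            "version": {"default": "1.0.0"},
        }
    }
    defaults = {}
    fill_defaults(schema, defaults)
    print(defaults)  # {'contact': {'name': 'UNKNOWN', 'email': None}, 'version': '1.0.0'}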