gammasimtools 0.8.2__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/METADATA +4 -4
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/RECORD +119 -105
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/WHEEL +1 -1
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/entry_points.txt +4 -1
- simtools/_version.py +2 -2
- simtools/applications/calculate_trigger_rate.py +15 -38
- simtools/applications/convert_all_model_parameters_from_simtel.py +9 -28
- simtools/applications/convert_geo_coordinates_of_array_elements.py +54 -53
- simtools/applications/convert_model_parameter_from_simtel.py +2 -2
- simtools/applications/db_add_file_to_db.py +1 -2
- simtools/applications/db_add_simulation_model_from_repository_to_db.py +110 -0
- simtools/applications/db_add_value_from_json_to_db.py +2 -11
- simtools/applications/db_development_tools/write_array_elements_positions_to_repository.py +6 -6
- simtools/applications/db_get_array_layouts_from_db.py +3 -1
- simtools/applications/db_get_file_from_db.py +11 -12
- simtools/applications/db_get_parameter_from_db.py +44 -32
- simtools/applications/derive_mirror_rnda.py +10 -1
- simtools/applications/derive_photon_electron_spectrum.py +99 -0
- simtools/applications/derive_psf_parameters.py +1 -1
- simtools/applications/generate_array_config.py +18 -22
- simtools/applications/generate_regular_arrays.py +24 -21
- simtools/applications/generate_simtel_array_histograms.py +11 -48
- simtools/applications/plot_array_layout.py +3 -1
- simtools/applications/plot_tabular_data.py +84 -0
- simtools/applications/production_generate_simulation_config.py +25 -7
- simtools/applications/production_scale_events.py +3 -4
- simtools/applications/simulate_light_emission.py +2 -2
- simtools/applications/simulate_prod.py +25 -60
- simtools/applications/simulate_prod_htcondor_generator.py +95 -0
- simtools/applications/submit_data_from_external.py +12 -4
- simtools/applications/submit_model_parameter_from_external.py +8 -6
- simtools/applications/validate_camera_efficiency.py +3 -3
- simtools/applications/validate_camera_fov.py +3 -7
- simtools/applications/validate_cumulative_psf.py +3 -7
- simtools/applications/validate_file_using_schema.py +38 -24
- simtools/applications/validate_optics.py +3 -4
- simtools/{camera_efficiency.py → camera/camera_efficiency.py} +1 -4
- simtools/camera/single_photon_electron_spectrum.py +168 -0
- simtools/configuration/commandline_parser.py +14 -13
- simtools/configuration/configurator.py +6 -19
- simtools/constants.py +10 -3
- simtools/corsika/corsika_config.py +8 -7
- simtools/corsika/corsika_histograms.py +1 -1
- simtools/data_model/data_reader.py +0 -3
- simtools/data_model/metadata_collector.py +21 -4
- simtools/data_model/metadata_model.py +8 -111
- simtools/data_model/model_data_writer.py +18 -64
- simtools/data_model/schema.py +213 -0
- simtools/data_model/validate_data.py +73 -51
- simtools/db/db_handler.py +395 -790
- simtools/db/db_model_upload.py +139 -0
- simtools/io_operations/hdf5_handler.py +54 -24
- simtools/io_operations/legacy_data_handler.py +61 -0
- simtools/job_execution/htcondor_script_generator.py +133 -0
- simtools/job_execution/job_manager.py +77 -50
- simtools/layout/array_layout.py +33 -28
- simtools/model/array_model.py +13 -7
- simtools/model/camera.py +4 -2
- simtools/model/model_parameter.py +61 -63
- simtools/model/site_model.py +3 -3
- simtools/production_configuration/calculate_statistical_errors_grid_point.py +119 -144
- simtools/production_configuration/event_scaler.py +7 -17
- simtools/production_configuration/generate_simulation_config.py +5 -32
- simtools/production_configuration/interpolation_handler.py +8 -11
- simtools/ray_tracing/mirror_panel_psf.py +47 -27
- simtools/runners/corsika_runner.py +14 -3
- simtools/runners/corsika_simtel_runner.py +3 -1
- simtools/runners/runner_services.py +3 -3
- simtools/runners/simtel_runner.py +27 -8
- simtools/schemas/input/MST_mirror_2f_measurements.schema.yml +39 -0
- simtools/schemas/input/single_pe_spectrum.schema.yml +38 -0
- simtools/schemas/integration_tests_config.metaschema.yml +23 -3
- simtools/schemas/model_parameter.metaschema.yml +95 -2
- simtools/schemas/model_parameter_and_data_schema.metaschema.yml +2 -0
- simtools/schemas/model_parameters/array_element_position_utm.schema.yml +1 -1
- simtools/schemas/model_parameters/array_window.schema.yml +37 -0
- simtools/schemas/model_parameters/asum_clipping.schema.yml +0 -4
- simtools/schemas/model_parameters/channels_per_chip.schema.yml +1 -1
- simtools/schemas/model_parameters/corsika_iact_io_buffer.schema.yml +2 -2
- simtools/schemas/model_parameters/dsum_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_ignore_below.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_offset.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pedsub.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pre_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_prescale.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_max.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_shift.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping_renormalize.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_threshold.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_zero_clip.schema.yml +0 -2
- simtools/schemas/model_parameters/effective_focal_length.schema.yml +31 -1
- simtools/schemas/model_parameters/fadc_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_lg_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_noise.schema.yml +3 -3
- simtools/schemas/model_parameters/fake_mirror_list.schema.yml +33 -0
- simtools/schemas/model_parameters/laser_photons.schema.yml +2 -2
- simtools/schemas/model_parameters/secondary_mirror_degraded_reflection.schema.yml +1 -1
- simtools/schemas/production_configuration_metrics.schema.yml +68 -0
- simtools/schemas/production_tables.schema.yml +41 -0
- simtools/simtel/simtel_config_writer.py +5 -6
- simtools/simtel/simtel_io_histogram.py +32 -67
- simtools/simtel/simtel_io_histograms.py +15 -30
- simtools/simtel/simtel_table_reader.py +410 -0
- simtools/simtel/simulator_array.py +2 -1
- simtools/simtel/simulator_camera_efficiency.py +11 -4
- simtools/simtel/simulator_light_emission.py +5 -3
- simtools/simtel/simulator_ray_tracing.py +2 -2
- simtools/simulator.py +80 -33
- simtools/testing/configuration.py +12 -8
- simtools/testing/helpers.py +9 -16
- simtools/testing/validate_output.py +152 -68
- simtools/utils/general.py +149 -12
- simtools/utils/names.py +25 -21
- simtools/utils/value_conversion.py +9 -1
- simtools/visualization/plot_tables.py +106 -0
- simtools/visualization/visualize.py +43 -5
- simtools/applications/db_add_model_parameters_from_repository_to_db.py +0 -184
- simtools/db/db_array_elements.py +0 -130
- simtools/db/db_from_repo_handler.py +0 -106
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/LICENSE +0 -0
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/top_level.txt +0 -0

simtools/data_model/model_data_writer.py

@@ -10,8 +10,7 @@ import yaml
 from astropy.io.registry.base import IORegistryError
 
 import simtools.utils.general as gen
-from simtools.
-from simtools.data_model import validate_data
+from simtools.data_model import schema, validate_data
 from simtools.data_model.metadata_collector import MetadataCollector
 from simtools.io_operations import io_handler
 from simtools.utils import names, value_conversion
@@ -122,7 +121,7 @@ class ModelDataWriter:
         parameter_name,
         value,
         instrument,
-
+        parameter_version,
         output_file,
         output_path=None,
         use_plain_output_path=False,
@@ -139,8 +138,8 @@ class ModelDataWriter:
             Value of the parameter.
         instrument: str
            Name of the instrument.
-
-            Version of the
+        parameter_version: str
+            Version of the parameter.
         output_file: str
             Name of output file.
         output_path: str or Path
@@ -163,19 +162,21 @@ class ModelDataWriter:
             use_plain_output_path=use_plain_output_path,
         )
         _json_dict = writer.get_validated_parameter_dict(
-            parameter_name, value, instrument,
+            parameter_name, value, instrument, parameter_version
         )
         writer.write_dict_to_model_parameter_json(output_file, _json_dict)
         if metadata_input_dict is not None:
             metadata_input_dict["output_file"] = output_file
             metadata_input_dict["output_file_format"] = Path(output_file).suffix.lstrip(".")
             writer.write_metadata_to_yml(
-                metadata=MetadataCollector(args_dict=metadata_input_dict).
+                metadata=MetadataCollector(args_dict=metadata_input_dict).get_top_level_metadata(),
                 yml_file=output_path / f"{Path(output_file).stem}",
             )
         return _json_dict
 
-    def get_validated_parameter_dict(
+    def get_validated_parameter_dict(
+        self, parameter_name, value, instrument, parameter_version, schema_version=None
+    ):
         """
         Get validated parameter dictionary.
 
@@ -187,8 +188,10 @@ class ModelDataWriter:
             Value of the parameter.
         instrument: str
             Name of the instrument.
-
-            Version of the
+        parameter_version: str
+            Version of the parameter.
+        schema_version: str
+            Version of the schema.
 
         Returns
         -------
@@ -196,29 +199,26 @@ class ModelDataWriter:
             Validated parameter dictionary.
         """
         self._logger.debug(f"Getting validated parameter dictionary for {instrument}")
-        schema_file =
+        schema_file = schema.get_model_parameter_schema_file(parameter_name)
+        self.schema_dict = gen.collect_data_from_file(schema_file)
 
         try:  # e.g. instrument is 'North"
             site = names.validate_site_name(instrument)
         except ValueError:  # e.g. instrument is 'LSTN-01'
             site = names.get_site_from_array_element_name(instrument)
 
-        try:
-            applicable = self._get_parameter_applicability(instrument)
-        except ValueError:
-            applicable = True  # Default to True (expect that this field goes in future)
-
         value, unit = value_conversion.split_value_and_unit(value)
 
         data_dict = {
+            "schema_version": schema.get_model_parameter_schema_version(schema_version),
             "parameter": parameter_name,
             "instrument": instrument,
             "site": site,
-            "
+            "parameter_version": parameter_version,
+            "unique_id": None,
             "value": value,
             "unit": unit,
             "type": self._get_parameter_type(),
-            "applicable": applicable,
             "file": self._parameter_is_a_file(),
         }
         return self.validate_and_transform(
@@ -227,22 +227,6 @@ class ModelDataWriter:
             is_model_parameter=True,
         )
 
-    def _read_model_parameter_schema(self, parameter_name):
-        """
-        Read model parameter schema.
-
-        Parameters
-        ----------
-        parameter_name: str
-            Name of the parameter.
-        """
-        schema_file = MODEL_PARAMETER_SCHEMA_PATH / f"{parameter_name}.schema.yml"
-        try:
-            self.schema_dict = gen.collect_data_from_file(file_name=schema_file)
-        except FileNotFoundError as exc:
-            raise FileNotFoundError(f"Schema file not found: {schema_file}") from exc
-        return schema_file
-
     def _get_parameter_type(self):
         """
         Return parameter type from schema.
@@ -273,36 +257,6 @@ class ModelDataWriter:
             pass
         return False
 
-    def _get_parameter_applicability(self, telescope_name):
-        """
-        Check if a parameter is applicable for a given telescope using schema files.
-
-        First check for exact telescope name (e.g., LSTN-01), if not listed in the schema
-        use telescope type (LSTN).
-
-        Parameters
-        ----------
-        telescope_name: str
-            Telescope name (e.g., LSTN-01)
-
-        Returns
-        -------
-        bool
-            True if parameter is applicable to telescope.
-
-        """
-        try:
-            if telescope_name in self.schema_dict["instrument"]["type"]:
-                return True
-        except KeyError as exc:
-            self._logger.error("Schema file does not contain 'instrument:type' key.")
-            raise exc
-
-        return (
-            names.get_array_element_type_from_name(telescope_name)
-            in self.schema_dict["instrument"]["type"]
-        )
-
     def _get_unit_from_schema(self):
         """
         Return unit(s) from schema dict.
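
For orientation, this is roughly the shape of the dictionary that the reworked get_validated_parameter_dict assembles before validation. The keys follow the diff above; the concrete values (parameter name, instrument, versions, value, unit, type) are illustrative only.

# Illustrative sketch only: keys as written by ModelDataWriter in 0.10.0, values invented.
example_parameter_dict = {
    "schema_version": "0.1.0",  # resolved via schema.get_model_parameter_schema_version()
    "parameter": "effective_focal_length",
    "instrument": "LSTN-01",
    "site": "North",  # derived from the instrument name
    "parameter_version": "1.0.0",  # new in 0.10.0
    "unique_id": None,  # new in 0.10.0
    "value": 2900.0,
    "unit": "cm",
    "type": "float64",
    "file": False,  # the 'applicable' key is no longer written
}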

simtools/data_model/schema.py (new file)

@@ -0,0 +1,213 @@
+"""Module providing functionality to read and validate dictionaries using schema."""
+
+import logging
+from pathlib import Path
+
+import jsonschema
+
+import simtools.utils.general as gen
+from simtools.constants import (
+    METADATA_JSON_SCHEMA,
+    MODEL_PARAMETER_METASCHEMA,
+    MODEL_PARAMETER_SCHEMA_PATH,
+    SCHEMA_PATH,
+)
+from simtools.data_model import format_checkers
+from simtools.utils import names
+
+_logger = logging.getLogger(__name__)
+
+
+def get_get_model_parameter_schema_files(schema_directory=MODEL_PARAMETER_SCHEMA_PATH):
+    """
+    Return list of parameters and schema files located in schema file directory.
+
+    Returns
+    -------
+    list
+        List of parameters found in schema file directory.
+    list
+        List of schema files found in schema file directory.
+
+    """
+    schema_files = sorted(Path(schema_directory).rglob("*.schema.yml"))
+    if not schema_files:
+        raise FileNotFoundError(f"No schema files found in {schema_directory}")
+    parameters = []
+    for schema_file in schema_files:
+        schema_dict = gen.collect_data_from_file(file_name=schema_file)
+        parameters.append(schema_dict.get("name"))
+    return parameters, schema_files
+
+
+def get_model_parameter_schema_file(parameter):
+    """
+    Return schema file path for a given model parameter.
+
+    Parameters
+    ----------
+    parameter: str
+        Model parameter name.
+
+    Returns
+    -------
+    Path
+        Schema file path.
+
+    """
+    schema_file = MODEL_PARAMETER_SCHEMA_PATH / f"{parameter}.schema.yml"
+    if not schema_file.exists():
+        raise FileNotFoundError(f"Schema file not found: {schema_file}")
+    return schema_file
+
+
+def get_model_parameter_schema_version(schema_version=None):
+    """
+    Validate and return schema versions.
+
+    If no schema_version is given, the most recent version is provided.
+
+    Parameters
+    ----------
+    schema_version: str
+        Schema version.
+
+    Returns
+    -------
+    str
+        Schema version.
+
+    """
+    schemas = gen.collect_data_from_file(MODEL_PARAMETER_METASCHEMA)
+
+    if schema_version is None and schemas:
+        return schemas[0].get("version")
+
+    if any(schema.get("version") == schema_version for schema in schemas):
+        return schema_version
+
+    raise ValueError(f"Schema version {schema_version} not found in {MODEL_PARAMETER_METASCHEMA}.")
+
+
+def validate_dict_using_schema(data, schema_file=None, json_schema=None):
+    """
+    Validate a data dictionary against a schema.
+
+    Parameters
+    ----------
+    data
+        dictionary to be validated
+    schema_file (dict)
+        schema used for validation
+
+    Raises
+    ------
+    jsonschema.exceptions.ValidationError
+        if validation fails
+
+    """
+    if json_schema is None and schema_file is None:
+        _logger.warning(f"No schema provided for validation of {data}")
+        return
+    if json_schema is None:
+        json_schema = load_schema(
+            schema_file,
+            data.get("schema_version", "0.1.0"),  # default version to ensure backward compatibility
+        )
+
+    try:
+        jsonschema.validate(data, schema=json_schema, format_checker=format_checkers.format_checker)
+    except jsonschema.exceptions.ValidationError as exc:
+        _logger.error(f"Validation failed using schema: {json_schema}")
+        raise exc
+    if data.get("meta_schema_url") and not gen.url_exists(data["meta_schema_url"]):
+        raise FileNotFoundError(f"Meta schema URL does not exist: {data['meta_schema_url']}")
+
+    _logger.debug(f"Successful validation of data using schema ({json_schema.get('name')})")
+
+
+def load_schema(schema_file=None, schema_version=None):
+    """
+    Load parameter schema from file.
+
+    Parameters
+    ----------
+    schema_file: str
+        Path to schema file.
+    schema_version: str
+        Schema version.
+
+    Returns
+    -------
+    schema: dict
+        Schema dictionary.
+
+    Raises
+    ------
+    FileNotFoundError
+        if schema file is not found
+
+    """
+    schema_file = schema_file or METADATA_JSON_SCHEMA
+
+    for path in (schema_file, SCHEMA_PATH / schema_file):
+        try:
+            schema = gen.collect_data_from_file(file_name=path)
+            break
+        except FileNotFoundError:
+            continue
+    else:
+        raise FileNotFoundError(f"Schema file not found: {schema_file}")
+
+    if isinstance(schema, list):  # schema file with several schemas defined
+        if schema_version is None:
+            raise ValueError(f"Schema version not given in {schema_file}.")
+        schema = next((doc for doc in schema if doc.get("version") == schema_version), None)
+        if schema is None:
+            raise ValueError(f"Schema version {schema_version} not found in {schema_file}.")
+    elif schema_version is not None and schema_version != schema.get("version"):
+        _logger.warning(f"Schema version {schema_version} does not match {schema.get('version')}")
+
+    _logger.debug(f"Loading schema from {schema_file}")
+    _add_array_elements("InstrumentTypeElement", schema)
+
+    return schema
+
+
+def _add_array_elements(key, schema):
+    """
+    Add list of array elements to schema.
+
+    Avoids having to list all array elements in multiple schema.
+    Assumes an element [key]['enum'] is a list of elements.
+
+    Parameters
+    ----------
+    key: str
+        Key in schema dictionary
+    schema: dict
+        Schema dictionary
+
+    Returns
+    -------
+    dict
+        Schema dictionary with added array elements.
+
+    """
+    _list_of_array_elements = sorted(names.array_elements().keys())
+
+    def recursive_search(sub_schema, key):
+        if key in sub_schema:
+            if "enum" in sub_schema[key] and isinstance(sub_schema[key]["enum"], list):
+                sub_schema[key]["enum"] = list(
+                    set(sub_schema[key]["enum"] + _list_of_array_elements)
+                )
+            else:
+                sub_schema[key]["enum"] = _list_of_array_elements
+        else:
+            for _, v in sub_schema.items():
+                if isinstance(v, dict):
+                    recursive_search(v, key)
+
+    recursive_search(schema, key)
+    return schema
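
The new simtools.data_model.schema module collects the schema lookup and validation helpers that were previously spread across ModelDataWriter and DataValidator. A minimal usage sketch, assuming gammasimtools 0.10.0 is installed; the parameter name is taken from the schema listing at the top of this diff, and parameter_dict is a stand-in for a complete model-parameter dictionary.

from simtools.data_model import schema

# All model parameters with a bundled schema file.
parameters, schema_files = schema.get_get_model_parameter_schema_files()

# Schema file for a single parameter.
schema_file = schema.get_model_parameter_schema_file("effective_focal_length")

# Most recent model-parameter schema version defined in the metaschema.
latest_version = schema.get_model_parameter_schema_version()

# Validation raises jsonschema.exceptions.ValidationError on failure:
# schema.validate_dict_using_schema(parameter_dict, schema_file=schema_file)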

simtools/data_model/validate_data.py

@@ -5,14 +5,13 @@ import os
 import re
 from pathlib import Path
 
-import jsonschema
 import numpy as np
 from astropy import units as u
 from astropy.table import Column, Table, unique
 from astropy.utils.diff import report_diff_values
 
 import simtools.utils.general as gen
-from simtools.data_model import
+from simtools.data_model import schema
 from simtools.utils import value_conversion
 
 __all__ = ["DataValidator"]
@@ -57,7 +56,7 @@ class DataValidator:
         self.data_table = data_table
         self.check_exact_data_type = check_exact_data_type
 
-    def validate_and_transform(self, is_model_parameter=False):
+    def validate_and_transform(self, is_model_parameter=False, lists_as_strings=False):
         """
         Validate data and data file.
 
@@ -65,6 +64,8 @@ class DataValidator:
         ----------
         is_model_parameter: bool
             This is a model parameter (add some data preparation)
+        lists_as_strings: bool
+            Convert lists to strings (as needed for model parameters)
 
         Returns
         -------
@@ -80,13 +81,9 @@ class DataValidator:
         if self.data_file_name:
             self.validate_data_file()
         if isinstance(self.data_dict, dict):
-
-            self._prepare_model_parameter()
-            self._validate_data_dict()
-            return self.data_dict
+            return self._validate_data_dict(is_model_parameter, lists_as_strings)
         if isinstance(self.data_table, Table):
-            self._validate_data_table()
-            return self.data_table
+            return self._validate_data_table()
         self._logger.error("No data or data table to validate")
         raise TypeError
 
@@ -108,28 +105,58 @@ class DataValidator:
 
     def validate_parameter_and_file_name(self):
         """Validate that file name and key 'parameter_name' in data dict are the same."""
-        if self.data_dict.get("parameter")
+        if not str(Path(self.data_file_name).stem).startswith(self.data_dict.get("parameter")):
             raise ValueError(
                 f"Parameter name in data dict {self.data_dict.get('parameter')} and "
                 f"file name {Path(self.data_file_name).stem} do not match."
             )
 
-
+    @staticmethod
+    def validate_model_parameter(par_dict):
+        """
+        Validate a simulation model parameter (static method).
+
+        Parameters
+        ----------
+        par_dict: dict
+            Data dictionary
+
+        Returns
+        -------
+        dict
+            Validated data dictionary
+        """
+        data_validator = DataValidator(
+            schema_file=schema.get_model_parameter_schema_file(f"{par_dict['parameter']}"),
+            data_dict=par_dict,
+            check_exact_data_type=False,
+        )
+        return data_validator.validate_and_transform(is_model_parameter=True)
+
+    def _validate_data_dict(self, is_model_parameter=False, lists_as_strings=False):
         """
         Validate values in a dictionary.
 
         Handles different types of naming in data dicts (using 'name' or 'parameter'
         keys for name fields).
 
+        Parameters
+        ----------
+        is_model_parameter: bool
+            This is a model parameter (add some data preparation)
+        lists_as_strings: bool
+            Convert lists to strings (as needed for model parameters)
+
         Raises
         ------
         KeyError
             if data dict does not contain a 'name' or 'parameter' key.
 
         """
-        if
-
-
+        if is_model_parameter:
+            self._prepare_model_parameter()
+
+        self._data_description = self._read_validation_schema(self.schema_file_name)
 
         value_as_list, unit_as_list = self._get_value_and_units_as_lists()
 
|
|
|
145
172
|
|
|
146
173
|
self._check_version_string(self.data_dict.get("version"))
|
|
147
174
|
|
|
175
|
+
if lists_as_strings:
|
|
176
|
+
self._convert_results_to_model_format()
|
|
177
|
+
|
|
178
|
+
return self.data_dict
|
|
179
|
+
|
|
148
180
|
def _validate_value_and_unit(self, value, unit, index):
|
|
149
181
|
"""
|
|
150
182
|
Validate value, unit, and perform type checking and conversions.
|
|
@@ -152,8 +184,9 @@ class DataValidator:
|
|
|
152
184
|
Take into account different data types and allow to use json_schema for testing.
|
|
153
185
|
"""
|
|
154
186
|
if self._get_data_description(index).get("type", None) == "dict":
|
|
155
|
-
|
|
156
|
-
self.data_dict["value"],
|
|
187
|
+
schema.validate_dict_using_schema(
|
|
188
|
+
data=self.data_dict["value"],
|
|
189
|
+
json_schema=self._get_data_description(index).get("json_schema"),
|
|
157
190
|
)
|
|
158
191
|
else:
|
|
159
192
|
self._check_data_type(np.array(value).dtype, index)
|
|
@@ -191,28 +224,13 @@ class DataValidator:
|
|
|
191
224
|
conversion_factor = [
|
|
192
225
|
1 if v is None else u.Unit(v).to(u.Unit(t)) for v, t in zip(unit, target_unit)
|
|
193
226
|
]
|
|
194
|
-
return [v * c for v, c in zip(value, conversion_factor)], target_unit
|
|
195
|
-
|
|
196
|
-
def _validate_data_dict_using_json_schema(self, data, json_schema):
|
|
197
|
-
"""
|
|
198
|
-
Validate a dictionary using a json schema.
|
|
199
|
-
|
|
200
|
-
Parameters
|
|
201
|
-
----------
|
|
202
|
-
data: dict
|
|
203
|
-
Data dictionary
|
|
204
|
-
json_schema: dict
|
|
205
|
-
JSON schema
|
|
206
|
-
"""
|
|
207
|
-
if json_schema is None:
|
|
208
|
-
self._logger.debug("Skipping validation of dict type")
|
|
209
|
-
return
|
|
210
|
-
self._logger.debug("Validation of dict type using JSON schema")
|
|
211
227
|
try:
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
228
|
+
return [
|
|
229
|
+
v * c if not isinstance(v, bool) and not isinstance(v, dict) else v
|
|
230
|
+
for v, c in zip(value, conversion_factor)
|
|
231
|
+
], target_unit
|
|
232
|
+
except TypeError:
|
|
233
|
+
return [None], target_unit
|
|
216
234
|
|
|
217
235
|
def _validate_data_table(self):
|
|
218
236
|
"""Validate tabulated data."""
|
|
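
The unit-conversion change above wraps the scaling step in a try/except and passes booleans and dictionaries through unchanged instead of multiplying them by the conversion factor. The behaviour can be reproduced with astropy alone; this standalone sketch mirrors the new list comprehension.

import astropy.units as u

value = [1.5, True, {"nested": "dict"}]
unit = ["m", None, None]
target_unit = ["cm", None, None]

# 'None' units get a conversion factor of 1, as in the hunk above.
conversion_factor = [
    1 if v is None else u.Unit(v).to(u.Unit(t)) for v, t in zip(unit, target_unit)
]
converted = [
    v * c if not isinstance(v, bool) and not isinstance(v, dict) else v
    for v, c in zip(value, conversion_factor)
]
# converted == [150.0, True, {"nested": "dict"}]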
@@ -228,6 +246,7 @@ class DataValidator:
         self._validate_data_columns()
         self._check_data_for_duplicates()
         self._sort_data()
+        return self.data_table
 
     def _validate_data_columns(self):
         """
@@ -649,7 +668,7 @@ class DataValidator:
 
         return False
 
-    def _read_validation_schema(self, schema_file
+    def _read_validation_schema(self, schema_file):
         """
         Read validation schema from file.
 
@@ -657,11 +676,6 @@ class DataValidator:
         ----------
         schema_file: Path
             Schema file describing input data.
-            If this is a directory, a filename of
-            '<par>.schema.yml' is assumed.
-        parameter: str
-            Parameter name of required schema
-            (if None, return first schema in file)
 
         Returns
         -------
@@ -672,17 +686,11 @@ class DataValidator:
         ------
         KeyError
             if 'data' can not be read from dict in schema file
-
         """
         try:
-            if Path(schema_file).is_dir():
-                return gen.collect_data_from_file(
-                    file_name=Path(schema_file) / (parameter + ".schema.yml"),
-                )["data"]
             return gen.collect_data_from_file(file_name=schema_file)["data"]
-        except KeyError:
-
-            raise
+        except KeyError as exc:
+            raise KeyError(f"Error reading validation schema from {schema_file}") from exc
 
     def _get_data_description(self, column_name=None, status_test=False):
         """
@@ -726,6 +734,8 @@ class DataValidator:
                 )
             )
         except IndexError as exc:
+            if len(self._data_description) == 1:  # all columns are described by the same schema
+                return self._data_description[0]
             self._logger.error(
                 f"Data column '{column_name}' not found in reference column definition"
             )
@@ -771,6 +781,18 @@ class DataValidator:
         if self.data_dict["unit"] is not None:
             self.data_dict["unit"] = gen.convert_string_to_list(self.data_dict["unit"])
 
+    def _convert_results_to_model_format(self):
+        """
+        Convert results to model format.
+
+        Convert lists to strings (as needed for model parameters).
+        """
+        value = self.data_dict["value"]
+        if isinstance(value, list):
+            self.data_dict["value"] = gen.convert_list_to_string(value)
+        if isinstance(self.data_dict["unit"], list):
+            self.data_dict["unit"] = gen.convert_list_to_string(self.data_dict["unit"])
+
     def _check_version_string(self, version):
         """
         Check that version string follows semantic versioning.