gammasimtools-0.23.0-py3-none-any.whl → gammasimtools-0.25.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/METADATA +1 -1
  2. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/RECORD +89 -85
  3. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/entry_points.txt +1 -0
  4. simtools/_version.py +2 -2
  5. simtools/application_control.py +54 -4
  6. simtools/applications/convert_geo_coordinates_of_array_elements.py +1 -1
  7. simtools/applications/db_add_file_to_db.py +2 -2
  8. simtools/applications/db_add_simulation_model_from_repository_to_db.py +1 -1
  9. simtools/applications/db_add_value_from_json_to_db.py +2 -2
  10. simtools/applications/db_development_tools/write_array_elements_positions_to_repository.py +1 -1
  11. simtools/applications/db_generate_compound_indexes.py +1 -1
  12. simtools/applications/db_get_array_layouts_from_db.py +2 -2
  13. simtools/applications/db_get_file_from_db.py +1 -1
  14. simtools/applications/db_get_parameter_from_db.py +1 -1
  15. simtools/applications/db_inspect_databases.py +4 -2
  16. simtools/applications/db_upload_model_repository.py +1 -1
  17. simtools/applications/derive_ctao_array_layouts.py +1 -1
  18. simtools/applications/derive_psf_parameters.py +5 -0
  19. simtools/applications/derive_pulse_shape_parameters.py +195 -0
  20. simtools/applications/generate_array_config.py +1 -1
  21. simtools/applications/maintain_simulation_model_add_production.py +11 -21
  22. simtools/applications/plot_array_layout.py +63 -1
  23. simtools/applications/production_generate_grid.py +1 -1
  24. simtools/applications/simulate_flasher.py +3 -2
  25. simtools/applications/simulate_pedestals.py +1 -1
  26. simtools/applications/simulate_prod.py +8 -23
  27. simtools/applications/simulate_prod_htcondor_generator.py +7 -0
  28. simtools/applications/submit_array_layouts.py +7 -5
  29. simtools/applications/validate_camera_fov.py +1 -1
  30. simtools/applications/validate_cumulative_psf.py +2 -2
  31. simtools/applications/validate_file_using_schema.py +49 -123
  32. simtools/applications/validate_optics.py +1 -1
  33. simtools/configuration/commandline_parser.py +15 -15
  34. simtools/configuration/configurator.py +1 -1
  35. simtools/corsika/corsika_config.py +199 -91
  36. simtools/data_model/model_data_writer.py +15 -3
  37. simtools/data_model/schema.py +145 -36
  38. simtools/data_model/validate_data.py +82 -48
  39. simtools/db/db_handler.py +61 -294
  40. simtools/db/db_model_upload.py +3 -2
  41. simtools/db/mongo_db.py +626 -0
  42. simtools/dependencies.py +38 -17
  43. simtools/io/eventio_handler.py +128 -0
  44. simtools/job_execution/htcondor_script_generator.py +0 -2
  45. simtools/layout/array_layout.py +7 -7
  46. simtools/layout/array_layout_utils.py +4 -4
  47. simtools/model/array_model.py +72 -72
  48. simtools/model/calibration_model.py +12 -9
  49. simtools/model/model_parameter.py +196 -160
  50. simtools/model/model_repository.py +176 -39
  51. simtools/model/model_utils.py +3 -3
  52. simtools/model/site_model.py +59 -27
  53. simtools/model/telescope_model.py +21 -13
  54. simtools/ray_tracing/mirror_panel_psf.py +4 -4
  55. simtools/ray_tracing/psf_analysis.py +11 -8
  56. simtools/ray_tracing/psf_parameter_optimisation.py +823 -680
  57. simtools/reporting/docs_auto_report_generator.py +1 -1
  58. simtools/reporting/docs_read_parameters.py +72 -11
  59. simtools/runners/corsika_runner.py +12 -3
  60. simtools/runners/corsika_simtel_runner.py +6 -0
  61. simtools/runners/runner_services.py +17 -7
  62. simtools/runners/simtel_runner.py +12 -54
  63. simtools/schemas/model_parameters/flasher_pulse_exp_decay.schema.yml +2 -0
  64. simtools/schemas/model_parameters/flasher_pulse_shape.schema.yml +50 -0
  65. simtools/schemas/model_parameters/flasher_pulse_width.schema.yml +2 -0
  66. simtools/schemas/simulation_models_info.schema.yml +4 -1
  67. simtools/simtel/pulse_shapes.py +268 -0
  68. simtools/simtel/simtel_config_writer.py +179 -21
  69. simtools/simtel/simtel_io_event_writer.py +2 -2
  70. simtools/simtel/simulator_array.py +58 -12
  71. simtools/simtel/simulator_light_emission.py +45 -8
  72. simtools/simulator.py +361 -346
  73. simtools/testing/assertions.py +110 -10
  74. simtools/testing/configuration.py +1 -1
  75. simtools/testing/log_inspector.py +4 -1
  76. simtools/testing/sim_telarray_metadata.py +1 -1
  77. simtools/testing/validate_output.py +46 -15
  78. simtools/utils/names.py +2 -4
  79. simtools/utils/value_conversion.py +10 -5
  80. simtools/version.py +61 -0
  81. simtools/visualization/legend_handlers.py +14 -4
  82. simtools/visualization/plot_array_layout.py +229 -33
  83. simtools/visualization/plot_mirrors.py +837 -0
  84. simtools/visualization/plot_pixels.py +1 -1
  85. simtools/visualization/plot_psf.py +1 -1
  86. simtools/visualization/plot_tables.py +1 -1
  87. simtools/simtel/simtel_io_file_info.py +0 -62
  88. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/WHEEL +0 -0
  89. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/licenses/LICENSE +0 -0
  90. {gammasimtools-0.23.0.dist-info → gammasimtools-0.25.0.dist-info}/top_level.txt +0 -0
simtools/data_model/schema.py

@@ -4,12 +4,9 @@ import logging
 from pathlib import Path

 import jsonschema
-from packaging.specifiers import SpecifierSet
-from packaging.version import Version
 from referencing import Registry, Resource

 import simtools.utils.general as gen
-from simtools import version
 from simtools.constants import (
     METADATA_JSON_SCHEMA,
     MODEL_PARAMETER_METASCHEMA,
@@ -17,8 +14,10 @@ from simtools.constants import (
     SCHEMA_PATH,
 )
 from simtools.data_model import format_checkers
+from simtools.dependencies import get_software_version
 from simtools.io import ascii_handler
 from simtools.utils import names
+from simtools.version import check_version_constraint

 _logger = logging.getLogger(__name__)

@@ -96,7 +95,7 @@ def get_model_parameter_schema_version(schema_version=None):


 def validate_dict_using_schema(
-    data, schema_file=None, json_schema=None, ignore_software_version=False
+    data, schema_file=None, json_schema=None, ignore_software_version=False, offline=False
 ):
     """
     Validate a data dictionary against a schema.
@@ -124,7 +123,7 @@ def validate_dict_using_schema(
     if json_schema is None:
         json_schema = load_schema(schema_file, get_schema_version_from_data(data))

-    _validate_deprecation_and_version(data, ignore_software_version)
+    validate_deprecation_and_version(data, ignore_software_version=ignore_software_version)

     validator = jsonschema.Draft6Validator(
         schema=json_schema,
@@ -137,17 +136,23 @@
     except jsonschema.exceptions.ValidationError as exc:
         _logger.error(f"Validation failed using schema: {json_schema} for data: {data}")
         raise exc
-    if (
-        isinstance(data, dict)
-        and data.get("meta_schema_url")
-        and not gen.url_exists(data["meta_schema_url"])
-    ):
-        raise FileNotFoundError(f"Meta schema URL does not exist: {data['meta_schema_url']}")
+
+    if not offline:
+        _validate_meta_schema_url(data)

     _logger.debug(f"Successful validation of data using schema ({json_schema.get('name')})")
     return data


+def _validate_meta_schema_url(data):
+    """Validate meta_schema_url if present in data."""
+    if not isinstance(data, dict):
+        return
+
+    if data.get("meta_schema_url") is not None and not gen.url_exists(data["meta_schema_url"]):
+        raise FileNotFoundError(f"Meta schema URL does not exist: {data['meta_schema_url']}")
+
+
 def _retrieve_yaml_schema_from_uri(uri):
     """Load schema from a file URI."""
     path = SCHEMA_PATH / Path(uri.removeprefix("file:/"))
@@ -307,9 +312,7 @@ def _add_array_elements(key, schema):
     return schema


-def _validate_deprecation_and_version(
-    data, software_name="simtools", ignore_software_version=False
-):
+def validate_deprecation_and_version(data, software_name=None, ignore_software_version=False):
     """
     Check if data contains deprecated parameters or version mismatches.

@@ -317,39 +320,145 @@
     ----------
     data: dict
         Data dictionary to check.
-    software_name: str
-        Name of the software to check version against.
+    software_name: str or None
+        Name of the software to check version against. If None, use complete list
     ignore_software_version: bool
         If True, ignore software version check.
     """
     if not isinstance(data, dict):
         return

+    data_name = data.get("name", "<unknown>")
+
     if data.get("deprecated", False):
         note = data.get("deprecation_note", "(no deprecation note provided)")
-        _logger.warning(f"Data is deprecated. Note: {note}")
+        _logger.warning(f"Data for {data_name} is deprecated. Note: {note}")

-    def check_version(sw):
-        constraint = sw.get("version")
-        if constraint is None:
-            return
-        constraint = constraint.strip()
-        spec = SpecifierSet(constraint, prereleases=True)
-        if Version(version.__version__) in spec:
+    for sw in data.get("simulation_software", []):
+        name, constraint = sw.get("name"), sw.get("version")
+        if not name or not constraint:
+            continue
+        if software_name is not None and name.lower() != software_name.lower():
+            continue
+
+        software_version = get_software_version(name)
+        if check_version_constraint(software_version, constraint):
             _logger.debug(
-                f"Version {version.__version__} of {software_name} matches constraint {constraint}."
+                f"{data_name}: version {software_version} of {name} matches "
+                f"constraint {constraint}."
             )
+            continue
+
+        msg = f"{data_name}: version {software_version} of {name} does not match {constraint}."
+        if ignore_software_version:
+            _logger.warning(f"{msg}, but version check is ignored.")
         else:
-            msg = (
-                f"Version {version.__version__} of {software_name} "
-                f"does not match constraint {constraint}."
-            )
-            if ignore_software_version:
-                _logger.warning(f"{msg}, but version check is ignored.")
-                return
             raise ValueError(msg)

-    for sw in data.get("simulation_software", []):
-        if sw.get("name") == software_name:
-            check_version(sw)
-            break
+
+def validate_schema_from_files(
+    file_directory, file_name=None, schema_file=None, ignore_software_version=False
+):
+    """
+    Validate a schema file or several files in a directory.
+
+    Files to be validated are taken from file_directory and file_name pattern.
+    The schema is either given as command line argument, read from the meta_schema_url or from
+    the metadata section of the data dictionary.
+
+    Parameters
+    ----------
+    file_directory : str or Path, optional
+        Directory with files to be validated.
+    file_name : str or Path, optional
+        File name pattern to be validated.
+    schema_file : str, optional
+        Schema file name provided directly.
+    ignore_software_version : bool
+        If True, ignore software version check.
+    """
+    if file_directory and file_name:
+        file_list = sorted(Path(file_directory).rglob(file_name))
+    else:
+        file_list = [Path(file_name)] if file_name else []
+
+    for _file_name in file_list:
+        try:
+            data = ascii_handler.collect_data_from_file(file_name=_file_name)
+        except FileNotFoundError as exc:
+            raise FileNotFoundError(f"Error reading schema file from {_file_name}") from exc
+        data = data if isinstance(data, list) else [data]
+        try:
+            for data_dict in data:
+                validate_dict_using_schema(
+                    data_dict,
+                    _get_schema_file_name(schema_file, _file_name, data_dict),
+                    ignore_software_version=ignore_software_version,
+                )
+        except Exception as exc:
+            raise ValueError(f"Validation of file {_file_name} failed") from exc
+        _logger.info(f"Successful validation of file {_file_name}")
+
+
+def _get_schema_file_name(schema_file=None, file_name=None, data_dict=None):
+    """
+    Get schema file name from metadata, data dict, or from file.
+
+    Parameters
+    ----------
+    schema_file : str, optional
+        Schema file name provided directly.
+    file_name : str or Path, optional
+        File name to extract schema information from.
+    data_dict : dict, optional
+        Dictionary with metaschema information.
+
+    Returns
+    -------
+    str or None
+        Schema file name.
+    """
+    if schema_file is not None:
+        return schema_file
+
+    if data_dict and (url := data_dict.get("meta_schema_url")):
+        return url
+
+    if file_name:
+        return _extract_schema_from_file(file_name)
+
+    return None
+
+
+def _extract_schema_url_from_metadata_dict(metadata, observatory="cta"):
+    """Extract schema URL from metadata dictionary."""
+    for key in (observatory, observatory.lower()):
+        url = metadata.get(key, {}).get("product", {}).get("data", {}).get("model", {}).get("url")
+        if url:
+            return url
+    return None
+
+
+def _extract_schema_from_file(file_name, observatory="cta"):
+    """
+    Extract schema file name from a metadata or data file.
+
+    Parameters
+    ----------
+    file_name : str or Path
+        File name to extract schema information from.
+    observatory : str
+        Observatory name (default: "cta").
+
+    Returns
+    -------
+    str or None
+        Schema file name or None if not found.
+
+    """
+    try:
+        metadata = ascii_handler.collect_data_from_file(file_name=file_name, yaml_document=0)
+    except FileNotFoundError:
+        return None
+
+    return _extract_schema_url_from_metadata_dict(metadata, observatory)
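
Note on the rewrite above: the removed code compared only simtools' own version against the constraint, using packaging directly, while the new loop resolves the installed version of each listed software via get_software_version() and delegates the comparison to check_version_constraint(). A minimal, self-contained sketch of the constraint semantics the removed code implemented, which check_version_constraint in simtools.version presumably preserves (this is not its actual implementation):

    # Sketch only: PEP 440 constraint matching as in the removed SpecifierSet/Version code.
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    def check_version_constraint(software_version, constraint):
        """Return True if software_version satisfies a constraint such as '>=0.24,<1.0'."""
        return Version(software_version) in SpecifierSet(constraint.strip(), prereleases=True)

    assert check_version_constraint("0.25.0", ">=0.23")
    assert not check_version_constraint("0.22.1", ">=0.23,<1.0")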
simtools/data_model/validate_data.py

@@ -14,6 +14,7 @@ import simtools.utils.general as gen
 from simtools.data_model import schema
 from simtools.io import ascii_handler
 from simtools.utils import names, value_conversion
+from simtools.version import is_valid_semantic_version


 class DataValidator:
@@ -46,7 +47,7 @@
         check_exact_data_type=True,
     ):
         """Initialize validation class and read required reference data columns."""
-        self._logger = logging.getLogger(__name__)
+        self.logger = logging.getLogger(__name__)

         self.data_file_name = data_file
         self.schema_file_name = schema_file
@@ -83,7 +84,7 @@
             return self._validate_data_dict(is_model_parameter, lists_as_strings)
         if isinstance(self.data_table, Table):
             return self._validate_data_table()
-        self._logger.error("No data or data table to validate")
+        self.logger.error("No data or data table to validate")
         raise TypeError

     def validate_data_file(self, is_model_parameter=None):
@@ -100,10 +101,10 @@
         try:
             if Path(self.data_file_name).suffix in (".yml", ".yaml", ".json"):
                 self.data_dict = ascii_handler.collect_data_from_file(self.data_file_name)
-                self._logger.info(f"Validating data from: {self.data_file_name}")
+                self.logger.info(f"Validating data from: {self.data_file_name}")
             else:
                 self.data_table = Table.read(self.data_file_name, guess=True, delimiter=r"\s")
-                self._logger.info(f"Validating tabled data from: {self.data_file_name}")
+                self.logger.info(f"Validating tabled data from: {self.data_file_name}")
         except (AttributeError, TypeError):
             pass
         if is_model_parameter:
@@ -129,7 +130,7 @@
             raise ValueError(f"Mismatch: version '{param_version}' vs. file '{file_stem}'.")

         if param_version is None:
-            self._logger.warning(f"File '{file_stem}' has no parameter version defined.")
+            self.logger.warning(f"File '{file_stem}' has no parameter version defined.")

     @staticmethod
     def validate_model_parameter(par_dict):
@@ -153,6 +154,46 @@
         )
         return data_validator.validate_and_transform(is_model_parameter=True)

+    @staticmethod
+    def validate_data_files(
+        file_directory=None,
+        file_name=None,
+        is_model_parameter=True,
+        check_exact_data_type=False,
+        schema_file=None,
+    ):
+        """
+        Validate data or model parameters in files in a directory or a single file.
+
+        Parameters
+        ----------
+        file_directory: str or Path
+            Directory with files to be validated.
+        file_name: str or Path
+            Name of the file to be validated.
+        is_model_parameter: bool
+            This is a model parameter (add some data preparation).
+        check_exact_data_type: bool
+            Require exact data type for validation.
+        """
+        if file_directory:
+            file_list = sorted(Path(file_directory).rglob("*.json"))
+        elif file_name:
+            file_list = [Path(file_name)]
+        else:
+            return
+
+        for data_file in file_list:
+            parameter_name = re.sub(r"-\d+\.\d+\.\d+", "", data_file.stem)
+            schema_path = schema_file or schema.get_model_parameter_schema_file(f"{parameter_name}")
+            data_validator = DataValidator(
+                schema_file=schema_path,
+                data_file=data_file,
+                check_exact_data_type=check_exact_data_type,
+            )
+            data_validator.validate_and_transform(is_model_parameter)
+            data_validator.logger.info(f"Validated data file {data_file} with schema {schema_path}")
+
     def _validate_data_dict(self, is_model_parameter=False, lists_as_strings=False):
         """
         Validate values in a dictionary.
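
The staticmethod added above enables bulk validation. A hypothetical invocation, assuming a simtools installation and a directory of model-parameter JSON files named like <parameter>-<x.y.z>.json (the version suffix is stripped by the re.sub call shown above before the schema lookup); the directory path here is a made-up example:

    from simtools.data_model.validate_data import DataValidator

    # Validate every *.json file found recursively below the directory against
    # the per-parameter schema resolved from each file's stem.
    DataValidator.validate_data_files(
        file_directory="model_parameters",  # hypothetical path
        is_model_parameter=True,
        check_exact_data_type=False,
    )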
@@ -208,8 +249,10 @@
             self.data_dict.get("instrument"), self.data_dict.get("site")
         )

-        for version_string in ("version", "parameter_version", "model_version"):
-            self._check_version_string(self.data_dict.get(version_string))
+        for version_type in ("version", "parameter_version", "model_version"):
+            version_string = self.data_dict.get(version_type, "0.0.0")
+            if not is_valid_semantic_version(version_string):
+                raise ValueError(f"Invalid version string '{version_string}'")

         if lists_as_strings:
             self._convert_results_to_model_format()
@@ -278,7 +321,7 @@
                 "table_columns", None
             )
         except IndexError:
-            self._logger.error(f"Error reading validation schema from {self.schema_file_name}")
+            self.logger.error(f"Error reading validation schema from {self.schema_file_name}")
            raise

         if self._data_description is not None:
@@ -327,7 +370,7 @@
         for entry in self._data_description:
             if entry.get("required", False):
                 if entry["name"] in self.data_table.columns:
-                    self._logger.debug(f"Found required data column {entry['name']}")
+                    self.logger.debug(f"Found required data column {entry['name']}")
                 else:
                     raise KeyError(f"Missing required column {entry['name']}")

@@ -353,18 +396,18 @@
                 _columns_by_which_to_reverse_sort.append(entry["name"])

         if len(_columns_by_which_to_sort) > 0:
-            self._logger.debug(f"Sorting data columns: {_columns_by_which_to_sort}")
+            self.logger.debug(f"Sorting data columns: {_columns_by_which_to_sort}")
             try:
                 self.data_table.sort(_columns_by_which_to_sort)
             except AttributeError:
-                self._logger.error("No data table defined for sorting")
+                self.logger.error("No data table defined for sorting")
                 raise
         elif len(_columns_by_which_to_reverse_sort) > 0:
-            self._logger.debug(f"Reverse sorting data columns: {_columns_by_which_to_reverse_sort}")
+            self.logger.debug(f"Reverse sorting data columns: {_columns_by_which_to_reverse_sort}")
             try:
                 self.data_table.sort(_columns_by_which_to_reverse_sort, reverse=True)
             except AttributeError:
-                self._logger.error("No data table defined for reverse sorting")
+                self.logger.error("No data table defined for reverse sorting")
                 raise

     def _check_data_for_duplicates(self):
@@ -379,7 +422,7 @@
         """
         _column_with_unique_requirement = self._get_unique_column_requirement()
         if len(_column_with_unique_requirement) == 0:
-            self._logger.debug("No data columns with unique value requirement")
+            self.logger.debug("No data columns with unique value requirement")
             return
         _data_table_unique_for_key_column = unique(
             self.data_table, keys=_column_with_unique_requirement
@@ -412,10 +455,10 @@

         for entry in self._data_description:
             if "input_processing" in entry and "remove_duplicates" in entry["input_processing"]:
-                self._logger.debug(f"Removing duplicates for column {entry['name']}")
+                self.logger.debug(f"Removing duplicates for column {entry['name']}")
                 _unique_required_column.append(entry["name"])

-        self._logger.debug(f"Unique required columns: {_unique_required_column}")
+        self.logger.debug(f"Unique required columns: {_unique_required_column}")
         return _unique_required_column

     def _get_reference_unit(self, column_name):
@@ -470,7 +513,7 @@
             dtype=dtype,
             allow_subtypes=(not self.check_exact_data_type),
         ):
-            self._logger.error(
+            self.logger.error(
                 f"Invalid data type in column '{column_name}'. "
                 f"Expected type '{reference_dtype}', found '{dtype}' "
                 f"(exact type: {self.check_exact_data_type})"
@@ -505,9 +548,9 @@
             data = np.array(data)

         if np.isnan(data).any():
-            self._logger.info(f"Column {col_name} contains NaN.")
+            self.logger.info(f"Column {col_name} contains NaN.")
         if np.isinf(data).any():
-            self._logger.info(f"Column {col_name} contains infinite value.")
+            self.logger.info(f"Column {col_name} contains infinite value.")

         entry = self._get_data_description(col_name)
         if "allow_nan" in entry.get("input_processing", {}):
@@ -593,7 +636,7 @@
             # ensure that the data type is preserved (e.g., integers)
             return (type(data)(u.Unit(column_unit).to(reference_unit) * data), reference_unit)
         except (u.core.UnitConversionError, ValueError) as exc:
-            self._logger.error(
+            self.logger.error(
                 f"Invalid unit in data column '{col_name}'. "
                 f"Expected type '{reference_unit}', found '{column_unit}'"
             )
@@ -696,9 +739,9 @@
         try:
             col_index = int(col_name)
             if col_index < max_logs:
-                self._logger.debug(message)
+                self.logger.debug(message)
         except (ValueError, TypeError):
-            self._logger.debug(message)
+            self.logger.debug(message)

     @staticmethod
     def _interval_check(data, axis_range, range_type):
@@ -817,7 +860,7 @@
         except IndexError as exc:
             if len(self._data_description) == 1: # all columns are described by the same schema
                 return self._data_description[0]
-            self._logger.error(
+            self.logger.error(
                 f"Data column '{column_name}' not found in reference column definition"
             )
             raise exc
@@ -835,7 +878,7 @@
         try:
             return _entry[_index]
         except IndexError:
-            self._logger.error(
+            self.logger.error(
                 f"Data column '{column_name}' not found in reference column definition"
             )
             raise
@@ -868,42 +911,33 @@
         if isinstance(self.data_dict["unit"], list):
             self.data_dict["unit"] = gen.convert_list_to_string(self.data_dict["unit"])

-    def _check_version_string(self, version):
-        """
-        Check that version string follows semantic versioning.
-
-        Parameters
-        ----------
-        version: str
-            version string
-
-        Raises
-        ------
-        ValueError
-            if version string does not follow semantic versioning
-
-        """
-        if version is None:
-            return
-        semver_regex = r"^\d+\.\d+\.\d+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$"
-        if not re.match(semver_regex, version):
-            raise ValueError(f"Invalid version string '{version}'")
-        self._logger.debug(f"Valid version string '{version}'")
-
     def _check_site_and_array_element_consistency(self, instrument, site):
         """
         Check that site and array element names are consistent.

         An example for an inconsistency is 'LSTN' at site 'South'
         """
-        if not all([instrument, site]) or "OBS" in instrument:
+        if not (instrument and site):
+            return
+
+        instruments = [instrument] if isinstance(instrument, str) else instrument
+        if any(inst.startswith("OBS") for inst in instruments):
             return

         def to_sorted_list(value):
             """Return value as sorted list."""
             return [value] if isinstance(value, str) else sorted(value)

-        instrument_site = to_sorted_list(names.get_site_from_array_element_name(instrument))
+        instrument_sites = []
+        for inst in instruments:
+            instrument_sites.append(names.get_site_from_array_element_name(inst))
+        # names.get_site_from_array_element_name might return a list
+        flat_sites = [
+            s
+            for sublist in instrument_sites
+            for s in (sublist if isinstance(sublist, list) else [sublist])
+        ]
+        instrument_site = to_sorted_list(set(flat_sites))
         site = to_sorted_list(site)

         if instrument_site != site:
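
The removed _check_version_string above documents the semver rule this class previously enforced; the new code imports is_valid_semantic_version from simtools.version instead, and defaults missing version keys to "0.0.0" rather than skipping them as the old None check did. A self-contained sketch of the same rule, reusing the regex from the removed method (the actual simtools.version implementation may differ):

    import re

    # Pattern from the removed _check_version_string: MAJOR.MINOR.PATCH with
    # optional pre-release ("-rc.1") and build-metadata ("+build5") suffixes.
    SEMVER_REGEX = r"^\d+\.\d+\.\d+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$"

    def is_valid_semantic_version(version):
        """Return True for strings like '0.25.0' or '1.2.3-rc.1+build5'."""
        return isinstance(version, str) and re.match(SEMVER_REGEX, version) is not None

    assert is_valid_semantic_version("0.25.0")
    assert not is_valid_semantic_version("0.25")  # patch number is required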