gammasimtools 0.11.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (83)
  1. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/METADATA +1 -1
  2. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/RECORD +66 -79
  3. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/entry_points.txt +2 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/convert_all_model_parameters_from_simtel.py +77 -88
  7. simtools/applications/convert_geo_coordinates_of_array_elements.py +1 -1
  8. simtools/applications/db_get_parameter_from_db.py +52 -22
  9. simtools/applications/derive_photon_electron_spectrum.py +1 -1
  10. simtools/applications/docs_produce_array_element_report.py +1 -10
  11. simtools/applications/docs_produce_model_parameter_reports.py +4 -17
  12. simtools/applications/plot_tabular_data.py +14 -2
  13. simtools/applications/{production_derive_limits.py → production_derive_corsika_limits.py} +20 -8
  14. simtools/applications/production_extract_mc_event_data.py +125 -0
  15. simtools/applications/run_application.py +9 -10
  16. simtools/applications/submit_data_from_external.py +1 -1
  17. simtools/applications/submit_model_parameter_from_external.py +2 -1
  18. simtools/camera/single_photon_electron_spectrum.py +6 -2
  19. simtools/configuration/commandline_parser.py +1 -1
  20. simtools/constants.py +7 -0
  21. simtools/data_model/metadata_collector.py +159 -61
  22. simtools/data_model/model_data_writer.py +11 -55
  23. simtools/data_model/schema.py +2 -1
  24. simtools/data_model/validate_data.py +5 -3
  25. simtools/db/db_handler.py +119 -33
  26. simtools/model/model_parameter.py +0 -31
  27. simtools/production_configuration/derive_corsika_limits.py +260 -0
  28. simtools/production_configuration/extract_mc_event_data.py +253 -0
  29. simtools/ray_tracing/mirror_panel_psf.py +1 -1
  30. simtools/reporting/docs_read_parameters.py +164 -91
  31. simtools/schemas/metadata.metaschema.yml +7 -6
  32. simtools/schemas/model_parameter.metaschema.yml +0 -4
  33. simtools/schemas/model_parameter_and_data_schema.metaschema.yml +13 -5
  34. simtools/schemas/model_parameters/array_coordinates.schema.yml +1 -1
  35. simtools/schemas/model_parameters/array_layouts.schema.yml +3 -0
  36. simtools/schemas/model_parameters/asum_shaping.schema.yml +1 -1
  37. simtools/schemas/model_parameters/atmospheric_profile.schema.yml +1 -1
  38. simtools/schemas/model_parameters/camera_config_file.schema.yml +1 -1
  39. simtools/schemas/model_parameters/camera_degraded_map.schema.yml +1 -1
  40. simtools/schemas/model_parameters/camera_filter.schema.yml +1 -1
  41. simtools/schemas/model_parameters/dsum_shaping.schema.yml +1 -1
  42. simtools/schemas/model_parameters/fadc_dev_pedestal.schema.yml +1 -1
  43. simtools/schemas/model_parameters/fadc_lg_dev_pedestal.schema.yml +1 -1
  44. simtools/schemas/model_parameters/fadc_lg_max_sum.schema.yml +3 -3
  45. simtools/schemas/model_parameters/fadc_max_sum.schema.yml +3 -3
  46. simtools/schemas/model_parameters/fake_mirror_list.schema.yml +1 -1
  47. simtools/schemas/model_parameters/lightguide_efficiency_vs_incidence_angle.schema.yml +1 -1
  48. simtools/schemas/model_parameters/lightguide_efficiency_vs_wavelength.schema.yml +1 -1
  49. simtools/schemas/model_parameters/mirror_list.schema.yml +1 -1
  50. simtools/schemas/model_parameters/nsb_reference_spectrum.schema.yml +1 -1
  51. simtools/schemas/model_parameters/nsb_skymap.schema.yml +1 -1
  52. simtools/schemas/model_parameters/primary_mirror_degraded_map.schema.yml +1 -1
  53. simtools/schemas/model_parameters/primary_mirror_segmentation.schema.yml +1 -1
  54. simtools/schemas/model_parameters/secondary_mirror_degraded_map.schema.yml +1 -1
  55. simtools/schemas/model_parameters/secondary_mirror_segmentation.schema.yml +1 -1
  56. simtools/schemas/plot_configuration.metaschema.yml +162 -0
  57. simtools/schemas/production_tables.schema.yml +1 -1
  58. simtools/simtel/simtel_config_reader.py +85 -34
  59. simtools/simtel/simtel_table_reader.py +4 -0
  60. simtools/utils/general.py +50 -9
  61. simtools/utils/names.py +7 -2
  62. simtools/utils/value_conversion.py +6 -4
  63. simtools/visualization/plot_tables.py +25 -20
  64. simtools/visualization/visualize.py +71 -23
  65. simtools/_dev_version/__init__.py +0 -9
  66. simtools/applications/__init__.py +0 -0
  67. simtools/configuration/__init__.py +0 -0
  68. simtools/corsika/__init__.py +0 -0
  69. simtools/data_model/__init__.py +0 -0
  70. simtools/db/__init__.py +0 -0
  71. simtools/io_operations/__init__.py +0 -0
  72. simtools/job_execution/__init__.py +0 -0
  73. simtools/layout/__init__.py +0 -0
  74. simtools/model/__init__.py +0 -0
  75. simtools/production_configuration/limits_calculation.py +0 -202
  76. simtools/ray_tracing/__init__.py +0 -0
  77. simtools/runners/__init__.py +0 -0
  78. simtools/simtel/__init__.py +0 -0
  79. simtools/testing/__init__.py +0 -0
  80. simtools/utils/__init__.py +0 -0
  81. simtools/visualization/__init__.py +0 -0
  82. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/LICENSE +0 -0
  83. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/top_level.txt +0 -0
simtools/applications/production_extract_mc_event_data.py ADDED
@@ -0,0 +1,125 @@
+ #!/usr/bin/python3
+
+ """
+ Reduces and compiles event data from multiple input files into a structured dataset with event info.
+
+ Command line arguments
+ ----------------------
+ prefix (str, required)
+ Path prefix for the input files.
+ output_file (str, required)
+ Path to save the output file.
+ max_files (int, optional, default=100)
+ Maximum number of files to process.
+ print_dataset_information (flag)
+ Print information about the datasets in the generated reduced event dataset.
+
+ Example
+ -------
+ Generate a reduced dataset from input files and save the result.
+
+ .. code-block:: console
+
+ simtools-production-extract-mc-event-data \
+ --prefix path/to/input_files/ \
+ --wildcard 'gamma_*dark*.simtel.zst' \
+ --output_file output_file.hdf5 \
+ --max_files 50 \
+ --print_dataset_information
+ """
+
+ import logging
+ from pathlib import Path
+
+ import simtools.utils.general as gen
+ from simtools.configuration import configurator
+ from simtools.io_operations import io_handler
+ from simtools.production_configuration.extract_mc_event_data import MCEventExtractor
+
+
+ def _parse(label, description):
+ """
+ Parse command line arguments.
+
+ Returns
+ -------
+ dict
+ Parsed command-line arguments.
+ """
+ config = configurator.Configurator(label=label, description=description)
+
+ config.parser.add_argument(
+ "--prefix", type=str, required=True, help="Prefix path for input files."
+ )
+ config.parser.add_argument(
+ "--wildcard",
+ type=str,
+ required=True,
+ help="Wildcard for querying the files in the directory (e.g., 'gamma_*dark*.simtel.zst')",
+ )
+ config.parser.add_argument("--output_file", type=str, required=True, help="Output filename.")
+ config.parser.add_argument(
+ "--max_files", type=int, default=100, help="Maximum number of files to process."
+ )
+
+ config.parser.add_argument(
+ "--print_dataset_information",
+ action="store_true",
+ help="Print information about the datasets in the generated reduced event dataset.",
+ )
+
+ return config.initialize(db_config=False)
+
+
+ def main():
+ """
+ Process event data files and store data in reduced dataset.
+
+ The reduced dataset contains the following information:
+ - simulated: List of simulated events.
+ - shower_id_triggered: List of triggered shower IDs
+ (as in the telescope definition file used for simulations).
+ - triggered_energies: List of energies for triggered events.
+ - num_triggered_telescopes: Number of triggered telescopes for each event.
+ - core_x: X-coordinate of the shower core (ground coordinates).
+ - core_y: Y-coordinate of the shower core (ground coordinates).
+ - trigger_telescope_list_list: List of lists containing triggered telescope IDs.
+ - file_names: List of input file names.
+ - shower_sim_azimuth: Simulated azimuth angle of the shower.
+ - shower_sim_altitude: Simulated altitude angle of the shower.
+ - array_altitudes: List of altitudes for the array.
+ - array_azimuths: List of azimuths for the array.
+ """
+ label = Path(__file__).stem
+
+ args_dict, _ = _parse(
+ label=label,
+ description=(
+ "Process files and store reduced dataset with event information, "
+ "array information and triggered telescopes."
+ ),
+ )
+
+ _logger = logging.getLogger()
+ _logger.setLevel(gen.get_log_level_from_user(args_dict["log_level"]))
+ _logger.info(f"Loading input files with prefix: {args_dict['prefix']}")
+
+ input_path = Path(args_dict["prefix"])
+ files = list(input_path.glob(args_dict["wildcard"]))
+ if not files:
+ _logger.warning("No matching input files found.")
+ return
+
+ output_path = io_handler.IOHandler().get_output_directory(label)
+ output_filepath = Path(output_path).joinpath(f"{args_dict['output_file']}")
+
+ output_filepath.parent.mkdir(parents=True, exist_ok=True)
+ generator = MCEventExtractor(files, output_filepath, args_dict["max_files"])
+ generator.process_files()
+ _logger.info(f"reduced dataset saved to: {output_filepath}")
+ if args_dict["print_dataset_information"]:
+ generator.print_dataset_information()
+
+
+ if __name__ == "__main__":
+ main()
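The application above delegates the actual event reduction to the new MCEventExtractor class (simtools/production_configuration/extract_mc_event_data.py in the file list). A minimal sketch of driving it directly from Python, assuming only the constructor arguments and methods visible in main() above:

    # Hypothetical direct use of MCEventExtractor; the constructor signature
    # (file list, output path, maximum number of files) is taken from the call in main().
    from pathlib import Path

    from simtools.production_configuration.extract_mc_event_data import MCEventExtractor

    files = sorted(Path("path/to/input_files").glob("gamma_*dark*.simtel.zst"))
    extractor = MCEventExtractor(files, Path("reduced_events.hdf5"), 50)
    extractor.process_files()  # build and write the reduced event dataset
    extractor.print_dataset_information()  # optional summary of the generated datasets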
simtools/applications/run_application.py CHANGED
@@ -49,7 +49,7 @@ def _parse(label, description, usage):
  required=True,
  default=None,
  )
- return config.initialize(db_config=False)
+ return config.initialize(db_config=True)


  def run_application(application, configuration, logger):
@@ -111,13 +111,12 @@ def read_application_configuration(configuration_file, logger):
  application_config = gen.collect_data_from_file(configuration_file).get("CTA_SIMPIPE")
  place_holder = "__SETTING_WORKFLOW__"
  workflow_dir, setting_workflow = get_subdirectory_name(configuration_file)
- output_path = str(workflow_dir).replace("input", "output") + setting_workflow
- logger.info(f"Setting workflow output path to {output_path}")
- log_file = (
- Path(application_config.get("LOG_PATH", "./").replace(place_holder, setting_workflow))
- / "simtools.log"
+ output_path = (
+ workflow_dir.with_name(workflow_dir.name.replace("input", "output")) / setting_workflow
  )
- log_file.parent.mkdir(parents=True, exist_ok=True)
+ output_path.mkdir(parents=True, exist_ok=True)
+ logger.info(f"Setting workflow output path to {output_path}")
+ log_file = output_path / "simtools.log"
  configurations = application_config.get("APPLICATIONS")
  for config in configurations:
  for key, value in config.get("CONFIGURATION", {}).items():
@@ -128,13 +127,13 @@ def read_application_configuration(configuration_file, logger):
  item.replace(place_holder, setting_workflow) for item in value
  ]
  config["CONFIGURATION"]["USE_PLAIN_OUTPUT_PATH"] = True
- config["OUTPUT_PATH"] = output_path
+ config["CONFIGURATION"]["OUTPUT_PATH"] = str(output_path)

  return configurations, log_file


  def main(): # noqa: D103
- args_dict, _ = _parse(
+ args_dict, db_config = _parse(
  Path(__file__).stem,
  description="Run simtools applications from configuration file.",
  usage="simtools-run-application --config_file config_file_name",
@@ -148,7 +147,7 @@ def main(): # noqa: D103

  with log_file.open("w", encoding="utf-8") as file:
  file.write("Running simtools applications\n")
- file.write(dependencies.get_version_string())
+ file.write(dependencies.get_version_string(db_config))
  for config in configurations:
  logger.info(f"Running application: {config.get('APPLICATION')}")
  config = gen.change_dict_keys_case(config, False)
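The output-path rework in read_application_configuration() above replaces string concatenation with pathlib operations, so only the last path component is renamed and the workflow name is joined as a proper subdirectory. A small illustration with hypothetical values:

    from pathlib import Path

    workflow_dir = Path("workflows/input")  # hypothetical result of get_subdirectory_name()
    setting_workflow = "setting_a"

    # old behaviour: plain string handling, no separator before the workflow name
    str(workflow_dir).replace("input", "output") + setting_workflow
    # -> 'workflows/outputsetting_a'

    # new behaviour: rename the last component and join as a Path
    workflow_dir.with_name(workflow_dir.name.replace("input", "output")) / setting_workflow
    # -> PosixPath('workflows/output/setting_a')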
@@ -112,7 +112,7 @@ def main(): # noqa: D103

  writer.ModelDataWriter.dump(
  args_dict=args_dict,
- metadata=_metadata.get_top_level_metadata() if _metadata else None,
+ metadata=_metadata,
  product_data=data_validator.validate_and_transform(),
  )
 
@@ -85,8 +85,9 @@ def _parse(label, description):
  )
  config.parser.add_argument(
  "--input_meta",
- help="meta data file associated to input data",
+ help="meta data file(s) associated to input data (wildcards or list of files allowed)",
  type=str,
+ nargs="+",
  required=False,
  )
  config.parser.add_argument(
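With nargs="+", the option above now collects one or more values into a list; wildcard patterns are passed through as strings and resolved later (see the metadata_collector.py changes below). A plain argparse illustration of the new behaviour:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--input_meta", type=str, nargs="+", required=False)

    args = parser.parse_args(["--input_meta", "telescope.meta.yml", "site.meta.yml"])
    print(args.input_meta)  # ['telescope.meta.yml', 'site.meta.yml']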
simtools/camera/single_photon_electron_spectrum.py CHANGED
@@ -10,7 +10,7 @@ from pathlib import Path
  from astropy.table import Table

  import simtools.data_model.model_data_writer as writer
- from simtools.constants import SCHEMA_PATH
+ from simtools.constants import MODEL_PARAMETER_SCHEMA_URL, SCHEMA_PATH
  from simtools.data_model import validate_data
  from simtools.data_model.metadata_collector import MetadataCollector
  from simtools.io_operations import io_handler
@@ -44,6 +44,10 @@ class SinglePhotonElectronSpectrum:
  )
  self.io_handler = io_handler.IOHandler()
  self.data = "" # Single photon electron spectrum data (as string)
+ self.args_dict["metadata_product_data_name"] = "single_pe_spectrum"
+ self.args_dict["metadata_product_data_url"] = (
+ MODEL_PARAMETER_SCHEMA_URL + "/pm_photoelectron_spectrum.schema.yml"
+ )
  self.metadata = MetadataCollector(args_dict=self.args_dict)

  def derive_single_pe_spectrum(self):
@@ -83,7 +87,7 @@

  writer.ModelDataWriter.dump(
  args_dict=self.args_dict,
- metadata=self.metadata.top_level_meta,
+ metadata=self.metadata,
  product_data=table,
  validate_schema_file=None,
  )
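The two args_dict entries added to SinglePhotonElectronSpectrum above act as fallbacks for the product data-model metadata; the consuming side is the "# DATA:MODEL" hunk in metadata_collector.py further down. A sketch of the URL that ends up in the metadata, using the MODEL_PARAMETER_SCHEMA_URL value added in constants.py below:

    # Reproduces the concatenation as released; the resulting string is shown verbatim.
    MODEL_PARAMETER_SCHEMA_URL = (
        "https://raw.githubusercontent.com/gammasim/simtools/main/src/simtools/schemas/"
        "/model_parameters"
    )
    url = MODEL_PARAMETER_SCHEMA_URL + "/pm_photoelectron_spectrum.schema.yml"
    # 'https://raw.githubusercontent.com/gammasim/simtools/main/src/simtools/schemas//model_parameters/pm_photoelectron_spectrum.schema.yml'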
simtools/configuration/commandline_parser.py CHANGED
@@ -218,7 +218,7 @@ class CommandLineParser(argparse.ArgumentParser):
  help="database with user info (optional)",
  type=str,
  required=False,
- default="admin",
+ default=None,
  )
  _job_group.add_argument(
  "--db_simulation_model",
simtools/constants.py CHANGED
@@ -6,6 +6,8 @@ from importlib.resources import files
  SCHEMA_PATH = files("simtools") / "schemas"
  # Path to metadata jsonschema
  METADATA_JSON_SCHEMA = SCHEMA_PATH / "metadata.metaschema.yml"
+ # Path to plotting configuration json schema
+ PLOT_CONFIG_SCHEMA = SCHEMA_PATH / "plot_configuration.metaschema.yml"
  # Path to model parameter metaschema
  MODEL_PARAMETER_METASCHEMA = SCHEMA_PATH / "model_parameter.metaschema.yml"
  # Path to model parameter description metaschema
@@ -14,3 +16,8 @@ MODEL_PARAMETER_DESCRIPTION_METASCHEMA = (
  )
  # Path to model parameter schema files
  MODEL_PARAMETER_SCHEMA_PATH = SCHEMA_PATH / "model_parameters"
+ # URL to model parameter schema files
+ MODEL_PARAMETER_SCHEMA_URL = (
+ "https://raw.githubusercontent.com/gammasim/simtools/main/src/simtools/schemas/"
+ "/model_parameters"
+ )
simtools/data_model/metadata_collector.py CHANGED
@@ -11,6 +11,8 @@ import logging
  import uuid
  from pathlib import Path

+ import yaml
+
  import simtools.constants
  import simtools.utils.general as gen
  import simtools.version
@@ -68,9 +70,7 @@ class MetadataCollector:
  self.top_level_meta = gen.change_dict_keys_case(
  data_dict=metadata_model.get_default_metadata_dict(), lower_case=True
  )
- self.input_metadata = self._read_input_metadata_from_file(
- metadata_file_name=metadata_file_name
- )
+ self.input_metadata = self._read_input_metadata_from_file(metadata_file_name)
  self.collect_meta_data()
  if clean_meta:
  self.top_level_meta = self.clean_meta_data(self.top_level_meta)
@@ -103,6 +103,74 @@ class MetadataCollector:
  pass
  return self.top_level_meta

+ @staticmethod
+ def dump(args_dict, output_file, add_activity_name=False):
+ """
+ Write metadata to file (static method).
+
+ Parameters
+ ----------
+ args_dict: dict
+ Command line parameters
+ output_file: str or Path
+ Name of output file.
+ add_activity_name: bool
+ Add activity name to file name.
+ """
+ collector = MetadataCollector(args_dict)
+ collector.write(output_file, add_activity_name=add_activity_name)
+
+ def write(self, yml_file=None, keys_lower_case=False, add_activity_name=False):
+ """
+ Write toplevel metadata to file (yaml file format).
+
+ Parameters
+ ----------
+ metadata: dict
+ Metadata to be stored
+ yml_file: str
+ Name of output file.
+ keys_lower_case: bool
+ Write yaml keys in lower case.
+ add_activity_name: bool
+ Add activity name to file name.
+
+ Returns
+ -------
+ str
+ Name of output file
+
+ Raises
+ ------
+ FileNotFoundError
+ If yml_file not found.
+ TypeError
+ If yml_file is not defined.
+ """
+ metadata = self.get_top_level_metadata()
+ activity_name = metadata.get("cta", {}).get("activity", {}).get("name", "").rstrip(".")
+ suffix = f".{activity_name}.meta.yml" if add_activity_name else ".meta.yml"
+
+ if yml_file is None:
+ raise TypeError("No output file for metadata defined")
+
+ try:
+ yml_file = names.file_name_with_version(yml_file, suffix)
+ with open(yml_file, "w", encoding="UTF-8") as file:
+ yaml.safe_dump(
+ gen.change_dict_keys_case(
+ gen.remove_substring_recursively_from_dict(metadata, substring="\n"),
+ keys_lower_case,
+ ),
+ file,
+ sort_keys=False,
+ explicit_start=True,
+ )
+ self._logger.info(f"Writing metadata to {yml_file}")
+ return yml_file
+ except FileNotFoundError as exc:
+ raise FileNotFoundError(f"Error writing metadata to {yml_file}") from exc
+
  def get_data_model_schema_file_name(self):
  """
  Return data model schema file name.
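A short usage sketch of the dump()/write() pair added above; the args_dict content is illustrative only (real applications pass their full parsed argument dictionary):

    from simtools.data_model.metadata_collector import MetadataCollector

    args_dict = {"output_file": "single_pe_spectrum.ecsv"}  # illustrative subset

    # one-shot: collect metadata from args_dict and write it as a .meta.yml file
    MetadataCollector.dump(args_dict, "single_pe_spectrum", add_activity_name=True)

    # equivalent two-step form
    collector = MetadataCollector(args_dict=args_dict)
    collector.write(yml_file="single_pe_spectrum", add_activity_name=True)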
@@ -136,12 +204,12 @@ class MetadataCollector:
  self._logger.debug(f"Schema file from data model name: {self.data_model_name}")
  return str(schema.get_model_parameter_schema_file(self.data_model_name))

- # from input metadata
+ # from first entry in input metadata (least preferred)
  try:
- url = self.input_metadata[self.observatory]["product"]["data"]["model"]["url"]
+ url = self.input_metadata[0][self.observatory]["product"]["data"]["model"]["url"]
  self._logger.debug(f"Schema file from input metadata: {url}")
  return url
- except KeyError:
+ except (KeyError, TypeError):
  pass

  self._logger.warning("No schema file found.")
@@ -170,7 +238,7 @@ class MetadataCollector:
  Parameters
  ----------
  from_input_meta: bool
- Get site from input metadata (default: False)
+ Get site from first entry of input metadata (default: False)

  Returns
  -------
@@ -182,11 +250,11 @@ class MetadataCollector:
  _site = (
  self.top_level_meta[self.observatory]["instrument"]["site"]
  if not from_input_meta
- else self.input_metadata[self.observatory]["instrument"]["site"]
+ else self.input_metadata[0][self.observatory]["instrument"]["site"]
  )
  if _site is not None:
  return names.validate_site_name(_site)
- except KeyError:
+ except (KeyError, TypeError):
  pass
  return None
 
@@ -202,7 +270,13 @@ class MetadataCollector:
  contact_dict["name"] = contact_dict.get("name") or self.args_dict.get("user_name")
  if contact_dict["name"] is None:
  self._logger.warning("No user name provided, take user info from system level.")
- contact_dict["name"] = getpass.getuser()
+ try:
+ contact_dict["name"] = getpass.getuser()
+ except Exception as exc: # pylint: disable=broad-except
+ contact_dict["name"] = "UNKNOWN_USER"
+ self._logger.warning(
+ f"Failed to get user name: {exc}, setting it to {contact_dict['name']} "
+ )
  meta_dict = {
  "email": "user_mail",
  "orcid": "user_orcid",
@@ -221,17 +295,28 @@ class MetadataCollector:
  Dictionary for context metadata fields.

  """
- try: # wide try..except as for some cases we expect that there is no product metadata
- reduced_product_meta = {
- key: value
- for key, value in self.input_metadata[self.observatory]["product"].items()
- if key in {"description", "id", "creation_time", "valid", "format", "filename"}
- }
- self._fill_context_sim_list(context_dict["associated_data"], reduced_product_meta)
- except (KeyError, TypeError):
- self._logger.debug("No input product metadata appended to associated data.")
+ input_metadata = (
+ self.input_metadata if isinstance(self.input_metadata, list) else [self.input_metadata]
+ )

- def _read_input_metadata_from_file(self, metadata_file_name=None):
+ for metadata in input_metadata:
+ try: # wide try..except as for some cases we expect that there is no product metadata
+ reduced_product_meta = {
+ key: value
+ for key, value in metadata[self.observatory]["product"].items()
+ if key in {"description", "id", "creation_time", "valid", "format", "filename"}
+ }
+ if metadata[self.observatory].get("activity", {}).get("name"):
+ reduced_product_meta["activity_name"] = metadata[self.observatory][
+ "activity"
+ ].get("name")
+ context_dict["associated_data"] = self._fill_context_sim_list(
+ context_dict["associated_data"], reduced_product_meta
+ )
+ except (KeyError, TypeError):
+ self._logger.debug("No input product metadata appended to associated data.")
+
+ def _read_input_metadata_from_file(self, metadata_file_name_expression=None):
  """
  Read and validate input metadata from file.
 
@@ -240,8 +325,8 @@ class MetadataCollector:

  Parameter
  ---------
- metadata_file_name: str or Path
- Name of metadata file.
+ metadata_file_name_expression: str or Path
+ Name of metadata file (regular expressions allowed).

  Returns
  -------
@@ -256,31 +341,32 @@ class MetadataCollector:
  if metadata does not exist

  """
- metadata_file_name = (
- self.args_dict.get("input_meta", None) or self.args_dict.get("input", None)
- if metadata_file_name is None
- else metadata_file_name
+ metadata_file_names = (
+ metadata_file_name_expression
+ or self.args_dict.get("input_meta")
+ or self.args_dict.get("input")
  )

- if metadata_file_name is None:
+ try:
+ metadata_files = gen.resolve_file_patterns(metadata_file_names)
+ except ValueError:
  self._logger.debug("No input metadata file defined.")
- return {}
-
- self._logger.debug("Reading meta data from %s", metadata_file_name)
- if Path(metadata_file_name).suffix in (".yaml", ".yml", ".json"):
- _input_metadata = self._read_input_metadata_from_yml_or_json(metadata_file_name)
- elif Path(metadata_file_name).suffix == ".ecsv":
- _input_metadata = self._read_input_metadata_from_ecsv(metadata_file_name)
- else:
- self._logger.error("Unknown metadata file format: %s", metadata_file_name)
- raise gen.InvalidConfigDataError
+ return None
+
+ metadata = []
+ for metadata_file in metadata_files:
+ self._logger.debug(f"Reading meta data from {metadata_file}")
+ if Path(metadata_file).suffix in (".yaml", ".yml", ".json"):
+ _input_metadata = self._read_input_metadata_from_yml_or_json(metadata_file)
+ elif Path(metadata_file).suffix == ".ecsv":
+ _input_metadata = self._read_input_metadata_from_ecsv(metadata_file)
+ else:
+ raise gen.InvalidConfigDataError(f"Unknown metadata file format: {metadata_file}")

- schema.validate_dict_using_schema(_input_metadata, schema_file=METADATA_JSON_SCHEMA)
+ schema.validate_dict_using_schema(_input_metadata, schema_file=METADATA_JSON_SCHEMA)
+ metadata.append(gen.change_dict_keys_case(_input_metadata, lower_case=True))

- return gen.change_dict_keys_case(
- self._process_metadata_from_file(_input_metadata),
- lower_case=True,
- )
+ return metadata

  def _read_input_metadata_from_ecsv(self, metadata_file_name):
  """Read input metadata from ecsv file."""
@@ -348,13 +434,23 @@ class MetadataCollector:
  pass

  # DATA:MODEL
- helper_dict = {"name": "name", "version": "version", "type": "meta_schema"}
- for key, value in helper_dict.items():
- product_dict["data"]["model"][key] = self.schema_dict.get(value, None)
- product_dict["data"]["model"]["url"] = self.schema_file
+ product_dict["data"]["model"]["name"] = (
+ self.schema_dict.get("name")
+ or self.args_dict.get("metadata_product_data_name")
+ or "undefined_model_name"
+ )
+ product_dict["data"]["model"]["version"] = self.schema_dict.get("version", "0.0.0")
+ product_dict["data"]["model"]["type"] = self.schema_dict.get("meta_schema", None)
+ product_dict["data"]["model"]["url"] = self.schema_file or self.args_dict.get(
+ "metadata_product_data_url"
+ )

- product_dict["format"] = self.args_dict.get("output_file_format", None)
- product_dict["filename"] = str(self.args_dict.get("output_file", None))
+ product_dict["filename"] = str(self.args_dict.get("output_file", ""))
+ product_dict["format"] = (
+ self.args_dict.get("output_file_format")
+ or Path(product_dict["filename"]).suffix.lstrip(".")
+ or None
+ )

  def _fill_instrument_meta(self, instrument_dict):
  """
@@ -369,14 +465,17 @@ class MetadataCollector:
  Dictionary for instrument metadata fields.

  """
- instrument_dict["site"] = self.args_dict.get("site", None)
- instrument_dict["ID"] = self.args_dict.get("instrument") or self.args_dict.get(
- "telescope", None
- )
+ instrument_dict["site"] = self.args_dict.get("site")
+ instrument_dict["ID"] = self.args_dict.get("instrument") or self.args_dict.get("telescope")
  if instrument_dict["ID"]:
  instrument_dict["class"] = names.get_collection_name_from_array_element_name(
  instrument_dict["ID"], False
  )
+ instrument_dict["type"] = (
+ names.get_array_element_type_from_name(instrument_dict["ID"])
+ if not instrument_dict.get("type")
+ else instrument_dict["type"]
+ )

  def _fill_process_meta(self, process_dict):
  """
@@ -460,15 +559,14 @@ class MetadataCollector:
  Updated meta list.

  """
- if len(new_entry_dict) == 0:
+ if not new_entry_dict:
  return []
- try:
- if self._all_values_none(meta_list[0]):
- meta_list[0] = new_entry_dict
- else:
- meta_list.append(new_entry_dict)
- except (TypeError, IndexError):
- meta_list = [new_entry_dict]
+ if meta_list is None or not meta_list:
+ return [new_entry_dict]
+ if self._all_values_none(meta_list[0]):
+ meta_list[0] = new_entry_dict
+ else:
+ meta_list.append(new_entry_dict)
  return meta_list

  def _process_metadata_from_file(self, meta_dict):
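The simplified _fill_context_sim_list() above now always returns the updated list, and callers assign the return value (see the next hunk). A minimal illustration of the branches, given a MetadataCollector instance named collector and assuming _all_values_none() treats a dict whose values are all None as a placeholder:

    collector._fill_context_sim_list(None, {})                   # -> []
    collector._fill_context_sim_list(None, {"id": 1})            # -> [{'id': 1}]
    collector._fill_context_sim_list([{"id": None}], {"id": 1})  # -> [{'id': 1}]
    collector._fill_context_sim_list([{"id": 2}], {"id": 1})     # -> [{'id': 2}, {'id': 1}]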
@@ -533,7 +631,7 @@ class MetadataCollector:
  """
  try:
  for document in _input_metadata["context"][key]:
- self._fill_context_sim_list(context_dict[key], document)
+ context_dict[key] = self._fill_context_sim_list(context_dict[key], document)
  except KeyError:
  pass