gammasimtools 0.12.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/METADATA +1 -1
  2. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/RECORD +64 -77
  3. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/entry_points.txt +2 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/convert_all_model_parameters_from_simtel.py +77 -88
  7. simtools/applications/convert_geo_coordinates_of_array_elements.py +1 -1
  8. simtools/applications/db_get_parameter_from_db.py +52 -22
  9. simtools/applications/derive_photon_electron_spectrum.py +1 -1
  10. simtools/applications/docs_produce_array_element_report.py +1 -10
  11. simtools/applications/docs_produce_model_parameter_reports.py +4 -17
  12. simtools/applications/plot_tabular_data.py +14 -2
  13. simtools/applications/{production_derive_limits.py → production_derive_corsika_limits.py} +20 -8
  14. simtools/applications/production_extract_mc_event_data.py +125 -0
  15. simtools/applications/run_application.py +9 -10
  16. simtools/applications/submit_data_from_external.py +1 -1
  17. simtools/applications/submit_model_parameter_from_external.py +2 -1
  18. simtools/camera/single_photon_electron_spectrum.py +6 -2
  19. simtools/constants.py +7 -0
  20. simtools/data_model/metadata_collector.py +159 -61
  21. simtools/data_model/model_data_writer.py +11 -55
  22. simtools/data_model/schema.py +2 -1
  23. simtools/data_model/validate_data.py +5 -3
  24. simtools/db/db_handler.py +115 -31
  25. simtools/model/model_parameter.py +0 -31
  26. simtools/production_configuration/derive_corsika_limits.py +260 -0
  27. simtools/production_configuration/extract_mc_event_data.py +253 -0
  28. simtools/ray_tracing/mirror_panel_psf.py +1 -1
  29. simtools/reporting/docs_read_parameters.py +164 -91
  30. simtools/schemas/metadata.metaschema.yml +7 -6
  31. simtools/schemas/model_parameter.metaschema.yml +0 -4
  32. simtools/schemas/model_parameter_and_data_schema.metaschema.yml +13 -5
  33. simtools/schemas/model_parameters/array_coordinates.schema.yml +1 -1
  34. simtools/schemas/model_parameters/array_layouts.schema.yml +3 -0
  35. simtools/schemas/model_parameters/asum_shaping.schema.yml +1 -1
  36. simtools/schemas/model_parameters/atmospheric_profile.schema.yml +1 -1
  37. simtools/schemas/model_parameters/camera_config_file.schema.yml +1 -1
  38. simtools/schemas/model_parameters/camera_degraded_map.schema.yml +1 -1
  39. simtools/schemas/model_parameters/camera_filter.schema.yml +1 -1
  40. simtools/schemas/model_parameters/dsum_shaping.schema.yml +1 -1
  41. simtools/schemas/model_parameters/fadc_dev_pedestal.schema.yml +1 -1
  42. simtools/schemas/model_parameters/fadc_lg_dev_pedestal.schema.yml +1 -1
  43. simtools/schemas/model_parameters/fadc_lg_max_sum.schema.yml +3 -3
  44. simtools/schemas/model_parameters/fadc_max_sum.schema.yml +3 -3
  45. simtools/schemas/model_parameters/fake_mirror_list.schema.yml +1 -1
  46. simtools/schemas/model_parameters/lightguide_efficiency_vs_incidence_angle.schema.yml +1 -1
  47. simtools/schemas/model_parameters/lightguide_efficiency_vs_wavelength.schema.yml +1 -1
  48. simtools/schemas/model_parameters/mirror_list.schema.yml +1 -1
  49. simtools/schemas/model_parameters/nsb_reference_spectrum.schema.yml +1 -1
  50. simtools/schemas/model_parameters/nsb_skymap.schema.yml +1 -1
  51. simtools/schemas/model_parameters/primary_mirror_degraded_map.schema.yml +1 -1
  52. simtools/schemas/model_parameters/primary_mirror_segmentation.schema.yml +1 -1
  53. simtools/schemas/model_parameters/secondary_mirror_degraded_map.schema.yml +1 -1
  54. simtools/schemas/model_parameters/secondary_mirror_segmentation.schema.yml +1 -1
  55. simtools/schemas/plot_configuration.metaschema.yml +162 -0
  56. simtools/schemas/production_tables.schema.yml +1 -1
  57. simtools/simtel/simtel_config_reader.py +85 -34
  58. simtools/simtel/simtel_table_reader.py +4 -0
  59. simtools/utils/general.py +50 -9
  60. simtools/utils/names.py +7 -2
  61. simtools/visualization/plot_tables.py +25 -20
  62. simtools/visualization/visualize.py +71 -23
  63. simtools/_dev_version/__init__.py +0 -9
  64. simtools/applications/__init__.py +0 -0
  65. simtools/configuration/__init__.py +0 -0
  66. simtools/corsika/__init__.py +0 -0
  67. simtools/data_model/__init__.py +0 -0
  68. simtools/db/__init__.py +0 -0
  69. simtools/io_operations/__init__.py +0 -0
  70. simtools/job_execution/__init__.py +0 -0
  71. simtools/layout/__init__.py +0 -0
  72. simtools/model/__init__.py +0 -0
  73. simtools/production_configuration/limits_calculation.py +0 -202
  74. simtools/ray_tracing/__init__.py +0 -0
  75. simtools/runners/__init__.py +0 -0
  76. simtools/simtel/__init__.py +0 -0
  77. simtools/testing/__init__.py +0 -0
  78. simtools/utils/__init__.py +0 -0
  79. simtools/visualization/__init__.py +0 -0
  80. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/LICENSE +0 -0
  81. {gammasimtools-0.12.0.dist-info → gammasimtools-0.13.0.dist-info}/top_level.txt +0 -0
simtools/production_configuration/extract_mc_event_data.py (new file)
@@ -0,0 +1,253 @@
+ """Generate a reduced dataset from given simulation event list and save the output to file."""
+
+ import logging
+
+ import numpy as np
+ import tables
+ from ctapipe.core import Container, Field
+ from ctapipe.io import HDF5TableWriter
+ from eventio import EventIOFile
+ from eventio.simtel import ArrayEvent, MCEvent, MCRunHeader, MCShower, TriggerInformation
+
+ DEFAULT_FILTERS = tables.Filters(complevel=5, complib="zlib", shuffle=True, bitshuffle=False)
+
+
+ class ReducedDatasetContainer(Container):
+ """Container for reduced dataset information."""
+
+ simulated = Field(None, "Simulated energy")
+ core_x = Field(None, "X-coordinate of the shower core")
+ core_y = Field(None, "Y-coordinate of the shower core")
+ shower_sim_azimuth = Field(None, "Simulated azimuth angle of the shower")
+ shower_sim_altitude = Field(None, "Simulated altitude angle of the shower")
+ array_altitudes = Field(None, "Altitudes for the array")
+ array_azimuths = Field(None, "Azimuths for the array")
+
+
+ class TriggeredShowerContainer(Container):
+ """Container for triggered shower information."""
+
+ shower_id_triggered = Field(None, "Triggered shower ID")
+ triggered_energies = Field(None, "Triggered energies")
+
+
+ class FileNamesContainer(Container):
+ """Container for file names."""
+
+ file_names = Field(None, "Input file names")
+
+
+ class MCEventExtractor:
+ """
+ Generate a reduced dataset from given simulation event list and save the output to file.
+
+ Attributes
+ ----------
+ input_files : list
+ List of input file paths to process.
+ output_file : str
+ Path to the output HDF5 file.
+ max_files : int, optional
+ Maximum number of files to process.
+ """
+
+ def __init__(self, input_files, output_file, max_files=100):
+ """
+ Initialize the MCEventExtractor with input files, output file, and max file limit.
+
+ Parameters
+ ----------
+ input_files : list
+ List of input file paths to process.
+ output_file : str
+ Path to the output HDF5 file.
+ max_files : int, optional
+ Maximum number of files to process.
+ """
+ self._logger = logging.getLogger(__name__)
+ self.input_files = input_files
+ self.output_file = output_file
+ self.max_files = max_files
+ self.shower = None
+ self.n_use = None
+ self.shower_id_offset = 0
+
+ def process_files(self):
+ """Process the input files and store them in an HDF5 file."""
+ if not self.input_files:
+ self._logger.warning("No input files provided.")
+ return
+
+ data_lists = self._initialize_data_lists()
+ self.shower_id_offset = 0
+ # Process the first file in write mode
+ self._logger.info(f"Processing file 1/{self.max_files}: {self.input_files[0]}")
+ self._process_file(self.input_files[0], data_lists, str(self.input_files[0]))
+ self._write_all_data(data_lists, mode="w")
+ self.shower_id_offset = len(data_lists["simulated"])
+ self._reset_data_lists(data_lists)
+
+ # Process remaining files in append mode
+ for i_file, file in enumerate(self.input_files[1 : self.max_files], start=2):
+ self._logger.info(f"Processing file {i_file}/{self.max_files}: {file}")
+ self._process_file(file, data_lists, str(file))
+ if len(data_lists["simulated"]) >= 1e7:
+ self._write_all_data(data_lists, mode="a")
+ self.shower_id_offset += len(data_lists["simulated"])
+ self._reset_data_lists(data_lists)
+
+ # Final write for any remaining data
+ self._write_all_data(data_lists, mode="a")
+
+ def _write_all_data(self, data_lists, mode):
+ """Write all data sections at once helper method."""
+ self._write_data(data_lists, mode=mode)
+ self._write_variable_length_data(data_lists["trigger_telescope_list_list"], mode="a")
+ self._write_file_names(data_lists["file_names"], mode="a")
+
+ def _write_file_names(self, file_names, mode="a"):
+ """Write file names to HDF5 file."""
+ print("file_names", file_names)
+ with HDF5TableWriter(
+ self.output_file, group_name="data", mode=mode, filters=DEFAULT_FILTERS
+ ) as writer:
+ file_names_container = FileNamesContainer()
+ for file_name in file_names:
+ file_names_container.file_names = file_name
+ writer.write(table_name="file_names", containers=[file_names_container])
+
+ def _write_variable_length_data(self, trigger_telescope_list_list, mode="a"):
+ """Write variable-length array data to HDF5 file."""
+ with tables.open_file(self.output_file, mode=mode) as f:
+ if "trigger_telescope_list_list" in f.root.data:
+ vlarray = f.root.data.trigger_telescope_list_list
+ else:
+ vlarray = f.create_vlarray(
+ f.root.data,
+ "trigger_telescope_list_list",
+ tables.Int16Atom(),
+ "List of triggered telescope IDs",
+ )
+
+ for item in trigger_telescope_list_list:
+ vlarray.append(item)
+
+ def _initialize_data_lists(self):
+ """Initialize data lists."""
+ return {
+ "simulated": [],
+ "shower_id_triggered": [],
+ "triggered_energies": [],
+ "core_x": [],
+ "core_y": [],
+ "trigger_telescope_list_list": [],
+ "file_names": [],
+ "shower_sim_azimuth": [],
+ "shower_sim_altitude": [],
+ "array_altitudes": [],
+ "array_azimuths": [],
+ }
+
+ def _process_file(self, file, data_lists, file_name):
+ """Process a single file and update data lists."""
+ with EventIOFile(file) as f:
+ array_altitude = None
+ array_azimuth = None
+ for eventio_object in f:
+ if isinstance(eventio_object, MCRunHeader):
+ self._process_mc_run_header(eventio_object, data_lists)
+ elif isinstance(eventio_object, MCShower):
+ self._process_mc_shower(
+ eventio_object, data_lists, array_altitude, array_azimuth
+ )
+ elif isinstance(eventio_object, MCEvent):
+ self._process_mc_event(eventio_object, data_lists)
+ elif isinstance(eventio_object, ArrayEvent):
+ self._process_array_event(eventio_object, data_lists)
+ data_lists["file_names"].extend([file_name])
+
+ def _process_mc_run_header(self, eventio_object, data_lists):
+ """Process MC run header and update data lists."""
+ mc_head = eventio_object.parse()
+ self.n_use = mc_head["n_use"]  # reuse factor n_use needed to extend the values below
+ array_altitude = np.mean(mc_head["alt_range"])
+ array_azimuth = np.mean(mc_head["az_range"])
+ data_lists["array_altitudes"].extend(self.n_use * [array_altitude])
+ data_lists["array_azimuths"].extend(self.n_use * [array_azimuth])
+
+ def _process_mc_shower(self, eventio_object, data_lists, array_altitude, array_azimuth):
+ """Process MC shower and update data lists."""
+ self.shower = eventio_object.parse()
+ data_lists["simulated"].extend(self.n_use * [self.shower["energy"]])
+ data_lists["shower_sim_azimuth"].extend(self.n_use * [self.shower["azimuth"]])
+ data_lists["shower_sim_altitude"].extend(self.n_use * [self.shower["altitude"]])
+ data_lists["array_altitudes"].extend(self.n_use * [array_altitude])
+ data_lists["array_azimuths"].extend(self.n_use * [array_azimuth])
+
+ def _process_mc_event(self, eventio_object, data_lists):
+ """Process MC event and update data lists."""
+ event = eventio_object.parse()
+ data_lists["core_x"].append(event["xcore"])
+ data_lists["core_y"].append(event["ycore"])
+
+ def _process_array_event(self, eventio_object, data_lists):
+ """Process array event and update data lists."""
+ for i, obj in enumerate(eventio_object):
+ if i == 0 and isinstance(obj, TriggerInformation):
+ self._process_trigger_information(obj, data_lists)
+
+ def _process_trigger_information(self, trigger_info, data_lists):
+ """Process trigger information and update data lists."""
+ trigger_info = trigger_info.parse()
+ telescopes = trigger_info["telescopes_with_data"]
+ if len(telescopes) > 0:
+ data_lists["shower_id_triggered"].append(self.shower["shower"] + self.shower_id_offset)
+ data_lists["triggered_energies"].append(self.shower["energy"])
+ data_lists["trigger_telescope_list_list"].append(np.array(telescopes, dtype=np.int16))
+
+ def _write_data(self, data_lists, mode="a"):
+ """Write data to HDF5 file using HDF5TableWriter."""
+ with HDF5TableWriter(
+ self.output_file, group_name="data", mode=mode, filters=DEFAULT_FILTERS
+ ) as writer:
+ # Write reduced dataset container
+ reduced_container = ReducedDatasetContainer()
+ for i in range(len(data_lists["simulated"])):
+ reduced_container.simulated = data_lists["simulated"][i]
+ reduced_container.core_x = data_lists["core_x"][i]
+ reduced_container.core_y = data_lists["core_y"][i]
+ reduced_container.shower_sim_azimuth = data_lists["shower_sim_azimuth"][i]
+ reduced_container.shower_sim_altitude = data_lists["shower_sim_altitude"][i]
+ reduced_container.array_altitudes = data_lists["array_altitudes"][i]
+ reduced_container.array_azimuths = data_lists["array_azimuths"][i]
+ writer.write(table_name="reduced_data", containers=[reduced_container])
+
+ # Write triggered shower container
+ triggered_container = TriggeredShowerContainer()
+ for i in range(len(data_lists["shower_id_triggered"])):
+ triggered_container.shower_id_triggered = data_lists["shower_id_triggered"][i]
+ triggered_container.triggered_energies = data_lists["triggered_energies"][i]
+ writer.write(table_name="triggered_data", containers=[triggered_container])
+
+ def _reset_data_lists(self, data_lists):
+ """Reset data lists during batch processing."""
+ for key in data_lists:
+ data_lists[key] = []
+
+ def print_dataset_information(self):
+ """Print information about the datasets in the generated HDF5 file."""
+ try:
+ with tables.open_file(self.output_file, mode="r") as reader:
+ print("Datasets in file:")
+ for key in reader.root.data._v_children.keys():  # pylint: disable=protected-access
+ dset = reader.root.data._v_children[key]  # pylint: disable=protected-access
+ print(f"- {key}: shape={dset.shape}, dtype={dset.dtype}")
+
+ # Print first 5 values each
+ print(f"  First 5 values: {dset[:5]}")
+
+ # Print units if available
+ # units = dset.attrs.get("units", "N/A")
+ # print(f"  Units: {units}")
+ except Exception as exc:
+ raise ValueError("An error occurred while reading the HDF5 file") from exc
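
The class above backs the new application simtools/applications/production_extract_mc_event_data.py listed in the files-changed table. As a rough orientation only, a minimal usage sketch based on the interface shown in this diff; the input and output file names are placeholders, not files shipped with the package.

import logging

from simtools.production_configuration.extract_mc_event_data import MCEventExtractor

logging.basicConfig(level=logging.INFO)

# Placeholder file names for a list of eventio/sim_telarray input files.
extractor = MCEventExtractor(
    input_files=["run1.simtel.zst", "run2.simtel.zst"],
    output_file="reduced_events.h5",
    max_files=100,
)

# Writes the reduced_data, triggered_data, file_names and
# trigger_telescope_list_list datasets into the HDF5 output file.
extractor.process_files()

# Prints shape, dtype and the first values of each dataset in the output file.
extractor.print_dataset_information()
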
simtools/ray_tracing/mirror_panel_psf.py
@@ -295,6 +295,6 @@ class MirrorPanelPSF:
  )
  writer.ModelDataWriter.dump(
  args_dict=self.args_dict,
- metadata=MetadataCollector(args_dict=self.args_dict).get_top_level_metadata(),
+ metadata=MetadataCollector(args_dict=self.args_dict),
  product_data=result_table,
  )
simtools/reporting/docs_read_parameters.py
@@ -4,9 +4,13 @@ r"""Class to read and manage relevant model parameters for a given telescope mod
 
  import logging
  import textwrap
+ from collections import defaultdict
  from itertools import groupby
  from pathlib import Path
 
+ import numpy as np
+
+ from simtools.db import db_handler
  from simtools.io_operations import io_handler
  from simtools.model.telescope_model import TelescopeModel
  from simtools.utils import names
@@ -15,13 +19,16 @@ logger = logging.getLogger()
 
 
  class ReadParameters:
- """Read and manage model parameter data."""
+ """Read and manage model parameter data for report generation."""
 
- def __init__(self, db_config, telescope_model, output_path):
- """Initialise class with a telescope model."""
+ def __init__(self, db_config, args, output_path):
+ """Initialise class."""
  self._logger = logging.getLogger(__name__)
+ self.db = db_handler.DatabaseHandler(mongo_db_config=db_config)
  self.db_config = db_config
- self.telescope_model = telescope_model
+ self.array_element = args.get("telescope")
+ self.site = args.get("site")
+ self.model_version = args.get("model_version", None)
  self.output_path = output_path
 
  def _convert_to_md(self, input_file):
@@ -86,9 +93,9 @@ class ReadParameters:
 
  def get_array_element_parameter_data(self, telescope_model, collection="telescopes"):
  """
- Get model parameter data for a given array element.
+ Get model parameter data and descriptions for a given array element.
 
- Currently only configures for telescope.
+ Currently only configured for telescope.
 
  Parameters
  ----------
@@ -104,6 +111,7 @@ class ReadParameters:
  site=telescope_model.site,
  array_element_name=telescope_model.name,
  collection=collection,
+ model_version=telescope_model.model_version,
  )
 
  telescope_model.export_model_files()
@@ -140,75 +148,143 @@ class ReadParameters:
 
  return data
 
- def _compare_parameter_across_versions(self, parameter_name):
+ def _format_parameter_value(self, value_data, unit, file_flag):
+ """Format parameter value based on type."""
+ if file_flag:
+ input_file_name = f"{self.output_path}/model/{value_data}"
+ output_file_name = self._convert_to_md(input_file_name)
+ return f"[{Path(value_data).name}]({output_file_name})".strip()
+ if isinstance(value_data, (str | int | float)):
+ return f"{value_data} {unit}".strip()
+ if len(value_data) > 5 and np.allclose(value_data, value_data[0]):
+ return f"all: {value_data[0]} {unit}".strip()
+ return (
+ ", ".join(f"{v} {u}" for v, u in zip(value_data, unit))
+ if isinstance(unit, list)
+ else ", ".join(f"{v} {unit}" for v in value_data)
+ ).strip()
+
+ def _group_model_versions_by_parameter_version(self, grouped_data):
+ """Group model versions by parameter version and track the parameter values."""
+ result = {}
+
+ for parameter_name, items in grouped_data.items():
+ version_grouped = defaultdict(
+ lambda: {"model_versions": [], "value": None, "file_flag": None}
+ )
+
+ for item in items:
+ param_version = item["parameter_version"]
+ version_grouped[param_version]["model_versions"].append(item["model_version"])
+
+ if version_grouped[param_version]["value"] is None:
+ version_grouped[param_version]["value"] = item["value"]
+ version_grouped[param_version]["file_flag"] = item["file_flag"]
+
+ result[parameter_name] = [
+ {
+ "value": data["value"],
+ "parameter_version": param_version,
+ "file_flag": data["file_flag"],
+ "model_version": ", ".join(data["model_versions"]),
+ }
+ for param_version, data in version_grouped.items()
+ ]
+
+ return result
+
+ def _compare_parameter_across_versions(self, all_param_data, all_parameter_names):
  """
  Compare a parameter's value across different model versions.
 
  Parameters
  ----------
- parameter_name : str
- The name of the parameter to compare.
+ all_param_data : dict
+ The dictionary containing parameter data for all versions.
+
+ all_parameter_names : list
+ The list of parameter names to compare across versions.
 
  Returns
  -------
  list
- A list of dictionaries containing model version, parameter value, and description.
+ A list of dictionaries containing model version, parameter value, description.
  """
- all_versions = self.telescope_model.db.get_model_versions()
- all_versions.reverse()
- comparison_data = []
-
- for model_version in all_versions:
- telescope_model = TelescopeModel(
- site=self.telescope_model.site,
- telescope_name=self.telescope_model.name,
- model_version=model_version,
- label="reports",
- mongo_db_config=self.db_config,
+ all_versions = self.db.get_model_versions()
+ all_versions.reverse()  # latest first
+ grouped_data = defaultdict(list)
+
+ # Iterate over each model version
+ for version in all_versions:
+ Path(f"{self.output_path}/model").mkdir(parents=True, exist_ok=True)
+
+ self.db.export_model_files(
+ parameters=all_param_data.get(version), dest=f"{self.output_path}/model"
  )
 
- if not telescope_model.has_parameter(parameter_name):
- return comparison_data
-
- parameter_data = self.get_array_element_parameter_data(telescope_model)
- for param in parameter_data:
- if param[1] == parameter_name:
- comparison_data.append(
- {
- "model_version": model_version,
- "parameter_version": param[2],
- "value": param[3],
- "description": param[4],
- }
- )
- break
- return comparison_data
+ parameter_dict = all_param_data.get(version, {})
+
+ for parameter_name in filter(parameter_dict.__contains__, all_parameter_names):
+ parameter_data = parameter_dict.get(parameter_name)
+
+ # Skip if instrument doesn't match
+ if parameter_data.get("instrument") != self.array_element:
+ continue
+
+ unit = parameter_data.get("unit") or " "
+ value_data = parameter_data.get("value")
+
+ if not value_data:
+ continue
+
+ file_flag = parameter_data.get("file", False)
+ value = self._format_parameter_value(value_data, unit, file_flag)
+ parameter_version = parameter_data.get("parameter_version")
+ model_version = version
+
+ # Group the data by parameter version and store model versions as a list
+ grouped_data[parameter_name].append(
+ {
+ "value": value,
+ "parameter_version": parameter_version,
+ "model_version": model_version,
+ "file_flag": file_flag,
+ }
+ )
+
+ return self._group_model_versions_by_parameter_version(grouped_data)
 
  def produce_array_element_report(self):
  """
  Produce a markdown report of all model parameters per array element.
 
- Output
- ----------
- One markdown report of a given array element listing parameter values,
+ Outputs one markdown report of a given array element listing parameter values,
  versions, and descriptions.
  """
- output_filename = Path(self.output_path / (self.telescope_model.name + ".md"))
+ telescope_model = TelescopeModel(
+ site=self.site,
+ telescope_name=self.array_element,
+ model_version=self.model_version,
+ label="reports",
+ mongo_db_config=self.db_config,
+ )
+
+ output_filename = Path(self.output_path / (telescope_model.name + ".md"))
  output_filename.parent.mkdir(parents=True, exist_ok=True)
- data = self.get_array_element_parameter_data(self.telescope_model)
+ data = self.get_array_element_parameter_data(telescope_model)
  # Sort data by class to prepare for grouping
  if not isinstance(data, str):
  data.sort(key=lambda x: (x[0], x[1]), reverse=True)
 
  with output_filename.open("w", encoding="utf-8") as file:
  # Group by class and write sections
- file.write(f"# {self.telescope_model.name}\n")
+ file.write(f"# {telescope_model.name}\n")
 
- if self.telescope_model.name != self.telescope_model.design_model:
+ if telescope_model.name != telescope_model.design_model:
  file.write(
  "The design model can be found here: "
- f"[{self.telescope_model.design_model}]"
- f"({self.telescope_model.design_model}.md).\n"
+ f"[{telescope_model.design_model}]"
+ f"({telescope_model.design_model}.md).\n"
  )
  file.write("\n\n")
 
@@ -249,62 +325,59 @@ class ReadParameters:
  """
  Produce a markdown report per parameter for a given array element.
 
- Output
- ----------
- One markdown report per model parameter of a given array element comparing
+ Outputs one markdown report per model parameter of a given array element comparing
  values across model versions.
  """
  logger.info(
- f"Comparing parameters across model versions for Telescope: {self.telescope_model.name}"
- f" and Site: {self.telescope_model.site}."
+ f"Comparing parameters across model versions for Telescope: {self.array_element}"
+ f" and Site: {self.site}."
  )
  io_handler_instance = io_handler.IOHandler()
  output_path = io_handler_instance.get_output_directory(
- label="reports", sub_dir=f"parameters/{self.telescope_model.name}"
+ label="reports", sub_dir=f"parameters/{self.array_element}"
  )
 
- all_params = self.telescope_model.db.get_model_parameters(
- site=self.telescope_model.site,
- array_element_name=self.telescope_model.name,
- collection="telescopes",
+ all_parameter_names = names.model_parameters(None).keys()
+ all_parameter_data = self.db.get_model_parameters_for_all_model_versions(
+ site=self.site, array_element_name=self.array_element, collection="telescopes"
  )
 
- for parameter in all_params:
- comparison_data = []
- if all_params[parameter]["instrument"] == self.telescope_model.name:
- comparison_data = self._compare_parameter_across_versions(parameter)
- if comparison_data:
- output_filename = output_path / f"{parameter}.md"
- with output_filename.open("w", encoding="utf-8") as file:
- # Write header
- file.write(
- f"# {parameter}\n\n"
- f"**Telescope**: {self.telescope_model.name}\n\n"
- f"**Description**: {comparison_data[0]['description']}\n\n"
- "\n"
- )
+ comparison_data = self._compare_parameter_across_versions(
+ all_parameter_data, all_parameter_names
+ )
 
- # Write table header
+ for parameter in all_parameter_names:
+ parameter_data = comparison_data.get(parameter)
+ if not parameter_data:
+ continue
+
+ output_filename = output_path / f"{parameter}.md"
+ description = self.get_all_parameter_descriptions()[0].get(parameter)
+ with output_filename.open("w", encoding="utf-8") as file:
+ # Write header
+ file.write(
+ f"# {parameter}\n\n"
+ f"**Telescope**: {self.array_element}\n\n"
+ f"**Description**: {description}\n\n"
+ "\n"
+ )
+
+ # Write table header
+ file.write(
+ "| Parameter Version | Model Version(s) "
+ "| Value |\n"
+ "|------------------------|--------------------"
+ "|----------------------|\n"
+ )
+
+ # Write table rows
+ for item in comparison_data.get(parameter):
  file.write(
- "| Model Version | Parameter Version "
- "| Value |\n"
- "|--------------------|------------------------"
- "|----------------------|\n"
+ f"| {item['parameter_version']} |"
+ f" {item['model_version']} |"
+ f"{item['value'].replace('](', '](../')} |\n"
  )
 
- # Write table rows
- for item in comparison_data:
- file.write(
- f"| {item['model_version']} |"
- f" {item['parameter_version']} |"
- f"{item['value'].replace('](', '](../')} |\n"
- )
-
- file.write("\n")
- if isinstance(comparison_data[0]["value"], str) and comparison_data[0][
- "value"
- ].endswith(".md)"):
- file.write(
- f"![Parameter plot.](_images/"
- f"{self.telescope_model.name}_{parameter}.png)"
- )
+ file.write("\n")
+ if comparison_data.get(parameter)[0]["file_flag"]:
+ file.write(f"![Parameter plot.](_images/{self.array_element}_{parameter}.png)")
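
With this refactoring, ReadParameters no longer receives a ready-built TelescopeModel; it opens its own DatabaseHandler and reads the telescope, site and model version from an args dictionary. A hedged sketch of the new call pattern, based only on the signature visible in this diff; the database settings and argument values below are placeholders.

from pathlib import Path

from simtools.reporting.docs_read_parameters import ReadParameters

db_config = {"db_server": "localhost", "db_api_port": 27017}  # placeholder MongoDB settings
args = {"telescope": "LSTN-01", "site": "North", "model_version": "6.0.0"}  # placeholder values

reader = ReadParameters(db_config, args, Path("./reports"))

# Writes <telescope name>.md under the output path, grouped by parameter class.
reader.produce_array_element_report()
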
simtools/schemas/metadata.metaschema.yml
@@ -51,9 +51,7 @@ definitions:
  Describes the person or institution that is responsible for this
  data product.
  required:
- - EMAIL
  - NAME
- - ORGANIZATION
  type: object
  additionalProperties: false
  properties:
@@ -90,10 +88,8 @@ definitions:
  links to the data model definition.
  required:
  - DATA
- - DESCRIPTION
  - FORMAT
  - ID
- - VALID
  type: object
  additionalProperties: false
  properties:
@@ -125,7 +121,6 @@ definitions:
  title: Data
  required:
  - CATEGORY
- - ASSOCIATION
  - LEVEL
  - MODEL
  type: object
@@ -346,7 +341,6 @@ definitions:
  associated ID.
  required:
  - TYPE
- - ID
  type: object
  additionalProperties: false
  properties:
@@ -524,6 +518,13 @@ definitions:
  - type: string
  - type: "null"
  default: null
+ AUTHOR:
+ description: |-
+ Author of document.
+ anyOf:
+ - type: string
+ - type: "null"
+ default: null
  ID:
  description: |-
  Unique identifier of document.
simtools/schemas/model_parameter.metaschema.yml
@@ -205,9 +205,6 @@ definitions:
  _id:
  type: string
  description: "DB unique identifier"
- applicable:
- type: boolean
- description: "Model parameter to be used for this telescope and site"
  entry_date:
  type: string
  description: "Value entry data"
@@ -257,7 +254,6 @@ definitions:
  - type: "null"
  description: "Model version."
  required:
- - applicable
  - file
  - instrument
  - site