gammasimtools 0.11.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in the supported public registries. It is provided for informational purposes only and reflects the changes between the two package versions.
Files changed (83)
  1. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/METADATA +1 -1
  2. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/RECORD +66 -79
  3. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/entry_points.txt +2 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/convert_all_model_parameters_from_simtel.py +77 -88
  7. simtools/applications/convert_geo_coordinates_of_array_elements.py +1 -1
  8. simtools/applications/db_get_parameter_from_db.py +52 -22
  9. simtools/applications/derive_photon_electron_spectrum.py +1 -1
  10. simtools/applications/docs_produce_array_element_report.py +1 -10
  11. simtools/applications/docs_produce_model_parameter_reports.py +4 -17
  12. simtools/applications/plot_tabular_data.py +14 -2
  13. simtools/applications/{production_derive_limits.py → production_derive_corsika_limits.py} +20 -8
  14. simtools/applications/production_extract_mc_event_data.py +125 -0
  15. simtools/applications/run_application.py +9 -10
  16. simtools/applications/submit_data_from_external.py +1 -1
  17. simtools/applications/submit_model_parameter_from_external.py +2 -1
  18. simtools/camera/single_photon_electron_spectrum.py +6 -2
  19. simtools/configuration/commandline_parser.py +1 -1
  20. simtools/constants.py +7 -0
  21. simtools/data_model/metadata_collector.py +159 -61
  22. simtools/data_model/model_data_writer.py +11 -55
  23. simtools/data_model/schema.py +2 -1
  24. simtools/data_model/validate_data.py +5 -3
  25. simtools/db/db_handler.py +119 -33
  26. simtools/model/model_parameter.py +0 -31
  27. simtools/production_configuration/derive_corsika_limits.py +260 -0
  28. simtools/production_configuration/extract_mc_event_data.py +253 -0
  29. simtools/ray_tracing/mirror_panel_psf.py +1 -1
  30. simtools/reporting/docs_read_parameters.py +164 -91
  31. simtools/schemas/metadata.metaschema.yml +7 -6
  32. simtools/schemas/model_parameter.metaschema.yml +0 -4
  33. simtools/schemas/model_parameter_and_data_schema.metaschema.yml +13 -5
  34. simtools/schemas/model_parameters/array_coordinates.schema.yml +1 -1
  35. simtools/schemas/model_parameters/array_layouts.schema.yml +3 -0
  36. simtools/schemas/model_parameters/asum_shaping.schema.yml +1 -1
  37. simtools/schemas/model_parameters/atmospheric_profile.schema.yml +1 -1
  38. simtools/schemas/model_parameters/camera_config_file.schema.yml +1 -1
  39. simtools/schemas/model_parameters/camera_degraded_map.schema.yml +1 -1
  40. simtools/schemas/model_parameters/camera_filter.schema.yml +1 -1
  41. simtools/schemas/model_parameters/dsum_shaping.schema.yml +1 -1
  42. simtools/schemas/model_parameters/fadc_dev_pedestal.schema.yml +1 -1
  43. simtools/schemas/model_parameters/fadc_lg_dev_pedestal.schema.yml +1 -1
  44. simtools/schemas/model_parameters/fadc_lg_max_sum.schema.yml +3 -3
  45. simtools/schemas/model_parameters/fadc_max_sum.schema.yml +3 -3
  46. simtools/schemas/model_parameters/fake_mirror_list.schema.yml +1 -1
  47. simtools/schemas/model_parameters/lightguide_efficiency_vs_incidence_angle.schema.yml +1 -1
  48. simtools/schemas/model_parameters/lightguide_efficiency_vs_wavelength.schema.yml +1 -1
  49. simtools/schemas/model_parameters/mirror_list.schema.yml +1 -1
  50. simtools/schemas/model_parameters/nsb_reference_spectrum.schema.yml +1 -1
  51. simtools/schemas/model_parameters/nsb_skymap.schema.yml +1 -1
  52. simtools/schemas/model_parameters/primary_mirror_degraded_map.schema.yml +1 -1
  53. simtools/schemas/model_parameters/primary_mirror_segmentation.schema.yml +1 -1
  54. simtools/schemas/model_parameters/secondary_mirror_degraded_map.schema.yml +1 -1
  55. simtools/schemas/model_parameters/secondary_mirror_segmentation.schema.yml +1 -1
  56. simtools/schemas/plot_configuration.metaschema.yml +162 -0
  57. simtools/schemas/production_tables.schema.yml +1 -1
  58. simtools/simtel/simtel_config_reader.py +85 -34
  59. simtools/simtel/simtel_table_reader.py +4 -0
  60. simtools/utils/general.py +50 -9
  61. simtools/utils/names.py +7 -2
  62. simtools/utils/value_conversion.py +6 -4
  63. simtools/visualization/plot_tables.py +25 -20
  64. simtools/visualization/visualize.py +71 -23
  65. simtools/_dev_version/__init__.py +0 -9
  66. simtools/applications/__init__.py +0 -0
  67. simtools/configuration/__init__.py +0 -0
  68. simtools/corsika/__init__.py +0 -0
  69. simtools/data_model/__init__.py +0 -0
  70. simtools/db/__init__.py +0 -0
  71. simtools/io_operations/__init__.py +0 -0
  72. simtools/job_execution/__init__.py +0 -0
  73. simtools/layout/__init__.py +0 -0
  74. simtools/model/__init__.py +0 -0
  75. simtools/production_configuration/limits_calculation.py +0 -202
  76. simtools/ray_tracing/__init__.py +0 -0
  77. simtools/runners/__init__.py +0 -0
  78. simtools/simtel/__init__.py +0 -0
  79. simtools/testing/__init__.py +0 -0
  80. simtools/utils/__init__.py +0 -0
  81. simtools/visualization/__init__.py +0 -0
  82. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/LICENSE +0 -0
  83. {gammasimtools-0.11.0.dist-info → gammasimtools-0.13.0.dist-info}/top_level.txt +0 -0
simtools/production_configuration/derive_corsika_limits.py (new file)
@@ -0,0 +1,260 @@
+ """Calculate the thresholds for energy, radial distance, and viewcone."""
+
+ import astropy.units as u
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import tables
+ from astropy.coordinates import AltAz
+ from ctapipe.coordinates import GroundFrame, TiltedGroundFrame
+
+
+ class LimitCalculator:
+     """
+     Compute thresholds/limits for energy, radial distance, and viewcone.
+
+     Event data is read from the reduced MC event data file.
+
+     Parameters
+     ----------
+     event_data_file : str
+         Path to the HDF5 file containing the event data.
+     telescope_list : list, optional
+         List of telescope IDs to filter the events (default is None).
+     """
+
+     def __init__(self, event_data_file, telescope_list=None):
+         """
+         Initialize the LimitCalculator with the given event data file.
+
+         Parameters
+         ----------
+         event_data_file : str
+             Path to the reduced MC event data file.
+         telescope_list : list, optional
+             List of telescope IDs to filter the events (default is None).
+         """
+         self.event_data_file = event_data_file
+         self.telescope_list = telescope_list
+         self.event_x_core = None
+         self.event_y_core = None
+         self.simulated = None
+         self.shower_id_triggered = None
+         self.list_of_files = None
+         self.shower_sim_azimuth = None
+         self.shower_sim_altitude = None
+         self.array_azimuth = None
+         self.array_altitude = None
+         self.trigger_telescope_list_list = None
+         self.units = {}
+         self._read_event_data()
+
+     def _read_event_data(self):
+         """Read the event data from the reduced MC event data file."""
+         with tables.open_file(self.event_data_file, mode="r") as f:
+             reduced_data = f.root.data.reduced_data
+             triggered_data = f.root.data.triggered_data
+             file_names = f.root.data.file_names
+             trigger_telescope_list_list = f.root.data.trigger_telescope_list_list
+
+             self.event_x_core = reduced_data.col("core_x")
+             self.event_y_core = reduced_data.col("core_y")
+             self.simulated = reduced_data.col("simulated")
+             self.shower_id_triggered = triggered_data.col("shower_id_triggered")
+             self.list_of_files = file_names.col("file_names")
+             self.shower_sim_azimuth = reduced_data.col("shower_sim_azimuth")
+             self.shower_sim_altitude = reduced_data.col("shower_sim_altitude")
+             self.array_altitude = reduced_data.col("array_altitudes")
+             self.array_azimuth = reduced_data.col("array_azimuths")
+
+             self.trigger_telescope_list_list = [
+                 [np.int16(tel) for tel in event] for event in trigger_telescope_list_list
+             ]
+
+     def _compute_limits(self, hist, bin_edges, loss_fraction, limit_type="lower"):
+         """
+         Compute the limits based on the loss fraction.
+
+         Parameters
+         ----------
+         hist : np.ndarray
+             1D histogram array.
+         bin_edges : np.ndarray
+             Array of bin edges.
+         loss_fraction : float
+             Fraction of events to be lost.
+         limit_type : str, optional
+             Type of limit ('lower' or 'upper'). Default is 'lower'.
+
+         Returns
+         -------
+         float
+             Bin edge value corresponding to the threshold.
+         """
+         cumulative_sum = np.cumsum(hist) if limit_type == "upper" else np.cumsum(hist[::-1])
+         total_events = np.sum(hist)
+         threshold = (1 - loss_fraction) * total_events
+         bin_index = np.searchsorted(cumulative_sum, threshold)
+         return bin_edges[bin_index] if limit_type == "upper" else bin_edges[-bin_index]
+
+     def _prepare_data_for_limits(self):
+         """
+         Prepare the data required for computing limits.
+
+         Returns
+         -------
+         tuple
+             Tuple containing core distances, triggered energies, core bins, and energy bins.
+         """
+         shower_id_triggered_masked = self.shower_id_triggered
+         if self.telescope_list is not None:
+             mask = np.array(
+                 [
+                     all(tel in event for tel in self.telescope_list)
+                     for event in self.trigger_telescope_list_list
+                 ]
+             )
+             shower_id_triggered_masked = self.shower_id_triggered[mask]
+
+         triggered_energies = self.simulated[shower_id_triggered_masked]
+         energy_bins = np.logspace(
+             np.log10(triggered_energies.min()), np.log10(triggered_energies.max()), 1000
+         )
+         event_x_core_shower, event_y_core_shower = self._transform_to_shower_coordinates()
+         core_distances_all = np.sqrt(event_x_core_shower**2 + event_y_core_shower**2)
+         core_distances_triggered = core_distances_all[shower_id_triggered_masked]
+         core_bins = np.linspace(
+             core_distances_triggered.min(), core_distances_triggered.max(), 1000
+         )
+
+         return core_distances_triggered, triggered_energies, core_bins, energy_bins
+
+     def compute_lower_energy_limit(self, loss_fraction):
+         """
+         Compute the lower energy limit in TeV based on the event loss fraction.
+
+         Parameters
+         ----------
+         loss_fraction : float
+             Fraction of events to be lost.
+
+         Returns
+         -------
+         astropy.units.Quantity
+             Lower energy limit.
+         """
+         _, triggered_energies, _, energy_bins = self._prepare_data_for_limits()
+
+         hist, _ = np.histogram(triggered_energies, bins=energy_bins)
+         lower_bin_edge_value = self._compute_limits(
+             hist, energy_bins, loss_fraction, limit_type="lower"
+         )
+         return lower_bin_edge_value * u.TeV
+
+     def compute_upper_radial_distance(self, loss_fraction):
+         """
+         Compute the upper radial distance based on the event loss fraction.
+
+         Parameters
+         ----------
+         loss_fraction : float
+             Fraction of events to be lost.
+
+         Returns
+         -------
+         astropy.units.Quantity
+             Upper radial distance in m.
+         """
+         core_distances_triggered, _, core_bins, _ = self._prepare_data_for_limits()
+
+         hist, _ = np.histogram(core_distances_triggered, bins=core_bins)
+         upper_bin_edge_value = self._compute_limits(
+             hist, core_bins, loss_fraction, limit_type="upper"
+         )
+         return upper_bin_edge_value * u.m
+
+     def compute_viewcone(self, loss_fraction):
+         """
+         Compute the viewcone based on the event loss fraction.
+
+         Parameters
+         ----------
+         loss_fraction : float
+             Fraction of events to be lost.
+
+         Returns
+         -------
+         astropy.units.Quantity
+             Viewcone radius in degrees.
+         """
+         # already in radians
+         azimuth_diff = self.array_azimuth - self.shower_sim_azimuth  # * (np.pi / 180.0)
+         sim_altitude_rad = self.shower_sim_altitude  # * (np.pi / 180.0)
+         array_altitude_rad = self.array_altitude  # * (np.pi / 180.0)
+         x_1 = np.cos(azimuth_diff) * np.cos(sim_altitude_rad)
+         y_1 = np.sin(azimuth_diff) * np.cos(sim_altitude_rad)
+         z_1 = np.sin(sim_altitude_rad)
+         x_2 = x_1 * np.sin(array_altitude_rad) - z_1 * np.cos(array_altitude_rad)
+         y_2 = y_1
+         z_2 = x_1 * np.cos(array_altitude_rad) + z_1 * np.sin(array_altitude_rad)
+         off_angles = np.arctan2(np.sqrt(x_2**2 + y_2**2), z_2) * (180.0 / np.pi)
+         angle_bins = np.linspace(off_angles.min(), off_angles.max(), 400)
+         hist, _ = np.histogram(off_angles, bins=angle_bins)
+
+         upper_bin_edge_value = self._compute_limits(
+             hist, angle_bins, loss_fraction, limit_type="upper"
+         )
+         return upper_bin_edge_value * u.deg
+
+     def _transform_to_shower_coordinates(self):
+         """
+         Transform core positions from ground coordinates to shower coordinates.
+
+         Returns
+         -------
+         tuple
+             Core positions in shower coordinates (x, y).
+         """
+         pointing_az = self.shower_sim_azimuth * u.rad
+         pointing_alt = self.shower_sim_altitude * u.rad
+
+         pointing = AltAz(az=pointing_az, alt=pointing_alt)
+         ground = GroundFrame(x=self.event_x_core * u.m, y=self.event_y_core * u.m, z=0 * u.m)
+         shower_frame = ground.transform_to(TiltedGroundFrame(pointing_direction=pointing))
+
+         return shower_frame.x.value, shower_frame.y.value
+
+     def plot_data(self):
+         """Plot the core distances and energies of triggered events."""
+         shower_id_triggered_masked = self.shower_id_triggered
+         if self.telescope_list is not None:
+             mask = np.array(
+                 [
+                     all(tel in event for tel in self.telescope_list)
+                     for event in self.trigger_telescope_list_list
+                 ]
+             )
+             shower_id_triggered_masked = self.shower_id_triggered[mask]
+
+         core_distances_all = np.sqrt(self.event_x_core**2 + self.event_y_core**2)
+         core_distances_triggered = core_distances_all[shower_id_triggered_masked]
+         triggered_energies = self.simulated[shower_id_triggered_masked]
+
+         core_bins = np.linspace(core_distances_triggered.min(), core_distances_triggered.max(), 400)
+         energy_bins = np.logspace(
+             np.log10(triggered_energies.min()), np.log10(triggered_energies.max()), 400
+         )
+         plt.figure(figsize=(8, 6))
+         plt.hist2d(
+             core_distances_triggered,
+             triggered_energies,
+             bins=[core_bins, energy_bins],
+             norm="log",
+             cmap="viridis",
+         )
+
+         plt.colorbar(label="Event Count")
+         plt.xlabel("Core Distance [m]")
+         plt.ylabel("Energy [TeV]")
+         plt.yscale("log")
+         plt.title("2D Histogram of Triggered Core Distance vs Energy")
+         plt.show()
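
The hunk above adds the LimitCalculator class under simtools/production_configuration/derive_corsika_limits.py, which derives CORSIKA limits from a reduced MC event data file. A minimal usage sketch follows (not part of the diff); the file name, telescope IDs, and loss fraction are illustrative placeholders, and the import path is taken from the files-changed list above.

# Illustrative sketch only: input path, telescope IDs, and loss fraction are placeholders.
from simtools.production_configuration.derive_corsika_limits import LimitCalculator

calculator = LimitCalculator(
    event_data_file="reduced_mc_events.hdf5",  # reduced MC event data file (hypothetical name)
    telescope_list=[1, 2, 3, 4],               # optional: keep only events triggering these telescope IDs
)

loss_fraction = 1e-6  # fraction of triggered events allowed to fall outside the derived limits
lower_energy = calculator.compute_lower_energy_limit(loss_fraction)     # astropy Quantity in TeV
upper_radius = calculator.compute_upper_radial_distance(loss_fraction)  # astropy Quantity in m
viewcone = calculator.compute_viewcone(loss_fraction)                   # astropy Quantity in deg
print(lower_energy, upper_radius, viewcone)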
simtools/production_configuration/extract_mc_event_data.py (new file)
@@ -0,0 +1,253 @@
+ """Generate a reduced dataset from given simulation event list and save the output to file."""
+
+ import logging
+
+ import numpy as np
+ import tables
+ from ctapipe.core import Container, Field
+ from ctapipe.io import HDF5TableWriter
+ from eventio import EventIOFile
+ from eventio.simtel import ArrayEvent, MCEvent, MCRunHeader, MCShower, TriggerInformation
+
+ DEFAULT_FILTERS = tables.Filters(complevel=5, complib="zlib", shuffle=True, bitshuffle=False)
+
+
+ class ReducedDatasetContainer(Container):
+     """Container for reduced dataset information."""
+
+     simulated = Field(None, "Simulated energy")
+     core_x = Field(None, "X-coordinate of the shower core")
+     core_y = Field(None, "Y-coordinate of the shower core")
+     shower_sim_azimuth = Field(None, "Simulated azimuth angle of the shower")
+     shower_sim_altitude = Field(None, "Simulated altitude angle of the shower")
+     array_altitudes = Field(None, "Altitudes for the array")
+     array_azimuths = Field(None, "Azimuths for the array")
+
+
+ class TriggeredShowerContainer(Container):
+     """Container for triggered shower information."""
+
+     shower_id_triggered = Field(None, "Triggered shower ID")
+     triggered_energies = Field(None, "Triggered energies")
+
+
+ class FileNamesContainer(Container):
+     """Container for file names."""
+
+     file_names = Field(None, "Input file names")
+
+
+ class MCEventExtractor:
+     """
+     Generate a reduced dataset from given simulation event list and save the output to file.
+
+     Attributes
+     ----------
+     input_files : list
+         List of input file paths to process.
+     output_file : str
+         Path to the output HDF5 file.
+     max_files : int, optional
+         Maximum number of files to process.
+     """
+
+     def __init__(self, input_files, output_file, max_files=100):
+         """
+         Initialize the MCEventExtractor with input files, output file, and max file limit.
+
+         Parameters
+         ----------
+         input_files : list
+             List of input file paths to process.
+         output_file : str
+             Path to the output HDF5 file.
+         max_files : int, optional
+             Maximum number of files to process.
+         """
+         self._logger = logging.getLogger(__name__)
+         self.input_files = input_files
+         self.output_file = output_file
+         self.max_files = max_files
+         self.shower = None
+         self.n_use = None
+         self.shower_id_offset = 0
+
+     def process_files(self):
+         """Process the input files and store them in an HDF5 file."""
+         if not self.input_files:
+             self._logger.warning("No input files provided.")
+             return
+
+         data_lists = self._initialize_data_lists()
+         self.shower_id_offset = 0
+         # Process the first file in write mode
+         self._logger.info(f"Processing file 1/{self.max_files}: {self.input_files[0]}")
+         self._process_file(self.input_files[0], data_lists, str(self.input_files[0]))
+         self._write_all_data(data_lists, mode="w")
+         self.shower_id_offset = len(data_lists["simulated"])
+         self._reset_data_lists(data_lists)
+
+         # Process remaining files in append mode
+         for i_file, file in enumerate(self.input_files[1 : self.max_files], start=2):
+             self._logger.info(f"Processing file {i_file}/{self.max_files}: {file}")
+             self._process_file(file, data_lists, str(file))
+             if len(data_lists["simulated"]) >= 1e7:
+                 self._write_all_data(data_lists, mode="a")
+                 self.shower_id_offset += len(data_lists["simulated"])
+                 self._reset_data_lists(data_lists)
+
+         # Final write for any remaining data
+         self._write_all_data(data_lists, mode="a")
+
+     def _write_all_data(self, data_lists, mode):
+         """Write all data sections at once helper method."""
+         self._write_data(data_lists, mode=mode)
+         self._write_variable_length_data(data_lists["trigger_telescope_list_list"], mode="a")
+         self._write_file_names(data_lists["file_names"], mode="a")
+
+     def _write_file_names(self, file_names, mode="a"):
+         """Write file names to HDF5 file."""
+         print("file_names", file_names)
+         with HDF5TableWriter(
+             self.output_file, group_name="data", mode=mode, filters=DEFAULT_FILTERS
+         ) as writer:
+             file_names_container = FileNamesContainer()
+             for file_name in file_names:
+                 file_names_container.file_names = file_name
+                 writer.write(table_name="file_names", containers=[file_names_container])
+
+     def _write_variable_length_data(self, trigger_telescope_list_list, mode="a"):
+         """Write variable-length array data to HDF5 file."""
+         with tables.open_file(self.output_file, mode=mode) as f:
+             if "trigger_telescope_list_list" in f.root.data:
+                 vlarray = f.root.data.trigger_telescope_list_list
+             else:
+                 vlarray = f.create_vlarray(
+                     f.root.data,
+                     "trigger_telescope_list_list",
+                     tables.Int16Atom(),
+                     "List of triggered telescope IDs",
+                 )
+
+             for item in trigger_telescope_list_list:
+                 vlarray.append(item)
+
+     def _initialize_data_lists(self):
+         """Initialize data lists."""
+         return {
+             "simulated": [],
+             "shower_id_triggered": [],
+             "triggered_energies": [],
+             "core_x": [],
+             "core_y": [],
+             "trigger_telescope_list_list": [],
+             "file_names": [],
+             "shower_sim_azimuth": [],
+             "shower_sim_altitude": [],
+             "array_altitudes": [],
+             "array_azimuths": [],
+         }
+
+     def _process_file(self, file, data_lists, file_name):
+         """Process a single file and update data lists."""
+         with EventIOFile(file) as f:
+             array_altitude = None
+             array_azimuth = None
+             for eventio_object in f:
+                 if isinstance(eventio_object, MCRunHeader):
+                     self._process_mc_run_header(eventio_object, data_lists)
+                 elif isinstance(eventio_object, MCShower):
+                     self._process_mc_shower(
+                         eventio_object, data_lists, array_altitude, array_azimuth
+                     )
+                 elif isinstance(eventio_object, MCEvent):
+                     self._process_mc_event(eventio_object, data_lists)
+                 elif isinstance(eventio_object, ArrayEvent):
+                     self._process_array_event(eventio_object, data_lists)
+             data_lists["file_names"].extend([file_name])
+
+     def _process_mc_run_header(self, eventio_object, data_lists):
+         """Process MC run header and update data lists."""
+         mc_head = eventio_object.parse()
+         self.n_use = mc_head["n_use"]  # reuse factor n_use needed to extend the values below
+         array_altitude = np.mean(mc_head["alt_range"])
+         array_azimuth = np.mean(mc_head["az_range"])
+         data_lists["array_altitudes"].extend(self.n_use * [array_altitude])
+         data_lists["array_azimuths"].extend(self.n_use * [array_azimuth])
+
+     def _process_mc_shower(self, eventio_object, data_lists, array_altitude, array_azimuth):
+         """Process MC shower and update data lists."""
+         self.shower = eventio_object.parse()
+         data_lists["simulated"].extend(self.n_use * [self.shower["energy"]])
+         data_lists["shower_sim_azimuth"].extend(self.n_use * [self.shower["azimuth"]])
+         data_lists["shower_sim_altitude"].extend(self.n_use * [self.shower["altitude"]])
+         data_lists["array_altitudes"].extend(self.n_use * [array_altitude])
+         data_lists["array_azimuths"].extend(self.n_use * [array_azimuth])
+
+     def _process_mc_event(self, eventio_object, data_lists):
+         """Process MC event and update data lists."""
+         event = eventio_object.parse()
+         data_lists["core_x"].append(event["xcore"])
+         data_lists["core_y"].append(event["ycore"])
+
+     def _process_array_event(self, eventio_object, data_lists):
+         """Process array event and update data lists."""
+         for i, obj in enumerate(eventio_object):
+             if i == 0 and isinstance(obj, TriggerInformation):
+                 self._process_trigger_information(obj, data_lists)
+
+     def _process_trigger_information(self, trigger_info, data_lists):
+         """Process trigger information and update data lists."""
+         trigger_info = trigger_info.parse()
+         telescopes = trigger_info["telescopes_with_data"]
+         if len(telescopes) > 0:
+             data_lists["shower_id_triggered"].append(self.shower["shower"] + self.shower_id_offset)
+             data_lists["triggered_energies"].append(self.shower["energy"])
+             data_lists["trigger_telescope_list_list"].append(np.array(telescopes, dtype=np.int16))
+
+     def _write_data(self, data_lists, mode="a"):
+         """Write data to HDF5 file using HDF5TableWriter."""
+         with HDF5TableWriter(
+             self.output_file, group_name="data", mode=mode, filters=DEFAULT_FILTERS
+         ) as writer:
+             # Write reduced dataset container
+             reduced_container = ReducedDatasetContainer()
+             for i in range(len(data_lists["simulated"])):
+                 reduced_container.simulated = data_lists["simulated"][i]
+                 reduced_container.core_x = data_lists["core_x"][i]
+                 reduced_container.core_y = data_lists["core_y"][i]
+                 reduced_container.shower_sim_azimuth = data_lists["shower_sim_azimuth"][i]
+                 reduced_container.shower_sim_altitude = data_lists["shower_sim_altitude"][i]
+                 reduced_container.array_altitudes = data_lists["array_altitudes"][i]
+                 reduced_container.array_azimuths = data_lists["array_azimuths"][i]
+                 writer.write(table_name="reduced_data", containers=[reduced_container])
+
+             # Write triggered shower container
+             triggered_container = TriggeredShowerContainer()
+             for i in range(len(data_lists["shower_id_triggered"])):
+                 triggered_container.shower_id_triggered = data_lists["shower_id_triggered"][i]
+                 triggered_container.triggered_energies = data_lists["triggered_energies"][i]
+                 writer.write(table_name="triggered_data", containers=[triggered_container])
+
+     def _reset_data_lists(self, data_lists):
+         """Reset data lists during batch processing."""
+         for key in data_lists:
+             data_lists[key] = []
+
+     def print_dataset_information(self):
+         """Print information about the datasets in the generated HDF5 file."""
+         try:
+             with tables.open_file(self.output_file, mode="r") as reader:
+                 print("Datasets in file:")
+                 for key in reader.root.data._v_children.keys():  # pylint: disable=protected-access
+                     dset = reader.root.data._v_children[key]  # pylint: disable=protected-access
+                     print(f"- {key}: shape={dset.shape}, dtype={dset.dtype}")
+
+                     # Print first 5 values each
+                     print(f" First 5 values: {dset[:5]}")
+
+                     # Print units if available
+                     # units = dset.attrs.get("units", "N/A")
+                     # print(f" Units: {units}")
+         except Exception as exc:
+             raise ValueError("An error occurred while reading the HDF5 file") from exc
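
The MCEventExtractor above produces the reduced MC event data file that LimitCalculator reads. A minimal driving sketch (not part of the diff); file names are placeholders and the import path follows the files-changed list above.

# Illustrative sketch only: file names are placeholders.
from simtools.production_configuration.extract_mc_event_data import MCEventExtractor

extractor = MCEventExtractor(
    input_files=["run0001.simtel.zst", "run0002.simtel.zst"],  # sim_telarray output files (hypothetical)
    output_file="reduced_mc_events.hdf5",                      # HDF5 file later read by LimitCalculator
    max_files=100,
)
extractor.process_files()
extractor.print_dataset_information()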
simtools/ray_tracing/mirror_panel_psf.py
@@ -295,6 +295,6 @@ class MirrorPanelPSF:
          )
          writer.ModelDataWriter.dump(
              args_dict=self.args_dict,
-             metadata=MetadataCollector(args_dict=self.args_dict).get_top_level_metadata(),
+             metadata=MetadataCollector(args_dict=self.args_dict),
              product_data=result_table,
          )
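
The one-line change in mirror_panel_psf.py passes the MetadataCollector instance itself to ModelDataWriter.dump instead of the dictionary returned by get_top_level_metadata(), consistent with the model_data_writer.py changes listed above. A hedged sketch of the call site after the change; the import lines are inferred from the files-changed list and are not part of the hunk, and args_dict/result_table come from the surrounding MirrorPanelPSF code.

# Inferred sketch of the updated call; imports and surrounding variables are assumptions.
from simtools.data_model import model_data_writer as writer
from simtools.data_model.metadata_collector import MetadataCollector

writer.ModelDataWriter.dump(
    args_dict=self.args_dict,                               # the application's configuration dictionary
    metadata=MetadataCollector(args_dict=self.args_dict),   # previously: ...get_top_level_metadata()
    product_data=result_table,                              # derived PSF result table
)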