gammasimtools 0.16.0__py3-none-any.whl → 0.17.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/METADATA +4 -2
  2. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/RECORD +60 -54
  3. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/entry_points.txt +3 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/derive_ctao_array_layouts.py +5 -5
  7. simtools/applications/generate_simtel_event_data.py +36 -46
  8. simtools/applications/merge_tables.py +104 -0
  9. simtools/applications/plot_array_layout.py +145 -258
  10. simtools/applications/production_derive_corsika_limits.py +35 -220
  11. simtools/applications/production_derive_statistics.py +77 -43
  12. simtools/applications/simulate_light_emission.py +1 -0
  13. simtools/applications/simulate_prod.py +30 -18
  14. simtools/applications/simulate_prod_htcondor_generator.py +0 -1
  15. simtools/applications/submit_array_layouts.py +93 -0
  16. simtools/applications/verify_simulation_model_production_tables.py +52 -0
  17. simtools/camera/camera_efficiency.py +3 -3
  18. simtools/configuration/commandline_parser.py +28 -34
  19. simtools/configuration/configurator.py +0 -4
  20. simtools/corsika/corsika_config.py +17 -12
  21. simtools/corsika/primary_particle.py +46 -13
  22. simtools/data_model/metadata_collector.py +7 -3
  23. simtools/db/db_handler.py +11 -11
  24. simtools/db/db_model_upload.py +2 -2
  25. simtools/io_operations/io_handler.py +2 -2
  26. simtools/io_operations/io_table_handler.py +345 -0
  27. simtools/job_execution/htcondor_script_generator.py +2 -2
  28. simtools/job_execution/job_manager.py +7 -121
  29. simtools/layout/array_layout_utils.py +385 -0
  30. simtools/model/array_model.py +5 -0
  31. simtools/model/model_repository.py +134 -0
  32. simtools/production_configuration/{calculate_statistical_errors_grid_point.py → calculate_statistical_uncertainties_grid_point.py} +101 -112
  33. simtools/production_configuration/derive_corsika_limits.py +239 -111
  34. simtools/production_configuration/derive_corsika_limits_grid.py +189 -0
  35. simtools/production_configuration/derive_production_statistics.py +57 -26
  36. simtools/production_configuration/derive_production_statistics_handler.py +70 -37
  37. simtools/production_configuration/interpolation_handler.py +296 -94
  38. simtools/ray_tracing/ray_tracing.py +7 -6
  39. simtools/reporting/docs_read_parameters.py +104 -62
  40. simtools/runners/corsika_simtel_runner.py +4 -1
  41. simtools/runners/runner_services.py +5 -4
  42. simtools/schemas/model_parameters/dsum_threshold.schema.yml +41 -0
  43. simtools/schemas/production_configuration_metrics.schema.yml +2 -2
  44. simtools/simtel/simtel_config_writer.py +34 -14
  45. simtools/simtel/simtel_io_event_reader.py +301 -194
  46. simtools/simtel/simtel_io_event_writer.py +207 -227
  47. simtools/simtel/simtel_io_file_info.py +9 -4
  48. simtools/simtel/simtel_io_metadata.py +20 -5
  49. simtools/simtel/simulator_array.py +2 -2
  50. simtools/simtel/simulator_light_emission.py +79 -34
  51. simtools/simtel/simulator_ray_tracing.py +2 -2
  52. simtools/simulator.py +101 -68
  53. simtools/testing/validate_output.py +4 -1
  54. simtools/utils/general.py +1 -1
  55. simtools/utils/names.py +5 -5
  56. simtools/visualization/plot_array_layout.py +242 -0
  57. simtools/visualization/plot_pixels.py +681 -0
  58. simtools/visualization/visualize.py +3 -219
  59. simtools/applications/production_generate_simulation_config.py +0 -152
  60. simtools/layout/ctao_array_layouts.py +0 -172
  61. simtools/production_configuration/generate_simulation_config.py +0 -158
  62. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/licenses/LICENSE +0 -0
  63. {gammasimtools-0.16.0.dist-info → gammasimtools-0.17.0.dist-info}/top_level.txt +0 -0

simtools/simtel/simtel_io_event_writer.py
@@ -1,10 +1,11 @@
- """Generate a reduced dataset from given simulation event list and save the output to file."""
+ """Generate a reduced dataset from sim_telarray output files using astropy tables."""

  import logging
- from dataclasses import dataclass, field
+ from dataclasses import dataclass

+ import astropy.units as u
  import numpy as np
- import tables
+ from astropy.table import Table
  from eventio import EventIOFile
  from eventio.simtel import (
      ArrayEvent,
@@ -15,107 +16,132 @@ from eventio.simtel import (
      TriggerInformation,
  )

+ from simtools.corsika.primary_particle import PrimaryParticle
+ from simtools.simtel.simtel_io_file_info import get_corsika_run_header
  from simtools.utils.geometry import calculate_circular_mean

- DEFAULT_FILTERS = tables.Filters(complevel=5, complib="zlib", shuffle=True, bitshuffle=False)
-

  @dataclass
- class ShowerEventData:
-     """Shower event data."""
-
-     simulated_energy: list = field(default_factory=list)
-     x_core: list = field(default_factory=list)
-     y_core: list = field(default_factory=list)
-     shower_azimuth: list = field(default_factory=list)
-     shower_altitude: list = field(default_factory=list)
-     shower_id: list = field(default_factory=list)
-     area_weight: list = field(default_factory=list)
-
-     x_core_shower: list = field(default_factory=list)
-     y_core_shower: list = field(default_factory=list)
-     core_distance_shower: list = field(default_factory=list)
-
-
- @dataclass
- class TriggeredEventData:
-     """Triggered event data."""
-
-     triggered_id: list = field(default_factory=list)
-     array_altitudes: list = field(default_factory=list)
-     array_azimuths: list = field(default_factory=list)
-     trigger_telescope_list_list: list = field(default_factory=list)
-     angular_distance: list = field(default_factory=list)
+ class TableSchemas:
+     """Define schemas for output tables with units."""
+
+     shower_schema = {
+         "shower_id": (np.uint32, None),
+         "event_id": (np.uint32, None),
+         "file_id": (np.uint32, None),
+         "simulated_energy": (np.float64, u.TeV),
+         "x_core": (np.float64, u.m),
+         "y_core": (np.float64, u.m),
+         "shower_azimuth": (np.float64, u.rad),
+         "shower_altitude": (np.float64, u.rad),
+         "area_weight": (np.float64, None),
+     }
+
+     trigger_schema = {
+         "shower_id": (np.uint32, None),
+         "event_id": (np.uint32, None),
+         "file_id": (np.uint32, None),
+         "array_altitude": (np.float64, u.rad),
+         "array_azimuth": (np.float64, u.rad),
+         "telescope_list": (str, None),  # Store as comma-separated string
+     }
+
+     file_info_schema = {
+         "file_name": (str, None),
+         "file_id": (np.uint32, None),
+         "particle_id": (np.uint32, None),
+         "energy_min": (np.float64, u.TeV),
+         "energy_max": (np.float64, u.TeV),
+         "viewcone_min": (np.float64, u.deg),
+         "viewcone_max": (np.float64, u.deg),
+         "core_scatter_min": (np.float64, u.m),
+         "core_scatter_max": (np.float64, u.m),
+         "zenith": (np.float64, u.deg),
+         "azimuth": (np.float64, u.deg),
+         "nsb_level": (np.float64, None),
+     }


  class SimtelIOEventDataWriter:
      """
-     Generate a reduced dataset from given simulation event list and save the output to file.
+     Process sim_telarray events and write tables to file.
+
+     Extracts essential information from sim_telarray output files:
+
+     - Shower parameters (energy, core location, direction)
+     - Trigger patterns
+     - Telescope pointing

      Attributes
      ----------
      input_files : list
          List of input file paths to process.
-     output_file : str
-         Path to the output file.
      max_files : int, optional
          Maximum number of files to process.
      """

-     def __init__(self, input_files, output_file, max_files=100):
+     def __init__(self, input_files, max_files=100):
          """Initialize class."""
          self._logger = logging.getLogger(__name__)
          self.input_files = input_files
-         self.output_file = output_file
          try:
              self.max_files = max_files if max_files < len(input_files) else len(input_files)
          except TypeError as exc:
              raise TypeError("No input files provided.") from exc
-         self.shower = None
+
          self.n_use = None
-         self.shower_id_offset = 0
-         self.event_data = ShowerEventData()
-         self.triggered_data = TriggeredEventData()
-         self.file_names = []
+         self.shower_data = []
+         self.trigger_data = []
+         self.file_info = []

      def process_files(self):
-         """Process the input files and store them in an file."""
-         self.shower_id_offset = 0
-
-         for i, file in enumerate(self.input_files[: self.max_files], start=1):
-             self._logger.info(f"Processing file {i}/{self.max_files}: {file}")
-             self._process_file(file)
-             if i == 1 or len(self.event_data.simulated_energy) >= 1e7:
-                 self._write_data(mode="w" if i == 1 else "a")
-                 self.shower_id_offset += len(self.event_data.simulated_energy)
-                 self._reset_data()
-
-         self._write_data(mode="a")
-
-     def get_event_data(self):
          """
-         Return shower and triggered event data.
+         Process input files and return tables.

          Returns
          -------
-         ShowerEventData, TriggeredEventData
-             Shower and triggered event data.
+         list
+             List of astropy tables containing processed data.
          """
-         return self.event_data, self.triggered_data
-
-     def _process_file(self, file):
+         for i, file in enumerate(self.input_files[: self.max_files]):
+             self._logger.info(f"Processing file {i + 1}/{self.max_files}: {file}")
+             self._process_file(i, file)
+
+         return self.create_tables()
+
+     def create_tables(self):
+         """Create astropy tables from collected data."""
+         tables = []
+         for data, schema, name in [
+             (self.shower_data, TableSchemas.shower_schema, "SHOWERS"),
+             (self.trigger_data, TableSchemas.trigger_schema, "TRIGGERS"),
+             (self.file_info, TableSchemas.file_info_schema, "FILE_INFO"),
+         ]:
+             table = Table(rows=data, names=schema.keys())
+             table.meta["EXTNAME"] = name
+             self._add_units_to_table(table, schema)
+             tables.append(table)
+         return tables
+
+     def _add_units_to_table(self, table, schema):
+         """Add units to a single table's columns."""
+         for col, (_, unit) in schema.items():
+             if unit is not None:
+                 table[col].unit = unit
+
+     def _process_file(self, file_id, file):
          """Process a single file and update data lists."""
+         self._process_file_info(file_id, file)
          with EventIOFile(file) as f:
              for eventio_object in f:
                  if isinstance(eventio_object, MCRunHeader):
                      self._process_mc_run_header(eventio_object)
                  elif isinstance(eventio_object, MCShower):
-                     self._process_mc_shower(eventio_object)
+                     self._process_mc_shower(eventio_object, file_id)
                  elif isinstance(eventio_object, MCEvent):
                      self._process_mc_event(eventio_object)
                  elif isinstance(eventio_object, ArrayEvent):
-                     self._process_array_event(eventio_object)
-         self.file_names.append(str(file))
+                     self._process_array_event(eventio_object, file_id)

      def _process_mc_run_header(self, eventio_object):
          """Process MC run header and update data lists."""
@@ -123,36 +149,93 @@ class SimtelIOEventDataWriter:
          self.n_use = mc_head["n_use"]  # reuse factor n_use needed to extend the values below
          self._logger.info(f"Shower reuse factor: {self.n_use} (viewcone: {mc_head['viewcone']})")

-     def _process_mc_shower(self, eventio_object):
+     def _process_file_info(self, file_id, file):
+         """Process file information and append to file info list."""
+         run_info = get_corsika_run_header(file)
+         particle = PrimaryParticle(
+             particle_id_type="eventio_id", particle_id=run_info.get("primary_id", 1)
+         )
+         self.file_info.append(
+             {
+                 "file_name": str(file),
+                 "file_id": file_id,
+                 "particle_id": particle.corsika7_id,
+                 "energy_min": run_info["E_range"][0],
+                 "energy_max": run_info["E_range"][1],
+                 "viewcone_min": run_info["viewcone"][0],
+                 "viewcone_max": run_info["viewcone"][1],
+                 "core_scatter_min": run_info["core_range"][0],
+                 "core_scatter_max": run_info["core_range"][1],
+                 "zenith": 90.0 - np.degrees(run_info["direction"][1]),
+                 "azimuth": np.degrees(run_info["direction"][0]),
+                 "nsb_level": self._get_preliminary_nsb_level(str(file)),
+             }
+         )
+
+     def _process_mc_shower(self, eventio_object, file_id):
          """
          Process MC shower and update shower event list.

          Duplicated entries 'self.n_use' times to match the number simulated events with
          different core positions.
          """
-         self.shower = eventio_object.parse()
-
-         self.event_data.simulated_energy.extend([self.shower["energy"]] * self.n_use)
-         self.event_data.shower_azimuth.extend([self.shower["azimuth"]] * self.n_use)
-         self.event_data.shower_altitude.extend([self.shower["altitude"]] * self.n_use)
+         shower = eventio_object.parse()
+
+         self.shower_data.extend(
+             {
+                 "shower_id": shower["shower"],
+                 "event_id": None,  # filled in _process_mc_event
+                 "file_id": file_id,
+                 "simulated_energy": shower["energy"],
+                 "x_core": None,  # filled in _process_mc_event
+                 "y_core": None,  # filled in _process_mc_event
+                 "shower_azimuth": shower["azimuth"],
+                 "shower_altitude": shower["altitude"],
+                 "area_weight": None,  # filled in _process_mc_event
+             }
+             for _ in range(self.n_use)
+         )

      def _process_mc_event(self, eventio_object):
-         """Process MC event and update shower event list."""
+         """
+         Process MC event and update shower event list.
+
+         Expected to be called n_use times after _process_shower.
+         """
          event = eventio_object.parse()

-         self.event_data.shower_id.append(event["shower_num"])
-         self.event_data.x_core.append(event["xcore"])
-         self.event_data.y_core.append(event["ycore"])
-         self.event_data.area_weight.append(event["aweight"])
+         shower_data_index = len(self.shower_data) - self.n_use + event["event_id"] % 100

-     def _process_array_event(self, eventio_object):
+         try:
+             if self.shower_data[shower_data_index]["shower_id"] != event["shower_num"]:
+                 raise IndexError
+         except IndexError as exc:
+             raise IndexError(
+                 f"Inconsistent shower and MC event data for shower id {event['shower_num']}"
+             ) from exc
+
+         self.shower_data[shower_data_index].update(
+             {
+                 "event_id": event["event_id"],
+                 "x_core": event["xcore"],
+                 "y_core": event["ycore"],
+                 "area_weight": event["aweight"],
+             }
+         )
+
+     def _process_array_event(self, eventio_object, file_id):
          """Process array event and update triggered event list."""
          tracking_positions = []
+         telescopes = []

-         for _, obj in enumerate(eventio_object):
+         for obj in eventio_object:
              if isinstance(obj, TriggerInformation):
-                 self._process_trigger_information(obj)
-
+                 trigger_info = obj.parse()
+                 telescopes = (
+                     trigger_info["triggered_telescopes"]
+                     if len(trigger_info["triggered_telescopes"]) > 0
+                     else []
+                 )
              if isinstance(obj, TrackingPosition):
                  tracking_position = obj.parse()
                  tracking_positions.append(
@@ -162,156 +245,53 @@ class SimtelIOEventDataWriter:
                      }
                  )

-         if tracking_positions:
-             self._process_tracking_positions(tracking_positions)
+         if len(telescopes) > 0 and tracking_positions:
+             self._fill_array_event(telescopes, tracking_positions, eventio_object.event_id, file_id)

-     def _process_tracking_positions(self, tracking_positions):
-         """
-         Process collected tracking positions and update triggered event list.
-
-         Use mean telescope tracking positions, averaged over all triggered telescopes.
-         """
+     def _fill_array_event(self, telescopes, tracking_positions, event_id, file_id):
+         """Add array event triggered events with tracking positions."""
          altitudes = [pos["altitude"] for pos in tracking_positions]
          azimuths = [pos["azimuth"] for pos in tracking_positions]

-         self.triggered_data.array_altitudes.append(np.mean(altitudes))
-         self.triggered_data.array_azimuths.append(calculate_circular_mean(azimuths))
-
-     def _process_trigger_information(self, trigger_info):
-         """Process trigger information and update triggered event list."""
-         trigger_info = trigger_info.parse()
-         telescopes = trigger_info["triggered_telescopes"]
-         if len(telescopes) > 0:
-             # add offset to obtain unique shower IDs among all files
-             self.triggered_data.triggered_id.append(self.shower["shower"] + self.shower_id_offset)
-             self.triggered_data.trigger_telescope_list_list.append(
-                 np.array(telescopes, dtype=np.int16)
-             )
-
-     def _table_descriptions(self):
-         """HDF5 table descriptions for shower data, triggered data, and file names."""
-         shower_data_desc = {
-             "shower_id": tables.Int32Col(),
-             "simulated_energy": tables.Float32Col(),
-             "x_core": tables.Float32Col(),
-             "y_core": tables.Float32Col(),
-             "area_weight": tables.Float32Col(),
-             "shower_azimuth": tables.Float32Col(),
-             "shower_altitude": tables.Float32Col(),
-         }
-         triggered_data_desc = {
-             "triggered_id": tables.Int32Col(),
-             "array_altitudes": tables.Float32Col(),
-             "array_azimuths": tables.Float32Col(),
-             "telescope_list_index": tables.Int32Col(),  # Index into VLArray
-         }
-         file_names_desc = {
-             "file_names": tables.StringCol(256),
-         }
-         return shower_data_desc, triggered_data_desc, file_names_desc
-
-     def _tables(self, output_file, data_group, mode="a"):
-         """Create or get HDF5 tables."""
-         descriptions = self._table_descriptions()
-         table_names = ["reduced_data", "triggered_data", "file_names"]
-
-         table_dict = {}
-         for name, desc in zip(table_names, descriptions):
-             path = f"/data/{name}"
-             table_dict[name] = (
-                 output_file.create_table(
-                     data_group, name, desc, name.replace("_", " ").title(), filters=DEFAULT_FILTERS
-                 )
-                 if mode == "w" or path not in output_file
-                 else output_file.get_node(path)
-             )
-
-         return table_dict["reduced_data"], table_dict["triggered_data"], table_dict["file_names"]
-
-     def _write_event_data(self, reduced_table):
-         """Fill event data tables."""
-         if len(self.event_data.simulated_energy) == 0:
-             return
-         row = reduced_table.row
-         for i, energy in enumerate(self.event_data.simulated_energy):
-             row["shower_id"] = (
-                 self.event_data.shower_id[i] if i < len(self.event_data.shower_id) else 0
-             )
-             row["simulated_energy"] = energy
-             row["x_core"] = self.event_data.x_core[i] if i < len(self.event_data.x_core) else 0
-             row["y_core"] = self.event_data.y_core[i] if i < len(self.event_data.y_core) else 0
-             row["area_weight"] = (
-                 self.event_data.area_weight[i] if i < len(self.event_data.area_weight) else 0
-             )
-             row["shower_azimuth"] = (
-                 self.event_data.shower_azimuth[i] if i < len(self.event_data.shower_azimuth) else 0
-             )
-             row["shower_altitude"] = (
-                 self.event_data.shower_altitude[i]
-                 if i < len(self.event_data.shower_altitude)
-                 else 0
-             )
-             row.append()
-         reduced_table.flush()
-
-     def _writer_triggered_data(self, triggered_table, vlarray):
-         """Fill triggered event data tables."""
-         # Get or create VLArray for telescope lists
-         if len(self.triggered_data.triggered_id) == 0:
-             return
-         row = triggered_table.row
-         start_idx = vlarray.nrows
-         for i, triggered_id in enumerate(self.triggered_data.triggered_id):
-             row["triggered_id"] = triggered_id
-             row["array_altitudes"] = (
-                 self.triggered_data.array_altitudes[i]
-                 if i < len(self.triggered_data.array_altitudes)
-                 else 0
-             )
-             row["array_azimuths"] = (
-                 self.triggered_data.array_azimuths[i]
-                 if i < len(self.triggered_data.array_azimuths)
-                 else 0
-             )
-             row["telescope_list_index"] = start_idx + i  # Index into the VLArray
-             row.append()
-             vlarray.append(
-                 self.triggered_data.trigger_telescope_list_list[i]
-                 if i < len(self.triggered_data.trigger_telescope_list_list)
-                 else []
-             )
-         triggered_table.flush()
-
-     def _write_data(self, mode="a"):
-         """Write data to HDF5 file."""
-         with tables.open_file(self.output_file, mode=mode) as f:
-             data_group = (
-                 f.create_group("/", "data", "Data group")
-                 if mode == "w" or "/data" not in f
-                 else f.get_node("/data")
-             )
-
-             reduced_table, triggered_table, file_names_table = self._tables(f, data_group, mode)
-             self._write_event_data(reduced_table)
-
-             vlarray = (
-                 f.create_vlarray(
-                     data_group,
-                     "trigger_telescope_list_list",
-                     tables.Int16Atom(),
-                     "List of triggered telescope IDs",
-                 )
-                 if mode == "w" or "/data/trigger_telescope_list_list" not in f
-                 else f.get_node("/data/trigger_telescope_list_list")
-             )
-             self._writer_triggered_data(triggered_table, vlarray)
-
-             if self.file_names:
-                 file_names_table.append([[name] for name in self.file_names])
-                 file_names_table.flush()
-
-     def _reset_data(self):
-         """Reset data structures for batch processing."""
-         self.event_data = ShowerEventData()
-         self.triggered_data = TriggeredEventData()
-         self.file_names = []
+         self.trigger_data.append(
+             {
+                 "shower_id": self.shower_data[-1]["shower_id"],
+                 "event_id": event_id,
+                 "file_id": file_id,
+                 "array_altitude": float(np.mean(altitudes)),
+                 "array_azimuth": float(calculate_circular_mean(azimuths)),
+                 "telescope_list": ",".join(map(str, telescopes)),
+             }
+         )
+
+     def _get_preliminary_nsb_level(self, file):
+         """
+         Return preliminary NSB level from file name.
+
+         Hardwired values are used for "dark", "half", and "full" NSB levels
+         (actual values are made up for this example). Will be replaced with
+         reading of sim_telarray metadata entry for NSB level (to be implemented,
+         see issue #1572).
+
+         Parameters
+         ----------
+         file : str
+             File name to extract NSB level from.
+
+         Returns
+         -------
+         float
+             NSB level extracted from file name.
+         """
+         nsb_levels = {"dark": 1.0, "half": 2.0, "full": 5.0}
+
+         for key, value in nsb_levels.items():
+             try:
+                 if key in file.lower():
+                     self._logger.warning(f"NSB level set to hardwired value of {value}")
+                     return value
+             except AttributeError as exc:
+                 raise AttributeError("Invalid file name.") from exc
+
+         self._logger.warning("No NSB level found in file name, defaulting to 1.0")
+         return 1.0
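
The reworked SimtelIOEventDataWriter no longer writes HDF5 itself: it collects rows and returns three astropy tables (SHOWERS, TRIGGERS, FILE_INFO) for the caller to persist. A minimal usage sketch based on the signatures visible in this diff; the file names are placeholders, and in practice the tables are handled by the generate_simtel_event_data application together with the new io_operations/io_table_handler module:

    from simtools.simtel.simtel_io_event_writer import SimtelIOEventDataWriter

    # Placeholder input files; any sim_telarray output files would do.
    writer = SimtelIOEventDataWriter(input_files=["gamma_run000001.simtel.zst"], max_files=10)
    shower_table, trigger_table, file_info_table = writer.process_files()

    # Each table carries its HDU name in meta["EXTNAME"], so they can be written
    # as separate extensions of a single FITS file.
    shower_table.write("reduced_events.fits", overwrite=True)
    trigger_table.write("reduced_events.fits", append=True)
    file_info_table.write("reduced_events.fits", append=True)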

simtools/simtel/simtel_io_file_info.py
@@ -2,7 +2,7 @@
  """Read file info and run headers from sim_telarray files."""

  from eventio import EventIOFile
- from eventio.simtel import MCRunHeader, RunHeader
+ from eventio.simtel import MCRunHeader, MCShower, RunHeader


  def get_corsika_run_number(file):
@@ -25,10 +25,10 @@ def get_corsika_run_number(file):

  def get_corsika_run_header(file):
      """
-     Return the CORSIKA run header from a sim_telarray file.
+     Return the CORSIKA run header information from a sim_telarray file.

      Reads both RunHeader and MCRunHeader object from file and
-     returns a merged dictionary.
+     returns a merged dictionary. Adds primary id from the first event.

      Parameters
      ----------
@@ -42,6 +42,7 @@ def get_corsika_run_header(file):
      """
      run_header = None
      mc_run_header = None
+     primary_id = None

      with EventIOFile(file) as f:
          for o in f:
@@ -49,9 +50,13 @@ def get_corsika_run_header(file):
                  run_header = o.parse()
              elif isinstance(o, MCRunHeader) and mc_run_header is None:
                  mc_run_header = o.parse()
-             if run_header and mc_run_header:
+             elif isinstance(o, MCShower):  # get primary_id from first MCShower
+                 primary_id = o.parse().get("primary_id")
+             if run_header and mc_run_header and primary_id is not None:
                  break

      run_header = run_header or {}
      mc_run_header = mc_run_header or {}
+     if primary_id is not None:
+         mc_run_header["primary_id"] = primary_id
      return run_header | mc_run_header or None
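
get_corsika_run_header() now also reports the primary particle ID taken from the first MCShower block, which _process_file_info() in the event writer relies on. A short sketch with a placeholder path; the keys shown (E_range, viewcone, primary_id) are the ones read by the writer above:

    from simtools.simtel.simtel_io_file_info import get_corsika_run_header

    run_info = get_corsika_run_header("gamma_run000001.simtel.zst")  # placeholder path
    if run_info:
        print("Energy range:", run_info["E_range"])
        print("Viewcone:", run_info["viewcone"])
        print("Primary (eventio ID):", run_info.get("primary_id"))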

simtools/simtel/simtel_io_metadata.py
@@ -1,11 +1,14 @@
  #!/usr/bin/python3
  """Read metadata from sim_telarray files."""

+ import logging
  from functools import cache

  from eventio import EventIOFile
  from eventio.simtel import HistoryMeta

+ _logger = logging.getLogger(__name__)
+

  @cache
  def read_sim_telarray_metadata(file, encoding="utf8"):
@@ -28,10 +31,6 @@ def read_sim_telarray_metadata(file, encoding="utf8"):
      telescope_meta: dict
          Dictionary of telescope metadata, keyed by telescope ID.
      """
-
-     def decode(meta):
-         return {k.decode(encoding): v.decode(encoding) for k, v in meta.items()}
-
      global_meta = None
      telescope_meta = {}

@@ -45,7 +44,7 @@ def read_sim_telarray_metadata(file, encoding="utf8"):
                  break
              continue

-         meta = decode(o.parse())
+         meta = _decode_dictionary(o.parse(), encoding=encoding)
          if o.header.id == -1:
              global_meta = meta
          else:
@@ -62,6 +61,22 @@ def read_sim_telarray_metadata(file, encoding="utf8"):
      }


+ def _decode_dictionary(meta, encoding="utf8"):
+     """Decode metadata dictionary."""
+
+     def safe_decode(byte_str, encoding, errors="ignore"):
+         return byte_str.decode(encoding, errors=errors)
+
+     try:
+         return {k.decode(encoding, errors="ignore"): v.decode(encoding) for k, v in meta.items()}
+     except UnicodeDecodeError as e:
+         _logger.warning(
+             f"Failed to decode metadata with encoding {encoding}: {e}. "
+             "Falling back to 'utf-8' with errors='ignore'."
+         )
+         return {safe_decode(k, encoding): safe_decode(v, encoding) for k, v in meta.items()}
+
+
  def get_sim_telarray_telescope_id(telescope_name, file):
      """
      Return the telescope ID for a given telescope name in a sim_telarray file.
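
The effect of the new decoding fallback can be illustrated on a metadata dictionary containing a byte string that is not valid UTF-8. _decode_dictionary() is a private helper, so this is only a sketch of the intended behaviour, not a supported API:

    from simtools.simtel.simtel_io_metadata import _decode_dictionary

    meta = {b"CONFIG_NAME": b"alpha", b"NOTE": b"caf\xe9"}  # b"\xe9" is not valid UTF-8
    # The first pass raises UnicodeDecodeError on the value, a warning is logged,
    # and the second pass decodes keys and values with errors="ignore".
    print(_decode_dictionary(meta))  # {'CONFIG_NAME': 'alpha', 'NOTE': 'caf'}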

simtools/simtel/simulator_array.py
@@ -71,7 +71,7 @@ class SimulatorArray(SimtelRunner):
          config_dir = self.corsika_config.array_model.get_config_directory()
          self._log_file = self.get_file_name(file_type="log", run_number=run_number)
          histogram_file = self.get_file_name(file_type="histogram", run_number=run_number)
-         output_file = self.get_file_name(file_type="output", run_number=run_number)
+         output_file = self.get_file_name(file_type="simtel_output", run_number=run_number)
          self.corsika_config.array_model.export_all_simtel_config_files()

          command = str(self._simtel_path.joinpath("sim_telarray/bin/sim_telarray"))
@@ -124,7 +124,7 @@ class SimulatorArray(SimtelRunner):
          InvalidOutputFileError
              If simtel output file does not exist.
          """
-         output_file = self.get_file_name(file_type="output", run_number=run_number)
+         output_file = self.get_file_name(file_type="simtel_output", run_number=run_number)
          if not output_file.exists():
              msg = f"sim_telarray output file {output_file} does not exist."
              self._logger.error(msg)