gammasimtools 0.20.0__py3-none-any.whl → 0.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/METADATA +1 -1
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/RECORD +24 -23
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/entry_points.txt +1 -1
- simtools/_version.py +2 -2
- simtools/applications/db_generate_compound_indexes.py +1 -1
- simtools/applications/derive_psf_parameters.py +58 -39
- simtools/applications/generate_corsika_histograms.py +7 -184
- simtools/applications/maintain_simulation_model_add_production.py +105 -0
- simtools/applications/plot_simtel_events.py +2 -228
- simtools/applications/print_version.py +8 -7
- simtools/corsika/corsika_histograms.py +81 -0
- simtools/db/db_handler.py +45 -11
- simtools/db/db_model_upload.py +40 -14
- simtools/model/model_repository.py +118 -63
- simtools/ray_tracing/psf_parameter_optimisation.py +999 -565
- simtools/simtel/simtel_config_writer.py +1 -1
- simtools/simulator.py +1 -4
- simtools/version.py +89 -0
- simtools/{corsika/corsika_histograms_visualize.py → visualization/plot_corsika_histograms.py} +109 -0
- simtools/visualization/plot_psf.py +673 -0
- simtools/visualization/plot_simtel_events.py +284 -87
- simtools/applications/maintain_simulation_model_add_production_table.py +0 -71
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/WHEEL +0 -0
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/licenses/LICENSE +0 -0
- {gammasimtools-0.20.0.dist-info → gammasimtools-0.21.0.dist-info}/top_level.txt +0 -0

simtools/applications/plot_simtel_events.py
CHANGED

@@ -81,87 +81,8 @@ from pathlib import Path
 
 import simtools.utils.general as gen
 from simtools.configuration import configurator
-from simtools.corsika.corsika_histograms_visualize import save_figs_to_pdf
-from simtools.data_model.metadata_collector import MetadataCollector
 from simtools.io import io_handler
-from simtools.visualization.plot_simtel_events import (
-    plot_simtel_event_image,
-    plot_simtel_integrated_pedestal_image,
-    plot_simtel_integrated_signal_image,
-    plot_simtel_peak_timing,
-    plot_simtel_step_traces,
-    plot_simtel_time_traces,
-    plot_simtel_waveform_matrix,
-)
-
-PLOT_CHOICES = {
-    "event_image": "event_image",
-    "time_traces": "time_traces",
-    "waveform_matrix": "waveform_matrix",
-    "step_traces": "step_traces",
-    "integrated_signal_image": "integrated_signal_image",
-    "integrated_pedestal_image": "integrated_pedestal_image",
-    "peak_timing": "peak_timing",
-    "all": "all",
-}
-
-
-def _call_peak_timing(
-    filename,
-    *,
-    tel_id=None,
-    sum_threshold=10.0,
-    peak_width=8,
-    examples=3,
-    timing_bins=None,
-    event_index=None,
-):
-    """Call ``plot_simtel_peak_timing`` and support optional ``return_stats``.
-
-    Parameters
-    ----------
-    filename : pathlib.Path or str
-        Path to the input simtel file.
-    tel_id : int, optional
-        Telescope ID to visualize.
-    sum_threshold : float, default 10.0
-        Minimum pixel sum to consider a pixel.
-    peak_width : int, default 8
-        Expected peak width in samples.
-    examples : int, default 3
-        Number of example traces to draw.
-    timing_bins : int or None, optional
-        Number of bins for timing histogram (contiguous if not set).
-    event_index : int or None, optional
-        0-based index of the event to plot; default is the first event.
-
-    Returns
-    -------
-    object or None
-        The matplotlib Figure if available, otherwise ``None``.
-    """
-    try:
-        fig_stats = plot_simtel_peak_timing(
-            filename,
-            tel_id=tel_id,
-            sum_threshold=sum_threshold,
-            peak_width=peak_width,
-            examples=examples,
-            timing_bins=timing_bins,
-            return_stats=True,
-            event_index=event_index,
-        )
-        return fig_stats[0] if isinstance(fig_stats, tuple) else fig_stats
-    except TypeError:
-        return plot_simtel_peak_timing(
-            filename,
-            tel_id=tel_id,
-            sum_threshold=sum_threshold,
-            peak_width=peak_width,
-            examples=examples,
-            timing_bins=timing_bins,
-            event_index=event_index,
-        )
+from simtools.visualization.plot_simtel_events import PLOT_CHOICES, generate_and_save_plots
 
 
 def _parse(label: str):
@@ -186,7 +107,6 @@ def _parse(label: str):
         default=["event_image"],
         choices=sorted(PLOT_CHOICES),
     )
-    # common plotting options
    config.parser.add_argument("--tel_id", type=int, default=None, help="Telescope ID")
    config.parser.add_argument(
        "--n_pixels", type=int, default=3, help="For time_traces: number of pixel traces"
@@ -237,7 +157,6 @@ def _parse(label: str):
         default=None,
         help="0-based index of the event to plot; default is the first event",
     )
-    # outputs
    config.parser.add_argument(
        "--output_file",
        type=str,
@@ -257,123 +176,6 @@ def _parse(label: str):
     return config.initialize(db_config=False, require_command_line=True)
 
 
-def _save_png(fig, out_dir: Path, stem: str, suffix: str, dpi: int):
-    """Save ``fig`` as a PNG into ``out_dir`` using ``stem`` and ``suffix``.
-
-    Errors during saving are logged as warnings and otherwise ignored.
-    """
-    png_path = out_dir.joinpath(f"{stem}_{suffix}.png")
-    try:
-        fig.savefig(png_path, dpi=dpi, bbox_inches="tight")
-    except Exception as ex:  # pylint:disable=broad-except
-        logging.getLogger(__name__).warning("Failed to save PNG %s: %s", png_path, ex)
-
-
-def _make_output_paths(
-    ioh: io_handler.IOHandler, base: str | None, input_file: Path
-) -> tuple[Path, Path]:
-    """Return (out_dir, pdf_path) based on base and input_file."""
-    out_dir = ioh.get_output_directory(label=Path(__file__).stem)
-    if base:
-        pdf_path = ioh.get_output_file(f"{base}_{input_file.stem}")
-    else:
-        pdf_path = ioh.get_output_file(input_file.stem)
-    pdf_path = Path(f"{pdf_path}.pdf") if pdf_path.suffix != ".pdf" else Path(pdf_path)
-    return out_dir, pdf_path
-
-
-def _collect_figures_for_file(
-    filename: Path,
-    plots: list[str],
-    args: dict,
-    out_dir: Path,
-    base_stem: str,
-    save_pngs: bool,
-    dpi: int,
-):
-    """Generate the selected plots for a single sim_telarray file.
-
-    Returns a list of figures. If ``save_pngs`` is True, also writes PNGs to
-    ``out_dir`` using ``base_stem`` for filenames.
-    """
-    logger = logging.getLogger(__name__)
-    figures: list[object] = []
-
-    def add(fig, tag: str):
-        if fig is not None:
-            figures.append(fig)
-            if save_pngs:
-                _save_png(fig, out_dir, base_stem, tag, dpi)
-        else:
-            logger.warning("Plot '%s' returned no figure for %s", tag, filename)
-
-    plots_to_run = (
-        [
-            "event_image",
-            "time_traces",
-            "waveform_matrix",
-            "step_traces",
-            "integrated_signal_image",
-            "integrated_pedestal_image",
-            "peak_timing",
-        ]
-        if "all" in plots
-        else list(plots)
-    )
-
-    # function name -> (callable, defaults)
-    dispatch: dict[str, tuple[object, dict[str, object]]] = {
-        "event_image": (
-            plot_simtel_event_image,
-            {"distance": None, "event_index": None},
-        ),
-        "time_traces": (
-            plot_simtel_time_traces,
-            {"tel_id": None, "n_pixels": 3, "event_index": None},
-        ),
-        "waveform_matrix": (
-            plot_simtel_waveform_matrix,
-            {"tel_id": None, "vmax": None, "event_index": None},
-        ),
-        "step_traces": (
-            plot_simtel_step_traces,
-            {"tel_id": None, "pixel_step": None, "max_pixels": None, "event_index": None},
-        ),
-        "integrated_signal_image": (
-            plot_simtel_integrated_signal_image,
-            {"tel_id": None, "half_width": 8, "event_index": None},
-        ),
-        "integrated_pedestal_image": (
-            plot_simtel_integrated_pedestal_image,
-            {"tel_id": None, "half_width": 8, "offset": 16, "event_index": None},
-        ),
-        "peak_timing": (
-            _call_peak_timing,
-            {
-                "tel_id": None,
-                "sum_threshold": 10.0,
-                "peak_width": 8,
-                "examples": 3,
-                "timing_bins": None,
-                "event_index": None,
-            },
-        ),
-    }
-
-    for plot_name in plots_to_run:
-        entry = dispatch.get(plot_name)
-        if entry is None:
-            logger.warning("Unknown plot selection '%s'", plot_name)
-            continue
-        func, defaults = entry
-        # Build kwargs with user args overriding defaults
-        kwargs = {k: args.get(k, v) for k, v in defaults.items()}
-        fig = func(filename, **kwargs)  # type: ignore[misc]
-        add(fig, plot_name)
-
-    return figures
-
-
 def main():
     """Generate plots from sim_telarray files."""
     label = Path(__file__).stem
@@ -383,38 +185,10 @@ def main():
     logger.setLevel(gen.get_log_level_from_user(args.get("log_level", "INFO")))
 
     ioh = io_handler.IOHandler()
-
     simtel_files = [Path(p).expanduser() for p in gen.ensure_iterable(args["simtel_files"])]
     plots = list(gen.ensure_iterable(args.get("plots")))
 
-
-        out_dir, pdf_path = _make_output_paths(ioh, args.get("output_file"), simtel)
-        figures = _collect_figures_for_file(
-            filename=simtel,
-            plots=plots,
-            args=args,
-            out_dir=out_dir,
-            base_stem=simtel.stem,
-            save_pngs=bool(args.get("save_pngs", False)),
-            dpi=int(args.get("dpi", 300)),
-        )
-
-        if not figures:
-            logger.warning("No figures produced for %s", simtel)
-            continue
-
-        # Save a multipage PDF
-        try:
-            save_figs_to_pdf(figures, pdf_path)
-            logger.info("Saved PDF: %s", pdf_path)
-        except Exception as ex:  # pylint:disable=broad-except
-            logger.error("Failed to save PDF %s: %s", pdf_path, ex)
-
-        # Dump run metadata alongside PDF
-        try:
-            MetadataCollector.dump(args, pdf_path, add_activity_name=True)
-        except Exception as ex:  # pylint:disable=broad-except
-            logger.warning("Failed to write metadata for %s: %s", pdf_path, ex)
+    generate_and_save_plots(simtel_files=simtel_files, plots=plots, args=args, ioh=ioh)
 
 
 if __name__ == "__main__":
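
The net effect of the hunks above is that `plot_simtel_events.py` shrinks to a thin CLI wrapper: `PLOT_CHOICES`, the per-plot dispatch, and the PDF/PNG bookkeeping now live in `simtools.visualization.plot_simtel_events` (see the +284/-87 entry in the file list). A minimal sketch of driving the new entry point directly from Python, assuming only the keyword signature visible in the added line; the argument keys and the input file name are placeholders:

```python
from pathlib import Path

from simtools.io import io_handler
from simtools.visualization.plot_simtel_events import PLOT_CHOICES, generate_and_save_plots

# Plot options keyed like the CLI arguments of the application (assumed keys).
args = {"tel_id": 1, "n_pixels": 3, "save_pngs": True, "dpi": 300}

generate_and_save_plots(
    simtel_files=[Path("gamma_run1.simtel.zst")],  # hypothetical input file
    plots=[name for name in sorted(PLOT_CHOICES) if name != "all"],
    args=args,
    ioh=io_handler.IOHandler(),
)
```
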
simtools/applications/print_version.py
CHANGED

@@ -39,7 +39,7 @@ def _parse(label, description, usage):
     """
     config = configurator.Configurator(label=label, description=description, usage=usage)
 
-    return config.initialize(db_config=True, output=True)
+    return config.initialize(db_config=True, output=True, require_command_line=False)
 
 
 def main():
@@ -68,12 +68,13 @@ def main():
         key, value = version_entry.split(": ", 1)
         version_dict[key] = value
 
-
-
-
-
-
-
+    if not args_dict.get("output_file_from_default", False):
+        ascii_handler.write_data_to_file(
+            data=version_dict,
+            output_file=io_handler_instance.get_output_file(
+                args_dict.get("output_file", "simtools_version.json"), label=label
+            ),
+        )
 
 
 if __name__ == "__main__":
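
The added block writes the collected versions to a JSON file only when the user asked for one; when `output_file` keeps its default value, the application retains its previous print-only behaviour. A rough, self-contained illustration of that gating, with invented version entries and an assumed meaning of the `output_file_from_default` flag:

```python
# Parse "name: version" strings into a dict, as done in main() above.
version_entries = ["simtools: 0.21.0", "sim_telarray: 2024.1"]  # example values only
version_dict = dict(entry.split(": ", 1) for entry in version_entries)

# output_file_from_default=True means --output_file was not passed explicitly,
# so nothing is written to disk (assumed meaning of the configuration flag).
args_dict = {"output_file": "simtools_version.json", "output_file_from_default": True}
if not args_dict.get("output_file_from_default", False):
    print("writing", version_dict, "to", args_dict["output_file"])
```
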
simtools/corsika/corsika_histograms.py
CHANGED

@@ -22,6 +22,7 @@ from simtools.io.ascii_handler import collect_data_from_file
 from simtools.io.hdf5_handler import fill_hdf5_table
 from simtools.utils.geometry import convert_2d_to_radial_distr, rotate
 from simtools.utils.names import sanitize_name
+from simtools.visualization import plot_corsika_histograms as visualize
 
 X_AXIS_STRING = "x axis"
 Y_AXIS_STRING = "y axis"
@@ -111,6 +112,86 @@ class CorsikaHistograms:
         self.read_event_information()
         self._initialize_header()
 
+    def parse_telescope_indices(self, indices_arg):
+        """Return telescope indices as ndarray[int] or None.
+
+        Accepts None, a sequence of strings/ints. Raises ValueError on invalid input.
+        """
+        if indices_arg is None:
+            return None
+        try:
+            return np.array(indices_arg).astype(int)
+        except ValueError as exc:
+            msg = (
+                f"{indices_arg} not a valid input. Please use integer numbers for telescope_indices"
+            )
+            self._logger.error(msg)
+            raise ValueError(msg) from exc
+
+    def should_overwrite(
+        self, write_hdf5: bool, event1d: list | None, event2d: list | None
+    ) -> bool:
+        """Return True if output HDF5 exists and any writing flag is requested."""
+        exists = Path(self.hdf5_file_name).exists()
+        if exists and (write_hdf5 or bool(event1d) or bool(event2d)):
+            self._logger.warning(
+                f"Output hdf5 file {self.hdf5_file_name} already exists. Overwriting it."
+            )
+            return True
+        return False
+
+    def run_export_pipeline(
+        self,
+        *,
+        individual_telescopes: bool,
+        hist_config,
+        indices_arg,
+        write_pdf: bool,
+        write_hdf5: bool,
+        event1d: list | None,
+        event2d: list | None,
+        test: bool = False,
+    ) -> dict:
+        """Run the full histogram export pipeline and return output artifact paths.
+
+        Returns a dict with optional keys: pdf_photons, pdf_event1d, pdf_event2d.
+        """
+        outputs: dict[str, Path | None] = {
+            "pdf_photons": None,
+            "pdf_event_1d": None,
+            "pdf_event_2d": None,
+        }
+
+        indices = self.parse_telescope_indices(indices_arg)
+        overwrite = self.should_overwrite(write_hdf5, event1d, event2d)
+
+        self.set_histograms(
+            telescope_indices=indices,
+            individual_telescopes=individual_telescopes,
+            hist_config=hist_config,
+        )
+
+        if write_pdf:
+            pdf_path = visualize.export_all_photon_figures_pdf(self, test=test)
+            outputs["pdf_photons"] = pdf_path
+        if write_hdf5:
+            self.export_histograms(overwrite=overwrite)
+
+        if event1d is not None:
+            outputs["pdf_event_1d"] = visualize.derive_event_1d_histograms(
+                self, event1d, pdf=write_pdf, hdf5=write_hdf5, overwrite=not write_hdf5
+            )
+        if event2d is not None:
+            outputs["pdf_event_2d"] = visualize.derive_event_2d_histograms(
+                self,
+                event2d,
+                pdf=write_pdf,
+                hdf5=write_hdf5,
+                overwrite=not (write_hdf5 or bool(event1d)),
+            )
+
+        return outputs
+
     @property
     def hdf5_file_name(self):
         """
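
The new `run_export_pipeline` gathers what `generate_corsika_histograms.py` previously did inline (compare the -184 lines in that application). A sketch of a possible call, assuming an already constructed `CorsikaHistograms` instance; the constructor is not part of this diff, and the option values and histogram name below are placeholders:

```python
from simtools.corsika.corsika_histograms import CorsikaHistograms

# Assumption: the constructor accepts the CORSIKA IACT output file; it is unchanged here.
histograms = CorsikaHistograms("gamma_run1.corsika.zst")

outputs = histograms.run_export_pipeline(
    individual_telescopes=False,
    hist_config=None,             # fall back to the default histogram configuration
    indices_arg=["0", "1", "2"],  # strings are accepted; parse_telescope_indices casts to int
    write_pdf=True,
    write_hdf5=True,
    event1d=["total_energy"],     # hypothetical 1D event-histogram name
    event2d=None,
    test=False,
)
print(outputs["pdf_photons"], outputs["pdf_event_1d"], outputs["pdf_event_2d"])
```
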
simtools/db/db_handler.py
CHANGED

@@ -18,6 +18,7 @@ from simtools.data_model import validate_data
 from simtools.io import ascii_handler, io_handler
 from simtools.simtel import simtel_table_reader
 from simtools.utils import names, value_conversion
+from simtools.version import resolve_version_to_latest_patch
 
 __all__ = ["DatabaseHandler"]
 
@@ -72,6 +73,11 @@ class DatabaseHandler:
     """
     DatabaseHandler provides the interface to the DB.
 
+    Note the two types of version variables used in this class:
+
+    - db_simulation_model_version (from mongo_db_config): version of the simulation model database
+    - model_version (from production_tables): version of the model contained in the database
+
     Parameters
     ----------
     mongo_db_config: dict
@@ -83,6 +89,7 @@ class DatabaseHandler:
     db_client = None
     production_table_cached = {}
     model_parameters_cached = {}
+    model_versions_cached = {}
 
     def __init__(self, mongo_db_config=None):
         """Initialize the DatabaseHandler class."""
@@ -96,7 +103,7 @@ class DatabaseHandler:
         self._find_latest_simulation_model_db()
         self.db_name = (
             self.get_db_name(
-
+                db_simulation_model_version=self.mongo_db_config.get("db_simulation_model_version"),
                 model_name=self.mongo_db_config.get("db_simulation_model"),
             )
             if self.mongo_db_config
@@ -110,15 +117,15 @@ class DatabaseHandler:
             with lock:
                 DatabaseHandler.db_client = self._open_mongo_db()
 
-    def get_db_name(self, db_name=None,
+    def get_db_name(self, db_name=None, db_simulation_model_version=None, model_name=None):
         """Build DB name from configuration."""
         if db_name:
             return db_name
-        if
-            return f"{model_name}-{
-        if
+        if db_simulation_model_version and model_name:
+            return f"{model_name}-{db_simulation_model_version.replace('.', '-')}"
+        if db_simulation_model_version or model_name:
             return None
-        return None if (
+        return None if (db_simulation_model_version or model_name) else self.db_name
 
     def _validate_mongo_db_config(self, mongo_db_config):
         """Validate the MongoDB configuration."""
@@ -269,6 +276,9 @@ class DatabaseHandler:
             raise ValueError(
                 "Only one model version can be passed to get_model_parameter, not a list."
             )
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection_name)
+        )
        production_table = self.read_production_table_from_mongo_db(
            collection_name, model_version
        )
@@ -315,6 +325,9 @@ class DatabaseHandler:
            dict containing the parameters
         """
         pars = {}
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection)
+        )
         production_table = self.read_production_table_from_mongo_db(collection, model_version)
         array_element_list = self._get_array_element_list(
             array_element_name, site, production_table, collection
@@ -361,6 +374,11 @@ class DatabaseHandler:
     def _get_parameter_for_model_version(
         self, array_element, model_version, site, collection, production_table
     ):
+        """
+        Get parameters for a specific model version and array element.
+
+        Uses caching wherever possible.
+        """
         cache_key, cache_dict = self._read_cache(
             DatabaseHandler.model_parameters_cached,
             names.validate_site_name(site) if site else None,
@@ -589,6 +607,9 @@ class DatabaseHandler:
         ValueError
             if query returned no results.
         """
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection_name)
+        )
         try:
             return DatabaseHandler.production_table_cached[
                 self._cache_key(None, None, model_version, collection_name)
@@ -612,7 +633,7 @@ class DatabaseHandler:
 
     def get_model_versions(self, collection_name="telescopes"):
         """
-        Get list of model versions from the DB.
+        Get list of model versions from the DB with caching.
 
         Parameters
         ----------
@@ -624,10 +645,12 @@ class DatabaseHandler:
         list
             List of model versions
         """
-
-
-
-
+        if collection_name not in DatabaseHandler.model_versions_cached:
+            collection = self.get_collection("production_tables", db_name=self.db_name)
+            DatabaseHandler.model_versions_cached[collection_name] = sorted(
+                {post["model_version"] for post in collection.find({"collection": collection_name})}
+            )
+        return DatabaseHandler.model_versions_cached[collection_name]
 
     def get_array_elements(self, model_version, collection="telescopes"):
         """
@@ -646,6 +669,9 @@ class DatabaseHandler:
         list
             Sorted list of all array elements found in collection
         """
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection)
+        )
         production_table = self.read_production_table_from_mongo_db(collection, model_version)
         return sorted([entry for entry in production_table["parameters"] if "-design" not in entry])
 
@@ -668,6 +694,9 @@ class DatabaseHandler:
         str
             Design model for a given array element.
         """
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection)
+        )
         production_table = self.read_production_table_from_mongo_db(collection, model_version)
         try:
             return production_table["design_model"][array_element_name]
@@ -696,6 +725,9 @@ class DatabaseHandler:
         list
             Sorted list of all array element names found in collection
         """
+        model_version = resolve_version_to_latest_patch(
+            model_version, self.get_model_versions(collection)
+        )
         production_table = self.read_production_table_from_mongo_db(collection, model_version)
         all_array_elements = production_table["parameters"]
         return sorted(
@@ -846,6 +878,7 @@ class DatabaseHandler:
         self._logger.debug(f"Adding production for {production_table.get('collection')} to to DB")
         collection.insert_one(production_table)
         DatabaseHandler.production_table_cached.clear()
+        DatabaseHandler.model_versions_cached.clear()
 
     def add_new_parameter(
         self,
@@ -1001,6 +1034,7 @@ class DatabaseHandler:
     def _reset_parameter_cache(self):
         """Reset the cache for the parameters."""
         DatabaseHandler.model_parameters_cached.clear()
+        DatabaseHandler.model_versions_cached.clear()
 
     def _get_array_element_list(self, array_element_name, site, production_table, collection):
         """
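
Every `model_version` passed to the handler is now normalised through `resolve_version_to_latest_patch` together with the cached `get_model_versions` list, so callers can request a `major.minor` model and transparently get the newest patch release stored in the production tables. The real implementation lives in the new `simtools/version.py` (not shown in this diff); the snippet below is only a sketch of the behaviour suggested by the name and by the call sites, built with `packaging.version`:

```python
from packaging.version import Version


def resolve_version_to_latest_patch(model_version, available_versions):
    """Sketch only (assumed behaviour): map e.g. '6.2' to the newest '6.2.x' available."""
    if model_version is None:
        return model_version
    requested = Version(model_version)
    if len(requested.release) >= 3:  # already a full major.minor.patch version
        return model_version
    matching = [
        version
        for version in available_versions
        if Version(version).release[:2] == requested.release[:2]
    ]
    return max(matching, key=Version) if matching else model_version


print(resolve_version_to_latest_patch("6.2", ["6.1.0", "6.2.0", "6.2.2"]))  # -> 6.2.2
```
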
simtools/db/db_model_upload.py
CHANGED

@@ -3,6 +3,8 @@
 import logging
 from pathlib import Path
 
+from packaging.version import Version
+
 from simtools.io import ascii_handler
 from simtools.utils import names
 
@@ -51,8 +53,7 @@ def add_model_parameters_to_db(input_path, db):
     """
     input_path = Path(input_path)
     logger.info(f"Reading model parameters from repository path {input_path}")
-
-    for element in array_elements:
+    for element in filter(Path.is_dir, input_path.iterdir()):
         collection = names.get_collection_name_from_array_element_name(element.name, False)
         if collection == "Files":
             logger.info("Files (tables) are uploaded with the corresponding model parameters")
@@ -87,18 +88,40 @@ def add_production_tables_to_db(input_path, db):
 
     for model in filter(Path.is_dir, input_path.iterdir()):
         logger.info(f"Reading production tables for model version {model.name}")
-        model_dict =
-        for file in sorted(model.rglob("*json")):
-            _read_production_table(model_dict, file, model.name)
+        model_dict = _read_production_tables(model)
 
         for collection, data in model_dict.items():
-            if
+            if data["parameters"]:
+                logger.info(f"Adding production table for {collection} to the database")
+                db.add_production_table(production_table=data)
+            else:
                 logger.info(f"No production table for {collection} in model version {model.name}")
-
-
-
-
-
+
+
+def _read_production_tables(model_path):
+    """
+    Read production tables from a directory.
+
+    Take into account that some productions include patch updates only. Read in this cases
+    the base models first.
+
+    Parameters
+    ----------
+    model_path : Path
+        Path to the directory containing the production tables for a specific model version.
+    """
+    model_dict = {}
+    models = [model_path.name]
+    if (model_path / "info.yml").exists():
+        info = ascii_handler.collect_data_from_file(file_name=model_path / "info.yml")
+        models.extend(info.get("model_version_history", []))
+    # sort oldest --> newest
+    models = sorted(set(models), key=Version, reverse=False)
+    for model in models:
+        for file in sorted((model_path.parent / model).rglob("*json")):
+            _read_production_table(model_dict, file, model)
+
+    return model_dict
 
 
 def _read_production_table(model_dict, file, model_name):
@@ -120,9 +143,10 @@ def _read_production_table(model_dict, file, model_name):
             if array_element in ("configuration_corsika", "configuration_sim_telarray"):
                 model_dict[collection]["parameters"] = parameter_dict["parameters"]
             else:
-                model_dict[collection]["parameters"]
-                    array_element
-
+                model_dict[collection]["parameters"].setdefault(array_element, {}).update(
+                    parameter_dict["parameters"][array_element]
+                )
+
     except KeyError as exc:
         logger.error(f"KeyError: {exc}")
         raise
@@ -132,3 +156,5 @@ def _read_production_table(model_dict, file, model_name):
             ]
     except KeyError:
         pass
+
+    model_dict[collection]["model_version"] = model_name