gammasimtools 0.8.2__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as released to their public registry. It is provided for informational purposes only.
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/METADATA +4 -4
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/RECORD +119 -105
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/WHEEL +1 -1
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/entry_points.txt +4 -1
- simtools/_version.py +2 -2
- simtools/applications/calculate_trigger_rate.py +15 -38
- simtools/applications/convert_all_model_parameters_from_simtel.py +9 -28
- simtools/applications/convert_geo_coordinates_of_array_elements.py +54 -53
- simtools/applications/convert_model_parameter_from_simtel.py +2 -2
- simtools/applications/db_add_file_to_db.py +1 -2
- simtools/applications/db_add_simulation_model_from_repository_to_db.py +110 -0
- simtools/applications/db_add_value_from_json_to_db.py +2 -11
- simtools/applications/db_development_tools/write_array_elements_positions_to_repository.py +6 -6
- simtools/applications/db_get_array_layouts_from_db.py +3 -1
- simtools/applications/db_get_file_from_db.py +11 -12
- simtools/applications/db_get_parameter_from_db.py +44 -32
- simtools/applications/derive_mirror_rnda.py +10 -1
- simtools/applications/derive_photon_electron_spectrum.py +99 -0
- simtools/applications/derive_psf_parameters.py +1 -1
- simtools/applications/generate_array_config.py +18 -22
- simtools/applications/generate_regular_arrays.py +24 -21
- simtools/applications/generate_simtel_array_histograms.py +11 -48
- simtools/applications/plot_array_layout.py +3 -1
- simtools/applications/plot_tabular_data.py +84 -0
- simtools/applications/production_generate_simulation_config.py +25 -7
- simtools/applications/production_scale_events.py +3 -4
- simtools/applications/simulate_light_emission.py +2 -2
- simtools/applications/simulate_prod.py +25 -60
- simtools/applications/simulate_prod_htcondor_generator.py +95 -0
- simtools/applications/submit_data_from_external.py +12 -4
- simtools/applications/submit_model_parameter_from_external.py +8 -6
- simtools/applications/validate_camera_efficiency.py +3 -3
- simtools/applications/validate_camera_fov.py +3 -7
- simtools/applications/validate_cumulative_psf.py +3 -7
- simtools/applications/validate_file_using_schema.py +38 -24
- simtools/applications/validate_optics.py +3 -4
- simtools/{camera_efficiency.py → camera/camera_efficiency.py} +1 -4
- simtools/camera/single_photon_electron_spectrum.py +168 -0
- simtools/configuration/commandline_parser.py +14 -13
- simtools/configuration/configurator.py +6 -19
- simtools/constants.py +10 -3
- simtools/corsika/corsika_config.py +8 -7
- simtools/corsika/corsika_histograms.py +1 -1
- simtools/data_model/data_reader.py +0 -3
- simtools/data_model/metadata_collector.py +21 -4
- simtools/data_model/metadata_model.py +8 -111
- simtools/data_model/model_data_writer.py +18 -64
- simtools/data_model/schema.py +213 -0
- simtools/data_model/validate_data.py +73 -51
- simtools/db/db_handler.py +395 -790
- simtools/db/db_model_upload.py +139 -0
- simtools/io_operations/hdf5_handler.py +54 -24
- simtools/io_operations/legacy_data_handler.py +61 -0
- simtools/job_execution/htcondor_script_generator.py +133 -0
- simtools/job_execution/job_manager.py +77 -50
- simtools/layout/array_layout.py +33 -28
- simtools/model/array_model.py +13 -7
- simtools/model/camera.py +4 -2
- simtools/model/model_parameter.py +61 -63
- simtools/model/site_model.py +3 -3
- simtools/production_configuration/calculate_statistical_errors_grid_point.py +119 -144
- simtools/production_configuration/event_scaler.py +7 -17
- simtools/production_configuration/generate_simulation_config.py +5 -32
- simtools/production_configuration/interpolation_handler.py +8 -11
- simtools/ray_tracing/mirror_panel_psf.py +47 -27
- simtools/runners/corsika_runner.py +14 -3
- simtools/runners/corsika_simtel_runner.py +3 -1
- simtools/runners/runner_services.py +3 -3
- simtools/runners/simtel_runner.py +27 -8
- simtools/schemas/input/MST_mirror_2f_measurements.schema.yml +39 -0
- simtools/schemas/input/single_pe_spectrum.schema.yml +38 -0
- simtools/schemas/integration_tests_config.metaschema.yml +23 -3
- simtools/schemas/model_parameter.metaschema.yml +95 -2
- simtools/schemas/model_parameter_and_data_schema.metaschema.yml +2 -0
- simtools/schemas/model_parameters/array_element_position_utm.schema.yml +1 -1
- simtools/schemas/model_parameters/array_window.schema.yml +37 -0
- simtools/schemas/model_parameters/asum_clipping.schema.yml +0 -4
- simtools/schemas/model_parameters/channels_per_chip.schema.yml +1 -1
- simtools/schemas/model_parameters/corsika_iact_io_buffer.schema.yml +2 -2
- simtools/schemas/model_parameters/dsum_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_ignore_below.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_offset.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pedsub.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_pre_clipping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_prescale.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_max.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_presum_shift.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_shaping_renormalize.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_threshold.schema.yml +0 -2
- simtools/schemas/model_parameters/dsum_zero_clip.schema.yml +0 -2
- simtools/schemas/model_parameters/effective_focal_length.schema.yml +31 -1
- simtools/schemas/model_parameters/fadc_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_lg_compensate_pedestal.schema.yml +1 -1
- simtools/schemas/model_parameters/fadc_noise.schema.yml +3 -3
- simtools/schemas/model_parameters/fake_mirror_list.schema.yml +33 -0
- simtools/schemas/model_parameters/laser_photons.schema.yml +2 -2
- simtools/schemas/model_parameters/secondary_mirror_degraded_reflection.schema.yml +1 -1
- simtools/schemas/production_configuration_metrics.schema.yml +68 -0
- simtools/schemas/production_tables.schema.yml +41 -0
- simtools/simtel/simtel_config_writer.py +5 -6
- simtools/simtel/simtel_io_histogram.py +32 -67
- simtools/simtel/simtel_io_histograms.py +15 -30
- simtools/simtel/simtel_table_reader.py +410 -0
- simtools/simtel/simulator_array.py +2 -1
- simtools/simtel/simulator_camera_efficiency.py +11 -4
- simtools/simtel/simulator_light_emission.py +5 -3
- simtools/simtel/simulator_ray_tracing.py +2 -2
- simtools/simulator.py +80 -33
- simtools/testing/configuration.py +12 -8
- simtools/testing/helpers.py +9 -16
- simtools/testing/validate_output.py +152 -68
- simtools/utils/general.py +149 -12
- simtools/utils/names.py +25 -21
- simtools/utils/value_conversion.py +9 -1
- simtools/visualization/plot_tables.py +106 -0
- simtools/visualization/visualize.py +43 -5
- simtools/applications/db_add_model_parameters_from_repository_to_db.py +0 -184
- simtools/db/db_array_elements.py +0 -130
- simtools/db/db_from_repo_handler.py +0 -106
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/LICENSE +0 -0
- {gammasimtools-0.8.2.dist-info → gammasimtools-0.10.0.dist-info}/top_level.txt +0 -0

simtools/db/db_model_upload.py (new file)

@@ -0,0 +1,139 @@
+"""Upload a simulation model (parameters and production tables) to the database."""
+
+import logging
+from pathlib import Path
+
+import simtools.utils.general as gen
+from simtools.utils import names
+
+logger = logging.getLogger(__name__)
+
+
+def add_values_from_json_to_db(file, collection, db, db_name, file_prefix):
+    """
+    Upload new model parameter from json files to db.
+
+    Parameters
+    ----------
+    file : list
+        Json file to be uploaded to the DB.
+    collection : str
+        The DB collection to which to add the file.
+    db : DatabaseHandler
+        Database handler object.
+    db_name : str
+        Name of the database to be created.
+    file_prefix : str
+        Path to location of all additional files to be uploaded.
+    """
+    par_dict = gen.collect_data_from_file(file_name=file)
+    logger.info(
+        f"Adding the following parameter to the DB: {par_dict['parameter']} "
+        f"version {par_dict['parameter_version']} "
+        f"(collection {collection} in database {db_name})"
+    )
+
+    db.add_new_parameter(
+        db_name=db_name,
+        par_dict=par_dict,
+        collection_name=collection,
+        file_prefix=file_prefix,
+    )
+
+
+def add_model_parameters_to_db(args_dict, db):
+    """
+    Read model parameters from a directory and upload them to the database.
+
+    Parameters
+    ----------
+    args_dict : dict
+        Command line arguments.
+    db : DatabaseHandler
+        Database handler object.
+    """
+    input_path = Path(args_dict["input_path"])
+    logger.info(f"Reading model parameters from repository path {input_path}")
+    array_elements = [d for d in input_path.iterdir() if d.is_dir()]
+    for element in array_elements:
+        collection = names.get_collection_name_from_array_element_name(element.name, False)
+        if collection == "Files":
+            logger.info("Files (tables) are uploaded with the corresponding model parameters")
+            continue
+        logger.info(f"Reading model parameters for {element.name} into collection {collection}")
+        files_to_insert = list(Path(element).rglob("*json"))
+        for file in files_to_insert:
+            add_values_from_json_to_db(
+                file=file,
+                collection=collection,
+                db=db,
+                db_name=args_dict["db_name"],
+                file_prefix=input_path / "Files",
+            )
+
+
+def add_production_tables_to_db(args_dict, db):
+    """
+    Read production tables from a directory and upload them to the database.
+
+    One dictionary per collection is prepared for each model version, containing
+    tables of all array elements, sites, and configuration parameters.
+
+    Parameters
+    ----------
+    args_dict : dict
+        Command line arguments.
+    db : DatabaseHandler
+        Database handler object.
+    """
+    input_path = Path(args_dict["input_path"])
+    logger.info(f"Reading production tables from repository path {input_path}")
+
+    for model in filter(Path.is_dir, input_path.iterdir()):
+        logger.info(f"Reading production tables for model version {model.name}")
+        model_dict = {}
+        for file in sorted(model.rglob("*json")):
+            _read_production_table(model_dict, file, model.name)
+
+        for collection, data in model_dict.items():
+            if not data["parameters"]:
+                logger.info(f"No production table for {collection} in model version {model.name}")
+                continue
+            logger.info(f"Adding production table for {collection} to the database")
+            db.add_production_table(
+                db_name=args_dict["db_name"],
+                production_table=data,
+            )
+
+
+def _read_production_table(model_dict, file, model_name):
+    """Read a single production table from file."""
+    array_element = file.stem
+    collection = names.get_collection_name_from_array_element_name(array_element, False)
+    model_dict.setdefault(
+        collection,
+        {
+            "collection": collection,
+            "model_version": model_name,
+            "parameters": {},
+            "design_model": {},
+        },
+    )
+    parameter_dict = gen.collect_data_from_file(file_name=file)
+    logger.info(f"Reading production table for {array_element} (collection {collection})")
+    try:
+        if array_element in ("configuration_corsika", "configuration_sim_telarray"):
+            model_dict[collection]["parameters"] = parameter_dict["parameters"]
+        else:
+            model_dict[collection]["parameters"][array_element] = parameter_dict["parameters"][
+                array_element
+            ]
+    except KeyError as exc:
+        logger.error(f"KeyError: {exc}")
+        raise
+    try:
+        model_dict[collection]["design_model"][array_element] = parameter_dict["design_model"][
+            array_element
+        ]
+    except KeyError:
+        pass
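
A minimal driver sketch for the new module (not part of the diff; the DatabaseHandler constructor arguments, the repository layout, and all path and database names below are assumptions for illustration):

    from simtools.db import db_model_upload
    from simtools.db.db_handler import DatabaseHandler  # class name taken from the docstrings above

    # Assumed layout: one sub-directory per array element under input_path, each
    # holding one json file per model parameter; files referenced by parameters
    # live under input_path/"Files".
    args_dict = {
        "input_path": "simulation-models/model_parameters",  # hypothetical path
        "db_name": "SimulationModelDB-dev",  # hypothetical database name
    }
    db = DatabaseHandler(mongo_db_config=None)  # constructor signature assumed

    db_model_upload.add_model_parameters_to_db(args_dict, db)
    db_model_upload.add_production_tables_to_db(args_dict, db)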
simtools/io_operations/hdf5_handler.py

@@ -41,40 +41,26 @@ def fill_hdf5_table(hist, x_bin_edges, y_bin_edges, x_label, y_label, meta_data)
     meta_data: dict
         Dictionary with the histogram metadata.
     """
-
-
-
+    validate_histogram(hist, y_bin_edges)
+
+    meta_data["x_bin_edges"] = x_bin_edges
     meta_data["x_bin_edges_unit"] = (
         x_bin_edges.unit if isinstance(x_bin_edges, u.Quantity) else u.dimensionless_unscaled
     )
-
     if y_bin_edges is not None:
-
-        meta_data["y_bin_edges"] = sanitize_name(y_label)
-        names = [
-            f"{meta_data['y_bin_edges'].split('__')[0]}_{i}"
-            for i in range(len(y_bin_edges[:-1]))
-        ]
-    else:
-        names = [
-            f"{meta_data['Title'].split('__')[0]}_{i}" for i in range(len(y_bin_edges[:-1]))
-        ]
+        meta_data["y_bin_edges"] = y_bin_edges
         meta_data["y_bin_edges_unit"] = (
             y_bin_edges.unit if isinstance(y_bin_edges, u.Quantity) else u.dimensionless_unscaled
         )
 
-
-            [hist[i, :] for i in range(len(y_bin_edges[:-1]))],
-            names=names,
-            meta=meta_data,
-        )
-
-    else:
+    if hist.ndim == 1:
         if x_label is not None:
-
-
-        else:
+            names = sanitize_name(x_label)
+        try:
             names = meta_data["Title"]
+        except KeyError:
+            _logger.warning("Title not found in metadata.")
+
         table = Table(
             [
                 x_bin_edges[:-1],
@@ -83,9 +69,53 @@ def fill_hdf5_table(hist, x_bin_edges, y_bin_edges, x_label, y_label, meta_data)
             names=(names, sanitize_name("Values")),
             meta=meta_data,
         )
+    else:
+        if y_label is not None:
+            names = [
+                f"{sanitize_name(y_label).split('__')[0]}_{i}" for i in range(len(y_bin_edges[:-1]))
+            ]
+        try:
+            names = [
+                f"{(meta_data['Title']).split('__')[0]}_{sanitize_name(y_label)}_{i}"
+                for i in range(len(y_bin_edges[:-1]))
+            ]
+        except KeyError:
+            _logger.warning("Title not found in metadata.")
+            names = [
+                f"{sanitize_name(y_label).split('__')[0]}_{i}" for i in range(len(y_bin_edges[:-1]))
+            ]
+
+        table = Table(
+            [hist[i, :] for i in range(len(y_bin_edges[:-1]))],
+            names=names,
+            meta=meta_data,
+        )
+
     return table
 
 
+def validate_histogram(hist, y_bin_edges):
+    """Validate histogram dimensions and y_bin_edges consistency.
+
+    Parameters
+    ----------
+    hist (np.ndarray): The histogram array, expected to be 1D or 2D.
+    y_bin_edges (array-like or None): Bin edges for the second dimension (if applicable).
+
+    Raises
+    ------
+    ValueError: If histogram dimensions are invalid or inconsistent with y_bin_edges.
+    """
+    if hist.ndim not in (1, 2):
+        raise ValueError("Histogram must be either 1D or 2D.")
+
+    if hist.ndim == 1 and y_bin_edges is not None:
+        raise ValueError("y_bin_edges should be None for 1D histograms.")
+
+    if hist.ndim == 2 and y_bin_edges is None:
+        raise ValueError("y_bin_edges should not be None for 2D histograms.")
+
+
 def read_hdf5(hdf5_file_name):
     """
     Read a hdf5 output file.
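
A short sketch of calling the reworked fill_hdf5_table (module path from the file list above; labels, metadata, and values are made up). For a 1D histogram, y_bin_edges must now be None or validate_histogram raises:

    import astropy.units as u
    import numpy as np

    from simtools.io_operations.hdf5_handler import fill_hdf5_table, validate_histogram

    hist = np.array([4.0, 8.0, 15.0])  # 1D histogram with three bins
    x_bin_edges = np.array([0.0, 1.0, 2.0, 3.0]) * u.m

    validate_histogram(hist, None)  # passes; validate_histogram(hist, [0, 1]) would raise ValueError
    table = fill_hdf5_table(
        hist=hist,
        x_bin_edges=x_bin_edges,
        y_bin_edges=None,
        x_label="impact_distance",
        y_label=None,
        meta_data={"Title": "impact_distance__hist"},
    )
    print(table.meta["x_bin_edges_unit"])  # m, taken from the Quantity bin edges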
simtools/io_operations/legacy_data_handler.py (new file)

@@ -0,0 +1,61 @@
+#!/usr/bin/python3
+"""Reading of legacy data files (expect that this will be obsolete in future)."""
+
+import logging
+
+from astropy.table import Table
+
+__all__ = [
+    "read_legacy_data_as_table",
+    "read_legacy_lst_single_pe",
+]
+
+logger = logging.getLogger(__name__)
+
+
+def read_legacy_data_as_table(file_path, file_type):
+    """
+    Read legacy data file.
+
+    Parameters
+    ----------
+    file_path: Path
+        Path to the legacy data file.
+    file_type: str
+        Type of legacy data file.
+
+    Returns
+    -------
+    Table
+        Astropy table.
+
+    Raises
+    ------
+    ValueError
+        If unsupported legacy data file type.
+    """
+    logger.debug(f"Reading legacy data file of type {file_type} from {file_path}")
+
+    try:
+        return globals()[f"read_{file_type}"](file_path)
+    except KeyError as exc:
+        raise ValueError(f"Unsupported legacy data file type: {file_type}") from exc
+
+
+def read_legacy_lst_single_pe(file_path):
+    """
+    Read LST single pe file (in legacy data format).
+
+    File contains two columns: amplitude (in units of single p.e) and response.
+
+    Parameters
+    ----------
+    file_path: Path
+        Path to the legacy data file.
+
+    Returns
+    -------
+    Table
+        Astropy table.
+    """
+    return Table.read(file_path, format="ascii.csv", names=("amplitude", "response"))
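
Usage sketch for the dispatch in read_legacy_data_as_table (the file name is hypothetical): file_type "legacy_lst_single_pe" is resolved via globals()[f"read_{file_type}"] to read_legacy_lst_single_pe, so any read_<file_type> function defined in the module can be addressed this way.

    from pathlib import Path

    from simtools.io_operations.legacy_data_handler import read_legacy_data_as_table

    table = read_legacy_data_as_table(
        file_path=Path("lst_single_pe_spectrum.csv"),  # hypothetical two-column csv file
        file_type="legacy_lst_single_pe",
    )
    print(table.colnames)  # ['amplitude', 'response']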
simtools/job_execution/htcondor_script_generator.py (new file)

@@ -0,0 +1,133 @@
+"""HT Condor script generator for simulation production."""
+
+import logging
+from pathlib import Path
+
+import astropy.units as u
+
+_logger = logging.getLogger(__name__)
+
+
+def generate_submission_script(args_dict):
+    """
+    Generate the HT Condor submission script.
+
+    Parameters
+    ----------
+    args_dict: dict
+        Arguments dictionary.
+    """
+    _logger.info("Generating HT Condor submission scripts")
+
+    work_dir = Path(args_dict["output_path"])
+    log_dir = work_dir / "logs"
+    work_dir.mkdir(parents=True, exist_ok=True)
+    log_dir.mkdir(parents=True, exist_ok=True)
+    submit_file_name = "simulate_prod.submit"
+
+    with open(work_dir / f"{submit_file_name}.condor", "w", encoding="utf-8") as submit_file_handle:
+        submit_file_handle.write(
+            _get_submit_file(
+                f"{submit_file_name}.sh",
+                args_dict["apptainer_image"],
+                args_dict["priority"],
+                args_dict["number_of_runs"],
+            )
+        )
+
+    with open(work_dir / f"{submit_file_name}.sh", "w", encoding="utf-8") as submit_script_handle:
+        submit_script_handle.write(_get_submit_script(args_dict))
+
+    Path(work_dir / f"{submit_file_name}.sh").chmod(0o755)
+
+
+def _get_submit_file(executable, apptainer_image, priority, n_jobs):
+    """
+    Return HT Condor submit file.
+
+    Database access variables are passed through the environment file.
+
+    Parameters
+    ----------
+    executable: str
+        Name of the executable script.
+    apptainer_image: str
+        Path to the Apptainer image.
+    priority: int
+        Priority of the job.
+    n_jobs: int
+        Number of jobs to queue.
+
+    Returns
+    -------
+    str
+        HT Condor submit file content.
+    """
+    return f"""universe = container
+container_image = {apptainer_image}
+transfer_container = false
+
+executable = {executable}
+error = logs/err.$(cluster)_$(process)
+output = logs/out.$(cluster)_$(process)
+log = logs/log.$(cluster)_$(process)
+
+priority = {priority}
+arguments = "$(process) env.txt"
+
+queue {n_jobs}
+"""
+
+
+def _get_submit_script(args_dict):
+    """
+    Return HT Condor submit script.
+
+    Parameters
+    ----------
+    args_dict: dict
+        Arguments dictionary.
+
+    Returns
+    -------
+    str
+        HT Condor submit script content.
+    """
+    azimuth_angle_string = f"{args_dict['azimuth_angle'].to(u.deg).value}"
+    zenith_angle_string = f"{args_dict['zenith_angle'].to(u.deg).value}"
+    energy_range = args_dict["energy_range"]
+    energy_range_string = (
+        f'"{energy_range[0].to(u.GeV).value} GeV {energy_range[1].to(u.GeV).value} GeV"'
+    )
+    core_scatter = args_dict["core_scatter"]
+    core_scatter_string = f'"{core_scatter[0]} {core_scatter[1].to(u.m).value} m"'
+
+    label = args_dict["label"] if args_dict["label"] else "simulate-prod"
+
+    return f"""#!/usr/bin/env bash
+
+# Process ID used to generate run number
+process_id="$1"
+# Load environment variables (for DB access)
+set -a; source "$2"
+
+simtools-simulate-prod \\
+    --simulation_software {args_dict["simulation_software"]} \\
+    --label {label} \\
+    --model_version {args_dict["model_version"]} \\
+    --site {args_dict["site"]} \\
+    --array_layout_name {args_dict["array_layout_name"]} \\
+    --primary {args_dict["primary"]} \\
+    --azimuth_angle {azimuth_angle_string} \\
+    --zenith_angle {zenith_angle_string} \\
+    --nshow {args_dict["nshow"]} \\
+    --energy_range {energy_range_string} \\
+    --core_scatter {core_scatter_string} \\
+    --run_number_start $((process_id + {args_dict["run_number_start"]})) \\
+    --number_of_runs 1 \\
+    --submit_engine "local" \\
+    --data_directory /tmp/simtools-data \\
+    --output_path /tmp/simtools-output \\
+    --log_level {args_dict["log_level"]} \\
+    --pack_for_grid_register simtools-output
+"""
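
A sketch of an args_dict that satisfies the reads in generate_submission_script and _get_submit_script (all values hypothetical; angles, energies, and the core-scatter distance must be astropy quantities, as the .to() calls above require):

    import astropy.units as u

    from simtools.job_execution.htcondor_script_generator import generate_submission_script

    args_dict = {
        "output_path": "prod-test",              # submit files and logs/ go here
        "apptainer_image": "simtools-prod.sif",  # hypothetical image
        "priority": 5,
        "number_of_runs": 10,                    # queued as 10 HTCondor processes
        "azimuth_angle": 0 * u.deg,
        "zenith_angle": 20 * u.deg,
        "energy_range": (10 * u.GeV, 100 * u.TeV),
        "core_scatter": (10, 1400 * u.m),        # (number of uses, scatter radius)
        "label": None,                           # falls back to "simulate-prod"
        "simulation_software": "corsika_simtel", # hypothetical value
        "model_version": "6.0.0",
        "site": "North",
        "array_layout_name": "alpha",
        "primary": "gamma",
        "nshow": 100,
        "run_number_start": 1,
        "log_level": "INFO",
    }
    # Writes simulate_prod.submit.condor and simulate_prod.submit.sh under prod-test/.
    generate_submission_script(args_dict)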
simtools/job_execution/job_manager.py

@@ -1,7 +1,7 @@
 """Interface to workload managers like gridengine or HTCondor."""
 
 import logging
-import os
+import subprocess
 from pathlib import Path
 
 import simtools.utils.general as gen
@@ -65,11 +65,9 @@ class JobManager:
         ValueError
             if invalid submit engine.
         """
-
-
-
-            raise ValueError(f"Invalid submit command: {value}")
-        self._submit_engine = value
+        self._submit_engine = value or "local"
+        if self._submit_engine not in self.engines:
+            raise ValueError(f"Invalid submit command: {self._submit_engine}")
 
     def check_submission_system(self):
         """
@@ -77,14 +75,17 @@
 
         Raises
        ------
-
+        JobExecutionError
            if workflow manager is not found.
        """
-
-
-
-
-
+        try:
+            if self.submit_engine in (None, "local") or gen.program_is_executable(
+                self.engines[self.submit_engine]
+            ):
+                return
+        except KeyError:
+            pass
+        raise JobExecutionError(f"Submit engine {self.submit_engine} not found")
 
     def submit(self, run_script=None, run_out_file=None, log_file=None):
         """
@@ -109,12 +110,14 @@
         self._logger.info(f"Job error stream {self.run_out_file + '.err'}")
         self._logger.info(f"Job log stream {self.run_out_file + '.job'}")
 
-
-
-
-
-
-
+        submit_result = 0
+        if self.submit_engine == "local":
+            submit_result = self._submit_local(log_file)
+        else:
+            submit_result = getattr(self, f"_submit_{self.submit_engine}")()
+
+        if submit_result != 0:
+            raise JobExecutionError(f"Job submission failed with return code {submit_result}")
 
     def _submit_local(self, log_file):
         """
@@ -125,50 +128,72 @@
         log_file: str or Path
             The log file of the actual simulator (CORSIKA or sim_telarray).
             Provided in order to print the log excerpt in case of run time error.
+
+        Returns
+        -------
+        int
+            Return code of the executed script
         """
         self._logger.info("Running script locally")
 
-
-
-        if not self.test:
-            sys_output = os.system(shell_command)
-            if sys_output != 0:
-                msg = gen.get_log_excerpt(f"{self.run_out_file}.err")
-                self._logger.error(msg)
-                if log_file.exists() and gen.get_file_age(log_file) < 5:
-                    msg = gen.get_log_excerpt(log_file)
-                    self._logger.error(msg)
-                raise JobExecutionError("See excerpt from log file above\n")
-        else:
+        if self.test:
             self._logger.info("Testing (local)")
+            return 0
+
+        result = None
+        try:
+            with (
+                open(f"{self.run_out_file}.out", "w", encoding="utf-8") as stdout,
+                open(f"{self.run_out_file}.err", "w", encoding="utf-8") as stderr,
+            ):
+                result = subprocess.run(
+                    f"{self.run_script}",
+                    shell=True,
+                    check=True,
+                    text=True,
+                    stdout=stdout,
+                    stderr=stderr,
+                )
+        except subprocess.CalledProcessError as exc:
+            self._logger.error(gen.get_log_excerpt(f"{self.run_out_file}.err"))
+            if log_file.exists() and gen.get_file_age(log_file) < 5:
+                self._logger.error(gen.get_log_excerpt(log_file))
+            raise JobExecutionError("See excerpt from log file above\n") from exc
+
+        return result.returncode if result else 0
 
     def _submit_htcondor(self):
         """Submit a job described by a shell script to HTcondor."""
         _condor_file = self.run_script + ".condor"
+        lines = [
+            f"Executable = {self.run_script}",
+            f"Output = {self.run_out_file}.out",
+            f"Error = {self.run_out_file}.err",
+            f"Log = {self.run_out_file}.job",
+        ]
+        if self.submit_options:
+            lines.extend(option.lstrip() for option in self.submit_options.split(","))
+        lines.append("queue 1")
         try:
             with open(_condor_file, "w", encoding="utf-8") as file:
-                file.write(
-                file.write(f"Output = {self.run_out_file + '.out'}\n")
-                file.write(f"Error = {self.run_out_file + '.err'}\n")
-                file.write(f"Log = {self.run_out_file + '.job'}\n")
-                if self.submit_options:
-                    submit_option_list = self.submit_options.split(",")
-                    for option in submit_option_list:
-                        file.write(option.lstrip() + "\n")
-                file.write("queue 1\n")
+                file.write("\n".join(lines) + "\n")
         except FileNotFoundError as exc:
             self._logger.error(f"Failed creating condor submission file {_condor_file}")
             raise JobExecutionError from exc
 
-        self._execute(self.submit_engine, self.engines[self.submit_engine]
+        return self._execute(self.submit_engine, [self.engines[self.submit_engine], _condor_file])
 
     def _submit_gridengine(self):
         """Submit a job described by a shell script to gridengine."""
-        this_sub_cmd =
-
-
-
-
+        this_sub_cmd = [
+            self.engines[self.submit_engine],
+            "-o",
+            self.run_out_file + ".out",
+            "-e",
+            self.run_out_file + ".err",
+            self.run_script,
+        ]
+        return self._execute(self.submit_engine, this_sub_cmd)
 
     def _execute(self, engine, shell_command):
         """
@@ -178,13 +203,15 @@
         ----------
         engine : str
             Engine to use.
-        shell_command :
-
+        shell_command : list
+            List of shell command plus arguments.
         """
         self._logger.info(f"Submitting script to {engine}")
         self._logger.debug(shell_command)
+        result = None
         if not self.test:
-
+            result = subprocess.run(shell_command, shell=True, check=True)
         else:
-            self._logger.info(f"Testing ({engine})")
+            self._logger.info(f"Testing ({engine}: {shell_command})")
+
+        return result.returncode if result else 0
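
An end-to-end sketch of the reworked JobManager flow (the constructor arguments are assumptions; the submit() signature and the "local" fallback come from the diff):

    from simtools.job_execution.job_manager import JobManager

    job_manager = JobManager(submit_engine=None, test=True)  # constructor signature assumed
    # submit_engine=None now falls back to "local" in the setter instead of raising.
    job_manager.check_submission_system()  # raises JobExecutionError if the engine binary is missing
    job_manager.submit(
        run_script="run_corsika.sh",      # hypothetical run script
        run_out_file="logs/run_corsika",  # .out/.err/.job stream names are derived from this stem
        log_file="logs/corsika.log",      # simulator log, excerpted on failure
    )
    # Local runs now go through subprocess.run with captured stdout/stderr, and
    # submit() raises JobExecutionError on a non-zero return code.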