gammasimtools 0.16.0__py3-none-any.whl → 0.18.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/METADATA +5 -2
  2. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/RECORD +82 -74
  3. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/entry_points.txt +4 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/db_add_simulation_model_from_repository_to_db.py +10 -1
  7. simtools/applications/derive_ctao_array_layouts.py +5 -5
  8. simtools/applications/derive_mirror_rnda.py +1 -1
  9. simtools/applications/generate_simtel_event_data.py +128 -46
  10. simtools/applications/merge_tables.py +102 -0
  11. simtools/applications/plot_array_layout.py +145 -258
  12. simtools/applications/plot_tabular_data.py +12 -1
  13. simtools/applications/plot_tabular_data_for_model_parameter.py +103 -0
  14. simtools/applications/production_derive_corsika_limits.py +78 -225
  15. simtools/applications/production_derive_statistics.py +77 -43
  16. simtools/applications/simulate_light_emission.py +1 -0
  17. simtools/applications/simulate_prod.py +30 -18
  18. simtools/applications/simulate_prod_htcondor_generator.py +0 -1
  19. simtools/applications/submit_array_layouts.py +93 -0
  20. simtools/applications/verify_simulation_model_production_tables.py +52 -0
  21. simtools/camera/camera_efficiency.py +3 -3
  22. simtools/configuration/commandline_parser.py +30 -35
  23. simtools/configuration/configurator.py +0 -4
  24. simtools/constants.py +2 -0
  25. simtools/corsika/corsika_config.py +17 -12
  26. simtools/corsika/primary_particle.py +46 -13
  27. simtools/data_model/metadata_collector.py +7 -3
  28. simtools/data_model/schema.py +15 -1
  29. simtools/db/db_handler.py +16 -11
  30. simtools/db/db_model_upload.py +2 -2
  31. simtools/io_operations/io_handler.py +2 -2
  32. simtools/io_operations/io_table_handler.py +345 -0
  33. simtools/job_execution/htcondor_script_generator.py +2 -2
  34. simtools/job_execution/job_manager.py +7 -121
  35. simtools/layout/array_layout_utils.py +389 -0
  36. simtools/model/array_model.py +10 -1
  37. simtools/model/model_repository.py +134 -0
  38. simtools/production_configuration/{calculate_statistical_errors_grid_point.py → calculate_statistical_uncertainties_grid_point.py} +101 -112
  39. simtools/production_configuration/derive_corsika_limits.py +239 -111
  40. simtools/production_configuration/derive_corsika_limits_grid.py +232 -0
  41. simtools/production_configuration/derive_production_statistics.py +57 -26
  42. simtools/production_configuration/derive_production_statistics_handler.py +70 -37
  43. simtools/production_configuration/interpolation_handler.py +296 -94
  44. simtools/ray_tracing/ray_tracing.py +7 -6
  45. simtools/reporting/docs_read_parameters.py +104 -62
  46. simtools/resources/array-element-ids.json +126 -0
  47. simtools/runners/corsika_simtel_runner.py +4 -1
  48. simtools/runners/runner_services.py +5 -4
  49. simtools/schemas/model_parameter_and_data_schema.metaschema.yml +5 -1
  50. simtools/schemas/model_parameters/atmospheric_profile.schema.yml +41 -0
  51. simtools/schemas/model_parameters/atmospheric_transmission.schema.yml +43 -0
  52. simtools/schemas/model_parameters/camera_filter.schema.yml +10 -0
  53. simtools/schemas/model_parameters/camera_filter_incidence_angle.schema.yml +10 -0
  54. simtools/schemas/model_parameters/discriminator_pulse_shape.schema.yml +31 -0
  55. simtools/schemas/model_parameters/dsum_threshold.schema.yml +41 -0
  56. simtools/schemas/model_parameters/fadc_pulse_shape.schema.yml +12 -0
  57. simtools/schemas/model_parameters/lightguide_efficiency_vs_incidence_angle.schema.yml +10 -0
  58. simtools/schemas/model_parameters/mirror_reflectivity.schema.yml +10 -0
  59. simtools/schemas/model_parameters/nsb_reference_spectrum.schema.yml +12 -0
  60. simtools/schemas/model_parameters/pm_photoelectron_spectrum.schema.yml +19 -0
  61. simtools/schemas/model_parameters/quantum_efficiency.schema.yml +10 -0
  62. simtools/schemas/plot_configuration.metaschema.yml +46 -57
  63. simtools/schemas/production_configuration_metrics.schema.yml +2 -2
  64. simtools/simtel/simtel_config_writer.py +34 -14
  65. simtools/simtel/simtel_io_event_reader.py +301 -194
  66. simtools/simtel/simtel_io_event_writer.py +237 -221
  67. simtools/simtel/simtel_io_file_info.py +9 -4
  68. simtools/simtel/simtel_io_metadata.py +119 -8
  69. simtools/simtel/simulator_array.py +2 -2
  70. simtools/simtel/simulator_light_emission.py +79 -34
  71. simtools/simtel/simulator_ray_tracing.py +2 -2
  72. simtools/simulator.py +101 -68
  73. simtools/testing/validate_output.py +4 -1
  74. simtools/utils/general.py +1 -3
  75. simtools/utils/names.py +76 -7
  76. simtools/visualization/plot_array_layout.py +242 -0
  77. simtools/visualization/plot_pixels.py +680 -0
  78. simtools/visualization/plot_tables.py +81 -2
  79. simtools/visualization/visualize.py +3 -219
  80. simtools/applications/production_generate_simulation_config.py +0 -152
  81. simtools/layout/ctao_array_layouts.py +0 -172
  82. simtools/production_configuration/generate_simulation_config.py +0 -158
  83. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/licenses/LICENSE +0 -0
  84. {gammasimtools-0.16.0.dist-info → gammasimtools-0.18.0.dist-info}/top_level.txt +0 -0
  85. /simtools/{schemas → resources}/array_elements.yml +0 -0
simtools/io_operations/io_handler.py
@@ -72,7 +72,7 @@ class IOHandler(metaclass=IOHandlerSingleton):
         label: str
             Instance label.
         sub_dir: str
-            Name of the subdirectory (ray-tracing, model etc)
+            Name of the subdirectory (ray_tracing, model etc)
 
         Returns
         -------
@@ -116,7 +116,7 @@ class IOHandler(metaclass=IOHandlerSingleton):
         label: str
             Instance label.
         sub_dir: str
-            Name of the subdirectory (ray-tracing, model etc)
+            Name of the subdirectory (ray_tracing, model etc)
 
         Returns
         -------
simtools/io_operations/io_table_handler.py (new file)
@@ -0,0 +1,345 @@
+"""IO operations on astropy tables."""
+
+import logging
+from pathlib import Path
+
+import astropy.units as u
+import h5py
+import numpy as np
+from astropy.io import fits
+from astropy.table import Table, vstack
+
+_logger = logging.getLogger(__name__)
+
+
+def read_table_list(input_file, table_names, include_indexed_tables=False):
+    """
+    Read available tables found in the input file.
+
+    If include_indexed_tables is True, search for tables with the same name
+    but with different suffixes (e.g., "_0", "_1", etc.).
+
+    """
+    file_type = read_table_file_type(input_file)
+    if file_type == "HDF5":
+        return _read_table_list_hdf5(input_file, table_names, include_indexed_tables)
+    if file_type == "FITS":
+        return _read_table_list_fits(input_file, table_names, include_indexed_tables)
+    return None
+
+
+def _read_table_list_hdf5(input_file, table_names, include_indexed_tables):
+    """Read available tables from HDF5 file."""
+    datasets = {name: [] for name in table_names}
+
+    def is_indexed_variant(name, base):
+        if not name.startswith(f"{base}_"):
+            return False
+        suffix = name[len(base) + 1 :]
+        return suffix.isdigit()
+
+    def visitor(name, obj):
+        if not isinstance(obj, h5py.Dataset):
+            return
+
+        for base in datasets:
+            if name == base or (include_indexed_tables and is_indexed_variant(name, base)):
+                datasets[base].append(name)
+
+    with h5py.File(input_file, "r") as f:
+        f.visititems(visitor)
+
+    return datasets
+
+
+def _read_table_list_fits(input_file, table_names, include_indexed_tables):
+    """Read available tables from FITS file."""
+    datasets = {name: [] for name in table_names}
+
+    with fits.open(input_file) as hdul:
+        for hdu in hdul[1:]:
+            if not isinstance(hdu, fits.BinTableHDU):
+                continue
+            name = hdu.name
+            if name in table_names:
+                datasets[name].append(name)
+                continue
+            if not include_indexed_tables or "_" not in name:
+                continue
+            base, _, suffix = name.rpartition("_")
+            if base in table_names and suffix.isdigit():
+                datasets[base].append(name)
+
+    return datasets
+
+
+def merge_tables(input_files, input_table_names, output_file):
+    """
+    Merge multiple astropy tables from different files into a single file.
+
+    Handles multiple tables per file and supports both HDF5 and FITS formats.
+    Updates 'file_id' column if present to maintain file origin tracking.
+
+    Parameters
+    ----------
+    input_files : list of str
+        List of input file paths to be merged.
+    input_table_names : list of str
+        List of table names to be merged from each input file.
+    output_file : str
+        Path to the output file where the merged data will be saved.
+
+    Returns
+    -------
+    None
+    """
+    _logger.info(f"Merging {len(input_files)} files into {output_file}")
+
+    file_type = read_table_file_type(input_files)
+    merged_tables = _merge(input_files, input_table_names, file_type, output_file)
+    if file_type != "HDF5":
+        write_tables(merged_tables, output_file, file_type=file_type)
+
+
+def read_table_file_type(input_files):
+    """
+    Determine the file type of the input files.
+
+    All input files must be of the same type (either all HDF5 or all FITS).
+
+    Parameters
+    ----------
+    input_files : list of str
+        List of input file paths.
+
+    Returns
+    -------
+    str
+        File type ('HDF5' or 'FITS').
+    """
+    if not input_files:
+        raise ValueError("No input files provided.")
+    input_files = [input_files] if isinstance(input_files, str | Path) else input_files
+
+    def get_type(f):
+        if f.lower().endswith((".hdf5", ".h5")):
+            return "HDF5"
+        if f.lower().endswith((".fits", ".fits.gz")):
+            return "FITS"
+        raise ValueError(f"Unsupported file type: {f}")
+
+    file_types = {get_type(str(f)) for f in input_files}
+    if len(file_types) != 1:
+        raise ValueError("All input files must be of the same type (either all HDF5 or all FITS)")
+    return file_types.pop()
+
+
+def _merge(input_files, table_names, file_type, output_file, add_file_id_to_table_name=True):
+    """
+    Merge tables from multiple input files into single tables.
+
+    Parameters
+    ----------
+    input_files : list of str
+        List of input file paths to be merged.
+    table_names : list of str
+        List of table names to be merged from each input file.
+    file_type : str
+        Type of the input files ('HDF5' or 'FITS').
+    add_file_id_to_table_name : bool, optional
+        If True, appends the file index to the table name.
+
+    Returns
+    -------
+    dict
+        Dictionary with table names as keys and merged astropy tables as values.
+    """
+    merged = {name: [] for name in table_names}
+    is_hdf5 = file_type == "HDF5"
+
+    def update_file_id(table, idx):
+        if "file_id" in table.colnames:
+            table["file_id"] = idx
+
+    def process_table(table, key, idx):
+        table_name = f"{key}_{idx}" if add_file_id_to_table_name else key
+        update_file_id(table, idx)
+        if is_hdf5:
+            write_table_in_hdf5(table, output_file, table_name)
+            if idx == 0:
+                copy_metadata_to_hdf5(input_files[0], output_file, table_name)
+        else:
+            merged[key].append(table)
+
+    for idx, file in enumerate(input_files):
+        tables = read_tables(file, table_names, file_type)
+        for key, table in tables.items():
+            process_table(table, key, idx)
+
+    if file_type != "HDF5":
+        merged = {k: vstack(v, metadata_conflicts="silent") for k, v in merged.items()}
+
+    return merged
+
+
+def read_tables(file, table_names, file_type=None):
+    """
+    Read tables from a file.
+
+    Parameters
+    ----------
+    file : str
+        Path to the input file.
+    table_names : list of str
+        List of table names to read.
+    file_type : str
+        Type of the input file ('HDF5' or 'FITS').
+
+    Returns
+    -------
+    dict
+        Dictionary with table names as keys and astropy tables as values.
+    """
+    file_type = file_type or read_table_file_type([file])
+    if file_type == "HDF5":
+        return {name: read_table_from_hdf5(file, name) for name in table_names}
+    if file_type == "FITS":
+        return {name: Table.read(file, hdu=name) for name in table_names}
+    raise ValueError(f"Unsupported file format: {file_type}. Supported formats are HDF5 and FITS.")
+
+
+def read_table_from_hdf5(file, table_name):
+    """
+    Read a single astropy table from an HDF5 file.
+
+    Parameters
+    ----------
+    file : str or Path
+        Path to the input HDF5 file.
+    table_name : str
+        Name of the table to read.
+
+    Returns
+    -------
+    astropy.table.Table
+        The requested astropy table.
+    """
+    table = Table.read(file, path=table_name)
+    with h5py.File(file, "r") as f:
+        dset = f[table_name]
+        for col in table.colnames:
+            unit_key = f"{col}_unit"
+            if unit_key in dset.attrs:
+                table[col].unit = u.Unit(dset.attrs[unit_key])
+    return table
+
+
+def write_tables(tables, output_file, overwrite_existing=True, file_type=None):
+    """
+    Write tables to file (overwriting if exists).
+
+    Parameters
+    ----------
+    tables : list or dict
+        List of astropy tables, or dictionary with astropy tables as values.
+    output_file : str or Path
+        Path to the output file.
+    overwrite_existing : bool
+        If True, overwrite the output file if it exists.
+    file_type : str
+        Type of the output file ('HDF5' or 'FITS').
+
+    Returns
+    -------
+    None
+    """
+    output_file = Path(output_file)
+    file_type = file_type or read_table_file_type([output_file])
+    if output_file.exists():
+        if overwrite_existing:
+            output_file.unlink()
+        else:
+            raise FileExistsError(f"Output file {output_file} already exists.")
+    hdus = [fits.PrimaryHDU()]
+    if isinstance(tables, dict):
+        tables = list(tables.values())
+    for table in tables:
+        _table_name = table.meta.get("EXTNAME")
+        _logger.info(f"Writing table {_table_name} of length {len(table)} to {output_file}")
+        if file_type == "HDF5":
+            write_table_in_hdf5(table, output_file, _table_name)
+        if file_type == "FITS":
+            hdu = fits.table_to_hdu(table)
+            hdu.name = _table_name
+            hdus.append(hdu)
+
+    if file_type == "FITS":
+        fits.HDUList(hdus).writeto(output_file, checksum=False)
+
+
+def write_table_in_hdf5(table, output_file, table_name):
+    """
+    Write or append a single astropy table to an HDF5 file.
+
+    Parameters
+    ----------
+    table : astropy.table.Table
+        The astropy table to write.
+    output_file : str or Path
+        Path to the output HDF5 file.
+    table_name : str
+        Name of the table in the HDF5 file.
+
+    Returns
+    -------
+    None
+    """
+    for col in table.colnames:
+        if table[col].dtype.kind == "U":  # HDF5 does not support unicode
+            table[col] = table[col].astype("S")
+
+    with h5py.File(output_file, "a") as f:
+        data = np.array(table)
+        if table_name not in f:
+            maxshape = (None, *data.shape[1:])
+            dset = f.create_dataset(
+                table_name,
+                data=data,
+                maxshape=maxshape,
+                chunks=True,
+                compression="gzip",
+                compression_opts=4,
+            )
+            for key, val in table.meta.items():
+                dset.attrs[key] = val
+            for col in table.colnames:
+                unit = getattr(table[col], "unit", None)
+                if unit is not None:
+                    dset.attrs[f"{col}_unit"] = str(unit)
+        else:
+            dset = f[table_name]
+            dset.resize(dset.shape[0] + data.shape[0], axis=0)
+            dset[-data.shape[0] :] = data
+
+
+def copy_metadata_to_hdf5(src_file, dst_file, table_name):
+    """
+    Copy metadata (table column meta) from one HDF5 file to another.
+
+    For merging tables, this function ensures that the metadata is preserved.
+
+    Parameters
+    ----------
+    src_file : str or Path
+        Path to the source HDF5 file.
+    dst_file : str or Path
+        Path to the destination HDF5 file.
+    table_name : str
+        Name of the table whose metadata is to be copied.
+    """
+    with h5py.File(src_file, "r") as src, h5py.File(dst_file, "a") as dst:
+        meta_name = f"{table_name}.__table_column_meta__"
+        if meta_name in src:
+            if meta_name in dst:
+                del dst[meta_name]  # overwrite if exists
+            src.copy(meta_name, dst, name=meta_name)
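
For orientation, a minimal usage sketch of the new module (the table name "EVENTS" and the file names are hypothetical; the import path follows the file listing above):

    from simtools.io_operations.io_table_handler import merge_tables, read_table_list

    # Merge the "EVENTS" table from two HDF5 runs into one file.
    # All inputs must share one format; mixing HDF5 and FITS raises ValueError.
    merge_tables(
        input_files=["run1.hdf5", "run2.hdf5"],
        input_table_names=["EVENTS"],
        output_file="merged.hdf5",
    )

    # For HDF5 output, each input's table is written with an index suffix
    # ("EVENTS_0", "EVENTS_1", ...); list them via read_table_list.
    tables = read_table_list("merged.hdf5", ["EVENTS"], include_indexed_tables=True)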
simtools/job_execution/htcondor_script_generator.py
@@ -123,9 +123,9 @@ simtools-simulate-prod \\
     --nshow {args_dict["nshow"]} \\
     --energy_range {energy_range_string} \\
     --core_scatter {core_scatter_string} \\
-    --run_number_start $((process_id + {args_dict["run_number_start"]})) \\
+    --run_number $((process_id)) \\
+    --run_number_offset {args_dict["run_number_offset"]} \\
     --number_of_runs 1 \\
-    --submit_engine \"local\" \\
     --data_directory /tmp/simtools-data \\
     --output_path /tmp/simtools-output \\
     --log_level {args_dict["log_level"]} \\
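
The generated script now passes the raw HTCondor process id as --run_number and carries the fixed offset separately in --run_number_offset, instead of adding the two in shell arithmetic; the dropped --submit_engine option reflects the JobManager changes below. As a worked example (assuming the effective run number is offset plus run number, which the option names suggest but this hunk does not show): with process_id 3 and an offset of 200, the old template passed --run_number_start 203, while the new one passes --run_number 3 --run_number_offset 200.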
simtools/job_execution/job_manager.py
@@ -1,4 +1,4 @@
-"""Interface to workload managers like gridengine or HTCondor."""
+"""Interface to workload managers to run jobs on a compute node."""
 
 import logging
 import subprocess
@@ -15,78 +15,23 @@ class JobExecutionError(Exception):
 
 class JobManager:
     """
-    Interface to workload managers like gridengine or HTCondor.
+    Job manager for submitting jobs to a compute node.
 
-    Expects that jobs are described by shell scripts.
+    Expects that jobs can be described by shell scripts.
 
     Parameters
     ----------
-    submit_engine : str
-        Job submission system. Default is local.
     test : bool
         Testing mode without job submission.
     """
 
-    engines = {
-        "gridengine": "qsub",
-        "htcondor": "condor_submit",
-        "local": "",
-        "test_wms": "test_wms",  # used for testing only
-    }
-
-    def __init__(self, submit_engine=None, submit_options=None, test=False):
+    def __init__(self, test=False):
         """Initialize JobManager."""
         self._logger = logging.getLogger(__name__)
-        self.submit_engine = submit_engine
-        self.submit_options = submit_options
        self.test = test
         self.run_script = None
         self.run_out_file = None
 
-        self.check_submission_system()
-
-    @property
-    def submit_engine(self):
-        """Get the submit command."""
-        return self._submit_engine
-
-    @submit_engine.setter
-    def submit_engine(self, value):
-        """
-        Set the submit command.
-
-        Parameters
-        ----------
-        value : str
-            Name of submit engine.
-
-        Raises
-        ------
-        ValueError
-            if invalid submit engine.
-        """
-        self._submit_engine = value or "local"
-        if self._submit_engine not in self.engines:
-            raise ValueError(f"Invalid submit command: {self._submit_engine}")
-
-    def check_submission_system(self):
-        """
-        Check that the requested workload manager exist on the system.
-
-        Raises
-        ------
-        JobExecutionError
-            if workflow manager is not found.
-        """
-        try:
-            if self.submit_engine in (None, "local") or gen.program_is_executable(
-                self.engines[self.submit_engine]
-            ):
-                return
-        except KeyError:
-            pass
-        raise JobExecutionError(f"Submit engine {self.submit_engine} not found")
-
 
     def submit(self, run_script=None, run_out_file=None, log_file=None):
         """
         Submit a job described by a shell script.
@@ -110,18 +55,13 @@ class JobManager:
         self._logger.info(f"Job error stream {self.run_out_file + '.err'}")
         self._logger.info(f"Job log stream {self.run_out_file + '.job'}")
 
-        submit_result = 0
-        if self.submit_engine == "local":
-            submit_result = self._submit_local(log_file)
-        else:
-            submit_result = getattr(self, f"_submit_{self.submit_engine}")()
-
+        submit_result = self.submit_local(log_file)
         if submit_result != 0:
             raise JobExecutionError(f"Job submission failed with return code {submit_result}")
 
-    def _submit_local(self, log_file):
+    def submit_local(self, log_file):
         """
-        Run a job script on the command line (no submission to a workload manager).
+        Run a job script on the command line.
 
         Parameters
         ----------
@@ -161,57 +101,3 @@ class JobManager:
             raise JobExecutionError("See excerpt from log file above\n") from exc
 
         return result.returncode if result else 0
-
-    def _submit_htcondor(self):
-        """Submit a job described by a shell script to HTcondor."""
-        _condor_file = self.run_script + ".condor"
-        lines = [
-            f"Executable = {self.run_script}",
-            f"Output = {self.run_out_file}.out",
-            f"Error = {self.run_out_file}.err",
-            f"Log = {self.run_out_file}.job",
-        ]
-        if self.submit_options:
-            lines.extend(option.lstrip() for option in self.submit_options.split(","))
-        lines.append("queue 1")
-        try:
-            with open(_condor_file, "w", encoding="utf-8") as file:
-                file.write("\n".join(lines) + "\n")
-        except FileNotFoundError as exc:
-            self._logger.error(f"Failed creating condor submission file {_condor_file}")
-            raise JobExecutionError from exc
-
-        return self._execute(self.submit_engine, [self.engines[self.submit_engine], _condor_file])
-
-    def _submit_gridengine(self):
-        """Submit a job described by a shell script to gridengine."""
-        this_sub_cmd = [
-            self.engines[self.submit_engine],
-            "-o",
-            self.run_out_file + ".out",
-            "-e",
-            self.run_out_file + ".err",
-            self.run_script,
-        ]
-        return self._execute(self.submit_engine, this_sub_cmd)
-
-    def _execute(self, engine, shell_command):
-        """
-        Execute a shell command using a specific engine.
-
-        Parameters
-        ----------
-        engine : str
-            Engine to use.
-        shell_command : list
-            List of shell command plus arguments.
-        """
-        self._logger.info(f"Submitting script to {engine}")
-        self._logger.debug(shell_command)
-        result = None
-        if not self.test:
-            result = subprocess.run(shell_command, shell=True, check=True)
-        else:
-            self._logger.info(f"Testing ({engine}: {shell_command})")
-
-        return result.returncode if result else 0
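
After this refactoring, JobManager only runs scripts locally. A minimal sketch of the remaining interface, based on the hunks above (script and log paths are hypothetical):

    from simtools.job_execution.job_manager import JobManager

    job = JobManager(test=False)  # submit_engine and submit_options are gone
    job.submit(
        run_script="./run_simulation.sh",
        run_out_file="logs/run_simulation",  # '.out', '.err', '.job' are appended
    )

HTCondor submission appears to move to the dedicated script generator (see the htcondor_script_generator changes above) rather than going through JobManager.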