gammasimtools 0.26.0__py3-none-any.whl → 0.27.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/METADATA +5 -1
  2. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/RECORD +70 -66
  3. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/entry_points.txt +1 -1
  5. simtools/_version.py +2 -2
  6. simtools/applications/convert_geo_coordinates_of_array_elements.py +2 -1
  7. simtools/applications/db_get_array_layouts_from_db.py +1 -1
  8. simtools/applications/{calculate_incident_angles.py → derive_incident_angle.py} +16 -16
  9. simtools/applications/derive_mirror_rnda.py +111 -177
  10. simtools/applications/generate_corsika_histograms.py +38 -1
  11. simtools/applications/generate_regular_arrays.py +73 -36
  12. simtools/applications/simulate_flasher.py +3 -13
  13. simtools/applications/simulate_illuminator.py +2 -10
  14. simtools/applications/simulate_pedestals.py +1 -1
  15. simtools/applications/simulate_prod.py +8 -7
  16. simtools/applications/submit_data_from_external.py +2 -1
  17. simtools/applications/validate_camera_efficiency.py +28 -27
  18. simtools/applications/validate_cumulative_psf.py +1 -3
  19. simtools/applications/validate_optics.py +2 -1
  20. simtools/atmosphere.py +83 -0
  21. simtools/camera/camera_efficiency.py +171 -48
  22. simtools/camera/single_photon_electron_spectrum.py +6 -6
  23. simtools/configuration/commandline_parser.py +47 -9
  24. simtools/constants.py +5 -0
  25. simtools/corsika/corsika_config.py +88 -185
  26. simtools/corsika/corsika_histograms.py +246 -69
  27. simtools/data_model/model_data_writer.py +46 -49
  28. simtools/data_model/schema.py +2 -0
  29. simtools/db/db_handler.py +4 -2
  30. simtools/db/mongo_db.py +2 -2
  31. simtools/io/ascii_handler.py +52 -4
  32. simtools/io/io_handler.py +23 -12
  33. simtools/job_execution/job_manager.py +154 -79
  34. simtools/job_execution/process_pool.py +137 -0
  35. simtools/layout/array_layout.py +0 -1
  36. simtools/layout/array_layout_utils.py +143 -21
  37. simtools/model/array_model.py +22 -50
  38. simtools/model/calibration_model.py +4 -4
  39. simtools/model/model_parameter.py +123 -73
  40. simtools/model/model_utils.py +40 -1
  41. simtools/model/site_model.py +4 -4
  42. simtools/model/telescope_model.py +4 -5
  43. simtools/ray_tracing/incident_angles.py +87 -6
  44. simtools/ray_tracing/mirror_panel_psf.py +337 -217
  45. simtools/ray_tracing/psf_analysis.py +57 -42
  46. simtools/ray_tracing/psf_parameter_optimisation.py +3 -2
  47. simtools/ray_tracing/ray_tracing.py +37 -10
  48. simtools/runners/corsika_runner.py +52 -191
  49. simtools/runners/corsika_simtel_runner.py +74 -100
  50. simtools/runners/runner_services.py +214 -213
  51. simtools/runners/simtel_runner.py +27 -155
  52. simtools/runners/simtools_runner.py +9 -69
  53. simtools/schemas/application_workflow.metaschema.yml +8 -0
  54. simtools/settings.py +19 -0
  55. simtools/simtel/simtel_config_writer.py +0 -55
  56. simtools/simtel/simtel_seeds.py +184 -0
  57. simtools/simtel/simulator_array.py +115 -103
  58. simtools/simtel/simulator_camera_efficiency.py +66 -42
  59. simtools/simtel/simulator_light_emission.py +110 -123
  60. simtools/simtel/simulator_ray_tracing.py +78 -63
  61. simtools/simulator.py +135 -346
  62. simtools/testing/sim_telarray_metadata.py +13 -11
  63. simtools/testing/validate_output.py +87 -19
  64. simtools/utils/general.py +6 -17
  65. simtools/utils/random.py +36 -0
  66. simtools/visualization/plot_corsika_histograms.py +2 -0
  67. simtools/visualization/plot_incident_angles.py +48 -1
  68. simtools/visualization/plot_psf.py +160 -18
  69. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/licenses/LICENSE +0 -0
  70. {gammasimtools-0.26.0.dist-info → gammasimtools-0.27.1.dist-info}/top_level.txt +0 -0
simtools/db/db_handler.py CHANGED
@@ -260,9 +260,7 @@ class DatabaseHandler:
260
260
  collection,
261
261
  )
262
262
  if cache_dict:
263
- self._logger.debug(f"Found {array_element} in cache (key: {cache_key})")
264
263
  return cache_dict
265
- self._logger.debug(f"Did not find {array_element} in cache (key: {cache_key})")
266
264
 
267
265
  try:
268
266
  parameter_version_table = production_table["parameters"][array_element]
@@ -873,6 +871,10 @@ class DatabaseHandler:
873
871
  array_element_name,
874
872
  ]
875
873
  except KeyError as exc:
874
+ # simplified model definitions when e.g. adding new telescopes without design model
875
+ if settings.config.args.get("ignore_missing_design_model", False):
876
+ element_type = names.get_array_element_type_from_name(array_element_name)
877
+ return [array_element_name, f"{element_type}-01", f"{element_type}-design"]
876
878
  raise KeyError(
877
879
  f"Failed generated array element list for db query for {array_element_name}"
878
880
  ) from exc
simtools/db/mongo_db.py CHANGED
@@ -4,7 +4,7 @@ import io
4
4
  import logging
5
5
  import re
6
6
  from pathlib import Path
7
- from threading import Lock
7
+ from threading import Lock as _Lock
8
8
 
9
9
  import gridfs
10
10
  import jsonschema
@@ -126,7 +126,7 @@ class MongoDBHandler: # pylint: disable=unsubscriptable-object
126
126
  """
127
127
 
128
128
  db_client: MongoClient = None
129
- _lock = Lock()
129
+ _lock = _Lock()
130
130
  _logger = logging.getLogger(__name__)
131
131
 
132
132
  def __init__(self, db_config=None):
@@ -10,7 +10,7 @@ import astropy.units as u
10
10
  import numpy as np
11
11
  import yaml
12
12
 
13
- from simtools.utils.general import is_url
13
+ from simtools.utils.general import ensure_iterable, is_url
14
14
 
15
15
  _logger = logging.getLogger(__name__)
16
16
 
@@ -189,9 +189,9 @@ def is_utf8_file(file_name):
189
189
  return False
190
190
 
191
191
 
192
- def write_data_to_file(data, output_file, sort_keys=False, numpy_types=False):
192
+ def write_data_to_file(data, output_file, sort_keys=False, numpy_types=False, unique_lines=False):
193
193
  """
194
- Write structured data to JSON or YAML file.
194
+ Write structured data to JSON, YAML, or text file.
195
195
 
196
196
  The file type is determined by the file extension.
197
197
 
@@ -213,12 +213,43 @@ def write_data_to_file(data, output_file, sort_keys=False, numpy_types=False):
213
213
  if output_file.suffix.lower() in [".yml", ".yaml"]:
214
214
  _write_to_yaml(data, output_file, sort_keys)
215
215
  return
216
+ if output_file.suffix.lower() in [".txt", ".list"]:
217
+ _write_to_text_file(data, output_file, unique_lines)
218
+ return
216
219
 
217
220
  raise ValueError(
218
221
  f"Unsupported file type {output_file.suffix}. Only .json, .yml, and .yaml are supported."
219
222
  )
220
223
 
221
224
 
225
+ def _write_to_text_file(data, output_file, unique_lines):
226
+ """
227
+ Write data to a text file.
228
+
229
+ Parameters
230
+ ----------
231
+ data: list
232
+ Data to be written to the file.
233
+ output_file: Path
234
+ Name of the file to be written.
235
+ unique_lines: bool
236
+ If True, write only unique lines.
237
+
238
+ """
239
+
240
+ def iter_lines(data):
241
+ for entry in ensure_iterable(data):
242
+ yield from entry.splitlines()
243
+
244
+ lines_to_write = (
245
+ list(dict.fromkeys(iter_lines(data))) if unique_lines else list(iter_lines(data))
246
+ )
247
+
248
+ with open(output_file, "w", encoding="utf-8") as file:
249
+ for line in lines_to_write:
250
+ file.write(f"{line}\n")
251
+
252
+
222
253
  def _write_to_json(data, output_file, sort_keys, numpy_types):
223
254
  """
224
255
  Write data to a JSON file.
@@ -259,8 +290,25 @@ def _write_to_yaml(data, output_file, sort_keys):
259
290
  If True, sort the keys.
260
291
 
261
292
  """
293
+ data = _to_builtin(data)
262
294
  with open(output_file, "w", encoding="utf-8") as file:
263
- yaml.dump(data, file, indent=4, sort_keys=sort_keys, explicit_start=True)
295
+ yaml.dump(data, file, indent=2, sort_keys=sort_keys, explicit_start=True)
296
+
297
+
298
+ def _to_builtin(data):
299
+ """Convert numpy types to native Python types for yaml output."""
300
+ if isinstance(data, u.Quantity):
301
+ return {
302
+ "value": float(data.value),
303
+ "unit": str(data.unit),
304
+ }
305
+ if isinstance(data, np.generic):
306
+ return data.item()
307
+ if isinstance(data, dict):
308
+ return {k: _to_builtin(v) for k, v in data.items()}
309
+ if isinstance(data, (list, tuple)):
310
+ return [_to_builtin(v) for v in data]
311
+ return data
264
312
 
265
313
 
266
314
  class JsonNumpyEncoder(json.JSONEncoder):
simtools/io/io_handler.py CHANGED
@@ -22,12 +22,10 @@ class IOHandler(metaclass=IOHandlerSingleton):
22
22
  def __init__(self):
23
23
  """Initialize IOHandler."""
24
24
  self.logger = logging.getLogger(__name__)
25
- self.logger.debug("Init IOHandler")
26
-
27
- self.output_path = None
25
+ self.output_path = {}
28
26
  self.model_path = None
29
27
 
30
- def set_paths(self, output_path=None, model_path=None):
28
+ def set_paths(self, output_path=None, model_path=None, output_path_label="default"):
31
29
  """
32
30
  Set paths for input and output.
33
31
 
@@ -37,11 +35,13 @@ class IOHandler(metaclass=IOHandlerSingleton):
37
35
  Path pointing to the output directory.
38
36
  model_path: str or Path
39
37
  Path pointing to the model file directory.
38
+ output_path_label: str
39
+ Label for the output path.
40
40
  """
41
- self.output_path = output_path
41
+ self.output_path[output_path_label] = output_path
42
42
  self.model_path = model_path
43
43
 
44
- def get_output_directory(self, sub_dir=None):
44
+ def get_output_directory(self, sub_dir=None, output_path_label="default"):
45
45
  """
46
46
  Create and get path of an output directory.
47
47
 
@@ -49,6 +49,8 @@ class IOHandler(metaclass=IOHandlerSingleton):
49
49
  ----------
50
50
  sub_dir: str or list of str, optional
51
51
  Name of the subdirectory (ray_tracing, model etc)
52
+ output_path_label: str
53
+ Label for the output path.
52
54
 
53
55
  Returns
54
56
  -------
@@ -65,16 +67,19 @@ class IOHandler(metaclass=IOHandlerSingleton):
65
67
  parts = sub_dir
66
68
  else:
67
69
  parts = [sub_dir]
68
- path = Path(self.output_path, *parts)
70
+ try:
71
+ output_path = Path(self.output_path[output_path_label], *parts)
72
+ except KeyError as exc:
73
+ raise KeyError(f"Output path label '{output_path_label}' not found") from exc
69
74
 
70
75
  try:
71
- path.mkdir(parents=True, exist_ok=True)
76
+ output_path.mkdir(parents=True, exist_ok=True)
72
77
  except FileNotFoundError as exc:
73
- raise FileNotFoundError(f"Error creating directory {path!s}") from exc
78
+ raise FileNotFoundError(f"Error creating directory {output_path!s}") from exc
74
79
 
75
- return path.resolve()
80
+ return output_path.resolve()
76
81
 
77
- def get_output_file(self, file_name, sub_dir=None):
82
+ def get_output_file(self, file_name, sub_dir=None, output_path_label="default"):
78
83
  """
79
84
  Get path of an output file.
80
85
 
@@ -84,12 +89,18 @@ class IOHandler(metaclass=IOHandlerSingleton):
84
89
  File name.
85
90
  sub_dir: sub_dir: str or list of str, optional
86
91
  Name of the subdirectory (ray_tracing, model etc)
92
+ output_path_label: str
93
+ Label for the output path.
87
94
 
88
95
  Returns
89
96
  -------
90
97
  Path
91
98
  """
92
- return self.get_output_directory(sub_dir).joinpath(file_name).absolute()
99
+ return (
100
+ self.get_output_directory(sub_dir, output_path_label=output_path_label)
101
+ .joinpath(file_name)
102
+ .absolute()
103
+ )
93
104
 
94
105
  def get_test_data_file(self, file_name=None):
95
106
  """
@@ -1,6 +1,8 @@
1
1
  """Interface to workload managers to run jobs on a compute node."""
2
2
 
3
3
  import logging
4
+ import os
5
+ import stat
4
6
  import subprocess
5
7
  import time
6
8
  from pathlib import Path
@@ -55,91 +57,164 @@ def retry_command(command, max_attempts=3, delay=10):
55
57
  return False
56
58
 
57
59
 
58
- class JobManager:
60
+ def submit(
61
+ command,
62
+ stdin=None,
63
+ out_file=None,
64
+ err_file=None,
65
+ configuration=None,
66
+ application_log=None,
67
+ runtime_environment=None,
68
+ env=None,
69
+ test=False,
70
+ ):
59
71
  """
60
- Job manager for submitting jobs to a compute node.
72
+ Submit a job described by a command or a shell script.
61
73
 
62
- Expects that jobs can be described by shell scripts.
74
+ Allow to specify a runtime environment (e.g., Docker).
63
75
 
64
76
  Parameters
65
77
  ----------
66
- test : bool
78
+ command: str
79
+ Command or shell script to execute.
80
+ stdin: str or Path
81
+ Input stream.
82
+ out_file: str or Path
83
+ Output stream (stdout if out_file and err_file are None).
84
+ err_file: str or Path
85
+ Error stream (stderr if out_file and err_file are None).
86
+ configuration: dict
87
+ Configuration for the 'command' execution.
88
+ runtime_environment: list
89
+ Command to run the application in the specified runtime environment.
90
+ env: dict
91
+ Environment variables to set for the job execution.
92
+ application_log: str or Path
93
+ The log file of the actual application.
94
+ Provided in order to print the log excerpt in case of run time error.
95
+ test: bool
67
96
  Testing mode without sub submission.
68
97
  """
98
+ command = _build_command(command, configuration, runtime_environment)
99
+
100
+ logger.info(f"Submitting command {command}")
101
+ logger.info(f"Job output/error streams {out_file} / {err_file}")
102
+
103
+ if test:
104
+ logger.info("Testing mode enabled")
105
+ return None
106
+
107
+ sub_process_env = os.environ.copy()
108
+ if env:
109
+ for key, value in env.items():
110
+ sub_process_env[key] = value
111
+ logger.debug(f"Setting environment variables for job execution: {sub_process_env}")
112
+
113
+ # disable pylint warning about not closing files here (explicitly closed in finally block)
114
+ stdout = open(out_file, "w", encoding="utf-8") if out_file else subprocess.PIPE # pylint: disable=consider-using-with
115
+ stderr = open(err_file, "w", encoding="utf-8") if err_file else subprocess.PIPE # pylint: disable=consider-using-with
116
+
117
+ try:
118
+ result = subprocess.run(
119
+ command,
120
+ shell=isinstance(command, str),
121
+ check=True,
122
+ text=True,
123
+ stdin=stdin,
124
+ stdout=stdout,
125
+ stderr=stderr,
126
+ env=sub_process_env,
127
+ )
128
+
129
+ except subprocess.CalledProcessError as exc:
130
+ _raise_job_execution_error(exc, out_file, err_file, application_log)
131
+ finally:
132
+ if stdout != subprocess.PIPE:
133
+ stdout.close()
134
+ if stderr != subprocess.PIPE:
135
+ stderr.close()
136
+
137
+ return result
138
+
139
+
140
+ def _build_command(command, configuration=None, runtime_environment=None):
141
+ """Build command to run in the specified runtime environment."""
142
+ if isinstance(command, (str, Path)) and Path(command).is_file():
143
+ command = Path(command)
144
+ command.chmod(command.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP)
145
+ command = str(command)
146
+
147
+ if runtime_environment:
148
+ if isinstance(runtime_environment, list):
149
+ command = [*runtime_environment, command]
150
+ else:
151
+ command = [runtime_environment, command]
152
+
153
+ if configuration:
154
+ if isinstance(command, list):
155
+ command = command + _convert_dict_to_args(configuration)
156
+ else:
157
+ command = [command, *_convert_dict_to_args(configuration)]
158
+
159
+ return command
160
+
161
+
162
+ def _convert_dict_to_args(parameters):
163
+ """
164
+ Convert a dictionary of parameters to a list of command line arguments.
69
165
 
70
- def __init__(self, test=False):
71
- """Initialize JobManager."""
72
- self._logger = logging.getLogger(__name__)
73
- self.test = test
74
- self.run_script = None
75
- self.run_out_file = None
76
-
77
- def submit(self, run_script=None, run_out_file=None, log_file=None):
78
- """
79
- Submit a job described by a shell script.
80
-
81
- Parameters
82
- ----------
83
- run_script: str
84
- Shell script describing the job to be submitted.
85
- run_out_file: str or Path
86
- Redirect output/error/job stream to this file (out,err,job suffix).
87
- log_file: str or Path
88
- The log file of the actual simulator (CORSIKA or sim_telarray).
89
- Provided in order to print the log excerpt in case of run time error.
90
- """
91
- self.run_script = str(run_script)
92
- run_out_file = Path(run_out_file)
93
- self.run_out_file = str(run_out_file.parent.joinpath(run_out_file.stem))
94
-
95
- self._logger.info(f"Submitting script {self.run_script}")
96
- self._logger.info(f"Job output stream {self.run_out_file + '.out'}")
97
- self._logger.info(f"Job error stream {self.run_out_file + '.err'}")
98
- self._logger.info(f"Job log stream {self.run_out_file + '.job'}")
99
-
100
- submit_result = self.submit_local(log_file)
101
- if submit_result != 0:
102
- raise JobExecutionError(f"Job submission failed with return code {submit_result}")
103
-
104
- def submit_local(self, log_file):
105
- """
106
- Run a job script on the command line.
107
-
108
- Parameters
109
- ----------
110
- log_file: str or Path
111
- The log file of the actual simulator (CORSIKA or sim_telarray).
112
- Provided in order to print the log excerpt in case of run time error.
113
-
114
- Returns
115
- -------
116
- int
117
- Return code of the executed script
118
- """
119
- self._logger.info("Running script locally")
120
-
121
- if self.test:
122
- self._logger.info("Testing (local)")
123
- return 0
124
-
125
- result = None
126
- try:
127
- with (
128
- open(f"{self.run_out_file}.out", "w", encoding="utf-8") as stdout,
129
- open(f"{self.run_out_file}.err", "w", encoding="utf-8") as stderr,
130
- ):
131
- result = subprocess.run(
132
- f"{self.run_script}",
133
- shell=True,
134
- check=True,
135
- text=True,
136
- stdout=stdout,
137
- stderr=stderr,
138
- )
139
- except subprocess.CalledProcessError as exc:
140
- self._logger.error(gen.get_log_excerpt(f"{self.run_out_file}.err"))
141
- if log_file.exists() and gen.get_file_age(log_file) < 5:
142
- self._logger.error(gen.get_log_excerpt(log_file))
143
- raise JobExecutionError("See excerpt from log file above\n") from exc
166
+ Parameters
167
+ ----------
168
+ parameters : dict
169
+ Dictionary containing parameters to convert.
170
+
171
+ Returns
172
+ -------
173
+ list
174
+ List of command line arguments.
175
+ """
176
+ args = []
177
+ for key, value in parameters.items():
178
+ if isinstance(value, bool):
179
+ if value:
180
+ args.append(f"--{key}")
181
+ elif isinstance(value, list):
182
+ args.extend([f"--{key}", *(str(item) for item in value)])
183
+ else:
184
+ args.extend([f"--{key}", str(value)])
185
+ return args
186
+
187
+
188
+ def _raise_job_execution_error(exc, out_file, err_file, application_log):
189
+ """
190
+ Raise job execution error with log excerpt.
191
+
192
+ Parameters
193
+ ----------
194
+ exc: subprocess.CalledProcessError
195
+ The caught exception.
196
+ out_file: str or Path
197
+ Output stream file path.
198
+ err_file: str or Path
199
+ Error stream file path.
200
+ application_log: str or Path
201
+ The log file of the actual application.
202
+ """
203
+ logger.error(f"Job execution failed with return code {exc.returncode}")
204
+ logger.error(f"stderr: {exc.stderr}")
205
+
206
+ if out_file:
207
+ logger.error(f"Output log excerpt from {out_file}:\n{gen.get_log_excerpt(out_file)}")
208
+
209
+ if err_file:
210
+ logger.error(f"Error log excerpt from {err_file}:\n{gen.get_log_excerpt(err_file)}")
211
+
212
+ if application_log:
213
+ log = Path(application_log)
214
+ if log.exists() and gen.get_file_age(log) < 5:
215
+ logger.error(
216
+ f"Application log excerpt from {application_log}:\n"
217
+ f"{gen.get_log_excerpt(application_log)}"
218
+ )
144
219
 
145
- return result.returncode if result else 0
220
+ raise JobExecutionError("See excerpt from log file above") from exc
@@ -0,0 +1,137 @@
1
+ """Run work in parallel and keep results ordered.
2
+
3
+ Notes
4
+ -----
5
+ This module provides small wrappers around ``concurrent.futures.ProcessPoolExecutor``
6
+ that make it easy to:
7
+
8
+ - preserve input order in the returned results;
9
+ - choose how many worker processes to use;
10
+ - optionally run per-process initialization code; and
11
+ - select a multiprocessing start method (e.g. ``"fork"`` or ``"spawn"``).
12
+
13
+ Examples
14
+ --------
15
+ Parallelize a function with multiple arguments using tuples:
16
+
17
+ .. code-block:: python
18
+
19
+ from simtools.job_execution.process_pool import process_pool_map_ordered
20
+
21
+ def power(args):
22
+ base, exp = args
23
+ return base ** exp
24
+
25
+ inputs = [(2, 3), (3, 2), (4, 0)]
26
+ results = process_pool_map_ordered(power, inputs, max_workers=3)
27
+ # results == [8, 9, 1]
28
+
29
+ Parallelize methods that require an object instance:
30
+
31
+ .. code-block:: python
32
+
33
+ from simtools.ray_tracing import MirrorPanelPSF
34
+ from simtools.job_execution.process_pool import process_pool_map_ordered
35
+
36
+ def worker_function(args):
37
+ mirror_idx, instance = args
38
+ measured_psf_mm = float(instance.measured_data[mirror_idx])
39
+ return instance.optimize_single_mirror(mirror_idx, measured_psf_mm)
40
+
41
+ instance = MirrorPanelPSF(label="test", args_dict=args)
42
+ worker_inputs = [(i, instance) for i in range(n_mirrors)]
43
+ results = process_pool_map_ordered(worker_function, worker_inputs)
44
+ # results contains per-mirror optimization outputs
45
+ """
46
+
47
+ import logging
48
+ import os
49
+ from concurrent.futures import ProcessPoolExecutor, as_completed
50
+ from multiprocessing import get_context
51
+ from typing import Any, TypeVar
52
+
53
+ logger = logging.getLogger(__name__)
54
+
55
+ T = TypeVar("T") # type of input items
56
+ R = TypeVar("R") # type of return values
57
+
58
+
59
+ def process_pool_map_ordered(
60
+ func,
61
+ items,
62
+ max_workers=None,
63
+ mp_start_method="fork",
64
+ initializer=None,
65
+ initargs=(),
66
+ ):
67
+ """Apply ``func`` to each item in ``items`` using a process pool.
68
+
69
+ Parameters
70
+ ----------
71
+ func : callable
72
+ Function to apply to each item.
73
+ items : iterable
74
+ Items to process.
75
+ max_workers : int or None
76
+ Number of worker processes. If ``None`` or ``<= 0``, uses ``os.cpu_count()``.
77
+ mp_start_method : str or None
78
+ Multiprocessing start method (e.g. ``"fork"``, ``"spawn"``). If ``None``,
79
+ uses the default context.
80
+ initializer : callable or None
81
+ Optional per-process initializer.
82
+ initargs : tuple
83
+ Arguments passed to ``initializer``.
84
+
85
+ Returns
86
+ -------
87
+ list
88
+ Results ordered to match the input item order.
89
+
90
+ Raises
91
+ ------
92
+ Exception
93
+ Any exception raised by a worker process is re-raised when collecting
94
+ the corresponding future result.
95
+
96
+ Notes
97
+ -----
98
+ This helper submits all items at once and collects results as futures
99
+ complete.
100
+ """
101
+ item_list = list(items)
102
+ n_items = len(item_list)
103
+
104
+ if max_workers is None or int(max_workers) <= 0:
105
+ max_workers = os.cpu_count() or 1
106
+
107
+ # create a temporary list of Nones to hold results in input order
108
+ results: list[R] = [None] * n_items # type: ignore[list-item]
109
+
110
+ ctx = None
111
+ if mp_start_method:
112
+ ctx = get_context(str(mp_start_method))
113
+
114
+ logger.info(
115
+ "Starting ProcessPoolExecutor: n_items=%d, max_workers=%s, start_method=%s",
116
+ n_items,
117
+ str(max_workers),
118
+ str(mp_start_method),
119
+ )
120
+
121
+ executor_kwargs: dict[str, Any] = {
122
+ "max_workers": int(max_workers),
123
+ "initializer": initializer,
124
+ "initargs": tuple(initargs),
125
+ }
126
+ if ctx is not None:
127
+ executor_kwargs["mp_context"] = ctx
128
+
129
+ with ProcessPoolExecutor(**executor_kwargs) as executor:
130
+ future_to_index = {
131
+ executor.submit(func, item): index for index, item in enumerate(item_list)
132
+ }
133
+ for fut in as_completed(future_to_index):
134
+ index = future_to_index[fut]
135
+ results[index] = fut.result()
136
+
137
+ return results
@@ -331,7 +331,6 @@ class ArrayLayout:
331
331
  astropy.table.QTable
332
332
  Table with the telescope layout information.
333
333
  """
334
- self._logger.debug("Initializing array (site and telescope parameters)")
335
334
  self._initialize_site_parameters_from_db()
336
335
  self._initialize_coordinate_systems()
337
336