gammasimtools 0.5.1__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/METADATA +80 -28
- gammasimtools-0.6.1.dist-info/RECORD +91 -0
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/WHEEL +1 -1
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/entry_points.txt +4 -2
- simtools/_version.py +14 -2
- simtools/applications/add_file_to_db.py +2 -1
- simtools/applications/compare_cumulative_psf.py +10 -15
- simtools/applications/db_development_tools/add_new_parameter_to_db.py +12 -6
- simtools/applications/derive_mirror_rnda.py +95 -71
- simtools/applications/generate_corsika_histograms.py +216 -131
- simtools/applications/generate_default_metadata.py +110 -0
- simtools/applications/generate_simtel_array_histograms.py +192 -0
- simtools/applications/get_file_from_db.py +1 -1
- simtools/applications/get_parameter.py +3 -3
- simtools/applications/make_regular_arrays.py +89 -93
- simtools/applications/{plot_layout_array.py → plot_array_layout.py} +15 -14
- simtools/applications/print_array_elements.py +81 -34
- simtools/applications/produce_array_config.py +2 -2
- simtools/applications/production.py +39 -5
- simtools/applications/sim_showers_for_trigger_rates.py +26 -30
- simtools/applications/simulate_prod.py +49 -107
- simtools/applications/submit_data_from_external.py +8 -10
- simtools/applications/tune_psf.py +16 -18
- simtools/applications/validate_camera_efficiency.py +63 -9
- simtools/applications/validate_camera_fov.py +9 -13
- simtools/applications/validate_file_using_schema.py +127 -0
- simtools/applications/validate_optics.py +13 -15
- simtools/camera_efficiency.py +73 -80
- simtools/configuration/commandline_parser.py +52 -22
- simtools/configuration/configurator.py +98 -33
- simtools/constants.py +9 -0
- simtools/corsika/corsika_config.py +28 -22
- simtools/corsika/corsika_default_config.py +282 -0
- simtools/corsika/corsika_histograms.py +328 -282
- simtools/corsika/corsika_histograms_visualize.py +162 -163
- simtools/corsika/corsika_runner.py +8 -4
- simtools/corsika_simtel/corsika_simtel_runner.py +18 -23
- simtools/data_model/data_reader.py +129 -0
- simtools/data_model/metadata_collector.py +346 -118
- simtools/data_model/metadata_model.py +123 -218
- simtools/data_model/model_data_writer.py +79 -22
- simtools/data_model/validate_data.py +96 -46
- simtools/db_handler.py +67 -42
- simtools/io_operations/__init__.py +0 -0
- simtools/io_operations/hdf5_handler.py +112 -0
- simtools/{io_handler.py → io_operations/io_handler.py} +51 -22
- simtools/job_execution/job_manager.py +1 -1
- simtools/layout/{layout_array.py → array_layout.py} +168 -199
- simtools/layout/geo_coordinates.py +196 -0
- simtools/layout/telescope_position.py +12 -12
- simtools/model/array_model.py +16 -14
- simtools/model/camera.py +5 -8
- simtools/model/mirrors.py +136 -73
- simtools/model/model_utils.py +1 -69
- simtools/model/telescope_model.py +32 -25
- simtools/psf_analysis.py +26 -19
- simtools/ray_tracing.py +54 -26
- simtools/schemas/data.metaschema.yml +400 -0
- simtools/schemas/metadata.metaschema.yml +566 -0
- simtools/simtel/simtel_config_writer.py +14 -5
- simtools/simtel/simtel_histograms.py +266 -83
- simtools/simtel/simtel_runner.py +8 -7
- simtools/simtel/simtel_runner_array.py +7 -8
- simtools/simtel/simtel_runner_camera_efficiency.py +48 -2
- simtools/simtel/simtel_runner_ray_tracing.py +61 -25
- simtools/simulator.py +43 -50
- simtools/utils/general.py +232 -286
- simtools/utils/geometry.py +163 -0
- simtools/utils/names.py +294 -142
- simtools/visualization/legend_handlers.py +115 -9
- simtools/visualization/visualize.py +13 -13
- gammasimtools-0.5.1.dist-info/RECORD +0 -83
- simtools/applications/plot_simtel_histograms.py +0 -120
- simtools/applications/validate_schema_files.py +0 -135
- simtools/corsika/corsika_output_visualize.py +0 -345
- simtools/data_model/validate_schema.py +0 -285
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/LICENSE +0 -0
- {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/top_level.txt +0 -0
simtools/utils/general.py
CHANGED
@@ -1,19 +1,26 @@
+"""
+General functions useful across different parts of the code.
+"""
+
 import copy
+import json
 import logging
-import mmap
 import os
 import re
+import tempfile
 import time
+import urllib.error
+import urllib.request
 from collections import namedtuple
 from pathlib import Path
+from urllib.parse import urlparse
 
 import astropy.units as u
-import numpy as np
-from astropy.coordinates.errors import UnitsError
 from astropy.io.misc import yaml
 
 __all__ = [
-    "
+    "change_dict_keys_case",
+    "collect_data_from_file_or_dict",
     "collect_final_lines",
     "collect_kwargs",
     "InvalidConfigData",
@@ -21,14 +28,12 @@ __all__ = [
     "MissingRequiredConfigEntry",
     "UnableToIdentifyConfigEntry",
     "get_log_level_from_user",
-    "
+    "remove_substring_recursively_from_dict",
     "separate_args_and_config_data",
     "set_default_kwargs",
-    "sort_arrays",
     "validate_config_data",
     "get_log_excerpt",
-    "
-    "save_dict_to_file",
+    "sort_arrays",
 ]
 
 _logger = logging.getLogger(__name__)
@@ -50,33 +55,6 @@ class InvalidConfigData(Exception):
     """Exception for invalid configuration data."""
 
 
-def file_has_text(file, text):
-    """
-    Check whether a file contain a certain piece of text.
-
-    Parameters
-    ----------
-    file: str
-        Path of the file.
-    text: str
-        Piece of text to be searched for.
-
-    Returns
-    -------
-    bool
-        True if file has text.
-    """
-    with open(file, "rb", 0) as string_file, mmap.mmap(
-        string_file.fileno(), 0, access=mmap.ACCESS_READ
-    ) as text_file_input:
-        re_search_1 = re.compile(f"{text}".encode())
-        search_result_1 = re_search_1.search(text_file_input)
-        if search_result_1 is None:
-            return False
-
-        return True
-
-
 def validate_config_data(config_data, parameters, ignore_unidentified=False):
     """
     Validate a generic config_data dict by using the info
@@ -202,7 +180,7 @@ def _validate_and_convert_value_without_units(value, value_keys, par_name, par_i
 
 def _check_value_entry_length(value, par_name, par_info):
     """
-    Validate length of user input
+    Validate length of user input parameters
 
     Parameters
     ----------
@@ -320,14 +298,85 @@ def _validate_and_convert_value(par_name, par_info, value_in):
     return _validate_and_convert_value_with_units(value, value_keys, par_name, par_info)
 
 
-def
+def is_url(url):
+    """
+    Check if a string is a valid URL.
+
+    Parameters
+    ----------
+    url: str
+        String to be checked.
+
+    Returns
+    -------
+    bool
+        True if url is a valid URL.
+
+    """
+
+    try:
+        result = urlparse(url)
+        return all([result.scheme, result.netloc])
+    except AttributeError:
+        return False
+
+
+def collect_data_from_http(url):
+    """
+    Download yaml or json file from url and return it contents as dict.
+    File is downloaded as a temporary file and deleted afterwards.
+
+    Parameters
+    ----------
+    url: str
+        URL of the yaml/json file.
+
+    Returns
+    -------
+    dict
+        Dictionary containing the file content.
+
+    Raises
+    ------
+    TypeError
+        If url is not a valid URL.
+    urllib.error.HTTPError
+        If downloading the yaml file fails.
+
+    """
+
+    _logger.debug(f"Downloaded yaml file from {url}")
+    try:
+        with tempfile.NamedTemporaryFile() as tmp_file:
+            urllib.request.urlretrieve(url, tmp_file.name)
+            if url.endswith("yml") or url.endswith("yaml"):
+                data = yaml.load(tmp_file)
+            elif url.endswith("json"):
+                data = json.load(tmp_file)
+            else:
+                msg = f"File extension of {url} not supported (should be json or yaml)"
+                _logger.error(msg)
+                raise TypeError(msg)
+    except TypeError:
+        msg = "Invalid url {url}"
+        _logger.error(msg)
+        raise
+    except urllib.error.HTTPError:
+        msg = f"Failed to download yaml file from {url}"
+        _logger.error(msg)
+        raise
+
+    return data
+
+
+def collect_data_from_file_or_dict(file_name, in_dict, allow_empty=False):
     """
-    Collect input data that can be given either as a dict or as a yaml file.
+    Collect input data that can be given either as a dict or as a yaml/json file.
 
     Parameters
     ----------
-
-        Name of the yaml file.
+    file_name: str
+        Name of the yaml/json file.
     in_dict: dict
        Data as dict.
    allow_empty: bool
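
The new `is_url` and `collect_data_from_http` helpers added above can be combined to read a remote YAML/JSON file into a dict. A minimal sketch, assuming the module is imported as `gen`; the URL is a placeholder, not a file shipped with the package:

    from simtools.utils import general as gen

    url = "https://example.org/telescope_positions.yml"  # placeholder URL

    if gen.is_url(url):
        # Downloads to a temporary file, parses it by extension (.yml/.yaml/.json),
        # and returns the parsed content as a dict.
        data = gen.collect_data_from_http(url)
    else:
        raise ValueError(f"Not a valid URL: {url}")
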
@@ -339,60 +388,30 @@ def collect_data_from_yaml_or_dict(in_yaml, in_dict, allow_empty=False):
         Data as dict.
     """
 
-    if
+    if file_name is not None:
         if in_dict is not None:
-            _logger.warning("Both in_dict
-
-        data =
+            _logger.warning("Both in_dict and file_name were given - file_name will be used")
+        if is_url(str(file_name)):
+            data = collect_data_from_http(file_name)
+        elif Path(file_name).suffix.lower() == ".json":
+            with open(file_name, encoding="utf-8") as file:
+                data = json.load(file)
+        else:
+            with open(file_name, encoding="utf-8") as file:
+                data = yaml.load(file)
         return data
     if in_dict is not None:
         return dict(in_dict)
 
-    msg = "Input has not been provided (neither by
+    msg = "Input has not been provided (neither by file, nor by dict)"
     if allow_empty:
         _logger.debug(msg)
         return None
 
-    _logger.
+    _logger.debug(msg)
     raise InvalidConfigData(msg)
 
 
-def collect_dict_from_file(file_path, file_name=None):
-    """
-    Collect input data from a file.
-
-    File_path can be a yaml file name or a directory.
-    In the latter case, file_name is used to find the file.
-
-    Note that this method returns an empty dict if the file is not found
-    (while gen.collect_data_from_yaml_or_dict returns None).
-
-    Parameters
-    ----------
-    file_path: str
-        Name of the yaml file or directory.
-    file_name: str
-        Name of the file to be found in the directory.
-
-    """
-
-    _dict = {}
-    try:
-        _dict = (
-            collect_data_from_yaml_or_dict(in_yaml=file_path, in_dict=None, allow_empty=True) or {}
-        )
-    except IsADirectoryError:
-        try:
-            _file = Path(file_path).joinpath(file_name)
-            _dict = (
-                collect_data_from_yaml_or_dict(in_yaml=_file, in_dict=None, allow_empty=True) or {}
-            )
-        except (TypeError, KeyError):
-            pass
-
-    return _dict
-
-
 def collect_kwargs(label, in_kwargs):
     """
     Collect kwargs of the type label_* and return them as a dict.
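
`collect_data_from_yaml_or_dict` is renamed to `collect_data_from_file_or_dict` and now also accepts JSON files and URLs. Callers migrating from 0.5.1 would change along these lines (illustrative sketch, file names are placeholders):

    from simtools.utils import general as gen

    # 0.5.1: data = gen.collect_data_from_yaml_or_dict(in_yaml="config.yml", in_dict=None)
    # 0.6.1: file_name may be a local .yml/.yaml/.json file or a URL.
    data = gen.collect_data_from_file_or_dict(file_name="config.yml", in_dict=None)

    # A dict can still be passed directly; file_name takes precedence when both are given.
    data = gen.collect_data_from_file_or_dict(file_name=None, in_dict={"zenith_angle": "20 deg"})
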
@@ -437,27 +456,6 @@ def set_default_kwargs(in_kwargs, **kwargs):
     return in_kwargs
 
 
-def sort_arrays(*args):
-    """Sort arrays
-
-    Parameters
-    ----------
-    *args
-        Arguments to be sorted.
-    Returns
-    -------
-    list
-        Sorted args.
-    """
-
-    order_array = copy.copy(args[0])
-    new_args = []
-    for arg in args:
-        _, value = zip(*sorted(zip(order_array, arg)))
-        new_args.append(list(value))
-    return new_args
-
-
 def collect_final_lines(file, n_lines):
     """
     Collect final lines.
@@ -477,7 +475,7 @@ def collect_final_lines(file, n_lines):
     list_of_lines = []
 
     if Path(file).suffix == ".gz":
-        import gzip
+        import gzip  # pylint: disable=import-outside-toplevel
 
         file_open_function = gzip.open
     else:
@@ -573,8 +571,8 @@ def copy_as_list(value):
 
 def separate_args_and_config_data(expected_args, **kwargs):
     """
-    Separate kwargs into the arguments expected for instancing a class and the dict to be given as
-    config_data. This function is specific for methods from_kwargs in classes which use the
+    Separate kwargs into the arguments expected for instancing a class and the dict to be given as
+    config_data. This function is specific for methods from_kwargs in classes which use the
     validate_config_data system.
 
     Parameters
@@ -621,6 +619,7 @@ def program_is_executable(program):
             if is_exe(exe_file):
                 return exe_file
     except KeyError:
+        _logger.debug("PATH environment variable is not set.")
         return None
 
     return None
@@ -686,6 +685,47 @@ def find_file(name, loc):
     raise FileNotFoundError(msg)
 
 
+def get_log_excerpt(log_file, n_last_lines=30):
+    """
+    Get an excerpt from a log file, namely the n_last_lines of the file.
+
+    Parameters
+    ----------
+    log_file: str or Path
+        Log file to get the excerpt from.
+    n_last_lines: int
+        Number of last lines of the file to get.
+
+    Returns
+    -------
+    str
+        Excerpt from log file with header/footer
+    """
+
+    return (
+        "\n\nRuntime error - See below the relevant part of the log/err file.\n\n"
+        f"{log_file}\n"
+        "====================================================================\n\n"
+        f"{collect_final_lines(log_file, n_last_lines)}\n\n"
+        "====================================================================\n"
+    )
+
+
+def get_file_age(file_path):
+    """
+    Get the age of a file in seconds since the last modification.
+    """
+    if not Path(file_path).is_file():
+        raise FileNotFoundError(f"'{file_path}' does not exist or is not a file.")
+
+    file_stats = os.stat(file_path)
+    modification_time = file_stats.st_mtime
+    current_time = time.time()
+
+    file_age_minutes = (current_time - modification_time) / 60
+    return file_age_minutes
+
+
 def change_dict_keys_case(data_dict, lower_case=True):
     """
     Change keys of a dictionary to lower or upper case. Crawls through the dictionary and changes\
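
The new `get_log_excerpt` and `get_file_age` helpers might be used as in the sketch below (the log path is a placeholder; note that the value returned by `get_file_age` is in minutes, although its docstring mentions seconds):

    from simtools.utils import general as gen

    log_file = "simtel_run_000001.log"  # placeholder path

    # Age since last modification, in minutes in this version.
    if gen.get_file_age(log_file) > 60:
        print(f"{log_file} is older than one hour")

    # Last 30 lines of the log wrapped in a header/footer for error reports.
    print(gen.get_log_excerpt(log_file, n_last_lines=30))
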
@@ -719,220 +759,126 @@ def change_dict_keys_case(data_dict, lower_case=True):
             else:
                 _return_dict[_key_changed] = data_dict[key]
     except AttributeError:
-        _logger
-        _logger.error(f"Invalid method argument: {data_dict}")
+        _logger.error(f"Input is not a proper dictionary: {data_dict}")
         raise
-
     return _return_dict
 
 
-
-def rotate(x, y, rotation_around_z_axis, rotation_around_y_axis=0):
+def remove_substring_recursively_from_dict(data_dict, substring="\n"):
     """
-
-
-    `rotation_angle_around_y_axis` allows to rotate the observation plane in space.
-    The function returns the rotated x and y values in the same unit given.
-    The direction of rotation of the elements in the plane is counterclockwise, i.e.,
-    the rotation of the coordinate system is clockwise.
+    Remove substrings from all strings in a dictionary. Recursively crawls through the dictionary
+    This e.g., allows to remove all newline characters from a dictionary.
 
     Parameters
     ----------
-
-
-
-
-    rotation_angle_around_z_axis: astropy.units.rad
-        Angle to rotate the array in the observation plane (around z axis) in radians.
-    rotation_angle_around_y_axis: astropy.units.rad
-        Angle to rotate the observation plane around the y axis in radians.
-
-    Returns
-    -------
-    2-tuple of list
-        x and y positions of the rotated entry (e.g. telescopes) positions.
+    data_dict: dict
+        Dictionary to be converted.
+    substring: str
+        Substring to be removed.
 
     Raises
     ------
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    if not isinstance(y, (list, np.ndarray)):
-        y = [y]
-
-    if len(x) != len(y):
-        raise RuntimeError(
-            "Cannot perform coordinate transformation when x and y have different lengths."
-        )
-    if all(isinstance(variable, (u.Quantity)) for variable in [x, y]):
-        if not isinstance(x[0].unit, type(y[0].unit)):
-            raise UnitsError(
-                "Cannot perform coordinate transformation when x and y have different units."
-            )
-
-    x_trans = np.cos(rotation_around_y_axis) * (
-        x * np.cos(rotation_around_z_axis) - y * np.sin(rotation_around_z_axis)
-    )
-    y_trans = x * np.sin(rotation_around_z_axis) + y * np.cos(rotation_around_z_axis)
-
-    return x_trans, y_trans
+    AttributeError:
+        if input is not a proper dictionary.
+    """
+    try:
+        for key, value in data_dict.items():
+            if isinstance(value, str):
+                data_dict[key] = value.replace(substring, "")
+            elif isinstance(value, list):
+                modified_items = [
+                    item.replace(substring, "") if isinstance(item, str) else item for item in value
+                ]
+                modified_items = [
+                    remove_substring_recursively_from_dict(item, substring)
+                    if isinstance(item, dict)
+                    else item
+                    for item in modified_items
+                ]
+                data_dict[key] = modified_items
+            elif isinstance(value, dict):
+                data_dict[key] = remove_substring_recursively_from_dict(value, substring)
+    except AttributeError:
+        _logger.debug(f"Input is not a dictionary: {data_dict}")
+    return data_dict
 
 
-def
-    """
-    Get an excerpt from a log file, namely the n_last_lines of the file.
+def sort_arrays(*args):
+    """Sort arrays
 
     Parameters
     ----------
-
-
-    n_last_lines: int
-        Number of last lines of the file to get.
-
+    *args
+        Arguments to be sorted.
     Returns
     -------
-
-
+    list
+        Sorted args.
     """
 
-
-
-
-
-
-
-
+    if len(args) == 0:
+        return args
+    order_array = copy.copy(args[0])
+    new_args = []
+    for arg in args:
+        _, value = zip(*sorted(zip(order_array, arg)))
+        new_args.append(list(value))
+    return new_args
 
 
-def
+def extract_type_of_value(value) -> str:
     """
-
+    Extract the string representation of the the type of a value.
+    For example, for a string, it returns 'str' rather than '<class 'str'>'.
+    Take into account also the case where the value is a numpy type.
     """
-
-
-
-
-
-    current_time = time.time()
+    _type = str(type(value))
+    if "numpy" in _type:
+        return re.sub(r"\d+", "", _type.split("'")[1].split(".")[-1])
+    if "astropy" in _type:
+        raise NotImplementedError("Astropy types are not supported yet.")
 
-
-    return
+    _type = _type.split("'")[1]
+    return _type
 
 
-def
+def get_value_unit_type(value):
     """
-
+    Get the value, unit and type of a value.
+    The value is stripped of its unit and the unit is returned
+    in its string form (i.e., to_string()).
+    The type is returned as a string representation of the type.
+    For example, for a string, it returns 'str' rather than '<class 'str'>'.
+
+    Note that Quantities are always floats, even if the original value is represented as an int.
 
     Parameters
     ----------
-
-
-    xaxis: numpy.array
-        The values of the x axis (histogram edges) on the ground.
-    yaxis: numpy.array
-        The values of the y axis (histogram edges) on the ground.
-    bins: float
-        Number of bins in distance.
-    max_dist: float
-        Maximum distance to consider in the 1D histogram, usually in meters.
+    value: str, int, float, bool, u.Quantity
+        Value to be parsed.
 
     Returns
     -------
-
-
-
-        The edges of the 1D histogram with size = int(max_dist/bin_size) + 1.
-
-    """
-
-    # Check if the histogram will make sense
-    bins_step = 2 * max_dist / bins  # in the 2D array, the positive and negative direction count.
-    for axis in [xaxis, yaxis]:
-        if (bins_step < np.diff(axis)).any():
-            msg = (
-                f"The histogram with number of bins {bins} and maximum distance of {max_dist} "
-                f"resulted in a bin size smaller than the original array. Please adjust those "
-                f"parameters to increase the bin size and avoid nan in the histogram values."
-            )
-            _logger.warning(msg)
-            break
-
-    grid_2d_x, grid_2d_y = np.meshgrid(xaxis[:-1], yaxis[:-1])  # [:-1], since xaxis and yaxis are
-    # the hist edges (n + 1).
-    # radial_distance_map maps the distance to the center from each element in a square matrix.
-    radial_distance_map = np.sqrt(grid_2d_x**2 + grid_2d_y**2)
-    # The sorting and unravel_index give us the two indices for the position of the sorted element
-    # in the original 2d matrix
-    x_indices_sorted, y_indices_sorted = np.unravel_index(
-        np.argsort(radial_distance_map, axis=None), np.shape(radial_distance_map)
-    )
-
-    # We construct a 1D array with the histogram counts sorted according to the distance to the
-    # center.
-    hist_sorted = np.array(
-        [hist2d[i_x, i_y] for i_x, i_y in zip(x_indices_sorted, y_indices_sorted)]
-    )
-    distance_sorted = np.sort(radial_distance_map, axis=None)
-
-    # For larger distances, we have more elements in a slice 'dr' in radius, hence, we need to
-    # acount for it using weights below.
-
-    weights, radial_edges = np.histogram(distance_sorted, bins=bins, range=(0, max_dist))
-    histogram_1D = np.empty_like(weights, dtype=float)
-
-    for i_radial, _ in enumerate(radial_edges[:-1]):
-        # Here we sum all the events within a radial interval 'dr' and then divide by the number of
-        # bins that fit this interval.
-        indices_to_sum = (distance_sorted >= radial_edges[i_radial]) * (
-            distance_sorted < radial_edges[i_radial + 1]
-        )
-        histogram_1D[i_radial] = np.sum(hist_sorted[indices_to_sum]) / weights[i_radial]
-    return histogram_1D, radial_edges
-
-
-def save_dict_to_file(dictionary, file_name):
+    type of value, str, str
+        Value, unit in string representation (to_string())),
+        and string representation of the type of the value.
     """
-    Save dictionary to a file.
 
-
-
-
-
-
-
-
-
-
-
-
-
+    base_value = value
+    base_unit = None
+    base_type = ""
+    if isinstance(value, (str, u.Quantity)):
+        try:
+            _quantity_value = u.Quantity(value)
+            base_value = _quantity_value.value
+            base_type = extract_type_of_value(base_value)
+            if _quantity_value.unit.to_string() != "":
+                base_unit = _quantity_value.unit.to_string()
+        except TypeError:
+            base_value = value
+            base_type = "str"
+    else:
+        base_value = value
+        base_type = extract_type_of_value(base_value)
 
-
-    _logger.info(f"Exporting histogram configuration to {file_name}")
-    try:
-        with open(file_name, "w", encoding="utf-8") as file:
-            yaml.dump(dictionary, file)
-    except IOError:
-        msg = f"Failed to write to {file_name}."
-        _logger.error(msg)
-        raise
+    return base_value, base_unit, base_type
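
A minimal sketch of the value-parsing and dictionary helpers added at the end of the module; the inputs are illustrative and the results in comments follow from the code shown above rather than from a verified run:

    import astropy.units as u
    from simtools.utils import general as gen

    # A quantity given as a string or as an astropy Quantity is split into
    # value, unit and type, e.g. roughly (20.0, "deg", "float").
    print(gen.get_value_unit_type("20 deg"))
    print(gen.get_value_unit_type(5 * u.m))

    # A string that cannot be parsed as a quantity falls back to (value, None, "str").
    print(gen.get_value_unit_type("LSTN-01"))

    # Remove newlines from every string in a (nested) dictionary.
    meta = {"description": "array layout\nfor testing", "entries": ["a\nb", {"k": "c\nd"}]}
    print(gen.remove_substring_recursively_from_dict(meta, substring="\n"))

    # sort_arrays orders all given sequences by the order of the first one.
    x, y = gen.sort_arrays([3, 1, 2], ["c", "a", "b"])  # -> [1, 2, 3], ["a", "b", "c"]
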