fameio 3.1.0__py3-none-any.whl → 3.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fameio/cli/__init__.py +2 -3
- fameio/cli/convert_results.py +6 -4
- fameio/cli/make_config.py +6 -4
- fameio/cli/options.py +3 -3
- fameio/cli/parser.py +43 -31
- fameio/input/__init__.py +1 -9
- fameio/input/loader/__init__.py +9 -7
- fameio/input/loader/controller.py +64 -14
- fameio/input/loader/loader.py +14 -7
- fameio/input/metadata.py +37 -18
- fameio/input/resolver.py +5 -4
- fameio/input/scenario/__init__.py +7 -8
- fameio/input/scenario/agent.py +52 -19
- fameio/input/scenario/attribute.py +28 -29
- fameio/input/scenario/contract.py +161 -52
- fameio/input/scenario/exception.py +45 -22
- fameio/input/scenario/fameiofactory.py +63 -7
- fameio/input/scenario/generalproperties.py +17 -6
- fameio/input/scenario/scenario.py +111 -28
- fameio/input/scenario/stringset.py +27 -8
- fameio/input/schema/__init__.py +5 -5
- fameio/input/schema/agenttype.py +29 -11
- fameio/input/schema/attribute.py +174 -84
- fameio/input/schema/java_packages.py +8 -5
- fameio/input/schema/schema.py +35 -9
- fameio/input/validator.py +58 -42
- fameio/input/writer.py +139 -41
- fameio/logs.py +23 -17
- fameio/output/__init__.py +5 -1
- fameio/output/agent_type.py +93 -27
- fameio/output/conversion.py +48 -30
- fameio/output/csv_writer.py +88 -18
- fameio/output/data_transformer.py +12 -21
- fameio/output/input_dao.py +68 -32
- fameio/output/output_dao.py +26 -4
- fameio/output/reader.py +61 -18
- fameio/output/yaml_writer.py +18 -9
- fameio/scripts/__init__.py +9 -2
- fameio/scripts/convert_results.py +144 -52
- fameio/scripts/convert_results.py.license +1 -1
- fameio/scripts/exception.py +7 -0
- fameio/scripts/make_config.py +34 -12
- fameio/scripts/make_config.py.license +1 -1
- fameio/series.py +132 -47
- fameio/time.py +88 -37
- fameio/tools.py +9 -8
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/METADATA +19 -13
- fameio-3.2.0.dist-info/RECORD +56 -0
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/WHEEL +1 -1
- CHANGELOG.md +0 -279
- fameio-3.1.0.dist-info/RECORD +0 -56
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSE.txt +0 -0
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
- {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/entry_points.txt +0 -0
fameio/cli/__init__.py
CHANGED
@@ -1,5 +1,4 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-
-from fameio.cli.parser import update_default_config
+from fameio.cli.parser import update_default_config  # noqa: F401
fameio/cli/convert_results.py
CHANGED
@@ -1,8 +1,10 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 import argparse
-from typing import Any
+from typing import Any
 
 from fameio.cli.options import Options, ResolveOptions, TimeOptions
 from fameio.cli.parser import (
@@ -38,7 +40,7 @@ _INFILE_PATH_HELP = "Provide path to protobuf file"
 _OUTFILE_PATH_HELP = "Provide path to folder to store output .csv files"
 
 
-def handle_args(args: list[str], defaults:
+def handle_args(args: list[str], defaults: dict[Options, Any] | None = None) -> dict[Options, Any]:
     """
     Handles command line arguments and returns `run_config` for convert_results script
 
@@ -54,7 +56,7 @@ def handle_args(args: list[str], defaults: Optional[dict[Options, Any]] = None)
     return map_namespace_to_options_dict(parsed)
 
 
-def _prepare_parser(defaults:
+def _prepare_parser(defaults: dict[Options, Any] | None) -> argparse.ArgumentParser:
     """
     Creates a parser with given defaults to handle `make_config` configuration arguments
 
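The widened `handle_args` signature above still returns a plain dictionary keyed by `Options`. A minimal usage sketch, assuming the script's built-in defaults are sufficient when no `defaults` mapping is passed; "results.pb" is a placeholder path:

from fameio.cli.convert_results import handle_args
from fameio.cli.options import Options

# -f/--file points to the protobuf results file; all other options fall back to their defaults
run_config = handle_args(["-f", "results.pb"])
print(run_config[Options.FILE])  # the parsed input path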
fameio/cli/make_config.py
CHANGED
@@ -1,9 +1,11 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 import argparse
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 from fameio.cli.options import Options
 from fameio.cli.parser import (
@@ -31,7 +33,7 @@ _ENCODING_HELP = (
 )
 
 
-def handle_args(args: list[str], defaults:
+def handle_args(args: list[str], defaults: dict[Options, Any] | None = None) -> dict[Options, Any]:
     """
     Converts given `arguments` and returns a configuration for the make_config script
 
@@ -47,7 +49,7 @@ def handle_args(args: list[str], defaults: Optional[dict[Options, Any]] = None)
     return map_namespace_to_options_dict(parsed)
 
 
-def _prepare_parser(defaults:
+def _prepare_parser(defaults: dict[Options, Any] | None) -> argparse.ArgumentParser:
     """
     Creates a parser with given defaults to handle `make_config` configuration arguments
 
fameio/cli/options.py
CHANGED
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 import argparse
@@ -12,8 +12,8 @@ class ParsableEnum(Enum):
     def instantiate(cls, name: str) -> Enum:
         try:
             return cls[name]
-        except KeyError:
-            raise argparse.ArgumentTypeError(f"'{name}' is not a valid option")
+        except KeyError as e:
+            raise argparse.ArgumentTypeError(f"'{name}' is not a valid option") from e
 
     def __str__(self):
         return self.name
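Chaining the `ArgumentTypeError` with `from e` keeps the original `KeyError` attached as `__cause__`, so the failed enum lookup stays visible in tracebacks. A standalone sketch of the pattern (the `Color` enum is made up, not fameio code):

import argparse
from enum import Enum


class Color(Enum):
    RED = 1
    GREEN = 2


def instantiate(name: str) -> Color:
    try:
        return Color[name]
    except KeyError as e:
        # "from e" records the KeyError as the cause of the argparse error
        raise argparse.ArgumentTypeError(f"'{name}' is not a valid option") from e


try:
    instantiate("BLUE")
except argparse.ArgumentTypeError as err:
    print(err, "| caused by:", repr(err.__cause__))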
fameio/cli/parser.py
CHANGED
@@ -1,18 +1,19 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 import copy
 from argparse import ArgumentParser, ArgumentTypeError, BooleanOptionalAction, Namespace
-from enum import Enum
 from pathlib import Path
-from typing import
+from typing import Any
 
 from fameio.cli.options import TimeOptions, ResolveOptions, Options
 from fameio.logs import LogLevel
 
 _ERR_INVALID_MERGING_DEFAULT = "Invalid merge-times default: needs list of 3 integers separated by spaces but was: '{}'"
 
-_OPTION_ARGUMENT_NAME: dict[str,
+_OPTION_ARGUMENT_NAME: dict[str, Options] = {
     "file": Options.FILE,
     "log": Options.LOG_LEVEL,
     "logfile": Options.LOG_FILE,
@@ -28,7 +29,7 @@ _OPTION_ARGUMENT_NAME: dict[str, Union[Options, dict]] = {
 }
 
 
-def add_file_argument(parser: ArgumentParser, default:
+def add_file_argument(parser: ArgumentParser, default: Path | None, help_text: str) -> None:
     """
     Adds 'file' argument to the provided `parser` with the provided `help_text`.
     If a default is not specified, the argument is required (optional otherwise)
@@ -44,16 +45,16 @@ def add_file_argument(parser: ArgumentParser, default: Optional[Path], help_text
         parser.add_argument("-f", "--file", type=Path, required=True, help=help_text)
 
 
-def add_select_agents_argument(parser: ArgumentParser,
+def add_select_agents_argument(parser: ArgumentParser, default_value: list[str] | None) -> None:
     """Adds optional repeatable string argument 'agent' to given `parser`"""
-    help_text = "Provide list of agents to extract (default=
-    parser.add_argument("-a", "--agents", nargs="*", type=str, default=
+    help_text = f"Provide list of agents to extract (default={default_value})"
+    parser.add_argument("-a", "--agents", nargs="*", type=str, default=default_value, help=help_text)
 
 
-def add_logfile_argument(parser: ArgumentParser,
+def add_logfile_argument(parser: ArgumentParser, default_value: Path | None) -> None:
     """Adds optional argument 'logfile' to given `parser`"""
-    help_text = "provide logging file (default=
-    parser.add_argument("-lf", "--logfile", type=Path, default=
+    help_text = f"provide logging file (default={default_value})"
+    parser.add_argument("-lf", "--logfile", type=Path, default=default_value, help=help_text)
 
 
 def add_output_argument(parser: ArgumentParser, default_value, help_text: str) -> None:
@@ -63,7 +64,7 @@ def add_output_argument(parser: ArgumentParser, default_value, help_text: str) -
 
 def add_log_level_argument(parser: ArgumentParser, default_value: str) -> None:
     """Adds optional argument 'log' to given `parser`"""
-    help_text = "choose logging level (default
+    help_text = f"choose logging level (default={default_value})"
     # noinspection PyTypeChecker
     parser.add_argument(
         "-l",
@@ -75,14 +76,14 @@ def add_log_level_argument(parser: ArgumentParser, default_value: str) -> None:
     )
 
 
-def add_encoding_argument(parser: ArgumentParser, default_value:
+def add_encoding_argument(parser: ArgumentParser, default_value: str | None, help_text: str) -> None:
     """Adds optional argument `enc` to given parser"""
     parser.add_argument("-enc", "--encoding", type=str, default=default_value, help=help_text)
 
 
 def add_single_export_argument(parser: ArgumentParser, default_value: bool) -> None:
     """Adds optional repeatable string argument 'agent' to given `parser`"""
-    help_text = "Enable export of single agents (default=
+    help_text = f"Enable export of single agents (default={default_value})"
     parser.add_argument(
         "-se",
         "--single-export",
@@ -94,7 +95,7 @@ def add_single_export_argument(parser: ArgumentParser, default_value: bool) -> N
 
 def add_memory_saving_argument(parser: ArgumentParser, default_value: bool) -> None:
     """Adds optional bool argument to given `parser` to enable memory saving mode"""
-    help_text = "Reduces memory usage profile at the cost of runtime (default=
+    help_text = f"Reduces memory usage profile at the cost of runtime (default={default_value})"
     parser.add_argument(
         "-m",
         "--memory-saving",
@@ -104,10 +105,10 @@ def add_memory_saving_argument(parser: ArgumentParser, default_value: bool) -> N
     )
 
 
-def add_resolve_complex_argument(parser: ArgumentParser, default_value:
+def add_resolve_complex_argument(parser: ArgumentParser, default_value: ResolveOptions | str):
     """Instructs given `parser` how to deal with complex field outputs"""
     default_value = default_value if isinstance(default_value, ResolveOptions) else ResolveOptions[default_value]
-    help_text = f"How to deal with complex index columns? (default={default_value})"
+    help_text = f"How to deal with complex index columns? (default={default_value.name})"
     parser.add_argument(
         "-cc",
         "--complex-column",
@@ -118,10 +119,10 @@ def add_resolve_complex_argument(parser: ArgumentParser, default_value: Union[Re
     )
 
 
-def add_time_argument(parser: ArgumentParser, default_value:
+def add_time_argument(parser: ArgumentParser, default_value: TimeOptions | str) -> None:
     """Adds optional argument to given `parser` to define conversion of TimeSteps"""
     default_value = default_value if isinstance(default_value, TimeOptions) else TimeOptions[default_value]
-    help_text = "Apply conversion of time steps to given format (default=
+    help_text = f"Apply conversion of time steps to given format (default={default_value.name})"
     parser.add_argument(
         "-t",
         "--time",
@@ -132,14 +133,14 @@ def add_time_argument(parser: ArgumentParser, default_value: Union[TimeOptions,
     )
 
 
-def add_merge_time_argument(parser: ArgumentParser, defaults:
+def add_merge_time_argument(parser: ArgumentParser, defaults: list[int] | None = None) -> None:
     """Adds optional three-fold argument for merging of TimeSteps to given `parser`"""
     if defaults is None:
         defaults = []
     if (
         not isinstance(defaults, list)
         or len(defaults) not in [0, 3]
-        or not all(
+        or not all(isinstance(value, int) for value in defaults)
     ):
         raise ArgumentTypeError(_ERR_INVALID_MERGING_DEFAULT.format(repr(defaults)))
 
@@ -150,22 +151,33 @@ def add_merge_time_argument(parser: ArgumentParser, defaults: Optional[list[int]
     parser.add_argument("-mt", "--merge-times", type=int, nargs=3, default=defaults, help=help_text)
 
 
-def add_inputs_recovery_argument(parser: ArgumentParser,
+def add_inputs_recovery_argument(parser: ArgumentParser, default_value: bool) -> None:
     """Adds optional bool argument to given `parser` to recover inputs"""
-
+    arg_name = "input-recovery"
+    default_str = "--" + ("no-" if not default_value else "") + arg_name
+    help_text = f"If --(no-)input-recovery is specified, (no) inputs will be recovered (default={default_str})"
     parser.add_argument(
-        "--
+        f"--{arg_name}",
         action=BooleanOptionalAction,
-        default=
+        default=default_value,
         help=help_text,
     )
 
 
-def update_default_config(
-    """
-
-
-
+def update_default_config(overrides: dict[Options, Any] | None, defaults: dict[Options, Any]) -> dict[Options, Any]:
+    """
+    Returns `defaults` with updated fields received from `overrides`
+
+    Args:
+        overrides: updates to be applied to `defaults`
+        defaults: base values, possibly replaced by options specified in `config`
+
+    Returns:
+        Deep copy of given `defaults` with updates values as specified in `overrides`
+    """
+    result = copy.deepcopy(defaults)
+    if overrides:
+        for name, option in overrides.items():
             result[name] = option
     return result
 
@@ -183,7 +195,7 @@ def map_namespace_to_options_dict(parsed: Namespace) -> dict[Options, Any]:
     return _map_namespace_to_options(parsed, _OPTION_ARGUMENT_NAME)
 
 
-def _map_namespace_to_options(parsed: Namespace, names_to_options: dict[str,
+def _map_namespace_to_options(parsed: Namespace, names_to_options: dict[str, Options]) -> dict[Options, Any]:
     """
     Maps given parsing results to their corresponding configuration option; elements that cannot be mapped are ignored.
     If a configuration option has inner elements, these well be also read and added as inner dictionary.
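Two behavioural details follow from the hunks above: `--input-recovery` is now registered via `BooleanOptionalAction`, so argparse also generates `--no-input-recovery` and the passed `default_value` decides which spelling is the default; and `update_default_config` deep-copies `defaults` before applying `overrides`, so caller-supplied default dictionaries are never mutated. A small sketch of the latter, with made-up default values:

from pathlib import Path

from fameio.cli.options import Options
from fameio.cli.parser import update_default_config

defaults = {Options.FILE: None, Options.LOG_LEVEL: "WARNING"}
config = update_default_config({Options.FILE: Path("scenario.yaml")}, defaults)

print(config[Options.FILE])       # scenario.yaml
print(config[Options.LOG_LEVEL])  # WARNING (kept from defaults)
print(defaults[Options.FILE])     # None (the defaults themselves stay untouched)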
fameio/input/__init__.py
CHANGED
@@ -4,24 +4,16 @@
 
 
 class InputError(Exception):
-    """An error that occurred while
-
-    pass
+    """An error that occurred while preparing a fame input file"""
 
 
 class SchemaError(InputError):
     """An error that occurred while parsing a Schema"""
 
-    pass
-
 
 class ScenarioError(InputError):
     """An error that occurred while parsing a Scenario"""
 
-    pass
-
 
 class YamlLoaderError(InputError):
     """An error that occurred while parsing a YAML file"""
-
-    pass
fameio/input/loader/__init__.py
CHANGED
@@ -1,16 +1,18 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 from pathlib import Path
 from typing import Any
 
 import yaml
 
 from fameio.input import YamlLoaderError
-from fameio.input.resolver import PathResolver
 from fameio.input.loader.controller import LoaderController
 from fameio.input.loader.loader import FameYamlLoader
-from fameio.
+from fameio.input.resolver import PathResolver
+from fameio.logs import log, log_critical
 
 ALLOWED_SUFFIXES: tuple[str, ...] = (".yaml", ".yml")
 
@@ -29,7 +31,7 @@ def _include_callback(own_loader: FameYamlLoader, args: yaml.Node) -> Any:
 FameYamlLoader.add_constructor(FameYamlLoader.INCLUDE_COMMAND, _include_callback)
 
 
-def load_yaml(yaml_file_path: Path, path_resolver: PathResolver = PathResolver(), encoding: str = None) -> dict:
+def load_yaml(yaml_file_path: Path, path_resolver: PathResolver = PathResolver(), encoding: str | None = None) -> dict:
     """
     Loads the YAML file from given and returns its content as a dict
 
@@ -42,14 +44,14 @@ def load_yaml(yaml_file_path: Path, path_resolver: PathResolver = PathResolver()
         Content of the specified YAML file
 
     Raises:
-        YamlLoaderError: if the YAML file could not be read
+        YamlLoaderError: if the YAML file could not be found, read, or parsed
     """
     log().info(_INFO_LOADING.format(yaml_file_path))
     _update_current_controller(path_resolver, encoding)
     return __CONTROLLERS[0].load(yaml_file_path)
 
 
-def _update_current_controller(path_resolver: PathResolver, encoding: str) -> None:
+def _update_current_controller(path_resolver: PathResolver, encoding: str | None) -> None:
     """Updates the current LoaderController to use the given `path_resolver` and `encoding`"""
     __CONTROLLERS[0] = LoaderController(path_resolver, encoding)
 
@@ -65,4 +67,4 @@ def validate_yaml_file_suffix(yaml_file: Path) -> None:
         YamlLoaderError: if given file has no YAML-associated file suffix
     """
     if yaml_file.suffix.lower() not in ALLOWED_SUFFIXES:
-
+        raise log_critical(YamlLoaderError(_ERR_NO_YAML_SUFFIX.format(ALLOWED_SUFFIXES, yaml_file)))
fameio/input/loader/controller.py
CHANGED
@@ -1,6 +1,8 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 from fnmatch import fnmatch
 from pathlib import Path
 from typing import Callable, IO, Any, Final
@@ -8,9 +10,9 @@ from typing import Callable, IO, Any, Final
 import yaml
 
 from fameio.input import YamlLoaderError
-from fameio.input.resolver import PathResolver
 from fameio.input.loader.loader import FameYamlLoader
-from fameio.
+from fameio.input.resolver import PathResolver
+from fameio.logs import log, log_critical
 
 
 class LoaderController:
@@ -22,6 +24,8 @@ class LoaderController:
     DISABLING_YAML_FILE_PREFIX: Final[str] = "IGNORE_"
     NODE_SPLIT_STRING: Final[str] = ":"
 
+    _ERR_FILE_OPEN_ERROR = "Could not open file: '{}'"
+    _ERR_FILE_LOAD_ERROR = "Could not parse YAML file due to errors in (line:column): ({}:{})"
     _ERR_NODE_MISSING = "'!include_node [{}, {}]': Cannot find '{}'"
     _ERR_NOT_LIST = "!include can only combine list-like elements from multiple files!"
     _WARN_NOTHING_TO_INCLUDE = "Could not find any files matching this '!include' directive '{}'"
@@ -31,23 +35,70 @@ class LoaderController:
     _DEBUG_LOAD_FILE = "Loaded included YAML file '{}'"
     _DEBUG_FILES_INCLUDED = "!include directive '{}' yielded these files: '{}'"
 
-    def __init__(self, path_resolver: PathResolver = PathResolver(), encoding: str = None) -> None:
+    def __init__(self, path_resolver: PathResolver = PathResolver(), encoding: str | None = None) -> None:
         self._path_resolver = path_resolver
-        self._encoding: str = encoding
+        self._encoding: str | None = encoding
 
     def load(self, yaml_file_path: Path) -> dict:
-        """
-
-
+        """
+        Spawns a new FameYamlLoader, loads the given `yaml_file_path` and returns its content
+
+        Args:
+            yaml_file_path: path to YAML file that is to be loaded
+
+        Returns:
+            dictionary representation of loaded file
+
+        Raises:
+            YamlLoaderError: if file could not be read, logged with level "CRITICAL"
+        """
+        try:
+            with open(yaml_file_path, "r", encoding=self._encoding) as configfile:
+                try:
+                    data = yaml.load(configfile, self._spawn_loader_builder())  # type: ignore[arg-type]
+                except yaml.YAMLError as e:
+                    line, column = self._get_problem_position(e)
+                    raise log_critical(YamlLoaderError(self._ERR_FILE_LOAD_ERROR.format(line, column))) from e
+        except OSError as e:
+            raise log_critical(YamlLoaderError(self._ERR_FILE_OPEN_ERROR.format(yaml_file_path))) from e
         return data
 
     @staticmethod
     def _spawn_loader_builder() -> Callable[[IO], FameYamlLoader]:
         """Returns a new Callable that instantiates a new FameYamlLoader with an IO-stream"""
-        return lambda stream: FameYamlLoader(stream)
+        return lambda stream: FameYamlLoader(stream)  # pylint: disable=unnecessary-lambda
+
+    @staticmethod
+    def _get_problem_position(exception: yaml.YAMLError) -> tuple[str, str]:
+        """
+        Returns problematic line and column from given error (if available)
+
+        Args:
+            exception: error thrown by yaml.load()
+
+        Returns:
+            Line and Column of error (if available), else a tuple of questions marks
+        """
+        if hasattr(exception, "problem_mark"):
+            mark = exception.problem_mark
+            return str(mark.line + 1), str(mark.column + 1)
+        return "?", "?"
 
     def include(self, loader: FameYamlLoader, include_args: yaml.Node) -> Any:
-        """
+        """
+        Returns content loaded from the specified `include_args`
+
+        Args:
+            loader: the YAML loader to be used to load the file(s) that are to be included
+            include_args: arguments of include statement
+
+        Returns:
+            content of file as specified by include
+
+        Raises:
+            YamlLoaderError: If !include statement could not be interpreted, included files could not be read,
+                or multiple included files could not be joined - logged with level "CRITICAL"
+        """
         root_path, file_pattern, node_pattern = loader.digest_include(include_args)
         files = self._resolve_imported_path(root_path, file_pattern)
         nodes = node_pattern.split(self.NODE_SPLIT_STRING)
@@ -100,13 +151,13 @@
         if node:
             if node not in data.keys():
                 message = LoaderController._ERR_NODE_MISSING.format(file_name, node_address, node)
-
+                raise log_critical(YamlLoaderError(message))
             data = data[node]
         log().debug(LoaderController._DEBUG_SEARCH_NODE.format(file_name, node_address))
         return data
 
     @staticmethod
-    def _join_data(new_data: list, previous_data: list) -> list:
+    def _join_data(new_data: list, previous_data: list | None) -> list:
         """
         Joins two lists with data to a larger list
 
@@ -125,5 +176,4 @@
         if isinstance(new_data, list) and isinstance(previous_data, list):
             previous_data.extend(new_data)
             return previous_data
-
-        log_critical_and_raise(YamlLoaderError(LoaderController._ERR_NOT_LIST))
+        raise log_critical(YamlLoaderError(LoaderController._ERR_NOT_LIST))
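The new error reporting in `LoaderController.load` relies on PyYAML attaching a `problem_mark` (zero-based line and column) to parser errors, which `_get_problem_position` converts into the one-based pair used in `_ERR_FILE_LOAD_ERROR`. A standalone illustration of that PyYAML mechanism:

import yaml

broken = "key: [unclosed"
try:
    yaml.safe_load(broken)
except yaml.YAMLError as e:
    if hasattr(e, "problem_mark") and e.problem_mark is not None:
        mark = e.problem_mark
        print(f"Could not parse YAML file due to errors in (line:column): ({mark.line + 1}:{mark.column + 1})")
    else:
        print("Could not parse YAML file (position unknown)")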
fameio/input/loader/loader.py
CHANGED
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 from os import path
@@ -7,7 +7,7 @@ from typing import IO, Final
 import yaml
 
 from fameio.input import YamlLoaderError
-from fameio.logs import
+from fameio.logs import log, log_critical
 
 
 class FameYamlLoader(yaml.SafeLoader):
@@ -43,9 +43,10 @@ class FameYamlLoader(yaml.SafeLoader):
         `root` is a path to the current file that was read by this FameYamlLoader,
         `files` is a file pattern,
         and nodes is an optional address (list of nodes) for name for the node that is to be returned
+
+        Raises:
+            YamlLoaderError: If !include statement could not be interpreted, logged with level "CRITICAL"
         """
-        node_string = ""
-        file_pattern = None
         if isinstance(node, yaml.nodes.ScalarNode):
             file_pattern, node_string = self._read_scalar_node(node)
         elif isinstance(node, yaml.nodes.SequenceNode):
@@ -53,7 +54,7 @@ class FameYamlLoader(yaml.SafeLoader):
         elif isinstance(node, yaml.nodes.MappingNode):
             file_pattern, node_string = self._read_mapping_node(node)
         else:
-
+            raise log_critical(YamlLoaderError(self._ERR_NODE_TYPE.format(node)))
         return self._root_path, file_pattern, node_string
 
     def _read_scalar_node(self, args: yaml.nodes.ScalarNode) -> tuple[str, str]:
@@ -79,10 +80,13 @@ class FameYamlLoader(yaml.SafeLoader):
 
         Returns:
             first part of argument as file path, the second part of argument as node-address
+
+        Raises:
+            YamlLoaderError: if argument count is not 1 or 2, logged with level "CRITICAL"
         """
         argument_list = self.construct_sequence(args)
         if len(argument_list) not in [1, 2]:
-
+            raise log_critical(YamlLoaderError(self._ERR_ARGUMENT_COUNT.format(str(args))))
 
         file_pattern = argument_list[0]
         node_string = argument_list[1] if len(argument_list) == 2 else ""
@@ -98,10 +102,13 @@ class FameYamlLoader(yaml.SafeLoader):
 
         Returns:
             file argument as file path, node argument as node-address
+
+        Raises:
+            YamlLoaderError: if "file" key is missing, logged with level "CRITICAL"
         """
         argument_map = {str(k).lower(): v for k, v in self.construct_mapping(args).items()}
        if "file" not in argument_map.keys():
-
+            raise log_critical(YamlLoaderError(self._ERR_FILE_KEY_MISSING.format(str(args))))
 
         file_pattern = argument_map["file"]
         node_string = argument_map.get("node", "")
fameio/input/metadata.py
CHANGED
@@ -1,10 +1,13 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
 from abc import ABC, abstractmethod
-from typing import Any,
+from typing import Any, final, Final
 
 from fameio.input import InputError
+from fameio.logs import log_error
 
 
 class Metadata(ABC):
@@ -12,7 +15,7 @@ class Metadata(ABC):
 
     KEY_METADATA: Final[str] = "Metadata".lower()
 
-    def __init__(self, definitions:
+    def __init__(self, definitions: Any | dict[str, Any] | None = None):
         """
         Initialises the metadata by searching the given definitions' top level for metadata.
         Alternatively, call `_extract_metadata()` to add metadata later on.
@@ -21,7 +24,7 @@ class Metadata(ABC):
         self._metadata = self.__extract_metadata(definitions)
 
     @staticmethod
-    def __extract_metadata(definitions:
+    def __extract_metadata(definitions: dict[str, Any] | None) -> dict:
         """
         If keyword `metadata` is found on the highest level of given `definitions`, metadata are extracted (removed) and
         returned, otherwise, an empty dict is returned and definitions are not changed
@@ -37,7 +40,7 @@ class Metadata(ABC):
         return self._metadata
 
     @final
-    def _extract_metadata(self, definitions:
+    def _extract_metadata(self, definitions: dict[str, Any] | None) -> None:
         """If keyword `metadata` is found on the highest level of given `definitions`, metadata are removed and set"""
         self._metadata = self.__extract_metadata(definitions)
 
@@ -76,7 +79,7 @@ class MetadataComponent(Metadata):
     Metadata itself, like, e.g., Strings in a list.
     """
 
-    def __init__(self, additional_definition:
+    def __init__(self, additional_definition: dict | None = None) -> None:
         super().__init__(additional_definition)
 
     def _to_dict(self) -> dict[str, dict]:
@@ -89,25 +92,41 @@ class ValueContainer:
     class ParseError(InputError):
         """An error that occurred while parsing content for metadata-annotated simple values"""
 
-
+    _ERR_VALUES_ILL_FORMATTED = "Only Lists or Dictionaries are supported for value definitions, but was: {}"
+
+    def __init__(self, definition: dict[str, Any] | list | None = None) -> None:
+        """
+        Sets data (and metadata - if any) from given `definition`
 
-
+        Args:
+            definition: dictionary representation of value(s) with potential associated metadata
 
-
-
-
+        Raises:
+            ParseError: if value definition is ill-formatted
+        """
+        self._values: dict[Any, MetadataComponent] = self._extract_values(definition)
 
     @staticmethod
-    def _extract_values(definition:
-        """
+    def _extract_values(definition: dict[str, Any] | list | None) -> dict[Any, MetadataComponent]:
+        """
+        Returns value data (and optional metadata) extracted from given `definition`
+
+        Args:
+            definition: dictionary representation of value with potential associated metadata
+
+        Returns:
+            value linked with associated metadata (if any)
+
+        Raises:
+            ParseError: if value definition is ill-formatted, logged on level "ERROR"
+        """
         if definition is None:
             return {}
-
+        if isinstance(definition, dict):
             return {key: MetadataComponent(key_definition) for key, key_definition in definition.items()}
-
+        if isinstance(definition, list):
             return {key: MetadataComponent() for key in definition}
-
-        raise ValueContainer.ParseError(ValueContainer._ERR_VALUES_ILL_FORMATTED.format(repr(definition)))
+        raise log_error(ValueContainer.ParseError(ValueContainer._ERR_VALUES_ILL_FORMATTED.format(repr(definition))))
@@ -127,7 +146,7 @@ class ValueContainer:
         """
         return {value: component_metadata.to_dict() for value, component_metadata in self._values.items()}
 
-    def has_value(self, to_search) -> bool:
+    def has_value(self, to_search: Any) -> bool:
         """
         Returns True if given value `to_search` is a key in this ValueContainer
 
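`ValueContainer` now documents both accepted shapes of a value definition: a plain list (values without metadata) or a dictionary mapping each value to an optional per-value definition; anything else raises the nested `ParseError`, now logged via `log_error`. A hedged sketch of those two shapes (the country codes and metadata payload are invented examples):

from fameio.input.metadata import ValueContainer

from_list = ValueContainer(["DE", "FR"])
from_dict = ValueContainer({"DE": {"Metadata": {"Comment": "example"}}, "FR": None})

print(from_list.has_value("DE"))   # True
print(from_dict.has_value("NL"))   # False

try:
    ValueContainer("DE")  # a bare string is neither a list nor a dict
except ValueContainer.ParseError as error:
    print(error)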