fameio 1.8.1__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- CHANGELOG.md +204 -0
- fameio/scripts/__init__.py +8 -6
- fameio/scripts/__init__.py.license +3 -0
- fameio/scripts/convert_results.py +30 -34
- fameio/scripts/convert_results.py.license +3 -0
- fameio/scripts/make_config.py +13 -16
- fameio/scripts/make_config.py.license +3 -0
- fameio/source/cli/__init__.py +3 -0
- fameio/source/cli/convert_results.py +75 -0
- fameio/source/cli/make_config.py +62 -0
- fameio/source/cli/options.py +59 -0
- fameio/source/cli/parser.py +238 -0
- fameio/source/loader.py +10 -11
- fameio/source/logs.py +49 -25
- fameio/source/results/conversion.py +12 -14
- fameio/source/results/csv_writer.py +16 -5
- fameio/source/results/data_transformer.py +3 -2
- fameio/source/results/input_dao.py +163 -0
- fameio/source/results/reader.py +25 -14
- fameio/source/results/yaml_writer.py +28 -0
- fameio/source/scenario/agent.py +56 -39
- fameio/source/scenario/attribute.py +9 -12
- fameio/source/scenario/contract.py +55 -40
- fameio/source/scenario/exception.py +11 -9
- fameio/source/scenario/generalproperties.py +11 -17
- fameio/source/scenario/scenario.py +19 -14
- fameio/source/schema/agenttype.py +75 -27
- fameio/source/schema/attribute.py +8 -7
- fameio/source/schema/schema.py +24 -11
- fameio/source/series.py +146 -25
- fameio/source/time.py +8 -8
- fameio/source/tools.py +13 -2
- fameio/source/validator.py +138 -58
- fameio/source/writer.py +108 -112
- fameio-2.0.0.dist-info/LICENSES/Apache-2.0.txt +178 -0
- fameio-2.0.0.dist-info/LICENSES/CC-BY-4.0.txt +395 -0
- fameio-2.0.0.dist-info/LICENSES/CC0-1.0.txt +121 -0
- {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/METADATA +144 -112
- fameio-2.0.0.dist-info/RECORD +52 -0
- {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/WHEEL +1 -2
- fameio-2.0.0.dist-info/entry_points.txt +4 -0
- fameio/source/cli.py +0 -253
- fameio-1.8.1.dist-info/RECORD +0 -40
- fameio-1.8.1.dist-info/entry_points.txt +0 -3
- fameio-1.8.1.dist-info/top_level.txt +0 -1
- {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/LICENSE.txt +0 -0
fameio/source/cli/parser.py
ADDED
@@ -0,0 +1,238 @@
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+import copy
+from argparse import ArgumentParser, ArgumentTypeError, BooleanOptionalAction, Namespace
+from enum import Enum
+from pathlib import Path
+from typing import Optional, Dict, Any, List, Union
+
+from fameio.source.cli.options import MergingOptions, TimeOptions, ResolveOptions, Options
+from fameio.source.logs import LOG_LEVELS
+
+_ERR_NEGATIVE_INT = "Given value `{}` is not a non-negative int."
+
+_OPTION_ARGUMENT_NAME: Dict[str, Union[Options, Dict]] = {
+    "file": Options.FILE,
+    "log": Options.LOG_LEVEL,
+    "logfile": Options.LOG_FILE,
+    "output": Options.OUTPUT,
+    "agents": Options.AGENT_LIST,
+    "single_export": Options.SINGLE_AGENT_EXPORT,
+    "memory_saving": Options.MEMORY_SAVING,
+    "time": Options.TIME,
+    "input_recovery": Options.INPUT_RECOVERY,
+    "complex_column": Options.RESOLVE_COMPLEX_FIELD,
+    "time_merging": {
+        "name": Options.TIME_MERGING,
+        "inner_elements": {
+            "focal_point": MergingOptions.FOCAL_POINT,
+            "steps_before": MergingOptions.STEPS_BEFORE,
+            "steps_after": MergingOptions.STEPS_AFTER,
+        },
+    },
+}
+
+
+def add_file_argument(parser: ArgumentParser, default: Optional[Path], help_text: str) -> None:
+    """
+    Adds 'file' argument to the provided `parser` with the provided `help_text`.
+    If a default is not specified, the argument is required (optional otherwise)
+
+    Args:
+        parser: to add the argument to
+        default: optional, if it is a valid Path, it is added as default and the argument becomes optional
+        help_text: to be displayed
+    """
+    if default is not None and isinstance(default, (Path, str)):
+        parser.add_argument("-f", "--file", type=Path, required=False, default=default, help=help_text)
+    else:
+        parser.add_argument("-f", "--file", type=Path, required=True, help=help_text)
+
+
+def add_select_agents_argument(parser: ArgumentParser, default: List[str]) -> None:
+    """Adds optional repeatable string argument 'agent' to given `parser`"""
+    help_text = "Provide list of agents to extract (default=None)"
+    parser.add_argument("-a", "--agents", nargs="*", type=str, default=default, help=help_text)
+
+
+def add_logfile_argument(parser: ArgumentParser, default: Path) -> None:
+    """Adds optional argument 'logfile' to given `parser`"""
+    help_text = "provide logging file (default=None)"
+    parser.add_argument("-lf", "--logfile", type=Path, default=default, help=help_text)
+
+
+def add_output_argument(parser: ArgumentParser, default_value, help_text: str) -> None:
+    """Adds optional argument 'output' to given `parser` using the given `help_text` and `default_value`"""
+    parser.add_argument("-o", "--output", type=Path, default=default_value, help=help_text)
+
+
+def add_log_level_argument(parser: ArgumentParser, default_value: str) -> None:
+    """Adds optional argument 'log' to given `parser`"""
+    help_text = "choose logging level (default: {})".format(default_value)
+    parser.add_argument(
+        "-l",
+        "--log",
+        default=default_value,
+        choices=list(LOG_LEVELS.keys()),
+        type=str.lower,
+        help=help_text,
+    )
+
+
+def add_single_export_argument(parser: ArgumentParser, default_value: bool) -> None:
+    """Adds optional repeatable string argument 'agent' to given `parser`"""
+    help_text = "Enable export of single agents (default=False)"
+    parser.add_argument(
+        "-se",
+        "--single-export",
+        default=default_value,
+        action="store_true",
+        help=help_text,
+    )
+
+
+def add_memory_saving_argument(parser: ArgumentParser, default_value: bool) -> None:
+    """Adds optional bool argument to given `parser` to enable memory saving mode"""
+    help_text = "Reduces memory usage profile at the cost of runtime (default=False)"
+    parser.add_argument(
+        "-m",
+        "--memory-saving",
+        default=default_value,
+        action="store_true",
+        help=help_text,
+    )
+
+
+def add_resolve_complex_argument(parser: ArgumentParser, default_value: Union[ResolveOptions, str]):
+    """Instructs given `parser` how to deal with complex field outputs"""
+    default_value = default_value if isinstance(default_value, ResolveOptions) else ResolveOptions[default_value]
+    help_text = f"How to deal with complex index columns? (default={default_value})"
+    parser.add_argument(
+        "-cc",
+        "--complex-column",
+        type=ResolveOptions.instantiate,
+        default=default_value,
+        choices=ResolveOptions,
+        help=help_text,
+    )
+
+
+def add_time_argument(parser: ArgumentParser, default_value: Union[TimeOptions, str]) -> None:
+    """Adds optional argument to given `parser` to define conversion of TimeSteps"""
+    default_value = default_value if isinstance(default_value, TimeOptions) else TimeOptions[default_value]
+    help_text = "Apply conversion of time steps to given format (default=UTC)"
+    parser.add_argument(
+        "-t",
+        "--time",
+        type=TimeOptions.instantiate,
+        default=default_value,
+        choices=TimeOptions,
+        help=help_text,
+    )
+
+
+def add_merge_time_parser(parser: ArgumentParser) -> None:
+    """Adds subparser for merging of TimeSteps to given `parser`"""
+    subparser = parser.add_subparsers(dest="time_merging", required=False, help="Optional merging of TimeSteps")
+    group_parser = subparser.add_parser("merge-times")
+    add_focal_point_argument(group_parser)
+    add_steps_before_argument(group_parser)
+    add_steps_after_argument(group_parser)
+
+
+def add_focal_point_argument(parser: ArgumentParser) -> None:
+    """Adds `focal-point` argument to given `parser`"""
+    help_text = "TimeStep on which `steps_before` earlier and `steps_after` later TimeSteps are merged on"
+    parser.add_argument("-fp", "--focal-point", required=True, type=int, help=help_text)
+
+
+def add_steps_before_argument(parser: ArgumentParser) -> None:
+    """Adds `steps-before` argument to given `parser`"""
+    help_text = "Range of TimeSteps before the `focal-point` they get merged to"
+    parser.add_argument("-sb", "--steps-before", required=True, type=_non_negative_int, help=help_text)
+
+
+def _non_negative_int(value: Any) -> int:
+    """
+    Casts a given ´value` to int and checks it for non-negativity
+
+    Args:
+        value: to check and parse
+
+    Returns:
+        `value` parsed to int if it is a non-negative integer
+
+    Raises:
+        TypeError: if `value` is None
+        ValueError: if `value` cannot be parsed to int
+        argparse.ArgumentTypeError: if `value` is a negative int
+
+    """
+    value = int(value)
+    if value < 0:
+        raise ArgumentTypeError(_ERR_NEGATIVE_INT.format(value))
+    return value
+
+
+def add_steps_after_argument(parser: ArgumentParser) -> None:
+    """Adds `steps-after` argument to given `parser`"""
+    help_text = "Range of TimeSteps after the `focal-point` they get merged to"
+    parser.add_argument("-sa", "--steps-after", required=True, type=_non_negative_int, help=help_text)
+
+
+def add_inputs_recovery_argument(parser: ArgumentParser, default: bool) -> None:
+    """Adds optional bool argument to given `parser` to recover inputs"""
+    help_text = "If --(no-)input-recovery is specified, (no) inputs will be recovered"
+    parser.add_argument(
+        "--input-recovery",
+        action=BooleanOptionalAction,
+        default=default,
+        help=help_text,
+    )
+
+
+def update_default_config(config: Optional[dict], default: dict) -> dict:
+    """Returns `default` config with updated fields received from `config`"""
+    result = copy.deepcopy(default)
+    if config:
+        for name, option in config.items():
+            result[name] = option
+    return result
+
+
+def map_namespace_to_options_dict(parsed: Namespace) -> Dict[Options, Any]:
+    """
+    Maps given parsing results to their corresponding configuration option
+
+    Args:
+        parsed: result of a parsing
+
+    Returns:
+        Map of each parsed argument to their configuration option
+    """
+    return _map_namespace_to_options(parsed, _OPTION_ARGUMENT_NAME)
+
+
+def _map_namespace_to_options(parsed: Namespace, names_to_options: Dict[str, Enum]) -> Dict[Options, Any]:
+    """
+    Maps given parsing results to their corresponding configuration option; elements that cannot be mapped are ignored.
+    If a configuration option has inner elements, these well be also read and added as inner dictionary.
+
+    Args:
+        parsed: result of a parsing
+        names_to_options: dict to search for configuration option specifications
+
+    Returns:
+        Map parsed arguments to their configuration option if they exist in the given `names_to_options` dict
+    """
+    config = {}
+    for name, value in vars(parsed).items():
+        option = names_to_options.get(name, None)
+        if option:
+            if isinstance(option, dict):
+                inner_element_map = option["inner_elements"]
+                option = option["name"]
+                value = _map_namespace_to_options(parsed, inner_element_map)
+            config[option] = value
+    return config
fameio/source/loader.py
CHANGED
@@ -2,14 +2,13 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-import logging as log
 import os
 from pathlib import Path
 from fnmatch import fnmatch
 from typing import IO, Any, Callable
 
 import yaml
-from fameio.source.logs import log_and_raise_critical
+from fameio.source.logs import log_and_raise_critical, logger
 from fameio.source.path_resolver import PathResolver
 
 DISABLING_YAML_FILE_PREFIX = "IGNORE_"
@@ -29,7 +28,7 @@ def read_args(loader, args):
     file_string = None
     if isinstance(args, yaml.nodes.ScalarNode):
         file_string = loader.construct_scalar(args)
-
+        logger().debug("Found instance `ScalarNode` in {}".format(file_string))
     elif isinstance(args, yaml.nodes.SequenceNode):
         argument_list = loader.construct_sequence(args)
         if len(argument_list) not in [1, 2]:
@@ -37,7 +36,7 @@ def read_args(loader, args):
         elif len(argument_list) == 2:
             node_string = argument_list[1]
             file_string = argument_list[0]
-
+            logger().debug("Found instance `SequenceNode` in {}".format(file_string))
     elif isinstance(args, yaml.nodes.MappingNode):
         argument_map = loader.construct_mapping(args)
         for key, value in argument_map.items():
@@ -56,7 +55,7 @@ def read_args(loader, args):
 
 def split_nodes(node_string):
     """Returns a list of nodes created from the given `node_string`"""
-
+    logger().debug("Splitting given node_string `{}`".format(node_string))
     return node_string.split(":")
 
 
@@ -64,7 +63,7 @@ class FameYamlLoader(yaml.SafeLoader):
     """Custom YAML Loader for `!include` constructor"""
 
     def __init__(self, stream: IO, path_resolver=PathResolver()) -> None:
-
+        logger().debug("Initialize custom YAML loader")
         self._path_resolver = path_resolver
         try:
             self._root_path = os.path.split(stream.name)[0]
@@ -100,12 +99,12 @@ def resolve_imported_path(loader: FameYamlLoader, included_path: str):
     cleaned_file_list = []
     for file in file_list:
         if fnmatch(file, ignore_filter):
-
+            logger().debug("Ignoring file {} due to prefix {}".format(file, DISABLING_YAML_FILE_PREFIX))
         else:
             cleaned_file_list.append(file)
     if not cleaned_file_list:
         log_and_raise_critical("Failed to find any file matching the `!include` directive `{}`".format(included_path))
-
+    logger().debug("Collected file(s) `{}` from given included path `{}`".format(cleaned_file_list, included_path))
    return cleaned_file_list
 
 
@@ -118,7 +117,7 @@ def read_data_from_file(file, node_address, path_resolver: PathResolver):
             data = data[node]
         except KeyError:
             log_and_raise_critical("'!include_node [{}, {}]': Cannot find '{}'.".format(file, node_address, node))
-
+    logger().debug("Searched file `{}` for node `{}`".format(file, node_address))
     return data
 
 
@@ -157,7 +156,7 @@ def construct_include(loader: FameYamlLoader, args: yaml.Node) -> Any:
     with open(file_name, "r") as open_file:
         data = read_data_from_file(open_file, nodes, loader.path_resolver)
         joined_data = join_data(data, joined_data)
-
+    logger().debug("Joined all files `{}` to joined data `{}`".format(files, joined_data))
     return joined_data
 
 
@@ -166,7 +165,7 @@ FameYamlLoader.add_constructor("!include", construct_include)
 
 def load_yaml(yaml_file_path: Path, path_resolver=PathResolver()):
     """Loads the yaml file from given `yaml_file_path` and returns its content"""
-
+    logger().info("Loading yaml from {}".format(yaml_file_path))
     with open(yaml_file_path, "r") as configfile:
         data = yaml.load(configfile, make_yaml_loader_builder(path_resolver))
     return data
fameio/source/logs.py
CHANGED
@@ -4,7 +4,7 @@
 
 import logging as log
 from pathlib import Path
-from typing import NoReturn
+from typing import NoReturn, Optional, List
 
 LOG_LEVELS = {
     "critical": log.CRITICAL,
@@ -15,45 +15,69 @@ LOG_LEVELS = {
     "debug": log.DEBUG,
 }
 
+_loggers: List[log.Logger] = []
+
+_FORMAT_NORMAL = "%(asctime)s — %(levelname)s — %(message)s"  # noqa
+_FORMAT_DETAILLED = "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s"  # noqa
+_TIME_FORMAT = "%H:%M:%S"
+
+_WARN_ALREADY_INITIALIZED = "Cannot set up new FAMEIO logger: using existing logger with previous settings."
+_WARN_NOT_INITIALIZED = "Logger not initialised for FAMEIO - using default root logger"
+
+LOGGER_NAME = "fameio"
+
+
+def logger() -> log.Logger:
+    """Returns already set up FAME-Io's logger or - if not set up - a new logger with level `INFO`"""
+    if not _loggers:
+        set_up_logger("info")
+        log.warning(_WARN_NOT_INITIALIZED)
+    return _loggers[0]
+
 
 def log_and_raise_critical(message: str) -> NoReturn:
     """Raises a critical error and logs with given `error_message`"""
-
+    logger().critical(message)
     raise Exception(message)
 
 
 def log_error_and_raise(exception: Exception) -> NoReturn:
     """Raises the specified `exception` and logs an error with the same `message`"""
-
+    logger().error(str(exception))
     raise exception
 
 
-def set_up_logger(level_name: str, file_name: Path) -> None:
+def set_up_logger(level_name: str, file_name: Optional[Path] = None) -> None:
     """Uses existing logger or sets up logger"""
-    if not
-
+    if not _loggers:
+        _loggers.append(log.getLogger(LOGGER_NAME))
+        level = LOG_LEVELS.get(level_name.lower())
+        _set_log_level(level)
+        formatter = _get_formatter(level)
+        _add_handler(log.StreamHandler(), formatter)
+        if file_name:
+            _add_handler(log.FileHandler(file_name, mode="w"), formatter)
+    else:
+        log.warning(_WARN_ALREADY_INITIALIZED)
 
 
-def
-    """
-
-    if level is log.DEBUG:
-        formatter_string = (
-            "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s"  # noqa
-        )
-    else:
-        formatter_string = "%(asctime)s — %(levelname)s — %(message)s"  # noqa
+def _set_log_level(level: int) -> None:
+    """Set the global log level to given `level`"""
+    logger().setLevel(level)
 
-    log_formatter = log.Formatter(formatter_string, "%H:%M:%S")
 
+def _get_formatter(level: int) -> log.Formatter:
+    """
+    Returns a log formatter depending on the given log `level`
+    Args:
+        level: this log level determines how detailed the logger's output is
+    Returns:
+        new log formatter
+    """
+    return log.Formatter(_FORMAT_DETAILLED if level is log.DEBUG else _FORMAT_NORMAL, _TIME_FORMAT)
 
-    if file_name:
-        file_handler = log.FileHandler(file_name, mode="w")
-        file_handler.setFormatter(log_formatter)
-        root_logger.addHandler(file_handler)
 
+def _add_handler(handler: log.Handler, formatter: log.Formatter) -> None:
+    """Adds given `handler` to root logger using the specified `formatter`"""
+    handler.setFormatter(formatter)
+    logger().addHandler(handler)
fameio/source/results/conversion.py
CHANGED
@@ -4,13 +4,12 @@
 
 import math
 from typing import Dict, Optional
-import logging as log
 
 import pandas as pd
 
 from fameio.source import FameTime
-from fameio.source.cli import TimeOptions, MergingOptions
-from fameio.source.logs import log_error_and_raise
+from fameio.source.cli.options import TimeOptions, MergingOptions
+from fameio.source.logs import log_error_and_raise, logger
 from fameio.source.time import ConversionException
 
 _ERR_UNIMPLEMENTED = "Time conversion mode '{}' not implemented."
@@ -28,7 +27,7 @@ def apply_time_merging(data: Dict[Optional[str], pd.DataFrame], config: Optional
         Nothing - data is modified inplace
     """
     if config:
-
+        logger().debug(f"Grouping TimeSteps...")
         offset = config[MergingOptions.STEPS_BEFORE]
         period = config[MergingOptions.STEPS_AFTER] + config[MergingOptions.STEPS_BEFORE] + 1
         first_positive_focal_point = config[MergingOptions.FOCAL_POINT] % period
@@ -37,7 +36,7 @@ def apply_time_merging(data: Dict[Optional[str], pd.DataFrame], config: Optional
             index_columns = df.index.names
             df.reset_index(inplace=True)
             df["TimeStep"] = df["TimeStep"].apply(lambda t: merge_time(t, first_positive_focal_point, offset, period))
-            data[key] = df.groupby(by=index_columns
+            data[key] = df.groupby(by=index_columns).sum()
 
 
 def merge_time(time_step: int, focal_time: int, offset: int, period: int) -> int:
@@ -56,26 +55,25 @@ def merge_time(time_step: int, focal_time: int, offset: int, period: int) -> int
     return math.floor((time_step + offset - focal_time) / period) * period + focal_time
 
 
-def apply_time_option(data: Dict[Optional[str], pd.DataFrame],
+def apply_time_option(data: Dict[Optional[str], pd.DataFrame], mode: TimeOptions) -> None:
     """
     Applies time option based on given `mode` inplace of given `data`
 
     Args:
         data: one or multiple DataFrames of time series; column `TimeStep` might be modified (depending on mode)
-
+        mode: name of time conversion mode (derived from Enum)
 
     Returns:
         Nothing - data is modified inplace
     """
-
-
-
-    elif mode_name == TimeOptions.UTC.name:
+    if mode == TimeOptions.INT:
+        logger().debug("No time conversion...")
+    elif mode == TimeOptions.UTC:
         _convert_time_index(data, "%Y-%m-%d %H:%M:%S")
-    elif
+    elif mode == TimeOptions.FAME:
         _convert_time_index(data, "%Y-%m-%d_%H:%M:%S")
     else:
-        log_error_and_raise(ConversionException(_ERR_UNIMPLEMENTED.format(
+        log_error_and_raise(ConversionException(_ERR_UNIMPLEMENTED.format(mode)))
 
 
 def _convert_time_index(data: Dict[Optional[str], pd.DataFrame], datetime_format: str) -> None:
@@ -90,7 +88,7 @@ def _convert_time_index(data: Dict[Optional[str], pd.DataFrame], datetime_format
     Returns:
         Nothing - data is modified inplace
     """
-
+    logger().debug(f"Converting TimeStep to format '{datetime_format}'...")
     for _, df in data.items():
         index_columns = df.index.names
         df.reset_index(inplace=True)
fameio/source/results/csv_writer.py
CHANGED
@@ -1,14 +1,16 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 
-import logging as log
 from pathlib import Path
 from typing import Dict, Union
 
 import pandas as pd
 
+from fameio.source.logs import logger
 from fameio.source.results.data_transformer import INDEX
+from fameio.source.series import TimeSeriesManager
+from fameio.source.tools import ensure_path_exists
 
 
 class CsvWriter:
@@ -28,15 +30,15 @@ class CsvWriter:
         """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`"""
         if config_output:
             output_folder_name = config_output
-
+            logger().info(CsvWriter._INFO_USING_PATH.format(config_output))
         else:
             output_folder_name = input_file_path.stem
-
+            logger().info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
         return Path(output_folder_name)
 
     def _create_output_folder(self) -> None:
         """Creates output folder if not yet present"""
-
+        logger().debug("Creating output folder if required...")
         if not self._output_folder.is_dir():
             self._output_folder.mkdir(parents=True)
 
@@ -52,6 +54,15 @@ class CsvWriter:
             identifier = self._get_identifier(agent_name, column_name)
             self._write_data_frame(column_data, identifier)
 
+    def write_time_series_to_disk(self, timeseries_manager: TimeSeriesManager) -> None:
+        """Writes time_series of given `timeseries_manager` to disk"""
+        for _, name, data in timeseries_manager.get_all_series():
+            if data is not None:
+                target_path = Path(self._output_folder, name)
+                ensure_path_exists(target_path.parent)
+                # noinspection PyTypeChecker
+                data.to_csv(path_or_buf=target_path, sep=";", header=None, index=None)
+
     @staticmethod
     def _get_identifier(agent_name: str, column_name: str, agent_id: str = None) -> str:
         """Returns unique identifier for given `agent_name` and (optional) `agent_id` and `column_name`"""
fameio/source/results/data_transformer.py
CHANGED
@@ -1,6 +1,7 @@
 # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
 
 from abc import ABC
 from builtins import staticmethod
@@ -10,7 +11,7 @@ import pandas as pd
 from fameprotobuf.Services_pb2 import Output
 from pandas import DataFrame
 
-from fameio.source.cli import ResolveOptions
+from fameio.source.cli.options import ResolveOptions
 from fameio.source.results.agent_type import AgentType
 
 INDEX = ("AgentId", "TimeStep")
@@ -27,7 +28,7 @@ class DataTransformer(ABC):
     SIMPLE_COLUMN_INDEX = -1
 
     @staticmethod
-    def build(complex_column_mode: ResolveOptions) ->
+    def build(complex_column_mode: ResolveOptions) -> DataTransformer:
         return DataTransformer.MODES[complex_column_mode]()
 
     def extract_agent_data(
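
`DataTransformer.build` is an enum-keyed factory (`MODES[complex_column_mode]()`), and the added `from __future__ import annotations` is what lets the method annotate its return type with the enclosing class. A generic sketch of that pattern with stand-in names (not fameio's actual classes or enum members):

    from __future__ import annotations  # permits `-> Transformer` inside the class body
    from enum import Enum, auto

    class Mode(Enum):
        IGNORE = auto()
        SPLIT = auto()

    class Transformer:
        MODES: dict[Mode, type] = {}  # registry filled in by concrete subclasses

        @staticmethod
        def build(mode: Mode) -> Transformer:
            return Transformer.MODES[mode]()  # look up the concrete class and instantiate it

    class SplitTransformer(Transformer):
        pass

    Transformer.MODES[Mode.SPLIT] = SplitTransformer
    assert isinstance(Transformer.build(Mode.SPLIT), SplitTransformer)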
|