fameio 1.8.2__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (47)
  1. CHANGELOG.md +224 -0
  2. fameio/scripts/__init__.py +8 -6
  3. fameio/scripts/__init__.py.license +3 -0
  4. fameio/scripts/convert_results.py +31 -35
  5. fameio/scripts/convert_results.py.license +3 -0
  6. fameio/scripts/make_config.py +14 -17
  7. fameio/scripts/make_config.py.license +3 -0
  8. fameio/source/cli/__init__.py +3 -0
  9. fameio/source/cli/convert_results.py +84 -0
  10. fameio/source/cli/make_config.py +62 -0
  11. fameio/source/cli/options.py +58 -0
  12. fameio/source/cli/parser.py +238 -0
  13. fameio/source/loader.py +10 -11
  14. fameio/source/logs.py +90 -35
  15. fameio/source/results/conversion.py +11 -13
  16. fameio/source/results/csv_writer.py +16 -5
  17. fameio/source/results/data_transformer.py +6 -22
  18. fameio/source/results/input_dao.py +163 -0
  19. fameio/source/results/reader.py +25 -14
  20. fameio/source/results/yaml_writer.py +28 -0
  21. fameio/source/scenario/agent.py +56 -39
  22. fameio/source/scenario/attribute.py +9 -12
  23. fameio/source/scenario/contract.py +55 -40
  24. fameio/source/scenario/exception.py +11 -9
  25. fameio/source/scenario/generalproperties.py +11 -17
  26. fameio/source/scenario/scenario.py +19 -14
  27. fameio/source/schema/agenttype.py +75 -27
  28. fameio/source/schema/attribute.py +8 -7
  29. fameio/source/schema/java_packages.py +69 -0
  30. fameio/source/schema/schema.py +44 -15
  31. fameio/source/series.py +148 -25
  32. fameio/source/time.py +8 -8
  33. fameio/source/tools.py +13 -2
  34. fameio/source/validator.py +138 -58
  35. fameio/source/writer.py +120 -113
  36. fameio-2.1.0.dist-info/LICENSES/Apache-2.0.txt +178 -0
  37. fameio-2.1.0.dist-info/LICENSES/CC-BY-4.0.txt +395 -0
  38. fameio-2.1.0.dist-info/LICENSES/CC0-1.0.txt +121 -0
  39. {fameio-1.8.2.dist-info → fameio-2.1.0.dist-info}/METADATA +706 -660
  40. fameio-2.1.0.dist-info/RECORD +53 -0
  41. {fameio-1.8.2.dist-info → fameio-2.1.0.dist-info}/WHEEL +1 -2
  42. fameio-2.1.0.dist-info/entry_points.txt +4 -0
  43. fameio/source/cli.py +0 -253
  44. fameio-1.8.2.dist-info/RECORD +0 -40
  45. fameio-1.8.2.dist-info/entry_points.txt +0 -3
  46. fameio-1.8.2.dist-info/top_level.txt +0 -1
  47. {fameio-1.8.2.dist-info → fameio-2.1.0.dist-info}/LICENSE.txt +0 -0
fameio/source/cli/parser.py ADDED
@@ -0,0 +1,238 @@
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ import copy
+ from argparse import ArgumentParser, ArgumentTypeError, BooleanOptionalAction, Namespace
+ from enum import Enum
+ from pathlib import Path
+ from typing import Optional, Dict, Any, List, Union
+
+ from fameio.source.cli.options import MergingOptions, TimeOptions, ResolveOptions, Options
+ from fameio.source.logs import LogLevel
+
+ _ERR_NEGATIVE_INT = "Given value `{}` is not a non-negative int."
+
+ _OPTION_ARGUMENT_NAME: Dict[str, Union[Options, Dict]] = {
+     "file": Options.FILE,
+     "log": Options.LOG_LEVEL,
+     "logfile": Options.LOG_FILE,
+     "output": Options.OUTPUT,
+     "agents": Options.AGENT_LIST,
+     "single_export": Options.SINGLE_AGENT_EXPORT,
+     "memory_saving": Options.MEMORY_SAVING,
+     "time": Options.TIME,
+     "input_recovery": Options.INPUT_RECOVERY,
+     "complex_column": Options.RESOLVE_COMPLEX_FIELD,
+     "time_merging": {
+         "name": Options.TIME_MERGING,
+         "inner_elements": {
+             "focal_point": MergingOptions.FOCAL_POINT,
+             "steps_before": MergingOptions.STEPS_BEFORE,
+             "steps_after": MergingOptions.STEPS_AFTER,
+         },
+     },
+ }
+
+
+ def add_file_argument(parser: ArgumentParser, default: Optional[Path], help_text: str) -> None:
+     """
+     Adds 'file' argument to the provided `parser` with the provided `help_text`.
+     If a default is not specified, the argument is required (optional otherwise)
+
+     Args:
+         parser: to add the argument to
+         default: optional; if it is a valid Path, it is added as default and the argument becomes optional
+         help_text: to be displayed
+     """
+     if default is not None and isinstance(default, (Path, str)):
+         parser.add_argument("-f", "--file", type=Path, required=False, default=default, help=help_text)
+     else:
+         parser.add_argument("-f", "--file", type=Path, required=True, help=help_text)
+
+
+ def add_select_agents_argument(parser: ArgumentParser, default: List[str]) -> None:
+     """Adds optional repeatable string argument 'agents' to given `parser`"""
+     help_text = "Provide list of agents to extract (default=None)"
+     parser.add_argument("-a", "--agents", nargs="*", type=str, default=default, help=help_text)
+
+
+ def add_logfile_argument(parser: ArgumentParser, default: Path) -> None:
+     """Adds optional argument 'logfile' to given `parser`"""
+     help_text = "provide logging file (default=None)"
+     parser.add_argument("-lf", "--logfile", type=Path, default=default, help=help_text)
+
+
+ def add_output_argument(parser: ArgumentParser, default_value, help_text: str) -> None:
+     """Adds optional argument 'output' to given `parser` using the given `help_text` and `default_value`"""
+     parser.add_argument("-o", "--output", type=Path, default=default_value, help=help_text)
+
+
+ def add_log_level_argument(parser: ArgumentParser, default_value: str) -> None:
+     """Adds optional argument 'log' to given `parser`"""
+     help_text = "choose logging level (default: {})".format(default_value)
+     parser.add_argument(
+         "-l",
+         "--log",
+         default=default_value,
+         choices=[level.name for level in LogLevel if level not in [LogLevel.PRINT, LogLevel.WARN]],
+         type=str.upper,
+         help=help_text,
+     )
+
+
+ def add_single_export_argument(parser: ArgumentParser, default_value: bool) -> None:
+     """Adds optional bool argument 'single-export' to given `parser`"""
+     help_text = "Enable export of single agents (default=False)"
+     parser.add_argument(
+         "-se",
+         "--single-export",
+         default=default_value,
+         action="store_true",
+         help=help_text,
+     )
+
+
+ def add_memory_saving_argument(parser: ArgumentParser, default_value: bool) -> None:
+     """Adds optional bool argument to given `parser` to enable memory saving mode"""
+     help_text = "Reduces memory usage profile at the cost of runtime (default=False)"
+     parser.add_argument(
+         "-m",
+         "--memory-saving",
+         default=default_value,
+         action="store_true",
+         help=help_text,
+     )
+
+
+ def add_resolve_complex_argument(parser: ArgumentParser, default_value: Union[ResolveOptions, str]):
+     """Instructs given `parser` how to deal with complex field outputs"""
+     default_value = default_value if isinstance(default_value, ResolveOptions) else ResolveOptions[default_value]
+     help_text = f"How to deal with complex index columns? (default={default_value})"
+     parser.add_argument(
+         "-cc",
+         "--complex-column",
+         type=ResolveOptions.instantiate,
+         default=default_value,
+         choices=ResolveOptions,
+         help=help_text,
+     )
+
+
+ def add_time_argument(parser: ArgumentParser, default_value: Union[TimeOptions, str]) -> None:
+     """Adds optional argument to given `parser` to define conversion of TimeSteps"""
+     default_value = default_value if isinstance(default_value, TimeOptions) else TimeOptions[default_value]
+     help_text = "Apply conversion of time steps to given format (default=UTC)"
+     parser.add_argument(
+         "-t",
+         "--time",
+         type=TimeOptions.instantiate,
+         default=default_value,
+         choices=TimeOptions,
+         help=help_text,
+     )
+
+
+ def add_merge_time_parser(parser: ArgumentParser) -> None:
+     """Adds subparser for merging of TimeSteps to given `parser`"""
+     subparser = parser.add_subparsers(dest="time_merging", required=False, help="Optional merging of TimeSteps")
+     group_parser = subparser.add_parser("merge-times")
+     add_focal_point_argument(group_parser)
+     add_steps_before_argument(group_parser)
+     add_steps_after_argument(group_parser)
+
+
+ def add_focal_point_argument(parser: ArgumentParser) -> None:
+     """Adds `focal-point` argument to given `parser`"""
+     help_text = "TimeStep on which `steps_before` earlier and `steps_after` later TimeSteps are merged"
+     parser.add_argument("-fp", "--focal-point", required=True, type=int, help=help_text)
+
+
+ def add_steps_before_argument(parser: ArgumentParser) -> None:
+     """Adds `steps-before` argument to given `parser`"""
+     help_text = "Range of TimeSteps before the `focal-point` that get merged onto it"
+     parser.add_argument("-sb", "--steps-before", required=True, type=_non_negative_int, help=help_text)
+
+
+ def _non_negative_int(value: Any) -> int:
+     """
+     Casts a given `value` to int and checks it for non-negativity
+
+     Args:
+         value: to check and parse
+
+     Returns:
+         `value` parsed to int if it is a non-negative integer
+
+     Raises:
+         TypeError: if `value` is None
+         ValueError: if `value` cannot be parsed to int
+         argparse.ArgumentTypeError: if `value` is a negative int
+
+     """
+     value = int(value)
+     if value < 0:
+         raise ArgumentTypeError(_ERR_NEGATIVE_INT.format(value))
+     return value
+
+
+ def add_steps_after_argument(parser: ArgumentParser) -> None:
+     """Adds `steps-after` argument to given `parser`"""
+     help_text = "Range of TimeSteps after the `focal-point` that get merged onto it"
+     parser.add_argument("-sa", "--steps-after", required=True, type=_non_negative_int, help=help_text)
+
+
+ def add_inputs_recovery_argument(parser: ArgumentParser, default: bool) -> None:
+     """Adds optional bool argument to given `parser` to recover inputs"""
+     help_text = "If --(no-)input-recovery is specified, (no) inputs will be recovered"
+     parser.add_argument(
+         "--input-recovery",
+         action=BooleanOptionalAction,
+         default=default,
+         help=help_text,
+     )
+
+
+ def update_default_config(config: Optional[dict], default: dict) -> dict:
+     """Returns `default` config with updated fields received from `config`"""
+     result = copy.deepcopy(default)
+     if config:
+         for name, option in config.items():
+             result[name] = option
+     return result
+
+
+ def map_namespace_to_options_dict(parsed: Namespace) -> Dict[Options, Any]:
+     """
+     Maps given parsing results to their corresponding configuration option
+
+     Args:
+         parsed: result of a parsing
+
+     Returns:
+         Map of each parsed argument to its configuration option
+     """
+     return _map_namespace_to_options(parsed, _OPTION_ARGUMENT_NAME)
+
+
+ def _map_namespace_to_options(parsed: Namespace, names_to_options: Dict[str, Enum]) -> Dict[Options, Any]:
+     """
+     Maps given parsing results to their corresponding configuration option; elements that cannot be mapped are ignored.
+     If a configuration option has inner elements, these will also be read and added as an inner dictionary.
+
+     Args:
+         parsed: result of a parsing
+         names_to_options: dict to search for configuration option specifications
+
+     Returns:
+         Map of parsed arguments to their configuration option if they exist in the given `names_to_options` dict
+     """
+     config = {}
+     for name, value in vars(parsed).items():
+         option = names_to_options.get(name, None)
+         if option:
+             if isinstance(option, dict):
+                 inner_element_map = option["inner_elements"]
+                 option = option["name"]
+                 value = _map_namespace_to_options(parsed, inner_element_map)
+             config[option] = value
+     return config
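
The helpers in this new module compose into complete CLI parsers. A minimal sketch of how they might be wired together; the program name and argument values are invented for illustration, and `Options` comes from the sibling module fameio/source/cli/options.py shown in the imports above:

from argparse import ArgumentParser

from fameio.source.cli.options import Options
from fameio.source.cli.parser import (
    add_file_argument,
    add_log_level_argument,
    add_merge_time_parser,
    map_namespace_to_options_dict,
)

parser = ArgumentParser(prog="demo")  # hypothetical program name
add_file_argument(parser, default=None, help_text="protobuf file to convert")  # no default, so -f/--file is required
add_log_level_argument(parser, default_value="WARNING")
add_merge_time_parser(parser)  # registers the optional "merge-times" subcommand

parsed = parser.parse_args(["-f", "results.pb", "merge-times", "-fp", "0", "-sb", "1", "-sa", "1"])
options = map_namespace_to_options_dict(parsed)
# options[Options.FILE] == Path("results.pb")
# options[Options.TIME_MERGING] maps MergingOptions.FOCAL_POINT/STEPS_BEFORE/STEPS_AFTER to 0/1/1

Note how the nested "time_merging" entry of _OPTION_ARGUMENT_NAME turns the subcommand's arguments into an inner dictionary keyed by MergingOptions.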
fameio/source/loader.py CHANGED
@@ -2,14 +2,13 @@
  #
  # SPDX-License-Identifier: Apache-2.0

- import logging as log
  import os
  from pathlib import Path
  from fnmatch import fnmatch
  from typing import IO, Any, Callable

  import yaml
- from fameio.source.logs import log_and_raise_critical
+ from fameio.source.logs import log_and_raise_critical, log
  from fameio.source.path_resolver import PathResolver

  DISABLING_YAML_FILE_PREFIX = "IGNORE_"
@@ -29,7 +28,7 @@ def read_args(loader, args):
      file_string = None
      if isinstance(args, yaml.nodes.ScalarNode):
          file_string = loader.construct_scalar(args)
-         log.debug("Found instance `ScalarNode` in {}".format(file_string))
+         log().debug("Found instance `ScalarNode` in {}".format(file_string))
      elif isinstance(args, yaml.nodes.SequenceNode):
          argument_list = loader.construct_sequence(args)
          if len(argument_list) not in [1, 2]:
@@ -37,7 +36,7 @@ def read_args(loader, args):
          elif len(argument_list) == 2:
              node_string = argument_list[1]
          file_string = argument_list[0]
-         log.debug("Found instance `SequenceNode` in {}".format(file_string))
+         log().debug("Found instance `SequenceNode` in {}".format(file_string))
      elif isinstance(args, yaml.nodes.MappingNode):
          argument_map = loader.construct_mapping(args)
          for key, value in argument_map.items():
@@ -56,7 +55,7 @@

  def split_nodes(node_string):
      """Returns a list of nodes created from the given `node_string`"""
-     log.debug("Splitting given node_string `{}`".format(node_string))
+     log().debug("Splitting given node_string `{}`".format(node_string))
      return node_string.split(":")


@@ -64,7 +63,7 @@ class FameYamlLoader(yaml.SafeLoader):
      """Custom YAML Loader for `!include` constructor"""

      def __init__(self, stream: IO, path_resolver=PathResolver()) -> None:
-         log.debug("Initialize custom YAML loader")
+         log().debug("Initialize custom YAML loader")
          self._path_resolver = path_resolver
          try:
              self._root_path = os.path.split(stream.name)[0]
@@ -100,12 +99,12 @@ def resolve_imported_path(loader: FameYamlLoader, included_path: str):
      cleaned_file_list = []
      for file in file_list:
          if fnmatch(file, ignore_filter):
-             log.debug("Ignoring file {} due to prefix {}".format(file, DISABLING_YAML_FILE_PREFIX))
+             log().debug("Ignoring file {} due to prefix {}".format(file, DISABLING_YAML_FILE_PREFIX))
          else:
              cleaned_file_list.append(file)
      if not cleaned_file_list:
          log_and_raise_critical("Failed to find any file matching the `!include` directive `{}`".format(included_path))
-     log.debug("Collected file(s) `{}` from given included path `{}`".format(cleaned_file_list, included_path))
+     log().debug("Collected file(s) `{}` from given included path `{}`".format(cleaned_file_list, included_path))
      return cleaned_file_list


@@ -118,7 +117,7 @@ def read_data_from_file(file, node_address, path_resolver: PathResolver):
              data = data[node]
          except KeyError:
              log_and_raise_critical("'!include_node [{}, {}]': Cannot find '{}'.".format(file, node_address, node))
-     log.debug("Searched file `{}` for node `{}`".format(file, node_address))
+     log().debug("Searched file `{}` for node `{}`".format(file, node_address))
      return data


@@ -157,7 +156,7 @@ def construct_include(loader: FameYamlLoader, args: yaml.Node) -> Any:
          with open(file_name, "r") as open_file:
              data = read_data_from_file(open_file, nodes, loader.path_resolver)
              joined_data = join_data(data, joined_data)
-     log.debug("Joined all files `{}` to joined data `{}`".format(files, joined_data))
+     log().debug("Joined all files `{}` to joined data `{}`".format(files, joined_data))
      return joined_data


@@ -166,7 +165,7 @@ FameYamlLoader.add_constructor("!include", construct_include)

  def load_yaml(yaml_file_path: Path, path_resolver=PathResolver()):
      """Loads the yaml file from given `yaml_file_path` and returns its content"""
-     log.info("Loading yaml from {}".format(yaml_file_path))
+     log().info("Loading yaml from {}".format(yaml_file_path))
      with open(yaml_file_path, "r") as configfile:
          data = yaml.load(configfile, make_yaml_loader_builder(path_resolver))
      return data
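
Functionally, the loader is unchanged: it still resolves `!include` directives given as a scalar, a sequence, or a mapping, skips files carrying the IGNORE_ prefix, and only swaps the module-level `logging` calls for the shared `log()` accessor. A hedged usage sketch; the file names and node address are invented, and the two-element sequence form passes a colon-separated node address as implied by read_args/split_nodes above:

from pathlib import Path

from fameio.source.loader import load_yaml

# scenario.yaml (hypothetical content):
#   Schema: !include "schema.yaml"
#   Agents: !include ["agents/*.yaml", "Agents"]   # glob pattern plus node address "Agents"
content = load_yaml(Path("scenario.yaml"))  # files named IGNORE_*.yaml would be skipped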
fameio/source/logs.py CHANGED
@@ -1,59 +1,114 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0

- import logging as log
+ import logging as pylog
+ from enum import Enum
  from pathlib import Path
- from typing import NoReturn
+ from typing import NoReturn, Optional, List

- LOG_LEVELS = {
-     "critical": log.CRITICAL,
-     "error": log.ERROR,
-     "warn": log.WARNING,
-     "warning": log.WARNING,
-     "info": log.INFO,
-     "debug": log.DEBUG,
- }
+
+ class LogLevel(Enum):
+     """Levels for Logging"""
+
+     PRINT = 100
+     CRITICAL = pylog.CRITICAL
+     ERROR = pylog.ERROR
+     WARN = pylog.WARNING
+     WARNING = pylog.WARNING
+     INFO = pylog.INFO
+     DEBUG = pylog.DEBUG
+
+
+ _loggers: List[pylog.Logger] = []
+ _handlers: List[pylog.Handler] = []
+
+ _FORMAT_NORMAL = "%(asctime)s — %(levelname)s — %(message)s"  # noqa
+ _FORMAT_DETAILLED = "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s"  # noqa
+ _TIME_FORMAT = "%H:%M:%S"
+
+ _INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: {}"
+ _WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log level `WARNING`"
+
+ LOGGER_NAME = "fameio"
+ DEFAULT_LOG_LEVEL = LogLevel.WARNING
+
+
+ def log() -> pylog.Logger:
+     """Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`"""
+     if not _loggers:
+         fameio_logger(DEFAULT_LOG_LEVEL.name)
+         pylog.warning(_WARN_NOT_INITIALIZED)
+     return _loggers[0]


  def log_and_raise_critical(message: str) -> NoReturn:
      """Raises a critical error and logs with given `error_message`"""
-     log.critical(message)
+     log().critical(message)
      raise Exception(message)


  def log_error_and_raise(exception: Exception) -> NoReturn:
      """Raises the specified `exception` and logs an error with the same `message`"""
-     log.error(str(exception))
+     log().error(str(exception))
      raise exception


- def set_up_logger(level_name: str, file_name: Path) -> None:
-     """Uses existing logger or sets up logger"""
-     if not log.getLogger().hasHandlers():
-         _set_up_new_logger(level_name, file_name)
+ def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
+     """
+     Ensures a logger for fameio is present and uses the specified options
+
+     Args:
+         log_level_name: one of Python's official logging level names, e.g. "INFO"
+         file_name: if present, logs are also written to the specified file path
+     """
+     log_level = LogLevel[log_level_name.upper()]
+     logger = _get_logger(log_level)

+     formatter = _get_formatter(log_level)
+     _add_handler(logger, pylog.StreamHandler(), formatter)
+     if file_name:
+         _add_handler(logger, pylog.FileHandler(file_name, mode="w"), formatter)

- def _set_up_new_logger(level_name: str, file_name: Path) -> None:
-     """Sets up new logger which always writes to the console and if provided also to `file_name`"""
-     level = LOG_LEVELS.get(level_name.lower())
-     if level is log.DEBUG:
-         formatter_string = (
-             "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s"  # noqa
-         )
+     if _loggers:
+         pylog.info(_INFO_UPDATING_LOG_LEVEL.format(log_level_name))
+         _loggers[0] = logger
      else:
-         formatter_string = "%(asctime)s — %(levelname)s — %(message)s"  # noqa
+         _loggers.append(logger)

-     log_formatter = log.Formatter(formatter_string, "%H:%M:%S")

-     root_logger = log.getLogger()
-     root_logger.setLevel(level)
+ def _get_logger(level: LogLevel) -> pylog.Logger:
+     """
+     Returns fameio logger with given log level, without any handler and not propagating to parent
+
+     Args:
+         level: the log level to be set
+
+     Returns:
+         logger for fameio with specified level
+     """
+     logger = pylog.getLogger(LOGGER_NAME)
+     logger.setLevel(level.value)
+     logger.propagate = False
+     for handler in _handlers:
+         logger.removeHandler(handler)
+     _handlers.clear()
+     return logger
+
+
+ def _get_formatter(level: LogLevel) -> pylog.Formatter:
+     """
+     Returns a log formatter depending on the given log `level`
+     Args:
+         level: this log level determines how detailed the logger's output is
+     Returns:
+         new log formatter
+     """
+     return pylog.Formatter(_FORMAT_DETAILLED if level is LogLevel.DEBUG else _FORMAT_NORMAL, _TIME_FORMAT)

-     if file_name:
-         file_handler = log.FileHandler(file_name, mode="w")
-         file_handler.setFormatter(log_formatter)
-         root_logger.addHandler(file_handler)

-     console_handler = log.StreamHandler()
-     console_handler.setFormatter(log_formatter)
-     root_logger.addHandler(console_handler)
+ def _add_handler(logger: pylog.Logger, handler: pylog.Handler, formatter: pylog.Formatter) -> None:
+     """Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list"""
+     handler.setFormatter(formatter)
+     _handlers.append(handler)
+     logger.addHandler(handler)
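
For callers, the migration is from the removed `set_up_logger` to `fameio_logger`, and from module-level `logging` calls to the `log()` accessor. A minimal sketch under those new APIs; the log file name is invented:

from pathlib import Path

from fameio.source.logs import fameio_logger, log

fameio_logger("DEBUG", file_name=Path("fameio.log"))  # console plus file handler, detailed format at DEBUG
log().debug("goes to console and fameio.log")

fameio_logger("INFO")  # reconfiguring replaces the tracked handlers instead of stacking new ones
log().info("console only from here on")

Calling `log()` before any setup falls back to the default `WARNING` level and emits a one-time warning, per the `_WARN_NOT_INITIALIZED` branch above.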
fameio/source/results/conversion.py CHANGED
@@ -4,13 +4,12 @@

  import math
  from typing import Dict, Optional
- import logging as log

  import pandas as pd

  from fameio.source import FameTime
- from fameio.source.cli import TimeOptions, MergingOptions
- from fameio.source.logs import log_error_and_raise
+ from fameio.source.cli.options import TimeOptions, MergingOptions
+ from fameio.source.logs import log_error_and_raise, log
  from fameio.source.time import ConversionException

  _ERR_UNIMPLEMENTED = "Time conversion mode '{}' not implemented."
@@ -28,7 +27,7 @@ def apply_time_merging(data: Dict[Optional[str], pd.DataFrame], config: Optional
          Nothing - data is modified inplace
      """
      if config:
-         log.debug(f"Grouping TimeSteps...")
+         log().debug(f"Grouping TimeSteps...")
          offset = config[MergingOptions.STEPS_BEFORE]
          period = config[MergingOptions.STEPS_AFTER] + config[MergingOptions.STEPS_BEFORE] + 1
          first_positive_focal_point = config[MergingOptions.FOCAL_POINT] % period
@@ -56,26 +55,25 @@ def merge_time(time_step: int, focal_time: int, offset: int, period: int) -> int
      return math.floor((time_step + offset - focal_time) / period) * period + focal_time


- def apply_time_option(data: Dict[Optional[str], pd.DataFrame], mode_name: str) -> None:
+ def apply_time_option(data: Dict[Optional[str], pd.DataFrame], mode: TimeOptions) -> None:
      """
      Applies time option based on given `mode` inplace of given `data`

      Args:
          data: one or multiple DataFrames of time series; column `TimeStep` might be modified (depending on mode)
-         mode_name: name of time conversion mode (derived from Enum)
+         mode: time conversion mode (derived from Enum)

      Returns:
          Nothing - data is modified inplace
      """
-     mode_name = mode_name.upper()
-     if mode_name == TimeOptions.INT.name:
-         log.debug("No time conversion...")
-     elif mode_name == TimeOptions.UTC.name:
+     if mode == TimeOptions.INT:
+         log().debug("No time conversion...")
+     elif mode == TimeOptions.UTC:
          _convert_time_index(data, "%Y-%m-%d %H:%M:%S")
-     elif mode_name == TimeOptions.FAME.name:
+     elif mode == TimeOptions.FAME:
          _convert_time_index(data, "%Y-%m-%d_%H:%M:%S")
      else:
-         log_error_and_raise(ConversionException(_ERR_UNIMPLEMENTED.format(mode_name)))
+         log_error_and_raise(ConversionException(_ERR_UNIMPLEMENTED.format(mode)))



@@ -90,7 +88,7 @@ def _convert_time_index(data: Dict[Optional[str], pd.DataFrame], datetime_format
      Returns:
          Nothing - data is modified inplace
      """
-     log.debug(f"Converting TimeStep to format '{datetime_format}'...")
+     log().debug(f"Converting TimeStep to format '{datetime_format}'...")
      for _, df in data.items():
          index_columns = df.index.names
          df.reset_index(inplace=True)
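
The merge arithmetic in merge_time snaps every TimeStep into the window of its nearest focal point. A small standalone check, reusing the function body verbatim from the diff above with invented merging options (FOCAL_POINT=4, STEPS_BEFORE=1, STEPS_AFTER=1, hence period=3, offset=1, and first positive focal point 4 % 3 = 1, as computed in apply_time_merging):

import math

def merge_time(time_step: int, focal_time: int, offset: int, period: int) -> int:
    # copied from fameio/source/results/conversion.py above
    return math.floor((time_step + offset - focal_time) / period) * period + focal_time

for t in range(2, 9):
    print(t, "->", merge_time(t, focal_time=1, offset=1, period=3))
# 2 -> 1;  3, 4, 5 -> 4;  6, 7, 8 -> 7: each focal point absorbs one step before and one after it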
fameio/source/results/csv_writer.py CHANGED
@@ -1,14 +1,16 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0

- import logging as log
  from pathlib import Path
  from typing import Dict, Union

  import pandas as pd

+ from fameio.source.logs import log
  from fameio.source.results.data_transformer import INDEX
+ from fameio.source.series import TimeSeriesManager
+ from fameio.source.tools import ensure_path_exists


  class CsvWriter:
@@ -28,15 +30,15 @@ class CsvWriter:
          """Returns name of the output folder derived either from the specified `config_output` or `input_file_path`"""
          if config_output:
              output_folder_name = config_output
-             log.info(CsvWriter._INFO_USING_PATH.format(config_output))
+             log().info(CsvWriter._INFO_USING_PATH.format(config_output))
          else:
              output_folder_name = input_file_path.stem
-             log.info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
+             log().info(CsvWriter._INFO_USING_DERIVED_PATH.format(output_folder_name))
          return Path(output_folder_name)

      def _create_output_folder(self) -> None:
          """Creates output folder if not yet present"""
-         log.debug("Creating output folder if required...")
+         log().debug("Creating output folder if required...")
          if not self._output_folder.is_dir():
              self._output_folder.mkdir(parents=True)

@@ -52,6 +54,15 @@ class CsvWriter:
          identifier = self._get_identifier(agent_name, column_name)
          self._write_data_frame(column_data, identifier)

+     def write_time_series_to_disk(self, timeseries_manager: TimeSeriesManager) -> None:
+         """Writes time_series of given `timeseries_manager` to disk"""
+         for _, name, data in timeseries_manager.get_all_series():
+             if data is not None:
+                 target_path = Path(self._output_folder, name)
+                 ensure_path_exists(target_path.parent)
+                 # noinspection PyTypeChecker
+                 data.to_csv(path_or_buf=target_path, sep=";", header=None, index=None)
+
      @staticmethod
      def _get_identifier(agent_name: str, column_name: str, agent_id: str = None) -> str:
          """Returns unique identifier for given `agent_name` and (optional) `agent_id` and `column_name`"""