fameio 3.1.1__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. fameio/cli/convert_results.py +6 -4
  2. fameio/cli/make_config.py +6 -4
  3. fameio/cli/parser.py +41 -29
  4. fameio/input/__init__.py +1 -1
  5. fameio/input/loader/__init__.py +9 -7
  6. fameio/input/loader/controller.py +59 -8
  7. fameio/input/loader/loader.py +14 -7
  8. fameio/input/metadata.py +35 -13
  9. fameio/input/resolver.py +5 -4
  10. fameio/input/scenario/agent.py +50 -16
  11. fameio/input/scenario/attribute.py +14 -15
  12. fameio/input/scenario/contract.py +152 -43
  13. fameio/input/scenario/exception.py +44 -18
  14. fameio/input/scenario/fameiofactory.py +63 -7
  15. fameio/input/scenario/generalproperties.py +17 -6
  16. fameio/input/scenario/scenario.py +111 -28
  17. fameio/input/scenario/stringset.py +27 -8
  18. fameio/input/schema/agenttype.py +21 -2
  19. fameio/input/schema/attribute.py +91 -22
  20. fameio/input/schema/java_packages.py +8 -5
  21. fameio/input/schema/schema.py +35 -9
  22. fameio/input/validator.py +22 -15
  23. fameio/input/writer.py +136 -36
  24. fameio/logs.py +3 -31
  25. fameio/output/__init__.py +5 -1
  26. fameio/output/agent_type.py +86 -23
  27. fameio/output/conversion.py +47 -29
  28. fameio/output/csv_writer.py +88 -18
  29. fameio/output/data_transformer.py +7 -14
  30. fameio/output/input_dao.py +62 -21
  31. fameio/output/output_dao.py +26 -4
  32. fameio/output/reader.py +58 -13
  33. fameio/output/yaml_writer.py +15 -6
  34. fameio/scripts/__init__.py +9 -2
  35. fameio/scripts/convert_results.py +123 -50
  36. fameio/scripts/convert_results.py.license +1 -1
  37. fameio/scripts/exception.py +7 -0
  38. fameio/scripts/make_config.py +34 -12
  39. fameio/scripts/make_config.py.license +1 -1
  40. fameio/series.py +117 -33
  41. fameio/time.py +74 -17
  42. fameio/tools.py +7 -5
  43. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/METADATA +19 -13
  44. fameio-3.2.0.dist-info/RECORD +56 -0
  45. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/WHEEL +1 -1
  46. CHANGELOG.md +0 -288
  47. fameio-3.1.1.dist-info/RECORD +0 -56
  48. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSE.txt +0 -0
  49. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  50. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  51. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  52. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/entry_points.txt +0 -0
fameio/output/input_dao.py CHANGED
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 import ast
-from typing import Any, Optional
+from typing import Any
 
 from fameprotobuf.data_storage_pb2 import DataStorage
 from fameprotobuf.field_pb2 import NestedField
@@ -10,11 +10,12 @@ from fameprotobuf.input_file_pb2 import InputData
 
 from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario
 from fameio.input.schema import Schema, AttributeSpecs, AttributeType
-from fameio.logs import log
-from fameio.series import TimeSeriesManager
+from fameio.logs import log_error
+from fameio.output import OutputError
+from fameio.series import TimeSeriesManager, TimeSeriesError
 
 
-class InputConversionError(Exception):
+class InputConversionError(OutputError):
     """Indicates an error during reconstruction of input from its protobuf representation"""
 
 
@@ -23,6 +24,8 @@ class InputDao:
 
     _ERR_NO_INPUTS = "No input data found on file."
     _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."
+    _ERR_NO_SCHEMA = "No schema found on file - cannot recover inputs."
+    _ERR_SERIES_MISSING = "References time series '{}' was not registered on file."
 
     _FIELD_NAME_MAP: dict = {
         AttributeType.STRING: "string_values",
@@ -39,7 +42,6 @@ class InputDao:
     def __init__(self) -> None:
         self._inputs: list[InputData] = []
         self._timeseries_manager: TimeSeriesManager = TimeSeriesManager()
-        self._schema: Optional[Schema] = None
 
     def store_inputs(self, data_storages: list[DataStorage]) -> None:
         """
@@ -58,16 +60,17 @@ class InputDao:
             recovered timeseries and scenario
 
         Raises:
-            InputConversionException: if inputs could not be recovered
+            InputConversionError: if inputs could not be recovered, logged with level "ERROR"
+            InputError: if scenario in file is incompatible with this version of fameio, logged with level "ERROR"
         """
         input_data = self._get_input_data()
-        self._schema = self._get_schema(input_data)
-        scenario = Scenario(self._schema, self._get_general_properties(input_data))
+        schema = self._get_schema(input_data)
+        scenario = Scenario(schema, self._get_general_properties(input_data))
         for contract in self._get_contracts(input_data):
             scenario.add_contract(contract)
 
         self._init_timeseries(input_data)
-        for agent in self._get_agents(input_data):
+        for agent in self._get_agents(input_data, schema):
             scenario.add_agent(agent)
 
         return self._timeseries_manager, scenario
@@ -77,14 +80,12 @@
         Check that exactly one previously extracted input data exist, otherwise raises an exception
 
         Raises:
-            InputConversionException: if no or more than one input is present
+            InputConversionException: if no or more than one input is present, logged with level "ERROR"
         """
         if not self._inputs:
-            log().error(self._ERR_NO_INPUTS)
-            raise InputConversionError(self._ERR_NO_INPUTS)
+            raise log_error(InputConversionError(self._ERR_NO_INPUTS))
        if len(self._inputs) > 1:
-            log().error(self._ERR_MULTIPLE_INPUTS)
-            raise InputConversionError(self._ERR_MULTIPLE_INPUTS)
+            raise log_error(InputConversionError(self._ERR_MULTIPLE_INPUTS))
        return self._inputs[0]
 
     @staticmethod
@@ -122,8 +123,21 @@ class InputDao:
         """Read timeseries from given `input_data` and initialise TimeSeriesManager"""
         self._timeseries_manager.reconstruct_time_series(list(input_data.time_series))
 
-    def _get_agents(self, input_data: InputData) -> list[Agent]:
-        """Read and return Agents from given `input_data`"""
+    def _get_agents(self, input_data: InputData, schema: Schema) -> list[Agent]:
+        """
+        Read and return Agents from given `input_data`
+
+        Args:
+            input_data: to read agents from
+            schema: corresponding to the agent definitions
+
+        Returns:
+            all extracted agents
+
+        Raises:
+            InputError: if agents cannot be reconstructed, logged with level "ERROR"
+            InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+        """
         agents = []
         for agent_dao in input_data.agents:
             agent = Agent(
@@ -132,29 +146,56 @@ class InputDao:
                 metadata=ast.literal_eval(agent_dao.metadata) if agent_dao.metadata else None,
             )
             attribute_dict = self._get_attributes(
-                list(agent_dao.fields), self._schema.agent_types[agent_dao.class_name].attributes
+                list(agent_dao.fields), schema.agent_types[agent_dao.class_name].attributes
             )
             agent.init_attributes_from_dict(attribute_dict)
             agents.append(agent)
         return agents
 
     def _get_attributes(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, Any]:
-        """Read and return Attributes as Dictionary from given list of fields"""
+        """
+        Read and return all Attributes as Dictionary from given list of fields
+
+        Args:
+            fields: data fields representing attributes
+            schematics: description of the attributes associated by name
+
+        Returns:
+            all recovered attributes and their associated values
+
+        Raises:
+            InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+        """
        attributes: dict[str, Any] = {}
         for field in fields:
             attributes[field.field_name] = self._get_field_value(field, schematics[field.field_name])
         return attributes
 
     def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
-        """Extracts and returns value(s) of given `field`"""
+        """
+        Extracts and returns value(s) of given `field`
+
+        Args:
+            field: to extract the value(s) from
+            schematic: describing the data type of this field
+
+        Returns:
+            value(s) of provided field
+
+        Raises:
+            InputConversionError: if TimeSeries could not be found, logged with level "ERROR"
+        """
         attribute_type: AttributeType = schematic.attr_type
-        value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
         if attribute_type is AttributeType.TIME_SERIES:
-            return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
+            try:
+                return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
+            except TimeSeriesError as e:
+                raise log_error(InputConversionError(self._ERR_SERIES_MISSING.format(field.series_id))) from e
         if attribute_type is AttributeType.BLOCK:
             if schematic.is_list:
                 return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
             return self._get_attributes(list(field.fields), schematic.nested_attributes)
+        value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
         if schematic.is_list:
             return list(value)
         return list(value)[0]
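
Note: the recurring change in this release replaces the old two-step "log, then raise" with a single `raise log_error(...)` (or `raise log_critical(...)`). As a minimal sketch of that idiom, assuming only what the diff shows, `log_error` logs the exception's message and hands the exception back so it can be raised in the same statement; this is an illustration of the pattern, not fameio's actual `fameio.logs` implementation:

import logging

def log_error(exception: Exception) -> Exception:
    # Sketch: log the message at ERROR level and return the exception,
    # so the caller can write `raise log_error(SomeError(...))` in one line.
    logging.getLogger("fameio").error(str(exception))
    return exception

class InputConversionError(Exception):
    """Stand-in for fameio's InputConversionError (for this sketch only)"""

def check_inputs(inputs: list) -> None:
    if not inputs:
        # logs "No input data found on file." and raises in a single statement
        raise log_error(InputConversionError("No input data found on file."))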
fameio/output/output_dao.py CHANGED
@@ -1,7 +1,9 @@
-# SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-from typing import Iterable, Optional
+from __future__ import annotations
+
+from typing import Iterable
 
 import pandas as pd
 from fameprotobuf.data_storage_pb2 import DataStorage
@@ -15,6 +17,16 @@ class OutputDAO:
     """Grants convenient access to content of Output protobuf messages for given DataStorages"""
 
     def __init__(self, data_storages: list[DataStorage], agent_type_log: AgentTypeLog) -> None:
+        """
+        Initialise a new OutputDAO
+
+        Args:
+            data_storages: to grant access to by this DAO
+            agent_type_log: new types of agents that might come up in the associated data_storages
+
+        Raises:
+            AgentTypeError: if duplicate agent definitions occur, logged with level "ERROR"
+        """
         self._agent_type_log = agent_type_log
         outputs = self._extract_output_from_data_storages(data_storages)
         self._agent_type_log.update_agents(self._extract_new_agent_types(outputs))
@@ -40,7 +52,7 @@ class OutputDAO:
         list_of_series_lists = [output.series for output in outputs if len(output.series) > 0]
         list_of_series = [series for sublist in list_of_series_lists for series in sublist]
 
-        series_per_class_name = {}
+        series_per_class_name: dict[str, list[Output.Series]] = {}
         for series in list_of_series:
             if series.class_name not in series_per_class_name:
                 series_per_class_name[series.class_name] = []
@@ -59,11 +71,21 @@ class OutputDAO:
         sorted_dict = sorted(length_per_agent_types.items(), key=lambda item: item[1])
         return [agent_name for agent_name, _ in sorted_dict]
 
-    def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[Optional[str], pd.DataFrame]:
+    def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[str | None, pd.DataFrame]:
         """
         Returns DataFrame(s) containing all data of given `agent` - data is removed after the first call
         Depending on the chosen ResolveOption the dict contains one DataFrame for the simple (and merged columns),
         or, in `SPLIT` mode, additional DataFrames mapped to each complex column's name.
+
+        Args:
+            agent_name: name of agent whose data are to be returned
+            data_transformer: to handle data transformation
+
+        Returns:
+            output data for requested agent: on data frame for all simple columns, any one for each complex column
+
+        Raises:
+            AgentTypeError: if type of agent was not yet registered, logged with level "ERROR"
         """
         agent_series = self._all_series.pop(agent_name) if agent_name in self._all_series else []
         agent_type = self._agent_type_log.get_agent_type(agent_name)
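
Taken together with the reader changes below, the typical consumption pattern (as used by the rewritten convert_results script later in this diff) is to poll a `Reader` for lists of DataStorage messages and wrap each batch in an `OutputDAO`. A hedged usage sketch; the file name is hypothetical and passing `None` as the agent filter is an assumption:

from fameio.output.agent_type import AgentTypeLog
from fameio.output.output_dao import OutputDAO
from fameio.output.reader import Reader

# keyword argument name taken from the convert_results.py diff below; None = no agent filter (assumption)
agent_type_log = AgentTypeLog(_agent_name_filter_list=None)

with open("simulation.pb", "rb") as stream:  # hypothetical protobuf result file
    reader = Reader.get_reader(file=stream, read_single=False)
    while data_storages := reader.read():  # an empty list ends the loop
        output = OutputDAO(data_storages, agent_type_log)
        for agent_name in output.get_sorted_agents_to_extract():
            ...  # fetch DataFrames via output.get_agent_data(agent_name, data_transformer)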
fameio/output/reader.py CHANGED
@@ -11,21 +11,24 @@ from fameprotobuf.data_storage_pb2 import DataStorage
 from google.protobuf.message import DecodeError
 
 import fameio
-from fameio.logs import log, log_critical_and_raise
+from fameio.logs import log, log_critical, log_error
+from fameio.output import OutputError
 
 
-class ProtobufReaderError(Exception):
+class ProtobufReaderError(OutputError):
     """Indicates an error while reading a protobuf file"""
 
 
 class Reader(ABC):
     """Abstract base class for protobuf file readers"""
 
-    _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."  # noqa
+    _ERR_FILE_READ = "Could not read file content."
+    _ERR_HEADER_UNRECOGNISED = ""
     _ERR_FILE_CORRUPT_NEGATIVE_LENGTH = "Corrupt file, message length must be positive."
     _ERR_FILE_CORRUPT_MISSING_DATA = "Trying to read corrupt file caused by inconsistent message length."
     _ERR_UNSUPPORTED_MODE = "Ignoring memory saving mode: not supported for files created with `fame-core<1.4`."
     _ERR_PARSING_FAILED = "File Corrupt. Could not parse file content."
+    _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."
     _DEBUG_FILE_END_REACHED = "Reached expected end of file."
 
     _HEADER_LENGTH = 30
@@ -44,7 +47,7 @@ class Reader(ABC):
     @staticmethod
     @final
     def _raise_error(error_message: str) -> NoReturn:
-        log_critical_and_raise(ProtobufReaderError(error_message))
+        raise log_critical(ProtobufReaderError(error_message))
 
     def __init__(self, file: IO, read_single) -> None:
         self._file = file
@@ -52,7 +55,15 @@ class Reader(ABC):
 
     @abstractmethod
     def read(self) -> list[DataStorage]:
-        """Reads associated filestream and returns one or multiple DataStorage(s) or empty list"""
+        """
+        Reads associated filestream and returns one or multiple DataStorage(s) or empty list
+
+        Returns:
+            one or multiple DataStorage protobuf object(s) read from file
+
+        Raises:
+            ProtobufReaderError: if file is corrupted in any way, logged with level "ERROR"
+        """
 
     @staticmethod
     def get_reader(file: IO, read_single: bool = False) -> Reader:
@@ -65,13 +76,26 @@
 
         Returns:
             Reader that can read the specified file
+
+        Raises:
+            ProtobufReaderError: if file has an unsupported header,logged with level "CRITICAL"
         """
         log().debug("Reading file headers...")
         try:
-            header = file.read(Reader._HEADER_LENGTH).decode(Reader.HEADER_ENCODING)
-            return Reader._READER_HEADERS[header](file, read_single)
-        except (KeyError, UnicodeDecodeError):
-            return Reader._READER_HEADERS[None](file, read_single)
+            header_content = file.read(Reader._HEADER_LENGTH)
+        except ValueError as e:
+            raise log_critical(ProtobufReaderError(Reader._ERR_FILE_READ)) from e
+
+        try:
+            header = header_content.decode(Reader.HEADER_ENCODING)
+        except UnicodeDecodeError:
+            header = None
+            log().warning(Reader._WARN_NO_HEADER)
+
+        if header not in Reader._READER_HEADERS:
+            header = None
+
+        return Reader._READER_HEADERS[header](file, read_single)
 
     @final
     def _read_message_length(self) -> int:
@@ -85,29 +109,50 @@ class Reader(ABC):
         return message_length_int
 
     @final
-    def _read_data_storage_message(self, message_length: int = None) -> DataStorage:
+    def _read_data_storage_message(self, message_length: int | None = None) -> DataStorage:
         """
         Returns given `data_storage` read from current file position and following `message_length` bytes.
         If `message_length` is omitted, the rest of the file is read. If no message is found, None is returned.
+
+        Args:
+            message_length: amounts of bytes to read - must correspond to the next DataStorage message in file
+
+        Returns:
+            Read and de-serialised DataStorage
+
+        Raises:
+            ProtobufReaderError: if message_length is corrupt or file is corrupt, logged with level "ERROR"
         """
         if message_length is None:
             message = self._file.read()
         elif message_length > 0:
             message = self._file.read(message_length)
         else:
-            raise IOError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH)
+            raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH))
         if message_length and len(message) != message_length:
-            log().error(self._ERR_FILE_CORRUPT_MISSING_DATA)
+            raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_MISSING_DATA))
         return self._parse_to_data_storage(message) if message else None
 
     @staticmethod
     @final
     def _parse_to_data_storage(message: bytes) -> DataStorage:
+        """
+        De-serialises a binary message into a DataStorage protobuf object
+
+        Args:
+            message: to be convert
+
+        Returns:
+            DataStorage initialised from the given message
+
+        Raises:
+            ProtobufReaderError: if message could not be converted, logged with level "ERROR"
+        """
         data_storage = DataStorage()
         try:
             data_storage.ParseFromString(message)
         except DecodeError as e:
-            raise IOError(Reader._ERR_PARSING_FAILED) from e
+            raise log_error(ProtobufReaderError(Reader._ERR_PARSING_FAILED)) from e
         return data_storage
 
 
fameio/output/yaml_writer.py CHANGED
@@ -5,10 +5,16 @@ from pathlib import Path
 
 import yaml
 
-from fameio.logs import log
+from fameio.logs import log, log_error
+from fameio.output import OutputError
 
-ERR_WRITE_EXCEPTION = "Failed to save dictionary to YAML file `{}`"
-INFO_DESTINATION = "Saving scenario to file at {}"
+_ERR_OPEN_FILE = "Could not open file for reading: '{}'"
+
+_INFO_DESTINATION = "Saving scenario to file at {}"
+
+
+class YamlWriterError(OutputError):
    """An error occurred during writing a YAML file"""
 
 
 def data_to_yaml_file(data: dict, file_path: Path) -> None:
@@ -18,10 +24,13 @@ def data_to_yaml_file(data: dict, file_path: Path) -> None:
     Args:
         data: to be saved to yaml file
         file_path: at which the file will be created
+
+    Raises:
+        YamlWriterError: if file could not be opened or written, logged with level "ERROR"
     """
-    log().info(INFO_DESTINATION.format(file_path))
+    log().info(_INFO_DESTINATION.format(file_path))
     try:
         with open(file_path, "w", encoding="utf-8") as f:
             yaml.dump(data, f, sort_keys=False, encoding="utf-8")
-    except Exception as e:
-        raise RuntimeError(ERR_WRITE_EXCEPTION.format(file_path)) from e
+    except OSError as e:
+        raise log_error(YamlWriterError(_ERR_OPEN_FILE.format(file_path))) from e
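
A short usage sketch for the rewritten writer, assuming only what the diff shows: `data_to_yaml_file` now raises `YamlWriterError` (an `OutputError`) instead of a bare `RuntimeError` when the target cannot be written; the path and data below are illustrative:

from pathlib import Path

from fameio.output.yaml_writer import data_to_yaml_file, YamlWriterError

try:
    data_to_yaml_file({"Schema": {}, "Agents": []}, Path("./recovered/scenario.yaml"))
except YamlWriterError:
    # raised (and already logged at ERROR level) if the file cannot be opened or written
    ...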
fameio/scripts/__init__.py CHANGED
@@ -3,6 +3,7 @@ import sys
 
 from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
 from fameio.scripts.convert_results import run as convert_results
+from fameio.scripts.exception import ScriptError
 from fameio.scripts.make_config import run as make_config
 from fameio.cli.convert_results import handle_args as handle_convert_results_args
 from fameio.cli.make_config import handle_args as handle_make_config_args
@@ -11,10 +12,16 @@ from fameio.cli.make_config import handle_args as handle_make_config_args
 # noinspection PyPep8Naming
 def makeFameRunConfig():
     run_config = handle_make_config_args(sys.argv[1:])
-    make_config(run_config)
+    try:
+        make_config(run_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
 
 
 # noinspection PyPep8Naming
 def convertFameResults():
     run_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
-    convert_results(run_config)
+    try:
+        convert_results(run_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
fameio/scripts/convert_results.py CHANGED
@@ -1,13 +1,18 @@
 #!/usr/bin/env python
+from __future__ import annotations
+
 import sys
 from pathlib import Path
+from typing import Any, BinaryIO
 
 import pandas as pd
 
+from fameio.cli import update_default_config
 from fameio.cli.convert_results import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
 from fameio.cli.options import Options
-from fameio.cli import update_default_config
-from fameio.logs import log_critical_and_raise, fameio_logger, log
+from fameio.input import InputError
+from fameio.logs import fameio_logger, log, log_error, log_critical
+from fameio.output import OutputError
 from fameio.output.agent_type import AgentTypeLog
 from fameio.output.conversion import apply_time_option, apply_time_merging
 from fameio.output.csv_writer import CsvWriter
@@ -16,53 +21,98 @@ from fameio.output.input_dao import InputDao
 from fameio.output.output_dao import OutputDAO
 from fameio.output.reader import Reader
 from fameio.output.yaml_writer import data_to_yaml_file
+from fameio.scripts.exception import ScriptError
 
-ERR_OUT_OF_MEMORY = "Out of memory. Retry result conversion using `-m` or `--memory-saving` option."
-ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."
+_ERR_OUT_OF_MEMORY = "Out of memory. Retry result conversion using `-m` or `--memory-saving` option."
+_ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."
+_ERR_FILE_OPEN_FAIL = "Could not open file: '{}'"
+_ERR_RECOVER_INPUT = "Could not recover inputs due to an incompatibility with this version of fameio."
+_ERR_FAIL = "Results conversion script failed."
 
-WARN_OUTPUT_MISSING = "Provided file did not contain any output data, only input recovery available."
+_WARN_OUTPUT_SUPPRESSED = "All output data suppressed by agent filter, but there is data available for agent types: {}"
+_WARN_OUTPUT_MISSING = "Provided file did not contain any output data, only input recovery available."
+_INFO_MEMORY_SAVING = "Memory saving mode enabled: Disable on conversion of small files for performance improvements."
 
-INFO_MEMORY_SAVING = "Memory saving mode enabled: Disable on conversion of small files for performance improvements."
-INFO_RECOVERY = "Recovering inputs..."
-INFO_CONVERSION = "Applying time conversion and merging options to extracted files..."
 
+def _read_and_extract_data(config: dict[Options, Any]) -> None:
+    """
+    Read protobuf file, extracts, converts, and saves the converted data; Returns false if no result data was found
 
-def _extract_data(config: dict) -> bool:
-    """Extracts, converts, and saves the converted data; Returns false if no result data was found"""
+    Args:
+        config: script configuration options
+
+    Raises:
+        OutputError: if file could not be opened or converted, logged with level "ERROR"
+    """
     file_path = Path(config[Options.FILE])
+    log().info("Opening file for reading...")
+    try:
+        with open(file_path, "rb") as file_stream:
+            _extract_and_convert_data(config, file_stream, file_path)
+    except OSError as ex:
+        raise log_error(OutputError(_ERR_FILE_OPEN_FAIL.format(file_path))) from ex
+
+
+def _extract_and_convert_data(config: dict[Options, Any], file_stream: BinaryIO, file_path: Path) -> None:
+    """
+    Extracts data from provided input file stream, converts it, and writes the result to output files
+
+    Args:
+        config: script configuration options
+        file_stream: opened input file
+        file_path: path to input file
+
+    Raises:
+        OutputError: if file could not be opened or converted, logged with level "ERROR"
+    """
+    log().info("Reading and extracting data...")
     output_writer = CsvWriter(config[Options.OUTPUT], file_path, config[Options.SINGLE_AGENT_EXPORT])
-    agent_type_log = AgentTypeLog(requested_agents=config[Options.AGENT_LIST])
+    agent_type_log = AgentTypeLog(_agent_name_filter_list=config[Options.AGENT_LIST])
     data_transformer = DataTransformer.build(config[Options.RESOLVE_COMPLEX_FIELD])
+    reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
+    input_dao = InputDao()
+    while data_storages := reader.read():
+        if config[Options.INPUT_RECOVERY]:
+            input_dao.store_inputs(data_storages)
+        output = OutputDAO(data_storages, agent_type_log)
+        for agent_name in output.get_sorted_agents_to_extract():
+            log().debug(f"Extracting data for {agent_name}...")
+            data_frames = output.get_agent_data(agent_name, data_transformer)
+            if not config[Options.MEMORY_SAVING]:
+                apply_time_merging(data_frames, config[Options.TIME_MERGING])
+                apply_time_option(data_frames, config[Options.TIME])
+            log().debug(f"Writing data for {agent_name}...")
+            output_writer.write_to_files(agent_name, data_frames)
+
+    if config[Options.INPUT_RECOVERY]:
+        _recover_inputs(config, input_dao)
+    if config[Options.MEMORY_SAVING]:
+        _memory_saving_apply_conversions(config, output_writer)
 
-    log().info("Reading and extracting data...")
-    with open(file_path, "rb") as file_stream:
-        reader = Reader.get_reader(file=file_stream, read_single=config[Options.MEMORY_SAVING])
-        input_dao = InputDao()
-        while data_storages := reader.read():
-            if config[Options.INPUT_RECOVERY]:
-                input_dao.store_inputs(data_storages)
-            output = OutputDAO(data_storages, agent_type_log)
-            for agent_name in output.get_sorted_agents_to_extract():
-                log().debug(f"Extracting data for {agent_name}...")
-                data_frames = output.get_agent_data(agent_name, data_transformer)
-                if not config[Options.MEMORY_SAVING]:
-                    apply_time_merging(data_frames, config[Options.TIME_MERGING])
-                    apply_time_option(data_frames, config[Options.TIME])
-                log().debug(f"Writing data for {agent_name}...")
-                output_writer.write_to_files(agent_name, data_frames)
+    if not agent_type_log.has_any_agent_type():
+        if len(agent_type_log.get_agents_with_output()) > 0:
+            log().warning(_WARN_OUTPUT_SUPPRESSED.format(agent_type_log.get_agents_with_output()))
+        else:
+            log().warning(_WARN_OUTPUT_MISSING)
+    log().info("Data conversion completed.")
 
-    if config[Options.INPUT_RECOVERY]:
-        _recover_inputs(config, input_dao)
-    if config[Options.MEMORY_SAVING]:
-        _memory_saving_apply_conversions(config, output_writer)
-    log().info("Data conversion completed.")
-    return agent_type_log.has_any_agent_type()
 
+def _recover_inputs(config: dict[Options, Any], input_dao: InputDao) -> None:
+    """
+    Reads scenario configuration from provided input_dao
 
-def _recover_inputs(config: dict, input_dao: InputDao) -> None:
-    """Reads scenario configuration from provided input_dao"""
-    log().info(INFO_RECOVERY)
-    timeseries, scenario = input_dao.recover_inputs()
+    Args:
+        config: script configuration options
+        input_dao: to recover the input data from
+
+    Raises:
+        OutputError: if inputs could not be recovered or saved to files, logged with level "ERROR"
+    """
+    log().info("Recovering inputs...")
+    try:
+        timeseries, scenario = input_dao.recover_inputs()
+    except InputError as ex:
+        raise log_error(OutputError(_ERR_RECOVER_INPUT)) from ex
     base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
     series_writer = CsvWriter(
         config_output=Path(base_path, "./recovered"), input_file_path=Path("./"), single_export=False
@@ -71,31 +121,54 @@ def _recover_inputs(config: dict, input_dao: InputDao) -> None:
     data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))
 
 
-def _memory_saving_apply_conversions(config: dict, output_writer: CsvWriter) -> None:
-    """Rewrite result files in memory saving mode: apply time-merging and time conversion options on a per-file basis"""
-    log().info(INFO_CONVERSION)
+def _memory_saving_apply_conversions(config: dict[Options, Any], output_writer: CsvWriter) -> None:
+    """
+    Rewrite result files in memory saving mode: apply time-merging and time conversion options on a per-file basis
+
+    Args:
+        config: script configuration options
+        output_writer: to rewrite the previously written files
+
+    Raises:
+        OutputError: in case files could not be read, converted, or re-written, logged with level "ERROR"
+    """
+    log().info("Applying time conversion and merging options to extracted files...")
     written_files = output_writer.pop_all_file_paths()
     for agent_name, file_path in written_files.items():
-        parsed_data = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
+        parsed_data: dict[str | None, pd.DataFrame] = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
         apply_time_merging(parsed_data, config[Options.TIME_MERGING])
         apply_time_option(parsed_data, config[Options.TIME])
         output_writer.write_to_files(agent_name, parsed_data)
 
 
-def run(config: dict = None) -> None:
-    """Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s)"""
+def run(config: dict[Options, Any] | None = None) -> None:
+    """
+    Reads configured file in protobuf format and extracts its content to .CSV and .YAML file(s)
+
+    Args:
+        config: script configuration options
+
+    Raises:
+        ScriptError: if any kind of expected error or a memory error occurred, logged with level "CRITICAL"
+    """
     config = update_default_config(config, DEFAULT_CONFIG)
     fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])
     if config[Options.MEMORY_SAVING]:
-        log().info(INFO_MEMORY_SAVING)
+        log().info(_INFO_MEMORY_SAVING)
+
     try:
-        found_result_data = _extract_data(config)
-        if not found_result_data:
-            log().warning(WARN_OUTPUT_MISSING)
-    except MemoryError:
-        log_critical_and_raise(MemoryError(ERR_MEMORY_SEVERE if config[Options.MEMORY_SAVING] else ERR_OUT_OF_MEMORY))
+        try:
+            _read_and_extract_data(config)
+        except MemoryError as ex:
+            error = OutputError(_ERR_MEMORY_SEVERE if config[Options.MEMORY_SAVING] else _ERR_OUT_OF_MEMORY)
+            raise log_critical(error) from ex
+        except OutputError as ex:
+            raise log_critical(ScriptError(_ERR_FAIL)) from ex
 
 
 if __name__ == "__main__":
     run_config = handle_args(sys.argv[1:])
-    run(run_config)
+    try:
+        run(run_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
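
For completeness, a hedged sketch of calling the rewritten script programmatically; the option values are illustrative, while the `run`/`ScriptError` behaviour and the default-filling via DEFAULT_CONFIG are taken from the diff above:

from fameio.cli.options import Options
from fameio.scripts.convert_results import run
from fameio.scripts.exception import ScriptError

config = {
    Options.FILE: "simulation.pb",  # hypothetical protobuf result file
    Options.MEMORY_SAVING: False,
    Options.INPUT_RECOVERY: True,
}

try:
    run(config)  # options not given here are filled in from DEFAULT_CONFIG
except ScriptError:
    # expected conversion errors are wrapped into ScriptError and logged with level "CRITICAL"
    raise SystemExit(1)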
fameio/scripts/convert_results.py.license CHANGED
@@ -1,3 +1,3 @@
-SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 
 SPDX-License-Identifier: Apache-2.0
fameio/scripts/exception.py ADDED
@@ -0,0 +1,7 @@
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+
+
+class ScriptError(Exception):
+    """Any kind of expected error that occurred during execution of FAME-Io scripts"""