fameio 3.1.1__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. fameio/cli/convert_results.py +10 -10
  2. fameio/cli/make_config.py +9 -9
  3. fameio/cli/options.py +6 -4
  4. fameio/cli/parser.py +87 -51
  5. fameio/cli/reformat.py +58 -0
  6. fameio/input/__init__.py +4 -4
  7. fameio/input/loader/__init__.py +13 -13
  8. fameio/input/loader/controller.py +64 -18
  9. fameio/input/loader/loader.py +25 -16
  10. fameio/input/metadata.py +57 -38
  11. fameio/input/resolver.py +9 -10
  12. fameio/input/scenario/agent.py +62 -26
  13. fameio/input/scenario/attribute.py +93 -40
  14. fameio/input/scenario/contract.py +160 -56
  15. fameio/input/scenario/exception.py +41 -18
  16. fameio/input/scenario/fameiofactory.py +57 -6
  17. fameio/input/scenario/generalproperties.py +22 -12
  18. fameio/input/scenario/scenario.py +117 -38
  19. fameio/input/scenario/stringset.py +29 -11
  20. fameio/input/schema/agenttype.py +27 -10
  21. fameio/input/schema/attribute.py +108 -45
  22. fameio/input/schema/java_packages.py +14 -12
  23. fameio/input/schema/schema.py +39 -15
  24. fameio/input/validator.py +198 -54
  25. fameio/input/writer.py +137 -46
  26. fameio/logs.py +28 -47
  27. fameio/output/__init__.py +5 -1
  28. fameio/output/agent_type.py +89 -28
  29. fameio/output/conversion.py +52 -37
  30. fameio/output/csv_writer.py +107 -27
  31. fameio/output/data_transformer.py +17 -24
  32. fameio/output/execution_dao.py +170 -0
  33. fameio/output/input_dao.py +71 -33
  34. fameio/output/output_dao.py +33 -11
  35. fameio/output/reader.py +64 -21
  36. fameio/output/yaml_writer.py +16 -8
  37. fameio/scripts/__init__.py +22 -4
  38. fameio/scripts/convert_results.py +126 -52
  39. fameio/scripts/convert_results.py.license +1 -1
  40. fameio/scripts/exception.py +7 -0
  41. fameio/scripts/make_config.py +34 -13
  42. fameio/scripts/make_config.py.license +1 -1
  43. fameio/scripts/reformat.py +71 -0
  44. fameio/scripts/reformat.py.license +3 -0
  45. fameio/series.py +174 -59
  46. fameio/time.py +79 -25
  47. fameio/tools.py +48 -8
  48. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/METADATA +50 -34
  49. fameio-3.3.0.dist-info/RECORD +60 -0
  50. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/WHEEL +1 -1
  51. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/entry_points.txt +1 -0
  52. CHANGELOG.md +0 -288
  53. fameio-3.1.1.dist-info/RECORD +0 -56
  54. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSE.txt +0 -0
  55. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  56. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  57. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
fameio/output/input_dao.py CHANGED
@@ -2,7 +2,7 @@
  #
  # SPDX-License-Identifier: Apache-2.0
  import ast
- from typing import Any, Optional
+ from typing import Any

  from fameprotobuf.data_storage_pb2 import DataStorage
  from fameprotobuf.field_pb2 import NestedField
@@ -10,19 +10,22 @@ from fameprotobuf.input_file_pb2 import InputData

  from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario
  from fameio.input.schema import Schema, AttributeSpecs, AttributeType
- from fameio.logs import log
- from fameio.series import TimeSeriesManager
+ from fameio.logs import log_error
+ from fameio.output import OutputError
+ from fameio.series import TimeSeriesManager, TimeSeriesError


- class InputConversionError(Exception):
-     """Indicates an error during reconstruction of input from its protobuf representation"""
+ class InputConversionError(OutputError):
+     """Indicates an error during reconstruction of input from its protobuf representation."""


  class InputDao:
-     """Data access object for inputs saved in protobuf"""
+     """Data access object for inputs saved in protobuf."""

      _ERR_NO_INPUTS = "No input data found on file."
      _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."
+     _ERR_NO_SCHEMA = "No schema found on file - cannot recover inputs."
+     _ERR_SERIES_MISSING = "References time series '{}' was not registered on file."

      _FIELD_NAME_MAP: dict = {
          AttributeType.STRING: "string_values",
@@ -39,11 +42,9 @@ class InputDao:
      def __init__(self) -> None:
          self._inputs: list[InputData] = []
          self._timeseries_manager: TimeSeriesManager = TimeSeriesManager()
-         self._schema: Optional[Schema] = None

      def store_inputs(self, data_storages: list[DataStorage]) -> None:
-         """
-         Extracts and stores Inputs in given DataStorages - if such are present
+         """Extracts and stores Inputs in given DataStorages - if such are present.

          Args:
              data_storages: to be scanned for InputData
@@ -51,50 +52,50 @@ class InputDao:
          self._inputs.extend([data_storage.input for data_storage in data_storages if data_storage.HasField("input")])

      def recover_inputs(self) -> tuple[TimeSeriesManager, Scenario]:
-         """
-         Recovers inputs to GeneralProperties, Schema, Agents, Contracts, Timeseries
+         """Recovers inputs to GeneralProperties, Schema, Agents, Contracts, Timeseries.

          Return:
              recovered timeseries and scenario

          Raises:
-             InputConversionException: if inputs could not be recovered
+             InputConversionError: if inputs could not be recovered, logged with level "ERROR"
+             InputError: if scenario in file is incompatible with this version of fameio, logged with level "ERROR"
          """
          input_data = self._get_input_data()
-         self._schema = self._get_schema(input_data)
-         scenario = Scenario(self._schema, self._get_general_properties(input_data))
+         schema = self._get_schema(input_data)
+         scenario = Scenario(schema, self._get_general_properties(input_data))
          for contract in self._get_contracts(input_data):
              scenario.add_contract(contract)

          self._init_timeseries(input_data)
-         for agent in self._get_agents(input_data):
+         for agent in self._get_agents(input_data, schema):
              scenario.add_agent(agent)

          return self._timeseries_manager, scenario

      def _get_input_data(self) -> InputData:
-         """
-         Check that exactly one previously extracted input data exist, otherwise raises an exception
+         """Check that exactly one previously extracted input data exist, otherwise raises an exception.
+ 
+         Returns:
+             the previously extracted input data

          Raises:
-             InputConversionException: if no or more than one input is present
+             InputConversionException: if no input, or more than one input is present, logged with level "ERROR"
          """
          if not self._inputs:
-             log().error(self._ERR_NO_INPUTS)
-             raise InputConversionError(self._ERR_NO_INPUTS)
+             raise log_error(InputConversionError(self._ERR_NO_INPUTS))
          if len(self._inputs) > 1:
-             log().error(self._ERR_MULTIPLE_INPUTS)
-             raise InputConversionError(self._ERR_MULTIPLE_INPUTS)
+             raise log_error(InputConversionError(self._ERR_MULTIPLE_INPUTS))
          return self._inputs[0]

      @staticmethod
      def _get_schema(input_data: InputData) -> Schema:
-         """Read and return Schema from given `input_data`"""
+         """Read and return Schema from given `input_data`."""
          return Schema.from_string(input_data.schema)

      @staticmethod
      def _get_general_properties(input_data: InputData) -> GeneralProperties:
-         """Read and return GeneralProperties from given `input_data`"""
+         """Read and return GeneralProperties from given `input_data`."""
          return GeneralProperties(
              run_id=input_data.run_id,
              simulation_start_time=input_data.simulation.start_time,
@@ -104,7 +105,7 @@ class InputDao:

      @staticmethod
      def _get_contracts(input_data: InputData) -> list[Contract]:
-         """Read and return Contracts from given `input_data`"""
+         """Read and return Contracts from given `input_data`."""
          return [
              Contract(
                  sender_id=contract.sender_id,
@@ -119,11 +120,23 @@ class InputDao:
          ]

      def _init_timeseries(self, input_data: InputData) -> None:
-         """Read timeseries from given `input_data` and initialise TimeSeriesManager"""
+         """Read timeseries from given `input_data` and initialise TimeSeriesManager."""
          self._timeseries_manager.reconstruct_time_series(list(input_data.time_series))

-     def _get_agents(self, input_data: InputData) -> list[Agent]:
-         """Read and return Agents from given `input_data`"""
+     def _get_agents(self, input_data: InputData, schema: Schema) -> list[Agent]:
+         """Read and return Agents from given `input_data`.
+ 
+         Args:
+             input_data: to read agents from
+             schema: corresponding to the agent definitions
+ 
+         Returns:
+             all extracted agents
+ 
+         Raises:
+             InputError: if agents cannot be reconstructed, logged with level "ERROR"
+             InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+         """
          agents = []
          for agent_dao in input_data.agents:
              agent = Agent(
@@ -132,29 +145,54 @@ class InputDao:
                  metadata=ast.literal_eval(agent_dao.metadata) if agent_dao.metadata else None,
              )
              attribute_dict = self._get_attributes(
-                 list(agent_dao.fields), self._schema.agent_types[agent_dao.class_name].attributes
+                 list(agent_dao.fields), schema.agent_types[agent_dao.class_name].attributes
              )
              agent.init_attributes_from_dict(attribute_dict)
              agents.append(agent)
          return agents

      def _get_attributes(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, Any]:
-         """Read and return Attributes as Dictionary from given list of fields"""
+         """Read and return all Attributes as Dictionary from given list of fields.
+ 
+         Args:
+             fields: data fields representing attributes
+             schematics: description of the attributes associated by name
+ 
+         Returns:
+             all recovered attributes and their associated values
+ 
+         Raises:
+             InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+         """
          attributes: dict[str, Any] = {}
          for field in fields:
              attributes[field.field_name] = self._get_field_value(field, schematics[field.field_name])
          return attributes

      def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
-         """Extracts and returns value(s) of given `field`"""
+         """Extracts and returns value(s) of given `field`.
+ 
+         Args:
+             field: to extract the value(s) from
+             schematic: describing the data type of this field
+ 
+         Returns:
+             value(s) of provided field
+ 
+         Raises:
+             InputConversionError: if TimeSeries could not be found, logged with level "ERROR"
+         """
          attribute_type: AttributeType = schematic.attr_type
-         value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
          if attribute_type is AttributeType.TIME_SERIES:
-             return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
+             try:
+                 return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
+             except TimeSeriesError as e:
+                 raise log_error(InputConversionError(self._ERR_SERIES_MISSING.format(field.series_id))) from e
          if attribute_type is AttributeType.BLOCK:
              if schematic.is_list:
                  return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
              return self._get_attributes(list(field.fields), schematic.nested_attributes)
+         value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
          if schematic.is_list:
              return list(value)
          return list(value)[0]
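
A pattern worth noting, because it recurs in nearly every file of this release: the old two-step error handling (`log().error(msg)` followed by a separate `raise`) is collapsed into single expressions such as `raise log_error(InputConversionError(...))`. For that to run, the helpers in `fameio.logs` must log the exception they receive and then return it. A minimal sketch of that contract, assuming a module-level logger; the body is illustrative, not the released implementation:

    import logging

    def log_error(exception: Exception) -> Exception:
        # Log the exception's message at ERROR level, then return the
        # exception itself so callers can write `raise log_error(exc)`.
        logging.getLogger("fameio").error(str(exception))
        return exception

`log_critical` (used in reader.py below) presumably follows the same shape at CRITICAL level. Returning the exception keeps the `raise` at the call site, so static analysers still see the control flow and the stack trace starts where the error actually occurred.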
fameio/output/output_dao.py CHANGED
@@ -1,7 +1,9 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
- from typing import Iterable, Optional
+ from __future__ import annotations
+ 
+ from typing import Iterable

  import pandas as pd
  from fameprotobuf.data_storage_pb2 import DataStorage
@@ -12,9 +14,19 @@ from fameio.output.data_transformer import DataTransformer


  class OutputDAO:
-     """Grants convenient access to content of Output protobuf messages for given DataStorages"""
+     """Grants convenient access to content of Output protobuf messages for given DataStorages."""

      def __init__(self, data_storages: list[DataStorage], agent_type_log: AgentTypeLog) -> None:
+         """
+         Initialise a new OutputDAO
+ 
+         Args:
+             data_storages: to grant access to by this DAO
+             agent_type_log: new types of agents that might come up in the associated data_storages
+ 
+         Raises:
+             AgentTypeError: if duplicate agent definitions occur, logged with level "ERROR"
+         """
          self._agent_type_log = agent_type_log
          outputs = self._extract_output_from_data_storages(data_storages)
          self._agent_type_log.update_agents(self._extract_new_agent_types(outputs))
@@ -22,25 +34,25 @@ class OutputDAO:

      @staticmethod
      def _extract_output_from_data_storages(data_storages: list[DataStorage]) -> list[Output]:
-         """Returns list of Outputs extracted from given `data_storages`"""
+         """Returns list of Outputs extracted from given `data_storages`."""
          if data_storages is None:
              data_storages = []
          return [data_storage.output for data_storage in data_storages if data_storage.HasField("output")]

      @staticmethod
      def _extract_new_agent_types(outputs: list[Output]) -> dict[str, Output.AgentType]:
-         """Returns dict of agent names mapped to its type defined in given `outputs`"""
+         """Returns dict of agent names mapped to its type defined in given `outputs`."""
          list_of_agent_type_lists = [output.agent_types for output in outputs if len(output.agent_types) > 0]
          list_of_agent_types = [item for sublist in list_of_agent_type_lists for item in sublist]
          return {item.class_name: item for item in list_of_agent_types}

      @staticmethod
      def _extract_series(outputs: list[Output]) -> dict[str, list[Output.Series]]:
-         """Returns series data from associated `outputs` mapped to the className of its agent"""
+         """Returns series data from associated `outputs` mapped to the className of its agent."""
          list_of_series_lists = [output.series for output in outputs if len(output.series) > 0]
          list_of_series = [series for sublist in list_of_series_lists for series in sublist]

-         series_per_class_name = {}
+         series_per_class_name: dict[str, list[Output.Series]] = {}
          for series in list_of_series:
              if series.class_name not in series_per_class_name:
                  series_per_class_name[series.class_name] = []
@@ -48,7 +60,7 @@ class OutputDAO:
          return series_per_class_name

      def get_sorted_agents_to_extract(self) -> Iterable[str]:
-         """Returns iterator of requested and available agent names in ascending order by count of series"""
+         """Returns iterator of requested and available agent names in ascending order by count of series."""
          all_series = self._get_agent_names_by_series_count_ascending()
          filtered_series = [agent_name for agent_name in all_series if self._agent_type_log.is_requested(agent_name)]
          return iter(filtered_series)
@@ -59,11 +71,21 @@ class OutputDAO:
          sorted_dict = sorted(length_per_agent_types.items(), key=lambda item: item[1])
          return [agent_name for agent_name, _ in sorted_dict]

-     def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[Optional[str], pd.DataFrame]:
-         """
-         Returns DataFrame(s) containing all data of given `agent` - data is removed after the first call
+     def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[str | None, pd.DataFrame]:
+         """Returns DataFrame(s) containing all data of given `agent` - data is removed after the first call.
+ 
          Depending on the chosen ResolveOption the dict contains one DataFrame for the simple (and merged columns),
          or, in `SPLIT` mode, additional DataFrames mapped to each complex column's name.
+ 
+         Args:
+             agent_name: name of agent whose data are to be returned
+             data_transformer: to handle data transformation
+ 
+         Returns:
+             output data for requested agent: on data frame for all simple columns, any one for each complex column
+ 
+         Raises:
+             AgentTypeError: if type of agent was not yet registered, logged with level "ERROR"
          """
          agent_series = self._all_series.pop(agent_name) if agent_name in self._all_series else []
          agent_type = self._agent_type_log.get_agent_type(agent_name)
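
The grouping loop in `_extract_series` is unchanged apart from the explicit type annotation on the accumulator. The same grouping could be written with `collections.defaultdict`; a sketch of that equivalent formulation (not the code in this release):

    from collections import defaultdict

    series_per_class_name: dict[str, list] = defaultdict(list)
    for series in list_of_series:
        # No membership check needed: missing keys start as empty lists.
        series_per_class_name[series.class_name].append(series)

Keeping the plain dict with an annotation is the smaller diff, and it avoids handing callers a `defaultdict` whose lookups never raise `KeyError` for unknown agent names.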
fameio/output/reader.py CHANGED
@@ -11,21 +11,24 @@ from fameprotobuf.data_storage_pb2 import DataStorage
  from google.protobuf.message import DecodeError

  import fameio
- from fameio.logs import log, log_critical_and_raise
+ from fameio.logs import log, log_critical, log_error
+ from fameio.output import OutputError


- class ProtobufReaderError(Exception):
-     """Indicates an error while reading a protobuf file"""
+ class ProtobufReaderError(OutputError):
+     """Indicates an error while reading a protobuf file."""


  class Reader(ABC):
-     """Abstract base class for protobuf file readers"""
+     """Abstract base class for protobuf file readers."""

-     _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."  # noqa
+     _ERR_FILE_READ = "Could not read file content."
+     _ERR_HEADER_UNRECOGNISED = ""
      _ERR_FILE_CORRUPT_NEGATIVE_LENGTH = "Corrupt file, message length must be positive."
      _ERR_FILE_CORRUPT_MISSING_DATA = "Trying to read corrupt file caused by inconsistent message length."
      _ERR_UNSUPPORTED_MODE = "Ignoring memory saving mode: not supported for files created with `fame-core<1.4`."
      _ERR_PARSING_FAILED = "File Corrupt. Could not parse file content."
+     _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."
      _DEBUG_FILE_END_REACHED = "Reached expected end of file."

      _HEADER_LENGTH = 30
@@ -44,7 +47,7 @@ class Reader(ABC):
      @staticmethod
      @final
      def _raise_error(error_message: str) -> NoReturn:
-         log_critical_and_raise(ProtobufReaderError(error_message))
+         raise log_critical(ProtobufReaderError(error_message))

      def __init__(self, file: IO, read_single) -> None:
          self._file = file
@@ -52,12 +55,18 @@ class Reader(ABC):

      @abstractmethod
      def read(self) -> list[DataStorage]:
-         """Reads associated filestream and returns one or multiple DataStorage(s) or empty list"""
+         """Reads associated filestream and returns one or multiple DataStorage(s) or empty list.
+ 
+         Returns:
+             one or multiple DataStorage protobuf object(s) read from file
+ 
+         Raises:
+             ProtobufReaderError: if file is corrupted in any way, logged with level "ERROR"
+         """

      @staticmethod
      def get_reader(file: IO, read_single: bool = False) -> Reader:
-         """
-         Returns reader matching the given file header
+         """Returns reader matching the given file header.

          Args:
              file: to be read by the returned Reader
@@ -65,17 +74,30 @@ class Reader(ABC):

          Returns:
              Reader that can read the specified file
+ 
+         Raises:
+             ProtobufReaderError: if file has an unsupported header,logged with level "CRITICAL"
          """
          log().debug("Reading file headers...")
          try:
-             header = file.read(Reader._HEADER_LENGTH).decode(Reader.HEADER_ENCODING)
-             return Reader._READER_HEADERS[header](file, read_single)
-         except (KeyError, UnicodeDecodeError):
-             return Reader._READER_HEADERS[None](file, read_single)
+             header_content = file.read(Reader._HEADER_LENGTH)
+         except ValueError as e:
+             raise log_critical(ProtobufReaderError(Reader._ERR_FILE_READ)) from e
+ 
+         try:
+             header = header_content.decode(Reader.HEADER_ENCODING)
+         except UnicodeDecodeError:
+             header = None
+             log().warning(Reader._WARN_NO_HEADER)
+ 
+         if header not in Reader._READER_HEADERS:
+             header = None
+ 
+         return Reader._READER_HEADERS[header](file, read_single)

      @final
      def _read_message_length(self) -> int:
-         """Returns length of next DataStorage message in file"""
+         """Returns length of next DataStorage message in file."""
          message_length_byte = self._file.read(self.BYTES_DEFINING_MESSAGE_LENGTH)
          if not message_length_byte:
              log().debug(self._DEBUG_FILE_END_REACHED)
@@ -85,34 +107,55 @@ class Reader(ABC):
          return message_length_int

      @final
-     def _read_data_storage_message(self, message_length: int = None) -> DataStorage:
-         """
-         Returns given `data_storage` read from current file position and following `message_length` bytes.
+     def _read_data_storage_message(self, message_length: int | None = None) -> DataStorage:
+         """Returns data storage read from current file position and following `message_length` bytes.
+ 
          If `message_length` is omitted, the rest of the file is read. If no message is found, None is returned.
+ 
+         Args:
+             message_length: amounts of bytes to read - must correspond to the next DataStorage message in file
+ 
+         Returns:
+             Read and de-serialised DataStorage
+ 
+         Raises:
+             ProtobufReaderError: if message_length is corrupt or file is corrupt, logged with level "ERROR"
          """
          if message_length is None:
              message = self._file.read()
          elif message_length > 0:
              message = self._file.read(message_length)
          else:
-             raise IOError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH)
+             raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH))
          if message_length and len(message) != message_length:
-             log().error(self._ERR_FILE_CORRUPT_MISSING_DATA)
+             raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_MISSING_DATA))
          return self._parse_to_data_storage(message) if message else None

      @staticmethod
      @final
      def _parse_to_data_storage(message: bytes) -> DataStorage:
+         """
+         De-serialises a binary message into a DataStorage protobuf object
+ 
+         Args:
+             message: to be convert
+ 
+         Returns:
+             DataStorage initialised from the given message
+ 
+         Raises:
+             ProtobufReaderError: if message could not be converted, logged with level "ERROR"
+         """
          data_storage = DataStorage()
          try:
              data_storage.ParseFromString(message)
          except DecodeError as e:
-             raise IOError(Reader._ERR_PARSING_FAILED) from e
+             raise log_error(ProtobufReaderError(Reader._ERR_PARSING_FAILED)) from e
          return data_storage


  class ReaderV2(Reader):
-     """Reader class for `fame-core>=2.0` output with header of version v002"""
+     """Reader class for `fame-core>=2.0` output with header of version v002."""

      def read(self) -> list[DataStorage]:
          messages = []
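
The reworked `get_reader` separates three failure modes that the old version folded into one `except (KeyError, UnicodeDecodeError)` clause: an unreadable stream now raises immediately, an undecodable header logs a warning, and both undecodable and unknown headers fall back to the reader registered under the `None` key. A hedged usage sketch; the file name is illustrative, while `Reader.get_reader` and `read()` are taken from the diff:

    from fameio.output.reader import Reader, ProtobufReaderError

    with open("simulation_output.pb", "rb") as file:  # illustrative path
        try:
            reader = Reader.get_reader(file, read_single=False)
            data_storages = reader.read()
        except ProtobufReaderError:
            ...  # file was corrupt; details were already logged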
fameio/output/yaml_writer.py CHANGED
@@ -5,23 +5,31 @@ from pathlib import Path

  import yaml

- from fameio.logs import log
+ from fameio.logs import log, log_error
+ from fameio.output import OutputError

- ERR_WRITE_EXCEPTION = "Failed to save dictionary to YAML file `{}`"
- INFO_DESTINATION = "Saving scenario to file at {}"
+ _ERR_OPEN_FILE = "Could not open file for reading: '{}'"
+ 
+ _INFO_DESTINATION = "Saving scenario to file at {}"
+ 
+ 
+ class YamlWriterError(OutputError):
+     """An error occurred during writing a YAML file."""


  def data_to_yaml_file(data: dict, file_path: Path) -> None:
-     """
-     Save the given data to a YAML file at given path
+     """Save the given data to a YAML file at given path.

      Args:
          data: to be saved to yaml file
          file_path: at which the file will be created
+ 
+     Raises:
+         YamlWriterError: if file could not be opened or written, logged with level "ERROR"
      """
-     log().info(INFO_DESTINATION.format(file_path))
+     log().info(_INFO_DESTINATION.format(file_path))
      try:
          with open(file_path, "w", encoding="utf-8") as f:
              yaml.dump(data, f, sort_keys=False, encoding="utf-8")
-     except Exception as e:
-         raise RuntimeError(ERR_WRITE_EXCEPTION.format(file_path)) from e
+     except OSError as e:
+         raise log_error(YamlWriterError(_ERR_OPEN_FILE.format(file_path))) from e
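
Besides narrowing the overly broad `except Exception` to `except OSError`, the writer now raises a dedicated `YamlWriterError` instead of a bare `RuntimeError`, so callers can catch output failures without masking unrelated bugs. A usage sketch under that assumption; payload and path are illustrative:

    from pathlib import Path
    from fameio.output.yaml_writer import data_to_yaml_file, YamlWriterError

    try:
        data_to_yaml_file({"example": [1, 2, 3]}, Path("recovered_scenario.yaml"))
    except YamlWriterError:
        ...  # file could not be opened or written; already logged at ERROR level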
fameio/scripts/__init__.py CHANGED
@@ -3,18 +3,36 @@ import sys

  from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
  from fameio.scripts.convert_results import run as convert_results
+ from fameio.scripts.exception import ScriptError
  from fameio.scripts.make_config import run as make_config
+ from fameio.scripts.reformat import run as reformat
  from fameio.cli.convert_results import handle_args as handle_convert_results_args
  from fameio.cli.make_config import handle_args as handle_make_config_args
+ from fameio.cli.reformat import handle_args as handle_reformat_args


  # noinspection PyPep8Naming
  def makeFameRunConfig():
-     run_config = handle_make_config_args(sys.argv[1:])
-     make_config(run_config)
+     cli_config = handle_make_config_args(sys.argv[1:])
+     try:
+         make_config(cli_config)
+     except ScriptError as e:
+         raise SystemExit(1) from e


  # noinspection PyPep8Naming
  def convertFameResults():
-     run_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
-     convert_results(run_config)
+     cli_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
+     try:
+         convert_results(cli_config)
+     except ScriptError as e:
+         raise SystemExit(1) from e
+ 
+ 
+ # noinspection PyPep8Naming
+ def reformatTimeSeries():
+     cli_config = handle_reformat_args(sys.argv[1:])
+     try:
+         reformat(cli_config)
+     except ScriptError as e:
+         raise SystemExit(1) from e
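
All three console entry points, including the new `reformatTimeSeries`, now share the same shape: parse the CLI arguments, delegate to the matching `run` function, and convert any `ScriptError` into `SystemExit(1)`. Schematically (this helper does not exist in the package; it only illustrates the repeated pattern):

    def _run_entry_point(parse_args, run, argv):
        cli_config = parse_args(argv)
        try:
            run(cli_config)
        except ScriptError as e:
            # Exit code 1 for shell callers; `from e` keeps the original
            # ScriptError attached as the cause for anyone catching SystemExit.
            raise SystemExit(1) from e

Raising `SystemExit(1) from e` gives the scripts a conventional non-zero exit status on failure instead of surfacing a raw traceback, while the causal chain remains available for debugging.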