fameio 3.1.0__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. fameio/cli/__init__.py +2 -3
  2. fameio/cli/convert_results.py +6 -4
  3. fameio/cli/make_config.py +6 -4
  4. fameio/cli/options.py +3 -3
  5. fameio/cli/parser.py +43 -31
  6. fameio/input/__init__.py +1 -9
  7. fameio/input/loader/__init__.py +9 -7
  8. fameio/input/loader/controller.py +64 -14
  9. fameio/input/loader/loader.py +14 -7
  10. fameio/input/metadata.py +37 -18
  11. fameio/input/resolver.py +5 -4
  12. fameio/input/scenario/__init__.py +7 -8
  13. fameio/input/scenario/agent.py +52 -19
  14. fameio/input/scenario/attribute.py +28 -29
  15. fameio/input/scenario/contract.py +161 -52
  16. fameio/input/scenario/exception.py +45 -22
  17. fameio/input/scenario/fameiofactory.py +63 -7
  18. fameio/input/scenario/generalproperties.py +17 -6
  19. fameio/input/scenario/scenario.py +111 -28
  20. fameio/input/scenario/stringset.py +27 -8
  21. fameio/input/schema/__init__.py +5 -5
  22. fameio/input/schema/agenttype.py +29 -11
  23. fameio/input/schema/attribute.py +174 -84
  24. fameio/input/schema/java_packages.py +8 -5
  25. fameio/input/schema/schema.py +35 -9
  26. fameio/input/validator.py +58 -42
  27. fameio/input/writer.py +139 -41
  28. fameio/logs.py +23 -17
  29. fameio/output/__init__.py +5 -1
  30. fameio/output/agent_type.py +93 -27
  31. fameio/output/conversion.py +48 -30
  32. fameio/output/csv_writer.py +88 -18
  33. fameio/output/data_transformer.py +12 -21
  34. fameio/output/input_dao.py +68 -32
  35. fameio/output/output_dao.py +26 -4
  36. fameio/output/reader.py +61 -18
  37. fameio/output/yaml_writer.py +18 -9
  38. fameio/scripts/__init__.py +9 -2
  39. fameio/scripts/convert_results.py +144 -52
  40. fameio/scripts/convert_results.py.license +1 -1
  41. fameio/scripts/exception.py +7 -0
  42. fameio/scripts/make_config.py +34 -12
  43. fameio/scripts/make_config.py.license +1 -1
  44. fameio/series.py +132 -47
  45. fameio/time.py +88 -37
  46. fameio/tools.py +9 -8
  47. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/METADATA +19 -13
  48. fameio-3.2.0.dist-info/RECORD +56 -0
  49. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/WHEEL +1 -1
  50. CHANGELOG.md +0 -279
  51. fameio-3.1.0.dist-info/RECORD +0 -56
  52. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSE.txt +0 -0
  53. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  54. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  55. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  56. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/entry_points.txt +0 -0
fameio/output/input_dao.py CHANGED
@@ -1,8 +1,8 @@
-# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 import ast
-from typing import Any, Optional
+from typing import Any
 
 from fameprotobuf.data_storage_pb2 import DataStorage
 from fameprotobuf.field_pb2 import NestedField
@@ -10,21 +10,22 @@ from fameprotobuf.input_file_pb2 import InputData
 
 from fameio.input.scenario import GeneralProperties, Agent, Contract, Scenario
 from fameio.input.schema import Schema, AttributeSpecs, AttributeType
-from fameio.logs import log
-from fameio.series import TimeSeriesManager
+from fameio.logs import log_error
+from fameio.output import OutputError
+from fameio.series import TimeSeriesManager, TimeSeriesError
 
 
-class InputConversionError(Exception):
+class InputConversionError(OutputError):
     """Indicates an error during reconstruction of input from its protobuf representation"""
 
-    pass
-
 
 class InputDao:
     """Data access object for inputs saved in protobuf"""
 
     _ERR_NO_INPUTS = "No input data found on file."
     _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."
+    _ERR_NO_SCHEMA = "No schema found on file - cannot recover inputs."
+    _ERR_SERIES_MISSING = "References time series '{}' was not registered on file."
 
     _FIELD_NAME_MAP: dict = {
         AttributeType.STRING: "string_values",
@@ -41,7 +42,6 @@ class InputDao:
     def __init__(self) -> None:
         self._inputs: list[InputData] = []
         self._timeseries_manager: TimeSeriesManager = TimeSeriesManager()
-        self._schema: Optional[Schema] = None
 
     def store_inputs(self, data_storages: list[DataStorage]) -> None:
         """
@@ -60,16 +60,17 @@ class InputDao:
            recovered timeseries and scenario
 
         Raises:
-            InputConversionException: if inputs could not be recovered
+            InputConversionError: if inputs could not be recovered, logged with level "ERROR"
+            InputError: if scenario in file is incompatible with this version of fameio, logged with level "ERROR"
         """
         input_data = self._get_input_data()
-        self._schema = self._get_schema(input_data)
-        scenario = Scenario(self._schema, self._get_general_properties(input_data))
+        schema = self._get_schema(input_data)
+        scenario = Scenario(schema, self._get_general_properties(input_data))
         for contract in self._get_contracts(input_data):
             scenario.add_contract(contract)
 
         self._init_timeseries(input_data)
-        for agent in self._get_agents(input_data):
+        for agent in self._get_agents(input_data, schema):
             scenario.add_agent(agent)
 
         return self._timeseries_manager, scenario
@@ -79,14 +80,12 @@ class InputDao:
         Check that exactly one previously extracted input data exist, otherwise raises an exception
 
         Raises:
-            InputConversionException: if no or more than one input is present
+            InputConversionException: if no or more than one input is present, logged with level "ERROR"
         """
         if not self._inputs:
-            log().error(self._ERR_NO_INPUTS)
-            raise InputConversionError(self._ERR_NO_INPUTS)
+            raise log_error(InputConversionError(self._ERR_NO_INPUTS))
         if len(self._inputs) > 1:
-            log().error(self._ERR_MULTIPLE_INPUTS)
-            raise InputConversionError(self._ERR_MULTIPLE_INPUTS)
+            raise log_error(InputConversionError(self._ERR_MULTIPLE_INPUTS))
         return self._inputs[0]
 
     @staticmethod
@@ -124,8 +123,21 @@ class InputDao:
         """Read timeseries from given `input_data` and initialise TimeSeriesManager"""
         self._timeseries_manager.reconstruct_time_series(list(input_data.time_series))
 
-    def _get_agents(self, input_data: InputData) -> list[Agent]:
-        """Read and return Agents from given `input_data`"""
+    def _get_agents(self, input_data: InputData, schema: Schema) -> list[Agent]:
+        """
+        Read and return Agents from given `input_data`
+
+        Args:
+            input_data: to read agents from
+            schema: corresponding to the agent definitions
+
+        Returns:
+            all extracted agents
+
+        Raises:
+            InputError: if agents cannot be reconstructed, logged with level "ERROR"
+            InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+        """
         agents = []
         for agent_dao in input_data.agents:
             agent = Agent(
@@ -134,32 +146,56 @@ class InputDao:
                 metadata=ast.literal_eval(agent_dao.metadata) if agent_dao.metadata else None,
             )
             attribute_dict = self._get_attributes(
-                list(agent_dao.fields), self._schema.agent_types[agent_dao.class_name].attributes
+                list(agent_dao.fields), schema.agent_types[agent_dao.class_name].attributes
             )
             agent.init_attributes_from_dict(attribute_dict)
             agents.append(agent)
         return agents
 
     def _get_attributes(self, fields: list[NestedField], schematics: dict[str, AttributeSpecs]) -> dict[str, Any]:
-        """Read and return Attributes as Dictionary from given list of fields"""
+        """
+        Read and return all Attributes as Dictionary from given list of fields
+
+        Args:
+            fields: data fields representing attributes
+            schematics: description of the attributes associated by name
+
+        Returns:
+            all recovered attributes and their associated values
+
+        Raises:
+            InputConversionError: if attributes could not be reconstructed, logged with level "ERROR"
+        """
         attributes: dict[str, Any] = {}
         for field in fields:
             attributes[field.field_name] = self._get_field_value(field, schematics[field.field_name])
         return attributes
 
     def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
-        """Extracts and returns value(s) of given `field`"""
+        """
+        Extracts and returns value(s) of given `field`
+
+        Args:
+            field: to extract the value(s) from
+            schematic: describing the data type of this field
+
+        Returns:
+            value(s) of provided field
+
+        Raises:
+            InputConversionError: if TimeSeries could not be found, logged with level "ERROR"
+        """
         attribute_type: AttributeType = schematic.attr_type
-        value = field.__getattribute__(self._FIELD_NAME_MAP[attribute_type])
         if attribute_type is AttributeType.TIME_SERIES:
-            return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
-        elif attribute_type is AttributeType.BLOCK:
+            try:
+                return self._timeseries_manager.get_reconstructed_series_by_id(field.series_id)
+            except TimeSeriesError as e:
+                raise log_error(InputConversionError(self._ERR_SERIES_MISSING.format(field.series_id))) from e
+        if attribute_type is AttributeType.BLOCK:
             if schematic.is_list:
                 return [self._get_attributes(list(entry.fields), schematic.nested_attributes) for entry in field.fields]
-            else:
-                return self._get_attributes(list(field.fields), schematic.nested_attributes)
-        else:
-            if schematic.is_list:
-                return list(value)
-            else:
-                return list(value)[0]
+            return self._get_attributes(list(field.fields), schematic.nested_attributes)
+        value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
+        if schematic.is_list:
+            return list(value)
+        return list(value)[0]
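A pattern worth noting for this and the following hunks: 3.2.0 consistently replaces the two-step `log().error(msg)` plus `raise SomeError(msg)` with a single `raise log_error(SomeError(msg))`. For that to work, `log_error` must log the exception and hand it back to the caller; a minimal sketch of that assumed contract (the helper body below is illustrative, not fameio's actual implementation):

```python
import logging

def log_error(error: Exception) -> Exception:
    """Assumed contract: log at ERROR level, then return the exception to be raised."""
    logging.getLogger("fameio").error(str(error))
    return error

class InputConversionError(Exception):
    """Stand-in for fameio's OutputError subclass of the same name."""

def get_single_input(inputs: list) -> object:
    """Mirrors the single-statement raise sites in InputDao._get_input_data."""
    if not inputs:
        raise log_error(InputConversionError("No input data found on file."))
    if len(inputs) > 1:
        raise log_error(InputConversionError("File corrupt. More than one input section found on file."))
    return inputs[0]
```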
fameio/output/output_dao.py CHANGED
@@ -1,7 +1,9 @@
-# SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-from typing import Iterable, Optional
+from __future__ import annotations
+
+from typing import Iterable
 
 import pandas as pd
 from fameprotobuf.data_storage_pb2 import DataStorage
@@ -15,6 +17,16 @@ class OutputDAO:
     """Grants convenient access to content of Output protobuf messages for given DataStorages"""
 
     def __init__(self, data_storages: list[DataStorage], agent_type_log: AgentTypeLog) -> None:
+        """
+        Initialise a new OutputDAO
+
+        Args:
+            data_storages: to grant access to by this DAO
+            agent_type_log: new types of agents that might come up in the associated data_storages
+
+        Raises:
+            AgentTypeError: if duplicate agent definitions occur, logged with level "ERROR"
+        """
         self._agent_type_log = agent_type_log
         outputs = self._extract_output_from_data_storages(data_storages)
         self._agent_type_log.update_agents(self._extract_new_agent_types(outputs))
@@ -40,7 +52,7 @@ class OutputDAO:
         list_of_series_lists = [output.series for output in outputs if len(output.series) > 0]
         list_of_series = [series for sublist in list_of_series_lists for series in sublist]
 
-        series_per_class_name = {}
+        series_per_class_name: dict[str, list[Output.Series]] = {}
         for series in list_of_series:
             if series.class_name not in series_per_class_name:
                 series_per_class_name[series.class_name] = []
@@ -59,11 +71,21 @@ class OutputDAO:
         sorted_dict = sorted(length_per_agent_types.items(), key=lambda item: item[1])
         return [agent_name for agent_name, _ in sorted_dict]
 
-    def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[Optional[str], pd.DataFrame]:
+    def get_agent_data(self, agent_name: str, data_transformer: DataTransformer) -> dict[str | None, pd.DataFrame]:
         """
         Returns DataFrame(s) containing all data of given `agent` - data is removed after the first call
         Depending on the chosen ResolveOption the dict contains one DataFrame for the simple (and merged columns),
         or, in `SPLIT` mode, additional DataFrames mapped to each complex column's name.
+
+        Args:
+            agent_name: name of agent whose data are to be returned
+            data_transformer: to handle data transformation
+
+        Returns:
+            output data for requested agent: on data frame for all simple columns, any one for each complex column
+
+        Raises:
+            AgentTypeError: if type of agent was not yet registered, logged with level "ERROR"
         """
         agent_series = self._all_series.pop(agent_name) if agent_name in self._all_series else []
         agent_type = self._agent_type_log.get_agent_type(agent_name)
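The typing change in this file (`Optional[str]` → `str | None`) leans on PEP 604 union syntax, which only exists at runtime from Python 3.10; the newly added `from __future__ import annotations` (PEP 563) keeps annotations unevaluated so the module still imports on older interpreters. A minimal sketch, with a hypothetical function standing in for `get_agent_data`:

```python
from __future__ import annotations  # PEP 563: annotations stay unevaluated

import pandas as pd

def get_agent_data(agent_name: str) -> dict[str | None, pd.DataFrame]:
    # Without the future import, `str | None` inside this annotation would
    # raise a TypeError at import time on Python 3.8/3.9.
    return {agent_name: pd.DataFrame(), None: pd.DataFrame()}

print(list(get_agent_data("SomeAgentType")))  # ['SomeAgentType', None]
```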
fameio/output/reader.py CHANGED
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 from __future__ import annotations
@@ -11,23 +11,24 @@ from fameprotobuf.data_storage_pb2 import DataStorage
 from google.protobuf.message import DecodeError
 
 import fameio
-from fameio.logs import log, log_critical_and_raise
+from fameio.logs import log, log_critical, log_error
+from fameio.output import OutputError
 
 
-class ProtobufReaderError(Exception):
+class ProtobufReaderError(OutputError):
     """Indicates an error while reading a protobuf file"""
 
-    pass
-
 
 class Reader(ABC):
     """Abstract base class for protobuf file readers"""
 
-    _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."  # noqa
+    _ERR_FILE_READ = "Could not read file content."
+    _ERR_HEADER_UNRECOGNISED = ""
     _ERR_FILE_CORRUPT_NEGATIVE_LENGTH = "Corrupt file, message length must be positive."
     _ERR_FILE_CORRUPT_MISSING_DATA = "Trying to read corrupt file caused by inconsistent message length."
     _ERR_UNSUPPORTED_MODE = "Ignoring memory saving mode: not supported for files created with `fame-core<1.4`."
     _ERR_PARSING_FAILED = "File Corrupt. Could not parse file content."
+    _WARN_NO_HEADER = "No header recognised in file. File might be deprecated or corrupted."
     _DEBUG_FILE_END_REACHED = "Reached expected end of file."
 
     _HEADER_LENGTH = 30
@@ -40,13 +41,13 @@ class Reader(ABC):
     _READER_HEADERS = {
         None: lambda file, mode: Reader._raise_error(Reader._ERR_DEPRECATED_V0),
         fameio.FILE_HEADER_V1: lambda file, mode: Reader._raise_error(Reader._ERR_DEPRECATED_V1),
-        fameio.FILE_HEADER_V2: lambda file, mode: ReaderV2(file, mode),
+        fameio.FILE_HEADER_V2: lambda file, mode: ReaderV2(file, mode),  # pylint: disable=unnecessary-lambda
     }
 
     @staticmethod
     @final
     def _raise_error(error_message: str) -> NoReturn:
-        log_critical_and_raise(ProtobufReaderError(error_message))
+        raise log_critical(ProtobufReaderError(error_message))
 
     def __init__(self, file: IO, read_single) -> None:
         self._file = file
@@ -54,7 +55,15 @@ class Reader(ABC):
 
     @abstractmethod
     def read(self) -> list[DataStorage]:
-        """Reads associated filestream and returns one or multiple DataStorage(s) or empty list"""
+        """
+        Reads associated filestream and returns one or multiple DataStorage(s) or empty list
+
+        Returns:
+            one or multiple DataStorage protobuf object(s) read from file
+
+        Raises:
+            ProtobufReaderError: if file is corrupted in any way, logged with level "ERROR"
+        """
 
     @staticmethod
     def get_reader(file: IO, read_single: bool = False) -> Reader:
@@ -67,13 +76,26 @@ class Reader(ABC):
 
         Returns:
             Reader that can read the specified file
+
+        Raises:
+            ProtobufReaderError: if file has an unsupported header,logged with level "CRITICAL"
         """
         log().debug("Reading file headers...")
         try:
-            header = file.read(Reader._HEADER_LENGTH).decode(Reader.HEADER_ENCODING)
-            return Reader._READER_HEADERS[header](file, read_single)
-        except (KeyError, UnicodeDecodeError):
-            return Reader._READER_HEADERS[None](file, read_single)
+            header_content = file.read(Reader._HEADER_LENGTH)
+        except ValueError as e:
+            raise log_critical(ProtobufReaderError(Reader._ERR_FILE_READ)) from e
+
+        try:
+            header = header_content.decode(Reader.HEADER_ENCODING)
+        except UnicodeDecodeError:
+            header = None
+            log().warning(Reader._WARN_NO_HEADER)
+
+        if header not in Reader._READER_HEADERS:
+            header = None
+
+        return Reader._READER_HEADERS[header](file, read_single)
 
     @final
     def _read_message_length(self) -> int:
@@ -87,29 +109,50 @@ class Reader(ABC):
         return message_length_int
 
     @final
-    def _read_data_storage_message(self, message_length: int = None) -> DataStorage:
+    def _read_data_storage_message(self, message_length: int | None = None) -> DataStorage:
         """
         Returns given `data_storage` read from current file position and following `message_length` bytes.
         If `message_length` is omitted, the rest of the file is read. If no message is found, None is returned.
+
+        Args:
+            message_length: amounts of bytes to read - must correspond to the next DataStorage message in file
+
+        Returns:
+            Read and de-serialised DataStorage
+
+        Raises:
+            ProtobufReaderError: if message_length is corrupt or file is corrupt, logged with level "ERROR"
         """
         if message_length is None:
             message = self._file.read()
         elif message_length > 0:
             message = self._file.read(message_length)
         else:
-            raise IOError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH)
+            raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH))
         if message_length and len(message) != message_length:
-            log().error(self._ERR_FILE_CORRUPT_MISSING_DATA)
+            raise log_error(ProtobufReaderError(self._ERR_FILE_CORRUPT_MISSING_DATA))
         return self._parse_to_data_storage(message) if message else None
 
     @staticmethod
     @final
     def _parse_to_data_storage(message: bytes) -> DataStorage:
+        """
+        De-serialises a binary message into a DataStorage protobuf object
+
+        Args:
+            message: to be convert
+
+        Returns:
+            DataStorage initialised from the given message
+
+        Raises:
+            ProtobufReaderError: if message could not be converted, logged with level "ERROR"
+        """
         data_storage = DataStorage()
         try:
             data_storage.ParseFromString(message)
-        except DecodeError:
-            raise IOError(Reader._ERR_PARSING_FAILED)
+        except DecodeError as e:
+            raise log_error(ProtobufReaderError(Reader._ERR_PARSING_FAILED)) from e
         return data_storage
 
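The `get_reader` refactor above replaces exception-driven dispatch (`except (KeyError, UnicodeDecodeError)`) with explicit normalisation: any unreadable or unknown header is mapped to the `None` key of the `_READER_HEADERS` table. A condensed, self-contained sketch of that dispatch shape (the header constant, encoding, and factory bodies are placeholders, not fameio's real values):

```python
from io import BytesIO
from typing import IO

HEADER_LENGTH = 30
HEADER_ENCODING = "utf-8"          # placeholder for Reader.HEADER_ENCODING
FILE_HEADER_V2 = "demo-header-v2"  # placeholder for fameio.FILE_HEADER_V2

def _fail(file: IO, mode: bool):
    raise ValueError("no recognisable header: file is deprecated or corrupt")

READER_HEADERS = {
    None: _fail,
    FILE_HEADER_V2: lambda file, mode: f"ReaderV2(read_single={mode})",
}

def get_reader(file: IO, read_single: bool = False):
    try:
        header = file.read(HEADER_LENGTH).decode(HEADER_ENCODING)
    except UnicodeDecodeError:
        header = None  # undecodable bytes: fall back to the None entry
    if header not in READER_HEADERS:
        header = None  # unknown header string: same fallback
    return READER_HEADERS[header](file, read_single)

print(get_reader(BytesIO(b"demo-header-v2")))  # ReaderV2(read_single=False)
```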
fameio/output/yaml_writer.py CHANGED
@@ -1,14 +1,20 @@
-# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+# SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 from pathlib import Path
 
 import yaml
 
-from fameio.logs import log
+from fameio.logs import log, log_error
+from fameio.output import OutputError
 
-ERR_WRITE_EXCEPTION = "Failed to save dictionary to YAML file `{}`"
-INFO_DESTINATION = "Saving scenario to file at {}"
+_ERR_OPEN_FILE = "Could not open file for reading: '{}'"
+
+_INFO_DESTINATION = "Saving scenario to file at {}"
+
+
+class YamlWriterError(OutputError):
+    """An error occurred during writing a YAML file"""
 
 
 def data_to_yaml_file(data: dict, file_path: Path) -> None:
@@ -18,10 +24,13 @@ def data_to_yaml_file(data: dict, file_path: Path) -> None:
     Args:
         data: to be saved to yaml file
         file_path: at which the file will be created
+
+    Raises:
+        YamlWriterError: if file could not be opened or written, logged with level "ERROR"
     """
-    log().info(INFO_DESTINATION.format(file_path))
+    log().info(_INFO_DESTINATION.format(file_path))
     try:
-        with open(file_path, "w") as f:
-            yaml.dump(data, f, sort_keys=False)
-    except Exception as e:
-        raise RuntimeError(ERR_WRITE_EXCEPTION.format(file_path)) from e
+        with open(file_path, "w", encoding="utf-8") as f:
+            yaml.dump(data, f, sort_keys=False, encoding="utf-8")
+    except OSError as e:
+        raise log_error(YamlWriterError(_ERR_OPEN_FILE.format(file_path))) from e
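For callers, the practical effect of this hunk is a dedicated exception type: `data_to_yaml_file` now raises `YamlWriterError` (an `OutputError`) instead of a bare `RuntimeError`. A usage sketch (the payload and path are illustrative):

```python
from pathlib import Path

from fameio.output.yaml_writer import YamlWriterError, data_to_yaml_file

try:
    data_to_yaml_file({"GeneralProperties": {"RunId": 1}}, Path("recovered_scenario.yaml"))
except YamlWriterError:
    # opening or writing failed; the cause was already logged at level "ERROR"
    raise
```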
fameio/scripts/__init__.py CHANGED
@@ -3,6 +3,7 @@ import sys
 
 from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
 from fameio.scripts.convert_results import run as convert_results
+from fameio.scripts.exception import ScriptError
 from fameio.scripts.make_config import run as make_config
 from fameio.cli.convert_results import handle_args as handle_convert_results_args
 from fameio.cli.make_config import handle_args as handle_make_config_args
@@ -11,10 +12,16 @@ from fameio.cli.make_config import handle_args as handle_make_config_args
 # noinspection PyPep8Naming
 def makeFameRunConfig():
     run_config = handle_make_config_args(sys.argv[1:])
-    make_config(run_config)
+    try:
+        make_config(run_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
 
 
 # noinspection PyPep8Naming
 def convertFameResults():
     run_config = handle_convert_results_args(sys.argv[1:], DEFAULT_CONVERT_CONFIG)
-    convert_results(run_config)
+    try:
+        convert_results(run_config)
+    except ScriptError as e:
+        raise SystemExit(1) from e
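Net effect of this last hunk: the `makeFameRunConfig` and `convertFameResults` console scripts now terminate with exit code 1 on any `ScriptError` instead of surfacing an unhandled traceback. A self-contained sketch of that behaviour (stand-in classes, not fameio imports):

```python
class ScriptError(Exception):
    """Stand-in for fameio.scripts.exception.ScriptError."""

def run() -> None:
    """Stand-in for make_config / convert_results failing internally."""
    raise ScriptError("could not convert results")

def entry_point() -> None:
    try:
        run()
    except ScriptError as e:
        raise SystemExit(1) from e

try:
    entry_point()
except SystemExit as e:
    assert e.code == 1  # clean exit code, no raw traceback for the user
```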