fameio 3.1.1__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. fameio/cli/convert_results.py +6 -4
  2. fameio/cli/make_config.py +6 -4
  3. fameio/cli/parser.py +41 -29
  4. fameio/input/__init__.py +1 -1
  5. fameio/input/loader/__init__.py +9 -7
  6. fameio/input/loader/controller.py +59 -8
  7. fameio/input/loader/loader.py +14 -7
  8. fameio/input/metadata.py +35 -13
  9. fameio/input/resolver.py +5 -4
  10. fameio/input/scenario/agent.py +50 -16
  11. fameio/input/scenario/attribute.py +14 -15
  12. fameio/input/scenario/contract.py +152 -43
  13. fameio/input/scenario/exception.py +44 -18
  14. fameio/input/scenario/fameiofactory.py +63 -7
  15. fameio/input/scenario/generalproperties.py +17 -6
  16. fameio/input/scenario/scenario.py +111 -28
  17. fameio/input/scenario/stringset.py +27 -8
  18. fameio/input/schema/agenttype.py +21 -2
  19. fameio/input/schema/attribute.py +91 -22
  20. fameio/input/schema/java_packages.py +8 -5
  21. fameio/input/schema/schema.py +35 -9
  22. fameio/input/validator.py +22 -15
  23. fameio/input/writer.py +136 -36
  24. fameio/logs.py +3 -31
  25. fameio/output/__init__.py +5 -1
  26. fameio/output/agent_type.py +86 -23
  27. fameio/output/conversion.py +47 -29
  28. fameio/output/csv_writer.py +88 -18
  29. fameio/output/data_transformer.py +7 -14
  30. fameio/output/input_dao.py +62 -21
  31. fameio/output/output_dao.py +26 -4
  32. fameio/output/reader.py +58 -13
  33. fameio/output/yaml_writer.py +15 -6
  34. fameio/scripts/__init__.py +9 -2
  35. fameio/scripts/convert_results.py +123 -50
  36. fameio/scripts/convert_results.py.license +1 -1
  37. fameio/scripts/exception.py +7 -0
  38. fameio/scripts/make_config.py +34 -12
  39. fameio/scripts/make_config.py.license +1 -1
  40. fameio/series.py +117 -33
  41. fameio/time.py +74 -17
  42. fameio/tools.py +7 -5
  43. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/METADATA +19 -13
  44. fameio-3.2.0.dist-info/RECORD +56 -0
  45. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/WHEEL +1 -1
  46. CHANGELOG.md +0 -288
  47. fameio-3.1.1.dist-info/RECORD +0 -56
  48. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSE.txt +0 -0
  49. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  50. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  51. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  52. {fameio-3.1.1.dist-info → fameio-3.2.0.dist-info}/entry_points.txt +0 -0
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
4
  from __future__ import annotations
@@ -7,7 +7,7 @@ import ast
7
7
  from typing import Any, Final
8
8
 
9
9
  from fameio.input import SchemaError
10
- from fameio.logs import log_error_and_raise
10
+ from fameio.logs import log_error
11
11
  from fameio.tools import keys_to_lower
12
12
  from .agenttype import AgentType
13
13
  from .java_packages import JavaPackages
@@ -23,20 +23,31 @@ class Schema:
23
23
  _ERR_AGENT_TYPES_EMPTY = "`AgentTypes` must not be empty - at least one type of agent is required."
24
24
  _ERR_MISSING_PACKAGES = "Missing required section `JavaPackages` in Schema."
25
25
 
26
- def __init__(self, definitions: dict):
26
+ def __init__(self, definitions: dict) -> None:
27
27
  self._original_input_dict = definitions
28
- self._agent_types = {}
29
- self._packages = None
28
+ self._agent_types: dict[str, AgentType] = {}
29
+ self._packages: JavaPackages | None = None
30
30
 
31
31
  @classmethod
32
32
  def from_dict(cls, definitions: dict) -> Schema:
33
- """Load given dictionary `definitions` into a new Schema"""
33
+ """
34
+ Load given dictionary `definitions` into a new schema
35
+
36
+ Args:
37
+ definitions: dictionary representation of schema
38
+
39
+ Returns:
40
+ new Schema
41
+
42
+ Raises:
43
+ SchemaError: if definitions are incomplete or erroneous, logged on level "ERROR"
44
+ """
34
45
  definitions = keys_to_lower(definitions)
35
46
  schema = cls(definitions)
36
47
 
37
48
  agent_types = cls._get_or_raise(definitions, Schema.KEY_AGENT_TYPE, Schema._ERR_AGENT_TYPES_MISSING)
38
49
  if len(agent_types) == 0:
39
- log_error_and_raise(SchemaError(Schema._ERR_AGENT_TYPES_EMPTY))
50
+ raise log_error(SchemaError(Schema._ERR_AGENT_TYPES_EMPTY))
40
51
  for agent_type_name, agent_definition in agent_types.items():
41
52
  agent_type = AgentType.from_dict(agent_type_name, agent_definition)
42
53
  schema._agent_types[agent_type_name] = agent_type
@@ -48,9 +59,22 @@ class Schema:
48
59
 
49
60
  @staticmethod
50
61
  def _get_or_raise(definitions: dict[str, Any], key: str, error_message: str) -> Any:
51
- """Get given `key` from given `definitions` - raise error with given `error_message` if not present"""
62
+ """
63
+ Get given `key` from given `definitions` - raise error with given `error_message` if not present
64
+
65
+ Args:
66
+ definitions: to search the key in
67
+ key: to be searched
68
+ error_message: to be logged and included in the raised exception if key is missing
69
+
70
+ Returns:
71
+ value associated with given key in given definitions
72
+
73
+ Raises:
74
+ SchemaError: if given key is not in given definitions, logged on level "ERROR"
75
+ """
52
76
  if key not in definitions:
53
- log_error_and_raise(SchemaError(error_message))
77
+ raise log_error(SchemaError(error_message))
54
78
  return definitions[key]
55
79
 
56
80
  @classmethod
@@ -74,4 +98,6 @@ class Schema:
74
98
  @property
75
99
  def packages(self) -> JavaPackages:
76
100
  """Returns JavaPackages, i.e. names where model classes are defined in"""
101
+ if self._packages is None:
102
+ raise log_error(SchemaError(self._ERR_MISSING_PACKAGES))
77
103
  return self._packages
fameio/input/validator.py CHANGED
@@ -1,10 +1,13 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
5
+
4
6
  import math
5
7
  from collections import Counter
6
- from typing import Any, Union
8
+ from typing import Any
7
9
 
10
+ from fameio.input import InputError
8
11
  from fameio.input.resolver import PathResolver
9
12
  from fameio.input.scenario import Agent, Attribute, Contract, Scenario, StringSet
10
13
  from fameio.input.schema import Schema, AttributeSpecs, AttributeType, AgentType
@@ -13,7 +16,7 @@ from fameio.series import TimeSeriesManager, TimeSeriesError
13
16
  from fameio.time import FameTime
14
17
 
15
18
 
16
- class ValidationError(Exception):
19
+ class ValidationError(InputError):
17
20
  """Indicates an error occurred during validation of any data with a connected schema"""
18
21
 
19
22
 
@@ -26,8 +29,8 @@ class SchemaValidator:
26
29
  _TYPE_NOT_IMPLEMENTED = "Validation not implemented for AttributeType '{}'."
27
30
  _INCOMPATIBLE = "Value '{}' incompatible with {} of Attribute '{}'."
28
31
  _DISALLOWED = "Value '{}' not in list of allowed values of Attribute '{}'"
29
- _AGENT_MISSING = "Contract mentions Agent with ID '{}' but Agent was not declared."
30
- _PRODUCT_MISSING = "Product '{}' not declared in Schema for AgentType '{}'."
32
+ _AGENT_MISSING = "Agent with ID '{}' was not declared in Scenario but used in Contract: '{}'"
33
+ _PRODUCT_MISSING = "'{}' is no product of AgentType '{}'. Contract invalid: '{}'"
31
34
  _KEY_MISSING = "Required key '{}' missing in dictionary '{}'."
32
35
  _ATTRIBUTE_MISSING = "Mandatory attribute '{}' is missing."
33
36
  _DEFAULT_IGNORED = "Optional Attribute: '{}': not specified - provided Default ignored for optional Attributes."
@@ -53,7 +56,7 @@ class SchemaValidator:
53
56
  Returns:
54
57
  a new TimeSeriesManager initialised with validated time series from scenario
55
58
  Raises:
56
- ValidationException: if an error in the scenario or in timeseries is spotted
59
+ ValidationError: if an error in the scenario or in timeseries is spotted
57
60
  """
58
61
  schema = scenario.schema
59
62
  agents = scenario.agents
@@ -113,7 +116,7 @@ class SchemaValidator:
113
116
  def _check_mandatory_or_default(
114
117
  attributes: dict[str, Attribute],
115
118
  specifications: dict[str, AttributeSpecs],
116
- ) -> list[Union[str, float]]:
119
+ ) -> list[str | float]:
117
120
  """
118
121
  Raises Exception if in given list of `specifications` at least one item is mandatory,
119
122
  provides no defaults and is not contained in given `attributes` dictionary
@@ -121,7 +124,7 @@ class SchemaValidator:
121
124
  Returns:
122
125
  list of time series defaults used in scenario
123
126
  """
124
- missing_series_defaults = []
127
+ missing_series_defaults: list[str | float] = []
125
128
  for name, specification in specifications.items():
126
129
  if name not in attributes:
127
130
  if specification.is_mandatory:
@@ -130,7 +133,7 @@ class SchemaValidator:
130
133
  ValidationError(SchemaValidator._ATTRIBUTE_MISSING.format(specification.full_name))
131
134
  )
132
135
  if specification.attr_type == AttributeType.TIME_SERIES:
133
- missing_series_defaults.append(specification.default_value)
136
+ missing_series_defaults.append(specification.default_value) # type: ignore[arg-type]
134
137
  else:
135
138
  if specification.has_default_value:
136
139
  log().warning(SchemaValidator._DEFAULT_IGNORED.format(specification.full_name))
@@ -237,7 +240,7 @@ class SchemaValidator:
237
240
  timeseries_manager: to be filled with timeseries
238
241
 
239
242
  Raises:
240
- ValidationException: if timeseries is not found, ill-formatted or invalid
243
+ ValidationError: if timeseries is not found, ill-formatted or invalid
241
244
  """
242
245
  scenario_attributes = agent.attributes
243
246
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
@@ -254,10 +257,10 @@ class SchemaValidator:
254
257
  attribute_type = specification.attr_type
255
258
  if attribute_type is AttributeType.TIME_SERIES:
256
259
  try:
257
- manager.register_and_validate(attribute.value)
260
+ manager.register_and_validate(attribute.value) # type: ignore[arg-type]
258
261
  except TimeSeriesError as e:
259
262
  message = SchemaValidator._TIME_SERIES_INVALID.format(specification.full_name)
260
- raise log_error(ValidationError(message, e)) from e
263
+ raise log_error(ValidationError(message)) from e
261
264
  if attribute.has_nested:
262
265
  SchemaValidator._ensure_valid_timeseries(attribute.nested, specification.nested_attributes, manager)
263
266
  if attribute.has_nested_list:
@@ -282,7 +285,7 @@ class SchemaValidator:
282
285
  """
283
286
  Recursively iterates through all attributes of an agent, applying tests if attribute type is `StringSet`
284
287
  Raises:
285
- ValidationException: if
288
+ ValidationError: if
286
289
  a) StringSet mentioned in schema is not defined in the scenario
287
290
  b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
288
291
  """
@@ -313,16 +316,20 @@ class SchemaValidator:
313
316
  """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
314
317
  sender_id = contract.sender_id
315
318
  if sender_id not in agent_types_by_id:
316
- raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id)))
319
+ raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id, contract.to_dict())))
317
320
  if contract.receiver_id not in agent_types_by_id:
318
- raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id)))
321
+ raise log_error(
322
+ ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id, contract.to_dict()))
323
+ )
319
324
  sender_type_name = agent_types_by_id[sender_id]
320
325
  if sender_type_name not in schema.agent_types:
321
326
  raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(sender_type_name)))
322
327
  sender_type = schema.agent_types[sender_type_name]
323
328
  product = contract.product_name
324
329
  if product not in sender_type.products:
325
- raise log_error(ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name)))
330
+ raise log_error(
331
+ ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name, contract.to_dict()))
332
+ )
326
333
 
327
334
  @staticmethod
328
335
  def check_agents_have_contracts(scenario: Scenario) -> None:
fameio/input/writer.py CHANGED
@@ -1,10 +1,12 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
- from importlib import metadata
4
+ from __future__ import annotations
5
+
5
6
  import sys
7
+ from importlib import metadata
6
8
  from pathlib import Path
7
- from typing import Any, Union
9
+ from typing import Any
8
10
 
9
11
  from fameprotobuf.contract_pb2 import ProtoContract
10
12
  from fameprotobuf.data_storage_pb2 import DataStorage
@@ -12,19 +14,21 @@ from fameprotobuf.execution_data_pb2 import ExecutionData
12
14
  from fameprotobuf.field_pb2 import NestedField
13
15
  from fameprotobuf.input_file_pb2 import InputData
14
16
  from fameprotobuf.model_pb2 import ModelData
17
+ from google.protobuf.message import EncodeError
15
18
 
16
19
  import fameio
20
+ from fameio.input import InputError
17
21
  from fameio.input.scenario import Agent, Attribute, Contract, GeneralProperties, Scenario, StringSet
18
22
  from fameio.input.schema import AttributeSpecs, AttributeType, JavaPackages, Schema
19
- from fameio.logs import log_error_and_raise, log
23
+ from fameio.logs import log, log_error
20
24
  from fameio.output.reader import Reader
21
25
  from fameio.series import TimeSeriesManager
22
26
  from fameio.time import FameTime
23
27
  from fameio.tools import ensure_is_list
24
28
 
25
29
 
26
- class ProtoWriterError(Exception):
27
- """Indicates an error during writing of protobuf file"""
30
+ class ProtoWriterError(InputError):
31
+ """Indicates an error during writing of a protobuf file"""
28
32
 
29
33
 
30
34
  class ProtoWriter:
@@ -32,13 +36,15 @@ class ProtoWriter:
32
36
 
33
37
  _FAME_PROTOBUF_STREAM_HEADER = fameio.FILE_HEADER_V2
34
38
 
35
- _TYPE_NOT_IMPLEMENTED = "AttributeType '{}' not implemented."
39
+ _TYPE_NOT_IMPLEMENTED = "Protobuf representation for AttributeType '{}' not implemented."
36
40
  _CONTRACT_UNSUPPORTED = (
37
41
  "Unsupported value for Contract Attribute '{}'; "
38
42
  "Only support `int`, `float`, `enum` or `dict` types are supported here."
39
43
  )
40
44
  _USING_DEFAULT = "Using provided Default for Attribute: '{}'."
41
- _NO_FILE_SPECIFIED = "Could not write to '{}'. Please specify a valid output file."
45
+ _ERR_FILE_PATH = "Could not open file '{}' for writing. Please specify a valid output file."
46
+ _ERR_PROTOBUF_ENCODING = "Could not encode to protobuf. Please contact FAME-Io developers: fame@dlr.de"
47
+ _ERR_FILE_WRITE = "Could not write to file '{}'."
42
48
 
43
49
  _INFO_WRITING = "Writing scenario to protobuf file `{}`"
44
50
  _INFO_WRITING_COMPLETED = "Saved protobuf file `{}` to disk"
@@ -48,12 +54,32 @@ class ProtoWriter:
48
54
  self._time_series_manager: TimeSeriesManager = time_series_manager
49
55
 
50
56
  def write_validated_scenario(self, scenario: Scenario) -> None:
51
- """Writes given validated Scenario to file"""
52
- pb_data_storage = self._create_protobuf_from_scenario(scenario)
53
- self._write_protobuf_to_disk(pb_data_storage)
57
+ """
58
+ Writes given validated Scenario to file
59
+
60
+ Args:
61
+ scenario: to be written to file
62
+
63
+ Raises:
64
+ ProtoWriterError: if scenario could not be written to file, logged with level "ERROR"
65
+ """
66
+ data_storage = self._create_protobuf_from_scenario(scenario)
67
+ serialised = self._serialise(data_storage)
68
+ self._write_data_to_disk(serialised)
54
69
 
55
70
  def _create_protobuf_from_scenario(self, scenario: Scenario) -> DataStorage:
56
- """Returns given `scenario` written to new DataStorage protobuf"""
71
+ """
72
+ Returns given `scenario` written to new DataStorage protobuf
73
+
74
+ Args:
75
+ scenario: to be converted to protobuf
76
+
77
+ Returns:
78
+ protobuf container with the scenario
79
+
80
+ Raises:
81
+ ProtoWriterError: if the protobuf representation cannot be constructed, logged with level "ERROR"
82
+ """
57
83
  log().info("Converting scenario to protobuf.")
58
84
  pb_data_storage = DataStorage()
59
85
  pb_input = pb_data_storage.input
@@ -79,7 +105,17 @@ class ProtoWriter:
79
105
  pb_input.simulation.random_seed = general_properties.simulation_random_seed
80
106
 
81
107
  def _add_agents(self, pb_input: InputData, agents: list[Agent], schema: Schema) -> None:
82
- """Triggers setting of `agents` to `pb_input`"""
108
+ """
109
+ Triggers setting of `agents` to `pb_input`
110
+
111
+ Args:
112
+ pb_input: parent element to add the agents to
113
+ agents: to be added to parent input
114
+ schema: describing the agents' attributes
115
+
116
+ Raises:
117
+ ProtoWriterError: if any agent's attributes cannot be set, logged with level "ERROR"
118
+ """
83
119
  log().info("Adding Agents")
84
120
  for agent in agents:
85
121
  pb_agent = self._set_agent(pb_input.agents.add(), agent)
@@ -96,11 +132,21 @@ class ProtoWriter:
96
132
 
97
133
  def _set_attributes(
98
134
  self,
99
- pb_parent: Union[InputData.AgentDao, NestedField],
135
+ pb_parent: InputData.AgentDao | NestedField,
100
136
  attributes: dict[str, Attribute],
101
137
  specs: dict[str, AttributeSpecs],
102
138
  ) -> None:
103
- """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes"""
139
+ """
140
+ Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes
141
+
142
+ Args:
143
+ pb_parent: to store the attributes in
144
+ attributes: to be stored
145
+ specs: attribute specifications associated with attributes
146
+
147
+ Raises:
148
+ ProtoWriterError: if any attribute cannot be set, logged with level "ERROR"
149
+ """
104
150
  values_not_set = list(specs.keys())
105
151
  for name, attribute in attributes.items():
106
152
  pb_field = self._add_field(pb_parent, name)
@@ -124,14 +170,24 @@ class ProtoWriter:
124
170
  log().info(self._USING_DEFAULT.format(name))
125
171
 
126
172
  @staticmethod
127
- def _add_field(pb_parent: Union[InputData.AgentDao, NestedField], name: str) -> NestedField:
173
+ def _add_field(pb_parent: InputData.AgentDao | NestedField, name: str) -> NestedField:
128
174
  """Returns new field with given `name` that is added to given `pb_parent`"""
129
175
  pb_field = pb_parent.fields.add()
130
176
  pb_field.field_name = name
131
177
  return pb_field
132
178
 
133
179
  def _set_attribute(self, pb_field: NestedField, value: Any, attribute_type: AttributeType) -> None:
134
- """Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`"""
180
+ """
181
+ Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`
182
+
183
+ Args:
184
+ pb_field: parent element to contain the attribute value therein
185
+ value: of the attribute
186
+ attribute_type: type of the attribute
187
+
188
+ Raises:
189
+ ProtoWriterError: if the attribute type has no serialisation implementation, logged with level "ERROR"
190
+ """
135
191
  if attribute_type is AttributeType.INTEGER:
136
192
  pb_field.int_values.extend(ensure_is_list(value))
137
193
  elif attribute_type is AttributeType.DOUBLE:
@@ -145,11 +201,20 @@ class ProtoWriter:
145
201
  elif attribute_type is AttributeType.TIME_SERIES:
146
202
  pb_field.series_id = self._time_series_manager.get_series_id_by_identifier(value)
147
203
  else:
148
- log_error_and_raise(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
204
+ raise log_error(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
149
205
 
150
206
  @staticmethod
151
207
  def _add_contracts(pb_input: InputData, contracts: list[Contract]) -> None:
152
- """Triggers setting of `contracts` to `pb_input`"""
208
+ """
209
+ Adds given contracts to input data
210
+
211
+ Args:
212
+ pb_input: parent element to have the contracts added to
213
+ contracts: to be added
214
+
215
+ Raises:
216
+ ProtoWriterError: if any contract cannot be added, logged with level "ERROR"
217
+ """
153
218
  log().info("Adding Contracts")
154
219
  for contract in contracts:
155
220
  pb_contract = ProtoWriter._set_contract(pb_input.contracts.add(), contract)
@@ -169,10 +234,17 @@ class ProtoWriter:
169
234
  return pb_contract
170
235
 
171
236
  @staticmethod
172
- def _set_contract_attributes(
173
- pb_parent: Union[ProtoContract, NestedField], attributes: dict[str, Attribute]
174
- ) -> None:
175
- """Assign (nested) Attributes to given protobuf container `pb_parent`"""
237
+ def _set_contract_attributes(pb_parent: ProtoContract | NestedField, attributes: dict[str, Attribute]) -> None:
238
+ """
239
+ Assign (nested) Attributes to given protobuf container `pb_parent`
240
+
241
+ Args:
242
+ pb_parent: parent element, either a contract or an attribute
243
+ attributes: to be set as child elements of parent
244
+
245
+ Raises:
246
+ ProtoWriterError: if a type unsupported for contract attributes is found, logged with level "ERROR"
247
+ """
176
248
  for name, attribute in attributes.items():
177
249
  log().debug(f"Assigning contract attribute `{name}`.")
178
250
  pb_field = ProtoWriter._add_field(pb_parent, name)
@@ -186,7 +258,7 @@ class ProtoWriter:
186
258
  elif isinstance(value, str):
187
259
  pb_field.string_values.extend([value])
188
260
  else:
189
- log_error_and_raise(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
261
+ raise log_error(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
190
262
  elif attribute.has_nested:
191
263
  ProtoWriter._set_contract_attributes(pb_field, attribute.nested)
192
264
 
@@ -228,22 +300,50 @@ class ProtoWriter:
228
300
  pb_packages.data_items.extend(java_packages.data_items)
229
301
  pb_packages.portables.extend(java_packages.portables)
230
302
 
231
- def _write_protobuf_to_disk(self, pb_data_storage: DataStorage) -> None:
232
- """Writes given `protobuf_input_data` to disk"""
233
- log().info(self._INFO_WRITING.format(self.file_path))
234
- try:
235
- with open(self.file_path, "wb") as file:
236
- serialised_data_storage = pb_data_storage.SerializeToString()
237
- file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
238
- file.write(len(serialised_data_storage).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
239
- file.write(serialised_data_storage)
240
- except OSError as e:
241
- log_error_and_raise(ProtoWriterError(ProtoWriter._NO_FILE_SPECIFIED.format(self.file_path), e))
242
- log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
243
-
244
303
  @staticmethod
245
304
  def _set_execution_versions(pb_version_data: ExecutionData.VersionData) -> None:
246
305
  """Adds version strings for fameio, fameprotobuf, and python to the given Versions message"""
247
306
  pb_version_data.fame_protobuf = metadata.version("fameprotobuf")
248
307
  pb_version_data.fame_io = metadata.version("fameio")
249
308
  pb_version_data.python = sys.version
309
+
310
+ def _serialise(self, data_storage: DataStorage) -> bytes:
311
+ """
312
+ Serialise given data storage to bytes
313
+
314
+ Args:
315
+ data_storage: to be serialised
316
+
317
+ Returns:
318
+ binary string representation of given data storage
319
+
320
+ Raises:
321
+ ProtoWriterError: if given data storage could not be serialised, logged with level "ERROR"
322
+ """
323
+ try:
324
+ return data_storage.SerializeToString()
325
+ except EncodeError as e:
326
+ raise log_error(ProtoWriterError(self._ERR_PROTOBUF_ENCODING)) from e
327
+
328
+ def _write_data_to_disk(self, serialised_data: bytes) -> None:
329
+ """
330
+ Writes given serialised data to file
331
+
332
+ Args:
333
+ serialised_data: to be written to file
334
+
335
+ Raises:
336
+ ProtoWriterError: if file could not be opened or written, logged with level "ERROR"
337
+ """
338
+ log().info(self._INFO_WRITING.format(self.file_path))
339
+ try:
340
+ with open(self.file_path, "wb") as file:
341
+ try:
342
+ file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
343
+ file.write(len(serialised_data).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
344
+ file.write(serialised_data)
345
+ except IOError as e:
346
+ raise log_error(ProtoWriterError(self._ERR_FILE_WRITE.format(self.file_path))) from e
347
+ except OSError as e:
348
+ raise log_error(ProtoWriterError(ProtoWriter._ERR_FILE_PATH.format(self.file_path), e)) from e
349
+ log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
fameio/logs.py CHANGED
@@ -1,11 +1,11 @@
1
1
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
6
  import logging as pylog
6
7
  from enum import Enum
7
8
  from pathlib import Path
8
- from typing import Optional
9
9
 
10
10
 
11
11
  class LogLevel(Enum):
@@ -42,20 +42,6 @@ def log() -> pylog.Logger:
42
42
  return _loggers[0]
43
43
 
44
44
 
45
- def log_critical_and_raise(exception: Exception) -> None:
46
- """
47
- Raises the specified `exception` and logs a critical error with the exception's message
48
-
49
- Args:
50
- exception: to be raised and logged at level `critical`
51
-
52
- Raises:
53
- Exception: the given exception
54
- """
55
- log().critical(str(exception))
56
- raise exception
57
-
58
-
59
45
  def log_critical(exception: Exception) -> Exception:
60
46
  """
61
47
  Logs a critical error with the exception's message and returns the exception for raising it.
@@ -73,20 +59,6 @@ def log_critical(exception: Exception) -> Exception:
73
59
  return exception
74
60
 
75
61
 
76
- def log_error_and_raise(exception: Exception) -> None:
77
- """
78
- Raises the specified `exception` and logs a critical error with the exception's message
79
-
80
- Args:
81
- exception: to be raised and logged at level `error`
82
-
83
- Raises:
84
- Exception: the given exception
85
- """
86
- log().error(str(exception))
87
- raise exception
88
-
89
-
90
62
  def log_error(exception: Exception) -> Exception:
91
63
  """
92
64
  Logs an error with the exception's message and returns the exception for raising.
@@ -98,13 +70,13 @@ def log_error(exception: Exception) -> Exception:
98
70
  exception: to extract the error message from
99
71
 
100
72
  Returns:
101
- the given exception
73
+ the given exception
102
74
  """
103
75
  log().error(str(exception))
104
76
  return exception
105
77
 
106
78
 
107
- def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
79
+ def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
108
80
  """
109
81
  Ensures a logger for fameio is present and uses the specified options
110
82
 
fameio/output/__init__.py CHANGED
@@ -1,3 +1,7 @@
1
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: CC0-1.0
4
+
5
+
6
+ class OutputError(Exception):
7
+ """An error that occurred during extracting an output file"""