fameio 3.1.0__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. fameio/cli/__init__.py +2 -3
  2. fameio/cli/convert_results.py +6 -4
  3. fameio/cli/make_config.py +6 -4
  4. fameio/cli/options.py +3 -3
  5. fameio/cli/parser.py +43 -31
  6. fameio/input/__init__.py +1 -9
  7. fameio/input/loader/__init__.py +9 -7
  8. fameio/input/loader/controller.py +64 -14
  9. fameio/input/loader/loader.py +14 -7
  10. fameio/input/metadata.py +37 -18
  11. fameio/input/resolver.py +5 -4
  12. fameio/input/scenario/__init__.py +7 -8
  13. fameio/input/scenario/agent.py +52 -19
  14. fameio/input/scenario/attribute.py +28 -29
  15. fameio/input/scenario/contract.py +161 -52
  16. fameio/input/scenario/exception.py +45 -22
  17. fameio/input/scenario/fameiofactory.py +63 -7
  18. fameio/input/scenario/generalproperties.py +17 -6
  19. fameio/input/scenario/scenario.py +111 -28
  20. fameio/input/scenario/stringset.py +27 -8
  21. fameio/input/schema/__init__.py +5 -5
  22. fameio/input/schema/agenttype.py +29 -11
  23. fameio/input/schema/attribute.py +174 -84
  24. fameio/input/schema/java_packages.py +8 -5
  25. fameio/input/schema/schema.py +35 -9
  26. fameio/input/validator.py +58 -42
  27. fameio/input/writer.py +139 -41
  28. fameio/logs.py +23 -17
  29. fameio/output/__init__.py +5 -1
  30. fameio/output/agent_type.py +93 -27
  31. fameio/output/conversion.py +48 -30
  32. fameio/output/csv_writer.py +88 -18
  33. fameio/output/data_transformer.py +12 -21
  34. fameio/output/input_dao.py +68 -32
  35. fameio/output/output_dao.py +26 -4
  36. fameio/output/reader.py +61 -18
  37. fameio/output/yaml_writer.py +18 -9
  38. fameio/scripts/__init__.py +9 -2
  39. fameio/scripts/convert_results.py +144 -52
  40. fameio/scripts/convert_results.py.license +1 -1
  41. fameio/scripts/exception.py +7 -0
  42. fameio/scripts/make_config.py +34 -12
  43. fameio/scripts/make_config.py.license +1 -1
  44. fameio/series.py +132 -47
  45. fameio/time.py +88 -37
  46. fameio/tools.py +9 -8
  47. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/METADATA +19 -13
  48. fameio-3.2.0.dist-info/RECORD +56 -0
  49. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/WHEEL +1 -1
  50. CHANGELOG.md +0 -279
  51. fameio-3.1.0.dist-info/RECORD +0 -56
  52. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSE.txt +0 -0
  53. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  54. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  55. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  56. {fameio-3.1.0.dist-info → fameio-3.2.0.dist-info}/entry_points.txt +0 -0
fameio/input/validator.py CHANGED
@@ -1,24 +1,24 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
6
  import math
6
7
  from collections import Counter
7
8
  from typing import Any
8
9
 
10
+ from fameio.input import InputError
9
11
  from fameio.input.resolver import PathResolver
10
12
  from fameio.input.scenario import Agent, Attribute, Contract, Scenario, StringSet
11
13
  from fameio.input.schema import Schema, AttributeSpecs, AttributeType, AgentType
12
- from fameio.logs import log_error_and_raise, log
14
+ from fameio.logs import log, log_error
13
15
  from fameio.series import TimeSeriesManager, TimeSeriesError
14
16
  from fameio.time import FameTime
15
17
 
16
18
 
17
- class ValidationError(Exception):
19
+ class ValidationError(InputError):
18
20
  """Indicates an error occurred during validation of any data with a connected schema"""
19
21
 
20
- pass
21
-
22
22
 
23
23
  class SchemaValidator:
24
24
  """Handles validation of scenarios based on a connected `schema`"""
@@ -29,8 +29,8 @@ class SchemaValidator:
29
29
  _TYPE_NOT_IMPLEMENTED = "Validation not implemented for AttributeType '{}'."
30
30
  _INCOMPATIBLE = "Value '{}' incompatible with {} of Attribute '{}'."
31
31
  _DISALLOWED = "Value '{}' not in list of allowed values of Attribute '{}'"
32
- _AGENT_MISSING = "Contract mentions Agent with ID '{}' but Agent was not declared."
33
- _PRODUCT_MISSING = "Product '{}' not declared in Schema for AgentType '{}'."
32
+ _AGENT_MISSING = "Agent with ID '{}' was not declared in Scenario but used in Contract: '{}'"
33
+ _PRODUCT_MISSING = "'{}' is no product of AgentType '{}'. Contract invalid: '{}'"
34
34
  _KEY_MISSING = "Required key '{}' missing in dictionary '{}'."
35
35
  _ATTRIBUTE_MISSING = "Mandatory attribute '{}' is missing."
36
36
  _DEFAULT_IGNORED = "Optional Attribute: '{}': not specified - provided Default ignored for optional Attributes."
@@ -56,7 +56,7 @@ class SchemaValidator:
56
56
  Returns:
57
57
  a new TimeSeriesManager initialised with validated time series from scenario
58
58
  Raises:
59
- ValidationException: if an error in the scenario or in timeseries is spotted
59
+ ValidationError: if an error in the scenario or in timeseries is spotted
60
60
  """
61
61
  schema = scenario.schema
62
62
  agents = scenario.agents
@@ -79,27 +79,29 @@ class SchemaValidator:
79
79
  list_of_ids = [agent.id for agent in agents]
80
80
  non_unique_ids = [agent_id for agent_id, count in Counter(list_of_ids).items() if count > 1]
81
81
  if non_unique_ids:
82
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
82
+ raise log_error(ValidationError(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
83
83
 
84
84
  @staticmethod
85
85
  def ensure_agent_and_timeseries_are_valid(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager):
86
86
  """Validates given `agent` against `schema` plus loads and validates its timeseries"""
87
87
  SchemaValidator.ensure_agent_type_in_schema(agent, schema)
88
- SchemaValidator.ensure_is_valid_agent(agent, schema)
88
+ SchemaValidator.ensure_is_valid_agent(agent, schema, timeseries_manager)
89
89
  SchemaValidator.load_and_validate_timeseries(agent, schema, timeseries_manager)
90
90
 
91
91
  @staticmethod
92
92
  def ensure_agent_type_in_schema(agent: Agent, schema: Schema) -> None:
93
93
  """Raises exception if type for given `agent` is not specified in given `schema`"""
94
94
  if agent.type_name not in schema.agent_types:
95
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
95
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
96
96
 
97
97
  @staticmethod
98
- def ensure_is_valid_agent(agent: Agent, schema: Schema) -> None:
98
+ def ensure_is_valid_agent(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
99
99
  """Raises an exception if given `agent` does not meet the specified `schema` requirements"""
100
100
  scenario_attributes = agent.attributes
101
101
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
102
- SchemaValidator._ensure_mandatory_present(scenario_attributes, schema_attributes)
102
+ missing_default_series = SchemaValidator._check_mandatory_or_default(scenario_attributes, schema_attributes)
103
+ for missing_series in missing_default_series:
104
+ timeseries_manager.register_and_validate(missing_series)
103
105
  SchemaValidator._ensure_attributes_exist(scenario_attributes, schema_attributes)
104
106
  SchemaValidator._ensure_value_and_type_match(scenario_attributes, schema_attributes)
105
107
 
@@ -108,22 +110,30 @@ class SchemaValidator:
108
110
  """Returns agent specified by `name` or raises Exception if this agent is not present in given `schema`"""
109
111
  if name in schema.agent_types:
110
112
  return schema.agent_types[name]
111
- else:
112
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
113
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
113
114
 
114
115
  @staticmethod
115
- def _ensure_mandatory_present(attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]) -> None:
116
+ def _check_mandatory_or_default(
117
+ attributes: dict[str, Attribute],
118
+ specifications: dict[str, AttributeSpecs],
119
+ ) -> list[str | float]:
116
120
  """
117
121
  Raises Exception if in given list of `specifications` at least one item is mandatory,
118
122
  provides no defaults and is not contained in given `attributes` dictionary
123
+
124
+ Returns:
125
+ list of time series defaults used in scenario
119
126
  """
127
+ missing_series_defaults: list[str | float] = []
120
128
  for name, specification in specifications.items():
121
129
  if name not in attributes:
122
130
  if specification.is_mandatory:
123
131
  if not specification.has_default_value:
124
- log_error_and_raise(
132
+ raise log_error(
125
133
  ValidationError(SchemaValidator._ATTRIBUTE_MISSING.format(specification.full_name))
126
134
  )
135
+ if specification.attr_type == AttributeType.TIME_SERIES:
136
+ missing_series_defaults.append(specification.default_value) # type: ignore[arg-type]
127
137
  else:
128
138
  if specification.has_default_value:
129
139
  log().warning(SchemaValidator._DEFAULT_IGNORED.format(specification.full_name))
@@ -133,16 +143,21 @@ class SchemaValidator:
133
143
  attribute = attributes[name]
134
144
  if specification.is_list:
135
145
  for entry in attribute.nested_list:
136
- SchemaValidator._ensure_mandatory_present(entry, specification.nested_attributes)
146
+ missing_series_defaults.extend(
147
+ SchemaValidator._check_mandatory_or_default(entry, specification.nested_attributes)
148
+ )
137
149
  else:
138
- SchemaValidator._ensure_mandatory_present(attribute.nested, specification.nested_attributes)
150
+ missing_series_defaults.extend(
151
+ SchemaValidator._check_mandatory_or_default(attribute.nested, specification.nested_attributes)
152
+ )
153
+ return missing_series_defaults
139
154
 
140
155
  @staticmethod
141
156
  def _ensure_attributes_exist(attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]) -> None:
142
157
  """Raises exception any entry of given `attributes` has no corresponding type `specification`"""
143
158
  for name, attribute in attributes.items():
144
159
  if name not in specifications:
145
- log_error_and_raise(ValidationError(SchemaValidator._ATTRIBUTE_UNKNOWN.format(attribute)))
160
+ raise log_error(ValidationError(SchemaValidator._ATTRIBUTE_UNKNOWN.format(attribute)))
146
161
  if attribute.has_nested:
147
162
  specification = specifications[name]
148
163
  SchemaValidator._ensure_attributes_exist(attribute.nested, specification.nested_attributes)
@@ -163,10 +178,10 @@ class SchemaValidator:
163
178
  type_spec = specification.attr_type
164
179
  if not SchemaValidator._is_compatible(specification, value):
165
180
  message = SchemaValidator._INCOMPATIBLE.format(value, type_spec, specification.full_name)
166
- log_error_and_raise(ValidationError(message))
181
+ raise log_error(ValidationError(message))
167
182
  if not SchemaValidator._is_allowed_value(specification, value):
168
183
  message = SchemaValidator._DISALLOWED.format(value, specification.full_name)
169
- log_error_and_raise(ValidationError(message))
184
+ raise log_error(ValidationError(message))
170
185
  if attribute.has_nested:
171
186
  SchemaValidator._ensure_value_and_type_match(attribute.nested, specification.nested_attributes)
172
187
  if attribute.has_nested_list:
@@ -186,8 +201,7 @@ class SchemaValidator:
186
201
  if not SchemaValidator._is_compatible_value(attribute_type, value):
187
202
  return False
188
203
  return True
189
- else:
190
- return (not is_list) and SchemaValidator._is_compatible_value(attribute_type, value_or_values)
204
+ return (not is_list) and SchemaValidator._is_compatible_value(attribute_type, value_or_values)
191
205
 
192
206
  @staticmethod
193
207
  def _is_compatible_value(attribute_type: AttributeType, value) -> bool:
@@ -198,24 +212,22 @@ class SchemaValidator:
198
212
  return False
199
213
  if attribute_type is AttributeType.LONG:
200
214
  return isinstance(value, int)
201
- elif attribute_type is AttributeType.DOUBLE:
215
+ if attribute_type is AttributeType.DOUBLE:
202
216
  return isinstance(value, (int, float)) and not math.isnan(value)
203
- elif attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
217
+ if attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
204
218
  return isinstance(value, str)
205
- elif attribute_type is AttributeType.TIME_STAMP:
219
+ if attribute_type is AttributeType.TIME_STAMP:
206
220
  return FameTime.is_fame_time_compatible(value)
207
- elif attribute_type is AttributeType.TIME_SERIES:
221
+ if attribute_type is AttributeType.TIME_SERIES:
208
222
  return isinstance(value, (str, int)) or (isinstance(value, float) and not math.isnan(value))
209
- else:
210
- log_error_and_raise(ValidationError(SchemaValidator._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
223
+ raise log_error(ValidationError(SchemaValidator._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
211
224
 
212
225
  @staticmethod
213
226
  def _is_allowed_value(attribute: AttributeSpecs, value) -> bool:
214
227
  """Returns True if `value` matches an entry of given `Attribute`'s value list or if this list is empty"""
215
228
  if not attribute.values:
216
229
  return True
217
- else:
218
- return value in attribute.values
230
+ return value in attribute.values
219
231
 
220
232
  @staticmethod
221
233
  def load_and_validate_timeseries(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
@@ -228,7 +240,7 @@ class SchemaValidator:
228
240
  timeseries_manager: to be filled with timeseries
229
241
 
230
242
  Raises:
231
- ValidationException: if timeseries is not found, ill-formatted or invalid
243
+ ValidationError: if timeseries is not found, ill-formatted or invalid
232
244
  """
233
245
  scenario_attributes = agent.attributes
234
246
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
@@ -245,10 +257,10 @@ class SchemaValidator:
245
257
  attribute_type = specification.attr_type
246
258
  if attribute_type is AttributeType.TIME_SERIES:
247
259
  try:
248
- manager.register_and_validate(attribute.value)
260
+ manager.register_and_validate(attribute.value) # type: ignore[arg-type]
249
261
  except TimeSeriesError as e:
250
262
  message = SchemaValidator._TIME_SERIES_INVALID.format(specification.full_name)
251
- log_error_and_raise(ValidationError(message, e))
263
+ raise log_error(ValidationError(message)) from e
252
264
  if attribute.has_nested:
253
265
  SchemaValidator._ensure_valid_timeseries(attribute.nested, specification.nested_attributes, manager)
254
266
  if attribute.has_nested_list:
@@ -273,7 +285,7 @@ class SchemaValidator:
273
285
  """
274
286
  Recursively iterates through all attributes of an agent, applying tests if attribute type is `StringSet`
275
287
  Raises:
276
- ValidationException: if
288
+ ValidationError: if
277
289
  a) StringSet mentioned in schema is not defined in the scenario
278
290
  b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
279
291
  """
@@ -287,10 +299,10 @@ class SchemaValidator:
287
299
  msg = SchemaValidator._MISSING_STRING_SET_ENTRY.format(
288
300
  attribute.value, str(attribute), name
289
301
  )
290
- log_error_and_raise(ValidationError(msg))
302
+ raise log_error(ValidationError(msg))
291
303
  else:
292
304
  msg = SchemaValidator._MISSING_STRING_SET.format(specification.full_name)
293
- log_error_and_raise(ValidationError(msg))
305
+ raise log_error(ValidationError(msg))
294
306
  if attribute.has_nested:
295
307
  SchemaValidator._ensure_string_set_consistency(
296
308
  attribute.nested, specification.nested_attributes, string_sets
@@ -304,16 +316,20 @@ class SchemaValidator:
304
316
  """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
305
317
  sender_id = contract.sender_id
306
318
  if sender_id not in agent_types_by_id:
307
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id)))
319
+ raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id, contract.to_dict())))
308
320
  if contract.receiver_id not in agent_types_by_id:
309
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id)))
321
+ raise log_error(
322
+ ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id, contract.to_dict()))
323
+ )
310
324
  sender_type_name = agent_types_by_id[sender_id]
311
325
  if sender_type_name not in schema.agent_types:
312
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(sender_type_name)))
326
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(sender_type_name)))
313
327
  sender_type = schema.agent_types[sender_type_name]
314
328
  product = contract.product_name
315
329
  if product not in sender_type.products:
316
- log_error_and_raise(ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name)))
330
+ raise log_error(
331
+ ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name, contract.to_dict()))
332
+ )
317
333
 
318
334
  @staticmethod
319
335
  def check_agents_have_contracts(scenario: Scenario) -> None:
fameio/input/writer.py CHANGED
@@ -1,10 +1,12 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
- import importlib.metadata as metadata
4
+ from __future__ import annotations
5
+
5
6
  import sys
7
+ from importlib import metadata
6
8
  from pathlib import Path
7
- from typing import Any, Union
9
+ from typing import Any
8
10
 
9
11
  from fameprotobuf.contract_pb2 import ProtoContract
10
12
  from fameprotobuf.data_storage_pb2 import DataStorage
@@ -12,21 +14,21 @@ from fameprotobuf.execution_data_pb2 import ExecutionData
12
14
  from fameprotobuf.field_pb2 import NestedField
13
15
  from fameprotobuf.input_file_pb2 import InputData
14
16
  from fameprotobuf.model_pb2 import ModelData
17
+ from google.protobuf.message import EncodeError
15
18
 
16
19
  import fameio
20
+ from fameio.input import InputError
17
21
  from fameio.input.scenario import Agent, Attribute, Contract, GeneralProperties, Scenario, StringSet
18
22
  from fameio.input.schema import AttributeSpecs, AttributeType, JavaPackages, Schema
19
- from fameio.logs import log_error_and_raise, log
23
+ from fameio.logs import log, log_error
20
24
  from fameio.output.reader import Reader
21
25
  from fameio.series import TimeSeriesManager
22
26
  from fameio.time import FameTime
23
27
  from fameio.tools import ensure_is_list
24
28
 
25
29
 
26
- class ProtoWriterError(Exception):
27
- """Indicates an error during writing of protobuf file"""
28
-
29
- pass
30
+ class ProtoWriterError(InputError):
31
+ """Indicates an error during writing of a protobuf file"""
30
32
 
31
33
 
32
34
  class ProtoWriter:
@@ -34,13 +36,15 @@ class ProtoWriter:
34
36
 
35
37
  _FAME_PROTOBUF_STREAM_HEADER = fameio.FILE_HEADER_V2
36
38
 
37
- _TYPE_NOT_IMPLEMENTED = "AttributeType '{}' not implemented."
39
+ _TYPE_NOT_IMPLEMENTED = "Protobuf representation for AttributeType '{}' not implemented."
38
40
  _CONTRACT_UNSUPPORTED = (
39
41
  "Unsupported value for Contract Attribute '{}'; "
40
42
  "Only support `int`, `float`, `enum` or `dict` types are supported here."
41
43
  )
42
44
  _USING_DEFAULT = "Using provided Default for Attribute: '{}'."
43
- _NO_FILE_SPECIFIED = "Could not write to '{}'. Please specify a valid output file."
45
+ _ERR_FILE_PATH = "Could not open file '{}' for writing. Please specify a valid output file."
46
+ _ERR_PROTOBUF_ENCODING = "Could not encode to protobuf. Please contact FAME-Io developers: fame@dlr.de"
47
+ _ERR_FILE_WRITE = "Could not write to file '{}'."
44
48
 
45
49
  _INFO_WRITING = "Writing scenario to protobuf file `{}`"
46
50
  _INFO_WRITING_COMPLETED = "Saved protobuf file `{}` to disk"
@@ -50,12 +54,32 @@ class ProtoWriter:
50
54
  self._time_series_manager: TimeSeriesManager = time_series_manager
51
55
 
52
56
  def write_validated_scenario(self, scenario: Scenario) -> None:
53
- """Writes given validated Scenario to file"""
54
- pb_data_storage = self._create_protobuf_from_scenario(scenario)
55
- self._write_protobuf_to_disk(pb_data_storage)
57
+ """
58
+ Writes given validated Scenario to file
59
+
60
+ Args:
61
+ scenario: to be written to file
62
+
63
+ Raises:
64
+ ProtoWriterError: if scenario could not be written to file, logged with level "ERROR"
65
+ """
66
+ data_storage = self._create_protobuf_from_scenario(scenario)
67
+ serialised = self._serialise(data_storage)
68
+ self._write_data_to_disk(serialised)
56
69
 
57
70
  def _create_protobuf_from_scenario(self, scenario: Scenario) -> DataStorage:
58
- """Returns given `scenario` written to new DataStorage protobuf"""
71
+ """
72
+ Returns given `scenario` written to new DataStorage protobuf
73
+
74
+ Args:
75
+ scenario: to be converted to protobuf
76
+
77
+ Returns:
78
+ protobuf container with the scenario
79
+
80
+ Raises:
81
+ ProtoWriterError: if the protobuf representation cannot be constructed, logged with level "ERROR"
82
+ """
59
83
  log().info("Converting scenario to protobuf.")
60
84
  pb_data_storage = DataStorage()
61
85
  pb_input = pb_data_storage.input
@@ -81,7 +105,17 @@ class ProtoWriter:
81
105
  pb_input.simulation.random_seed = general_properties.simulation_random_seed
82
106
 
83
107
  def _add_agents(self, pb_input: InputData, agents: list[Agent], schema: Schema) -> None:
84
- """Triggers setting of `agents` to `pb_input`"""
108
+ """
109
+ Triggers setting of `agents` to `pb_input`
110
+
111
+ Args:
112
+ pb_input: parent element to add the agents to
113
+ agents: to be added to parent input
114
+ schema: describing the agents' attributes
115
+
116
+ Raises:
117
+ ProtoWriterError: if any agent's attributes cannot be set, logged with level "ERROR"
118
+ """
85
119
  log().info("Adding Agents")
86
120
  for agent in agents:
87
121
  pb_agent = self._set_agent(pb_input.agents.add(), agent)
@@ -98,12 +132,22 @@ class ProtoWriter:
98
132
 
99
133
  def _set_attributes(
100
134
  self,
101
- pb_parent: Union[InputData.AgentDao, NestedField],
135
+ pb_parent: InputData.AgentDao | NestedField,
102
136
  attributes: dict[str, Attribute],
103
137
  specs: dict[str, AttributeSpecs],
104
138
  ) -> None:
105
- """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes"""
106
- values_not_set = [key for key in specs.keys()]
139
+ """
140
+ Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes
141
+
142
+ Args:
143
+ pb_parent: to store the attributes in
144
+ attributes: to be stored
145
+ specs: attribute specifications associated with attributes
146
+
147
+ Raises:
148
+ ProtoWriterError: if any attribute cannot be set, logged with level "ERROR"
149
+ """
150
+ values_not_set = list(specs.keys())
107
151
  for name, attribute in attributes.items():
108
152
  pb_field = self._add_field(pb_parent, name)
109
153
  attribute_specs = specs[name]
@@ -126,14 +170,24 @@ class ProtoWriter:
126
170
  log().info(self._USING_DEFAULT.format(name))
127
171
 
128
172
  @staticmethod
129
- def _add_field(pb_parent: Union[InputData.AgentDao, NestedField], name: str) -> NestedField:
173
+ def _add_field(pb_parent: InputData.AgentDao | NestedField, name: str) -> NestedField:
130
174
  """Returns new field with given `name` that is added to given `pb_parent`"""
131
175
  pb_field = pb_parent.fields.add()
132
176
  pb_field.field_name = name
133
177
  return pb_field
134
178
 
135
179
  def _set_attribute(self, pb_field: NestedField, value: Any, attribute_type: AttributeType) -> None:
136
- """Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`"""
180
+ """
181
+ Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`
182
+
183
+ Args:
184
+ pb_field: parent element to contain the attribute value therein
185
+ value: of the attribute
186
+ attribute_type: type of the attribute
187
+
188
+ Raises:
189
+ ProtoWriterError: if the attribute type has no serialisation implementation, logged with level "ERROR"
190
+ """
137
191
  if attribute_type is AttributeType.INTEGER:
138
192
  pb_field.int_values.extend(ensure_is_list(value))
139
193
  elif attribute_type is AttributeType.DOUBLE:
@@ -147,11 +201,20 @@ class ProtoWriter:
147
201
  elif attribute_type is AttributeType.TIME_SERIES:
148
202
  pb_field.series_id = self._time_series_manager.get_series_id_by_identifier(value)
149
203
  else:
150
- log_error_and_raise(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
204
+ raise log_error(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
151
205
 
152
206
  @staticmethod
153
207
  def _add_contracts(pb_input: InputData, contracts: list[Contract]) -> None:
154
- """Triggers setting of `contracts` to `pb_input`"""
208
+ """
209
+ Adds given contracts to input data
210
+
211
+ Args:
212
+ pb_input: parent element to have the contracts added to
213
+ contracts: to be added
214
+
215
+ Raises:
216
+ ProtoWriterError: if any contract cannot be added, logged with level "ERROR"
217
+ """
155
218
  log().info("Adding Contracts")
156
219
  for contract in contracts:
157
220
  pb_contract = ProtoWriter._set_contract(pb_input.contracts.add(), contract)
@@ -171,12 +234,19 @@ class ProtoWriter:
171
234
  return pb_contract
172
235
 
173
236
  @staticmethod
174
- def _set_contract_attributes(
175
- pb_parent: Union[ProtoContract, NestedField], attributes: dict[str, Attribute]
176
- ) -> None:
177
- """Assign (nested) Attributes to given protobuf container `pb_parent`"""
237
+ def _set_contract_attributes(pb_parent: ProtoContract | NestedField, attributes: dict[str, Attribute]) -> None:
238
+ """
239
+ Assign (nested) Attributes to given protobuf container `pb_parent`
240
+
241
+ Args:
242
+ pb_parent: parent element, either a contract or an attribute
243
+ attributes: to be set as child elements of parent
244
+
245
+ Raises:
246
+ ProtoWriterError: if a type unsupported for contract attributes is found, logged with level "ERROR"
247
+ """
178
248
  for name, attribute in attributes.items():
179
- log().debug("Assigning contract attribute `{}`.".format(name))
249
+ log().debug(f"Assigning contract attribute `{name}`.")
180
250
  pb_field = ProtoWriter._add_field(pb_parent, name)
181
251
 
182
252
  if attribute.has_value:
@@ -188,7 +258,7 @@ class ProtoWriter:
188
258
  elif isinstance(value, str):
189
259
  pb_field.string_values.extend([value])
190
260
  else:
191
- log_error_and_raise(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
261
+ raise log_error(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
192
262
  elif attribute.has_nested:
193
263
  ProtoWriter._set_contract_attributes(pb_field, attribute.nested)
194
264
 
@@ -230,22 +300,50 @@ class ProtoWriter:
230
300
  pb_packages.data_items.extend(java_packages.data_items)
231
301
  pb_packages.portables.extend(java_packages.portables)
232
302
 
233
- def _write_protobuf_to_disk(self, pb_data_storage: DataStorage) -> None:
234
- """Writes given `protobuf_input_data` to disk"""
235
- log().info(self._INFO_WRITING.format(self.file_path))
236
- try:
237
- with open(self.file_path, "wb") as file:
238
- serialised_data_storage = pb_data_storage.SerializeToString()
239
- file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
240
- file.write(len(serialised_data_storage).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
241
- file.write(serialised_data_storage)
242
- except OSError as e:
243
- log_error_and_raise(ProtoWriterError(ProtoWriter._NO_FILE_SPECIFIED.format(self.file_path), e))
244
- log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
245
-
246
303
  @staticmethod
247
304
  def _set_execution_versions(pb_version_data: ExecutionData.VersionData) -> None:
248
305
  """Adds version strings for fameio, fameprotobuf, and python to the given Versions message"""
249
306
  pb_version_data.fame_protobuf = metadata.version("fameprotobuf")
250
307
  pb_version_data.fame_io = metadata.version("fameio")
251
308
  pb_version_data.python = sys.version
309
+
310
+ def _serialise(self, data_storage: DataStorage) -> bytes:
311
+ """
312
+ Serialise given data storage to bytes
313
+
314
+ Args:
315
+ data_storage: to be serialised
316
+
317
+ Returns:
318
+ binary string representation of given data storage
319
+
320
+ Raises:
321
+ ProtoWriterError: if given data storage could not be serialised, logged with level "ERROR"
322
+ """
323
+ try:
324
+ return data_storage.SerializeToString()
325
+ except EncodeError as e:
326
+ raise log_error(ProtoWriterError(self._ERR_PROTOBUF_ENCODING)) from e
327
+
328
+ def _write_data_to_disk(self, serialised_data: bytes) -> None:
329
+ """
330
+ Writes given serialised data to file
331
+
332
+ Args:
333
+ serialised_data: to be written to file
334
+
335
+ Raises:
336
+ ProtoWriterError: if file could not be opened or written, logged with level "ERROR"
337
+ """
338
+ log().info(self._INFO_WRITING.format(self.file_path))
339
+ try:
340
+ with open(self.file_path, "wb") as file:
341
+ try:
342
+ file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
343
+ file.write(len(serialised_data).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
344
+ file.write(serialised_data)
345
+ except IOError as e:
346
+ raise log_error(ProtoWriterError(self._ERR_FILE_WRITE.format(self.file_path))) from e
347
+ except OSError as e:
348
+ raise log_error(ProtoWriterError(ProtoWriter._ERR_FILE_PATH.format(self.file_path), e)) from e
349
+ log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
fameio/logs.py CHANGED
@@ -1,11 +1,11 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
6
  import logging as pylog
6
7
  from enum import Enum
7
8
  from pathlib import Path
8
- from typing import Optional
9
9
 
10
10
 
11
11
  class LogLevel(Enum):
@@ -27,7 +27,7 @@ _FORMAT_NORMAL = "%(asctime)s — %(levelname)s — %(message)s" # noqa
27
27
  _FORMAT_DETAILLED = "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s" # noqa
28
28
  _TIME_FORMAT = "%H:%M:%S"
29
29
 
30
- _INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: {}"
30
+ _INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: %s"
31
31
  _WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log level `WARNING`"
32
32
 
33
33
  LOGGER_NAME = "fameio"
@@ -42,35 +42,41 @@ def log() -> pylog.Logger:
42
42
  return _loggers[0]
43
43
 
44
44
 
45
- def log_critical_and_raise(exception: Exception) -> None:
45
+ def log_critical(exception: Exception) -> Exception:
46
46
  """
47
- Raises the specified `exception` and logs a critical error with the exception's message
47
+ Logs a critical error with the exception's message and returns the exception for raising it.
48
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
49
+
50
+ Example: `raise log_critical(MyException("My error message"))`
48
51
 
49
52
  Args:
50
- exception: to be raised and logged at level `critical`
53
+ exception: to extract the error message from
51
54
 
52
- Raises:
53
- Exception: the given exception
55
+ Returns:
56
+ the given exception
54
57
  """
55
58
  log().critical(str(exception))
56
- raise exception
59
+ return exception
57
60
 
58
61
 
59
- def log_error_and_raise(exception: Exception) -> None:
62
+ def log_error(exception: Exception) -> Exception:
60
63
  """
61
- Raises the specified `exception` and logs a critical error with the exception's message
64
+ Logs an error with the exception's message and returns the exception for raising.
65
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
66
+
67
+ Example: `raise log_error(MyException("My error message"))`
62
68
 
63
69
  Args:
64
- exception: to be raised and logged at level `error`
70
+ exception: to extract the error message from
65
71
 
66
- Raises:
67
- Exception: the given exception
72
+ Returns:
73
+ the given exception
68
74
  """
69
75
  log().error(str(exception))
70
- raise exception
76
+ return exception
71
77
 
72
78
 
73
- def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
79
+ def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
74
80
  """
75
81
  Ensures a logger for fameio is present and uses the specified options
76
82
 
@@ -87,7 +93,7 @@ def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None
87
93
  _add_handler(logger, pylog.FileHandler(file_name, mode="w"), formatter)
88
94
 
89
95
  if _loggers:
90
- pylog.info(_INFO_UPDATING_LOG_LEVEL.format(log_level_name))
96
+ pylog.info(_INFO_UPDATING_LOG_LEVEL, log_level_name)
91
97
  _loggers[0] = logger
92
98
  else:
93
99
  _loggers.append(logger)
fameio/output/__init__.py CHANGED
@@ -1,3 +1,7 @@
1
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: CC0-1.0
4
+
5
+
6
+ class OutputError(Exception):
7
+ """An error that occurred during extracting an output file"""