fameio 3.1.1__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. fameio/cli/convert_results.py +10 -10
  2. fameio/cli/make_config.py +9 -9
  3. fameio/cli/options.py +6 -4
  4. fameio/cli/parser.py +87 -51
  5. fameio/cli/reformat.py +58 -0
  6. fameio/input/__init__.py +4 -4
  7. fameio/input/loader/__init__.py +13 -13
  8. fameio/input/loader/controller.py +64 -18
  9. fameio/input/loader/loader.py +25 -16
  10. fameio/input/metadata.py +57 -38
  11. fameio/input/resolver.py +9 -10
  12. fameio/input/scenario/agent.py +62 -26
  13. fameio/input/scenario/attribute.py +93 -40
  14. fameio/input/scenario/contract.py +160 -56
  15. fameio/input/scenario/exception.py +41 -18
  16. fameio/input/scenario/fameiofactory.py +57 -6
  17. fameio/input/scenario/generalproperties.py +22 -12
  18. fameio/input/scenario/scenario.py +117 -38
  19. fameio/input/scenario/stringset.py +29 -11
  20. fameio/input/schema/agenttype.py +27 -10
  21. fameio/input/schema/attribute.py +108 -45
  22. fameio/input/schema/java_packages.py +14 -12
  23. fameio/input/schema/schema.py +39 -15
  24. fameio/input/validator.py +198 -54
  25. fameio/input/writer.py +137 -46
  26. fameio/logs.py +28 -47
  27. fameio/output/__init__.py +5 -1
  28. fameio/output/agent_type.py +89 -28
  29. fameio/output/conversion.py +52 -37
  30. fameio/output/csv_writer.py +107 -27
  31. fameio/output/data_transformer.py +17 -24
  32. fameio/output/execution_dao.py +170 -0
  33. fameio/output/input_dao.py +71 -33
  34. fameio/output/output_dao.py +33 -11
  35. fameio/output/reader.py +64 -21
  36. fameio/output/yaml_writer.py +16 -8
  37. fameio/scripts/__init__.py +22 -4
  38. fameio/scripts/convert_results.py +126 -52
  39. fameio/scripts/convert_results.py.license +1 -1
  40. fameio/scripts/exception.py +7 -0
  41. fameio/scripts/make_config.py +34 -13
  42. fameio/scripts/make_config.py.license +1 -1
  43. fameio/scripts/reformat.py +71 -0
  44. fameio/scripts/reformat.py.license +3 -0
  45. fameio/series.py +174 -59
  46. fameio/time.py +79 -25
  47. fameio/tools.py +48 -8
  48. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/METADATA +50 -34
  49. fameio-3.3.0.dist-info/RECORD +60 -0
  50. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/WHEEL +1 -1
  51. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/entry_points.txt +1 -0
  52. CHANGELOG.md +0 -288
  53. fameio-3.1.1.dist-info/RECORD +0 -56
  54. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSE.txt +0 -0
  55. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  56. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  57. {fameio-3.1.1.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
fameio/input/writer.py CHANGED
@@ -1,10 +1,12 @@
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
- from importlib import metadata
+ from __future__ import annotations
+
  import sys
+ from importlib import metadata
  from pathlib import Path
- from typing import Any, Union
+ from typing import Any

  from fameprotobuf.contract_pb2 import ProtoContract
  from fameprotobuf.data_storage_pb2 import DataStorage
@@ -12,33 +14,37 @@ from fameprotobuf.execution_data_pb2 import ExecutionData
  from fameprotobuf.field_pb2 import NestedField
  from fameprotobuf.input_file_pb2 import InputData
  from fameprotobuf.model_pb2 import ModelData
+ from google.protobuf.message import EncodeError

  import fameio
+ from fameio.input import InputError
  from fameio.input.scenario import Agent, Attribute, Contract, GeneralProperties, Scenario, StringSet
  from fameio.input.schema import AttributeSpecs, AttributeType, JavaPackages, Schema
- from fameio.logs import log_error_and_raise, log
+ from fameio.logs import log, log_error
  from fameio.output.reader import Reader
  from fameio.series import TimeSeriesManager
  from fameio.time import FameTime
  from fameio.tools import ensure_is_list


- class ProtoWriterError(Exception):
-     """Indicates an error during writing of protobuf file"""
+ class ProtoWriterError(InputError):
+     """Indicates an error during writing of a protobuf file."""


  class ProtoWriter:
-     """Writes a given scenario to protobuf file"""
+     """Writes a given scenario to protobuf file."""

      _FAME_PROTOBUF_STREAM_HEADER = fameio.FILE_HEADER_V2

-     _TYPE_NOT_IMPLEMENTED = "AttributeType '{}' not implemented."
+     _TYPE_NOT_IMPLEMENTED = "Protobuf representation for AttributeType '{}' not implemented."
      _CONTRACT_UNSUPPORTED = (
          "Unsupported value for Contract Attribute '{}'; "
          "Only support `int`, `float`, `enum` or `dict` types are supported here."
      )
      _USING_DEFAULT = "Using provided Default for Attribute: '{}'."
-     _NO_FILE_SPECIFIED = "Could not write to '{}'. Please specify a valid output file."
+     _ERR_FILE_PATH = "Could not open file '{}' for writing. Please specify a valid output file."
+     _ERR_PROTOBUF_ENCODING = "Could not encode to protobuf. Please contact FAME-Io developers: fame@dlr.de"
+     _ERR_FILE_WRITE = "Could not write to file '{}'."

      _INFO_WRITING = "Writing scenario to protobuf file `{}`"
      _INFO_WRITING_COMPLETED = "Saved protobuf file `{}` to disk"
@@ -48,12 +54,30 @@ class ProtoWriter:
          self._time_series_manager: TimeSeriesManager = time_series_manager

      def write_validated_scenario(self, scenario: Scenario) -> None:
-         """Writes given validated Scenario to file"""
-         pb_data_storage = self._create_protobuf_from_scenario(scenario)
-         self._write_protobuf_to_disk(pb_data_storage)
+         """Writes given validated Scenario to file.
+
+         Args:
+             scenario: to be written to file
+
+         Raises:
+             ProtoWriterError: if scenario could not be written to file, logged with level "ERROR"
+         """
+         data_storage = self._create_protobuf_from_scenario(scenario)
+         serialised = self._serialise(data_storage)
+         self._write_data_to_disk(serialised)

      def _create_protobuf_from_scenario(self, scenario: Scenario) -> DataStorage:
-         """Returns given `scenario` written to new DataStorage protobuf"""
+         """Returns given `scenario` written to new DataStorage protobuf.
+
+         Args:
+             scenario: to be converted to protobuf
+
+         Returns:
+             protobuf container with the scenario
+
+         Raises:
+             ProtoWriterError: if the protobuf representation cannot be constructed, logged with level "ERROR"
+         """
          log().info("Converting scenario to protobuf.")
          pb_data_storage = DataStorage()
          pb_input = pb_data_storage.input
@@ -71,7 +95,7 @@ class ProtoWriter:

      @staticmethod
      def _set_general_properties(pb_input: InputData, general_properties: GeneralProperties) -> None:
-         """Saves a scenario's general properties to specified protobuf `pb_input` container"""
+         """Saves a scenario's general properties to specified protobuf `pb_input` container."""
          log().info("Adding General Properties")
          pb_input.run_id = general_properties.run_id
          pb_input.simulation.start_time = general_properties.simulation_start_time
@@ -79,7 +103,16 @@ class ProtoWriter:
          pb_input.simulation.random_seed = general_properties.simulation_random_seed

      def _add_agents(self, pb_input: InputData, agents: list[Agent], schema: Schema) -> None:
-         """Triggers setting of `agents` to `pb_input`"""
+         """Triggers setting of `agents` to `pb_input`.
+
+         Args:
+             pb_input: parent element to add the agents to
+             agents: to be added to parent input
+             schema: describing the agents' attributes
+
+         Raises:
+             ProtoWriterError: if any agent's attributes cannot be set, logged with level "ERROR"
+         """
          log().info("Adding Agents")
          for agent in agents:
              pb_agent = self._set_agent(pb_input.agents.add(), agent)
@@ -89,18 +122,27 @@

      @staticmethod
      def _set_agent(pb_agent: InputData.AgentDao, agent: Agent) -> InputData.AgentDao:
-         """Saves type and id of given `agent` to protobuf `pb_agent` container. Returns given `pb_agent`"""
+         """Saves type and id of given `agent` to protobuf `pb_agent` container. Returns given `pb_agent`."""
          pb_agent.class_name = agent.type_name
          pb_agent.id = agent.id
          return pb_agent

      def _set_attributes(
          self,
-         pb_parent: Union[InputData.AgentDao, NestedField],
+         pb_parent: InputData.AgentDao | NestedField,
          attributes: dict[str, Attribute],
          specs: dict[str, AttributeSpecs],
      ) -> None:
-         """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes"""
+         """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes.
+
+         Args:
+             pb_parent: to store the attributes in
+             attributes: to be stored
+             specs: attribute specifications associated with attributes
+
+         Raises:
+             ProtoWriterError: if any attribute cannot be set, logged with level "ERROR"
+         """
          values_not_set = list(specs.keys())
          for name, attribute in attributes.items():
              pb_field = self._add_field(pb_parent, name)
@@ -124,14 +166,23 @@
                  log().info(self._USING_DEFAULT.format(name))

      @staticmethod
-     def _add_field(pb_parent: Union[InputData.AgentDao, NestedField], name: str) -> NestedField:
-         """Returns new field with given `name` that is added to given `pb_parent`"""
+     def _add_field(pb_parent: InputData.AgentDao | NestedField, name: str) -> NestedField:
+         """Returns new field with given `name` that is added to given `pb_parent`."""
          pb_field = pb_parent.fields.add()
          pb_field.field_name = name
          return pb_field

      def _set_attribute(self, pb_field: NestedField, value: Any, attribute_type: AttributeType) -> None:
-         """Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`"""
+         """Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`.
+
+         Args:
+             pb_field: parent element to contain the attribute value therein
+             value: of the attribute
+             attribute_type: type of the attribute
+
+         Raises:
+             ProtoWriterError: if the attribute type has no serialisation implementation, logged with level "ERROR"
+         """
          if attribute_type is AttributeType.INTEGER:
              pb_field.int_values.extend(ensure_is_list(value))
          elif attribute_type is AttributeType.DOUBLE:
@@ -145,11 +196,19 @@
          elif attribute_type is AttributeType.TIME_SERIES:
              pb_field.series_id = self._time_series_manager.get_series_id_by_identifier(value)
          else:
-             log_error_and_raise(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
+             raise log_error(ProtoWriterError(self._TYPE_NOT_IMPLEMENTED.format(attribute_type)))

      @staticmethod
      def _add_contracts(pb_input: InputData, contracts: list[Contract]) -> None:
-         """Triggers setting of `contracts` to `pb_input`"""
+         """Adds given contracts to input data.
+
+         Args:
+             pb_input: parent element to have the contracts added to
+             contracts: to be added
+
+         Raises:
+             ProtoWriterError: if any contract cannot be added, logged with level "ERROR"
+         """
          log().info("Adding Contracts")
          for contract in contracts:
              pb_contract = ProtoWriter._set_contract(pb_input.contracts.add(), contract)
@@ -158,7 +217,7 @@

      @staticmethod
      def _set_contract(pb_contract: ProtoContract, contract: Contract) -> ProtoContract:
-         """Saves given `contract` details to protobuf container `pb_contract`. Returns given `pb_contract`"""
+         """Saves given `contract` details to protobuf container `pb_contract`. Returns given `pb_contract`."""
          pb_contract.sender_id = contract.sender_id
          pb_contract.receiver_id = contract.receiver_id
          pb_contract.product_name = contract.product_name
@@ -169,10 +228,16 @@
          return pb_contract

      @staticmethod
-     def _set_contract_attributes(
-         pb_parent: Union[ProtoContract, NestedField], attributes: dict[str, Attribute]
-     ) -> None:
-         """Assign (nested) Attributes to given protobuf container `pb_parent`"""
+     def _set_contract_attributes(pb_parent: ProtoContract | NestedField, attributes: dict[str, Attribute]) -> None:
+         """Assign (nested) Attributes to given protobuf container `pb_parent`.
+
+         Args:
+             pb_parent: parent element, either a contract or an attribute
+             attributes: to be set as child elements of parent
+
+         Raises:
+             ProtoWriterError: if a type unsupported for contract attributes is found, logged with level "ERROR"
+         """
          for name, attribute in attributes.items():
              log().debug(f"Assigning contract attribute `{name}`.")
              pb_field = ProtoWriter._add_field(pb_parent, name)
@@ -186,12 +251,12 @@
                  elif isinstance(value, str):
                      pb_field.string_values.extend([value])
                  else:
-                     log_error_and_raise(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
+                     raise log_error(ProtoWriterError(ProtoWriter._CONTRACT_UNSUPPORTED.format(str(attribute))))
              elif attribute.has_nested:
                  ProtoWriter._set_contract_attributes(pb_field, attribute.nested)

      def _set_time_series(self, pb_input: InputData) -> None:
-         """Adds all time series from TimeSeriesManager to given `pb_input`"""
+         """Adds all time series from TimeSeriesManager to given `pb_input`."""
          log().info("Adding TimeSeries")
          for unique_id, identifier, data in self._time_series_manager.get_all_series():
              pb_series = pb_input.time_series.add()
@@ -202,13 +267,13 @@

      @staticmethod
      def _set_schema(pb_input: InputData, schema: Schema) -> None:
-         """Sets the given `schema` `pb_input`"""
+         """Sets the given `schema` `pb_input`."""
          log().info("Adding Schema")
          pb_input.schema = schema.to_string()

      @staticmethod
      def _set_string_sets(pb_input: InputData, string_sets: dict[str, StringSet]) -> None:
-         """Adds the given StringSets to given `pb_input`"""
+         """Adds the given StringSets to given `pb_input`."""
          for name, string_set in string_sets.items():
              pb_set = pb_input.string_sets.add()
              pb_set.name = name
@@ -222,28 +287,54 @@

      @staticmethod
      def _set_java_package_names(pb_model: ModelData, java_packages: JavaPackages) -> None:
-         """Adds given JavaPackages names to given ModelData section"""
+         """Adds given JavaPackages names to given ModelData section."""
          pb_packages = pb_model.package_definition
          pb_packages.agents.extend(java_packages.agents)
          pb_packages.data_items.extend(java_packages.data_items)
          pb_packages.portables.extend(java_packages.portables)

-     def _write_protobuf_to_disk(self, pb_data_storage: DataStorage) -> None:
-         """Writes given `protobuf_input_data` to disk"""
-         log().info(self._INFO_WRITING.format(self.file_path))
-         try:
-             with open(self.file_path, "wb") as file:
-                 serialised_data_storage = pb_data_storage.SerializeToString()
-                 file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
-                 file.write(len(serialised_data_storage).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
-                 file.write(serialised_data_storage)
-         except OSError as e:
-             log_error_and_raise(ProtoWriterError(ProtoWriter._NO_FILE_SPECIFIED.format(self.file_path), e))
-         log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
-
      @staticmethod
      def _set_execution_versions(pb_version_data: ExecutionData.VersionData) -> None:
-         """Adds version strings for fameio, fameprotobuf, and python to the given Versions message"""
+         """Adds version strings for fameio, fameprotobuf, and python to the given Versions message."""
          pb_version_data.fame_protobuf = metadata.version("fameprotobuf")
          pb_version_data.fame_io = metadata.version("fameio")
          pb_version_data.python = sys.version
+
+     def _serialise(self, data_storage: DataStorage) -> bytes:
+         """Serialise given data storage to bytes.
+
+         Args:
+             data_storage: to be serialised
+
+         Returns:
+             binary string representation of given data storage
+
+         Raises:
+             ProtoWriterError: if given data storage could not be serialised, logged with level "ERROR"
+         """
+         try:
+             return data_storage.SerializeToString()
+         except EncodeError as e:
+             raise log_error(ProtoWriterError(self._ERR_PROTOBUF_ENCODING)) from e
+
+     def _write_data_to_disk(self, serialised_data: bytes) -> None:
+         """Writes given serialised data to file.
+
+         Args:
+             serialised_data: to be written to file
+
+         Raises:
+             ProtoWriterError: if file could not be opened or written, logged with level "ERROR"
+         """
+         log().info(self._INFO_WRITING.format(self.file_path))
+         try:
+             with open(self.file_path, "wb") as file:
+                 try:
+                     file.write(self._FAME_PROTOBUF_STREAM_HEADER.encode(Reader.HEADER_ENCODING))
+                     file.write(len(serialised_data).to_bytes(Reader.BYTES_DEFINING_MESSAGE_LENGTH, byteorder="big"))
+                     file.write(serialised_data)
+                 except IOError as e:
+                     raise log_error(ProtoWriterError(self._ERR_FILE_WRITE.format(self.file_path))) from e
+         except OSError as e:
+             raise log_error(ProtoWriterError(ProtoWriter._ERR_FILE_PATH.format(self.file_path), e)) from e
+         log().info(self._INFO_WRITING_COMPLETED.format(self.file_path))
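Taken together, the writer changes split writing into a serialisation step (`_serialise`) and a disk-writing step (`_write_data_to_disk`), and `ProtoWriterError` now derives from `InputError` and is raised via `raise log_error(...)`. A minimal sketch of how calling code might react to this; the helper `save_scenario` and its arguments are hypothetical, only `write_validated_scenario` is the API shown above:

    from fameio.input import InputError
    from fameio.input.scenario import Scenario
    from fameio.input.writer import ProtoWriter, ProtoWriterError

    # ProtoWriterError derives from InputError as of this release, so a single
    # `except InputError` also covers failures while encoding or writing the protobuf file
    assert issubclass(ProtoWriterError, InputError)

    def save_scenario(writer: ProtoWriter, scenario: Scenario) -> bool:
        """Hypothetical helper: returns False instead of propagating writer errors."""
        try:
            writer.write_validated_scenario(scenario)  # serialises, then writes to disk
        except InputError as e:
            print(f"Could not write protobuf file: {e}")
            return False
        return True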
fameio/logs.py CHANGED
@@ -1,15 +1,16 @@
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
+ from __future__ import annotations

  import logging as pylog
  from enum import Enum
  from pathlib import Path
- from typing import Optional
+ from typing import TypeVar


  class LogLevel(Enum):
-     """Levels for Logging"""
+     """Levels for Logging."""

      PRINT = 100
      CRITICAL = pylog.CRITICAL
@@ -33,80 +34,60 @@ _WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log le
  LOGGER_NAME = "fameio"
  DEFAULT_LOG_LEVEL = LogLevel.WARNING

+ T = TypeVar("T", bound=Exception)
+

  def log() -> pylog.Logger:
-     """Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`"""
+     """Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`."""
      if not _loggers:
          fameio_logger(DEFAULT_LOG_LEVEL.name)
          pylog.warning(_WARN_NOT_INITIALIZED)
      return _loggers[0]


- def log_critical_and_raise(exception: Exception) -> None:
-     """
-     Raises the specified `exception` and logs a critical error with the exception's message
-
-     Args:
-         exception: to be raised and logged at level `critical`
-
-     Raises:
-         Exception: the given exception
-     """
-     log().critical(str(exception))
-     raise exception
-
+ def log_critical(exception: T) -> T:
+     """Logs a critical error with the exception's message and returns the exception for raising it.

- def log_critical(exception: Exception) -> Exception:
-     """
-     Logs a critical error with the exception's message and returns the exception for raising it.
      Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
-
-     Example: `raise log_critical(MyException("My error message"))`
+     Example: `raise log_critical(MyException("My error message"))

      Args:
          exception: to extract the error message from

      Returns:
-        the given exception
+         the given exception
      """
      log().critical(str(exception))
      return exception


- def log_error_and_raise(exception: Exception) -> None:
-     """
-     Raises the specified `exception` and logs a critical error with the exception's message
+ def log_error(exception: T) -> T:
+     """Logs an error with the exception's message and returns the exception for raising.

-     Args:
-         exception: to be raised and logged at level `error`
-
-     Raises:
-         Exception: the given exception
-     """
-     log().error(str(exception))
-     raise exception
-
-
- def log_error(exception: Exception) -> Exception:
-     """
-     Logs an error with the exception's message and returns the exception for raising.
      Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
-
      Example: `raise log_error(MyException("My error message"))`

      Args:
          exception: to extract the error message from

      Returns:
-        the given exception
+         the given exception
      """
      log().error(str(exception))
      return exception


- def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
+ def log_and_print(message: str) -> None:
+     """Logs a message with priority "PRINT"., ensuring it is printed to console.
+
+     Args:
+         message: to be logged with the highest priority
      """
-     Ensures a logger for fameio is present and uses the specified options
+     log().log(LogLevel.PRINT.value, message)
+
+
+ def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
+     """Ensures a logger for fameio is present and uses the specified options.

      Args:
          log_level_name: one of Python's official logging level names, e.g. "INFO"
@@ -128,8 +109,7 @@ def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None


  def _get_logger(level: LogLevel) -> pylog.Logger:
-     """
-     Returns fameio logger with given log level without any handler and, not propagating to parent
+     """Returns fameio logger with given log level without any handler and, not propagating to parent.

      Args:
          level: integer representing the log level
@@ -147,10 +127,11 @@ def _get_logger(level: LogLevel) -> pylog.Logger:


  def _get_formatter(level: LogLevel) -> pylog.Formatter:
-     """
-     Returns a log formatter depending on the given log `level`
+     """Returns a log formatter depending on the given log `level`.
+
      Args:
          level: this log level determines how detailed the logger's output is
+
      Returns:
          new log formatter
      """
@@ -158,7 +139,7 @@ def _get_formatter(level: LogLevel) -> pylog.Formatter:


  def _add_handler(logger: pylog.Logger, handler: pylog.Handler, formatter: pylog.Formatter) -> None:
-     """Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list"""
+     """Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list."""
      handler.setFormatter(formatter)
      _handlers.append(handler)
      logger.addHandler(handler)
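The `*_and_raise` helpers are gone from `fameio.logs`: the surviving `log_critical` and `log_error` only log and return the exception, and `log_and_print` is new. A short sketch of the resulting idiom for code built on top of fameio, with `MyToolError` and `parse_positive` as made-up names:

    from fameio.logs import fameio_logger, log_and_print, log_error

    class MyToolError(Exception):
        """Hypothetical exception of a script that uses fameio."""

    fameio_logger("INFO")  # set up fameio's logger once, as before

    def parse_positive(value: str) -> int:
        number = int(value)
        if number <= 0:
            # old (removed): log_error_and_raise(MyToolError(...))
            # new: log_error() only logs and returns, so the `raise` is explicit
            raise log_error(MyToolError(f"Expected a positive number, got {value!r}"))
        return number

    log_and_print(f"Parsed value: {parse_positive('42')}")  # PRINT level always reaches the console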
fameio/output/__init__.py CHANGED
@@ -1,3 +1,7 @@
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: CC0-1.0
+
+
+ class OutputError(Exception):
+     """An error that occurred during extracting an output file"""
fameio/output/agent_type.py CHANGED
@@ -1,13 +1,16 @@
  # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
  #
  # SPDX-License-Identifier: Apache-2.0
- from typing import Union
+ from __future__ import annotations

  from fameprotobuf.services_pb2 import Output

+ from fameio.logs import log_error
+ from fameio.output import OutputError
+

  class AgentType:
-     """Provides information derived from an underlying protobuf AgentType"""
+     """Provides information derived from an underlying protobuf AgentType."""

      def __init__(self, agent_type: Output.AgentType) -> None:
          self._agent_type = agent_type
@@ -34,7 +37,7 @@ class AgentType:
          """Returns set of IDs for complex columns, ignoring simple columns"""
          return {field.field_id for field in self._agent_type.fields if len(field.index_names) > 0}

-     def get_column_name_for_id(self, column_index: int) -> Union[str, None]:
+     def get_column_name_for_id(self, column_index: int) -> str | None:
          """Returns name of column by given `column_index` or None, if column is not present"""
          if 0 <= column_index < len(self._agent_type.fields):
              return self._agent_type.fields[column_index].field_name
@@ -49,44 +52,102 @@ class AgentType:
          return self._agent_type.class_name


- class AgentTypeError(Exception):
-     """Indicates an error with the agent types definitions"""
+ class AgentTypeError(OutputError):
+     """Indicates an error with the agent types definitions."""


  class AgentTypeLog:
-     """Stores data about collected agent types"""
+     """Stores data about collected agent types."""

      _ERR_AGENT_TYPE_MISSING = "Requested AgentType `{}` not found."
      _ERR_DOUBLE_DEFINITION = "Just one definition allowed per AgentType. Found multiple for {}. File might be corrupt."

-     def __init__(self, requested_agents: list[str]) -> None:
-         self._requested_agents = [agent.upper() for agent in requested_agents] if requested_agents else None
-         self._requested_agent_types = {}
+     def __init__(self, _agent_name_filter_list: list[str]) -> None:
+         """Initialises new AgentTypeLog.
+
+         Args:
+             _agent_name_filter_list: list of agent type names that are requested for output data extraction
+         """
+         self._agent_name_filter_list: list[str] | None = (
+             [agent.upper() for agent in _agent_name_filter_list] if _agent_name_filter_list else None
+         )
+         self._requested_agent_types: dict[str, AgentType] = {}
+         self._agents_with_output: list[str] = []

      def update_agents(self, new_types: dict[str, Output.AgentType]) -> None:
-         """Saves new `agent_types` (if any) contained in given `output` if requested for extraction"""
-         if new_types:
-             if self._requested_agents:
-                 new_types = {
-                     agent_name: agent_type
-                     for agent_name, agent_type in new_types.items()
-                     if agent_name.upper() in self._requested_agents
-                 }
-             for agent_name in self._requested_agent_types:
-                 if agent_name in new_types:
-                     raise AgentTypeError(self._ERR_DOUBLE_DEFINITION.format(agent_name))
-             self._requested_agent_types.update(new_types)
+         """Saves `new_types` if they are requested for extraction.
+
+         If any new agent types are provided, checks if they are requested for extraction, and, if so, saves them.
+         Agent types not requested for extraction are ignored.
+
+         Args:
+             new_types: to be saved (if requested for extraction)
+
+         Raises:
+             AgentTypeError: if agent type was already registered, logged with level "ERROR"
+         """
+         if not new_types:
+             return
+
+         self._agents_with_output.extend(list(new_types.keys()))
+         filtered_types = self._filter_agents_by_name(new_types)
+         self._ensure_no_duplication(filtered_types)
+         self._requested_agent_types.update(filtered_types)
+
+     def _filter_agents_by_name(self, new_types: dict[str, Output.AgentType]) -> dict[str, Output.AgentType]:
+         """Removes and entries from `new_types` not on `agent_name_filter_list`.
+
+         Args:
+             new_types: to be filtered
+
+         Returns:
+             filtered list, or original list if no filter is active
+         """
+         if self._agent_name_filter_list:
+             return {
+                 agent_name: agent_type
+                 for agent_name, agent_type in new_types.items()
+                 if agent_name.upper() in self._agent_name_filter_list
+             }
+         return new_types
+
+     def _ensure_no_duplication(self, filtered_types: dict[str, Output.AgentType]) -> None:
+         """Ensures no duplicate agent type definitions occur.
+
+         Args:
+             filtered_types: to be checked for duplications with already registered types
+
+         Raises:
+             AgentTypeError: if duplicate agent type is found, logged with level "ERROR"
+         """
+         for agent_name in self._requested_agent_types:
+             if agent_name in filtered_types:
+                 raise log_error(AgentTypeError(self._ERR_DOUBLE_DEFINITION.format(agent_name)))

      def has_any_agent_type(self) -> bool:
-         """Returns True if any agent type was registered so far present"""
+         """Returns True if any agent type was registered so far present."""
          return len(self._requested_agent_types) > 0

-     def get_agent_type(self, agent_name: str) -> AgentType:
-         """Returns `AgentType` of given agent `name`"""
-         if agent_name not in self._requested_agent_types:
-             raise AgentTypeError(self._ERR_AGENT_TYPE_MISSING.format(agent_name))
-         return AgentType(self._requested_agent_types[agent_name])
+     def get_agent_type(self, agent_type_name: str) -> AgentType:
+         """Returns the requested type of agent.
+
+         Args:
+             agent_type_name: requested name of agent type
+
+         Returns:
+             stored agent type
+
+         Raises:
+             AgentTypeError: if no agent type could be found with that name, logged with level "ERROR"
+         """
+         if agent_type_name not in self._requested_agent_types:
+             raise log_error(AgentTypeError(self._ERR_AGENT_TYPE_MISSING.format(agent_type_name)))
+         return AgentType(self._requested_agent_types[agent_type_name])

      def is_requested(self, agent_name: str) -> bool:
-         """Returns True if given agent_name is known and requested"""
+         """Returns True if given agent_name is known and requested."""
          return agent_name in self._requested_agent_types
+
+     def get_agents_with_output(self) -> list[str]:
+         """Returns all names of agents that had output."""
+         return self._agents_with_output
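The reworked `AgentTypeLog` above filters on update, rejects duplicate definitions, and newly tracks every agent type that produced output. A minimal sketch of the resulting call pattern; the empty `agent_types` dict is a placeholder for the `dict[str, Output.AgentType]` normally read from a FAME output file, and the agent type name "StorageTrader" is purely illustrative:

    from fameio.output import OutputError
    from fameio.output.agent_type import AgentTypeLog

    # Only agent types named in the filter list survive `update_agents`;
    # passing an empty list disables the filter.
    type_log = AgentTypeLog(["StorageTrader"])

    agent_types: dict = {}  # placeholder for types parsed from a FAME output file
    try:
        type_log.update_agents(agent_types)
    except OutputError as e:
        # AgentTypeError derives from OutputError, so this also catches duplicate definitions
        print(f"Corrupt output file: {e}")

    if type_log.has_any_agent_type():
        storage_trader = type_log.get_agent_type("StorageTrader")

    print(type_log.get_agents_with_output())  # names of all agent types that had output, filtered or not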