fameio 3.1.0__py3-none-any.whl → 3.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37):
  1. CHANGELOG.md +10 -1
  2. fameio/cli/__init__.py +2 -3
  3. fameio/cli/options.py +3 -3
  4. fameio/cli/parser.py +3 -3
  5. fameio/input/__init__.py +0 -8
  6. fameio/input/loader/controller.py +5 -6
  7. fameio/input/metadata.py +4 -7
  8. fameio/input/scenario/__init__.py +7 -8
  9. fameio/input/scenario/agent.py +3 -4
  10. fameio/input/scenario/attribute.py +14 -14
  11. fameio/input/scenario/contract.py +9 -9
  12. fameio/input/scenario/exception.py +8 -11
  13. fameio/input/schema/__init__.py +5 -5
  14. fameio/input/schema/agenttype.py +8 -9
  15. fameio/input/schema/attribute.py +95 -74
  16. fameio/input/validator.py +46 -37
  17. fameio/input/writer.py +4 -6
  18. fameio/logs.py +37 -3
  19. fameio/output/agent_type.py +11 -8
  20. fameio/output/conversion.py +2 -2
  21. fameio/output/data_transformer.py +6 -8
  22. fameio/output/input_dao.py +7 -12
  23. fameio/output/reader.py +4 -6
  24. fameio/output/yaml_writer.py +3 -3
  25. fameio/scripts/convert_results.py +52 -33
  26. fameio/series.py +18 -17
  27. fameio/time.py +15 -21
  28. fameio/tools.py +2 -3
  29. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/METADATA +8 -8
  30. fameio-3.1.1.dist-info/RECORD +56 -0
  31. fameio-3.1.0.dist-info/RECORD +0 -56
  32. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSE.txt +0 -0
  33. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  34. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  35. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  36. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/WHEEL +0 -0
  37. {fameio-3.1.0.dist-info → fameio-3.1.1.dist-info}/entry_points.txt +0 -0
@@ -1,14 +1,16 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
4
  from __future__ import annotations
5
5
 
6
6
  from enum import Enum, auto
7
+ from pathlib import Path
7
8
  from typing import Any, Optional, Final, Union
8
9
 
9
10
  from fameio.input import SchemaError
10
11
  from fameio.input.metadata import Metadata, ValueContainer
11
- from fameio.logs import log
12
+ from fameio.logs import log, log_error
13
+ from fameio.series import CSV_FILE_SUFFIX
12
14
  from fameio.time import FameTime
13
15
  from fameio.tools import keys_to_lower
14
16
 
@@ -41,16 +43,17 @@ class AttributeType(Enum):
41
43
  """
42
44
  if self is AttributeType.INTEGER or self is AttributeType.LONG:
43
45
  return int(value)
44
- elif self is AttributeType.DOUBLE:
46
+ if self is AttributeType.DOUBLE:
45
47
  return float(value)
46
- elif self is AttributeType.TIME_STAMP:
48
+ if self is AttributeType.TIME_STAMP:
47
49
  return FameTime.convert_string_if_is_datetime(value)
48
- elif self is AttributeType.ENUM or self is AttributeType.STRING or self is AttributeType.STRING_SET:
50
+ if self is AttributeType.ENUM or self is AttributeType.STRING or self is AttributeType.STRING_SET:
49
51
  return str(value)
50
- elif self is AttributeType.TIME_SERIES:
52
+ if self is AttributeType.TIME_SERIES:
53
+ if isinstance(value, str) and Path(value).suffix.lower() == CSV_FILE_SUFFIX:
54
+ return value
51
55
  return float(value)
52
- else:
53
- raise ValueError("String conversion not supported for '{}'.".format(self))
56
+ raise ValueError(f"String conversion not supported for '{self}'.")
54
57
 
55
58
 
56
59
  class AttributeSpecs(Metadata):
@@ -74,75 +77,78 @@ class AttributeSpecs(Metadata):
74
77
  _DEFAULT_NOT_LIST = "Attribute is list, but provided Default '{}' is not a list."
75
78
  _INCOMPATIBLE = "Value '{}' in section '{}' can not be converted to AttributeType '{}'."
76
79
  _DEFAULT_DISALLOWED = "Default '{}' is not an allowed value."
77
- _LIST_DISALLOWED = "Attribute '{}' of type TIME_SERIES cannot be a list."
80
+ _SERIES_LIST_DISALLOWED = "Attribute '{}' of type TIME_SERIES cannot be a list."
78
81
  _VALUES_ILL_FORMAT = "Only List and Dictionary is supported for 'Values' but was: {}"
79
82
  _NAME_DISALLOWED = f"Attribute name must not be empty and none of: {_DISALLOWED_NAMES}"
80
83
 
81
84
  def __init__(self, name: str, definition: dict):
82
85
  """Loads Attribute from given `definition`"""
83
86
  super().__init__(definition)
84
- if self._name_is_disallowed(name):
85
- raise SchemaError(AttributeSpecs._NAME_DISALLOWED)
86
-
87
+ self._assert_is_allowed_name(name)
87
88
  self._full_name = name
89
+
88
90
  if not definition:
89
91
  raise SchemaError(AttributeSpecs._EMPTY_DEFINITION.format(name))
90
92
  definition = keys_to_lower(definition)
91
93
 
92
- if AttributeSpecs.KEY_MANDATORY in definition:
93
- self._is_mandatory = definition[AttributeSpecs.KEY_MANDATORY]
94
- else:
95
- self._is_mandatory = True
96
- log().warning(AttributeSpecs._MISSING_SPEC_DEFAULT.format(AttributeSpecs.KEY_MANDATORY, name, True))
97
-
98
- if AttributeSpecs.KEY_LIST in definition:
99
- self._is_list = definition[AttributeSpecs.KEY_LIST]
100
- else:
101
- self._is_list = False
102
- log().warning(AttributeSpecs._MISSING_SPEC_DEFAULT.format(AttributeSpecs.KEY_LIST, name, False))
103
-
104
- if AttributeSpecs.KEY_TYPE in definition:
105
- self._attr_type = AttributeSpecs._get_type_for_name(definition[AttributeSpecs.KEY_TYPE])
106
- else:
107
- log().error(AttributeSpecs._MISSING_TYPE.format(name))
108
- raise SchemaError(AttributeSpecs._MISSING_TYPE.format(name))
94
+ self._is_mandatory = self._get_is_mandatory(definition, name)
95
+ self._is_list = self._get_is_list(definition, name)
96
+ self._attr_type = self._get_type(definition, name)
109
97
 
110
98
  if self._attr_type == AttributeType.TIME_SERIES and self._is_list:
111
- raise SchemaError(AttributeSpecs._LIST_DISALLOWED.format(name))
112
-
113
- self._allowed_values: ValueContainer = ValueContainer()
114
- if AttributeSpecs.KEY_VALUES in definition:
115
- value_definition = definition[AttributeSpecs.KEY_VALUES]
116
- if value_definition:
117
- self._allowed_values = self._read_values(value_definition)
99
+ raise SchemaError(AttributeSpecs._SERIES_LIST_DISALLOWED.format(name))
118
100
 
119
- self._default_value = None
120
- if AttributeSpecs.KEY_DEFAULT in definition:
121
- provided_value = definition[AttributeSpecs.KEY_DEFAULT]
122
- if self._is_list:
123
- self._default_value = self._convert_list(provided_value)
124
- else:
125
- self._default_value = self._convert_and_test(provided_value)
126
-
127
- self._nested_attributes = {}
128
- if AttributeSpecs.KEY_NESTED in definition:
129
- for nested_name, nested_details in definition[AttributeSpecs.KEY_NESTED].items():
130
- full_name = name + self._SEPARATOR + nested_name
131
- self._nested_attributes[nested_name] = AttributeSpecs(full_name, nested_details)
132
-
133
- self._help = None
134
- if AttributeSpecs.KEY_HELP in definition:
135
- self._help = definition[AttributeSpecs.KEY_HELP].strip()
101
+ self._allowed_values = self._get_allowed_values(definition)
102
+ self._default_value = self._get_default_value(definition)
103
+ self._nested_attributes = self._get_nested_attributes(definition, name)
104
+ self._help = self._get_help(definition)
136
105
 
137
106
  @staticmethod
138
- def _name_is_disallowed(full_name: str) -> bool:
139
- """Returns True if name is not allowed"""
107
+ def _assert_is_allowed_name(full_name: str) -> None:
108
+ """Raises SchemaError if provided name is not allowed for Attributes"""
140
109
  if full_name is None:
141
- return True
110
+ raise SchemaError(AttributeSpecs._NAME_DISALLOWED)
142
111
  short_name = full_name.split(AttributeSpecs._SEPARATOR)[-1]
143
112
  if len(short_name) == 0 or short_name.isspace():
144
- return True
145
- return short_name.lower() in AttributeSpecs._DISALLOWED_NAMES
113
+ raise SchemaError(AttributeSpecs._NAME_DISALLOWED)
114
+ if short_name.lower() in AttributeSpecs._DISALLOWED_NAMES:
115
+ raise SchemaError(AttributeSpecs._NAME_DISALLOWED)
116
+
117
+ @staticmethod
118
+ def _get_is_mandatory(definition: dict, name: str) -> bool:
119
+ """Returns True if `Mandatory` is set to True or if specification is missing; False otherwise"""
120
+ if AttributeSpecs.KEY_MANDATORY in definition:
121
+ return definition[AttributeSpecs.KEY_MANDATORY]
122
+ log().warning(AttributeSpecs._MISSING_SPEC_DEFAULT.format(AttributeSpecs.KEY_MANDATORY, name, True))
123
+ return True
124
+
125
+ @staticmethod
126
+ def _get_is_list(definition: dict, name: str) -> bool:
127
+ """Returns True if `List` is set to True; Returns False otherwise or if specification is missing"""
128
+ if AttributeSpecs.KEY_LIST in definition:
129
+ return definition[AttributeSpecs.KEY_LIST]
130
+ log().warning(AttributeSpecs._MISSING_SPEC_DEFAULT.format(AttributeSpecs.KEY_LIST, name, False))
131
+ return False
132
+
133
+ @staticmethod
134
+ def _get_type(definition: dict, name: str) -> AttributeType:
135
+ """Returns `AttributeType` from given definition; Raises an exception if no proper type can be extracted"""
136
+ if AttributeSpecs.KEY_TYPE in definition:
137
+ type_name = definition[AttributeSpecs.KEY_TYPE]
138
+ try:
139
+ return AttributeType[type_name.upper()]
140
+ except KeyError as e:
141
+ raise SchemaError(AttributeSpecs._INVALID_TYPE.format(type_name)) from e
142
+ raise log_error(SchemaError(AttributeSpecs._MISSING_TYPE.format(name)))
143
+
144
+ def _get_allowed_values(self, definition: dict) -> ValueContainer:
145
+ """Returns ValueContainer with allowed values if defined; otherwise an empty ValueContainer"""
146
+ allowed_values: ValueContainer = ValueContainer()
147
+ if AttributeSpecs.KEY_VALUES in definition:
148
+ value_definition = definition[AttributeSpecs.KEY_VALUES]
149
+ if value_definition:
150
+ allowed_values = self._read_values(value_definition)
151
+ return allowed_values
146
152
 
147
153
  def _read_values(self, definition: [dict, list]) -> ValueContainer:
148
154
  """
@@ -160,22 +166,30 @@ class AttributeSpecs(Metadata):
160
166
  for value in value_container.as_list():
161
167
  self._convert_to_data_type(value, self.KEY_VALUES)
162
168
  return value_container
163
- except ValueContainer.ParseError:
164
- raise SchemaError(AttributeSpecs._VALUES_ILL_FORMAT.format(definition))
169
+ except ValueContainer.ParseError as e:
170
+ raise SchemaError(AttributeSpecs._VALUES_ILL_FORMAT.format(definition)) from e
165
171
 
166
172
  def _convert_to_data_type(self, value: str, section: str) -> Union[int, float, str]:
167
173
  """Returns a given single `value` in `section` converted to this Attribute's data type"""
168
174
  try:
169
175
  return self._attr_type.convert_string_to_type(value)
170
- except ValueError:
171
- raise SchemaError(AttributeSpecs._INCOMPATIBLE.format(value, section, self._attr_type))
176
+ except ValueError as e:
177
+ raise SchemaError(AttributeSpecs._INCOMPATIBLE.format(value, section, self._attr_type)) from e
178
+
179
+ def _get_default_value(self, definition: dict) -> Optional[Union[int, float, str, list]]:
180
+ """Returns default value(s) from given definitions, or None if no default is specified"""
181
+ if AttributeSpecs.KEY_DEFAULT in definition:
182
+ provided_value = definition[AttributeSpecs.KEY_DEFAULT]
183
+ if self._is_list:
184
+ return self._convert_list(provided_value)
185
+ return self._convert_and_test(provided_value)
186
+ return None
172
187
 
173
188
  def _convert_list(self, values) -> list:
174
189
  """Converts all entries in given `values` list to this attribute data type and returns this new list"""
175
190
  if isinstance(values, list):
176
191
  return [self._convert_and_test(item) for item in values]
177
- else:
178
- raise SchemaError(AttributeSpecs._DEFAULT_NOT_LIST.format(values))
192
+ raise SchemaError(AttributeSpecs._DEFAULT_NOT_LIST.format(values))
179
193
 
180
194
  def _convert_and_test(self, value: str):
181
195
  """Converts a given single `value` to this Attribute's data type and tests if the value is allowed"""
@@ -183,6 +197,21 @@ class AttributeSpecs(Metadata):
183
197
  raise SchemaError(AttributeSpecs._DEFAULT_DISALLOWED.format(value))
184
198
  return self._convert_to_data_type(value, self.KEY_DEFAULT)
185
199
 
200
+ @staticmethod
201
+ def _get_nested_attributes(definition: dict, name: str) -> dict[str, AttributeSpecs]:
202
+ """Returns dict of nested attributes read from given definition; empty dict if no nested attributes exist"""
203
+ nested_attributes = {}
204
+ if AttributeSpecs.KEY_NESTED in definition:
205
+ for nested_name, nested_details in definition[AttributeSpecs.KEY_NESTED].items():
206
+ full_name = name + AttributeSpecs._SEPARATOR + nested_name
207
+ nested_attributes[nested_name] = AttributeSpecs(full_name, nested_details)
208
+ return nested_attributes
209
+
210
+ @staticmethod
211
+ def _get_help(definition) -> str:
212
+ """Returns (possible empty) help text if provided in definition; None otherwise"""
213
+ return definition.get(AttributeSpecs.KEY_HELP, "").strip()
214
+
186
215
  @property
187
216
  def attr_type(self) -> AttributeType:
188
217
  """Returns AttributeType of this attribute"""
@@ -233,14 +262,6 @@ class AttributeSpecs(Metadata):
233
262
  """Returns name including name of enclosing parent attributes"""
234
263
  return self._full_name
235
264
 
236
- @staticmethod
237
- def _get_type_for_name(name: str) -> AttributeType:
238
- """Returns the AttributeType matching the given `name` converted to upper case"""
239
- try:
240
- return AttributeType[name.upper()]
241
- except KeyError:
242
- raise SchemaError(AttributeSpecs._INVALID_TYPE.format(name))
243
-
244
265
  @property
245
266
  def has_help_text(self) -> bool:
246
267
  """Return True if a help_text is available"""
@@ -248,8 +269,8 @@ class AttributeSpecs(Metadata):
248
269
 
249
270
  @property
250
271
  def help_text(self) -> str:
251
- """Return the help_text of this attribute, if any, otherwise an empty string"""
252
- return self._help if self.has_help_text else ""
272
+ """Return the help_text of this attribute, if any"""
273
+ return self._help
253
274
 
254
275
  def _to_dict(self) -> dict[str, Any]:
255
276
  definition = {
fameio/input/validator.py CHANGED
@@ -1,15 +1,14 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
-
5
4
  import math
6
5
  from collections import Counter
7
- from typing import Any
6
+ from typing import Any, Union
8
7
 
9
8
  from fameio.input.resolver import PathResolver
10
9
  from fameio.input.scenario import Agent, Attribute, Contract, Scenario, StringSet
11
10
  from fameio.input.schema import Schema, AttributeSpecs, AttributeType, AgentType
12
- from fameio.logs import log_error_and_raise, log
11
+ from fameio.logs import log, log_error
13
12
  from fameio.series import TimeSeriesManager, TimeSeriesError
14
13
  from fameio.time import FameTime
15
14
 
@@ -17,8 +16,6 @@ from fameio.time import FameTime
17
16
  class ValidationError(Exception):
18
17
  """Indicates an error occurred during validation of any data with a connected schema"""
19
18
 
20
- pass
21
-
22
19
 
23
20
  class SchemaValidator:
24
21
  """Handles validation of scenarios based on a connected `schema`"""
@@ -79,27 +76,29 @@ class SchemaValidator:
79
76
  list_of_ids = [agent.id for agent in agents]
80
77
  non_unique_ids = [agent_id for agent_id, count in Counter(list_of_ids).items() if count > 1]
81
78
  if non_unique_ids:
82
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
79
+ raise log_error(ValidationError(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
83
80
 
84
81
  @staticmethod
85
82
  def ensure_agent_and_timeseries_are_valid(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager):
86
83
  """Validates given `agent` against `schema` plus loads and validates its timeseries"""
87
84
  SchemaValidator.ensure_agent_type_in_schema(agent, schema)
88
- SchemaValidator.ensure_is_valid_agent(agent, schema)
85
+ SchemaValidator.ensure_is_valid_agent(agent, schema, timeseries_manager)
89
86
  SchemaValidator.load_and_validate_timeseries(agent, schema, timeseries_manager)
90
87
 
91
88
  @staticmethod
92
89
  def ensure_agent_type_in_schema(agent: Agent, schema: Schema) -> None:
93
90
  """Raises exception if type for given `agent` is not specified in given `schema`"""
94
91
  if agent.type_name not in schema.agent_types:
95
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
92
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
96
93
 
97
94
  @staticmethod
98
- def ensure_is_valid_agent(agent: Agent, schema: Schema) -> None:
95
+ def ensure_is_valid_agent(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
99
96
  """Raises an exception if given `agent` does not meet the specified `schema` requirements"""
100
97
  scenario_attributes = agent.attributes
101
98
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
102
- SchemaValidator._ensure_mandatory_present(scenario_attributes, schema_attributes)
99
+ missing_default_series = SchemaValidator._check_mandatory_or_default(scenario_attributes, schema_attributes)
100
+ for missing_series in missing_default_series:
101
+ timeseries_manager.register_and_validate(missing_series)
103
102
  SchemaValidator._ensure_attributes_exist(scenario_attributes, schema_attributes)
104
103
  SchemaValidator._ensure_value_and_type_match(scenario_attributes, schema_attributes)
105
104
 
@@ -108,22 +107,30 @@ class SchemaValidator:
108
107
  """Returns agent specified by `name` or raises Exception if this agent is not present in given `schema`"""
109
108
  if name in schema.agent_types:
110
109
  return schema.agent_types[name]
111
- else:
112
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
110
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
113
111
 
114
112
  @staticmethod
115
- def _ensure_mandatory_present(attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]) -> None:
113
+ def _check_mandatory_or_default(
114
+ attributes: dict[str, Attribute],
115
+ specifications: dict[str, AttributeSpecs],
116
+ ) -> list[Union[str, float]]:
116
117
  """
117
118
  Raises Exception if in given list of `specifications` at least one item is mandatory,
118
119
  provides no defaults and is not contained in given `attributes` dictionary
120
+
121
+ Returns:
122
+ list of time series defaults used in scenario
119
123
  """
124
+ missing_series_defaults = []
120
125
  for name, specification in specifications.items():
121
126
  if name not in attributes:
122
127
  if specification.is_mandatory:
123
128
  if not specification.has_default_value:
124
- log_error_and_raise(
129
+ raise log_error(
125
130
  ValidationError(SchemaValidator._ATTRIBUTE_MISSING.format(specification.full_name))
126
131
  )
132
+ if specification.attr_type == AttributeType.TIME_SERIES:
133
+ missing_series_defaults.append(specification.default_value)
127
134
  else:
128
135
  if specification.has_default_value:
129
136
  log().warning(SchemaValidator._DEFAULT_IGNORED.format(specification.full_name))
@@ -133,16 +140,21 @@ class SchemaValidator:
133
140
  attribute = attributes[name]
134
141
  if specification.is_list:
135
142
  for entry in attribute.nested_list:
136
- SchemaValidator._ensure_mandatory_present(entry, specification.nested_attributes)
143
+ missing_series_defaults.extend(
144
+ SchemaValidator._check_mandatory_or_default(entry, specification.nested_attributes)
145
+ )
137
146
  else:
138
- SchemaValidator._ensure_mandatory_present(attribute.nested, specification.nested_attributes)
147
+ missing_series_defaults.extend(
148
+ SchemaValidator._check_mandatory_or_default(attribute.nested, specification.nested_attributes)
149
+ )
150
+ return missing_series_defaults
139
151
 
140
152
  @staticmethod
141
153
  def _ensure_attributes_exist(attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]) -> None:
142
154
  """Raises exception any entry of given `attributes` has no corresponding type `specification`"""
143
155
  for name, attribute in attributes.items():
144
156
  if name not in specifications:
145
- log_error_and_raise(ValidationError(SchemaValidator._ATTRIBUTE_UNKNOWN.format(attribute)))
157
+ raise log_error(ValidationError(SchemaValidator._ATTRIBUTE_UNKNOWN.format(attribute)))
146
158
  if attribute.has_nested:
147
159
  specification = specifications[name]
148
160
  SchemaValidator._ensure_attributes_exist(attribute.nested, specification.nested_attributes)
@@ -163,10 +175,10 @@ class SchemaValidator:
163
175
  type_spec = specification.attr_type
164
176
  if not SchemaValidator._is_compatible(specification, value):
165
177
  message = SchemaValidator._INCOMPATIBLE.format(value, type_spec, specification.full_name)
166
- log_error_and_raise(ValidationError(message))
178
+ raise log_error(ValidationError(message))
167
179
  if not SchemaValidator._is_allowed_value(specification, value):
168
180
  message = SchemaValidator._DISALLOWED.format(value, specification.full_name)
169
- log_error_and_raise(ValidationError(message))
181
+ raise log_error(ValidationError(message))
170
182
  if attribute.has_nested:
171
183
  SchemaValidator._ensure_value_and_type_match(attribute.nested, specification.nested_attributes)
172
184
  if attribute.has_nested_list:
@@ -186,8 +198,7 @@ class SchemaValidator:
186
198
  if not SchemaValidator._is_compatible_value(attribute_type, value):
187
199
  return False
188
200
  return True
189
- else:
190
- return (not is_list) and SchemaValidator._is_compatible_value(attribute_type, value_or_values)
201
+ return (not is_list) and SchemaValidator._is_compatible_value(attribute_type, value_or_values)
191
202
 
192
203
  @staticmethod
193
204
  def _is_compatible_value(attribute_type: AttributeType, value) -> bool:
@@ -198,24 +209,22 @@ class SchemaValidator:
198
209
  return False
199
210
  if attribute_type is AttributeType.LONG:
200
211
  return isinstance(value, int)
201
- elif attribute_type is AttributeType.DOUBLE:
212
+ if attribute_type is AttributeType.DOUBLE:
202
213
  return isinstance(value, (int, float)) and not math.isnan(value)
203
- elif attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
214
+ if attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
204
215
  return isinstance(value, str)
205
- elif attribute_type is AttributeType.TIME_STAMP:
216
+ if attribute_type is AttributeType.TIME_STAMP:
206
217
  return FameTime.is_fame_time_compatible(value)
207
- elif attribute_type is AttributeType.TIME_SERIES:
218
+ if attribute_type is AttributeType.TIME_SERIES:
208
219
  return isinstance(value, (str, int)) or (isinstance(value, float) and not math.isnan(value))
209
- else:
210
- log_error_and_raise(ValidationError(SchemaValidator._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
220
+ raise log_error(ValidationError(SchemaValidator._TYPE_NOT_IMPLEMENTED.format(attribute_type)))
211
221
 
212
222
  @staticmethod
213
223
  def _is_allowed_value(attribute: AttributeSpecs, value) -> bool:
214
224
  """Returns True if `value` matches an entry of given `Attribute`'s value list or if this list is empty"""
215
225
  if not attribute.values:
216
226
  return True
217
- else:
218
- return value in attribute.values
227
+ return value in attribute.values
219
228
 
220
229
  @staticmethod
221
230
  def load_and_validate_timeseries(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
@@ -248,7 +257,7 @@ class SchemaValidator:
248
257
  manager.register_and_validate(attribute.value)
249
258
  except TimeSeriesError as e:
250
259
  message = SchemaValidator._TIME_SERIES_INVALID.format(specification.full_name)
251
- log_error_and_raise(ValidationError(message, e))
260
+ raise log_error(ValidationError(message, e)) from e
252
261
  if attribute.has_nested:
253
262
  SchemaValidator._ensure_valid_timeseries(attribute.nested, specification.nested_attributes, manager)
254
263
  if attribute.has_nested_list:
@@ -287,10 +296,10 @@ class SchemaValidator:
287
296
  msg = SchemaValidator._MISSING_STRING_SET_ENTRY.format(
288
297
  attribute.value, str(attribute), name
289
298
  )
290
- log_error_and_raise(ValidationError(msg))
299
+ raise log_error(ValidationError(msg))
291
300
  else:
292
301
  msg = SchemaValidator._MISSING_STRING_SET.format(specification.full_name)
293
- log_error_and_raise(ValidationError(msg))
302
+ raise log_error(ValidationError(msg))
294
303
  if attribute.has_nested:
295
304
  SchemaValidator._ensure_string_set_consistency(
296
305
  attribute.nested, specification.nested_attributes, string_sets
@@ -304,16 +313,16 @@ class SchemaValidator:
304
313
  """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
305
314
  sender_id = contract.sender_id
306
315
  if sender_id not in agent_types_by_id:
307
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id)))
316
+ raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id)))
308
317
  if contract.receiver_id not in agent_types_by_id:
309
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id)))
318
+ raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(contract.receiver_id)))
310
319
  sender_type_name = agent_types_by_id[sender_id]
311
320
  if sender_type_name not in schema.agent_types:
312
- log_error_and_raise(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(sender_type_name)))
321
+ raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(sender_type_name)))
313
322
  sender_type = schema.agent_types[sender_type_name]
314
323
  product = contract.product_name
315
324
  if product not in sender_type.products:
316
- log_error_and_raise(ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name)))
325
+ raise log_error(ValidationError(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name)))
317
326
 
318
327
  @staticmethod
319
328
  def check_agents_have_contracts(scenario: Scenario) -> None:
fameio/input/writer.py CHANGED
@@ -1,7 +1,7 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
- import importlib.metadata as metadata
4
+ from importlib import metadata
5
5
  import sys
6
6
  from pathlib import Path
7
7
  from typing import Any, Union
@@ -26,8 +26,6 @@ from fameio.tools import ensure_is_list
26
26
  class ProtoWriterError(Exception):
27
27
  """Indicates an error during writing of protobuf file"""
28
28
 
29
- pass
30
-
31
29
 
32
30
  class ProtoWriter:
33
31
  """Writes a given scenario to protobuf file"""
@@ -103,7 +101,7 @@ class ProtoWriter:
103
101
  specs: dict[str, AttributeSpecs],
104
102
  ) -> None:
105
103
  """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes"""
106
- values_not_set = [key for key in specs.keys()]
104
+ values_not_set = list(specs.keys())
107
105
  for name, attribute in attributes.items():
108
106
  pb_field = self._add_field(pb_parent, name)
109
107
  attribute_specs = specs[name]
@@ -176,7 +174,7 @@ class ProtoWriter:
176
174
  ) -> None:
177
175
  """Assign (nested) Attributes to given protobuf container `pb_parent`"""
178
176
  for name, attribute in attributes.items():
179
- log().debug("Assigning contract attribute `{}`.".format(name))
177
+ log().debug(f"Assigning contract attribute `{name}`.")
180
178
  pb_field = ProtoWriter._add_field(pb_parent, name)
181
179
 
182
180
  if attribute.has_value:
fameio/logs.py CHANGED
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
4
 
@@ -27,7 +27,7 @@ _FORMAT_NORMAL = "%(asctime)s — %(levelname)s — %(message)s" # noqa
27
27
  _FORMAT_DETAILLED = "%(asctime)s.%(msecs)03d — %(levelname)s — %(module)s:%(funcName)s:%(lineno)d — %(message)s" # noqa
28
28
  _TIME_FORMAT = "%H:%M:%S"
29
29
 
30
- _INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: {}"
30
+ _INFO_UPDATING_LOG_LEVEL = "Updating fameio log level to: %s"
31
31
  _WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log level `WARNING`"
32
32
 
33
33
  LOGGER_NAME = "fameio"
@@ -56,6 +56,23 @@ def log_critical_and_raise(exception: Exception) -> None:
56
56
  raise exception
57
57
 
58
58
 
59
+ def log_critical(exception: Exception) -> Exception:
60
+ """
61
+ Logs a critical error with the exception's message and returns the exception for raising it.
62
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
63
+
64
+ Example: `raise log_critical(MyException("My error message"))`
65
+
66
+ Args:
67
+ exception: to extract the error message from
68
+
69
+ Returns:
70
+ the given exception
71
+ """
72
+ log().critical(str(exception))
73
+ return exception
74
+
75
+
59
76
  def log_error_and_raise(exception: Exception) -> None:
60
77
  """
61
78
  Raises the specified `exception` and logs a critical error with the exception's message
@@ -70,6 +87,23 @@ def log_error_and_raise(exception: Exception) -> None:
70
87
  raise exception
71
88
 
72
89
 
90
+ def log_error(exception: Exception) -> Exception:
91
+ """
92
+ Logs an error with the exception's message and returns the exception for raising.
93
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
94
+
95
+ Example: `raise log_error(MyException("My error message"))`
96
+
97
+ Args:
98
+ exception: to extract the error message from
99
+
100
+ Returns:
101
+ the given exception
102
+ """
103
+ log().error(str(exception))
104
+ return exception
105
+
106
+
73
107
  def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None:
74
108
  """
75
109
  Ensures a logger for fameio is present and uses the specified options
@@ -87,7 +121,7 @@ def fameio_logger(log_level_name: str, file_name: Optional[Path] = None) -> None
87
121
  _add_handler(logger, pylog.FileHandler(file_name, mode="w"), formatter)
88
122
 
89
123
  if _loggers:
90
- pylog.info(_INFO_UPDATING_LOG_LEVEL.format(log_level_name))
124
+ pylog.info(_INFO_UPDATING_LOG_LEVEL, log_level_name)
91
125
  _loggers[0] = logger
92
126
  else:
93
127
  _loggers.append(logger)
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
4
  from typing import Union
@@ -28,18 +28,17 @@ class AgentType:
28
28
 
29
29
  def get_simple_column_mask(self) -> list[bool]:
30
30
  """Returns list of bool - where an entry is True if the output column with the same index is not complex"""
31
- return [True if len(field.index_names) == 0 else False for field in self._agent_type.fields]
31
+ return [len(field.index_names) == 0 for field in self._agent_type.fields]
32
32
 
33
33
  def get_complex_column_ids(self) -> set[int]:
34
34
  """Returns set of IDs for complex columns, ignoring simple columns"""
35
- return set([field.field_id for field in self._agent_type.fields if len(field.index_names) > 0])
35
+ return {field.field_id for field in self._agent_type.fields if len(field.index_names) > 0}
36
36
 
37
37
  def get_column_name_for_id(self, column_index: int) -> Union[str, None]:
38
38
  """Returns name of column by given `column_index` or None, if column is not present"""
39
39
  if 0 <= column_index < len(self._agent_type.fields):
40
40
  return self._agent_type.fields[column_index].field_name
41
- else:
42
- return None
41
+ return None
43
42
 
44
43
  def get_inner_columns(self, column_index: int) -> tuple[str, ...]:
45
44
  """Returns tuple of inner column names for complex column with given `column_index`"""
@@ -50,6 +49,10 @@ class AgentType:
50
49
  return self._agent_type.class_name
51
50
 
52
51
 
52
+ class AgentTypeError(Exception):
53
+ """Indicates an error with the agent types definitions"""
54
+
55
+
53
56
  class AgentTypeLog:
54
57
  """Stores data about collected agent types"""
55
58
 
@@ -69,9 +72,9 @@ class AgentTypeLog:
69
72
  for agent_name, agent_type in new_types.items()
70
73
  if agent_name.upper() in self._requested_agents
71
74
  }
72
- for agent_name in self._requested_agent_types.keys():
75
+ for agent_name in self._requested_agent_types:
73
76
  if agent_name in new_types:
74
- raise Exception(self._ERR_DOUBLE_DEFINITION.format(agent_name))
77
+ raise AgentTypeError(self._ERR_DOUBLE_DEFINITION.format(agent_name))
75
78
  self._requested_agent_types.update(new_types)
76
79
 
77
80
  def has_any_agent_type(self) -> bool:
@@ -81,7 +84,7 @@ class AgentTypeLog:
81
84
  def get_agent_type(self, agent_name: str) -> AgentType:
82
85
  """Returns `AgentType` of given agent `name`"""
83
86
  if agent_name not in self._requested_agent_types:
84
- raise Exception(self._ERR_AGENT_TYPE_MISSING.format(agent_name))
87
+ raise AgentTypeError(self._ERR_AGENT_TYPE_MISSING.format(agent_name))
85
88
  return AgentType(self._requested_agent_types[agent_name])
86
89
 
87
90
  def is_requested(self, agent_name: str) -> bool:
@@ -1,4 +1,4 @@
1
- # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2025 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
4
 
@@ -19,7 +19,7 @@ def _apply_time_merging(
19
19
  dataframes: dict[Optional[str], pd.DataFrame], offset: int, period: int, first_positive_focal_point: int
20
20
  ) -> None:
21
21
  """Applies time merging to `data` based on given `offset`, `period`, and `first_positive_focal_point`"""
22
- log().debug(f"Grouping TimeSteps...")
22
+ log().debug("Grouping TimeSteps...")
23
23
  for key in dataframes.keys():
24
24
  df = dataframes[key]
25
25
  index_columns = df.index.names