fameio 3.2.0__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. fameio/cli/convert_results.py +4 -6
  2. fameio/cli/make_config.py +3 -5
  3. fameio/cli/options.py +6 -4
  4. fameio/cli/parser.py +53 -29
  5. fameio/cli/reformat.py +58 -0
  6. fameio/input/__init__.py +4 -4
  7. fameio/input/loader/__init__.py +4 -6
  8. fameio/input/loader/controller.py +11 -16
  9. fameio/input/loader/loader.py +11 -9
  10. fameio/input/metadata.py +26 -29
  11. fameio/input/resolver.py +4 -6
  12. fameio/input/scenario/agent.py +18 -16
  13. fameio/input/scenario/attribute.py +85 -31
  14. fameio/input/scenario/contract.py +23 -28
  15. fameio/input/scenario/exception.py +3 -6
  16. fameio/input/scenario/fameiofactory.py +7 -12
  17. fameio/input/scenario/generalproperties.py +7 -8
  18. fameio/input/scenario/scenario.py +14 -18
  19. fameio/input/scenario/stringset.py +5 -6
  20. fameio/input/schema/agenttype.py +8 -10
  21. fameio/input/schema/attribute.py +30 -36
  22. fameio/input/schema/java_packages.py +6 -7
  23. fameio/input/schema/schema.py +9 -11
  24. fameio/input/validator.py +178 -41
  25. fameio/input/writer.py +20 -29
  26. fameio/logs.py +28 -19
  27. fameio/output/agent_type.py +14 -16
  28. fameio/output/conversion.py +9 -12
  29. fameio/output/csv_writer.py +33 -23
  30. fameio/output/data_transformer.py +11 -11
  31. fameio/output/execution_dao.py +170 -0
  32. fameio/output/input_dao.py +16 -19
  33. fameio/output/output_dao.py +7 -7
  34. fameio/output/reader.py +8 -10
  35. fameio/output/yaml_writer.py +2 -3
  36. fameio/scripts/__init__.py +15 -4
  37. fameio/scripts/convert_results.py +18 -17
  38. fameio/scripts/exception.py +1 -1
  39. fameio/scripts/make_config.py +3 -4
  40. fameio/scripts/reformat.py +71 -0
  41. fameio/scripts/reformat.py.license +3 -0
  42. fameio/series.py +78 -47
  43. fameio/time.py +15 -18
  44. fameio/tools.py +42 -4
  45. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/METADATA +33 -23
  46. fameio-3.3.0.dist-info/RECORD +60 -0
  47. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/entry_points.txt +1 -0
  48. fameio-3.2.0.dist-info/RECORD +0 -56
  49. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/LICENSE.txt +0 -0
  50. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
  51. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
  52. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
  53. {fameio-3.2.0.dist-info → fameio-3.3.0.dist-info}/WHEEL +0 -0
fameio/input/validator.py CHANGED
@@ -17,11 +17,11 @@ from fameio.time import FameTime
 
 
  class ValidationError(InputError):
- """Indicates an error occurred during validation of any data with a connected schema"""
+ """Indicates an error occurred during validation of any data with a connected schema."""
 
 
  class SchemaValidator:
- """Handles validation of scenarios based on a connected `schema`"""
+ """Handles validation of scenarios based on a connected `schema`."""
 
  _AGENT_ID_NOT_UNIQUE = "Agent ID(s) not unique: '{}'."
  _AGENT_TYPE_UNKNOWN = "Agent type '{}' not declared in Schema."
@@ -45,9 +45,7 @@ class SchemaValidator:
  def validate_scenario_and_timeseries(
  scenario: Scenario, path_resolver: PathResolver = PathResolver()
  ) -> TimeSeriesManager:
- """
- Validates the given `scenario` and its timeseries using given `path_resolver`
- Raises an exception if schema requirements are not met or timeseries data are erroneous.
+ """Validates the given `scenario` and its timeseries using given `path_resolver`.
 
  Args:
  scenario: to be validated against the encompassed schema
@@ -55,8 +53,9 @@ class SchemaValidator:
 
  Returns:
  a new TimeSeriesManager initialised with validated time series from scenario
+
  Raises:
- ValidationError: if an error in the scenario or in timeseries is spotted
+ ValidationError: if schema requirements are not met or timeseries are erroneous, logged with level "ERROR"
  """
  schema = scenario.schema
  agents = scenario.agents
@@ -75,28 +74,63 @@ class SchemaValidator:
 
  @staticmethod
  def ensure_unique_agent_ids(agents: list[Agent]) -> None:
- """Raises exception if any id for given `agents` is not unique"""
+ """Ensures that IDs of given agents are unique.
+
+ Args:
+ agents: whose IDs are to be checked to uniqueness
+
+ Raises:
+ ValidationError: if any id for given `agents` is not unique, logged with level "ERROR"
+ """
  list_of_ids = [agent.id for agent in agents]
  non_unique_ids = [agent_id for agent_id, count in Counter(list_of_ids).items() if count > 1]
  if non_unique_ids:
  raise log_error(ValidationError(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
 
  @staticmethod
- def ensure_agent_and_timeseries_are_valid(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager):
- """Validates given `agent` against `schema` plus loads and validates its timeseries"""
+ def ensure_agent_and_timeseries_are_valid(
+ agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager
+ ) -> None:
+ """Validates given `agent` against `schema`, loads and validates its timeseries.
+
+ Args:
+ agent: to be checked
+ schema: to check the agent against
+ timeseries_manager: to register new timeseries at
+
+ Raises:
+ ValidationError: if agent is not in schema, has missing or invalid data; logged with level "ERROR"
+ """
  SchemaValidator.ensure_agent_type_in_schema(agent, schema)
  SchemaValidator.ensure_is_valid_agent(agent, schema, timeseries_manager)
  SchemaValidator.load_and_validate_timeseries(agent, schema, timeseries_manager)
 
  @staticmethod
  def ensure_agent_type_in_schema(agent: Agent, schema: Schema) -> None:
- """Raises exception if type for given `agent` is not specified in given `schema`"""
+ """Makes sure that the given agent is contained in the given schema.
+
+ Args:
+ agent: to be checked
+ schema: that ought to contain the agent
+
+ Raises:
+ ValidationError: if type for given `agent` is not specified in given `schema`, logged with level "ERROR"
+ """
  if agent.type_name not in schema.agent_types:
  raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
 
  @staticmethod
  def ensure_is_valid_agent(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
- """Raises an exception if given `agent` does not meet the specified `schema` requirements"""
+ """Ensures that given `agent` meets the specified `schema` requirements and registers new timeseries
+
+ Args:
+ agent: to be checked
+ schema: to check against
+ timeseries_manager: to register new timeseries at
+
+ Raises:
+ ValidationError: if the agent doesn't meet the schema's requirements, logged with level "ERROR"
+ """
  scenario_attributes = agent.attributes
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
  missing_default_series = SchemaValidator._check_mandatory_or_default(scenario_attributes, schema_attributes)
@@ -107,7 +141,18 @@ class SchemaValidator:
 
  @staticmethod
  def _get_agent(schema: Schema, name: str) -> AgentType:
- """Returns agent specified by `name` or raises Exception if this agent is not present in given `schema`"""
+ """Returns agent type as specified by `name`.
+
+ Args:
+ schema: to obtain the agent type from
+ name: of the agent type to obtain
+
+ Returns:
+ AgentType corresponding to given name
+
+ Raises:
+ ValidationError: if this agent is not present in given `schema`, logged with level "ERROR"
+ """
  if name in schema.agent_types:
  return schema.agent_types[name]
  raise log_error(ValidationError(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
@@ -117,12 +162,19 @@ class SchemaValidator:
  attributes: dict[str, Attribute],
  specifications: dict[str, AttributeSpecs],
  ) -> list[str | float]:
- """
- Raises Exception if in given list of `specifications` at least one item is mandatory,
- provides no defaults and is not contained in given `attributes` dictionary
+ """Ensures that each attribute that is mandatory has either a value specified or a default value available.
+
+ Also gathers and returns all default values of time series attributes.
+
+ Args:
+ attributes: to check for completeness
+ specifications: to check attributes against
 
  Returns:
  list of time series defaults used in scenario
+
+ Raises:
+ ValidationError: if any mandatory attribute is missing and has no default
  """
  missing_series_defaults: list[str | float] = []
  for name, specification in specifications.items():
@@ -154,7 +206,16 @@ class SchemaValidator:
 
  @staticmethod
  def _ensure_attributes_exist(attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]) -> None:
- """Raises exception any entry of given `attributes` has no corresponding type `specification`"""
+ """Ensures that each attribute has a corresponding entry in given specifications.
+
+ Args:
+ attributes: to search specifications for
+ specifications: describing the attributes
+
+ Raises:
+ ValidationError: if any entry of given `attributes` has no corresponding type `specification`,
+ logged with level "ERROR"
+ """
  for name, attribute in attributes.items():
  if name not in specifications:
  raise log_error(ValidationError(SchemaValidator._ATTRIBUTE_UNKNOWN.format(attribute)))
@@ -170,7 +231,16 @@ class SchemaValidator:
  def _ensure_value_and_type_match(
  attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs]
  ) -> None:
- """Raises exception if in given list of `attributes` its value does not match associated type `specification`"""
+ """Ensure that the value of an attribute match the attribute's type and are allowed.
+
+ Args:
+ attributes: to check the values for
+ specifications: describing the attribute (and potential value restrictions)
+
+ Raises:
+ ValidationError: if in given list of `attributes` any value does not match associated type `specification`,
+ logged with level "ERROR"
+ """
  for name, attribute in attributes.items():
  specification = specifications[name]
  if attribute.has_value:
@@ -190,7 +260,18 @@ class SchemaValidator:
 
  @staticmethod
  def _is_compatible(specification: AttributeSpecs, value_or_values: Any) -> bool:
- """Returns True if given `value_or_values` is compatible to specified `attribute_type` and `should_be_list`"""
+ """Checks if given `value_or_values` is compatible with the given `specification`.
+
+ Args:
+ specification: of the attribute for which to check the values
+ value_or_values: singe value or list of values that is to be checked for compatibility
+
+ Returns:
+ True if given `value_or_values` is compatible the to specified `attribute_type`, False otherwise
+
+ Raises:
+ ValidationError: if an unknown attribute type is encountered, logged with level "ERROR"
+ """
  is_list = isinstance(value_or_values, list)
  attribute_type = specification.attr_type
  if specification.is_list:
@@ -205,7 +286,19 @@ class SchemaValidator:
 
  @staticmethod
  def _is_compatible_value(attribute_type: AttributeType, value) -> bool:
- """Returns True if given single value is compatible to specified `attribute_type` and is not a NaN float"""
+ """Checks if given value is compatible with the specifications of the `attribute_type`.
+
+ Args:
+ attribute_type: specification to test the value against
+ value: to be tested for compatibility
+
+ Returns:
+ True if given single value is compatible to specified `attribute_type` and is not a NaN float,
+ False otherwise
+
+ Raises:
+ ValidationError: if checks for the attribute type are not implemented, logged with level "ERROR"
+ """
  if attribute_type is AttributeType.INTEGER:
  if isinstance(value, int):
  return -2147483648 < value < 2147483647
@@ -224,15 +317,20 @@ class SchemaValidator:
 
  @staticmethod
  def _is_allowed_value(attribute: AttributeSpecs, value) -> bool:
- """Returns True if `value` matches an entry of given `Attribute`'s value list or if this list is empty"""
- if not attribute.values:
- return True
- return value in attribute.values
+ """Checks if given value is on the list of allowed values for an attribute type.
+
+ Args:
+ attribute: type description of an attribute
+ value: to be checked if compatible with the attribute type's value restrictions
+
+ Returns:
+ True if `value` matches an entry of given `Attribute`'s value list or if this list is empty
+ """
+ return not attribute.values or value in attribute.values
 
  @staticmethod
  def load_and_validate_timeseries(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
- """
- Loads all timeseries specified in given `schema` of given `agent` into given `timeseries_manager`
+ """Loads all timeseries in given `schema` for given `agent`. Uses `timeseries_manager` to validates them.
 
  Args:
  agent: definition in scenario
@@ -244,13 +342,22 @@ class SchemaValidator:
  """
  scenario_attributes = agent.attributes
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
- SchemaValidator._ensure_valid_timeseries(scenario_attributes, schema_attributes, timeseries_manager)
+ SchemaValidator._register_timeseries(scenario_attributes, schema_attributes, timeseries_manager)
 
  @staticmethod
- def _ensure_valid_timeseries(
+ def _register_timeseries(
  attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs], manager: TimeSeriesManager
  ) -> None:
- """Recursively searches for time_series in agent attributes and registers them at given `manager`"""
+ """Recursively searches for timeseries in agent attributes and registers them at given `manager`.
+
+ Args:
+ attributes: to search timeseries in
+ specifications: corresponding to the attributes
+ manager: to register new timeseries at
+
+ Raises:
+ ValidationError: if a timeseries could not be registered, logged at level "ERROR"
+ """
  for name, attribute in attributes.items():
  specification = specifications[name]
  if attribute.has_value:
@@ -262,17 +369,24 @@ class SchemaValidator:
  message = SchemaValidator._TIME_SERIES_INVALID.format(specification.full_name)
  raise log_error(ValidationError(message)) from e
  if attribute.has_nested:
- SchemaValidator._ensure_valid_timeseries(attribute.nested, specification.nested_attributes, manager)
+ SchemaValidator._register_timeseries(attribute.nested, specification.nested_attributes, manager)
  if attribute.has_nested_list:
  for entry in attribute.nested_list:
- SchemaValidator._ensure_valid_timeseries(entry, specification.nested_attributes, manager)
+ SchemaValidator._register_timeseries(entry, specification.nested_attributes, manager)
 
  @staticmethod
  def ensure_string_set_consistency(agent: Agent, schema: Schema, string_sets: dict[str, StringSet]) -> None:
- """
- Raises exception if
- a) an agent's attribute is of type StringSet but the corresponding StringSet is not defined in the scenario
- b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
+ """Checks consistency of an `agent's` StringSet attributes as mentioned in `schema` with provided `string_sets`.
+
+ Args:
+ agent: whose StringSet attributes are to be checked for consistency
+ schema: describing the agent's attributes
+ string_sets: as defined in the scenario and to test the agents attribute against
+
+ Raises:
+ ValidationError: logged with level "ERROR", occur when either
+ a) an agent's attribute is type StringSet but the corresponding StringSet is not defined in the scenario, or
+ b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
  """
  scenario_attributes = agent.attributes
  schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
@@ -282,12 +396,19 @@ class SchemaValidator:
  def _ensure_string_set_consistency(
  attributes: dict[str, Attribute], specifications: dict[str, AttributeSpecs], string_sets: dict[str, StringSet]
  ) -> None:
- """
- Recursively iterates through all attributes of an agent, applying tests if attribute type is `StringSet`
+ """Recursively iterates through all attributes of an agent checking consistency of `StringSet` type attributes.
+
+ Checks consistency of agent `StringSet` attributes with provided `string_sets` in the scenario and schema.
+
+ Args:
+ attributes: attributes of an agent
+ specifications: corresponding to the provided attributes
+ string_sets: to check attributes of type string_set against
+
  Raises:
- ValidationError: if
- a) StringSet mentioned in schema is not defined in the scenario
- b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
+ ValidationError: logged with level "ERROR", occur when
+ a) StringSet declared in schema is not defined in the section "StringSet" in the scenario, or
+ b) value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
  """
  for name, attribute in attributes.items():
  specification = specifications[name]
@@ -313,7 +434,19 @@ class SchemaValidator:
 
  @staticmethod
  def ensure_is_valid_contract(contract: Contract, schema: Schema, agent_types_by_id: dict[int, str]) -> None:
- """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
+ """Checks validity of a contract's IDs and product.
+
+ Ensures that for a given `contract` sender and receiver IDs are valid, and that the sender offers the
+ contracted product.
+
+ Args:
+ contract: to be checked
+ schema: to extract the sender's available products from
+ agent_types_by_id: to test if sender and receiver IDs are contained
+
+ Raises:
+ ValidationError: if given `contract` uses unknown agent IDs or an unknown product, logged with level "ERROR"
+ """
  sender_id = contract.sender_id
  if sender_id not in agent_types_by_id:
  raise log_error(ValidationError(SchemaValidator._AGENT_MISSING.format(sender_id, contract.to_dict())))
@@ -333,7 +466,11 @@
 
  @staticmethod
  def check_agents_have_contracts(scenario: Scenario) -> None:
- """Raises warning for each agent without any assigned contract"""
+ """Loads a warning for each agent without any assigned contract.
+
+ Args:
+ scenario: to search for agents without any contract
+ """
  senders = [contract.sender_id for contract in scenario.contracts]
  receivers = [contract.receiver_id for contract in scenario.contracts]
  active_agents = set(senders + receivers)
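
Note: taken together, the updated docstrings above document a single public entry point and a non-raising warning pass. The following minimal sketch (not part of this diff) shows how they fit together; it assumes a `Scenario` object has already been loaded elsewhere, e.g. from a parsed scenario YAML, and relies only on the signatures visible in the hunks above.

```python
from fameio.input.validator import SchemaValidator, ValidationError


def validate(scenario) -> None:
    """Sketch: run the documented validation steps on an already-loaded Scenario."""
    try:
        # Validates agents, contracts, and timeseries; returns a TimeSeriesManager on success.
        # The default PathResolver() is used for resolving timeseries file paths.
        timeseries_manager = SchemaValidator.validate_scenario_and_timeseries(scenario)
    except ValidationError:
        # Every failure is already logged with level "ERROR" via log_error() before being raised
        raise
    # Emits warnings (does not raise) for agents without any assigned contract
    SchemaValidator.check_agents_have_contracts(scenario)
```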
fameio/input/writer.py CHANGED
@@ -28,11 +28,11 @@ from fameio.tools import ensure_is_list
 
 
  class ProtoWriterError(InputError):
- """Indicates an error during writing of a protobuf file"""
+ """Indicates an error during writing of a protobuf file."""
 
 
  class ProtoWriter:
- """Writes a given scenario to protobuf file"""
+ """Writes a given scenario to protobuf file."""
 
  _FAME_PROTOBUF_STREAM_HEADER = fameio.FILE_HEADER_V2
 
@@ -54,8 +54,7 @@ class ProtoWriter:
  self._time_series_manager: TimeSeriesManager = time_series_manager
 
  def write_validated_scenario(self, scenario: Scenario) -> None:
- """
- Writes given validated Scenario to file
+ """Writes given validated Scenario to file.
 
  Args:
  scenario: to be written to file
@@ -68,8 +67,7 @@ class ProtoWriter:
  self._write_data_to_disk(serialised)
 
  def _create_protobuf_from_scenario(self, scenario: Scenario) -> DataStorage:
- """
- Returns given `scenario` written to new DataStorage protobuf
+ """Returns given `scenario` written to new DataStorage protobuf.
 
  Args:
  scenario: to be converted to protobuf
@@ -97,7 +95,7 @@
 
  @staticmethod
  def _set_general_properties(pb_input: InputData, general_properties: GeneralProperties) -> None:
- """Saves a scenario's general properties to specified protobuf `pb_input` container"""
+ """Saves a scenario's general properties to specified protobuf `pb_input` container."""
  log().info("Adding General Properties")
  pb_input.run_id = general_properties.run_id
  pb_input.simulation.start_time = general_properties.simulation_start_time
@@ -105,8 +103,7 @@
  pb_input.simulation.random_seed = general_properties.simulation_random_seed
 
  def _add_agents(self, pb_input: InputData, agents: list[Agent], schema: Schema) -> None:
- """
- Triggers setting of `agents` to `pb_input`
+ """Triggers setting of `agents` to `pb_input`.
 
  Args:
  pb_input: parent element to add the agents to
@@ -125,7 +122,7 @@
 
  @staticmethod
  def _set_agent(pb_agent: InputData.AgentDao, agent: Agent) -> InputData.AgentDao:
- """Saves type and id of given `agent` to protobuf `pb_agent` container. Returns given `pb_agent`"""
+ """Saves type and id of given `agent` to protobuf `pb_agent` container. Returns given `pb_agent`."""
  pb_agent.class_name = agent.type_name
  pb_agent.id = agent.id
  return pb_agent
@@ -136,8 +133,7 @@
  attributes: dict[str, Attribute],
  specs: dict[str, AttributeSpecs],
  ) -> None:
- """
- Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes
+ """Assigns `attributes` to protobuf fields of given `pb_parent` - cascades for nested Attributes.
 
  Args:
  pb_parent: to store the attributes in
@@ -171,14 +167,13 @@
 
  @staticmethod
  def _add_field(pb_parent: InputData.AgentDao | NestedField, name: str) -> NestedField:
- """Returns new field with given `name` that is added to given `pb_parent`"""
+ """Returns new field with given `name` that is added to given `pb_parent`."""
  pb_field = pb_parent.fields.add()
  pb_field.field_name = name
  return pb_field
 
  def _set_attribute(self, pb_field: NestedField, value: Any, attribute_type: AttributeType) -> None:
- """
- Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`
+ """Sets given `value` to given protobuf `pb_field` depending on specified `attribute_type`.
 
  Args:
  pb_field: parent element to contain the attribute value therein
@@ -205,8 +200,7 @@
 
  @staticmethod
  def _add_contracts(pb_input: InputData, contracts: list[Contract]) -> None:
- """
- Adds given contracts to input data
+ """Adds given contracts to input data.
 
  Args:
  pb_input: parent element to have the contracts added to
@@ -223,7 +217,7 @@
 
  @staticmethod
  def _set_contract(pb_contract: ProtoContract, contract: Contract) -> ProtoContract:
- """Saves given `contract` details to protobuf container `pb_contract`. Returns given `pb_contract`"""
+ """Saves given `contract` details to protobuf container `pb_contract`. Returns given `pb_contract`."""
  pb_contract.sender_id = contract.sender_id
  pb_contract.receiver_id = contract.receiver_id
  pb_contract.product_name = contract.product_name
@@ -235,8 +229,7 @@
 
  @staticmethod
  def _set_contract_attributes(pb_parent: ProtoContract | NestedField, attributes: dict[str, Attribute]) -> None:
- """
- Assign (nested) Attributes to given protobuf container `pb_parent`
+ """Assign (nested) Attributes to given protobuf container `pb_parent`.
 
  Args:
  pb_parent: parent element, either a contract or an attribute
@@ -263,7 +256,7 @@
  ProtoWriter._set_contract_attributes(pb_field, attribute.nested)
 
  def _set_time_series(self, pb_input: InputData) -> None:
- """Adds all time series from TimeSeriesManager to given `pb_input`"""
+ """Adds all time series from TimeSeriesManager to given `pb_input`."""
  log().info("Adding TimeSeries")
  for unique_id, identifier, data in self._time_series_manager.get_all_series():
  pb_series = pb_input.time_series.add()
@@ -274,13 +267,13 @@
 
  @staticmethod
  def _set_schema(pb_input: InputData, schema: Schema) -> None:
- """Sets the given `schema` `pb_input`"""
+ """Sets the given `schema` `pb_input`."""
  log().info("Adding Schema")
  pb_input.schema = schema.to_string()
 
  @staticmethod
  def _set_string_sets(pb_input: InputData, string_sets: dict[str, StringSet]) -> None:
- """Adds the given StringSets to given `pb_input`"""
+ """Adds the given StringSets to given `pb_input`."""
  for name, string_set in string_sets.items():
  pb_set = pb_input.string_sets.add()
  pb_set.name = name
@@ -294,7 +287,7 @@
 
  @staticmethod
  def _set_java_package_names(pb_model: ModelData, java_packages: JavaPackages) -> None:
- """Adds given JavaPackages names to given ModelData section"""
+ """Adds given JavaPackages names to given ModelData section."""
  pb_packages = pb_model.package_definition
  pb_packages.agents.extend(java_packages.agents)
  pb_packages.data_items.extend(java_packages.data_items)
@@ -302,14 +295,13 @@
 
  @staticmethod
  def _set_execution_versions(pb_version_data: ExecutionData.VersionData) -> None:
- """Adds version strings for fameio, fameprotobuf, and python to the given Versions message"""
+ """Adds version strings for fameio, fameprotobuf, and python to the given Versions message."""
  pb_version_data.fame_protobuf = metadata.version("fameprotobuf")
  pb_version_data.fame_io = metadata.version("fameio")
  pb_version_data.python = sys.version
 
  def _serialise(self, data_storage: DataStorage) -> bytes:
- """
- Serialise given data storage to bytes
+ """Serialise given data storage to bytes.
 
  Args:
  data_storage: to be serialised
@@ -326,8 +318,7 @@
  raise log_error(ProtoWriterError(self._ERR_PROTOBUF_ENCODING)) from e
 
  def _write_data_to_disk(self, serialised_data: bytes) -> None:
- """
- Writes given serialised data to file
+ """Writes given serialised data to file.
 
  Args:
  serialised_data: to be written to file
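
Note: `ProtoWriter` consumes the `TimeSeriesManager` produced by the validation step shown above. The rough sketch below (not part of this diff) illustrates the call sequence; the `ProtoWriter(...)` constructor arguments are an assumption based only on the `time_series_manager` attribute assignment visible in the hunks, so check the package source before relying on them.

```python
from pathlib import Path

from fameio.input.validator import SchemaValidator
from fameio.input.writer import ProtoWriter


def write_scenario(scenario, output_file: Path) -> None:
    """Sketch: validate an already-loaded scenario, then serialise it to a protobuf file."""
    # Returns the TimeSeriesManager holding all validated time series of the scenario
    timeseries_manager = SchemaValidator.validate_scenario_and_timeseries(scenario)
    # Assumed constructor: only the time_series_manager attribute and
    # write_validated_scenario() are actually visible in this diff.
    writer = ProtoWriter(output_file, timeseries_manager)
    writer.write_validated_scenario(scenario)
```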
fameio/logs.py CHANGED
@@ -6,10 +6,11 @@ from __future__ import annotations
  import logging as pylog
  from enum import Enum
  from pathlib import Path
+ from typing import TypeVar
 
 
  class LogLevel(Enum):
- """Levels for Logging"""
+ """Levels for Logging."""
 
  PRINT = 100
  CRITICAL = pylog.CRITICAL
@@ -33,37 +34,37 @@ _WARN_NOT_INITIALIZED = "Logger for fameio not initialised: using default log le
  LOGGER_NAME = "fameio"
  DEFAULT_LOG_LEVEL = LogLevel.WARNING
 
+ T = TypeVar("T", bound=Exception)
+
 
  def log() -> pylog.Logger:
- """Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`"""
+ """Returns already set up FAME-Io's logger or - if not set up - a new logger with `WARNING`."""
  if not _loggers:
  fameio_logger(DEFAULT_LOG_LEVEL.name)
  pylog.warning(_WARN_NOT_INITIALIZED)
  return _loggers[0]
 
 
- def log_critical(exception: Exception) -> Exception:
- """
- Logs a critical error with the exception's message and returns the exception for raising it.
- Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
+ def log_critical(exception: T) -> T:
+ """Logs a critical error with the exception's message and returns the exception for raising it.
 
- Example: `raise log_critical(MyException("My error message"))`
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
+ Example: `raise log_critical(MyException("My error message"))
 
  Args:
  exception: to extract the error message from
 
  Returns:
- the given exception
+ the given exception
  """
  log().critical(str(exception))
  return exception
 
 
- def log_error(exception: Exception) -> Exception:
- """
- Logs an error with the exception's message and returns the exception for raising.
- Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
+ def log_error(exception: T) -> T:
+ """Logs an error with the exception's message and returns the exception for raising.
 
+ Does **not** raise the exception, i.e. the command must be preceded by a `raise`.
  Example: `raise log_error(MyException("My error message"))`
 
  Args:
@@ -76,9 +77,17 @@ def log_error(exception: Exception) -> Exception:
  return exception
 
 
- def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
+ def log_and_print(message: str) -> None:
+ """Logs a message with priority "PRINT"., ensuring it is printed to console.
+
+ Args:
+ message: to be logged with the highest priority
  """
- Ensures a logger for fameio is present and uses the specified options
+ log().log(LogLevel.PRINT.value, message)
+
+
+ def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
+ """Ensures a logger for fameio is present and uses the specified options.
 
  Args:
  log_level_name: one of Python's official logging level names, e.g. "INFO"
@@ -100,8 +109,7 @@ def fameio_logger(log_level_name: str, file_name: Path | None = None) -> None:
 
 
  def _get_logger(level: LogLevel) -> pylog.Logger:
- """
- Returns fameio logger with given log level without any handler and, not propagating to parent
+ """Returns fameio logger with given log level without any handler and, not propagating to parent.
 
  Args:
  level: integer representing the log level
@@ -119,10 +127,11 @@ def _get_logger(level: LogLevel) -> pylog.Logger:
 
 
  def _get_formatter(level: LogLevel) -> pylog.Formatter:
- """
- Returns a log formatter depending on the given log `level`
+ """Returns a log formatter depending on the given log `level`.
+
  Args:
  level: this log level determines how detailed the logger's output is
+
  Returns:
  new log formatter
  """
@@ -130,7 +139,7 @@ def _get_formatter(level: LogLevel) -> pylog.Formatter:
 
 
  def _add_handler(logger: pylog.Logger, handler: pylog.Handler, formatter: pylog.Formatter) -> None:
- """Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list"""
+ """Adds given `handler` using the specified `formatter` to given `logger` and `_handlers` list."""
  handler.setFormatter(formatter)
  _handlers.append(handler)
  logger.addHandler(handler)
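
Note: the switch from `Exception` to the bound TypeVar `T` means `raise log_error(...)` now preserves the concrete exception type for static type checkers, and the new `log_and_print` emits a message at the custom PRINT level (100). A short usage sketch based on the signatures shown above; `MyToolError` is a hypothetical exception used only for illustration.

```python
from fameio.logs import fameio_logger, log_and_print, log_error


class MyToolError(Exception):
    """Hypothetical error type, not part of fameio."""


def run() -> None:
    # Set up fameio's logger once, using one of Python's official level names
    fameio_logger("INFO")
    log_and_print("starting conversion")  # logged at the custom PRINT level (100)
    try:
        # log_error() logs the message and returns the exception; the caller raises it
        raise log_error(MyToolError("My error message"))
    except MyToolError:
        # Thanks to the TypeVar, type checkers see the return type as MyToolError
        pass
```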