fameio 1.8.1__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. CHANGELOG.md +204 -0
  2. fameio/scripts/__init__.py +8 -6
  3. fameio/scripts/__init__.py.license +3 -0
  4. fameio/scripts/convert_results.py +30 -34
  5. fameio/scripts/convert_results.py.license +3 -0
  6. fameio/scripts/make_config.py +13 -16
  7. fameio/scripts/make_config.py.license +3 -0
  8. fameio/source/cli/__init__.py +3 -0
  9. fameio/source/cli/convert_results.py +75 -0
  10. fameio/source/cli/make_config.py +62 -0
  11. fameio/source/cli/options.py +59 -0
  12. fameio/source/cli/parser.py +238 -0
  13. fameio/source/loader.py +10 -11
  14. fameio/source/logs.py +49 -25
  15. fameio/source/results/conversion.py +12 -14
  16. fameio/source/results/csv_writer.py +16 -5
  17. fameio/source/results/data_transformer.py +3 -2
  18. fameio/source/results/input_dao.py +163 -0
  19. fameio/source/results/reader.py +25 -14
  20. fameio/source/results/yaml_writer.py +28 -0
  21. fameio/source/scenario/agent.py +56 -39
  22. fameio/source/scenario/attribute.py +9 -12
  23. fameio/source/scenario/contract.py +55 -40
  24. fameio/source/scenario/exception.py +11 -9
  25. fameio/source/scenario/generalproperties.py +11 -17
  26. fameio/source/scenario/scenario.py +19 -14
  27. fameio/source/schema/agenttype.py +75 -27
  28. fameio/source/schema/attribute.py +8 -7
  29. fameio/source/schema/schema.py +24 -11
  30. fameio/source/series.py +146 -25
  31. fameio/source/time.py +8 -8
  32. fameio/source/tools.py +13 -2
  33. fameio/source/validator.py +138 -58
  34. fameio/source/writer.py +108 -112
  35. fameio-2.0.0.dist-info/LICENSES/Apache-2.0.txt +178 -0
  36. fameio-2.0.0.dist-info/LICENSES/CC-BY-4.0.txt +395 -0
  37. fameio-2.0.0.dist-info/LICENSES/CC0-1.0.txt +121 -0
  38. {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/METADATA +144 -112
  39. fameio-2.0.0.dist-info/RECORD +52 -0
  40. {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/WHEEL +1 -2
  41. fameio-2.0.0.dist-info/entry_points.txt +4 -0
  42. fameio/source/cli.py +0 -253
  43. fameio-1.8.1.dist-info/RECORD +0 -40
  44. fameio-1.8.1.dist-info/entry_points.txt +0 -3
  45. fameio-1.8.1.dist-info/top_level.txt +0 -1
  46. {fameio-1.8.1.dist-info → fameio-2.0.0.dist-info}/LICENSE.txt +0 -0
@@ -0,0 +1,163 @@
1
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ import ast
5
+ from typing import List, Dict, Any, Optional, Tuple
6
+
7
+ from fameprotobuf.DataStorage_pb2 import DataStorage
8
+ from fameprotobuf.Field_pb2 import NestedField
9
+ from fameprotobuf.InputFile_pb2 import InputData
10
+
11
+ from fameio.source.logs import logger
12
+ from fameio.source.scenario import GeneralProperties, Agent, Contract, Scenario
13
+ from fameio.source.schema import Schema, AttributeSpecs, AttributeType
14
+ from fameio.source.series import TimeSeriesManager
15
+
16
+
17
class InputConversionException(Exception):
    """An Exception indicating an error during reconstruction of input from its protobuf representation"""
21
+
22
+
23
class InputDao:
    """Data access object for inputs saved in protobuf"""

    _ERR_NO_INPUTS = "No input data found on file."
    _ERR_MULTIPLE_INPUTS = "File corrupt. More than one input section found on file."

    # maps each simple AttributeType to the name of the protobuf NestedField member that stores its value
    _FIELD_NAME_MAP: Dict = {
        AttributeType.STRING: "stringValue",
        AttributeType.ENUM: "stringValue",
        AttributeType.INTEGER: "intValue",
        AttributeType.DOUBLE: "doubleValue",
        AttributeType.LONG: "longValue",
        AttributeType.TIME_SERIES: "seriesId",
        AttributeType.BLOCK: "field",
    }

    def __init__(self) -> None:
        self._inputs: List[InputData] = []
        self._timeseries_manager: TimeSeriesManager = TimeSeriesManager()
        self._schema: Optional[Schema] = None

    def store_inputs(self, data_storages: List[DataStorage]) -> None:
        """
        Extracts and stores Inputs in given DataStorages - if such are present

        Args:
            data_storages: to be scanned for InputData
        """
        self._inputs.extend([data_storage.input for data_storage in data_storages if data_storage.HasField("input")])

    def recover_inputs(self) -> Tuple[TimeSeriesManager, Scenario]:
        """
        Recovers inputs to GeneralProperties, Schema, Agents, Contracts, Timeseries

        Returns:
            recovered timeseries and scenario

        Raises:
            InputConversionException: if inputs could not be recovered
        """
        input_data = self._get_input_data()
        self._schema = self._get_schema(input_data)
        scenario = Scenario(self._schema, self._get_general_properties(input_data))
        for contract in self._get_contracts(input_data):
            scenario.add_contract(contract)

        # timeseries must be reconstructed before agents: agent attributes may reference series by id
        self._init_timeseries(input_data)
        for agent in self._get_agents(input_data):
            scenario.add_agent(agent)

        return self._timeseries_manager, scenario

    def _get_input_data(self) -> InputData:
        """
        Checks that exactly one previously extracted input data section exists and returns it

        Returns:
            the single stored InputData

        Raises:
            InputConversionException: if no or more than one input section is present
        """
        if not self._inputs:
            logger().error(self._ERR_NO_INPUTS)
            raise InputConversionException(self._ERR_NO_INPUTS)
        if len(self._inputs) > 1:
            logger().error(self._ERR_MULTIPLE_INPUTS)
            raise InputConversionException(self._ERR_MULTIPLE_INPUTS)
        return self._inputs[0]

    @staticmethod
    def _get_schema(input_data: InputData) -> Schema:
        """Read and return Schema from given `input_data`"""
        return Schema.from_string(input_data.schema)

    @staticmethod
    def _get_general_properties(input_data: InputData) -> GeneralProperties:
        """Read and return GeneralProperties from given `input_data`"""
        return GeneralProperties(
            run_id=input_data.runId,
            simulation_start_time=input_data.simulation.startTime,
            simulation_stop_time=input_data.simulation.stopTime,
            simulation_random_seed=input_data.simulation.randomSeed,
            output_process=input_data.output.process,
            output_interval=input_data.output.interval,
        )

    @staticmethod
    def _get_contracts(input_data: InputData) -> List[Contract]:
        """Read and return Contracts from given `input_data`"""
        return [
            Contract(
                sender_id=contract.senderId,
                receiver_id=contract.receiverId,
                product_name=contract.productName,
                delivery_interval=contract.deliveryIntervalInSteps,
                first_delivery_time=contract.firstDeliveryTime,
                expiration_time=contract.expirationTime,
                # metadata is stored as the repr of a Python dict; literal_eval restores it without executing code
                meta_data=ast.literal_eval(contract.metadata),
            )
            for contract in input_data.contract
        ]

    def _init_timeseries(self, input_data: InputData) -> None:
        """Read timeseries from given `input_data` and initialise TimeSeriesManager"""
        self._timeseries_manager.reconstruct_time_series(list(input_data.timeSeries))

    def _get_agents(self, input_data: InputData) -> List[Agent]:
        """Read and return Agents from given `input_data`"""
        agents = []
        for agent_dao in input_data.agent:
            agent = Agent(
                agent_id=agent_dao.id, type_name=agent_dao.className, meta_data=ast.literal_eval(agent_dao.metadata)
            )
            attribute_dict = self._get_attributes(
                list(agent_dao.field), self._schema.agent_types[agent_dao.className].attributes
            )
            agent.init_attributes_from_dict(attribute_dict)
            agents.append(agent)
        return agents

    def _get_attributes(self, fields: List[NestedField], schematics: Dict[str, AttributeSpecs]) -> Dict[str, Any]:
        """Read and return Attributes as Dictionary from given list of fields"""
        attributes: Dict[str, Any] = {}
        for field in fields:
            attributes[field.fieldName] = self._get_field_value(field, schematics[field.fieldName])
        return attributes

    def _get_field_value(self, field: NestedField, schematic: AttributeSpecs) -> Any:
        """Extracts and returns value(s) of given `field` according to its `schematic`"""
        attribute_type: AttributeType = schematic.attr_type
        if attribute_type is AttributeType.TIME_SERIES:
            return self._timeseries_manager.get_reconstructed_series_by_id(field.seriesId)
        if attribute_type is AttributeType.BLOCK:
            # nested blocks recurse into their own attribute dictionaries
            if schematic.is_list:
                return [self._get_attributes(list(entry.field), schematic.nested_attributes) for entry in field.field]
            return self._get_attributes(list(field.field), schematic.nested_attributes)
        # simple types: read the (repeated) protobuf member matching the attribute type
        value = getattr(field, self._FIELD_NAME_MAP[attribute_type])
        if schematic.is_list:
            return list(value)
        return list(value)[0]
@@ -1,8 +1,8 @@
1
1
  # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
- import logging as log
6
6
  import struct
7
7
  import typing
8
8
  from abc import ABC, abstractmethod
@@ -11,6 +11,8 @@ from typing import IO, List
11
11
  from fameprotobuf.DataStorage_pb2 import DataStorage
12
12
  from google.protobuf.message import DecodeError
13
13
 
14
+ from fameio.source.logs import logger
15
+
14
16
 
15
17
  class Reader(ABC):
16
18
  """Abstract base class for protobuf file readers"""
@@ -23,8 +25,8 @@ class Reader(ABC):
23
25
  _DEBUG_FILE_END_REACHED = "Reached expected end of file."
24
26
 
25
27
  _HEADER_LENGTH = 30
26
- _HEADER_ENCODING = "utf-8"
27
- _BYTES_DEFINING_MESSAGE_LENGTH = 4
28
+ HEADER_ENCODING = "utf-8"
29
+ BYTES_DEFINING_MESSAGE_LENGTH = 4
28
30
  _READER_HEADERS = {
29
31
  "famecoreprotobufstreamfilev001": lambda file, mode: ReaderV1(file, mode), # noqa
30
32
  }
@@ -38,25 +40,34 @@ class Reader(ABC):
38
40
  """Reads associated filestream and returns one or multiple DataStorage(s) or empty list"""
39
41
 
40
42
  @staticmethod
41
- def get_reader(file: IO, read_single: bool = False) -> "Reader":
42
- """Returns reader matching the given file header - if `read_one` is True, read() gets one messages at a time"""
43
- log.debug("Reading file headers...")
43
+ def get_reader(file: IO, read_single: bool = False) -> Reader:
44
+ """
45
+ Returns reader matching the given file header
46
+
47
+ Args:
48
+ file: to be read by the returned Reader
49
+ read_single: if True, the returned Reader's `read()` method gets one messages at a time
50
+
51
+ Returns:
52
+ Reader that can read the specified file
53
+ """
54
+ logger().debug("Reading file headers...")
44
55
  try:
45
- header = file.read(Reader._HEADER_LENGTH).decode(Reader._HEADER_ENCODING)
56
+ header = file.read(Reader._HEADER_LENGTH).decode(Reader.HEADER_ENCODING)
46
57
  return Reader._READER_HEADERS[header](file, read_single)
47
58
  except (KeyError, UnicodeDecodeError):
48
- log.warning(Reader._WARN_NO_HEADER)
59
+ logger().warning(Reader._WARN_NO_HEADER)
49
60
  file.seek(0)
50
61
  if read_single:
51
- log.error(Reader._ERR_UNSUPPORTED_MODE)
62
+ logger().error(Reader._ERR_UNSUPPORTED_MODE)
52
63
  return ReaderV0(file, False)
53
64
 
54
65
  @typing.final
55
66
  def _read_message_length(self) -> int:
56
67
  """Returns length of next DataStorage message in file"""
57
- message_length_byte = self._file.read(self._BYTES_DEFINING_MESSAGE_LENGTH)
68
+ message_length_byte = self._file.read(self.BYTES_DEFINING_MESSAGE_LENGTH)
58
69
  if not message_length_byte:
59
- log.debug(self._DEBUG_FILE_END_REACHED)
70
+ logger().debug(self._DEBUG_FILE_END_REACHED)
60
71
  message_length_int = 0
61
72
  else:
62
73
  message_length_int = struct.unpack(">i", message_length_byte)[0]
@@ -75,7 +86,7 @@ class Reader(ABC):
75
86
  else:
76
87
  raise IOError(self._ERR_FILE_CORRUPT_NEGATIVE_LENGTH)
77
88
  if message_length and len(message) != message_length:
78
- log.error(self._ERR_FILE_CORRUPT_MISSING_DATA)
89
+ logger().error(self._ERR_FILE_CORRUPT_MISSING_DATA)
79
90
  return self._parse_to_data_storage(message) if message else None
80
91
 
81
92
  @staticmethod
@@ -96,7 +107,7 @@ class ReaderV0(Reader):
96
107
 
97
108
  def __init__(self, file: IO, read_single):
98
109
  super().__init__(file, read_single)
99
- log.warning(self._WARN_DEPRECATED)
110
+ logger().warning(self._WARN_DEPRECATED)
100
111
 
101
112
  def read(self) -> List[DataStorage]:
102
113
  result = self._read_data_storage_message()
@@ -115,5 +126,5 @@ class ReaderV1(Reader):
115
126
  messages.append(self._read_data_storage_message(message_length))
116
127
  if self._read_single:
117
128
  break
118
- log.debug(f"Read {len(messages)} messages from file.")
129
+ logger().debug(f"Read {len(messages)} messages from file.")
119
130
  return messages
@@ -0,0 +1,28 @@
1
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+ from pathlib import Path
5
+ from typing import Dict
6
+
7
+ import yaml
8
+
9
+ from fameio.source.logs import logger
10
+
11
+ ERR_WRITE_EXCEPTION = "Failed to save dictionary to YAML file `{}`"
12
+ INFO_DESTINATION = "Saving scenario to file at {}"
13
+
14
+
15
def data_to_yaml_file(data: Dict, file_path: Path) -> None:
    """
    Save the given data to a YAML file at given path

    Args:
        data: to be saved to yaml file
        file_path: at which the file will be created

    Raises:
        RuntimeError: if the file could not be opened or written
    """
    logger().info(INFO_DESTINATION.format(file_path))
    try:
        # pin the encoding: YAML is expected to be UTF-8, not the platform default
        with open(file_path, "w", encoding="utf-8") as f:
            yaml.dump(data, f, sort_keys=False)
    except Exception as e:
        raise RuntimeError(ERR_WRITE_EXCEPTION.format(file_path)) from e
@@ -1,8 +1,10 @@
1
- # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
1
+ # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
- from typing import Any, Dict
6
+ import ast
7
+ from typing import Any, Dict, Optional
6
8
 
7
9
  from fameio.source.scenario.attribute import Attribute
8
10
  from fameio.source.scenario.exception import (
@@ -19,46 +21,71 @@ class Agent:
19
21
  _KEY_TYPE = "Type".lower()
20
22
  _KEY_ID = "Id".lower()
21
23
  _KEY_ATTRIBUTES = "Attributes".lower()
24
+ _KEY_METADATA = "MetaData".lower()
22
25
 
23
- _MISSING_KEY = "Agent requires `key` '{}' but is missing it."
24
- _MISSING_TYPE = "Agent requires `type` but is missing it."
25
- _MISSING_ID = "Agent requires a positive integer `id` but was '{}'."
26
- _DOUBLE_ATTRIBUTE = "Cannot add attribute '{}' to agent {} because it already exists."
26
+ _ERR_MISSING_KEY = "Agent requires `key` '{}' but is missing it."
27
+ _ERR_MISSING_TYPE = "Agent requires `type` but is missing it."
28
+ _ERR_MISSING_ID = "Agent requires a positive integer `id` but was '{}'."
29
+ _ERR_DOUBLE_ATTRIBUTE = "Cannot add attribute '{}' to agent {} because it already exists."
30
+ _ERR_ATTRIBUTE_OVERWRITE = "Agent's attributes are already set and would be overwritten."
27
31
 
28
- def __init__(self, agent_id: int, type_name: str) -> None:
32
+ def __init__(self, agent_id: int, type_name: str, meta_data: Optional[Dict] = None) -> None:
29
33
  """Constructs a new Agent"""
30
- assert_or_raise(type(agent_id) is int and agent_id >= 0, self._MISSING_ID.format(agent_id))
31
- assert_or_raise(type_name and len(type_name.strip()) > 0, self._MISSING_TYPE)
32
- self._id = agent_id
33
- self._type_name = type_name.strip()
34
- self._attributes = {}
34
+ assert_or_raise(type(agent_id) is int and agent_id >= 0, self._ERR_MISSING_ID.format(agent_id))
35
+ assert_or_raise(bool(type_name and type_name.strip()), self._ERR_MISSING_TYPE)
36
+ self._id: int = agent_id
37
+ self._type_name: str = type_name.strip()
38
+ self._attributes: Dict = {}
39
+ self._meta_data: Optional[Dict] = meta_data if meta_data else {}
35
40
 
36
41
  @classmethod
37
- def from_dict(cls, definitions: dict) -> "Agent":
42
+ def from_dict(cls, definitions: dict) -> Agent:
38
43
  """Parses an agent from provided `definitions`"""
39
44
  definitions = keys_to_lower(definitions)
40
- agent_type = get_or_raise(definitions, Agent._KEY_TYPE, Agent._MISSING_KEY)
41
- agent_id = get_or_raise(definitions, Agent._KEY_ID, Agent._MISSING_KEY)
42
- result = cls(agent_id, agent_type)
43
- attribute_definitions = get_or_default(definitions, Agent._KEY_ATTRIBUTES, dict())
44
- result.__init_attributes_from_dict(attribute_definitions)
45
- return result
45
+ agent_type = get_or_raise(definitions, Agent._KEY_TYPE, Agent._ERR_MISSING_TYPE)
46
+ agent_id = get_or_raise(definitions, Agent._KEY_ID, Agent._ERR_MISSING_ID)
47
+ agent = cls(agent_id, agent_type)
48
+ attribute_definitions = get_or_default(definitions, Agent._KEY_ATTRIBUTES, {})
49
+ agent.init_attributes_from_dict(attribute_definitions)
50
+ agent._meta_data = get_or_default(definitions, Agent._KEY_METADATA, {})
51
+ return agent
52
+
53
+ def init_attributes_from_dict(self, attributes: Dict[str, Any]) -> None:
54
+ """Initialize Agent `attributes` from dict; Must only be called when creating a new Agent"""
55
+ assert_or_raise(not self._attributes, self._ERR_ATTRIBUTE_OVERWRITE)
56
+ self._attributes = {}
57
+ for name, value in attributes.items():
58
+ full_name = f"{self.type_name}({self.id}): {name}"
59
+ self.add_attribute(name, Attribute(full_name, value))
60
+
61
+ def add_attribute(self, name: str, value: Attribute) -> None:
62
+ """Adds a new attribute to the Agent (raise an error if it already exists)"""
63
+ if name in self._attributes:
64
+ raise ValueError(self._ERR_DOUBLE_ATTRIBUTE.format(name, self.display_id))
65
+ self._attributes[name] = value
66
+ self._notify_data_changed()
46
67
 
47
68
  def to_dict(self) -> dict:
48
69
  """Serializes the Agent content to a dict"""
49
- result = {}
50
-
51
- result[Agent._KEY_TYPE] = self.type_name
52
- result[Agent._KEY_ID] = self.id
70
+ result = {Agent._KEY_TYPE: self.type_name, Agent._KEY_ID: self.id}
53
71
 
54
- if len(self.attributes) > 0:
72
+ if self.attributes:
55
73
  attributes_dict = {}
56
74
  for attr_name, attr_value in self.attributes.items():
57
75
  attributes_dict[attr_name] = attr_value.generic_content
58
76
  result[self._KEY_ATTRIBUTES] = attributes_dict
59
-
77
+ if self.meta_data:
78
+ result[self._KEY_METADATA] = self.meta_data
60
79
  return result
61
80
 
81
+ def to_string(self) -> str:
82
+ """Serializes this agent to a string"""
83
+ return repr(self.to_dict())
84
+
85
+ @classmethod
86
+ def from_string(cls, definitions: str) -> Agent:
87
+ return cls.from_dict(ast.literal_eval(definitions))
88
+
62
89
  def _notify_data_changed(self):
63
90
  """Placeholder method used to signal data changes to derived types"""
64
91
  pass
@@ -83,17 +110,7 @@ class Agent:
83
110
  """Returns dictionary of all Attributes of this agent"""
84
111
  return self._attributes
85
112
 
86
- def add_attribute(self, name: str, value: Attribute):
87
- """Adds a new attribute to the Agent (raise an error if it already exists)"""
88
- if name in self._attributes:
89
- raise ValueError(self._DOUBLE_ATTRIBUTE.format(name, self.display_id))
90
- self._attributes[name] = value
91
- self._notify_data_changed()
92
-
93
- def __init_attributes_from_dict(self, attributes: Dict[str, Any]) -> None:
94
- """Initialize Agent `attributes` from dict; Must only be called when creating a new Agent"""
95
- assert len(self._attributes) == 0
96
- self._attributes = {}
97
- for name, value in attributes.items():
98
- full_name = str(self.type_name) + "(" + str(self.id) + "): " + name
99
- self.add_attribute(name, Attribute(full_name, value))
113
+ @property
114
+ def meta_data(self) -> dict:
115
+ """Returns dictionary of all MetaData of this agent"""
116
+ return self._meta_data
@@ -1,8 +1,8 @@
1
1
  # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
2
2
  #
3
3
  # SPDX-License-Identifier: Apache-2.0
4
+ from __future__ import annotations
4
5
 
5
- import logging as log
6
6
  from typing import Any, Dict, List
7
7
 
8
8
  from fameio.source.scenario.exception import log_and_raise
@@ -12,7 +12,6 @@ class Attribute:
12
12
  """An Attribute of an agent in a scenario"""
13
13
 
14
14
  _VALUE_MISSING = "Value not specified for Attribute '{}' - leave out if default shall be used (if defined)."
15
- _OVERWRITE = "Value already defined for Attribute '{}' - overwriting value with new one!"
16
15
  _LIST_EMPTY = "Attribute '{}' was assigned an empty list - please remove or fill empty assignments."
17
16
  _DICT_EMPTY = "Attribute '{}' was assigned an empty dictionary - please remove or fill empty assignments."
18
17
  _MIXED_DATA = "Attribute '{}' was assigned a list with mixed complex and simple entries - please fix."
@@ -45,13 +44,11 @@ class Attribute:
45
44
  if not definitions:
46
45
  log_and_raise(Attribute._DICT_EMPTY.format(name))
47
46
 
48
- dictionary = {}
47
+ inner_elements = {}
49
48
  for nested_name, value in definitions.items():
50
49
  full_name = name + "." + nested_name
51
- if nested_name in dictionary:
52
- log.warning(Attribute._OVERWRITE.format(full_name))
53
- dictionary[nested_name] = Attribute(full_name, value)
54
- return dictionary
50
+ inner_elements[nested_name] = Attribute(full_name, value)
51
+ return inner_elements
55
52
 
56
53
  @staticmethod
57
54
  def _is_list_of_dict(name: str, definitions: Any) -> bool:
@@ -79,10 +76,10 @@ class Attribute:
79
76
  elif self.has_nested_list:
80
77
  result = []
81
78
  for attr_dict in self.nested_list:
82
- sub_dict = {}
79
+ inner_elements = {}
83
80
  for name, attr in attr_dict.items():
84
- sub_dict[name] = attr.generic_content
85
- result.append(sub_dict)
81
+ inner_elements[name] = attr.generic_content
82
+ result.append(inner_elements)
86
83
  return result
87
84
  elif self.has_nested:
88
85
  result = {}
@@ -107,12 +104,12 @@ class Attribute:
107
104
  return bool(self._nested)
108
105
 
109
106
  @property
110
- def nested(self) -> Dict[str, "Attribute"]:
107
+ def nested(self) -> Dict[str, Attribute]:
111
108
  """Returns dictionary of all nested Attributes"""
112
109
  assert self.has_nested
113
110
  return self._nested
114
111
 
115
- def get_nested_by_name(self, key: str) -> "Attribute":
112
+ def get_nested_by_name(self, key: str) -> Attribute:
116
113
  """Returns nested Attribute by specified name"""
117
114
  return self._nested[key]
118
115