fameio 1.8.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (46)
  1. CHANGELOG.md +204 -0
  2. fameio/scripts/__init__.py +8 -6
  3. fameio/scripts/__init__.py.license +3 -0
  4. fameio/scripts/convert_results.py +30 -34
  5. fameio/scripts/convert_results.py.license +3 -0
  6. fameio/scripts/make_config.py +13 -16
  7. fameio/scripts/make_config.py.license +3 -0
  8. fameio/source/cli/__init__.py +3 -0
  9. fameio/source/cli/convert_results.py +75 -0
  10. fameio/source/cli/make_config.py +62 -0
  11. fameio/source/cli/options.py +59 -0
  12. fameio/source/cli/parser.py +238 -0
  13. fameio/source/loader.py +10 -11
  14. fameio/source/logs.py +49 -25
  15. fameio/source/results/conversion.py +11 -13
  16. fameio/source/results/csv_writer.py +16 -5
  17. fameio/source/results/data_transformer.py +3 -2
  18. fameio/source/results/input_dao.py +163 -0
  19. fameio/source/results/reader.py +25 -14
  20. fameio/source/results/yaml_writer.py +28 -0
  21. fameio/source/scenario/agent.py +56 -39
  22. fameio/source/scenario/attribute.py +9 -12
  23. fameio/source/scenario/contract.py +55 -40
  24. fameio/source/scenario/exception.py +11 -9
  25. fameio/source/scenario/generalproperties.py +11 -17
  26. fameio/source/scenario/scenario.py +19 -14
  27. fameio/source/schema/agenttype.py +75 -27
  28. fameio/source/schema/attribute.py +8 -7
  29. fameio/source/schema/schema.py +24 -11
  30. fameio/source/series.py +146 -25
  31. fameio/source/time.py +8 -8
  32. fameio/source/tools.py +13 -2
  33. fameio/source/validator.py +138 -58
  34. fameio/source/writer.py +108 -112
  35. fameio-2.0.0.dist-info/LICENSES/Apache-2.0.txt +178 -0
  36. fameio-2.0.0.dist-info/LICENSES/CC-BY-4.0.txt +395 -0
  37. fameio-2.0.0.dist-info/LICENSES/CC0-1.0.txt +121 -0
  38. {fameio-1.8.2.dist-info → fameio-2.0.0.dist-info}/METADATA +694 -660
  39. fameio-2.0.0.dist-info/RECORD +52 -0
  40. {fameio-1.8.2.dist-info → fameio-2.0.0.dist-info}/WHEEL +1 -2
  41. fameio-2.0.0.dist-info/entry_points.txt +4 -0
  42. fameio/source/cli.py +0 -253
  43. fameio-1.8.2.dist-info/RECORD +0 -40
  44. fameio-1.8.2.dist-info/entry_points.txt +0 -3
  45. fameio-1.8.2.dist-info/top_level.txt +0 -1
  46. {fameio-1.8.2.dist-info → fameio-2.0.0.dist-info}/LICENSE.txt +0 -0
fameio/source/series.py CHANGED
@@ -1,48 +1,169 @@
 # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
+import math
+import os
+from enum import Enum, auto
+from pathlib import Path
+from typing import Dict, Union, Tuple, Any, List

-from typing import Dict, Union
+import pandas as pd
+from fameprotobuf.InputFile_pb2 import InputData

-from fameio.source.logs import log_error_and_raise
+from fameio.source import PathResolver
+from fameio.source.logs import log_error_and_raise, logger
+from fameio.source.time import ConversionException, FameTime
+from fameio.source.tools import clean_up_file_name


-class SeriesManagementException(Exception):
+class TimeSeriesException(Exception):
     """Indicates that an error occurred during management of time series"""

     pass


+class Entry(Enum):
+    ID = auto()
+    NAME = auto()
+    DATA = auto()
+
+
 class TimeSeriesManager:
     """Manages matching of files to time series ids and their protobuf representation"""

-    _ALREADY_REGISTERED = "File '{}' was already registered."
+    _TIMESERIES_RECONSTRUCTION_PATH = "./timeseries/"
+    _CONSTANT_IDENTIFIER = "Constant value: {}"
+    _KEY_ROW_TIME = "timeStep"
+    _KEY_ROW_VALUE = "value"
+
+    _ERR_FILE_NOT_FOUND = "Cannot find Timeseries file '{}'"
+    _ERR_CORRUPT_TIME_SERIES_KEY = "TimeSeries file '{}' corrupt: At least one entry in first column isn't a timestamp."
+    _ERR_CORRUPT_TIME_SERIES_VALUE = "TimeSeries file '{}' corrupt: At least one entry in value column isn't numeric."
+    _ERR_NON_NUMERIC = "Values in TimeSeries must be numeric but was: '{}'"
+    _ERR_NAN_VALUE = "Values in TimeSeries must not be missing or NaN."
+    _ERR_UNREGISTERED_SERIES = "No timeseries registered with identifier '{}' - was the Scenario validated?"
+    _WARN_NO_DATA = "No timeseries stored in timeseries manager. Double check if you expected timeseries."

-    def __init__(self):
+    def __init__(self, path_resolver: PathResolver = PathResolver()) -> None:
+        self._path_resolver = path_resolver
         self._id_count = -1
-        self._ids_of_time_series = {}
+        self._series_by_id: Dict[Union[str, int, float], Dict[Entry, Any]] = {}

-    def _get_time_series_id(self, name: Union[str, int, float]) -> int:
-        """Returns the id assigned to the given file name"""
-        return self._ids_of_time_series.get(name)
+    def register_and_validate(self, identifier: Union[str, int, float]) -> None:
+        """
+        Registers given timeseries `identifier` and validates associated timeseries

-    def _time_series_is_registered(self, name: Union[str, int, float]) -> bool:
-        """Returns True if the file is already registered"""
-        return name in self._ids_of_time_series.keys()
+        Args:
+            identifier: to be registered - either a single numeric value or a string pointing to a timeseries file

-    def _register_time_series(self, name: Union[str, int, float]) -> None:
-        """Assigns an id to the given file or raises an Exception if the file is already registered"""
-        if not self._time_series_is_registered(name):
-            self._id_count += 1
-            self._ids_of_time_series[name] = self._id_count
+        Raises:
+            TimeSeriesException: if file was not found, ill-formatted, or value was invalid
+        """
+        if not self._time_series_is_registered(identifier):
+            self._register_time_series(identifier)
+
+    def _time_series_is_registered(self, identifier: Union[str, int, float]) -> bool:
+        """Returns True if the value was already registered"""
+        return identifier in self._series_by_id.keys()
+
+    def _register_time_series(self, identifier: Union[str, int, float]) -> None:
+        """Assigns an id to the given `identifier` and loads the time series into a dataframe"""
+        self._id_count += 1
+        name, series = self._get_name_and_dataframe(identifier)
+        self._series_by_id[identifier] = {Entry.ID: self._id_count, Entry.NAME: name, Entry.DATA: series}
+
+    def _get_name_and_dataframe(self, identifier: Union[str, int, float]) -> Tuple[str, pd.DataFrame]:
+        """Returns name and DataFrame containing the series obtained from the given `identifier`"""
+        if isinstance(identifier, str):
+            series_path = self._path_resolver.resolve_series_file_path(Path(identifier).as_posix())
+            if series_path and os.path.exists(series_path):
+                data = pd.read_csv(series_path, sep=";", header=None, comment="#")
+                try:
+                    return identifier, self._check_and_convert_series(data)
+                except TypeError as e:
+                    log_error_and_raise(TimeSeriesException(self._ERR_CORRUPT_TIME_SERIES_VALUE.format(identifier), e))
+                except ConversionException:
+                    log_error_and_raise(TimeSeriesException(self._ERR_CORRUPT_TIME_SERIES_KEY.format(identifier)))
+            else:
+                log_error_and_raise(TimeSeriesException(self._ERR_FILE_NOT_FOUND.format(identifier)))
         else:
-            log_error_and_raise(SeriesManagementException(TimeSeriesManager._ALREADY_REGISTERED.format(name)))
+            return self._create_timeseries_from_value(identifier)
+
+    def _check_and_convert_series(self, data: pd.DataFrame) -> pd.DataFrame:
+        """Ensures validity of time series and convert to required format for writing to disk"""
+        data = data.apply(
+            lambda r: [FameTime.convert_string_if_is_datetime(r[0]), self._assert_valid(r[1])],
+            axis=1,
+            result_type="expand",
+        )
+        return data
+
+    @staticmethod
+    def _assert_valid(value: Any) -> float:
+        """Returns the given `value` if it is a numeric value other than NaN"""
+        try:
+            value = float(value)
+        except ValueError:
+            log_error_and_raise(TypeError(TimeSeriesManager._ERR_NON_NUMERIC.format(value)))
+        if math.isnan(value):
+            log_error_and_raise(TypeError(TimeSeriesManager._ERR_NAN_VALUE))
+        return value

-    def save_get_time_series_id(self, name: Union[str, int, float]) -> int:
-        """Returns the id of the time series file name - if the file is not yet registered, assigns an id"""
-        if not self._time_series_is_registered(name):
-            self._register_time_series(name)
-        return self._get_time_series_id(name)
+    @staticmethod
+    def _create_timeseries_from_value(value: Union[int, float]) -> Tuple[str, pd.DataFrame]:
+        """Returns name and dataframe for a new static timeseries created from the given `value`"""
+        if math.isnan(value):
+            log_error_and_raise(TimeSeriesException(TimeSeriesManager._ERR_NAN_VALUE))
+        return TimeSeriesManager._CONSTANT_IDENTIFIER.format(value), pd.DataFrame({0: [0], 1: [value]})
+
+    def get_series_id_by_identifier(self, identifier: Union[str, int, float]) -> int:
+        """
+        Returns id for a previously stored time series by given `identifier`
+
+        Args:
+            identifier: to get the unique ID for
+
+        Returns:
+            unique ID for the given identifier
+
+        Raises:
+            TimeSeriesException: if identifier was not yet registered
+        """
+        if not self._time_series_is_registered(identifier):
+            log_error_and_raise(TimeSeriesException(self._ERR_UNREGISTERED_SERIES.format(identifier)))
+        return self._series_by_id.get(identifier)[Entry.ID]
+
+    def get_all_series(self) -> List[Tuple[int, str, pd.DataFrame]]:
+        """Returns iterator over id, name and dataframe of all stored series"""
+        if len(self._series_by_id) == 0:
+            logger().warning(self._WARN_NO_DATA)
+        return [(v[Entry.ID], v[Entry.NAME], v[Entry.DATA]) for v in self._series_by_id.values()]
+
+    def reconstruct_time_series(self, timeseries: List[InputData.TimeSeriesDao]) -> None:
+        """Reconstructs and stores time series from given list of `timeseries_dao`"""
+        for one_series in timeseries:
+            self._id_count += 1
+            reconstructed = {Entry.ID: one_series.seriesId}
+            if len(one_series.row) == 1:
+                reconstructed[Entry.NAME] = one_series.row[0].value
+                reconstructed[Entry.DATA] = None
+            else:
+                reconstructed[Entry.NAME] = self._get_cleaned_file_name(one_series.seriesName)
+                reconstructed[Entry.DATA] = pd.DataFrame(
+                    [{self._KEY_ROW_TIME: item.timeStep, self._KEY_ROW_VALUE: item.value} for item in one_series.row]
+                )
+            self._series_by_id[one_series.seriesId] = reconstructed
+
+    def _get_cleaned_file_name(self, timeseries_name: str):
+        if timeseries_name.lower().endswith(".csv"):
+            filename = Path(timeseries_name).name
+        else:
+            filename = clean_up_file_name(timeseries_name) + ".csv"
+        return str(Path(self._TIMESERIES_RECONSTRUCTION_PATH, filename))

-    def get_ids_of_series_by_name(self) -> Dict[Union[str, int, float], int]:
-        return self._ids_of_time_series
+    def get_reconstructed_series_by_id(self, series_id: int) -> str:
+        """Return name or path for given `series_id` if series these are identified by their number.
+        Use this only if series were added via `reconstruct_time_series`"""
+        if series_id < 0 or series_id > self._id_count:
+            log_error_and_raise(TimeSeriesException(self._ERR_UNREGISTERED_SERIES.format(series_id)))
+        return self._series_by_id[series_id][Entry.NAME]
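Editor's note: a minimal usage sketch (not part of the package) of the reworked TimeSeriesManager API above; only the imported names and methods come from the diff, the values and print statements are illustrative.

from fameio.source.series import TimeSeriesManager, TimeSeriesException

manager = TimeSeriesManager()  # uses the default PathResolver; pass your own to resolve CSV paths
manager.register_and_validate(42.0)  # numeric identifiers become constant single-row series
print(manager.get_series_id_by_identifier(42.0))  # -> 0, ids are assigned in registration order

for series_id, name, dataframe in manager.get_all_series():
    print(series_id, name, dataframe.shape)  # name is "Constant value: 42.0" for the entry above

try:
    manager.register_and_validate(float("nan"))  # NaN identifiers are rejected
except TimeSeriesException as error:
    print("rejected:", error)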
fameio/source/time.py CHANGED
@@ -5,7 +5,7 @@
 import datetime as dt
 import math
 import re
-from enum import Enum
+from enum import Enum, auto
 from typing import Union

 from fameio.source.logs import log_error_and_raise
@@ -25,13 +25,13 @@ class ConversionException(Exception):
 class TimeUnit(Enum):
     """Time units defined in FAME"""

-    SECONDS = 0
-    MINUTES = 1
-    HOURS = 2
-    DAYS = 3
-    WEEKS = 4
-    MONTHS = 5
-    YEARS = 6
+    SECONDS = auto()
+    MINUTES = auto()
+    HOURS = auto()
+    DAYS = auto()
+    WEEKS = auto()
+    MONTHS = auto()
+    YEARS = auto()


 class Constants:
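Editor's note: `auto()` numbers enum members starting at 1, so the raw `.value` of each TimeUnit member shifts by one compared to the old explicit constants; code comparing against the old raw integers would need to compare members instead. A short illustration of the standard-library behaviour (plain Python, no fameio required):

from enum import Enum, auto

class TimeUnit(Enum):
    SECONDS = auto()
    MINUTES = auto()

assert TimeUnit.SECONDS.value == 1  # was 0 with the old explicit assignment
assert TimeUnit["MINUTES"] is TimeUnit.MINUTES  # lookups by name are unaffected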
fameio/source/tools.py CHANGED
@@ -1,8 +1,8 @@
 # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-
-from typing import Any, Dict
+from pathlib import Path
+from typing import Any, Dict, Union


 def keys_to_lower(dictionary: Dict[str, Any]) -> Dict[str, Any]:
@@ -16,3 +16,14 @@ def ensure_is_list(value: Any) -> list:
         return value
     else:
         return [value]
+
+
+def ensure_path_exists(path: Union[Path, str]):
+    """Creates a specified path if not already existent"""
+    Path(path).mkdir(parents=True, exist_ok=True)
+
+
+def clean_up_file_name(name: str) -> str:
+    """Returns given `name` with replacements defined in `replace_map`"""
+    replace_map = {" ": "_", ":": "_", "/": "-"}
+    return name.translate(str.maketrans(replace_map))
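Editor's note: a quick sketch of the two new helpers in fameio/source/tools.py; the path and the series name are made up for illustration.

from fameio.source.tools import clean_up_file_name, ensure_path_exists

ensure_path_exists("./output/timeseries")  # creates missing parent folders, no error if already present
# Spaces and colons become underscores, slashes become dashes:
assert clean_up_file_name("my series: 2020/2021") == "my_series__2020-2021"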
fameio/source/validator.py CHANGED
@@ -2,15 +2,17 @@
 #
 # SPDX-License-Identifier: Apache-2.0

-import logging as log
+import math
 from collections import Counter
 from typing import Any, Dict, List

-from fameio.source.logs import log_error_and_raise
+from fameio.source import PathResolver
+from fameio.source.logs import log_error_and_raise, logger
 from fameio.source.scenario import Agent, Attribute, Contract, Scenario
 from fameio.source.schema.agenttype import AgentType
 from fameio.source.schema.attribute import AttributeSpecs, AttributeType
 from fameio.source.schema.schema import Schema
+from fameio.source.series import TimeSeriesManager, TimeSeriesException
 from fameio.source.time import FameTime


@@ -36,6 +38,77 @@ class SchemaValidator:
     _DEFAULT_IGNORED = "Optional Attribute: '{}': not specified - provided Default ignored for optional Attributes."
     _OPTIONAL_MISSING = "Optional Attribute: '{}': not specified."
     _IS_NO_LIST = "Attribute '{}' is list but assigned value '{}' is not a list."
+    _TIME_SERIES_INVALID = "Timeseries at '{}' is invalid."
+    _MISSING_CONTRACTS_FOR_AGENTS = "No contracts defined for Agent '{}' of type '{}'"
+
+    @staticmethod
+    def validate_scenario_and_timeseries(
+        scenario: Scenario, path_resolver: PathResolver = PathResolver()
+    ) -> TimeSeriesManager:
+        """
+        Validates the given `scenario` and its timeseries using given `path_resolver`
+        Raises an exception if schema requirements are not met or timeseries data are erroneous.
+
+        Args:
+            scenario: to be validated against the encompassed schema
+            path_resolver: to resolve paths of timeseries
+
+        Returns:
+            a new TimeSeriesManager initialised with validated time series from scenario
+        Raises:
+            ValidationException: if an error in the scenario or in timeseries is spotted
+        """
+        schema = scenario.schema
+        agents = scenario.agents
+        timeseries_manager = TimeSeriesManager(path_resolver)
+
+        SchemaValidator.ensure_unique_agent_ids(agents)
+        for agent in agents:
+            SchemaValidator.ensure_agent_and_timeseries_are_valid(agent, schema, timeseries_manager)
+
+        agent_types_by_id = {agent.id: agent.type_name for agent in agents}
+        for contract in scenario.contracts:
+            SchemaValidator.ensure_is_valid_contract(contract, schema, agent_types_by_id)
+
+        return timeseries_manager
+
+    @staticmethod
+    def ensure_unique_agent_ids(agents: List[Agent]) -> None:
+        """Raises exception if any id for given `agents` is not unique"""
+        list_of_ids = [agent.id for agent in agents]
+        non_unique_ids = [agent_id for agent_id, count in Counter(list_of_ids).items() if count > 1]
+        if non_unique_ids:
+            log_error_and_raise(ValidationException(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
+
+    @staticmethod
+    def ensure_agent_and_timeseries_are_valid(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager):
+        """Validates given `agent` against `schema` plus loads and validates its timeseries"""
+        SchemaValidator.ensure_agent_type_in_schema(agent, schema)
+        SchemaValidator.ensure_is_valid_agent(agent, schema)
+        SchemaValidator.load_and_validate_timeseries(agent, schema, timeseries_manager)
+
+    @staticmethod
+    def ensure_agent_type_in_schema(agent: Agent, schema: Schema) -> None:
+        """Raises exception if type for given `agent` is not specified in given `schema`"""
+        if agent.type_name not in schema.agent_types:
+            log_error_and_raise(ValidationException(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
+
+    @staticmethod
+    def ensure_is_valid_agent(agent: Agent, schema: Schema) -> None:
+        """Raises an exception if given `agent` does not meet the specified `schema` requirements"""
+        scenario_attributes = agent.attributes
+        schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
+        SchemaValidator._ensure_mandatory_present(scenario_attributes, schema_attributes)
+        SchemaValidator._ensure_attributes_exist(scenario_attributes, schema_attributes)
+        SchemaValidator._ensure_value_and_type_match(scenario_attributes, schema_attributes)
+
+    @staticmethod
+    def _get_agent(schema: Schema, name: str) -> AgentType:
+        """Returns agent specified by `name` or raises Exception if this agent is not present in given `schema`"""
+        if name in schema.agent_types:
+            return schema.agent_types[name]
+        else:
+            log_error_and_raise(ValidationException(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))

     @staticmethod
     def _ensure_mandatory_present(attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs]) -> None:
@@ -47,12 +120,14 @@ class SchemaValidator:
             if name not in attributes:
                 if specification.is_mandatory:
                     if not specification.has_default_value:
-                        log_error_and_raise(ValidationException(SchemaValidator._ATTRIBUTE_MISSING.format(name)))
+                        log_error_and_raise(
+                            ValidationException(SchemaValidator._ATTRIBUTE_MISSING.format(specification.full_name))
+                        )
                 else:
                     if specification.has_default_value:
-                        log.warning(SchemaValidator._DEFAULT_IGNORED.format(name))
+                        logger().warning(SchemaValidator._DEFAULT_IGNORED.format(specification.full_name))
                     else:
-                        log.info(SchemaValidator._OPTIONAL_MISSING.format(name))
+                        logger().info(SchemaValidator._OPTIONAL_MISSING.format(specification.full_name))
             if name in attributes and specification.has_nested_attributes:
                 attribute = attributes[name]
                 if specification.is_list:
@@ -61,23 +136,6 @@ class SchemaValidator:
                 else:
                     SchemaValidator._ensure_mandatory_present(attribute.nested, specification.nested_attributes)

-    @staticmethod
-    def _get_agent(schema: Schema, name: str) -> AgentType:
-        """Returns agent specified by `name` or raises Exception if this agent is not present in given `schema`"""
-        if name in schema.agent_types:
-            return schema.agent_types[name]
-        else:
-            log_error_and_raise(ValidationException(SchemaValidator._AGENT_TYPE_UNKNOWN.format(name)))
-
-    @staticmethod
-    def ensure_is_valid_agent(agent: Agent, schema: Schema) -> None:
-        """Raises an exception if given `agent` does not meet the specified `schema` requirements"""
-        scenario_attributes = agent.attributes
-        schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
-        SchemaValidator._ensure_mandatory_present(scenario_attributes, schema_attributes)
-        SchemaValidator._ensure_attributes_exist(scenario_attributes, schema_attributes)
-        SchemaValidator._ensure_value_matches_type(scenario_attributes, schema_attributes)
-

     @staticmethod
     def _ensure_attributes_exist(attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs]) -> None:
@@ -93,7 +151,9 @@ class SchemaValidator:
                     SchemaValidator._ensure_attributes_exist(entry, specification.nested_attributes)

     @staticmethod
-    def _ensure_value_matches_type(attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs]) -> None:
+    def _ensure_value_and_type_match(
+        attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs]
+    ) -> None:
         """Raises exception if in given list of `attributes` its value does not match associated type `specification`"""
         for name, attribute in attributes.items():
             specification = specifications[name]
@@ -104,14 +164,13 @@ class SchemaValidator:
                     message = SchemaValidator._INCOMPATIBLE.format(value, type_spec, specification.full_name)
                     log_error_and_raise(ValidationException(message))
                 if not SchemaValidator._is_allowed_value(specification, value):
-                    log_error_and_raise(
-                        ValidationException(SchemaValidator._DISALLOWED.format(value, specification.full_name))
-                    )
+                    message = SchemaValidator._DISALLOWED.format(value, specification.full_name)
+                    log_error_and_raise(ValidationException(message))
             if attribute.has_nested:
-                SchemaValidator._ensure_value_matches_type(attribute.nested, specification.nested_attributes)
+                SchemaValidator._ensure_value_and_type_match(attribute.nested, specification.nested_attributes)
             if attribute.has_nested_list:
                 for entry in attribute.nested_list:
-                    SchemaValidator._ensure_value_matches_type(entry, specification.nested_attributes)
+                    SchemaValidator._ensure_value_and_type_match(entry, specification.nested_attributes)

     @staticmethod
     def _is_compatible(specification: AttributeSpecs, value_or_values: Any) -> bool:
@@ -120,7 +179,7 @@ class SchemaValidator:
         attribute_type = specification.attr_type
         if specification.is_list:
             if not is_list:
-                log.warning(SchemaValidator._IS_NO_LIST.format(specification.full_name, value_or_values))
+                logger().warning(SchemaValidator._IS_NO_LIST.format(specification.full_name, value_or_values))
                 return SchemaValidator._is_compatible_value(attribute_type, value_or_values)
             for value in value_or_values:
                 if not SchemaValidator._is_compatible_value(attribute_type, value):
@@ -131,7 +190,7 @@ class SchemaValidator:

     @staticmethod
     def _is_compatible_value(attribute_type: AttributeType, value) -> bool:
-        """Returns True if given single value is compatible to specified `attribute_type`"""
+        """Returns True if given single value is compatible to specified `attribute_type` and is not a NaN float"""
         if attribute_type is AttributeType.INTEGER:
             if isinstance(value, int):
                 return -2147483648 < value < 2147483647
@@ -139,13 +198,13 @@ class SchemaValidator:
         if attribute_type is AttributeType.LONG:
             return isinstance(value, int)
         elif attribute_type is AttributeType.DOUBLE:
-            return isinstance(value, (int, float))
+            return isinstance(value, (int, float)) and not math.isnan(value)
         elif attribute_type in (AttributeType.ENUM, AttributeType.STRING):
             return isinstance(value, str)
         elif attribute_type is AttributeType.TIME_STAMP:
             return FameTime.is_fame_time_compatible(value)
         elif attribute_type is AttributeType.TIME_SERIES:
-            return isinstance(value, (str, int, float))
+            return isinstance(value, (str, int)) or (isinstance(value, float) and not math.isnan(value))
         else:
             log_error_and_raise(ValidationException(SchemaValidator._TYPE_NOT_IMPLEMENTED.format(attribute_type)))

@@ -157,6 +216,44 @@ class SchemaValidator:
         else:
             return value in attribute.values

+    @staticmethod
+    def load_and_validate_timeseries(agent: Agent, schema: Schema, timeseries_manager: TimeSeriesManager) -> None:
+        """
+        Loads all timeseries specified in given `schema` of given `agent` into given `timeseries_manager`
+
+        Args:
+            agent: definition in scenario
+            schema: schema encompassed in scenario
+            timeseries_manager: to be filled with timeseries
+
+        Raises:
+            ValidationException: if timeseries is not found, ill-formatted or invalid
+        """
+        scenario_attributes = agent.attributes
+        schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
+        SchemaValidator._ensure_valid_timeseries(scenario_attributes, schema_attributes, timeseries_manager)
+
+    @staticmethod
+    def _ensure_valid_timeseries(
+        attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs], manager: TimeSeriesManager
+    ) -> None:
+        """Recursively searches for time_series in agent attributes and registers them at given `manager`"""
+        for name, attribute in attributes.items():
+            specification = specifications[name]
+            if attribute.has_value:
+                attribute_type = specification.attr_type
+                if attribute_type is AttributeType.TIME_SERIES:
+                    try:
+                        manager.register_and_validate(attribute.value)
+                    except TimeSeriesException as e:
+                        message = SchemaValidator._TIME_SERIES_INVALID.format(specification.full_name)
+                        log_error_and_raise(ValidationException(message, e))
+            if attribute.has_nested:
+                SchemaValidator._ensure_valid_timeseries(attribute.nested, specification.nested_attributes, manager)
+            if attribute.has_nested_list:
+                for entry in attribute.nested_list:
+                    SchemaValidator._ensure_valid_timeseries(entry, specification.nested_attributes, manager)
+
     @staticmethod
     def ensure_is_valid_contract(contract: Contract, schema: Schema, agent_types_by_id: Dict[int, str]) -> None:
         """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
@@ -174,30 +271,13 @@ class SchemaValidator:
             log_error_and_raise(ValidationException(SchemaValidator._PRODUCT_MISSING.format(product, sender_type_name)))

     @staticmethod
-    def ensure_unique_agent_ids(agents: List[Agent]) -> None:
-        """Raises exception if any id for given `agents` is not unique"""
-        list_of_ids = [agent.id for agent in agents]
-        non_unique_ids = [agent_id for agent_id, count in Counter(list_of_ids).items() if count > 1]
-        if non_unique_ids:
-            log_error_and_raise(ValidationException(SchemaValidator._AGENT_ID_NOT_UNIQUE.format(non_unique_ids)))
+    def check_agents_have_contracts(scenario: Scenario) -> None:
+        """Raises warning for each agent without any assigned contract"""
+        senders = [contract.sender_id for contract in scenario.contracts]
+        receivers = [contract.receiver_id for contract in scenario.contracts]
+        active_agents = set(senders + receivers)
+        inactive_agents = {agent.id: agent.type_name for agent in scenario.agents if agent.id not in active_agents}

-    @staticmethod
-    def ensure_agent_type_in_schema(agent: Agent, schema: Schema) -> None:
-        """Raises exception if type for given `agent` is not specified in given `schema`"""
-        if agent.type_name not in schema.agent_types:
-            log_error_and_raise(ValidationException(SchemaValidator._AGENT_TYPE_UNKNOWN.format(agent.type_name)))
-
-    @staticmethod
-    def ensure_is_valid_scenario(scenario: Scenario) -> None:
-        """Raises exception if given `scenario` does not meet its own schema requirements"""
-        schema = scenario.schema
-        agents = scenario.agents
-
-        SchemaValidator.ensure_unique_agent_ids(agents)
-        for agent in agents:
-            SchemaValidator.ensure_agent_type_in_schema(agent, schema)
-            SchemaValidator.ensure_is_valid_agent(agent, schema)
-
-        agent_types_by_id = {agent.id: agent.type_name for agent in agents}
-        for contract in scenario.contracts:
-            SchemaValidator.ensure_is_valid_contract(contract, schema, agent_types_by_id)
+        if inactive_agents:
+            for agent_id, agent_name in inactive_agents.items():
+                logger().warning(SchemaValidator._MISSING_CONTRACTS_FOR_AGENTS.format(agent_id, agent_name))
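Editor's note: a hedged sketch of calling the new validation entry point that replaces `ensure_is_valid_scenario`; the `scenario` object is assumed to have been loaded elsewhere (e.g. from a scenario YAML), and the error handling is illustrative, not prescribed by the package.

from fameio.source.validator import SchemaValidator, ValidationException

try:
    # Returns a TimeSeriesManager holding all validated time series of the scenario
    timeseries_manager = SchemaValidator.validate_scenario_and_timeseries(scenario)
    SchemaValidator.check_agents_have_contracts(scenario)  # logs warnings only, never raises
except ValidationException as error:
    raise SystemExit(f"Scenario is invalid: {error}")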