fameio 2.2.0__py3-none-any.whl → 2.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +18 -5
- fameio/scripts/convert_results.py +14 -4
- fameio/source/cli/convert_results.py +1 -1
- fameio/source/cli/parser.py +32 -12
- fameio/source/metadata.py +32 -0
- fameio/source/results/conversion.py +1 -2
- fameio/source/results/csv_writer.py +9 -2
- fameio/source/scenario/attribute.py +4 -2
- fameio/source/scenario/fameiofactory.py +6 -1
- fameio/source/scenario/scenario.py +20 -3
- fameio/source/scenario/stringset.py +51 -0
- fameio/source/schema/attribute.py +2 -1
- fameio/source/series.py +5 -0
- fameio/source/tools.py +1 -1
- fameio/source/validator.py +50 -2
- fameio/source/writer.py +4 -4
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/METADATA +48 -10
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/RECORD +24 -22
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/LICENSE.txt +0 -0
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/LICENSES/Apache-2.0.txt +0 -0
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/LICENSES/CC0-1.0.txt +0 -0
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/WHEEL +0 -0
- {fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/entry_points.txt +0 -0
CHANGELOG.md
CHANGED
@@ -3,9 +3,22 @@
 SPDX-License-Identifier: CC0-1.0 -->

 # Changelog
-## [2.
+## [2.3.1](https://gitlab.com/fame-framework/fame-io/-/tags/v2.3.1) - 2024-08-26
+### Fixed
+- Fix ignored default values of `convert_results` for `merge-times` arguments #211 (@dlr-cjs, dlr_fn)
+
+## [2.3.0](https://gitlab.com/fame-framework/fame-io/-/tags/v2.3.0) - 2024-08-12
+### Added
+- New attribute type `string_set` #175 (@dlr_fn @dlr-cjs)
+- Add warning if a timeseries file has additional, non-empty columns #155 (@LeonardWilleke)
+- Ensure `CHANGELOG.md` is updated in automated testing pipeline #207 (@dlr_fn)
+
+### Fixed
+- ConvertFameResults: Fix bug on `merge-times` when `--memory-saving` is active #201 (@dlr_fn @dlr-cjs)
+
+## [2.2.0](https://gitlab.com/fame-framework/fame-io/-/tags/v2.2.0) - 2024-05-28
 ### Changed
-- New command line option `-enc --encoding` to change encoding when reading yaml-files
+- New command line option `-enc --encoding` to change encoding when reading yaml-files #170 (@dlr-cjs)
 - Improve error message when timeseries is not found and is number string #178 (@dlr-cjs)

 ### Added
@@ -14,7 +27,7 @@ SPDX-License-Identifier: CC0-1.0 -->

 ## [2.1.1](https://gitlab.com/fame-framework/fame-io/-/tags/v2.1.1) - 2024-05-28
 ### Fixed
-- ConvertFameResults: Fix crash on complex column conversion if Agent has no simple columns #204 (@dlr_fn @dlr-cjs)
+- ConvertFameResults: Fix crash on complex column conversion if Agent has no simple columns #204 (@dlr_fn @dlr-cjs)

 ## [2.1.0](https://gitlab.com/fame-framework/fame-io/-/tags/v2.1.0) - 2024-05-11
 ### Changed
@@ -30,8 +43,8 @@ SPDX-License-Identifier: CC0-1.0 -->
 - Fix potential duplicates in logging #191 (@dlr_fn @dlr-cjs)

 ## [2.0.1](https://gitlab.com/fame-framework/fame-io/-/tags/v2.0.1) - 2024-04-05
-###
-- Fix potential missing columns when memory-saving-mode `-m` is enabled #194 (@dlr_fn @dlr-cjs)
+### Fixed
+- Fix potential missing columns when memory-saving-mode `-m` is enabled #194 (@dlr_fn @dlr-cjs)

 ### Remove
 - Remove convert results option `-cc MERGE` #194 (@dlr_fn @dlr-cjs)
fameio/scripts/convert_results.py
CHANGED
@@ -1,8 +1,9 @@
 #!/usr/bin/env python
-
 import sys
 from pathlib import Path

+import pandas as pd
+
 from fameio.source.cli.convert_results import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
 from fameio.source.cli.options import Options
 from fameio.source.cli.parser import update_default_config
@@ -10,7 +11,7 @@ from fameio.source.logs import log_and_raise_critical, fameio_logger, log
 from fameio.source.results.agent_type import AgentTypeLog
 from fameio.source.results.conversion import apply_time_option, apply_time_merging
 from fameio.source.results.csv_writer import CsvWriter
-from fameio.source.results.data_transformer import DataTransformer
+from fameio.source.results.data_transformer import DataTransformer, INDEX
 from fameio.source.results.input_dao import InputDao
 from fameio.source.results.output_dao import OutputDAO
 from fameio.source.results.reader import Reader
@@ -45,8 +46,9 @@ def run(config: dict = None) -> None:
         for agent_name in output.get_sorted_agents_to_extract():
             log().debug(f"Extracting data for {agent_name}...")
             data_frames = output.get_agent_data(agent_name, data_transformer)
-
-
+            if not config[Options.MEMORY_SAVING]:
+                apply_time_merging(data_frames, config[Options.TIME_MERGING])
+                apply_time_option(data_frames, config[Options.TIME])
             log().debug(f"Writing data for {agent_name}...")
             output_writer.write_to_files(agent_name, data_frames)

@@ -57,6 +59,14 @@ def run(config: dict = None) -> None:
             series_writer.write_time_series_to_disk(timeseries)
             data_to_yaml_file(scenario.to_dict(), Path(config[Options.OUTPUT], "./recovered/scenario.yaml"))

+        if config[Options.MEMORY_SAVING]:
+            written_files = output_writer.pop_all_file_paths()
+            for agent_name, file_path in written_files.items():
+                parsed_data = {None: pd.read_csv(file_path, sep=";", index_col=INDEX)}
+                apply_time_merging(parsed_data, config[Options.TIME_MERGING])
+                apply_time_option(parsed_data, config[Options.TIME])
+                output_writer.write_to_files(agent_name, parsed_data)
+
         log().info("Data conversion completed.")
     except MemoryError:
         log_and_raise_critical(ERR_MEMORY_SEVERE if Options.MEMORY_SAVING else ERR_MEMORY_ERROR)
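For orientation, a hedged sketch of how the reworked memory-saving path above could be driven from Python. The import path and the `DEFAULT_CONFIG` alias follow the imports shown in this hunk, but calling the script's `run()` directly like this is an assumption, not behaviour documented by the wheel, and the option selecting the input file is left out because it is not part of this diff.

```python
# Sketch only: option keys limited to those visible in the hunks above.
from fameio.scripts.convert_results import run, DEFAULT_CONFIG
from fameio.source.cli.options import Options

config = dict(DEFAULT_CONFIG)
config[Options.MEMORY_SAVING] = True  # agents are written first, then re-read and post-processed
config[Options.TIME_MERGING] = {}     # empty mapping: no TimeStep merging on the second pass
run(config)
```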
fameio/source/cli/convert_results.py
CHANGED
@@ -73,7 +73,7 @@ def _prepare_parser(defaults: Optional[Dict[Options, Any]]) -> argparse.Argument
     add_memory_saving_argument(parser, _get_default(defaults, Options.MEMORY_SAVING))
     add_resolve_complex_argument(parser, _get_default(defaults, Options.RESOLVE_COMPLEX_FIELD))
     add_time_argument(parser, _get_default(defaults, Options.TIME))
-    add_merge_time_parser(parser)
+    add_merge_time_parser(parser, _get_default(defaults, Options.TIME_MERGING))
     add_inputs_recovery_argument(parser, _get_default(defaults, Options.INPUT_RECOVERY))

     return parser
fameio/source/cli/parser.py
CHANGED
@@ -138,25 +138,40 @@ def add_time_argument(parser: ArgumentParser, default_value: Union[TimeOptions,
     )


-def add_merge_time_parser(parser: ArgumentParser) -> None:
-    """
-    subparser
+def add_merge_time_parser(parser: ArgumentParser, defaults: Optional[Dict[MergingOptions, int]]) -> None:
+    """
+    Adds subparser for merging of TimeSteps to given `parser`
+    If at least one valid time merging option is specified in given defaults, calling the subparser becomes mandatory
+    """
+    defaults = defaults if (defaults is not None) and (isinstance(defaults, dict)) else {}
+    if any([option in defaults.keys() for option in MergingOptions]):
+        subparser = parser.add_subparsers(dest="time_merging", required=True, help="Optional merging of TimeSteps")
+    else:
+        subparser = parser.add_subparsers(dest="time_merging", required=False, help="Optional merging of TimeSteps")
     group_parser = subparser.add_parser("merge-times")
-    add_focal_point_argument(group_parser)
-    add_steps_before_argument(group_parser)
-    add_steps_after_argument(group_parser)
+    add_focal_point_argument(group_parser, defaults.get(MergingOptions.FOCAL_POINT, None))
+    add_steps_before_argument(group_parser, defaults.get(MergingOptions.STEPS_BEFORE, None))
+    add_steps_after_argument(group_parser, defaults.get(MergingOptions.STEPS_AFTER, None))


-def add_focal_point_argument(parser: ArgumentParser) -> None:
+def add_focal_point_argument(parser: ArgumentParser, default_value: Optional[int]) -> None:
     """Adds `focal-point` argument to given `parser`"""
     help_text = "TimeStep on which `steps_before` earlier and `steps_after` later TimeSteps are merged on"
-
+    if default_value is not None:
+        parser.add_argument("-fp", "--focal-point", required=False, type=int, help=help_text, default=default_value)
+    else:
+        parser.add_argument("-fp", "--focal-point", required=True, type=int, help=help_text)


-def add_steps_before_argument(parser: ArgumentParser) -> None:
+def add_steps_before_argument(parser: ArgumentParser, default_value: Optional[int]) -> None:
     """Adds `steps-before` argument to given `parser`"""
     help_text = "Range of TimeSteps before the `focal-point` they get merged to"
-
+    if default_value is not None:
+        parser.add_argument(
+            "-sb", "--steps-before", required=False, type=_non_negative_int, help=help_text, default=default_value
+        )
+    else:
+        parser.add_argument("-sb", "--steps-before", required=True, type=_non_negative_int, help=help_text)


 def _non_negative_int(value: Any) -> int:
@@ -181,10 +196,15 @@ def _non_negative_int(value: Any) -> int:
     return value


-def add_steps_after_argument(parser: ArgumentParser) -> None:
+def add_steps_after_argument(parser: ArgumentParser, default_value: Optional[int]) -> None:
     """Adds `steps-after` argument to given `parser`"""
     help_text = "Range of TimeSteps after the `focal-point` they get merged to"
-
+    if default_value is not None:
+        parser.add_argument(
+            "-sa", "--steps-after", required=False, type=_non_negative_int, help=help_text, default=default_value
+        )
+    else:
+        parser.add_argument("-sa", "--steps-after", required=True, type=_non_negative_int, help=help_text)


 def add_inputs_recovery_argument(parser: ArgumentParser, default: bool) -> None:
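The defaults handling above leans on one argparse mechanism: a subparsers group can itself be required or optional, and arguments inside it can carry defaults. A self-contained sketch of that mechanism using plain `argparse` only (no fameio imports), mirroring `add_merge_time_parser()`:

```python
import argparse


def build_parser(has_focal_point_default: bool) -> argparse.ArgumentParser:
    """Mirrors add_merge_time_parser(): with a default present, the sub-command
    becomes mandatory while its argument turns optional (and vice versa)."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="time_merging", required=has_focal_point_default)
    merge = subparsers.add_parser("merge-times")
    if has_focal_point_default:
        merge.add_argument("-fp", "--focal-point", required=False, type=int, default=0)
    else:
        merge.add_argument("-fp", "--focal-point", required=True, type=int)
    return parser


# Without defaults, merging stays entirely optional on the command line:
print(build_parser(False).parse_args([]))              # Namespace(time_merging=None)
# With defaults, `merge-times` must be named, but -fp may be omitted:
print(build_parser(True).parse_args(["merge-times"]))  # Namespace(time_merging='merge-times', focal_point=0)
```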
fameio/source/metadata.py
ADDED
@@ -0,0 +1,32 @@
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+from abc import ABC
+from typing import Dict, Any
+
+from fameio.source.tools import keys_to_lower
+
+
+class Metadata(ABC):
+    """Hosts Metadata"""
+
+    _KEY_METADATA = "MetaData".lower()
+
+    def __init__(self):
+        self._metadata = {}
+
+    @property
+    def metadata(self) -> dict:
+        """Returns list of metadata or an empty list if no metadata are defined"""
+        return self._metadata
+
+    def _extract_metadata(self, definitions: Dict[str, Any]) -> None:
+        """If metadata is found in definitions, it is extracted and set"""
+        definitions = keys_to_lower(definitions)
+        if self._KEY_METADATA in definitions:
+            self._metadata = definitions[self._KEY_METADATA]
+
+    def _enrich_with_metadata(self, data: Dict) -> Dict:
+        """Returns data enriched with metadata field"""
+        data[self._KEY_METADATA] = self._metadata
+        return data
fameio/source/results/conversion.py
CHANGED
@@ -7,10 +7,9 @@ from typing import Dict, Optional

 import pandas as pd

-from fameio.source import FameTime
 from fameio.source.cli.options import TimeOptions, MergingOptions
 from fameio.source.logs import log_error_and_raise, log
-from fameio.source.time import ConversionException
+from fameio.source.time import ConversionException, FameTime

 _ERR_UNIMPLEMENTED = "Time conversion mode '{}' not implemented."

fameio/source/results/csv_writer.py
CHANGED
@@ -1,7 +1,6 @@
 # SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-
 from pathlib import Path
 from typing import Dict, Union

@@ -19,6 +18,8 @@ class CsvWriter:
     _INFO_USING_PATH = "Using specified output path: {}"
     _INFO_USING_DERIVED_PATH = "No output path specified - writing to new local folder: {}"

+    CSV_FILE_SUFFIX = ".csv"
+
     def __init__(self, config_output: Path, input_file_path: Path, single_export: bool) -> None:
         self._single_export = single_export
         self._output_folder = self._get_output_folder_name(config_output, input_file_path)
@@ -90,13 +91,19 @@ class CsvWriter:
         """Returns True if a file for given `identifier` was already written"""
         return identifier in self._files

+    def pop_all_file_paths(self) -> Dict[str, Path]:
+        """Clears all stored file paths and returns their previous identifiers and their paths"""
+        current_files = self._files
+        self._files = {}
+        return current_files
+
     def _get_outfile_name(self, identifier: str) -> str:
         """Returns file name for given `agent_name` and (optional) `agent_id`"""
         return self._files[identifier]

     def _create_outfile_name(self, identifier: str) -> Path:
         """Returns fully qualified file name based on given `agent_name` and (optional) `agent_id`"""
-        return Path(self._output_folder, f"{identifier}.
+        return Path(self._output_folder, f"{identifier}{self.CSV_FILE_SUFFIX}")

     def _save_outfile_name(self, outfile_name: Path, identifier: str) -> None:
         """Stores given name for given `agent_name` and (optional) `agent_id`"""
fameio/source/scenario/attribute.py
CHANGED
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
 from __future__ import annotations
@@ -16,6 +16,8 @@ class Attribute:
     _DICT_EMPTY = "Attribute '{}' was assigned an empty dictionary - please remove or fill empty assignments."
     _MIXED_DATA = "Attribute '{}' was assigned a list with mixed complex and simple entries - please fix."

+    _NAME_STRING_SEPARATOR = "."
+
     def __init__(self, name: str, definitions) -> None:
         """Parses an Attribute's definition"""
         self._full_name = name
@@ -46,7 +48,7 @@ class Attribute:

         inner_elements = {}
         for nested_name, value in definitions.items():
-            full_name = name +
+            full_name = name + Attribute._NAME_STRING_SEPARATOR + nested_name
             inner_elements[nested_name] = Attribute(full_name, value)
         return inner_elements

fameio/source/scenario/fameiofactory.py
CHANGED
@@ -1,7 +1,7 @@
 # SPDX-FileCopyrightText: 2023 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0
-
+from fameio.source.scenario.stringset import StringSet
 from fameio.source.schema.schema import Schema
 from fameio.source.scenario.generalproperties import GeneralProperties
 from fameio.source.scenario.agent import Agent
@@ -32,3 +32,8 @@ class FameIOFactory:
     def new_contract_from_dict(definitions: dict) -> Contract:
         """Constructs a new Contract from provided `definitions`"""
         return Contract.from_dict(definitions)
+
+    @staticmethod
+    def new_string_set_from_dict(definition: StringSet.StringSetType) -> StringSet:
+        """Constructs a new StringSet from provided `definitions`"""
+        return StringSet.from_dict(definition)
fameio/source/scenario/scenario.py
CHANGED
@@ -3,13 +3,14 @@
 # SPDX-License-Identifier: Apache-2.0
 from __future__ import annotations

-from typing import List
+from typing import List, Dict

 from fameio.source.scenario.agent import Agent
 from fameio.source.scenario.contract import Contract
 from fameio.source.scenario.exception import get_or_default, get_or_raise
 from fameio.source.scenario.fameiofactory import FameIOFactory
 from fameio.source.scenario.generalproperties import GeneralProperties
+from fameio.source.scenario.stringset import StringSet
 from fameio.source.schema.schema import Schema
 from fameio.source.tools import keys_to_lower

@@ -21,12 +22,14 @@ class Scenario:
     _KEY_GENERAL = "GeneralProperties".lower()
     _KEY_AGENTS = "Agents".lower()
     _KEY_CONTRACTS = "Contracts".lower()
+    _KEY_STRING_SETS = "StringSets".lower()

     _MISSING_KEY = "Scenario definition misses required key '{}'."

     def __init__(self, schema: Schema, general_props: GeneralProperties) -> None:
         self._schema = schema
         self._general_props = general_props
+        self._string_sets = {}
         self._agents = []
         self._contracts = []

@@ -41,6 +44,9 @@ class Scenario:
         )
         scenario = cls(schema, general_props)

+        for name, string_set_definition in get_or_default(definitions, Scenario._KEY_STRING_SETS, {}).items():
+            scenario.add_string_set(name, factory.new_string_set_from_dict(string_set_definition))
+
         for agent_definition in get_or_default(definitions, Scenario._KEY_AGENTS, []):
             scenario.add_agent(factory.new_agent_from_dict(agent_definition))

@@ -56,12 +62,14 @@ class Scenario:
             Scenario._KEY_GENERAL: self.general_properties.to_dict(),
             Scenario._KEY_SCHEMA: self.schema.to_dict(),
         }
-
+        if self.string_sets:
+            result[Scenario._KEY_STRING_SETS] = {
+                name: string_set.to_dict() for name, string_set in self.string_sets.items()
+            }
         if self.agents:
             result[Scenario._KEY_AGENTS] = []
             for agent in self.agents:
                 result[Scenario._KEY_AGENTS].append(agent.to_dict())
-
         if self.contracts:
             result[Scenario._KEY_CONTRACTS] = []
             for contract in self.contracts:
@@ -95,3 +103,12 @@ class Scenario:
     def general_properties(self) -> GeneralProperties:
         """Returns General properties of this scenario"""
         return self._general_props
+
+    @property
+    def string_sets(self) -> Dict[str, StringSet]:
+        """Returns StringSets of this scenario"""
+        return self._string_sets
+
+    def add_string_set(self, name: str, string_set: StringSet) -> None:
+        """Adds `string_set` with `name`"""
+        self._string_sets[name] = string_set
fameio/source/scenario/stringset.py
ADDED
@@ -0,0 +1,51 @@
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
+#
+# SPDX-License-Identifier: Apache-2.0
+from typing import Dict, List, Union
+
+from fameio.source.metadata import Metadata
+from fameio.source.scenario.exception import log_and_raise
+from fameio.source.tools import keys_to_lower
+
+
+class StringSet(Metadata):
+    """Hosts a StringSet in the given format"""
+
+    ValueType = Union[List[str], Dict[str, Dict]]
+    StringSetType = Dict[str, Union[Dict, ValueType]]
+
+    _ERR_NO_STRING_SET_VALUES = "Missing mandatory key '{}' in StringSet definition {}."
+
+    _KEY_VALUES = "Values".lower()
+
+    def __init__(self):
+        super().__init__()
+        self._values = {}
+
+    @classmethod
+    def from_dict(cls, definition: StringSetType) -> "StringSet":
+        """Returns StringSet initialised from `definition`"""
+        string_set = cls()
+        string_set._extract_metadata(definition)
+        definition = keys_to_lower(definition)
+        if cls._KEY_VALUES in definition:
+            string_set._values = string_set._read_values(definition)
+        else:
+            log_and_raise(cls._ERR_NO_STRING_SET_VALUES.format(cls._KEY_VALUES, definition))
+        return string_set
+
+    def _read_values(self, definition: ValueType) -> Dict[str, Dict]:
+        """Ensures values are returned as dictionary representation by converting `definitions` of type 'List[str]'"""
+        values = definition[self._KEY_VALUES]
+        if isinstance(values, list):
+            return {name: {} for name in values}
+        return values
+
+    def to_dict(self) -> Dict:
+        """Serializes the StringSet to a dict"""
+        result = {self._KEY_VALUES: self._values}
+        return self._enrich_with_metadata(result)
+
+    def is_in_set(self, key: str) -> bool:
+        """Returns True if `key` is a valid name in this StringSet"""
+        return key in self._values
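The complete new module appears above; a brief sketch of its two accepted `Values` forms and the membership check (the example values match the README excerpt further down in this diff):

```python
# Assumes fameio 2.3.1 is installed; uses only the API added above.
from fameio.source.scenario.stringset import StringSet

# List form: plain value names, no per-value metadata
fuels = StringSet.from_dict({"Values": ["OIL", "HARD_COAL", "LIGNITE"]})
print(fuels.is_in_set("OIL"))  # True
print(fuels.is_in_set("GAS"))  # False

# Dict form: per-value dictionaries are kept as-is by _read_values()
graded = StringSet.from_dict({"Values": {"OIL": {}, "LIGNITE": {}}, "MetaData": {"unit": "fuel"}})
print(graded.to_dict())  # {'values': {'OIL': {}, 'LIGNITE': {}}, 'metadata': {'unit': 'fuel'}}
```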
fameio/source/schema/attribute.py
CHANGED
@@ -21,6 +21,7 @@ class AttributeType(Enum):
     LONG = auto()
     TIME_STAMP = auto()
     STRING = auto()
+    STRING_SET = auto()
     ENUM = auto()
     TIME_SERIES = auto()
     BLOCK = auto()
@@ -33,7 +34,7 @@ class AttributeType(Enum):
             return float(value)
         elif self is AttributeType.TIME_STAMP:
             return FameTime.convert_string_if_is_datetime(value)
-        elif self is AttributeType.ENUM or self is AttributeType.STRING:
+        elif self is AttributeType.ENUM or self is AttributeType.STRING or self is AttributeType.STRING_SET:
             return str(value)
         elif self is AttributeType.TIME_SERIES:
             return float(value)
fameio/source/series.py
CHANGED
@@ -45,6 +45,7 @@ class TimeSeriesManager:
     _ERR_NAN_VALUE = "Values in TimeSeries must not be missing or NaN."
     _ERR_UNREGISTERED_SERIES = "No timeseries registered with identifier '{}' - was the Scenario validated?"
     _WARN_NO_DATA = "No timeseries stored in timeseries manager. Double check if you expected timeseries."
+    _WARN_DATA_IGNORED = "Timeseries contains additional columns with data which will be ignored."

     def __init__(self, path_resolver: PathResolver = PathResolver()) -> None:
         self._path_resolver = path_resolver
@@ -96,6 +97,10 @@ class TimeSeriesManager:

     def _check_and_convert_series(self, data: pd.DataFrame) -> pd.DataFrame:
         """Ensures validity of time series and convert to required format for writing to disk"""
+        additional_columns = data.loc[:, 2:]
+        is_empty = additional_columns.dropna(how="all").empty
+        if not is_empty:
+            log().warning(self._WARN_DATA_IGNORED)
         data = data.apply(
             lambda r: [FameTime.convert_string_if_is_datetime(r[0]), self._assert_valid(r[1])],
             axis=1,
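The new check relies on positional column slicing of the raw timeseries frame. A standalone pandas sketch of the same detection; reading with `header=None` is an assumption based on the integer column access shown above:

```python
import io

import pandas as pd

# Two proper columns plus a third, partially filled column that should trigger the warning.
raw = "2012-01-01_00:00:00;0.5;note\n2012-01-02_00:00:00;0.6;\n"
data = pd.read_csv(io.StringIO(raw), sep=";", header=None)

additional_columns = data.loc[:, 2:]  # everything beyond time stamp and value
has_extra_data = not additional_columns.dropna(how="all").empty
print(has_extra_data)  # True -> fameio 2.3.0+ would log _WARN_DATA_IGNORED here
```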
fameio/source/tools.py
CHANGED
@@ -6,7 +6,7 @@ from typing import Any, Dict, Union


 def keys_to_lower(dictionary: Dict[str, Any]) -> Dict[str, Any]:
-    """Returns new dictionary content of given `dictionary` but its `keys` in lower case"""
+    """Returns new dictionary content of given `dictionary` but its top-level `keys` in lower case"""
     return {keys.lower(): value for keys, value in dictionary.items()}


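A tiny illustration of what the clarified docstring states: only the top-level keys are lowered, nested keys keep their case.

```python
from fameio.source.tools import keys_to_lower

print(keys_to_lower({"MetaData": {"Source": "docs"}}))
# {'metadata': {'Source': 'docs'}} -> the nested 'Source' key is untouched
```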
fameio/source/validator.py
CHANGED
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText:
+# SPDX-FileCopyrightText: 2024 German Aerospace Center <fame@dlr.de>
 #
 # SPDX-License-Identifier: Apache-2.0

@@ -9,6 +9,7 @@ from typing import Any, Dict, List
 from fameio.source import PathResolver
 from fameio.source.logs import log_error_and_raise, log
 from fameio.source.scenario import Agent, Attribute, Contract, Scenario
+from fameio.source.scenario.stringset import StringSet
 from fameio.source.schema.agenttype import AgentType
 from fameio.source.schema.attribute import AttributeSpecs, AttributeType
 from fameio.source.schema.schema import Schema
@@ -40,6 +41,8 @@ class SchemaValidator:
     _IS_NO_LIST = "Attribute '{}' is list but assigned value '{}' is not a list."
     _TIME_SERIES_INVALID = "Timeseries at '{}' is invalid."
     _MISSING_CONTRACTS_FOR_AGENTS = "No contracts defined for Agent '{}' of type '{}'"
+    _MISSING_STRING_SET = "StringSet '{}' not defined in scenario."
+    _MISSING_STRING_SET_ENTRY = "Value '{}' of Attribute '{}' not defined in StringSet '{}'"

     @staticmethod
     def validate_scenario_and_timeseries(
@@ -65,6 +68,7 @@ class SchemaValidator:
         SchemaValidator.ensure_unique_agent_ids(agents)
         for agent in agents:
             SchemaValidator.ensure_agent_and_timeseries_are_valid(agent, schema, timeseries_manager)
+            SchemaValidator.ensure_string_set_consistency(agent, schema, scenario.string_sets)

         agent_types_by_id = {agent.id: agent.type_name for agent in agents}
         for contract in scenario.contracts:
@@ -199,7 +203,7 @@ class SchemaValidator:
             return isinstance(value, int)
         elif attribute_type is AttributeType.DOUBLE:
             return isinstance(value, (int, float)) and not math.isnan(value)
-        elif attribute_type in (AttributeType.ENUM, AttributeType.STRING):
+        elif attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
             return isinstance(value, str)
         elif attribute_type is AttributeType.TIME_STAMP:
             return FameTime.is_fame_time_compatible(value)
@@ -254,6 +258,50 @@ class SchemaValidator:
             for entry in attribute.nested_list:
                 SchemaValidator._ensure_valid_timeseries(entry, specification.nested_attributes, manager)

+    @staticmethod
+    def ensure_string_set_consistency(agent: Agent, schema: Schema, string_sets: Dict[str, StringSet]) -> None:
+        """
+        Raises exception if
+        a) an agent's attribute is of type StringSet but the corresponding StringSet is not defined in the scenario
+        b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
+        """
+        scenario_attributes = agent.attributes
+        schema_attributes = SchemaValidator._get_agent(schema, agent.type_name).attributes
+        SchemaValidator._ensure_string_set_consistency(scenario_attributes, schema_attributes, string_sets)
+
+    @staticmethod
+    def _ensure_string_set_consistency(
+        attributes: Dict[str, Attribute], specifications: Dict[str, AttributeSpecs], string_sets: Dict[str, StringSet]
+    ) -> None:
+        """
+        Recursively iterates through all attributes of an agent, applying tests if attribute type is `StringSet`
+        Raises:
+            ValidationException: if
+                a) StringSet mentioned in schema is not defined in the scenario
+                b) the value assigned to an attribute of type StringSet is not contained in the corresponding StringSet
+        """
+        for name, attribute in attributes.items():
+            specification = specifications[name]
+            if attribute.has_value:
+                attribute_type = specification.attr_type
+                if attribute_type is AttributeType.STRING_SET:
+                    if name in string_sets:
+                        if not string_sets[name].is_in_set(attribute.value):
+                            msg = SchemaValidator._MISSING_STRING_SET_ENTRY.format(
+                                attribute.value, str(attribute), name
+                            )
+                            log_error_and_raise(ValidationException(msg))
+                    else:
+                        msg = SchemaValidator._MISSING_STRING_SET.format(specification.full_name)
+                        log_error_and_raise(ValidationException(msg))
+            if attribute.has_nested:
+                SchemaValidator._ensure_string_set_consistency(
+                    attribute.nested, specification.nested_attributes, string_sets
+                )
+            if attribute.has_nested_list:
+                for entry in attribute.nested_list:
+                    SchemaValidator._ensure_string_set_consistency(entry, specification.nested_attributes, string_sets)
+
     @staticmethod
     def ensure_is_valid_contract(contract: Contract, schema: Schema, agent_types_by_id: Dict[int, str]) -> None:
         """Raises exception if given `contract` does not meet the `schema`'s requirements, using `agent_types_by_id`"""
fameio/source/writer.py
CHANGED
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 import sys
-
+import importlib.metadata as metadata
 from pathlib import Path
 from typing import Any, Dict, List, Union

@@ -150,7 +150,7 @@ class ProtoWriter:
             pb_field.longValue.extend(ensure_is_list(value))
         elif attribute_type is AttributeType.TIME_STAMP:
             pb_field.longValue.extend(ensure_is_list(FameTime.convert_string_if_is_datetime(value)))
-        elif attribute_type in (AttributeType.ENUM, AttributeType.STRING):
+        elif attribute_type in (AttributeType.ENUM, AttributeType.STRING, AttributeType.STRING_SET):
             pb_field.stringValue.extend(ensure_is_list(value))
         elif attribute_type is AttributeType.TIME_SERIES:
             pb_field.seriesId = self._time_series_manager.get_series_id_by_identifier(value)
@@ -246,6 +246,6 @@ class ProtoWriter:
     @staticmethod
     def _set_execution_versions(pb_versions: ExecutionData.Versions) -> None:
         """Adds version strings for fameio, fameprotobuf, and python to the given Versions message"""
-        pb_versions.fameProtobuf = version("fameprotobuf")
-        pb_versions.fameIo = version("fameio")
+        pb_versions.fameProtobuf = metadata.version("fameprotobuf")
+        pb_versions.fameIo = metadata.version("fameio")
         pb_versions.python = sys.version
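The last hunk swaps a bare `version(...)` call for the namespaced `importlib.metadata` form; a minimal standard-library check of the call being introduced (assumes fameio is installed in the current environment):

```python
import importlib.metadata as metadata
import sys

print(metadata.version("fameio"))  # e.g. "2.3.1"
print(sys.version)                 # the interpreter string written to pb_versions.python
```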
{fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fameio
-Version: 2.
+Version: 2.3.1
 Summary: Python scripts for operation of FAME models
 Home-page: https://gitlab.com/fame-framework/wiki/-/wikis/home
 License: Apache-2.0
@@ -127,18 +127,19 @@ make_config(run_config)

 ### Scenario YAML
 The "scenario.yaml" file contains all configuration options for a FAME-based simulation.
-It consists of the sections `Schema`, `GeneralProperties`, `Agents` and `Contracts`,
+It consists of the sections `Schema`, `GeneralProperties`, `Agents` and `Contracts`, and the optional section `StringSets`.
+All of them are described below.

 #### Schema
-The Schema describes a model's components such as its types of agents, their inputs, what data they exchange, etc.
-It is also used to validate the model inputs provided in the `scenario.yaml`.
+The Schema describes a model's components such as its types of agents, their inputs, what data they exchange, etc.
+It is also used to validate the model inputs provided in the `scenario.yaml`.
 Since the Schema is valid until the model itself is changed, it is recommended to defined it in a separate file and include the file here.

 Currently, the schema specifies:
 * which type of Agents can be created
 * what type of input attributes an Agent uses
 * what type of Products an Agent can send in Contracts, and
-* the names of the Java packages for the classes corresponding to Agents, DataItems and Portables.
+* the names of the Java packages for the classes corresponding to Agents, DataItems and Portables.

 The Schema consists of the sections `JavaPackages` and `AgentTypes`.

@@ -154,7 +155,7 @@ Also, package names occur on multiple lists for Agent, DataItem or Portable.
 For example, for a project with all its
 * Agent-derived java classes located in packages below the package named "agents",
 * DataItem implementation classes in a subpackage named "msg",
-* Portable implementation classes in a subpackages named "portableItems" and "otherPortables",
+* Portable implementation classes in a subpackages named "portableItems" and "otherPortables",

 the corresponding section in the schema would look like this:

@@ -235,7 +236,8 @@ MyComplexAttribute:
 | `long` | a 64-bit integer value |
 | `time_stamp` | either a FAME time stamp string or 64-bit integer value |
 | `string` | any string |
-| `
+| `string_set` | a string from a set of allowed `Values` defined in `StringSet` section in `scenario` |
+| `enum` | a string from a set of allowed `Values` defined in `schema` |
 | `time_series` | either a path to a .csv-file or a single 64-bit floating-point value; does not support `List: true` |
 | `block` | this attribute has no value of its own but hosts a group of nested Attributes; implies `NestedAttributes` to be defined |

@@ -352,7 +354,7 @@ Contract Parameters:
 * `ProductName` name of the product to be sent
 * `FirstDeliveryTime` first time of delivery in the format "seconds after the January 1st 2000, 00:00:00"
 * `DeliveryIntervalInSteps` delay time in between deliveries in seconds
-* `Attributes` can be set to include additional information as `int`, `float`, `enum
+* `Attributes` can be set to include additional information as `int`, `float`, `enum`, or `dict` data types

 ##### Definition of Multiple Similar Contracts
 Often, scenarios contain multiple agents of similar type that also have similar chains of contracts.
@@ -414,11 +416,47 @@ Contracts:
     DeliveryIntervalInSteps: 3600
 ```

+#### StringSets
+This optional section defines values of type `string_set`.
+In contrast to `enum` values, which are **statically** defined in the `Schema`, `string_set` values can be **dynamically** defined in this section.
+If an agent attribute is of type `string_set` and the attribute is set in the `scenario`, then
+1. the section `StringSets` in the `scenario` must contain an entry named exactly like the attribute, and
+2. the attribute value must be contained in the string set's `Values` declaration.
+
+For instance:
+
+In `schema`:
+
+``` yaml
+AgentTypes:
+  FuelsMarket:
+    Attributes:
+      FuelType:
+        AttributeType: string_set
+```
+
+In `scenario`:
+
+``` yaml
+StringSets:
+  FuelType:
+    Values: ['OIL', 'HARD_COAL', 'LIGNITE']
+
+Agents:
+  - Type: FuelsMarket
+    Id: 1
+    Attributes:
+      FuelType: OIL
+```
+
+Important: If different types of Agents shall refer to the same StringSet, their attributes in schema must have the **exact** same name.
+
 ### CSV files
 TIME_SERIES inputs are not directly fed into the Scenario YAML file.
 Instead, TIME_SERIES reference a CSV file that can be stored some place else.
 These CSV files follow a specific structure:
-* They
+* They should contain exactly two columns - any other columns are ignored.
+  A warning is raised if more than two non-empty columns are detected.
 * The first column must be a time stamp in form `YYYY-MM-DD_hh:mm:ss`
 * The second column must be a numerical value (either integer or floating-point)
 * The separator of the two columns is a semicolon
@@ -612,7 +650,7 @@ You may also specify any of the following arguments:
 | `-t` or `--time` <option> | Option to define conversion of time steps to given format (default=`UTC`) by `-t/--time {UTC, INT, FAME}` |
 | `--input-recovery` or `--no-input-recovery` | If True, all input data are recovered as well as the outputs (default=False). |

-Additionally, you may merge TimeSteps of a certain range of steps in the output files to
+Additionally, you may merge `TimeSteps` of a certain range of steps in the output files to
 i) associate multiple time steps with a common logical time in your simulation
 ii) reduce number of lines in output files

{fameio-2.2.0.dist-info → fameio-2.3.1.dist-info}/RECORD
CHANGED
@@ -1,24 +1,25 @@
-CHANGELOG.md,sha256=
+CHANGELOG.md,sha256=0T5FhyDSLmIo_tr4aXo_tOgJ5IsQH4WTY2JG3cnIjiI,12025
 fameio/__init__.py,sha256=IQm0MNOXkhBexiMXBoNZDK5xHUYgazH7oXm-lc0Vm04,109
 fameio/scripts/__init__.py,sha256=Bdu79kajJvvmPWdSP82Y6G8MCpP4n9ftTR-snWSbMJY,741
 fameio/scripts/__init__.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
-fameio/scripts/convert_results.py,sha256=
+fameio/scripts/convert_results.py,sha256=ljrfPTwhauW98w-wJs4Fu4vtTreGzsMEd-io33UC1z0,4189
 fameio/scripts/convert_results.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
 fameio/scripts/make_config.py,sha256=KQsj-aPXXtch8ITITCOcQUj6nYhvDs9LO7tdSikUe6w,1388
 fameio/scripts/make_config.py.license,sha256=2-OqCNxP4504xY2XQqseYypJi1_Qx4xJSzO3t7c3ACM,107
 fameio/source/__init__.py,sha256=14CnWOIkdSeKQg6FioQSgO7UtEFF6pO4MUtDAUfwNmA,278
 fameio/source/cli/__init__.py,sha256=UqrdWnYMoy-o2--m0a4c-MtAorsc4eILXaPq27EXM1Q,112
-fameio/source/cli/convert_results.py,sha256=
+fameio/source/cli/convert_results.py,sha256=oijIHua5gCjJ7ZAHNQJXNAoNRK1-pt11LlCDiwudhMQ,3458
 fameio/source/cli/make_config.py,sha256=phZKimvItl538xnwv9IhBoDkh__YqNlRk4YNDyTOFaA,2738
 fameio/source/cli/options.py,sha256=5DOFq4LMcObrSOlbt74HGE1vghByn-I30YY46tm5Snc,1422
-fameio/source/cli/parser.py,sha256=
+fameio/source/cli/parser.py,sha256=WK3vV7lrCpY2GCwS0N4ppNhJfVUcJ8pRZrM6NmGE8Ks,11015
 fameio/source/loader.py,sha256=-Y0j3OrV2MwAlfxFP2YA3ZwZhP-eFVlT6hdHO4Qze2U,7536
 fameio/source/logs.py,sha256=9Iq9jcglGyALYir-Luxfa-m4uUJX074JkUM0uMwzPNc,3684
+fameio/source/metadata.py,sha256=zdqKEIEQ0NKqEmBTBdPEq0fw1MdsXwCpzs1O6rFTq_Y,1024
 fameio/source/path_resolver.py,sha256=cIEVvz7Eh4e3Rh87XkwyGiyj9iKxUI7TzWtq6ClhLd8,1321
 fameio/source/results/__init__.py,sha256=IQm0MNOXkhBexiMXBoNZDK5xHUYgazH7oXm-lc0Vm04,109
 fameio/source/results/agent_type.py,sha256=pW5cLduLlNOcBBvS6sCLpTpZt3D6B8UMsizEZhe5lJ0,4321
-fameio/source/results/conversion.py,sha256=
-fameio/source/results/csv_writer.py,sha256=
+fameio/source/results/conversion.py,sha256=5s6gPWNIOOLKn0O0-9gVvmJ8wq7m0Dh5NgtXIe1PIJs,3683
+fameio/source/results/csv_writer.py,sha256=lp5fhnhD99ksYLJwUguC9o8mujM8tkyCfq2vQ5LT1zw,5152
 fameio/source/results/data_transformer.py,sha256=tz58m7_TZPE3YjBoDyiMWiP_sz9mWaj1JfW2FjKLVhY,5490
 fameio/source/results/input_dao.py,sha256=5V_7dUhO1XZGNuwH5041HnMGCcQjC9d9_TBR4FZ9QJE,6919
 fameio/source/results/output_dao.py,sha256=-tMewCUS4m3RebTAf8Z33gtlj-SxqRdxnzvGIkg6Ah0,4106
@@ -26,28 +27,29 @@ fameio/source/results/reader.py,sha256=g3cfbNq7eHO98dxtpqjjPE8_YJVM56Wp0MCnNi3ds
 fameio/source/results/yaml_writer.py,sha256=rOAbeZQgRojYvofUcXreIdIJtdSbOrHAohiNLkmPgCI,837
 fameio/source/scenario/__init__.py,sha256=YfJvz275GWX8kVWMSMRcF3KsUQNqSAPrwBgNV7tqBTY,372
 fameio/source/scenario/agent.py,sha256=Z8jfMfUjrTJ4R1rEpwNdKCblhEx6-9xioewxGuajuqw,4713
-fameio/source/scenario/attribute.py,sha256
+fameio/source/scenario/attribute.py,sha256=X63ZFpAdObTjMHpf0FD_H3v9sfOOqn7KDdjiGmyMZj4,4899
 fameio/source/scenario/contract.py,sha256=lM2NN_CUDh9iqDghq_F6JE9rEW3ZGUhBJOD0qkjuKBU,9442
 fameio/source/scenario/exception.py,sha256=7MYN4h40ptMZFD1lLtnK5PWXbAGlsmUSDmUQ5FAKeN8,1554
-fameio/source/scenario/fameiofactory.py,sha256=
+fameio/source/scenario/fameiofactory.py,sha256=kV2GXGjNxYrt4lv5KoK8wYN6lfN3Z3hWaRNOl89gYv8,1654
 fameio/source/scenario/generalproperties.py,sha256=TkQV9dEo5zMfNiFITolJU1rQUBi-yDJLpoello39e_0,4796
-fameio/source/scenario/scenario.py,sha256=
+fameio/source/scenario/scenario.py,sha256=w64SHQXpRFIMhDUNOIRkuK81jY_cXvT_M4CNhtULbMQ,4585
+fameio/source/scenario/stringset.py,sha256=ZfObpTbAsYZcTti-1xoq3KaZf3D59fMToRkkZaTXIcc,1920
 fameio/source/schema/__init__.py,sha256=ZGTyliDbjlYGPAjB9bbggzACOYWdhm4I1VSmm7YGmTk,270
 fameio/source/schema/agenttype.py,sha256=fe6GWJwl2S-J7KmH_eR5Y8Ieez1difez1w7v-r26yGU,5183
-fameio/source/schema/attribute.py,sha256=
+fameio/source/schema/attribute.py,sha256=wzu4563_zUU863dhjZQOuoPBBW4a7TjED5orKWuEujM,8357
 fameio/source/schema/exception.py,sha256=NMftGnrFOaS3MwrjZl8qbx3bi6KYUxhzHJIX1v--B5M,224
 fameio/source/schema/java_packages.py,sha256=-2ZbAG_htEowB-fBSMizsLWrphyiKfESDDHspzdEQP4,2656
 fameio/source/schema/schema.py,sha256=-J8VtO4W9Z-OnTV7MFO1umZWIC_epbLCYd6-q4lNQ8Y,2975
-fameio/source/series.py,sha256=
+fameio/source/series.py,sha256=wwq7Nm0i0mOvMOjmqPW5x1gSDnhpWEy14VLceUhWcYw,9041
 fameio/source/time.py,sha256=Vsd95nTubL8x5cwiznJYeti6clZCCHHJv7xjSpYqkF0,6984
-fameio/source/tools.py,sha256=
-fameio/source/validator.py,sha256=
-fameio/source/writer.py,sha256=
-fameio-2.
-fameio-2.
-fameio-2.
-fameio-2.
-fameio-2.
-fameio-2.
-fameio-2.
-fameio-2.
+fameio/source/tools.py,sha256=B94U7Idq9ZuvsQmc9HOti016z4jn2oTTMIElWhs-5_w,1062
+fameio/source/validator.py,sha256=44wAlSxzSlaP9cm3YpNXOcs_kXIUNagctHLguY6H3T4,18596
+fameio/source/writer.py,sha256=d35rZGPLil3yku8_N6l1B45J4agKemBhAHJJPynQiJE,12499
+fameio-2.3.1.dist-info/entry_points.txt,sha256=jvQVfwJjZXPWQjJlhj1Dt6PTeblryTc1GxjKeK90twI,123
+fameio-2.3.1.dist-info/LICENSE.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+fameio-2.3.1.dist-info/LICENSES/Apache-2.0.txt,sha256=eGHBZnhr9CWjE95SWjRfmhtK1lvVn5X4Fpf3KrrAZDg,10391
+fameio-2.3.1.dist-info/LICENSES/CC-BY-4.0.txt,sha256=y9WvMYKGt0ZW8UXf9QkZB8wj1tjJrQngKR7CSXeSukE,19051
+fameio-2.3.1.dist-info/LICENSES/CC0-1.0.txt,sha256=9Ofzc7m5lpUDN-jUGkopOcLZC3cl6brz1QhKInF60yg,7169
+fameio-2.3.1.dist-info/METADATA,sha256=kYlnlq-NqQnaJpgxUgfehySdfLTA0wIUF8OU66a8Wqw,32471
+fameio-2.3.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+fameio-2.3.1.dist-info/RECORD,,
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|