fameio 2.3.1__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- CHANGELOG.md +24 -0
- fameio/__init__.py +4 -1
- fameio/{source/cli → cli}/__init__.py +2 -0
- fameio/{source/cli → cli}/convert_results.py +8 -8
- fameio/{source/cli → cli}/make_config.py +5 -5
- fameio/{source/cli → cli}/options.py +0 -8
- fameio/{source/cli → cli}/parser.py +26 -83
- fameio/input/__init__.py +27 -0
- fameio/input/loader/__init__.py +68 -0
- fameio/input/loader/controller.py +129 -0
- fameio/input/loader/loader.py +109 -0
- fameio/input/metadata.py +149 -0
- fameio/input/resolver.py +44 -0
- fameio/{source → input}/scenario/__init__.py +1 -2
- fameio/{source → input}/scenario/agent.py +24 -38
- fameio/input/scenario/attribute.py +203 -0
- fameio/{source → input}/scenario/contract.py +50 -61
- fameio/{source → input}/scenario/exception.py +8 -13
- fameio/{source → input}/scenario/fameiofactory.py +6 -6
- fameio/{source → input}/scenario/generalproperties.py +22 -47
- fameio/{source → input}/scenario/scenario.py +34 -31
- fameio/input/scenario/stringset.py +48 -0
- fameio/{source → input}/schema/__init__.py +2 -2
- fameio/input/schema/agenttype.py +125 -0
- fameio/input/schema/attribute.py +268 -0
- fameio/{source → input}/schema/java_packages.py +26 -22
- fameio/{source → input}/schema/schema.py +25 -22
- fameio/{source → input}/validator.py +32 -35
- fameio/{source → input}/writer.py +86 -86
- fameio/{source/logs.py → logs.py} +25 -9
- fameio/{source/results → output}/agent_type.py +21 -22
- fameio/{source/results → output}/conversion.py +34 -31
- fameio/{source/results → output}/csv_writer.py +7 -7
- fameio/{source/results → output}/data_transformer.py +24 -24
- fameio/{source/results → output}/input_dao.py +51 -49
- fameio/{source/results → output}/output_dao.py +16 -17
- fameio/{source/results → output}/reader.py +30 -31
- fameio/{source/results → output}/yaml_writer.py +2 -3
- fameio/scripts/__init__.py +2 -2
- fameio/scripts/convert_results.py +16 -15
- fameio/scripts/make_config.py +9 -9
- fameio/{source/series.py → series.py} +28 -26
- fameio/{source/time.py → time.py} +8 -8
- fameio/{source/tools.py → tools.py} +2 -2
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/METADATA +277 -72
- fameio-3.0.0.dist-info/RECORD +56 -0
- fameio/source/__init__.py +0 -8
- fameio/source/loader.py +0 -181
- fameio/source/metadata.py +0 -32
- fameio/source/path_resolver.py +0 -34
- fameio/source/scenario/attribute.py +0 -130
- fameio/source/scenario/stringset.py +0 -51
- fameio/source/schema/agenttype.py +0 -132
- fameio/source/schema/attribute.py +0 -203
- fameio/source/schema/exception.py +0 -9
- fameio-2.3.1.dist-info/RECORD +0 -55
- /fameio/{source/results → output}/__init__.py +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/LICENSE.txt +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/LICENSES/Apache-2.0.txt +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/LICENSES/CC-BY-4.0.txt +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/LICENSES/CC0-1.0.txt +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/WHEEL +0 -0
- {fameio-2.3.1.dist-info → fameio-3.0.0.dist-info}/entry_points.txt +0 -0
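The bulk of this release is the package restructuring visible in the rename table above: the former `fameio.source` namespace is split into `fameio.cli`, `fameio.input`, and `fameio.output`, and `logs`, `series`, `time`, and `tools` become top-level modules. A minimal migration sketch; the 2.3.1 import paths are inferred from the "source/..." side of the table and are not shown verbatim in this diff:

```python
# fameio 2.3.1 imports (inferred from the rename table; exact old paths may differ):
#   from fameio.source.scenario import Scenario
#   from fameio.source.cli.options import Options
#   from fameio.source.logs import log

# fameio 3.0.0 equivalents, exactly as they appear in the hunks below:
from fameio.input.scenario import Scenario
from fameio.cli.options import Options
from fameio.logs import log
```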
fameio/scripts/__init__.py
CHANGED
@@ -4,8 +4,8 @@ import sys
 from fameio.scripts.convert_results import DEFAULT_CONFIG as DEFAULT_CONVERT_CONFIG
 from fameio.scripts.convert_results import run as convert_results
 from fameio.scripts.make_config import run as make_config
-from fameio.
-from fameio.
+from fameio.cli.convert_results import handle_args as handle_convert_results_args
+from fameio.cli.make_config import handle_args as handle_make_config_args


 # noinspection PyPep8Naming
fameio/scripts/convert_results.py
CHANGED
@@ -4,18 +4,18 @@ from pathlib import Path

 import pandas as pd

-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
+from fameio.cli.convert_results import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
+from fameio.cli.options import Options
+from fameio.cli import update_default_config
+from fameio.logs import log_critical_and_raise, fameio_logger, log
+from fameio.output.agent_type import AgentTypeLog
+from fameio.output.conversion import apply_time_option, apply_time_merging
+from fameio.output.csv_writer import CsvWriter
+from fameio.output.data_transformer import DataTransformer, INDEX
+from fameio.output.input_dao import InputDao
+from fameio.output.output_dao import OutputDAO
+from fameio.output.reader import Reader
+from fameio.output.yaml_writer import data_to_yaml_file

 ERR_MEMORY_ERROR = "Out of memory. Try using `-m` or `--memory-saving` option."
 ERR_MEMORY_SEVERE = "Out of memory despite memory-saving mode. Reduce output interval in `FAME-Core` and rerun model."
@@ -55,9 +55,10 @@ def run(config: dict = None) -> None:
 if config[Options.INPUT_RECOVERY]:
 log().info("Recovering inputs...")
 timeseries, scenario = input_dao.recover_inputs()
-
+base_path = config[Options.OUTPUT] if config[Options.OUTPUT] is not None else "./"
+series_writer = CsvWriter(Path(base_path, "./recovered"), Path("./"), False)
 series_writer.write_time_series_to_disk(timeseries)
-data_to_yaml_file(scenario.to_dict(), Path(
+data_to_yaml_file(scenario.to_dict(), Path(base_path, "./recovered/scenario.yaml"))

 if config[Options.MEMORY_SAVING]:
 written_files = output_writer.pop_all_file_paths()
@@ -69,7 +70,7 @@ def run(config: dict = None) -> None:

 log().info("Data conversion completed.")
 except MemoryError:
-
+log_critical_and_raise(MemoryError(ERR_MEMORY_SEVERE if Options.MEMORY_SAVING else ERR_MEMORY_ERROR))

 file_stream.close()
 if not agent_type_log.has_any_agent_type():
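The input-recovery hunk above changes where recovered files are written: time series and the recovered scenario now land under a `recovered/` folder inside the configured output directory, falling back to the working directory when no output is set. A minimal sketch of that path logic; `recovered_paths` is an illustrative helper, not part of fameio:

```python
from pathlib import Path
from typing import Optional

def recovered_paths(output_option: Optional[str]) -> tuple[Path, Path]:
    # Mirrors the path logic in the hunk above: fall back to "./" when no
    # output folder is configured.
    base_path = output_option if output_option is not None else "./"
    series_dir = Path(base_path, "./recovered")                   # recovered time-series CSVs
    scenario_file = Path(base_path, "./recovered/scenario.yaml")  # recovered scenario
    return series_dir, scenario_file

print(recovered_paths("results"))  # paths under results/recovered/
print(recovered_paths(None))       # paths under ./recovered/
```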
fameio/scripts/make_config.py
CHANGED
@@ -2,14 +2,14 @@
 import sys
 from pathlib import Path

-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
-from fameio.
+from fameio.cli.make_config import handle_args, CLI_DEFAULTS as DEFAULT_CONFIG
+from fameio.cli.options import Options
+from fameio.cli import update_default_config
+from fameio.input.loader import load_yaml, validate_yaml_file_suffix
+from fameio.logs import fameio_logger, log
+from fameio.input.scenario import Scenario
+from fameio.input.validator import SchemaValidator
+from fameio.input.writer import ProtoWriter


 def run(config: dict = None) -> None:
@@ -18,7 +18,7 @@ def run(config: dict = None) -> None:
 fameio_logger(log_level_name=config[Options.LOG_LEVEL], file_name=config[Options.LOG_FILE])

 file = config[Options.FILE]
-
+validate_yaml_file_suffix(Path(file))
 scenario = Scenario.from_dict(load_yaml(Path(file), encoding=config[Options.INPUT_ENCODING]))
 SchemaValidator.check_agents_have_contracts(scenario)

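Both entry points keep the `run(config: dict = None)` signature, so the rewritten imports do not change how the scripts are driven. A hedged sketch of a programmatic call, assuming `CLI_DEFAULTS` (re-exported as `DEFAULT_CONFIG`) is a plain option dictionary as the imports above suggest; the scenario path is a placeholder:

```python
from fameio.cli.options import Options
from fameio.scripts.make_config import DEFAULT_CONFIG, run

# Start from the CLI defaults and override only the scenario file to compile.
config = dict(DEFAULT_CONFIG)           # assumes CLI_DEFAULTS behaves like a dict
config[Options.FILE] = "scenario.yaml"  # placeholder path to a scenario YAML
run(config)
```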
fameio/series.py
CHANGED
@@ -5,19 +5,19 @@ import math
 import os
 from enum import Enum, auto
 from pathlib import Path
-from typing import
+from typing import Union, Any

 import pandas as pd
-from fameprotobuf.
+from fameprotobuf.input_file_pb2 import InputData
 from google.protobuf.internal.wire_format import INT64_MIN, INT64_MAX

-from fameio.
-from fameio.
-from fameio.
-from fameio.
+from fameio.input.resolver import PathResolver
+from fameio.logs import log_error_and_raise, log
+from fameio.time import ConversionError, FameTime
+from fameio.tools import clean_up_file_name


-class
+class TimeSeriesError(Exception):
 """Indicates that an error occurred during management of time series"""

 pass
@@ -50,7 +50,7 @@ class TimeSeriesManager:
 def __init__(self, path_resolver: PathResolver = PathResolver()) -> None:
 self._path_resolver = path_resolver
 self._id_count = -1
-self._series_by_id:
+self._series_by_id: dict[Union[str, int, float], dict[Entry, Any]] = {}

 def register_and_validate(self, identifier: Union[str, int, float]) -> None:
 """
@@ -75,7 +75,7 @@ class TimeSeriesManager:
 name, series = self._get_name_and_dataframe(identifier)
 self._series_by_id[identifier] = {Entry.ID: self._id_count, Entry.NAME: name, Entry.DATA: series}

-def _get_name_and_dataframe(self, identifier: Union[str, int, float]) ->
+def _get_name_and_dataframe(self, identifier: Union[str, int, float]) -> tuple[str, pd.DataFrame]:
 """Returns name and DataFrame containing the series obtained from the given `identifier`"""
 if isinstance(identifier, str):
 series_path = self._path_resolver.resolve_series_file_path(Path(identifier).as_posix())
@@ -84,14 +84,14 @@ class TimeSeriesManager:
 try:
 return identifier, self._check_and_convert_series(data)
 except TypeError as e:
-log_error_and_raise(
-except
-log_error_and_raise(
+log_error_and_raise(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_VALUE.format(identifier), e))
+except ConversionError:
+log_error_and_raise(TimeSeriesError(self._ERR_CORRUPT_TIME_SERIES_KEY.format(identifier)))
 else:
 message = self._ERR_FILE_NOT_FOUND.format(identifier)
 if self._is_number_string(identifier):
 message += self._ERR_NUMERIC_STRING
-log_error_and_raise(
+log_error_and_raise(TimeSeriesError(message))
 else:
 return self._create_timeseries_from_value(identifier)

@@ -106,7 +106,7 @@ class TimeSeriesManager:
 axis=1,
 result_type="expand",
 )
-return data
+return data.astype({0: "int64"})

 @staticmethod
 def _assert_valid(value: Any) -> float:
@@ -129,10 +129,10 @@ class TimeSeriesManager:
 return False

 @staticmethod
-def _create_timeseries_from_value(value: Union[int, float]) ->
+def _create_timeseries_from_value(value: Union[int, float]) -> tuple[str, pd.DataFrame]:
 """Returns name and dataframe for a new static timeseries created from the given `value`"""
 if math.isnan(value):
-log_error_and_raise(
+log_error_and_raise(TimeSeriesError(TimeSeriesManager._ERR_NAN_VALUE))
 data = pd.DataFrame({0: [INT64_MIN, INT64_MAX], 1: [value, value]})
 return TimeSeriesManager._CONSTANT_IDENTIFIER.format(value), data

@@ -150,29 +150,31 @@ class TimeSeriesManager:
 TimeSeriesException: if identifier was not yet registered
 """
 if not self._time_series_is_registered(identifier):
-log_error_and_raise(
+log_error_and_raise(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(identifier)))
 return self._series_by_id.get(identifier)[Entry.ID]

-def get_all_series(self) ->
+def get_all_series(self) -> list[tuple[int, str, pd.DataFrame]]:
 """Returns iterator over id, name and dataframe of all stored series"""
 if len(self._series_by_id) == 0:
 log().warning(self._WARN_NO_DATA)
 return [(v[Entry.ID], v[Entry.NAME], v[Entry.DATA]) for v in self._series_by_id.values()]

-def reconstruct_time_series(self, timeseries:
+def reconstruct_time_series(self, timeseries: list[InputData.TimeSeriesDao]) -> None:
 """Reconstructs and stores time series from given list of `timeseries_dao`"""
 for one_series in timeseries:
 self._id_count += 1
-reconstructed = {Entry.ID: one_series.
-if len(one_series.
-
+reconstructed = {Entry.ID: one_series.series_id}
+if len(one_series.values) == 1 or (
+len(one_series.values) == 2 and one_series.values[0] == one_series.values[1]
+):
+reconstructed[Entry.NAME] = one_series.values[0]
 reconstructed[Entry.DATA] = None
 else:
-reconstructed[Entry.NAME] = self._get_cleaned_file_name(one_series.
+reconstructed[Entry.NAME] = self._get_cleaned_file_name(one_series.series_name)
 reconstructed[Entry.DATA] = pd.DataFrame(
-
+{self._KEY_ROW_TIME: list(one_series.time_steps), self._KEY_ROW_VALUE: list(one_series.values)}
 )
-self._series_by_id[one_series.
+self._series_by_id[one_series.series_id] = reconstructed

 def _get_cleaned_file_name(self, timeseries_name: str):
 if timeseries_name.lower().endswith(".csv"):
@@ -185,5 +187,5 @@ class TimeSeriesManager:
 """Return name or path for given `series_id` if series these are identified by their number.
 Use this only if series were added via `reconstruct_time_series`"""
 if series_id < 0 or series_id > self._id_count:
-log_error_and_raise(
+log_error_and_raise(TimeSeriesError(self._ERR_UNREGISTERED_SERIES.format(series_id)))
 return self._series_by_id[series_id][Entry.NAME]
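The reworked `reconstruct_time_series` treats a serialized series with a single value, or with two identical values, as a constant: it stores the value as the series name and keeps no DataFrame. A standalone sketch of that check; `is_constant_series` is illustrative only, not part of fameio:

```python
def is_constant_series(values: list[float]) -> bool:
    # Mirrors the condition added in reconstruct_time_series above.
    return len(values) == 1 or (len(values) == 2 and values[0] == values[1])

assert is_constant_series([42.0])          # single value -> constant
assert is_constant_series([1.5, 1.5])      # two identical values -> constant
assert not is_constant_series([1.0, 2.0])  # a real time series
```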
fameio/time.py
CHANGED
@@ -8,7 +8,7 @@ import re
 from enum import Enum, auto
 from typing import Union

-from fameio.
+from fameio.logs import log_error_and_raise

 START_IN_REAL_TIME = "2000-01-01_00:00:00"
 DATE_FORMAT = "%Y-%m-%d_%H:%M:%S"
@@ -16,7 +16,7 @@ DATE_REGEX = re.compile("[0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}:[0-9]{2}:[0-9]{2}")
 FAME_FIRST_DATETIME = dt.datetime.strptime(START_IN_REAL_TIME, DATE_FORMAT)


-class
+class ConversionError(Exception):
 """Indicates that something went wrong during time stamp conversion"""

 pass
@@ -75,14 +75,14 @@ class FameTime:
 def convert_datetime_to_fame_time_step(datetime_string: str) -> int:
 """Converts real Datetime string to FAME time step"""
 if not FameTime.is_datetime(datetime_string):
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._FORMAT_INVALID.format(datetime_string)))
 datetime = FameTime._convert_to_datetime(datetime_string)
 years_since_start_time = datetime.year - FAME_FIRST_DATETIME.year
 beginning_of_year = dt.datetime(year=datetime.year, month=1, day=1, hour=0, minute=0, second=0)
 seconds_since_beginning_of_year = int((datetime - beginning_of_year).total_seconds())
 steps_since_beginning_of_year = seconds_since_beginning_of_year * Constants.STEPS_PER_SECOND
 if steps_since_beginning_of_year > Constants.STEPS_PER_YEAR:
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._INVALID_TOO_LARGE.format(datetime_string)))
 year_offset = years_since_start_time * Constants.STEPS_PER_YEAR
 return year_offset + steps_since_beginning_of_year

@@ -92,7 +92,7 @@ class FameTime:
 try:
 return dt.datetime.strptime(datetime_string, DATE_FORMAT)
 except ValueError:
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._INVALID_TIMESTAMP.format(datetime_string)))

 @staticmethod
 def convert_fame_time_step_to_datetime(fame_time_steps: int, date_format: str = DATE_FORMAT) -> str:
@@ -109,7 +109,7 @@ class FameTime:
 try:
 return datetime.strftime(date_format)
 except ValueError:
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._INVALID_DATE_FORMAT.format(date_format)))

 @staticmethod
 def convert_time_span_to_fame_time_steps(value: int, unit: TimeUnit) -> int:
@@ -118,7 +118,7 @@ class FameTime:
 if steps:
 return steps * value
 else:
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._TIME_UNIT_UNKNOWN.format(unit)))

 @staticmethod
 def is_datetime(string: str) -> bool:
@@ -159,4 +159,4 @@ class FameTime:
 try:
 return int(value)
 except ValueError:
-log_error_and_raise(
+log_error_and_raise(ConversionError(FameTime._NO_TIMESTAMP.format(value)))
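The helpers above convert between real-world datetime strings (format `%Y-%m-%d_%H:%M:%S`, starting at `2000-01-01_00:00:00`) and FAME time steps, and malformed input now surfaces as `ConversionError`. A hedged usage sketch against the 3.0.0 module paths shown in this diff:

```python
from fameio.time import ConversionError, FameTime

try:
    # FAME time steps elapsed between the FAME start time and the given datetime
    steps = FameTime.convert_datetime_to_fame_time_step("2001-01-01_00:00:00")
    print(steps, FameTime.convert_fame_time_step_to_datetime(steps))
except ConversionError:
    print("not a valid FAME datetime string")
```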
fameio/tools.py
CHANGED
@@ -2,10 +2,10 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 from pathlib import Path
-from typing import Any,
+from typing import Any, Union


-def keys_to_lower(dictionary:
+def keys_to_lower(dictionary: dict[str, Any]) -> dict[str, Any]:
 """Returns new dictionary content of given `dictionary` but its top-level `keys` in lower case"""
 return {keys.lower(): value for keys, value in dictionary.items()}

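Across these modules the new type hints use the built-in generics `dict[...]`, `tuple[...]`, and `list[...]`, which require Python 3.9 or newer. A small usage sketch for `keys_to_lower` as defined above; the dictionary keys are illustrative and only top-level keys are lowered:

```python
from fameio.tools import keys_to_lower

print(keys_to_lower({"Agents": [], "GeneralProperties": {"RunId": 1}}))
# {'agents': [], 'generalproperties': {'RunId': 1}}  (nested keys are unchanged)
```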