aiphoria 0.0.1__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- aiphoria/__init__.py +59 -0
- aiphoria/core/__init__.py +55 -0
- aiphoria/core/builder.py +305 -0
- aiphoria/core/datachecker.py +1808 -0
- aiphoria/core/dataprovider.py +806 -0
- aiphoria/core/datastructures.py +1686 -0
- aiphoria/core/datavisualizer.py +431 -0
- aiphoria/core/datavisualizer_data/LICENSE +21 -0
- aiphoria/core/datavisualizer_data/datavisualizer_plotly.html +5561 -0
- aiphoria/core/datavisualizer_data/pako.min.js +2 -0
- aiphoria/core/datavisualizer_data/plotly-3.0.0.min.js +3879 -0
- aiphoria/core/flowmodifiersolver.py +1754 -0
- aiphoria/core/flowsolver.py +1472 -0
- aiphoria/core/logger.py +113 -0
- aiphoria/core/network_graph.py +136 -0
- aiphoria/core/network_graph_data/ECHARTS_LICENSE +202 -0
- aiphoria/core/network_graph_data/echarts_min.js +45 -0
- aiphoria/core/network_graph_data/network_graph.html +76 -0
- aiphoria/core/network_graph_data/network_graph.js +1391 -0
- aiphoria/core/parameters.py +269 -0
- aiphoria/core/types.py +20 -0
- aiphoria/core/utils.py +362 -0
- aiphoria/core/visualizer_parameters.py +7 -0
- aiphoria/data/example_scenario.xlsx +0 -0
- aiphoria/example.py +66 -0
- aiphoria/lib/docs/dynamic_stock.py +124 -0
- aiphoria/lib/odym/modules/ODYM_Classes.py +362 -0
- aiphoria/lib/odym/modules/ODYM_Functions.py +1299 -0
- aiphoria/lib/odym/modules/__init__.py +1 -0
- aiphoria/lib/odym/modules/dynamic_stock_model.py +808 -0
- aiphoria/lib/odym/modules/test/DSM_test_known_results.py +762 -0
- aiphoria/lib/odym/modules/test/ODYM_Classes_test_known_results.py +107 -0
- aiphoria/lib/odym/modules/test/ODYM_Functions_test_known_results.py +136 -0
- aiphoria/lib/odym/modules/test/__init__.py +2 -0
- aiphoria/runner.py +678 -0
- aiphoria-0.8.0.dist-info/METADATA +119 -0
- aiphoria-0.8.0.dist-info/RECORD +40 -0
- {aiphoria-0.0.1.dist-info → aiphoria-0.8.0.dist-info}/WHEEL +1 -1
- aiphoria-0.8.0.dist-info/licenses/LICENSE +21 -0
- aiphoria-0.0.1.dist-info/METADATA +0 -5
- aiphoria-0.0.1.dist-info/RECORD +0 -5
- {aiphoria-0.0.1.dist-info → aiphoria-0.8.0.dist-info}/top_level.txt +0 -0
aiphoria/__init__.py
CHANGED
@@ -0,0 +1,59 @@
+__version__ = "0.0.1"
+
+# Import aiphoria core files, classes and functions
+from . import core
+from .core.builder import (
+    init_builder,
+    build_results,
+    build_dataprovider,
+    build_datachecker,
+)
+
+from .core.dataprovider import DataProvider
+from .core.datachecker import DataChecker
+from .core.datastructures import (
+    Scenario,
+    ScenarioData,
+    Process,
+    Flow,
+    Stock,
+    Indicator,
+)
+
+from .core.parameters import (
+    ParameterName,
+    ParameterFillMethod,
+)
+
+from .core.utils import (
+    create_output_directory,
+    setup_current_working_directory,
+    set_output_directory,
+    get_output_directory,
+)
+
+from .core.logger import log
+from .runner import run_scenarios
+
+__all__ = [
+    "core",
+    "init_builder",
+    "build_results",
+    "build_dataprovider",
+    "build_datachecker",
+    "DataProvider",
+    "DataChecker",
+    "Scenario",
+    "Process",
+    "Flow",
+    "Stock",
+    "Indicator",
+    "ParameterName",
+    "ParameterFillMethod",
+    "create_output_directory",
+    "setup_current_working_directory",
+    "log",
+    "set_output_directory",
+    "get_output_directory",
+    "run_scenarios",
+]
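Everything listed in `__all__` above is re-exported at the package root, so user code does not need to reach into the `aiphoria.core` submodules. A minimal, illustrative sketch of what this new top level provides:

import aiphoria

# The re-exports resolve to the same objects as their aiphoria.core counterparts
assert aiphoria.DataProvider is aiphoria.core.dataprovider.DataProvider

# Note: the module-level version string still reads "0.0.1" in this wheel,
# even though the wheel itself is versioned 0.8.0
print(aiphoria.__version__)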
aiphoria/core/__init__.py
ADDED
@@ -0,0 +1,55 @@
+"""
+aiphoria core module
+"""
+
+from .builder import (
+    init_builder,
+    build_results,
+    build_dataprovider,
+    build_datachecker,
+    build_and_solve_scenarios,
+)
+
+from .dataprovider import DataProvider
+from .datachecker import DataChecker
+from .datastructures import (
+    Scenario,
+    ScenarioData,
+    Process,
+    Flow,
+    Stock,
+    Indicator,
+)
+
+from .flowsolver import FlowSolver
+from .parameters import ParameterName, ParameterFillMethod, StockDistributionType
+from .datavisualizer import DataVisualizer
+from .network_graph import NetworkGraph
+from .logger import log
+from .utils import (
+    create_output_directory,
+)
+
+__all__ = [
+    "init_builder",
+    "build_results",
+    "build_dataprovider",
+    "build_datachecker",
+    "build_and_solve_scenarios",
+    "DataProvider",
+    "DataChecker",
+    "Scenario",
+    "ScenarioData",
+    "Process",
+    "Flow",
+    "Stock",
+    "Indicator",
+    "FlowSolver",
+    "DataVisualizer",
+    "NetworkGraph",
+    "ParameterName",
+    "ParameterFillMethod",
+    "StockDistributionType",
+    "log",
+    "create_output_directory",
+]
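The core package exports a wider surface than the package root: `FlowSolver`, `DataVisualizer`, `NetworkGraph`, `StockDistributionType` and `build_and_solve_scenarios` appear only in this `__all__`, not in the top-level one. An illustrative import sketch:

# Solver, visualizers and the scenario pipeline helper live one level down
from aiphoria.core import FlowSolver, DataVisualizer, NetworkGraph
from aiphoria.core import build_and_solve_scenarios, StockDistributionType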
aiphoria/core/builder.py
ADDED
@@ -0,0 +1,305 @@
+import os
+import pickle
+import shutil
+from typing import Union, List
+from . import logger
+from .logger import log, start_log_perf, stop_log_perf, clear_log_perf, show_log_perf_summary
+from .datachecker import DataChecker
+from .dataprovider import DataProvider
+from .datastructures import Scenario
+from .flowsolver import FlowSolver
+from .parameters import ParameterName
+from .utils import show_exception_errors, show_model_parameters, build_mfa_system_for_scenario
+
+# Globals
+global_path_to_cache = ""
+global_use_cache = False
+global_use_timing = False
+global_clear_cache = False
+
+
+def init_builder(path_to_cache: str,
+                 use_cache: bool = False,
+                 use_timing: bool = False,
+                 clear_cache: bool = False,
+                 ) -> None:
+    """
+    Initialize the Builder module.
+    If use_cache is True, create the cache directory if it does not already exist.
+
+    :param path_to_cache: Absolute path to the directory that holds cached objects
+    :param use_cache: True to use cached objects (default: False)
+    :param use_timing: True to show timing information (default: False)
+    :param clear_cache: True to delete the cache directory and create a new one
+    :return: None
+    """
+    # Update globals
+    globals().update(global_path_to_cache=path_to_cache)
+    globals().update(global_use_cache=use_cache)
+    globals().update(global_use_timing=use_timing)
+    globals().update(global_clear_cache=clear_cache)
+
+    if global_clear_cache:
+        shutil.rmtree(global_path_to_cache, ignore_errors=True)
+
+    if global_use_cache:
+        if not os.path.exists(global_path_to_cache):
+            os.makedirs(global_path_to_cache, exist_ok=True)
+
+    logger.use_log_perf = global_use_timing
+
+
+def build_dataprovider(filename: str, use_cache: Union[bool, None] = None) -> DataProvider:
+    """
+    Build DataProvider.
+    If use_cache is True and no cached copy exists yet, create the DataProvider normally,
+    write the pickled object to file, then read the pickled object back from the cache.
+
+    :param filename: Target settings filename
+    :param use_cache: True to use a cached DataProvider object (default: False)
+    :return: DataProvider object
+    """
+    if use_cache is None:
+        use_cache = global_use_cache
+
+    dataprovider = None
+    if use_cache:
+        path_to_cached_dataprovider = os.path.join(global_path_to_cache, "dataprovider.pickle")
+
+        if not os.path.exists(path_to_cached_dataprovider):
+            try:
+                dataprovider = DataProvider(filename)
+            except Exception as ex:
+                show_exception_errors(ex, "Following errors occurred when loading settings file:")
+                print("Fatal error, stopping execution...")
+                raise ex
+
+            with open(path_to_cached_dataprovider, "wb") as fs:
+                pickle.dump(dataprovider, fs, pickle.HIGHEST_PROTOCOL)
+
+        with open(path_to_cached_dataprovider, "rb") as fs:
+            dataprovider = pickle.load(fs)
+    else:
+        try:
+            dataprovider = DataProvider(filename)
+        except Exception as ex:
+            show_exception_errors(ex, "Following errors occurred when loading settings file:")
+            print("Fatal error, stopping execution...")
+            raise ex
+
+    return dataprovider
+
+
+def build_datachecker(dataprovider: DataProvider = None, use_cache: Union[bool, None] = None) -> DataChecker:
+    """
+    Build DataChecker.
+
+    :param dataprovider: DataProvider
+    :param use_cache: True to use a cached DataChecker from file (default: False)
+    :return: DataChecker
+    """
+    if use_cache is None:
+        use_cache = global_use_cache
+
+    datachecker = None
+    if use_cache:
+        path_to_cached_datachecker = os.path.join(global_path_to_cache, "datachecker.pickle")
+        if not os.path.exists(path_to_cached_datachecker):
+            datachecker = DataChecker(dataprovider)
+
+            with open(path_to_cached_datachecker, "wb") as fs:
+                pickle.dump(datachecker, fs, pickle.HIGHEST_PROTOCOL)
+
+        with open(path_to_cached_datachecker, "rb") as fs:
+            datachecker = pickle.load(fs)
+    else:
+        datachecker = DataChecker(dataprovider)
+
+    return datachecker
+
+
+def build_and_solve_scenarios(datachecker: DataChecker = None, use_cache: Union[bool, None] = None) -> List[Scenario]:
+    """
+    Build the scenario data, check it for errors and solve the scenarios.
+
+    :param datachecker: DataChecker object
+    :param use_cache: True to use cached Scenario objects (default: False)
+    :return: List of solved Scenario objects
+    """
+    if use_cache is None:
+        use_cache = global_use_cache
+
+    scenarios = []
+    if use_cache:
+        # Check for cached scenario files
+        filenames_to_load = []
+        scenario_prefix = "scenario"
+        files_in_cache = os.listdir(global_path_to_cache)
+        for file in files_in_cache:
+            abs_path = os.path.join(global_path_to_cache, file)
+            has_prefix = file.startswith(scenario_prefix)
+            is_file = os.path.isfile(abs_path)
+            if has_prefix and is_file:
+                filenames_to_load.append(abs_path)
+
+        if not filenames_to_load:
+            # No existing scenario data; build scenarios and pickle them to files
+            try:
+                scenarios = datachecker.build_scenarios()
+            except Exception as ex:
+                show_exception_errors(ex, "Following errors occurred when building scenarios:")
+                print("Fatal error, stopping execution...")
+                raise ex
+
+            # Check for scenario build errors
+            try:
+                datachecker.check_for_errors()
+            except Exception as ex:
+                show_exception_errors(ex, "Following errors found when checking scenario errors:")
+                print("Fatal error, stopping execution...")
+                raise ex
+
+            # Solve scenarios
+            for scenario_index, scenario in enumerate(scenarios):
+                # NOTE: Baseline scenario is always the first element in the list
+                # and all the alternative scenarios (if any) come after it
+                if scenario_index == 0:
+                    # Process baseline scenario
+                    baseline_flow_solver = FlowSolver(scenario=scenario)
+                    baseline_flow_solver.solve_timesteps()
+                    scenario.flow_solver = baseline_flow_solver
+                else:
+                    # Get and copy solved scenario data from the baseline scenario flow solver
+                    baseline_scenario_data = scenarios[0].flow_solver.get_solved_scenario_data()
+                    scenario.copy_from_baseline_scenario_data(baseline_scenario_data)
+
+                    # Solve this alternative scenario's time steps
+                    scenario_flow_solver = FlowSolver(scenario=scenario)
+                    scenario_flow_solver.solve_timesteps()
+                    scenario.flow_solver = scenario_flow_solver
+
+            # Build MFA systems for the scenarios
+            for scenario in scenarios:
+                scenario.mfa_system = build_mfa_system_for_scenario(scenario)
+
+            for scenario_index, scenario in enumerate(scenarios):
+                filename = os.path.join(global_path_to_cache, "{}_{}.pickle".format(scenario_prefix, scenario_index))
+                with open(filename, "wb") as fs:
+                    pickle.dump(scenario, fs, pickle.HIGHEST_PROTOCOL)
+
+            # Rescan cache for scenario files
+            filenames_to_load = []
+            files_in_cache = os.listdir(global_path_to_cache)
+            for file in files_in_cache:
+                # Resolve each entry against the cache directory, not the current working directory
+                abs_path = os.path.join(global_path_to_cache, file)
+                if file.startswith(scenario_prefix) and os.path.isfile(abs_path):
+                    filenames_to_load.append(abs_path)
+            filenames_to_load.sort()
+
+            # The freshly built scenarios are reloaded from their pickled copies below,
+            # so drop the in-memory list to avoid duplicating them
+            scenarios = []
+
+        # Load all scenario files
+        for filename in filenames_to_load:
+            with open(filename, "rb") as fs:
+                scenario = pickle.load(fs)
+                scenarios.append(scenario)
+    else:
+        try:
+            scenarios = datachecker.build_scenarios()
+        except Exception as ex:
+            show_exception_errors(ex, "Following errors occurred when building scenarios:")
+            print("Fatal error, stopping execution...")
+            raise ex
+
+        # Check for scenario build errors
+        try:
+            datachecker.check_for_errors()
+        except Exception as ex:
+            show_exception_errors(ex, "Following errors found when checking scenario errors:")
+            print("Fatal error, stopping execution...")
+            raise ex
+
+        # Solve scenarios
+        for scenario_index, scenario in enumerate(scenarios):
+            # NOTE: Baseline scenario is always the first element in the list
+            # and all the alternative scenarios (if any) come after it
+            if scenario_index == 0:
+                # Process baseline scenario
+                baseline_flow_solver = FlowSolver(scenario=scenario)
+                baseline_flow_solver.solve_timesteps()
+                scenario.flow_solver = baseline_flow_solver
+            else:
+                # Get and copy solved scenario data from the baseline scenario flow solver
+                baseline_scenario_data = scenarios[0].flow_solver.get_solved_scenario_data()
+                scenario.copy_from_baseline_scenario_data(baseline_scenario_data)
+
+                # Solve this alternative scenario's time steps
+                scenario_flow_solver = FlowSolver(scenario=scenario, reset_evaluated_values=False)
+                scenario_flow_solver.solve_timesteps()
+                scenario.flow_solver = scenario_flow_solver
+
+        # Build MFA systems for the scenarios
+        for scenario in scenarios:
+            scenario.mfa_system = build_mfa_system_for_scenario(scenario)
+
+    return scenarios
+
+
+def build_results(filename: str = None, path_to_output_dir: Union[str, None] = None):
+    """
+    Build and solve scenarios using the settings file.
+
+    :param filename: Path to the Excel settings file
+    :param path_to_output_dir: If None, the output path from the settings file is used
+    :return: Tuple of (model parameters dict, list of Scenario objects, color definitions dict)
+    """
+    # Build DataProvider
+    log("Loading data from file '{}'...".format(filename), level="info")
+
+    start_log_perf("Loaded DataProvider" if not global_use_cache else "Loaded DataProvider (cached)")
+    log("Build DataProvider...")
+    dataprovider: DataProvider = build_dataprovider(filename)
+    stop_log_perf()
+
+    # Model parameters is a dictionary loaded from the Excel sheet named "Settings";
+    # its values are used for running the FlowSolver and setting up ODYM
+    model_params = dataprovider.get_model_params()
+
+    # If "path_to_output_dir" is given, it overrides the output path
+    # defined in the scenario settings file
+    use_path_to_output_from_settings = True
+    if path_to_output_dir:
+        use_path_to_output_from_settings = False
+        model_params[ParameterName.OutputPath] = path_to_output_dir
+
+    show_model_parameters(model_params)
+
+    # Setup output path:
+    # convert the output directory name to an absolute path and update the model parameter dictionary
+    if use_path_to_output_from_settings:
+        model_params[ParameterName.OutputPath] = os.path.abspath(
+            os.path.join(os.getcwd(), model_params[ParameterName.OutputPath]))
+
+    # **************************************************************
+    # * Step 2: Check data integrity and build data for FlowSolver *
+    # **************************************************************
+    log("Build and check errors in data...")
+    start_log_perf("Loaded DataChecker" if not global_use_cache else "Loaded DataChecker (cached)")
+    datachecker: DataChecker = build_datachecker(dataprovider)
+    stop_log_perf()
+
+    # **************************************************
+    # * Step 3: Build and solve flows in all scenarios *
+    # **************************************************
+    log("Solve scenarios...")
+    start_log_perf("Solve scenarios" if not global_use_cache else "Loaded scenarios (cached)")
+    scenarios: List[Scenario] = build_and_solve_scenarios(datachecker)
+    stop_log_perf()
+
+    # Transformation stage color definitions
+    color_definitions = {color.name: color.value for color in dataprovider.get_color_definitions()}
+
+    # Show perf logs if toggled on
+    show_log_perf_summary("build_scenarios()")
+    clear_log_perf()
+
+    return model_params, scenarios, color_definitions
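Taken together, builder.py is the caching-aware entry point for a model run: init_builder configures the cache directory, and build_results chains the DataProvider, DataChecker and scenario-solving steps. A minimal usage sketch using only the signatures shown above; the cache directory and the settings file path are hypothetical placeholders:

import os
from aiphoria.core import builder

# The first run builds everything and pickles the DataProvider, DataChecker and
# solved Scenario objects into the cache directory; later runs with use_cache=True
# load those pickles instead of rebuilding
builder.init_builder(path_to_cache=os.path.abspath(".aiphoria_cache"),
                     use_cache=True,
                     use_timing=True)
model_params, scenarios, color_definitions = builder.build_results("example_scenario.xlsx")
print("Solved {} scenario(s)".format(len(scenarios)))

Note that the cache is keyed only on file existence, not on the settings file's contents, so pass clear_cache=True to init_builder after editing the settings file to force a full rebuild.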