aiphoria 0.0.1__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aiphoria/__init__.py +59 -0
- aiphoria/core/__init__.py +55 -0
- aiphoria/core/builder.py +305 -0
- aiphoria/core/datachecker.py +1808 -0
- aiphoria/core/dataprovider.py +806 -0
- aiphoria/core/datastructures.py +1686 -0
- aiphoria/core/datavisualizer.py +431 -0
- aiphoria/core/datavisualizer_data/LICENSE +21 -0
- aiphoria/core/datavisualizer_data/datavisualizer_plotly.html +5561 -0
- aiphoria/core/datavisualizer_data/pako.min.js +2 -0
- aiphoria/core/datavisualizer_data/plotly-3.0.0.min.js +3879 -0
- aiphoria/core/flowmodifiersolver.py +1754 -0
- aiphoria/core/flowsolver.py +1472 -0
- aiphoria/core/logger.py +113 -0
- aiphoria/core/network_graph.py +136 -0
- aiphoria/core/network_graph_data/ECHARTS_LICENSE +202 -0
- aiphoria/core/network_graph_data/echarts_min.js +45 -0
- aiphoria/core/network_graph_data/network_graph.html +76 -0
- aiphoria/core/network_graph_data/network_graph.js +1391 -0
- aiphoria/core/parameters.py +269 -0
- aiphoria/core/types.py +20 -0
- aiphoria/core/utils.py +362 -0
- aiphoria/core/visualizer_parameters.py +7 -0
- aiphoria/data/example_scenario.xlsx +0 -0
- aiphoria/example.py +66 -0
- aiphoria/lib/docs/dynamic_stock.py +124 -0
- aiphoria/lib/odym/modules/ODYM_Classes.py +362 -0
- aiphoria/lib/odym/modules/ODYM_Functions.py +1299 -0
- aiphoria/lib/odym/modules/__init__.py +1 -0
- aiphoria/lib/odym/modules/dynamic_stock_model.py +808 -0
- aiphoria/lib/odym/modules/test/DSM_test_known_results.py +762 -0
- aiphoria/lib/odym/modules/test/ODYM_Classes_test_known_results.py +107 -0
- aiphoria/lib/odym/modules/test/ODYM_Functions_test_known_results.py +136 -0
- aiphoria/lib/odym/modules/test/__init__.py +2 -0
- aiphoria/runner.py +678 -0
- aiphoria-0.8.0.dist-info/METADATA +119 -0
- aiphoria-0.8.0.dist-info/RECORD +40 -0
- {aiphoria-0.0.1.dist-info → aiphoria-0.8.0.dist-info}/WHEEL +1 -1
- aiphoria-0.8.0.dist-info/licenses/LICENSE +21 -0
- aiphoria-0.0.1.dist-info/METADATA +0 -5
- aiphoria-0.0.1.dist-info/RECORD +0 -5
- {aiphoria-0.0.1.dist-info → aiphoria-0.8.0.dist-info}/top_level.txt +0 -0
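The bulk of the 0.8.0 release is the new FlowSolver class in aiphoria/core/flowsolver.py, whose full diff follows. As a rough orientation before reading the class, here is a minimal, hypothetical usage sketch based only on the public methods visible in that diff; how the Scenario object itself is built is handled elsewhere in the package and is not shown in this hunk.

# Hypothetical usage sketch based only on the FlowSolver API shown below.
# Building the Scenario object is not part of this hunk, so that step is left abstract.
from aiphoria.core.flowsolver import FlowSolver

scenario = ...  # Scenario instance produced by aiphoria's data-loading layer (not shown here)

solver = FlowSolver(scenario=scenario)
solver.solve_timesteps()  # solves every year in scenario.scenario_data.years

flows_df = solver.get_flows_as_dataframe()          # evaluated flow values per year
processes_df = solver.get_processes_as_dataframe()  # per-process inflow/outflow totals per year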
aiphoria/core/flowsolver.py
@@ -0,0 +1,1472 @@
1
|
+
import copy
|
|
2
|
+
import sys
|
|
3
|
+
from typing import List, Dict, Tuple, Union
|
|
4
|
+
import numpy as np
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import tqdm as tqdm
|
|
7
|
+
from pandas import DataFrame
|
|
8
|
+
|
|
9
|
+
from .types import FunctionType
|
|
10
|
+
from .datastructures import Process, Flow, Stock, ScenarioData, Scenario, Indicator
|
|
11
|
+
# from .flowmodifiersolver import FlowModifierSolver
|
|
12
|
+
from .parameters import ParameterName, StockDistributionType, StockDistributionParameter, ParameterScenarioType
|
|
13
|
+
from aiphoria.lib.odym.modules.dynamic_stock_model import DynamicStockModel
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Solves flows to absolute values
|
|
17
|
+
class FlowSolver(object):
|
|
18
|
+
"""
|
|
19
|
+
Solves flows to absolute values, evaluates Process in- and outflow values,
|
|
20
|
+
and handles dynamic stocks.
|
|
21
|
+
"""
|
|
22
|
+
_virtual_process_id_prefix = "VP_"
|
|
23
|
+
_virtual_flow_id_prefix = "VF_"
|
|
24
|
+
_max_iterations = 100000
|
|
25
|
+
_virtual_process_transformation_stage = "Virtual"
|
|
26
|
+
|
|
27
|
+
def __init__(self, scenario: Scenario = None, reset_evaluated_values: bool = True):
|
|
28
|
+
self._reset_evaluated_values = reset_evaluated_values
|
|
29
|
+
self._scenario = scenario
|
|
30
|
+
|
|
31
|
+
# Prioritized transformation stages
|
|
32
|
+
self._model_params = self._scenario.model_params
|
|
33
|
+
self._prioritized_locations = self._model_params[ParameterName.PrioritizeLocations]
|
|
34
|
+
self._prioritized_transformation_stages = self._model_params[ParameterName.PrioritizeTransformationStages]
|
|
35
|
+
|
|
36
|
+
# Time
|
|
37
|
+
self._year_start = self._scenario.scenario_data.start_year
|
|
38
|
+
self._year_end = self._scenario.scenario_data.end_year
|
|
39
|
+
self._years = self._scenario.scenario_data.years
|
|
40
|
+
self._year_current = self._year_start
|
|
41
|
+
self._year_prev = self._year_current
|
|
42
|
+
|
|
43
|
+
# Year to Process/Flow/Flow IDs mappings
|
|
44
|
+
self._year_to_process_id_to_process = scenario.scenario_data.year_to_process_id_to_process
|
|
45
|
+
self._year_to_process_id_to_flow_ids = scenario.scenario_data.year_to_process_id_to_flow_ids
|
|
46
|
+
self._year_to_flow_id_to_flow = scenario.scenario_data.year_to_flow_id_to_flow
|
|
47
|
+
|
|
48
|
+
# Stocks
|
|
49
|
+
self._all_stocks = self._scenario.scenario_data.stocks
|
|
50
|
+
self._process_id_to_stock = self._scenario.scenario_data.process_id_to_stock
|
|
51
|
+
|
|
52
|
+
# Unique Process IDs and Flow IDs to Process/Flow
|
|
53
|
+
self._unique_process_id_to_process = self._scenario.scenario_data.unique_process_id_to_process
|
|
54
|
+
self._unique_flow_id_to_flow = self._scenario.scenario_data.unique_flow_id_to_flow
|
|
55
|
+
|
|
56
|
+
# Virtual flows
|
|
57
|
+
self._use_virtual_flows = self._scenario.scenario_data.use_virtual_flows
|
|
58
|
+
self._virtual_flows_epsilon = self._scenario.scenario_data.virtual_flows_epsilon
|
|
59
|
+
|
|
60
|
+
# Current timestep data
|
|
61
|
+
self._current_process_id_to_process = self._year_to_process_id_to_process[self._year_current]
|
|
62
|
+
self._current_process_id_to_flow_ids = self._year_to_process_id_to_flow_ids[self._year_current]
|
|
63
|
+
self._current_flow_id_to_flow = self._year_to_flow_id_to_flow[self._year_current]
|
|
64
|
+
|
|
65
|
+
# Prepare flows for all timesteps
|
|
66
|
+
for year, flow_id_to_flow in self._year_to_flow_id_to_flow.items():
|
|
67
|
+
self._prepare_flows_for_timestep(flow_id_to_flow, year)
|
|
68
|
+
|
|
69
|
+
# Get and store indicator names from scenario.scenario_data
|
|
70
|
+
self._indicator_name_to_indicator = scenario.scenario_data.indicator_name_to_indicator
|
|
71
|
+
self._indicators = {name: indicator for name, indicator in self._indicator_name_to_indicator.items()}
|
|
72
|
+
|
|
73
|
+
# Baseline indicator name (e.g. Solid wood equivalent) and unit name (e.g. 'Mm3')
|
|
74
|
+
self._baseline_value_name = self._scenario.scenario_data.baseline_value_name
|
|
75
|
+
self._baseline_unit_name = self._scenario.scenario_data.baseline_unit_name
|
|
76
|
+
|
|
77
|
+
# Stock ID -> Baseline value DSM
|
|
78
|
+
self._stock_id_to_baseline_dsm = {}
|
|
79
|
+
|
|
80
|
+
# Stock ID -> Indicator name -> DSM
|
|
81
|
+
self._stock_id_to_indicator_name_to_dsm = {}
|
|
82
|
+
|
|
83
|
+
def get_scenario(self) -> Scenario:
|
|
84
|
+
"""
|
|
85
|
+
Get Scenario that FlowSolver is using.
|
|
86
|
+
|
|
87
|
+
:return: Scenario-object
|
|
88
|
+
"""
|
|
89
|
+
return self._scenario
|
|
90
|
+
|
|
91
|
+
def get_all_stocks(self) -> List[Stock]:
|
|
92
|
+
"""
|
|
93
|
+
Get list of all Stocks.
|
|
94
|
+
|
|
95
|
+
:return: List of all Stocks
|
|
96
|
+
"""
|
|
97
|
+
return self._all_stocks
|
|
98
|
+
|
|
99
|
+
def get_unique_processes(self) -> Dict[str, Process]:
|
|
100
|
+
"""
|
|
101
|
+
Get dictionary of all unique Process ID to Process.
|
|
102
|
+
|
|
103
|
+
:return: Dictionary (Process ID (str) -> Process)
|
|
104
|
+
"""
|
|
105
|
+
return self._unique_process_id_to_process
|
|
106
|
+
|
|
107
|
+
def get_unique_flows(self) -> Dict[str, Flow]:
|
|
108
|
+
"""
|
|
109
|
+
Get dictionary of all unique Flow ID to Flow.
|
|
110
|
+
|
|
111
|
+
:return: Dictionary (Flow ID (str) -> Flow)
|
|
112
|
+
"""
|
|
113
|
+
return self._unique_flow_id_to_flow
|
|
114
|
+
|
|
115
|
+
# Utility methods
|
|
116
|
+
def get_processes_as_dataframe(self) -> DataFrame:
|
|
117
|
+
"""
|
|
118
|
+
Get Process information as DataFrame for every year:
|
|
119
|
+
- Process ID
|
|
120
|
+
- Total inflows (baseline)
|
|
121
|
+
- Total outflows (baseline)
|
|
122
|
+
- Total inflows (indicator N)
|
|
123
|
+
- Total outflows (indicator N)
|
|
124
|
+
- Total inflows (indicator N+1)
|
|
125
|
+
- Total outflows (indicator N+1)
|
|
126
|
+
- ...
|
|
127
|
+
|
|
128
|
+
:return: DataFrame
|
|
129
|
+
"""
|
|
130
|
+
col_names = ["Year", "Process ID"]
|
|
131
|
+
col_names += ["Total inflows, {} ({})".format(self._baseline_value_name, self._baseline_unit_name)]
|
|
132
|
+
col_names += ["Total outflows, {} ({})".format(self._baseline_value_name, self._baseline_unit_name)]
|
|
133
|
+
for indicator in self.get_indicator_name_to_indicator().values():
|
|
134
|
+
col_names += ["Total inflows, {} ({})".format(indicator.name, indicator.unit)]
|
|
135
|
+
col_names += ["Total outflows, {} ({})".format(indicator.name, indicator.unit)]
|
|
136
|
+
|
|
137
|
+
rows = []
|
|
138
|
+
for year, process_id_to_process in self._year_to_process_id_to_process.items():
|
|
139
|
+
for process_id, process in process_id_to_process.items():
|
|
140
|
+
new_row = [year, process_id]
|
|
141
|
+
new_row += [self.get_process_inflows_total(process_id, year)]
|
|
142
|
+
new_row += [self.get_process_outflows_total(process_id, year)]
|
|
143
|
+
for indicator in self.get_indicator_name_to_indicator().values():
|
|
144
|
+
new_row += [self._get_process_indicator_inflows_total(process_id, indicator.name, year)]
|
|
145
|
+
new_row += [self._get_process_indicator_outflows_total(process_id, indicator.name, year)]
|
|
146
|
+
rows.append(new_row)
|
|
147
|
+
|
|
148
|
+
df = pd.DataFrame(rows, columns=col_names)
|
|
149
|
+
return df
|
|
150
|
+
|
|
151
|
+
def get_flows_as_dataframe(self) -> DataFrame:
|
|
152
|
+
"""
|
|
153
|
+
Get all Flow information for all years in DataFrame:
|
|
154
|
+
- Flow ID
|
|
155
|
+
- Source Process ID
|
|
156
|
+
- Target Process ID
|
|
157
|
+
- Baseline value (baseline unit)
|
|
158
|
+
- Indicator N value (indicator N unit)
|
|
159
|
+
- Indicator N+1 value (indicator N+1 unit)
|
|
160
|
+
- ...
|
|
161
|
+
|
|
162
|
+
:return: DataFrame
|
|
163
|
+
"""
|
|
164
|
+
col_names = ["Year", "Flow ID", "Source Process ID", "Target Process ID", "Flow share"]
|
|
165
|
+
col_names += ["{} ({})".format(self._baseline_value_name, self._baseline_unit_name)]
|
|
166
|
+
col_names += ["{} ({})".format(ind.name, ind.unit) for ind in self.get_indicator_name_to_indicator().values()]
|
|
167
|
+
|
|
168
|
+
rows = []
|
|
169
|
+
for year, flow_id_to_flow in self._year_to_flow_id_to_flow.items():
|
|
170
|
+
for flow_id, flow in flow_id_to_flow.items():
|
|
171
|
+
if not isinstance(flow, Flow):
|
|
172
|
+
continue
|
|
173
|
+
|
|
174
|
+
new_row = [year, flow_id, flow.source_process_id, flow.target_process_id, flow.evaluated_share]
|
|
175
|
+
new_row += [evaluated_value for evaluated_value in flow.get_all_evaluated_values()]
|
|
176
|
+
rows.append(new_row)
|
|
177
|
+
|
|
178
|
+
df = pd.DataFrame(rows, columns=col_names)
|
|
179
|
+
return df
|
|
180
|
+
|
|
181
|
+
def get_evaluated_flow_values_as_dataframe(self) -> DataFrame:
|
|
182
|
+
"""
|
|
183
|
+
Get baseline evaluated value for Flows for all years.
|
|
184
|
+
|
|
185
|
+
:return: DataFrame
|
|
186
|
+
"""
|
|
187
|
+
|
|
188
|
+
unique_flows = self.get_unique_flows()
|
|
189
|
+
sorted_flow_ids = sorted([flow.id for flow in unique_flows.values()], key=lambda x: x)
|
|
190
|
+
columns = ["Year"]
|
|
191
|
+
columns += [flow_id for flow_id in sorted_flow_ids]
|
|
192
|
+
|
|
193
|
+
df = pd.DataFrame(columns=columns)
|
|
194
|
+
df["Year"] = [year for year in self._years]
|
|
195
|
+
df.set_index(["Year"], inplace=True)
|
|
196
|
+
for year in df.index:
|
|
197
|
+
for flow_id in df.columns:
|
|
198
|
+
flow_value = 0.0
|
|
199
|
+
if self.has_flow(flow_id, year):
|
|
200
|
+
flow = self.get_flow(flow_id, year)
|
|
201
|
+
if not isinstance(flow, Flow):
|
|
202
|
+
pass
|
|
203
|
+
else:
|
|
204
|
+
flow_value = flow.evaluated_value
|
|
205
|
+
df.at[year, flow_id] = flow_value
|
|
206
|
+
df.reset_index(inplace=True)
|
|
207
|
+
return df
|
|
208
|
+
|
|
209
|
+
def get_process(self, process_id: str, year: int = -1) -> Process:
|
|
210
|
+
"""
|
|
211
|
+
Get Process by ID and target year.
|
|
212
|
+
|
|
213
|
+
:param process_id: Process ID (str)
|
|
214
|
+
:param year: Target year (int)
|
|
215
|
+
:return: Process (Process)
|
|
216
|
+
"""
|
|
217
|
+
if year >= 0:
|
|
218
|
+
return self._year_to_process_id_to_process[year][process_id]
|
|
219
|
+
|
|
220
|
+
return self._current_process_id_to_process[process_id]
|
|
221
|
+
|
|
222
|
+
def get_flow(self, flow_id: str, year: int = -1) -> Flow:
|
|
223
|
+
"""
|
|
224
|
+
Get Flow by ID and target year.
|
|
225
|
+
|
|
226
|
+
:param flow_id: Flow ID (str)
|
|
227
|
+
:param year: Target year (int)
|
|
228
|
+
:return: Flow (Flow)
|
|
229
|
+
"""
|
|
230
|
+
if year >= 0:
|
|
231
|
+
return self._year_to_flow_id_to_flow[year][flow_id]
|
|
232
|
+
|
|
233
|
+
return self._current_flow_id_to_flow[flow_id]
|
|
234
|
+
|
|
235
|
+
def get_stock(self, process_id: str) -> Stock:
|
|
236
|
+
"""
|
|
237
|
+
Get stock by ID.
|
|
238
|
+
NOTE: Process and stocks share the same ID
|
|
239
|
+
|
|
240
|
+
:param process_id: Target Stock ID / Process ID
|
|
241
|
+
:return: Stock (Stock)
|
|
242
|
+
"""
|
|
243
|
+
return self._process_id_to_stock[process_id]
|
|
244
|
+
|
|
245
|
+
def get_baseline_dynamic_stocks(self) -> Dict[str, DynamicStockModel]:
|
|
246
|
+
"""
|
|
247
|
+
Get dictionary of Stock ID -> baseline DynamicStockModel.
|
|
248
|
+
|
|
249
|
+
:return: Dictionary (Stock ID -> baseline DynamicStockModel)
|
|
250
|
+
"""
|
|
251
|
+
|
|
252
|
+
return self._stock_id_to_baseline_dsm
|
|
253
|
+
|
|
254
|
+
def get_indicator_dynamic_stocks(self) -> Dict[str, Dict[str, DynamicStockModel]]:
|
|
255
|
+
"""
|
|
256
|
+
Get dictionary of stock ID -> indicator name -> DynamicStockModel
|
|
257
|
+
|
|
258
|
+
:return: Dictionary (stock ID (str) -> indicator name -> DynamicStockModel)
|
|
259
|
+
"""
|
|
260
|
+
return self._stock_id_to_indicator_name_to_dsm
|
|
261
|
+
|
|
262
|
+
def get_indicator_names(self) -> List[str]:
|
|
263
|
+
"""
|
|
264
|
+
Get indicator names.
|
|
265
|
+
|
|
266
|
+
:return: Indicator names (list of strings)
|
|
267
|
+
"""
|
|
268
|
+
return list(self._indicator_name_to_indicator.keys())
|
|
269
|
+
|
|
270
|
+
def get_indicator_name_to_indicator(self) -> Dict[str, Indicator]:
|
|
271
|
+
"""
|
|
272
|
+
Get Indicator ID to Indicator mappings.
|
|
273
|
+
|
|
274
|
+
:return: Dictionary (Indicator ID (str) -> Indicator (Indicator))
|
|
275
|
+
"""
|
|
276
|
+
return self._indicator_name_to_indicator
|
|
277
|
+
|
|
278
|
+
def get_process_inflows_total(self, process_id: str, year: int = -1) -> float:
|
|
279
|
+
"""
|
|
280
|
+
Get total inflows (baseline) for Process ID.
|
|
281
|
+
|
|
282
|
+
:param process_id: Target Process ID (str)
|
|
283
|
+
:param year: Target year (int)
|
|
284
|
+
:return: Sum of all inflows' evaluated value (baseline)
|
|
285
|
+
"""
|
|
286
|
+
total = 0.0
|
|
287
|
+
inflows = self._get_process_inflows(process_id, year)
|
|
288
|
+
for flow in inflows:
|
|
289
|
+
total += flow.evaluated_value
|
|
290
|
+
return total
|
|
291
|
+
|
|
292
|
+
def get_process_outflows_total(self, process_id: str, year: int = -1) -> float:
|
|
293
|
+
"""
|
|
294
|
+
Get total outflows (baseline) for Process ID.
|
|
295
|
+
|
|
296
|
+
:param process_id: Target Process ID (str)
|
|
297
|
+
:param year: Target year (int)
|
|
298
|
+
:return: Sum of all outflows' evaluated value (baseline)
|
|
299
|
+
"""
|
|
300
|
+
total = 0.0
|
|
301
|
+
outflows = self._get_process_outflows(process_id, year)
|
|
302
|
+
for flow in outflows:
|
|
303
|
+
total += flow.evaluated_value
|
|
304
|
+
return total
|
|
305
|
+
|
|
306
|
+
def get_process_outflows_total_abs(self, process_id: str, year: int = -1) -> float:
|
|
307
|
+
"""
|
|
308
|
+
Get total absolute outflows evaluated value (baseline) for Process ID.
|
|
309
|
+
Includes only absolute outflows.
|
|
310
|
+
|
|
311
|
+
:param process_id: Target Process ID (str)
|
|
312
|
+
:param year: Target year (int)
|
|
313
|
+
:return: Sum of all absolute outflows' evaluated value (baseline)
|
|
314
|
+
"""
|
|
315
|
+
total = 0.0
|
|
316
|
+
outflows = self._get_process_outflows_abs(process_id, year)
|
|
317
|
+
for flow in outflows:
|
|
318
|
+
total += flow.evaluated_value
|
|
319
|
+
return total
|
|
320
|
+
|
|
321
|
+
def get_process_outflows_total_rel(self, process_id: str, year: int = -1) -> float:
|
|
322
|
+
"""
|
|
323
|
+
Get total relative outflows evaluated value (baseline) for Process ID.
|
|
324
|
+
Includes only relative outflows.
|
|
325
|
+
|
|
326
|
+
:param process_id: Target Process ID (str)
|
|
327
|
+
:param year: Target year (int)
|
|
328
|
+
:return: Sum of all relative outflows' evaluated value (baseline)
|
|
329
|
+
"""
|
|
330
|
+
total = 0.0
|
|
331
|
+
outflows = self._get_process_outflows_rel(process_id, year)
|
|
332
|
+
for flow in outflows:
|
|
333
|
+
total += flow.evaluated_value
|
|
334
|
+
return total
|
|
335
|
+
|
|
336
|
+
def solve_timesteps(self) -> None:
|
|
337
|
+
"""
|
|
338
|
+
Solves all timesteps.
|
|
339
|
+
"""
|
|
340
|
+
bar = tqdm.tqdm(initial=0)
|
|
341
|
+
self._create_dynamic_stocks()
|
|
342
|
+
self._apply_flow_modifiers()
|
|
343
|
+
self._remove_virtual_processes_and_flows()
|
|
344
|
+
|
|
345
|
+
for current_year in self._years:
|
|
346
|
+
bar.set_description("Solving flows for year {}/{}".format(current_year, self._year_end))
|
|
347
|
+
self._solve_timestep()
|
|
348
|
+
self._advance_timestep()
|
|
349
|
+
bar.update()
|
|
350
|
+
bar.close()
|
|
351
|
+
|
|
352
|
+
def get_year_to_process_to_flows(self) -> Dict[int, Dict[Process, Dict[str, Flow]]]:
|
|
353
|
+
"""
|
|
354
|
+
Get year to Process to Flow entry mappings
|
|
355
|
+
|
|
356
|
+
:return: Year to Process to Flow entry mappings
|
|
357
|
+
"""
|
|
358
|
+
year_to_process_to_flows = {}
|
|
359
|
+
for year, process_id_to_process in self._year_to_process_id_to_process.items():
|
|
360
|
+
year_to_process_to_flows[year] = {}
|
|
361
|
+
for process_id, process in process_id_to_process.items():
|
|
362
|
+
flow_ids = self._year_to_process_id_to_flow_ids[year][process_id]
|
|
363
|
+
|
|
364
|
+
flows_in = []
|
|
365
|
+
for flow_id in flow_ids["in"]:
|
|
366
|
+
flows_in.append(self._year_to_flow_id_to_flow[year][flow_id])
|
|
367
|
+
|
|
368
|
+
flows_out = []
|
|
369
|
+
for flow_id in flow_ids["out"]:
|
|
370
|
+
flows_out.append(self._year_to_flow_id_to_flow[year][flow_id])
|
|
371
|
+
|
|
372
|
+
year_to_process_to_flows[year][process] = {"in": flows_in, "out": flows_out}
|
|
373
|
+
|
|
374
|
+
return year_to_process_to_flows
|
|
375
|
+
|
|
376
|
+
def get_process_flows(self, process_id: str, year: int = -1) -> Dict[str, List[Flow]]:
|
|
377
|
+
"""
|
|
378
|
+
Get target Process ID flows for target year (both inflows and outflows).
|
|
379
|
+
|
|
380
|
+
:param process_id: Target Process ID (str)
|
|
381
|
+
:param year: Target year (int)
|
|
382
|
+
:return: Dictionary (keys: "Inflows", "Outflows"). Key points to List of Flows
|
|
383
|
+
"""
|
|
384
|
+
process_inflows = self._get_process_inflows(process_id, year)
|
|
385
|
+
process_outflows = self._get_process_outflows(process_id, year)
|
|
386
|
+
return {"Inflows": process_inflows, "Outflows": process_outflows}
|
|
387
|
+
|
|
388
|
+
def get_process_inflows(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
389
|
+
"""
|
|
390
|
+
Get target Process ID inflows.
|
|
391
|
+
|
|
392
|
+
:param process_id: Target Process ID (str)
|
|
393
|
+
:param year: Target year (int)
|
|
394
|
+
:return: List of inflows
|
|
395
|
+
"""
|
|
396
|
+
return self._get_process_inflows(process_id, year)
|
|
397
|
+
|
|
398
|
+
def get_process_outflows(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
399
|
+
"""
|
|
400
|
+
Get target Process ID outflows.
|
|
401
|
+
|
|
402
|
+
:param process_id: Target Process ID (str)
|
|
403
|
+
:param year: Target year (int)
|
|
404
|
+
:return: List of outflows
|
|
405
|
+
"""
|
|
406
|
+
return self._get_process_outflows(process_id, year)
|
|
407
|
+
|
|
408
|
+
def is_root_process(self, process_id: str, year: int = -1) -> bool:
|
|
409
|
+
"""
|
|
410
|
+
Check if Process has no inflows at target year.
|
|
411
|
+
|
|
412
|
+
:param process_id: Target Process ID (str)
|
|
413
|
+
:param year: Target year (int)
|
|
414
|
+
:return: True if Process has no inflows, False otherwise
|
|
415
|
+
"""
|
|
416
|
+
return len(self._get_process_inflows(process_id, year)) == 0
|
|
417
|
+
|
|
418
|
+
def is_leaf_process(self, process_id: str, year: int = -1) -> bool:
|
|
419
|
+
"""
|
|
420
|
+
Check if Process ID has no outflows at target year.
|
|
421
|
+
|
|
422
|
+
:param process_id: Target Process ID (str)
|
|
423
|
+
:param year: Target year (int)
|
|
424
|
+
:return: True if Process has no outflows, False otherwise
|
|
425
|
+
"""
|
|
426
|
+
return len(self._get_process_outflows(process_id, year)) == 0
|
|
427
|
+
|
|
428
|
+
def is_all_process_inflows_evaluated(self, process_id: str, year: int = -1) -> bool:
|
|
429
|
+
"""
|
|
430
|
+
Check if all inflows to target Process ID are evaluated at the target year.
|
|
431
|
+
|
|
432
|
+
:param process_id: Target Process ID (str)
|
|
433
|
+
:param year: Target year (int)
|
|
434
|
+
:return: True if all inflows are evaluated, False otherwise.
|
|
435
|
+
"""
|
|
436
|
+
inflows = self._get_process_inflows(process_id, year)
|
|
437
|
+
return all([flow.is_evaluated for flow in inflows])
|
|
438
|
+
|
|
439
|
+
def has_flow(self, flow_id: str, year: int = -1) -> bool:
|
|
440
|
+
"""
|
|
441
|
+
Check if Flow with ID exists at target year.
|
|
442
|
+
If year is not provided then internally uses the current timestep year inside FlowSolver.
|
|
443
|
+
|
|
444
|
+
:param flow_id: Flow ID (str)
|
|
445
|
+
:param year: Target year (int)
|
|
446
|
+
:return: True if Flow with ID exists for year, False otherwise.
|
|
447
|
+
"""
|
|
448
|
+
|
|
449
|
+
if year >= 0:
|
|
450
|
+
return flow_id in self._year_to_flow_id_to_flow[year]
|
|
451
|
+
|
|
452
|
+
return flow_id in self._current_flow_id_to_flow
|
|
453
|
+
|
|
454
|
+
def has_process(self, process_id: str, year: int = -1) -> bool:
|
|
455
|
+
"""
|
|
456
|
+
Check if Process ID exists at target year.
|
|
457
|
+
|
|
458
|
+
:param process_id: Process ID
|
|
459
|
+
:param year: Selected year. If not defined then uses the current year inside FlowSolver.
|
|
460
|
+
:return: True if Process with ID exists for year, False otherwise.
|
|
461
|
+
"""
|
|
462
|
+
if year >= 0:
|
|
463
|
+
return process_id in self._year_to_process_id_to_process[year]
|
|
464
|
+
|
|
465
|
+
return process_id in self._current_process_id_to_process
|
|
466
|
+
|
|
467
|
+
def accumulate_dynamic_stock_inflows(self, dsm: DynamicStockModel, total_inflows: float, year: int = -1) -> None:
|
|
468
|
+
"""
|
|
469
|
+
Update and accumulate inflows to DynamicStockModel.
|
|
470
|
+
|
|
471
|
+
:param dsm: Target DynamicStockModel
|
|
472
|
+
:param total_inflows: Total inflows for the stock (float)
|
|
473
|
+
:param year: Target year (int)
|
|
474
|
+
:return: None
|
|
475
|
+
"""
|
|
476
|
+
|
|
477
|
+
year_index = self._years.index(year)
|
|
478
|
+
|
|
479
|
+
# Resetting some DynamicStockModel properties are needed to make
|
|
480
|
+
# timestep stock accumulation and other calculations work
|
|
481
|
+
dsm.i[year_index] = total_inflows
|
|
482
|
+
|
|
483
|
+
# Recalculate stock by cohort
|
|
484
|
+
dsm.s_c = None
|
|
485
|
+
dsm.compute_s_c_inflow_driven()
|
|
486
|
+
|
|
487
|
+
# Recalculate outflow by cohort
|
|
488
|
+
dsm.o_c = None
|
|
489
|
+
dsm.compute_o_c_from_s_c()
|
|
490
|
+
|
|
491
|
+
# Recalculate stock total
|
|
492
|
+
dsm.s = None
|
|
493
|
+
dsm.compute_stock_total()
|
|
494
|
+
|
|
495
|
+
# Get stock total
|
|
496
|
+
dsm.compute_stock_change()
|
|
497
|
+
|
|
498
|
+
# Recalculate stock outflow
|
|
499
|
+
dsm.o = None
|
|
500
|
+
dsm.compute_outflow_total()
|
|
501
|
+
|
|
502
|
+
def get_year_range(self) -> List[int]:
|
|
503
|
+
"""
|
|
504
|
+
Get years
|
|
505
|
+
|
|
506
|
+
:return: List of years (integer)
|
|
507
|
+
"""
|
|
508
|
+
return self._years
|
|
509
|
+
|
|
510
|
+
def clamp_flow_values_below_zero(self) -> None:
|
|
511
|
+
"""
|
|
512
|
+
Clamp all flows to minimum of 0.0 if flow value is negative.
|
|
513
|
+
"""
|
|
514
|
+
# NOTE: Clamp all flows to minimum of 0.0 to introduce virtual flows
|
|
515
|
+
for year, flow_id_to_flow in self._year_to_flow_id_to_flow.items():
|
|
516
|
+
for flow_id, flow in flow_id_to_flow.items():
|
|
517
|
+
if flow.value < 0.0:
|
|
518
|
+
flow.value = 0.0
|
|
519
|
+
|
|
520
|
+
if flow.evaluated_value < 0.0:
|
|
521
|
+
flow.evaluated_value = 0.0
|
|
522
|
+
|
|
523
|
+
def _get_year_to_process_id_to_process(self) -> Dict[int, Dict[str, Process]]:
|
|
524
|
+
return self._year_to_process_id_to_process
|
|
525
|
+
|
|
526
|
+
def _get_current_year_flow_id_to_flow(self) -> Dict[str, Flow]:
|
|
527
|
+
"""
|
|
528
|
+
Get current year flow ID to Flow mappings.
|
|
529
|
+
|
|
530
|
+
:return: Dictionary (Flow ID -> Flow)
|
|
531
|
+
"""
|
|
532
|
+
return self._year_to_flow_id_to_flow[self._year_current]
|
|
533
|
+
|
|
534
|
+
def _get_current_year_process_id_to_process(self) -> Dict[str, Process]:
|
|
535
|
+
"""
|
|
536
|
+
Get current year process ID to Process mappings.
|
|
537
|
+
|
|
538
|
+
:return: Dictionary (Process ID -> Process)
|
|
539
|
+
"""
|
|
540
|
+
return self._year_to_process_id_to_process[self._year_current]
|
|
541
|
+
|
|
542
|
+
def _get_current_year_process_id_to_to_flow_ids(self) -> Dict[str, Dict[str, List[str]]]:
|
|
543
|
+
"""
|
|
544
|
+
Get current year Process ID to Flow ID mappings.
|
|
545
|
+
|
|
546
|
+
:return: Dictionary (Process ID -> Dictionary (keys "in", "out") -> List of Flow IDs)
|
|
547
|
+
"""
|
|
548
|
+
return self._year_to_process_id_to_flow_ids[self._year_current]
|
|
549
|
+
|
|
550
|
+
def _get_year_to_flow_id_to_flow(self) -> Dict[int, Dict[str, Flow]]:
|
|
551
|
+
"""
|
|
552
|
+
Get year to Flow ID to Flow mappings
|
|
553
|
+
|
|
554
|
+
:return: Dictionary (Year -> Flow ID -> Flow)
|
|
555
|
+
"""
|
|
556
|
+
return self._year_to_flow_id_to_flow
|
|
557
|
+
|
|
558
|
+
def _get_process_inflow_ids(self, process_id: str, year: int = -1) -> List[str]:
|
|
559
|
+
"""
|
|
560
|
+
Get list of inflow IDs to Process for target year.
|
|
561
|
+
If year is not provided then uses the current year.
|
|
562
|
+
|
|
563
|
+
:param process_id: Target Process ID
|
|
564
|
+
:param year: Target year
|
|
565
|
+
|
|
566
|
+
:return: List of inflow IDs (Flow)
|
|
567
|
+
"""
|
|
568
|
+
result = []
|
|
569
|
+
if year >= 0:
|
|
570
|
+
result = self._year_to_process_id_to_flow_ids[year][process_id]["in"]
|
|
571
|
+
else:
|
|
572
|
+
result = self._current_process_id_to_flow_ids[process_id]["in"]
|
|
573
|
+
|
|
574
|
+
# If year -> process ID does not exist, return empty array
|
|
575
|
+
if not result:
|
|
576
|
+
result = []
|
|
577
|
+
|
|
578
|
+
return result
|
|
579
|
+
|
|
580
|
+
def _get_process_outflow_ids(self, process_id: str, year: int = -1) -> List[str]:
|
|
581
|
+
"""
|
|
582
|
+
Get list of outflow IDs from Process for target year.
|
|
583
|
+
If year is not provided then uses the current year.
|
|
584
|
+
|
|
585
|
+
:param process_id: Target Process ID
|
|
586
|
+
:param year: Target year
|
|
587
|
+
|
|
588
|
+
:return: List of outflow IDs (Flow)
|
|
589
|
+
"""
|
|
590
|
+
|
|
591
|
+
result = []
|
|
592
|
+
if year >= 0:
|
|
593
|
+
result = self._year_to_process_id_to_flow_ids[year][process_id]["out"]
|
|
594
|
+
else:
|
|
595
|
+
result = self._current_process_id_to_flow_ids[process_id]["out"]
|
|
596
|
+
|
|
597
|
+
if not result:
|
|
598
|
+
result = []
|
|
599
|
+
return result
|
|
600
|
+
|
|
601
|
+
def _get_process_inflows(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
602
|
+
"""
|
|
603
|
+
Get list of all inflows for Process ID in target year.
|
|
604
|
+
|
|
605
|
+
:param process_id: Target Process ID
|
|
606
|
+
:param year: Target year
|
|
607
|
+
:return: List of Flows
|
|
608
|
+
"""
|
|
609
|
+
# Get list of process inflows for current year
|
|
610
|
+
flows = []
|
|
611
|
+
inflow_ids = self._get_process_inflow_ids(process_id, year)
|
|
612
|
+
for flow_id in inflow_ids:
|
|
613
|
+
flows.append(self.get_flow(flow_id, year))
|
|
614
|
+
return flows
|
|
615
|
+
|
|
616
|
+
# Get list of outflows (DataFlows)
|
|
617
|
+
def _get_process_outflows(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
618
|
+
"""
|
|
619
|
+
Get list of Process outflows for the target year.
|
|
620
|
+
|
|
621
|
+
:param process_id: Target Process ID
|
|
622
|
+
:param year: Target year
|
|
623
|
+
:return: List of Flows
|
|
624
|
+
"""
|
|
625
|
+
# Get list of outflows for current year
|
|
626
|
+
flows = []
|
|
627
|
+
outflow_ids = self._get_process_outflow_ids(process_id, year)
|
|
628
|
+
for flow_id in outflow_ids:
|
|
629
|
+
flow = self.get_flow(flow_id, year)
|
|
630
|
+
flows.append(flow)
|
|
631
|
+
return flows
|
|
632
|
+
|
|
633
|
+
def _get_process_outflows_abs(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
634
|
+
"""
|
|
635
|
+
Get list of absolute outflows from Process for the target year.
|
|
636
|
+
|
|
637
|
+
:param process_id: Target Process ID
|
|
638
|
+
:param year: Target year
|
|
639
|
+
:return: List of absolute outflows (Flow)
|
|
640
|
+
"""
|
|
641
|
+
outflows_abs = []
|
|
642
|
+
flows = self._get_process_outflows(process_id, year)
|
|
643
|
+
for flow in flows:
|
|
644
|
+
if flow.is_unit_absolute_value:
|
|
645
|
+
outflows_abs.append(flow)
|
|
646
|
+
return outflows_abs
|
|
647
|
+
|
|
648
|
+
def _get_process_outflows_rel(self, process_id: str, year: int = -1) -> List[Flow]:
|
|
649
|
+
"""
|
|
650
|
+
Get list of relative outflows from Process for the target year.
|
|
651
|
+
|
|
652
|
+
:param process_id: Target Process ID
|
|
653
|
+
:param year: Target year
|
|
654
|
+
:return: List of relative outflows (Flow)
|
|
655
|
+
"""
|
|
656
|
+
|
|
657
|
+
outflows_rel = []
|
|
658
|
+
flows = self._get_process_outflows(process_id, year)
|
|
659
|
+
for flow in flows:
|
|
660
|
+
if not flow.is_unit_absolute_value:
|
|
661
|
+
outflows_rel.append(flow)
|
|
662
|
+
return outflows_rel
|
|
663
|
+
|
|
664
|
+
def _get_process_indicator_inflows_total(self, process_id: str, indicator_name: str, year: int = -1) -> float:
|
|
665
|
+
"""
|
|
666
|
+
Get total evaluated inflows for Indicator for Process ID at target year.
|
|
667
|
+
|
|
668
|
+
:param process_id: Target Process ID
|
|
669
|
+
:param indicator_name: Target Indicator name
|
|
670
|
+
:param year: Target year
|
|
671
|
+
:return: Total inflows of indicator name (float)
|
|
672
|
+
"""
|
|
673
|
+
total = 0.0
|
|
674
|
+
flows = self._get_process_inflows(process_id, year)
|
|
675
|
+
for flow in flows:
|
|
676
|
+
total += flow.get_evaluated_value_for_indicator(indicator_name)
|
|
677
|
+
return total
|
|
678
|
+
|
|
679
|
+
def _get_process_indicator_outflows_total(self, process_id: str, indicator_name: str, year: int = -1) -> float:
|
|
680
|
+
"""
|
|
681
|
+
Get total evaluated outflows for Indicator for Process ID at target year.
|
|
682
|
+
|
|
683
|
+
:param process_id: Target Process ID
|
|
684
|
+
:param indicator_name: Target Indicator name
|
|
685
|
+
:param year: Target year
|
|
686
|
+
:return: Total outflows of indicator name (float)
|
|
687
|
+
"""
|
|
688
|
+
|
|
689
|
+
total = 0.0
|
|
690
|
+
flows = self._get_process_outflows(process_id, year)
|
|
691
|
+
for flow in flows:
|
|
692
|
+
total += flow.get_evaluated_value_for_indicator(indicator_name)
|
|
693
|
+
return total
|
|
694
|
+
|
|
695
|
+
def _prepare_flows_for_timestep(self, flow_id_to_flow: Dict[str, Flow], year: int):
|
|
696
|
+
"""
|
|
697
|
+
Prepare flows for timestep:
|
|
698
|
+
- Mark all absolute flows as evaluated and set flow.value to flow.evaluated_value
|
|
699
|
+
- Normalize all relative flow values from [0%, 100%] range to [0, 1] range
|
|
700
|
+
- Mark all flows as prioritized that have target process in prioritized location or transformation stage
|
|
701
|
+
|
|
702
|
+
:param flow_id_to_flow: Dictionary (Flow ID to Flow)
|
|
703
|
+
"""
|
|
704
|
+
for flow_id, flow in flow_id_to_flow.items():
|
|
705
|
+
if not isinstance(flow, Flow):
|
|
706
|
+
continue
|
|
707
|
+
|
|
708
|
+
if flow.is_unit_absolute_value:
|
|
709
|
+
flow.is_evaluated = True
|
|
710
|
+
flow.evaluated_share = 1.0
|
|
711
|
+
flow.evaluated_value = flow.value
|
|
712
|
+
flow.evaluate_indicator_values_from_baseline_value()
|
|
713
|
+
else:
|
|
714
|
+
# Normalize relative flow value from [0, 100] % range to 0 - 1 range
|
|
715
|
+
if self._reset_evaluated_values:
|
|
716
|
+
flow.is_evaluated = False
|
|
717
|
+
flow.evaluated_share = flow.value / 100.0
|
|
718
|
+
flow.evaluated_value = 0.0
|
|
719
|
+
else:
|
|
720
|
+
flow.is_evaluated = False
|
|
721
|
+
flow.evaluated_share = flow.value / 100.0
|
|
722
|
+
|
|
723
|
+
# Mark flow prioritized
|
|
724
|
+
process = self.get_process(flow.target_process_id, year)
|
|
725
|
+
if process.location in self._prioritized_locations:
|
|
726
|
+
flow.is_prioritized = True
|
|
727
|
+
|
|
728
|
+
if process.transformation_stage in self._prioritized_transformation_stages:
|
|
729
|
+
flow.is_prioritized = True
|
|
730
|
+
|
|
731
|
+
def _evaluate_process(self, process_id: str, year: int) -> tuple[bool, List]:
|
|
732
|
+
"""
|
|
733
|
+
Evaluate Process and accumulate inflows to DynamicStockModels.
|
|
734
|
+
|
|
735
|
+
:param process_id: Target Process ID
|
|
736
|
+
:param year: Target year
|
|
737
|
+
:return: Tuple (is all inflows evaluated, list of outflows to check)
|
|
738
|
+
"""
|
|
739
|
+
is_evaluated = False
|
|
740
|
+
outflows = self._get_process_outflows(process_id, year)
|
|
741
|
+
|
|
742
|
+
# Root process should have only absolute outflow
|
|
743
|
+
if self.is_root_process(process_id, year):
|
|
744
|
+
is_evaluated = True
|
|
745
|
+
return is_evaluated, outflows
|
|
746
|
+
|
|
747
|
+
# Distribute outflows (stock or direct) only if all the inflows are already evaluated
|
|
748
|
+
if self.is_all_process_inflows_evaluated(process_id, year):
|
|
749
|
+
# Total baseline inflows
|
|
750
|
+
total_inflows = self.get_process_inflows_total(process_id, year)
|
|
751
|
+
if process_id in self.get_baseline_dynamic_stocks():
|
|
752
|
+
# All inflows are evaluated but process has stocks
|
|
753
|
+
|
|
754
|
+
# Flow prioritization:
|
|
755
|
+
# Ignore inflow amount to stock for outflows that are prioritized.
|
|
756
|
+
total_outflows_prioritized = 0.0
|
|
757
|
+
prioritized_outflows = {}
|
|
758
|
+
for flow in outflows:
|
|
759
|
+
if not flow.is_prioritized:
|
|
760
|
+
continue
|
|
761
|
+
|
|
762
|
+
if not flow.is_unit_absolute_value:
|
|
763
|
+
raise Exception("Relative flow as prioritized flow!")
|
|
764
|
+
|
|
765
|
+
flow.is_evaluated = True
|
|
766
|
+
flow.evaluated_value = flow.value
|
|
767
|
+
prioritized_outflows[flow.id] = flow
|
|
768
|
+
total_outflows_prioritized += flow.evaluated_value
|
|
769
|
+
|
|
770
|
+
if total_outflows_prioritized > total_inflows:
|
|
771
|
+
s = "Not enough inflows for prioritized outflows at process '{}' in year {}".format(
|
|
772
|
+
process_id, year)
|
|
773
|
+
sys.stdout.flush()
|
|
774
|
+
raise Exception(s)
|
|
775
|
+
|
|
776
|
+
# Reduce total inflows to baseline stock by total prioritized outflows
|
|
777
|
+
total_inflows_to_stock = total_inflows - total_outflows_prioritized
|
|
778
|
+
|
|
779
|
+
# Update baseline DSM
|
|
780
|
+
baseline_dsm = self.get_baseline_dynamic_stocks()[process_id]
|
|
781
|
+
self.accumulate_dynamic_stock_inflows(baseline_dsm, total_inflows_to_stock, year)
|
|
782
|
+
|
|
783
|
+
# Update stock inflows to indicator DSMs
|
|
784
|
+
indicator_dynamic_stocks = self.get_indicator_dynamic_stocks()
|
|
785
|
+
if process_id in indicator_dynamic_stocks:
|
|
786
|
+
inflows = self.get_process_inflows(process_id)
|
|
787
|
+
indicator_name_to_dsm = indicator_dynamic_stocks[process_id]
|
|
788
|
+
for indicator_name, indicator_dsm in indicator_name_to_dsm.items():
|
|
789
|
+
# Flow evaluated indicator values are based on the actual inflows to process with stock
|
|
790
|
+
# but with prioritized flows these values do not include the reduction of the prioritized flow.
|
|
791
|
+
# Fix this by...
|
|
792
|
+
# - calculating how much each inflow contributes to the original total inflows to get
|
|
793
|
+
# correction factor)
|
|
794
|
+
# - Multiply total_inflows_to_stock by this factor to get correct value how much
|
|
795
|
+
# flow indicator contributes to the indicator stock
|
|
796
|
+
total_indicator_inflows_to_stock = 0.0
|
|
797
|
+
if total_inflows_to_stock > 0.0:
|
|
798
|
+
for flow in inflows:
|
|
799
|
+
evaluated_indicator_value = flow.get_evaluated_value_for_indicator(indicator_name)
|
|
800
|
+
correction_factor = evaluated_indicator_value / total_inflows
|
|
801
|
+
corrected_flow_value = correction_factor * total_inflows_to_stock
|
|
802
|
+
total_indicator_inflows_to_stock += corrected_flow_value
|
|
803
|
+
|
|
804
|
+
self.accumulate_dynamic_stock_inflows(indicator_dsm, total_indicator_inflows_to_stock, year)
|
|
805
|
+
|
|
806
|
+
# Distribute baseline total outflow values
|
|
807
|
+
baseline_stock_outflow = self._get_dynamic_stock_outflow_value(baseline_dsm, year)
|
|
808
|
+
|
|
809
|
+
# Check that if process has absolute outflow then outflow value must be
|
|
810
|
+
# less than stock outflow. If absolute outflow is greater than stock outflow
|
|
811
|
+
# then there is user error with the data
|
|
812
|
+
outflows_abs = self._get_process_outflows_abs(process_id, year)
|
|
813
|
+
outflows_rel = self._get_process_outflows_rel(process_id, year)
|
|
814
|
+
|
|
815
|
+
# Get all outflows except prioritized outflows
|
|
816
|
+
total_outflows_abs = np.sum([flow.evaluated_value for flow in outflows_abs if not flow.is_prioritized])
|
|
817
|
+
total_outflows_rel = baseline_stock_outflow - total_outflows_abs
|
|
818
|
+
if total_outflows_rel < 0.0:
|
|
819
|
+
# This is error: Total absolute outflows are greater than stock outflow.
|
|
820
|
+
# It means that there is not enough flows to distribute between remaining
|
|
821
|
+
# relative outflows
|
|
822
|
+
s = "Process {}: stock outflow ({:.3f}) is less than sum of absolute outflows ({:.3f}) in year {}!".format(
|
|
823
|
+
process_id, baseline_stock_outflow, total_outflows_rel, year)
|
|
824
|
+
raise Exception(s)
|
|
825
|
+
|
|
826
|
+
# total_outflows_rel is the remaining outflows to be distributed between all relative outflows
|
|
827
|
+
for flow in outflows_rel:
|
|
828
|
+
flow.is_evaluated = True
|
|
829
|
+
flow.evaluated_value = flow.evaluated_share * total_outflows_rel
|
|
830
|
+
flow.evaluate_indicator_values_from_baseline_value()
|
|
831
|
+
|
|
832
|
+
else:
|
|
833
|
+
# All inflows are evaluated but the current process does not have stocks
|
|
834
|
+
outflows_abs = self._get_process_outflows_abs(process_id, year)
|
|
835
|
+
outflows_rel = self._get_process_outflows_rel(process_id, year)
|
|
836
|
+
total_outflows_abs = np.sum([flow.evaluated_value for flow in outflows_abs])
|
|
837
|
+
|
|
838
|
+
# Ignore root and leaf processes because those have zero inflows and zero outflows
|
|
839
|
+
is_root = self.is_root_process(process_id)
|
|
840
|
+
is_leaf = self.is_leaf_process(process_id)
|
|
841
|
+
|
|
842
|
+
# Check that virtual flows are actually needed
|
|
843
|
+
diff = abs(total_inflows - total_outflows_abs)
|
|
844
|
+
need_virtual_flows = total_inflows < total_outflows_abs and (diff > self._virtual_flows_epsilon)
|
|
845
|
+
if (not is_root and not is_leaf) and total_inflows < total_outflows_abs:
|
|
846
|
+
if self._use_virtual_flows and need_virtual_flows:
|
|
847
|
+
# Create new virtual inflow and new virtual process where flow comes from
|
|
848
|
+
diff = total_inflows - total_outflows_abs
|
|
849
|
+
process = self.get_process(process_id, year)
|
|
850
|
+
v_process = self._create_virtual_process_ex(process)
|
|
851
|
+
v_flow = self._create_virtual_flow_ex(v_process, process, abs(diff))
|
|
852
|
+
v_flow.evaluate_indicator_values_from_baseline_value()
|
|
853
|
+
|
|
854
|
+
# Create virtual Flows and Processes to current year data
|
|
855
|
+
self._year_to_process_id_to_process[year][v_process.id] = v_process
|
|
856
|
+
self._year_to_process_id_to_flow_ids[year][v_process.id] = {"in": [], "out": []}
|
|
857
|
+
self._unique_process_id_to_process[v_process.id] = v_process
|
|
858
|
+
|
|
859
|
+
self._year_to_flow_id_to_flow[year][v_flow.id] = v_flow
|
|
860
|
+
self._year_to_process_id_to_flow_ids[year][v_flow.target_process_id]["in"].append(v_flow.id)
|
|
861
|
+
self._year_to_process_id_to_flow_ids[year][v_flow.source_process_id]["out"].append(v_flow.id)
|
|
862
|
+
self._unique_flow_id_to_flow[v_flow.id] = v_flow
|
|
863
|
+
|
|
864
|
+
# Recalculate total_inflows again
|
|
865
|
+
total_inflows = self.get_process_inflows_total(process_id)
|
|
866
|
+
|
|
867
|
+
# Remaining outflows to be distributed between all relative outflows
|
|
868
|
+
total_outflows_rel = total_inflows - total_outflows_abs
|
|
869
|
+
for flow in outflows_rel:
|
|
870
|
+
flow.is_evaluated = True
|
|
871
|
+
flow.evaluated_value = flow.evaluated_share * total_outflows_rel
|
|
872
|
+
flow.evaluate_indicator_values_from_baseline_value()
|
|
873
|
+
|
|
874
|
+
is_evaluated = True
|
|
875
|
+
return is_evaluated, outflows
|
|
876
|
+
|
|
877
|
+
for flow in outflows:
|
|
878
|
+
if flow.is_unit_absolute_value:
|
|
879
|
+
flow.is_evaluated = True
|
|
880
|
+
|
|
881
|
+
return is_evaluated, outflows
|
|
882
|
+
|
|
883
|
+
def _solve_timestep(self) -> None:
|
|
884
|
+
"""
|
|
885
|
+
Solve current timestep.
|
|
886
|
+
"""
|
|
887
|
+
self._current_flow_id_to_flow = self._year_to_flow_id_to_flow[self._year_current]
|
|
888
|
+
self._current_process_id_to_flow_ids = self._year_to_process_id_to_flow_ids[self._year_current]
|
|
889
|
+
self._current_process_id_to_process = self._year_to_process_id_to_process[self._year_current]
|
|
890
|
+
|
|
891
|
+
# Mark all absolute flows as evaluated at the start of each timestep and also
|
|
892
|
+
# mark all flows that have target process ID in prioritized transform stage as prioritized
|
|
893
|
+
self._prepare_flows_for_timestep(self._current_flow_id_to_flow, self._year_current)
|
|
894
|
+
|
|
895
|
+
# Each year evaluate dynamic stock outflows and related outflows as evaluated
|
|
896
|
+
# NOTE: All outflows from process with stocks are initialized as evaluated relative flows
|
|
897
|
+
# Without this mechanism the relative outflows from stocks are not possible, and it
|
|
898
|
+
# would prevent in some cases the whole evaluation of scenarios with stocks.
|
|
899
|
+
self._evaluate_dynamic_stock_outflows(self._year_current)
|
|
900
|
+
|
|
901
|
+
# Add all root processes (= processes with no inflows) to unvisited list
|
|
902
|
+
unevaluated_process_ids = []
|
|
903
|
+
evaluated_process_ids = []
|
|
904
|
+
current_year_process_ids = list(self._current_process_id_to_process.keys())
|
|
905
|
+
for process_id in current_year_process_ids:
|
|
906
|
+
inflows = self._get_process_inflows(process_id, year=self._year_current)
|
|
907
|
+
if not inflows:
|
|
908
|
+
unevaluated_process_ids.append(process_id)
|
|
909
|
+
|
|
910
|
+
# Process flow value propagation until all inflows to processes are calculated
|
|
911
|
+
current_iteration = 0
|
|
912
|
+
while unevaluated_process_ids:
|
|
913
|
+
process_id = unevaluated_process_ids.pop(0)
|
|
914
|
+
if process_id in evaluated_process_ids:
|
|
915
|
+
continue
|
|
916
|
+
|
|
917
|
+
is_evaluated, outflows = self._evaluate_process(process_id, self._year_current)
|
|
918
|
+
if is_evaluated:
|
|
919
|
+
evaluated_process_ids.append(process_id)
|
|
920
|
+
for flow in outflows:
|
|
921
|
+
target_process_id = flow.target_process_id
|
|
922
|
+
if target_process_id not in unevaluated_process_ids:
|
|
923
|
+
unevaluated_process_ids.insert(0, target_process_id)
|
|
924
|
+
|
|
925
|
+
else:
|
|
926
|
+
# Check all outflow target process ids
|
|
927
|
+
for flow in outflows:
|
|
928
|
+
target_process_id = flow.target_process_id
|
|
929
|
+
if target_process_id not in unevaluated_process_ids:
|
|
930
|
+
unevaluated_process_ids.insert(0, target_process_id)
|
|
931
|
+
|
|
932
|
+
# Add this process_id back to unevaluated list
|
|
933
|
+
if process_id not in unevaluated_process_ids:
|
|
934
|
+
unevaluated_process_ids.append(process_id)
|
|
935
|
+
|
|
936
|
+
# NOTE: Break out of infinite loop if running over big amount of iterations
|
|
937
|
+
# This will happen if graph has loops that contain only relative flows between them
|
|
938
|
+
current_iteration += 1
|
|
939
|
+
if current_iteration >= self._max_iterations:
|
|
940
|
+
print("Encountered processes that could not be evaluated in year {}:".format(self._year_current))
|
|
941
|
+
print("The following processes have no inflows and have ONLY relative outflows (= error in data)")
|
|
942
|
+
print("Possible ways to to fix:")
|
|
943
|
+
print("- Introducing a valid inflow to the process")
|
|
944
|
+
print("- Ensure that a valid inflow is present for the process in the model's initial year")
|
|
945
|
+
print("")
|
|
946
|
+
|
|
947
|
+
# Get list of unevaluated flows
|
|
948
|
+
# The possible process causing the error is probably one of the flows' source processes
|
|
949
|
+
unevaluated_inflows = []
|
|
950
|
+
for p_id in current_year_process_ids:
|
|
951
|
+
for flow in self._get_process_inflows(p_id, self._year_current):
|
|
952
|
+
if not flow.is_evaluated:
|
|
953
|
+
unevaluated_inflows.append(flow)
|
|
954
|
+
|
|
955
|
+
# Check all flow source processes and check for problematic processes:
|
|
956
|
+
# Invalid process means:
|
|
957
|
+
# - no inflows
|
|
958
|
+
# - only relative outflows
|
|
959
|
+
# This is definitely error in data
|
|
960
|
+
unique_process_ids = set()
|
|
961
|
+
for flow in unevaluated_inflows:
|
|
962
|
+
source_process_inflows = self._get_process_inflows(flow.source_process_id, self._year_current)
|
|
963
|
+
source_process_outflows = self._get_process_outflows(flow.source_process_id, self._year_current)
|
|
964
|
+
has_no_inflows = len(source_process_inflows) == 0
|
|
965
|
+
has_only_relative_outflows = len(source_process_outflows) > 0 and all(
|
|
966
|
+
[not flow.is_unit_absolute_value for flow in source_process_outflows])
|
|
967
|
+
|
|
968
|
+
if has_no_inflows and has_only_relative_outflows:
|
|
969
|
+
unique_process_ids.add(flow.source_process_id)
|
|
970
|
+
|
|
971
|
+
print("List of invalid process IDs:")
|
|
972
|
+
for source_process_id in unique_process_ids:
|
|
973
|
+
print("\t{}".format(source_process_id))
|
|
974
|
+
print("")
|
|
975
|
+
|
|
976
|
+
print("List of unevaluated flows:")
|
|
977
|
+
for flow in unevaluated_inflows:
|
|
978
|
+
print("\t{}".format(flow))
|
|
979
|
+
|
|
980
|
+
raise Exception("Unsolvable loop detected")
|
|
981
|
+
|
|
982
|
+
# Check for unreported inflows or outflows (= process mass balance != 0)
|
|
983
|
+
# and create virtual flows to balance out those processes.
|
|
984
|
+
# Epsilon is maximum allowed difference of process inputs and outputs before creating virtual flow
|
|
985
|
+
if self._use_virtual_flows:
|
|
986
|
+
self._create_virtual_flows(self._year_current, self._virtual_flows_epsilon)
|
|
987
|
+
|
|
988
|
+
# Show summary if virtual processes and flows created this year
|
|
989
|
+
#self._show_virtual_flows_summary()
|
|
990
|
+
|
|
991
|
+
# Recalculate evaluated values for stock outflows
|
|
992
|
+
self._recalculate_indicator_dynamic_stock_outflows(self._year_current)
|
|
993
|
+
|
|
994
|
+
def _advance_timestep(self) -> None:
|
|
995
|
+
"""
|
|
996
|
+
Advance to next timestep.
|
|
997
|
+
"""
|
|
998
|
+
self._year_prev = self._year_current
|
|
999
|
+
self._year_current += 1
|
|
1000
|
+
|
|
1001
|
+
def _create_virtual_process_id(self, process: Process) -> str:
|
|
1002
|
+
"""
|
|
1003
|
+
Create virtual Process ID from target Process.
|
|
1004
|
+
|
|
1005
|
+
:param process: Target Process
|
|
1006
|
+
:return: New vrtual Process ID
|
|
1007
|
+
"""
|
|
1008
|
+
return self._virtual_process_id_prefix + process.id
|
|
1009
|
+
|
|
1010
|
+
def _create_virtual_process_name(self, process: Process) -> str:
|
|
1011
|
+
"""
|
|
1012
|
+
Create virtual Process name from target Process.
|
|
1013
|
+
|
|
1014
|
+
:param process: Target Process
|
|
1015
|
+
:return: New virtual Process name
|
|
1016
|
+
"""
|
|
1017
|
+
return self._virtual_process_id_prefix + process.name
|
|
1018
|
+
|
|
1019
|
+
def _create_virtual_process_transformation_stage(self) -> str:
|
|
1020
|
+
"""
|
|
1021
|
+
Get virtual process transformation stage.
|
|
1022
|
+
|
|
1023
|
+
:return:
|
|
1024
|
+
"""
|
|
1025
|
+
return self._virtual_process_transformation_stage
|
|
1026
|
+
|
|
1027
|
+
def _create_virtual_process(self, process_id: str, process_name: str, transformation_stage: str) -> Process:
|
|
1028
|
+
"""
|
|
1029
|
+
Create virtual Process
|
|
1030
|
+
|
|
1031
|
+
:param process_id: Virtual Process ID
|
|
1032
|
+
:param process_name: Virtual Process name
|
|
1033
|
+
:param transformation_stage: Virtual process transformation stage
|
|
1034
|
+
:return: New virtual Process
|
|
1035
|
+
"""
|
|
1036
|
+
new_virtual_process = Process()
|
|
1037
|
+
new_virtual_process.id = process_id
|
|
1038
|
+
new_virtual_process.name = process_name
|
|
1039
|
+
new_virtual_process.stock_lifetime = 0
|
|
1040
|
+
new_virtual_process.conversion_factor = 1.0
|
|
1041
|
+
new_virtual_process.transformation_stage = transformation_stage
|
|
1042
|
+
new_virtual_process.is_virtual = True
|
|
1043
|
+
return new_virtual_process
|
|
1044
|
+
|
|
1045
|
+
def _create_virtual_process_ex(self, process: Process) -> Process:
|
|
1046
|
+
"""
|
|
1047
|
+
Create virtual Process (extended).
|
|
1048
|
+
|
|
1049
|
+
:param process: Target Process
|
|
1050
|
+
:return: New virtual Process
|
|
1051
|
+
"""
|
|
1052
|
+
v_id = self._create_virtual_process_id(process)
|
|
1053
|
+
v_name = self._create_virtual_process_name(process)
|
|
1054
|
+
v_ts = self._create_virtual_process_transformation_stage()
|
|
1055
|
+
v_process = self._create_virtual_process(v_id, v_name, v_ts)
|
|
1056
|
+
return v_process
|
|
1057
|
+
|
|
1058
|
+
def _remove_virtual_process(self, virtual_process_id: str, year: int) -> bool:
|
|
1059
|
+
"""
|
|
1060
|
+
Remove process from year and all related virtual flows. Does nothing if process ID is not found.
|
|
1061
|
+
|
|
1062
|
+
:param virtual_process_id: Target virtual process ID
|
|
1063
|
+
:param year: Target year
|
|
1064
|
+
:return: True if process ID was found, false otherwise
|
|
1065
|
+
"""
|
|
1066
|
+
if year not in self._year_to_process_id_to_process:
|
|
1067
|
+
return False
|
|
1068
|
+
|
|
1069
|
+
if virtual_process_id not in self._year_to_process_id_to_process[year]:
|
|
1070
|
+
return False
|
|
1071
|
+
|
|
1072
|
+
# Remove flows related to this virtual process
|
|
1073
|
+
inflows = self.get_process_inflows(virtual_process_id, year)
|
|
1074
|
+
for flow in inflows:
|
|
1075
|
+
source_process_flow_ids = self._year_to_process_id_to_flow_ids[year][flow.source_process_id]["out"]
|
|
1076
|
+
if flow.id in source_process_flow_ids:
|
|
1077
|
+
source_process_flow_ids.remove(flow.id)
|
|
1078
|
+
if flow.id in self._year_to_flow_id_to_flow[year]:
|
|
1079
|
+
del self._year_to_flow_id_to_flow[year][flow.id]
|
|
1080
|
+
if flow.id in self._unique_flow_id_to_flow:
|
|
1081
|
+
del self._unique_flow_id_to_flow[flow.id]
|
|
1082
|
+
|
|
1083
|
+
outflows = self.get_process_outflows(virtual_process_id, year)
|
|
1084
|
+
for flow in outflows:
|
|
1085
|
+
target_process_inflow_ids = self._year_to_process_id_to_flow_ids[year][flow.target_process_id]["in"]
|
|
1086
|
+
if flow.id in target_process_inflow_ids:
|
|
1087
|
+
target_process_inflow_ids.remove(flow.id)
|
|
1088
|
+
if flow.id in self._year_to_flow_id_to_flow[year]:
|
|
1089
|
+
del self._year_to_flow_id_to_flow[year][flow.id]
|
|
1090
|
+
if flow.id in self._unique_flow_id_to_flow:
|
|
1091
|
+
del self._unique_flow_id_to_flow[flow.id]
|
|
1092
|
+
|
|
1093
|
+
# Remove virtual process
|
|
1094
|
+
if virtual_process_id in self._year_to_process_id_to_process[year]:
|
|
1095
|
+
del self._year_to_process_id_to_process[year][virtual_process_id]
|
|
1096
|
+
|
|
1097
|
+
if virtual_process_id in self._year_to_process_id_to_flow_ids[year]:
|
|
1098
|
+
del self._year_to_process_id_to_flow_ids[year][virtual_process_id]
|
|
1099
|
+
|
|
1100
|
+
if virtual_process_id in self._unique_process_id_to_process:
|
|
1101
|
+
del self._unique_process_id_to_process[virtual_process_id]
|
|
1102
|
+
|
|
1103
|
+
def _create_virtual_flow(self, source_process_id: str, target_process_id: str, value: float, unit: str) -> Flow:
|
|
1104
|
+
"""
|
|
1105
|
+
Create virtual flow.
|
|
1106
|
+
All indicator conversion factors are set to 0.0.
|
|
1107
|
+
|
|
1108
|
+
:param source_process_id: Source Process ID
|
|
1109
|
+
:param target_process_id: Target Process ID
|
|
1110
|
+
:param value: Flow value
|
|
1111
|
+
:param unit: Flow unit (absolute/relative)
|
|
1112
|
+
:return: New virtual Flow
|
|
1113
|
+
"""
|
|
1114
|
+
new_virtual_flow = Flow()
|
|
1115
|
+
new_virtual_flow.source_process_id = source_process_id
|
|
1116
|
+
new_virtual_flow.target_process_id = target_process_id
|
|
1117
|
+
new_virtual_flow.value = value
|
|
1118
|
+
new_virtual_flow.is_evaluated = True
|
|
1119
|
+
new_virtual_flow.evaluated_value = value
|
|
1120
|
+
new_virtual_flow.unit = unit
|
|
1121
|
+
new_virtual_flow.is_virtual = True
|
|
1122
|
+
|
|
1123
|
+
# Copy indicators to virtual flows
|
|
1124
|
+
for indicator_name, indicator in self._indicators.items():
|
|
1125
|
+
new_indicator = copy.deepcopy(indicator)
|
|
1126
|
+
new_indicator.conversion_factor = 0.0
|
|
1127
|
+
new_virtual_flow.indicator_name_to_indicator[new_indicator.name] = new_indicator
|
|
1128
|
+
new_virtual_flow.indicator_name_to_evaluated_value[new_indicator.name] = 0.0
|
|
1129
|
+
|
|
1130
|
+
return new_virtual_flow
|
|
1131
|
+
|
|
1132
|
+
def _create_virtual_flow_ex(self, source_process: Process, target_process: Process, value: float) -> Flow:
|
|
1133
|
+
"""
|
|
1134
|
+
Create virtual Flow (extended).
|
|
1135
|
+
|
|
1136
|
+
:param source_process: Source Process
|
|
1137
|
+
:param target_process: Target Process
|
|
1138
|
+
:param value: Flow value
|
|
1139
|
+
:return: New virtual Flow
|
|
1140
|
+
"""
|
|
1141
|
+
v_flow = self._create_virtual_flow(source_process.id, target_process.id, value, "")
|
|
1142
|
+
return v_flow
|
|
1143
|
+
|
|
1144
|
+
def _create_virtual_flows(self, year: int, epsilon: float = 0.1) -> None:
|
|
1145
|
+
"""
|
|
1146
|
+
Create virtual flows to balance out process inflows and outflows.
|
|
1147
|
+
NOTE: Virtual inflows are not created to processes with stocks.
|
|
1148
|
+
:param year: Target year
|
|
1149
|
+
:param epsilon: Maximum allowed absolute difference between total inflows and total outflows before creating
|
|
1150
|
+
virtual flow
|
|
1151
|
+
"""
|
|
1152
|
+
# Virtual outflow is unreported flow of process
|
|
1153
|
+
created_virtual_processes = {}
|
|
1154
|
+
created_virtual_flows = {}
|
|
1155
|
+
for process_id, process in self._current_process_id_to_process.items():
|
|
1156
|
+
# Skip virtual processes that were included during previous timesteps
|
|
1157
|
+
# to prevent cascading effect of creating infinite number of virtual processes and flows
|
|
1158
|
+
if process.is_virtual:
|
|
1159
|
+
continue
|
|
1160
|
+
|
|
1161
|
+
# Ignore root and leaf processes (= root process has no inflows and leaf process has no outflows)
|
|
1162
|
+
is_root_process = self.is_root_process(process_id, year)
|
|
1163
|
+
is_leaf_process = self.is_leaf_process(process_id, year)
|
|
1164
|
+
if is_root_process or is_leaf_process:
|
|
1165
|
+
continue
|
|
1166
|
+
|
|
1167
|
+
inflows_total = self.get_process_inflows_total(process_id, year)
|
|
1168
|
+
outflows_total = self.get_process_outflows_total(process_id, year)
|
|
1169
|
+
|
|
1170
|
+
# If process has stock then consider only the stock outflows
|
|
1171
|
+
process_mass_balance = 0.0
|
|
1172
|
+
if process_id in self._process_id_to_stock:
|
|
1173
|
+
# Distribute baseline total outflow values
|
|
1174
|
+
baseline_dsm = self.get_baseline_dynamic_stocks()[process.id]
|
|
1175
|
+
baseline_stock_outflow = self._get_dynamic_stock_outflow_value(baseline_dsm, year)
|
|
1176
|
+
|
|
1177
|
+
# Check that if process has absolute outflow then outflow value must be
|
|
1178
|
+
# less than stock outflow. If absolute outflow is greater than stock outflow
|
|
1179
|
+
# the input data contains a user error
|
|
1180
|
+
outflows_abs = self._get_process_outflows_abs(process_id, year)
|
|
1181
|
+
outflows_rel = self._get_process_outflows_rel(process_id, year)
|
|
1182
|
+
total_outflows_abs = np.sum([flow.evaluated_value for flow in outflows_abs if not flow.is_prioritized])
|
|
1183
|
+
total_outflows_rel = np.sum([flow.evaluated_value for flow in outflows_rel])
|
|
1184
|
+
process_mass_balance = baseline_stock_outflow - total_outflows_abs - total_outflows_rel
|
|
1185
|
+
else:
|
|
1186
|
+
# Process has no stock
|
|
1187
|
+
process_mass_balance = inflows_total - outflows_total
|
|
1188
|
+
|
|
1189
|
+
if abs(process_mass_balance) < epsilon:
|
|
1190
|
+
# Total inflow and outflow difference less than epsilon, continue to next
|
|
1191
|
+
continue
|
|
1192
|
+
|
|
1193
|
+
need_virtual_inflow = process_mass_balance < 0.0
|
|
1194
|
+
need_virtual_outflow = process_mass_balance > 0.0
|
|
1195
|
+
if not need_virtual_inflow and not need_virtual_outflow:
|
|
1196
|
+
# Inflows and outflows are balanced, do nothing and continue to next process
|
|
1197
|
+
continue
|
|
1198
|
+
|
|
1199
|
+
#print("{}: Creating virtual flow, inflow={}, outflow={} ({})".format(process_id, need_virtual_inflow, need_virtual_outflow, year))
|
|
1200
|
+
|
|
1201
|
+
if need_virtual_inflow:
|
|
1202
|
+
# Create new virtual Process
|
|
1203
|
+
v_process = self._create_virtual_process_ex(process)
|
|
1204
|
+
created_virtual_processes[v_process.id] = v_process
|
|
1205
|
+
|
|
1206
|
+
# Create new virtual inflow
|
|
1207
|
+
source_process_id = v_process.id
|
|
1208
|
+
target_process_id = process_id
|
|
1209
|
+
value = process_mass_balance * -1.0
|
|
1210
|
+
unit = ""
|
|
1211
|
+
new_virtual_flow = self._create_virtual_flow(source_process_id, target_process_id, value, unit)
|
|
1212
|
+
created_virtual_flows[new_virtual_flow.id] = new_virtual_flow
|
|
1213
|
+
|
|
1214
|
+
if need_virtual_outflow:
|
|
1215
|
+
# Create new virtual Process
|
|
1216
|
+
v_process = self._create_virtual_process_ex(process)
|
|
1217
|
+
created_virtual_processes[v_process.id] = v_process
|
|
1218
|
+
|
|
1219
|
+
# Create new virtual outflow
|
|
1220
|
+
source_process_id = process_id
|
|
1221
|
+
target_process_id = v_process.id
|
|
1222
|
+
value = process_mass_balance
|
|
1223
|
+
unit = ""
|
|
1224
|
+
new_virtual_flow = self._create_virtual_flow(source_process_id, target_process_id, value, unit)
|
|
1225
|
+
created_virtual_flows[new_virtual_flow.id] = new_virtual_flow
|
|
1226
|
+
|
|
1227
|
+
# Add created virtual Flows and Processes to the current year data
|
|
1228
|
+
for v_id, virtual_process in created_virtual_processes.items():
|
|
1229
|
+
self._year_to_process_id_to_process[year][v_id] = virtual_process
|
|
1230
|
+
self._year_to_process_id_to_flow_ids[year][v_id] = {"in": [], "out": []}
|
|
1231
|
+
self._unique_process_id_to_process[v_id] = virtual_process
|
|
1232
|
+
|
|
1233
|
+
for v_flow_id, virtual_flow in created_virtual_flows.items():
|
|
1234
|
+
self._year_to_flow_id_to_flow[year][v_flow_id] = virtual_flow
|
|
1235
|
+
self._year_to_process_id_to_flow_ids[year][virtual_flow.target_process_id]["in"].append(v_flow_id)
|
|
1236
|
+
self._year_to_process_id_to_flow_ids[year][virtual_flow.source_process_id]["out"].append(v_flow_id)
|
|
1237
|
+
self._unique_flow_id_to_flow[v_flow_id] = virtual_flow
|
|
1238
|
+
|
|
1239
|
+
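# The balancing rule implemented above, reduced to a standalone sketch. The helper name and
# the sample numbers are illustrative only and not part of FlowSolver:
def classify_mass_balance(inflows_total: float, outflows_total: float, epsilon: float = 0.1):
    """Return ('inflow'|'outflow'|None, value) telling which virtual flow would be created."""
    balance = inflows_total - outflows_total
    if abs(balance) < epsilon:
        return None, 0.0
    # balance < 0: outflows exceed inflows -> a virtual inflow of -balance is needed
    # balance > 0: inflows exceed outflows -> a virtual outflow of +balance is needed
    return ("inflow", -balance) if balance < 0.0 else ("outflow", balance)

assert classify_mass_balance(10.0, 12.5) == ("inflow", 2.5)
assert classify_mass_balance(10.0, 10.05) == (None, 0.0)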
def _show_virtual_flows_summary(self):
|
|
1240
|
+
"""
|
|
1241
|
+
Show a summary of virtual processes and virtual flows for the current year.
|
|
1242
|
+
Does nothing if no virtual processes or flows were created for the current year.
|
|
1243
|
+
"""
|
|
1244
|
+
# Show summary of created virtual processes and virtual flows
|
|
1245
|
+
virtual_processes = []
|
|
1246
|
+
virtual_flows = []
|
|
1247
|
+
for process_id, process in self._get_current_year_process_id_to_process().items():
|
|
1248
|
+
if not process.is_virtual:
|
|
1249
|
+
continue
|
|
1250
|
+
|
|
1251
|
+
virtual_processes.append(process)
|
|
1252
|
+
|
|
1253
|
+
entry = self._get_current_year_process_id_to_to_flow_ids()[process_id]
|
|
1254
|
+
inflow_ids = entry["in"]
|
|
1255
|
+
outflow_ids = entry["out"]
|
|
1256
|
+
|
|
1257
|
+
for flow_id in inflow_ids:
|
|
1258
|
+
flow = self._get_current_year_flow_id_to_flow()[flow_id]
|
|
1259
|
+
if not flow.is_virtual:
|
|
1260
|
+
continue
|
|
1261
|
+
|
|
1262
|
+
virtual_flows.append(flow)
|
|
1263
|
+
|
|
1264
|
+
for flow_id in outflow_ids:
|
|
1265
|
+
flow = self._get_current_year_flow_id_to_flow()[flow_id]
|
|
1266
|
+
if not flow.is_virtual:
|
|
1267
|
+
continue
|
|
1268
|
+
|
|
1269
|
+
virtual_flows.append(flow)
|
|
1270
|
+
|
|
1271
|
+
# Show summary only if there is something to be shown
|
|
1272
|
+
num_virtual_processes = len(virtual_processes)
|
|
1273
|
+
num_virtual_flows = len(virtual_flows)
|
|
1274
|
+
if num_virtual_processes or num_virtual_flows:
|
|
1275
|
+
print("INFO: Created {} virtual processes and {} virtual flows for year {}".format(
|
|
1276
|
+
num_virtual_processes, num_virtual_flows, self._year_current))
|
|
1277
|
+
for process in virtual_processes:
|
|
1278
|
+
print("INFO: \t- Virtual process ID '{}'".format(process.id))
|
|
1279
|
+
for flow in virtual_flows:
|
|
1280
|
+
print("INFO: \t- Virtual flow ID '{} (value={:.5})'".format(flow.id, flow.evaluated_value))
|
|
1281
|
+
|
|
1282
|
+
print("")
|
|
1283
|
+
|
|
1284
|
+
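# Quick standalone check of the summary formatting used above ({:.5} keeps five significant
# digits); the IDs, counts and values below are made up for illustration:
print("INFO: Created {} virtual processes and {} virtual flows for year {}".format(1, 2, 2025))
print("INFO: \t- Virtual process ID '{}'".format("ExampleProcess"))
print("INFO: \t- Virtual flow ID '{}' (value={:.5})".format("ExampleFlow", 123.456789))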
def _create_dynamic_stocks(self) -> None:
|
|
1285
|
+
"""
|
|
1286
|
+
Convert Stocks to ODYM DynamicStockModels.
|
|
1287
|
+
"""
|
|
1288
|
+
# Create DynamicStockModels for Processes that contain Stock
|
|
1289
|
+
for stock in self.get_all_stocks():
|
|
1290
|
+
# If stock.stock_distribution_params is a float then use it as the default StdDev value
|
|
1291
|
+
# Otherwise, if it is a dict, read StdDev, Shape and Scale from the parameters defined in the cell
|
|
1292
|
+
stddev = 0.0
|
|
1293
|
+
shape = 1.0
|
|
1294
|
+
scale = 1.0
|
|
1295
|
+
if type(stock.stock_distribution_params) is float:
|
|
1296
|
+
stddev = stock.stock_distribution_params
|
|
1297
|
+
|
|
1298
|
+
condition = None
|
|
1299
|
+
if type(stock.stock_distribution_params) is dict:
|
|
1300
|
+
stddev = stock.stock_distribution_params.get(StockDistributionParameter.StdDev, 1.0)
|
|
1301
|
+
shape = stock.stock_distribution_params.get(StockDistributionParameter.Shape, 1.0)
|
|
1302
|
+
scale = stock.stock_distribution_params.get(StockDistributionParameter.Scale, 1.0)
|
|
1303
|
+
|
|
1304
|
+
# For new decay functions
|
|
1305
|
+
landfill_decay_types = [StockDistributionType.LandfillDecayWood]
|
|
1306
|
+
if stock.stock_distribution_type in landfill_decay_types:
|
|
1307
|
+
condition = stock.stock_distribution_params[StockDistributionParameter.Condition]
|
|
1308
|
+
|
|
1309
|
+
# Stock parameters
|
|
1310
|
+
stock_years = np.array(self._years)
|
|
1311
|
+
stock_total_inflows = np.zeros(len(stock_years))
|
|
1312
|
+
stock_total = np.zeros(len(stock_years))
|
|
1313
|
+
stock_lifetime_params = {
|
|
1314
|
+
'Type': stock.stock_distribution_type,
|
|
1315
|
+
'Mean': [stock.stock_lifetime],
|
|
1316
|
+
'StdDev': [stddev],
|
|
1317
|
+
'Shape': [shape],
|
|
1318
|
+
'Scale': [scale],
|
|
1319
|
+
StockDistributionParameter.Condition: [condition],
|
|
1320
|
+
}
|
|
1321
|
+
|
|
1322
|
+
# Baseline DSM
|
|
1323
|
+
baseline_dsm = DynamicStockModel(t=copy.deepcopy(stock_years),
|
|
1324
|
+
i=copy.deepcopy(stock_total_inflows),
|
|
1325
|
+
s=copy.deepcopy(stock_total),
|
|
1326
|
+
lt=copy.deepcopy(stock_lifetime_params))
|
|
1327
|
+
|
|
1328
|
+
baseline_dsm.compute_s_c_inflow_driven()
|
|
1329
|
+
baseline_dsm.compute_o_c_from_s_c()
|
|
1330
|
+
baseline_dsm.compute_stock_total()
|
|
1331
|
+
baseline_dsm.compute_stock_change()
|
|
1332
|
+
baseline_dsm.compute_outflow_total()
|
|
1333
|
+
|
|
1334
|
+
# Stock ID -> DSM
|
|
1335
|
+
self._stock_id_to_baseline_dsm[stock.id] = baseline_dsm
|
|
1336
|
+
|
|
1337
|
+
# Create indicator DSMs for each indicator name
|
|
1338
|
+
for indicator_name in self.get_indicator_names():
|
|
1339
|
+
indicator_dsm = DynamicStockModel(t=copy.deepcopy(stock_years),
|
|
1340
|
+
i=copy.deepcopy(stock_total_inflows),
|
|
1341
|
+
s=copy.deepcopy(stock_total),
|
|
1342
|
+
lt=copy.deepcopy(stock_lifetime_params))
|
|
1343
|
+
|
|
1344
|
+
indicator_dsm.compute_s_c_inflow_driven()
|
|
1345
|
+
indicator_dsm.compute_o_c_from_s_c()
|
|
1346
|
+
indicator_dsm.compute_stock_total()
|
|
1347
|
+
indicator_dsm.compute_stock_change()
|
|
1348
|
+
indicator_dsm.compute_outflow_total()
|
|
1349
|
+
|
|
1350
|
+
# Stock ID -> Indicator name -> DSM
|
|
1351
|
+
indicator_name_to_dsm = self._stock_id_to_indicator_name_to_dsm.get(stock.id, {})
|
|
1352
|
+
indicator_name_to_dsm[indicator_name] = indicator_dsm
|
|
1353
|
+
self._stock_id_to_indicator_name_to_dsm[stock.id] = indicator_name_to_dsm
|
|
1354
|
+
|
|
1355
|
+
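# Minimal standalone sketch of the inflow-driven computation sequence used above, assuming the
# bundled DynamicStockModel supports the standard ODYM 'Normal' lifetime type. The years,
# inflows and lifetime values are made up, and lifetime parameters are given per model year here
# (the solver itself passes single-element lists to its bundled copy):
import numpy as np
from aiphoria.lib.odym.modules.dynamic_stock_model import DynamicStockModel

example_years = np.arange(2020, 2031)
example_inflows = np.full(len(example_years), 10.0)   # constant yearly inflow into the stock
example_lifetime = {'Type': 'Normal',
                    'Mean': [15.0] * len(example_years),
                    'StdDev': [3.0] * len(example_years)}

example_dsm = DynamicStockModel(t=example_years, i=example_inflows, lt=example_lifetime)
example_dsm.compute_s_c_inflow_driven()    # stock by cohort, driven by the inflows
example_dsm.compute_o_c_from_s_c()         # outflow by cohort from the surviving stock
stock_per_year = example_dsm.compute_stock_total()
outflow_per_year = example_dsm.compute_outflow_total()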
def _evaluate_dynamic_stock_outflows(self, year: int) -> None:
|
|
1356
|
+
"""
|
|
1357
|
+
Evaluate dynamic stock outflows and distribute the stock outflow among the relative outflows.
|
|
1358
|
+
Marks stock outflows as evaluated.
|
|
1359
|
+
|
|
1360
|
+
This method must be called at the beginning of every timestep before starting to evaluate Processes.
|
|
1361
|
+
|
|
1362
|
+
:param year: Target year
|
|
1363
|
+
"""
|
|
1364
|
+
# Get stock outflow for year, distribute that to relative outflows and mark those Flows as evaluated
|
|
1365
|
+
# NOTE: The stock outflow is now also distributed to prioritized flows
|
|
1366
|
+
year_index = self._years.index(year)
|
|
1367
|
+
for stock_id, dsm in self.get_baseline_dynamic_stocks().items():
|
|
1368
|
+
outflows = self._get_process_outflows(stock_id)
|
|
1369
|
+
stock_total_outflow = dsm.compute_outflow_total()[year_index]
|
|
1370
|
+
|
|
1371
|
+
for flow in outflows:
|
|
1372
|
+
if flow.is_unit_absolute_value:
|
|
1373
|
+
continue
|
|
1374
|
+
|
|
1375
|
+
flow.is_evaluated = True
|
|
1376
|
+
flow.evaluated_value = flow.evaluated_share * stock_total_outflow
|
|
1377
|
+
flow.evaluate_indicator_values_from_baseline_value()
|
|
1378
|
+
|
|
1379
|
+
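# Standalone sketch of the distribution step above: the yearly stock outflow is split over the
# relative (non-absolute) outflows in proportion to their evaluated shares. Flow IDs and numbers
# are illustrative:
stock_total_outflow_example = 120.0
evaluated_shares = {"FlowA": 0.25, "FlowB": 0.75}
distributed = {flow_id: share * stock_total_outflow_example for flow_id, share in evaluated_shares.items()}
assert abs(sum(distributed.values()) - stock_total_outflow_example) < 1e-9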
def _recalculate_indicator_dynamic_stock_outflows(self, year: int) -> None:
|
|
1380
|
+
# Recalculate indicator values for stock outflows
|
|
1381
|
+
# NOTE: Relative outflows from an indicator stock do not need an indicator conversion factor
|
|
1382
|
+
# defined in the settings file, because the relative share of the flow can be calculated directly
|
|
1383
|
+
# from the indicator stock's total outflow. Indicator conversion factors are needed when flows enter a stock,
|
|
1384
|
+
# but they are not mandatory for stock outflows.
|
|
1385
|
+
year_index = self._years.index(year)
|
|
1386
|
+
for stock_id, dsm_indicators in self.get_indicator_dynamic_stocks().items():
|
|
1387
|
+
outflows = self._get_process_outflows(stock_id)
|
|
1388
|
+
for indicator_name, dsm in dsm_indicators.items():
|
|
1389
|
+
stock_total_outflow = dsm.compute_outflow_total()[year_index]
|
|
1390
|
+
for flow in outflows:
|
|
1391
|
+
if flow.is_unit_absolute_value:
|
|
1392
|
+
continue
|
|
1393
|
+
|
|
1394
|
+
evaluated_value = flow.evaluated_share * stock_total_outflow
|
|
1395
|
+
flow.set_evaluated_value_for_indicator(indicator_name, evaluated_value)
|
|
1396
|
+
|
|
1397
|
+
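# The same sharing logic, applied per indicator: each indicator has its own dynamic stock, so a
# relative outflow's indicator value is simply its share of that indicator stock's total outflow
# and no conversion factor is needed at this point. Indicator names and numbers are illustrative:
indicator_stock_outflows = {"IndicatorA": 40.0, "IndicatorB": 900.0}
flow_share = 0.25
flow_indicator_values = {name: flow_share * total for name, total in indicator_stock_outflows.items()}
assert flow_indicator_values["IndicatorA"] == 10.0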
def _get_dynamic_stock_outflow_value(self, dsm: DynamicStockModel, year: int) -> float:
|
|
1398
|
+
"""
|
|
1399
|
+
Get dynamic stock total outflow value.
|
|
1400
|
+
|
|
1401
|
+
:param dsm: Target DynamicStockModel
|
|
1402
|
+
:param year: Target year
|
|
1403
|
+
:return: Total stock outflow (float)
|
|
1404
|
+
"""
|
|
1405
|
+
year_index = self._years.index(year)
|
|
1406
|
+
stock_outflow_total = dsm.compute_outflow_total()
|
|
1407
|
+
return stock_outflow_total[year_index]
|
|
1408
|
+
|
|
1409
|
+
def get_solved_scenario_data(self) -> ScenarioData:
|
|
1410
|
+
"""
|
|
1411
|
+
Get solved ScenarioData.
|
|
1412
|
+
|
|
1413
|
+
:return: Solved ScenarioData
|
|
1414
|
+
"""
|
|
1415
|
+
# Make deep copies and return ScenarioData containing the data
|
|
1416
|
+
years = copy.deepcopy(self._years)
|
|
1417
|
+
year_to_process_id_to_process = copy.deepcopy(self._year_to_process_id_to_process)
|
|
1418
|
+
year_to_process_id_to_flow_ids = copy.deepcopy(self._year_to_process_id_to_flow_ids)
|
|
1419
|
+
year_to_flow_id_to_flow = copy.deepcopy(self._year_to_flow_id_to_flow)
|
|
1420
|
+
unique_process_id_to_process = copy.deepcopy(self._unique_process_id_to_process)
|
|
1421
|
+
unique_flow_id_to_flow = copy.deepcopy(self._unique_flow_id_to_flow)
|
|
1422
|
+
process_id_to_stock = copy.deepcopy(self._process_id_to_stock)
|
|
1423
|
+
stocks = copy.deepcopy(self._all_stocks)
|
|
1424
|
+
use_virtual_flows = copy.deepcopy(self._use_virtual_flows)
|
|
1425
|
+
virtual_flows_epsilon = copy.deepcopy(self._virtual_flows_epsilon)
|
|
1426
|
+
baseline_value_name = copy.deepcopy(self._baseline_value_name)
|
|
1427
|
+
baseline_unit_name = copy.deepcopy(self._baseline_unit_name)
|
|
1428
|
+
indicator_name_to_indicator = copy.deepcopy(self._indicator_name_to_indicator)
|
|
1429
|
+
|
|
1430
|
+
scenario_data = ScenarioData(years=years,
|
|
1431
|
+
year_to_process_id_to_process=year_to_process_id_to_process,
|
|
1432
|
+
year_to_process_id_to_flow_ids=year_to_process_id_to_flow_ids,
|
|
1433
|
+
year_to_flow_id_to_flow=year_to_flow_id_to_flow,
|
|
1434
|
+
unique_process_id_to_process=unique_process_id_to_process,
|
|
1435
|
+
unique_flow_id_to_flow=unique_flow_id_to_flow,
|
|
1436
|
+
process_id_to_stock=process_id_to_stock,
|
|
1437
|
+
stocks=stocks,
|
|
1438
|
+
use_virtual_flows=use_virtual_flows,
|
|
1439
|
+
virtual_flows_epsilon=virtual_flows_epsilon,
|
|
1440
|
+
baseline_value_name=baseline_value_name,
|
|
1441
|
+
baseline_unit_name=baseline_unit_name,
|
|
1442
|
+
indicator_name_to_indicator=indicator_name_to_indicator
|
|
1443
|
+
)
|
|
1444
|
+
return scenario_data
|
|
1445
|
+
|
|
1446
|
+
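# The deep copies above decouple the returned ScenarioData from the solver's internal state:
# later mutations inside FlowSolver cannot change an already returned result. The standalone
# snippet below illustrates that property with a plain nested dict:
import copy

internal_state = {2020: {"FlowA": 1.0}}
snapshot = copy.deepcopy(internal_state)
internal_state[2020]["FlowA"] = 999.0
assert snapshot[2020]["FlowA"] == 1.0   # the snapshot keeps the value it was created with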
def _apply_flow_modifiers(self) -> None:
|
|
1447
|
+
"""
|
|
1448
|
+
Apply flow modifiers if Scenario has those defined.
|
|
1449
|
+
The baseline scenario does not define any, so return immediately if there is nothing to process.
|
|
1450
|
+
"""
|
|
1451
|
+
# NOTE: Import FlowModifierSolver lazily to avoid a circular import
|
|
1452
|
+
from .flowmodifiersolver import FlowModifierSolver
|
|
1453
|
+
|
|
1454
|
+
if not self._scenario.scenario_definition.flow_modifiers:
|
|
1455
|
+
# This is the case for the baseline scenario: do nothing and return
|
|
1456
|
+
return
|
|
1457
|
+
|
|
1458
|
+
print("*** Applying flow modifiers for scenario '{}' ***".format(self._scenario.name))
|
|
1459
|
+
scenario_type = self._scenario.model_params[ParameterName.ScenarioType]
|
|
1460
|
+
fms = FlowModifierSolver(self, scenario_type)
|
|
1461
|
+
fms.solve()
|
|
1462
|
+
|
|
1463
|
+
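# The lazy-import pattern used above, in isolation: the import statement runs only when the
# function is called, so two modules that refer to each other can still be loaded. The module
# below is a placeholder; the real code defers `from .flowmodifiersolver import FlowModifierSolver`:
def _deferred_import_example():
    from math import sqrt   # resolved at call time, not at module import time
    return sqrt(9.0)

assert _deferred_import_example() == 3.0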
def _remove_virtual_processes_and_flows(self) -> None:
|
|
1464
|
+
# Remove all virtual processes and related flows in all years
|
|
1465
|
+
for year in self._years:
|
|
1466
|
+
virtual_process_ids = []
|
|
1467
|
+
for process_id, process in self._year_to_process_id_to_process[year].items():
|
|
1468
|
+
if process.is_virtual:
|
|
1469
|
+
virtual_process_ids.append(process_id)
|
|
1470
|
+
|
|
1471
|
+
for process_id in virtual_process_ids:
|
|
1472
|
+
self._remove_virtual_process(process_id, year)
|