fram-core 0.0.0-py3-none-any.whl → 0.1.0a2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fram_core-0.1.0a2.dist-info/METADATA +42 -0
- fram_core-0.1.0a2.dist-info/RECORD +100 -0
- {fram_core-0.0.0.dist-info → fram_core-0.1.0a2.dist-info}/WHEEL +1 -2
- fram_core-0.1.0a2.dist-info/licenses/LICENSE.md +8 -0
- framcore/Base.py +142 -0
- framcore/Model.py +73 -0
- framcore/__init__.py +9 -0
- framcore/aggregators/Aggregator.py +153 -0
- framcore/aggregators/HydroAggregator.py +837 -0
- framcore/aggregators/NodeAggregator.py +495 -0
- framcore/aggregators/WindSolarAggregator.py +323 -0
- framcore/aggregators/__init__.py +13 -0
- framcore/aggregators/_utils.py +184 -0
- framcore/attributes/Arrow.py +305 -0
- framcore/attributes/ElasticDemand.py +90 -0
- framcore/attributes/ReservoirCurve.py +37 -0
- framcore/attributes/SoftBound.py +19 -0
- framcore/attributes/StartUpCost.py +54 -0
- framcore/attributes/Storage.py +146 -0
- framcore/attributes/TargetBound.py +18 -0
- framcore/attributes/__init__.py +65 -0
- framcore/attributes/hydro/HydroBypass.py +42 -0
- framcore/attributes/hydro/HydroGenerator.py +83 -0
- framcore/attributes/hydro/HydroPump.py +156 -0
- framcore/attributes/hydro/HydroReservoir.py +27 -0
- framcore/attributes/hydro/__init__.py +13 -0
- framcore/attributes/level_profile_attributes.py +714 -0
- framcore/components/Component.py +112 -0
- framcore/components/Demand.py +130 -0
- framcore/components/Flow.py +167 -0
- framcore/components/HydroModule.py +330 -0
- framcore/components/Node.py +76 -0
- framcore/components/Thermal.py +204 -0
- framcore/components/Transmission.py +183 -0
- framcore/components/_PowerPlant.py +81 -0
- framcore/components/__init__.py +22 -0
- framcore/components/wind_solar.py +67 -0
- framcore/curves/Curve.py +44 -0
- framcore/curves/LoadedCurve.py +155 -0
- framcore/curves/__init__.py +9 -0
- framcore/events/__init__.py +21 -0
- framcore/events/events.py +51 -0
- framcore/expressions/Expr.py +490 -0
- framcore/expressions/__init__.py +28 -0
- framcore/expressions/_get_constant_from_expr.py +483 -0
- framcore/expressions/_time_vector_operations.py +615 -0
- framcore/expressions/_utils.py +73 -0
- framcore/expressions/queries.py +423 -0
- framcore/expressions/units.py +207 -0
- framcore/fingerprints/__init__.py +11 -0
- framcore/fingerprints/fingerprint.py +293 -0
- framcore/juliamodels/JuliaModel.py +161 -0
- framcore/juliamodels/__init__.py +7 -0
- framcore/loaders/__init__.py +10 -0
- framcore/loaders/loaders.py +407 -0
- framcore/metadata/Div.py +73 -0
- framcore/metadata/ExprMeta.py +50 -0
- framcore/metadata/LevelExprMeta.py +17 -0
- framcore/metadata/Member.py +55 -0
- framcore/metadata/Meta.py +44 -0
- framcore/metadata/__init__.py +15 -0
- framcore/populators/Populator.py +108 -0
- framcore/populators/__init__.py +7 -0
- framcore/querydbs/CacheDB.py +50 -0
- framcore/querydbs/ModelDB.py +34 -0
- framcore/querydbs/QueryDB.py +45 -0
- framcore/querydbs/__init__.py +11 -0
- framcore/solvers/Solver.py +48 -0
- framcore/solvers/SolverConfig.py +272 -0
- framcore/solvers/__init__.py +9 -0
- framcore/timeindexes/AverageYearRange.py +20 -0
- framcore/timeindexes/ConstantTimeIndex.py +17 -0
- framcore/timeindexes/DailyIndex.py +21 -0
- framcore/timeindexes/FixedFrequencyTimeIndex.py +762 -0
- framcore/timeindexes/HourlyIndex.py +21 -0
- framcore/timeindexes/IsoCalendarDay.py +31 -0
- framcore/timeindexes/ListTimeIndex.py +197 -0
- framcore/timeindexes/ModelYear.py +17 -0
- framcore/timeindexes/ModelYears.py +18 -0
- framcore/timeindexes/OneYearProfileTimeIndex.py +21 -0
- framcore/timeindexes/ProfileTimeIndex.py +32 -0
- framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
- framcore/timeindexes/TimeIndex.py +90 -0
- framcore/timeindexes/WeeklyIndex.py +21 -0
- framcore/timeindexes/__init__.py +36 -0
- framcore/timevectors/ConstantTimeVector.py +135 -0
- framcore/timevectors/LinearTransformTimeVector.py +114 -0
- framcore/timevectors/ListTimeVector.py +123 -0
- framcore/timevectors/LoadedTimeVector.py +104 -0
- framcore/timevectors/ReferencePeriod.py +41 -0
- framcore/timevectors/TimeVector.py +94 -0
- framcore/timevectors/__init__.py +17 -0
- framcore/utils/__init__.py +36 -0
- framcore/utils/get_regional_volumes.py +369 -0
- framcore/utils/get_supported_components.py +60 -0
- framcore/utils/global_energy_equivalent.py +46 -0
- framcore/utils/isolate_subnodes.py +163 -0
- framcore/utils/loaders.py +97 -0
- framcore/utils/node_flow_utils.py +236 -0
- framcore/utils/storage_subsystems.py +107 -0
- fram_core-0.0.0.dist-info/METADATA +0 -5
- fram_core-0.0.0.dist-info/RECORD +0 -4
- fram_core-0.0.0.dist-info/top_level.txt +0 -1
framcore/aggregators/NodeAggregator.py (new file)
@@ -0,0 +1,495 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from time import time
+from typing import TYPE_CHECKING
+
+from framcore.aggregators import Aggregator
+from framcore.aggregators._utils import _aggregate_costs
+from framcore.attributes import MaxFlowVolume, Price
+from framcore.components import Component, Demand, Node, Transmission, Flow
+from framcore.curves import Curve
+from framcore.expressions import Expr
+from framcore.metadata import Member, Meta
+from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
+from framcore.timevectors import TimeVector
+from framcore.utils import get_component_to_nodes, get_transports_by_commodity, get_supported_components, get_flow_infos, get_node_to_commodity
+
+# TODO: Support internal loss demand
+# TODO: Document method in the appropriate place (which docstring? module? class? __init__? _aggregate?)
+# TODO: transfer member metadata to internal loss Demand
+
+if TYPE_CHECKING:
+    from framcore import Model
+
+
+class NodeAggregator(Aggregator):
+    """Aggregate groups of nodes for a commodity. Subclass of Aggregator."""
+
+    def __init__(
+        self,
+        commodity: str,
+        meta_key: str,
+        data_dim: SinglePeriodTimeIndex,
+        scen_dim: FixedFrequencyTimeIndex,
+        utilization_rate: float = 0.5,
+    ) -> None:
+        """
+        Aggregate groups of nodes (defined by metadata key) for a commodity.
+
+        Args:
+            commodity (str): Commodity of the Nodes to be aggregated.
+            meta_key (str): Metadata key holding the Member metadata that assigns each Node to a group.
+            data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation of prices.
+            scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation of prices.
+            utilization_rate (float, optional): Assumed utilization rate on internal transports. Used to calculate new Demands after aggregation
+                if the transport does not have a volume.
+                Defaults to 0.5 (i.e. 50 percent utilization in each direction).
+
+        """
+        super().__init__()
+        self._commodity = commodity
+        self._meta_key = meta_key
+        self._data_dim = data_dim
+        self._scen_dim = scen_dim
+        self._utilization_rate = utilization_rate
+
+        # To remember all modifications in _aggregate so we can undo them in _disaggregate.
+        # Will be cleared in _init_aggregate, so that the same memory can be re-used.
+        self._grouped_nodes: dict[str, set[str]] = defaultdict(set)
+        self._replaced_references: dict[str, set[tuple[str, str]]] = defaultdict(set)  # tracks all node references that have been replaced
+        self._internal_transports: set[str] = set()
+        self._internal_transport_demands: set[str] = set()
+
+        # To record error messages in _aggregate and _disaggregate.
+        # Will be cleared in _init_aggregate and _init_disaggregate,
+        # so that the same memory can be re-used.
+        self._errors: set[str] = set()
+
+    def _aggregate(self, model: Model) -> None:
+        """Modify model, components and data."""
+        t0 = time()
+        # Will be modified by upcoming code by adding group_nodes
+        # and deleting member_nodes and redundant transports.
+        data = model.get_data()
+
+        # Helper-dict to give simpler access to components in upcoming loops.
+        # The components are the same instances as in data, and upcoming code
+        # will use this to modify components inplace, in self._replace_node.
+        components: dict[str, Component] = {key: c for key, c in data.items() if isinstance(c, Component)}
+
+        # This is just a helper-dict to give fast access
+        component_to_nodes: dict[str, set[str]] = get_component_to_nodes(components)
+
+        self._init_aggregate(components, data)
+        self.send_debug_event(f"init time {round(time() - t0, 3)} seconds")
+
+        # main logic
+        t = time()
+        for group_name, member_node_names in self._grouped_nodes.items():
+            member_node_names: set[str]
+            group_node = Node(commodity=self._commodity)
+            self._set_group_price(model, group_node, member_node_names, "EUR/MWh")
+            self._delete_members(data, member_node_names)
+
+            assert group_name not in data, f"{group_name}"
+            data[group_name] = group_node
+
+            self._replace_node(group_name, member_node_names, components, component_to_nodes)
+            components[group_name] = group_node
+        self.send_debug_event(f"main logic time {round(time() - t, 3)} seconds")
+
+        t = time()
+        transports = get_transports_by_commodity(components, self._commodity)
+        self._update_internal_transports(transports)
+        self._delete_internal_transports(data)
+        self._add_internal_transport_demands(model, components, transports)
+        self.send_debug_event(f"handle internal transport losses time {round(time() - t, 3)} seconds")
+
+        self.send_debug_event(f"total time {round(time() - t0, 3)} seconds")
+
+    def _update_internal_transports(
+        self,
+        transports: dict[str, tuple[str, str]],
+    ) -> None:
+        for name, (from_node, to_node) in transports.items():
+            if from_node == to_node:
+                # if not, then invalid transport from before
+                assert to_node in self._grouped_nodes
+
+                # earlier to_node was added here, but it should be the transport name
+                self._internal_transports.add(name)
+
+    def _get_demand_member_meta_keys(self, components: dict[str, Component]) -> set[str]:
+        """Find all direct_out demands via flows from get_supported_components and collect member meta keys from them."""
+        out: set[str] = set()
+        nodes_and_flows = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())
+        node_to_commodity = get_node_to_commodity(nodes_and_flows)
+        for flow in nodes_and_flows.values():
+            if not isinstance(flow, Flow):
+                continue
+            flow_infos = get_flow_infos(flow, node_to_commodity)
+            if len(flow_infos) != 1:
+                continue
+            flow_info = flow_infos[0]
+            if flow_info.category != "direct_out":
+                continue
+            if flow_info.commodity_out != self._commodity:
+                continue
+            demand = flow
+            for key in demand.get_meta_keys():
+                meta = demand.get_meta(key)
+                if isinstance(meta, Member):
+                    out.add(key)
+        return out
+
+
+    def _add_internal_transport_demands(
+        self,
+        model: Model,
+        components: dict[str, Component],
+        transports: dict[str, tuple[str, str]],
+    ) -> None:
+        """
+        Add demand representing loss on internal transmission lines being removed by aggregation.
+
+        This is done to avoid underestimation of aggregated demand.
+        """
+        data = model.get_data()
+
+        demand_member_meta_keys = self._get_demand_member_meta_keys(components)
+
+        # TODO: Document that we rely on Transmission and Demand APIs to get loss
+        for key in self._internal_transports:
+            transport = components[key]
+            from_node, to_node = transports[key]
+            assert from_node == to_node, f"{from_node}, {to_node}"
+            node = from_node
+
+            transport: Transmission
+
+            if transport.get_loss():
+                profile = None
+                loss = transport.get_loss()
+                if loss.get_level() is None:
+                    continue
+                if transport.get_outgoing_volume().get_level():
+                    level = transport.get_outgoing_volume().get_level() * loss.get_level()
+
+                    # could multiply by loss profile here, but profile * profile is not yet supported so we wait.
+                    profile = transport.get_outgoing_volume().get_profile()
+
+                # elif exploitation factor at individual level. How to best access this?
+                else:
+                    level = transport.get_max_capacity().get_level() * self._utilization_rate * loss.get_level()
+                    profile = loss.get_profile()
+
+                internal_losses_demand = Demand(
+                    node=node,
+                    capacity=MaxFlowVolume(
+                        level=level,
+                        profile=profile,
+                    ),
+                )
+
+                for meta_key in demand_member_meta_keys:
+                    internal_losses_demand.add_meta(meta_key, Member("InternalTransportLossFromNodeAggregator"))
+
+                demand_key = key + "_InternalTransportLossDemand_" + node
+
+                self._internal_transport_demands.add(demand_key)
+                assert demand_key not in data, f"{demand_key}"
+                data[demand_key] = internal_losses_demand
+
+    def _delete_internal_transports(
+        self,
+        data: dict[str, Component | TimeVector | Curve | Expr],
+    ) -> None:
+        for key in self._internal_transports:
+            self._aggregation_map[key] = set()
+            del data[key]
+
+    def _delete_members(
+        self,
+        data: dict[str, Component | TimeVector | Curve | Expr],
+        member_node_names: set[str],
+    ) -> None:
+        for member in member_node_names:
+            del data[member]
+
+    def _set_group_price(
+        self,
+        model: Model,
+        group_node: Node,
+        member_node_names: set[str],
+        weight_unit: str,
+    ) -> None:
+        data = model.get_data()
+        weights = [1.0 / len(member_node_names)] * len(member_node_names)
+        prices = [data[key].get_price() for key in member_node_names]
+        if all(prices):
+            level, profile, intercept = _aggregate_costs(
+                model=model,
+                costs=prices,
+                weights=weights,
+                weight_unit=weight_unit,
+                data_dim=self._data_dim,
+                scen_dim=self._scen_dim,
+            )
+            group_node.get_price().set_level(level)
+            group_node.get_price().set_profile(profile)
+            group_node.get_price().set_intercept(intercept)
+        elif any(prices):
+            missing = [key for key in member_node_names if data[key].get_price() is None]
+            self.send_warning_event(f"Only some member nodes of group {group_node} have a Price; skipping price aggregation. Missing: {missing}")
+
+    def _replace_node(
+        self,
+        group_name: str,
+        member_node_names: set[str],
+        components: dict[str, Component],
+        component_to_nodes: dict[str, set[str]],
+    ) -> None:
+        for name, component in components.items():
+            replace_keys = component_to_nodes[name]
+            for key in member_node_names:
+                if key in replace_keys:
+                    component.replace_node(key, group_name)
+                    self._replaced_references[name].add((key, group_name))
+
+    def _init_aggregate(  # noqa: C901
+        self,
+        components: dict[str, Component],
+        data: dict[str, Component | TimeVector | Curve | Expr],
+    ) -> None:
+        self._grouped_nodes.clear()
+        self._internal_transports.clear()
+        self._internal_transport_demands.clear()
+        self._errors.clear()
+
+        self._aggregation_map = defaultdict(set)
+
+        exogenous_groups = set()
+
+        meta_key = self._meta_key
+
+        for key, component in components.items():
+            if not isinstance(component, Node):
+                self._aggregation_map[key].add(key)
+                continue
+
+            node: Node = component
+
+            commodity = node.get_commodity()
+
+            if self._commodity != commodity:
+                self._aggregation_map[key].add(key)
+                continue
+
+            meta: Meta | None = node.get_meta(meta_key)
+
+            if meta is None:
+                self._errors.add(f"Node {key} had no metadata behind key {meta_key}.")
+                continue
+
+            meta: Meta
+
+            if not isinstance(meta, Member):
+                got = type(meta).__name__
+                message = f"Node {key} has metadata behind key {meta_key} with wrong type. Expected Member, got {got}."
+                self._errors.add(message)
+                continue
+
+            meta: Member
+
+            group_name: str = meta.get_value()
+
+            if node.is_exogenous():
+                # register groups with exogenous Nodes to validate later.
+                exogenous_groups.add(group_name)
+
+            if not self._errors:
+                self._aggregation_map[key].add(group_name)
+                self._grouped_nodes[group_name].add(key)
+
+        grouped_nodes = self._grouped_nodes.copy()
+
+        for group_name in exogenous_groups:  # Check exogenous groups.
+            node_keys = grouped_nodes[group_name]
+            if len(node_keys) != 1:  # allow unchanged or renamed exogenous Nodes.
+                self._errors.add(
+                    f"Group {group_name} contains an exogenous Node and must therefore contain only one Node."
+                    " Exogenous Nodes cannot be grouped together with other Nodes.",
+                )
+            # For if we want to allow pure exogenous groups.
+            # for node_key in node_keys:
+            #     node: Node = components[node_key]
+            #     if not node.is_exogenous():
+            #         self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is not allowed.")
+            #         break
+
+        # remove single groups with unchanged names and check for duplicated names
+        for group_name, node_keys in grouped_nodes.items():
+            if len(node_keys) == 1 and group_name == next(iter(node_keys)):
+                del self._grouped_nodes[group_name]
+            try:  # If group name already exists for a node and the existing node is not aggregated to a new one.
+                meta = data[group_name].get_meta(meta_key)
+                if meta is None or meta.get_value() is None:
+                    self._errors.add(
+                        f"Metadata name for aggregated node ({group_name}) already exists in the model: {data[group_name]}",
+                    )
+            except KeyError:
+                pass
+
+        self._check_uniqueness()
+        self._report_errors(self._errors)
+
+    def _report_errors(self, errors: set[str]) -> None:
+        if errors:
+            n = len(errors)
+            s = "s" if n > 1 else ""
+            error_str = "\n".join(errors)
+            message = f"Found {n} error{s}:\n{error_str}"
+            raise RuntimeError(message)
+
+    def _check_uniqueness(self) -> None:
+        flipped = defaultdict(set)
+        for group, members in self._grouped_nodes.items():
+            for member in members:
+                flipped[member].add(group)
+        for k, v in flipped.items():
+            if len(v) > 1:
+                self._errors.add(f"Node {k} belongs to more than one group: {v}")
+
+    def _disaggregate(
+        self,
+        model: Model,
+        original_data: dict[str, Component | TimeVector | Curve | Expr],
+    ) -> None:
+        new_data = model.get_data()
+
+        deleted_group_names: set[str] = self._init_disaggregate(new_data)
+
+        self._validate_restore_nodes(new_data, deleted_group_names)
+        self._restore_nodes(new_data, original_data, deleted_group_names)
+        self._restore_references(new_data)
+
+        restorable_transports = self._validate_restore_internal_transports(new_data, original_data, deleted_group_names)
+        self._restore_internal_transports(new_data, original_data, restorable_transports)
+
+        self._delete_internal_transport_demands(new_data)
+
+    def _init_disaggregate(
+        self,
+        new_data: dict[str, Component | TimeVector | Curve | Expr],
+    ) -> set[str]:
+        self._errors.clear()
+        deleted_group_names: set[str] = set()
+
+        for group_name in self._grouped_nodes:
+            if group_name not in new_data:
+                deleted_group_names.add(group_name)
+                continue
+
+            group_node = new_data[group_name]
+
+            if not isinstance(group_node, Node):
+                deleted_group_names.add(group_name)
+
+        return deleted_group_names
+
+    def _validate_restore_nodes(
+        self,
+        new_data: dict[str, Component | TimeVector | Curve | Expr],
+        deleted_group_names: set[str],
+    ) -> None:
+        for group_name, member_node_names in self._grouped_nodes.items():
+            if group_name in deleted_group_names:
+                continue
+            for key in member_node_names:
+                if key in new_data:
+                    obj = new_data[key]
+                    if not (isinstance(obj, Node) and obj.get_commodity() == self._commodity):
+                        typ = type(obj).__name__
+                        message = f"Restoring node {key} from group node {group_name} failed because model already stores object of {typ} with that name."
+                        self._errors.add(message)
+        self._report_errors(self._errors)
+
+    def _restore_nodes(
+        self,
+        new_data: dict[str, Component | TimeVector | Curve | Expr],
+        original_data: dict[str, Component | TimeVector | Curve | Expr],
+        deleted_group_names: set[str],
+    ) -> None:
+        for group_name, member_node_names in self._grouped_nodes.items():
+            if group_name in deleted_group_names:
+                continue
+
+            group_node: Node = new_data.pop(group_name)
+
+            group_price: Price | None = group_node.get_price()
+
+            for key in member_node_names:
+                original_node: Node = original_data[key]
+                if group_price is not None:
+                    original_price = original_node.get_price()
+                    original_price.copy_from(group_price)
+                new_data[key] = original_node
+
+    def _validate_restore_internal_transports(
+        self,
+        new_data: dict[str, Component | TimeVector | Curve | Expr],
+        original_data: dict[str, Component | TimeVector | Curve | Expr],
+        deleted_group_names: set[str],
+    ) -> set[str]:
+        nodes_not_added_back: set[str] = set()
+        restorable_transports: set[str] = set()
+
+        components = {k: v for k, v in original_data.items() if isinstance(v, Component)}
+        transports = get_transports_by_commodity(components, self._commodity)
+
+        for group_name, member_node_names in self._grouped_nodes.items():
+            if group_name in deleted_group_names:
+                nodes_not_added_back.update(member_node_names)
+                continue
+
+        for key in self._internal_transports:
+            from_node, to_node = transports[key]
+
+            if (from_node in nodes_not_added_back) and (to_node in nodes_not_added_back):
+                continue
+
+            restorable_transports.add(key)
+            if key in new_data:
+                obj = new_data[key]
+                typ = type(obj).__name__
+                message = f"Restoring deleted transport {key} from group node {group_name} failed because model already stores object of {typ} with that name."
+                self._errors.add(message)

+        self._report_errors(self._errors)
+
+        return restorable_transports
+
+    def _restore_internal_transports(
+        self,
+        new_data: dict[str, Component | TimeVector | Curve | Expr],
+        original_data: dict[str, Component | TimeVector | Curve | Expr],
+        restorable_transports: set[str],
+    ) -> None:
+        for key in self._internal_transports:
+            if key not in restorable_transports:
+                continue
+            transport = original_data[key]
+            new_data[key] = transport
+
+    def _delete_internal_transport_demands(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
+        for key in self._internal_transport_demands:
+            new_data.pop(key, None)
+
+    def _restore_references(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
+        for component_name, replacements in self._replaced_references.items():
+            # internal transports are handled by themselves.
+            if component_name in new_data and component_name not in self._internal_transports and isinstance(new_data[component_name], Component):
+                for replacement in replacements:
+                    disaggregated, group_name = replacement
+                    new_data[component_name].replace_node(old=group_name, new=disaggregated)  # set the disaggregated node back in the component.
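
For orientation, here is a sketch of how the NodeAggregator added in this release might be driven end to end. This is a hypothetical example, not taken from the package: the public aggregate/disaggregate entry points are assumed to come from the Aggregator base class (this diff only shows the private _aggregate/_disaggregate hooks), and the metadata key "price_area" and the elided time-index arguments are placeholders.

# Hypothetical usage sketch; the entry points and placeholder arguments
# marked below are assumptions, not shown in this diff.
from framcore import Model
from framcore.aggregators import NodeAggregator

model = Model()
# ... populate the model with Nodes, Transmissions and Demands, each Node
# carrying Member metadata under the key "price_area" (hypothetical) ...

aggregator = NodeAggregator(
    commodity="Power",      # only Nodes of this commodity are grouped
    meta_key="price_area",  # Member metadata naming each Node's group
    data_dim=...,           # a SinglePeriodTimeIndex for eager price evaluation
    scen_dim=...,           # a FixedFrequencyTimeIndex for eager price evaluation
    utilization_rate=0.5,   # assumed flow on internal lines without a volume
)

aggregator.aggregate(model)     # assumed public wrapper around _aggregate
# ... solve or query the smaller model ...
aggregator.disaggregate(model)  # assumed public wrapper around _disaggregate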
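
The demand level computed in _add_internal_transport_demands is plain arithmetic: volume times loss when the internal transport has a known outgoing volume, otherwise capacity times the assumed utilization rate times loss. A worked example with made-up numbers:

# Illustrative numbers only; units assumed to be MW.
loss = 0.02  # 2 percent transmission loss

# Case 1: the transport has a known outgoing volume.
outgoing_volume = 800.0
level = outgoing_volume * loss  # -> 16.0 MW of replacement Demand on the merged node

# Case 2: no volume available; capacity * utilization_rate stands in for it.
max_capacity = 1000.0
utilization_rate = 0.5  # the constructor default
level = max_capacity * utilization_rate * loss  # -> 10.0 MW of replacement Demand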
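
_check_uniqueness works by flipping the group-to-members mapping into a member-to-groups mapping and flagging any node claimed by more than one group. A minimal standalone illustration with made-up group names:

from collections import defaultdict

# made-up grouping: node_b is (incorrectly) claimed by two groups
grouped_nodes = {"DK1": {"node_a", "node_b"}, "DK2": {"node_b", "node_c"}}

flipped: dict[str, set[str]] = defaultdict(set)
for group, members in grouped_nodes.items():
    for member in members:
        flipped[member].add(group)

for node, groups in flipped.items():
    if len(groups) > 1:
        print(f"Node {node} belongs to more than one group: {groups}")
# prints one line, for node_b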