fram-core 0.0.0-py3-none-any.whl → 0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fram_core-0.1.0.dist-info/METADATA +42 -0
- fram_core-0.1.0.dist-info/RECORD +100 -0
- {fram_core-0.0.0.dist-info → fram_core-0.1.0.dist-info}/WHEEL +1 -2
- fram_core-0.1.0.dist-info/licenses/LICENSE.md +8 -0
- framcore/Base.py +161 -0
- framcore/Model.py +90 -0
- framcore/__init__.py +10 -0
- framcore/aggregators/Aggregator.py +172 -0
- framcore/aggregators/HydroAggregator.py +849 -0
- framcore/aggregators/NodeAggregator.py +530 -0
- framcore/aggregators/WindSolarAggregator.py +315 -0
- framcore/aggregators/__init__.py +13 -0
- framcore/aggregators/_utils.py +184 -0
- framcore/attributes/Arrow.py +307 -0
- framcore/attributes/ElasticDemand.py +90 -0
- framcore/attributes/ReservoirCurve.py +23 -0
- framcore/attributes/SoftBound.py +16 -0
- framcore/attributes/StartUpCost.py +65 -0
- framcore/attributes/Storage.py +158 -0
- framcore/attributes/TargetBound.py +16 -0
- framcore/attributes/__init__.py +63 -0
- framcore/attributes/hydro/HydroBypass.py +49 -0
- framcore/attributes/hydro/HydroGenerator.py +100 -0
- framcore/attributes/hydro/HydroPump.py +178 -0
- framcore/attributes/hydro/HydroReservoir.py +27 -0
- framcore/attributes/hydro/__init__.py +13 -0
- framcore/attributes/level_profile_attributes.py +911 -0
- framcore/components/Component.py +136 -0
- framcore/components/Demand.py +144 -0
- framcore/components/Flow.py +189 -0
- framcore/components/HydroModule.py +371 -0
- framcore/components/Node.py +99 -0
- framcore/components/Thermal.py +208 -0
- framcore/components/Transmission.py +198 -0
- framcore/components/_PowerPlant.py +81 -0
- framcore/components/__init__.py +22 -0
- framcore/components/wind_solar.py +82 -0
- framcore/curves/Curve.py +44 -0
- framcore/curves/LoadedCurve.py +146 -0
- framcore/curves/__init__.py +9 -0
- framcore/events/__init__.py +21 -0
- framcore/events/events.py +51 -0
- framcore/expressions/Expr.py +591 -0
- framcore/expressions/__init__.py +30 -0
- framcore/expressions/_get_constant_from_expr.py +477 -0
- framcore/expressions/_utils.py +73 -0
- framcore/expressions/queries.py +416 -0
- framcore/expressions/units.py +227 -0
- framcore/fingerprints/__init__.py +11 -0
- framcore/fingerprints/fingerprint.py +292 -0
- framcore/juliamodels/JuliaModel.py +171 -0
- framcore/juliamodels/__init__.py +7 -0
- framcore/loaders/__init__.py +10 -0
- framcore/loaders/loaders.py +405 -0
- framcore/metadata/Div.py +73 -0
- framcore/metadata/ExprMeta.py +56 -0
- framcore/metadata/LevelExprMeta.py +32 -0
- framcore/metadata/Member.py +55 -0
- framcore/metadata/Meta.py +44 -0
- framcore/metadata/__init__.py +15 -0
- framcore/populators/Populator.py +108 -0
- framcore/populators/__init__.py +7 -0
- framcore/querydbs/CacheDB.py +50 -0
- framcore/querydbs/ModelDB.py +34 -0
- framcore/querydbs/QueryDB.py +45 -0
- framcore/querydbs/__init__.py +11 -0
- framcore/solvers/Solver.py +63 -0
- framcore/solvers/SolverConfig.py +272 -0
- framcore/solvers/__init__.py +9 -0
- framcore/timeindexes/AverageYearRange.py +27 -0
- framcore/timeindexes/ConstantTimeIndex.py +22 -0
- framcore/timeindexes/DailyIndex.py +33 -0
- framcore/timeindexes/FixedFrequencyTimeIndex.py +814 -0
- framcore/timeindexes/HourlyIndex.py +33 -0
- framcore/timeindexes/IsoCalendarDay.py +33 -0
- framcore/timeindexes/ListTimeIndex.py +277 -0
- framcore/timeindexes/ModelYear.py +23 -0
- framcore/timeindexes/ModelYears.py +27 -0
- framcore/timeindexes/OneYearProfileTimeIndex.py +29 -0
- framcore/timeindexes/ProfileTimeIndex.py +43 -0
- framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
- framcore/timeindexes/TimeIndex.py +103 -0
- framcore/timeindexes/WeeklyIndex.py +33 -0
- framcore/timeindexes/__init__.py +36 -0
- framcore/timeindexes/_time_vector_operations.py +689 -0
- framcore/timevectors/ConstantTimeVector.py +131 -0
- framcore/timevectors/LinearTransformTimeVector.py +131 -0
- framcore/timevectors/ListTimeVector.py +127 -0
- framcore/timevectors/LoadedTimeVector.py +97 -0
- framcore/timevectors/ReferencePeriod.py +51 -0
- framcore/timevectors/TimeVector.py +108 -0
- framcore/timevectors/__init__.py +17 -0
- framcore/utils/__init__.py +35 -0
- framcore/utils/get_regional_volumes.py +387 -0
- framcore/utils/get_supported_components.py +60 -0
- framcore/utils/global_energy_equivalent.py +63 -0
- framcore/utils/isolate_subnodes.py +172 -0
- framcore/utils/loaders.py +97 -0
- framcore/utils/node_flow_utils.py +236 -0
- framcore/utils/storage_subsystems.py +106 -0
- fram_core-0.0.0.dist-info/METADATA +0 -5
- fram_core-0.0.0.dist-info/RECORD +0 -4
- fram_core-0.0.0.dist-info/top_level.txt +0 -1
framcore/aggregators/NodeAggregator.py (new file):

@@ -0,0 +1,530 @@

```python
from __future__ import annotations

from collections import defaultdict
from time import time
from typing import TYPE_CHECKING

from framcore.aggregators import Aggregator
from framcore.aggregators._utils import _aggregate_costs
from framcore.attributes import MaxFlowVolume, Price
from framcore.components import Component, Demand, Flow, Node, Transmission
from framcore.curves import Curve
from framcore.expressions import Expr
from framcore.metadata import Member, Meta
from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
from framcore.timevectors import TimeVector
from framcore.utils import get_component_to_nodes, get_flow_infos, get_node_to_commodity, get_supported_components, get_transports_by_commodity

if TYPE_CHECKING:
    from framcore import Model


class NodeAggregator(Aggregator):
    """
    Aggregate groups of Nodes for a commodity. Subclass of Aggregator.

    Aggregation steps (self._aggregate):

    1. Map all Components to the Nodes of the correct commodity that they reference, if any. This is important for redirecting all references to the
       new Nodes after aggregation.
    2. Create a mapping of which members each new Node will be aggregated from. This step also does a lot of error handling and checks the validity of
       the metadata and groupings. Raises an error if:
       - Nodes do not have any metadata for the meta key.
       - Nodes have the wrong metadata object type for the meta key (must be Member).
       - Exogenous Nodes are grouped together for aggregation with endogenous Nodes.
    3. Initialize new Node objects and set prices and exogenous status. Prices are calculated as a weighted average of all the member Node prices.
    4. Old Nodes are deleted from the Model data, after which the aggregated Node is added, and references in the rest of the system are updated to point to
       the new Node.
    5. Handling of transports: All Components which transport the same commodity as the aggregated Nodes are analysed. If the two Nodes they connect are now
       the same aggregated Node, the transport is 'internal', meaning it now operates within a single Node. If the transport Component is lossy, it is replaced
       by a Demand Component representing the commodity consumption caused by the loss. All internal transports are afterwards deleted.

    Disaggregation steps (self._disaggregate):

    1. Collect the set of group Node keys which have either been removed from the Model data or changed to reference something other than a Node.
    2. Validate that the IDs of the Nodes to be restored have not been used to reference something else in the meantime.
    3. Delete the aggregated Nodes and restore the old Nodes to the Model. Also copy shadow price results from the aggregated Nodes to the disaggregated ones.
       NB! This will overwrite any previous shadow prices of the original disaggregated Nodes.
    4. Restore the references in all objects to the disaggregated Nodes. A mapping created during aggregation is used for this.
    5. Validate that no restorable internal transport has a name conflict with an existing object in the Model.
       NB! An internal transport is not restorable if one or both of its referenced Nodes have been removed from the Model or now reference another
       object. See step 1.
    6. Restore all the restorable internal transports from the original data.
    7. Delete the aggregation-created Demand objects representing internal transports.

    See Aggregator for general design notes and rules to follow when using Aggregators.

    """
```
```python
    def __init__(
        self,
        commodity: str,
        meta_key: str,
        data_dim: SinglePeriodTimeIndex,
        scen_dim: FixedFrequencyTimeIndex,
        utilization_rate: float = 0.5,
    ) -> None:
        """
        Aggregate groups of nodes (defined by metadata key) for a commodity.

        Args:
            commodity (str): Commodity of the Nodes to be aggregated.
            meta_key (str): Metadata key behind which each Node stores a Member naming the group it will be aggregated into.
            data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation of prices.
            scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation of prices.
            utilization_rate (float, optional): Assumed utilization rate on internal transports. Used to calculate new Demands after aggregation
                if the transport does not have a volume.
                Defaults to 0.5 (i.e. 50 percent utilization in each direction).

        """
        super().__init__()
        self._commodity = commodity
        self._meta_key = meta_key
        self._data_dim = data_dim
        self._scen_dim = scen_dim
        self._utilization_rate = utilization_rate

        # To remember all modifications in _aggregate so we can undo them in _disaggregate.
        # Will be cleared in _init_aggregate, so that the same memory can be re-used.
        self._grouped_nodes: dict[str, set[str]] = defaultdict(set)
        self._replaced_references: dict[str, set[tuple[str, str]]] = defaultdict(set)  # tracks all node references that have been replaced
        self._internal_transports: set[str] = set()
        self._internal_transport_demands: set[str] = set()

        # To record error messages in _aggregate and _disaggregate.
        # Will be cleared in _init_aggregate and _init_disaggregate,
        # so that the same memory can be re-used.
        self._errors: set[str] = set()

    def _aggregate(self, model: Model) -> None:
        """Modify model, components and data."""
        t0 = time()
        # Will be modified by upcoming code by adding group nodes
        # and deleting member nodes and redundant transports.
        data = model.get_data()

        # Helper-dict to give simpler access to components in upcoming loops.
        # The components are the same instances as in data, and upcoming code
        # will use this to modify components in place, in self._replace_node.
        components: dict[str, Component] = {key: c for key, c in data.items() if isinstance(c, Component)}

        # This is just a helper-dict to give fast access.
        component_to_nodes: dict[str, set[str]] = get_component_to_nodes(components)

        self._init_aggregate(components, data)
        self.send_debug_event(f"init time {round(time() - t0, 3)} seconds")

        # main logic
        t = time()
        for group_name, member_node_names in self._grouped_nodes.items():
            member_node_names: set[str]
            group_node = Node(commodity=self._commodity)
            self._set_group_price(model, group_node, member_node_names, "EUR/MWh")
            self._delete_members(data, member_node_names)

            assert group_name not in data, f"{group_name}"
            data[group_name] = group_node

            self._replace_node(group_name, member_node_names, components, component_to_nodes)
            components[group_name] = group_node
        self.send_debug_event(f"main logic time {round(time() - t, 3)} seconds")

        t = time()
        transports = get_transports_by_commodity(components, self._commodity)
        self._update_internal_transports(transports)
        self._delete_internal_transports(data)
        self._add_internal_transport_demands(model, components, transports)
        self.send_debug_event(f"handle internal transport losses time {round(time() - t, 3)} seconds")

        self.send_debug_event(f"total time {round(time() - t0, 3)} seconds")

    def _update_internal_transports(
        self,
        transports: dict[str, tuple[str, str]],
    ) -> None:
        for name, (from_node, to_node) in transports.items():
            if from_node == to_node:
                # if not, the transport was already invalid before aggregation
                assert to_node in self._grouped_nodes

                # add the transport name (not to_node), so the transport can later be deleted and restored by key
                self._internal_transports.add(name)

    def _get_demand_member_meta_keys(self, components: dict[str, Component]) -> set[str]:
        """Find all direct_out demand Flows via get_supported_components and collect Member meta keys from them."""
        out: set[str] = set()
        nodes_and_flows = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())
        node_to_commodity = get_node_to_commodity(nodes_and_flows)
        for flow in nodes_and_flows.values():
            if not isinstance(flow, Flow):
                continue
            flow_infos = get_flow_infos(flow, node_to_commodity)
            if len(flow_infos) != 1:
                continue
            flow_info = flow_infos[0]
            if flow_info.category != "direct_out":
                continue
            if flow_info.commodity_out != self._commodity:
                continue
            demand = flow
            for key in demand.get_meta_keys():
                meta = demand.get_meta(key)
                if isinstance(meta, Member):
                    out.add(key)
        return out

    def _add_internal_transport_demands(
        self,
        model: Model,
        components: dict[str, Component],
        transports: dict[str, tuple[str, str]],
    ) -> None:
        """
        Add demand representing loss on internal transmission lines being removed by aggregation.

        This is done to avoid underestimation of aggregated demand.
        """
        data = model.get_data()

        demand_member_meta_keys = self._get_demand_member_meta_keys(components)

        # TODO: Document that we rely on Transmission and Demand APIs to get loss
        for key in self._internal_transports:
            transport = components[key]
            from_node, to_node = transports[key]
            assert from_node == to_node, (
                f"Transport {key} added to internal transports when it should not have been. Source node {from_node} and destination node {to_node} are not the same."
            )
            node = from_node

            transport: Transmission

            if transport.get_loss():
                profile = None
                loss = transport.get_loss()
                if loss.get_level() is None:
                    continue
                if transport.get_outgoing_volume().get_level():
                    level = transport.get_outgoing_volume().get_level() * loss.get_level()

                    # could multiply by loss profile here, but profile * profile is not yet supported so we wait.
                    profile = transport.get_outgoing_volume().get_profile()

                # elif exploitation factor at individual level. How to best access this?
                else:
                    level = transport.get_max_capacity().get_level() * self._utilization_rate * loss.get_level()
                    profile = loss.get_profile()

                internal_losses_demand = Demand(
                    node=node,
                    capacity=MaxFlowVolume(
                        level=level,
                        profile=profile,
                    ),
                )

                for meta_key in demand_member_meta_keys:  # transfer member metadata to internal loss Demand
                    internal_losses_demand.add_meta(meta_key, Member("InternalTransportLossFromNodeAggregator"))

                demand_key = key + "_InternalTransportLossDemand_" + node

                self._internal_transport_demands.add(demand_key)
                if demand_key in data:
                    msg = f"Could not use key {demand_key} for internal transport demand because it already exists in the Model."
                    raise KeyError(msg)
                data[demand_key] = internal_losses_demand
```
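The sizing logic above has two branches: use the transport's own outgoing volume when it has one, otherwise fall back to capacity times the assumed utilization rate. A worked sketch of the arithmetic with made-up numbers (plain floats stand in for framcore's Expr-based levels):

```python
# The two sizing branches from _add_internal_transport_demands, with made-up numbers.
loss = 0.02              # 2 percent transmission loss
outgoing_volume = 800.0  # MW, known transported volume
max_capacity = 1000.0    # MW
utilization_rate = 0.5   # constructor default

# Branch 1: the transport has a volume -> size the loss Demand from it.
level_from_volume = outgoing_volume * loss                    # 16.0 MW
# Branch 2: no volume -> assume 50 percent utilization of capacity.
level_from_capacity = max_capacity * utilization_rate * loss  # 10.0 MW
```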
```python
    def _delete_internal_transports(
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        for key in self._internal_transports:
            self._aggregation_map[key] = set()
            del data[key]

    def _delete_members(
        self,
        data: dict[str, Component | TimeVector | Curve | Expr],
        member_node_names: set[str],
    ) -> None:
        for member in member_node_names:
            del data[member]

    def _set_group_price(
        self,
        model: Model,
        group_node: Node,
        member_node_names: set[str],
        weight_unit: str,
    ) -> None:
        data = model.get_data()
        weights = [1.0 / len(member_node_names)] * len(member_node_names)
        prices = [data[key].get_price() for key in member_node_names]

        exogenous = [data[key].is_exogenous() for key in member_node_names]
        if all(exogenous):
            group_node.set_exogenous()
        elif any(exogenous):
            message = f"Only some member Nodes of group {group_node} are exogenous. This is ambiguous. Either all or none must be exogenous."
            raise ValueError(message)
        if all(prices):
            level, profile, intercept = _aggregate_costs(
                model=model,
                costs=prices,
                weights=weights,
                weight_unit=weight_unit,
                data_dim=self._data_dim,
                scen_dim=self._scen_dim,
            )
            group_node.get_price().set_level(level)
            group_node.get_price().set_profile(profile)
            group_node.get_price().set_intercept(intercept)
        elif any(prices):
            missing = [key for key in member_node_names if data[key].get_price() is None]
            self.send_warning_event(f"Only some member Nodes of group {group_node} have a Price, skipping price aggregation. Missing: {missing}")
```
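`_set_group_price` always passes equal weights, so for constant price levels the weighted average computed by `_aggregate_costs` reduces to an arithmetic mean. A toy illustration (floats stand in for the Expr-based price levels actually used):

```python
# Equal-weight aggregation as set up in _set_group_price (made-up EUR/MWh levels).
member_prices = [30.0, 50.0, 40.0]
weights = [1.0 / len(member_prices)] * len(member_prices)  # [1/3, 1/3, 1/3]
group_level = sum(p * w for p, w in zip(member_prices, weights))
# ~40.0 EUR/MWh: the arithmetic mean of the member levels, up to float rounding.
```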
```python
    def _replace_node(
        self,
        group_name: str,
        member_node_names: set[str],
        components: dict[str, Component],
        component_to_nodes: dict[str, set[str]],
    ) -> None:
        for name, component in components.items():
            replace_keys = component_to_nodes[name]
            for key in member_node_names:
                if key in replace_keys:
                    component.replace_node(key, group_name)
                    self._replaced_references[name].add((key, group_name))

    def _init_aggregate(  # noqa: C901
        self,
        components: dict[str, Component],
        data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        self._grouped_nodes.clear()
        self._internal_transports.clear()
        self._internal_transport_demands.clear()
        self._errors.clear()

        self._aggregation_map = defaultdict(set)

        exogenous_groups = set()

        meta_key = self._meta_key

        for key, component in components.items():
            if not isinstance(component, Node):
                self._aggregation_map[key].add(key)
                continue

            node: Node = component

            commodity = node.get_commodity()

            if self._commodity != commodity:
                self._aggregation_map[key].add(key)
                continue

            meta: Meta | None = node.get_meta(meta_key)

            if meta is None:
                self._errors.add(f"Node {key} had no metadata behind key {meta_key}.")
                continue

            meta: Meta

            if not isinstance(meta, Member):
                got = type(meta).__name__
                message = f"Node {key} has metadata behind key {meta_key} with wrong type. Expected Member, got {got}."
                self._errors.add(message)
                continue

            meta: Member

            group_name: str = meta.get_value()

            if node.is_exogenous():
                # register groups with exogenous Nodes to validate later.
                exogenous_groups.add(group_name)

            if not self._errors:
                self._aggregation_map[key].add(group_name)
                self._grouped_nodes[group_name].add(key)

        grouped_nodes = self._grouped_nodes.copy()

        for group_name in exogenous_groups:  # Check exogenous groups.
            node_keys = grouped_nodes[group_name]
            if len(node_keys) > 1:  # allow unchanged or renamed exogenous Nodes.
                # We allow pure exogenous groups.
                exogenous = [components[node_key].is_exogenous() for node_key in node_keys]
                if (not all(exogenous)) and any(exogenous):
                    self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is ambiguous and therefore not allowed.")

        # remove single groups with unchanged names and check for duplicated names
        for group_name, node_keys in grouped_nodes.items():
            if len(node_keys) == 1 and group_name == next(iter(node_keys)):
                del self._grouped_nodes[group_name]
            try:  # If group name already exists for a node and the existing node is not aggregated to a new one.
                meta = data[group_name].get_meta(meta_key)
                if meta is None or meta.get_value() is None:
                    self._errors.add(
                        f"Metadata name for aggregated node ({group_name}) already exists in the model: {data[group_name]}",
                    )
            except KeyError:
                pass

        self._check_uniqueness()
        self._report_errors(self._errors)
```
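`_init_aggregate` expects every Node of the commodity to carry a `Member` whose value names its group. A plain-dict sketch of that convention and the resulting `_grouped_nodes` mapping (node and group names are made up):

```python
# Sketch of the grouping convention consumed by _init_aggregate (made-up names).
# In the real model each Node stores Member("<group>") behind the aggregator's meta_key.
node_to_group = {
    "NO1": "south",  # Node "NO1" carries Member("south")
    "NO2": "south",
    "NO3": "mid",
}
grouped_nodes: dict[str, set[str]] = {}
for node_key, group_name in node_to_group.items():
    grouped_nodes.setdefault(group_name, set()).add(node_key)
# grouped_nodes == {"south": {"NO1", "NO2"}, "mid": {"NO3"}}
```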
```python
    def _report_errors(self, errors: set[str]) -> None:
        if errors:
            n = len(errors)
            s = "s" if n > 1 else ""
            error_str = "\n".join(errors)
            message = f"Found {n} error{s}:\n{error_str}"
            raise RuntimeError(message)

    def _check_uniqueness(self) -> None:
        flipped = defaultdict(set)
        for group, members in self._grouped_nodes.items():
            for member in members:
                flipped[member].add(group)
        for k, v in flipped.items():
            if len(v) > 1:
                self._errors.add(f"Node {k} belongs to more than one group {v}")

    def _disaggregate(
        self,
        model: Model,
        original_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> None:
        new_data = model.get_data()

        deleted_group_names: set[str] = self._init_disaggregate(new_data)

        self._validate_restore_nodes(new_data, deleted_group_names)
        self._restore_nodes(new_data, original_data, deleted_group_names)
        self._restore_references(new_data)

        restorable_transports = self._validate_restore_internal_transports(new_data, original_data, deleted_group_names)
        self._restore_internal_transports(new_data, original_data, restorable_transports)

        self._delete_internal_transport_demands(new_data)

    def _init_disaggregate(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
    ) -> set[str]:
        self._errors.clear()
        deleted_group_names: set[str] = set()

        for group_name in self._grouped_nodes:
            if group_name not in new_data:
                deleted_group_names.add(group_name)
                continue

            group_node = new_data[group_name]

            if not (isinstance(group_node, Node) and group_node.get_commodity() == self._commodity):
                deleted_group_names.add(group_name)

        return deleted_group_names

    def _validate_restore_nodes(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> None:
        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                continue
            for key in member_node_names:
                if key in new_data:
                    obj = new_data[key]
                    if not (isinstance(obj, Node) and obj.get_commodity() == self._commodity):
                        typ = type(obj).__name__
                        message = f"Restoring node {key} from group node {group_name} failed because the model already stores an object of type {typ} with that name."
                        self._errors.add(message)
        self._report_errors(self._errors)

    def _restore_nodes(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> None:
        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                continue

            group_node: Node = new_data.pop(group_name)

            group_price: Price | None = group_node.get_price()

            for key in member_node_names:
                original_node: Node = original_data[key]
                if group_price is not None:
                    original_price = original_node.get_price()
                    original_price.copy_from(group_price)
                new_data[key] = original_node

    def _validate_restore_internal_transports(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        deleted_group_names: set[str],
    ) -> set[str]:
        nodes_not_added_back: set[str] = set()
        restorable_transports: set[str] = set()

        components = {k: v for k, v in original_data.items() if isinstance(v, Component)}
        transports = get_transports_by_commodity(components, self._commodity)

        for group_name, member_node_names in self._grouped_nodes.items():
            if group_name in deleted_group_names:
                nodes_not_added_back.update(member_node_names)
                continue

        for key in self._internal_transports:
            from_node, to_node = transports[key]

            if (from_node in nodes_not_added_back) and (to_node in nodes_not_added_back):
                continue

            restorable_transports.add(key)
            if key in new_data:
                obj = new_data[key]
                typ = type(obj).__name__
                message = f"Restoring deleted transport {key} failed because the model already stores an object of type {typ} with that name."
                self._errors.add(message)

        self._report_errors(self._errors)

        return restorable_transports

    def _restore_internal_transports(
        self,
        new_data: dict[str, Component | TimeVector | Curve | Expr],
        original_data: dict[str, Component | TimeVector | Curve | Expr],
        restorable_transports: set[str],
    ) -> None:
        for key in self._internal_transports:
            if key not in restorable_transports:
                continue
            transport = original_data[key]
            new_data[key] = transport

    def _delete_internal_transport_demands(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
        for key in self._internal_transport_demands:
            new_data.pop(key, None)

    def _restore_references(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> None:
        for component_name, replacements in self._replaced_references.items():
            # internal transports are handled separately.
            if component_name in new_data and component_name not in self._internal_transports and isinstance(new_data[component_name], Component):
                for replacement in replacements:
                    disaggregated, group_name = replacement
                    new_data[component_name].replace_node(old=group_name, new=disaggregated)  # set the disaggregated node back in the component.
```