fram-core 0.0.0__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fram_core-0.1.0.dist-info/METADATA +42 -0
- fram_core-0.1.0.dist-info/RECORD +100 -0
- {fram_core-0.0.0.dist-info → fram_core-0.1.0.dist-info}/WHEEL +1 -2
- fram_core-0.1.0.dist-info/licenses/LICENSE.md +8 -0
- framcore/Base.py +161 -0
- framcore/Model.py +90 -0
- framcore/__init__.py +10 -0
- framcore/aggregators/Aggregator.py +172 -0
- framcore/aggregators/HydroAggregator.py +849 -0
- framcore/aggregators/NodeAggregator.py +530 -0
- framcore/aggregators/WindSolarAggregator.py +315 -0
- framcore/aggregators/__init__.py +13 -0
- framcore/aggregators/_utils.py +184 -0
- framcore/attributes/Arrow.py +307 -0
- framcore/attributes/ElasticDemand.py +90 -0
- framcore/attributes/ReservoirCurve.py +23 -0
- framcore/attributes/SoftBound.py +16 -0
- framcore/attributes/StartUpCost.py +65 -0
- framcore/attributes/Storage.py +158 -0
- framcore/attributes/TargetBound.py +16 -0
- framcore/attributes/__init__.py +63 -0
- framcore/attributes/hydro/HydroBypass.py +49 -0
- framcore/attributes/hydro/HydroGenerator.py +100 -0
- framcore/attributes/hydro/HydroPump.py +178 -0
- framcore/attributes/hydro/HydroReservoir.py +27 -0
- framcore/attributes/hydro/__init__.py +13 -0
- framcore/attributes/level_profile_attributes.py +911 -0
- framcore/components/Component.py +136 -0
- framcore/components/Demand.py +144 -0
- framcore/components/Flow.py +189 -0
- framcore/components/HydroModule.py +371 -0
- framcore/components/Node.py +99 -0
- framcore/components/Thermal.py +208 -0
- framcore/components/Transmission.py +198 -0
- framcore/components/_PowerPlant.py +81 -0
- framcore/components/__init__.py +22 -0
- framcore/components/wind_solar.py +82 -0
- framcore/curves/Curve.py +44 -0
- framcore/curves/LoadedCurve.py +146 -0
- framcore/curves/__init__.py +9 -0
- framcore/events/__init__.py +21 -0
- framcore/events/events.py +51 -0
- framcore/expressions/Expr.py +591 -0
- framcore/expressions/__init__.py +30 -0
- framcore/expressions/_get_constant_from_expr.py +477 -0
- framcore/expressions/_utils.py +73 -0
- framcore/expressions/queries.py +416 -0
- framcore/expressions/units.py +227 -0
- framcore/fingerprints/__init__.py +11 -0
- framcore/fingerprints/fingerprint.py +292 -0
- framcore/juliamodels/JuliaModel.py +171 -0
- framcore/juliamodels/__init__.py +7 -0
- framcore/loaders/__init__.py +10 -0
- framcore/loaders/loaders.py +405 -0
- framcore/metadata/Div.py +73 -0
- framcore/metadata/ExprMeta.py +56 -0
- framcore/metadata/LevelExprMeta.py +32 -0
- framcore/metadata/Member.py +55 -0
- framcore/metadata/Meta.py +44 -0
- framcore/metadata/__init__.py +15 -0
- framcore/populators/Populator.py +108 -0
- framcore/populators/__init__.py +7 -0
- framcore/querydbs/CacheDB.py +50 -0
- framcore/querydbs/ModelDB.py +34 -0
- framcore/querydbs/QueryDB.py +45 -0
- framcore/querydbs/__init__.py +11 -0
- framcore/solvers/Solver.py +63 -0
- framcore/solvers/SolverConfig.py +272 -0
- framcore/solvers/__init__.py +9 -0
- framcore/timeindexes/AverageYearRange.py +27 -0
- framcore/timeindexes/ConstantTimeIndex.py +22 -0
- framcore/timeindexes/DailyIndex.py +33 -0
- framcore/timeindexes/FixedFrequencyTimeIndex.py +814 -0
- framcore/timeindexes/HourlyIndex.py +33 -0
- framcore/timeindexes/IsoCalendarDay.py +33 -0
- framcore/timeindexes/ListTimeIndex.py +277 -0
- framcore/timeindexes/ModelYear.py +23 -0
- framcore/timeindexes/ModelYears.py +27 -0
- framcore/timeindexes/OneYearProfileTimeIndex.py +29 -0
- framcore/timeindexes/ProfileTimeIndex.py +43 -0
- framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
- framcore/timeindexes/TimeIndex.py +103 -0
- framcore/timeindexes/WeeklyIndex.py +33 -0
- framcore/timeindexes/__init__.py +36 -0
- framcore/timeindexes/_time_vector_operations.py +689 -0
- framcore/timevectors/ConstantTimeVector.py +131 -0
- framcore/timevectors/LinearTransformTimeVector.py +131 -0
- framcore/timevectors/ListTimeVector.py +127 -0
- framcore/timevectors/LoadedTimeVector.py +97 -0
- framcore/timevectors/ReferencePeriod.py +51 -0
- framcore/timevectors/TimeVector.py +108 -0
- framcore/timevectors/__init__.py +17 -0
- framcore/utils/__init__.py +35 -0
- framcore/utils/get_regional_volumes.py +387 -0
- framcore/utils/get_supported_components.py +60 -0
- framcore/utils/global_energy_equivalent.py +63 -0
- framcore/utils/isolate_subnodes.py +172 -0
- framcore/utils/loaders.py +97 -0
- framcore/utils/node_flow_utils.py +236 -0
- framcore/utils/storage_subsystems.py +106 -0
- fram_core-0.0.0.dist-info/METADATA +0 -5
- fram_core-0.0.0.dist-info/RECORD +0 -4
- fram_core-0.0.0.dist-info/top_level.txt +0 -1
|
@@ -0,0 +1,849 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
from copy import deepcopy
|
|
5
|
+
from time import time
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
from framcore.aggregators import Aggregator
|
|
9
|
+
from framcore.aggregators._utils import (
|
|
10
|
+
_aggregate_result_volumes,
|
|
11
|
+
_aggregate_weighted_expressions,
|
|
12
|
+
_all_detailed_exprs_in_sum_expr,
|
|
13
|
+
_get_level_profile_weights_from_disagg_levelprofiles,
|
|
14
|
+
)
|
|
15
|
+
from framcore.attributes import AvgFlowVolume, Conversion, HydroGenerator, HydroReservoir, MaxFlowVolume, StockVolume
|
|
16
|
+
from framcore.components import Component, HydroModule
|
|
17
|
+
from framcore.curves import Curve
|
|
18
|
+
from framcore.expressions import Expr, get_level_value
|
|
19
|
+
from framcore.metadata import LevelExprMeta
|
|
20
|
+
from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
|
|
21
|
+
from framcore.timevectors import ConstantTimeVector, TimeVector
|
|
22
|
+
from framcore.utils import get_hydro_downstream_energy_equivalent
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from framcore import Model
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class HydroAggregator(Aggregator):
|
|
29
|
+
"""
|
|
30
|
+
Aggregate HydroModules into two equivalent modules based on the regulation factor, into one regulated and one unregulated module per area.
|
|
31
|
+
|
|
32
|
+
Aggregation steps (self._aggregate):
|
|
33
|
+
|
|
34
|
+
1. Group modules based on their power nodes (self._group_modules_by_power_node)
|
|
35
|
+
- Modules with generators are grouped based on their power nodes. You can choose to only group modules for certain power nodes by giving
|
|
36
|
+
self._power_node_members alone or together with self._metakey_power_node. NB! Watershed that crosses power nodes should not be aggregated in two
|
|
37
|
+
different HydroAggregators as the aggregator will remove all connected modules from the model after the first aggregation.
|
|
38
|
+
- Reservoirs are assigned to the power node which has the highest cumulative energy equivalent downstream of the reservoir. This is because JulES
|
|
39
|
+
currently only support one-to-one mapping of detailed and aggregated reservoirs.
|
|
40
|
+
- Reservoirs without generators downstream are ignored in the aggregation.
|
|
41
|
+
2. Group area modules into regulated and unregulated based on regulation factor (self._group_modules_by_regulation_factor)
|
|
42
|
+
- Regulation factor = upstream reservoir capacity / yearly upstream inflow. Modules with generators that have regulation factor <= self._ror_threshold
|
|
43
|
+
are grouped into unregulated run-of-river modules, the other modules with generators are grouped into regulated reservoir modules.
|
|
44
|
+
- All reservoirs are assigned to the regulated group.
|
|
45
|
+
- Generators without upstream inflows are ignored in the aggregation.
|
|
46
|
+
3. Make aggregated hydro module for each group (self._aggregate_groups)
|
|
47
|
+
- The resulting HydroModule has a generator with energy equivalent of 1 kWh/m3. The inflow, discharge capacity and reservoir capacity
|
|
48
|
+
is calculated based on energy and transformed back to water using this energy equivalent.
|
|
49
|
+
- Generation capacity (release_cap*energy_equivalent/agg_energy_equivalent, capacity of hydraulic couplings not double counted). The release capacity
|
|
50
|
+
profile is ignored except if self._release_capacity_profile is given, then this profile is used for all aggregated modules.
|
|
51
|
+
- Energy reservoir capacity (res_cap*energy_equivalent_downstream/agg_energy_equivalent)
|
|
52
|
+
- Gross energy inflow (inflow_up*energy_equivalent/agg_energy_equivalent) - TODO: Add possibility to adjust inflow to closer represent net inflow
|
|
53
|
+
- Inflow profiles weighted based on gross energy inflow (inflow_up_per_profile*energy_equivalent) - calc from core model using self._map_topology()
|
|
54
|
+
- TODO: Other details like pumps and environmental constraints are currently ignored in the aggregation.
|
|
55
|
+
3a. Aggregate results if all modules in group have results.
|
|
56
|
+
- Production is the sum of production levels with weighted profiles
|
|
57
|
+
- Reservoir filling is the sum of energy reservoir filling levels (filling*energy_equivalent_downstream/agg_energy_equivalent) with weighted profiles
|
|
58
|
+
- TODO: Water values, spill, bypass and pumping results are currently ignored in the aggregation.
|
|
59
|
+
- TODO: Add possibility to skip results aggregation.
|
|
60
|
+
3b. Make new hydro module and delete original modules from model data.
|
|
61
|
+
4. Add mapping from detailed to aggregated modules to self._aggregation_map.
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
Disaggregation steps (self._disaggregate):
|
|
65
|
+
|
|
66
|
+
1. Restore original modules from self._original_data. NB! Changes to aggregated modules are lost except for results (TODO)
|
|
67
|
+
2. Move production and filling results from aggregated modules to detailed modules, weighted based on production capacity and reservoir capacity.
|
|
68
|
+
- TODO: Water values, spill, bypass and pumping results are currently ignored in the disaggregation.
|
|
69
|
+
3. Delete aggregated modules.
|
|
70
|
+
|
|
71
|
+
NB! Watershed that crosses power nodes should not be aggregated in two different HydroAggregators as the aggregator will remove all connected modules
|
|
72
|
+
from the model after the first aggregation. Reservoirs will also be assigned to the power node which has the highest cumulative energy equivalent, so
|
|
73
|
+
this aggregator does not work well for reservoirs that are upstream of multiple power nodes.
|
|
74
|
+
|
|
75
|
+
See Aggregator for general design notes and rules to follow when using Aggregators.
|
|
76
|
+
|
|
77
|
+
Attributes:
|
|
78
|
+
_metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
|
|
79
|
+
_data_dim (SinglePeriodTimeIndex): Data dimension for eager evalutation.
|
|
80
|
+
_scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evalutation.
|
|
81
|
+
_grouped_modules (dict[str, set[str]]): Mapping of aggregated modules to detailed modules. agg to detailed
|
|
82
|
+
_grouped_reservoirs (dict[str, set[str]]): Mapping of aggregated reservoirs to detailed reservoirs. agg to detailed
|
|
83
|
+
_ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow) threshold for run-of-river classification.
|
|
84
|
+
Default is 0.5.
|
|
85
|
+
_metakey_power_node (str | None): If given, check metadata of power nodes to check if they should be grouped.
|
|
86
|
+
_power_node_members (list[str] | None): If given along with metakey_power_node, group modules only for power nodes with these metadata values.
|
|
87
|
+
If given without metakey_power_node, only group power nodes in this list.
|
|
88
|
+
_release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.
|
|
89
|
+
|
|
90
|
+
Parent Attributes (see framcore.aggregators.Aggregator):
|
|
91
|
+
|
|
92
|
+
_is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
|
|
93
|
+
_original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
|
|
94
|
+
_aggregation_map (dict[str, set[str]] | None): Maps aggregated components to their detailed components. detailed to agg
|
|
95
|
+
|
|
96
|
+
"""
|
|
97
|
+
|
|
98
|
+
def __init__(
    self,
    metakey_energy_eq_downstream: str,
    data_dim: SinglePeriodTimeIndex,
    scen_dim: FixedFrequencyTimeIndex,
    ror_threshold: float = 0.5,
    metakey_power_node: str | None = None,
    power_node_members: list[str] | None = None,
    release_capacity_profile: TimeVector | None = None,
) -> None:
    """
    Initialize HydroAggregator.

    Args:
        metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
            Can be calculated with framcore.utils.set_global_energy_equivalent
        data_dim (SinglePeriodTimeIndex): Data dimension for eager evaluation.
        scen_dim (FixedFrequencyTimeIndex): Scenario dimension for eager evaluation.
        ror_threshold (float): Regulation factor (upstream reservoir capacity / yearly upstream inflow)
            threshold for run-of-river classification. Default is 0.5.
        metakey_power_node (str | None): If given, check metadata of power nodes to check if they should be grouped.
        power_node_members (list[str] | None): If given along with metakey_power_node, group modules only for
            power nodes with these metadata values. If given without metakey_power_node, only group power
            nodes in this list.
        release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules'
            release capacities.

    Raises:
        ValueError: If ror_threshold is negative, or if metakey_power_node is given without a
            non-empty power_node_members.

    """
    super().__init__()
    self._check_type(metakey_energy_eq_downstream, str)
    self._check_type(ror_threshold, float)
    self._check_type(data_dim, SinglePeriodTimeIndex)
    self._check_type(scen_dim, FixedFrequencyTimeIndex)
    self._check_type(metakey_power_node, (str, type(None)))
    self._check_type(power_node_members, (list, type(None)))
    if ror_threshold < 0:
        msg = f"ror_threshold must be non-negative, got {ror_threshold}."
        raise ValueError(msg)
    # Bug fix: the original `len(power_node_members) <= 0` raised TypeError when
    # power_node_members was None; `not power_node_members` covers None and empty list
    # and raises the intended ValueError.
    if metakey_power_node is not None and not power_node_members:
        raise ValueError("If metakey_power_node is given, power_node_members must also be given.")

    self._metakey_energy_eq_downstream = metakey_energy_eq_downstream
    self._ror_threshold = ror_threshold
    self._metakey_power_node = metakey_power_node
    self._power_node_members = power_node_members
    self._release_capacity_profile = release_capacity_profile

    self._data_dim = data_dim
    self._scen_dim = scen_dim

    # Annotation fix: values are lists (defaultdict(list)), not sets as previously annotated.
    self._grouped_modules: dict[str, list[str]] = defaultdict(list)  # agg to detailed
    self._grouped_reservoirs: dict[str, list[str]] = defaultdict(list)  # agg to detailed
def _aggregate(self, model: Model) -> None:  # noqa: C901, PLR0915
    """
    Aggregate HydroModules in the model into equivalent modules per power node and regulation group.

    Steps: map upstream topology, group generator modules by power node, split groups by
    regulation factor, aggregate each group into one HydroModule, fill self._aggregation_map
    (detailed name -> set of aggregated names), then delete the detailed modules from the model.
    """
    t0 = time()
    data = model.get_data()

    t = time()
    upstream_topology = self._map_upstream_topology(data)
    self.send_debug_event(f"_map_upstream_topology time: {round(time() - t, 3)} seconds")

    t = time()
    generator_module_groups, reservoir_module_groups = self._group_modules_by_power_node(model, upstream_topology)
    self.send_debug_event(f"_group_modules_by_power_node time: {round(time() - t, 3)} seconds")

    t = time()
    self._group_modules_by_regulation_factor(model, generator_module_groups, reservoir_module_groups, upstream_topology)
    self.send_debug_event(f"_group_modules_by_regulation_factor time: {round(time() - t, 3)} seconds")

    t = time()
    ignore_production_capacity_modules = self._ignore_production_capacity_modules(model)
    self.send_debug_event(f"_ignore_production_capacity_modules time: {round(time() - t, 3)} seconds")

    t = time()
    self._aggregate_groups(model, upstream_topology, ignore_production_capacity_modules)
    self.send_debug_event(f"_aggregate_groups time: {round(time() - t, 3)} seconds")

    # Add reservoir modules to aggregation map
    t = time()
    self._aggregation_map = {dd: {a} for a, d in self._grouped_reservoirs.items() for dd in d}
    self.send_debug_event(f"add reservoir modules to _aggregation_map time: {round(time() - t, 3)} seconds")

    # Add generator modules to aggregation map
    t = time()
    for a, d in self._grouped_modules.items():
        for dd in d:
            if dd not in self._aggregation_map:
                self._aggregation_map[dd] = {a}
            elif not (data[dd].get_reservoir() and data[a].get_reservoir()):  # reservoir modules can only be mapped to one aggregated reservoir module
                self._aggregation_map[dd].add(a)
    self.send_debug_event(f"add generator modules to _aggregation_map time: {round(time() - t, 3)} seconds")

    # Delete detailed modules and add remaining modules to aggregation map
    t = time()
    upstream_topology_with_bypass_spill = self._map_upstream_topology(data, include_bypass_spill=True)
    aggregated_hydromodules = {module for modules in generator_module_groups.values() for module in modules}  # add generator modules
    for grouped_modules in generator_module_groups.values():  # add upstream modules
        for grouped_module in grouped_modules:
            aggregated_hydromodules.update(upstream_topology_with_bypass_spill[grouped_module])
    for downstream_module, upstreams in upstream_topology_with_bypass_spill.items():  # add downstream modules
        if any(upstream in aggregated_hydromodules for upstream in upstreams):
            aggregated_hydromodules.add(downstream_module)
    other_modules = [key for key, component in data.items() if isinstance(component, HydroModule) and key not in aggregated_hydromodules]
    # Set for O(1) membership tests below (was a list).
    other_generator_modules = {m for m in other_modules if data[m].get_generator()}
    for m in other_modules:  # remove other modules that do not interact with generator modules
        # Bug fix: the original nested loop `for upstreams in topology[m]: for upstream in upstreams:`
        # iterated over the *characters* of each upstream module name, so the upstream check
        # could never match a module name and non-interaction was over-reported.
        interacts = any(upstream in other_generator_modules for upstream in upstream_topology_with_bypass_spill[m])
        if not interacts:
            interacts = any(m in upstream_topology_with_bypass_spill[gm] for gm in other_generator_modules)
        if not interacts:
            aggregated_hydromodules.add(m)
            message = f"Module {m} is not upstream or downstream of any generator module, adding to aggregation as it does not interact with power system."
            self.send_warning_event(message)

    for m_key in aggregated_hydromodules:
        if m_key not in self._grouped_modules:  # keep the newly created aggregated modules themselves
            if not (m_key in self._aggregation_map or m_key in self._grouped_reservoirs):
                self._aggregation_map[m_key] = set()
            del model.get_data()[m_key]
    self.send_debug_event(f"delete detailed modules time: {round(time() - t, 3)} seconds")

    self.send_debug_event(f"total _aggregate: {round(time() - t0, 3)} seconds")
def _map_upstream_topology(
    self,
    data: dict[str, Component | TimeVector | Curve | Expr],
    include_bypass_spill: bool = False,
) -> dict[str, list[str]]:
    """
    Map HydroModules topology. Return dict[module, list[upstream modules + itself]].

    Args:
        data: Model data; only HydroModule entries are considered.
        include_bypass_spill: Also treat bypass and spill destinations as links.

    Returns:
        dict[str, list[str]]: For each module, all transitively upstream modules plus the module itself.

    Raises:
        KeyError: If a module references a release/bypass/spill target that does not exist in the model.

    """
    module_names = [key for key, component in data.items() if isinstance(component, HydroModule)]

    # Direct upstream mapping (including transport pumps)
    direct_upstream: dict[str, list[str]] = {module_name: [] for module_name in module_names}

    def _add_link(target: str, source: str) -> None:
        # Register `source` as directly upstream of `target`, with a clear error for dangling references.
        try:
            direct_upstream[target].append(source)
        except KeyError as e:
            message = f"Reference to {target} does not exist in Model. Referenced by {source} Module."
            raise KeyError(message) from e

    for module_name in module_names:
        module = data[module_name]
        release_to = module.get_release_to()
        pump = module.get_pump()
        if pump and pump.get_from_module() == module_name:  # transport pump: water leaves via the pump
            direct_upstream[pump.get_to_module()].append(module_name)
        elif release_to:  # ordinary release link
            _add_link(release_to, module_name)
        if include_bypass_spill:
            bypass = module.get_bypass()
            if bypass:
                bypass_to = bypass.get_to_module()
                if bypass_to:
                    _add_link(bypass_to, module_name)
            spill_to = module.get_spill_to()
            if spill_to:
                _add_link(spill_to, module_name)

    # Full upstream topology via iterative DFS. The previous recursive helper computed an
    # `all_upstream` set but discarded it and returned its `visited` set (which happens to be
    # the same reachable set at the top-level call); the iterative form keeps the identical
    # result, drops the dead code, and cannot hit the recursion limit on long river chains.
    topology: dict[str, list[str]] = {}
    for module_name in module_names:
        visited: set[str] = set()
        stack = [module_name]
        while stack:
            current = stack.pop()
            if current in visited:
                continue  # avoid circular dependencies
            visited.add(current)
            stack.extend(direct_upstream[current])
        topology[module_name] = list(visited)  # includes module_name itself

    return topology
def _build_upstream_reservoir_and_inflow_exprs(
    self,
    data: dict[str, Component | TimeVector | Curve | Expr],
    upstream_topology: dict[str, list[str]],
) -> tuple[dict[str, Expr], dict[str, Expr]]:
    """Build upstream inflow and reservoir expressions for each generator module."""
    inflow_by_module: dict[str, Expr] = {}
    reservoir_by_module: dict[str, Expr] = {}

    for name, component in data.items():
        # Only HydroModules that actually have a generator get an entry.
        if not (isinstance(component, HydroModule) and component.get_generator()):
            continue

        total_inflow = 0
        total_reservoir = 0
        for upstream_name in upstream_topology[name]:
            upstream = data[upstream_name]
            inflow = upstream.get_inflow()
            if inflow:
                total_inflow = total_inflow + inflow.get_level()
            reservoir = upstream.get_reservoir()
            if reservoir:
                total_reservoir = total_reservoir + reservoir.get_capacity().get_level()

        inflow_by_module[name] = total_inflow
        reservoir_by_module[name] = total_reservoir

    return inflow_by_module, reservoir_by_module
def _group_modules_by_power_node(
    self,
    model: Model,
    upstream_topology: dict[str, list[str]],
) -> tuple[dict[str, list[str]], dict[str, list[str]]]:  # noqa: C901
    """
    Group modules by power node. Return (generator_module_groups, reservoir_module_groups).

    Generator modules are grouped by their generator's power node, optionally filtered by
    self._power_node_members (directly, or via power-node metadata when self._metakey_power_node
    is set). Each upstream reservoir module is assigned to the single power node with the highest
    cumulative downstream energy equivalent.

    Note: the return annotation previously claimed a single dict although two dicts are returned.

    Raises:
        ValueError: If self._metakey_power_node is set but a power node lacks that metadata key.

    """
    data = model.get_data()
    generator_module_groups = defaultdict(list)  # power_node -> generator_modules
    reservoir_mapping = defaultdict(set)  # reservoir -> power_node(s)
    for key, component in data.items():
        if isinstance(component, HydroModule) and component.get_generator():
            power_node = component.get_generator().get_power_node()
            # Without a metakey, filter directly on the power node name.
            if self._metakey_power_node is None and self._power_node_members and power_node not in self._power_node_members:
                continue
            if self._metakey_power_node is not None:  # only group modules for nodes in self._power_node_members
                power_node_component = data[power_node]
                node_meta = power_node_component.get_meta(self._metakey_power_node)
                if node_meta is None:
                    message = f"Module {key} does not have metadata '{self._metakey_power_node}' for node mapping."
                    raise ValueError(message)
                node_meta_value = node_meta.get_value()
                if node_meta_value not in self._power_node_members:
                    continue

            generator_module_groups[power_node].append(key)

            # Every upstream reservoir is a candidate for this power node.
            for m in upstream_topology[key]:
                if data[m].get_reservoir():
                    reservoir_mapping[m].add(power_node)

    # Group reservoirs to the power node with the highest cumulative energy equivalent downstream from the reservoir
    reservoir_module_groups: dict[str, list[str]] = defaultdict(list)
    for res_name in reservoir_mapping:
        power_nodes = reservoir_mapping[res_name]
        if len(power_nodes) > 1:
            highest_power_node = max(
                power_nodes,
                key=lambda pn: get_level_value(
                    get_hydro_downstream_energy_equivalent(data, res_name, pn),
                    db=model,
                    unit="kWh/m3",
                    data_dim=self._data_dim,
                    scen_dim=self._scen_dim,
                    is_max=False,
                ),
            )
            reservoir_module_groups[highest_power_node].append(res_name)
        else:
            reservoir_module_groups[next(iter(power_nodes))].append(res_name)

    return generator_module_groups, reservoir_module_groups
def _group_modules_by_regulation_factor(
    self,
    model: Model,
    generator_module_groups: dict[str, list[str]],
    reservoir_module_groups: dict[str, list[str]],
    upstream_topology: dict[str, list[str]],
) -> None:
    """
    Group modules into regulated and unregulated based on regulation factor and self._ror_threshold.

    Regulation factor = upstream reservoir capacity / yearly upstream inflow.
    Run-of-river = regulation factor <= self._ror_threshold.
    Regulated = regulation factor > self._ror_threshold.

    Fills self._grouped_modules and self._grouped_reservoirs in place (agg name -> detailed names);
    returns nothing.
    """
    data = model.get_data()
    upstream_inflow_exprs, upstream_reservoir_exprs = self._build_upstream_reservoir_and_inflow_exprs(data, upstream_topology)

    for area, member_modules in generator_module_groups.items():
        # Naming convention for the two aggregated modules of this power node/area.
        ror_name = area + "_hydro_RoR"
        reg_name = area + "_hydro_reservoir"

        ror_modules = []
        reservoir_modules = []

        for m_key in member_modules:
            # Eagerly evaluate the symbolic inflow expression only when it is non-trivial
            # (it stays the int 0 when no upstream module had an inflow).
            if upstream_inflow_exprs[m_key] != 0:
                upstream_inflow = get_level_value(
                    upstream_inflow_exprs[m_key],
                    db=model,
                    unit="Mm3/year",
                    data_dim=self._data_dim,
                    scen_dim=self._scen_dim,
                    is_max=False,
                )
            else:
                continue  # Skip generator modules with no upstream inflow
            if upstream_reservoir_exprs[m_key] != 0:
                upstream_reservoir = get_level_value(
                    upstream_reservoir_exprs[m_key],
                    db=model,
                    unit="Mm3",
                    data_dim=self._data_dim,
                    scen_dim=self._scen_dim,
                    is_max=False,
                )
            else:
                upstream_reservoir = 0
            # Guard against division by zero for zero (or negative) evaluated inflow.
            regulation_factor = upstream_reservoir / upstream_inflow if upstream_inflow > 0 else 0

            if regulation_factor <= self._ror_threshold:
                ror_modules.append(m_key)
            else:
                reservoir_modules.append(m_key)

        if len(ror_modules) > 0:  # only make run-of-river group if there are any modules
            self._grouped_modules[ror_name] = ror_modules

        if len(reservoir_modules) > 0:  # only make reservoir group if there are any modules
            self._grouped_modules[reg_name] = reservoir_modules

        if len(reservoir_module_groups[area]) > 0 and len(reservoir_modules) > 0:  # add reservoirs to reg group
            self._grouped_reservoirs[reg_name] = reservoir_module_groups[area]
        elif len(reservoir_module_groups[area]) > 0:  # add reservoirs to ror group if no reg group
            self._grouped_reservoirs[ror_name] = reservoir_module_groups[area]
            message = f"{area} has no modules over ror_threshold ({self._ror_threshold}), so all reservoirs are put in RoR module."
            self.send_warning_event(message)
def _ignore_production_capacity_modules(
    self,
    model: Model,
) -> list[str]:
    """
    Return list of module names to ignore production capacity for in aggregation, because of hydraulic coupled reservoirs.

    Ignore the lowest production capacity of modules that are under the same hydraulic coupled reservoirs.

    Raises:
        RuntimeError: If a hydraulically coupled module has fewer than two modules releasing into it.
            (Previously an `assert`, which is stripped when Python runs with -O.)

    """
    ignore_production_capacity_modules: list[str] = []
    data = model.get_data()
    module_names = [key for key, component in data.items() if isinstance(component, HydroModule)]

    for m in module_names:
        if data[m].get_hydraulic_coupling() != 0:
            # Evaluated production capacity (MW) for each module releasing into m.
            # NOTE(review): assumes every module releasing into a hydraulically coupled module
            # has both a generator and a release capacity - TODO confirm.
            under_hydraulic = [
                (
                    mm,
                    get_level_value(
                        data[mm].get_generator().get_energy_equivalent().get_level() * data[mm].get_release_capacity().get_level(),
                        model,
                        "MW",
                        self._data_dim,
                        self._scen_dim,
                        is_max=False,
                    ),
                )
                for mm in module_names
                if data[mm].get_release_to() == m
            ]
            # Explicit validation instead of `assert` so the check survives -O.
            if len(under_hydraulic) <= 1:
                msg = f"Hydraulically coupled module {m} must have more than one module releasing into it, found {len(under_hydraulic)}."
                raise RuntimeError(msg)
            # Only the smallest capacity is ignored; the rest are counted.
            ignore_production_capacity_modules.append(min(under_hydraulic, key=lambda x: x[1])[0])

    return ignore_production_capacity_modules
def _aggregate_groups(  # noqa: C901, PLR0915
    self,
    model: Model,
    upstream_topology: dict[str, list[str]],
    ignore_capacity: list[str],
) -> None:
    """
    Aggregate each group of modules into one HydroModule.

    For every group in self._grouped_modules, builds one HydroModule whose production,
    release capacity, inflow and reservoir are energy-equivalent-weighted aggregates of
    the member modules, and stores it in the model data under the group id.

    Args:
        model: Model holding the detailed components to aggregate.
        upstream_topology: Maps each module name to the names of its upstream modules.
        ignore_capacity: Module names whose release capacity is excluded from the
            aggregate (hydraulically coupled modules).

    """
    data = model.get_data()
    for new_id, module_names in self._grouped_modules.items():
        num_reservoirs = 0
        if new_id in self._grouped_reservoirs:
            num_reservoirs = len(self._grouped_reservoirs[new_id])
        self.send_info_event(f"{new_id} from {len(module_names)} generator modules and {num_reservoirs} reservoirs.")

        # Generator and production: sum member productions; the aggregate generator
        # is normalized to an energy equivalent of 1.0 kWh/m3.
        generator_module_names = [m for m in module_names if data[m].get_generator()]
        productions = [data[m].get_generator().get_production() for m in generator_module_names]
        sum_production = _aggregate_result_volumes(model, productions, "MW", self._data_dim, self._scen_dim, new_id, generator_module_names)

        generator = HydroGenerator(
            # NOTE(review): assumes all member generators share the same power node —
            # only the first member's node is used; confirm grouping guarantees this.
            power_node=data[generator_module_names[0]].get_generator().get_power_node(),
            energy_equivalent=Conversion(level=ConstantTimeVector(1.0, "kWh/m3", is_max_level=True)),
            production=sum_production,
        )
        energy_eq = generator.get_energy_equivalent().get_level()

        # Release capacity: weight each member's capacity (m3/s) by its energy equivalent,
        # then divide by the aggregate energy equivalent to get back to volume terms.
        release_capacities = [data[m].get_release_capacity() for m in generator_module_names if m not in ignore_capacity]
        if self._release_capacity_profile:
            if not all([rc.get_profile() is None for rc in release_capacities]):
                message = f"Some release capacities in {new_id} have profiles, using provided profile for all."
                self.send_warning_event(message)
            # Deepcopy so the override profile does not leak into the detailed modules.
            release_capacities = deepcopy(release_capacities)
            for rc in release_capacities:
                rc.set_profile(self._release_capacity_profile)
        generator_energy_eqs = [data[m].get_generator().get_energy_equivalent() for m in generator_module_names if m not in ignore_capacity]
        release_capacity_levels = [rc.get_level() * ee.get_level() for rc, ee in zip(release_capacities, generator_energy_eqs, strict=True)]

        release_capacity_profile = None
        if any(rc.get_profile() for rc in release_capacities):
            # Members without a profile contribute a constant 1.0 profile.
            one_profile_max = Expr(src=ConstantTimeVector(1.0, is_zero_one_profile=False), is_profile=True)
            weights = [get_level_value(rcl, model, "MW", self._data_dim, self._scen_dim, is_max=True) for rcl in release_capacity_levels]
            profiles = [rc.get_profile() if rc.get_profile() else one_profile_max for rc in release_capacities]
            release_capacity_profile = _aggregate_weighted_expressions(profiles, weights)
        release_capacity = MaxFlowVolume(level=sum(release_capacity_levels) / energy_eq, profile=release_capacity_profile)

        # Inflow level: sum upstream inflows per member, convert to energy via the
        # member's energy equivalent, then back to volume via the aggregate's.
        upstream_inflow_levels = defaultdict(list)
        for m in generator_module_names:
            for mm in upstream_topology[m]:
                inflow = data[mm].get_inflow()
                if inflow:
                    upstream_inflow_levels[m].append(inflow.get_level())
        inflow_level_energy = sum(
            sum(upstream_inflow_levels[m]) * data[m].get_generator().get_energy_equivalent().get_level()
            for m in generator_module_names
            if len(upstream_inflow_levels[m]) > 0
        )
        inflow_level = inflow_level_energy / energy_eq

        # Inflow profile: weight each distinct upstream profile by its energy inflow.
        one_profile = Expr(src=ConstantTimeVector(1.0, is_zero_one_profile=False), is_profile=True)
        inflow_profile_to_energyinflow = defaultdict(list)
        # Cache per-inflow level values: the same inflow object can appear upstream
        # of several members and get_level_value is comparatively expensive.
        inflow_level_to_value = dict()
        for m in generator_module_names:
            m_energy_eq = data[m].get_generator().get_energy_equivalent().get_level()
            m_energy_eq_value = get_level_value(
                m_energy_eq,
                db=model,
                unit="kWh/m3",
                data_dim=self._data_dim,
                scen_dim=self._scen_dim,
                is_max=False,
            )
            for upstream_module in upstream_topology[m]:
                inflow = data[upstream_module].get_inflow()
                if inflow:
                    if inflow not in inflow_level_to_value:
                        inflow_level_to_value[inflow] = get_level_value(
                            inflow.get_level(),
                            db=model,
                            unit="m3/s",
                            data_dim=self._data_dim,
                            scen_dim=self._scen_dim,
                            is_max=False,
                        )
                    upstream_energy_inflow = inflow_level_to_value[inflow] * m_energy_eq_value
                    upstream_profile = inflow.get_profile() if inflow.get_profile() else one_profile
                    inflow_profile_to_energyinflow[upstream_profile].append(upstream_energy_inflow)

        profile_weights = [sum(energyinflows) for energyinflows in inflow_profile_to_energyinflow.values()]
        inflow_profile = _aggregate_weighted_expressions(list(inflow_profile_to_energyinflow.keys()), profile_weights)
        inflow = AvgFlowVolume(level=inflow_level, profile=inflow_profile)

        # Reservoir capacity and filling: weighted by each reservoir's downstream
        # energy equivalent (stored as meta during aggregation setup).
        if new_id in self._grouped_reservoirs and len(self._grouped_reservoirs[new_id]) > 0:
            reservoir_levels = [
                data[m].get_reservoir().get_capacity().get_level() * data[m].get_meta(self._metakey_energy_eq_downstream).get_value()
                for m in self._grouped_reservoirs[new_id]
            ]
            reservoir_level = sum(reservoir_levels) / energy_eq
            reservoir_capacity = StockVolume(level=reservoir_level)

            fillings = [data[m].get_reservoir().get_volume() for m in self._grouped_reservoirs[new_id]]
            energy_eq_downstreams = [data[m].get_meta(self._metakey_energy_eq_downstream).get_value() for m in self._grouped_reservoirs[new_id]]
            sum_filling = self._aggregate_fillings(fillings, energy_eq_downstreams, energy_eq, model, "GWh", new_id, self._grouped_reservoirs[new_id])
            reservoir = HydroReservoir(capacity=reservoir_capacity, volume=sum_filling)
        else:
            reservoir = None

        new_hydro = HydroModule(
            generator=generator,
            reservoir=reservoir,
            inflow=inflow,
            release_capacity=release_capacity,
        )
        # Record the aggregate's energy equivalent so disaggregation can invert the weighting.
        new_hydro.add_meta(key=self._metakey_energy_eq_downstream, value=LevelExprMeta(energy_eq))

        data[new_id] = new_hydro
|
|
586
|
+
|
|
587
|
+
def _aggregate_fillings(
    self,
    fillings: list[StockVolume],
    energy_eq_downstreams: list[Expr],
    energy_eq: Expr,
    model: Model,
    weight_unit: str,
    group_id: str,
    members: list[str],
) -> StockVolume | None:
    """
    Aggregate reservoir fillings if all fillings are not None.

    Args:
        fillings: Reservoir filling volumes of the grouped modules.
        energy_eq_downstreams: Downstream energy equivalent per member reservoir.
        energy_eq: Energy equivalent of the aggregated module.
        model: Model used to evaluate weight values.
        weight_unit: Unit for the filling weights (e.g. "GWh").
        group_id: Name of the aggregated group (used in messages).
        members: Member module names, aligned with fillings.

    Returns:
        Aggregated filling, or None when not all members have a filling level.

    Raises:
        ValueError: If all members have a filling level but some lack a profile.

    """
    sum_filling = None
    if all(filling.get_level() for filling in fillings):
        if any(not filling.get_profile() for filling in fillings):
            missing = [member for member, filling in zip(members, fillings, strict=False) if not filling.get_profile()]
            # BUG FIX: trailing commas previously made `message` a *tuple* of two
            # strings; implicit concatenation yields one readable error message.
            message = (
                "Some reservoir fillings in grouped modules have no profile. Cannot aggregate profiles. "
                f"Group: '{group_id}', missing profile for {missing}."
            )
            raise ValueError(message)
        level, profiles, weights = self._get_level_profiles_weights_fillings(model, fillings, energy_eq_downstreams, energy_eq, weight_unit)
        profile = _aggregate_weighted_expressions(profiles, weights)
        sum_filling = StockVolume(level=level, profile=profile)
    elif any(filling.get_level() for filling in fillings):
        # Mixed case: warn and skip aggregation rather than fabricate fillings.
        missing = [member for member, filling in zip(members, fillings, strict=False) if not filling.get_level()]
        message = (
            "Some but not all grouped modules have reservoir filling defined, reservoir filling not aggregated. "
            f"Group: {group_id}, missing filling for {missing}."
        )
        self.send_warning_event(message)
    return sum_filling
|
|
618
|
+
|
|
619
|
+
def _get_level_profiles_weights_fillings(
    self,
    model: Model,
    fillings: list[StockVolume],
    energy_eq_downstreams: list[Expr],
    energy_eq: Expr,
    weight_unit: str,
) -> tuple[Expr, list[Expr], list[float]]:
    """
    Get aggregated filling level, and profiles with weights from list of fillings.

    Two cases:
    1) All fillings are expressions from previous disaggregation. Can be aggregated more efficiently.
    2) Default case, where we weight fillings based on energy equivalent inflow.
    """
    filling_levels = [filling.get_level() for filling in fillings]

    # Case 1: every level has the disaggregation shape -> reuse the efficient path.
    if all(self._is_disagg_filling_expr(filling_level) for filling_level in filling_levels):
        return _get_level_profile_weights_from_disagg_levelprofiles(model, fillings, self._data_dim, self._scen_dim)

    # Case 2: convert each filling to energy terms and weight accordingly.
    energy_levels = [level * eq for level, eq in zip(filling_levels, energy_eq_downstreams, strict=True)]
    aggregated_level = sum(energy_levels) / energy_eq

    member_profiles = []
    member_weights = []
    for filling, energy_level in zip(fillings, energy_levels, strict=True):
        member_profiles.append(filling.get_profile())
        member_weights.append(get_level_value(energy_level, model, weight_unit, self._data_dim, self._scen_dim, False))

    return aggregated_level, member_profiles, member_weights
|
|
642
|
+
|
|
643
|
+
def _is_disagg_filling_expr(self, expr: Expr) -> bool:
    """Check if expr is ((weight * agg_level * energy_eq_downstream) / energy_eq_agg) which indicates it comes from disaggregation."""
    if expr.is_leaf():
        return False

    ops, args = expr.get_operations(expect_ops=True, copy_list=False)

    # The disaggregation expression is exactly: weight * agg_level * eq_down / eq_agg.
    if ops != "**/" or len(args) != 4:
        return False

    weight, agg_level, eq_down, eq_agg = args

    # weight and eq_agg must both be leaves; weight carries no level/flow/stock
    # semantics, agg_level is a stock, eq_down and eq_agg are pure levels.
    shape_ok = (
        weight.is_leaf()
        and eq_agg.is_leaf()
        and not weight.is_level()
        and not weight.is_flow()
        and not weight.is_stock()
        and agg_level.is_stock()
        and eq_down.is_level()
        and not eq_down.is_flow()
        and not eq_down.is_stock()
        and eq_agg.is_level()
        and not eq_agg.is_flow()
        and not eq_agg.is_stock()
    )
    if not shape_ok:
        return False

    # eq_down is either a single leaf or itself a sum of energy equivalents.
    return eq_down.is_leaf() or eq_down.get_operations(expect_ops=True, copy_list=False)[0] == "+"
|
|
665
|
+
|
|
666
|
+
def _disaggregate(  # noqa: C901
    self,
    model: Model,
    original_data: dict[str, Component | TimeVector | Curve | Expr],
) -> None:
    """
    Disaggregate HydroAggregator.

    Reinstates the original detailed modules and distributes the aggregated
    production and filling results back onto them, weighted by production and
    reservoir capacity respectively.

    Args:
        model: Model holding the aggregated components (modified in place).
        original_data: Snapshot of the detailed components taken before aggregation.

    """
    new_data = model.get_data()

    deleted_group_names = self._get_deleted_group_modules(new_data)  # find agg groups that have been deleted
    agg_modules = {key: new_data.pop(key) for key in self._grouped_modules if key not in deleted_group_names}  # isolate agg modules out of new_data

    # Reinstate original detailed modules that are not fully deleted
    for detailed_key, agg_keys in self._aggregation_map.items():
        if agg_keys and all(key in deleted_group_names for key in agg_keys):
            continue
        new_data[detailed_key] = original_data[detailed_key]

    # Set production results in detailed modules
    for agg_key, detailed_keys in self._grouped_modules.items():
        if agg_key in deleted_group_names:
            continue

        agg_production_level = agg_modules[agg_key].get_generator().get_production().get_level()
        if agg_production_level is None:  # keep original production if agg has no production defined
            continue
        if len(detailed_keys) == 1:  # only one detailed module, set production directly
            # BUG FIX: previously indexed new_data[detailed_key], a stale variable
            # left over from the reinstatement loop above, not the group's member.
            new_data[detailed_keys[0]].get_generator().get_production().set_level(agg_production_level)
            continue
        detailed_production_levels = [new_data[detailed_key].get_generator().get_production().get_level() for detailed_key in detailed_keys]
        # If some but not all detailed modules have production defined, skip setting production.
        if any(detailed_production_levels) and not all(detailed_production_levels):
            missing = [detailed_key for detailed_key, level in zip(detailed_keys, detailed_production_levels, strict=False) if not level]
            message = f"Some but not all grouped modules have production defined. Production not disaggregated for {agg_key}, missing for {missing}."
            self.send_warning_event(message)
            continue
        if _all_detailed_exprs_in_sum_expr(agg_production_level, detailed_production_levels):  # if agg production is sum of detailed levels, keep original
            continue
        production_weights = self._get_disaggregation_production_weights(model, detailed_keys)  # default method
        for detailed_key in detailed_keys:
            self._set_weighted_production(new_data[detailed_key], agg_modules[agg_key], production_weights[detailed_key])

    # Set filling results in detailed modules
    for agg_key, detailed_keys in self._grouped_reservoirs.items():
        if agg_key in deleted_group_names:
            continue

        agg_filling_level = agg_modules[agg_key].get_reservoir().get_volume().get_level()
        if agg_filling_level is None:  # keep original filling if agg has no filling defined
            continue
        if len(detailed_keys) == 1:  # only one detailed module, set filling directly
            # BUG FIX: same stale-variable defect as in the production branch above.
            new_data[detailed_keys[0]].get_reservoir().get_volume().set_level(agg_filling_level)
            continue
        detailed_filling_levels = [new_data[detailed_key].get_reservoir().get_volume().get_level() for detailed_key in detailed_keys]
        # If some but not all detailed modules have filling defined, skip setting filling.
        if any(detailed_filling_levels) and not all(detailed_filling_levels):
            missing = [detailed_key for detailed_key, level in zip(detailed_keys, detailed_filling_levels, strict=False) if not level]
            message = f"Some but not all grouped modules have filling defined. Filling not disaggregated for {agg_key}, missing for {missing}."
            self.send_warning_event(message)
            continue
        detailed_energy_eq_downstreams = [new_data[detailed_key].get_meta(self._metakey_energy_eq_downstream).get_value() for detailed_key in detailed_keys]
        agg_energy_eq_downstream = agg_modules[agg_key].get_meta(self._metakey_energy_eq_downstream).get_value()
        agg_detailed_fillings = [
            detailed_filling * detailed_energy_eq
            for detailed_filling, detailed_energy_eq in zip(detailed_filling_levels, detailed_energy_eq_downstreams, strict=True)
            if detailed_filling and detailed_energy_eq
        ]
        # If agg filling is already the sum of the detailed levels, keep original.
        if self._is_sum_filling_expr(
            agg_filling_level,
            agg_detailed_fillings,
            agg_energy_eq_downstream,
        ):
            continue
        reservoir_weights = self._get_disaggregation_filling_weights(model, detailed_keys)  # default method
        for detailed_key in detailed_keys:
            self._set_weighted_filling(new_data[detailed_key], agg_modules[agg_key], reservoir_weights[detailed_key])

    self._grouped_modules.clear()
    self._grouped_reservoirs.clear()
|
|
746
|
+
|
|
747
|
+
def _get_deleted_group_modules(self, new_data: dict[str, Component | TimeVector | Curve | Expr]) -> set[str]:
    """Return the names of aggregated groups that are no longer present in the model data."""
    return {group_name for group_name in self._grouped_modules if group_name not in new_data}
|
|
756
|
+
|
|
757
|
+
def _get_disaggregation_production_weights(
    self,
    model: Model,
    detailed_keys: list[str],
) -> dict[str, float]:
    """
    Get weights to disaggregate production based on production capacity.

    Args:
        model: Model holding the detailed modules.
        detailed_keys: Names of the detailed modules in the group.

    Returns:
        Mapping from detailed module name to its share (fraction summing to 1)
        of total production capacity.

    """
    # Calculate production capacity (kW) for each detailed module.
    data = model.get_data()
    production_weights = dict()  # detailed_key -> production capacity value
    for det in detailed_keys:
        det_module = data[det]
        release_capacity_level = det_module.get_release_capacity().get_level()
        generator_energy_eq = det_module.get_generator().get_energy_equivalent().get_level()
        production_weights[det] = get_level_value(
            release_capacity_level * generator_energy_eq,
            db=model,
            unit="kW",
            data_dim=self._data_dim,
            scen_dim=self._scen_dim,
            is_max=False,
        )

    # PERF FIX: the total was previously recomputed inside the normalization loop
    # (O(n^2)); compute the loop-invariant sum once.
    total_capacity = sum(production_weights.values())
    return {det: production_weights[det] / total_capacity for det in detailed_keys}
|
|
786
|
+
|
|
787
|
+
def _get_disaggregation_filling_weights(
    self,
    model: Model,
    detailed_keys: list[str],
) -> dict[str, float]:
    """
    Get weights to disaggregate filling based on reservoir capacity.

    Args:
        model: Model holding the detailed modules.
        detailed_keys: Names of the detailed modules in the group.

    Returns:
        Mapping from detailed module name to its share (fraction summing to 1)
        of total reservoir energy capacity.

    """
    # Calculate reservoir energy capacity (GWh) for each detailed module.
    data = model.get_data()
    filling_weights = dict()  # detailed_key -> reservoir capacity value
    for det in detailed_keys:
        det_module = data[det]
        reservoir_capacity_level = det_module.get_reservoir().get_capacity().get_level()
        reservoir_energy_eq = det_module.get_meta(self._metakey_energy_eq_downstream).get_value()
        filling_weights[det] = get_level_value(
            reservoir_capacity_level * reservoir_energy_eq,
            db=model,
            unit="GWh",
            data_dim=self._data_dim,
            scen_dim=self._scen_dim,
            is_max=False,
        )

    # PERF FIX: the total was previously recomputed inside the normalization loop
    # (O(n^2)); compute the loop-invariant sum once.
    total_capacity = sum(filling_weights.values())
    return {det: filling_weights[det] / total_capacity for det in detailed_keys}
|
|
816
|
+
|
|
817
|
+
def _set_weighted_production(self, detailed_module: HydroModule, agg_module: HydroModule, production_weight: float) -> None:
    """Set production level and profile for detailed module based on aggregated module."""
    agg_production = agg_module.get_generator().get_production()
    detailed_production = detailed_module.get_generator().get_production()
    # The detailed module gets its weighted share of the aggregate level and the
    # aggregate's profile unchanged.
    detailed_production.set_level(production_weight * agg_production.get_level())
    detailed_production.set_profile(agg_production.get_profile())
|
|
824
|
+
|
|
825
|
+
def _is_sum_filling_expr(self, agg_filling: Expr, agg_detailed_fillings: list[Expr], agg_energy_eq_downstream: Expr) -> bool:
    """
    Check if expr is (sum(filling * energy_eq_downstream)) / agg_energy_eq_downstream, indicating it comes from aggregation.

    Args:
        agg_filling: Filling level expression of the aggregated module.
        agg_detailed_fillings: Expected weighted detailed filling terms.
        agg_energy_eq_downstream: Energy equivalent of the aggregated module.

    Returns:
        True if agg_filling is exactly the sum of the detailed terms divided by
        the aggregate energy equivalent.

    """
    if agg_filling.is_leaf():
        return False
    ops, args = agg_filling.get_operations(expect_ops=True, copy_list=False)
    # BUG FIX: this was `not (ops == "/" and len(args) == 2) and args[1] == ...`,
    # which (due to precedence) never rejected a division whose denominator is NOT
    # the aggregate energy equivalent. All three conditions must hold together.
    if not (ops == "/" and len(args) == 2 and args[1] == agg_energy_eq_downstream):
        return False
    # A leaf numerator cannot be a sum of weighted detailed fillings.
    if args[0].is_leaf():
        return False
    ops_sum, args_sum = args[0].get_operations(expect_ops=True, copy_list=False)
    if "+" not in ops_sum:
        return False
    if len(args_sum) != len(agg_detailed_fillings):
        return False
    return all(arg in agg_detailed_fillings for arg in args_sum)
|
|
838
|
+
|
|
839
|
+
def _set_weighted_filling(self, detailed_module: HydroModule, agg_module: HydroModule, filling_weight: float) -> None:
    """Set filling level and profile for detailed module based on aggregated module."""
    agg_filling_level = agg_module.get_reservoir().get_volume().get_level()
    agg_filling_profile = agg_module.get_reservoir().get_volume().get_profile()
    if agg_filling_level:  # keep original filling if agg has no filling defined
        agg_energy_eq = agg_module.get_meta(self._metakey_energy_eq_downstream).get_value()
        detailed_energy_eq = detailed_module.get_meta(self._metakey_energy_eq_downstream).get_value()

        # Invert the aggregation weighting: convert the aggregate filling to energy
        # terms, take this module's share, and convert back with its own energy
        # equivalent. This is the (weight * agg_level * eq) / eq shape that
        # _is_disagg_filling_expr later recognizes.
        filling_level = (filling_weight * agg_filling_level * agg_energy_eq) / detailed_energy_eq
        detailed_module.get_reservoir().get_volume().set_level(filling_level)
        detailed_module.get_reservoir().get_volume().set_profile(agg_filling_profile)
|