fram-core 0.0.0-py3-none-any.whl → 0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. fram_core-0.1.0.dist-info/METADATA +42 -0
  2. fram_core-0.1.0.dist-info/RECORD +100 -0
  3. {fram_core-0.0.0.dist-info → fram_core-0.1.0.dist-info}/WHEEL +1 -2
  4. fram_core-0.1.0.dist-info/licenses/LICENSE.md +8 -0
  5. framcore/Base.py +161 -0
  6. framcore/Model.py +90 -0
  7. framcore/__init__.py +10 -0
  8. framcore/aggregators/Aggregator.py +172 -0
  9. framcore/aggregators/HydroAggregator.py +849 -0
  10. framcore/aggregators/NodeAggregator.py +530 -0
  11. framcore/aggregators/WindSolarAggregator.py +315 -0
  12. framcore/aggregators/__init__.py +13 -0
  13. framcore/aggregators/_utils.py +184 -0
  14. framcore/attributes/Arrow.py +307 -0
  15. framcore/attributes/ElasticDemand.py +90 -0
  16. framcore/attributes/ReservoirCurve.py +23 -0
  17. framcore/attributes/SoftBound.py +16 -0
  18. framcore/attributes/StartUpCost.py +65 -0
  19. framcore/attributes/Storage.py +158 -0
  20. framcore/attributes/TargetBound.py +16 -0
  21. framcore/attributes/__init__.py +63 -0
  22. framcore/attributes/hydro/HydroBypass.py +49 -0
  23. framcore/attributes/hydro/HydroGenerator.py +100 -0
  24. framcore/attributes/hydro/HydroPump.py +178 -0
  25. framcore/attributes/hydro/HydroReservoir.py +27 -0
  26. framcore/attributes/hydro/__init__.py +13 -0
  27. framcore/attributes/level_profile_attributes.py +911 -0
  28. framcore/components/Component.py +136 -0
  29. framcore/components/Demand.py +144 -0
  30. framcore/components/Flow.py +189 -0
  31. framcore/components/HydroModule.py +371 -0
  32. framcore/components/Node.py +99 -0
  33. framcore/components/Thermal.py +208 -0
  34. framcore/components/Transmission.py +198 -0
  35. framcore/components/_PowerPlant.py +81 -0
  36. framcore/components/__init__.py +22 -0
  37. framcore/components/wind_solar.py +82 -0
  38. framcore/curves/Curve.py +44 -0
  39. framcore/curves/LoadedCurve.py +146 -0
  40. framcore/curves/__init__.py +9 -0
  41. framcore/events/__init__.py +21 -0
  42. framcore/events/events.py +51 -0
  43. framcore/expressions/Expr.py +591 -0
  44. framcore/expressions/__init__.py +30 -0
  45. framcore/expressions/_get_constant_from_expr.py +477 -0
  46. framcore/expressions/_utils.py +73 -0
  47. framcore/expressions/queries.py +416 -0
  48. framcore/expressions/units.py +227 -0
  49. framcore/fingerprints/__init__.py +11 -0
  50. framcore/fingerprints/fingerprint.py +292 -0
  51. framcore/juliamodels/JuliaModel.py +171 -0
  52. framcore/juliamodels/__init__.py +7 -0
  53. framcore/loaders/__init__.py +10 -0
  54. framcore/loaders/loaders.py +405 -0
  55. framcore/metadata/Div.py +73 -0
  56. framcore/metadata/ExprMeta.py +56 -0
  57. framcore/metadata/LevelExprMeta.py +32 -0
  58. framcore/metadata/Member.py +55 -0
  59. framcore/metadata/Meta.py +44 -0
  60. framcore/metadata/__init__.py +15 -0
  61. framcore/populators/Populator.py +108 -0
  62. framcore/populators/__init__.py +7 -0
  63. framcore/querydbs/CacheDB.py +50 -0
  64. framcore/querydbs/ModelDB.py +34 -0
  65. framcore/querydbs/QueryDB.py +45 -0
  66. framcore/querydbs/__init__.py +11 -0
  67. framcore/solvers/Solver.py +63 -0
  68. framcore/solvers/SolverConfig.py +272 -0
  69. framcore/solvers/__init__.py +9 -0
  70. framcore/timeindexes/AverageYearRange.py +27 -0
  71. framcore/timeindexes/ConstantTimeIndex.py +22 -0
  72. framcore/timeindexes/DailyIndex.py +33 -0
  73. framcore/timeindexes/FixedFrequencyTimeIndex.py +814 -0
  74. framcore/timeindexes/HourlyIndex.py +33 -0
  75. framcore/timeindexes/IsoCalendarDay.py +33 -0
  76. framcore/timeindexes/ListTimeIndex.py +277 -0
  77. framcore/timeindexes/ModelYear.py +23 -0
  78. framcore/timeindexes/ModelYears.py +27 -0
  79. framcore/timeindexes/OneYearProfileTimeIndex.py +29 -0
  80. framcore/timeindexes/ProfileTimeIndex.py +43 -0
  81. framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
  82. framcore/timeindexes/TimeIndex.py +103 -0
  83. framcore/timeindexes/WeeklyIndex.py +33 -0
  84. framcore/timeindexes/__init__.py +36 -0
  85. framcore/timeindexes/_time_vector_operations.py +689 -0
  86. framcore/timevectors/ConstantTimeVector.py +131 -0
  87. framcore/timevectors/LinearTransformTimeVector.py +131 -0
  88. framcore/timevectors/ListTimeVector.py +127 -0
  89. framcore/timevectors/LoadedTimeVector.py +97 -0
  90. framcore/timevectors/ReferencePeriod.py +51 -0
  91. framcore/timevectors/TimeVector.py +108 -0
  92. framcore/timevectors/__init__.py +17 -0
  93. framcore/utils/__init__.py +35 -0
  94. framcore/utils/get_regional_volumes.py +387 -0
  95. framcore/utils/get_supported_components.py +60 -0
  96. framcore/utils/global_energy_equivalent.py +63 -0
  97. framcore/utils/isolate_subnodes.py +172 -0
  98. framcore/utils/loaders.py +97 -0
  99. framcore/utils/node_flow_utils.py +236 -0
  100. framcore/utils/storage_subsystems.py +106 -0
  101. fram_core-0.0.0.dist-info/METADATA +0 -5
  102. fram_core-0.0.0.dist-info/RECORD +0 -4
  103. fram_core-0.0.0.dist-info/top_level.txt +0 -1
--- /dev/null
+++ b/framcore/utils/__init__.py
@@ -0,0 +1,35 @@
+# framcore/lib/__init__.py
+
+from framcore.utils.get_supported_components import get_supported_components
+from framcore.utils.node_flow_utils import (
+    FlowInfo,
+    get_component_to_nodes,
+    get_flow_infos,
+    get_node_to_commodity,
+    get_transports_by_commodity,
+    is_transport_by_commodity,
+)
+from framcore.utils.global_energy_equivalent import get_hydro_downstream_energy_equivalent, set_global_energy_equivalent
+from framcore.utils.storage_subsystems import get_one_commodity_storage_subsystems
+from framcore.utils.isolate_subnodes import isolate_subnodes
+from framcore.utils.get_regional_volumes import get_regional_volumes, RegionalVolumes
+from framcore.utils.loaders import add_loaders_if, add_loaders, replace_loader_path
+
+__all__ = [
+    "FlowInfo",
+    "RegionalVolumes",
+    "add_loaders",
+    "add_loaders_if",
+    "get_component_to_nodes",
+    "get_flow_infos",
+    "get_hydro_downstream_energy_equivalent",
+    "get_node_to_commodity",
+    "get_one_commodity_storage_subsystems",
+    "get_regional_volumes",
+    "get_supported_components",
+    "get_transports_by_commodity",
+    "is_transport_by_commodity",
+    "isolate_subnodes",
+    "replace_loader_path",
+    "set_global_energy_equivalent",
+]
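
The new framcore/utils/__init__.py above re-exports the utility helpers at package level via __all__. A minimal import sketch, assuming fram-core 0.1.0 is installed:

    # All names listed in __all__ above are importable directly from framcore.utils
    from framcore.utils import (
        FlowInfo,
        RegionalVolumes,
        get_flow_infos,
        get_node_to_commodity,
        get_regional_volumes,
        set_global_energy_equivalent,
    )
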
--- /dev/null
+++ b/framcore/utils/get_regional_volumes.py
@@ -0,0 +1,387 @@
+from __future__ import annotations
+
+import contextlib
+from collections import defaultdict
+from typing import TYPE_CHECKING
+
+import numpy as np
+from numpy.typing import NDArray
+
+from framcore.attributes import FlowVolume
+from framcore.components import Component, Flow, Node
+from framcore.events import send_warning_event
+from framcore.expressions import get_unit_conversion_factor
+from framcore.expressions._utils import _load_model_and_create_model_db
+from framcore.metadata import Member
+from framcore.querydbs import QueryDB
+from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
+from framcore.utils import FlowInfo, get_flow_infos, get_node_to_commodity, get_supported_components
+
+if TYPE_CHECKING:
+    from framcore import Model
+
+
+class RegionalVolumes:
+    """
+    Container for regional energy volumes.
+
+    Stores production, consumption, import, and export vectors for each node and category.
+    Provides methods to access these aggregated results.
+    """
+
+    def __init__(self) -> None:
+        """Initialize the RegionalVolumes instance with empty dictionaries for production, consumption, import, and export."""
+        self._production: dict[str, dict[str, NDArray]] = dict()
+        self._consumption: dict[str, dict[str, NDArray]] = dict()
+        self._export: dict[str, dict[str, NDArray]] = dict()
+        self._import: dict[str, dict[str, NDArray]] = dict()
+
+    def get_production(self) -> dict[str, dict[str, NDArray]]:
+        """Return dict with production vector by category for each node."""
+        return self._production
+
+    def get_consumption(self) -> dict[str, dict[str, NDArray]]:
+        """Return dict with consumption vector by category for each node."""
+        return self._consumption
+
+    def get_export(self) -> dict[str, dict[str, NDArray]]:
+        """Return nested dict with export vector for each trade partner to an exporting node."""
+        return self._export
+
+    def get_import(self) -> dict[str, dict[str, NDArray]]:
+        """Return nested dict with import vector for each trade partner to an importing node."""
+        return self._import
+
+
+def _get_meta_value(key: str, v: Node | Flow, category_level: str) -> str:
+    """Get member meta value from component."""
+    meta = v.get_meta(category_level)
+    if not isinstance(meta, Member):
+        message = f"Expected for key {key} metadata of type Member, got {meta}"
+        raise ValueError(message)
+    return meta.get_value()
+
+
+def _get_vector(
+    flow: Flow,
+    is_ingoing: bool,
+    commodity: str,
+    node_to_commodity: dict[str, str],
+    db: QueryDB,
+    data_period: SinglePeriodTimeIndex,
+    scenario_period: FixedFrequencyTimeIndex,
+    unit: str,
+    is_float32: bool,
+) -> FlowVolume:
+    arrows = flow.get_arrows()
+    if len(arrows) == 1:
+        volume = flow.get_volume()
+        return volume.get_scenario_vector(
+            db=db,
+            scenario_horizon=scenario_period,
+            level_period=data_period,
+            unit=unit,
+            is_float32=is_float32,
+        )
+
+    arrows = [a for a in flow.get_arrows() if a.is_ingoing() == is_ingoing and node_to_commodity[a.get_node()] == commodity]
+    if len(arrows) != 1:
+        message = f"Expected one arrow, got {arrows}"
+        raise ValueError(message)
+    arrow = arrows[0]
+
+    arrow_volumes = flow.get_arrow_volumes()
+
+    if arrow in arrow_volumes:
+        volume = arrow_volumes[arrow]
+        return volume.get_scenario_vector(
+            db=db,
+            scenario_horizon=scenario_period,
+            level_period=data_period,
+            unit=unit,
+            is_float32=is_float32,
+        )
+
+    # we have to calculate using volume and conversion
+    volume = flow.get_volume()
+
+    main_node = flow.get_main_node()
+    main_arrows = [a for a in flow.get_arrows() if a.get_node() == main_node]
+    if len(main_arrows) != 1:
+        message = f"Expected exactly one arrow connected to main node of flow. Got {main_arrows}"
+        raise ValueError(message)
+    main_arrow = main_arrows[0]
+
+    if arrow == main_arrow:
+        return volume.get_scenario_vector(
+            db=db,
+            scenario_horizon=scenario_period,
+            level_period=data_period,
+            unit=unit,
+            is_float32=is_float32,
+        )
+
+    main_units = main_arrow.get_conversion_unit_set(db)
+    if not main_units:
+        return volume.get_scenario_vector(
+            db=db,
+            scenario_horizon=scenario_period,
+            level_period=data_period,
+            unit=unit,
+            is_float32=is_float32,
+        )
+
+    # we must convert to correct unit
+    arrow_units = arrow.get_conversion_unit_set(db)
+
+    a_main_unit = next(iter(main_units))
+    a_arrow_unit = next(iter(arrow_units))
+
+    unit_conversion_factor = get_unit_conversion_factor(
+        from_unit=f"(({a_arrow_unit}) / ({a_main_unit}))",
+        to_unit=unit,
+    )
+
+    vector = volume.get_scenario_vector(
+        db=db,
+        scenario_horizon=scenario_period,
+        level_period=data_period,
+        unit=a_main_unit,
+        is_float32=is_float32,
+    )
+
+    if arrow.has_profile():
+        conversion_vector = arrow.get_scenario_vector(
+            db=db,
+            scenario_horizon=scenario_period,
+            level_period=data_period,
+            unit=a_arrow_unit,
+            is_float32=is_float32,
+        )
+        np.multiply(vector, conversion_vector, out=vector)
+        np.multiply(vector, unit_conversion_factor, out=vector)
+        return vector
+
+    conversion_value = arrow.get_data_value(
+        db=db,
+        scenario_horizon=scenario_period,
+        level_period=data_period,
+        unit=a_arrow_unit,
+    )
+    np.multiply(vector, conversion_value * unit_conversion_factor, out=vector)
+
+    return vector
+
+
+# TODO: More options: node_category, consumption_category, production_category, with_trade_partners
+
+
+def _check_category(category: str, flow_id: str, flow_info: FlowInfo) -> None:
+    pass
+
+
+def get_regional_volumes(  # noqa C901
+    db: Model | QueryDB,
+    commodity: str,
+    node_category: str,
+    production_category: str,
+    consumption_category: str,
+    data_period: SinglePeriodTimeIndex,
+    scenario_period: FixedFrequencyTimeIndex,
+    unit: str,
+    is_float32: bool = True,
+) -> RegionalVolumes:
+    """
+    Calculate aggregated production, consumption, import and export for member in node_category.
+
+    Decompose the model components into nodes and flows. Analyze the flows to determine their contribution to production, consumption, import, and export if
+    they are associated with the specified commodity. Group these contributions based on the provided node_category, production_category, and
+    consumption_category metadata.
+
+    Args:
+        db (Model | QueryDB): Model or QueryDB to use
+        commodity (str): Commodity to consider
+        node_category (str): Meta key for node category to group the results by
+        production_category (str): Meta key for production category to group the results by
+        consumption_category (str): Meta key for consumption category to group the results by
+        data_period (SinglePeriodTimeIndex): Consider results for this data period
+        scenario_period (FixedFrequencyTimeIndex): Consider results for this scenario period
+        unit (str): Unit to use for the results
+        is_float32 (bool): Use float32 for calculations and results if True
+
+    """
+    db = _load_model_and_create_model_db(db)
+
+    if not isinstance(is_float32, bool):
+        message = f"Expected bool for is_float32, got {is_float32}"
+        raise ValueError(message)
+
+    domain_components = {k: v for k, v in db.get_data().items() if isinstance(v, Component)}
+
+    graph: dict[str, Node | Flow] = get_supported_components(
+        components=domain_components,
+        supported_types=(Node, Flow),
+        forbidden_types=tuple(),
+    )
+
+    flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}
+    nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}
+
+    node_to_commodity = get_node_to_commodity(graph)
+
+    # only nodes of prefered commodity
+    nodes_of_commodity: dict[str, Node] = {k: v for k, v in nodes.items() if v.get_commodity() == commodity}
+
+    # Mapping of node to category of prefered node level
+    node_to_category: dict[str, str] = {k: _get_meta_value(k, v, node_category) for k, v in nodes_of_commodity.items()}
+
+    category_to_nodes: dict[str, set[str]] = defaultdict(set)
+    visited = set()
+    for node_id, category in node_to_category.items():
+        assert node_id not in visited, f"{node_id} is duplicated"
+        category_to_nodes[category].add(node_id)
+        visited.add(node_id)
+
+    direct_production: dict[str, dict[str, list[Flow]]] = dict()
+    direct_consumption: dict[str, dict[str, list[Flow]]] = dict()
+    converted_production: dict[str, dict[str, list[Flow]]] = dict()
+    converted_consumption: dict[str, dict[str, list[Flow]]] = dict()
+    import_: dict[str, dict[str, list[Flow]]] = dict()
+    export: dict[str, dict[str, list[Flow]]] = dict()
+
+    for flow_id, flow in flows.items():
+        flow_infos = get_flow_infos(flow, node_to_commodity)
+
+        prod_category = None
+        cons_category = None
+        with contextlib.suppress(Exception):
+            prod_category = _get_meta_value(flow_id, flow, production_category)
+        with contextlib.suppress(Exception):
+            cons_category = _get_meta_value(flow_id, flow, consumption_category)
+
+        for flow_info in flow_infos:
+            flow_info: FlowInfo
+            if flow_info.category == "direct_in" and flow_info.commodity_in == commodity:
+                _check_category(prod_category, flow_id, flow_info)
+                node_category = node_to_category[flow_info.node_in]
+                if node_category not in direct_production:
+                    direct_production[node_category] = defaultdict(list)
+                direct_production[node_category][prod_category].append(flow)
+
+            elif flow_info.category == "conversion" and flow_info.commodity_in == commodity:
+                _check_category(prod_category, flow_id, flow_info)
+                node_category = node_to_category[flow_info.node_in]
+                if node_category not in converted_production:
+                    converted_production[node_category] = defaultdict(list)
+                converted_production[node_category][prod_category].append(flow)
+
+            elif flow_info.category == "direct_out" and flow_info.commodity_out == commodity:
+                _check_category(cons_category, flow_id, flow_info)
+                node_category = node_to_category[flow_info.node_out]
+                if node_category not in direct_consumption:
+                    direct_consumption[node_category] = defaultdict(list)
+                direct_consumption[node_category][cons_category].append(flow)
+
+            elif flow_info.category == "conversion" and flow_info.commodity_out == commodity:
+                _check_category(cons_category, flow_id, flow_info)
+                node_category = node_to_category[flow_info.node_out]
+                if node_category not in converted_consumption:
+                    converted_consumption[node_category] = defaultdict(list)
+                converted_consumption[node_category][cons_category].append(flow)
+
+            elif flow_info.category == "transport":
+                if node_to_commodity[flow_info.node_in] != commodity:
+                    continue
+                category_in = node_to_category[flow_info.node_in]
+                category_out = node_to_category[flow_info.node_out]
+                if category_in == category_out:
+                    continue
+
+                if category_in not in import_:
+                    import_[category_in] = defaultdict(list)
+                import_[category_in][category_out].append(flow)
+
+                if category_out not in export:
+                    export[category_out] = defaultdict(list)
+                export[category_out][category_in].append(flow)
+
+    num_periods = scenario_period.get_num_periods()
+    dtype = np.float32 if is_float32 else np.float64
+
+    out = RegionalVolumes()
+
+    # direct
+    for flow_dict, out_dict, is_ingoing in [(direct_production, out.get_production(), True), (direct_consumption, out.get_consumption(), False)]:
+        for node_category, flow_categories in flow_dict.items():
+            if node_category not in out_dict:
+                out_dict[node_category] = dict()
+            for flow_category, flows in flow_categories.items():
+                x = np.zeros(num_periods, dtype=dtype)
+                for flow in set(flows):
+                    try:
+                        vector = _get_vector(
+                            flow=flow,
+                            is_ingoing=is_ingoing,
+                            commodity=commodity,
+                            node_to_commodity=node_to_commodity,
+                            db=db,
+                            scenario_period=scenario_period,
+                            data_period=data_period,
+                            unit=unit,
+                            is_float32=is_float32,
+                        )
+                        np.add(x, vector, out=x)
+                    except Exception as e:
+                        send_warning_event(flow, f"Could not get direct production or consumption for flow {flow}: {e}")
+                out_dict[node_category][flow_category] = x
+
+    # converted
+    for flow_dict, out_dict, is_ingoing in [(converted_production, out.get_production(), True), (converted_consumption, out.get_consumption(), False)]:
+        for node_category, flow_categories in flow_dict.items():
+            if node_category not in out_dict:
+                out_dict[node_category] = dict()
+            for flow_category, flows in flow_categories.items():
+                x = out_dict[node_category][flow_category] if flow_category in out_dict[node_category] else np.zeros(num_periods, dtype=dtype)
+                for flow in set(flows):
+                    try:
+                        vector = _get_vector(
+                            flow=flow,
+                            is_ingoing=is_ingoing,
+                            commodity=commodity,
+                            node_to_commodity=node_to_commodity,
+                            db=db,
+                            scenario_period=scenario_period,
+                            data_period=data_period,
+                            unit=unit,
+                            is_float32=is_float32,
+                        )
+                        np.add(x, vector, out=x)
+                    except Exception as e:
+                        send_warning_event(flow, f"Could not get indirect production or consumption for flow {flow}: {e}")
+                out_dict[node_category][flow_category] = x
+
+    # trade
+    for flow_dict, out_dict, is_ingoing in [(import_, out.get_import(), True), (export, out.get_export(), False)]:
+        for category, trade_partners in flow_dict.items():
+            out_dict[category] = dict()
+            for trade_partner, flows in trade_partners.items():
+                x = np.zeros(num_periods, dtype=dtype)
+                for flow in set(flows):
+                    try:
+                        vector = _get_vector(
+                            flow=flow,
+                            is_ingoing=is_ingoing,
+                            commodity=commodity,
+                            node_to_commodity=node_to_commodity,
+                            db=db,
+                            scenario_period=scenario_period,
+                            data_period=data_period,
+                            unit=unit,
+                            is_float32=is_float32,
+                        )
+                        np.add(x, vector, out=x)
+                    except Exception as e:
+                        send_warning_event(flow, f"Could not get trade for flow {flow}: {e}")
+                out_dict[category][trade_partner] = x
+
+    return out
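
For orientation, here is a hedged usage sketch of get_regional_volumes. Only the signature and the accessor methods come from the code above; the Model instance, the meta keys, the commodity name, the unit string, and the two time indexes are hypothetical placeholders assumed to be built elsewhere.

    from framcore.utils import get_regional_volumes

    # `model` is assumed to be a framcore Model; `data_period` and `scenario_period`
    # are assumed to be a SinglePeriodTimeIndex and a FixedFrequencyTimeIndex.
    volumes = get_regional_volumes(
        db=model,
        commodity="Power",                 # hypothetical commodity name
        node_category="price_area",        # hypothetical Member meta keys
        production_category="technology",
        consumption_category="sector",
        data_period=data_period,
        scenario_period=scenario_period,
        unit="GWh",                        # hypothetical unit string
        is_float32=True,
    )
    production = volumes.get_production()  # node category -> production category -> NDArray
    exports = volumes.get_export()         # exporting category -> trade partner -> NDArray
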
--- /dev/null
+++ b/framcore/utils/get_supported_components.py
@@ -0,0 +1,60 @@
+from framcore.components import Component
+
+
+def get_supported_components(
+    components: dict[str, Component],
+    supported_types: tuple[type[Component]],
+    forbidden_types: tuple[type[Component]],
+) -> dict[str, Component]:
+    """Return simplified version of components in compliance with specified component types.See description in Component."""
+    output: dict[str, Component] = {}
+    errors: list[str] = []
+
+    _simplify_until_supported(
+        output,
+        errors,
+        components,
+        supported_types,
+        forbidden_types,
+    )
+
+    if errors:
+        message = "\n".join(errors)
+        raise ValueError(message)
+
+    return output
+
+
+def _simplify_until_supported(
+    output: dict[str, Component],
+    errors: list[str],
+    candidates: dict[str, Component],
+    supported_types: tuple[type[Component]],
+    forbidden_types: tuple[type[Component]],
+) -> None:
+    for name, component in candidates.items():
+        if isinstance(component, forbidden_types):
+            message = f"{component.get_top_parent()} has forbidden component {component}"
+            errors.append(message)
+
+        elif isinstance(component, supported_types):
+            output[name] = component
+
+        else:
+            simpler_components = component.get_simpler_components(name)
+
+            if not simpler_components:
+                message = (
+                    f"Failed to support component. Reached bottom level component {component} with top level "
+                    f"parent {component.get_top_parent()}. No component in the hierarchy was supported."
+                )
+                errors.append(message)
+
+            else:
+                _simplify_until_supported(
+                    output,
+                    errors,
+                    simpler_components,
+                    supported_types,
+                    forbidden_types,
+                )
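
get_regional_volumes above already shows the intended call pattern for get_supported_components; the sketch below just isolates it. The `components` dict is a hypothetical placeholder; the type tuples mirror the call in get_regional_volumes.

    from framcore.components import Flow, Node
    from framcore.utils import get_supported_components

    # `components` is assumed to be a dict[str, Component], e.g. the Component
    # entries of a model's data dict.
    graph = get_supported_components(
        components=components,
        supported_types=(Node, Flow),  # recursively simplify everything down to Nodes and Flows
        forbidden_types=tuple(),       # no component types are forbidden
    )
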
--- /dev/null
+++ b/framcore/utils/global_energy_equivalent.py
@@ -0,0 +1,63 @@
+from framcore.components import Component, HydroModule
+from framcore.curves import Curve
+from framcore.expressions import Expr
+from framcore.metadata import LevelExprMeta
+from framcore.timevectors import ConstantTimeVector, TimeVector
+
+
+def get_hydro_downstream_energy_equivalent(
+    data: dict[str, Component | TimeVector | Curve | Expr],
+    module_name: str,
+    power_node: str | None = None,
+) -> Expr:
+    """
+    Get the expression for the sum downstream energy equivalent for a hydro module.
+
+    - If power node is given, only count downstream energy equivalents that are connected to the power node.
+    - Energy equivalents are collected from hydro generators downstream, and the main topology follows the release_to attribute.
+    - Transport pumps are included in the downstream topology, but counted as negative energy equivalents.
+
+    Args:
+        data (dict[str, Component | TimeVector | Curve | Expr]): The dict containing the components.
+        module_name (str): The name of the hydro module to start from.
+        power_node (str): Optional power node to filter energy equivalents.
+
+    """
+    if data[module_name].get_pump() and data[module_name].get_pump().get_from_module() == module_name:  # transport pump
+        pump_power_node = data[module_name].get_pump().get_power_node()
+        pump_to = data[module_name].get_pump().get_to_module()
+        energy_equivalent = get_hydro_downstream_energy_equivalent(data, pump_to, power_node)  # continue downstream of pump_to module
+        if power_node in (pump_power_node, None):
+            return energy_equivalent - data[module_name].get_pump().get_energy_equivalent().get_level()  # pumps has negative energy equivalents
+        return energy_equivalent
+
+    energy_equivalent = 0
+    if data[module_name].get_generator():  # hydro generator
+        module_power_node = data[module_name].get_generator().get_power_node()
+        if power_node in (module_power_node, None):
+            energy_equivalent += data[module_name].get_generator().get_energy_equivalent().get_level()
+    if data[module_name].get_release_to():  # continue from release_to module
+        release_to = data[module_name].get_release_to()
+        energy_equivalent += get_hydro_downstream_energy_equivalent(data, release_to, power_node)
+    return energy_equivalent
+
+
+def set_global_energy_equivalent(data: dict[str, Component | TimeVector | Curve | Expr], metakey_energy_eq_downstream: str) -> None:
+    """
+    Loop through data dict and set the downstream energy equivalent for all HydroModules.
+
+    Send a warning event if a HydroModule has no downstream energy equivalents.
+
+    Args:
+        data (dict[str, Component | TimeVector | Curve | Expr]): The dict containing the components.
+        metakey_energy_eq_downstream (str): The meta key to use for storing the downstream energy equivalent.
+
+    """
+    for module_name, module in data.items():
+        if isinstance(module, HydroModule) and module.get_reservoir():
+            energy_equivalent = get_hydro_downstream_energy_equivalent(data, module_name)
+            if energy_equivalent == 0:
+                message = f"HydroModule {module_name} has no downstream energy equivalents."
+                module.send_warning_event(message)
+                energy_equivalent = ConstantTimeVector(scalar=0.0, unit="kWh/m3", is_max_level=False)
+            module.add_meta(metakey_energy_eq_downstream, LevelExprMeta(energy_equivalent))
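
A hedged usage sketch for the two helpers above; the `data` dict, the module name, the power node, and the meta key are hypothetical placeholders, and only the signatures come from the code above.

    from framcore.utils import get_hydro_downstream_energy_equivalent, set_global_energy_equivalent

    # `data` is assumed to be a dict[str, Component | TimeVector | Curve | Expr],
    # e.g. the contents of a framcore Model.
    set_global_energy_equivalent(data, metakey_energy_eq_downstream="EnergyEqDownstream")

    # Or query a single hydro module, optionally restricted to one power node:
    expr = get_hydro_downstream_energy_equivalent(data, "SomeReservoirModule", power_node="SomePowerNode")
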