fram-core 0.0.0__py3-none-any.whl → 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. fram_core-0.1.0a1.dist-info/METADATA +41 -0
  2. fram_core-0.1.0a1.dist-info/RECORD +100 -0
  3. {fram_core-0.0.0.dist-info → fram_core-0.1.0a1.dist-info}/WHEEL +1 -2
  4. fram_core-0.1.0a1.dist-info/licenses/LICENSE.md +8 -0
  5. framcore/Base.py +142 -0
  6. framcore/Model.py +73 -0
  7. framcore/__init__.py +9 -0
  8. framcore/aggregators/Aggregator.py +153 -0
  9. framcore/aggregators/HydroAggregator.py +837 -0
  10. framcore/aggregators/NodeAggregator.py +495 -0
  11. framcore/aggregators/WindSolarAggregator.py +323 -0
  12. framcore/aggregators/__init__.py +13 -0
  13. framcore/aggregators/_utils.py +184 -0
  14. framcore/attributes/Arrow.py +305 -0
  15. framcore/attributes/ElasticDemand.py +90 -0
  16. framcore/attributes/ReservoirCurve.py +37 -0
  17. framcore/attributes/SoftBound.py +19 -0
  18. framcore/attributes/StartUpCost.py +54 -0
  19. framcore/attributes/Storage.py +146 -0
  20. framcore/attributes/TargetBound.py +18 -0
  21. framcore/attributes/__init__.py +65 -0
  22. framcore/attributes/hydro/HydroBypass.py +42 -0
  23. framcore/attributes/hydro/HydroGenerator.py +83 -0
  24. framcore/attributes/hydro/HydroPump.py +156 -0
  25. framcore/attributes/hydro/HydroReservoir.py +27 -0
  26. framcore/attributes/hydro/__init__.py +13 -0
  27. framcore/attributes/level_profile_attributes.py +714 -0
  28. framcore/components/Component.py +112 -0
  29. framcore/components/Demand.py +130 -0
  30. framcore/components/Flow.py +167 -0
  31. framcore/components/HydroModule.py +330 -0
  32. framcore/components/Node.py +76 -0
  33. framcore/components/Thermal.py +204 -0
  34. framcore/components/Transmission.py +183 -0
  35. framcore/components/_PowerPlant.py +81 -0
  36. framcore/components/__init__.py +22 -0
  37. framcore/components/wind_solar.py +67 -0
  38. framcore/curves/Curve.py +44 -0
  39. framcore/curves/LoadedCurve.py +155 -0
  40. framcore/curves/__init__.py +9 -0
  41. framcore/events/__init__.py +21 -0
  42. framcore/events/events.py +51 -0
  43. framcore/expressions/Expr.py +490 -0
  44. framcore/expressions/__init__.py +28 -0
  45. framcore/expressions/_get_constant_from_expr.py +483 -0
  46. framcore/expressions/_time_vector_operations.py +615 -0
  47. framcore/expressions/_utils.py +73 -0
  48. framcore/expressions/queries.py +423 -0
  49. framcore/expressions/units.py +207 -0
  50. framcore/fingerprints/__init__.py +11 -0
  51. framcore/fingerprints/fingerprint.py +293 -0
  52. framcore/juliamodels/JuliaModel.py +161 -0
  53. framcore/juliamodels/__init__.py +7 -0
  54. framcore/loaders/__init__.py +10 -0
  55. framcore/loaders/loaders.py +407 -0
  56. framcore/metadata/Div.py +73 -0
  57. framcore/metadata/ExprMeta.py +50 -0
  58. framcore/metadata/LevelExprMeta.py +17 -0
  59. framcore/metadata/Member.py +55 -0
  60. framcore/metadata/Meta.py +44 -0
  61. framcore/metadata/__init__.py +15 -0
  62. framcore/populators/Populator.py +108 -0
  63. framcore/populators/__init__.py +7 -0
  64. framcore/querydbs/CacheDB.py +50 -0
  65. framcore/querydbs/ModelDB.py +34 -0
  66. framcore/querydbs/QueryDB.py +45 -0
  67. framcore/querydbs/__init__.py +11 -0
  68. framcore/solvers/Solver.py +48 -0
  69. framcore/solvers/SolverConfig.py +272 -0
  70. framcore/solvers/__init__.py +9 -0
  71. framcore/timeindexes/AverageYearRange.py +20 -0
  72. framcore/timeindexes/ConstantTimeIndex.py +17 -0
  73. framcore/timeindexes/DailyIndex.py +21 -0
  74. framcore/timeindexes/FixedFrequencyTimeIndex.py +762 -0
  75. framcore/timeindexes/HourlyIndex.py +21 -0
  76. framcore/timeindexes/IsoCalendarDay.py +31 -0
  77. framcore/timeindexes/ListTimeIndex.py +197 -0
  78. framcore/timeindexes/ModelYear.py +17 -0
  79. framcore/timeindexes/ModelYears.py +18 -0
  80. framcore/timeindexes/OneYearProfileTimeIndex.py +21 -0
  81. framcore/timeindexes/ProfileTimeIndex.py +32 -0
  82. framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
  83. framcore/timeindexes/TimeIndex.py +90 -0
  84. framcore/timeindexes/WeeklyIndex.py +21 -0
  85. framcore/timeindexes/__init__.py +36 -0
  86. framcore/timevectors/ConstantTimeVector.py +135 -0
  87. framcore/timevectors/LinearTransformTimeVector.py +114 -0
  88. framcore/timevectors/ListTimeVector.py +123 -0
  89. framcore/timevectors/LoadedTimeVector.py +104 -0
  90. framcore/timevectors/ReferencePeriod.py +41 -0
  91. framcore/timevectors/TimeVector.py +94 -0
  92. framcore/timevectors/__init__.py +17 -0
  93. framcore/utils/__init__.py +36 -0
  94. framcore/utils/get_regional_volumes.py +369 -0
  95. framcore/utils/get_supported_components.py +60 -0
  96. framcore/utils/global_energy_equivalent.py +46 -0
  97. framcore/utils/isolate_subnodes.py +163 -0
  98. framcore/utils/loaders.py +97 -0
  99. framcore/utils/node_flow_utils.py +236 -0
  100. framcore/utils/storage_subsystems.py +107 -0
  101. fram_core-0.0.0.dist-info/METADATA +0 -5
  102. fram_core-0.0.0.dist-info/RECORD +0 -4
  103. fram_core-0.0.0.dist-info/top_level.txt +0 -1
@@ -0,0 +1,369 @@
1
+ from __future__ import annotations
2
+
3
+ import contextlib
4
+ from collections import defaultdict
5
+ from typing import TYPE_CHECKING
6
+
7
+ import numpy as np
8
+ from numpy.typing import NDArray
9
+
10
+ from framcore.attributes import FlowVolume
11
+ from framcore.components import Component, Flow, Node
12
+ from framcore.events import send_warning_event
13
+ from framcore.expressions import get_unit_conversion_factor
14
+ from framcore.expressions._utils import _load_model_and_create_model_db
15
+ from framcore.metadata import Member
16
+ from framcore.querydbs import QueryDB
17
+ from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
18
+ from framcore.utils import FlowInfo, get_flow_infos, get_node_to_commodity, get_supported_components
19
+
20
+ if TYPE_CHECKING:
21
+ from framcore import Model
22
+
23
+
24
class RegionalVolumes:
    """
    Result container for aggregated regional energy volumes.

    Holds per-node production and consumption vectors grouped by category,
    plus import and export vectors grouped by trade partner. Instances start
    empty and are filled in by get_regional_volumes.
    """

    def __init__(self) -> None:
        """Create an empty container with no production, consumption, import or export."""
        self._production: dict[str, dict[str, NDArray]] = {}
        self._consumption: dict[str, dict[str, NDArray]] = {}
        self._export: dict[str, dict[str, NDArray]] = {}
        self._import: dict[str, dict[str, NDArray]] = {}

    def get_production(self) -> dict[str, dict[str, NDArray]]:
        """Return production vectors, keyed first by node and then by category."""
        return self._production

    def get_consumption(self) -> dict[str, dict[str, NDArray]]:
        """Return consumption vectors, keyed first by node and then by category."""
        return self._consumption

    def get_export(self) -> dict[str, dict[str, NDArray]]:
        """Return export vectors, keyed by exporting node and then by trade partner."""
        return self._export

    def get_import(self) -> dict[str, dict[str, NDArray]]:
        """Return import vectors, keyed by importing node and then by trade partner."""
        return self._import
54
+
55
+
56
def _get_meta_value(key: str, v: Node | Flow, category_level: str) -> str:
    """Read the Member metadata value stored on component *v* under *category_level*."""
    meta = v.get_meta(category_level)
    if isinstance(meta, Member):
        return meta.get_value()
    message = f"Expected for key {key} metadata of type Member, got {meta}"
    raise ValueError(message)
63
+
64
+
65
def _get_vector(
    flow: Flow,
    is_ingoing: bool,
    commodity: str,
    node_to_commodity: dict[str, str],
    db: QueryDB,
    data_period: SinglePeriodTimeIndex,
    scenario_period: FixedFrequencyTimeIndex,
    unit: str,
    is_float32: bool,
) -> NDArray:
    """
    Return the scenario volume vector (in `unit`) for the arrow of `flow` matching
    `is_ingoing` and `commodity`.

    Resolution order:
      1. single-arrow flows: the flow volume itself,
      2. an explicit per-arrow volume, if the flow stores one for the arrow,
      3. otherwise the flow volume combined with the arrow's conversion
         coefficient and a unit conversion.

    Raises:
        ValueError: if the matching arrow (or the arrow at the flow's main node)
            is not unique.
    """
    # NOTE(fix): return annotation corrected from FlowVolume to NDArray — every
    # return path yields a numeric vector, not a FlowVolume.

    def scenario_vector(obj, vector_unit: str) -> NDArray:
        # All scenario-vector queries in this function share db and periods.
        return obj.get_scenario_vector(
            db=db,
            scenario_horizon=scenario_period,
            level_period=data_period,
            unit=vector_unit,
            is_float32=is_float32,
        )

    arrows = flow.get_arrows()
    if len(arrows) == 1:
        return scenario_vector(flow.get_volume(), unit)

    candidates = [a for a in arrows if a.is_ingoing() == is_ingoing and node_to_commodity[a.get_node()] == commodity]
    if len(candidates) != 1:
        message = f"Expected one arrow, got {candidates}"
        raise ValueError(message)
    arrow = candidates[0]

    # Best case: the flow stores an explicit volume for this arrow.
    arrow_volumes = flow.get_arrow_volumes()
    if arrow in arrow_volumes:
        return scenario_vector(arrow_volumes[arrow], unit)

    # we have to calculate using volume and conversion
    volume = flow.get_volume()

    main_node = flow.get_main_node()
    main_arrows = [a for a in arrows if a.get_node() == main_node]
    if len(main_arrows) != 1:
        message = f"Expected exactly one arrow connected to main node of flow. Got {main_arrows}"
        raise ValueError(message)
    main_arrow = main_arrows[0]

    if arrow == main_arrow:
        # The flow volume is already expressed at the requested arrow.
        return scenario_vector(volume, unit)

    main_units = main_arrow.get_conversion_unit_set(db)
    if not main_units:
        # No conversion units available: use the flow volume as-is.
        return scenario_vector(volume, unit)

    # we must convert to correct unit
    arrow_units = arrow.get_conversion_unit_set(db)

    a_main_unit = next(iter(main_units))
    a_arrow_unit = next(iter(arrow_units))

    unit_conversion_factor = get_unit_conversion_factor(
        from_unit=f"(({a_arrow_unit}) / ({a_main_unit}))",
        to_unit=unit,
    )

    vector = scenario_vector(volume, a_main_unit)

    if arrow.has_profile():
        # Time-varying conversion coefficient: multiply element-wise, in place.
        conversion_vector = scenario_vector(arrow, a_arrow_unit)
        np.multiply(vector, conversion_vector, out=vector)
        np.multiply(vector, unit_conversion_factor, out=vector)
        return vector

    # Constant conversion coefficient: fold it into the unit factor.
    conversion_value = arrow.get_data_value(
        db=db,
        scenario_horizon=scenario_period,
        level_period=data_period,
        unit=a_arrow_unit,
    )
    np.multiply(vector, conversion_value * unit_conversion_factor, out=vector)

    return vector
174
+
175
+
176
+ # TODO: More options: node_category, consumption_category, production_category, with_trade_partners
177
+
178
+
179
def _check_category(category: str | None, flow_id: str, flow_info: FlowInfo) -> None:
    """Validate the category resolved for flow_id/flow_info.

    Currently a no-op placeholder (see TODO above); called at every flow
    classification site in get_regional_volumes so validation can be added
    in one place later. `category` may be None when the meta lookup failed.
    """
    pass
181
+
182
+
183
def _accumulate_flow_vectors(
    x: NDArray,
    flows: list[Flow],
    is_ingoing: bool,
    commodity: str,
    node_to_commodity: dict[str, str],
    db: QueryDB,
    data_period: SinglePeriodTimeIndex,
    scenario_period: FixedFrequencyTimeIndex,
    unit: str,
    is_float32: bool,
    error_label: str,
) -> None:
    """Add the scenario vector of every unique flow in `flows` into `x` in place.

    Best effort: a flow that fails is skipped with a warning event built from
    `error_label`, so one broken flow does not abort the whole aggregation.
    """
    for flow in set(flows):
        try:
            vector = _get_vector(
                flow=flow,
                is_ingoing=is_ingoing,
                commodity=commodity,
                node_to_commodity=node_to_commodity,
                db=db,
                scenario_period=scenario_period,
                data_period=data_period,
                unit=unit,
                is_float32=is_float32,
            )
            np.add(x, vector, out=x)
        except Exception as e:  # noqa: BLE001 - deliberate best-effort aggregation
            send_warning_event(flow, f"Could not get {error_label} for flow {flow}: {e}")


def get_regional_volumes(
    db: Model | QueryDB,
    commodity: str,
    node_category: str,
    production_category: str,
    consumption_category: str,
    data_period: SinglePeriodTimeIndex,
    scenario_period: FixedFrequencyTimeIndex,
    unit: str,
    is_float32: bool = True,
) -> RegionalVolumes:
    """Calculate aggregated production, consumption, import and export.

    Args:
        db: Model or QueryDB holding the components to aggregate.
        commodity: Only nodes and flows of this commodity are considered.
        node_category: Meta key whose Member value groups nodes into regions.
        production_category: Meta key used to categorize producing flows.
        consumption_category: Meta key used to categorize consuming flows.
        data_period: Period passed through as level_period when evaluating volumes.
        scenario_period: Scenario horizon the result vectors are indexed by.
        unit: Unit of the returned vectors.
        is_float32: Use float32 vectors if True, else float64.

    Returns:
        RegionalVolumes with one vector per (node category, flow category) for
        production/consumption and per (region, trade partner) for import/export.

    Raises:
        ValueError: if is_float32 is not a bool.
    """
    db = _load_model_and_create_model_db(db)

    if not isinstance(is_float32, bool):
        message = f"Expected bool for is_float32, got {is_float32}"
        raise ValueError(message)

    domain_components = {k: v for k, v in db.get_data().items() if isinstance(v, Component)}

    graph: dict[str, Node | Flow] = get_supported_components(
        components=domain_components,
        supported_types=(Node, Flow),
        forbidden_types=tuple(),
    )

    flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}
    nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}

    node_to_commodity = get_node_to_commodity(graph)

    # only nodes of preferred commodity
    nodes_of_commodity: dict[str, Node] = {k: v for k, v in nodes.items() if v.get_commodity() == commodity}

    # Mapping of node to category of preferred node level.
    # (Dead category_to_nodes/visited bookkeeping removed: it was never read,
    # and dict keys are unique by construction.)
    node_to_category: dict[str, str] = {k: _get_meta_value(k, v, node_category) for k, v in nodes_of_commodity.items()}

    direct_production: dict[str, dict[str, list[Flow]]] = {}
    direct_consumption: dict[str, dict[str, list[Flow]]] = {}
    converted_production: dict[str, dict[str, list[Flow]]] = {}
    converted_consumption: dict[str, dict[str, list[Flow]]] = {}
    import_: dict[str, dict[str, list[Flow]]] = {}
    export: dict[str, dict[str, list[Flow]]] = {}

    for flow_id, flow in flows.items():
        flow_infos = get_flow_infos(flow, node_to_commodity)

        # Category metadata may be missing; best-effort lookups (None = uncategorized).
        prod_category = None
        cons_category = None
        with contextlib.suppress(Exception):
            prod_category = _get_meta_value(flow_id, flow, production_category)
        with contextlib.suppress(Exception):
            cons_category = _get_meta_value(flow_id, flow, consumption_category)

        # NOTE(fix): loop variable renamed node_cat — the original clobbered the
        # node_category parameter here.
        for flow_info in flow_infos:
            if flow_info.category == "direct_in" and flow_info.commodity_in == commodity:
                _check_category(prod_category, flow_id, flow_info)
                node_cat = node_to_category[flow_info.node_in]
                direct_production.setdefault(node_cat, defaultdict(list))[prod_category].append(flow)

            elif flow_info.category == "conversion" and flow_info.commodity_in == commodity:
                _check_category(prod_category, flow_id, flow_info)
                node_cat = node_to_category[flow_info.node_in]
                converted_production.setdefault(node_cat, defaultdict(list))[prod_category].append(flow)

            elif flow_info.category == "direct_out" and flow_info.commodity_out == commodity:
                _check_category(cons_category, flow_id, flow_info)
                node_cat = node_to_category[flow_info.node_out]
                direct_consumption.setdefault(node_cat, defaultdict(list))[cons_category].append(flow)

            elif flow_info.category == "conversion" and flow_info.commodity_out == commodity:
                _check_category(cons_category, flow_id, flow_info)
                node_cat = node_to_category[flow_info.node_out]
                converted_consumption.setdefault(node_cat, defaultdict(list))[cons_category].append(flow)

            elif flow_info.category == "transport":
                if node_to_commodity[flow_info.node_in] != commodity:
                    continue
                category_in = node_to_category[flow_info.node_in]
                category_out = node_to_category[flow_info.node_out]
                if category_in == category_out:
                    continue  # trade within the same region is not import/export

                import_.setdefault(category_in, defaultdict(list))[category_out].append(flow)
                export.setdefault(category_out, defaultdict(list))[category_in].append(flow)

    num_periods = scenario_period.get_num_periods()
    dtype = np.float32 if is_float32 else np.float64

    out = RegionalVolumes()

    # direct: one fresh vector per (node category, flow category)
    for flow_dict, out_dict, is_ingoing in [(direct_production, out.get_production(), True), (direct_consumption, out.get_consumption(), False)]:
        for node_cat, flow_categories in flow_dict.items():
            if node_cat not in out_dict:
                out_dict[node_cat] = dict()
            for flow_category, flow_list in flow_categories.items():
                x = np.zeros(num_periods, dtype=dtype)
                _accumulate_flow_vectors(
                    x, flow_list, is_ingoing, commodity, node_to_commodity, db,
                    data_period, scenario_period, unit, is_float32,
                    "direct production or consumption",
                )
                out_dict[node_cat][flow_category] = x

    # converted: accumulate on top of any direct result for the same keys
    for flow_dict, out_dict, is_ingoing in [(converted_production, out.get_production(), True), (converted_consumption, out.get_consumption(), False)]:
        for node_cat, flow_categories in flow_dict.items():
            if node_cat not in out_dict:
                out_dict[node_cat] = dict()
            for flow_category, flow_list in flow_categories.items():
                x = out_dict[node_cat].get(flow_category)
                if x is None:
                    x = np.zeros(num_periods, dtype=dtype)
                _accumulate_flow_vectors(
                    x, flow_list, is_ingoing, commodity, node_to_commodity, db,
                    data_period, scenario_period, unit, is_float32,
                    "indirect production or consumption",
                )
                out_dict[node_cat][flow_category] = x

    # trade: one vector per (region, trade partner)
    for flow_dict, out_dict, is_ingoing in [(import_, out.get_import(), True), (export, out.get_export(), False)]:
        for category, trade_partners in flow_dict.items():
            out_dict[category] = dict()
            for trade_partner, flow_list in trade_partners.items():
                x = np.zeros(num_periods, dtype=dtype)
                _accumulate_flow_vectors(
                    x, flow_list, is_ingoing, commodity, node_to_commodity, db,
                    data_period, scenario_period, unit, is_float32,
                    "trade",
                )
                out_dict[category][trade_partner] = x

    return out
@@ -0,0 +1,60 @@
1
+ from framcore.components import Component
2
+
3
+
4
def get_supported_components(
    components: dict[str, Component],
    supported_types: tuple[type[Component], ...],
    forbidden_types: tuple[type[Component], ...],
) -> dict[str, Component]:
    """Return simplified version of components in compliance with specified component types.

    Each component is kept if it is an instance of a supported type, rejected
    if it is an instance of a forbidden type, and otherwise recursively
    replaced by its simpler sub-components until a supported level is reached.

    Note: the type-tuple annotations are variable-length (`tuple[..., ...]`);
    the original annotation described a 1-tuple.

    Raises:
        ValueError: if any component is forbidden or cannot be simplified into
            supported types. All failures are collected and reported together.
    """
    output: dict[str, Component] = {}
    errors: list[str] = []

    _simplify_until_supported(
        output,
        errors,
        components,
        supported_types,
        forbidden_types,
    )

    if errors:
        message = "\n".join(errors)
        raise ValueError(message)

    return output
26
+
27
+
28
+ def _simplify_until_supported(
29
+ output: dict[str, Component],
30
+ errors: list[str],
31
+ candidates: dict[str, Component],
32
+ supported_types: tuple[type[Component]],
33
+ forbidden_types: tuple[type[Component]],
34
+ ) -> None:
35
+ for name, component in candidates.items():
36
+ if isinstance(component, forbidden_types):
37
+ message = f"{component.get_top_parent()} has forbidden component {component}"
38
+ errors.append(message)
39
+
40
+ elif isinstance(component, supported_types):
41
+ output[name] = component
42
+
43
+ else:
44
+ simpler_components = component.get_simpler_components(name)
45
+
46
+ if not simpler_components:
47
+ message = (
48
+ f"Failed to support component. Reached bottom level component {component} with top level "
49
+ f"parent {component.get_top_parent()}. No component in the hierarchy was supported."
50
+ )
51
+ errors.append(message)
52
+
53
+ else:
54
+ _simplify_until_supported(
55
+ output,
56
+ errors,
57
+ simpler_components,
58
+ supported_types,
59
+ forbidden_types,
60
+ )
@@ -0,0 +1,46 @@
1
+ from framcore.components import Component, HydroModule
2
+ from framcore.curves import Curve
3
+ from framcore.expressions import Expr
4
+ from framcore.metadata import LevelExprMeta
5
+ from framcore.timevectors import ConstantTimeVector, TimeVector
6
+
7
+
8
def get_hydro_downstream_energy_equivalent(
    data: dict[str, Component | TimeVector | Curve | Expr],
    module_name: str,
    power_node: str | None = None,
) -> Expr:
    """
    Get the sum downstream energy equivalent for a hydro module.

    Either count all downstream energy equivalents (power_node=None), or only
    those connected to the given power_node. Walks downstream through pump
    targets and release_to modules recursively.
    """
    # Hoisted: the original called data[module_name].get_pump()/get_generator()
    # repeatedly on every branch.
    module = data[module_name]
    pump = module.get_pump()

    if pump and pump.get_from_module() == module_name:  # transport pump
        # continue downstream of the module the pump delivers to
        energy_equivalent = get_hydro_downstream_energy_equivalent(data, pump.get_to_module(), power_node)
        if power_node in (pump.get_power_node(), None):
            return energy_equivalent - pump.get_energy_eq().get_level()  # pumps has negative energy equivalents
        return energy_equivalent

    energy_equivalent = 0
    generator = module.get_generator()
    if generator:  # hydro generator
        if power_node in (generator.get_power_node(), None):
            energy_equivalent += generator.get_energy_eq().get_level()
    release_to = module.get_release_to()
    if release_to:  # continue from release_to module
        energy_equivalent += get_hydro_downstream_energy_equivalent(data, release_to, power_node)
    return energy_equivalent
35
+
36
+
37
def set_global_energy_equivalent(data: dict[str, Component | TimeVector | Curve | Expr], metakey_energy_eq_downstream: str) -> None:
    """Attach downstream-energy-equivalent metadata to every HydroModule with a reservoir.

    Modules whose downstream equivalent sums to zero get a warning event and a
    constant zero time vector instead.
    """
    reservoir_modules = ((name, m) for name, m in data.items() if isinstance(m, HydroModule) and m.get_reservoir())
    for module_name, module in reservoir_modules:
        energy_equivalent = get_hydro_downstream_energy_equivalent(data, module_name)
        if energy_equivalent == 0:
            module.send_warning_event(f"HydroModule {module_name} has no downstream energy equivalents.")
            energy_equivalent = ConstantTimeVector(scalar=0.0, unit="kWh/m3", is_max_level=False)
        module.add_meta(metakey_energy_eq_downstream, LevelExprMeta(energy_equivalent))
@@ -0,0 +1,163 @@
1
+ """Demo to show how we can use the core to write some functions we need."""
2
+
3
+ from collections import defaultdict
4
+ from copy import copy
5
+ from time import time
6
+
7
+ from framcore import Model
8
+ from framcore.components import Component, Flow, Node
9
+ from framcore.events import send_debug_event
10
+ from framcore.utils import get_node_to_commodity, get_supported_components, is_transport_by_commodity
11
+
12
+
13
+ def _is_boundary_flow(flow: Flow, nodes: set[str]) -> bool:
14
+ arrows = flow.get_arrows()
15
+ x, y = tuple(arrows) # has len 2
16
+ return int(x.get_node() in nodes) + int(y.get_node() in nodes) == 1
17
+
18
+
19
+ def _is_member(node: Node, meta_key: str, members: set[str]) -> bool:
20
+ meta = node.get_meta(meta_key)
21
+ value = meta.get_value()
22
+ return value in members
23
+
24
+
25
def isolate_subnodes(model: Model, commodity: str, meta_key: str, members: list[str]) -> None:
    """
    Delete nodes of commodity named using meta_key except members and boundary nodes and flows.

    Boundary nodes are set exogenous and all flows pointing to them except boundary flows into or out from member nodes.
    Iterates until a pass deletes nothing, since removing components can expose new disconnected parts.
    """
    t = time()

    data = model.get_data()
    counts_before = model.get_content_counts()

    has_not_converged = True
    num_iterations = 0

    while has_not_converged:
        num_iterations += 1

        n_data_before = len(data)

        # We need copy of components to set _parent None so component becomes top_parent in upcoming code
        components: dict[str, Component] = {k: copy(v) for k, v in data.items() if isinstance(v, Component)}
        for c in components.values():
            c._parent = None  # noqa: SLF001

        node_to_commodity = get_node_to_commodity(components)

        parent_keys: dict[Component, str] = {v: k for k, v in components.items()}

        graph: dict[str, Node | Flow] = get_supported_components(components, (Node, Flow), tuple())

        nodes: dict[str, Node] = {k: v for k, v in graph.items() if isinstance(v, Node)}
        flows: dict[str, Flow] = {k: v for k, v in graph.items() if isinstance(v, Flow)}

        commodity_nodes: dict[str, Node] = {k: v for k, v in nodes.items() if commodity == v.get_commodity()}
        for k, v in commodity_nodes.items():
            assert v.get_meta(meta_key), f"missing meta_key {meta_key} node_id {k}"

        inside_nodes: dict[str, Node] = {k: v for k, v in commodity_nodes.items() if _is_member(v, meta_key, members)}

        transports: dict[str, Flow] = {k: v for k, v in flows.items() if is_transport_by_commodity(v, node_to_commodity, commodity)}

        boundary_flows: dict[str, Flow] = {k: v for k, v in transports.items() if _is_boundary_flow(v, inside_nodes.keys())}

        # Boundary nodes: the non-member endpoint of each boundary flow.
        boundary_nodes: dict[str, Node] = dict()
        for flow in boundary_flows.values():
            for a in flow.get_arrows():
                node_id = a.get_node()
                if node_id not in inside_nodes:
                    boundary_nodes[node_id] = nodes[node_id]

        # Disjoint from boundary_nodes by construction.
        outside_nodes: dict[str, Node] = {k: v for k, v in commodity_nodes.items() if not (k in inside_nodes or k in boundary_nodes)}

        deletes: set[str] = set()

        deletes.update(outside_nodes.keys())
        deletes.update(boundary_nodes.keys())  # will be kept in delete step below
        deletes.update(boundary_flows.keys())  # will be kept in delete step below

        # delete flows delivering to deleted node
        for k, flow in flows.items():
            for a in flow.get_arrows():
                if a.get_node() in deletes:
                    deletes.add(k)
                    break  # goto next k, flow

        # needed for next step
        node_to_flows: dict[str, set[str]] = defaultdict(set)
        flow_to_nodes: dict[str, set[str]] = defaultdict(set)
        for flow_id, flow in flows.items():
            for arrow in flow.get_arrows():
                node_id = arrow.get_node()
                node_to_flows[node_id].add(flow_id)
                flow_to_nodes[flow_id].add(node_id)

        # delete disconnected subgraphs: parts of the graph that cannot reach
        # any inside node without passing through an outside/boundary node
        remaining = {n for n in nodes if n not in commodity_nodes}
        while remaining:
            is_disconnected_subgraph = True
            subgraph = set()
            possible_members = {remaining.pop()}
            while possible_members:
                member = possible_members.pop()
                if member in subgraph:  # avoid cycle
                    continue
                if member in flows:
                    subgraph.add(member)
                    for node in flow_to_nodes[member]:
                        # BUGFIX: was `or`, which is always true because a node
                        # cannot be both outside and boundary; traversal must
                        # stop at outside and boundary nodes.
                        if node not in outside_nodes and node not in boundary_nodes:
                            possible_members.add(node)
                        if node in inside_nodes:
                            is_disconnected_subgraph = False
                else:
                    subgraph.add(member)
                    for flow_id in node_to_flows[member]:
                        possible_members.add(flow_id)
            if is_disconnected_subgraph:
                deletes.update(subgraph)

        for key in deletes:
            if (key in boundary_flows) or (key in boundary_nodes):
                continue  # boundary elements survive deletion

            if key not in graph:
                continue

            parent_key = parent_keys[graph[key].get_top_parent()]

            if parent_key in data:
                del data[parent_key]

        n_data_after = len(data)

        if n_data_after == n_data_before:
            has_not_converged = False

    counts_after = model.get_content_counts()

    added_components = counts_after["components"] - counts_before["components"]
    if added_components.total() > 0:
        message = f"Expected only deleted components. Got additions {added_components}"
        raise RuntimeError(message)

    deleted_components = counts_before["components"] - counts_after["components"]

    # Boundary nodes from the final iteration become exogenous price nodes.
    for node_id in boundary_nodes:
        if node_id in data:
            node: Node = data[node_id]
            node.set_exogenous()
            if not node.get_price().has_level():
                message = f"{node_id} set to be exogenous, but no price is available."
                raise RuntimeError(message)

    send_debug_event(isolate_subnodes, f"Used {num_iterations} iterations and {round(time() - t, 2)} seconds and deleted {deleted_components}")