fram-core 0.1.0a1__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. {fram_core-0.1.0a1.dist-info → fram_core-0.1.1.dist-info}/METADATA +6 -5
  2. fram_core-0.1.1.dist-info/RECORD +100 -0
  3. {fram_core-0.1.0a1.dist-info → fram_core-0.1.1.dist-info}/WHEEL +1 -1
  4. framcore/Base.py +22 -3
  5. framcore/Model.py +26 -9
  6. framcore/__init__.py +2 -1
  7. framcore/aggregators/Aggregator.py +30 -11
  8. framcore/aggregators/HydroAggregator.py +37 -25
  9. framcore/aggregators/NodeAggregator.py +65 -30
  10. framcore/aggregators/WindSolarAggregator.py +22 -30
  11. framcore/attributes/Arrow.py +6 -4
  12. framcore/attributes/ElasticDemand.py +13 -13
  13. framcore/attributes/ReservoirCurve.py +3 -17
  14. framcore/attributes/SoftBound.py +2 -5
  15. framcore/attributes/StartUpCost.py +14 -3
  16. framcore/attributes/Storage.py +17 -5
  17. framcore/attributes/TargetBound.py +2 -4
  18. framcore/attributes/__init__.py +2 -4
  19. framcore/attributes/hydro/HydroBypass.py +9 -2
  20. framcore/attributes/hydro/HydroGenerator.py +24 -7
  21. framcore/attributes/hydro/HydroPump.py +32 -10
  22. framcore/attributes/hydro/HydroReservoir.py +4 -4
  23. framcore/attributes/level_profile_attributes.py +250 -53
  24. framcore/components/Component.py +27 -3
  25. framcore/components/Demand.py +18 -4
  26. framcore/components/Flow.py +26 -4
  27. framcore/components/HydroModule.py +45 -4
  28. framcore/components/Node.py +32 -9
  29. framcore/components/Thermal.py +12 -8
  30. framcore/components/Transmission.py +17 -2
  31. framcore/components/wind_solar.py +25 -10
  32. framcore/curves/LoadedCurve.py +0 -9
  33. framcore/expressions/Expr.py +137 -36
  34. framcore/expressions/__init__.py +3 -1
  35. framcore/expressions/_get_constant_from_expr.py +14 -20
  36. framcore/expressions/queries.py +121 -84
  37. framcore/expressions/units.py +30 -3
  38. framcore/fingerprints/fingerprint.py +0 -1
  39. framcore/juliamodels/JuliaModel.py +13 -3
  40. framcore/loaders/loaders.py +0 -2
  41. framcore/metadata/ExprMeta.py +13 -7
  42. framcore/metadata/LevelExprMeta.py +16 -1
  43. framcore/metadata/Member.py +7 -7
  44. framcore/metadata/__init__.py +1 -1
  45. framcore/querydbs/CacheDB.py +1 -1
  46. framcore/solvers/Solver.py +21 -6
  47. framcore/solvers/SolverConfig.py +4 -4
  48. framcore/timeindexes/AverageYearRange.py +9 -2
  49. framcore/timeindexes/ConstantTimeIndex.py +7 -2
  50. framcore/timeindexes/DailyIndex.py +14 -2
  51. framcore/timeindexes/FixedFrequencyTimeIndex.py +105 -53
  52. framcore/timeindexes/HourlyIndex.py +14 -2
  53. framcore/timeindexes/IsoCalendarDay.py +5 -3
  54. framcore/timeindexes/ListTimeIndex.py +103 -23
  55. framcore/timeindexes/ModelYear.py +8 -2
  56. framcore/timeindexes/ModelYears.py +11 -2
  57. framcore/timeindexes/OneYearProfileTimeIndex.py +10 -2
  58. framcore/timeindexes/ProfileTimeIndex.py +14 -3
  59. framcore/timeindexes/SinglePeriodTimeIndex.py +1 -1
  60. framcore/timeindexes/TimeIndex.py +16 -3
  61. framcore/timeindexes/WeeklyIndex.py +14 -2
  62. framcore/{expressions → timeindexes}/_time_vector_operations.py +76 -2
  63. framcore/timevectors/ConstantTimeVector.py +12 -16
  64. framcore/timevectors/LinearTransformTimeVector.py +20 -3
  65. framcore/timevectors/ListTimeVector.py +18 -14
  66. framcore/timevectors/LoadedTimeVector.py +1 -8
  67. framcore/timevectors/ReferencePeriod.py +13 -3
  68. framcore/timevectors/TimeVector.py +26 -12
  69. framcore/utils/__init__.py +0 -1
  70. framcore/utils/get_regional_volumes.py +21 -3
  71. framcore/utils/get_supported_components.py +1 -1
  72. framcore/utils/global_energy_equivalent.py +22 -5
  73. framcore/utils/isolate_subnodes.py +12 -3
  74. framcore/utils/loaders.py +7 -7
  75. framcore/utils/node_flow_utils.py +4 -4
  76. framcore/utils/storage_subsystems.py +3 -4
  77. fram_core-0.1.0a1.dist-info/RECORD +0 -100
  78. {fram_core-0.1.0a1.dist-info → fram_core-0.1.1.dist-info}/licenses/LICENSE.md +0 -0
framcore/aggregators/NodeAggregator.py

@@ -7,24 +7,55 @@ from typing import TYPE_CHECKING
  from framcore.aggregators import Aggregator
  from framcore.aggregators._utils import _aggregate_costs
  from framcore.attributes import MaxFlowVolume, Price
- from framcore.components import Component, Demand, Node, Transmission, Flow
+ from framcore.components import Component, Demand, Flow, Node, Transmission
  from framcore.curves import Curve
  from framcore.expressions import Expr
  from framcore.metadata import Member, Meta
  from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
  from framcore.timevectors import TimeVector
- from framcore.utils import get_component_to_nodes, get_transports_by_commodity, get_supported_components, get_flow_infos, get_node_to_commodity
-
- # TODO: Support internal loss demand
- # TODO: Document method appropriate place (which docstring? module? class? __init__? _aggregate?)
- # TODO: transfer member metadata to internal loss Demand
+ from framcore.utils import get_component_to_nodes, get_flow_infos, get_node_to_commodity, get_supported_components, get_transports_by_commodity

  if TYPE_CHECKING:
      from framcore import Model


  class NodeAggregator(Aggregator):
-     """Aggregate groups of nodes for a commodity. Subclass of Aggregator."""
+     """
+     Aggregate groups of Nodes for a commodity. Subclass of Aggregator.
+
+     Aggregation steps (self._aggregate):
+
+     1. Map all Components to their Nodes of the correct commodity, if they reference any. This is important for redirecting all references to the
+        new Nodes after aggregation.
+     2. Create a mapping of which members the new Nodes will be aggregated from. This step also does a lot of error handling and checks the validity
+        of the metadata and groupings. Raises an error if:
+        - Nodes do not have any metadata for the meta key.
+        - Nodes have the wrong metadata object type for the meta key (must be Member).
+        - Exogenous Nodes are grouped together for aggregation with endogenous Nodes.
+     3. Initialize new Node objects and set prices and exogenous status. Prices are calculated as a weighted average of all the member Node prices.
+     4. Old Nodes are deleted from the Model data, after which the aggregated Node is added, and references in the rest of the system are updated to
+        point to the new Node.
+     5. Handling of transports: All Components which transport the same commodity as the aggregated Nodes are analysed. If the two Nodes they connect
+        are now the same aggregated Node, the transport is 'internal', meaning it now operates within a Node. If the transport Component is lossy, it
+        is replaced by a Demand Component representing the commodity consumption caused by the loss. All internal transports are afterwards deleted.
+
+
+     Disaggregation steps (self._disaggregate):
+
+     1. Collect the set of Node group keys which have either been removed from the Model data or changed to reference something other than Nodes.
+     2. Validate that IDs of Nodes to be restored have not been used to reference something else in the meantime.
+     3. Delete the aggregated Nodes and restore the old Nodes to the Model. Also copy shadow price results from the aggregated Nodes to the
+        disaggregated ones. NB! This will overwrite any previous shadow prices of the original disaggregated Nodes.
+     4. Restore the references in all objects to the disaggregated Nodes. A mapping created during aggregation is used for this.
+     5. Validate that no restorable internal transport has a name conflict with existing objects in the Model.
+        NB! An internal transport is not restorable if one or both of its referenced Nodes have been removed from the Model or now reference another
+        object. See step 1.
+     6. Restore all the restorable internal transports from the original data.
+     7. Delete the aggregation-created Demand objects representing internal transports.
+
+     See Aggregator for general design notes and rules to follow when using Aggregators.
+
+     """

      def __init__(
          self,
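A minimal usage sketch of the workflow the new docstring describes. The constructor arguments and the aggregate/disaggregate entry points below are assumptions; the diff shows neither signature, only that NodeAggregator subclasses Aggregator, works on one commodity, and groups Nodes by a Member meta key:

    # Hypothetical sketch; `model` is an existing framcore Model.
    from framcore.aggregators import NodeAggregator  # assumed re-export
    from framcore.metadata import Member

    data = model.get_data()
    data["NO1"].add_meta("price_area", Member("NO"))   # put two power Nodes
    data["NO2"].add_meta("price_area", Member("NO"))   # in the same group "NO"

    agg = NodeAggregator(commodity="Power", meta_key="price_area")  # assumed arguments
    agg.aggregate(model)      # aggregation steps 1-5 above
    # ... run a solver, read results ...
    agg.disaggregate(model)   # disaggregation steps 1-7 above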
@@ -125,16 +156,16 @@ class NodeAggregator(Aggregator):
          out: set[str] = set()
          nodes_and_flows = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())
          node_to_commodity = get_node_to_commodity(nodes_and_flows)
-         for flow in nodes_and_flows.values():
+         for flow in nodes_and_flows.values():
              if not isinstance(flow, Flow):
                  continue
              flow_infos = get_flow_infos(flow, node_to_commodity)
-             if not len(flow_infos) == 1:
+             if len(flow_infos) != 1:
                  continue
              flow_info = flow_infos[0]
              if flow_info.category != "direct_out":
                  continue
-             if flow_info.commodity_out != self._commodity:
+             if flow_info.commodity_out != self._commodity:
                  continue
              demand = flow
              for key in demand.get_meta_keys():
@@ -142,7 +173,6 @@ class NodeAggregator(Aggregator):
                  if isinstance(meta, Member):
                      out.add(key)
          return out
-

      def _add_internal_transport_demands(
          self,
@@ -163,7 +193,9 @@ class NodeAggregator(Aggregator):
          for key in self._internal_transports:
              transport = components[key]
              from_node, to_node = transports[key]
-             assert from_node == to_node, f"{from_node}, {to_node}"
+             assert from_node == to_node, (
+                 f"Transport {key} was added as an internal transport when it should not have been. Source node {from_node} and destination node {to_node} are not the same."
+             )
              node = from_node

              transport: Transmission
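This hunk and the next both touch assertion handling: the internal invariant above keeps an assert (with a better message), while the data-dependent key collision below is promoted to a real exception. A quick demonstration of why that promotion matters, since asserts disappear under optimized execution:

    import subprocess
    import sys

    # Under -O, Python strips assert statements entirely, so an assert can never
    # guard conditions that depend on user data; this subprocess exits 0.
    proc = subprocess.run([sys.executable, "-O", "-c", "assert False"])
    print(proc.returncode)   # 0: the failing assert was compiled away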
@@ -192,13 +224,15 @@ class NodeAggregator(Aggregator):
                  ),
              )

-             for meta_key in demand_member_meta_keys:
+             for meta_key in demand_member_meta_keys:  # transfer member metadata to internal loss Demand
                  internal_losses_demand.add_meta(meta_key, Member("InternalTransportLossFromNodeAggregator"))

              demand_key = key + "_InternalTransportLossDemand_" + node

              self._internal_transport_demands.add(demand_key)
-             assert demand_key not in data, f"{demand_key}"
+             if demand_key in data:
+                 msg = f"Could not use key {demand_key} for internal transport demand because it already exists in the Model."
+                 raise KeyError(msg)
              data[demand_key] = internal_losses_demand

      def _delete_internal_transports(
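The Demand created above stands in for the loss of a transport whose two endpoints were merged into one aggregated Node. Illustrative arithmetic only, with made-up numbers (the real code builds MaxFlowVolume/Price expressions, not floats):

    # A line both of whose endpoints land in the same aggregated Node no longer
    # moves the commodity between Nodes, but its loss is still real consumption,
    # so the aggregator replaces the line with a Demand of that size.
    flow_mw = 100.0                       # assumed flow over the now-internal line
    loss = 0.02                           # assumed 2 % transport loss
    internal_demand_mw = flow_mw * loss   # 2.0 MW consumed by the replacement Demand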
@@ -227,6 +261,13 @@ class NodeAggregator(Aggregator):
          data = model.get_data()
          weights = [1.0 / len(member_node_names)] * len(member_node_names)
          prices = [data[key].get_price() for key in member_node_names]
+
+         exogenous = [data[key].is_exogenous() for key in member_node_names]
+         if all(exogenous):
+             group_node.set_exogenous()
+         elif any(exogenous):
+             message = f"Only some member Nodes of group {group_node} are exogenous. This is ambiguous. Either all or none must be exogenous."
+             raise ValueError(message)
          if all(prices):
              level, profile, intercept = _aggregate_costs(
                  model=model,
@@ -241,7 +282,7 @@ class NodeAggregator(Aggregator):
              group_node.get_price().set_intercept(intercept)
          elif any(prices):
              missing = [key for key in member_node_names if data[key].get_price() is None]
-             self.send_warning_event(f"Only some member nodes of group {group_node} have a Price, skip aggregate prices. Missing: {missing}")
+             self.send_warning_event(f"Only some member Nodes of group {group_node} have a Price, skipping price aggregation. Missing: {missing}")

      def _replace_node(
          self,
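With weights fixed at 1/N, the aggregated price is just the arithmetic mean of the member prices. A worked example with made-up numbers (the real code aggregates level/profile expressions via _aggregate_costs):

    member_prices = [42.0, 48.0, 45.0]   # EUR/MWh for three member Nodes
    weights = [1.0 / len(member_prices)] * len(member_prices)
    group_price = sum(w * p for w, p in zip(weights, member_prices))
    print(group_price)                   # 45.0, the plain average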
@@ -316,17 +357,11 @@ class NodeAggregator(Aggregator):

          for group_name in exogenous_groups:  # Check exogenous groups.
              node_keys = grouped_nodes[group_name]
-             if len(node_keys) != 1:  # allow unchanged or renamed exogenous Nodes.
-                 self._errors.add(
-                     f"Group {group_name} contains an exogenous Node and must therefore contain only one Node."
-                     " Exogenous Nodes cannot be grouped together with other Nodes.",
-                 )
-                 # For if we want to allow pure exogenous groups.
-                 # for node_key in node_keys:
-                 #     node: Node = components[node_key]
-                 #     if not node.is_exogenous():
-                 #         self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is not allowed.")
-                 #         break
+             if len(node_keys) > 1:  # allow unchanged or renamed exogenous Nodes.
+                 # We allow pure exogenous groups.
+                 exogenous = [components[node_key].is_exogenous() for node_key in node_keys]
+                 if (not all(exogenous)) and any(exogenous):
+                     self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is ambiguous and therefore not allowed.")

          # remove single groups with unchanged names and check for duplicated names
          for group_name, node_keys in grouped_nodes.items():
@@ -359,7 +394,7 @@ class NodeAggregator(Aggregator):
                  flipped[member].add(group)
          for k, v in flipped.items():
              if len(v) > 1:
-                 self._errors.add(f"Node {k} belong to more than one group {v}")
+                 self._errors.add(f"Node {k} belongs to more than one group {v}")

      def _disaggregate(
          self,
@@ -393,7 +428,7 @@ class NodeAggregator(Aggregator):

              group_node = new_data[group_name]

-             if not isinstance(group_node, Node):
+             if not (isinstance(group_node, Node) and group_node.get_commodity() == self._commodity):
                  deleted_group_names.add(group_name)

          return deleted_group_names
@@ -409,7 +444,7 @@ class NodeAggregator(Aggregator):
          for key in member_node_names:
              if key in new_data:
                  obj = new_data[key]
-                 if not isinstance(obj, Node) and obj.get_commodity() == self._commodity:
+                 if not (isinstance(obj, Node) and obj.get_commodity() == self._commodity):
                      typ = type(obj).__name__
                      message = f"Restoring node {key} from group node {group_name} failed because model already stores object of {typ} with that name."
                      self._errors.add(message)
@@ -463,7 +498,7 @@ class NodeAggregator(Aggregator):
              if key in new_data:
                  obj = new_data[key]
                  typ = type(obj).__name__
-                 message = f"Restoring deleted transport {key} from group node {group_name} failed becausemodel already stores object of {typ} with that name."
+                 message = f"Restoring deleted transport {key} from group node {group_name} failed because model already stores object of {typ} with that name."
                  self._errors.add(message)

          self._report_errors(self._errors)
framcore/aggregators/WindSolarAggregator.py

@@ -23,9 +23,10 @@ if TYPE_CHECKING:

  class _WindSolarAggregator(Aggregator):
      """
-     Aggregate components into groups based on their power nodes.
+     Aggregate Wind and Solar components into groups based on their power nodes.

      Aggregation steps (self._aggregate):
+
      1. Group components based on their power nodes (self._group_by_power_node):
      2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
         - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
@@ -35,21 +36,15 @@ class _WindSolarAggregator(Aggregator):
         2a. Make new aggregated component and delete original components from model data.
      3. Add mapping from detailed to aggregated components to self._aggregation_map.

+
      Disaggregation steps (self._disaggregate):
+
      1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results (TODO)
      2. Distribute production from aggregated components back to the original components:
         - Results are weighted based on the weighting method (now only max_capacity supported).
      3. Delete aggregated components from the model.

-     Comments:
-     - It is recommended to only use the same aggregator type once on the same components of a model. If you want to go from one aggregation level to
-       another, it is better to use model.disaggregate first and then aggregate again. This is to keep the logic simple and avoid complex expressions.
-       We have also logic that recognises if result expressions come from aggregations or disaggregations. When aggregating or disaggregating these,
-       we can go back to the original results rather than setting up complex expressions that for examples aggregates the disaggregated results.
-     - Levels and profiles are aggregated separately, and then combined into attributes.
-     - We have chosen to eagerly evaluate weights for aggregation and disaggregation of levels and profiles. This is a balance between eagerly evaluating
-       everything, and setting up complex expressions. Eagerly evaluating everything would require setting up new timevectors after eager evaluation, which
-       is not ideal. While setting up complex expressions gives expressions that are harder to work with and slower to query from.
+     See Aggregator for general design notes and rules to follow when using Aggregators.

      Attributes:
          _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
@@ -57,6 +52,7 @@ class _WindSolarAggregator(Aggregator):
          _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components. agg to detailed

      Parent Attributes (see framcore.aggregators.Aggregator):
+
          _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
          _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
          _aggregation_map (dict[str, set[str]] | None): Maps aggregated components to their detailed components. detailed to agg
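The only weighting method the docstring mentions is max_capacity: an aggregated result is split back proportionally to each detailed component's maximum capacity. An illustrative sketch with made-up numbers (the real code weights level/profile expressions, not plain floats):

    max_capacity = {"wind_a": 300.0, "wind_b": 100.0}   # MW per detailed component
    aggregated_production = 120.0                        # MWh result on the aggregate
    total = sum(max_capacity.values())
    detailed = {name: aggregated_production * cap / total for name, cap in max_capacity.items()}
    print(detailed)   # {'wind_a': 90.0, 'wind_b': 30.0}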
@@ -235,9 +231,10 @@ class _WindSolarAggregator(Aggregator):

  class WindAggregator(_WindSolarAggregator):
      """
-     Aggregate components into groups based on their power nodes.
+     Aggregate Wind components into groups based on their power nodes.

      Aggregation steps (self._aggregate):
+
      1. Group components based on their power nodes (self._group_by_power_node):
      2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
         - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
@@ -247,28 +244,25 @@ class WindAggregator(_WindSolarAggregator):
         2a. Make new aggregated component and delete original components from model data.
      3. Add mapping from detailed to aggregated components to self._aggregation_map.

+
      Disaggregation steps (self._disaggregate):
+
      1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
      2. Distribute production from aggregated components back to the original components:
         - Results are weighted based on the weighting method (now only max_capacity supported).
      3. Delete aggregated components from the model.

-     Comments:
-     - It is recommended to only use the same aggregator type once on the same components of a model. If you want to go from one aggregation level to
-       another, it is better to use model.disaggregate first and then aggregate again. This is to keep the logic simple and avoid complex expressions.
-       We have also logic that recognises if result expressions come from aggregations or disaggregations. When aggregating or disaggregating these,
-       we can go back to the original results rather than setting up complex expressions that for examples aggregates the disaggregated results.
-     - Levels and profiles are aggregated separately, and then combined into attributes.
-     - We have chosen to eagerly evaluate weights for aggregation of levels and profiles, and disaggregation. This is a balance between eagerly evaluating
-       everything, and setting up complex expressions. Eagerly evaluating everything would require setting up new timevectors after eager evaluation, which
-       is not ideal. While setting up complex expressions gives expressions that are harder to work with and slower to query from.
+
+     See Aggregator for general design notes and rules to follow when using Aggregators.

      Attributes:
          _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
          _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
          _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components. agg to detailed

+
      Parent Attributes (see framcore.aggregators.Aggregator):
+
          _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
          _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
          _aggregation_map (dict[str, set[str]] | None): Maps aggregated components to their detailed components. detailed to agg
@@ -280,9 +274,10 @@ class WindAggregator(_WindSolarAggregator):

  class SolarAggregator(_WindSolarAggregator):
      """
-     Aggregate components into groups based on their power nodes.
+     Aggregate Solar components into groups based on their power nodes.

      Aggregation steps (self._aggregate):
+
      1. Group components based on their power nodes (self._group_by_power_node):
      2. Aggregate grouped components into a single aggregated component for each group (self._aggregate_groups):
         - Max_capacity is calculated as the sum of the maximum capacity levels with weighted profiles.
@@ -292,28 +287,25 @@ class SolarAggregator(_WindSolarAggregator):
         2a. Make new aggregated component and delete original components from model data.
      3. Add mapping from detailed to aggregated components to self._aggregation_map.

+
      Disaggregation steps (self._disaggregate):
+
      1. Restore original components from self._original_data. NB! Changes to aggregated modules are lost except for results.
      2. Distribute production from aggregated components back to the original components:
         - Results are weighted based on the weighting method (now only max_capacity supported).
      3. Delete aggregated components from the model.

-     Comments:
-     - It is recommended to only use the same aggregator type once on the same components of a model. If you want to go from one aggregation level to
-       another, it is better to use model.disaggregate first and then aggregate again. This is to keep the logic simple and avoid complex expressions.
-       We have also logic that recognises if result expressions come from aggregations or disaggregations. When aggregating or disaggregating these,
-       we can go back to the original results rather than setting up complex expressions that for examples aggregates the disaggregated results.
-     - Levels and profiles are aggregated separately, and then combined into attributes.
-     - We have chosen to eagerly evaluate weights for aggregation of levels and profiles, and disaggregation. This is a balance between eagerly evaluating
-       everything, and setting up complex expressions. Eagerly evaluating everything would require setting up new timevectors after eager evaluation, which
-       is not ideal. While setting up complex expressions gives expressions that are harder to work with and slower to query from.
+
+     See Aggregator for general design notes and rules to follow when using Aggregators.

      Attributes:
          _data_dim (SinglePeriodTimeIndex | None): Data dimension for eager evaluation.
          _scen_dim (FixedFrequencyTimeIndex | None): Scenario dimension for eager evaluation.
          _grouped_components (dict[str, set[str]]): Mapping of aggregated components to their detailed components. agg to detailed

+
      Parent Attributes (see framcore.aggregators.Aggregator):
+
          _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
          _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
          _aggregation_map (dict[str, set[str]] | None): Maps aggregated components to their detailed components. detailed to agg
framcore/attributes/Arrow.py

@@ -19,7 +19,9 @@ class Arrow(Base):
      """
      Arrow class is used by Flows to represent contribution of its commodity to Nodes.

-     coefficient = conversion * (1 / efficiency) * (1 - loss)
+     The Arrow has a direction to determine input or output (is_ingoing), and parameters for the contribution of the Flow to the Node.
+     The main parameters are conversion, efficiency and loss, which together form the coefficient = conversion * (1 / efficiency) * (1 - loss).
+     Arrow has its own implementations of get_scenario_vector and get_data_value to calculate the coefficient shown above.
      """

      def __init__(
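The coefficient formula from the docstring, with illustrative numbers (plain floats; in framcore these are expression attributes, not scalars):

    conversion = 1.0    # unit conversion between Flow and Node commodity (made up)
    efficiency = 0.9    # 90 % efficient conversion (made up)
    loss = 0.02         # 2 % lost before reaching the Node (made up)
    coefficient = conversion * (1 / efficiency) * (1 - loss)
    print(round(coefficient, 4))   # 1.0889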
@@ -102,7 +104,7 @@ class Arrow(Base):
          """Get set of units behind conversion level expr (if any)."""
          if self._conversion is None:
              return set()
-         return self._conversion.get_level_unit_set()
+         return self._conversion.get_level_unit_set(db)

      def get_profile_timeindex_set(
          self,
@@ -125,7 +127,7 @@ class Arrow(Base):
          s.update(self._efficiency.get_profile_timeindex_set(db))
          return s

-     def get_scenario_vector(
+     def get_scenario_vector(  # noqa: C901, PLR0915
          self,
          db: QueryDB | Model,
          scenario_horizon: FixedFrequencyTimeIndex,
@@ -298,7 +300,7 @@ class Arrow(Base):

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Add all loaders stored in attributes to loaders."""
-         from framcore.utils import add_loaders_if  # noqa: PLC0415
+         from framcore.utils import add_loaders_if

          add_loaders_if(loaders, self.get_conversion())
          add_loaders_if(loaders, self.get_loss())
framcore/attributes/ElasticDemand.py

@@ -14,17 +14,7 @@ if TYPE_CHECKING:


  class ElasticDemand(Base):
-     """
-     ElasticDemand class representing the price elasticity of a demand Component.
-
-     Attributes:
-         _price_elasticity: The price elasticity factor of the demand consumer.
-         _min_price: Lower limit for price elasticity.
-         _normal_price: Price for which the demand is inelastic. If it deviates from this price, the consumer will adjust
-             it's consumption according to the _price_elasticity factor.
-         _max_price: Upper limit for price elasticity / reservation price level.
-
-     """
+     """ElasticDemand class representing the price elasticity of a demand Component."""

      def __init__(
          self,
@@ -33,7 +23,17 @@ class ElasticDemand(Base):
          normal_price: Price,
          max_price: Price,
      ) -> None:
-         """Initialize the ElasticDemand class."""
+         """
+         Initialize the ElasticDemand class.
+
+         Args:
+             price_elasticity (Elasticity): The price elasticity factor of the demand consumer.
+             min_price (Price): Lower limit for price elasticity.
+             normal_price (Price): Price for which the demand is inelastic. If the price deviates from this, the consumer adjusts
+                 its consumption according to the price_elasticity factor.
+             max_price (Price): Upper limit for price elasticity / reservation price level.
+
+         """
          self._check_type(price_elasticity, Elasticity)
          self._check_type(min_price, Price)
          self._check_type(normal_price, Price)
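How the four parameters interact under one common constant-elasticity reading. The diff does not show framcore's actual demand function, so treat the formula below as an assumption for illustration only:

    price_elasticity = -0.3                 # consumption falls when price rises
    normal_price = 50.0                     # EUR/MWh; demand is inelastic at this price
    min_price, max_price = 10.0, 200.0      # elasticity only applies inside this band
    price = min(max(80.0, min_price), max_price)   # observed price, clamped to the band
    demand_factor = (price / normal_price) ** price_elasticity
    print(round(demand_factor, 3))          # 0.868 -> about 13 % less consumption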
@@ -82,7 +82,7 @@ class ElasticDemand(Base):

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Add all loaders stored in attributes to loaders."""
-         from framcore.utils import add_loaders_if  # noqa: PLC0415
+         from framcore.utils import add_loaders_if

          add_loaders_if(loaders, self._normal_price)
          add_loaders_if(loaders, self._price_elasticity)
framcore/attributes/ReservoirCurve.py

@@ -9,26 +9,12 @@ if TYPE_CHECKING:


  class ReservoirCurve(Base):
-     """
-     Represents a reservoir curve attribute.
+     """Water level elevation to water volume characteristics for HydroStorage."""

-     Attributes
-     ----------
-     _value : str | None
-         The value representing the reservoir curve.
-
-     """
+     # TODO: Implement and comment, also too generic name

      def __init__(self, value: str | None) -> None:
-         """
-         Initialize a ReservoirCurve instance.
-
-         Parameters
-         ----------
-         value : str | None
-             The value representing the reservoir curve.
-
-         """
+         """Initialize a ReservoirCurve instance."""
          self._check_type(value, (str, type(None)))
          self._value = value

framcore/attributes/SoftBound.py

@@ -7,12 +7,9 @@ if TYPE_CHECKING:


  class SoftBound:
-     """
-     Represents a soft bound attribute.
+     """Represents a soft bound attribute. A penalty is applied if the bound is violated."""

-     This class can be extended to define soft bounds for various parameters.
-
-     """
+     # TODO: Implement and comment

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Add all loaders stored in attributes to loaders."""
framcore/attributes/StartUpCost.py

@@ -11,7 +11,9 @@ if TYPE_CHECKING:


  class StartUpCost(Base):
-     """StartUpCost class representing the startup cost of a Component."""
+     """Represent the costs associated with starting up the operation of a Component."""
+
+     # TODO: Complete description

      def __init__(
          self,
@@ -20,7 +22,16 @@ class StartUpCost(Base):
          start_hours: Hours,
          part_load_efficiency: Efficiency,
      ) -> None:
-         """Initialize the StartUpCost class."""
+         """
+         Initialize the StartUpCost class.
+
+         Args:
+             startup_cost (Cost): _description_
+             min_stable_load (Proportion): _description_
+             start_hours (Hours): _description_
+             part_load_efficiency (Efficiency): _description_
+
+         """
          self._check_type(startup_cost, Cost)
          self._check_type(min_stable_load, Proportion)
          self._check_type(start_hours, Hours)
@@ -46,7 +57,7 @@ class StartUpCost(Base):

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Get all loaders stored in attributes."""
-         from framcore.utils import add_loaders_if  # noqa: PLC0415
+         from framcore.utils import add_loaders_if

          add_loaders_if(loaders, self.get_startupcost())
          add_loaders_if(loaders, self._start_hours)
framcore/attributes/Storage.py

@@ -13,22 +13,34 @@ class Storage(Base):
      """
      Represents all types of storage this system supports.

-     Subclasses are supposed to restrict which attributes that are
-     used, not add more.
+     Subclasses are supposed to restrict which attributes are used, not add more.
      """

      def __init__(
          self,
          capacity: StockVolume,
          volume: StockVolume | None = None,
-         loss: Loss | None = None,
+         loss: Loss | None = None,  # TODO: Should be loss percentage per time.
          reservoir_curve: ReservoirCurve | None = None,
          max_soft_bound: SoftBound | None = None,
          min_soft_bound: SoftBound | None = None,
          target_bound: TargetBound | None = None,
          initial_storage_percentage: float | None = None,
      ) -> None:
-         """Create new storage."""
+         """
+         Create new storage.
+
+         Args:
+             capacity (StockVolume): Storage capacity.
+             volume (StockVolume | None, optional): Storage filling (actual/result). Defaults to None.
+             loss (Loss | None, optional): Loss percentage per time. Defaults to None.
+             reservoir_curve (ReservoirCurve | None, optional): Water level elevation to water volume for HydroStorage. Defaults to None.
+             max_soft_bound (SoftBound | None, optional): Upper soft boundary that is penalized if broken. Defaults to None.
+             min_soft_bound (SoftBound | None, optional): Lower soft boundary that is penalized if broken. Defaults to None.
+             target_bound (TargetBound | None, optional): Target filling; deviations can be penalized. Defaults to None.
+             initial_storage_percentage (float | None, optional): Storage filling percentage at the start of the simulation. Defaults to None.
+
+         """
          super().__init__()

          self._check_type(capacity, StockVolume)
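Illustrative arithmetic for initial_storage_percentage and the per-time-step loss the TODO asks for, with made-up numbers (the real attributes are StockVolume/Loss expression objects, not floats):

    capacity_mwh = 1000.0                 # assumed storage capacity
    initial_storage_percentage = 0.6      # 60 % full at simulation start
    loss_per_step = 0.001                 # assumed 0.1 % loss per time step
    volume = capacity_mwh * initial_storage_percentage    # 600.0 MWh initially
    volume_after_one_step = volume * (1 - loss_per_step)  # 599.4 MWh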
@@ -132,7 +144,7 @@ class Storage(Base):

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Add all loaders stored in attributes to loaders."""
-         from framcore.utils import add_loaders_if  # noqa: PLC0415
+         from framcore.utils import add_loaders_if

          add_loaders_if(loaders, self.get_capacity())
          add_loaders_if(loaders, self.get_loss())
framcore/attributes/TargetBound.py

@@ -7,11 +7,9 @@ if TYPE_CHECKING:


  class TargetBound:
-     """
-     Represents a target bound attribute.
+     """Target boundary attribute. Deviations from the target can be penalized."""

-     This class can be extended to define specific bounds for targets in the energy model.
-     """
+     # TODO: Implement and comment

      def add_loaders(self, loaders: set[Loader]) -> None:
          """Add all loaders stored in attributes to loaders."""
framcore/attributes/__init__.py

@@ -1,5 +1,3 @@
- # ruff: noqa: I001
-
  from framcore.attributes.level_profile_attributes import (
      AvgFlowVolume,
      Coefficient,
@@ -16,7 +14,7 @@ from framcore.attributes.level_profile_attributes import (
      Price,
      Proportion,
      ReservePrice,
-     ShaddowPrice,
+     ShadowPrice,
      StockVolume,
      WaterValue,
  )
@@ -55,7 +53,7 @@ __all__ = [
      "Proportion",
      "ReservePrice",
      "ReservoirCurve",
-     "ShaddowPrice",
+     "ShadowPrice",
      "SoftBound",
      "StartUpCost",
      "StockVolume",
framcore/attributes/hydro/HydroBypass.py

@@ -4,14 +4,21 @@ from framcore.fingerprints import Fingerprint


  class HydroBypass(Base):
-     """Bypass class representing a hydro bypass attribute."""
+     """HydroBypass represents a controlled waterway out of a HydroModule, used to bypass the main release of the HydroModule."""

      def __init__(
          self,
          to_module: str | None,
          capacity: FlowVolume | None = None,
      ) -> None:
-         """Initialize Bypass."""
+         """
+         Initialize HydroBypass.
+
+         Args:
+             to_module (str | None): Name of the HydroModule the water is released to.
+             capacity (FlowVolume | None, optional): Restriction on the volume of water that can pass through the bypass at a given moment. Defaults to None.
+
+         """
          super().__init__()

          self._check_type(to_module, (str, type(None)))
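A construction sketch based only on the signature shown above. The import path is assumed from the file location (a re-export via framcore.attributes is not shown in this diff), and the module name is hypothetical; passing capacity=None is allowed by the default:

    from framcore.attributes.hydro.HydroBypass import HydroBypass  # assumed path

    bypass = HydroBypass(
        to_module="river_section_2",  # hypothetical downstream HydroModule name
        capacity=None,                # no restriction on instantaneous release
    )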