fram-core 0.1.0a1__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. {fram_core-0.1.0a1 → fram_core-0.1.1}/PKG-INFO +6 -5
  2. fram_core-0.1.1/README.md +19 -0
  3. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/Base.py +22 -3
  4. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/Model.py +26 -9
  5. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/__init__.py +2 -1
  6. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/Aggregator.py +30 -11
  7. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/HydroAggregator.py +37 -25
  8. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/NodeAggregator.py +65 -30
  9. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/WindSolarAggregator.py +22 -30
  10. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/Arrow.py +6 -4
  11. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/ElasticDemand.py +13 -13
  12. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/ReservoirCurve.py +3 -17
  13. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/SoftBound.py +2 -5
  14. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/StartUpCost.py +14 -3
  15. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/Storage.py +17 -5
  16. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/TargetBound.py +2 -4
  17. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/__init__.py +2 -4
  18. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/hydro/HydroBypass.py +9 -2
  19. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/hydro/HydroGenerator.py +24 -7
  20. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/hydro/HydroPump.py +32 -10
  21. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/hydro/HydroReservoir.py +4 -4
  22. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/level_profile_attributes.py +250 -53
  23. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/Component.py +27 -3
  24. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/Demand.py +18 -4
  25. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/Flow.py +26 -4
  26. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/HydroModule.py +45 -4
  27. fram_core-0.1.1/framcore/components/Node.py +99 -0
  28. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/Thermal.py +12 -8
  29. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/Transmission.py +17 -2
  30. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/wind_solar.py +25 -10
  31. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/curves/LoadedCurve.py +0 -9
  32. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/Expr.py +137 -36
  33. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/__init__.py +3 -1
  34. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/_get_constant_from_expr.py +14 -20
  35. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/queries.py +121 -84
  36. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/units.py +30 -3
  37. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/fingerprints/fingerprint.py +0 -1
  38. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/juliamodels/JuliaModel.py +13 -3
  39. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/loaders/loaders.py +0 -2
  40. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/metadata/ExprMeta.py +13 -7
  41. fram_core-0.1.1/framcore/metadata/LevelExprMeta.py +32 -0
  42. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/metadata/Member.py +7 -7
  43. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/querydbs/CacheDB.py +1 -1
  44. fram_core-0.1.1/framcore/solvers/Solver.py +63 -0
  45. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/solvers/SolverConfig.py +4 -4
  46. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/AverageYearRange.py +9 -2
  47. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/ConstantTimeIndex.py +7 -2
  48. fram_core-0.1.1/framcore/timeindexes/DailyIndex.py +33 -0
  49. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/FixedFrequencyTimeIndex.py +105 -53
  50. fram_core-0.1.1/framcore/timeindexes/HourlyIndex.py +33 -0
  51. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/IsoCalendarDay.py +5 -3
  52. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/ListTimeIndex.py +103 -23
  53. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/ModelYear.py +8 -2
  54. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/ModelYears.py +11 -2
  55. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/OneYearProfileTimeIndex.py +10 -2
  56. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/ProfileTimeIndex.py +14 -3
  57. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/SinglePeriodTimeIndex.py +1 -1
  58. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/TimeIndex.py +16 -3
  59. fram_core-0.1.1/framcore/timeindexes/WeeklyIndex.py +33 -0
  60. {fram_core-0.1.0a1/framcore/expressions → fram_core-0.1.1/framcore/timeindexes}/_time_vector_operations.py +76 -2
  61. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/ConstantTimeVector.py +12 -16
  62. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/LinearTransformTimeVector.py +20 -3
  63. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/ListTimeVector.py +18 -14
  64. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/LoadedTimeVector.py +1 -8
  65. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/ReferencePeriod.py +13 -3
  66. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/TimeVector.py +26 -12
  67. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/__init__.py +0 -1
  68. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/get_regional_volumes.py +21 -3
  69. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/get_supported_components.py +1 -1
  70. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/global_energy_equivalent.py +22 -5
  71. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/isolate_subnodes.py +12 -3
  72. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/loaders.py +7 -7
  73. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/node_flow_utils.py +4 -4
  74. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/utils/storage_subsystems.py +3 -4
  75. {fram_core-0.1.0a1 → fram_core-0.1.1}/pyproject.toml +3 -2
  76. fram_core-0.1.0a1/README.md +0 -19
  77. fram_core-0.1.0a1/framcore/components/Node.py +0 -76
  78. fram_core-0.1.0a1/framcore/metadata/LevelExprMeta.py +0 -17
  79. fram_core-0.1.0a1/framcore/solvers/Solver.py +0 -48
  80. fram_core-0.1.0a1/framcore/timeindexes/DailyIndex.py +0 -21
  81. fram_core-0.1.0a1/framcore/timeindexes/HourlyIndex.py +0 -21
  82. fram_core-0.1.0a1/framcore/timeindexes/WeeklyIndex.py +0 -21
  83. {fram_core-0.1.0a1 → fram_core-0.1.1}/LICENSE.md +0 -0
  84. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/__init__.py +0 -0
  85. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/_utils.py +0 -0
  86. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/attributes/hydro/__init__.py +0 -0
  87. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/_PowerPlant.py +0 -0
  88. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/components/__init__.py +0 -0
  89. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/curves/Curve.py +0 -0
  90. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/curves/__init__.py +0 -0
  91. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/events/__init__.py +0 -0
  92. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/events/events.py +0 -0
  93. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/expressions/_utils.py +0 -0
  94. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/fingerprints/__init__.py +0 -0
  95. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/juliamodels/__init__.py +0 -0
  96. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/loaders/__init__.py +0 -0
  97. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/metadata/Div.py +0 -0
  98. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/metadata/Meta.py +0 -0
  99. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/metadata/__init__.py +1 -1
  100. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/populators/Populator.py +0 -0
  101. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/populators/__init__.py +0 -0
  102. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/querydbs/ModelDB.py +0 -0
  103. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/querydbs/QueryDB.py +0 -0
  104. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/querydbs/__init__.py +0 -0
  105. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/solvers/__init__.py +0 -0
  106. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timeindexes/__init__.py +0 -0
  107. {fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/timevectors/__init__.py +0 -0
{fram_core-0.1.0a1 → fram_core-0.1.1}/PKG-INFO
@@ -1,18 +1,19 @@
  Metadata-Version: 2.4
  Name: fram-core
- Version: 0.1.0a1
+ Version: 0.1.1
  Summary:
  License: LICENSE.md
  License-File: LICENSE.md
  Author: The Norwegian Water Resources and Energy Directorate
  Author-email: fram@nve.no
- Requires-Python: >=3.11
+ Requires-Python: >=3.11,<4
  Classifier: License :: Other/Proprietary License
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: 3.14
+ Requires-Dist: juliacall (>=0.9.28,<0.10.0)
  Requires-Dist: numexpr (>=2.10.2)
  Requires-Dist: numpy (>=2.2.2)
  Requires-Dist: pandas (>=2.2.3)
@@ -23,11 +24,11 @@ Description-Content-Type: text/markdown

  ## About

- **fram-core** is the main package in **FRAM** modelling framework. The package contains essential features, interfaces and components for running energy market models in FRAM.
+ **fram-core** is the main package in the **FRAM** modelling framework. The package holds the functionality used to describe and manipulate the energy system, handle time series operations, and hold the definition of key interfaces in FRAM.

- For package documentation see [fram-core](https://nve.github.io/fram-core){:target="_blank"}.
+ For package documentation see [fram-core](https://nve.github.io/fram-core).

- For FRAM documentation see [FRAM mainpage](https://nve.github.io/fram){:target="_blank"}.
+ For FRAM documentation see [FRAM mainpage](https://nve.github.io/fram).

  ## Installation

fram_core-0.1.1/README.md (new file)
@@ -0,0 +1,19 @@
+ # fram-core
+
+ ## About
+
+ **fram-core** is the main package in the **FRAM** modelling framework. The package holds the functionality used to describe and manipulate the energy system, handle time series operations, and hold the definition of key interfaces in FRAM.
+
+ For package documentation see [fram-core](https://nve.github.io/fram-core).
+
+ For FRAM documentation see [FRAM mainpage](https://nve.github.io/fram).
+
+ ## Installation
+
+ To add the package to your project use:
+
+     pip install fram-core
+
+ With poetry:
+
+     poetry add fram-core
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/Base.py
@@ -1,5 +1,6 @@
  import contextlib
  import inspect
+ from collections.abc import Callable
  from typing import Any

  from framcore.events import (
@@ -18,9 +19,7 @@ class Base:
      """Core base class to share methods."""

      def _check_type(self, value, class_or_tuple) -> None:  # noqa: ANN001
-         if not isinstance(value, class_or_tuple):
-             message = f"Expected {class_or_tuple} for {self}, got {type(value).__name__}"
-             raise TypeError(message)
+         check_type(value, class_or_tuple, caller=self)

      def _ensure_float(self, value: object) -> float:
          with contextlib.suppress(Exception):
@@ -140,3 +139,23 @@
          except Exception:
              pass
          return type(value).__name__
+
+
+ # Could not place this in utils and use its __init__: modules in utils also import queries, and if queries then imports via the utils __init__, we get circular imports.
+ def check_type(value: object, expected: type | tuple[type], caller: Callable | None = None) -> None:
+     """
+     Check that a value matches the expected type(s).
+
+     Args:
+         value (object): value being checked.
+         expected (type | tuple[type]): Expected types.
+         caller (Callable): The origin of the check.
+
+     Raises:
+         TypeError: When value does not match expected types.
+
+     """
+     if not isinstance(value, expected):
+         message = f"{expected}, got {type(value).__name__}"
+         message = "Expected " + message if caller is None else f"{caller} expected " + message
+         raise TypeError(message)
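A quick sketch of how the new module-level helper behaves once imported (the `scale` function is hypothetical; `check_type` is re-exported from `framcore` in this release, see the `__init__.py` hunk below):

```python
from framcore import check_type  # new export in 0.1.1

def scale(value: float | int, factor: float) -> float:
    # With a caller given, the TypeError reads "<caller> expected ..., got str"
    # instead of the anonymous "Expected ..., got str".
    check_type(value, (float, int), caller=scale)
    return value * factor

scale(2.0, 3.0)     # ok
# scale("2", 3.0)   # would raise TypeError naming `scale` as the caller
```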
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/Model.py
@@ -10,9 +10,12 @@ from framcore.timevectors import TimeVector
  if TYPE_CHECKING:
      from framcore.aggregators import Aggregator

+
  class ModelDict(dict):
      """Dict storing only values of type Component | Expr | TimeVector | Curve."""
-     def __setitem__(self, key, value):
+
+     def __setitem__(self, key: str, value: Component | Expr | TimeVector | Curve) -> None:
+         """Set item with type checking."""
          if not isinstance(key, str):
              message = f"Expected str for key {key}, got {type(key).__name__}"
              raise TypeError(message)
@@ -21,24 +24,39 @@ class ModelDict(dict):
              raise TypeError(message)
          return super().__setitem__(key, value)

+
  class Model(Base):
-     """Definition of the Model class."""
+     """
+     Model stores the representation of the energy system with Components, TimeVectors, Expressions, and the Aggregators applied to the Model.
+
+     - Components describe the main elements in the energy system. Can have additional Attributes.
+     - TimeVector and Curve hold the time series data.
+     - Expressions are for data manipulation of TimeVectors and Curves. Can be queried.
+     - Aggregators handle aggregation and disaggregation of Components. Aggregators are added to Model when used (Aggregator.aggregate(model)),
+       and can be undone in LIFO order with disaggregate().
+
+     Methods:
+         get_data(): Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified.
+         disaggregate(): Undo all aggregations applied to Model in LIFO order.
+         get_content_counts(): Return number of objects stored in model organized into concepts and types.
+
+     """

      def __init__(self) -> None:
-         """Create a new model instance."""
+         """Create a new model instance with empty data and no aggregators."""
          self._data = ModelDict()
          self._aggregators: list[Aggregator] = []

+     def get_data(self) -> ModelDict:
+         """Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified."""
+         return self._data
+
      def disaggregate(self) -> None:
-         """Undo all aggregations in LIFO order."""
+         """Undo all aggregations applied to Model in LIFO order."""
          while self._aggregators:
              aggregator = self._aggregators.pop(-1)  # last item
              aggregator.disaggregate(self)

-     def get_data(self) -> ModelDict:
-         """Get internal data. Modify this with care."""
-         return self._data
-
      def get_content_counts(self) -> dict[str, Counter]:
          """Return number of objects stored in model organized into concepts and types."""
          data_values = self.get_data().values()
@@ -70,4 +88,3 @@
              counts["aggregators"][type(a).__name__] += 1

          return counts
-
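Taken together, the Model/ModelDict changes give the API this shape. A minimal sketch (the `ConstantTimeVector` call mirrors one used elsewhere in this diff; the surrounding usage is an assumption, not documented API):

```python
from framcore import Model
from framcore.timevectors import ConstantTimeVector

model = Model()
data = model.get_data()  # a ModelDict: str keys, Component | Expr | TimeVector | Curve values

# ModelDict.__setitem__ type-checks on insert; any other key or value type raises TypeError.
data["unit_energy_eq"] = ConstantTimeVector(1.0, "kWh/m3", is_max_level=True)

print(model.get_content_counts())  # dict[str, Counter], organized by concept and type
model.disaggregate()               # safe no-op here: no aggregators have been applied
```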
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/__init__.py
@@ -1,9 +1,10 @@
  # framcore/__init__.py
-
+ from framcore.Base import check_type
  from framcore.Base import Base
  from framcore.Model import Model

  __all__ = [
      "Base",
      "Model",
+     "check_type",
  ]
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/Aggregator.py
@@ -18,17 +18,36 @@ class Aggregator(Base, ABC):
      """
      Aggregator interface class.

-     Public API is the aggregate and disaggregate methods.
+     Aggregators handle aggregation and disaggregation of Components.
+     - The general approach for aggregation is to group Components, aggregate Components in the same group to (a) new Component(s),
+       delete the detailed Components, and add the mapping to self._aggregation_map.
+     - The general approach for disaggregation is to restore the detailed Components, move results from aggregated
+       Components to detailed Components, and delete the aggregated Components.
+
+     Concrete Aggregators must implement the abstract methods _aggregate() and _disaggregate().
+
+     Some rules for using Aggregators:
+     1. Disaggregate can only be called after aggregate has been called.
+     2. Not allowed to call aggregate twice. Must call disaggregate before aggregate can be called again.
+     3. Aggregators are stored in Model when aggregate is called. Disaggregate by calling Model.disaggregate(),
+        which will disaggregate all Aggregators in LIFO order.
+     4. At the moment we allow changes to the aggregated Components, which are ignored during disaggregation. TODO: Handle this
+     5. It is recommended to only use the same Aggregator type once on the same components of a Model.
+        If you want to go from one aggregation level to another, it is better to use Model.disaggregate first and then aggregate again.
+        This is to keep the logic simple and avoid complex expressions.
+
+     Some design notes:
+     - Levels and profiles are aggregated separately and then combined into attributes.
+     - We have chosen to eagerly evaluate weights for aggregation (weighted averages) and disaggregation of levels and profiles.
+       This approach supports any form of aggregation by varying the weights, and complex weights can be created by eagerly evaluating
+       expressions and using the result to compute those weights.
+     - This is a balance between eagerly evaluating everything and setting up complex expressions.
+       Eagerly evaluating everything would require setting up new TimeVectors after evaluation, which is not ideal,
+       while setting up complex expressions gives expressions that are harder to work with and slower to query from.
+     - This trade-off simplifies adding logic that recognises if result expressions come from aggregations or disaggregations.
+       When aggregating or disaggregating these, we can go back to the original results rather than setting up complex expressions
+       that, for example, aggregate the disaggregated results.

-     These methods come with the folloing calling rules:
-     1. Not allowed to call aggregate twice. Must call disaggregate before aggregate can be called again.
-     2. Disaggragate can only be called after aggregate has been called.
-
-     Implementations should implement _aggregate and _disaggregate.
-     - The general approach for aggregation is to group components, aggregated components in the same group, delete the detailed components,
-     and add the mapping to self._aggregation_map.
-     - The general approach for disaggregation is to restore the detailed components, move results from aggregated components to detailed components,
-     and delete the aggregated components.
      """

      def __init__(self) -> None:
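The calling rules reduce to a strict pairing of the two methods. A sketch of a legal call sequence (the concrete aggregator is a placeholder; see HydroAggregator and NodeAggregator below for real subclasses):

```python
from framcore import Model

model = Model()
agg = SomeAggregator()  # placeholder: any concrete Aggregator subclass

agg.aggregate(model)   # stores agg on the model (rule 3)
# calling agg.aggregate(model) again here would violate rule 2
# and emit the "Will overwrite existing aggregation." warning

model.disaggregate()   # undoes all applied aggregators in LIFO order
agg.aggregate(model)   # allowed again after disaggregation (rules 1-2)
```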
@@ -42,7 +61,7 @@ class Aggregator(Base, ABC):
          self._check_type(model, Model)

          if self._is_last_call_aggregate is True:
-             message = f"Will overwrite existing aggregation."
+             message = "Will overwrite existing aggregation."
              self.send_warning_event(message)

          self._original_data = deepcopy(model.get_data())
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/HydroAggregator.py
@@ -27,9 +27,10 @@ if TYPE_CHECKING:

  class HydroAggregator(Aggregator):
      """
-     Aggregate hydro modules into two equivalent modules based on the regulation factor, into one regulated and one unregulated module per area.
+     Aggregate HydroModules into two equivalent modules based on the regulation factor, into one regulated and one unregulated module per area.

      Aggregation steps (self._aggregate):
+
      1. Group modules based on their power nodes (self._group_modules_by_power_node)
         - Modules with generators are grouped based on their power nodes. You can choose to only group modules for certain power nodes by giving
           self._power_node_members alone or together with self._metakey_power_node. NB! Watershed that crosses power nodes should not be aggregated in two
@@ -54,30 +55,24 @@ class HydroAggregator(Aggregator):
      3a. Aggregate results if all modules in group have results.
          - Production is the sum of production levels with weighted profiles
          - Reservoir filling is the sum of energy reservoir filling levels (filling*energy_equivalent_downstream/agg_energy_equivalent) with weighted profiles
-         - TODO: Spill, bypass and pumping results are currently ignored in the aggregation.
+         - TODO: Water values, spill, bypass and pumping results are currently ignored in the aggregation.
          - TODO: Add possibility to skip results aggregation.
      3b. Make new hydro module and delete original modules from model data.
      4. Add mapping from detailed to aggregated modules to self._aggregation_map.

+
      Disaggregation steps (self._disaggregate):
+
      1. Restore original modules from self._original_data. NB! Changes to aggregated modules are lost except for results (TODO)
      2. Move production and filling results from aggregated modules to detailed modules, weighted based on production capacity and reservoir capacity.
-        - TODO: Spill and bypass results are currently ignored in the disaggregation.
+        - TODO: Water values, spill, bypass and pumping results are currently ignored in the disaggregation.
      3. Delete aggregated modules.

      NB! Watershed that crosses power nodes should not be aggregated in two different HydroAggregators as the aggregator will remove all connected modules
      from the model after the first aggregation. Reservoirs will also be assigned to the power node which has the highest cumulative energy equivalent, so
      this aggregator does not work well for reservoirs that are upstream of multiple power nodes.

-     Other comments:
-     - It is recommended to only use the same aggregator type once on the same components of a model. If you want to go from one aggregation level to
-     another, it is better to use model.disaggregate first and then aggregate again. This is to keep the logic simple and avoid complex expressions.
-     We have also logic that recognises if result expressions come from aggregations or disaggregations. When aggregating or disaggregating these,
-     we can go back to the original results rather than setting up complex expressions that for examples aggregates the disaggregated results.
-     - Levels and profiles are aggregated separately, and then combined into attributes.
-     - We have chosen to eagerly evaluate weights for aggregation and disaggregation of levels and profiles. This is a balance between eagerly evaluating
-     everything, and setting up complex expressions. Eagerly evaluating everything would require setting up new timevectors after eager evaluation, which
-     is not ideal. While setting up complex expressions gives expressions that are harder to work with and slower to query from.
+     See Aggregator for general design notes and rules to follow when using Aggregators.

      Attributes:
          _metakey_energy_eq_downstream (str): Metadata key for energy equivalent downstream.
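The reservoir-filling relation in step 3a above is plain arithmetic once levels are evaluated; a sketch with hypothetical numbers (the real code resolves units and profiles via get_level_value and weighted expressions):

```python
# Per the docstring: agg_filling = sum_i(filling_i * energy_eq_downstream_i) / agg_energy_equivalent
fillings = [120.0, 80.0]     # hypothetical detailed reservoir fillings, Mm3
eq_downstream = [1.2, 0.9]   # hypothetical energy equivalents downstream, kWh/m3
agg_energy_eq = 1.0          # aggregated module uses a 1.0 kWh/m3 equivalent (see the HydroGenerator hunk below)

agg_filling = sum(f * e for f, e in zip(fillings, eq_downstream)) / agg_energy_eq
# -> 216.0 energy-equivalent units
```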
@@ -93,6 +88,7 @@ class HydroAggregator(Aggregator):
          _release_capacity_profile (TimeVector | None): If given, use this profile for all aggregated modules' release capacities.

      Parent Attributes (see framcore.aggregators.Aggregator):
+
          _is_last_call_aggregate (bool | None): Tracks whether the last operation was an aggregation.
          _original_data (dict[str, Component | TimeVector | Curve | Expr] | None): Original detailed data before aggregation.
          _aggregation_map (dict[str, set[str]] | None): Maps aggregated components to their detailed components. detailed to agg
@@ -128,13 +124,15 @@
          super().__init__()
          self._check_type(metakey_energy_eq_downstream, str)
          self._check_type(ror_threshold, float)
-         assert ror_threshold >= 0, ValueError(f"ror_threshold must be non-negative, got {ror_threshold}.")
          self._check_type(data_dim, SinglePeriodTimeIndex)
          self._check_type(scen_dim, FixedFrequencyTimeIndex)
          self._check_type(metakey_power_node, (str, type(None)))
          self._check_type(power_node_members, (list, type(None)))
-         if metakey_power_node is not None:
-             assert len(power_node_members) > 0, ValueError("If metakey_power_node is given, power_node_members must also be given.")
+         if ror_threshold < 0:
+             msg = f"ror_threshold must be non-negative, got {ror_threshold}."
+             raise ValueError(msg)
+         if metakey_power_node is not None and len(power_node_members) <= 0:
+             raise ValueError("If metakey_power_node is given, power_node_members must also be given.")

          self._metakey_energy_eq_downstream = metakey_energy_eq_downstream
          self._ror_threshold = ror_threshold
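The validation rewrite above is not cosmetic: `assert` statements are stripped when Python runs with `-O`, and the old form constructed a ValueError only to embed it in the AssertionError message, never raising it. A side-by-side sketch with a hypothetical bad input:

```python
ror_threshold = -0.1  # hypothetical bad input

# Before (0.1.0a1): skipped entirely under `python -O`, and the ValueError was
# never raised, only used as the AssertionError message:
#   assert ror_threshold >= 0, ValueError(f"ror_threshold must be non-negative, got {ror_threshold}.")

# After (0.1.1): always enforced, raising the intended exception type.
if ror_threshold < 0:
    msg = f"ror_threshold must be non-negative, got {ror_threshold}."
    raise ValueError(msg)
```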
@@ -183,7 +181,7 @@
          for dd in d:
              if dd not in self._aggregation_map:
                  self._aggregation_map[dd] = set([a])
-             elif not data[dd].get_reservoir():  # if reservoir module already in map, skip as reservoir mapping is main mapping
+             elif not (data[dd].get_reservoir() and data[a].get_reservoir()):  # reservoir modules can only be mapped to one aggregated reservoir module
                  self._aggregation_map[dd].add(a)
          self.send_debug_event(f"add generator modules to _aggregation_map time: {round(time() - t, 3)} seconds")
@@ -450,7 +448,7 @@
                  (
                      mm,
                      get_level_value(
-                         data[mm].get_generator().get_energy_eq().get_level() * data[mm].get_release_capacity().get_level(),
+                         data[mm].get_generator().get_energy_equivalent().get_level() * data[mm].get_release_capacity().get_level(),
                          model,
                          "MW",
                          self._data_dim,
@@ -466,7 +464,7 @@

          return ignore_production_capacity_modules

-     def _aggregate_groups(  # noqa: C901
+     def _aggregate_groups(  # noqa: C901, PLR0915
          self,
          model: Model,
          upstream_topology: dict[str, list[str]],
@@ -487,10 +485,10 @@

          generator = HydroGenerator(
              power_node=data[generator_module_names[0]].get_generator().get_power_node(),
-             energy_eq=Conversion(level=ConstantTimeVector(1.0, "kWh/m3", is_max_level=True)),
+             energy_equivalent=Conversion(level=ConstantTimeVector(1.0, "kWh/m3", is_max_level=True)),
              production=sum_production,
          )
-         energy_eq = generator.get_energy_eq().get_level()
+         energy_eq = generator.get_energy_equivalent().get_level()

          # Release capacity
          release_capacities = [data[m].get_release_capacity() for m in generator_module_names if m not in ignore_capacity]
@@ -501,9 +499,16 @@
              release_capacities = deepcopy(release_capacities)
              for rc in release_capacities:
                  rc.set_profile(self._release_capacity_profile)
-         generator_energy_eqs = [data[m].get_generator().get_energy_eq() for m in generator_module_names if m not in ignore_capacity]
+         generator_energy_eqs = [data[m].get_generator().get_energy_equivalent() for m in generator_module_names if m not in ignore_capacity]
          release_capacity_levels = [rc.get_level() * ee.get_level() for rc, ee in zip(release_capacities, generator_energy_eqs, strict=True)]
-         release_capacity = MaxFlowVolume(level=sum(release_capacity_levels) / energy_eq, profile=self._release_capacity_profile)
+
+         release_capacity_profile = None
+         if any(rc.get_profile() for rc in release_capacities):
+             one_profile_max = Expr(src=ConstantTimeVector(1.0, is_zero_one_profile=False), is_profile=True)
+             weights = [get_level_value(rcl, model, "MW", self._data_dim, self._scen_dim, is_max=True) for rcl in release_capacity_levels]
+             profiles = [rc.get_profile() if rc.get_profile() else one_profile_max for rc in release_capacities]
+             release_capacity_profile = _aggregate_weighted_expressions(profiles, weights)
+         release_capacity = MaxFlowVolume(level=sum(release_capacity_levels) / energy_eq, profile=release_capacity_profile)

          # Inflow level
          upstream_inflow_levels = defaultdict(list)
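The new profile handling above is a capacity-weighted average of member profiles. A numpy sketch of the underlying arithmetic (`_aggregate_weighted_expressions` itself builds Expr objects; the arrays here are hypothetical stand-ins):

```python
import numpy as np

# Hourly profiles of two member modules and their eagerly evaluated MW weights (hypothetical).
profiles = [np.array([1.0, 0.8, 0.6]), np.array([0.5, 1.0, 0.9])]
weights = [300.0, 100.0]  # release-capacity levels evaluated in MW

# Weighted average over members, same shape as each member profile.
agg_profile = sum(w * p for w, p in zip(profiles, weights)) / sum(weights)
# -> array([0.875, 0.85, 0.675])
```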
@@ -513,7 +518,7 @@
              if inflow:
                  upstream_inflow_levels[m].append(inflow.get_level())
          inflow_level_energy = sum(
-             sum(upstream_inflow_levels[m]) * data[m].get_generator().get_energy_eq().get_level()
+             sum(upstream_inflow_levels[m]) * data[m].get_generator().get_energy_equivalent().get_level()
              for m in generator_module_names
              if len(upstream_inflow_levels[m]) > 0
          )
@@ -524,7 +529,7 @@
          inflow_profile_to_energyinflow = defaultdict(list)
          inflow_level_to_value = dict()
          for m in generator_module_names:
-             m_energy_eq = data[m].get_generator().get_energy_eq().get_level()
+             m_energy_eq = data[m].get_generator().get_energy_equivalent().get_level()
              m_energy_eq_value = get_level_value(
                  m_energy_eq,
                  db=model,
@@ -592,6 +597,13 @@
          """Aggregate reservoir fillings if all fillings are not None."""
          sum_filling = None
          if all(filling.get_level() for filling in fillings):
+             if any(not filling.get_profile() for filling in fillings):
+                 missing = [member for member, filling in zip(members, fillings, strict=False) if not filling.get_profile()]
+                 message = (
+                     "Some reservoir fillings in grouped modules have no profile. Cannot aggregate profiles.",
+                     f"Group: '{group_id}', missing profile for {missing}.",
+                 )
+                 raise ValueError(message)
              level, profiles, weights = self._get_level_profiles_weights_fillings(model, fillings, energy_eq_downstreams, energy_eq, weight_unit)
              profile = _aggregate_weighted_expressions(profiles, weights)
              sum_filling = StockVolume(level=level, profile=profile)
@@ -755,7 +767,7 @@
          for det in detailed_keys:
              det_module = data[det]
              release_capacity_level = det_module.get_release_capacity().get_level()
-             generator_energy_eq = det_module.get_generator().get_energy_eq().get_level()
+             generator_energy_eq = det_module.get_generator().get_energy_equivalent().get_level()
              production_weight = get_level_value(
                  release_capacity_level * generator_energy_eq,
                  db=model,
{fram_core-0.1.0a1 → fram_core-0.1.1}/framcore/aggregators/NodeAggregator.py
@@ -7,24 +7,55 @@ from typing import TYPE_CHECKING
  from framcore.aggregators import Aggregator
  from framcore.aggregators._utils import _aggregate_costs
  from framcore.attributes import MaxFlowVolume, Price
- from framcore.components import Component, Demand, Node, Transmission, Flow
+ from framcore.components import Component, Demand, Flow, Node, Transmission
  from framcore.curves import Curve
  from framcore.expressions import Expr
  from framcore.metadata import Member, Meta
  from framcore.timeindexes import FixedFrequencyTimeIndex, SinglePeriodTimeIndex
  from framcore.timevectors import TimeVector
- from framcore.utils import get_component_to_nodes, get_transports_by_commodity, get_supported_components, get_flow_infos, get_node_to_commodity
-
- # TODO: Support internal loss demand
- # TODO: Document method appropriate place (which docstring? module? class? __init__? _aggregate?)
- # TODO: transfer member metadata to internal loss Demand
+ from framcore.utils import get_component_to_nodes, get_flow_infos, get_node_to_commodity, get_supported_components, get_transports_by_commodity

  if TYPE_CHECKING:
      from framcore import Model


  class NodeAggregator(Aggregator):
-     """Aggregate groups of nodes for a commodity. Subclass of Aggregator."""
+     """
+     Aggregate groups of Nodes for a commodity. Subclass of Aggregator.
+
+     Aggregation steps (self._aggregate):
+
+     1. Map all Components to their Nodes of the correct commodity if they are referencing any. This is important to redirect all references to the
+        new Nodes after aggregation.
+     2. Create mapping of what members the new Nodes will be aggregated from. This step also does a lot of error handling and checks the validity of the
+        metadata and groupings. Raises error if:
+        - Nodes do not have any metadata for the meta key.
+        - Nodes have the wrong metadata object type for the meta key (must be Member).
+        - Exogenous Nodes are grouped together for aggregation with endogenous Nodes.
+     3. Initialize new Node objects and set prices and exogenous status. Prices are calculated as a weighted average of all the member Node prices.
+     4. Old Nodes are deleted from the Model data, after which the aggregated Node is added, and references in the rest of the system are updated to point to
+        the new Node.
+     5. Handling of transports: All Components which transport the same commodity as the aggregated Nodes are analysed. If the two Nodes they connect are now
+        the same aggregated Node, the transport is 'internal', meaning it now operates within a Node. If the transport Component is lossy, it is replaced
+        by a Demand Component representing the commodity consumption caused by the loss. All internal transports are afterwards deleted.
+
+
+     Disaggregation steps (self._disaggregate):
+
+     1. Collect the set of Node group keys which have been either removed from the Model data or changed to reference something other than Nodes.
+     2. Validate that IDs of Nodes to be restored have not been used to reference something else in the meantime.
+     3. Delete the aggregated Nodes and restore the old Nodes to the Model. Also copy shadow price results from the aggregated Nodes to the disaggregated.
+        NB! This will overwrite the possible previous shadow prices of the original disaggregated Nodes.
+     4. Restore the references in all objects to the disaggregated Nodes. A mapping created during aggregation is used for this.
+     5. Validate that no restorable internal transports have a name conflict with existing objects in the Model.
+        NB! An internal transport is not restorable if one or both of its referenced Nodes have been removed from the Model or are now referencing another
+        object. See step 1.
+     6. Restore all the restorable internal transports from the original data.
+     7. Delete the aggregation-created Demand objects representing internal transports.
+
+     See Aggregator for general design notes and rules to follow when using Aggregators.
+
+     """

      def __init__(
          self,
@@ -125,16 +156,16 @@
          out: set[str] = set()
          nodes_and_flows = get_supported_components(components, supported_types=(Node, Flow), forbidden_types=tuple())
          node_to_commodity = get_node_to_commodity(nodes_and_flows)
-         for flow in nodes_and_flows.values(): 
+         for flow in nodes_and_flows.values():
              if not isinstance(flow, Flow):
                  continue
              flow_infos = get_flow_infos(flow, node_to_commodity)
-             if not len(flow_infos) == 1:
+             if len(flow_infos) != 1:
                  continue
              flow_info = flow_infos[0]
              if flow_info.category != "direct_out":
                  continue
-             if flow_info.commodity_out != self._commodity: 
+             if flow_info.commodity_out != self._commodity:
                  continue
              demand = flow
              for key in demand.get_meta_keys():
@@ -142,7 +173,6 @@
                  if isinstance(meta, Member):
                      out.add(key)
          return out
-

      def _add_internal_transport_demands(
          self,
@@ -163,7 +193,9 @@
          for key in self._internal_transports:
              transport = components[key]
              from_node, to_node = transports[key]
-             assert from_node == to_node, f"{from_node}, {to_node}"
+             assert from_node == to_node, (
+                 f"Transport {key} was added to internal transports when it should not have been. Source node {from_node} and destination node {to_node} are not the same."
+             )
              node = from_node

              transport: Transmission
@@ -192,13 +224,15 @@
                  ),
              )

-             for meta_key in demand_member_meta_keys:
+             for meta_key in demand_member_meta_keys:  # transfer member metadata to internal loss Demand
                  internal_losses_demand.add_meta(meta_key, Member("InternalTransportLossFromNodeAggregator"))

              demand_key = key + "_InternalTransportLossDemand_" + node

              self._internal_transport_demands.add(demand_key)
-             assert demand_key not in data, f"{demand_key}"
+             if demand_key in data:
+                 msg = f"Could not use key {demand_key} for internal transport demand because it already exists in the Model."
+                 raise KeyError(msg)
              data[demand_key] = internal_losses_demand

      def _delete_internal_transports(
@@ -227,6 +261,13 @@
          data = model.get_data()
          weights = [1.0 / len(member_node_names)] * len(member_node_names)
          prices = [data[key].get_price() for key in member_node_names]
+
+         exogenous = [data[key].is_exogenous() for key in member_node_names]
+         if all(exogenous):
+             group_node.set_exogenous()
+         elif any(exogenous):
+             message = f"Only some member Nodes of group {group_node} are exogenous. This is ambiguous. Either all or none must be exogenous."
+             raise ValueError(message)
          if all(prices):
              level, profile, intercept = _aggregate_costs(
                  model=model,
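The all/any pattern added above cleanly separates three cases: all members exogenous, a mix, or none. A small self-contained illustration of the same logic (names and flags are hypothetical):

```python
def resolve_exogenous(flags: list[bool]) -> bool:
    if all(flags):
        return True  # all members exogenous -> aggregated node is exogenous
    if any(flags):
        # a mix is ambiguous: refuse rather than guess
        raise ValueError("Either all or none of the member nodes must be exogenous.")
    return False  # none exogenous -> endogenous aggregate

resolve_exogenous([True, True])    # -> True
resolve_exogenous([False, False])  # -> False
# resolve_exogenous([True, False]) # would raise ValueError
```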
@@ -241,7 +282,7 @@
              group_node.get_price().set_intercept(intercept)
          elif any(prices):
              missing = [key for key in member_node_names if data[key].get_price() is None]
-             self.send_warning_event(f"Only some member nodes of group {group_node} have a Price, skip aggregate prices. Missing: {missing}")
+             self.send_warning_event(f"Only some member Nodes of group {group_node} have a Price, skip aggregate prices. Missing: {missing}")

      def _replace_node(
          self,
@@ -316,17 +357,11 @@

          for group_name in exogenous_groups:  # Check exogenous groups.
              node_keys = grouped_nodes[group_name]
-             if len(node_keys) != 1:  # allow unchanged or renamed exogenous Nodes.
-                 self._errors.add(
-                     f"Group {group_name} contains an exogenous Node and must therefore contain only one Node."
-                     " Exogenous Nodes cannot be grouped together with other Nodes.",
-                 )
-             # For if we want to allow pure exogenous groups.
-             # for node_key in node_keys:
-             #     node: Node = components[node_key]
-             #     if not node.is_exogenous():
-             #         self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is not allowed.")
-             #         break
+             if len(node_keys) > 1:  # allow unchanged or renamed exogenous Nodes.
+                 # We allow pure exogenous groups.
+                 exogenous = [components[node_key].is_exogenous() for node_key in node_keys]
+                 if (not all(exogenous)) and any(exogenous):
+                     self._errors.add(f"Group {group_name} contains both exogenous and endogenous Nodes. This is ambiguous and therefore not allowed.")

          # remove single groups with unchanged names and check for duplicated names
          for group_name, node_keys in grouped_nodes.items():
@@ -359,7 +394,7 @@
                  flipped[member].add(group)
          for k, v in flipped.items():
              if len(v) > 1:
-                 self._errors.add(f"Node {k} belong to more than one group {v}")
+                 self._errors.add(f"Node {k} belongs to more than one group {v}")

      def _disaggregate(
          self,
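For context, the duplicate-membership check above inverts the group-to-members mapping and flags nodes claimed by several groups. A standalone sketch with hypothetical names:

```python
from collections import defaultdict

grouped_nodes = {"NO1_NO2": {"NO1", "NO2"}, "SOUTH": {"NO2", "NO5"}}

flipped: defaultdict[str, set[str]] = defaultdict(set)
for group, members in grouped_nodes.items():
    for member in members:
        flipped[member].add(group)

errors = [f"Node {k} belongs to more than one group {v}" for k, v in flipped.items() if len(v) > 1]
# -> one error for NO2, which appears in both groups (set order may vary)
```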
@@ -393,7 +428,7 @@

              group_node = new_data[group_name]

-             if not isinstance(group_node, Node):
+             if not (isinstance(group_node, Node) and group_node.get_commodity() == self._commodity):
                  deleted_group_names.add(group_name)

          return deleted_group_names
@@ -409,7 +444,7 @@
          for key in member_node_names:
              if key in new_data:
                  obj = new_data[key]
-                 if not isinstance(obj, Node) and obj.get_commodity() == self._commodity:
+                 if not (isinstance(obj, Node) and obj.get_commodity() == self._commodity):
                      typ = type(obj).__name__
                      message = f"Restoring node {key} from group node {group_name} failed because model already stores object of {typ} with that name."
                      self._errors.add(message)
@@ -463,7 +498,7 @@
              if key in new_data:
                  obj = new_data[key]
                  typ = type(obj).__name__
-                 message = f"Restoring deleted transport {key} from group node {group_name} failed becausemodel already stores object of {typ} with that name."
+                 message = f"Restoring deleted transport {key} from group node {group_name} failed because model already stores object of {typ} with that name."
                  self._errors.add(message)

          self._report_errors(self._errors)