fram-core 0.0.0__tar.gz → 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fram_core-0.1.0/LICENSE.md +8 -0
- fram_core-0.1.0/PKG-INFO +42 -0
- fram_core-0.1.0/README.md +19 -0
- fram_core-0.1.0/framcore/Base.py +161 -0
- fram_core-0.1.0/framcore/Model.py +90 -0
- fram_core-0.1.0/framcore/__init__.py +10 -0
- fram_core-0.1.0/framcore/aggregators/Aggregator.py +172 -0
- fram_core-0.1.0/framcore/aggregators/HydroAggregator.py +849 -0
- fram_core-0.1.0/framcore/aggregators/NodeAggregator.py +530 -0
- fram_core-0.1.0/framcore/aggregators/WindSolarAggregator.py +315 -0
- fram_core-0.1.0/framcore/aggregators/__init__.py +13 -0
- fram_core-0.1.0/framcore/aggregators/_utils.py +184 -0
- fram_core-0.1.0/framcore/attributes/Arrow.py +307 -0
- fram_core-0.1.0/framcore/attributes/ElasticDemand.py +90 -0
- fram_core-0.1.0/framcore/attributes/ReservoirCurve.py +23 -0
- fram_core-0.1.0/framcore/attributes/SoftBound.py +16 -0
- fram_core-0.1.0/framcore/attributes/StartUpCost.py +65 -0
- fram_core-0.1.0/framcore/attributes/Storage.py +158 -0
- fram_core-0.1.0/framcore/attributes/TargetBound.py +16 -0
- fram_core-0.1.0/framcore/attributes/__init__.py +63 -0
- fram_core-0.1.0/framcore/attributes/hydro/HydroBypass.py +49 -0
- fram_core-0.1.0/framcore/attributes/hydro/HydroGenerator.py +100 -0
- fram_core-0.1.0/framcore/attributes/hydro/HydroPump.py +178 -0
- fram_core-0.1.0/framcore/attributes/hydro/HydroReservoir.py +27 -0
- fram_core-0.1.0/framcore/attributes/hydro/__init__.py +13 -0
- fram_core-0.1.0/framcore/attributes/level_profile_attributes.py +911 -0
- fram_core-0.1.0/framcore/components/Component.py +136 -0
- fram_core-0.1.0/framcore/components/Demand.py +144 -0
- fram_core-0.1.0/framcore/components/Flow.py +189 -0
- fram_core-0.1.0/framcore/components/HydroModule.py +371 -0
- fram_core-0.1.0/framcore/components/Node.py +99 -0
- fram_core-0.1.0/framcore/components/Thermal.py +208 -0
- fram_core-0.1.0/framcore/components/Transmission.py +198 -0
- fram_core-0.1.0/framcore/components/_PowerPlant.py +81 -0
- fram_core-0.1.0/framcore/components/__init__.py +22 -0
- fram_core-0.1.0/framcore/components/wind_solar.py +82 -0
- fram_core-0.1.0/framcore/curves/Curve.py +44 -0
- fram_core-0.1.0/framcore/curves/LoadedCurve.py +146 -0
- fram_core-0.1.0/framcore/curves/__init__.py +9 -0
- fram_core-0.1.0/framcore/events/__init__.py +21 -0
- fram_core-0.1.0/framcore/events/events.py +51 -0
- fram_core-0.1.0/framcore/expressions/Expr.py +591 -0
- fram_core-0.1.0/framcore/expressions/__init__.py +30 -0
- fram_core-0.1.0/framcore/expressions/_get_constant_from_expr.py +477 -0
- fram_core-0.1.0/framcore/expressions/_utils.py +73 -0
- fram_core-0.1.0/framcore/expressions/queries.py +416 -0
- fram_core-0.1.0/framcore/expressions/units.py +227 -0
- fram_core-0.1.0/framcore/fingerprints/__init__.py +11 -0
- fram_core-0.1.0/framcore/fingerprints/fingerprint.py +292 -0
- fram_core-0.1.0/framcore/juliamodels/JuliaModel.py +171 -0
- fram_core-0.1.0/framcore/juliamodels/__init__.py +7 -0
- fram_core-0.1.0/framcore/loaders/__init__.py +10 -0
- fram_core-0.1.0/framcore/loaders/loaders.py +405 -0
- fram_core-0.1.0/framcore/metadata/Div.py +73 -0
- fram_core-0.1.0/framcore/metadata/ExprMeta.py +56 -0
- fram_core-0.1.0/framcore/metadata/LevelExprMeta.py +32 -0
- fram_core-0.1.0/framcore/metadata/Member.py +55 -0
- fram_core-0.1.0/framcore/metadata/Meta.py +44 -0
- fram_core-0.1.0/framcore/metadata/__init__.py +15 -0
- fram_core-0.1.0/framcore/populators/Populator.py +108 -0
- fram_core-0.1.0/framcore/populators/__init__.py +7 -0
- fram_core-0.1.0/framcore/querydbs/CacheDB.py +50 -0
- fram_core-0.1.0/framcore/querydbs/ModelDB.py +34 -0
- fram_core-0.1.0/framcore/querydbs/QueryDB.py +45 -0
- fram_core-0.1.0/framcore/querydbs/__init__.py +11 -0
- fram_core-0.1.0/framcore/solvers/Solver.py +63 -0
- fram_core-0.1.0/framcore/solvers/SolverConfig.py +272 -0
- fram_core-0.1.0/framcore/solvers/__init__.py +9 -0
- fram_core-0.1.0/framcore/timeindexes/AverageYearRange.py +27 -0
- fram_core-0.1.0/framcore/timeindexes/ConstantTimeIndex.py +22 -0
- fram_core-0.1.0/framcore/timeindexes/DailyIndex.py +33 -0
- fram_core-0.1.0/framcore/timeindexes/FixedFrequencyTimeIndex.py +814 -0
- fram_core-0.1.0/framcore/timeindexes/HourlyIndex.py +33 -0
- fram_core-0.1.0/framcore/timeindexes/IsoCalendarDay.py +33 -0
- fram_core-0.1.0/framcore/timeindexes/ListTimeIndex.py +277 -0
- fram_core-0.1.0/framcore/timeindexes/ModelYear.py +23 -0
- fram_core-0.1.0/framcore/timeindexes/ModelYears.py +27 -0
- fram_core-0.1.0/framcore/timeindexes/OneYearProfileTimeIndex.py +29 -0
- fram_core-0.1.0/framcore/timeindexes/ProfileTimeIndex.py +43 -0
- fram_core-0.1.0/framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
- fram_core-0.1.0/framcore/timeindexes/TimeIndex.py +103 -0
- fram_core-0.1.0/framcore/timeindexes/WeeklyIndex.py +33 -0
- fram_core-0.1.0/framcore/timeindexes/__init__.py +36 -0
- fram_core-0.1.0/framcore/timeindexes/_time_vector_operations.py +689 -0
- fram_core-0.1.0/framcore/timevectors/ConstantTimeVector.py +131 -0
- fram_core-0.1.0/framcore/timevectors/LinearTransformTimeVector.py +131 -0
- fram_core-0.1.0/framcore/timevectors/ListTimeVector.py +127 -0
- fram_core-0.1.0/framcore/timevectors/LoadedTimeVector.py +97 -0
- fram_core-0.1.0/framcore/timevectors/ReferencePeriod.py +51 -0
- fram_core-0.1.0/framcore/timevectors/TimeVector.py +108 -0
- fram_core-0.1.0/framcore/timevectors/__init__.py +17 -0
- fram_core-0.1.0/framcore/utils/__init__.py +35 -0
- fram_core-0.1.0/framcore/utils/get_regional_volumes.py +387 -0
- fram_core-0.1.0/framcore/utils/get_supported_components.py +60 -0
- fram_core-0.1.0/framcore/utils/global_energy_equivalent.py +63 -0
- fram_core-0.1.0/framcore/utils/isolate_subnodes.py +172 -0
- fram_core-0.1.0/framcore/utils/loaders.py +97 -0
- fram_core-0.1.0/framcore/utils/node_flow_utils.py +236 -0
- fram_core-0.1.0/framcore/utils/storage_subsystems.py +106 -0
- fram_core-0.1.0/pyproject.toml +47 -0
- fram_core-0.0.0/PKG-INFO +0 -5
- fram_core-0.0.0/fram_core.egg-info/PKG-INFO +0 -5
- fram_core-0.0.0/fram_core.egg-info/SOURCES.txt +0 -5
- fram_core-0.0.0/fram_core.egg-info/dependency_links.txt +0 -1
- fram_core-0.0.0/fram_core.egg-info/top_level.txt +0 -1
- fram_core-0.0.0/pyproject.toml +0 -8
- fram_core-0.0.0/setup.cfg +0 -4
fram_core-0.1.0/LICENSE.md
ADDED
@@ -0,0 +1,8 @@
+The MIT License (MIT)
+Copyright © 2025 NVE
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
fram_core-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,42 @@
+Metadata-Version: 2.4
+Name: fram-core
+Version: 0.1.0
+Summary:
+License: LICENSE.md
+License-File: LICENSE.md
+Author: The Norwegian Water Resources and Energy Directorate
+Author-email: fram@nve.no
+Requires-Python: >=3.11,<4
+Classifier: License :: Other/Proprietary License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Requires-Dist: juliacall (>=0.9.28,<0.10.0)
+Requires-Dist: numexpr (>=2.10.2)
+Requires-Dist: numpy (>=2.2.2)
+Requires-Dist: pandas (>=2.2.3)
+Requires-Dist: sympy (>=1.13.3)
+Description-Content-Type: text/markdown
+
+# fram-core
+
+## About
+
+**fram-core** is the main package in the **FRAM** modelling framework. The package holds the functionality used to describe and manipulate the energy system, handle time series operations, and hold the definition of key interfaces in FRAM.
+
+For package documentation see [fram-core](https://nve.github.io/fram-core).
+
+For FRAM documentation see [FRAM mainpage](https://nve.github.io/fram).
+
+## Installation
+
+To add the package to your project use:
+
+pip install fram-core
+
+With poetry:
+
+poetry add fram-core
+
fram_core-0.1.0/README.md
ADDED
@@ -0,0 +1,19 @@
+# fram-core
+
+## About
+
+**fram-core** is the main package in the **FRAM** modelling framework. The package holds the functionality used to describe and manipulate the energy system, handle time series operations, and hold the definition of key interfaces in FRAM.
+
+For package documentation see [fram-core](https://nve.github.io/fram-core).
+
+For FRAM documentation see [FRAM mainpage](https://nve.github.io/fram).
+
+## Installation
+
+To add the package to your project use:
+
+pip install fram-core
+
+With poetry:
+
+poetry add fram-core
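After installation the package is imported as framcore. The following is a minimal smoke-test sketch based only on the modules added in this diff; no constructor signatures beyond Model() are assumed, and the printed output is indicative only.

    from framcore.Model import Model

    model = Model()
    # A fresh model holds no data and no aggregators, so all counters are empty.
    print(model.get_content_counts())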
fram_core-0.1.0/framcore/Base.py
ADDED
@@ -0,0 +1,161 @@
+import contextlib
+import inspect
+from collections.abc import Callable
+from typing import Any
+
+from framcore.events import (
+    send_debug_event,
+    send_error_event,
+    send_event,
+    send_info_event,
+    send_warning_event,
+)
+from framcore.fingerprints import Fingerprint
+
+# TODO: Consider context dict | None in event-methods to support more info (e.g. process id)
+
+
+class Base:
+    """Core base class to share methods."""
+
+    def _check_type(self, value, class_or_tuple) -> None:  # noqa: ANN001
+        check_type(value, class_or_tuple, caller=self)
+
+    def _ensure_float(self, value: object) -> float:
+        with contextlib.suppress(Exception):
+            return float(value)
+        message = f"Unable to convert {value} to float."
+        raise ValueError(message)
+
+    def _check_int(self, value: int, lower_bound: int | None, upper_bound: int | None) -> None:
+        if lower_bound is not None and value < lower_bound:
+            message = f"Value {value} is less than lower_bound {lower_bound}."
+            raise ValueError(message)
+        if upper_bound is not None and value > upper_bound:
+            message = f"Value {value} is greater than upper_bound {upper_bound}."
+            raise ValueError(message)
+
+    def _check_float(self, value: float, lower_bound: float | None, upper_bound: float | None) -> None:
+        if lower_bound is not None and value < lower_bound:
+            message = f"Value {value} is less than lower_bound {lower_bound}."
+            raise ValueError(message)
+        if upper_bound is not None and value > upper_bound:
+            message = f"Value {value} is greater than upper_bound {upper_bound}."
+            raise ValueError(message)
+
+    def _report_errors(self, errors: set[str]) -> None:
+        if errors:
+            n = len(errors)
+            s = "s" if n > 1 else ""
+            error_str = "\n".join(errors)
+            message = f"Found {n} error{s}:\n{error_str}"
+            raise RuntimeError(message)
+
+    def send_event(self, event_type: str, **kwargs: dict[str, Any]) -> None:
+        """All events in core should use this."""
+        send_event(sender=self, event_type=event_type, **kwargs)
+
+    def send_warning_event(self, message: str) -> None:
+        """Use this to send warning event."""
+        send_warning_event(sender=self, message=message)
+
+    def send_error_event(self, message: str, exception_type_name: str, traceback: str) -> None:
+        """Use this to send error event."""
+        send_error_event(sender=self, message=message, exception_type_name=exception_type_name, traceback=traceback)
+
+    def send_info_event(self, message: str) -> None:
+        """Use this to send info event."""
+        send_info_event(sender=self, message=message)
+
+    def send_debug_event(self, message: str) -> None:
+        """Use this to send debug event."""
+        send_debug_event(sender=self, message=message)
+
+    def get_fingerprint_default(
+        self,
+        refs: dict[str, str] | None = None,
+        excludes: set[str] | None = None,
+    ) -> Fingerprint:
+        """
+        Generate a Fingerprint for the object, optionally including references and excluding specified properties.
+
+        Parameters
+        ----------
+        refs : dict[str, str] | None, optional
+            Dictionary mapping property names to reference keys to include as references in the fingerprint.
+        excludes : set[str] | None, optional
+            Set of property names to exclude from the fingerprint.
+
+        Returns
+        -------
+        Fingerprint
+            The generated fingerprint for the object.
+
+        """
+        fingerprint = Fingerprint(source=self)
+
+        if refs:
+            for ref_prop, ref_key in refs.items():
+                if ref_key is not None:
+                    fingerprint.add_ref(ref_prop, ref_key)
+
+        default_excludes = {"_parent"}
+
+        for prop_name, prop_value in self.__dict__.items():
+            if callable(prop_value) or (refs and prop_name in refs) or (excludes and prop_name in excludes) or prop_name in default_excludes:
+                continue
+
+            if prop_value is None:
+                continue
+
+            fingerprint.add(prop_name, prop_value)
+
+        return fingerprint
+
+    def _get_property_name(self, property_reference) -> str | None:  # noqa: ANN001
+        for name, value in inspect.getmembers(self):
+            if value is property_reference:
+                return name
+        return None
+
+    def __repr__(self) -> str:
+        """Display type and non-None fields."""
+        type_name = type(self).__name__
+        value_fields = []
+        for k, v in vars(self).items():
+            display_value = self._get_attr_str(k, v)
+            if display_value is not None:
+                value_fields.append(f"{k}={display_value}")
+        value_fields = ", ".join(value_fields)
+        return f"{type_name}({value_fields})"
+
+    def _get_attr_str(self, key: str, value: object) -> str | None:
+        if value is None:
+            return None
+        if isinstance(value, int | float | str | bool):
+            return value
+        try:
+            return value._get_attr_str()  # noqa: SLF001
+        except Exception:
+            pass
+        return type(value).__name__
+
+
+# Could not place this in utils and use its __init__: modules in utils also import queries, and if queries then imports via the utils __init__ we get circular imports.
+def check_type(value: object, expected: type | tuple[type], caller: Callable | None = None) -> None:
+    """
+    Check a value matches expected type(s).
+
+    Args:
+        value (object): value being checked.
+        expected (type | tuple[type]): Expected types.
+        caller (Callable): The origin of the check.
+
+    Raises:
+        TypeError: When value does not match expected types.
+
+    """
+    if not isinstance(value, expected):
+        message = f"{expected}, got {type(value).__name__}"
+        message = "Expected " + message if caller is None else f"{caller} expected " + message
+        raise TypeError(message)
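The module-level check_type helper above backs the _check_type method used throughout the package. A short illustrative sketch; the example values and the "MyLoader" caller are hypothetical, but the calls use only the signature shown in the diff.

    from framcore.Base import check_type

    check_type(3.14, float)                               # passes silently
    check_type("week", (int, float), caller="MyLoader")   # raises TypeError naming the caller and the expected types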
fram_core-0.1.0/framcore/Model.py
ADDED
@@ -0,0 +1,90 @@
+from collections import Counter
+from typing import TYPE_CHECKING
+
+from framcore import Base
+from framcore.components import Component
+from framcore.curves import Curve
+from framcore.expressions import Expr
+from framcore.timevectors import TimeVector
+
+if TYPE_CHECKING:
+    from framcore.aggregators import Aggregator
+
+
+class ModelDict(dict):
+    """Dict storing only values of type Component | Expr | TimeVector | Curve."""
+
+    def __setitem__(self, key: str, value: Component | Expr | TimeVector | Curve) -> None:
+        """Set item with type checking."""
+        if not isinstance(key, str):
+            message = f"Expected str for key {key}, got {type(key).__name__}"
+            raise TypeError(message)
+        if not isinstance(value, Component | Expr | TimeVector | Curve):
+            message = f"Expected Component | Expr | TimeVector | Curve for key {key}, got {type(value).__name__}"
+            raise TypeError(message)
+        return super().__setitem__(key, value)
+
+
+class Model(Base):
+    """
+    Model stores the representation of the energy system with Components, TimeVectors, Expressions, and the Aggregators applied to the Model.
+
+    - Components describe the main elements in the energy system. Can have additional Attributes.
+    - TimeVector and Curve hold the time series data.
+    - Expressions for data manipulation of TimeVectors and Curves. Can be queried.
+    - Aggregators handle aggregation and disaggregation of Components. Aggregators are added to the Model when used (Aggregator.aggregate(model)),
+      and can be undone in LIFO order with disaggregate().
+
+    Methods:
+        get_data(): Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified.
+        disaggregate(): Undo all aggregations applied to Model in LIFO order.
+        get_content_counts(): Return number of objects stored in model organized into concepts and types.
+
+    """
+
+    def __init__(self) -> None:
+        """Create a new model instance with empty data and no aggregators."""
+        self._data = ModelDict()
+        self._aggregators: list[Aggregator] = []
+
+    def get_data(self) -> ModelDict:
+        """Get dict of Components, Expressions, TimeVectors and Curves stored in the Model. Can be modified."""
+        return self._data
+
+    def disaggregate(self) -> None:
+        """Undo all aggregations applied to Model in LIFO order."""
+        while self._aggregators:
+            aggregator = self._aggregators.pop(-1)  # last item
+            aggregator.disaggregate(self)
+
+    def get_content_counts(self) -> dict[str, Counter]:
+        """Return number of objects stored in model organized into concepts and types."""
+        data_values = self.get_data().values()
+        counts = {
+            "components": Counter(),
+            "timevectors": Counter(),
+            "curves": Counter(),
+            "expressions": Counter(),
+        }
+        for obj in data_values:
+            if isinstance(obj, Component):
+                key = "components"
+            elif isinstance(obj, TimeVector):
+                key = "timevectors"
+            elif isinstance(obj, Curve):
+                key = "curves"
+            elif isinstance(obj, Expr):
+                key = "expressions"
+            else:
+                key = "unexpected"
+            if key not in counts:
+                counts[key] = Counter()
+            counts[key][type(obj).__name__] += 1
+
+        assert len(data_values) == sum(c.total() for c in counts.values())
+
+        counts["aggregators"] = Counter()
+        for a in self._aggregators:
+            counts["aggregators"][type(a).__name__] += 1
+
+        return counts
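ModelDict is what makes Model.get_data() safe to modify in place: every assignment is type checked before it lands in the model. A short sketch of that behaviour; the key name is made up for illustration, and only classes shown in this diff are used.

    from framcore.Model import Model

    model = Model()
    data = model.get_data()

    try:
        data["my_timevector"] = 42.0  # not a Component | Expr | TimeVector | Curve
    except TypeError as err:
        print(err)  # reports the offending key and the rejected type (float)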
fram_core-0.1.0/framcore/aggregators/Aggregator.py
ADDED
@@ -0,0 +1,172 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from collections.abc import Iterable
+from copy import deepcopy
+
+from framcore.Base import Base
+from framcore.components import Component
+from framcore.curves import Curve
+from framcore.expressions import Expr
+from framcore.metadata import Member
+from framcore.Model import Model
+from framcore.timevectors import TimeVector
+
+
+class Aggregator(Base, ABC):
+    """
+    Aggregator interface class.
+
+    Aggregators handle aggregation and disaggregation of Components.
+    - The general approach for aggregation is to group Components, aggregate Components in the same group to (a) new Component(s),
+      delete the detailed Components, and add the mapping to self._aggregation_map.
+    - The general approach for disaggregation is to restore the detailed Components, move results from aggregated
+      Components to detailed Components, and delete the aggregated Components.
+
+    Concrete Aggregators must implement the abstract methods _aggregate() and _disaggregate().
+
+    Some rules for using Aggregators:
+    1. Disaggregate can only be called after aggregate has been called.
+    2. Not allowed to call aggregate twice. Must call disaggregate before aggregate can be called again.
+    3. Aggregators are stored in Model when aggregate is called. Disaggregate by calling Model.disaggregate(),
+       which will disaggregate all Aggregators in LIFO order.
+    4. At the moment we allow changes to the aggregated Components, which are ignored during disaggregation. TODO: Handle this
+    5. It is recommended to only use the same Aggregator type once on the same components of a Model.
+       If you want to go from one aggregation level to another, it is better to use Model.disaggregate first and then aggregate again.
+       This is to keep the logic simple and avoid complex expressions.
+
+    Some design notes:
+    - Levels and profiles are aggregated separately and then combined into attributes.
+    - We have chosen to eagerly evaluate weights for aggregation (weighted averages) and disaggregation of levels and profiles.
+      This approach supports any form of aggregation by varying the weights, and complex weights can be created by eagerly evaluating
+      expressions and using the result to compute those weights.
+    - This is a balance between eagerly evaluating everything and setting up complex expressions.
+      Eagerly evaluating everything would require setting up new TimeVectors after evaluation, which is not ideal.
+      While setting up complex expressions gives expressions that are harder to work with and slower to query from.
+    - This trade-off simplifies adding logic that recognises if result expressions come from aggregations or disaggregations.
+      When aggregating or disaggregating these, we can go back to the original results rather than setting up complex expressions
+      that for example aggregate the disaggregated results.
+
+    """
+
+    def __init__(self) -> None:
+        """Initialize the Aggregator with default state for aggregation tracking and data storage."""
+        self._is_last_call_aggregate = None
+        self._original_data: dict[str, Component | TimeVector | Curve | Expr] | None = None
+        self._aggregation_map: dict[str, set[str]] = None
+
+    def aggregate(self, model: Model) -> None:
+        """Aggregate model. Keep original data in case disaggregate is called."""
+        self._check_type(model, Model)
+
+        if self._is_last_call_aggregate is True:
+            message = "Will overwrite existing aggregation."
+            self.send_warning_event(message)
+
+        self._original_data = deepcopy(model.get_data())
+        self._aggregate(model)
+        self._is_last_call_aggregate = True
+        if self in model._aggregators:  # noqa: SLF001
+            message = f"{model} has already been aggregated with {self}. Cannot perform the same Aggregation more than once on a Model object."
+            raise ValueError(message)
+
+        # transfer_unambigous_memberships to aggregated components to support further aggregation
+        mapping = self.get_aggregation_map()
+        reversed_mapping = defaultdict(set)
+        new_data = model.get_data()
+        for member_id, group_ids in mapping.items():
+            self._check_type(group_ids, set)
+            for group_id in group_ids:
+                self._check_type(group_id, str)
+                member_component = self._original_data[member_id]
+                group_component = new_data[group_id]
+                reversed_mapping[group_component].add(member_component)
+        for group_component, member_components in reversed_mapping.items():
+            transfer_unambigous_memberships(group_component, member_components)
+
+        model._aggregators.append(deepcopy(self))  # noqa: SLF001
+
+    def disaggregate(self, model: Model) -> None:
+        """Disaggregate model back to pre-aggregate form. Move results into the disaggregated objects."""
+        self._check_type(model, Model)
+        self._check_is_aggregated()
+        self._disaggregate(model, self._original_data)
+        self._is_last_call_aggregate = False
+        self._original_data = None
+        self._aggregation_map = None
+
+    def get_aggregation_map(self) -> dict[str, set[str]]:
+        """
+        Return dictionary mapping from disaggregated to aggregated Component IDs.
+
+        The mapping should tell you which of the original Components were aggregated into which new Components.
+        Components which are left as is should not be in the mapping.
+        Components which are deleted without being aggregated are mapped to an empty set.
+        """
+        if self._aggregation_map is None:
+            message = f"{self} has not yet performed an aggregation or the aggregation map was not created during aggregation."
+            raise ValueError(message)
+        return self._aggregation_map
+
+    @abstractmethod
+    def _aggregate(self, model: Model) -> None:
+        """Modify model inplace. Replace components with aggregated components according to some method."""
+        pass
+
+    @abstractmethod
+    def _disaggregate(
+        self,
+        model: Model,
+        original_data: dict[str, Component | TimeVector | Curve | Expr],
+    ) -> None:
+        """
+        Modify model inplace. Restore from aggregated to original components.
+
+        Transfer any results from aggregated components to restored (disaggregated) components.
+
+        Implementers should document and handle changes in model instance between aggregation and disaggregation.
+        E.g. what to do if an aggregated component has been deleted prior to disaggregate call.
+        """
+        pass
+
+    def _check_is_aggregated(self) -> None:
+        if self._is_last_call_aggregate in [False, None]:
+            message = "Not aggregated. Must call aggregate and disaggregate in pairs."
+            raise RuntimeError(message)
+
+
+def transfer_unambigous_memberships(group_component: Component, member_components: Iterable[Component]) -> None:
+    """
+    Transfer unambiguous membership metadata from member components to a group component.
+
+    Parameters
+    ----------
+    group_component : Component
+        The component to which unambiguous membership metadata will be transferred.
+    member_components : Iterable[Component]
+        The components from which membership metadata is collected.
+
+    Notes
+    -----
+    Only metadata keys with a single unique Member value among all member components are transferred.
+    Existing metadata on the group component is not overwritten.
+
+    """
+    d = defaultdict(set)
+    for member in member_components:
+        for key in member.get_meta_keys():
+            value = member.get_meta(key)
+            if not isinstance(value, Member):
+                continue
+            d[key].add(value)
+    for key, value_set in d.items():
+        test_value = group_component.get_meta(key)
+        if test_value is not None:
+            # don't overwrite if already set
+            continue
+        if len(value_set) != 1:
+            # ambiguous membership
+            continue
+        value = next(iter(value_set))
+        group_component.add_meta(key, value)
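Concrete aggregators in this release (NodeAggregator, HydroAggregator, WindSolarAggregator) implement the two abstract hooks defined above. The following is a deliberately simplified, hypothetical subclass that only illustrates the aggregate/disaggregate contract of the base class; it does not reflect the logic of the real aggregators.

    from framcore.aggregators import Aggregator
    from framcore.Model import Model


    class NoOpAggregator(Aggregator):
        """Hypothetical aggregator that groups nothing; it only records an empty mapping."""

        def _aggregate(self, model: Model) -> None:
            # A real aggregator would replace grouped components with new aggregated ones
            # and record which original IDs went into which new IDs.
            self._aggregation_map = {}

        def _disaggregate(self, model: Model, original_data: dict) -> None:
            # A real aggregator would restore original_data and move results from the
            # aggregated components back to the detailed ones; nothing was removed here.
            model.get_data().update(original_data)


    model = Model()
    agg = NoOpAggregator()
    agg.aggregate(model)   # keeps a deepcopy of the data and registers the aggregator on the model
    model.disaggregate()   # undoes applied aggregations in LIFO order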