openenergyid: 0.1.9-py2.py3-none-any.whl → 0.1.12-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of openenergyid might be problematic.
- openenergyid/__init__.py +3 -3
- openenergyid/const.py +5 -0
- openenergyid/dyntar/__init__.py +17 -0
- openenergyid/dyntar/const.py +18 -0
- openenergyid/dyntar/main.py +169 -0
- openenergyid/dyntar/models.py +66 -0
- openenergyid/energysharing/__init__.py +6 -0
- openenergyid/energysharing/const.py +8 -0
- openenergyid/energysharing/data_formatting.py +73 -0
- openenergyid/energysharing/main.py +110 -0
- openenergyid/energysharing/models.py +55 -0
- openenergyid/models.py +83 -31
- openenergyid/mvlr/helpers.py +1 -1
- openenergyid/mvlr/models.py +4 -4
- {openenergyid-0.1.9.dist-info → openenergyid-0.1.12.dist-info}/METADATA +2 -2
- openenergyid-0.1.12.dist-info/RECORD +22 -0
- {openenergyid-0.1.9.dist-info → openenergyid-0.1.12.dist-info}/WHEEL +1 -1
- openenergyid-0.1.9.dist-info/RECORD +0 -13
- {openenergyid-0.1.9.dist-info → openenergyid-0.1.12.dist-info}/licenses/LICENSE +0 -0
openenergyid/__init__.py
CHANGED
```diff
@@ -1,8 +1,8 @@
 """Open Energy ID Python SDK."""
 
-__version__ = "0.1.9"
+__version__ = "0.1.12"
 
 from .enums import Granularity
-from .models import TimeSeries
+from .models import TimeDataFrame, TimeSeries
 
-__all__ = ["Granularity", "TimeSeries"]
+__all__ = ["Granularity", "TimeDataFrame", "TimeSeries"]
```
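With `TimeDataFrame` now re-exported at the package root, the public imports in 0.1.12 look like this (a quick sketch, not taken from the project's documentation):

```python
# Sketch: the names exported via __all__ in 0.1.12.
from openenergyid import Granularity, TimeDataFrame, TimeSeries
```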
openenergyid/const.py
CHANGED
```diff
@@ -11,3 +11,8 @@ ELECTRICITY_PRODUCED: Literal["electricity_produced"] = "electricity_produced"
 PRICE_DAY_AHEAD: Literal["price_day_ahead"] = "price_day_ahead"
 PRICE_IMBALANCE_UPWARD: Literal["price_imbalance_upward"] = "price_imbalance_upward"
 PRICE_IMBALANCE_DOWNWARD: Literal["price_imbalance_downward"] = "price_imbalance_downward"
+PRICE_ELECTRICITY_DELIVERED: Literal["price_electricity_delivered"] = "price_electricity_delivered"
+PRICE_ELECTRICITY_EXPORTED: Literal["price_electricity_exported"] = "price_electricity_exported"
+
+RLP: Literal["RLP"] = "RLP"
+SPP: Literal["SPP"] = "SPP"
```
openenergyid/dyntar/__init__.py
ADDED
```diff
@@ -0,0 +1,17 @@
+"""Dynamic Tariff Analysis module."""
+
+from .main import calculate_dyntar_columns
+from .models import (
+    DynamicTariffAnalysisInput,
+    DynamicTariffAnalysisOutput,
+    OutputColumns,
+    RequiredColumns,
+)
+
+__all__ = [
+    "calculate_dyntar_columns",
+    "DynamicTariffAnalysisInput",
+    "DynamicTariffAnalysisOutput",
+    "OutputColumns",
+    "RequiredColumns",
+]
```
openenergyid/dyntar/const.py
ADDED
```diff
@@ -0,0 +1,18 @@
+"""Constants for the dyntar analysis."""
+
+ELECTRICITY_DELIVERED_SMR3 = "electricity_delivered_smr3"
+ELECTRICITY_EXPORTED_SMR3 = "electricity_exported_smr3"
+ELECTRICITY_DELIVERED_SMR2 = "electricity_delivered_smr2"
+ELECTRICITY_EXPORTED_SMR2 = "electricity_exported_smr2"
+
+COST_ELECTRICITY_DELIVERED_SMR2 = "cost_electricity_delivered_smr2"
+COST_ELECTRICITY_EXPORTED_SMR2 = "cost_electricity_exported_smr2"
+COST_ELECTRICITY_DELIVERED_SMR3 = "cost_electricity_delivered_smr3"
+COST_ELECTRICITY_EXPORTED_SMR3 = "cost_electricity_exported_smr3"
+
+RLP_WEIGHTED_PRICE_DELIVERED = "rlp_weighted_price_delivered"
+SPP_WEIGHTED_PRICE_EXPORTED = "spp_weighted_price_exported"
+
+HEATMAP_DELIVERED = "heatmap_delivered"
+HEATMAP_EXPORTED = "heatmap_exported"
+HEATMAP_TOTAL = "heatmap_total"
```
openenergyid/dyntar/main.py
ADDED
```diff
@@ -0,0 +1,169 @@
+"""Main module of the DynTar package."""
+
+from typing import Optional
+import pandas as pd
+
+from openenergyid.const import (
+    ELECTRICITY_DELIVERED,
+    ELECTRICITY_EXPORTED,
+    PRICE_ELECTRICITY_DELIVERED,
+    PRICE_ELECTRICITY_EXPORTED,
+    RLP,
+    SPP,
+)
+
+from .const import (
+    ELECTRICITY_DELIVERED_SMR3,
+    ELECTRICITY_EXPORTED_SMR3,
+    ELECTRICITY_DELIVERED_SMR2,
+    ELECTRICITY_EXPORTED_SMR2,
+    COST_ELECTRICITY_DELIVERED_SMR2,
+    COST_ELECTRICITY_EXPORTED_SMR2,
+    COST_ELECTRICITY_DELIVERED_SMR3,
+    COST_ELECTRICITY_EXPORTED_SMR3,
+    RLP_WEIGHTED_PRICE_DELIVERED,
+    SPP_WEIGHTED_PRICE_EXPORTED,
+    HEATMAP_DELIVERED,
+    HEATMAP_EXPORTED,
+    HEATMAP_TOTAL,
+)
+
+
+def weigh_by_monthly_profile(series: pd.Series, profile: pd.Series) -> pd.Series:
+    """Weigh a time series by a monthly profile."""
+    df = pd.DataFrame({"series": series, "profile": profile})
+    results = []
+    for _, frame in df.groupby(pd.Grouper(freq="MS")):
+        frame = frame.copy()
+        frame["weighted"] = frame["series"].sum() * (frame["profile"] / frame["profile"].sum())
+        results.append(frame)
+    return pd.concat(results)["weighted"]
+
+
+def extend_dataframe_with_smr2(df: pd.DataFrame, inplace: bool = False) -> Optional[pd.DataFrame]:
+    """Extend a DataFrame with the SMR2 columns."""
+    if not inplace:
+        result_df = df.copy()
+    else:
+        result_df = df
+
+    result_df[ELECTRICITY_DELIVERED_SMR2] = weigh_by_monthly_profile(
+        df[ELECTRICITY_DELIVERED], df[RLP]
+    )
+    result_df[ELECTRICITY_EXPORTED_SMR2] = weigh_by_monthly_profile(
+        df[ELECTRICITY_EXPORTED], df[SPP]
+    )
+
+    result_df.rename(
+        columns={
+            ELECTRICITY_DELIVERED: ELECTRICITY_DELIVERED_SMR3,
+            ELECTRICITY_EXPORTED: ELECTRICITY_EXPORTED_SMR3,
+        },
+        inplace=True,
+    )
+
+    if not inplace:
+        return result_df
+    return None
+
+
+def extend_dataframe_with_costs(df: pd.DataFrame, inplace: bool = False) -> Optional[pd.DataFrame]:
+    """Extend a DataFrame with the cost columns."""
+    if not inplace:
+        result_df = df.copy()
+    else:
+        result_df = df
+
+    result_df[COST_ELECTRICITY_DELIVERED_SMR2] = (
+        df[ELECTRICITY_DELIVERED_SMR2] * df[PRICE_ELECTRICITY_DELIVERED]
+    )
+    result_df[COST_ELECTRICITY_EXPORTED_SMR2] = (
+        df[ELECTRICITY_EXPORTED_SMR2] * df[PRICE_ELECTRICITY_EXPORTED]
+    )
+
+    result_df[COST_ELECTRICITY_DELIVERED_SMR3] = (
+        df[ELECTRICITY_DELIVERED_SMR3] * df[PRICE_ELECTRICITY_DELIVERED]
+    )
+    result_df[COST_ELECTRICITY_EXPORTED_SMR3] = (
+        df[ELECTRICITY_EXPORTED_SMR3] * df[PRICE_ELECTRICITY_EXPORTED]
+    )
+
+    if not inplace:
+        return result_df
+    return None
+
+
+def extend_dataframe_with_weighted_prices(
+    df: pd.DataFrame, inplace: bool = False
+) -> Optional[pd.DataFrame]:
+    """Extend a DataFrame with the weighted price columns."""
+    if not inplace:
+        df = df.copy()
+
+    rlp_weighted_price_delivered = (df[PRICE_ELECTRICITY_DELIVERED] * df[RLP]).resample(
+        "MS"
+    ).sum() / df[RLP].resample("MS").sum()
+    df[RLP_WEIGHTED_PRICE_DELIVERED] = rlp_weighted_price_delivered.reindex_like(
+        df[RLP], method="ffill"
+    )
+    spp_weighted_price_exported = (df[PRICE_ELECTRICITY_EXPORTED] * df[SPP]).resample(
+        "MS"
+    ).sum() / df[SPP].resample("MS").sum()
+    df[SPP_WEIGHTED_PRICE_EXPORTED] = spp_weighted_price_exported.reindex_like(
+        df[SPP], method="ffill"
+    )
+
+    if not inplace:
+        return df
+    return None
+
+
+def extend_dataframe_with_heatmap(
+    df: pd.DataFrame, inplace: bool = False
+) -> Optional[pd.DataFrame]:
+    """Extend a DataFrame with the heatmap columns."""
+    if not inplace:
+        df = df.copy()
+
+    heatmap_score_delivered = (
+        (df[ELECTRICITY_DELIVERED_SMR2] - df[ELECTRICITY_DELIVERED_SMR3])
+        / df[ELECTRICITY_DELIVERED_SMR2]
+        * (df[RLP_WEIGHTED_PRICE_DELIVERED] - df[PRICE_ELECTRICITY_DELIVERED])
+        / df[RLP_WEIGHTED_PRICE_DELIVERED]
+    )
+    heatmap_score_exported = (
+        (df[ELECTRICITY_EXPORTED_SMR2] - df[ELECTRICITY_EXPORTED_SMR3])
+        / df[ELECTRICITY_EXPORTED_SMR2]
+        * (df[SPP_WEIGHTED_PRICE_EXPORTED] - df[PRICE_ELECTRICITY_EXPORTED])
+        / df[SPP_WEIGHTED_PRICE_EXPORTED]
+    )
+    heatmap_score_delivered.fillna(0, inplace=True)
+    heatmap_score_exported.fillna(0, inplace=True)
+
+    # Invert scores so that positive values indicate a positive impact
+    heatmap_score_delivered = -heatmap_score_delivered
+    heatmap_score_exported = -heatmap_score_exported
+    heatmap_score_combined = heatmap_score_delivered - heatmap_score_exported
+
+    df[HEATMAP_DELIVERED] = heatmap_score_delivered
+    df[HEATMAP_EXPORTED] = heatmap_score_exported
+    df[HEATMAP_TOTAL] = heatmap_score_combined
+
+    if not inplace:
+        return df
+    return None
+
+
+def calculate_dyntar_columns(df: pd.DataFrame, inplace: bool = False) -> Optional[pd.DataFrame]:
+    """Calculate all columns required for the dynamic tariff analysis."""
+    if not inplace:
+        df = df.copy()
+
+    extend_dataframe_with_smr2(df, inplace=True)
+    extend_dataframe_with_costs(df, inplace=True)
+    extend_dataframe_with_weighted_prices(df, inplace=True)
+    extend_dataframe_with_heatmap(df, inplace=True)
+
+    if not inplace:
+        return df
+    return None
```
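For orientation, here is a minimal usage sketch of the new module. It is not taken from the package's documentation; the quarter-hourly index and all numeric values are invented, and only the column names come from the constants in the diff above.

```python
# Sketch: run calculate_dyntar_columns on a tiny, invented quarter-hourly frame.
# Column names match openenergyid.const / dyntar.RequiredColumns; values are random.
import numpy as np
import pandas as pd

from openenergyid.dyntar import calculate_dyntar_columns

index = pd.date_range("2024-01-01", periods=96, freq="15min", tz="Europe/Brussels")
rng = np.random.default_rng(0)
frame = pd.DataFrame(
    {
        "electricity_delivered": rng.uniform(0.0, 0.5, len(index)),
        "electricity_exported": rng.uniform(0.0, 0.2, len(index)),
        "price_electricity_delivered": rng.uniform(0.05, 0.40, len(index)),
        "price_electricity_exported": rng.uniform(0.00, 0.20, len(index)),
        "RLP": rng.uniform(0.1, 1.0, len(index)),
        "SPP": rng.uniform(0.1, 1.0, len(index)),
    },
    index=index,
)

# Returns an extended copy; pass inplace=True to mutate `frame` instead.
result = calculate_dyntar_columns(frame)
print(result[["heatmap_delivered", "heatmap_exported", "heatmap_total"]].describe())
```

Per the inline comment in main.py, the heatmap scores are inverted so that positive values indicate a positive impact of the measured (SMR3) behaviour relative to the profile-weighted (SMR2) baseline.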
openenergyid/dyntar/models.py
ADDED
```diff
@@ -0,0 +1,66 @@
+"""Models for dynamic tariff analysis."""
+
+from typing import Literal
+from pydantic import Field, conlist
+
+from openenergyid.models import TimeDataFrame
+
+
+RequiredColumns = Literal[
+    "electricity_delivered",
+    "electricity_exported",
+    "price_electricity_delivered",
+    "price_electricity_exported",
+    "RLP",
+    "SPP",
+]
+
+OutputColumns = Literal[
+    "electricity_delivered_smr3",
+    "electricity_exported_smr3",
+    "price_electricity_delivered",
+    "price_electricity_exported",
+    "RLP",
+    "SPP",
+    "electricity_delivered_smr2",
+    "electricity_exported_smr2",
+    "cost_electricity_delivered_smr2",
+    "cost_electricity_exported_smr2",
+    "cost_electricity_delivered_smr3",
+    "cost_electricity_exported_smr3",
+    "rlp_weighted_price_delivered",
+    "spp_weighted_price_exported",
+    "heatmap_delivered",
+    "heatmap_exported",
+    "heatmap_total",
+]
+
+
+class DynamicTariffAnalysisInput(TimeDataFrame):
+    """Input frame for dynamic tariff analysis."""
+
+    columns: list[RequiredColumns] = Field(
+        min_length=len(RequiredColumns.__args__),
+        max_length=len(RequiredColumns.__args__),
+        examples=[RequiredColumns.__args__],
+    )
+    data: list[
+        conlist(
+            item_type=float,
+            min_length=len(RequiredColumns.__args__),
+            max_length=len(RequiredColumns.__args__),
+        )  # type: ignore
+    ] = Field(examples=[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]])
+
+
+class DynamicTariffAnalysisOutput(TimeDataFrame):
+    """Output frame for dynamic tariff analysis."""
+
+    columns: list[OutputColumns] = Field(
+        min_length=1,
+        max_length=len(OutputColumns.__args__),
+        examples=[OutputColumns.__args__],
+    )
+    data: list[
+        conlist(item_type=float, min_length=1, max_length=len(OutputColumns.__args__))  # type: ignore
+    ] = Field(examples=[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]])
```
openenergyid/energysharing/data_formatting.py
ADDED
```diff
@@ -0,0 +1,73 @@
+"""Functions to create multi-indexed DataFrames for input and output data for energy sharing."""
+
+import pandas as pd
+from .const import GROSS_INJECTION, GROSS_OFFTAKE, KEY, NET_INJECTION, NET_OFFTAKE, SHARED_ENERGY
+
+
+def create_multi_index_input_frame(
+    gross_injection: pd.DataFrame,
+    gross_offtake: pd.DataFrame,
+    key: pd.DataFrame,
+) -> pd.DataFrame:
+    """Create a multi-indexed DataFrame with the input data for energy sharing."""
+    gross_injection = gross_injection.copy()
+    gross_offtake = gross_offtake.copy()
+    key = key.copy()
+
+    gross_injection.columns = pd.MultiIndex.from_product(
+        [[GROSS_INJECTION], gross_injection.columns]
+    )
+    gross_offtake.columns = pd.MultiIndex.from_product([[GROSS_OFFTAKE], gross_offtake.columns])
+    key.columns = pd.MultiIndex.from_product([[KEY], key.columns])
+
+    df = pd.concat([gross_injection, gross_offtake, key], axis=1)
+
+    # Do a check that the sum of the keys per timestamp is 1
+    assert df[KEY].dropna(how="all").sum(axis=1).round(2).eq(1).all()
+
+    return df
+
+
+def create_multi_index_output_frame(
+    net_injection: pd.DataFrame,
+    net_offtake: pd.DataFrame,
+    shared_energy: pd.DataFrame,
+) -> pd.DataFrame:
+    """Create a multi-indexed DataFrame with the output data for energy sharing."""
+    net_injection = net_injection.copy()
+    net_offtake = net_offtake.copy()
+    shared_energy = shared_energy.copy()
+
+    net_injection.columns = pd.MultiIndex.from_product([[NET_INJECTION], net_injection.columns])
+    net_offtake.columns = pd.MultiIndex.from_product([[NET_OFFTAKE], net_offtake.columns])
+    shared_energy.columns = pd.MultiIndex.from_product([[SHARED_ENERGY], shared_energy.columns])
+
+    df = pd.concat([net_injection, net_offtake, shared_energy], axis=1)
+
+    df = df.round(2)
+    return df
+
+
+def result_to_input_for_reiteration(result: pd.DataFrame, key: pd.DataFrame) -> pd.DataFrame:
+    """Create a multi-indexed DataFrame with the input data for energy sharing after the first iteration."""
+    # We iterate again. The net injection of the previous result is taken as gross injection input
+    # And the net offtake is taken as the gross offtake input
+    # When a user's net offtake is 0, the key is set to 0; and the keys are re-normalized
+
+    gross_injection = result[NET_INJECTION].copy()
+    gross_offtake = result[NET_OFFTAKE].copy()
+
+    # Take the original key, but replace the value with 0.0 if result[NET_OFFTAKE] is 0.0
+
+    key = key.copy()
+    key = key.where(~result[NET_OFFTAKE].eq(0), 0)
+
+    # Re-normalize the keys
+
+    key = key.div(key.sum(axis=1), axis=0)
+
+    df = create_multi_index_input_frame(
+        gross_injection=gross_injection, gross_offtake=gross_offtake, key=key
+    )
+
+    return df
```
openenergyid/energysharing/main.py
ADDED
```diff
@@ -0,0 +1,110 @@
+"""Main Calcuation Module for Energy Sharing."""
+
+import pandas as pd
+from .models import CalculationMethod
+from .const import GROSS_INJECTION, GROSS_OFFTAKE, KEY, NET_INJECTION, NET_OFFTAKE, SHARED_ENERGY
+from .data_formatting import create_multi_index_output_frame, result_to_input_for_reiteration
+
+
+def _calculate(df: pd.DataFrame, method: CalculationMethod) -> pd.DataFrame:
+    """Calculate the energy sharing for the given input data. This function is not iterative."""
+    # Step 1: Calculate the maximum available gross injection that can be shared
+    # A participant cannot share their injection with themselves
+
+    # Take the injection of P1, and divide it per participant as per their key
+
+    injections_to_share = []
+    rest = {}
+
+    for participant in df.columns.levels[1]:
+        injection_to_share = df[GROSS_INJECTION][participant].copy()
+
+        if method == CalculationMethod.RELATIVE or method == CalculationMethod.OPTIMAL:
+            # Set the key of the current participant to 0
+            # Re-normalize the keys for the other participants
+            key = df[KEY].copy()
+            key.loc[:, participant] = 0
+            key = key.div(key.sum(axis=1), axis=0)
+        elif method == CalculationMethod.FIXED:
+            key = df[KEY].copy()
+
+        # Multiply injection_to_share with the key of each participant
+        shared_by_participant = (injection_to_share * key.T).T
+        shared_by_participant.fillna(0, inplace=True)
+        # Set the value for the current participant to 0
+        shared_by_participant.loc[:, participant] = 0
+
+        # Put the not shared injection in the rest
+        rest[participant] = injection_to_share - shared_by_participant.sum(axis=1)
+
+        injections_to_share.append(shared_by_participant)
+
+    # Sum the injections to share
+    max_allocated_injection = sum(injections_to_share)
+
+    # Concat the rest
+    injection_that_cannot_be_shared = pd.concat(rest, axis=1)
+
+    # Step 2: Calculate the Net Offtake, by assigning the injections to each participant
+    # But, a participant cannot receive more than their offtake
+
+    net_offtake = df[GROSS_OFFTAKE] - max_allocated_injection
+
+    # Sum all negative values into a column "Not Shared"
+    not_shared_after_assignment = net_offtake.clip(upper=0).sum(axis=1) * -1
+
+    # Clip the values to 0
+    net_offtake = net_offtake.clip(lower=0)
+
+    # Calculate the amount of actual shared energy
+    # This is the difference between the gross offtake and the net offtake
+    shared_energy = df[GROSS_OFFTAKE] - net_offtake
+
+    # Step 3: Assign the Rests back to the original injectors
+
+    # The energy that is not shared after assignment
+    # should be divided back to the original injectors
+    # A ratio of the original injection should be used
+
+    re_distributed_not_shared = (
+        (df[GROSS_INJECTION].T / df[GROSS_INJECTION].sum(axis=1)) * not_shared_after_assignment
+    ).T
+    re_distributed_not_shared.fillna(0, inplace=True)
+
+    # The nett injection is the sum of:
+    # the injection that cannot be shared to begin with
+    # (because participants cannot share with themselves)
+    # and the injection that cannot be shared after assignment
+    # (because participants cannot receive more than their offtake)
+
+    net_injection = injection_that_cannot_be_shared + re_distributed_not_shared
+
+    result = create_multi_index_output_frame(
+        net_injection=net_injection, net_offtake=net_offtake, shared_energy=shared_energy
+    )
+
+    return result
+
+
+def calculate(df: pd.DataFrame, method: CalculationMethod) -> pd.DataFrame:
+    """Calculate the energy sharing for the given input data.
+
+    This function is iterative if the method is optimal."""
+    result = _calculate(df, method)
+
+    if method in [CalculationMethod.FIXED, CalculationMethod.RELATIVE]:
+        return result
+
+    # Optimal method, we iterate until the amount of shared energy is 0
+    final_result = result.copy()
+    while not result[SHARED_ENERGY].eq(0).all().all():
+        df = result_to_input_for_reiteration(result, df[KEY])
+        result = _calculate(df, method)
+
+        # Add the result to the final result
+        # Overwrite NET_INJECTION and NET_OFFTAKE, Sum SHARED_ENERGY
+        final_result[NET_INJECTION] = result[NET_INJECTION]
+        final_result[NET_OFFTAKE] = result[NET_OFFTAKE]
+        final_result[SHARED_ENERGY] += result[SHARED_ENERGY]
+
+    return final_result
```
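A minimal sketch of the non-iterative path, again with invented participants and numbers; the multi-indexed input layout is built with `create_multi_index_input_frame` from `data_formatting.py`, so the column-level constants from `energysharing/const.py` never need to be spelled out:

```python
# Sketch: share the injection of participant A with B and C using a fixed key.
# Participants, volumes and keys are invented; keys must sum to 1 per timestamp.
import pandas as pd

from openenergyid.energysharing.data_formatting import create_multi_index_input_frame
from openenergyid.energysharing.main import calculate
from openenergyid.energysharing.models import CalculationMethod

index = pd.date_range("2024-06-01", periods=4, freq="15min", tz="UTC")
participants = ["A", "B", "C"]

gross_injection = pd.DataFrame([[1.0, 0.0, 0.0]] * 4, index=index, columns=participants)
gross_offtake = pd.DataFrame([[0.0, 0.3, 0.5]] * 4, index=index, columns=participants)
key = pd.DataFrame([[0.0, 0.5, 0.5]] * 4, index=index, columns=participants)

df = create_multi_index_input_frame(
    gross_injection=gross_injection, gross_offtake=gross_offtake, key=key
)
result = calculate(df, CalculationMethod.FIXED)
print(result)  # net injection, net offtake and shared energy per participant
```

With `CalculationMethod.OPTIMAL`, the same call keeps feeding the result back through `result_to_input_for_reiteration` until no further energy can be shared.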
openenergyid/energysharing/models.py
ADDED
```diff
@@ -0,0 +1,55 @@
+"""Data models for energy sharing."""
+
+from enum import Enum
+from typing import Annotated
+
+from pydantic import BaseModel, Field
+import pandas as pd
+
+from openenergyid import TimeDataFrame
+from .data_formatting import create_multi_index_input_frame
+from .const import NET_INJECTION, NET_OFFTAKE, SHARED_ENERGY
+
+
+class CalculationMethod(Enum):
+    """Calculation method for energy sharing."""
+
+    FIXED = "Fixed"
+    RELATIVE = "Relative"
+    OPTIMAL = "Optimal"
+
+
+class EnergySharingInput(BaseModel):
+    """Input data for energy sharing."""
+
+    gross_injection: Annotated[TimeDataFrame, Field(alias="grossInjection")]
+    gross_offtake: Annotated[TimeDataFrame, Field(alias="grossOfftake")]
+    key: Annotated[TimeDataFrame, Field(alias="key")]
+    timezone: str = Field(alias="timeZone", default="Europe/Brussels")
+
+    def data_frame(self) -> pd.DataFrame:
+        """Return the data as a combined DataFrame"""
+        df = create_multi_index_input_frame(
+            gross_injection=self.gross_injection.to_pandas(),
+            gross_offtake=self.gross_offtake.to_pandas(),
+            key=self.key.to_pandas(),
+        )
+        df = df.tz_convert(self.timezone)
+        return df
+
+
+class EnergySharingOutput(BaseModel):
+    """Output data for energy sharing."""
+
+    net_injection: TimeDataFrame = Field(alias="netInjection")
+    net_offtake: TimeDataFrame = Field(alias="netOfftake")
+    shared_energy: TimeDataFrame = Field(alias="sharedEnergy")
+
+    @classmethod
+    def from_calculation_result(cls, result: pd.DataFrame) -> "EnergySharingOutput":
+        """Create an output model from a calculation result."""
+        return cls.model_construct(
+            net_injection=TimeDataFrame.from_pandas(result[NET_INJECTION]),
+            net_offtake=TimeDataFrame.from_pandas(result[NET_OFFTAKE]),
+            shared_energy=TimeDataFrame.from_pandas(result[SHARED_ENERGY]),
+        )
```
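The same calculation can also be driven through the pydantic models; a sketch with the same invented participants (field names follow the aliases defined above):

```python
# Sketch: wrap invented pandas frames in the pydantic models and serialize the result.
import pandas as pd

from openenergyid import TimeDataFrame
from openenergyid.energysharing.main import calculate
from openenergyid.energysharing.models import (
    CalculationMethod,
    EnergySharingInput,
    EnergySharingOutput,
)

index = pd.date_range("2024-06-01", periods=4, freq="15min", tz="UTC")
participants = ["A", "B", "C"]


def as_frame(row: list[float]) -> TimeDataFrame:
    """Repeat one invented row over the index and wrap it in a TimeDataFrame."""
    return TimeDataFrame.from_pandas(pd.DataFrame([row] * 4, index=index, columns=participants))


payload = EnergySharingInput(
    grossInjection=as_frame([1.0, 0.0, 0.0]),
    grossOfftake=as_frame([0.0, 0.3, 0.5]),
    key=as_frame([0.0, 0.5, 0.5]),  # keys must sum to 1 per timestamp
    timeZone="UTC",
)
result = calculate(payload.data_frame(), CalculationMethod.FIXED)
output = EnergySharingOutput.from_calculation_result(result)
print(output.model_dump_json(by_alias=True))  # netInjection / netOfftake / sharedEnergy
```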
openenergyid/models.py
CHANGED
```diff
@@ -1,67 +1,119 @@
 """Data models for the Open Energy ID."""
 
 import datetime as dt
-from typing import Optional, overload
+from typing import Optional, overload, Union
+
+try:
+    from typing import Self
+except ImportError:
+    from typing_extensions import Self
 
 import pandas as pd
 from pydantic import BaseModel
 
 
-class
-"""
+class TimeSeriesBase(BaseModel):
+    """Pydantic base model for time series data."""
 
-    columns: list[str]
     index: list[dt.datetime]
-    data: list[list[float]]
 
     @classmethod
-    def from_pandas(cls, data: pd.DataFrame) ->
-        """Create
-
+    def from_pandas(cls, data: Union[pd.Series, pd.DataFrame]) -> Self:
+        """Create from a Pandas Object."""
+        raise NotImplementedError
 
-    def to_pandas(self, timezone: str = "UTC") -> pd.DataFrame:
-        """Convert
-
-        frame.index = pd.to_datetime(frame.index, utc=True)
-        return frame.tz_convert(timezone)
+    def to_pandas(self, timezone: str = "UTC") -> Union[pd.Series, pd.DataFrame]:
+        """Convert to a Pandas Object."""
+        raise NotImplementedError
 
     @overload
     def to_json(self, path: None = None, **kwargs) -> str:
-
+        """Dump to a JSON string."""
 
     @overload
     def to_json(self, path: str, **kwargs) -> None:
-
+        """Dump to a JSON file."""
 
     def to_json(self, path: Optional[str] = None, **kwargs) -> Optional[str]:
-        """
+        """Dump to a JSON string or file."""
         if path is None:
             return self.model_dump_json(**kwargs)
-
-
-
-
+        encoding = kwargs.pop("encoding", "UTF-8")
+        with open(path, "w", encoding=encoding) as file:
+            file.write(self.model_dump_json(**kwargs))
+        return None
 
     @overload
     @classmethod
-    def from_json(cls, string: str, **kwargs) ->
-
+    def from_json(cls, string: str, **kwargs) -> Self:
+        """Load from a JSON string."""
 
     @overload
     @classmethod
-    def from_json(cls, path: str, **kwargs) ->
-
+    def from_json(cls, path: str, **kwargs) -> Self:
+        """Load from a JSON file."""
 
     @classmethod
-    def from_json(
-
-    ) -> "TimeSeries":
-        """Load the TimeSeries from a JSON file or string."""
+    def from_json(cls, string: Optional[str] = None, path: Optional[str] = None, **kwargs) -> Self:
+        """Load from a JSON file or string."""
         if string:
             return cls.model_validate_json(string, **kwargs)
-
+        if path:
            encoding = kwargs.pop("encoding", "UTF-8")
            with open(path, "r", encoding=encoding) as file:
                return cls.model_validate_json(file.read(), **kwargs)
-
-
+        raise ValueError("Either string or path must be provided.")
+
+
+class TimeSeries(TimeSeriesBase):
+    """Time series data with a single column."""
+
+    name: Union[str, None] = None
+    data: list[float]
+
+    @classmethod
+    def from_pandas(cls, data: pd.Series) -> Self:
+        """Create from a Pandas Series."""
+        return cls.model_construct(name=data.name, data=data.tolist(), index=data.index.tolist())
+
+    def to_pandas(self, timezone: str = "UTC") -> pd.Series:
+        """Convert to a Pandas Series."""
+        series = pd.Series(self.data, name=self.name, index=self.index)
+        series.index = pd.to_datetime(series.index, utc=True)
+        return series.tz_convert(timezone)
+
+
+class TimeDataFrame(TimeSeriesBase):
+    """Time series data with multiple columns."""
+
+    columns: list[str]
+    data: list[list[float]]
+
+    @classmethod
+    def from_pandas(cls, data: pd.DataFrame) -> Self:
+        """Create from a Pandas DataFrame."""
+        return cls.model_construct(
+            columns=data.columns.tolist(), data=data.values.tolist(), index=data.index.tolist()
+        )
+
+    def to_pandas(self, timezone: str = "UTC") -> pd.DataFrame:
+        """Convert to a Pandas DataFrame."""
+        frame = pd.DataFrame(self.data, columns=self.columns, index=self.index)
+        frame.index = pd.to_datetime(frame.index, utc=True)
+        return frame.tz_convert(timezone)
+
+    @classmethod
+    def from_timeseries(cls, data: list[TimeSeries]) -> Self:
+        """Create from a list of TimeSeries objects."""
+        return cls.model_construct(
+            columns=[series.name for series in data],
+            data=[series.data for series in data],
+            index=data[0].index,
+        )
+
+    def to_timeseries(self) -> list[TimeSeries]:
+        """Convert to a list of TimeSeries objects."""
+        return [
+            TimeSeries(name=column, data=column_data, index=self.index)
+            for column, column_data in zip(self.columns, self.data)
+        ]
```
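The refactor splits the old single model into a `TimeSeriesBase` with a single-column `TimeSeries` and a multi-column `TimeDataFrame`, keeping the columns/index/data JSON layout. A round-trip sketch with invented values:

```python
# Sketch: round-trip an invented frame through the new TimeDataFrame model.
import pandas as pd

from openenergyid import TimeDataFrame

index = pd.date_range("2024-01-01", periods=3, freq="h", tz="UTC")
frame = pd.DataFrame(
    {"electricity_delivered": [0.1, 0.2, 0.3], "electricity_exported": [0.0, 0.05, 0.0]},
    index=index,
)

model = TimeDataFrame.from_pandas(frame)
as_json = model.to_json()  # JSON string with "columns", "index" and "data"
restored = TimeDataFrame.from_json(string=as_json).to_pandas(timezone="Europe/Brussels")
print(restored)
```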
openenergyid/mvlr/helpers.py
CHANGED
```diff
@@ -4,7 +4,7 @@ import pandas as pd
 
 from openenergyid.enums import Granularity
 
-pandas_granularity_map = {Granularity.P7D: "W-MON", Granularity.P1M: "MS"}
+pandas_granularity_map = {Granularity.P7D: "W-MON", Granularity.P1M: "MS", Granularity.P1D: "D"}
 
 
 def resample_input_data(
```
openenergyid/mvlr/models.py
CHANGED
```diff
@@ -6,7 +6,7 @@ from pydantic import BaseModel, Field, ConfigDict
 import statsmodels.formula.api as fm
 
 from openenergyid.enums import Granularity
-from openenergyid.models import
+from openenergyid.models import TimeDataFrame
 
 from .mvlr import MultiVariableLinearRegression
 
@@ -66,7 +66,7 @@ class MultiVariableRegressionInput(BaseModel):
         alias="independentVariables", min_length=1
     )
     dependent_variable: str = Field(alias="dependentVariable")
-    frame:
+    frame: TimeDataFrame
     granularities: list[Granularity]
     allow_negative_predictions: bool = Field(alias="allowNegativePredictions", default=False)
     validation_parameters: ValidationParameters = Field(
@@ -195,7 +195,7 @@ class MultiVariableRegressionResult(BaseModel):
     prob_f_stat: float = Field(ge=0, le=1, alias="probFStat")
     intercept: IndependentVariableResult
     granularity: Granularity
-    frame:
+    frame: TimeDataFrame
 
     model_config = ConfigDict(populate_by_name=True)
 
@@ -225,5 +225,5 @@ class MultiVariableRegressionResult(BaseModel):
             prob_f_stat=mvlr.fit.f_pvalue,
             intercept=IndependentVariableResult.from_fit(mvlr.fit, "Intercept"),
             granularity=mvlr.granularity,
-            frame=
+            frame=TimeDataFrame.from_pandas(frame),
         )
```
{openenergyid-0.1.9.dist-info → openenergyid-0.1.12.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: openenergyid
-Version: 0.1.9
+Version: 0.1.12
 Summary: Open Source Python library for energy analytics and simulations
 Project-URL: Homepage, https://energyid.eu
 Project-URL: Repository, https://github.com/EnergieID/OpenEnergyID
```
openenergyid-0.1.12.dist-info/RECORD
ADDED
```diff
@@ -0,0 +1,22 @@
+openenergyid/__init__.py,sha256=5EauuvlNmYacZW0kJXDfcpOSZWOwSlzX640snXiX92E,193
+openenergyid/const.py,sha256=D-xUnUyVuLmphClkePgxpFP6z0RDhw_6m7rX0BHBgrw,823
+openenergyid/enums.py,sha256=jdw4CB1gkisx0re_SesrTEyh_T-UxYp6uieE7iYlHdA,357
+openenergyid/models.py,sha256=ANdbdtr6JccSz6zEZj0TQOXISGPdtJPQy4t17uub97E,3975
+openenergyid/dyntar/__init__.py,sha256=iQXQXrEQOiVNeeF6LRmUf3oOhKlGjMNF7o4T04IWTGA,371
+openenergyid/dyntar/const.py,sha256=K7X6nHIl9DNyC6hU8jLtvOy3-IBGuYC449evOpImuJE,773
+openenergyid/dyntar/main.py,sha256=sn2zN5ep8XBgSGR_UDBU6mdp1WKNLzxROwp8HWCDgCM,5316
+openenergyid/dyntar/models.py,sha256=BbGdHj7eUOMepblWJNsNd21xULluv6m7TtdhSggvVbY,1873
+openenergyid/energysharing/__init__.py,sha256=swGqqgTh6DS2wFhnKs4CtnVMVrQNs6kv1BAzwgo0MQA,229
+openenergyid/energysharing/const.py,sha256=X2zEPtTlsmZ66w6RmLS_h8NmdzObAEi5N6-0yrLN5V4,219
+openenergyid/energysharing/data_formatting.py,sha256=ruAPa_Qbg6PMoUSObnhIYz5y-BeK92WlsOpLIdNXaeg,2736
+openenergyid/energysharing/main.py,sha256=2AWaTysNVLZEFVs8yRmFWx1_qG-NPeAazWXlFLHfLRM,4409
+openenergyid/energysharing/models.py,sha256=Bh7Nj_PFoCIKwdI7k4d_RrJnaiubT3dN2eIn5P4Xam4,1894
+openenergyid/mvlr/__init__.py,sha256=Glrc218oqa8tq_Y2G9LXaSoN4Yba-vsjXUi9r9iPzaY,471
+openenergyid/mvlr/helpers.py,sha256=Uzbfrj3IpH26wA206KOl0hNucKE-n9guJNC_EROBVKA,983
+openenergyid/mvlr/main.py,sha256=cn7jZ98cHn2eh-0zG9q8Pad0Ft_FuI-u3a-eeHeF8jA,1304
+openenergyid/mvlr/models.py,sha256=qi5NWeQC1KeWEAO8KglgVM1xbHM5QoeK25ugn0W_X6c,8628
+openenergyid/mvlr/mvlr.py,sha256=F7WvWnZQtqUmK1vsguemsn9n8pDDk3tQ1weOlv-bo0c,18626
+openenergyid-0.1.12.dist-info/METADATA,sha256=5wBSQdBWuoeh8YMuW-pU9Y9VeK3gRhK9QG18zMZf1kM,2432
+openenergyid-0.1.12.dist-info/WHEEL,sha256=fl6v0VwpzfGBVsGtkAkhILUlJxROXbA3HvRL6Fe3140,105
+openenergyid-0.1.12.dist-info/licenses/LICENSE,sha256=NgRdcNHwyXVCXZ8sJwoTp0DCowThJ9LWWl4xhbV1IUY,1074
+openenergyid-0.1.12.dist-info/RECORD,,
```
openenergyid-0.1.9.dist-info/RECORD
REMOVED
```diff
@@ -1,13 +0,0 @@
-openenergyid/__init__.py,sha256=_KWarhaNa9a-pxb9L4P7NUtWUi8A2CVl9nD29g21mYs,160
-openenergyid/const.py,sha256=bF-U-r0Qj2GWCYBBxReg8fbv2D0V1JzfPMwSEQ5ZWds,569
-openenergyid/enums.py,sha256=jdw4CB1gkisx0re_SesrTEyh_T-UxYp6uieE7iYlHdA,357
-openenergyid/models.py,sha256=pUJpQCodph0NukiIpFdc9X6Zj6qEGQPSWoztYDwqyuE,2214
-openenergyid/mvlr/__init__.py,sha256=Glrc218oqa8tq_Y2G9LXaSoN4Yba-vsjXUi9r9iPzaY,471
-openenergyid/mvlr/helpers.py,sha256=fsx-gSvBdU31BjncFkRd1RySmSPPYgwflCnmSFzox2Q,961
-openenergyid/mvlr/main.py,sha256=cn7jZ98cHn2eh-0zG9q8Pad0Ft_FuI-u3a-eeHeF8jA,1304
-openenergyid/mvlr/models.py,sha256=SdZYroi3EM4D1U6OnnyfBv6ygzfAQM2UzICg0jaQD6w,8616
-openenergyid/mvlr/mvlr.py,sha256=F7WvWnZQtqUmK1vsguemsn9n8pDDk3tQ1weOlv-bo0c,18626
-openenergyid-0.1.9.dist-info/METADATA,sha256=8BhGlQs47GfSa2XnCp3Hb9vQE_eE0vtSo7RoSAH3Mx8,2431
-openenergyid-0.1.9.dist-info/WHEEL,sha256=ccEkY-EGGllEs7ySpwBlD8G4u70wR77CNej8Q6tzIqA,105
-openenergyid-0.1.9.dist-info/licenses/LICENSE,sha256=NgRdcNHwyXVCXZ8sJwoTp0DCowThJ9LWWl4xhbV1IUY,1074
-openenergyid-0.1.9.dist-info/RECORD,,
```
{openenergyid-0.1.9.dist-info → openenergyid-0.1.12.dist-info}/licenses/LICENSE
File without changes