openenergyid 0.1.19__py2.py3-none-any.whl → 0.1.21__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of openenergyid might be problematic; see the advisory details on the registry page for more information.
- openenergyid/__init__.py +1 -1
- openenergyid/baseload/__init__.py +15 -0
- openenergyid/baseload/analysis.py +173 -0
- openenergyid/baseload/exceptions.py +9 -0
- openenergyid/baseload/models.py +31 -0
- openenergyid/dyntar/__init__.py +4 -1
- openenergyid/dyntar/const.py +9 -0
- openenergyid/dyntar/main.py +153 -90
- openenergyid/dyntar/models.py +40 -3
- openenergyid/models.py +46 -10
- openenergyid/mvlr/main.py +6 -2
- {openenergyid-0.1.19.dist-info → openenergyid-0.1.21.dist-info}/METADATA +3 -3
- {openenergyid-0.1.19.dist-info → openenergyid-0.1.21.dist-info}/RECORD +15 -11
- {openenergyid-0.1.19.dist-info → openenergyid-0.1.21.dist-info}/WHEEL +1 -1
- {openenergyid-0.1.19.dist-info → openenergyid-0.1.21.dist-info}/licenses/LICENSE +0 -0
openenergyid/__init__.py
CHANGED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""Baseload analysis package for power consumption data."""
|
|
2
|
+
|
|
3
|
+
from .models import PowerReadingSchema, PowerSeriesSchema, BaseloadResultSchema
|
|
4
|
+
from .analysis import BaseloadAnalyzer
|
|
5
|
+
from .exceptions import InsufficientDataError, InvalidDataError
|
|
6
|
+
|
|
7
|
+
__version__ = "0.1.0"
|
|
8
|
+
__all__ = [
|
|
9
|
+
"BaseloadAnalyzer",
|
|
10
|
+
"InsufficientDataError",
|
|
11
|
+
"InvalidDataError",
|
|
12
|
+
"PowerReadingSchema",
|
|
13
|
+
"PowerSeriesSchema",
|
|
14
|
+
"BaseloadResultSchema",
|
|
15
|
+
]
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"""Baseload Power Consumption Analysis Module
|
|
2
|
+
|
|
3
|
+
This module provides tools for analyzing electrical power consumption patterns to identify
|
|
4
|
+
and quantify baseload - the continuous background power usage in electrical systems.
|
|
5
|
+
It uses sophisticated time-series analysis to detect consistent minimum power draws
|
|
6
|
+
that represent always-on devices and systems.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import polars as pl
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class BaseloadAnalyzer:
    """Analyzes power consumption data to determine baseload characteristics.

    The BaseloadAnalyzer helps identify the minimum continuous power consumption in
    an electrical system by analyzing regular energy readings. It uses a statistical
    approach to determine baseload, which represents power used by devices that run
    continuously (like refrigerators, standby electronics, or network equipment).

    The analyzer works by:
    1. Converting 15-minute energy readings to instantaneous power values
    2. Analyzing daily patterns to identify consistent minimum usage
    3. Aggregating results into configurable time periods

    Parameters
    ----------
    quantile : float, default=0.05
        Defines what portion of lowest daily readings to consider as baseload.
        The default 0.05 (5%) corresponds to roughly 72 minutes of lowest
        consumption per day, which helps filter out brief power dips while
        capturing true baseload patterns.

    timezone : str
        Timezone for analysis. All timestamps will be converted to this timezone
        to ensure correct daily boundaries and consistent reporting periods.

    Example Usage
    ------------
    >>> analyzer = BaseloadAnalyzer(timezone="Europe/Brussels", quantile=0.05)
    >>> power_data = analyzer.prepare_power_seriespolars(energy_readings)
    >>> hourly_analysis = analyzer.analyze(power_data, "1h")
    >>> monthly_analysis = analyzer.analyze(power_data, "1mo")
    """

    def __init__(self, timezone: str, quantile: float = 0.05):
        # Fraction of the lowest readings per day treated as the baseload level.
        self.quantile = quantile
        # IANA timezone name; controls daily boundaries and reporting periods.
        self.timezone = timezone

    def prepare_power_seriespolars(self, energy_lf: pl.LazyFrame) -> pl.LazyFrame:
        """Converts energy readings into a power consumption time series.

        Transforms 15-minute energy readings (kilowatt-hours) into instantaneous
        power readings (watts) while handling timezone conversion.

        Parameters
        ----------
        energy_lf : pl.LazyFrame
            Input energy data with columns:
            - timestamp: Datetime with timezone (e.g. "2023-01-01T00:00:00+01:00")
            - total: Energy readings in kilowatt-hours (kWh)

        Returns
        -------
        pl.LazyFrame
            Power series with columns:
            - timestamp: Timezone-adjusted timestamps
            - power: Power readings in watts

        Notes
        -----
        The conversion from kWh/15min to watts uses the formula:
            watts = kWh * 4000
        where:
        - Multiply by 4 to convert from 15-minute to hourly rate
        - Multiply by 1000 to convert from kilowatts to watts
        """
        return (
            energy_lf.with_columns(
                [
                    # Convert timezone
                    # NOTE(review): replace_time_zone("UTC") re-labels the timestamps
                    # as UTC without shifting them — assumes naive-UTC input; confirm
                    # what upstream actually supplies.
                    pl.col("timestamp")
                    .dt.replace_time_zone("UTC")
                    .dt.convert_time_zone(self.timezone)
                    .alias("timestamp"),
                    # Convert to watts and clip negative values
                    (pl.col("total") * 4000).clip(0).alias("power"),
                ]
            )
            .drop("total")
            .sort("timestamp")
        )

    def analyze(self, power_lf: pl.LazyFrame, reporting_granularity: str = "1h") -> pl.LazyFrame:
        """Analyze power consumption data to calculate baseload and total energy metrics.

        Takes power readings (in watts) with 15-minute intervals and calculates:
        - Daily baseload power using a percentile threshold
        - Energy consumption from baseload vs total consumption
        - Average power metrics

        The analysis happens in three steps:
        1. Calculate the daily baseload power level using the configured percentile
        2. Join this daily baseload with the original power readings
        3. Aggregate the combined data into the requested reporting periods

        Parameters
        ----------
        power_lf : pl.LazyFrame
            Power consumption data with columns:
            - timestamp: Datetime in configured timezone
            - power: Power readings in watts

        reporting_granularity : str, default="1h"
            Time period for aggregating results. Must be a valid Polars interval string
            like "1h", "1d", "1mo" etc.

        Returns
        -------
        pl.LazyFrame
            Analysis results with metrics per reporting period:
            - timestamp: Start of reporting period
            - consumption_due_to_baseload_in_kilowatthour: Baseload energy
            - total_consumption_in_kilowatthour: Total energy
            - consumption_not_due_to_baseload_in_kilowatthour: Non-baseload energy
            - average_daily_baseload_in_watt: Average baseload power level
            - average_power_in_watt: Average total power
            - baseload_ratio: Fraction of energy from baseload
        """
        # Step 1: Calculate the daily baseload level
        # Group power readings by day and find the threshold power level that represents baseload
        # NOTE(review): group_by_dynamic expects timestamp-sorted input;
        # prepare_power_seriespolars() sorts, but confirm for other callers.
        daily_baseload = power_lf.group_by_dynamic("timestamp", every="1d").agg(
            pl.col("power").quantile(self.quantile).alias("daily_baseload")
        )

        # Step 2 & 3: Join baseload data and aggregate metrics
        return (
            # Join the daily baseload level with original power readings
            # Using asof join since baseload changes daily but readings are every 15min
            power_lf.join_asof(daily_baseload, on="timestamp")
            # Group into requested reporting periods
            .group_by_dynamic("timestamp", every=reporting_granularity)
            .agg(
                [
                    # Energy calculations:
                    # Each 15min power reading (watts) represents 0.25 hours
                    # Convert to kWh: watts * 0.25h * (1kW/1000W)
                    (pl.col("daily_baseload").sum() * 0.25 / 1000).alias(
                        "consumption_due_to_baseload_in_kilowatthour"
                    ),
                    (pl.col("power").sum() * 0.25 / 1000).alias(
                        "total_consumption_in_kilowatthour"
                    ),
                    # Average power levels during the period
                    pl.col("daily_baseload").mean().alias("average_daily_baseload_in_watt"),
                    pl.col("power").mean().alias("average_power_in_watt"),
                ]
            )
            # Calculate derived metrics
            .with_columns(
                [
                    # Energy consumed above baseload level
                    (
                        pl.col("total_consumption_in_kilowatthour")
                        - pl.col("consumption_due_to_baseload_in_kilowatthour")
                    ).alias("consumption_not_due_to_baseload_in_kilowatthour"),
                    # What fraction of total energy was from baseload
                    # (may exceed 1 in periods where total < estimated baseload)
                    (
                        pl.col("consumption_due_to_baseload_in_kilowatthour")
                        / pl.col("total_consumption_in_kilowatthour")
                    ).alias("baseload_ratio"),
                ]
            )
        )
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import pandera.polars as pa
|
|
2
|
+
from pandera.engines.polars_engine import DateTime
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class PowerReadingSchema(pa.DataFrameModel):
    """Validates input energy readings"""

    # Reading timestamp; coerced to a datetime dtype.
    timestamp: DateTime = pa.Field()
    # Energy reading per 15-minute interval in kWh; must be non-negative.
    total: float = pa.Field(ge=0)

    class Config:
        # Coerce incoming columns to the declared dtypes instead of failing.
        coerce = True
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class PowerSeriesSchema(pa.DataFrameModel):
    """Validates converted power series"""

    # Timezone-adjusted timestamp of the reading.
    timestamp: DateTime = pa.Field()
    # Instantaneous power in watts; negative values are clipped upstream.
    power: float = pa.Field(ge=0)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class BaseloadResultSchema(pa.DataFrameModel):
    """Validates analysis results"""

    # Start of the reporting period.
    timestamp: DateTime = pa.Field()
    consumption_due_to_baseload_in_kilowatthour: float = pa.Field(ge=0)
    total_consumption_in_kilowatthour: float = pa.Field(ge=0)
    average_daily_baseload_in_watt: float = pa.Field(ge=0)
    average_power_in_watt: float = pa.Field(ge=0)
    # Intentionally unconstrained: may be negative when the baseload estimate
    # exceeds total consumption within a period.
    consumption_not_due_to_baseload_in_kilowatthour: float
    # NOTE(review): upper bound of 2 tolerates ratios above 1 — confirm intended.
    baseload_ratio: float = pa.Field(ge=0, le=2)
|
openenergyid/dyntar/__init__.py
CHANGED
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
"""Dynamic Tariff Analysis module."""
|
|
2
2
|
|
|
3
|
-
from .main import calculate_dyntar_columns
|
|
3
|
+
from .main import calculate_dyntar_columns, summarize_result
|
|
4
4
|
from .models import (
|
|
5
5
|
DynamicTariffAnalysisInput,
|
|
6
6
|
DynamicTariffAnalysisOutput,
|
|
7
|
+
DynamicTariffAnalysisOutputSummary,
|
|
7
8
|
OutputColumns,
|
|
8
9
|
RequiredColumns,
|
|
9
10
|
)
|
|
@@ -12,6 +13,8 @@ __all__ = [
|
|
|
12
13
|
"calculate_dyntar_columns",
|
|
13
14
|
"DynamicTariffAnalysisInput",
|
|
14
15
|
"DynamicTariffAnalysisOutput",
|
|
16
|
+
"DynamicTariffAnalysisOutputSummary",
|
|
15
17
|
"OutputColumns",
|
|
16
18
|
"RequiredColumns",
|
|
19
|
+
"summarize_result",
|
|
17
20
|
]
|
openenergyid/dyntar/const.py
CHANGED
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
"""Constants for the dyntar analysis."""
|
|
2
2
|
|
|
3
|
+
from enum import Enum
|
|
4
|
+
|
|
3
5
|
ELECTRICITY_DELIVERED_SMR3 = "electricity_delivered_smr3"
|
|
4
6
|
ELECTRICITY_EXPORTED_SMR3 = "electricity_exported_smr3"
|
|
5
7
|
ELECTRICITY_DELIVERED_SMR2 = "electricity_delivered_smr2"
|
|
@@ -20,3 +22,10 @@ HEATMAP_TOTAL = "heatmap_total"
|
|
|
20
22
|
HEATMAP_DELIVERED_DESCRIPTION = "heatmap_delivered_description"
|
|
21
23
|
HEATMAP_EXPORTED_DESCRIPTION = "heatmap_exported_description"
|
|
22
24
|
HEATMAP_TOTAL_DESCRIPTION = "heatmap_total_description"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Register(Enum):
    """Register for dynamic tariff analysis."""

    # Electricity taken from the grid (paired with the *_delivered columns).
    DELIVERY = "delivery"
    # Electricity injected into the grid (paired with the *_exported columns).
    EXPORT = "export"
|
openenergyid/dyntar/main.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"""Main module of the DynTar package."""
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
from typing import cast
|
|
4
4
|
import pandas as pd
|
|
5
5
|
|
|
6
6
|
from openenergyid.const import (
|
|
@@ -29,6 +29,7 @@ from .const import (
|
|
|
29
29
|
HEATMAP_DELIVERED_DESCRIPTION,
|
|
30
30
|
HEATMAP_EXPORTED_DESCRIPTION,
|
|
31
31
|
HEATMAP_TOTAL_DESCRIPTION,
|
|
32
|
+
Register,
|
|
32
33
|
)
|
|
33
34
|
|
|
34
35
|
|
|
@@ -40,15 +41,28 @@ def weigh_by_monthly_profile(df: pd.DataFrame, series_name, profile_name) -> pd.
|
|
|
40
41
|
)
|
|
41
42
|
|
|
42
43
|
|
|
43
|
-
def extend_dataframe_with_smr2(
|
|
44
|
+
def extend_dataframe_with_smr2(
|
|
45
|
+
df: pd.DataFrame,
|
|
46
|
+
inplace: bool = False,
|
|
47
|
+
registers: list[Register] | None = None,
|
|
48
|
+
) -> pd.DataFrame | None:
|
|
44
49
|
"""Extend a DataFrame with the SMR2 columns."""
|
|
45
50
|
if not inplace:
|
|
46
51
|
result_df = df.copy()
|
|
47
52
|
else:
|
|
48
53
|
result_df = df
|
|
49
54
|
|
|
50
|
-
|
|
51
|
-
|
|
55
|
+
if registers is None:
|
|
56
|
+
registers = [Register.DELIVERY, Register.EXPORT]
|
|
57
|
+
|
|
58
|
+
if Register.DELIVERY in registers:
|
|
59
|
+
result_df[ELECTRICITY_DELIVERED_SMR2] = weigh_by_monthly_profile(
|
|
60
|
+
df, ELECTRICITY_DELIVERED, RLP
|
|
61
|
+
)
|
|
62
|
+
if Register.EXPORT in registers:
|
|
63
|
+
result_df[ELECTRICITY_EXPORTED_SMR2] = weigh_by_monthly_profile(
|
|
64
|
+
df, ELECTRICITY_EXPORTED, SPP
|
|
65
|
+
)
|
|
52
66
|
|
|
53
67
|
result_df.rename(
|
|
54
68
|
columns={
|
|
@@ -56,6 +70,7 @@ def extend_dataframe_with_smr2(df: pd.DataFrame, inplace: bool = False) -> pd.Da
|
|
|
56
70
|
ELECTRICITY_EXPORTED: ELECTRICITY_EXPORTED_SMR3,
|
|
57
71
|
},
|
|
58
72
|
inplace=True,
|
|
73
|
+
errors="ignore",
|
|
59
74
|
)
|
|
60
75
|
|
|
61
76
|
if not inplace:
|
|
@@ -63,26 +78,33 @@ def extend_dataframe_with_smr2(df: pd.DataFrame, inplace: bool = False) -> pd.Da
|
|
|
63
78
|
return None
|
|
64
79
|
|
|
65
80
|
|
|
66
|
-
def extend_dataframe_with_costs(
|
|
81
|
+
def extend_dataframe_with_costs(
|
|
82
|
+
df: pd.DataFrame, inplace: bool = False, registers: list[Register] | None = None
|
|
83
|
+
) -> pd.DataFrame | None:
|
|
67
84
|
"""Extend a DataFrame with the cost columns."""
|
|
68
85
|
if not inplace:
|
|
69
86
|
result_df = df.copy()
|
|
70
87
|
else:
|
|
71
88
|
result_df = df
|
|
72
89
|
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
)
|
|
76
|
-
result_df[COST_ELECTRICITY_EXPORTED_SMR2] = (
|
|
77
|
-
df[ELECTRICITY_EXPORTED_SMR2] * df[PRICE_ELECTRICITY_EXPORTED] * -1
|
|
78
|
-
)
|
|
90
|
+
if registers is None:
|
|
91
|
+
registers = [Register.DELIVERY, Register.EXPORT]
|
|
79
92
|
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
93
|
+
if Register.DELIVERY in registers:
|
|
94
|
+
result_df[COST_ELECTRICITY_DELIVERED_SMR2] = (
|
|
95
|
+
df[ELECTRICITY_DELIVERED_SMR2] * df[PRICE_ELECTRICITY_DELIVERED]
|
|
96
|
+
)
|
|
97
|
+
result_df[COST_ELECTRICITY_DELIVERED_SMR3] = (
|
|
98
|
+
df[ELECTRICITY_DELIVERED_SMR3] * df[PRICE_ELECTRICITY_DELIVERED]
|
|
99
|
+
)
|
|
100
|
+
|
|
101
|
+
if Register.EXPORT in registers:
|
|
102
|
+
result_df[COST_ELECTRICITY_EXPORTED_SMR2] = (
|
|
103
|
+
df[ELECTRICITY_EXPORTED_SMR2] * df[PRICE_ELECTRICITY_EXPORTED] * -1
|
|
104
|
+
)
|
|
105
|
+
result_df[COST_ELECTRICITY_EXPORTED_SMR3] = (
|
|
106
|
+
df[ELECTRICITY_EXPORTED_SMR3] * df[PRICE_ELECTRICITY_EXPORTED] * -1
|
|
107
|
+
)
|
|
86
108
|
|
|
87
109
|
if not inplace:
|
|
88
110
|
return result_df
|
|
@@ -90,63 +112,70 @@ def extend_dataframe_with_costs(df: pd.DataFrame, inplace: bool = False) -> pd.D
|
|
|
90
112
|
|
|
91
113
|
|
|
92
114
|
def extend_dataframe_with_weighted_prices(
    df: pd.DataFrame, inplace: bool = False, registers: list[Register] | None = None
) -> pd.DataFrame | None:
    """Extend a DataFrame with the weighted price columns.

    Adds, per requested register, the monthly profile-weighted average price
    (RLP-weighted for delivery, SPP-weighted for export), broadcast back onto
    the original index via forward-fill.

    Returns the extended frame when ``inplace`` is False, otherwise None.
    """
    if not inplace:
        df = df.copy()

    # Default to computing both registers.
    if registers is None:
        registers = [Register.DELIVERY, Register.EXPORT]

    if Register.DELIVERY in registers:
        # Monthly ("MS" = month start) average delivery price, weighted by the
        # RLP profile.
        rlp_weighted_price_delivered = (df[PRICE_ELECTRICITY_DELIVERED] * df[RLP]).resample(
            "MS"
        ).sum() / df[RLP].resample("MS").sum()
        # Broadcast each monthly value onto every timestamp of that month.
        df[RLP_WEIGHTED_PRICE_DELIVERED] = rlp_weighted_price_delivered.reindex_like(
            df[RLP], method="ffill"
        )

    if Register.EXPORT in registers:
        # Monthly average export price, weighted by the SPP profile.
        spp_weighted_price_exported = (df[PRICE_ELECTRICITY_EXPORTED] * df[SPP]).resample(
            "MS"
        ).sum() / df[SPP].resample("MS").sum()
        df[SPP_WEIGHTED_PRICE_EXPORTED] = spp_weighted_price_exported.reindex_like(
            df[SPP], method="ffill"
        )

    if not inplace:
        return df
    return None
|
|
115
143
|
|
|
116
144
|
|
|
117
|
-
def extend_dataframe_with_heatmap(
|
|
145
|
+
def extend_dataframe_with_heatmap(
|
|
146
|
+
df: pd.DataFrame, inplace: bool = False, registers: list[Register] | None = None
|
|
147
|
+
) -> pd.DataFrame | None:
|
|
118
148
|
"""Extend a DataFrame with the heatmap columns."""
|
|
119
149
|
if not inplace:
|
|
120
150
|
df = df.copy()
|
|
121
151
|
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
df[
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
df[HEATMAP_EXPORTED] = heatmap_score_exported
|
|
152
|
+
if registers is None:
|
|
153
|
+
registers = [Register.DELIVERY, Register.EXPORT]
|
|
154
|
+
|
|
155
|
+
if Register.DELIVERY in registers:
|
|
156
|
+
energy_delta_delivered = df[ELECTRICITY_DELIVERED_SMR2] - df[ELECTRICITY_DELIVERED_SMR3]
|
|
157
|
+
price_delta_delivered = df[RLP_WEIGHTED_PRICE_DELIVERED] - df[PRICE_ELECTRICITY_DELIVERED]
|
|
158
|
+
heatmap_score_delivered = energy_delta_delivered * price_delta_delivered
|
|
159
|
+
heatmap_score_delivered.fillna(0, inplace=True)
|
|
160
|
+
# Invert score so that positive values indicate a positive impact
|
|
161
|
+
heatmap_score_delivered = -heatmap_score_delivered
|
|
162
|
+
df[HEATMAP_DELIVERED] = heatmap_score_delivered
|
|
163
|
+
|
|
164
|
+
if Register.EXPORT in registers:
|
|
165
|
+
energy_delta_exported = df[ELECTRICITY_EXPORTED_SMR2] - df[ELECTRICITY_EXPORTED_SMR3]
|
|
166
|
+
price_delta_exported = df[SPP_WEIGHTED_PRICE_EXPORTED] - df[PRICE_ELECTRICITY_EXPORTED]
|
|
167
|
+
heatmap_score_exported = energy_delta_exported * price_delta_exported
|
|
168
|
+
heatmap_score_exported.fillna(0, inplace=True)
|
|
169
|
+
df[HEATMAP_EXPORTED] = heatmap_score_exported
|
|
170
|
+
|
|
171
|
+
if Register.DELIVERY in registers and Register.EXPORT in registers:
|
|
172
|
+
heatmap_score_delivered = cast(pd.Series, df[HEATMAP_DELIVERED])
|
|
173
|
+
heatmap_score_exported = cast(pd.Series, df[HEATMAP_EXPORTED])
|
|
174
|
+
heatmap_score_combined = heatmap_score_delivered + heatmap_score_exported
|
|
175
|
+
elif Register.DELIVERY in registers:
|
|
176
|
+
heatmap_score_combined = heatmap_score_delivered
|
|
177
|
+
else:
|
|
178
|
+
heatmap_score_combined = heatmap_score_exported
|
|
150
179
|
df[HEATMAP_TOTAL] = heatmap_score_combined
|
|
151
180
|
|
|
152
181
|
if not inplace:
|
|
@@ -194,56 +223,90 @@ def map_total_description(
|
|
|
194
223
|
|
|
195
224
|
|
|
196
225
|
def extend_dataframe_with_heatmap_description(
    df: pd.DataFrame, inplace: bool = False, registers: list[Register] | None = None
) -> pd.DataFrame | None:
    """Extend a DataFrame with the heatmap description columns.

    Requires the SMR2/SMR3 energy columns, the weighted price columns and
    (for the combined description) the heatmap score columns to be present.
    Returns the extended frame when ``inplace`` is False, otherwise None.
    """
    if not inplace:
        df = df.copy()

    # Default to describing both registers.
    if registers is None:
        registers = [Register.DELIVERY, Register.EXPORT]

    if Register.DELIVERY in registers:
        # Row-wise human-readable explanation for the delivery register.
        df[HEATMAP_DELIVERED_DESCRIPTION] = list(
            map(
                map_delivery_description,
                df[PRICE_ELECTRICITY_DELIVERED],
                df[RLP_WEIGHTED_PRICE_DELIVERED],
                df[ELECTRICITY_DELIVERED_SMR3],
                df[ELECTRICITY_DELIVERED_SMR2],
            )
        )

    if Register.EXPORT in registers:
        # Row-wise human-readable explanation for the export register.
        df[HEATMAP_EXPORTED_DESCRIPTION] = list(
            map(
                map_export_description,
                df[PRICE_ELECTRICITY_EXPORTED],
                df[SPP_WEIGHTED_PRICE_EXPORTED],
                df[ELECTRICITY_EXPORTED_SMR3],
                df[ELECTRICITY_EXPORTED_SMR2],
            )
        )

    if Register.DELIVERY in registers and Register.EXPORT in registers:
        # Combined description: pick per row based on which absolute heatmap
        # score dominates.
        df[HEATMAP_TOTAL_DESCRIPTION] = list(
            map(
                map_total_description,
                df[HEATMAP_DELIVERED].abs(),
                df[HEATMAP_EXPORTED].abs(),
                df[HEATMAP_DELIVERED_DESCRIPTION],
                df[HEATMAP_EXPORTED_DESCRIPTION],
            )
        )
    elif Register.DELIVERY in registers:
        # Single-register analysis: total description is that register's.
        df[HEATMAP_TOTAL_DESCRIPTION] = df[HEATMAP_DELIVERED_DESCRIPTION]
    else:
        df[HEATMAP_TOTAL_DESCRIPTION] = df[HEATMAP_EXPORTED_DESCRIPTION]

    if not inplace:
        return df
|
|
234
274
|
|
|
235
275
|
|
|
236
|
-
def calculate_dyntar_columns(
    df: pd.DataFrame,
    inplace: bool = False,
    registers: list[Register] | None = None,
) -> pd.DataFrame | None:
    """Calculate all columns required for the dynamic tariff analysis.

    Runs the full enrichment pipeline for the requested registers.
    Returns the enriched frame when ``inplace`` is False, otherwise None.
    """
    if not inplace:
        df = df.copy()

    # Default to analysing both registers.
    if registers is None:
        registers = [Register.DELIVERY, Register.EXPORT]

    # Order matters: costs need the SMR2/SMR3 energy columns, the heatmap
    # needs the weighted prices, and the descriptions need the heatmap scores.
    extend_dataframe_with_smr2(df, inplace=True, registers=registers)
    extend_dataframe_with_costs(df, inplace=True, registers=registers)
    extend_dataframe_with_weighted_prices(df, inplace=True, registers=registers)
    extend_dataframe_with_heatmap(df, inplace=True, registers=registers)
    extend_dataframe_with_heatmap_description(df, inplace=True, registers=registers)

    if not inplace:
        return df
    return None
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def summarize_result(df: pd.DataFrame) -> pd.Series:
    """Summarize the dynamic tariff analysis result.

    Sums every cost column over the analysed period, appends SMR2/SMR3
    totals, and computes the relative cost difference between the two
    tariff schemes.
    """
    # Total of each cost column over the whole frame.
    summary = df.filter(like="cost").sum()

    # Normalisation base for the ratio: sum of absolute SMR2 cost totals.
    abs_smr2 = summary.filter(like="smr2").abs().sum()

    smr2_total = summary.filter(like="smr2").sum()
    smr3_total = summary.filter(like="smr3").sum()
    summary["cost_electricity_total_smr2"] = smr2_total
    summary["cost_electricity_total_smr3"] = smr3_total

    # Positive ratio means the dynamic (SMR3) scheme cost more than SMR2.
    summary["ratio"] = (smr3_total - smr2_total) / abs_smr2

    return summary
|
openenergyid/dyntar/models.py
CHANGED
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
"""Models for dynamic tariff analysis."""
|
|
2
2
|
|
|
3
3
|
from typing import Literal
|
|
4
|
-
from pydantic import Field, conlist, confloat
|
|
4
|
+
from pydantic import Field, conlist, confloat, BaseModel
|
|
5
5
|
|
|
6
6
|
from openenergyid.models import TimeDataFrame
|
|
7
|
+
from .const import Register
|
|
7
8
|
|
|
8
9
|
|
|
9
10
|
RequiredColumns = Literal[
|
|
@@ -43,18 +44,53 @@ class DynamicTariffAnalysisInput(TimeDataFrame):
|
|
|
43
44
|
"""Input frame for dynamic tariff analysis."""
|
|
44
45
|
|
|
45
46
|
columns: list[RequiredColumns] = Field(
|
|
46
|
-
min_length=
|
|
47
|
+
min_length=3,
|
|
47
48
|
max_length=len(RequiredColumns.__args__),
|
|
48
49
|
examples=[RequiredColumns.__args__],
|
|
49
50
|
)
|
|
50
51
|
data: list[
|
|
51
52
|
conlist(
|
|
52
53
|
item_type=confloat(allow_inf_nan=True),
|
|
53
|
-
min_length=
|
|
54
|
+
min_length=3,
|
|
54
55
|
max_length=len(RequiredColumns.__args__),
|
|
55
56
|
) # type: ignore
|
|
56
57
|
] = Field(examples=[[0.0] * len(RequiredColumns.__args__)])
|
|
57
58
|
|
|
59
|
+
@property
|
|
60
|
+
def registers(self) -> list[Register]:
|
|
61
|
+
"""Check which registers are present in the input data."""
|
|
62
|
+
registers = []
|
|
63
|
+
columns = list(self.columns)
|
|
64
|
+
# if "electricity_delivered", "price_electricity_delivered" and "RLP" are present
|
|
65
|
+
if all(
|
|
66
|
+
column in columns
|
|
67
|
+
for column in [
|
|
68
|
+
"electricity_delivered",
|
|
69
|
+
"price_electricity_delivered",
|
|
70
|
+
"RLP",
|
|
71
|
+
]
|
|
72
|
+
):
|
|
73
|
+
registers.append(Register.DELIVERY)
|
|
74
|
+
# if "electricity_exported", "price_electricity_exported" and "SPP" are present
|
|
75
|
+
if all(
|
|
76
|
+
column in columns
|
|
77
|
+
for column in ["electricity_exported", "price_electricity_exported", "SPP"]
|
|
78
|
+
):
|
|
79
|
+
registers.append(Register.EXPORT)
|
|
80
|
+
return registers
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class DynamicTariffAnalysisOutputSummary(BaseModel):
    """Summary of the dynamic tariff analysis output."""

    # Per-register cost totals; None when that register was not analysed.
    cost_electricity_delivered_smr2: float | None = None
    cost_electricity_delivered_smr3: float | None = None
    cost_electricity_exported_smr2: float | None = None
    cost_electricity_exported_smr3: float | None = None
    # Combined totals across registers.
    cost_electricity_total_smr2: float | None = None
    cost_electricity_total_smr3: float | None = None
    # (SMR3 total - SMR2 total) normalised by the absolute SMR2 cost totals.
    ratio: float | None = None
|
|
93
|
+
|
|
58
94
|
|
|
59
95
|
class DynamicTariffAnalysisOutput(TimeDataFrame):
|
|
60
96
|
"""Output frame for dynamic tariff analysis."""
|
|
@@ -71,3 +107,4 @@ class DynamicTariffAnalysisOutput(TimeDataFrame):
|
|
|
71
107
|
max_length=len(OutputColumns.__args__),
|
|
72
108
|
) # type: ignore
|
|
73
109
|
] = Field(examples=[[0.0] * len(OutputColumns.__args__)])
|
|
110
|
+
summary: DynamicTariffAnalysisOutputSummary | None = None
|
openenergyid/models.py
CHANGED
|
@@ -7,6 +7,7 @@ from typing import Self
|
|
|
7
7
|
|
|
8
8
|
import pandas as pd
|
|
9
9
|
from pydantic import BaseModel
|
|
10
|
+
import polars as pl
|
|
10
11
|
|
|
11
12
|
|
|
12
13
|
class TimeSeriesBase(BaseModel):
|
|
@@ -75,10 +76,14 @@ class TimeSeries(TimeSeriesBase):
|
|
|
75
76
|
Create a TimeSeries object from a Pandas Series.
|
|
76
77
|
to_pandas(self, timezone: str = "UTC") -> pd.Series:
|
|
77
78
|
Convert the TimeSeries object to a Pandas Series.
|
|
79
|
+
from_polars(cls, data: pl.DataFrame | pl.LazyFrame) -> Self:
|
|
80
|
+
Create a TimeSeries object from Polars data.
|
|
81
|
+
to_polars(self, timezone: str = "UTC") -> pl.LazyFrame:
|
|
82
|
+
Convert the TimeSeries object to a Polars LazyFrame.
|
|
78
83
|
"""
|
|
79
84
|
|
|
80
85
|
name: str | None = None
|
|
81
|
-
data: list[float]
|
|
86
|
+
data: list[float | None]
|
|
82
87
|
|
|
83
88
|
@classmethod
|
|
84
89
|
def from_pandas(cls, data: pd.Series) -> Self:
|
|
@@ -91,19 +96,50 @@ class TimeSeries(TimeSeriesBase):
|
|
|
91
96
|
series.index = pd.to_datetime(series.index, utc=True)
|
|
92
97
|
return series.tz_convert(timezone)
|
|
93
98
|
|
|
99
|
+
    @classmethod
    def from_polars(cls, data: pl.DataFrame | pl.LazyFrame) -> Self:
        """Create from Polars data.

        Expects exactly two columns: ``timestamp`` plus one value column,
        whose name becomes the series name.

        Raises
        ------
        ValueError
            If the frame does not contain exactly two columns.
        """
        # Always work with DataFrame
        df = data.collect() if isinstance(data, pl.LazyFrame) else data

        if len(df.columns) != 2:
            raise ValueError("Must contain exactly two columns: timestamp and value")

        # The single non-timestamp column carries the values.
        value_col = [col for col in df.columns if col != "timestamp"][0]
        return cls(
            name=value_col,
            data=df[value_col].cast(pl.Float64).to_list(),  # Ensure float type
            # NOTE(review): convert_time_zone assumes tz-aware timestamps —
            # confirm naive input is never passed here.
            index=df["timestamp"].cast(pl.Datetime).dt.convert_time_zone("UTC").to_list(),
        )
|
|
114
|
+
|
|
115
|
+
    def to_polars(self, timezone: str = "UTC") -> pl.LazyFrame:
        """Convert to Polars LazyFrame.

        Parameters
        ----------
        timezone : str, default="UTC"
            Target timezone for the ``timestamp`` column.
        """
        # Always return LazyFrame as specified in return type
        df = pl.DataFrame(
            {
                # NOTE(review): convert_time_zone requires tz-aware values —
                # confirm self.index always holds tz-aware datetimes.
                "timestamp": pl.Series(self.index, dtype=pl.Datetime).dt.convert_time_zone(
                    timezone
                ),
                # Unnamed series fall back to the "total" column name.
                "total" if self.name is None else self.name: pl.Series(self.data, dtype=pl.Float64),
            }
        )
        return df.lazy()
|
|
127
|
+
|
|
94
128
|
|
|
95
129
|
class TimeDataFrame(TimeSeriesBase):
|
|
96
130
|
"""Time series data with multiple columns."""
|
|
97
131
|
|
|
98
132
|
columns: list[str]
|
|
99
|
-
data: list[list[float]]
|
|
133
|
+
data: list[list[float | None]]
|
|
100
134
|
|
|
101
135
|
@classmethod
|
|
102
136
|
def from_pandas(cls, data: pd.DataFrame) -> Self:
|
|
103
137
|
"""Create from a Pandas DataFrame."""
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
138
|
+
# Cast values to float | None
|
|
139
|
+
values = [
|
|
140
|
+
[float(x) if pd.notnull(x) else None for x in row] for row in data.values.tolist()
|
|
141
|
+
]
|
|
142
|
+
return cls(columns=data.columns.tolist(), data=values, index=data.index.tolist())
|
|
107
143
|
|
|
108
144
|
def to_pandas(self, timezone: str = "UTC") -> pd.DataFrame:
|
|
109
145
|
"""Convert to a Pandas DataFrame."""
|
|
@@ -114,15 +150,15 @@ class TimeDataFrame(TimeSeriesBase):
|
|
|
114
150
|
@classmethod
|
|
115
151
|
def from_timeseries(cls, data: list[TimeSeries]) -> Self:
|
|
116
152
|
"""Create from a list of TimeSeries objects."""
|
|
117
|
-
return cls
|
|
118
|
-
columns=[series.name for series in data],
|
|
119
|
-
data=[series.data for series in data],
|
|
153
|
+
return cls(
|
|
154
|
+
columns=[series.name or "" for series in data], # Handle None names
|
|
155
|
+
data=[series.data for series in data], # Pass list of value lists
|
|
120
156
|
index=data[0].index,
|
|
121
157
|
)
|
|
122
158
|
|
|
123
159
|
def to_timeseries(self) -> list[TimeSeries]:
|
|
124
160
|
"""Convert to a list of TimeSeries objects."""
|
|
125
161
|
return [
|
|
126
|
-
TimeSeries(name=
|
|
127
|
-
for
|
|
162
|
+
TimeSeries(name=col, data=[row[i] for row in self.data], index=self.index)
|
|
163
|
+
for i, col in enumerate(self.columns)
|
|
128
164
|
]
|
openenergyid/mvlr/main.py
CHANGED
|
@@ -9,6 +9,7 @@ def find_best_mvlr(
|
|
|
9
9
|
data: MultiVariableRegressionInput,
|
|
10
10
|
) -> MultiVariableRegressionResult:
|
|
11
11
|
"""Cycle through multiple granularities and return the best model."""
|
|
12
|
+
best_rsquared = 0
|
|
12
13
|
for granularity in data.granularities:
|
|
13
14
|
frame = data.data_frame()
|
|
14
15
|
frame = resample_input_data(data=frame, granularity=granularity)
|
|
@@ -17,7 +18,7 @@ def find_best_mvlr(
|
|
|
17
18
|
y=data.dependent_variable,
|
|
18
19
|
granularity=granularity,
|
|
19
20
|
allow_negative_predictions=data.allow_negative_predictions,
|
|
20
|
-
single_use_exog_prefixes=data.single_use_exog_prefixes,
|
|
21
|
+
single_use_exog_prefixes=data.single_use_exog_prefixes or [],
|
|
21
22
|
exogs__disallow_negative_coefficient=data.get_disallowed_negative_coefficients(),
|
|
22
23
|
)
|
|
23
24
|
mvlr.do_analysis()
|
|
@@ -27,4 +28,7 @@ def find_best_mvlr(
|
|
|
27
28
|
max_pvalues=data.validation_parameters.pvalues,
|
|
28
29
|
):
|
|
29
30
|
return MultiVariableRegressionResult.from_mvlr(mvlr)
|
|
30
|
-
|
|
31
|
+
best_rsquared = max(best_rsquared, mvlr.fit.rsquared_adj)
|
|
32
|
+
raise ValueError(
|
|
33
|
+
f"No valid model found. Best R²: {best_rsquared:.3f} (need ≥{data.validation_parameters.rsquared})"
|
|
34
|
+
)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
Metadata-Version: 2.
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
2
|
Name: openenergyid
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.21
|
|
4
4
|
Summary: Open Source Python library for energy analytics and simulations
|
|
5
5
|
Project-URL: Homepage, https://energyid.eu
|
|
6
6
|
Project-URL: Repository, https://github.com/EnergieID/OpenEnergyID
|
|
@@ -47,4 +47,4 @@ Description-Content-Type: text/markdown
|
|
|
47
47
|
|
|
48
48
|
Open Source Python library for energy data analytics and simulations
|
|
49
49
|
|
|
50
|
-
[*more info for developers*](DEVELOPERS.md)
|
|
50
|
+
[*more info for developers*](DEVELOPERS.md)
|
|
@@ -1,14 +1,18 @@
|
|
|
1
|
-
openenergyid/__init__.py,sha256=
|
|
1
|
+
openenergyid/__init__.py,sha256=LOByZ5xMdSMme37T7DEKZJ_57qDoXhUNzP7uotgNx7w,193
|
|
2
2
|
openenergyid/const.py,sha256=D-xUnUyVuLmphClkePgxpFP6z0RDhw_6m7rX0BHBgrw,823
|
|
3
3
|
openenergyid/enums.py,sha256=jdw4CB1gkisx0re_SesrTEyh_T-UxYp6uieE7iYlHdA,357
|
|
4
|
-
openenergyid/models.py,sha256=
|
|
4
|
+
openenergyid/models.py,sha256=IA6me6dsXH4mjBzewzHcs7U4jev07JX3pCadEcugeEI,5953
|
|
5
|
+
openenergyid/baseload/__init__.py,sha256=LXgnCnoSB1fyknMvkAv8nbtYczqW8Od_N2v_bp-LYVw,437
|
|
6
|
+
openenergyid/baseload/analysis.py,sha256=GAma3G3jE5ZZwxV3fL3lBlw3DDncXyAKgpB9XfNMibI,7520
|
|
7
|
+
openenergyid/baseload/exceptions.py,sha256=uPPQlFmOikp3wuwdVxj3Mx-45TzPkLF86rKMFjT5qB4,250
|
|
8
|
+
openenergyid/baseload/models.py,sha256=W_WCcdLdkbySH7o5adLE7_txXLZsAVTjJkwzjepwN1Y,917
|
|
5
9
|
openenergyid/capacity/__init__.py,sha256=1En96HlPV8kd1hOJO9RjRbXNInp5ZSkmjsjp0jfZlcQ,221
|
|
6
10
|
openenergyid/capacity/main.py,sha256=G6_EtXs1k_W-fxS33pFrCNKajuH81skdI32zp5RX9bI,3674
|
|
7
11
|
openenergyid/capacity/models.py,sha256=qi0IFyF_QOVleSzN8g0U2Fzqcc9ZDfNKt8oteFLY6Q0,832
|
|
8
|
-
openenergyid/dyntar/__init__.py,sha256=
|
|
9
|
-
openenergyid/dyntar/const.py,sha256=
|
|
10
|
-
openenergyid/dyntar/main.py,sha256=
|
|
11
|
-
openenergyid/dyntar/models.py,sha256=
|
|
12
|
+
openenergyid/dyntar/__init__.py,sha256=lUrk7ktS7yAqiafRHFoBE0RvFSI9mzDoO37diwLHuBg,495
|
|
13
|
+
openenergyid/dyntar/const.py,sha256=eJJV9VfpHlS9vWV47DWQkS3ICIXWhDmG4cU-ofbZJ3Q,1100
|
|
14
|
+
openenergyid/dyntar/main.py,sha256=i8EkayRicnMhG66cyrxGwUumFx3UGe7KDSImfFqmK04,10638
|
|
15
|
+
openenergyid/dyntar/models.py,sha256=lI4IjdAFallhsCqbw-EbBPbmk0g2MACgZnmMtTX7Pq0,3452
|
|
12
16
|
openenergyid/energysharing/__init__.py,sha256=A4JfrUYf-hBCzhUm0qL1GGlNMvpO8OwXJo80dJxFIvw,274
|
|
13
17
|
openenergyid/energysharing/const.py,sha256=X2zEPtTlsmZ66w6RmLS_h8NmdzObAEi5N6-0yrLN5V4,219
|
|
14
18
|
openenergyid/energysharing/data_formatting.py,sha256=Kwuhyn6ao_8Brdm9frlA6VzYOqimNYZsRbYwNXnE7yc,2583
|
|
@@ -16,10 +20,10 @@ openenergyid/energysharing/main.py,sha256=QKrtDyAlmKj0qtlqlUMjTJujQeKBK9U1_W80-R
|
|
|
16
20
|
openenergyid/energysharing/models.py,sha256=-FedTqWqoi7AYrbI4S_pX0bMScrbZxncQ21CXFz2cXM,2526
|
|
17
21
|
openenergyid/mvlr/__init__.py,sha256=Glrc218oqa8tq_Y2G9LXaSoN4Yba-vsjXUi9r9iPzaY,471
|
|
18
22
|
openenergyid/mvlr/helpers.py,sha256=Uzbfrj3IpH26wA206KOl0hNucKE-n9guJNC_EROBVKA,983
|
|
19
|
-
openenergyid/mvlr/main.py,sha256=
|
|
23
|
+
openenergyid/mvlr/main.py,sha256=Daj9UjcX70WETRrKu3QY-1LfMRkKP8Wvu4Ted-Smwzs,1491
|
|
20
24
|
openenergyid/mvlr/models.py,sha256=XvkViOLlYqi0ffgF3AD4Jvk3yL05gsoKdKgBAsGJ7L4,8581
|
|
21
25
|
openenergyid/mvlr/mvlr.py,sha256=F7WvWnZQtqUmK1vsguemsn9n8pDDk3tQ1weOlv-bo0c,18626
|
|
22
|
-
openenergyid-0.1.
|
|
23
|
-
openenergyid-0.1.
|
|
24
|
-
openenergyid-0.1.
|
|
25
|
-
openenergyid-0.1.
|
|
26
|
+
openenergyid-0.1.21.dist-info/METADATA,sha256=V1ysO8oMVbxB2ZTEOdHgXcG8P7erlagleMPPpe_efjc,2478
|
|
27
|
+
openenergyid-0.1.21.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
|
|
28
|
+
openenergyid-0.1.21.dist-info/licenses/LICENSE,sha256=NgRdcNHwyXVCXZ8sJwoTp0DCowThJ9LWWl4xhbV1IUY,1074
|
|
29
|
+
openenergyid-0.1.21.dist-info/RECORD,,
|
|
File without changes
|