kpower-forecast 2026.2.0-py3-none-any.whl → 2026.2.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kpower_forecast/__init__.py +1 -1
- kpower_forecast/core.py +187 -27
- kpower_forecast/storage.py +3 -2
- kpower_forecast/utils.py +27 -6
- kpower_forecast/weather_client.py +30 -23
- {kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/METADATA +22 -2
- kpower_forecast-2026.2.2.dist-info/RECORD +9 -0
- kpower_forecast-2026.2.0.dist-info/RECORD +0 -9
- {kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/WHEEL +0 -0
- {kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/licenses/LICENSE +0 -0
kpower_forecast/__init__.py
CHANGED
kpower_forecast/core.py
CHANGED
@@ -1,16 +1,27 @@
 import logging
-from typing import Literal
+from typing import List, Literal, cast
 
 import pandas as pd
 from prophet import Prophet
+from prophet.diagnostics import cross_validation, performance_metrics
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 
 from .storage import ModelStorage
-from .utils import calculate_solar_elevation
+from .utils import calculate_solar_elevation, get_clear_sky_ghi
 from .weather_client import WeatherClient
 
 logger = logging.getLogger(__name__)
 
+
+class PredictionInterval(BaseModel):
+    timestamp: pd.Timestamp
+    expected_kwh: float
+    lower_bound_kwh: float  # P10
+    upper_bound_kwh: float  # P90
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+
 class KPowerConfig(BaseModel):
     model_id: str
     latitude: float = Field(..., ge=-90, le=90)
@@ -19,6 +30,8 @@ class KPowerConfig(BaseModel):
     interval_minutes: int = Field(15)
     forecast_type: Literal["solar", "consumption"] = "solar"
     heat_pump_mode: bool = False
+    changepoint_prior_scale: float = 0.05
+    seasonality_prior_scale: float = 10.0
 
     @field_validator("interval_minutes")
     @classmethod
@@ -26,9 +39,10 @@ class KPowerConfig(BaseModel):
         if v not in (15, 60):
             raise ValueError("interval_minutes must be 15 or 60")
         return v
-
+
     model_config = ConfigDict(arbitrary_types_allowed=True)
 
+
 class KPowerForecast:
     def __init__(
         self,
@@ -49,12 +63,40 @@ class KPowerForecast:
             forecast_type=forecast_type,
             heat_pump_mode=heat_pump_mode,
         )
-
+
         self.weather_client = WeatherClient(
             lat=self.config.latitude, lon=self.config.longitude
         )
         self.storage = ModelStorage(storage_path=self.config.storage_path)
 
+    def _prepare_features(self, df: pd.DataFrame) -> pd.DataFrame:
+        """
+        Add physics-informed features and rolling windows.
+        """
+        df = df.copy()
+        df["ds"] = pd.to_datetime(df["ds"], utc=True)
+
+        # 1. Physics: Clear Sky GHI
+        logger.info("Calculating physics-informed Clear Sky GHI...")
+        # Ensure index is DatetimeIndex for pvlib
+        temp_df = df.set_index("ds")
+        if not isinstance(temp_df.index, pd.DatetimeIndex):
+            raise ValueError("Index must be DatetimeIndex")
+
+        df["clear_sky_ghi"] = get_clear_sky_ghi(
+            self.config.latitude, self.config.longitude, temp_df.index
+        ).values
+
+        # 2. Rolling Cloud Cover (3-hour window)
+        # 3 hours = 180 minutes. Window depends on interval_minutes.
+        window_size = 180 // self.config.interval_minutes
+        logger.info(f"Adding rolling cloud cover (window={window_size})...")
+        df["rolling_cloud_cover"] = (
+            df["cloud_cover"].rolling(window=window_size, min_periods=1).mean()
+        )
+
+        return df
+
     def train(self, history_df: pd.DataFrame, force: bool = False):
         """
         Trains the Prophet model using the provided history.
@@ -69,41 +111,115 @@ class KPowerForecast:
         df = history_df.copy()
         if "ds" not in df.columns or "y" not in df.columns:
             raise ValueError("history_df must contain 'ds' and 'y' columns")
-
+
         df["ds"] = pd.to_datetime(df["ds"], utc=True)
         df = df.sort_values("ds")
-
+
         start_date = df["ds"].min().date()
         end_date = df["ds"].max().date()
-
+
         weather_df = self.weather_client.fetch_historical(start_date, end_date)
         weather_df = self.weather_client.resample_weather(
             weather_df, self.config.interval_minutes
         )
-
+
         df = pd.merge(df, weather_df, on="ds", how="left")
-
+
         weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
         df[weather_cols] = df[weather_cols].interpolate(method="linear").bfill().ffill()
-
+
         if df[weather_cols].isnull().any().any():
             df = df.dropna(subset=weather_cols)
 
-
-
+        # Feature Engineering
+        df = self._prepare_features(df)
+
+        # Initialize Prophet with tuned hyperparameters
+        m = Prophet(
+            changepoint_prior_scale=self.config.changepoint_prior_scale,
+            seasonality_prior_scale=self.config.seasonality_prior_scale,
+            interval_width=0.8,  # Used for P10/P90 (80% interval)
+        )
+
         if self.config.forecast_type == "solar":
             m.add_regressor("temperature_2m")
-            m.add_regressor("
+            m.add_regressor("rolling_cloud_cover")
             m.add_regressor("shortwave_radiation")
+            m.add_regressor("clear_sky_ghi")
         elif self.config.forecast_type == "consumption":
             if self.config.heat_pump_mode:
                 m.add_regressor("temperature_2m")
-
+
         logger.info(f"Training Prophet model for {self.config.forecast_type}...")
         m.fit(df)
-
+
         self.storage.save_model(m, self.config.model_id)
 
+    def tune_model(self, history_df: pd.DataFrame, days: int = 30):
+        """
+        Find optimal hyperparameters using cross-validation.
+        """
+        logger.info(f"Tuning model hyperparameters using {days} days of history...")
+
+        # We need to prepare data first as cross_validation needs the regressors
+        # This is a bit complex as we need weather data for history_df
+        # For simplicity, we assume train() logic but without fitting.
+
+        # Prepare data (duplicated logic from train, could be refactored)
+        df = history_df.copy()
+        df["ds"] = pd.to_datetime(df["ds"], utc=True)
+        start_date = df["ds"].min().date()
+        end_date = df["ds"].max().date()
+        weather_df = self.weather_client.fetch_historical(start_date, end_date)
+        weather_df = self.weather_client.resample_weather(
+            weather_df, self.config.interval_minutes
+        )
+        df = pd.merge(df, weather_df, on="ds", how="left")
+        weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
+        df[weather_cols] = df[weather_cols].interpolate(method="linear").bfill().ffill()
+        df = self._prepare_features(df.dropna(subset=weather_cols))
+
+        param_grid = {
+            "changepoint_prior_scale": [0.001, 0.05, 0.5],
+            "seasonality_prior_scale": [0.01, 1.0, 10.0],
+        }
+
+        # Simplified tuning loop
+        best_params = {}
+        min_rmse = float("inf")
+
+        for cps in param_grid["changepoint_prior_scale"]:
+            for sps in param_grid["seasonality_prior_scale"]:
+                m = Prophet(changepoint_prior_scale=cps, seasonality_prior_scale=sps)
+                if self.config.forecast_type == "solar":
+                    m.add_regressor("temperature_2m")
+                    m.add_regressor("rolling_cloud_cover")
+                    m.add_regressor("shortwave_radiation")
+                    m.add_regressor("clear_sky_ghi")
+                elif (
+                    self.config.forecast_type == "consumption"
+                    and self.config.heat_pump_mode
+                ):
+                    m.add_regressor("temperature_2m")
+
+                m.fit(df)
+
+                # Cross-validation
+                # initial should be at least 3x horizon
+                df_cv = cross_validation(
+                    m, initial=f"{days // 2} days", period="5 days", horizon="5 days"
+                )
+                df_p = performance_metrics(df_cv, rolling_window=1)
+                rmse = df_p["rmse"].values[0]
+
+                if rmse < min_rmse:
+                    min_rmse = rmse
+                    best_params = {"cps": cps, "sps": sps}
+
+        logger.info(f"Best params found: {best_params} with RMSE {min_rmse}")
+        self.config.changepoint_prior_scale = best_params["cps"]
+        self.config.seasonality_prior_scale = best_params["sps"]
+
     def predict(self, days: int = 7) -> pd.DataFrame:
         """
         Generates forecast for the next 'days' days.
@@ -113,30 +229,74 @@ class KPowerForecast:
             raise RuntimeError(
                 f"Model {self.config.model_id} not found. Please run train() first."
             )
-
+
         weather_forecast = self.weather_client.fetch_forecast(days=days)
         weather_forecast = self.weather_client.resample_weather(
             weather_forecast, self.config.interval_minutes
         )
-
+
         future = pd.DataFrame({"ds": weather_forecast["ds"]})
         future = pd.merge(future, weather_forecast, on="ds", how="left")
-
+
         weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
         future[weather_cols] = (
             future[weather_cols].interpolate(method="linear").bfill().ffill()
         )
-
+
+        # Feature Engineering
+        future = self._prepare_features(future)
+
         forecast = m.predict(future)
-
-
+
+        # Night Mask & Clipping
         if self.config.forecast_type == "solar":
             logger.info("Applying night mask for solar forecast...")
             elevations = calculate_solar_elevation(
-                self.config.latitude, self.config.longitude,
+                self.config.latitude, self.config.longitude, forecast["ds"]
             )
-
-
-
-
-
+            forecast.loc[elevations < 0, ["yhat", "yhat_lower", "yhat_upper"]] = 0
+
+        for col in ["yhat", "yhat_lower", "yhat_upper"]:
+            forecast[col] = forecast[col].clip(lower=0)
+
+        return cast(pd.DataFrame, forecast)
+
+    def get_prediction_intervals(self, days: int = 7) -> List[PredictionInterval]:
+        """
+        Returns prediction intervals for EMS.
+        """
+        forecast = self.predict(days=days)
+
+        intervals = []
+        for _, row in forecast.iterrows():
+            intervals.append(
+                PredictionInterval(
+                    timestamp=row["ds"],
+                    expected_kwh=row["yhat"],
+                    lower_bound_kwh=row["yhat_lower"],
+                    upper_bound_kwh=row["yhat_upper"],
+                )
+            )
+        return intervals
+
+    def get_surplus_probability(
+        self, threshold_kwh: float, days: int = 7
+    ) -> pd.DataFrame:
+        """
+        Returns probability of exceeding threshold_kwh.
+        Prophet doesn't provide direct probabilities, but we can estimate
+        from the uncertainty interval (yhat_upper - yhat_lower)
+        assuming normal distribution.
+        """
+        forecast = self.predict(days=days)
+
+        # Estimate sigma from 80% interval (approx 1.28 * sigma)
+        sigma = (forecast["yhat_upper"] - forecast["yhat_lower"]) / (2 * 1.28)
+        sigma = sigma.replace(0, 1e-9)  # Avoid div by zero
+
+        from scipy.stats import norm
+
+        z_score = (threshold_kwh - forecast["yhat"]) / sigma
+        prob_exceed = 1 - norm.cdf(z_score)
+
+        return pd.DataFrame({"ds": forecast["ds"], "surplus_prob": prob_exceed})
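The surplus-probability math added here is compact enough to verify by hand: Prophet is constructed with `interval_width=0.8`, so `yhat_lower`/`yhat_upper` span an 80% interval (roughly ±1.28σ), and the exceedance probability follows from the normal CDF. A minimal standalone sketch of that arithmetic — the kWh values are invented, only the formula mirrors `get_surplus_probability`:

```python
# Illustrative check of the normal-approximation used by get_surplus_probability.
# The forecast values below are made up; the package takes them from Prophet's output.
from scipy.stats import norm

yhat, yhat_lower, yhat_upper = 3.2, 1.8, 4.6   # kWh for one hypothetical slot
threshold_kwh = 2.5

# An 80% interval spans roughly +/- 1.28 sigma around the mean.
sigma = (yhat_upper - yhat_lower) / (2 * 1.28)
z = (threshold_kwh - yhat) / sigma
prob_exceed = 1 - norm.cdf(z)

print(f"P(production > {threshold_kwh} kWh) ~= {prob_exceed:.2f}")  # ~0.74 here
```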
kpower_forecast/storage.py
CHANGED
@@ -8,6 +8,7 @@ from prophet.serialize import model_from_json, model_to_json
 
 logger = logging.getLogger(__name__)
 
+
 class ModelStorage:
     def __init__(self, storage_path: str):
         self.storage_path = Path(storage_path)
@@ -50,8 +51,8 @@
             return model_from_json(json.load(f))
         except Exception as e:
             logger.error(f"Failed to load model {model_id}: {e}")
-            # If load fails, we might want to return None to trigger retraining,
-            # or raise to alert the user.
+            # If load fails, we might want to return None to trigger retraining,
+            # or raise to alert the user.
             # Given "production-grade", maybe explicit failure is safer
             # than silent fallback?
             # But the prompt says "If a model exists, load it...
kpower_forecast/utils.py
CHANGED
@@ -1,8 +1,9 @@
 import datetime
-from typing import List, Union
+from typing import List, Union, cast
 
 import numpy as np
 import pandas as pd
+from pvlib.location import Location
 from pysolar.solar import get_altitude
 
 
@@ -12,13 +13,13 @@ def calculate_solar_elevation(
     """
     Calculate solar elevation angles (altitude) for a list of times
     at a specific location.
-
+
     Args:
         lat: Latitude in decimal degrees.
         lon: Longitude in decimal degrees.
-        times: List of datetime objects or pandas DatetimeIndex.
+        times: List of datetime objects or pandas DatetimeIndex.
             Must be timezone-aware or UTC.
-
+
     Returns:
         Numpy array of elevation angles in degrees.
     """
@@ -28,9 +29,29 @@
         if t.tzinfo is None:
            # Assume UTC if naive, though strictly we should enforce awareness
            t = t.replace(tzinfo=datetime.timezone.utc)
-
+
        # get_altitude returns degrees
        alt = get_altitude(lat, lon, t)
        elevations.append(alt)
-
+
     return np.array(elevations)
+
+
+def get_clear_sky_ghi(lat: float, lon: float, times: pd.DatetimeIndex) -> pd.Series:
+    """
+    Calculate Theoretical Clear Sky GHI (Global Horizontal Irradiance)
+    using pvlib.
+
+    Args:
+        lat: Latitude.
+        lon: Longitude.
+        times: Pandas DatetimeIndex (must be timezone aware).
+
+    Returns:
+        Pandas Series of GHI values.
+    """
+    location = Location(lat, lon)
+    # get_clearsky returns GHI, DNI, DHI. We only need GHI.
+    # Ineichen is the default model.
+    clearsky = location.get_clearsky(times)
+    return cast(pd.Series, clearsky["ghi"])
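The new `get_clear_sky_ghi` delegates to pvlib's `Location.get_clearsky`, which returns `ghi`, `dni` and `dhi` columns (Ineichen model by default). A small usage sketch with arbitrary coordinates and a timezone-aware 15-minute index:

```python
# Quick sanity check of the pvlib call behind get_clear_sky_ghi.
# Coordinates and date are arbitrary examples.
import pandas as pd
from pvlib.location import Location

times = pd.date_range(
    "2026-06-21 00:00", "2026-06-21 23:45", freq="15min", tz="UTC"
)                                       # index must be timezone-aware
loc = Location(52.52, 13.405)           # example: Berlin

clearsky = loc.get_clearsky(times)      # DataFrame with ghi, dni, dhi columns
ghi = clearsky["ghi"]                   # W/m^2; zero at night, peaks near solar noon
print(ghi.max())
```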
kpower_forecast/weather_client.py
CHANGED

@@ -8,6 +8,7 @@ from pydantic import BaseModel
 
 logger = logging.getLogger(__name__)
 
+
 class WeatherConfig(BaseModel):
     base_url: str = "https://api.open-meteo.com/v1/forecast"
     # Historical API is different, usually:
@@ -19,27 +20,28 @@ class WeatherConfig(BaseModel):
     # For simplicity, let's allow passing the base_url.
     archive_url: str = "https://archive-api.open-meteo.com/v1/archive"
 
+
 class WeatherClient:
     def __init__(self, lat: float, lon: float, config: Optional[WeatherConfig] = None):
         self.lat = lat
         self.lon = lon
         self.config = config or WeatherConfig()
-
+
     def fetch_historical(
         self, start_date: datetime.date, end_date: datetime.date
     ) -> pd.DataFrame:
         """
         Fetch historical weather data for training.
         """
-        params = {
+        params: dict[str, str | float | list[str]] = {
             "latitude": self.lat,
             "longitude": self.lon,
             "start_date": start_date.isoformat(),
             "end_date": end_date.isoformat(),
             "hourly": ["temperature_2m", "cloud_cover", "shortwave_radiation"],
-            "timezone": "UTC"
+            "timezone": "UTC",
         }
-
+
         try:
             logger.info(f"Fetching historical weather from {self.config.archive_url}")
             response = requests.get(self.config.archive_url, params=params, timeout=10)
@@ -54,14 +56,14 @@
         """
         Fetch weather forecast for prediction.
         """
-        params = {
+        params: dict[str, str | float | int | list[str]] = {
             "latitude": self.lat,
             "longitude": self.lon,
             "hourly": ["temperature_2m", "cloud_cover", "shortwave_radiation"],
             "forecast_days": days,
-            "timezone": "UTC"
+            "timezone": "UTC",
         }
-
+
         try:
             logger.info(f"Fetching forecast weather from {self.config.base_url}")
             response = requests.get(self.config.base_url, params=params, timeout=10)
@@ -76,18 +78,20 @@
         hourly = data.get("hourly", {})
         if not hourly:
             raise ValueError("No hourly data in response")
-
-        df = pd.DataFrame(
-
-
-
-
-
-
+
+        df = pd.DataFrame(
+            {
+                "ds": pd.to_datetime(hourly["time"], utc=True),
+                "temperature_2m": hourly["temperature_2m"],
+                "cloud_cover": hourly["cloud_cover"],
+                "shortwave_radiation": hourly["shortwave_radiation"],
+            }
+        )
+
+        # Open-Meteo returns nulls sometimes, fill or drop?
         # Linear interpolation is usually safe for weather gaps
-        df = df.interpolate(method=
-
+        df = df.interpolate(method="linear").bfill().ffill()
+
         return df
 
     def resample_weather(self, df: pd.DataFrame, interval_minutes: int) -> pd.DataFrame:
@@ -97,19 +101,22 @@
         """
         if df.empty:
             return df
-
+
         df = df.set_index("ds").sort_index()
-
+
         # Check if we need resampling
+        if not isinstance(df.index, pd.DatetimeIndex):
+            raise ValueError("Index must be DatetimeIndex")
+
         current_freq = pd.infer_freq(df.index)
         target_freq = f"{interval_minutes}min"
-
+
         if current_freq == target_freq:
             return df.reset_index()
-
+
         # Resample and interpolate
         # Cubic is good for temperature/radiation curves
         df_resampled = df.resample(target_freq).interpolate(method="cubic")
-
+
         # Reset index to get 'ds' column back
         return df_resampled.reset_index()
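The client above is a thin wrapper around Open-Meteo's archive and forecast endpoints. A standalone sketch of the historical request it builds — same URL and `hourly` variables as in the diff, but with placeholder coordinates and dates, and with error handling trimmed:

```python
# Rough standalone version of the archive request WeatherClient.fetch_historical builds.
# Parameters mirror the diff; coordinates and date range are placeholders.
import pandas as pd
import requests

params = {
    "latitude": 52.52,
    "longitude": 13.405,
    "start_date": "2026-01-01",
    "end_date": "2026-01-31",
    "hourly": ["temperature_2m", "cloud_cover", "shortwave_radiation"],
    "timezone": "UTC",
}
resp = requests.get(
    "https://archive-api.open-meteo.com/v1/archive", params=params, timeout=10
)
resp.raise_for_status()
hourly = resp.json().get("hourly", {})

# Same frame layout the client produces before interpolation/resampling.
df = pd.DataFrame(
    {
        "ds": pd.to_datetime(hourly["time"], utc=True),
        "temperature_2m": hourly["temperature_2m"],
        "cloud_cover": hourly["cloud_cover"],
        "shortwave_radiation": hourly["shortwave_radiation"],
    }
)
```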
{kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/METADATA
CHANGED

@@ -1,8 +1,8 @@
 Metadata-Version: 2.4
 Name: kpower-forecast
-Version: 2026.2.
+Version: 2026.2.2
 Summary: Solar production and power consumption forecasting package.
-Author-email:
+Author-email: "KSoft.TECH OSS" <oss@ksoft.tech>
 License: AGPL-3.0
 License-File: LICENSE
 Keywords: energy,forecast,prophet,solar,weather
@@ -16,14 +16,19 @@ Requires-Dist: cmdstanpy>=1.2.0
 Requires-Dist: numpy>=1.26.0
 Requires-Dist: pandas>=2.2.0
 Requires-Dist: prophet>=1.1.5
+Requires-Dist: pvlib>=0.11.0
 Requires-Dist: pydantic>=2.6.0
 Requires-Dist: pysolar>=0.8.0
 Requires-Dist: pytz>=2024.1
 Requires-Dist: requests>=2.31.0
 Provides-Extra: dev
 Requires-Dist: mypy>=1.8.0; extra == 'dev'
+Requires-Dist: pandas-stubs; extra == 'dev'
+Requires-Dist: pre-commit; extra == 'dev'
 Requires-Dist: pytest>=8.0.0; extra == 'dev'
 Requires-Dist: ruff>=0.2.0; extra == 'dev'
+Requires-Dist: scipy; extra == 'dev'
+Requires-Dist: scipy-stubs; extra == 'dev'
 Requires-Dist: types-pytz; extra == 'dev'
 Requires-Dist: types-requests; extra == 'dev'
 Description-Content-Type: text/markdown
@@ -111,6 +116,21 @@ kp_cons = KPowerForecast(
 
 ---
 
+## 🔢 Versioning
+
+This project follows a custom **Date-Based Versioning** scheme:
+`YYYY.MM.Patch` (e.g., `2026.2.1`)
+
+- **YYYY**: Year of release.
+- **MM**: Month of release (no leading zero, 1-12).
+- **Patch**: Incremental counter for releases within the same month.
+
+### Enforcement
+- **CI Validation**: Every Pull Request is checked against `scripts/validate_version.py` to ensure adherence.
+- **Consistency**: Both `pyproject.toml` and `src/kpower_forecast/__init__.py` must match exactly.
+
+---
+
 ## 🧪 Development & Testing
 
 We use [uv](https://github.com/astral-sh/uv) for lightning-fast dependency management.
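The wheel does not ship `scripts/validate_version.py`, but the `YYYY.MM.Patch` rule described in the new README section is simple to illustrate. A hypothetical check matching that scheme might look like:

```python
# Hypothetical illustration of the YYYY.MM.Patch rule described in the README;
# the real scripts/validate_version.py is not included in this wheel.
import re

# Four-digit year, month 1-12 with no leading zero, non-negative patch counter.
VERSION_RE = re.compile(r"^\d{4}\.(?:[1-9]|1[0-2])\.\d+$")

def is_valid(version: str) -> bool:
    return VERSION_RE.fullmatch(version) is not None

assert is_valid("2026.2.2")
assert not is_valid("2026.02.2")   # leading zero in the month is rejected
```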
kpower_forecast-2026.2.2.dist-info/RECORD
ADDED

@@ -0,0 +1,9 @@
+kpower_forecast/__init__.py,sha256=K5T2i1xP5ZNLgeTdRpEd_E7_WmhaoBTiHHur-GTfY9o,88
+kpower_forecast/core.py,sha256=-cIHwUJb-2qqVfCAlS8Nb1ONcsCEDnVDk4Ljes7U0Q8,11237
+kpower_forecast/storage.py,sha256=3dwejuB2QKP1XMXcKR8nCAYFTCzylSW4jIS9xfjyaZM,2225
+kpower_forecast/utils.py,sha256=-X6e58osfN2z3oBcZHth3YXybgWNf8Ep2o_nuqsT1OM,1670
+kpower_forecast/weather_client.py,sha256=T9bi_rT0LWYVx8ug7a27GwzY-KTPlMdxI9J4SwNnfdw,4350
+kpower_forecast-2026.2.2.dist-info/METADATA,sha256=u79c7nhk3haudnOuuRyuRhYPdOI4si-6RNjU8Ywagw8,5198
+kpower_forecast-2026.2.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+kpower_forecast-2026.2.2.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+kpower_forecast-2026.2.2.dist-info/RECORD,,

kpower_forecast-2026.2.0.dist-info/RECORD
REMOVED

@@ -1,9 +0,0 @@
-kpower_forecast/__init__.py,sha256=cHOO5TdFOeVt1rmNUDwzRDX2ie2KwsBog6aV2D23APg,88
-kpower_forecast/core.py,sha256=aE2b0XBj5wcYo1l6hgTP0roWOARtsVYkj4Uy5DPHqT8,5027
-kpower_forecast/storage.py,sha256=GBpqiirt3QG9RF_FMQ1SjKio2FR0VtkukWqtbYYaf_g,2226
-kpower_forecast/utils.py,sha256=0ID3XKpbxUvphiR7apg7Mzy72c-bDtQ8AhGtVcaoN8g,1079
-kpower_forecast/weather_client.py,sha256=cRp2lmOfvrft2GZl3nM161OqiE5I8lnKrtJTJi7n2uw,4210
-kpower_forecast-2026.2.0.dist-info/METADATA,sha256=6oFS_xfcTu_UsfB1est4MjMJea_i_X1sS1vtEodDr_Y,4501
-kpower_forecast-2026.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-kpower_forecast-2026.2.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-kpower_forecast-2026.2.0.dist-info/RECORD,,
{kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/WHEEL
File without changes

{kpower_forecast-2026.2.0.dist-info → kpower_forecast-2026.2.2.dist-info}/licenses/LICENSE
File without changes