kpower-forecast 2026.2.0.tar.gz → 2026.2.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.github/workflows/ci.yml +4 -0
  2. kpower_forecast-2026.2.2/.pre-commit-config.yaml +30 -0
  3. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/PKG-INFO +22 -2
  4. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/README.md +15 -0
  5. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/pyproject.toml +21 -2
  6. kpower_forecast-2026.2.2/scripts/validate_version.py +39 -0
  7. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/src/kpower_forecast/__init__.py +1 -1
  8. kpower_forecast-2026.2.2/src/kpower_forecast/core.py +302 -0
  9. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/src/kpower_forecast/storage.py +3 -2
  10. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/src/kpower_forecast/utils.py +27 -6
  11. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/src/kpower_forecast/weather_client.py +30 -23
  12. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/tests/test_core.py +56 -43
  13. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/tests/test_utils.py +3 -2
  14. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/uv.lock +277 -1
  15. kpower_forecast-2026.2.0/src/kpower_forecast/core.py +0 -142
  16. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
  17. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
  18. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.github/pull_request_template.md +0 -0
  19. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.github/workflows/deploy.yml +0 -0
  20. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.gitignore +0 -0
  21. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/.python-version +0 -0
  22. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/AGENTS.md +0 -0
  23. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/GEMINI.md +0 -0
  24. {kpower_forecast-2026.2.0 → kpower_forecast-2026.2.2}/LICENSE +0 -0
.github/workflows/ci.yml
@@ -28,6 +28,10 @@ jobs:
         run: |
           uv pip install --system ".[dev]"
 
+      - name: Validate Version Format
+        run: |
+          python scripts/validate_version.py
+
       - name: Lint with Ruff
         run: |
           ruff check .
.pre-commit-config.yaml
@@ -0,0 +1,30 @@
+repos:
+  - repo: local
+    hooks:
+      - id: ruff-check
+        name: ruff check
+        entry: uv run ruff check --fix
+        language: system
+        types: [python]
+        require_serial: true
+
+      - id: ruff-format
+        name: ruff format
+        entry: uv run ruff format
+        language: system
+        types: [python]
+        require_serial: true
+
+      - id: mypy
+        name: mypy
+        entry: uv run mypy src
+        language: system
+        types: [python]
+        pass_filenames: false
+
+      - id: pytest
+        name: pytest
+        entry: uv run pytest
+        language: system
+        pass_filenames: false
+        always_run: true
PKG-INFO
@@ -1,8 +1,8 @@
 Metadata-Version: 2.4
 Name: kpower-forecast
-Version: 2026.2.0
+Version: 2026.2.2
 Summary: Solar production and power consumption forecasting package.
-Author-email: KPower Team <info@kpower.example>
+Author-email: "KSoft.TECH OSS" <oss@ksoft.tech>
 License: AGPL-3.0
 License-File: LICENSE
 Keywords: energy,forecast,prophet,solar,weather
@@ -16,14 +16,19 @@ Requires-Dist: cmdstanpy>=1.2.0
 Requires-Dist: numpy>=1.26.0
 Requires-Dist: pandas>=2.2.0
 Requires-Dist: prophet>=1.1.5
+Requires-Dist: pvlib>=0.11.0
 Requires-Dist: pydantic>=2.6.0
 Requires-Dist: pysolar>=0.8.0
 Requires-Dist: pytz>=2024.1
 Requires-Dist: requests>=2.31.0
 Provides-Extra: dev
 Requires-Dist: mypy>=1.8.0; extra == 'dev'
+Requires-Dist: pandas-stubs; extra == 'dev'
+Requires-Dist: pre-commit; extra == 'dev'
 Requires-Dist: pytest>=8.0.0; extra == 'dev'
 Requires-Dist: ruff>=0.2.0; extra == 'dev'
+Requires-Dist: scipy; extra == 'dev'
+Requires-Dist: scipy-stubs; extra == 'dev'
 Requires-Dist: types-pytz; extra == 'dev'
 Requires-Dist: types-requests; extra == 'dev'
 Description-Content-Type: text/markdown
@@ -111,6 +116,21 @@ kp_cons = KPowerForecast(
 
 ---
 
+## 🔢 Versioning
+
+This project follows a custom **Date-Based Versioning** scheme:
+`YYYY.MM.Patch` (e.g., `2026.2.1`)
+
+- **YYYY**: Year of release.
+- **MM**: Month of release (no leading zero, 1-12).
+- **Patch**: Incremental counter for releases within the same month.
+
+### Enforcement
+- **CI Validation**: Every Pull Request is checked against `scripts/validate_version.py` to ensure adherence.
+- **Consistency**: Both `pyproject.toml` and `src/kpower_forecast/__init__.py` must match exactly.
+
+---
+
 ## 🧪 Development & Testing
 
 We use [uv](https://github.com/astral-sh/uv) for lightning-fast dependency management.
README.md
@@ -81,6 +81,21 @@ kp_cons = KPowerForecast(
 
 ---
 
+## 🔢 Versioning
+
+This project follows a custom **Date-Based Versioning** scheme:
+`YYYY.MM.Patch` (e.g., `2026.2.1`)
+
+- **YYYY**: Year of release.
+- **MM**: Month of release (no leading zero, 1-12).
+- **Patch**: Incremental counter for releases within the same month.
+
+### Enforcement
+- **CI Validation**: Every Pull Request is checked against `scripts/validate_version.py` to ensure adherence.
+- **Consistency**: Both `pyproject.toml` and `src/kpower_forecast/__init__.py` must match exactly.
+
+---
+
 ## 🧪 Development & Testing
 
 We use [uv](https://github.com/astral-sh/uv) for lightning-fast dependency management.
pyproject.toml
@@ -1,12 +1,12 @@
 [project]
 name = "kpower-forecast"
-version = "2026.2.0"
+version = "2026.2.2"
 description = "Solar production and power consumption forecasting package."
 readme = "README.md"
 requires-python = ">=3.13"
 license = { text = "AGPL-3.0" }
 authors = [
-    { name = "KPower Team", email = "info@kpower.example" }
+    { name = "KSoft.TECH OSS", email = "oss@ksoft.tech" }
 ]
 keywords = ["solar", "energy", "forecast", "prophet", "weather"]
 classifiers = [
@@ -25,6 +25,7 @@ dependencies = [
     "requests>=2.31.0",
     "cmdstanpy>=1.2.0",
     "pytz>=2024.1",
+    "pvlib>=0.11.0",
 ]
 
 [project.optional-dependencies]
@@ -34,8 +35,26 @@ dev = [
     "mypy>=1.8.0",
     "types-requests",
     "types-pytz",
+    "pre-commit",
+    "scipy",
+    "pandas-stubs",
+    "scipy-stubs",
 ]
 
+[tool.mypy]
+python_version = "3.13"
+warn_return_any = true
+warn_unused_configs = true
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = [
+    "prophet.*",
+    "pvlib.*",
+    "pysolar.*",
+]
+ignore_missing_imports = true
+
 [build-system]
 requires = ["hatchling", "hatch-vcs"]
 build-backend = "hatchling.build"
scripts/validate_version.py
@@ -0,0 +1,39 @@
+import re
+import sys
+from pathlib import Path
+
+# Regex for YYYY.MM.Patch (e.g., 2026.2.1)
+VERSION_PATTERN = r"^\d{4}\.(?:[1-9]|1[0-2])\.\d+$"
+
+
+def validate_version(file_path: str, pattern: str):
+    content = Path(file_path).read_text()
+    # Find version in pyproject.toml or __init__.py
+    match = re.search(pattern, content, re.MULTILINE)
+    if not match:
+        print(f"❌ Could not find version in {file_path}")
+        return False
+
+    version = match.group(1)
+    if not re.match(VERSION_PATTERN, version):
+        print(f"❌ Invalid version format in {file_path}: '{version}'")
+        print("   Expected format: YYYY.MM.Patch (e.g., 2026.2.1)")
+        return False
+
+    print(f"✅ Version {version} in {file_path} is valid.")
+    return True
+
+
+if __name__ == "__main__":
+    success = True
+    # Check pyproject.toml
+    if not validate_version("pyproject.toml", r'^version\s*=\s*"([^"]+)"'):
+        success = False
+
+    # Check __init__.py
+    pattern = r'^__version__\s*=\s*"([^"]+)"'
+    if not validate_version("src/kpower_forecast/__init__.py", pattern):
+        success = False
+
+    if not success:
+        sys.exit(1)
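
The new CI step runs this script from the repository root. The check it performs reduces to the sketch below, replayed here on in-memory strings instead of the real pyproject.toml and __init__.py; the sample strings are made up for illustration, while the two extraction regexes and VERSION_PATTERN are taken from the script above.

```python
import re

# Two-step check mirroring scripts/validate_version.py (illustrative sketch only).
VERSION_PATTERN = r"^\d{4}\.(?:[1-9]|1[0-2])\.\d+$"

samples = {
    # extraction pattern -> made-up file content
    r'^version\s*=\s*"([^"]+)"': 'name = "kpower-forecast"\nversion = "2026.2.2"\n',
    r'^__version__\s*=\s*"([^"]+)"': '__version__ = "2026.2.2"\n',
}

for extract_pattern, content in samples.items():
    match = re.search(extract_pattern, content, re.MULTILINE)  # step 1: locate the version string
    assert match is not None
    version = match.group(1)
    # step 2: enforce YYYY.MM.Patch -- "2026.2.2" passes; "2026.02.2" (leading-zero
    # month) or "2026.2" (missing patch counter) would be rejected.
    assert re.match(VERSION_PATTERN, version)
    print(f"OK: {version}")
```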
src/kpower_forecast/__init__.py
@@ -1,4 +1,4 @@
 from .core import KPowerForecast
 
 __all__ = ["KPowerForecast"]
-__version__ = "2026.2.0"
+__version__ = "2026.2.2"
src/kpower_forecast/core.py
@@ -0,0 +1,302 @@
+import logging
+from typing import List, Literal, cast
+
+import pandas as pd
+from prophet import Prophet
+from prophet.diagnostics import cross_validation, performance_metrics
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+
+from .storage import ModelStorage
+from .utils import calculate_solar_elevation, get_clear_sky_ghi
+from .weather_client import WeatherClient
+
+logger = logging.getLogger(__name__)
+
+
+class PredictionInterval(BaseModel):
+    timestamp: pd.Timestamp
+    expected_kwh: float
+    lower_bound_kwh: float  # P10
+    upper_bound_kwh: float  # P90
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+
+class KPowerConfig(BaseModel):
+    model_id: str
+    latitude: float = Field(..., ge=-90, le=90)
+    longitude: float = Field(..., ge=-180, le=180)
+    storage_path: str = "./data"
+    interval_minutes: int = Field(15)
+    forecast_type: Literal["solar", "consumption"] = "solar"
+    heat_pump_mode: bool = False
+    changepoint_prior_scale: float = 0.05
+    seasonality_prior_scale: float = 10.0
+
+    @field_validator("interval_minutes")
+    @classmethod
+    def check_interval(cls, v: int) -> int:
+        if v not in (15, 60):
+            raise ValueError("interval_minutes must be 15 or 60")
+        return v
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+
+class KPowerForecast:
+    def __init__(
+        self,
+        model_id: str,
+        latitude: float,
+        longitude: float,
+        storage_path: str = "./data",
+        interval_minutes: int = 15,
+        forecast_type: Literal["solar", "consumption"] = "solar",
+        heat_pump_mode: bool = False,
+    ):
+        self.config = KPowerConfig(
+            model_id=model_id,
+            latitude=latitude,
+            longitude=longitude,
+            storage_path=storage_path,
+            interval_minutes=interval_minutes,
+            forecast_type=forecast_type,
+            heat_pump_mode=heat_pump_mode,
+        )
+
+        self.weather_client = WeatherClient(
+            lat=self.config.latitude, lon=self.config.longitude
+        )
+        self.storage = ModelStorage(storage_path=self.config.storage_path)
+
+    def _prepare_features(self, df: pd.DataFrame) -> pd.DataFrame:
+        """
+        Add physics-informed features and rolling windows.
+        """
+        df = df.copy()
+        df["ds"] = pd.to_datetime(df["ds"], utc=True)
+
+        # 1. Physics: Clear Sky GHI
+        logger.info("Calculating physics-informed Clear Sky GHI...")
+        # Ensure index is DatetimeIndex for pvlib
+        temp_df = df.set_index("ds")
+        if not isinstance(temp_df.index, pd.DatetimeIndex):
+            raise ValueError("Index must be DatetimeIndex")
+
+        df["clear_sky_ghi"] = get_clear_sky_ghi(
+            self.config.latitude, self.config.longitude, temp_df.index
+        ).values
+
+        # 2. Rolling Cloud Cover (3-hour window)
+        # 3 hours = 180 minutes. Window depends on interval_minutes.
+        window_size = 180 // self.config.interval_minutes
+        logger.info(f"Adding rolling cloud cover (window={window_size})...")
+        df["rolling_cloud_cover"] = (
+            df["cloud_cover"].rolling(window=window_size, min_periods=1).mean()
+        )
+
+        return df
+
+    def train(self, history_df: pd.DataFrame, force: bool = False):
+        """
+        Trains the Prophet model using the provided history.
+        """
+        if not force and self.storage.load_model(self.config.model_id):
+            logger.info(
+                f"Model {self.config.model_id} already exists. "
+                "Use force=True to retrain."
+            )
+            return
+
+        df = history_df.copy()
+        if "ds" not in df.columns or "y" not in df.columns:
+            raise ValueError("history_df must contain 'ds' and 'y' columns")
+
+        df["ds"] = pd.to_datetime(df["ds"], utc=True)
+        df = df.sort_values("ds")
+
+        start_date = df["ds"].min().date()
+        end_date = df["ds"].max().date()
+
+        weather_df = self.weather_client.fetch_historical(start_date, end_date)
+        weather_df = self.weather_client.resample_weather(
+            weather_df, self.config.interval_minutes
+        )
+
+        df = pd.merge(df, weather_df, on="ds", how="left")
+
+        weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
+        df[weather_cols] = df[weather_cols].interpolate(method="linear").bfill().ffill()
+
+        if df[weather_cols].isnull().any().any():
+            df = df.dropna(subset=weather_cols)
+
+        # Feature Engineering
+        df = self._prepare_features(df)
+
+        # Initialize Prophet with tuned hyperparameters
+        m = Prophet(
+            changepoint_prior_scale=self.config.changepoint_prior_scale,
+            seasonality_prior_scale=self.config.seasonality_prior_scale,
+            interval_width=0.8,  # Used for P10/P90 (80% interval)
+        )
+
+        if self.config.forecast_type == "solar":
+            m.add_regressor("temperature_2m")
+            m.add_regressor("rolling_cloud_cover")
+            m.add_regressor("shortwave_radiation")
+            m.add_regressor("clear_sky_ghi")
+        elif self.config.forecast_type == "consumption":
+            if self.config.heat_pump_mode:
+                m.add_regressor("temperature_2m")
+
+        logger.info(f"Training Prophet model for {self.config.forecast_type}...")
+        m.fit(df)
+
+        self.storage.save_model(m, self.config.model_id)
+
+    def tune_model(self, history_df: pd.DataFrame, days: int = 30):
+        """
+        Find optimal hyperparameters using cross-validation.
+        """
+        logger.info(f"Tuning model hyperparameters using {days} days of history...")
+
+        # We need to prepare data first as cross_validation needs the regressors
+        # This is a bit complex as we need weather data for history_df
+        # For simplicity, we assume train() logic but without fitting.
+
+        # Prepare data (duplicated logic from train, could be refactored)
+        df = history_df.copy()
+        df["ds"] = pd.to_datetime(df["ds"], utc=True)
+        start_date = df["ds"].min().date()
+        end_date = df["ds"].max().date()
+        weather_df = self.weather_client.fetch_historical(start_date, end_date)
+        weather_df = self.weather_client.resample_weather(
+            weather_df, self.config.interval_minutes
+        )
+        df = pd.merge(df, weather_df, on="ds", how="left")
+        weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
+        df[weather_cols] = df[weather_cols].interpolate(method="linear").bfill().ffill()
+        df = self._prepare_features(df.dropna(subset=weather_cols))
+
+        param_grid = {
+            "changepoint_prior_scale": [0.001, 0.05, 0.5],
+            "seasonality_prior_scale": [0.01, 1.0, 10.0],
+        }
+
+        # Simplified tuning loop
+        best_params = {}
+        min_rmse = float("inf")
+
+        for cps in param_grid["changepoint_prior_scale"]:
+            for sps in param_grid["seasonality_prior_scale"]:
+                m = Prophet(changepoint_prior_scale=cps, seasonality_prior_scale=sps)
+                if self.config.forecast_type == "solar":
+                    m.add_regressor("temperature_2m")
+                    m.add_regressor("rolling_cloud_cover")
+                    m.add_regressor("shortwave_radiation")
+                    m.add_regressor("clear_sky_ghi")
+                elif (
+                    self.config.forecast_type == "consumption"
+                    and self.config.heat_pump_mode
+                ):
+                    m.add_regressor("temperature_2m")
+
+                m.fit(df)
+
+                # Cross-validation
+                # initial should be at least 3x horizon
+                df_cv = cross_validation(
+                    m, initial=f"{days // 2} days", period="5 days", horizon="5 days"
+                )
+                df_p = performance_metrics(df_cv, rolling_window=1)
+                rmse = df_p["rmse"].values[0]
+
+                if rmse < min_rmse:
+                    min_rmse = rmse
+                    best_params = {"cps": cps, "sps": sps}
+
+        logger.info(f"Best params found: {best_params} with RMSE {min_rmse}")
+        self.config.changepoint_prior_scale = best_params["cps"]
+        self.config.seasonality_prior_scale = best_params["sps"]
+
+    def predict(self, days: int = 7) -> pd.DataFrame:
+        """
+        Generates forecast for the next 'days' days.
+        """
+        m = self.storage.load_model(self.config.model_id)
+        if m is None:
+            raise RuntimeError(
+                f"Model {self.config.model_id} not found. Please run train() first."
+            )
+
+        weather_forecast = self.weather_client.fetch_forecast(days=days)
+        weather_forecast = self.weather_client.resample_weather(
+            weather_forecast, self.config.interval_minutes
+        )
+
+        future = pd.DataFrame({"ds": weather_forecast["ds"]})
+        future = pd.merge(future, weather_forecast, on="ds", how="left")
+
+        weather_cols = ["temperature_2m", "cloud_cover", "shortwave_radiation"]
+        future[weather_cols] = (
+            future[weather_cols].interpolate(method="linear").bfill().ffill()
+        )
+
+        # Feature Engineering
+        future = self._prepare_features(future)
+
+        forecast = m.predict(future)
+
+        # Night Mask & Clipping
+        if self.config.forecast_type == "solar":
+            logger.info("Applying night mask for solar forecast...")
+            elevations = calculate_solar_elevation(
+                self.config.latitude, self.config.longitude, forecast["ds"]
+            )
+            forecast.loc[elevations < 0, ["yhat", "yhat_lower", "yhat_upper"]] = 0
+
+        for col in ["yhat", "yhat_lower", "yhat_upper"]:
+            forecast[col] = forecast[col].clip(lower=0)
+
+        return cast(pd.DataFrame, forecast)
+
+    def get_prediction_intervals(self, days: int = 7) -> List[PredictionInterval]:
+        """
+        Returns prediction intervals for EMS.
+        """
+        forecast = self.predict(days=days)
+
+        intervals = []
+        for _, row in forecast.iterrows():
+            intervals.append(
+                PredictionInterval(
+                    timestamp=row["ds"],
+                    expected_kwh=row["yhat"],
+                    lower_bound_kwh=row["yhat_lower"],
+                    upper_bound_kwh=row["yhat_upper"],
+                )
+            )
+        return intervals
+
+    def get_surplus_probability(
+        self, threshold_kwh: float, days: int = 7
+    ) -> pd.DataFrame:
+        """
+        Returns probability of exceeding threshold_kwh.
+        Prophet doesn't provide direct probabilities, but we can estimate
+        from the uncertainty interval (yhat_upper - yhat_lower)
+        assuming normal distribution.
+        """
+        forecast = self.predict(days=days)
+
+        # Estimate sigma from 80% interval (approx 1.28 * sigma)
+        sigma = (forecast["yhat_upper"] - forecast["yhat_lower"]) / (2 * 1.28)
+        sigma = sigma.replace(0, 1e-9)  # Avoid div by zero
+
+        from scipy.stats import norm
+
+        z_score = (threshold_kwh - forecast["yhat"]) / sigma
+        prob_exceed = 1 - norm.cdf(z_score)
+
+        return pd.DataFrame({"ds": forecast["ds"], "surplus_prob": prob_exceed})
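
The new core.py is the public entry point tying together the weather client, the pvlib-based features from utils.py, and Prophet. A minimal usage sketch of the API shown above follows; the coordinates, model_id, and random history are made up, and a real run needs network access for the weather client plus enough history for Prophet to fit.

```python
import numpy as np
import pandas as pd

from kpower_forecast import KPowerForecast

# Synthetic 30 days of 15-minute production history; train() requires
# the "ds" and "y" columns (values here are random placeholders).
times = pd.date_range("2026-01-01", periods=4 * 24 * 30, freq="15min", tz="UTC")
history = pd.DataFrame({"ds": times, "y": np.random.rand(len(times))})

kp = KPowerForecast(
    model_id="roof-south",   # hypothetical identifier
    latitude=48.14,          # made-up site
    longitude=11.58,
    interval_minutes=15,
    forecast_type="solar",
)

kp.train(history)                                # fetch weather, fit, persist model
forecast = kp.predict(days=3)                    # yhat / yhat_lower / yhat_upper, night-masked
intervals = kp.get_prediction_intervals(days=3)  # list of PredictionInterval (P10/P90)
surplus = kp.get_surplus_probability(threshold_kwh=0.5, days=3)

# The sigma estimate in get_surplus_probability() follows from interval_width=0.8:
# an 80% interval under a normal distribution spans roughly 2 * 1.28 * sigma.
from scipy.stats import norm
print(round(norm.ppf(0.9), 4))  # 1.2816, the 1.28 constant used above
```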
src/kpower_forecast/storage.py
@@ -8,6 +8,7 @@ from prophet.serialize import model_from_json, model_to_json
 
 logger = logging.getLogger(__name__)
 
+
 class ModelStorage:
     def __init__(self, storage_path: str):
         self.storage_path = Path(storage_path)
@@ -50,8 +51,8 @@ class ModelStorage:
                 return model_from_json(json.load(f))
         except Exception as e:
             logger.error(f"Failed to load model {model_id}: {e}")
-            # If load fails, we might want to return None to trigger retraining,
-            # or raise to alert the user.
+            # If load fails, we might want to return None to trigger retraining,
+            # or raise to alert the user.
             # Given "production-grade", maybe explicit failure is safer
             # than silent fallback?
             # But the prompt says "If a model exists, load it...
src/kpower_forecast/utils.py
@@ -1,8 +1,9 @@
 import datetime
-from typing import List, Union
+from typing import List, Union, cast
 
 import numpy as np
 import pandas as pd
+from pvlib.location import Location
 from pysolar.solar import get_altitude
 
 
@@ -12,13 +13,13 @@ def calculate_solar_elevation(
     """
     Calculate solar elevation angles (altitude) for a list of times
    at a specific location.
-
+
     Args:
         lat: Latitude in decimal degrees.
         lon: Longitude in decimal degrees.
-        times: List of datetime objects or pandas DatetimeIndex.
+        times: List of datetime objects or pandas DatetimeIndex.
             Must be timezone-aware or UTC.
-
+
     Returns:
         Numpy array of elevation angles in degrees.
     """
@@ -28,9 +29,29 @@
         if t.tzinfo is None:
             # Assume UTC if naive, though strictly we should enforce awareness
             t = t.replace(tzinfo=datetime.timezone.utc)
-
+
         # get_altitude returns degrees
         alt = get_altitude(lat, lon, t)
         elevations.append(alt)
-
+
     return np.array(elevations)
+
+
+def get_clear_sky_ghi(lat: float, lon: float, times: pd.DatetimeIndex) -> pd.Series:
+    """
+    Calculate Theoretical Clear Sky GHI (Global Horizontal Irradiance)
+    using pvlib.
+
+    Args:
+        lat: Latitude.
+        lon: Longitude.
+        times: Pandas DatetimeIndex (must be timezone aware).
+
+    Returns:
+        Pandas Series of GHI values.
+    """
+    location = Location(lat, lon)
+    # get_clearsky returns GHI, DNI, DHI. We only need GHI.
+    # Ineichen is the default model.
+    clearsky = location.get_clearsky(times)
+    return cast(pd.Series, clearsky["ghi"])
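
The new get_clear_sky_ghi helper is a thin wrapper over pvlib's clear-sky model and feeds the clear_sky_ghi regressor in core.py. A short sketch of what it computes, with a made-up location and the timezone-aware index the docstring requires:

```python
import pandas as pd
from pvlib.location import Location

# One day of hourly UTC timestamps (timezone-aware, as required above).
times = pd.date_range("2026-06-21", periods=24, freq="1h", tz="UTC")

# Equivalent to get_clear_sky_ghi(48.14, 11.58, times): pvlib's default
# Ineichen model returns ghi/dni/dhi, and the helper keeps only "ghi".
location = Location(48.14, 11.58)
clearsky = location.get_clearsky(times)
ghi = clearsky["ghi"]

print(ghi.idxmax(), float(ghi.max()))  # theoretical peak irradiance (W/m^2) near solar noon
```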