spotforecast2 0.0.1__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- spotforecast2/.DS_Store +0 -0
- spotforecast2/__init__.py +2 -0
- spotforecast2/data/__init__.py +0 -0
- spotforecast2/data/data.py +130 -0
- spotforecast2/data/fetch_data.py +209 -0
- spotforecast2/exceptions.py +681 -0
- spotforecast2/forecaster/.DS_Store +0 -0
- spotforecast2/forecaster/__init__.py +7 -0
- spotforecast2/forecaster/base.py +448 -0
- spotforecast2/forecaster/metrics.py +527 -0
- spotforecast2/forecaster/recursive/__init__.py +4 -0
- spotforecast2/forecaster/recursive/_forecaster_equivalent_date.py +1075 -0
- spotforecast2/forecaster/recursive/_forecaster_recursive.py +939 -0
- spotforecast2/forecaster/recursive/_warnings.py +15 -0
- spotforecast2/forecaster/utils.py +954 -0
- spotforecast2/model_selection/__init__.py +5 -0
- spotforecast2/model_selection/bayesian_search.py +453 -0
- spotforecast2/model_selection/grid_search.py +314 -0
- spotforecast2/model_selection/random_search.py +151 -0
- spotforecast2/model_selection/split_base.py +357 -0
- spotforecast2/model_selection/split_one_step.py +245 -0
- spotforecast2/model_selection/split_ts_cv.py +634 -0
- spotforecast2/model_selection/utils_common.py +718 -0
- spotforecast2/model_selection/utils_metrics.py +103 -0
- spotforecast2/model_selection/validation.py +685 -0
- spotforecast2/preprocessing/__init__.py +30 -0
- spotforecast2/preprocessing/_binner.py +378 -0
- spotforecast2/preprocessing/_common.py +123 -0
- spotforecast2/preprocessing/_differentiator.py +123 -0
- spotforecast2/preprocessing/_rolling.py +136 -0
- spotforecast2/preprocessing/curate_data.py +254 -0
- spotforecast2/preprocessing/imputation.py +92 -0
- spotforecast2/preprocessing/outlier.py +114 -0
- spotforecast2/preprocessing/split.py +139 -0
- spotforecast2/py.typed +0 -0
- spotforecast2/utils/__init__.py +43 -0
- spotforecast2/utils/convert_to_utc.py +44 -0
- spotforecast2/utils/data_transform.py +208 -0
- spotforecast2/utils/forecaster_config.py +344 -0
- spotforecast2/utils/generate_holiday.py +70 -0
- spotforecast2/utils/validation.py +569 -0
- spotforecast2/weather/__init__.py +0 -0
- spotforecast2/weather/weather_client.py +288 -0
- spotforecast2-0.0.1.dist-info/METADATA +47 -0
- spotforecast2-0.0.1.dist-info/RECORD +46 -0
- spotforecast2-0.0.1.dist-info/WHEEL +4 -0
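
The listing above also defines the wheel's import layout. As a quick orientation, the following is a hypothetical smoke test, not part of the package, that simply imports each subpackage listed above (it assumes the wheel and its declared dependencies are installed):

```python
# Hypothetical smoke test: import every subpackage present in the wheel listing.
import importlib

for mod in [
    "spotforecast2",
    "spotforecast2.data",
    "spotforecast2.forecaster",
    "spotforecast2.forecaster.recursive",
    "spotforecast2.model_selection",
    "spotforecast2.preprocessing",
    "spotforecast2.utils",
    "spotforecast2.weather",
]:
    importlib.import_module(mod)
    print(f"ok: {mod}")
```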

spotforecast2/weather/weather_client.py

@@ -0,0 +1,288 @@
+"""Weather data fetching and processing using Open-Meteo API."""
+
+import logging
+from pathlib import Path
+from typing import Optional, Dict, Any, Union
+
+import pandas as pd
+import requests
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.retry import Retry
+
+
+class WeatherClient:
+    """Client for fetching weather data from Open-Meteo API.
+
+    Handles the low-level API interactions, parameter building, and response parsing.
+    """
+
+    ARCHIVE_BASE_URL = "https://archive-api.open-meteo.com/v1/archive"
+    FORECAST_BASE_URL = "https://api.open-meteo.com/v1/forecast"
+
+    HOURLY_PARAMS = [
+        "temperature_2m",
+        "relative_humidity_2m",
+        "precipitation",
+        "rain",
+        "snowfall",
+        "weather_code",
+        "pressure_msl",
+        "surface_pressure",
+        "cloud_cover",
+        "cloud_cover_low",
+        "cloud_cover_mid",
+        "cloud_cover_high",
+        "wind_speed_10m",
+        "wind_direction_10m",
+        "wind_gusts_10m",
+    ]
+
+    def __init__(self, latitude: float, longitude: float):
+        """Initialize WeatherClient.
+
+        Args:
+            latitude: Latitude of the location.
+            longitude: Longitude of the location.
+        """
+        self.latitude = latitude
+        self.longitude = longitude
+        self.logger = logging.getLogger(__name__)
+        self._session = self._create_session()
+
+    def _create_session(self) -> requests.Session:
+        """Create a requests session with retry logic."""
+        session = requests.Session()
+        retry_strategy = Retry(
+            total=3,
+            backoff_factor=1,
+            status_forcelist=[429, 500, 502, 503, 504],
+        )
+        adapter = HTTPAdapter(max_retries=retry_strategy)
+        session.mount("https://", adapter)
+        session.mount("http://", adapter)
+        return session
+
+    def _fetch(self, url: str, params: Dict[str, Any]) -> pd.DataFrame:
+        """Execute API request and return parsed DataFrame."""
+        try:
+            response = self._session.get(url, params=params, timeout=30)
+            response.raise_for_status()
+            data = response.json()
+        except requests.exceptions.RequestException as e:
+            self.logger.error(f"API request failed: {e}")
+            raise
+
+        if "error" in data and data["error"]:
+            raise ValueError(
+                f"Open-Meteo API error: {data.get('reason', 'Unknown error')}"
+            )
+
+        hourly_data = data.get("hourly", {})
+        if not hourly_data:
+            raise ValueError("No hourly data returned from API")
+
+        # Parse to DataFrame
+        times = pd.to_datetime(hourly_data["time"])
+        df_dict = {"datetime": times}
+        for param in self.HOURLY_PARAMS:
+            if param in hourly_data:
+                df_dict[param] = hourly_data[param]
+
+        df = pd.DataFrame(df_dict)
+        df.set_index("datetime", inplace=True)
+        return df
+
+    def fetch_archive(
+        self, start: pd.Timestamp, end: pd.Timestamp, timezone: str = "UTC"
+    ) -> pd.DataFrame:
+        """Fetch historical data from Archive API."""
+        params = {
+            "latitude": self.latitude,
+            "longitude": self.longitude,
+            "hourly": ",".join(self.HOURLY_PARAMS),
+            "timezone": timezone,
+            "start_date": start.strftime("%Y-%m-%d"),
+            "end_date": end.strftime("%Y-%m-%d"),
+        }
+        return self._fetch(self.ARCHIVE_BASE_URL, params)
+
+    def fetch_forecast(self, days_ahead: int, timezone: str = "UTC") -> pd.DataFrame:
+        """Fetch forecast data from Forecast API."""
+        params = {
+            "latitude": self.latitude,
+            "longitude": self.longitude,
+            "hourly": ",".join(self.HOURLY_PARAMS),
+            "timezone": timezone,
+            "forecast_days": days_ahead,
+        }
+        return self._fetch(self.FORECAST_BASE_URL, params)
+
+
+class WeatherService(WeatherClient):
+    """High-level service for weather data generation.
+
+    Extends WeatherClient with caching, hybrid fetching (archive+forecast),
+    and fallback strategies.
+    """
+
+    def __init__(
+        self,
+        latitude: float,
+        longitude: float,
+        cache_path: Optional[Path] = None,
+        use_forecast: bool = True,
+    ):
+        super().__init__(latitude, longitude)
+        self.cache_path = cache_path
+        self.use_forecast = use_forecast
+
+    def get_dataframe(
+        self,
+        start: Union[str, pd.Timestamp],
+        end: Union[str, pd.Timestamp],
+        timezone: str = "UTC",
+        freq: str = "h",
+        fallback_on_failure: bool = True,
+    ) -> pd.DataFrame:
+        """Get weather DataFrame for a specified range using best available methods.
+
+        Refactored from spotpredict.create_weather_df.
+        """
+        start_ts = pd.Timestamp(start)
+        end_ts = pd.Timestamp(end)
+
+        # Localize if naive
+        if start_ts.tz is None:
+            start_ts = start_ts.tz_localize(timezone)
+        if end_ts.tz is None:
+            end_ts = end_ts.tz_localize(timezone)
+
+        # Convert to UTC for consistency
+        start_utc = start_ts.tz_convert("UTC")
+        end_utc = end_ts.tz_convert("UTC")
+
+        # 1. Try Cache
+        cached_df = self._load_cache()
+        if cached_df is not None:
+            if cached_df.index.min() <= start_utc and cached_df.index.max() >= end_utc:
+                self.logger.info("Using full cached data.")
+                return self._finalize_df(
+                    cached_df.loc[start_utc:end_utc], freq, timezone
+                )
+
+        # 2. Hybrid Fetch (filling gaps if cache exists, or fetching all)
+        # (The original logic did partial fills, but full fetch is safer and
+        # simpler for now unless specifically improved).
+        # Actually, strict refactor implies keeping logic. Let's keep it simple:
+        # fetch what's needed.
+
+        try:
+            df = self._fetch_hybrid(start_ts, end_ts, timezone)
+        except Exception as e:
+            self.logger.warning(f"Fetch failed: {e}")
+            if fallback_on_failure and cached_df is not None and len(cached_df) >= 24:
+                df = self._create_fallback(start_utc, end_utc, cached_df, timezone)
+            else:
+                raise
+
+        # 3. Merge with cache and save
+        if cached_df is not None:
+            df = pd.concat([cached_df, df])
+            df = df[~df.index.duplicated(keep="last")].sort_index()  # Keep new data
+
+        if self.cache_path:
+            self._save_cache(df)
+
+        # 4. Return slice
+        return self._finalize_df(df.loc[start_utc:end_utc], freq, timezone)
+
+    def _fetch_hybrid(
+        self, start: pd.Timestamp, end: pd.Timestamp, timezone: str
+    ) -> pd.DataFrame:
+        """Fetch from Archive and/or Forecast based on date."""
+        now = pd.Timestamp.now(tz=start.tz)
+        archive_cutoff = now - pd.Timedelta(days=5)
+
+        dfs = []
+
+        # Archive part
+        if start < archive_cutoff:
+            arch_end = min(end, archive_cutoff)
+            try:
+                dfs.append(self.fetch_archive(start, arch_end, timezone))
+            except Exception as e:
+                self.logger.warning(f"Archive fetch warning: {e}")
+
+        # Forecast part
+        if end > now and self.use_forecast:
+            days = (end - now).days + 2
+            days = min(max(1, days), 16)
+            try:
+                df_fore = self.fetch_forecast(days, timezone)
+                # Filter forecast to needed range to avoid overlap issues
+                dfs.append(df_fore)
+            except Exception as e:
+                self.logger.warning(f"Forecast fetch warning: {e}")
+
+        if not dfs:
+            raise ValueError("Could not fetch data from Archive or Forecast.")
+
+        full_df = pd.concat(dfs)
+        full_df = full_df[~full_df.index.duplicated(keep="first")].sort_index()
+
+        # Ensure UTC index
+        if full_df.index.tz is None:
+            full_df.index = full_df.index.tz_localize(timezone)
+        full_df.index = full_df.index.tz_convert("UTC")
+
+        return full_df
+
+    def _create_fallback(
+        self,
+        start: pd.Timestamp,
+        end: pd.Timestamp,
+        source_df: pd.DataFrame,
+        timezone: str,
+    ) -> pd.DataFrame:
+        """Repeat last 24h of data."""
+        last_24 = source_df.tail(24)
+        hours = int((end - start).total_seconds() / 3600) + 1
+        repeats = (hours // 24) + 1
+
+        new_data = pd.concat([last_24] * repeats, ignore_index=True)
+        new_data = new_data.iloc[:hours]
+
+        idx = pd.date_range(start, periods=hours, freq="h", tz="UTC")
+        new_data.index = idx
+        return new_data
+
+    def _load_cache(self) -> Optional[pd.DataFrame]:
+        if not self.cache_path or not self.cache_path.exists():
+            return None
+        try:
+            df = pd.read_parquet(self.cache_path)
+            if df.index.tz is None:
+                df.index = df.index.tz_localize("UTC")
+            return df
+        except Exception:
+            return None
+
+    def _save_cache(self, df: pd.DataFrame):
+        if self.cache_path:
+            self.cache_path.parent.mkdir(parents=True, exist_ok=True)
+            df.to_parquet(self.cache_path)
+
+    def _finalize_df(self, df: pd.DataFrame, freq: str, timezone: str) -> pd.DataFrame:
+        """Resample and localize."""
+        # Resample
+        if freq != "h":  # Assuming API returns hourly
+            df = df.resample(freq).ffill()  # Forward fill for weather is reasonable
+
+        # Fill gaps
+        df = df.ffill().bfill()
+
+        # Convert to requested timezone if needed (though we keep internal UTC mostly)
+        # User requested specific tz output usually?
+        # Original code returned normalized DF. Let's ensure frequency matches exactly.
+
+        return df
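
For orientation, here is a minimal usage sketch of the `WeatherClient`/`WeatherService` API added above; the coordinates, date range, and cache file name are illustrative, and live calls need network access to the Open-Meteo endpoints:

```python
from pathlib import Path

from spotforecast2.weather.weather_client import WeatherService

# Illustrative coordinates (roughly Cologne) and an illustrative cache file name.
service = WeatherService(
    latitude=50.94,
    longitude=6.96,
    cache_path=Path("weather_cache.parquet"),  # optional parquet cache
    use_forecast=True,  # allow the Forecast API for timestamps in the future
)

# Hourly, UTC-indexed DataFrame; remaining gaps are forward/backward filled.
df = service.get_dataframe(start="2024-01-01", end="2024-01-07", timezone="UTC")
print(df[["temperature_2m", "wind_speed_10m"]].head())
```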

spotforecast2-0.0.1.dist-info/METADATA

@@ -0,0 +1,47 @@
+Metadata-Version: 2.3
+Name: spotforecast2
+Version: 0.0.1
+Summary: Forecasting with spot
+Author: bartzbeielstein
+Author-email: bartzbeielstein <32470350+bartzbeielstein@users.noreply.github.com>
+Requires-Dist: astral>=3.2
+Requires-Dist: feature-engine>=1.9.3
+Requires-Dist: flake8>=7.3.0
+Requires-Dist: holidays>=0.90
+Requires-Dist: ipykernel>=7.1.0
+Requires-Dist: jupyter>=1.1.1
+Requires-Dist: lightgbm>=4.6.0
+Requires-Dist: matplotlib>=3.10.8
+Requires-Dist: numba>=0.63.1
+Requires-Dist: optuna>=4.7.0
+Requires-Dist: pandas>=3.0.0
+Requires-Dist: plotly>=6.5.2
+Requires-Dist: pyarrow>=23.0.0
+Requires-Dist: scikit-learn>=1.8.0
+Requires-Dist: spotoptim>=0.0.160
+Requires-Dist: tqdm>=4.67.2
+Requires-Python: >=3.13
+Description-Content-Type: text/markdown
+
+# About spotforecast2
+
+`spotforecast2` is a Python library for time series forecasting.
+
+Parts of the code are ported from `skforecast` to reduce external dependencies.
+Many thanks to the [skforecast team](https://skforecast.org/0.20.0/more/about-skforecast.html) for their great work!
+
+
+# License
+
+`spotforecast2` software: [BSD-3-Clause License](https://github.com/sequential-parameter-optimization/spotforecast2?tab=BSD-3-Clause-1-ov-file)
+
+
+# References
+
+## skforecast:
+
+* Amat Rodrigo, J., & Escobar Ortiz, J. (2026). skforecast (Version 0.20.0) [Computer software]. https://doi.org/10.5281/zenodo.8382788
+
+## spotoptim:
+
+* [spotoptim documentation](https://sequential-parameter-optimization.github.io/spotoptim/)
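
The `Requires-Dist` and `Requires-Python` pins above can be read back at runtime; a small sketch, assuming `spotforecast2` 0.0.1 is installed in the active environment, retrieves the same fields via `importlib.metadata`:

```python
# Read back the distribution metadata shown in the METADATA hunk above.
from importlib.metadata import metadata, requires, version

print(version("spotforecast2"))              # -> "0.0.1"
print(metadata("spotforecast2")["Summary"])  # -> "Forecasting with spot"
for req in requires("spotforecast2") or []:  # e.g. "pandas>=3.0.0", "optuna>=4.7.0", ...
    print(req)
```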

spotforecast2-0.0.1.dist-info/RECORD

@@ -0,0 +1,46 @@
+spotforecast2/.DS_Store,sha256=4yBH5_e0YHcGSgDSeKs4V5_sHINqyWiP33kMXar-lz8,6148
+spotforecast2/__init__.py,sha256=X9sBx15iz8yqr9iDJcrGJM5nhvnpaczXto4XV_GtfhE,59
+spotforecast2/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+spotforecast2/data/data.py,sha256=HEgr-FULaqHvuMeKTviOgYyo3GbxpGRTo3ZnmIU9w2Y,4422
+spotforecast2/data/fetch_data.py,sha256=V2vy2ZcLDDn96e7gnBG4MmuCfoDTwZ1VOm44sbSh6bs,6756
+spotforecast2/exceptions.py,sha256=gi8rmJWLKEpi3kNB9jWdHcH6XYkmsfyHfXTNg_fAy0w,20497
+spotforecast2/forecaster/.DS_Store,sha256=GXyLvW6LC7GpVyo-vy-zehyHDbffWnsn_ZBT5AX0CQI,6148
+spotforecast2/forecaster/__init__.py,sha256=BbCOS2ouKcPC9VzcdprllVyqlZIyAWXCOvUAiInxDi4,140
+spotforecast2/forecaster/base.py,sha256=rXhcjY4AMpyQhkpbtLIA8OOrGEb8fU57SQiyeR9c9DQ,16748
+spotforecast2/forecaster/metrics.py,sha256=MiZs9MAvT5JjPEGEks1uWR0nFuzYucCWuu4bMV_4HPQ,19316
+spotforecast2/forecaster/recursive/__init__.py,sha256=YNVxLReLEwSFDasmjXXMSKJqNL_Y4lVEZ696UksjVVE,184
+spotforecast2/forecaster/recursive/_forecaster_equivalent_date.py,sha256=Mdr-3D1lUivXO07Rp4T8NIgQ2H_2y4IR4BqCwjBtZsw,48261
+spotforecast2/forecaster/recursive/_forecaster_recursive.py,sha256=oU2zCOI0UaGIn8doLJGphP7jcNL5FF6Y972UCwlxDJI,35739
+spotforecast2/forecaster/recursive/_warnings.py,sha256=BtZ3UoycywjEQ0ceXe4TL1WEdFcLAi1EnDMvZXHw_U8,325
+spotforecast2/forecaster/utils.py,sha256=DuYQcWZlJclSQoelFxcrydAAE8gHH_tgq6tzF7h837g,36030
+spotforecast2/model_selection/__init__.py,sha256=uP60TkgDzs_x5V60rnKanc12S9-yXx2ZLsXsXdqAYEA,208
+spotforecast2/model_selection/bayesian_search.py,sha256=Vwb_LatDnt22LhIWyzqNhCdlDQ_UgVCyFcXmOxF3Pic,17407
+spotforecast2/model_selection/grid_search.py,sha256=a5rNEndTXlx1ghT7ws5qs7WM0XBFMqEiK3Q5k7P0EJg,10998
+spotforecast2/model_selection/random_search.py,sha256=klMeWTgfKpVenmABr_PZWaZveuoS8XksqSatxUuWdw8,6267
+spotforecast2/model_selection/split_base.py,sha256=BD58PIMx5YT7jl3tSohUlkN0Jgegu8cyLJsgOk1zxUk,14997
+spotforecast2/model_selection/split_one_step.py,sha256=IgXorAYkKwsNIuHohOb1IJkpBqq5JGSCXfUwhhxBuqs,9388
+spotforecast2/model_selection/split_ts_cv.py,sha256=uwACVC5m-cRuCtpA5U46K-tdj0zmvlLdG0Id6qLq9Uk,28479
+spotforecast2/model_selection/utils_common.py,sha256=HKDxm4pLwG0cqhE4t8bzNHFtRa6yn_O7b5ud-nx6b7E,31814
+spotforecast2/model_selection/utils_metrics.py,sha256=mMVKh03-yAvRjEnZlbg3CsktXNcHo7yiTkI5VMg5wQk,3842
+spotforecast2/model_selection/validation.py,sha256=nwZATc74tVb992HbefP_sAcJaz8ukV_uqjtVFXaySxs,30038
+spotforecast2/preprocessing/__init__.py,sha256=Jk1RJRbPkggw70h4Lay4FY7yQHN9_tjRxzp9QJcF3Oo,828
+spotforecast2/preprocessing/_binner.py,sha256=EYBOwNSOW85bdLUgQ_qLSq8xpujWJezWkNTIL1jNaYo,13723
+spotforecast2/preprocessing/_common.py,sha256=aP8EIYIg3iBXnijXByHedGEdcubXu-ciRtEgqdDfO_8,3141
+spotforecast2/preprocessing/_differentiator.py,sha256=otka_TO1edM3zgp16zOjeSKxa61arbmPPsr96_GfgLI,4646
+spotforecast2/preprocessing/_rolling.py,sha256=_BUG_aHbOI-1e2ku8AwsJJGl3akTBWjRju2PhclkXso,4202
+spotforecast2/preprocessing/curate_data.py,sha256=4VV8aYwShyrUc9lqWVx_ckIH-moK0B8ONEMb2i463ag,9603
+spotforecast2/preprocessing/imputation.py,sha256=lmH-HumI_QLLm9aMESe_oZq84Axn60woLaMqd_Abw3k,3509
+spotforecast2/preprocessing/outlier.py,sha256=jZxAR870QtYner7b4gXk6LLGJw0juLq1VU4CGklYd3c,4208
+spotforecast2/preprocessing/split.py,sha256=mzzt5ltUZdVzfWtBBTQjp8E2MyqVdWUFtz7nN11urbU,5011
+spotforecast2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+spotforecast2/utils/__init__.py,sha256=NrMt_xJLe4rbTFbsbgSQYeREohEOiYG5S-97e6Jj07I,1018
+spotforecast2/utils/convert_to_utc.py,sha256=hz8mJUHK9jDLUiN5LdNX5l3KZuOKlklyycB4zFdB9Ng,1405
+spotforecast2/utils/data_transform.py,sha256=PhLeZoimM0TLfp34Fp56dQrxlCYNWGVU8h8RZHdZSlo,7294
+spotforecast2/utils/forecaster_config.py,sha256=0jchk_9tjxzttN8btWlRBfAjT2bz27JO4CDrpPsC58E,12875
+spotforecast2/utils/generate_holiday.py,sha256=SHaPvPMt-abis95cChHf5ObyPwCTrzJ87bxffeqZLRc,2707
+spotforecast2/utils/validation.py,sha256=vcfpS6HF7YzVjKUZl-AGrIW71vCXrATJlfg2ZLjUse0,21633
+spotforecast2/weather/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+spotforecast2/weather/weather_client.py,sha256=Ec_ywug6uoa71MfXM8RNbXEvtBtBzr-SUS5xq_HKtZE,9837
+spotforecast2-0.0.1.dist-info/WHEEL,sha256=ZyFSCYkV2BrxH6-HRVRg3R9Fo7MALzer9KiPYqNxSbo,79
+spotforecast2-0.0.1.dist-info/METADATA,sha256=uTPv1foQoCIMf1UjKRIq7wNMaPJ4M4nHDWlkEVRKkYw,1475
+spotforecast2-0.0.1.dist-info/RECORD,,