openstef-3.4.69-py3-none-any.whl → openstef-3.4.72-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openstef/model/model_creator.py +0 -1
- openstef/model/objective.py +40 -10
- openstef/model/regressors/arima.py +1 -1
- openstef/model/regressors/regressor.py +2 -2
- openstef/model/regressors/xgb.py +0 -3
- openstef/pipeline/train_model.py +1 -2
- {openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/METADATA +3 -3
- {openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/RECORD +11 -11
- {openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/WHEEL +0 -0
- {openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/licenses/LICENSE +0 -0
- {openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/top_level.txt +0 -0
openstef/model/model_creator.py
CHANGED
openstef/model/objective.py
CHANGED
@@ -7,6 +7,8 @@ from typing import Any, Callable, Optional
 
 import optuna
 import pandas as pd
+from lightgbm import early_stopping
+from xgboost.callback import EarlyStopping
 
 from openstef.enums import ModelType
 from openstef.metrics import metrics
@@ -130,31 +132,42 @@ class RegressorObjective:
         # Configure evals for early stopping
         eval_set = [(train_x, train_y), (valid_x, valid_y)]
 
-        #
+        # Get the parameters used in this trial
         hyper_params = self.get_params(trial)
 
-        #
+        # Insert parameters into model
         self.model.set_params(**hyper_params)
 
-
+        callbacks = []
+
+        # Create the early stopping callback
+        early_stopping_callback = self.get_early_stopping_callback()
+        if early_stopping_callback is not None:
+            callbacks.append(early_stopping_callback)
+
+        # Create the specific pruning callback
         pruning_callback = self.get_pruning_callback(trial)
-        if pruning_callback is None:
-            callbacks
-
-
+        if pruning_callback is not None:
+            callbacks.append(pruning_callback)
+
+        # Pass verbose argument to fit call if model is not LGB
+        fit_kwargs = {}
+        if self.model_type not in [ModelType.XGB, ModelType.LGB]:
+            fit_kwargs["early_stopping_rounds"] = EARLY_STOPPING_ROUNDS
+        elif self.model_type != ModelType.LGB:
+            fit_kwargs["verbose"] = self.verbose
 
         # validation_0 and validation_1 are available
         self.model.fit(
             train_x,
             train_y,
             eval_set=eval_set,
-            early_stopping_rounds=EARLY_STOPPING_ROUNDS,
-            verbose=self.verbose,
             eval_metric=self.eval_metric,
             callbacks=callbacks,
+            **fit_kwargs,
         )
 
-        self.model.feature_importance_dataframe = self.model.
+        self.model.feature_importance_dataframe = self.model.get_feature_importance()
 
         # Do confidence interval determination
         self.model = StandardDeviationGenerator(
@@ -203,6 +216,9 @@ class RegressorObjective:
     def get_pruning_callback(self, trial: optuna.trial.FrozenTrial):
         return None
 
+    def get_early_stopping_callback(self):
+        return None
+
     def get_trial_track(self) -> dict:
         """Get a dictionary of al trials.
 
@@ -272,6 +288,15 @@ class XGBRegressorObjective(RegressorObjective):
             trial, observation_key=f"validation_1-{self.eval_metric}"
         )
 
+    def get_early_stopping_callback(self):
+        return EarlyStopping(
+            rounds=EARLY_STOPPING_ROUNDS,
+            metric_name=self.eval_metric,
+            data_name=f"validation_1",
+            maximize=False,
+            save_best=True,
+        )
+
     @classmethod
     def get_default_values(cls) -> dict:
         default_parameter_values = super().get_default_values()
@@ -319,6 +344,11 @@ class LGBRegressorObjective(RegressorObjective):
             trial, metric=metric, valid_name="valid_1"
         )
 
+    def get_early_stopping_callback(self):
+        return early_stopping(
+            stopping_rounds=EARLY_STOPPING_ROUNDS, verbose=self.verbose
+        )
+
 
 class XGBQuantileRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
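Note on the objective.py change above: early stopping now reaches the boosters through library callbacks built by the new get_early_stopping_callback() hooks, instead of early_stopping_rounds/verbose keyword arguments to fit(). The sketch below is illustrative only and not taken from openstef; the random data, the train/validation split and the EARLY_STOPPING_ROUNDS constant are placeholders, and only the xgboost.callback.EarlyStopping and lightgbm.early_stopping calls mirror what the diff adds.

# Illustrative sketch, not part of the diff: callback-based early stopping.
import numpy as np
from lightgbm import LGBMRegressor, early_stopping
from xgboost import XGBRegressor
from xgboost.callback import EarlyStopping

EARLY_STOPPING_ROUNDS = 10  # placeholder value
rng = np.random.default_rng(42)
X, y = rng.normal(size=(200, 4)), rng.normal(size=200)
train_x, train_y, valid_x, valid_y = X[:150], y[:150], X[150:], y[150:]
eval_set = [(train_x, train_y), (valid_x, valid_y)]

# XGBoost: EarlyStopping watches the second eval set, reported as "validation_1"
xgb_model = XGBRegressor(
    n_estimators=100,
    eval_metric="mae",
    callbacks=[
        EarlyStopping(
            rounds=EARLY_STOPPING_ROUNDS,
            metric_name="mae",
            data_name="validation_1",
            maximize=False,
            save_best=True,
        )
    ],
)
xgb_model.fit(train_x, train_y, eval_set=eval_set, verbose=False)

# LightGBM 4.x: the early_stopping() callback replaces the old fit() keyword
lgb_model = LGBMRegressor(n_estimators=100)
lgb_model.fit(
    train_x,
    train_y,
    eval_set=eval_set,
    eval_metric="mae",
    callbacks=[early_stopping(stopping_rounds=EARLY_STOPPING_ROUNDS, verbose=False)],
)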
openstef/model/regressors/arima.py
CHANGED
@@ -126,7 +126,7 @@ class ARIMAOpenstfRegressor(OpenstfRegressor):
         predictions = self.predict_quantile(start, end, exog=x, quantile=quantile)
         return predictions
 
-    def
+    def get_feature_importance(self):
         """Because report needs 'weight' and 'gain' as importance metrics, we set the values to these names.
 
         - 'weight' is corresponding to the coefficients values
openstef/model/regressors/regressor.py
CHANGED
@@ -59,7 +59,7 @@ class OpenstfRegressor(BaseEstimator):
         """
 
     @abstractmethod
-    def fit(self, x:
+    def fit(self, x: pd.DataFrame, y: pd.DataFrame, **kwargs) -> RegressorMixin:
         """Fits the regressor.
 
         Args:
@@ -72,7 +72,7 @@ class OpenstfRegressor(BaseEstimator):
 
         """
 
-    def
+    def get_feature_importance(self) -> Union[pd.DataFrame, None]:
         """Get feature importance.
 
         Returns:
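Note on the regressor.py change above: the abstract interface now spells out fit(x, y, **kwargs) -> RegressorMixin and get_feature_importance() -> Union[pd.DataFrame, None]. The sketch below is a hedged illustration of a conforming implementation, not openstef code; DemoRegressor, its LinearRegression backend and the identical 'gain'/'weight' columns (names borrowed from the ARIMA docstring above) are assumptions made for the example.

# Hedged sketch of an implementation matching the updated method signatures.
# Only the fit() and get_feature_importance() signatures come from this diff.
from typing import Union

import numpy as np
import pandas as pd
from sklearn.base import RegressorMixin
from sklearn.linear_model import LinearRegression


class DemoRegressor(RegressorMixin):
    """Stand-in regressor that follows the OpenstfRegressor method signatures."""

    def __init__(self) -> None:
        self._model = LinearRegression()

    def fit(self, x: pd.DataFrame, y: pd.DataFrame, **kwargs) -> RegressorMixin:
        self._model.fit(x, y)
        self._feature_names = list(x.columns)
        return self

    def get_feature_importance(self) -> Union[pd.DataFrame, None]:
        # Normalised absolute coefficients stand in for the 'gain'/'weight' metrics
        coefs = pd.Series(abs(self._model.coef_).ravel(), index=self._feature_names)
        total = coefs.sum() or 1.0
        return pd.DataFrame({"gain": coefs / total, "weight": coefs / total})


# Example usage with toy data
X = pd.DataFrame(np.random.default_rng(7).normal(size=(50, 3)), columns=["a", "b", "c"])
y = pd.DataFrame({"load": X["a"] * 2 + X["b"]})
print(DemoRegressor().fit(X, y).get_feature_importance())

Callers can then assign model.feature_importance_dataframe = model.get_feature_importance(), which is exactly the pattern objective.py and train_model.py switch to in this release.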
openstef/model/regressors/xgb.py
CHANGED
@@ -36,13 +36,10 @@ class XGBOpenstfRegressor(XGBRegressor, OpenstfRegressor):
         x: np.array,
         y: np.array,
         *,
-        early_stopping_rounds: Optional[int] = None,
         callbacks: Optional[list] = None,
         eval_metric: Optional[str] = None,
         **kwargs
     ):
-        if early_stopping_rounds is not None:
-            self.set_params(early_stopping_rounds=early_stopping_rounds)
         if callbacks is not None:
             self.set_params(callbacks=callbacks)
         if eval_metric is not None:
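Note on the xgb.py change above: XGBOpenstfRegressor.fit() no longer accepts early_stopping_rounds, so early stopping is configured on the estimator itself (via set_params, as the removed wrapper code did internally) or through a callback as shown earlier. A minimal, self-contained sketch with a plain XGBRegressor and random placeholder data:

# Hedged sketch: estimator-level early stopping instead of a fit() keyword.
# The data, round counts and metric are placeholders.
import numpy as np
from xgboost import XGBRegressor

rng = np.random.default_rng(0)
X, y = rng.normal(size=(120, 3)), rng.normal(size=120)

model = XGBRegressor(n_estimators=50, eval_metric="mae")
model.set_params(early_stopping_rounds=5)  # set up front, not passed to fit()
model.fit(
    X[:90],
    y[:90],
    eval_set=[(X[:90], y[:90]), (X[90:], y[90:])],
    verbose=False,
)
print("stopped at iteration:", model.best_iteration)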
openstef/pipeline/train_model.py
CHANGED
@@ -495,10 +495,9 @@ def train_pipeline_step_train_model(
             train_x,
             train_y,
             eval_set=eval_set,
-            verbose=False,
         )
         # Gets the feature importance df or None if we don't have feature importance
-        model.feature_importance_dataframe = model.
+        model.feature_importance_dataframe = model.get_feature_importance()
 
         logger.info("Fitted a new model, not yet stored")
 
{openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openstef
-Version: 3.4.69
+Version: 3.4.72
 Summary: Open short term energy forecaster
 Home-page: https://github.com/OpenSTEF/openstef
 Author: Alliander N.V
@@ -16,7 +16,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: holidays==0.21
 Requires-Dist: joblib==1.3.2
-Requires-Dist: lightgbm
+Requires-Dist: lightgbm>=4.0
 Requires-Dist: matplotlib~=3.7
 Requires-Dist: mlflow~=2.3
 Requires-Dist: networkx~=3.1
@@ -32,7 +32,7 @@ Requires-Dist: pymsteams~=0.2.2
 Requires-Dist: scikit-learn<1.6,>=1.3
 Requires-Dist: scipy~=1.10
 Requires-Dist: statsmodels<1.0.0,>=0.13.5
-Requires-Dist: structlog
+Requires-Dist: structlog>=23.1
 Requires-Dist: xgboost~=2.0; extra == "gpu"
 Provides-Extra: cpu
 Requires-Dist: xgboost-cpu~=2.0; extra == "cpu"
{openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/RECORD
CHANGED
@@ -46,8 +46,8 @@ openstef/model/__init__.py,sha256=bIyGTSA4V5VoOLTwdaiJJAnozmpSzvQooVYlsf8H4eU,16
 openstef/model/basecase.py,sha256=caI6Q-8y0ymlxGK9Js_H3Vh0q6ruNHlGD5RG0_kE5M0,2878
 openstef/model/confidence_interval_applicator.py,sha256=VOdHsDJhfeyaq_cnk9QMUaZ2IumbiBBoW1zo8AuqDg0,9559
 openstef/model/fallback.py,sha256=g6TEuEhV4w07SCGkR_AvPf2up9f0ixGKQojYC-Ewl6Y,2812
-openstef/model/model_creator.py,sha256=
-openstef/model/objective.py,sha256=
+openstef/model/model_creator.py,sha256=XiWqTrVwCqGcqc9PUKprMOHu7T9Lz2qngi4bD8LkWas,6473
+openstef/model/objective.py,sha256=0PZUbPzuyaYlpWEH_qPavO6ll7zwqTTUTfIrUzzFMbs,15585
 openstef/model/objective_creator.py,sha256=3jJgcmY1sm-Yoe3SfjKrJukrsqtYyloUFaPbBWqswhQ,2208
 openstef/model/serializer.py,sha256=k5GY8eRJdlii8mEY7Qheu4yb5USyIyxw77EYkSQJGYk,17034
 openstef/model/standard_deviation_generator.py,sha256=OorRvX2wRScU7f4SIBoiT24yJeeM50sETP3xC6m5IG4,2865
@@ -56,7 +56,7 @@ openstef/model/metamodels/feature_clipper.py,sha256=DNsyYdjUT7ZNimJJIyTvv1nmwTwD
 openstef/model/metamodels/grouped_regressor.py,sha256=yMN_a6TnQSyFaqlB_6Nifq-ydpb5hs6w_b97IaBbHj4,8337
 openstef/model/metamodels/missing_values_handler.py,sha256=glgAlkeubLZFWbD8trTYBik7_qOJi4GCPGl1sSybSkQ,5257
 openstef/model/regressors/__init__.py,sha256=bIyGTSA4V5VoOLTwdaiJJAnozmpSzvQooVYlsf8H4eU,163
-openstef/model/regressors/arima.py,sha256=
+openstef/model/regressors/arima.py,sha256=2MIcIvfxOAxYC5QtC00K9oQl2-qaOewm7P3gzoHgxgE,7587
 openstef/model/regressors/custom_regressor.py,sha256=T4JdJ-oTTt1PHQV0DdIEIhALvEEh07WCFlWxl8EFih0,1765
 openstef/model/regressors/dazls.py,sha256=Xt89yFHjkwpIUTkkhPmPZ74F8_tht_XV88INuP5GU2E,3994
 openstef/model/regressors/flatliner.py,sha256=T9u-ukhqFcatQmlgUtBL_G-1b_wQzgdVRq0ac64GnjQ,2789
@@ -64,8 +64,8 @@ openstef/model/regressors/gblinear_quantile.py,sha256=PKQL_TAXa3Kw9oZrKC6Uvo_n2N
 openstef/model/regressors/lgbm.py,sha256=zCdn1euEdSFxYJzH8XqQFFnb6R4JVUnmineKjX_Gy-g,800
 openstef/model/regressors/linear.py,sha256=uOvZMLGZH_9nXfmS5honCMfyVeyGXP1Cza9A_BdXlVw,3665
 openstef/model/regressors/linear_quantile.py,sha256=zIpGo9deMeTZdwFWoZ3FstX74mYdlAhfg-YOsPRFl0k,10534
-openstef/model/regressors/regressor.py,sha256=
-openstef/model/regressors/xgb.py,sha256=
+openstef/model/regressors/regressor.py,sha256=0um575rTEkzYb1E5IAOuTlsZDhmb7eI5byu5e062NRs,3469
+openstef/model/regressors/xgb.py,sha256=uhV9Wm90aOkjByTm-O2xpt2kpANRxAqQvv5mA0H1uBc,1294
 openstef/model/regressors/xgb_multioutput_quantile.py,sha256=xWzA7tymC_o-F1OS3I7vUKf9zP6RR1ZglEeY4NAgjU0,9146
 openstef/model/regressors/xgb_quantile.py,sha256=PzKIxqN_CnEPFmzXACNuzLSmZSHbooTuiJ5ckJ9vh_E,7805
 openstef/model_selection/__init__.py,sha256=bIyGTSA4V5VoOLTwdaiJJAnozmpSzvQooVYlsf8H4eU,163
@@ -79,7 +79,7 @@ openstef/pipeline/create_component_forecast.py,sha256=40fYKajdj4F9K7fzmL3euyvwTr
 openstef/pipeline/create_forecast.py,sha256=mVbbu7jM31NEwfaDeQPqF3Okps9H1oLfjhPPiJRL4zg,5582
 openstef/pipeline/optimize_hyperparameters.py,sha256=w5LpZhW3KVklCJzaogNzyHfpMJfNqeRAnvyV4vi35wg,10953
 openstef/pipeline/train_create_forecast_backtest.py,sha256=hBJPxfDkbrmFSSGZrRH1vTiIVqJP-SWe0ibVpHT_8Qg,6048
-openstef/pipeline/train_model.py,sha256=
+openstef/pipeline/train_model.py,sha256=O1pyATMQUkNZQ01FlOwG8r3gtKwRcx7YD73f-91umuo,19948
 openstef/pipeline/utils.py,sha256=23mB31p19FoGWelLJzxNmqlzGwEr3fCDBEA37V2kpYY,2167
 openstef/plotting/__init__.py,sha256=KQjXzyafCt1bE7XDrSeV4TDUIO7MkwN_Br4ASOcNI2g,163
 openstef/plotting/load_forecast_plotter.py,sha256=GWHVmUB2YosNj7TnSrMnxYAfM2Z1mNg5oRV9A_lJmQY,8129
@@ -103,8 +103,8 @@ openstef/tasks/utils/predictionjobloop.py,sha256=Ysy3zF5lzPMz_asYDKeF5m0qgVT3tCt
 openstef/tasks/utils/taskcontext.py,sha256=O-LZ_wHEl5vbT8oB7EYtOeMkvk6EqCnI1-KiyER7Eu4,5407
 openstef/validation/__init__.py,sha256=bIyGTSA4V5VoOLTwdaiJJAnozmpSzvQooVYlsf8H4eU,163
 openstef/validation/validation.py,sha256=r6UqkdH5TMjsGfn8Ta07K1jkqmrVmwcPGfyQvMmZyO4,11459
-openstef-3.4.
-openstef-3.4.
-openstef-3.4.
-openstef-3.4.
-openstef-3.4.
+openstef-3.4.72.dist-info/licenses/LICENSE,sha256=7Pm2fWFFHHUG5lDHed1vl5CjzxObIXQglnYsEdtjo_k,14907
+openstef-3.4.72.dist-info/METADATA,sha256=79Nx_OmSE_1sLNUR1OIvNarsDaieOjrbjEU1dnRfo28,8834
+openstef-3.4.72.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+openstef-3.4.72.dist-info/top_level.txt,sha256=kD0H4PqrQoncZ957FvqwfBxa89kTrun4Z_RAPs_HhLs,9
+openstef-3.4.72.dist-info/RECORD,,
{openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/WHEEL
File without changes
{openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/licenses/LICENSE
File without changes
{openstef-3.4.69.dist-info → openstef-3.4.72.dist-info}/top_level.txt
File without changes