openstef 3.4.37__tar.gz → 3.4.40__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {openstef-3.4.37 → openstef-3.4.40}/PKG-INFO +2 -3
- {openstef-3.4.37 → openstef-3.4.40}/README.md +1 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/enums.py +1 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/missing_values_transformer.py +42 -4
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/model_creator.py +24 -20
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/objective.py +7 -7
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/objective_creator.py +11 -11
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/flatliner.py +1 -6
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/linear_quantile.py +57 -8
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/calculate_kpi.py +3 -3
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_components_forecast.py +2 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_forecast.py +2 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/optimize_hyperparameters.py +2 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/split_forecast.py +2 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/train_model.py +2 -2
- {openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/PKG-INFO +2 -3
- {openstef-3.4.37 → openstef-3.4.40}/setup.py +1 -1
- {openstef-3.4.37 → openstef-3.4.40}/LICENSE +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/__main__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/app_settings.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_baseline_model.z +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_baseline_model.z.license +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_model_card.md +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_model_card.md.license +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dutch_holidays.csv +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/dutch_holidays.csv.license +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/pv_single_coefs.csv +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data/pv_single_coefs.csv.license +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data_classes/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data_classes/data_prep.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data_classes/model_specifications.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data_classes/prediction_job.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/data_classes/split_function.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/exceptions.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/apply_features.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/data_preparation.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/feature_adder.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/feature_applicator.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/general.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/holiday_features.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/lag_features.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/weather_features.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/metrics/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/metrics/figure.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/metrics/metrics.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/metrics/reporter.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/basecase.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/confidence_interval_applicator.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/fallback.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/metamodels/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/metamodels/grouped_regressor.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/metamodels/missing_values_handler.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/arima.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/custom_regressor.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/dazls.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/lgbm.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/linear.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/regressor.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/xgb.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/xgb_multioutput_quantile.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/xgb_quantile.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/serializer.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model/standard_deviation_generator.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model_selection/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/model_selection/model_selection.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/monitoring/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/monitoring/performance_meter.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/monitoring/teams.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/create_basecase_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/create_component_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/create_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/optimize_hyperparameters.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/train_create_forecast_backtest.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/train_model.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/pipeline/utils.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/postprocessing/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/postprocessing/postprocessing.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/preprocessing/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/preprocessing/preprocessing.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/settings.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_basecase_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_solar_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_wind_forecast.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/utils/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/utils/dependencies.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/utils/predictionjobloop.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/utils/taskcontext.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/validation/__init__.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef/validation/validation.py +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/SOURCES.txt +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/dependency_links.txt +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/requires.txt +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/top_level.txt +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/pyproject.toml +0 -0
- {openstef-3.4.37 → openstef-3.4.40}/setup.cfg +0 -0
{openstef-3.4.37 → openstef-3.4.40}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: openstef
-Version: 3.4.37
+Version: 3.4.40
 Summary: Open short term energy forecaster
 Home-page: https://github.com/OpenSTEF/openstef
 Author: Alliander N.V
@@ -78,7 +78,6 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 - [Linux Foundation project page](https://www.lfenergy.org/projects/openstef/);
 - [Documentation on dashboard](https://raw.githack.com/OpenSTEF/.github/main/profile/html/openstef_dashboard_doc.html);
 - [Video about OpenSTEF](https://www.lfenergy.org/forecasting-to-create-a-more-resilient-optimized-grid/);
-- [Teams channel](https://teams.microsoft.com/l/team/19%3ac08a513650524fc988afb296cd0358cc%40thread.tacv2/conversations?groupId=bfcb763a-3a97-4938-81d7-b14512aa537d&tenantId=697f104b-d7cb-48c8-ac9f-bd87105bafdc)
 
 # Installation
 
@@ -88,7 +87,7 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 pip install openstef
 ```
 
-### Remark regarding installation within a **conda environment on Windows
+### Remark regarding installation within a **conda environment on Windows**
 
 A version of the pywin32 package will be installed as a secondary dependency along with the installation of the openstef package. Since conda relies on an old version of pywin32, the new installation can break conda's functionality. The following command can solve this issue:
 ```shell
{openstef-3.4.37 → openstef-3.4.40}/README.md
RENAMED
@@ -41,7 +41,6 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 - [Linux Foundation project page](https://www.lfenergy.org/projects/openstef/);
 - [Documentation on dashboard](https://raw.githack.com/OpenSTEF/.github/main/profile/html/openstef_dashboard_doc.html);
 - [Video about OpenSTEF](https://www.lfenergy.org/forecasting-to-create-a-more-resilient-optimized-grid/);
-- [Teams channel](https://teams.microsoft.com/l/team/19%3ac08a513650524fc988afb296cd0358cc%40thread.tacv2/conversations?groupId=bfcb763a-3a97-4938-81d7-b14512aa537d&tenantId=697f104b-d7cb-48c8-ac9f-bd87105bafdc)
 
 # Installation
 
@@ -51,7 +50,7 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 pip install openstef
 ```
 
-### Remark regarding installation within a **conda environment on Windows
+### Remark regarding installation within a **conda environment on Windows**
 
 A version of the pywin32 package will be installed as a secondary dependency along with the installation of the openstef package. Since conda relies on an old version of pywin32, the new installation can break conda's functionality. The following command can solve this issue:
 ```shell
{openstef-3.4.37 → openstef-3.4.40}/openstef/enums.py
RENAMED
@@ -4,8 +4,7 @@
 from enum import Enum
 
 
-
-class MLModelType(Enum):
+class ModelType(Enum):
     XGB = "xgb"
     XGB_QUANTILE = "xgb_quantile"
     XGB_MULTIOUTPUT_QUANTILE = "xgb_multioutput_quantile"
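The change above renames the public enum `MLModelType` to `ModelType` while keeping the member values (e.g. `"xgb"`). Downstream code that imports the old name has to follow the rename; a minimal, hypothetical compatibility sketch (assuming openstef is installed) could look like this:

```python
# Hypothetical shim for code that still imports the pre-rename enum name.
# Assumes an openstef release from this diff range (3.4.40); the fallback
# branch only applies to older releases that still expose MLModelType.
try:
    from openstef.enums import ModelType
except ImportError:
    from openstef.enums import MLModelType as ModelType

# Member values are unchanged by the rename, e.g. ModelType.XGB.value == "xgb".
print([member.value for member in ModelType])
```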
{openstef-3.4.37 → openstef-3.4.40}/openstef/feature_engineering/missing_values_transformer.py
RENAMED
@@ -27,6 +27,7 @@ class MissingValuesTransformer:
         missing_values: Union[int, float, str, None] = np.nan,
         imputation_strategy: str = None,
         fill_value: Union[str, int, float] = None,
+        no_fill_future_values_features: List[str] = None,
     ):
         """Initialize missing values handler.
 
@@ -37,11 +38,15 @@ class MissingValuesTransformer:
                 Can be one of "mean", "median", "most_frequent", "constant" or None.
             fill_value: When strategy == "constant", fill_value is used to replace all
                 occurrences of missing_values.
+            no_fill_future_values_features: The features for which it does not make sense
+                to fill future values. Rows that contain trailing null values for these
+                features will be removed from the data.
 
         """
         self.missing_values = missing_values
         self.imputation_strategy = imputation_strategy
         self.fill_value = fill_value
+        self.no_fill_future_values_features = no_fill_future_values_features or []
         self.is_fitted_ = False
 
         # Build the proper imputation transformer
@@ -57,6 +62,11 @@ class MissingValuesTransformer:
         ).set_output(transform="pandas")
         self.imputer_._validate_params()
 
+    @staticmethod
+    def _determine_trailing_null_rows(x: pd.DataFrame) -> pd.Series:
+        """Determine rows with trailing null values in a DataFrame."""
+        return ~x.bfill().isnull().any(axis="columns")
+
     def fit(self, x, y=None):
         """Fit the imputer on the input data."""
         _ = check_array(x, force_all_finite="allow-nan")
@@ -69,9 +79,17 @@ class MissingValuesTransformer:
         # Remove always null columns
         is_column_null = x.isnull().all(axis="index")
         self.non_null_feature_names = list(x.columns[~is_column_null])
+        x = x[self.non_null_feature_names]
+
+        # Remove trailing null rows for features that should
+        # not be imputed in the future
+        trailing_null_rows = self._determine_trailing_null_rows(
+            x[self.no_fill_future_values_features]
+        )
+        x = x.loc[trailing_null_rows]
 
         # Imputers do not support labels
-        self.imputer_.fit(X=x
+        self.imputer_.fit(X=x, y=None)
         self.is_fitted_ = True
 
     def transform(self, x) -> pd.DataFrame:
@@ -83,9 +101,11 @@ class MissingValuesTransformer:
 
         x = x[self.non_null_feature_names]
 
-        return self.imputer_.transform(x)
+        transformed = self.imputer_.transform(x)
+
+        return transformed
 
-    def fit_transform(self, x, y=None):
+    def fit_transform(self, x, y=None) -> tuple[pd.DataFrame, Optional[pd.Series]]:
         """Fit the imputer on the input data and transform it.
 
         Returns:
@@ -93,7 +113,25 @@ class MissingValuesTransformer:
 
         """
         self.fit(x, y)
-        return self.transform(x)
+
+        if not isinstance(x, pd.DataFrame):
+            x = pd.DataFrame(np.asarray(x))
+
+        x = x[self.non_null_feature_names]
+
+        # Remove trailing null rows for features that should
+        # not be imputed in the future
+        non_trailing_null_rows = self._determine_trailing_null_rows(
+            x[self.no_fill_future_values_features]
+        )
+        x = x.loc[non_trailing_null_rows]
+
+        x = self.transform(x)
+
+        if y is not None:
+            y = y.loc[non_trailing_null_rows]
+
+        return x, y
 
     @classmethod
     def _identity(cls, x):
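The behavioural core of this change is `_determine_trailing_null_rows`: back-fill the selected columns and keep only the rows that still have no nulls, which drops rows whose nulls are trailing (unknown future values that should not be imputed). A small self-contained sketch with plain pandas (toy data, no openstef import) shows the same mask:

```python
import numpy as np
import pandas as pd

# Toy frame: "load" has trailing NaNs (unknown future values),
# "temperature" is a forecast feature that is available for future rows.
df = pd.DataFrame(
    {
        "load": [1.0, 2.0, np.nan, 3.0, np.nan, np.nan],
        "temperature": [10.0, 11.0, 12.0, 13.0, 14.0, 15.0],
    }
)

# Same logic as MissingValuesTransformer._determine_trailing_null_rows,
# applied to the no_fill_future_values_features subset (here: ["load"]).
keep_rows = ~df[["load"]].bfill().isnull().any(axis="columns")

print(keep_rows.tolist())  # [True, True, True, True, False, False]
print(df.loc[keep_rows])   # the two rows with trailing NaNs in "load" are dropped
```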
{openstef-3.4.37 → openstef-3.4.40}/openstef/model/model_creator.py
RENAMED
@@ -6,7 +6,7 @@ from typing import Union
 
 import structlog
 
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.model.regressors.arima import ARIMAOpenstfRegressor
 from openstef.model.regressors.custom_regressor import is_custom_type, load_custom_model
 from openstef.model.regressors.lgbm import LGBMOpenstfRegressor
@@ -29,7 +29,7 @@ structlog.configure(
 logger = structlog.get_logger(__name__)
 
 valid_model_kwargs = {
-    MLModelType.XGB: [
+    ModelType.XGB: [
         "n_estimators",
         "objective",
         "max_depth",
@@ -60,7 +60,7 @@ valid_model_kwargs = {
         "validate_parameters",
         "early_stopping_rounds",
     ],
-    MLModelType.LGB: [
+    ModelType.LGB: [
         "boosting_type",
         "objective",
         "num_leaves",
@@ -82,7 +82,7 @@ valid_model_kwargs = {
         "importance_type",
         "early_stopping_rounds",
     ],
-    MLModelType.XGB_QUANTILE: [
+    ModelType.XGB_QUANTILE: [
         "quantiles",
         "gamma",
         "colsample_bytree",
@@ -91,7 +91,7 @@ valid_model_kwargs = {
         "max_depth",
         "early_stopping_rounds",
     ],
-    MLModelType.XGB_MULTIOUTPUT_QUANTILE: [
+    ModelType.XGB_MULTIOUTPUT_QUANTILE: [
         "quantiles",
         "gamma",
         "colsample_bytree",
@@ -101,23 +101,27 @@ valid_model_kwargs = {
         "early_stopping_rounds",
         "arctan_smoothing",
     ],
-    MLModelType.LINEAR: [
+    ModelType.LINEAR: [
         "missing_values",
         "imputation_strategy",
         "fill_value",
     ],
-    MLModelType.FLATLINER: [
+    ModelType.FLATLINER: [
         "quantiles",
     ],
-    MLModelType.LINEAR_QUANTILE: [
+    ModelType.LINEAR_QUANTILE: [
         "alpha",
         "quantiles",
         "solver",
         "missing_values",
         "imputation_strategy",
         "fill_value",
+        "weight_scale_percentile",
+        "weight_exponent",
+        "weight_floor",
+        "no_fill_future_values_features",
     ],
-    MLModelType.ARIMA: [
+    ModelType.ARIMA: [
         "backtest_max_horizon",
         "order",
         "seasonal_order",
@@ -131,18 +135,18 @@ class ModelCreator:
 
     # Set object mapping
     MODEL_CONSTRUCTORS = {
-        MLModelType.XGB: XGBOpenstfRegressor,
-        MLModelType.LGB: LGBMOpenstfRegressor,
-        MLModelType.XGB_QUANTILE: XGBQuantileOpenstfRegressor,
-        MLModelType.XGB_MULTIOUTPUT_QUANTILE: XGBMultiOutputQuantileOpenstfRegressor,
-        MLModelType.LINEAR: LinearOpenstfRegressor,
-        MLModelType.LINEAR_QUANTILE: LinearQuantileOpenstfRegressor,
-        MLModelType.ARIMA: ARIMAOpenstfRegressor,
-        MLModelType.FLATLINER: FlatlinerRegressor,
+        ModelType.XGB: XGBOpenstfRegressor,
+        ModelType.LGB: LGBMOpenstfRegressor,
+        ModelType.XGB_QUANTILE: XGBQuantileOpenstfRegressor,
+        ModelType.XGB_MULTIOUTPUT_QUANTILE: XGBMultiOutputQuantileOpenstfRegressor,
+        ModelType.LINEAR: LinearOpenstfRegressor,
+        ModelType.LINEAR_QUANTILE: LinearQuantileOpenstfRegressor,
+        ModelType.ARIMA: ARIMAOpenstfRegressor,
+        ModelType.FLATLINER: FlatlinerRegressor,
     }
 
     @staticmethod
-    def create_model(model_type: Union[MLModelType, str], **kwargs) -> OpenstfRegressor:
+    def create_model(model_type: Union[ModelType, str], **kwargs) -> OpenstfRegressor:
         """Create a machine learning model based on model type.
 
         Args:
@@ -163,7 +167,7 @@ class ModelCreator:
                 model_class = load_custom_model(model_type)
                 valid_kwargs = model_class.valid_kwargs()
             else:
-                model_type = MLModelType(model_type)
+                model_type = ModelType(model_type)
                 model_class = ModelCreator.MODEL_CONSTRUCTORS[model_type]
                 valid_kwargs = valid_model_kwargs[model_type]
             # Check if model as imported
@@ -174,7 +178,7 @@ class ModelCreator:
                     "Please refer to the ReadMe for instructions"
                 )
         except ValueError as e:
-            valid_types = [t.value for t in MLModelType]
+            valid_types = [t.value for t in ModelType]
             raise NotImplementedError(
                 f"No constructor for '{model_type}', "
                 f"valid model_types are: {valid_types} "
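Models are still obtained through `ModelCreator.create_model`, which now accepts a `ModelType` member or its string value, with keyword arguments matched against the `valid_model_kwargs` whitelist above. A usage sketch, assuming openstef 3.4.40 is installed and with purely illustrative parameter values ("APX" is just an example feature name):

```python
from openstef.enums import ModelType
from openstef.model.model_creator import ModelCreator

# The enum member and its string value ("linear_quantile") are interchangeable;
# the kwargs below all appear in valid_model_kwargs[ModelType.LINEAR_QUANTILE].
model = ModelCreator.create_model(
    ModelType.LINEAR_QUANTILE,
    quantiles=(0.1, 0.5, 0.9),               # illustrative quantiles
    weight_scale_percentile=95,              # new weighting options in this release
    weight_exponent=1.0,
    no_fill_future_values_features=["APX"],  # example feature name, not prescriptive
)
print(type(model).__name__)  # LinearQuantileOpenstfRegressor
```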
{openstef-3.4.37 → openstef-3.4.40}/openstef/model/objective.py
RENAMED
@@ -8,7 +8,7 @@ from typing import Any, Callable, Optional
 import optuna
 import pandas as pd
 
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.metrics import metrics
 from openstef.metrics.reporter import Report, Reporter
 from openstef.model.regressors.regressor import OpenstfRegressor
@@ -245,7 +245,7 @@ class RegressorObjective:
 class XGBRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.XGB
+        self.model_type = ModelType.XGB
 
     # extend the parameters with the model specific ones per implementation
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
@@ -282,7 +282,7 @@ class XGBRegressorObjective(RegressorObjective):
 class LGBRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.LGB
+        self.model_type = ModelType.LGB
 
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
         """Get parameters for LGB Regressor Objective with objective specific parameters.
@@ -323,7 +323,7 @@ class LGBRegressorObjective(RegressorObjective):
 class XGBQuantileRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.XGB_QUANTILE
+        self.model_type = ModelType.XGB_QUANTILE
 
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
         """Get parameters for XGBQuantile Regressor Objective with objective specific parameters.
@@ -352,7 +352,7 @@ class XGBQuantileRegressorObjective(RegressorObjective):
 class XGBMultioutputQuantileRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.XGB_QUANTILE
+        self.model_type = ModelType.XGB_QUANTILE
 
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
         """Get parameters for XGB Multioutput Quantile Regressor Objective with objective specific parameters.
@@ -382,7 +382,7 @@ class XGBMultioutputQuantileRegressorObjective(RegressorObjective):
 class LinearRegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.LINEAR
+        self.model_type = ModelType.LINEAR
 
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
         """Get parameters for Linear Regressor Objective with objective specific parameters.
@@ -405,7 +405,7 @@ class LinearRegressorObjective(RegressorObjective):
 class ARIMARegressorObjective(RegressorObjective):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.model_type = MLModelType.ARIMA
+        self.model_type = ModelType.ARIMA
 
     def get_params(self, trial: optuna.trial.FrozenTrial) -> dict:
         """Get parameters for ARIMA Regressor Objective with objective specific parameters.
{openstef-3.4.37 → openstef-3.4.40}/openstef/model/objective_creator.py
RENAMED
@@ -4,7 +4,7 @@
 
 from typing import Union
 
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.model.objective import (
     ARIMARegressorObjective,
     LGBRegressorObjective,
@@ -22,17 +22,17 @@ from openstef.model.regressors.custom_regressor import (
 
 class ObjectiveCreator:
     OBJECTIVES = {
-        MLModelType.XGB: XGBRegressorObjective,
-        MLModelType.LGB: LGBRegressorObjective,
-        MLModelType.XGB_QUANTILE: XGBQuantileRegressorObjective,
-        MLModelType.XGB_MULTIOUTPUT_QUANTILE: XGBMultioutputQuantileRegressorObjective,
-        MLModelType.LINEAR: LinearRegressorObjective,
-        MLModelType.LINEAR_QUANTILE: LinearRegressorObjective,
-        MLModelType.ARIMA: ARIMARegressorObjective,
+        ModelType.XGB: XGBRegressorObjective,
+        ModelType.LGB: LGBRegressorObjective,
+        ModelType.XGB_QUANTILE: XGBQuantileRegressorObjective,
+        ModelType.XGB_MULTIOUTPUT_QUANTILE: XGBMultioutputQuantileRegressorObjective,
+        ModelType.LINEAR: LinearRegressorObjective,
+        ModelType.LINEAR_QUANTILE: LinearRegressorObjective,
+        ModelType.ARIMA: ARIMARegressorObjective,
     }
 
     @staticmethod
-    def create_objective(model_type: Union[MLModelType, str]) -> RegressorObjective:
+    def create_objective(model_type: Union[ModelType, str]) -> RegressorObjective:
         """Create an objective function based on model type.
 
         Args:
@@ -51,10 +51,10 @@ class ObjectiveCreator:
             if is_custom_type(model_type):
                 objective = create_custom_objective(model_type)
             else:
-                model_type = MLModelType(model_type)
+                model_type = ModelType(model_type)
                 objective = ObjectiveCreator.OBJECTIVES[model_type]
         except ValueError as e:
-            valid_types = [t.value for t in MLModelType]
+            valid_types = [t.value for t in ModelType]
             raise NotImplementedError(
                 f"No objective for '{model_type}', "
                 f"valid model_types are: {valid_types}"
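The tuning side mirrors this: `ObjectiveCreator.create_objective` resolves a `ModelType` (or its string value) to one of the objective classes in the `OBJECTIVES` table above. A brief sketch, assuming openstef 3.4.40 is installed:

```python
from openstef.enums import ModelType
from openstef.model.objective_creator import ObjectiveCreator

# create_objective looks the model type up in ObjectiveCreator.OBJECTIVES and
# returns the objective class; it is instantiated elsewhere (e.g. by the
# hyperparameter-optimization pipeline) with the training data.
objective_class = ObjectiveCreator.create_objective(ModelType.XGB)
print(objective_class.__name__)  # XGBRegressorObjective
```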
{openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/flatliner.py
RENAMED
@@ -2,18 +2,13 @@
 #
 # SPDX-License-Identifier: MPL-2.0
 import re
-from typing import
+from typing import List
 
 import numpy as np
 import pandas as pd
 from sklearn.base import RegressorMixin
-from sklearn.linear_model import QuantileRegressor
-from sklearn.preprocessing import MinMaxScaler
 from sklearn.utils.validation import check_is_fitted
 
-from openstef.feature_engineering.missing_values_transformer import (
-    MissingValuesTransformer,
-)
 from openstef.model.regressors.regressor import OpenstfRegressor
 
 
{openstef-3.4.37 → openstef-3.4.40}/openstef/model/regressors/linear_quantile.py
RENAMED
@@ -2,13 +2,13 @@
 #
 # SPDX-License-Identifier: MPL-2.0
 import re
-from typing import Dict, Union, Set, Optional
+from typing import Dict, Union, Set, Optional, List
 
 import numpy as np
 import pandas as pd
 from sklearn.base import RegressorMixin
 from sklearn.linear_model import QuantileRegressor
-from sklearn.preprocessing import
+from sklearn.preprocessing import StandardScaler
 from sklearn.utils.validation import check_is_fitted
 
 from openstef.feature_engineering.missing_values_transformer import (
@@ -25,8 +25,8 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
     solver: str
 
     imputer_: MissingValuesTransformer
-    x_scaler_:
-    y_scaler_:
+    x_scaler_: StandardScaler
+    y_scaler_: StandardScaler
     models_: Dict[float, QuantileRegressor]
 
     is_fitted_: bool = False
@@ -47,6 +47,10 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
         missing_values: Union[int, float, str, None] = np.nan,
         imputation_strategy: Optional[str] = "mean",
         fill_value: Union[str, int, float] = None,
+        weight_scale_percentile: int = 95,
+        weight_exponent: float = 1,
+        weight_floor: float = 0.1,
+        no_fill_future_values_features: List[str] = None,
     ):
         """Initialize LinearQuantileOpenstfRegressor.
 
@@ -69,6 +73,12 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
             missing_values: Value to be considered as missing value
             imputation_strategy: Imputation strategy
             fill_value: Fill value
+            weight_scale_percentile: Percentile used in scaling of the samples
+            weight_exponent: Exponent used in sample weighing
+            weight_floor: Minimum weight for samples
+            no_fill_future_values_features: The features for which it does not make sense
+                to fill future values. Rows that contain trailing null values for these
+                features will be removed from the data.
 
         """
         super().__init__()
@@ -82,13 +92,17 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
         self.quantiles = quantiles
         self.alpha = alpha
         self.solver = solver
+        self.weight_scale_percentile = weight_scale_percentile
+        self.weight_exponent = weight_exponent
+        self.weight_floor = weight_floor
         self.imputer_ = MissingValuesTransformer(
             missing_values=missing_values,
             imputation_strategy=imputation_strategy,
             fill_value=fill_value,
+            no_fill_future_values_features=no_fill_future_values_features,
         )
-        self.x_scaler_ =
-        self.y_scaler_ =
+        self.x_scaler_ = StandardScaler()
+        self.y_scaler_ = StandardScaler()
         self.models_ = {
             quantile: QuantileRegressor(alpha=alpha, quantile=quantile, solver=solver)
             for quantile in quantiles
@@ -165,7 +179,7 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
         x = self._remove_ignored_features(x)
 
         # Fix nan columns
-        x = self.imputer_.fit_transform(x)
+        x, y = self.imputer_.fit_transform(x, y)
         if x.isna().any().any():
             raise ValueError(
                 "There are nan values in the input data. Set "
@@ -177,7 +191,7 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
         y_scaled = self.y_scaler_.fit_transform(y.to_frame())[:, 0]
 
         # Add more focus on extreme / peak values
-        sample_weight =
+        sample_weight = self._calculate_sample_weights(y.values.squeeze())
 
         # Fit quantile regressors
         for quantile in self.quantiles:
@@ -191,6 +205,33 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
 
         return self
 
+    def _calculate_sample_weights(self, y: np.array):
+        """Calculate sample weights based on the y values of arbitrary scale.
+
+        The resulting weights are in the range [0,1] and are used to put more emphasis
+        on certain samples. The sample weighting function does:
+
+        * Rescale data to a [-1, 1] range using quantile scaling. 90% of the data will
+          be within this range. Rest is outside.
+        * Calculate the weight by taking the exponent of scaled data.
+            * exponent=0: Results in uniform weights for all samples.
+            * exponent=1: Results in linearly increasing weights for samples that are
+              closer to the extremes.
+            * exponent>1: Results in exponentially increasing weights for samples that are
+              closer to the extremes.
+        * Clip the data to [0, 1] range with weight_floor as the minimum weight.
+            * Weight floor is used to make sure that all the samples are considered.
+
+        """
+        return np.clip(
+            _weight_exp(
+                _scale_percentile(y, percentile=self.weight_scale_percentile),
+                exponent=self.weight_exponent,
+            ),
+            a_min=self.weight_floor,
+            a_max=1,
+        )
+
     def predict(self, x: pd.DataFrame, quantile: float = 0.5, **kwargs) -> np.array:
         """Makes a prediction for a desired quantile.
 
@@ -245,3 +286,11 @@ class LinearQuantileOpenstfRegressor(OpenstfRegressor, RegressorMixin):
 
     def __sklearn_is_fitted__(self) -> bool:
         return self.is_fitted_
+
+
+def _scale_percentile(x: np.ndarray, percentile: int = 95):
+    return np.abs(x / np.percentile(np.abs(x), percentile))
+
+
+def _weight_exp(x: np.ndarray, exponent: float = 1):
+    return np.abs(x) ** exponent
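The new sample weighting is small enough to verify by hand: scale by the 95th percentile of the absolute values, raise to `weight_exponent`, then clip to `[weight_floor, 1]`. A standalone NumPy sketch that mirrors `_scale_percentile`, `_weight_exp` and the clip in `_calculate_sample_weights`, using the constructor defaults (percentile 95, exponent 1, floor 0.1):

```python
import numpy as np


def _scale_percentile(x: np.ndarray, percentile: int = 95) -> np.ndarray:
    # Rescale so that roughly `percentile` percent of samples fall within [0, 1]
    # by absolute value; only the most extreme samples land above 1.
    return np.abs(x / np.percentile(np.abs(x), percentile))


def _weight_exp(x: np.ndarray, exponent: float = 1) -> np.ndarray:
    # exponent=0 gives uniform weights, exponent=1 linear emphasis on extremes,
    # exponent>1 even stronger emphasis on extremes.
    return np.abs(x) ** exponent


y = np.array([-10.0, -1.0, 0.0, 0.5, 2.0, 20.0])  # toy target values
weights = np.clip(
    _weight_exp(_scale_percentile(y, percentile=95), exponent=1),
    a_min=0.1,  # weight_floor keeps every sample in play
    a_max=1,
)
print(weights.round(3))  # small |y| values sit at the 0.1 floor, the largest hits 1.0
```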
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/calculate_kpi.py
RENAMED
@@ -29,7 +29,7 @@ import pandas as pd
 import structlog
 
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.exceptions import NoPredictedLoadError, NoRealisedLoadError
 from openstef.metrics import metrics
 from openstef.settings import Settings
@@ -42,7 +42,7 @@ THRESHOLD_RETRAINING = 0.25
 THRESHOLD_OPTIMIZING = 0.50
 
 
-def main(model_type: MLModelType = None, config=None, database=None) -> None:
+def main(model_type: ModelType = None, config=None, database=None) -> None:
     taskname = Path(__file__).name.replace(".py", "")
 
     if database is None or config is None:
@@ -52,7 +52,7 @@ def main(model_type: MLModelType = None, config=None, database=None) -> None:
         )
 
     if model_type is None:
-        model_type = [ml.value for ml in MLModelType]
+        model_type = [ml.value for ml in ModelType]
 
     with TaskContext(taskname, config, database) as context:
         # Set start and end time
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_components_forecast.py
RENAMED
@@ -29,7 +29,7 @@ import pandas as pd
 import structlog
 
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.exceptions import ComponentForecastTooShortHorizonError
 from openstef.pipeline.create_component_forecast import (
     create_components_forecast_pipeline,
@@ -150,7 +150,7 @@ def main(config: object = None, database: object = None, **kwargs):
     )
 
     with TaskContext(taskname, config, database) as context:
-        model_type = [ml.value for ml in MLModelType]
+        model_type = [ml.value for ml in ModelType]
 
         PredictionJobLoop(
             context,
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/create_forecast.py
RENAMED
@@ -24,7 +24,7 @@ from datetime import datetime, timedelta
 from pathlib import Path
 
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType, PipelineType
+from openstef.enums import ModelType, PipelineType
 from openstef.exceptions import InputDataOngoingZeroFlatlinerError
 from openstef.pipeline.create_forecast import create_forecast_pipeline
 from openstef.tasks.utils.predictionjobloop import PredictionJobLoop
@@ -129,7 +129,7 @@ def main(model_type=None, config=None, database=None, **kwargs):
 
     with TaskContext(taskname, config, database) as context:
         if model_type is None:
-            model_type = [ml.value for ml in MLModelType]
+            model_type = [ml.value for ml in ModelType]
 
         PredictionJobLoop(context, model_type=model_type).map(
             create_forecast_task, context, **kwargs
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/optimize_hyperparameters.py
RENAMED
@@ -20,7 +20,7 @@ from datetime import datetime, timedelta
 from pathlib import Path
 
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType, PipelineType
+from openstef.enums import ModelType, PipelineType
 from openstef.model.serializer import MLflowSerializer
 from openstef.monitoring import teams
 from openstef.pipeline.optimize_hyperparameters import optimize_hyperparameters_pipeline
@@ -124,7 +124,7 @@ def main(config=None, database=None):
     )
 
     with TaskContext(taskname, config, database) as context:
-        model_type = [ml.value for ml in MLModelType]
+        model_type = [ml.value for ml in ModelType]
 
         PredictionJobLoop(context, model_type=model_type).map(
             optimize_hyperparameters_task, context
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/split_forecast.py
RENAMED
@@ -33,7 +33,7 @@ import structlog
 
 import openstef.monitoring.teams as monitoring
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType
+from openstef.enums import ModelType
 from openstef.settings import Settings
 from openstef.tasks.utils.predictionjobloop import PredictionJobLoop
 from openstef.tasks.utils.taskcontext import TaskContext
@@ -51,7 +51,7 @@ def main(config=None, database=None):
     )
 
     with TaskContext(taskname, config, database) as context:
-        model_type = [ml.value for ml in MLModelType]
+        model_type = [ml.value for ml in ModelType]
 
         PredictionJobLoop(
             context,
{openstef-3.4.37 → openstef-3.4.40}/openstef/tasks/train_model.py
RENAMED
@@ -23,7 +23,7 @@ from datetime import datetime, timedelta
 from pathlib import Path
 
 from openstef.data_classes.prediction_job import PredictionJobDataClass
-from openstef.enums import MLModelType, PipelineType
+from openstef.enums import ModelType, PipelineType
 from openstef.exceptions import (
     InputDataOngoingZeroFlatlinerError,
     SkipSaveTrainingForecasts,
@@ -179,7 +179,7 @@ def main(model_type=None, config=None, database=None):
     )
 
     if model_type is None:
-        model_type = [ml.value for ml in MLModelType]
+        model_type = [ml.value for ml in ModelType]
 
     taskname = Path(__file__).name.replace(".py", "")
     datetime_now = datetime.utcnow()
{openstef-3.4.37 → openstef-3.4.40}/openstef.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: openstef
-Version: 3.4.37
+Version: 3.4.40
 Summary: Open short term energy forecaster
 Home-page: https://github.com/OpenSTEF/openstef
 Author: Alliander N.V
@@ -78,7 +78,6 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 - [Linux Foundation project page](https://www.lfenergy.org/projects/openstef/);
 - [Documentation on dashboard](https://raw.githack.com/OpenSTEF/.github/main/profile/html/openstef_dashboard_doc.html);
 - [Video about OpenSTEF](https://www.lfenergy.org/forecasting-to-create-a-more-resilient-optimized-grid/);
-- [Teams channel](https://teams.microsoft.com/l/team/19%3ac08a513650524fc988afb296cd0358cc%40thread.tacv2/conversations?groupId=bfcb763a-3a97-4938-81d7-b14512aa537d&tenantId=697f104b-d7cb-48c8-ac9f-bd87105bafdc)
 
 # Installation
 
@@ -88,7 +87,7 @@ OpenSTEF is a Python package designed for generating short-term forecasts in the
 pip install openstef
 ```
 
-### Remark regarding installation within a **conda environment on Windows
+### Remark regarding installation within a **conda environment on Windows**
 
 A version of the pywin32 package will be installed as a secondary dependency along with the installation of the openstef package. Since conda relies on an old version of pywin32, the new installation can break conda's functionality. The following command can solve this issue:
 ```shell
{openstef-3.4.37 → openstef-3.4.40}/setup.py
RENAMED
@@ -29,7 +29,7 @@ def read_long_description_from_readme():
 
 setup(
     name="openstef",
-    version="3.4.37",
+    version="3.4.40",
     packages=find_packages(include=["openstef", "openstef.*"]),
     description="Open short term energy forecaster",
     long_description=read_long_description_from_readme(),