openstef 3.4.56__py3-none-any.whl → 4.0.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. openstef-4.0.0a3.dist-info/METADATA +177 -0
  2. openstef-4.0.0a3.dist-info/RECORD +4 -0
  3. {openstef-3.4.56.dist-info → openstef-4.0.0a3.dist-info}/WHEEL +1 -2
  4. openstef/__init__.py +0 -14
  5. openstef/__main__.py +0 -3
  6. openstef/app_settings.py +0 -19
  7. openstef/data/NL_terrestrial_radiation.csv +0 -25585
  8. openstef/data/NL_terrestrial_radiation.csv.license +0 -3
  9. openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_baseline_model.z +0 -0
  10. openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_baseline_model.z.license +0 -3
  11. openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_model_card.md +0 -18
  12. openstef/data/dazls_model_3.4.24/dazls_stored_3.4.24_model_card.md.license +0 -3
  13. openstef/data/dutch_holidays.csv +0 -1759
  14. openstef/data/dutch_holidays.csv.license +0 -3
  15. openstef/data/pv_single_coefs.csv +0 -601
  16. openstef/data/pv_single_coefs.csv.license +0 -3
  17. openstef/data_classes/__init__.py +0 -3
  18. openstef/data_classes/data_prep.py +0 -99
  19. openstef/data_classes/model_specifications.py +0 -30
  20. openstef/data_classes/prediction_job.py +0 -135
  21. openstef/data_classes/split_function.py +0 -97
  22. openstef/enums.py +0 -140
  23. openstef/exceptions.py +0 -74
  24. openstef/feature_engineering/__init__.py +0 -3
  25. openstef/feature_engineering/apply_features.py +0 -138
  26. openstef/feature_engineering/bidding_zone_to_country_mapping.py +0 -106
  27. openstef/feature_engineering/cyclic_features.py +0 -161
  28. openstef/feature_engineering/data_preparation.py +0 -152
  29. openstef/feature_engineering/feature_adder.py +0 -206
  30. openstef/feature_engineering/feature_applicator.py +0 -202
  31. openstef/feature_engineering/general.py +0 -141
  32. openstef/feature_engineering/holiday_features.py +0 -231
  33. openstef/feature_engineering/lag_features.py +0 -165
  34. openstef/feature_engineering/missing_values_transformer.py +0 -141
  35. openstef/feature_engineering/rolling_features.py +0 -58
  36. openstef/feature_engineering/weather_features.py +0 -492
  37. openstef/metrics/__init__.py +0 -3
  38. openstef/metrics/figure.py +0 -303
  39. openstef/metrics/metrics.py +0 -486
  40. openstef/metrics/reporter.py +0 -222
  41. openstef/model/__init__.py +0 -3
  42. openstef/model/basecase.py +0 -82
  43. openstef/model/confidence_interval_applicator.py +0 -242
  44. openstef/model/fallback.py +0 -77
  45. openstef/model/metamodels/__init__.py +0 -3
  46. openstef/model/metamodels/feature_clipper.py +0 -90
  47. openstef/model/metamodels/grouped_regressor.py +0 -222
  48. openstef/model/metamodels/missing_values_handler.py +0 -138
  49. openstef/model/model_creator.py +0 -214
  50. openstef/model/objective.py +0 -426
  51. openstef/model/objective_creator.py +0 -65
  52. openstef/model/regressors/__init__.py +0 -3
  53. openstef/model/regressors/arima.py +0 -197
  54. openstef/model/regressors/custom_regressor.py +0 -64
  55. openstef/model/regressors/dazls.py +0 -116
  56. openstef/model/regressors/flatliner.py +0 -95
  57. openstef/model/regressors/gblinear_quantile.py +0 -334
  58. openstef/model/regressors/lgbm.py +0 -29
  59. openstef/model/regressors/linear.py +0 -90
  60. openstef/model/regressors/linear_quantile.py +0 -305
  61. openstef/model/regressors/regressor.py +0 -114
  62. openstef/model/regressors/xgb.py +0 -52
  63. openstef/model/regressors/xgb_multioutput_quantile.py +0 -261
  64. openstef/model/regressors/xgb_quantile.py +0 -228
  65. openstef/model/serializer.py +0 -431
  66. openstef/model/standard_deviation_generator.py +0 -81
  67. openstef/model_selection/__init__.py +0 -3
  68. openstef/model_selection/model_selection.py +0 -311
  69. openstef/monitoring/__init__.py +0 -3
  70. openstef/monitoring/performance_meter.py +0 -92
  71. openstef/monitoring/teams.py +0 -203
  72. openstef/pipeline/__init__.py +0 -3
  73. openstef/pipeline/create_basecase_forecast.py +0 -133
  74. openstef/pipeline/create_component_forecast.py +0 -168
  75. openstef/pipeline/create_forecast.py +0 -171
  76. openstef/pipeline/optimize_hyperparameters.py +0 -317
  77. openstef/pipeline/train_create_forecast_backtest.py +0 -163
  78. openstef/pipeline/train_model.py +0 -561
  79. openstef/pipeline/utils.py +0 -52
  80. openstef/postprocessing/__init__.py +0 -3
  81. openstef/postprocessing/postprocessing.py +0 -275
  82. openstef/preprocessing/__init__.py +0 -3
  83. openstef/preprocessing/preprocessing.py +0 -42
  84. openstef/settings.py +0 -15
  85. openstef/tasks/__init__.py +0 -3
  86. openstef/tasks/calculate_kpi.py +0 -324
  87. openstef/tasks/create_basecase_forecast.py +0 -118
  88. openstef/tasks/create_components_forecast.py +0 -162
  89. openstef/tasks/create_forecast.py +0 -145
  90. openstef/tasks/create_solar_forecast.py +0 -420
  91. openstef/tasks/create_wind_forecast.py +0 -80
  92. openstef/tasks/optimize_hyperparameters.py +0 -135
  93. openstef/tasks/split_forecast.py +0 -273
  94. openstef/tasks/train_model.py +0 -224
  95. openstef/tasks/utils/__init__.py +0 -3
  96. openstef/tasks/utils/dependencies.py +0 -107
  97. openstef/tasks/utils/predictionjobloop.py +0 -243
  98. openstef/tasks/utils/taskcontext.py +0 -160
  99. openstef/validation/__init__.py +0 -3
  100. openstef/validation/validation.py +0 -322
  101. openstef-3.4.56.dist-info/METADATA +0 -154
  102. openstef-3.4.56.dist-info/RECORD +0 -102
  103. openstef-3.4.56.dist-info/top_level.txt +0 -1
  104. /openstef-3.4.56.dist-info/LICENSE → /openstef-4.0.0a3.dist-info/licenses/LICENSE.md +0 -0
openstef/metrics/figure.py
@@ -1,303 +0,0 @@
- # SPDX-FileCopyrightText: 2017-2023 Contributors to the OpenSTEF project <korte.termijn.prognoses@alliander.com> # noqa E501>
- #
- # SPDX-License-Identifier: MPL-2.0
- """This module contains all functions for generating figures."""
- import base64
- from typing import Union
-
- import pandas as pd
- import plotly.express as px
- import plotly.graph_objects as go
-
-
- def plot_feature_importance(feature_importance: pd.DataFrame) -> go.Figure:
-     """Created a treemap plot based on feature importance and weights.
-
-     Args:
-         feature_importance: A DataFrame describing the
-             feature importances and weights of the trained model.
-
-     Returns:
-         A treemap of the features.
-
-     """
-     feature_importance["parent"] = "Feature importance"
-
-     return go.Figure(
-         go.Treemap(
-             labels=feature_importance.index,
-             parents=feature_importance["parent"],
-             values=feature_importance["gain"],
-             customdata=feature_importance["weight"],
-             marker=dict(colors=feature_importance["weight"], colorscale="greens"),
-             hovertemplate=(
-                 "<b>%{label}</b><br>importance: %{value:.1%}"
-                 "<br>weight: %{customdata:.1%}<extra></extra>"
-             ),
-         ),
-         layout={
-             "margin": {
-                 "t": 0,
-                 "r": 0,
-                 "b": 0,
-                 "l": 0,
-             }
-         },
-     )
-
-
- def plot_data_series(
-     data: Union[list[pd.DataFrame], list[pd.Series]],
-     predict_data: Union[list[pd.DataFrame], list[pd.Series]] = None,
-     horizon: int = 47,
-     names: list[str] = None,
- ) -> go.Figure:
-     """Plots passed data and optionally prediction data for specified horizon.
-
-     Args:
-         data: There are two options to use this function. Either
-             pass a list of pandas.DataFrame where each dataframe contains a load
-             column and a horizon column. Or pass a list of pandas.Series with
-             unique indexing.
-         predict_data: Similar to data, but
-             for prediction data instead. When passing a list of pandas.DataFrame
-             the column forecast should exist. Can be set to None.
-         horizon: This function will select only data matching
-             this horizon. Defaults to 47.
-         names: The names that will be used in the
-             legend of the plot. If None is passed, this will be build
-             automatically based on the number of series passed.
-
-     Returns:
-         A line plot of each passed data series.
-
-     Raises:
-         ValueError: If names is None and the number of series is greater than 3.
-
-     """
-     series_names = {
-         1: ("series",),
-         2: ("train", "validation"),
-         3: ("train", "validation", "test"),
-     }
-
-     num_series = len(data)
-
-     if names is None and num_series > 3:
-         raise ValueError(
-             "Cannot pass names=None when passing data with more than 3 series."
-         )
-
-     if names is None:
-         names = series_names[num_series]
-
-     if predict_data is None:
-         # Check if horizon columns exists in the data
-         if "horizon" in data[0]:
-             # Filter data on given horizon
-             filtered = []
-             for series in data:
-                 mask = series["horizon"] == horizon
-                 filtered.append(series[mask]["load"])
-         else:
-             filtered = data
-
-         return _plot_data(names, filtered)
-
-     # Check if horizon columns exists in the data
-     if "horizon" in data[0]:
-         # Filter data on given horizon
-         actuals = []
-         predictions = []
-         q_low = []
-         q_high = []
-
-         for series, predict_series in zip(data, predict_data):
-             mask = series["horizon"] == horizon
-             actuals.append(series[mask]["load"])
-             predictions.append(predict_series[mask]["forecast"])
-             if len(predict_series[mask].columns) > 1:
-                 q_low.append(predict_series[mask].iloc[:, -2])
-                 q_high.append(predict_series[mask].iloc[:, -1])
-
-     else:
-         actuals = data
-         predictions = predict_data
-         if len(predictions.columns) > 1:
-             q_low = predict_data.iloc[:, -2]
-             q_high = predict_data.iloc[:, -1]
-         else:
-             q_low = None
-             q_high = None
-
-     quantiles = [q_low, q_high] if (q_low is not None) and (len(q_low) != 0) else None
-     fig = _plot_data_and_predictions(names, actuals, predictions, quantiles)
-     fig.update_layout(
-         title=f"Predictor in action for horizon: {horizon}",
-     )
-
-     return fig
-
-
- def _plot_data(names: list[str], series: list[pd.Series]) -> go.Figure:
-     """Create plot of data consisting of different splits.
-
-     Note:
-         Do not use this function directly, use plot_data_series instead.
-
-     Args:
-         names: Name of each seperate split.
-         series: Each split as a seperate series.
-
-     Returns:
-         A line plot of each passed series.
-
-     """
-     # Build a combined DataFrame with all data.
-     # This step is important to create forced NaNs to create gaps in the plot.
-     combined = []
-     for name, sequence in zip(names, series):
-         combined.extend(
-             [
-                 sequence.rename(name),
-             ]
-         )
-     df_plot = pd.concat(combined, axis=1)
-
-     fig = go.Figure()
-
-     # Add a trace for every data series
-     for i, name in enumerate(names):
-         fig.add_trace(
-             go.Scatter(
-                 x=df_plot.index,
-                 y=df_plot[name],
-                 name=name,
-                 line=dict(color=px.colors.qualitative.Set2[i]),
-             )
-         )
-
-     fig.update_layout(yaxis_title="Load (MW)")
-
-     return fig
-
-
- def _plot_data_and_predictions(
-     names: list[str],
-     actuals: list[pd.Series],
-     predictions: list[pd.Series],
-     quantiles: list[float] = None,
- ) -> go.Figure:
-     """Create plot of different data and prediction splits.
-
-     Note:
-         Do not use this function directly, use plot_data_series instead.
-
-     Args:
-         names: Name of each seperate split. The passed names will be suffixed with _actual and _predict for data and predictions respectively.
-         actuals: Each data split as a seperate series.
-         predictions: Each prediction split as a seperate series.
-         quantiles: List of predicted quantiles that have to be plotted.
-
-     Returns:
-         A line plot of each passed series.
-
-     """
-     # Build a combined DataFrame with all data.
-     # This step is important to create forced NaNs to create gaps in the plot.
-     combined = []
-     if quantiles is None:
-         for name, actual, prediction in zip(names, actuals, predictions):
-             combined.extend(
-                 [
-                     actual.rename(f"{name}_actual"),
-                     prediction.rename(f"{name}_predict"),
-                 ]
-             )
-     else:
-         for name, actual, prediction, q_low, q_high in zip(
-             names, actuals, predictions, quantiles[0], quantiles[-1]
-         ):
-             q_low_name = q_low.name
-             q_high_name = q_high.name
-             combined.extend(
-                 [
-                     actual.rename(f"{name}_actual"),
-                     prediction.rename(f"{name}_predict"),
-                     q_low.rename(f"{name}_{q_low_name}"),
-                     q_high.rename(f"{name}_{q_high_name}"),
-                 ]
-             )
-
-     df_plot = pd.concat(combined, axis=1)
-
-     fig = go.Figure()
-
-     # Add a trace for every data series
-     for i, name in enumerate(names):
-         actual, predict = f"{name}_actual", f"{name}_predict"
-         fig.add_trace(
-             go.Scatter(
-                 x=df_plot.index,
-                 y=df_plot[actual],
-                 name=actual,
-                 line=dict(color=px.colors.qualitative.Set2[i]),
-             )
-         )
-         fig.add_trace(
-             go.Scatter(
-                 x=df_plot.index,
-                 y=df_plot[predict],
-                 name=predict,
-                 line=dict(dash="dot", color=px.colors.qualitative.Dark2[i]),
-             )
-         )
-         if quantiles is not None:
-             q_low, q_high = f"{name}_{q_low_name}", f"{name}_{q_high_name}"
-             fig.add_trace(
-                 go.Scatter(
-                     x=df_plot.index,
-                     y=df_plot[q_low],
-                     mode="lines",
-                     line=dict(
-                         color=px.colors.qualitative.Dark2[i], width=0.5, dash="dash"
-                     ),
-                     name=q_low,
-                 )
-             )
-             fig.add_trace(
-                 go.Scatter(
-                     x=df_plot.index,
-                     y=df_plot[q_high],
-                     fill="tonexty",
-                     fillcolor=f"rgba({px.colors.qualitative.Dark2[i][4:-1]}, 0.3)",
-                     mode="lines",
-                     line=dict(
-                         color=px.colors.qualitative.Dark2[i], width=0.5, dash="dash"
-                     ),
-                     name=q_high,
-                 )
-             )
-
-     fig.update_layout(yaxis_title="Load (MW)")
-
-     return fig
-
-
- def convert_to_base64_data_uri(path_in: str, path_out: str, content_type: str) -> None:
-     """Read file, convert it to a data_uri, then writes the data_uri to file.
-
-     Args:
-         path_in: Path of the file that will be converted
-         path_out: Path of the file containing the data uri
-         content_type: Content type of the data uri according to
-             (https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type).
-
-     """
-     with open(path_in, "rb") as f:
-         encoded = base64.b64encode(f.read()).decode()
-
-     data_uri = "data:{0};base64,{1}".format(content_type, encoded)
-
-     with open(path_out, "wt") as f:
-         f.write(data_uri)
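
Note: the entire openstef/metrics/figure.py module shown above is removed in 4.0.0a3, so 3.4.x code that imports these plotting helpers breaks after the upgrade. A minimal sketch of the kind of call site affected is given below; the DataFrame contents are invented for illustration, the only requirement of the removed function being an index of feature names plus "gain" and "weight" columns.

    # Runs against openstef 3.4.x only; openstef.metrics.figure no longer exists in 4.0.0a3.
    import pandas as pd
    from openstef.metrics.figure import plot_feature_importance

    # Hypothetical feature-importance table: "gain" sizes the treemap tiles,
    # "weight" drives the colour scale and hover text.
    feature_importance = pd.DataFrame(
        {"gain": [0.6, 0.3, 0.1], "weight": [0.5, 0.3, 0.2]},
        index=["radiation", "windspeed_100m", "T"],
    )

    fig = plot_feature_importance(feature_importance)
    fig.write_html("feature_importance.html")  # returns a plotly Figure, so any plotly output works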