tsam 2.2.2-py3-none-any.whl → 2.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tsam/hyperparametertuning.py +245 -245
- tsam/periodAggregation.py +141 -141
- tsam/representations.py +167 -167
- tsam/timeseriesaggregation.py +1358 -1309
- tsam/utils/durationRepresentation.py +204 -128
- tsam/utils/k_maxoids.py +145 -145
- tsam/utils/k_medoids_contiguity.py +140 -133
- tsam/utils/k_medoids_exact.py +239 -234
- tsam/utils/segmentation.py +118 -119
- {tsam-2.2.2.dist-info → tsam-2.3.4.dist-info}/LICENSE.txt +20 -20
- {tsam-2.2.2.dist-info → tsam-2.3.4.dist-info}/METADATA +175 -167
- tsam-2.3.4.dist-info/RECORD +16 -0
- {tsam-2.2.2.dist-info → tsam-2.3.4.dist-info}/WHEEL +1 -1
- tsam-2.2.2.dist-info/RECORD +0 -16
- {tsam-2.2.2.dist-info → tsam-2.3.4.dist-info}/top_level.txt +0 -0
tsam/hyperparametertuning.py
CHANGED
@@ -1,245 +1,245 @@
# -*- coding: utf-8 -*-

import copy

import numpy as np

import tqdm

from tsam.timeseriesaggregation import TimeSeriesAggregation

def getNoPeriodsForDataReduction(noRawTimeSteps, segmentsPerPeriod, dataReduction):
    """
    Identifies the maximum number of periods which can be set to achieve the required data reduction.

    :param noRawTimeSteps: Number of original time steps. required
    :type noRawTimeSteps: int

    :param segmentsPerPeriod: Segments per period. required
    :type segmentsPerPeriod: int

    :param dataReduction: Factor by which the resulting dataset should be reduced. required
    :type dataReduction: float

    :returns: **noTypicalPeriods** -- Number of typical periods that can be set.
    """
    return int(np.floor(dataReduction * float(noRawTimeSteps)/segmentsPerPeriod))

def getNoSegmentsForDataReduction(noRawTimeSteps, typicalPeriods, dataReduction):
    """
    Identifies the maximum number of segments which can be set to achieve the required data reduction.

    :param noRawTimeSteps: Number of original time steps. required
    :type noRawTimeSteps: int

    :param typicalPeriods: Number of typical periods. required
    :type typicalPeriods: int

    :param dataReduction: Factor by which the resulting dataset should be reduced. required
    :type dataReduction: float

    :returns: **segmentsPerPeriod** -- Number of segments per period that can be set.
    """
    return int(np.floor(dataReduction * float(noRawTimeSteps)/typicalPeriods))




class HyperTunedAggregations(object):

    def __init__(self, base_aggregation, saveAggregationHistory=True):
        """
        A class that does a parameter variation and tuning of the aggregation itself.

        :param base_aggregation: TimeSeriesAggregation object which is used as basis for tuning the hyper parameters. required
        :type base_aggregation: TimeSeriesAggregation

        :param saveAggregationHistory: Defines if all aggregations that are created during the tuning and iterations shall be saved under self.aggregationHistory.
        :type saveAggregationHistory: boolean
        """
        self.base_aggregation = base_aggregation

        if not isinstance(self.base_aggregation, TimeSeriesAggregation):
            raise ValueError(
                "base_aggregation has to be an TimeSeriesAggregation object"
            )

        self._alterableAggregation=copy.deepcopy(self.base_aggregation)

        self.saveAggregationHistory=saveAggregationHistory

        self._segmentHistory=[]

        self._periodHistory=[]

        self._RMSEHistory=[]

        if self.saveAggregationHistory:
            self.aggregationHistory=[]




    def _testAggregation(self, noTypicalPeriods, noSegments):
        """
        Tests the aggregation for a set of typical periods and segments and returns the RMSE
        """
        self._segmentHistory.append(noSegments)

        self._periodHistory.append(noTypicalPeriods)

        self._alterableAggregation.noTypicalPeriods=noTypicalPeriods

        self._alterableAggregation.noSegments=noSegments

        self._alterableAggregation.createTypicalPeriods()

        self._alterableAggregation.predictOriginalData()

        RMSE=self._alterableAggregation.totalAccuracyIndicators()["RMSE"]

        self._RMSEHistory.append(RMSE)

        if self.saveAggregationHistory:
            self.aggregationHistory.append(copy.copy(self._alterableAggregation))

        return RMSE

    def _deleteTestHistory(self, index):
        """
        Delelets the defined index from the test history
        """
        del self._segmentHistory[index]
        del self._periodHistory[index]
        del self._RMSEHistory[index]

        if self.saveAggregationHistory:
            del self.aggregationHistory[index]


    def identifyOptimalSegmentPeriodCombination(self, dataReduction):
        """
        Identifies the optimal combination of number of typical periods and number of segments for a given data reduction set.

        :param dataReduction: Factor by which the resulting dataset should be reduced. required
        :type dataReduction: float

        :returns: **noSegments, noTypicalperiods** -- The optimal combination of segments and typical periods for the given optimization set.
        """
        if not self.base_aggregation.segmentation:
            raise ValueError("This function does only make sense in combination with 'segmentation' activated.")

        noRawTimeSteps=len(self.base_aggregation.timeSeries.index)

        _maxPeriods = int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod)
        _maxSegments = self.base_aggregation.timeStepsPerPeriod

        # save RMSE
        RMSE_history = []

        # correct 0 index of python
        possibleSegments = np.arange(_maxSegments)+1
        possiblePeriods = np.arange(_maxPeriods)+1

        # number of time steps of all combinations of segments and periods
        combinedTimeSteps = np.outer(possibleSegments, possiblePeriods)
        # reduce to valid combinations for targeted data reduction
        reductionValidCombinations = combinedTimeSteps <= noRawTimeSteps * dataReduction

        # number of time steps for all feasible combinations
        reductionValidTimsteps = combinedTimeSteps * reductionValidCombinations

        # identify max segments and max period combination
        optimalPeriods = np.zeros_like(reductionValidTimsteps)
        optimalPeriods[np.arange(reductionValidTimsteps.shape[0]), reductionValidTimsteps.argmax(axis=1)] = 1
        optimalSegments = np.zeros_like(reductionValidTimsteps)
        optimalSegments[reductionValidTimsteps.argmax(axis=0), np.arange(reductionValidTimsteps.shape[1])] = 1

        optimalIndexCombo = np.nonzero(optimalPeriods*optimalSegments)


        for segmentIx, periodIx in tqdm.tqdm(zip(optimalIndexCombo[0],optimalIndexCombo[1])):

            # derive new typical periods and derive rmse
            RMSE_history.append(self._testAggregation(possiblePeriods[periodIx], possibleSegments[segmentIx]))

        # take the negative backwards index with the minimal RMSE
        min_index = - list(reversed(RMSE_history)).index(min(RMSE_history)) - 1
        RMSE_min = RMSE_history[min_index]


        noTypicalPeriods=self._periodHistory[min_index]
        noSegments=self._segmentHistory[min_index]

        # and return the segment and typical period pair
        return noSegments, noTypicalPeriods, RMSE_min


    def identifyParetoOptimalAggregation(self, untilTotalTimeSteps=None):
        """
        Identifies the pareto-optimal combination of number of typical periods and number of segments along with a steepest decent approach, starting from the aggregation to a single period and a single segment up to the representation of the full time series.

        :param untilTotalTimeSteps: Number of timesteps until which the pareto-front should be determined. If None, the maximum number of timesteps is chosen.
        :type untilTotalTimeSteps: int


        :returns: **** -- Nothing. Check aggregation history for results. All typical Periods in scaled form.
        """
        if not self.base_aggregation.segmentation:
            raise ValueError("This function does only make sense in combination with 'segmentation' activated.")

        noRawTimeSteps=len(self.base_aggregation.timeSeries.index)

        _maxPeriods = int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod)
        _maxSegments = self.base_aggregation.timeStepsPerPeriod

        if untilTotalTimeSteps is None:
            untilTotalTimeSteps=noRawTimeSteps


        progressBar = tqdm.tqdm(total=untilTotalTimeSteps)

        # starting point
        noTypicalPeriods=1
        noSegments=1
        _RMSE_0=self._testAggregation(noTypicalPeriods, noSegments)

        # loop until either segments or periods have reached their maximum
        while (noTypicalPeriods<_maxPeriods and noSegments<_maxSegments
               and (noSegments+1)*noTypicalPeriods<=untilTotalTimeSteps
               and noSegments*(noTypicalPeriods+1)<=untilTotalTimeSteps):
            # test for more segments
            RMSE_segments = self._testAggregation(noTypicalPeriods, noSegments+1)
            # test for more periods
            RMSE_periods = self._testAggregation(noTypicalPeriods+1, noSegments)

            # RMSE old
            RMSE_old = self._RMSEHistory[-3]

            # segment gradient (RMSE improvement per increased time step number)
            # for segments: for each period on segment added
            RMSE_segment_gradient = (RMSE_old - RMSE_segments) / noTypicalPeriods
            # for periods: one period with no of segments
            RMSE_periods_gradient = (RMSE_old - RMSE_periods) / noSegments

            # go along the steeper gradient
            if RMSE_periods_gradient>RMSE_segment_gradient:
                noTypicalPeriods+=1
                # and delete the search direction which was not persued
                self._deleteTestHistory(-2)
            else:
                noSegments+=1
                self._deleteTestHistory(-1)
            progressBar.update(noSegments*noTypicalPeriods-progressBar.n)

        # afterwards loop over periods and segments exclusively until maximum is reached
        while noTypicalPeriods<_maxPeriods and noSegments*(noTypicalPeriods+1)<=untilTotalTimeSteps:
            noTypicalPeriods+=1
            RMSE = self._testAggregation(noTypicalPeriods, noSegments)
            progressBar.update(noSegments*noTypicalPeriods-progressBar.n)

        while noSegments<_maxSegments and (noSegments+1)*noTypicalPeriods<=untilTotalTimeSteps:
            noSegments+=1
            RMSE = self._testAggregation(noTypicalPeriods, noSegments)
            progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
        return
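
For orientation, a minimal usage sketch of the module listed above follows. It is not part of the package diff. The HyperTunedAggregations wrapper, its methods, and the getNoPeriodsForDataReduction helper are taken from the listing; the TimeSeriesAggregation constructor keywords (hoursPerPeriod, clusterMethod) and the dummy input data are assumptions that may need adjusting to the installed tsam version.

# Hypothetical usage sketch -- not part of the diff above. It assumes an hourly
# input profile and that TimeSeriesAggregation accepts the keyword arguments
# shown here (hoursPerPeriod, clusterMethod, segmentation, noSegments,
# noTypicalPeriods); check the tsam documentation for the exact signature.
import numpy as np
import pandas as pd

from tsam.timeseriesaggregation import TimeSeriesAggregation
from tsam.hyperparametertuning import (
    HyperTunedAggregations,
    getNoPeriodsForDataReduction,
)

# One year of hourly dummy data (8760 time steps) with a single column.
df = pd.DataFrame(
    {"load": np.random.rand(8760)},
    index=pd.date_range("2030-01-01", periods=8760, freq="h"),
)

# Helper arithmetic: with 24 segments per period and a target reduction to 10 %
# of the raw data, floor(0.1 * 8760 / 24) = 36 typical periods can be afforded.
print(getNoPeriodsForDataReduction(8760, segmentsPerPeriod=24, dataReduction=0.1))

# Base aggregation with segmentation enabled (the tuner raises otherwise).
base = TimeSeriesAggregation(
    df,
    hoursPerPeriod=24,
    clusterMethod="hierarchical",
    segmentation=True,
    noSegments=8,
    noTypicalPeriods=8,
)

tuner = HyperTunedAggregations(base)

# Best (noSegments, noTypicalPeriods) pair that keeps at most 10 % of the raw
# time steps, together with the resulting RMSE.
noSegments, noTypicalPeriods, rmse = tuner.identifyOptimalSegmentPeriodCombination(
    dataReduction=0.1
)

# Steepest-descent walk along the period/segment trade-off; accepted steps are
# kept in tuner.aggregationHistory because saveAggregationHistory defaults to True.
tuner.identifyParetoOptimalAggregation(untilTotalTimeSteps=200)

As the listing shows, identifyOptimalSegmentPeriodCombination only evaluates the (segments, periods) pairs on the data-reduction boundary and returns the pair with the lowest RMSE, while identifyParetoOptimalAggregation repeatedly follows the steeper of the two RMSE gradients (adding a period versus adding a segment) and records each accepted step in the aggregation history.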