tsam 2.1.0__py3-none-any.whl → 2.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tsam/__init__.py CHANGED
@@ -0,0 +1,11 @@
1
+ import sys
2
+
3
+ if not sys.warnoptions:
4
+ import warnings
5
+
6
+ warnings.filterwarnings(
7
+ action="ignore",
8
+ category=FutureWarning,
9
+ append=True,
10
+ message=r".*The previous implementation of stack is deprecated and will be removed in a future version of pandas.*",
11
+ )
@@ -1,225 +1,245 @@
1
- # -*- coding: utf-8 -*-
2
-
3
- import copy
4
-
5
- import numpy as np
6
-
7
- import tqdm
8
-
9
- from tsam.timeseriesaggregation import TimeSeriesAggregation
10
-
11
- def getNoPeriodsForDataReduction(noRawTimeSteps, segmentsPerPeriod, dataReduction):
12
- """
13
- Identifies the maximum number of periods which can be set to achieve the required data reduction.
14
-
15
- :param noRawTimeSteps: Number of original time steps. required
16
- :type noRawTimeSteps: int
17
-
18
- :param segmentsPerPeriod: Segments per period. required
19
- :type segmentsPerPeriod: int
20
-
21
- :param dataReduction: Factor by which the resulting dataset should be reduced. required
22
- :type dataReduction: float
23
-
24
- :returns: **noTypicalPeriods** -- Number of typical periods that can be set.
25
- """
26
- return int(np.floor(dataReduction * float(noRawTimeSteps)/segmentsPerPeriod))
27
-
28
- def getNoSegmentsForDataReduction(noRawTimeSteps, typicalPeriods, dataReduction):
29
- """
30
- Identifies the maximum number of segments which can be set to achieve the required data reduction.
31
-
32
- :param noRawTimeSteps: Number of original time steps. required
33
- :type noRawTimeSteps: int
34
-
35
- :param typicalPeriods: Number of typical periods. required
36
- :type typicalPeriods: int
37
-
38
- :param dataReduction: Factor by which the resulting dataset should be reduced. required
39
- :type dataReduction: float
40
-
41
- :returns: **segmentsPerPeriod** -- Number of segments per period that can be set.
42
- """
43
- return int(np.floor(dataReduction * float(noRawTimeSteps)/typicalPeriods))
44
-
45
-
46
-
47
-
48
- class HyperTunedAggregations(object):
49
-
50
- def __init__(self, base_aggregation, saveAggregationHistory=True):
51
- """
52
- A class that does a parameter variation and tuning of the aggregation itself.
53
-
54
- :param base_aggregation: TimeSeriesAggregation object which is used as basis for tuning the hyper parameters. required
55
- :type base_aggregation: TimeSeriesAggregation
56
-
57
- :param saveAggregationHistory: Defines if all aggregations that are created during the tuning and iterations shall be saved under self.aggregationHistory.
58
- :type saveAggregationHistory: boolean
59
- """
60
- self.base_aggregation = base_aggregation
61
-
62
- if not isinstance(self.base_aggregation, TimeSeriesAggregation):
63
- raise ValueError(
64
- "base_aggregation has to be an TimeSeriesAggregation object"
65
- )
66
-
67
- self._alterableAggregation=copy.deepcopy(self.base_aggregation)
68
-
69
- self.saveAggregationHistory=saveAggregationHistory
70
-
71
- self._segmentHistory=[]
72
-
73
- self._periodHistory=[]
74
-
75
- self._RMSEHistory=[]
76
-
77
- if self.saveAggregationHistory:
78
- self.aggregationHistory=[]
79
-
80
-
81
-
82
-
83
- def _testAggregation(self, noTypicalPeriods, noSegments):
84
- """
85
- Tests the aggregation for a set of typical periods and segments and returns the RMSE
86
- """
87
- self._segmentHistory.append(noSegments)
88
-
89
- self._periodHistory.append(noTypicalPeriods)
90
-
91
- self._alterableAggregation.noTypicalPeriods=noTypicalPeriods
92
-
93
- self._alterableAggregation.noSegments=noSegments
94
-
95
- self._alterableAggregation.createTypicalPeriods()
96
-
97
- self._alterableAggregation.predictOriginalData()
98
-
99
- RMSE=self._alterableAggregation.totalAccuracyIndicators()["RMSE"]
100
-
101
- self._RMSEHistory.append(RMSE)
102
-
103
- if self.saveAggregationHistory:
104
- self.aggregationHistory.append(copy.copy(self._alterableAggregation))
105
-
106
- return RMSE
107
-
108
- def _deleteTestHistory(self, index):
109
- """
110
- Delelets the defined index from the test history
111
- """
112
- del self._segmentHistory[index]
113
- del self._periodHistory[index]
114
- del self._RMSEHistory[index]
115
-
116
- if self.saveAggregationHistory:
117
- del self.aggregationHistory[index]
118
-
119
-
120
- def identifyOptimalSegmentPeriodCombination(self, dataReduction):
121
- """
122
- Identifies the optimal combination of number of typical periods and number of segments for a given data reduction set.
123
-
124
- :param dataReduction: Factor by which the resulting dataset should be reduced. required
125
- :type dataReduction: float
126
-
127
- :returns: **noSegments, noTypicalperiods** -- The optimal combination of segments and typical periods for the given optimization set.
128
- """
129
- if not self.base_aggregation.segmentation:
130
- raise ValueError("This function does only make sense in combination with 'segmentation' activated.")
131
-
132
- noRawTimeSteps=len(self.base_aggregation.timeSeries.index)
133
- # derive the minimum of periods allowed for this data reduction as starting point
134
- _minPeriods = getNoPeriodsForDataReduction(noRawTimeSteps, self.base_aggregation.timeStepsPerPeriod, dataReduction)
135
- # get the maximum number of periods as limit for the convergence
136
- _maxPeriods = min(getNoPeriodsForDataReduction(noRawTimeSteps, 1, dataReduction), int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod))
137
-
138
- # starting point
139
- noTypicalPeriods=_minPeriods
140
- noSegments=self.base_aggregation.timeStepsPerPeriod
141
- RMSE_old=self._testAggregation(noTypicalPeriods, noSegments)
142
-
143
- # start the iteration
144
- convergence=False
145
- while not convergence and noTypicalPeriods<=_maxPeriods and noSegments>0:
146
- # increase the number of periods until we get a reduced set of segments
147
- while self._segmentHistory[-1]==noSegments and noTypicalPeriods<_maxPeriods:
148
- noTypicalPeriods+=1
149
- noSegments=getNoSegmentsForDataReduction(noRawTimeSteps, noTypicalPeriods, dataReduction)
150
-
151
- # derive new typical periods
152
- RMSE_n=self._testAggregation(noTypicalPeriods, noSegments)
153
-
154
- # check if the RMSE could be reduced
155
- if RMSE_n < RMSE_old:
156
- RMSE_old=RMSE_n
157
- convergence=False
158
- # in case it cannot be reduced anymore stop
159
- else:
160
- convergence=True
161
-
162
- # take the previous set, since the latest did not have a reduced error
163
- noTypicalPeriods=self._periodHistory[-2]
164
- noSegments=self._segmentHistory[-2]
165
-
166
- # and return the segment and typical period pair
167
- return noSegments, noTypicalPeriods
168
-
169
-
170
- def identifyParetoOptimalAggregation(self, untilTotalTimeSteps=None):
171
- """
172
- Identifies the pareto-optimal combination of number of typical periods and number of segments along with a steepest decent approach, starting from the aggregation to a single period and a single segment up to the representation of the full time series.
173
-
174
- :param untilTotalTimeSteps: Number of timesteps until which the pareto-front should be determined. If None, the maximum number of timesteps is chosen.
175
- :type untilTotalTimeSteps: int
176
-
177
-
178
- :returns: **** -- Nothing. Check aggregation history for results. All typical Periods in scaled form.
179
- """
180
- if not self.base_aggregation.segmentation:
181
- raise ValueError("This function does only make sense in combination with 'segmentation' activated.")
182
-
183
- noRawTimeSteps=len(self.base_aggregation.timeSeries.index)
184
-
185
- _maxPeriods = int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod)
186
- _maxSegments = self.base_aggregation.timeStepsPerPeriod
187
-
188
- if untilTotalTimeSteps is None:
189
- untilTotalTimeSteps=noRawTimeSteps
190
-
191
-
192
- progressBar = tqdm.tqdm(total=untilTotalTimeSteps)
193
-
194
- # starting point
195
- noTypicalPeriods=1
196
- noSegments=1
197
- _RMSE_0=self._testAggregation(noTypicalPeriods, noSegments)
198
-
199
- # loop until either segments or periods have reached their maximum
200
- while noTypicalPeriods<_maxPeriods and noSegments<_maxSegments and noSegments*noTypicalPeriods<=untilTotalTimeSteps:
201
- # test for more segments
202
- RMSE_segments = self._testAggregation(noTypicalPeriods, noSegments+1)
203
- # test for more periods
204
- RMSE_periods = self._testAggregation(noTypicalPeriods+1, noSegments)
205
- # go along the better RMSE reduction
206
- if RMSE_periods<RMSE_segments:
207
- noTypicalPeriods+=1
208
- # and delete the search direction which was not persued
209
- self._deleteTestHistory(-2)
210
- else:
211
- noSegments+=1
212
- self._deleteTestHistory(-1)
213
- progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
214
-
215
- # afterwards loop over periods and segments exclusively until maximum is reached
216
- while noTypicalPeriods<_maxPeriods and noSegments*noTypicalPeriods<=untilTotalTimeSteps:
217
- noTypicalPeriods+=1
218
- RMSE = self._testAggregation(noTypicalPeriods, noSegments)
219
- progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
220
-
221
- while noSegments<_maxSegments and noSegments*noTypicalPeriods<=untilTotalTimeSteps:
222
- noSegments+=1
223
- RMSE = self._testAggregation(noTypicalPeriods, noSegments)
224
- progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
225
- return
1
+ # -*- coding: utf-8 -*-
2
+
3
+ import copy
4
+
5
+ import numpy as np
6
+
7
+ import tqdm
8
+
9
+ from tsam.timeseriesaggregation import TimeSeriesAggregation
10
+
11
+ def getNoPeriodsForDataReduction(noRawTimeSteps, segmentsPerPeriod, dataReduction):
12
+ """
13
+ Identifies the maximum number of periods which can be set to achieve the required data reduction.
14
+
15
+ :param noRawTimeSteps: Number of original time steps. required
16
+ :type noRawTimeSteps: int
17
+
18
+ :param segmentsPerPeriod: Segments per period. required
19
+ :type segmentsPerPeriod: int
20
+
21
+ :param dataReduction: Factor by which the resulting dataset should be reduced. required
22
+ :type dataReduction: float
23
+
24
+ :returns: **noTypicalPeriods** -- Number of typical periods that can be set.
25
+ """
26
+ return int(np.floor(dataReduction * float(noRawTimeSteps)/segmentsPerPeriod))
27
+
28
+ def getNoSegmentsForDataReduction(noRawTimeSteps, typicalPeriods, dataReduction):
29
+ """
30
+ Identifies the maximum number of segments which can be set to achieve the required data reduction.
31
+
32
+ :param noRawTimeSteps: Number of original time steps. required
33
+ :type noRawTimeSteps: int
34
+
35
+ :param typicalPeriods: Number of typical periods. required
36
+ :type typicalPeriods: int
37
+
38
+ :param dataReduction: Factor by which the resulting dataset should be reduced. required
39
+ :type dataReduction: float
40
+
41
+ :returns: **segmentsPerPeriod** -- Number of segments per period that can be set.
42
+ """
43
+ return int(np.floor(dataReduction * float(noRawTimeSteps)/typicalPeriods))
44
+
45
+
46
+
47
+
48
+ class HyperTunedAggregations(object):
49
+
50
+ def __init__(self, base_aggregation, saveAggregationHistory=True):
51
+ """
52
+ A class that does a parameter variation and tuning of the aggregation itself.
53
+
54
+ :param base_aggregation: TimeSeriesAggregation object which is used as basis for tuning the hyper parameters. required
55
+ :type base_aggregation: TimeSeriesAggregation
56
+
57
+ :param saveAggregationHistory: Defines if all aggregations that are created during the tuning and iterations shall be saved under self.aggregationHistory.
58
+ :type saveAggregationHistory: boolean
59
+ """
60
+ self.base_aggregation = base_aggregation
61
+
62
+ if not isinstance(self.base_aggregation, TimeSeriesAggregation):
63
+ raise ValueError(
64
+ "base_aggregation has to be an TimeSeriesAggregation object"
65
+ )
66
+
67
+ self._alterableAggregation=copy.deepcopy(self.base_aggregation)
68
+
69
+ self.saveAggregationHistory=saveAggregationHistory
70
+
71
+ self._segmentHistory=[]
72
+
73
+ self._periodHistory=[]
74
+
75
+ self._RMSEHistory=[]
76
+
77
+ if self.saveAggregationHistory:
78
+ self.aggregationHistory=[]
79
+
80
+
81
+
82
+
83
+ def _testAggregation(self, noTypicalPeriods, noSegments):
84
+ """
85
+ Tests the aggregation for a set of typical periods and segments and returns the RMSE
86
+ """
87
+ self._segmentHistory.append(noSegments)
88
+
89
+ self._periodHistory.append(noTypicalPeriods)
90
+
91
+ self._alterableAggregation.noTypicalPeriods=noTypicalPeriods
92
+
93
+ self._alterableAggregation.noSegments=noSegments
94
+
95
+ self._alterableAggregation.createTypicalPeriods()
96
+
97
+ self._alterableAggregation.predictOriginalData()
98
+
99
+ RMSE=self._alterableAggregation.totalAccuracyIndicators()["RMSE"]
100
+
101
+ self._RMSEHistory.append(RMSE)
102
+
103
+ if self.saveAggregationHistory:
104
+ self.aggregationHistory.append(copy.copy(self._alterableAggregation))
105
+
106
+ return RMSE
107
+
108
+ def _deleteTestHistory(self, index):
109
+ """
110
+ Delelets the defined index from the test history
111
+ """
112
+ del self._segmentHistory[index]
113
+ del self._periodHistory[index]
114
+ del self._RMSEHistory[index]
115
+
116
+ if self.saveAggregationHistory:
117
+ del self.aggregationHistory[index]
118
+
119
+
120
+ def identifyOptimalSegmentPeriodCombination(self, dataReduction):
121
+ """
122
+ Identifies the optimal combination of number of typical periods and number of segments for a given data reduction set.
123
+
124
+ :param dataReduction: Factor by which the resulting dataset should be reduced. required
125
+ :type dataReduction: float
126
+
127
+ :returns: **noSegments, noTypicalperiods** -- The optimal combination of segments and typical periods for the given optimization set.
128
+ """
129
+ if not self.base_aggregation.segmentation:
130
+ raise ValueError("This function does only make sense in combination with 'segmentation' activated.")
131
+
132
+ noRawTimeSteps=len(self.base_aggregation.timeSeries.index)
133
+
134
+ _maxPeriods = int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod)
135
+ _maxSegments = self.base_aggregation.timeStepsPerPeriod
136
+
137
+ # save RMSE
138
+ RMSE_history = []
139
+
140
+ # correct 0 index of python
141
+ possibleSegments = np.arange(_maxSegments)+1
142
+ possiblePeriods = np.arange(_maxPeriods)+1
143
+
144
+ # number of time steps of all combinations of segments and periods
145
+ combinedTimeSteps = np.outer(possibleSegments, possiblePeriods)
146
+ # reduce to valid combinations for targeted data reduction
147
+ reductionValidCombinations = combinedTimeSteps <= noRawTimeSteps * dataReduction
148
+
149
+ # number of time steps for all feasible combinations
150
+ reductionValidTimsteps = combinedTimeSteps * reductionValidCombinations
151
+
152
+ # identify max segments and max period combination
153
+ optimalPeriods = np.zeros_like(reductionValidTimsteps)
154
+ optimalPeriods[np.arange(reductionValidTimsteps.shape[0]), reductionValidTimsteps.argmax(axis=1)] = 1
155
+ optimalSegments = np.zeros_like(reductionValidTimsteps)
156
+ optimalSegments[reductionValidTimsteps.argmax(axis=0), np.arange(reductionValidTimsteps.shape[1])] = 1
157
+
158
+ optimalIndexCombo = np.nonzero(optimalPeriods*optimalSegments)
159
+
160
+
161
+ for segmentIx, periodIx in tqdm.tqdm(zip(optimalIndexCombo[0],optimalIndexCombo[1])):
162
+
163
+ # derive new typical periods and derive rmse
164
+ RMSE_history.append(self._testAggregation(possiblePeriods[periodIx], possibleSegments[segmentIx]))
165
+
166
+ # take the negative backwards index with the minimal RMSE
167
+ min_index = - list(reversed(RMSE_history)).index(min(RMSE_history)) - 1
168
+ RMSE_min = RMSE_history[min_index]
169
+
170
+
171
+ noTypicalPeriods=self._periodHistory[min_index]
172
+ noSegments=self._segmentHistory[min_index]
173
+
174
+ # and return the segment and typical period pair
175
+ return noSegments, noTypicalPeriods, RMSE_min
176
+
177
+
178
+ def identifyParetoOptimalAggregation(self, untilTotalTimeSteps=None):
179
+ """
180
+ Identifies the pareto-optimal combination of number of typical periods and number of segments along with a steepest decent approach, starting from the aggregation to a single period and a single segment up to the representation of the full time series.
181
+
182
+ :param untilTotalTimeSteps: Number of timesteps until which the pareto-front should be determined. If None, the maximum number of timesteps is chosen.
183
+ :type untilTotalTimeSteps: int
184
+
185
+
186
+ :returns: **** -- Nothing. Check aggregation history for results. All typical Periods in scaled form.
187
+ """
188
+ if not self.base_aggregation.segmentation:
189
+ raise ValueError("This function does only make sense in combination with 'segmentation' activated.")
190
+
191
+ noRawTimeSteps=len(self.base_aggregation.timeSeries.index)
192
+
193
+ _maxPeriods = int(float(noRawTimeSteps)/self.base_aggregation.timeStepsPerPeriod)
194
+ _maxSegments = self.base_aggregation.timeStepsPerPeriod
195
+
196
+ if untilTotalTimeSteps is None:
197
+ untilTotalTimeSteps=noRawTimeSteps
198
+
199
+
200
+ progressBar = tqdm.tqdm(total=untilTotalTimeSteps)
201
+
202
+ # starting point
203
+ noTypicalPeriods=1
204
+ noSegments=1
205
+ _RMSE_0=self._testAggregation(noTypicalPeriods, noSegments)
206
+
207
+ # loop until either segments or periods have reached their maximum
208
+ while (noTypicalPeriods<_maxPeriods and noSegments<_maxSegments
209
+ and (noSegments+1)*noTypicalPeriods<=untilTotalTimeSteps
210
+ and noSegments*(noTypicalPeriods+1)<=untilTotalTimeSteps):
211
+ # test for more segments
212
+ RMSE_segments = self._testAggregation(noTypicalPeriods, noSegments+1)
213
+ # test for more periods
214
+ RMSE_periods = self._testAggregation(noTypicalPeriods+1, noSegments)
215
+
216
+ # RMSE old
217
+ RMSE_old = self._RMSEHistory[-3]
218
+
219
+ # segment gradient (RMSE improvement per increased time step number)
220
+ # for segments: for each period on segment added
221
+ RMSE_segment_gradient = (RMSE_old - RMSE_segments) / noTypicalPeriods
222
+ # for periods: one period with no of segments
223
+ RMSE_periods_gradient = (RMSE_old - RMSE_periods) / noSegments
224
+
225
+ # go along the steeper gradient
226
+ if RMSE_periods_gradient>RMSE_segment_gradient:
227
+ noTypicalPeriods+=1
228
+ # and delete the search direction which was not pursued
229
+ self._deleteTestHistory(-2)
230
+ else:
231
+ noSegments+=1
232
+ self._deleteTestHistory(-1)
233
+ progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
234
+
235
+ # afterwards loop over periods and segments exclusively until maximum is reached
236
+ while noTypicalPeriods<_maxPeriods and noSegments*(noTypicalPeriods+1)<=untilTotalTimeSteps:
237
+ noTypicalPeriods+=1
238
+ RMSE = self._testAggregation(noTypicalPeriods, noSegments)
239
+ progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
240
+
241
+ while noSegments<_maxSegments and (noSegments+1)*noTypicalPeriods<=untilTotalTimeSteps:
242
+ noSegments+=1
243
+ RMSE = self._testAggregation(noTypicalPeriods, noSegments)
244
+ progressBar.update(noSegments*noTypicalPeriods-progressBar.n)
245
+ return