autogluon.timeseries 1.2.1b20250224__py3-none-any.whl → 1.4.1b20251215__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry flags this version of autogluon.timeseries as potentially problematic.
- autogluon/timeseries/configs/__init__.py +3 -2
- autogluon/timeseries/configs/hyperparameter_presets.py +62 -0
- autogluon/timeseries/configs/predictor_presets.py +106 -0
- autogluon/timeseries/dataset/ts_dataframe.py +256 -141
- autogluon/timeseries/learner.py +86 -52
- autogluon/timeseries/metrics/__init__.py +42 -8
- autogluon/timeseries/metrics/abstract.py +89 -19
- autogluon/timeseries/metrics/point.py +142 -53
- autogluon/timeseries/metrics/quantile.py +46 -21
- autogluon/timeseries/metrics/utils.py +4 -4
- autogluon/timeseries/models/__init__.py +8 -2
- autogluon/timeseries/models/abstract/__init__.py +2 -2
- autogluon/timeseries/models/abstract/abstract_timeseries_model.py +361 -592
- autogluon/timeseries/models/abstract/model_trial.py +2 -1
- autogluon/timeseries/models/abstract/tunable.py +189 -0
- autogluon/timeseries/models/autogluon_tabular/__init__.py +2 -0
- autogluon/timeseries/models/autogluon_tabular/mlforecast.py +282 -194
- autogluon/timeseries/models/autogluon_tabular/per_step.py +513 -0
- autogluon/timeseries/models/autogluon_tabular/transforms.py +25 -18
- autogluon/timeseries/models/chronos/__init__.py +2 -1
- autogluon/timeseries/models/chronos/chronos2.py +361 -0
- autogluon/timeseries/models/chronos/model.py +219 -138
- autogluon/timeseries/models/chronos/{pipeline/utils.py → utils.py} +81 -50
- autogluon/timeseries/models/ensemble/__init__.py +37 -2
- autogluon/timeseries/models/ensemble/abstract.py +107 -0
- autogluon/timeseries/models/ensemble/array_based/__init__.py +3 -0
- autogluon/timeseries/models/ensemble/array_based/abstract.py +240 -0
- autogluon/timeseries/models/ensemble/array_based/models.py +185 -0
- autogluon/timeseries/models/ensemble/array_based/regressor/__init__.py +12 -0
- autogluon/timeseries/models/ensemble/array_based/regressor/abstract.py +88 -0
- autogluon/timeseries/models/ensemble/array_based/regressor/linear_stacker.py +186 -0
- autogluon/timeseries/models/ensemble/array_based/regressor/per_quantile_tabular.py +94 -0
- autogluon/timeseries/models/ensemble/array_based/regressor/tabular.py +107 -0
- autogluon/timeseries/models/ensemble/ensemble_selection.py +167 -0
- autogluon/timeseries/models/ensemble/per_item_greedy.py +172 -0
- autogluon/timeseries/models/ensemble/weighted/__init__.py +8 -0
- autogluon/timeseries/models/ensemble/weighted/abstract.py +45 -0
- autogluon/timeseries/models/ensemble/weighted/basic.py +91 -0
- autogluon/timeseries/models/ensemble/weighted/greedy.py +62 -0
- autogluon/timeseries/models/gluonts/__init__.py +1 -1
- autogluon/timeseries/models/gluonts/{abstract_gluonts.py → abstract.py} +148 -208
- autogluon/timeseries/models/gluonts/dataset.py +109 -0
- autogluon/timeseries/models/gluonts/{torch/models.py → models.py} +38 -22
- autogluon/timeseries/models/local/__init__.py +0 -7
- autogluon/timeseries/models/local/abstract_local_model.py +71 -74
- autogluon/timeseries/models/local/naive.py +13 -9
- autogluon/timeseries/models/local/npts.py +9 -2
- autogluon/timeseries/models/local/statsforecast.py +52 -36
- autogluon/timeseries/models/multi_window/multi_window_model.py +65 -45
- autogluon/timeseries/models/registry.py +64 -0
- autogluon/timeseries/models/toto/__init__.py +3 -0
- autogluon/timeseries/models/toto/_internal/__init__.py +9 -0
- autogluon/timeseries/models/toto/_internal/backbone/__init__.py +3 -0
- autogluon/timeseries/models/toto/_internal/backbone/attention.py +196 -0
- autogluon/timeseries/models/toto/_internal/backbone/backbone.py +262 -0
- autogluon/timeseries/models/toto/_internal/backbone/distribution.py +70 -0
- autogluon/timeseries/models/toto/_internal/backbone/kvcache.py +136 -0
- autogluon/timeseries/models/toto/_internal/backbone/rope.py +89 -0
- autogluon/timeseries/models/toto/_internal/backbone/rotary_embedding_torch.py +342 -0
- autogluon/timeseries/models/toto/_internal/backbone/scaler.py +305 -0
- autogluon/timeseries/models/toto/_internal/backbone/transformer.py +333 -0
- autogluon/timeseries/models/toto/_internal/dataset.py +165 -0
- autogluon/timeseries/models/toto/_internal/forecaster.py +423 -0
- autogluon/timeseries/models/toto/dataloader.py +108 -0
- autogluon/timeseries/models/toto/hf_pretrained_model.py +200 -0
- autogluon/timeseries/models/toto/model.py +249 -0
- autogluon/timeseries/predictor.py +685 -297
- autogluon/timeseries/regressor.py +94 -44
- autogluon/timeseries/splitter.py +8 -32
- autogluon/timeseries/trainer/__init__.py +3 -0
- autogluon/timeseries/trainer/ensemble_composer.py +444 -0
- autogluon/timeseries/trainer/model_set_builder.py +256 -0
- autogluon/timeseries/trainer/prediction_cache.py +149 -0
- autogluon/timeseries/{trainer.py → trainer/trainer.py} +387 -390
- autogluon/timeseries/trainer/utils.py +17 -0
- autogluon/timeseries/transforms/__init__.py +2 -13
- autogluon/timeseries/transforms/covariate_scaler.py +34 -40
- autogluon/timeseries/transforms/target_scaler.py +37 -20
- autogluon/timeseries/utils/constants.py +10 -0
- autogluon/timeseries/utils/datetime/lags.py +3 -5
- autogluon/timeseries/utils/datetime/seasonality.py +1 -3
- autogluon/timeseries/utils/datetime/time_features.py +2 -2
- autogluon/timeseries/utils/features.py +70 -47
- autogluon/timeseries/utils/forecast.py +19 -14
- autogluon/timeseries/utils/timer.py +173 -0
- autogluon/timeseries/utils/warning_filters.py +4 -2
- autogluon/timeseries/version.py +1 -1
- autogluon.timeseries-1.4.1b20251215-py3.11-nspkg.pth +1 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info}/METADATA +49 -36
- autogluon_timeseries-1.4.1b20251215.dist-info/RECORD +103 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info}/WHEEL +1 -1
- autogluon/timeseries/configs/presets_configs.py +0 -79
- autogluon/timeseries/evaluator.py +0 -6
- autogluon/timeseries/models/chronos/pipeline/__init__.py +0 -11
- autogluon/timeseries/models/chronos/pipeline/base.py +0 -160
- autogluon/timeseries/models/chronos/pipeline/chronos.py +0 -585
- autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +0 -518
- autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py +0 -78
- autogluon/timeseries/models/ensemble/greedy_ensemble.py +0 -170
- autogluon/timeseries/models/gluonts/torch/__init__.py +0 -0
- autogluon/timeseries/models/presets.py +0 -360
- autogluon.timeseries-1.2.1b20250224-py3.9-nspkg.pth +0 -1
- autogluon.timeseries-1.2.1b20250224.dist-info/RECORD +0 -68
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info/licenses}/LICENSE +0 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info/licenses}/NOTICE +0 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.2.1b20250224.dist-info → autogluon_timeseries-1.4.1b20251215.dist-info}/zip-safe +0 -0
autogluon/timeseries/models/ensemble/greedy_ensemble.py (deleted)
@@ -1,170 +0,0 @@
-import copy
-import logging
-import pprint
-from typing import Dict, List, Optional
-
-import numpy as np
-
-import autogluon.core as ag
-from autogluon.core.models.greedy_ensemble.ensemble_selection import EnsembleSelection
-from autogluon.timeseries import TimeSeriesDataFrame
-from autogluon.timeseries.metrics import TimeSeriesScorer
-from autogluon.timeseries.models.ensemble import AbstractTimeSeriesEnsembleModel
-from autogluon.timeseries.utils.datetime import get_seasonality
-
-logger = logging.getLogger(__name__)
-
-
-class TimeSeriesEnsembleSelection(EnsembleSelection):
-    def __init__(
-        self,
-        ensemble_size: int,
-        metric: TimeSeriesScorer,
-        problem_type: str = ag.constants.QUANTILE,
-        sorted_initialization: bool = False,
-        bagging: bool = False,
-        tie_breaker: str = "random",
-        random_state: np.random.RandomState = None,
-        prediction_length: int = 1,
-        target: str = "target",
-        eval_metric_seasonal_period: Optional[int] = None,
-        **kwargs,
-    ):
-        super().__init__(
-            ensemble_size=ensemble_size,
-            metric=metric,
-            problem_type=problem_type,
-            sorted_initialization=sorted_initialization,
-            bagging=bagging,
-            tie_breaker=tie_breaker,
-            random_state=random_state,
-            **kwargs,
-        )
-        self.prediction_length = prediction_length
-        self.target = target
-        self.eval_metric_seasonal_period = eval_metric_seasonal_period
-
-    def _fit(
-        self,
-        predictions: List[List[TimeSeriesDataFrame]],  # first dim: model, second dim: val window index
-        labels: List[TimeSeriesDataFrame],
-        time_limit: Optional[int] = None,
-        sample_weight=None,
-    ):
-        # Stack predictions for each model into a 3d tensor of shape [num_val_windows, num_rows, num_cols]
-        stacked_predictions = [np.stack(preds) for preds in predictions]
-
-        self.dummy_pred_per_window = []
-        self.scorer_per_window = []
-        self.data_future_per_window = []
-
-        for window_idx, data in enumerate(labels):
-            dummy_pred = copy.deepcopy(predictions[0][window_idx])
-            # This should never happen; sanity check to make sure that all predictions have the same index
-            assert all(dummy_pred.index.equals(pred[window_idx].index) for pred in predictions)
-
-            self.dummy_pred_per_window.append(dummy_pred)
-
-            scorer = copy.deepcopy(self.metric)
-            # Split the observed time series once to avoid repeated computations inside the evaluator
-            data_past = data.slice_by_timestep(None, -self.prediction_length)
-            data_future = data.slice_by_timestep(-self.prediction_length, None)
-            scorer.save_past_metrics(data_past, target=self.target, seasonal_period=self.eval_metric_seasonal_period)
-            self.scorer_per_window.append(scorer)
-            self.data_future_per_window.append(data_future)
-
-        super()._fit(
-            predictions=stacked_predictions,
-            labels=data_future,
-            time_limit=time_limit,
-        )
-        self.dummy_pred_per_window = None
-        self.evaluator_per_window = None
-        self.data_future_per_window = None
-
-    def _calculate_regret(self, y_true, y_pred_proba, metric=None, sample_weight=None):  # noqa
-        # Compute average score across all validation windows
-        total_score = 0.0
-        for window_idx, data_future in enumerate(self.data_future_per_window):
-            dummy_pred = self.dummy_pred_per_window[window_idx]
-            dummy_pred[list(dummy_pred.columns)] = y_pred_proba[window_idx]
-            # We use scorer.compute_metric instead of scorer.score to avoid repeated calls to scorer.save_past_metrics
-            metric_value = self.scorer_per_window[window_idx].compute_metric(
-                data_future, dummy_pred, target=self.target
-            )
-            total_score += metric.sign * metric_value
-        avg_score = total_score / len(self.data_future_per_window)
-        # score: higher is better, regret: lower is better, so we flip the sign
-        return -avg_score
-
-
-class TimeSeriesGreedyEnsemble(AbstractTimeSeriesEnsembleModel):
-    """Constructs a weighted ensemble using the greedy Ensemble Selection algorithm."""
-
-    def __init__(self, name: str, ensemble_size: int = 100, **kwargs):
-        super().__init__(name=name, **kwargs)
-        self.ensemble_size = ensemble_size
-        self.model_to_weight: Dict[str, float] = {}
-
-    def _fit_ensemble(
-        self,
-        predictions_per_window: Dict[str, List[TimeSeriesDataFrame]],
-        data_per_window: List[TimeSeriesDataFrame],
-        time_limit: Optional[int] = None,
-        **kwargs,
-    ):
-        if self.eval_metric_seasonal_period is None:
-            self.eval_metric_seasonal_period = get_seasonality(self.freq)
-        ensemble_selection = TimeSeriesEnsembleSelection(
-            ensemble_size=self.ensemble_size,
-            metric=self.eval_metric,
-            prediction_length=self.prediction_length,
-            target=self.target,
-            eval_metric_seasonal_period=self.eval_metric_seasonal_period,
-        )
-        ensemble_selection.fit(
-            predictions=list(predictions_per_window.values()),
-            labels=data_per_window,
-            time_limit=time_limit,
-        )
-        self.model_to_weight = {}
-        for model_name, weight in zip(predictions_per_window.keys(), ensemble_selection.weights_):
-            if weight != 0:
-                self.model_to_weight[model_name] = weight
-
-        weights_for_printing = {model: round(weight, 2) for model, weight in self.model_to_weight.items()}
-        logger.info(f"\tEnsemble weights: {pprint.pformat(weights_for_printing, width=200)}")
-
-    @property
-    def model_names(self) -> List[str]:
-        return list(self.model_to_weight.keys())
-
-    @property
-    def model_weights(self) -> np.ndarray:
-        return np.array(list(self.model_to_weight.values()), dtype=np.float64)
-
-    def predict(self, data: Dict[str, Optional[TimeSeriesDataFrame]], **kwargs) -> TimeSeriesDataFrame:
-        if set(data.keys()) != set(self.model_names):
-            raise ValueError(
-                f"Set of models given for prediction in {self.name} differ from those provided during initialization."
-            )
-        for model_name, model_pred in data.items():
-            if model_pred is None:
-                raise RuntimeError(f"{self.name} cannot predict because base model {model_name} failed.")
-
-        # Make sure that all predictions have same shape
-        assert len(set(pred.shape for pred in data.values())) == 1
-
-        return sum(data[model_name] * weight for model_name, weight in self.model_to_weight.items())
-
-    def get_info(self) -> dict:
-        info = super().get_info()
-        info["model_weights"] = self.model_to_weight
-        return info
-
-    def remap_base_models(self, model_refit_map: Dict[str, str]) -> None:
-        updated_weights = {}
-        for model, weight in self.model_to_weight.items():
-            model_full_name = model_refit_map.get(model, model)
-            updated_weights[model_full_name] = weight
-        self.model_to_weight = updated_weights
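The removed TimeSeriesGreedyEnsemble (apparently superseded by the new models/ensemble modules listed above) selects non-negative weights via greedy ensemble selection and predicts with a weighted sum of the base models' forecasts. A minimal sketch of that combination step, using plain pandas DataFrames as stand-ins for TimeSeriesDataFrame objects (column names and weights here are illustrative):

import pandas as pd

# Hypothetical base-model forecasts sharing the same index and columns
# (mean plus quantile columns), standing in for TimeSeriesDataFrame objects.
predictions = {
    "DeepAR": pd.DataFrame({"mean": [10.0, 11.0], "0.9": [12.0, 13.0]}),
    "SeasonalNaive": pd.DataFrame({"mean": [9.0, 10.0], "0.9": [11.0, 12.0]}),
}
# Non-zero weights as produced by greedy ensemble selection.
model_to_weight = {"DeepAR": 0.75, "SeasonalNaive": 0.25}

# Same combination rule as TimeSeriesGreedyEnsemble.predict above:
ensemble_forecast = sum(predictions[name] * w for name, w in model_to_weight.items())
print(ensemble_forecast)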
File without changes
autogluon/timeseries/models/presets.py (deleted)
@@ -1,360 +0,0 @@
-import copy
-import logging
-import re
-from collections import defaultdict
-from typing import Any, Dict, List, Optional, Type, Union
-
-from autogluon.common import space
-from autogluon.core import constants
-from autogluon.timeseries.metrics import TimeSeriesScorer
-from autogluon.timeseries.utils.features import CovariateMetadata
-
-from . import (
-    ADIDAModel,
-    ARIMAModel,
-    AutoARIMAModel,
-    AutoCESModel,
-    AutoETSModel,
-    AverageModel,
-    ChronosModel,
-    CrostonModel,
-    DeepARModel,
-    DirectTabularModel,
-    DLinearModel,
-    DynamicOptimizedThetaModel,
-    ETSModel,
-    IMAPAModel,
-    NaiveModel,
-    NPTSModel,
-    PatchTSTModel,
-    RecursiveTabularModel,
-    SeasonalAverageModel,
-    SeasonalNaiveModel,
-    SimpleFeedForwardModel,
-    TemporalFusionTransformerModel,
-    ThetaModel,
-    TiDEModel,
-    WaveNetModel,
-    ZeroModel,
-)
-from .abstract import AbstractTimeSeriesModel
-from .multi_window.multi_window_model import MultiWindowBacktestingModel
-
-logger = logging.getLogger(__name__)
-
-ModelHyperparameters = Dict[str, Any]
-
-# define the model zoo with their aliases
-MODEL_TYPES = dict(
-    SimpleFeedForward=SimpleFeedForwardModel,
-    DeepAR=DeepARModel,
-    DLinear=DLinearModel,
-    PatchTST=PatchTSTModel,
-    TemporalFusionTransformer=TemporalFusionTransformerModel,
-    TiDE=TiDEModel,
-    WaveNet=WaveNetModel,
-    RecursiveTabular=RecursiveTabularModel,
-    DirectTabular=DirectTabularModel,
-    Average=AverageModel,
-    SeasonalAverage=SeasonalAverageModel,
-    Naive=NaiveModel,
-    SeasonalNaive=SeasonalNaiveModel,
-    Zero=ZeroModel,
-    AutoETS=AutoETSModel,
-    AutoCES=AutoCESModel,
-    AutoARIMA=AutoARIMAModel,
-    DynamicOptimizedTheta=DynamicOptimizedThetaModel,
-    NPTS=NPTSModel,
-    Theta=ThetaModel,
-    ETS=ETSModel,
-    ARIMA=ARIMAModel,
-    ADIDA=ADIDAModel,
-    Croston=CrostonModel,
-    CrostonSBA=CrostonModel,  # Alias for backward compatibility
-    IMAPA=IMAPAModel,
-    Chronos=ChronosModel,
-)
-
-DEFAULT_MODEL_NAMES = {v: k for k, v in MODEL_TYPES.items()}
-DEFAULT_MODEL_PRIORITY = dict(
-    Naive=100,
-    SeasonalNaive=100,
-    Average=100,
-    SeasonalAverage=100,
-    Zero=100,
-    RecursiveTabular=90,
-    DirectTabular=85,
-    # All local models are grouped together to make sure that joblib parallel pool is reused
-    NPTS=80,
-    ETS=80,
-    CrostonSBA=80,  # Alias for backward compatibility
-    Croston=80,
-    Theta=75,
-    DynamicOptimizedTheta=75,
-    AutoETS=70,
-    AutoARIMA=60,
-    Chronos=55,
-    # Models that can early stop are trained at the end
-    TemporalFusionTransformer=45,
-    DeepAR=40,
-    TiDE=30,
-    PatchTST=30,
-    # Models below are not included in any presets
-    WaveNet=25,
-    AutoCES=10,
-    ARIMA=10,
-    ADIDA=10,
-    IMAPA=10,
-    SimpleFeedForward=10,
-)
-DEFAULT_CUSTOM_MODEL_PRIORITY = 0
-
-VALID_AG_ARGS_KEYS = {
-    "name",
-    "name_prefix",
-    "name_suffix",
-}
-
-
-def get_default_hps(key):
-    default_model_hps = {
-        "very_light": {
-            "Naive": {},
-            "SeasonalNaive": {},
-            "ETS": {},
-            "Theta": {},
-            "RecursiveTabular": {"max_num_samples": 100_000},
-            "DirectTabular": {"max_num_samples": 100_000},
-        },
-        "light": {
-            "Naive": {},
-            "SeasonalNaive": {},
-            "ETS": {},
-            "Theta": {},
-            "RecursiveTabular": {},
-            "DirectTabular": {},
-            "TemporalFusionTransformer": {},
-            "Chronos": {"model_path": "bolt_small"},
-        },
-        "light_inference": {
-            "SeasonalNaive": {},
-            "DirectTabular": {},
-            "RecursiveTabular": {},
-            "TemporalFusionTransformer": {},
-            "PatchTST": {},
-        },
-        "default": {
-            "SeasonalNaive": {},
-            "AutoETS": {},
-            "NPTS": {},
-            "DynamicOptimizedTheta": {},
-            "RecursiveTabular": {},
-            "DirectTabular": {},
-            "TemporalFusionTransformer": {},
-            "PatchTST": {},
-            "DeepAR": {},
-            "Chronos": [
-                {
-                    "ag_args": {"name_suffix": "ZeroShot"},
-                    "model_path": "bolt_base",
-                },
-                {
-                    "ag_args": {"name_suffix": "FineTuned"},
-                    "model_path": "bolt_small",
-                    "fine_tune": True,
-                    "target_scaler": "standard",
-                    "covariate_regressor": {"model_name": "CAT", "model_hyperparameters": {"iterations": 1_000}},
-                },
-            ],
-            "TiDE": {
-                "encoder_hidden_dim": 256,
-                "decoder_hidden_dim": 256,
-                "temporal_hidden_dim": 64,
-                "num_batches_per_epoch": 100,
-                "lr": 1e-4,
-            },
-        },
-    }
-    return default_model_hps[key]
-
-
-def get_preset_models(
-    freq: Optional[str],
-    prediction_length: int,
-    path: str,
-    eval_metric: Union[str, TimeSeriesScorer],
-    eval_metric_seasonal_period: Optional[int],
-    hyperparameters: Union[str, Dict, None],
-    hyperparameter_tune: bool,
-    metadata: CovariateMetadata,
-    all_assigned_names: List[str],
-    excluded_model_types: Optional[List[str]],
-    multi_window: bool = False,
-    **kwargs,
-):
-    """
-    Create a list of models according to hyperparameters. If hyperparamaters=None,
-    will create models according to presets.
-    """
-    models = []
-    if hyperparameters is None:
-        hp_string = "default"
-        hyperparameters = copy.deepcopy(get_default_hps(hp_string))
-    elif isinstance(hyperparameters, str):
-        hyperparameters = copy.deepcopy(get_default_hps(hyperparameters))
-    elif isinstance(hyperparameters, dict):
-        hyperparameters = copy.deepcopy(hyperparameters)
-    else:
-        raise ValueError(
-            f"hyperparameters must be a dict, a string or None (received {type(hyperparameters)}). "
-            f"Please see the documentation for TimeSeriesPredictor.fit"
-        )
-    hyperparameters = check_and_clean_hyperparameters(hyperparameters, must_contain_searchspace=hyperparameter_tune)
-
-    excluded_models = set()
-    if excluded_model_types is not None and len(excluded_model_types) > 0:
-        if not isinstance(excluded_model_types, list):
-            raise ValueError(f"`excluded_model_types` must be a list, received {type(excluded_model_types)}")
-        logger.info(f"Excluded model types: {excluded_model_types}")
-        for model in excluded_model_types:
-            if not isinstance(model, str):
-                raise ValueError(f"Each entry in `excluded_model_types` must be a string, received {type(model)}")
-            excluded_models.add(normalize_model_type_name(model))
-
-    all_assigned_names = set(all_assigned_names)
-
-    model_priority_list = sorted(hyperparameters.keys(), key=lambda x: DEFAULT_MODEL_PRIORITY.get(x, 0), reverse=True)
-
-    for model in model_priority_list:
-        if isinstance(model, str):
-            if model not in MODEL_TYPES:
-                raise ValueError(f"Model {model} is not supported. Available models: {sorted(MODEL_TYPES)}")
-            if model in excluded_models:
-                logger.info(
-                    f"\tFound '{model}' model in `hyperparameters`, but '{model}' "
-                    "is present in `excluded_model_types` and will be removed."
-                )
-                continue
-            if "mxnet" in model.lower():
-                logger.info(f"\tMXNet model '{model}' given in `hyperparameters` is deprecated and won't be trained. ")
-                continue
-            model_type = MODEL_TYPES[model]
-        elif isinstance(model, type):
-            if not issubclass(model, AbstractTimeSeriesModel):
-                raise ValueError(f"Custom model type {model} must inherit from `AbstractTimeSeriesModel`.")
-            model_type = model
-        else:
-            raise ValueError(
-                f"Keys of the `hyperparameters` dictionary must be strings or types, received {type(model)}."
-            )
-
-        for model_hps in hyperparameters[model]:
-            ag_args = model_hps.pop(constants.AG_ARGS, {})
-            for key in ag_args:
-                if key not in VALID_AG_ARGS_KEYS:
-                    raise ValueError(
-                        f"Model {model_type} received unknown ag_args key: {key} (valid keys {VALID_AG_ARGS_KEYS})"
-                    )
-            model_name_base = get_model_name(ag_args, model_type)
-
-            model_type_kwargs = dict(
-                name=model_name_base,
-                path=path,
-                freq=freq,
-                prediction_length=prediction_length,
-                eval_metric=eval_metric,
-                eval_metric_seasonal_period=eval_metric_seasonal_period,
-                metadata=metadata,
-                hyperparameters=model_hps,
-                **kwargs,
-            )
-
-            # add models while preventing name collisions
-            model = model_type(**model_type_kwargs)
-
-            model_type_kwargs.pop("name", None)
-            increment = 1
-            while model.name in all_assigned_names:
-                increment += 1
-                model = model_type(name=f"{model_name_base}_{increment}", **model_type_kwargs)
-
-            if multi_window:
-                model = MultiWindowBacktestingModel(model_base=model, name=model.name, **model_type_kwargs)
-
-            all_assigned_names.add(model.name)
-            models.append(model)
-
-    return models
-
-
-def normalize_model_type_name(model_name: str) -> str:
-    """Remove 'Model' suffix from the end of the string, if it's present."""
-    if model_name.endswith("Model"):
-        model_name = model_name[: -len("Model")]
-    return model_name
-
-
-def check_and_clean_hyperparameters(
-    hyperparameters: Dict[str, Union[ModelHyperparameters, List[ModelHyperparameters]]],
-    must_contain_searchspace: bool,
-) -> Dict[str, List[ModelHyperparameters]]:
-    """Convert the hyperparameters dictionary to a unified format:
-    - Remove 'Model' suffix from model names, if present
-    - Make sure that each value in the hyperparameters dict is a list with model configurations
-    - Checks if hyperparameters contain searchspaces
-    """
-    hyperparameters_clean = defaultdict(list)
-    for key, value in hyperparameters.items():
-        # Handle model names ending with "Model", e.g., "DeepARModel" is mapped to "DeepAR"
-        if isinstance(key, str):
-            key = normalize_model_type_name(key)
-        if not isinstance(value, list):
-            value = [value]
-        hyperparameters_clean[key].extend(value)
-
-    if must_contain_searchspace:
-        verify_contains_at_least_one_searchspace(hyperparameters_clean)
-    else:
-        verify_contains_no_searchspaces(hyperparameters_clean)
-
-    return dict(hyperparameters_clean)
-
-
-def get_model_name(ag_args: Dict[str, Any], model_type: Type[AbstractTimeSeriesModel]) -> str:
-    name = ag_args.get("name")
-    if name is None:
-        name_stem = re.sub(r"Model$", "", model_type.__name__)
-        name_prefix = ag_args.get("name_prefix", "")
-        name_suffix = ag_args.get("name_suffix", "")
-        name = name_prefix + name_stem + name_suffix
-    return name
-
-
-def contains_searchspace(model_hyperparameters: ModelHyperparameters) -> bool:
-    for hp_value in model_hyperparameters.values():
-        if isinstance(hp_value, space.Space):
-            return True
-    return False
-
-
-def verify_contains_at_least_one_searchspace(hyperparameters: Dict[str, List[ModelHyperparameters]]):
-    for model, model_hps_list in hyperparameters.items():
-        for model_hps in model_hps_list:
-            if contains_searchspace(model_hps):
-                return
-
-    raise ValueError(
-        "Hyperparameter tuning specified, but no model contains a hyperparameter search space. "
-        "Please disable hyperparameter tuning with `hyperparameter_tune_kwargs=None` or provide a search space "
-        "for at least one model."
-    )
-
-
-def verify_contains_no_searchspaces(hyperparameters: Dict[str, List[ModelHyperparameters]]):
-    for model, model_hps_list in hyperparameters.items():
-        for model_hps in model_hps_list:
-            if contains_searchspace(model_hps):
-                raise ValueError(
-                    f"Hyperparameter tuning not specified, so hyperparameters must have fixed values. "
-                    f"However, for model {model} hyperparameters {model_hps} contain a search space."
-                )
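The removed presets.py shows how a hyperparameters mapping (model name or model class -> one or several config dicts) was turned into concrete model objects, with "Model" suffixes normalized and ag_args controlling names. A hedged usage sketch of the corresponding public API, assuming the usual TimeSeriesPredictor.fit signature and a pre-built train_data TimeSeriesDataFrame (not constructed in this sketch):

from autogluon.timeseries import TimeSeriesPredictor

predictor = TimeSeriesPredictor(prediction_length=24)
predictor.fit(
    train_data,  # placeholder TimeSeriesDataFrame
    hyperparameters={
        # Keys may be written with or without the "Model" suffix ("DeepAR" == "DeepARModel").
        "SeasonalNaive": {},
        "DirectTabular": {"max_num_samples": 100_000},
        # A list of configs trains one model per entry; ag_args controls the displayed name.
        "Chronos": [{"model_path": "bolt_small", "ag_args": {"name_suffix": "ZeroShot"}}],
    },
)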
autogluon.timeseries-1.2.1b20250224-py3.9-nspkg.pth (deleted)
@@ -1 +0,0 @@
-import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('autogluon',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('autogluon', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('autogluon', [os.path.dirname(p)])));m = m or sys.modules.setdefault('autogluon', types.ModuleType('autogluon'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
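For readability, that single .pth line (which site.py executes at interpreter startup) expands to roughly the following statements; 'sitedir' is a local variable of site.py while the .pth line runs, so this expansion is only meaningful in that context:

import sys, types, os

has_mfs = sys.version_info > (3, 5)
# 'sitedir' is a local of site.py while it executes the .pth line.
p = os.path.join(sys._getframe(1).f_locals["sitedir"], "autogluon")
importlib = has_mfs and __import__("importlib.util")
has_mfs and __import__("importlib.machinery")
# Register an 'autogluon' module built from a namespace-package spec...
m = has_mfs and sys.modules.setdefault(
    "autogluon",
    importlib.util.module_from_spec(
        importlib.machinery.PathFinder.find_spec("autogluon", [os.path.dirname(p)])
    ),
)
# ...falling back to a plain module object on very old Pythons.
m = m or sys.modules.setdefault("autogluon", types.ModuleType("autogluon"))
# Ensure this wheel's 'autogluon' directory is on the namespace package __path__.
mp = (m or []) and m.__dict__.setdefault("__path__", [])
(p not in mp) and mp.append(p)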
autogluon.timeseries-1.2.1b20250224.dist-info/RECORD (deleted)
@@ -1,68 +0,0 @@
-autogluon.timeseries-1.2.1b20250224-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
-autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
-autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
-autogluon/timeseries/learner.py,sha256=PDAHFlos6q5JukwRE86tKoH0zxYf3nLzy7qfD_a5NYY,13849
-autogluon/timeseries/predictor.py,sha256=HTE8a_R_9U0z-KlxyoELm-64BXNRzFu3mIEbTabQNEc,85171
-autogluon/timeseries/regressor.py,sha256=dIXttb0SOGS8IAwZOMANNDc796spN0LMysGUvuKgskU,9623
-autogluon/timeseries/splitter.py,sha256=yzPca9p2bWV-_VJAptUyyzQsxu-uixAdpMoGQtDzMD4,3205
-autogluon/timeseries/trainer.py,sha256=L9FT5qERcqlWTgH9IgE6QsO0aBNj2nivRKF2Oy4UJOk,57250
-autogluon/timeseries/version.py,sha256=wvpihE7pNO9z8UVRsqZkNCn6OXTvBuhtKEB6VMhlhxM,91
-autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
-autogluon/timeseries/configs/presets_configs.py,sha256=cLat8ecLlWrI-SC5KLBDCX2SbVXaucemy2pjxJAtSY0,2543
-autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
-autogluon/timeseries/dataset/ts_dataframe.py,sha256=SodnGhEA2V-hnfYHuAkH8rK4hQlLH8K5Tb6dsGapvPM,47161
-autogluon/timeseries/metrics/__init__.py,sha256=dJCrZ2cHwqhqNctwQjwG-FHgGUmzIFT-D0z72f4RAVM,2104
-autogluon/timeseries/metrics/abstract.py,sha256=8IWEHU8Xt14sivmsX3iXASXCzn9fA0w9FLSca2zYvjI,8190
-autogluon/timeseries/metrics/point.py,sha256=g7L8jVUKc5YVjETZ-B7syK9nZswfKxLFlkN-rTbJdlg,15777
-autogluon/timeseries/metrics/quantile.py,sha256=eemdLbo3y2wstnVkuA-f55YXywctUmSW1EhIW4BsoH4,3965
-autogluon/timeseries/metrics/utils.py,sha256=HuDe1BNe8yJU4f_DKM913nNrUueoRaw6zhxm1-S20s0,910
-autogluon/timeseries/models/__init__.py,sha256=MYD9JJ-wUDE5B6jW6E6LU2eXQ6vflfQBvqQJkdzJa3A,1189
-autogluon/timeseries/models/presets.py,sha256=GezDk-p591Mlhm5UTIjKKJqQE2mnWw9rdsDYKen4zJo,12478
-autogluon/timeseries/models/abstract/__init__.py,sha256=wvDsQAZIV0N3AwBeMaGItoQ82trEfnT-nol2AAOIxBg,102
-autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=tfFAu-NQ1UVDh9nxMKNkixzZoJw98Dxs4WSsXCSu9i4,43885
-autogluon/timeseries/models/abstract/model_trial.py,sha256=ENPg_7nsdxIvaNM0o0UShZ3x8jFlRmwRc5m0fGPC0TM,3720
-autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=r9i6jWcyeLHYClkcMSKRVsfrkBUMxpDrTATNTBc_qgQ,136
-autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=H2UlpnJcIIEi_swYn9AJUPFGT4qwFSmzZ7yvC3I2pUU,33039
-autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=XVoy8KpvoeX38lHHAXq4Be9LCxKjxZ36SOFeSAICRFM,2524
-autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
-autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
-autogluon/timeseries/models/chronos/model.py,sha256=ijupqg4S6kRxdJWanNt6bnyPoGAaJluUHHmZpyQrfDE,31211
-autogluon/timeseries/models/chronos/pipeline/__init__.py,sha256=N-YZH9BGBoi99r5cznJe1zEEjwjIg7cOYIHZkKuJq44,247
-autogluon/timeseries/models/chronos/pipeline/base.py,sha256=14OAKHmio6LmO4mVom2mPGB0CvIrOjMGJzb-MVSAq-s,5596
-autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=uFJLsSb2WQiSrmDZ0g2mO-lhTFUlq7vplGRBXZ9_VBk,22591
-autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=2MJuik-YFgONZ3X2DciAph5So6ABys5ppQhBC81gLyk,20083
-autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=dtDX5Pyu95bGv7qmqgfUc1iYowWPY84dnGN0uyqyHyQ,13131
-autogluon/timeseries/models/ensemble/__init__.py,sha256=kFr11Gmt7lQJu9Rr8HuIPphQN5l1TsoorfbJm_O3a_s,128
-autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py,sha256=OM8S0mUSSlnzNh2GNxM-xFGgXVVecXj50aKtC945ioc,3405
-autogluon/timeseries/models/ensemble/greedy_ensemble.py,sha256=UPEmNx-RSuqCXS7V093NEid_AwwEigM6AXMcZtof8vg,7230
-autogluon/timeseries/models/gluonts/__init__.py,sha256=asC1PTj4j9xMbilvk1IT1julnpeoKbv5ZNuAR6-DFgA,361
-autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=brf2lIMHH4a_AETwyOcOBVPWqWhLxr8iolJ3Z5AR8MA,30621
-autogluon/timeseries/models/gluonts/torch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/timeseries/models/gluonts/torch/models.py,sha256=xlhblFM4DG_GQVj_4dxqWntLh1fPMyRF63eXPs_Z098,25672
-autogluon/timeseries/models/local/__init__.py,sha256=e2UImoJhmj70E148IIObv90C_bHxgyLNk6YsS4p7pfs,701
-autogluon/timeseries/models/local/abstract_local_model.py,sha256=CYDvOXs7ZNzyz75gMOAKI1socB_qGep51FSPfzXMopA,11948
-autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F_VAp6-jb_5SUE,7249
-autogluon/timeseries/models/local/npts.py,sha256=Bp74doKnfpGE8ywP4FWOCI_RwRMsmgocYDfGtq764DA,4143
-autogluon/timeseries/models/local/statsforecast.py,sha256=s3Byp7WAUy0Rnfl1qYMSIm44MKD9t8E732xuNLk_aao,32615
-autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
-autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=kVHAGNQC8ahEmAgnnLa38hcbxMFC_Tl1lHFJMos2G8w,11985
-autogluon/timeseries/transforms/__init__.py,sha256=Stym_998LZQgKPuFN4_w1AcJFh4_AeaQLXgXLzv53kY,299
-autogluon/timeseries/transforms/covariate_scaler.py,sha256=Xa1qBKAMt8TRDEeTcc4Gtym1iXt2_Yat8gwQQV6lIFs,7013
-autogluon/timeseries/transforms/target_scaler.py,sha256=R-PhcaXTTRkl4bkguSy7p2QU7Grv2-ew3QsNT4NJv-I,5382
-autogluon/timeseries/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/timeseries/utils/features.py,sha256=FYgcv_i5fFfxGOW86GLLrLanmaIpA2OXoMwUwvPsdkI,22443
-autogluon/timeseries/utils/forecast.py,sha256=kMeP2KxrlX5Gk31D2BesuVIc0F4hEahvAcFpJHgcWG4,2082
-autogluon/timeseries/utils/warning_filters.py,sha256=FyXvYW_ylULcZP4R9xNBxojKtvadW3uygXwHK_xHq5g,2522
-autogluon/timeseries/utils/datetime/__init__.py,sha256=bTMR8jLh1LW55vHjbOr1zvWRMF_PqbvxpS-cUcNIDWI,173
-autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbjy4DJ_YYOGuu9x4,1341
-autogluon/timeseries/utils/datetime/lags.py,sha256=gQDk5_zmsY5DUWDUpSaCKYkQ9nHKKY-LsywJQRAoYSk,5988
-autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
-autogluon/timeseries/utils/datetime/time_features.py,sha256=MjLi3zQ00uWWJtXH9oGX2GJkTbvjdSiuabSa4kcVuxE,2672
-autogluon.timeseries-1.2.1b20250224.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
-autogluon.timeseries-1.2.1b20250224.dist-info/METADATA,sha256=11lBa7naRjQBmasccuaxeYIuELnFuL4uO1hbqcW-sZc,12684
-autogluon.timeseries-1.2.1b20250224.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
-autogluon.timeseries-1.2.1b20250224.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-autogluon.timeseries-1.2.1b20250224.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
-autogluon.timeseries-1.2.1b20250224.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
-autogluon.timeseries-1.2.1b20250224.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-autogluon.timeseries-1.2.1b20250224.dist-info/RECORD,,
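Each RECORD entry above follows the standard wheel layout path,sha256=<digest>,<size-in-bytes>, where the digest is the unpadded urlsafe base64 encoding of the file's SHA-256 hash. A small sketch for recomputing one entry (the install path shown is hypothetical):

import base64
import hashlib

def record_hash(path: str) -> str:
    # Wheel RECORD files store SHA-256 digests as unpadded urlsafe base64.
    digest = hashlib.sha256(open(path, "rb").read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

# Compare the output against the RECORD line for version.py above.
print(record_hash("site-packages/autogluon/timeseries/version.py"))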
File without changes
File without changes
File without changes
File without changes
File without changes