autogluon.timeseries 1.4.1b20250907__py3-none-any.whl → 1.5.1b20260122__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of autogluon.timeseries has been flagged as a potentially problematic release.

Files changed (95)
  1. autogluon/timeseries/configs/hyperparameter_presets.py +13 -28
  2. autogluon/timeseries/configs/predictor_presets.py +23 -39
  3. autogluon/timeseries/dataset/ts_dataframe.py +97 -86
  4. autogluon/timeseries/learner.py +70 -35
  5. autogluon/timeseries/metrics/__init__.py +4 -4
  6. autogluon/timeseries/metrics/abstract.py +8 -8
  7. autogluon/timeseries/metrics/point.py +9 -9
  8. autogluon/timeseries/metrics/quantile.py +5 -5
  9. autogluon/timeseries/metrics/utils.py +4 -4
  10. autogluon/timeseries/models/__init__.py +4 -1
  11. autogluon/timeseries/models/abstract/abstract_timeseries_model.py +52 -50
  12. autogluon/timeseries/models/abstract/model_trial.py +2 -1
  13. autogluon/timeseries/models/abstract/tunable.py +8 -8
  14. autogluon/timeseries/models/autogluon_tabular/mlforecast.py +58 -62
  15. autogluon/timeseries/models/autogluon_tabular/per_step.py +27 -16
  16. autogluon/timeseries/models/autogluon_tabular/transforms.py +11 -9
  17. autogluon/timeseries/models/chronos/__init__.py +2 -1
  18. autogluon/timeseries/models/chronos/chronos2.py +395 -0
  19. autogluon/timeseries/models/chronos/model.py +127 -89
  20. autogluon/timeseries/models/chronos/{pipeline/utils.py → utils.py} +69 -37
  21. autogluon/timeseries/models/ensemble/__init__.py +36 -2
  22. autogluon/timeseries/models/ensemble/abstract.py +14 -46
  23. autogluon/timeseries/models/ensemble/array_based/__init__.py +3 -0
  24. autogluon/timeseries/models/ensemble/array_based/abstract.py +240 -0
  25. autogluon/timeseries/models/ensemble/array_based/models.py +185 -0
  26. autogluon/timeseries/models/ensemble/array_based/regressor/__init__.py +12 -0
  27. autogluon/timeseries/models/ensemble/array_based/regressor/abstract.py +88 -0
  28. autogluon/timeseries/models/ensemble/array_based/regressor/linear_stacker.py +186 -0
  29. autogluon/timeseries/models/ensemble/array_based/regressor/per_quantile_tabular.py +94 -0
  30. autogluon/timeseries/models/ensemble/array_based/regressor/tabular.py +107 -0
  31. autogluon/timeseries/models/ensemble/{greedy.py → ensemble_selection.py} +41 -61
  32. autogluon/timeseries/models/ensemble/per_item_greedy.py +172 -0
  33. autogluon/timeseries/models/ensemble/weighted/__init__.py +8 -0
  34. autogluon/timeseries/models/ensemble/weighted/abstract.py +45 -0
  35. autogluon/timeseries/models/ensemble/{basic.py → weighted/basic.py} +25 -22
  36. autogluon/timeseries/models/ensemble/weighted/greedy.py +64 -0
  37. autogluon/timeseries/models/gluonts/abstract.py +32 -31
  38. autogluon/timeseries/models/gluonts/dataset.py +11 -11
  39. autogluon/timeseries/models/gluonts/models.py +0 -7
  40. autogluon/timeseries/models/local/__init__.py +0 -7
  41. autogluon/timeseries/models/local/abstract_local_model.py +15 -18
  42. autogluon/timeseries/models/local/naive.py +2 -2
  43. autogluon/timeseries/models/local/npts.py +7 -1
  44. autogluon/timeseries/models/local/statsforecast.py +13 -13
  45. autogluon/timeseries/models/multi_window/multi_window_model.py +39 -24
  46. autogluon/timeseries/models/registry.py +3 -4
  47. autogluon/timeseries/models/toto/__init__.py +3 -0
  48. autogluon/timeseries/models/toto/_internal/__init__.py +9 -0
  49. autogluon/timeseries/models/toto/_internal/backbone/__init__.py +3 -0
  50. autogluon/timeseries/models/toto/_internal/backbone/attention.py +196 -0
  51. autogluon/timeseries/models/toto/_internal/backbone/backbone.py +262 -0
  52. autogluon/timeseries/models/toto/_internal/backbone/distribution.py +70 -0
  53. autogluon/timeseries/models/toto/_internal/backbone/kvcache.py +136 -0
  54. autogluon/timeseries/models/toto/_internal/backbone/rope.py +89 -0
  55. autogluon/timeseries/models/toto/_internal/backbone/rotary_embedding_torch.py +342 -0
  56. autogluon/timeseries/models/toto/_internal/backbone/scaler.py +305 -0
  57. autogluon/timeseries/models/toto/_internal/backbone/transformer.py +333 -0
  58. autogluon/timeseries/models/toto/_internal/dataset.py +165 -0
  59. autogluon/timeseries/models/toto/_internal/forecaster.py +423 -0
  60. autogluon/timeseries/models/toto/dataloader.py +108 -0
  61. autogluon/timeseries/models/toto/hf_pretrained_model.py +200 -0
  62. autogluon/timeseries/models/toto/model.py +249 -0
  63. autogluon/timeseries/predictor.py +541 -162
  64. autogluon/timeseries/regressor.py +27 -30
  65. autogluon/timeseries/splitter.py +3 -27
  66. autogluon/timeseries/trainer/ensemble_composer.py +444 -0
  67. autogluon/timeseries/trainer/model_set_builder.py +9 -9
  68. autogluon/timeseries/trainer/prediction_cache.py +16 -16
  69. autogluon/timeseries/trainer/trainer.py +300 -279
  70. autogluon/timeseries/trainer/utils.py +17 -0
  71. autogluon/timeseries/transforms/covariate_scaler.py +8 -8
  72. autogluon/timeseries/transforms/target_scaler.py +15 -15
  73. autogluon/timeseries/utils/constants.py +10 -0
  74. autogluon/timeseries/utils/datetime/lags.py +1 -3
  75. autogluon/timeseries/utils/datetime/seasonality.py +1 -3
  76. autogluon/timeseries/utils/features.py +31 -14
  77. autogluon/timeseries/utils/forecast.py +6 -7
  78. autogluon/timeseries/utils/timer.py +173 -0
  79. autogluon/timeseries/version.py +1 -1
  80. autogluon.timeseries-1.5.1b20260122-py3.11-nspkg.pth +1 -0
  81. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info}/METADATA +39 -22
  82. autogluon_timeseries-1.5.1b20260122.dist-info/RECORD +103 -0
  83. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info}/WHEEL +1 -1
  84. autogluon/timeseries/evaluator.py +0 -6
  85. autogluon/timeseries/models/chronos/pipeline/__init__.py +0 -10
  86. autogluon/timeseries/models/chronos/pipeline/base.py +0 -160
  87. autogluon/timeseries/models/chronos/pipeline/chronos.py +0 -544
  88. autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +0 -580
  89. autogluon.timeseries-1.4.1b20250907-py3.9-nspkg.pth +0 -1
  90. autogluon.timeseries-1.4.1b20250907.dist-info/RECORD +0 -75
  91. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info/licenses}/LICENSE +0 -0
  92. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info/licenses}/NOTICE +0 -0
  93. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info}/namespace_packages.txt +0 -0
  94. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info}/top_level.txt +0 -0
  95. {autogluon.timeseries-1.4.1b20250907.dist-info → autogluon_timeseries-1.5.1b20260122.dist-info}/zip-safe +0 -0
autogluon/timeseries/learner.py

@@ -1,15 +1,14 @@
  import logging
  import reprlib
  import time
- from typing import Any, Literal, Optional, Type, Union
+ from typing import Any, Literal, Type

  import pandas as pd

  from autogluon.core.learner import AbstractLearner
- from autogluon.timeseries.dataset.ts_dataframe import TimeSeriesDataFrame
+ from autogluon.timeseries.dataset import TimeSeriesDataFrame
  from autogluon.timeseries.metrics import TimeSeriesScorer, check_get_evaluation_metric
  from autogluon.timeseries.models.abstract import AbstractTimeSeriesModel
- from autogluon.timeseries.splitter import AbstractWindowSplitter
  from autogluon.timeseries.trainer import TimeSeriesTrainer
  from autogluon.timeseries.utils.features import TimeSeriesFeatureGenerator
  from autogluon.timeseries.utils.forecast import make_future_data_frame
@@ -26,12 +25,12 @@ class TimeSeriesLearner(AbstractLearner):
  self,
  path_context: str,
  target: str = "target",
- known_covariates_names: Optional[list[str]] = None,
+ known_covariates_names: list[str] | None = None,
  trainer_type: Type[TimeSeriesTrainer] = TimeSeriesTrainer,
- eval_metric: Union[str, TimeSeriesScorer, None] = None,
+ eval_metric: str | TimeSeriesScorer | None = None,
  prediction_length: int = 1,
  cache_predictions: bool = True,
- ensemble_model_type: Optional[Type] = None,
+ ensemble_model_type: Type | None = None,
  **kwargs,
  ):
  super().__init__(path_context=path_context)
@@ -42,7 +41,7 @@ class TimeSeriesLearner(AbstractLearner):
  self.prediction_length = prediction_length
  self.quantile_levels = kwargs.get("quantile_levels", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
  self.cache_predictions = cache_predictions
- self.freq: Optional[str] = None
+ self.freq: str | None = None
  self.ensemble_model_type = ensemble_model_type

  self.feature_generator = TimeSeriesFeatureGenerator(
@@ -56,13 +55,15 @@ class TimeSeriesLearner(AbstractLearner):
  def fit(
  self,
  train_data: TimeSeriesDataFrame,
- hyperparameters: Union[str, dict],
- val_data: Optional[TimeSeriesDataFrame] = None,
- hyperparameter_tune_kwargs: Optional[Union[str, dict]] = None,
- time_limit: Optional[float] = None,
- val_splitter: Optional[AbstractWindowSplitter] = None,
- refit_every_n_windows: Optional[int] = 1,
- random_seed: Optional[int] = None,
+ hyperparameters: str | dict,
+ val_data: TimeSeriesDataFrame | None = None,
+ hyperparameter_tune_kwargs: str | dict | None = None,
+ ensemble_hyperparameters: dict[str, Any] | list[dict[str, Any]] | None = None,
+ time_limit: float | None = None,
+ num_val_windows: tuple[int, ...] = (1,),
+ val_step_size: int | None = None,
+ refit_every_n_windows: int | None = 1,
+ random_seed: int | None = None,
  **kwargs,
  ) -> None:
  self._time_limit = time_limit
@@ -86,7 +87,8 @@ class TimeSeriesLearner(AbstractLearner):
  skip_model_selection=kwargs.get("skip_model_selection", False),
  enable_ensemble=kwargs.get("enable_ensemble", True),
  covariate_metadata=self.feature_generator.covariate_metadata,
- val_splitter=val_splitter,
+ num_val_windows=num_val_windows,
+ val_step_size=val_step_size,
  refit_every_n_windows=refit_every_n_windows,
  cache_predictions=self.cache_predictions,
  ensemble_model_type=self.ensemble_model_type,
@@ -94,7 +96,7 @@ class TimeSeriesLearner(AbstractLearner):
  )

  assert issubclass(self.trainer_type, TimeSeriesTrainer)
- self.trainer: Optional[TimeSeriesTrainer] = self.trainer_type(**trainer_init_kwargs)
+ self.trainer: TimeSeriesTrainer | None = self.trainer_type(**trainer_init_kwargs)
  self.trainer_path = self.trainer.path
  self.save()

@@ -111,6 +113,7 @@ class TimeSeriesLearner(AbstractLearner):
  val_data=val_data,
  hyperparameters=hyperparameters,
  hyperparameter_tune_kwargs=hyperparameter_tune_kwargs,
+ ensemble_hyperparameters=ensemble_hyperparameters,
  excluded_model_types=kwargs.get("excluded_model_types"),
  time_limit=time_limit,
  random_seed=random_seed,
@@ -121,9 +124,9 @@ class TimeSeriesLearner(AbstractLearner):

  def _align_covariates_with_forecast_index(
  self,
- known_covariates: Optional[TimeSeriesDataFrame],
+ known_covariates: TimeSeriesDataFrame | None,
  data: TimeSeriesDataFrame,
- ) -> Optional[TimeSeriesDataFrame]:
+ ) -> TimeSeriesDataFrame | None:
  """Select the relevant item_ids and timestamps from the known_covariates dataframe.

  If some of the item_ids or timestamps are missing, an exception is raised.
@@ -162,10 +165,10 @@ class TimeSeriesLearner(AbstractLearner):
  def predict(
  self,
  data: TimeSeriesDataFrame,
- known_covariates: Optional[TimeSeriesDataFrame] = None,
- model: Optional[Union[str, AbstractTimeSeriesModel]] = None,
+ known_covariates: TimeSeriesDataFrame | None = None,
+ model: str | AbstractTimeSeriesModel | None = None,
  use_cache: bool = True,
- random_seed: Optional[int] = None,
+ random_seed: int | None = None,
  **kwargs,
  ) -> TimeSeriesDataFrame:
  data = self.feature_generator.transform(data)
@@ -183,8 +186,8 @@ class TimeSeriesLearner(AbstractLearner):
  def score(
  self,
  data: TimeSeriesDataFrame,
- model: Optional[Union[str, AbstractTimeSeriesModel]] = None,
- metric: Union[str, TimeSeriesScorer, None] = None,
+ model: str | AbstractTimeSeriesModel | None = None,
+ metric: str | TimeSeriesScorer | None = None,
  use_cache: bool = True,
  ) -> float:
  data = self.feature_generator.transform(data)
@@ -193,8 +196,8 @@ class TimeSeriesLearner(AbstractLearner):
  def evaluate(
  self,
  data: TimeSeriesDataFrame,
- model: Optional[str] = None,
- metrics: Optional[Union[str, TimeSeriesScorer, list[Union[str, TimeSeriesScorer]]]] = None,
+ model: str | None = None,
+ metrics: str | TimeSeriesScorer | list[str | TimeSeriesScorer] | None = None,
  use_cache: bool = True,
  ) -> dict[str, float]:
  data = self.feature_generator.transform(data)
@@ -202,15 +205,15 @@ class TimeSeriesLearner(AbstractLearner):

  def get_feature_importance(
  self,
- data: Optional[TimeSeriesDataFrame] = None,
- model: Optional[str] = None,
- metric: Optional[Union[str, TimeSeriesScorer]] = None,
- features: Optional[list[str]] = None,
- time_limit: Optional[float] = None,
+ data: TimeSeriesDataFrame | None = None,
+ model: str | None = None,
+ metric: str | TimeSeriesScorer | None = None,
+ features: list[str] | None = None,
+ time_limit: float | None = None,
  method: Literal["naive", "permutation"] = "permutation",
  subsample_size: int = 50,
- num_iterations: Optional[int] = None,
- random_seed: Optional[int] = None,
+ num_iterations: int | None = None,
+ random_seed: int | None = None,
  relative_scores: bool = False,
  include_confidence_band: bool = True,
  confidence_level: float = 0.99,
@@ -271,9 +274,9 @@ class TimeSeriesLearner(AbstractLearner):

  def leaderboard(
  self,
- data: Optional[TimeSeriesDataFrame] = None,
+ data: TimeSeriesDataFrame | None = None,
  extra_info: bool = False,
- extra_metrics: Optional[list[Union[str, TimeSeriesScorer]]] = None,
+ extra_metrics: list[str | TimeSeriesScorer] | None = None,
  use_cache: bool = True,
  ) -> pd.DataFrame:
  if data is not None:
@@ -300,7 +303,7 @@ class TimeSeriesLearner(AbstractLearner):
  return learner_info

  def persist_trainer(
- self, models: Union[Literal["all", "best"], list[str]] = "all", with_ancestors: bool = False
+ self, models: Literal["all", "best"] | list[str] = "all", with_ancestors: bool = False
  ) -> list[str]:
  """Loads models and trainer in memory so that they don't have to be
  loaded during predictions
@@ -328,3 +331,35 @@

  def refit_full(self, model: str = "all") -> dict[str, str]:
  return self.load_trainer().refit_full(model=model)
+
+ def backtest_predictions(
+ self,
+ data: TimeSeriesDataFrame | None,
+ model_names: list[str],
+ num_val_windows: int | None = None,
+ val_step_size: int | None = None,
+ use_cache: bool = True,
+ ) -> dict[str, list[TimeSeriesDataFrame]]:
+ if data is not None:
+ data = self.feature_generator.transform(data)
+ return self.load_trainer().backtest_predictions(
+ model_names=model_names,
+ data=data,
+ num_val_windows=num_val_windows,
+ val_step_size=val_step_size,
+ use_cache=use_cache,
+ )
+
+ def backtest_targets(
+ self,
+ data: TimeSeriesDataFrame | None,
+ num_val_windows: int | None = None,
+ val_step_size: int | None = None,
+ ) -> list[TimeSeriesDataFrame]:
+ if data is not None:
+ data = self.feature_generator.transform(data)
+ return self.load_trainer().backtest_targets(
+ data=data,
+ num_val_windows=num_val_windows,
+ val_step_size=val_step_size,
+ )
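
The hunks above are from autogluon/timeseries/learner.py: the old val_splitter argument is replaced by num_val_windows / val_step_size, ensemble_hyperparameters is threaded through to the trainer, and the trainer's multi-window backtesting is exposed on the learner. A minimal usage sketch based only on the signatures added here; the names learner and test_data are assumptions (an already-fitted TimeSeriesLearner and a TimeSeriesDataFrame), not part of the diff:

# Hypothetical sketch of the new backtesting accessors.
preds_per_model = learner.backtest_predictions(
    data=test_data,                  # data=None presumably falls back to the data cached during fit
    model_names=["DeepAR", "RecursiveTabular"],
    num_val_windows=3,               # evaluate each model on 3 validation windows
    val_step_size=None,              # let the trainer choose the spacing between windows
)
window_targets = learner.backtest_targets(data=test_data, num_val_windows=3)
# preds_per_model maps each model name to one forecast TimeSeriesDataFrame per window;
# window_targets holds the matching ground-truth frames, presumably in the same window order.
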
autogluon/timeseries/metrics/__init__.py

@@ -1,7 +1,7 @@
  from __future__ import annotations

  from pprint import pformat
- from typing import Any, Optional, Sequence, Type, Union
+ from typing import Any, Sequence, Type

  import numpy as np

@@ -54,10 +54,10 @@ EXPERIMENTAL_METRICS: dict[str, Type[TimeSeriesScorer]] = {


  def check_get_evaluation_metric(
- eval_metric: Union[str, TimeSeriesScorer, Type[TimeSeriesScorer], None],
+ eval_metric: str | TimeSeriesScorer | Type[TimeSeriesScorer] | None,
  prediction_length: int,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float] | np.ndarray] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | np.ndarray | None = None,
  ) -> TimeSeriesScorer:
  """Factory method for TimeSeriesScorer objects.

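The hunks above are from autogluon/timeseries/metrics/__init__.py; only the annotations of the check_get_evaluation_metric factory change. For context, a small sketch of what the factory does (the "MASE" key is one of the registered metric names; the snippet itself is not part of the diff):

from autogluon.timeseries.metrics import check_get_evaluation_metric

# Resolve a metric name into a TimeSeriesScorer configured for a 24-step horizon.
scorer = check_get_evaluation_metric("MASE", prediction_length=24)
print(type(scorer).__name__)  # expected: MASE
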
autogluon/timeseries/metrics/abstract.py

@@ -1,5 +1,5 @@
  import warnings
- from typing import Optional, Sequence, Union, overload
+ from typing import Sequence, overload

  import numpy as np
  import pandas as pd
@@ -52,13 +52,13 @@ class TimeSeriesScorer:
  optimum: float = 0.0
  optimized_by_median: bool = False
  needs_quantile: bool = False
- equivalent_tabular_regression_metric: Optional[str] = None
+ equivalent_tabular_regression_metric: str | None = None

  def __init__(
  self,
  prediction_length: int = 1,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float]] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | None = None,
  ):
  self.prediction_length = int(prediction_length)
  if self.prediction_length < 1:
@@ -192,7 +192,7 @@ class TimeSeriesScorer:
  return self.optimum - self.score(*args, **kwargs)

  @staticmethod
- def _safemean(array: Union[np.ndarray, pd.Series]) -> float:
+ def _safemean(array: np.ndarray | pd.Series) -> float:
  """Compute mean of a numpy array-like object, ignoring inf, -inf and nan values."""
  return float(np.mean(array[np.isfinite(array)]))

@@ -240,13 +240,13 @@ class TimeSeriesScorer:
  @overload
  @staticmethod
  def check_get_horizon_weight(
- horizon_weight: Union[Sequence[float], np.ndarray], prediction_length: int
+ horizon_weight: Sequence[float] | np.ndarray, prediction_length: int
  ) -> np.ndarray: ...

  @staticmethod
  def check_get_horizon_weight(
- horizon_weight: Union[Sequence[float], np.ndarray, None], prediction_length: int
- ) -> Optional[np.ndarray]:
+ horizon_weight: Sequence[float] | np.ndarray | None, prediction_length: int
+ ) -> np.ndarray | None:
  """Convert horizon_weight to a non-negative numpy array that sums up to prediction_length.
  Raises an exception if horizon_weight has an invalid shape or contains invalid values.

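The hunks above are from autogluon/timeseries/metrics/abstract.py. Per the docstring kept in the last hunk, check_get_horizon_weight converts the weights into a non-negative array that sums to prediction_length. A small sketch of that contract (the numbers follow from the docstring's description, not from the diff itself):

import numpy as np
from autogluon.timeseries.metrics import TimeSeriesScorer

# [1, 1, 2] sums to 4; rescaling so the weights sum to prediction_length=3 multiplies each value by 3/4.
weights = TimeSeriesScorer.check_get_horizon_weight([1.0, 1.0, 2.0], prediction_length=3)
print(weights)                         # expected: [0.75 0.75 1.5 ]
assert np.isclose(weights.sum(), 3.0)
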
autogluon/timeseries/metrics/point.py

@@ -1,6 +1,6 @@
  import logging
  import warnings
- from typing import Optional, Sequence
+ from typing import Sequence

  import numpy as np
  import pandas as pd
@@ -279,13 +279,13 @@ class MASE(TimeSeriesScorer):
  def __init__(
  self,
  prediction_length: int = 1,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float]] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | None = None,
  ):
  super().__init__(
  prediction_length=prediction_length, seasonal_period=seasonal_period, horizon_weight=horizon_weight
  )
- self._past_abs_seasonal_error: Optional[pd.Series] = None
+ self._past_abs_seasonal_error: pd.Series | None = None

  def save_past_metrics(
  self, data_past: TimeSeriesDataFrame, target: str = "target", seasonal_period: int = 1, **kwargs
@@ -353,13 +353,13 @@ class RMSSE(TimeSeriesScorer):
  def __init__(
  self,
  prediction_length: int = 1,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float]] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | None = None,
  ):
  super().__init__(
  prediction_length=prediction_length, seasonal_period=seasonal_period, horizon_weight=horizon_weight
  )
- self._past_squared_seasonal_error: Optional[pd.Series] = None
+ self._past_squared_seasonal_error: pd.Series | None = None

  def save_past_metrics(
  self, data_past: TimeSeriesDataFrame, target: str = "target", seasonal_period: int = 1, **kwargs
@@ -471,8 +471,8 @@ class WCD(TimeSeriesScorer):
  self,
  alpha: float = 0.5,
  prediction_length: int = 1,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float]] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | None = None,
  ):
  super().__init__(
  prediction_length=prediction_length, seasonal_period=seasonal_period, horizon_weight=horizon_weight
autogluon/timeseries/metrics/quantile.py

@@ -1,9 +1,9 @@
- from typing import Optional, Sequence
+ from typing import Sequence

  import numpy as np
  import pandas as pd

- from autogluon.timeseries.dataset.ts_dataframe import TimeSeriesDataFrame
+ from autogluon.timeseries.dataset import TimeSeriesDataFrame

  from .abstract import TimeSeriesScorer
  from .utils import in_sample_abs_seasonal_error
@@ -92,13 +92,13 @@ class SQL(TimeSeriesScorer):
  def __init__(
  self,
  prediction_length: int = 1,
- seasonal_period: Optional[int] = None,
- horizon_weight: Optional[Sequence[float]] = None,
+ seasonal_period: int | None = None,
+ horizon_weight: Sequence[float] | None = None,
  ):
  super().__init__(
  prediction_length=prediction_length, seasonal_period=seasonal_period, horizon_weight=horizon_weight
  )
- self._past_abs_seasonal_error: Optional[pd.Series] = None
+ self._past_abs_seasonal_error: pd.Series | None = None

  def save_past_metrics(
  self, data_past: TimeSeriesDataFrame, target: str = "target", seasonal_period: int = 1, **kwargs
autogluon/timeseries/metrics/utils.py

@@ -1,18 +1,18 @@
  import pandas as pd

- from autogluon.timeseries.dataset.ts_dataframe import ITEMID
+ from autogluon.timeseries.dataset import TimeSeriesDataFrame


  def _get_seasonal_diffs(*, y_past: pd.Series, seasonal_period: int = 1) -> pd.Series:
- return y_past.groupby(level=ITEMID, sort=False).diff(seasonal_period).abs()
+ return y_past.groupby(level=TimeSeriesDataFrame.ITEMID, sort=False).diff(seasonal_period).abs()


  def in_sample_abs_seasonal_error(*, y_past: pd.Series, seasonal_period: int = 1) -> pd.Series:
  """Compute seasonal naive forecast error (predict value from seasonal_period steps ago) for each time series."""
  seasonal_diffs = _get_seasonal_diffs(y_past=y_past, seasonal_period=seasonal_period)
- return seasonal_diffs.groupby(level=ITEMID, sort=False).mean().fillna(1.0)
+ return seasonal_diffs.groupby(level=TimeSeriesDataFrame.ITEMID, sort=False).mean().fillna(1.0)


  def in_sample_squared_seasonal_error(*, y_past: pd.Series, seasonal_period: int = 1) -> pd.Series:
  seasonal_diffs = _get_seasonal_diffs(y_past=y_past, seasonal_period=seasonal_period)
- return seasonal_diffs.pow(2.0).groupby(level=ITEMID, sort=False).mean().fillna(1.0)
+ return seasonal_diffs.pow(2.0).groupby(level=TimeSeriesDataFrame.ITEMID, sort=False).mean().fillna(1.0)
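
The hunks above are from autogluon/timeseries/metrics/utils.py: the module-level ITEMID constant is replaced by the TimeSeriesDataFrame.ITEMID class attribute. A short sketch of the equivalent groupby at a call site, assuming ITEMID still names the item_id index level (the synthetic dataset is illustrative):

import pandas as pd
from autogluon.timeseries import TimeSeriesDataFrame

df = pd.DataFrame({
    "item_id": ["A"] * 3 + ["B"] * 3,
    "timestamp": list(pd.date_range("2024-01-01", periods=3, freq="D")) * 2,
    "target": [1.0, 2.0, 4.0, 10.0, 20.0, 40.0],
})
ts = TimeSeriesDataFrame.from_data_frame(df)

# Group the target by item via the class attribute instead of the old module-level ITEMID constant.
per_item_mean = ts["target"].groupby(level=TimeSeriesDataFrame.ITEMID, sort=False).mean()
print(per_item_mean)  # one mean per item_id
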
autogluon/timeseries/models/__init__.py

@@ -1,5 +1,5 @@
  from .autogluon_tabular import DirectTabularModel, PerStepTabularModel, RecursiveTabularModel
- from .chronos import ChronosModel
+ from .chronos import Chronos2Model, ChronosModel
  from .gluonts import (
  DeepARModel,
  DLinearModel,
@@ -28,6 +28,7 @@ from .local import (
  ZeroModel,
  )
  from .registry import ModelRegistry
+ from .toto import TotoModel

  __all__ = [
  "ADIDAModel",
@@ -44,6 +45,7 @@ __all__ = [
  "ETSModel",
  "IMAPAModel",
  "ChronosModel",
+ "Chronos2Model",
  "ModelRegistry",
  "NPTSModel",
  "NaiveModel",
@@ -56,6 +58,7 @@ __all__ = [
  "TemporalFusionTransformerModel",
  "ThetaModel",
  "TiDEModel",
+ "TotoModel",
  "WaveNetModel",
  "ZeroModel",
  ]
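
The hunks above are from autogluon/timeseries/models/__init__.py, which now exports the new Chronos2Model and TotoModel (backed by chronos2.py and the toto/ package in the file list). A hedged sketch of opting into them through the predictor, assuming they register under the keys "Chronos2" and "Toto" (model names are derived by stripping the "Model" suffix, as the abstract model diff below shows); the synthetic dataset is illustrative:

import numpy as np
import pandas as pd
from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

# Tiny synthetic dataset: 2 items x 200 hourly observations.
df = pd.DataFrame({
    "item_id": np.repeat(["A", "B"], 200),
    "timestamp": np.tile(pd.date_range("2024-01-01", periods=200, freq="h"), 2),
    "target": np.random.default_rng(0).normal(size=400),
})
train_data = TimeSeriesDataFrame.from_data_frame(df)

# Assumed hyperparameter keys; empty dicts request each model with its default settings.
predictor = TimeSeriesPredictor(prediction_length=24).fit(
    train_data,
    hyperparameters={"Chronos2": {}, "Toto": {}},
)
print(predictor.leaderboard())
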
autogluon/timeseries/models/abstract/abstract_timeseries_model.py

@@ -4,7 +4,7 @@ import os
  import re
  import time
  from abc import ABC, abstractmethod
- from typing import Any, Optional, Sequence, Union
+ from typing import Any, Sequence

  import pandas as pd
  from typing_extensions import Self
@@ -75,15 +75,15 @@ class TimeSeriesModelBase(ModelBase, ABC):

  def __init__(
  self,
- path: Optional[str] = None,
- name: Optional[str] = None,
- hyperparameters: Optional[dict[str, Any]] = None,
- freq: Optional[str] = None,
+ path: str | None = None,
+ name: str | None = None,
+ hyperparameters: dict[str, Any] | None = None,
+ freq: str | None = None,
  prediction_length: int = 1,
- covariate_metadata: Optional[CovariateMetadata] = None,
+ covariate_metadata: CovariateMetadata | None = None,
  target: str = "target",
  quantile_levels: Sequence[float] = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9),
- eval_metric: Union[str, TimeSeriesScorer, None] = None,
+ eval_metric: str | TimeSeriesScorer | None = None,
  ):
  self.name = name or re.sub(r"Model$", "", self.__class__.__name__)

@@ -102,7 +102,7 @@ class TimeSeriesModelBase(ModelBase, ABC):
  self.target: str = target
  self.covariate_metadata = covariate_metadata or CovariateMetadata()

- self.freq: Optional[str] = freq
+ self.freq: str | None = freq
  self.prediction_length: int = prediction_length
  self.quantile_levels: list[float] = list(quantile_levels)

@@ -117,17 +117,21 @@ class TimeSeriesModelBase(ModelBase, ABC):
  else:
  self.must_drop_median = False

- self._oof_predictions: Optional[list[TimeSeriesDataFrame]] = None
+ self._oof_predictions: list[TimeSeriesDataFrame] | None = None

  # user provided hyperparameters and extra arguments that are used during model training
  self._hyperparameters, self._extra_ag_args = self._check_and_split_hyperparameters(hyperparameters)

- self.fit_time: Optional[float] = None # Time taken to fit in seconds (Training data)
- self.predict_time: Optional[float] = None # Time taken to predict in seconds (Validation data)
- self.predict_1_time: Optional[float] = (
- None # Time taken to predict 1 row of data in seconds (with batch size `predict_1_batch_size`)
- )
- self.val_score: Optional[float] = None # Score with eval_metric (Validation data)
+ # Time taken to fit in seconds (Training data)
+ self.fit_time: float | None = None
+ # Time taken to predict in seconds, for a single prediction horizon on validation data
+ self.predict_time: float | None = None
+ # Time taken to predict 1 row of data in seconds (with batch size `predict_1_batch_size`)
+ self.predict_1_time: float | None = None
+ # Useful for ensembles, additional prediction time excluding base models. None for base models.
+ self.predict_time_marginal: float | None = None
+ # Score with eval_metric on validation data
+ self.val_score: float | None = None

  def __repr__(self) -> str:
  return self.name
@@ -143,9 +147,14 @@ class TimeSeriesModelBase(ModelBase, ABC):
  self.path = path_context
  self.path_root = self.path.rsplit(self.name, 1)[0]

+ def cache_oof_predictions(self, predictions: TimeSeriesDataFrame | list[TimeSeriesDataFrame]) -> None:
+ if isinstance(predictions, TimeSeriesDataFrame):
+ predictions = [predictions]
+ self._oof_predictions = predictions
+
  @classmethod
  def _check_and_split_hyperparameters(
- cls, hyperparameters: Optional[dict[str, Any]] = None
+ cls, hyperparameters: dict[str, Any] | None = None
  ) -> tuple[dict[str, Any], dict[str, Any]]:
  """Given the user-specified hyperparameters, split into `hyperparameters` and `extra_ag_args`, intended
  to be used during model initialization.
@@ -180,7 +189,7 @@ class TimeSeriesModelBase(ModelBase, ABC):
  )
  return hyperparameters, extra_ag_args

- def save(self, path: Optional[str] = None, verbose: bool = True) -> str:
+ def save(self, path: str | None = None, verbose: bool = True) -> str:
  if path is None:
  path = self.path

@@ -242,9 +251,13 @@ class TimeSeriesModelBase(ModelBase, ABC):
  return {}

  def get_hyperparameters(self) -> dict:
- """Get hyperparameters that will be passed to the "inner model" that AutoGluon wraps."""
+ """Get dictionary of hyperparameters that will be passed to the "inner model" that AutoGluon wraps."""
  return {**self._get_default_hyperparameters(), **self._hyperparameters}

+ def get_hyperparameter(self, key: str) -> Any:
+ """Get a single hyperparameter value for the "inner model"."""
+ return self.get_hyperparameters()[key]
+
  def get_info(self) -> dict:
  """
  Returns a dictionary of numerous fields describing the model.
@@ -384,15 +397,15 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=

  def __init__(
  self,
- path: Optional[str] = None,
- name: Optional[str] = None,
- hyperparameters: Optional[dict[str, Any]] = None,
- freq: Optional[str] = None,
+ path: str | None = None,
+ name: str | None = None,
+ hyperparameters: dict[str, Any] | None = None,
+ freq: str | None = None,
  prediction_length: int = 1,
- covariate_metadata: Optional[CovariateMetadata] = None,
+ covariate_metadata: CovariateMetadata | None = None,
  target: str = "target",
  quantile_levels: Sequence[float] = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9),
- eval_metric: Union[str, TimeSeriesScorer, None] = None,
+ eval_metric: str | TimeSeriesScorer | None = None,
  ):
  # TODO: make freq a required argument in AbstractTimeSeriesModel
  super().__init__(
@@ -406,9 +419,9 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  quantile_levels=quantile_levels,
  eval_metric=eval_metric,
  )
- self.target_scaler: Optional[TargetScaler]
- self.covariate_scaler: Optional[CovariateScaler]
- self.covariate_regressor: Optional[CovariateRegressor]
+ self.target_scaler: TargetScaler | None
+ self.covariate_scaler: CovariateScaler | None
+ self.covariate_regressor: CovariateRegressor | None

  def _initialize_transforms_and_regressor(self) -> None:
  self.target_scaler = get_target_scaler(self.get_hyperparameters().get("target_scaler"), target=self.target)
@@ -433,8 +446,8 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  def fit(
  self,
  train_data: TimeSeriesDataFrame,
- val_data: Optional[TimeSeriesDataFrame] = None,
- time_limit: Optional[float] = None,
+ val_data: TimeSeriesDataFrame | None = None,
+ time_limit: float | None = None,
  verbosity: int = 2,
  **kwargs,
  ) -> Self:
@@ -527,10 +540,10 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  def _fit(
  self,
  train_data: TimeSeriesDataFrame,
- val_data: Optional[TimeSeriesDataFrame] = None,
- time_limit: Optional[float] = None,
- num_cpus: Optional[int] = None,
- num_gpus: Optional[int] = None,
+ val_data: TimeSeriesDataFrame | None = None,
+ time_limit: float | None = None,
+ num_cpus: int | None = None,
+ num_gpus: int | None = None,
  verbosity: int = 2,
  **kwargs,
  ) -> None:
@@ -551,7 +564,7 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  "as hyperparameters when initializing or use `hyperparameter_tune` instead."
  )

- def _log_unused_hyperparameters(self, extra_allowed_hyperparameters: Optional[list[str]] = None) -> None:
+ def _log_unused_hyperparameters(self, extra_allowed_hyperparameters: list[str] | None = None) -> None:
  """Log a warning if unused hyperparameters were provided to the model."""
  allowed_hyperparameters = self.allowed_hyperparameters
  if extra_allowed_hyperparameters is not None:
@@ -567,7 +580,7 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  def predict(
  self,
  data: TimeSeriesDataFrame,
- known_covariates: Optional[TimeSeriesDataFrame] = None,
+ known_covariates: TimeSeriesDataFrame | None = None,
  **kwargs,
  ) -> TimeSeriesDataFrame:
  """Given a dataset, predict the next `self.prediction_length` time steps.
@@ -648,14 +661,13 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  def _predict(
  self,
  data: TimeSeriesDataFrame,
- known_covariates: Optional[TimeSeriesDataFrame] = None,
+ known_covariates: TimeSeriesDataFrame | None = None,
  **kwargs,
  ) -> TimeSeriesDataFrame:
  """Private method for `predict`. See `predict` for documentation of arguments."""
  pass

  def _preprocess_time_limit(self, time_limit: float) -> float:
- original_time_limit = time_limit
  max_time_limit_ratio = self._extra_ag_args.get("max_time_limit_ratio", self.default_max_time_limit_ratio)
  max_time_limit = self._extra_ag_args.get("max_time_limit")

@@ -664,16 +676,6 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  if max_time_limit is not None:
  time_limit = min(time_limit, max_time_limit)

- if original_time_limit != time_limit:
- time_limit_og_str = f"{original_time_limit:.2f}s" if original_time_limit is not None else "None"
- time_limit_str = f"{time_limit:.2f}s" if time_limit is not None else "None"
- logger.debug(
- f"\tTime limit adjusted due to model hyperparameters: "
- f"{time_limit_og_str} -> {time_limit_str} "
- f"(ag.max_time_limit={max_time_limit}, "
- f"ag.max_time_limit_ratio={max_time_limit_ratio}"
- )
-
  return time_limit

  def _get_search_space(self):
@@ -731,7 +733,7 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
  )
  predict_start_time = time.time()
  oof_predictions = self.predict(past_data, known_covariates=known_covariates, **predict_kwargs)
- self._oof_predictions = [oof_predictions]
+ self.cache_oof_predictions(oof_predictions)
  if store_predict_time:
  self.predict_time = time.time() - predict_start_time
  if store_val_score:
@@ -740,9 +742,9 @@
  def preprocess(
  self,
  data: TimeSeriesDataFrame,
- known_covariates: Optional[TimeSeriesDataFrame] = None,
+ known_covariates: TimeSeriesDataFrame | None = None,
  is_train: bool = False,
  **kwargs,
- ) -> tuple[TimeSeriesDataFrame, Optional[TimeSeriesDataFrame]]:
+ ) -> tuple[TimeSeriesDataFrame, TimeSeriesDataFrame | None]:
  """Method that implements model-specific preprocessing logic."""
  return data, known_covariates
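
The hunks above are from autogluon/timeseries/models/abstract/abstract_timeseries_model.py. Besides the typing migration, the base model gains a predict_time_marginal attribute, cache_oof_predictions, and a get_hyperparameter accessor that simply indexes into the merged hyperparameter dict. A minimal sketch of the accessor on a concrete model; the target_scaler value is illustrative, but any user-supplied key can be read back the same way:

from autogluon.timeseries.models import NaiveModel

# User-supplied hyperparameters are merged over the model defaults, so the accessor returns
# the value that will actually be used; unknown keys raise KeyError.
model = NaiveModel(prediction_length=7, freq="D", hyperparameters={"target_scaler": "standard"})
print(model.get_hyperparameter("target_scaler"))  # -> standard
print(model.get_hyperparameters())                # full merged dict of defaults + user values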