autogluon.timeseries 1.1.2b20241119__py3-none-any.whl → 1.1.2b20241120__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
Files changed (22)
  1. autogluon/timeseries/configs/presets_configs.py +18 -0
  2. autogluon/timeseries/learner.py +3 -0
  3. autogluon/timeseries/models/abstract/abstract_timeseries_model.py +6 -0
  4. autogluon/timeseries/models/chronos/model.py +24 -52
  5. autogluon/timeseries/models/chronos/pipeline/base.py +1 -0
  6. autogluon/timeseries/models/chronos/pipeline/chronos.py +1 -0
  7. autogluon/timeseries/models/chronos/pipeline/utils.py +2 -1
  8. autogluon/timeseries/models/local/abstract_local_model.py +1 -14
  9. autogluon/timeseries/models/multi_window/multi_window_model.py +11 -3
  10. autogluon/timeseries/models/presets.py +25 -6
  11. autogluon/timeseries/predictor.py +1 -0
  12. autogluon/timeseries/trainer/abstract_trainer.py +16 -4
  13. autogluon/timeseries/version.py +1 -1
  14. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/METADATA +4 -4
  15. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/RECORD +22 -22
  16. /autogluon.timeseries-1.1.2b20241119-py3.8-nspkg.pth → /autogluon.timeseries-1.1.2b20241120-py3.8-nspkg.pth +0 -0
  17. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/LICENSE +0 -0
  18. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/NOTICE +0 -0
  19. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/WHEEL +0 -0
  20. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/namespace_packages.txt +0 -0
  21. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/top_level.txt +0 -0
  22. {autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/zip-safe +0 -0
autogluon/timeseries/configs/presets_configs.py

@@ -10,6 +10,24 @@ TIMESERIES_PRESETS_CONFIGS = dict(
     high_quality={"hyperparameters": "default"},
     medium_quality={"hyperparameters": "light"},
     fast_training={"hyperparameters": "very_light"},
+    # Chronos-Bolt models
+    bolt_tiny={
+        "hyperparameters": {"Chronos": {"model_path": "bolt-tiny"}},
+        "skip_model_selection": True,
+    },
+    bolt_mini={
+        "hyperparameters": {"Chronos": {"model_path": "bolt-mini"}},
+        "skip_model_selection": True,
+    },
+    bolt_small={
+        "hyperparameters": {"Chronos": {"model_path": "bolt-small"}},
+        "skip_model_selection": True,
+    },
+    bolt_base={
+        "hyperparameters": {"Chronos": {"model_path": "bolt-base"}},
+        "skip_model_selection": True,
+    },
+    # Original Chronos models
     chronos_tiny={
         "hyperparameters": {"Chronos": {"model_path": "tiny"}},
         "skip_model_selection": True,
autogluon/timeseries/learner.py

@@ -32,6 +32,7 @@ class TimeSeriesLearner(AbstractLearner):
         eval_metric_seasonal_period: Optional[int] = None,
         prediction_length: int = 1,
         cache_predictions: bool = True,
+        ensemble_model_type: Optional[Type] = None,
         **kwargs,
     ):
         super().__init__(path_context=path_context)
@@ -44,6 +45,7 @@ class TimeSeriesLearner(AbstractLearner):
         self.quantile_levels = kwargs.get("quantile_levels", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
         self.cache_predictions = cache_predictions
         self.freq: Optional[str] = None
+        self.ensemble_model_type = ensemble_model_type

         self.feature_generator = TimeSeriesFeatureGenerator(
             target=self.target, known_covariates_names=self.known_covariates_names
@@ -106,6 +108,7 @@ class TimeSeriesLearner(AbstractLearner):
                 val_splitter=val_splitter,
                 refit_every_n_windows=refit_every_n_windows,
                 cache_predictions=self.cache_predictions,
+                ensemble_model_type=self.ensemble_model_type,
             )
         )
         self.trainer = self.trainer_type(**trainer_init_kwargs)
autogluon/timeseries/models/abstract/abstract_timeseries_model.py

@@ -67,6 +67,7 @@ class AbstractTimeSeriesModel(AbstractModel):
     _oof_filename = "oof.pkl"
     # TODO: For which models should we override this parameter?
     _covariate_regressor_fit_time_fraction: float = 0.5
+    default_max_time_limit_ratio: float = 0.9

     # TODO: refactor "pruned" methods after AbstractModel is refactored
     predict_proba = None
@@ -174,6 +175,11 @@ class AbstractTimeSeriesModel(AbstractModel):
             self._oof_predictions = self.load_oof_predictions(self.path)
         return self._oof_predictions

+    def _get_default_auxiliary_params(self) -> dict:
+        default_auxiliary_params = super()._get_default_auxiliary_params()
+        default_auxiliary_params["max_time_limit_ratio"] = self.default_max_time_limit_ratio
+        return default_auxiliary_params
+
     def _initialize(self, **kwargs) -> None:
         self._init_params_aux()
         self._init_params()
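A note on the new `default_max_time_limit_ratio` / `_get_default_auxiliary_params` pair — this is my reading of the diff, not the library internals: the `max_time_limit_ratio` auxiliary parameter scales the caller's `time_limit` before `fit()`, reserving the remainder for prediction and overhead.

    # Sketch of the budgeting rule implied above (assumption: the trainer multiplies
    # the caller-provided time_limit by max_time_limit_ratio before calling fit()).
    def effective_fit_budget(time_limit: float, max_time_limit_ratio: float) -> float:
        return time_limit * max_time_limit_ratio

    # With the defaults in this diff: generic models keep 90% of the budget for
    # training, ChronosModel keeps 80% (see below), local and multi-window models 100%.
    assert effective_fit_budget(600.0, 0.9) == 540.0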
autogluon/timeseries/models/chronos/model.py

@@ -1,7 +1,6 @@
 import logging
 import os
 import shutil
-import time
 from pathlib import Path
 from typing import Any, Dict, Literal, Optional, Union

@@ -68,6 +67,7 @@ MODEL_ALIASES = {
     "small": "autogluon/chronos-t5-small",
     "base": "autogluon/chronos-t5-base",
     "large": "autogluon/chronos-t5-large",
+    "bolt-tiny": "autogluon/chronos-bolt-tiny",
     "bolt-mini": "autogluon/chronos-bolt-mini",
     "bolt-small": "autogluon/chronos-bolt-small",
     "bolt-base": "autogluon/chronos-bolt-base",
@@ -113,10 +113,12 @@ class ChronosModel(AbstractTimeSeriesModel):
     batch_size : int, default = 16
         Size of batches used during inference
     num_samples : int, default = 20
-        Number of samples used during inference
+        Number of samples used during inference, only used for the original Chronos models
     device : str, default = None
         Device to use for inference (and fine-tuning, if enabled). If None, model will use the GPU if available.
-        For larger model sizes `small`, `base`, and `large`; inference will fail if no GPU is available.
+        For larger Chronos model sizes ``small``, ``base``, and ``large``; inference will fail if no GPU is available.
+        For Chronos-Bolt models, inference can be done on the CPU. Although fine-tuning the smaller Chronos models
+        (``tiny`` and ``mini``) and all Chronos-Bolt is allowed on the CPU, we recommend using a GPU for faster fine-tuning.
     context_length : int or None, default = None
         The context length to use in the model. Shorter context lengths will decrease model accuracy, but result
         in faster inference. If None, the model will infer context length from the data set length at inference
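Per the updated docstring, Chronos-Bolt inference no longer requires a GPU. A short assumed-usage sketch:

    from autogluon.timeseries import TimeSeriesPredictor

    # train_data: an existing TimeSeriesDataFrame (see the earlier sketch)
    predictor = TimeSeriesPredictor(prediction_length=24)
    predictor.fit(
        train_data,
        hyperparameters={"Chronos": {"model_path": "bolt-small", "device": "cpu"}},
    )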
@@ -140,9 +142,9 @@ class ChronosModel(AbstractTimeSeriesModel):
         If True, the pretrained model will be fine-tuned
     fine_tune_lr: float, default = 0.0001
         The learning rate used for fine-tuning
-    fine_tune_steps : int, default = 5000
+    fine_tune_steps : int, default = 1000
         The number of gradient update steps to fine-tune for
-    fine_tune_batch_size : int, default = 16
+    fine_tune_batch_size : int, default = 32
         The batch size to use for fine-tuning
     fine_tune_shuffle_buffer_size : int, default = 10000
         The size of the shuffle buffer to shuffle the data during fine-tuning. If None, shuffling will
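The fine-tuning defaults drop from 5000 to 1000 steps and the batch size doubles to 32. Both remain overridable per model; an assumed-usage sketch:

    from autogluon.timeseries import TimeSeriesPredictor

    # train_data: an existing TimeSeriesDataFrame
    predictor = TimeSeriesPredictor(prediction_length=24)
    predictor.fit(
        train_data,
        hyperparameters={
            "Chronos": {
                "model_path": "bolt-small",
                "fine_tune": True,
                "fine_tune_steps": 1000,     # new default in this release
                "fine_tune_batch_size": 32,  # new default in this release
            }
        },
    )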
@@ -162,6 +164,7 @@ class ChronosModel(AbstractTimeSeriesModel):
     # default number of samples for prediction
     default_num_samples: int = 20
     default_model_path = "autogluon/chronos-t5-small"
+    default_max_time_limit_ratio = 0.8
     maximum_context_length = 2048
     fine_tuned_ckpt_name: str = "fine-tuned-ckpt"

@@ -216,7 +219,6 @@ class ChronosModel(AbstractTimeSeriesModel):
         )

         self.model_pipeline: Optional[Any] = None  # of type BaseChronosPipeline
-        self.time_limit: Optional[float] = None

     def save(self, path: str = None, verbose: bool = True) -> str:
         pipeline = self.model_pipeline
@@ -234,7 +236,7 @@ class ChronosModel(AbstractTimeSeriesModel):

         fine_tune_ckpt_path = Path(model.path) / cls.fine_tuned_ckpt_name
         if fine_tune_ckpt_path.exists():
-            logger.debug(f"Fine-tuned checkpoint exists, setting model_path to {fine_tune_ckpt_path}")
+            logger.debug(f"\tFine-tuned checkpoint exists, setting model_path to {fine_tune_ckpt_path}")
             model.model_path = fine_tune_ckpt_path

         return model
@@ -320,8 +322,8 @@ class ChronosModel(AbstractTimeSeriesModel):
         init_args.setdefault("fine_tune", False)
         init_args.setdefault("keep_transformers_logs", False)
         init_args.setdefault("fine_tune_lr", 1e-4)
-        init_args.setdefault("fine_tune_steps", 5000)
-        init_args.setdefault("fine_tune_batch_size", self.default_batch_size)
+        init_args.setdefault("fine_tune_steps", 1000)
+        init_args.setdefault("fine_tune_batch_size", 32)
         init_args.setdefault("eval_during_fine_tune", False)
         init_args.setdefault("fine_tune_eval_max_items", 256)
         init_args.setdefault("fine_tune_shuffle_buffer_size", 10_000)
@@ -399,7 +401,6 @@ class ChronosModel(AbstractTimeSeriesModel):

         eval_during_fine_tune = val_data is not None and fine_tune_args["eval_during_fine_tune"]

-        start_time = time.monotonic()
         if do_fine_tune:
             context_length = self._get_context_length(train_data)
             # load model pipeline to device memory
@@ -428,7 +429,7 @@ class ChronosModel(AbstractTimeSeriesModel):

             if self.prediction_length != fine_tune_prediction_length:
                 logger.debug(
-                    f"ChronosBolt models can only be fine-tuned with a maximum prediction_length of {model_prediction_length}. "
+                    f"\tChronosBolt models can only be fine-tuned with a maximum prediction_length of {model_prediction_length}. "
                     f"Fine-tuning prediction_length has been changed to {fine_tune_prediction_length}."
                 )

@@ -436,10 +437,15 @@ class ChronosModel(AbstractTimeSeriesModel):
             fine_tune_trainer_kwargs["disable_tqdm"] = fine_tune_trainer_kwargs.get("disable_tqdm", (verbosity < 3))
             fine_tune_trainer_kwargs["use_cpu"] = str(self.model_pipeline.inner_model.device) == "cpu"

-            # TODO: adamw_torch_fused is not supported on CPU in torch <= 2.3. When torch 2.4 becomes the lower bound
-            # this if block can be removed because torch >= 2.4 supports AdamW optimizer with fused=True on CPU
-            if fine_tune_trainer_kwargs["use_cpu"] and fine_tune_trainer_kwargs["optim"] == "adamw_torch_fused":
-                fine_tune_trainer_kwargs["optim"] = "adamw_torch"
+            if fine_tune_trainer_kwargs["use_cpu"]:
+                logger.info(
+                    "\tFine-tuning on the CPU detected. We recommend using a GPU for faster fine-tuning of Chronos."
+                )
+
+                # TODO: adamw_torch_fused is not supported on CPU in torch <= 2.3. When torch 2.4 becomes the lower bound
+                # this if block can be removed because torch >= 2.4 supports AdamW optimizer with fused=True on CPU
+                if fine_tune_trainer_kwargs["optim"] == "adamw_torch_fused":
+                    fine_tune_trainer_kwargs["optim"] = "adamw_torch"

             output_dir = Path(fine_tune_trainer_kwargs["output_dir"])

@@ -509,37 +515,16 @@ class ChronosModel(AbstractTimeSeriesModel):
             )
             trainer.add_callback(LoggerCallback())

-            if val_data is not None:
-                # evaluate once before training
-                zero_shot_eval_loss = trainer.evaluate()["eval_loss"]
-
             trainer.train()

-            if eval_during_fine_tune:
-                # get the best eval_loss logged during fine-tuning
-                log_history_df = pd.DataFrame(trainer.state.log_history)
-                best_train_eval_loss = log_history_df["eval_loss"].min()
-            elif val_data is not None:
-                # evaluate at the end of fine-tuning
-                best_train_eval_loss = trainer.evaluate()["eval_loss"]
-
-            if val_data is None or best_train_eval_loss <= zero_shot_eval_loss:
-                fine_tuned_ckpt_path = Path(self.path) / self.fine_tuned_ckpt_name
-                logger.info(f"Saving fine-tuned model to {fine_tuned_ckpt_path}")
-                self.model_pipeline.inner_model.save_pretrained(Path(self.path) / self.fine_tuned_ckpt_name)
-            else:
-                # Reset the model to its pretrained state
-                logger.info("Validation loss worsened after fine-tuning. Reverting to the pretrained model.")
-                self.model_pipeline = None
-                self.load_model_pipeline(is_training=False)
+            fine_tuned_ckpt_path = Path(self.path) / self.fine_tuned_ckpt_name
+            logger.info(f"\tSaving fine-tuned model to {fine_tuned_ckpt_path}")
+            self.model_pipeline.inner_model.save_pretrained(Path(self.path) / self.fine_tuned_ckpt_name)

             if not fine_tune_args["keep_transformers_logs"]:
                 logger.debug(f"Removing transformers_logs directory {output_dir}")
                 shutil.rmtree(output_dir)

-        if time_limit is not None:
-            self.time_limit = time_limit - (time.monotonic() - start_time)  # inference time budget
-
     def _get_inference_data_loader(
         self,
         data: TimeSeriesDataFrame,
@@ -635,16 +620,3 @@ class ChronosModel(AbstractTimeSeriesModel):
             "can_use_train_data": do_fine_tune,
             "can_use_val_data": do_fine_tune,
         }
-
-    def score_and_cache_oof(
-        self,
-        val_data: TimeSeriesDataFrame,
-        store_val_score: bool = False,
-        store_predict_time: bool = False,
-        **predict_kwargs,
-    ) -> None:
-        # All computation happens during inference, so we provide the time_limit at prediction time
-        # TODO: Once custom predict_kwargs is allowed, make sure that `time_limit` is not among the keys
-        super().score_and_cache_oof(
-            val_data, store_val_score, store_predict_time, time_limit=self.time_limit, **predict_kwargs
-        )
autogluon/timeseries/models/chronos/pipeline/base.py

@@ -135,6 +135,7 @@ class BaseChronosPipeline(metaclass=PipelineRegistry):
         """
         from transformers import AutoConfig

+        kwargs.setdefault("resume_download", None)  # silence huggingface_hub warning
         if str(pretrained_model_name_or_path).startswith("s3://"):
             from .utils import cache_model_from_s3

autogluon/timeseries/models/chronos/pipeline/chronos.py

@@ -546,6 +546,7 @@ class ChronosPipeline(BaseChronosPipeline):
             "onnx",
             "openvino",
         ], "optimization_strategy not recognized. Please provide one of `onnx` or `openvino`"
+        kwargs.pop("resume_download", None)  # Optimized pipeline does not support 'resume_download' kwargs
         torch_dtype = kwargs.pop("torch_dtype", "auto")
         if torch_dtype != "auto":
             logger.warning(f"\t`torch_dtype` will be ignored for optimization_strategy {optimization_strategy}")
autogluon/timeseries/models/chronos/pipeline/utils.py

@@ -317,7 +317,8 @@ class TimeLimitCallback(TrainerCallback):
     def on_step_end(self, args, state, control, **kwargs):
         elapsed_time = time.monotonic() - self.start_time
         if elapsed_time > self.time_limit:
-            raise TimeLimitExceeded
+            logger.info("\tStopping fine-tuning since time_limit is reached")
+            control.should_training_stop = True


 class LoggerCallback(TrainerCallback):
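The callback now requests a cooperative stop instead of raising, so a fine-tuning run that hits its budget still yields a usable checkpoint. A self-contained sketch of the pattern (mirroring, not reproducing, the package's `TimeLimitCallback`):

    import time

    from transformers import TrainerCallback

    class GracefulTimeLimit(TrainerCallback):  # hypothetical name
        """Ask the Trainer to stop once a wall-clock budget is spent."""

        def __init__(self, time_limit: float):
            self.time_limit = time_limit
            self.start_time = None

        def on_train_begin(self, args, state, control, **kwargs):
            self.start_time = time.monotonic()

        def on_step_end(self, args, state, control, **kwargs):
            if time.monotonic() - self.start_time > self.time_limit:
                # Trainer finishes the current step, then exits train() normally.
                control.should_training_stop = True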
autogluon/timeseries/models/local/abstract_local_model.py

@@ -44,6 +44,7 @@ class AbstractLocalModel(AbstractTimeSeriesModel):
     allowed_local_model_args: List[str] = []
     default_n_jobs: Union[int, float] = AG_DEFAULT_N_JOBS
     default_max_ts_length: Optional[int] = 2500
+    default_max_time_limit_ratio = 1.0
     init_time_in_seconds: int = 0

     def __init__(
@@ -84,7 +85,6 @@ class AbstractLocalModel(AbstractTimeSeriesModel):

         self._local_model_args: Dict[str, Any] = None
         self._seasonal_period: Optional[int] = None
-        self.time_limit: Optional[float] = None
         self._dummy_forecast: Optional[pd.DataFrame] = None

     @property
@@ -138,7 +138,6 @@ class AbstractLocalModel(AbstractTimeSeriesModel):
         self._seasonal_period = local_model_args["seasonal_period"]

         self._local_model_args = self._update_local_model_args(local_model_args=local_model_args)
-        self.time_limit = time_limit

         self._dummy_forecast = self._get_dummy_forecast(train_data)
         return self
@@ -187,18 +186,6 @@ class AbstractLocalModel(AbstractTimeSeriesModel):
         predictions_df.index = get_forecast_horizon_index_ts_dataframe(data, self.prediction_length, freq=self.freq)
         return TimeSeriesDataFrame(predictions_df)

-    def score_and_cache_oof(
-        self,
-        val_data: TimeSeriesDataFrame,
-        store_val_score: bool = False,
-        store_predict_time: bool = False,
-        **predict_kwargs,
-    ) -> None:
-        # All computation happens during inference, so we provide the time_limit at prediction time
-        super().score_and_cache_oof(
-            val_data, store_val_score, store_predict_time, time_limit=self.time_limit, **predict_kwargs
-        )
-
     def _predict_wrapper(self, time_series: pd.Series, end_time: Optional[float] = None) -> Tuple[pd.DataFrame, bool]:
         if end_time is not None and time.time() >= end_time:
             raise TimeLimitExceeded
autogluon/timeseries/models/multi_window/multi_window_model.py

@@ -33,6 +33,7 @@ class MultiWindowBacktestingModel(AbstractTimeSeriesModel):
     """

     # TODO: Remove the MultiWindowBacktestingModel class, move the logic to AbstractTimeSeriesTrainer
+    default_max_time_limit_ratio = 1.0

     def __init__(
         self,
@@ -124,8 +125,7 @@ class MultiWindowBacktestingModel(AbstractTimeSeriesModel):
                 num_refits_remaining = math.ceil(
                     (val_splitter.num_val_windows - window_index) / refit_every_n_windows
                 )
-                # Reserve 10% of the remaining time for prediction, use 90% of time for training
-                time_left_for_window = 0.9 * time_left / num_refits_remaining
+                time_left_for_window = time_left / num_refits_remaining

             if refit_this_window:
                 model = self.get_child_model(window_index)
@@ -138,7 +138,15 @@ class MultiWindowBacktestingModel(AbstractTimeSeriesModel):
                 )
                 model.fit_time = time.time() - model_fit_start_time
                 most_recent_refit_window = f"W{window_index}"
-            model.score_and_cache_oof(val_fold, store_val_score=True, store_predict_time=True)
+
+            if time_limit is None:
+                time_left_for_prediction = None
+            else:
+                time_left_for_prediction = time_limit - (time.time() - global_fit_start_time)
+
+            model.score_and_cache_oof(
+                val_fold, store_val_score=True, store_predict_time=True, time_limit=time_left_for_prediction
+            )

             oof_predictions_per_window.append(model.get_oof_predictions()[0])

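Two budgeting changes land together here: the flat 10% prediction reserve per window is gone, and scoring now receives whatever remains of the global `time_limit`. A sketch of the revised per-window arithmetic (variable names mirror the diff):

    import math

    def window_fit_budget(time_left: float, num_val_windows: int, window_index: int,
                          refit_every_n_windows: int = 1) -> float:
        # Evenly split the remaining time across the refits still to come
        # (previously this was multiplied by 0.9 to reserve prediction time).
        num_refits_remaining = math.ceil((num_val_windows - window_index) / refit_every_n_windows)
        return time_left / num_refits_remaining

    # e.g. 300 s left and 3 windows remaining -> 100 s for this refit
    assert window_fit_budget(300.0, num_val_windows=3, window_index=0) == 100.0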
autogluon/timeseries/models/presets.py

@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Type, Union
 from autogluon.common import space
 from autogluon.core import constants
 from autogluon.timeseries.metrics import TimeSeriesScorer
+from autogluon.timeseries.utils.features import CovariateMetadata

 from . import (
     ADIDAModel,
@@ -133,6 +134,7 @@ def get_default_hps(key):
             "RecursiveTabular": {},
             "DirectTabular": {},
             "TemporalFusionTransformer": {},
+            "Chronos": {"model_path": "bolt-small"},
         },
         "light_inference": {
             "SeasonalNaive": {},
@@ -145,18 +147,33 @@ def get_default_hps(key):
             "SeasonalNaive": {},
             "Croston": {},
             "AutoETS": {},
-            "AutoARIMA": {},
             "NPTS": {},
             "DynamicOptimizedTheta": {},
-            # TODO: Define separate model for each tabular submodel?
-            "RecursiveTabular": {
-                "tabular_hyperparameters": {"NN_TORCH": {"proc.impute_strategy": "constant"}, "GBM": {}},
-            },
+            "RecursiveTabular": {},
             "DirectTabular": {},
             "TemporalFusionTransformer": {},
             "PatchTST": {},
             "DeepAR": {},
-            "Chronos": {"model_path": "base"},
+            "Chronos": [
+                {
+                    "ag_args": {"name_suffix": "ZeroShot"},
+                    "model_path": "bolt-base",
+                },
+                {
+                    "ag_args": {"name_suffix": "FineTuned"},
+                    "model_path": "bolt-small",
+                    "fine_tune": True,
+                    "target_scaler": "standard",
+                    "covariate_regressor": {"model_name": "CAT", "model_hyperparameters": {"iterations": 1_000}},
+                },
+            ],
+            "TiDE": {
+                "encoder_hidden_dim": 256,
+                "decoder_hidden_dim": 256,
+                "temporal_hidden_dim": 64,
+                "num_batches_per_epoch": 100,
+                "lr": 1e-4,
+            },
         },
     }
     return default_model_hps[key]
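The hunk above registers two Chronos variants under a single key; the same list-of-dicts form works directly in user code. A sketch of assumed usage:

    from autogluon.timeseries import TimeSeriesPredictor

    predictor = TimeSeriesPredictor(prediction_length=24)
    predictor.fit(
        train_data,  # an existing TimeSeriesDataFrame
        hyperparameters={
            "Chronos": [
                {"ag_args": {"name_suffix": "ZeroShot"}, "model_path": "bolt-base"},
                {"ag_args": {"name_suffix": "FineTuned"}, "model_path": "bolt-small", "fine_tune": True},
            ]
        },
    )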
@@ -170,6 +187,7 @@ def get_preset_models(
     eval_metric_seasonal_period: Optional[int],
     hyperparameters: Union[str, Dict, None],
     hyperparameter_tune: bool,
+    metadata: CovariateMetadata,
     all_assigned_names: List[str],
     excluded_model_types: List[str],
     multi_window: bool = False,
@@ -247,6 +265,7 @@ def get_preset_models(
             prediction_length=prediction_length,
             eval_metric=eval_metric,
             eval_metric_seasonal_period=eval_metric_seasonal_period,
+            metadata=metadata,
             hyperparameters=model_hps,
             **kwargs,
         )
autogluon/timeseries/predictor.py

@@ -217,6 +217,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
                 prediction_length=self.prediction_length,
                 quantile_levels=self.quantile_levels,
                 cache_predictions=self.cache_predictions,
+                ensemble_model_type=kwargs.pop("ensemble_model_type", None),
             )
         )
         # Using `TimeSeriesLearner` as default argument breaks doc generation with Sphnix
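This completes the plumbing started in learner.py and abstract_trainer.py: an `ensemble_model_type` passed to the predictor reaches the trainer, which warns that the hook is experimental. A hedged sketch (`MedianEnsemble` is hypothetical, and the base-class import path is an assumption based on the module layout in the RECORD below):

    from autogluon.timeseries import TimeSeriesPredictor
    from autogluon.timeseries.models.ensemble.abstract_timeseries_ensemble import (
        AbstractTimeSeriesEnsembleModel,  # assumed class name/path
    )

    class MedianEnsemble(AbstractTimeSeriesEnsembleModel):
        ...  # hypothetical: implement the abstract fit/predict interface here

    # The trainer logs a warning that custom ensemble types may break in future versions.
    predictor = TimeSeriesPredictor(prediction_length=24, ensemble_model_type=MedianEnsemble)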
autogluon/timeseries/trainer/abstract_trainer.py

@@ -29,7 +29,7 @@ from autogluon.timeseries.utils.features import (
     CovariateMetadata,
     PermutationFeatureImportanceTransform,
 )
-from autogluon.timeseries.utils.warning_filters import disable_tqdm
+from autogluon.timeseries.utils.warning_filters import disable_tqdm, warning_filter

 logger = logging.getLogger("autogluon.timeseries.trainer")

@@ -264,6 +264,7 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         val_splitter: Optional[AbstractWindowSplitter] = None,
         refit_every_n_windows: Optional[int] = 1,
         cache_predictions: bool = True,
+        ensemble_model_type: Optional[Type] = None,
         **kwargs,
     ):
         super().__init__(path=path, save_data=save_data, low_memory=True, **kwargs)
@@ -276,7 +277,13 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         self.skip_model_selection = skip_model_selection
         # Ensemble cannot be fit if val_scores are not computed
         self.enable_ensemble = enable_ensemble and not skip_model_selection
-        self.ensemble_model_type = TimeSeriesGreedyEnsemble
+        if ensemble_model_type is None:
+            ensemble_model_type = TimeSeriesGreedyEnsemble
+        else:
+            logger.warning(
+                "Using a custom `ensemble_model_type` is experimental functionality that may break in future versions."
+            )
+        self.ensemble_model_type = ensemble_model_type

         self.verbosity = verbosity

@@ -519,8 +526,12 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
             fit_end_time = time.time()
             model.fit_time = model.fit_time or (fit_end_time - fit_start_time)

+            if time_limit is not None:
+                time_limit = time_limit - (fit_end_time - fit_start_time)
             if val_data is not None and not self.skip_model_selection:
-                model.score_and_cache_oof(val_data, store_val_score=True, store_predict_time=True)
+                model.score_and_cache_oof(
+                    val_data, store_val_score=True, store_predict_time=True, time_limit=time_limit
+                )

             self._log_scores_and_times(model.val_score, model.fit_time, model.predict_time)

@@ -736,7 +747,8 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
             quantile_levels=self.quantile_levels,
             metadata=self.metadata,
         )
-        ensemble.fit_ensemble(model_preds, data_per_window=data_per_window, time_limit=time_limit)
+        with warning_filter():
+            ensemble.fit_ensemble(model_preds, data_per_window=data_per_window, time_limit=time_limit)
         ensemble.fit_time = time.time() - time_start

         predict_time = 0
autogluon/timeseries/version.py

@@ -1,3 +1,3 @@
 """This is the autogluon version file."""
-__version__ = '1.1.2b20241119'
+__version__ = '1.1.2b20241120'
 __lite__ = False
{autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.1.2b20241119
+Version: 1.1.2b20241120
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -53,9 +53,9 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.1.2b20241119
-Requires-Dist: autogluon.common==1.1.2b20241119
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.1.2b20241119
+Requires-Dist: autogluon.core[raytune]==1.1.2b20241120
+Requires-Dist: autogluon.common==1.1.2b20241120
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.1.2b20241120
 Provides-Extra: all
 Requires-Dist: optimum[onnxruntime]<1.20,>=1.17; extra == "all"
 Provides-Extra: chronos-onnx
{autogluon.timeseries-1.1.2b20241119.dist-info → autogluon.timeseries-1.1.2b20241120.dist-info}/RECORD

@@ -1,13 +1,13 @@
-autogluon.timeseries-1.1.2b20241119-py3.8-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
+autogluon.timeseries-1.1.2b20241120-py3.8-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
-autogluon/timeseries/learner.py,sha256=3dUxI-U6TGfNtRQUzWTvBIo1GKeXYOhxIX_q7Fed9eA,14013
-autogluon/timeseries/predictor.py,sha256=R9m-TYmlA4WoJbdYEL_AnEM26EhRIclynOfSmpO7mBk,84926
+autogluon/timeseries/learner.py,sha256=mFnBC750C5PqgkkYNYni9oYQ5a6K8pXSsDLRDXuA7DI,14182
+autogluon/timeseries/predictor.py,sha256=EsJAkzlEkCFxYO55BTfjlaJPNwQwnS6yyj5nXjMr3XQ,85003
 autogluon/timeseries/regressor.py,sha256=tqQ2zWImxpuEyaAM0DeCjOZ-xcWUYZbCXsqd471xXxQ,8351
 autogluon/timeseries/splitter.py,sha256=eghGwAAN2_cxGk5aJBILgjGWtLzjxJcytMy49gg_q18,3061
-autogluon/timeseries/version.py,sha256=0242cKF96lnZcXGbtRmKZUi3yVY1-rAip2PtRhF89WQ,90
+autogluon/timeseries/version.py,sha256=t6TdlyVVjsI-etjHY5tqRoGECcEKxWQwAjz-UboJ0sE,90
 autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
-autogluon/timeseries/configs/presets_configs.py,sha256=94-yL9teDHKs2irWjP3kpewI7FE1ChYCgEgz9XHJ6gc,1965
+autogluon/timeseries/configs/presets_configs.py,sha256=k5RRP0DQMa2Xq2oWAbRKouWUH8xUuBx2tIWk-we6_I8,2543
 autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
 autogluon/timeseries/dataset/ts_dataframe.py,sha256=9bJQeg3HkPeVnyxzwqAJiTJGYXths7vxUV_3-OsJ6pk,48640
 autogluon/timeseries/metrics/__init__.py,sha256=LLGmYaexsx7CregV-QaHc5exjZbsJfBSVOtxHRGC0ho,2139
@@ -16,21 +16,21 @@ autogluon/timeseries/metrics/point.py,sha256=b19Ed4dS_ROdkrOZIik_Q3-8deCN9IQSZXt
 autogluon/timeseries/metrics/quantile.py,sha256=eemdLbo3y2wstnVkuA-f55YXywctUmSW1EhIW4BsoH4,3965
 autogluon/timeseries/metrics/utils.py,sha256=HuDe1BNe8yJU4f_DKM913nNrUueoRaw6zhxm1-S20s0,910
 autogluon/timeseries/models/__init__.py,sha256=MYD9JJ-wUDE5B6jW6E6LU2eXQ6vflfQBvqQJkdzJa3A,1189
-autogluon/timeseries/models/presets.py,sha256=ujNt_hft_5eNkh-Wj_Na9GBdBmI-JdnBnOEHq8X0qXc,11778
+autogluon/timeseries/models/presets.py,sha256=toZePXy1UYKIoQFQnXUIdhZuq7DBNMyfsYU_FwV94Nk,12473
 autogluon/timeseries/models/abstract/__init__.py,sha256=wvDsQAZIV0N3AwBeMaGItoQ82trEfnT-nol2AAOIxBg,102
-autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=B1R0PBymUAwhIGkPIt29X-J9o9Ipdu-bQR0gK-nmcRU,30320
+autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=82lg2odAxzyhM3dkV3Msqv8AeYLILnnk-UvNnYobzFA,30628
 autogluon/timeseries/models/abstract/model_trial.py,sha256=ENPg_7nsdxIvaNM0o0UShZ3x8jFlRmwRc5m0fGPC0TM,3720
 autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=r9i6jWcyeLHYClkcMSKRVsfrkBUMxpDrTATNTBc_qgQ,136
 autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=vfWXLdxYlbzjKJa1qrFN-qzxgG2tiWxOSBOnmnNVxtA,33295
 autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=XVoy8KpvoeX38lHHAXq4Be9LCxKjxZ36SOFeSAICRFM,2524
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
-autogluon/timeseries/models/chronos/model.py,sha256=pkT-V6yYCxz6TTgn1mQ5QHNTdpyn_wyj7jM80vnrDFQ,30270
+autogluon/timeseries/models/chronos/model.py,sha256=krsF5Fu-Q_FYTW-WbeXX4--s1ZH5kY-LlxvZg2jvfT0,29085
 autogluon/timeseries/models/chronos/pipeline/__init__.py,sha256=N-YZH9BGBoi99r5cznJe1zEEjwjIg7cOYIHZkKuJq44,247
-autogluon/timeseries/models/chronos/pipeline/base.py,sha256=aAXCKy7Jmip4BI2UdPMoPe2gdDMbJHKxEolcTx_5SYQ,5463
-autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=one9UwXja042JeXYSdenpCoIQ2DWZKpKiivnom5Epbw,22046
+autogluon/timeseries/models/chronos/pipeline/base.py,sha256=HlWQTS5q7UMzwbA5Pmg_N01AxuGfTf2tP5xq2jgavqI,5549
+autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=doAaWbrfNilkP9ORtjDnL-1S5ge4sOKhzGN-mgsY2bM,22158
 autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=2MJuik-YFgONZ3X2DciAph5So6ABys5ppQhBC81gLyk,20083
-autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=_P_9m9Wl4FC2QyoKLluT4l7FLmZU2xw6G4xNcUpZE4k,13043
+autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=hw4QbRy81V9Jkndda9p_KgO9CrtYKbNq5kp0r8m0Rek,13133
 autogluon/timeseries/models/ensemble/__init__.py,sha256=kFr11Gmt7lQJu9Rr8HuIPphQN5l1TsoorfbJm_O3a_s,128
 autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py,sha256=tifETwmiEGt-YtQ9eNK7ojJ3fBvtFMUJvisbfkIJ7gw,3393
 autogluon/timeseries/models/ensemble/greedy_ensemble.py,sha256=5HvZuW5osgsZg3V69k82nKEOy_YgeH1JTfQa7F3cU7s,7220
@@ -39,14 +39,14 @@ autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=M4vGs5oNZqr_ebE4U
 autogluon/timeseries/models/gluonts/torch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/timeseries/models/gluonts/torch/models.py,sha256=Pu7f43jr1C5S3k_bVqRB8ENuBHNEWT4ssUTdZoA1J58,25556
 autogluon/timeseries/models/local/__init__.py,sha256=e2UImoJhmj70E148IIObv90C_bHxgyLNk6YsS4p7pfs,701
-autogluon/timeseries/models/local/abstract_local_model.py,sha256=WBBJp2h2UMUVuguTrUmJOI21neil-ZnGGZDy-zJPD2M,12592
+autogluon/timeseries/models/local/abstract_local_model.py,sha256=5WI-TApbWi01bsQghnQaPH0SXp5SwV5VAPBwmbCTbeU,12090
 autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F_VAp6-jb_5SUE,7249
 autogluon/timeseries/models/local/npts.py,sha256=Bp74doKnfpGE8ywP4FWOCI_RwRMsmgocYDfGtq764DA,4143
 autogluon/timeseries/models/local/statsforecast.py,sha256=cFJ_A7LR2jTmFNGgMxt3xvEivQVYuV6bDCMii8-TKH0,32424
 autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
-autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=mTxqPBsN0Ri2c56MYZU6pMF9dDFyEl5eHmhqHPe3ouw,11596
+autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=2o_wKNCNP570c0UuNufPhEDYOWQ2mFYwmmHUZzPneRI,11792
 autogluon/timeseries/trainer/__init__.py,sha256=lxiOT-Gc6BEnr_yWQqra85kEngeM_wtH2SCaRbmC_qE,170
-autogluon/timeseries/trainer/abstract_trainer.py,sha256=zCzzVO8yssqSyteMoUkDT5zWi44Oc91pRWiRdQR1We8,60521
+autogluon/timeseries/trainer/abstract_trainer.py,sha256=3DEr4hATu2zNzkuzwO5UeqS25Ak4k5w4OFAtCmYqmAc,61053
 autogluon/timeseries/trainer/auto_trainer.py,sha256=psJFZBwWWPlLjNwAgvO4OUJXsRW1sTN2YS9a4pdoeoE,3344
 autogluon/timeseries/transforms/__init__.py,sha256=Stym_998LZQgKPuFN4_w1AcJFh4_AeaQLXgXLzv53kY,299
 autogluon/timeseries/transforms/covariate_scaler.py,sha256=iscshgfNTCn379Q73BJXyDUFFm1WRclzKdG2MIPTOEc,6587
@@ -60,11 +60,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=GoLtvcZ8oKb3QkoBJ9E59LSPLOP7Qjxrr2UmMSZgjyw,5909
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=h_4w00iEytAz_N_EpCENQ8RCXy7KQITczrYjBgVqWkQ,764
 autogluon/timeseries/utils/datetime/time_features.py,sha256=PAXbYbQ0z_5GFbkxSNi41zLY_2-U3x0Ynm1m_WhdtGc,2572
-autogluon.timeseries-1.1.2b20241119.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
-autogluon.timeseries-1.1.2b20241119.dist-info/METADATA,sha256=I9IrNzlXQskc4xcwgmngBRT6iIDZBdmd0rvXxOWaXcQ,12388
-autogluon.timeseries-1.1.2b20241119.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
-autogluon.timeseries-1.1.2b20241119.dist-info/WHEEL,sha256=bFJAMchF8aTQGUgMZzHJyDDMPTO3ToJ7x23SLJa1SVo,92
-autogluon.timeseries-1.1.2b20241119.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
-autogluon.timeseries-1.1.2b20241119.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
-autogluon.timeseries-1.1.2b20241119.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-autogluon.timeseries-1.1.2b20241119.dist-info/RECORD,,
+autogluon.timeseries-1.1.2b20241120.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.1.2b20241120.dist-info/METADATA,sha256=gf3xf53SAhlU4E2tfn_mQ9Fj5aclpX1ZXczq6_6ABRk,12388
+autogluon.timeseries-1.1.2b20241120.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.1.2b20241120.dist-info/WHEEL,sha256=bFJAMchF8aTQGUgMZzHJyDDMPTO3ToJ7x23SLJa1SVo,92
+autogluon.timeseries-1.1.2b20241120.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.1.2b20241120.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.1.2b20241120.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.1.2b20241120.dist-info/RECORD,,