autogluon.timeseries 1.0.1b20240325.tar.gz → 1.0.1b20240326.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of autogluon.timeseries might be problematic.

Files changed (62)
  1. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/PKG-INFO +1 -1
  2. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/configs/presets_configs.py +6 -0
  3. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/learner.py +1 -0
  4. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/predictor.py +11 -3
  5. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/trainer/abstract_trainer.py +20 -3
  6. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/trainer/auto_trainer.py +2 -1
  7. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/version.py +1 -1
  8. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/PKG-INFO +1 -1
  9. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/requires.txt +4 -4
  10. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/setup.cfg +0 -0
  11. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/setup.py +0 -0
  12. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/__init__.py +0 -0
  13. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/configs/__init__.py +0 -0
  14. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/dataset/__init__.py +0 -0
  15. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/dataset/ts_dataframe.py +0 -0
  16. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/evaluator.py +0 -0
  17. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/metrics/__init__.py +0 -0
  18. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/metrics/abstract.py +0 -0
  19. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/metrics/point.py +0 -0
  20. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/metrics/quantile.py +0 -0
  21. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/metrics/utils.py +0 -0
  22. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/__init__.py +0 -0
  23. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/abstract/__init__.py +0 -0
  24. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/abstract/abstract_timeseries_model.py +0 -0
  25. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/abstract/model_trial.py +0 -0
  26. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/autogluon_tabular/__init__.py +0 -0
  27. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/autogluon_tabular/mlforecast.py +0 -0
  28. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/autogluon_tabular/utils.py +0 -0
  29. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/chronos/__init__.py +0 -0
  30. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/chronos/chronos.py +0 -0
  31. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/chronos/model.py +0 -0
  32. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/ensemble/__init__.py +0 -0
  33. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py +0 -0
  34. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/ensemble/greedy_ensemble.py +0 -0
  35. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/gluonts/__init__.py +0 -0
  36. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/gluonts/abstract_gluonts.py +0 -0
  37. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/gluonts/torch/__init__.py +0 -0
  38. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/gluonts/torch/models.py +0 -0
  39. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/local/__init__.py +0 -0
  40. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/local/abstract_local_model.py +0 -0
  41. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/local/naive.py +0 -0
  42. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/local/npts.py +0 -0
  43. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/local/statsforecast.py +0 -0
  44. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/multi_window/__init__.py +0 -0
  45. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/multi_window/multi_window_model.py +0 -0
  46. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/models/presets.py +0 -0
  47. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/splitter.py +0 -0
  48. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/trainer/__init__.py +0 -0
  49. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/__init__.py +0 -0
  50. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/datetime/__init__.py +0 -0
  51. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/datetime/base.py +0 -0
  52. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/datetime/lags.py +0 -0
  53. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/datetime/seasonality.py +0 -0
  54. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/datetime/time_features.py +0 -0
  55. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/features.py +0 -0
  56. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/forecast.py +0 -0
  57. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon/timeseries/utils/warning_filters.py +0 -0
  58. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/SOURCES.txt +0 -0
  59. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/dependency_links.txt +0 -0
  60. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/namespace_packages.txt +0 -0
  61. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/top_level.txt +0 -0
  62. {autogluon.timeseries-1.0.1b20240325 → autogluon.timeseries-1.0.1b20240326}/src/autogluon.timeseries.egg-info/zip-safe +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.0.1b20240325
+Version: 1.0.1b20240326
 Summary: AutoML for Image, Text, and Tabular Data
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community

src/autogluon/timeseries/configs/presets_configs.py
@@ -1,4 +1,5 @@
 """Preset configurations for autogluon.timeseries Predictors"""
+
 from autogluon.timeseries.models.presets import get_default_hps

 # TODO: change default HPO settings when other HPO strategies (e.g., Ray tune) are available
@@ -11,18 +12,23 @@ TIMESERIES_PRESETS_CONFIGS = dict(
     fast_training={"hyperparameters": "very_light"},
     chronos_tiny={
         "hyperparameters": {"Chronos": {"model_path": "tiny"}},
+        "skip_model_selection": True,
     },
     chronos_mini={
         "hyperparameters": {"Chronos": {"model_path": "mini"}},
+        "skip_model_selection": True,
     },
     chronos_small={
         "hyperparameters": {"Chronos": {"model_path": "small"}},
+        "skip_model_selection": True,
     },
     chronos_base={
         "hyperparameters": {"Chronos": {"model_path": "base"}},
+        "skip_model_selection": True,
     },
     chronos_large={
         "hyperparameters": {"Chronos": {"model_path": "large", "batch_size": 8}},
+        "skip_model_selection": True,
     },
     chronos_ensemble={
         "hyperparameters": {

src/autogluon/timeseries/learner.py
@@ -97,6 +97,7 @@ class TimeSeriesLearner(AbstractLearner):
             target=self.target,
             quantile_levels=self.quantile_levels,
             verbosity=kwargs.get("verbosity", 2),
+            skip_model_selection=kwargs.get("skip_model_selection", False),
             enable_ensemble=kwargs.get("enable_ensemble", True),
             metadata=self.feature_generator.covariate_metadata,
             val_splitter=val_splitter,

src/autogluon/timeseries/predictor.py
@@ -425,6 +425,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
         refit_every_n_windows: int = 1,
         refit_full: bool = False,
         enable_ensemble: bool = True,
+        skip_model_selection: bool = False,
         random_seed: Optional[int] = 123,
         verbosity: Optional[int] = None,
     ) -> "TimeSeriesPredictor":
@@ -642,6 +643,10 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
         enable_ensemble : bool, default = True
             If True, the ``TimeSeriesPredictor`` will fit a simple weighted ensemble on top of the models specified via
             ``hyperparameters``.
+        skip_model_selection : bool, default = False
+            If True, predictor will not compute the validation score. For example, this argument is useful if we want
+            to use the predictor as a wrapper for a single pre-trained model. If set to True, then the ``hyperparameters``
+            dict must contain exactly one model without hyperparameter search spaces or an exception will be raised.
         random_seed : int or None, default = 123
             If provided, fixes the seed of the random number generator for all models. This guarantees reproducible
             results for most models (except those trained on GPU because of the non-determinism of GPU operations).
@@ -681,6 +686,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
             val_step_size=val_step_size,
             refit_every_n_windows=refit_every_n_windows,
             refit_full=refit_full,
+            skip_model_selection=skip_model_selection,
             enable_ensemble=enable_ensemble,
             random_seed=random_seed,
             verbosity=verbosity,
@@ -715,9 +721,10 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
         if num_val_windows == 0 and tuning_data is None:
             raise ValueError("Please set num_val_windows >= 1 or provide custom tuning_data")

-        train_data = self._filter_short_series(
-            train_data, num_val_windows=num_val_windows, val_step_size=val_step_size
-        )
+        if not skip_model_selection:
+            train_data = self._filter_short_series(
+                train_data, num_val_windows=num_val_windows, val_step_size=val_step_size
+            )

         val_splitter = ExpandingWindowSplitter(
             prediction_length=self.prediction_length, num_val_windows=num_val_windows, val_step_size=val_step_size
@@ -734,6 +741,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
             verbosity=verbosity,
             val_splitter=val_splitter,
             refit_every_n_windows=refit_every_n_windows,
+            skip_model_selection=skip_model_selection,
             enable_ensemble=enable_ensemble,
             random_seed=random_seed,
         )
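
Taken together, these predictor.py changes expose the same behaviour outside the presets: skip_model_selection=True can be passed to fit() directly, together with a hyperparameters dict that names exactly one model and contains no search spaces. A hedged sketch of that path, reusing the train_data TimeSeriesDataFrame from the earlier sketch:

    from autogluon.timeseries import TimeSeriesPredictor

    predictor = TimeSeriesPredictor(prediction_length=24).fit(
        train_data,
        # Exactly one model and no search spaces, as the new docstring requires.
        hyperparameters={"Chronos": {"model_path": "base"}},
        skip_model_selection=True,  # no validation score; ensembling is disabled implicitly
    )
    forecast = predictor.predict(train_data)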

src/autogluon/timeseries/trainer/abstract_trainer.py
@@ -76,6 +76,8 @@ class SimpleAbstractTrainer:
         models = self.get_model_names()
         if not models:
             raise ValueError("Trainer has no fit models that can predict.")
+        if len(models) == 1:
+            return models[0]
         model_performances = self.get_models_attribute_dict(attribute="val_score")
         performances_list = [(m, model_performances[m]) for m in models if model_performances[m] is not None]

@@ -256,6 +258,7 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         eval_metric: Union[str, TimeSeriesScorer, None] = None,
         eval_metric_seasonal_period: Optional[int] = None,
         save_data: bool = True,
+        skip_model_selection: bool = False,
         enable_ensemble: bool = True,
         verbosity: int = 2,
         val_splitter: Optional[AbstractWindowSplitter] = None,
@@ -270,7 +273,9 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         self.target = kwargs.get("target", "target")
         self.metadata = kwargs.get("metadata", CovariateMetadata())
         self.is_data_saved = False
-        self.enable_ensemble = enable_ensemble
+        self.skip_model_selection = skip_model_selection
+        # Ensemble cannot be fit if val_scores are not computed
+        self.enable_ensemble = enable_ensemble and not skip_model_selection
         self.ensemble_model_type = TimeSeriesGreedyEnsemble

         self.verbosity = verbosity
@@ -487,7 +492,7 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         fit_end_time = time.time()
         model.fit_time = model.fit_time or (fit_end_time - fit_start_time)

-        if val_data is not None:
+        if val_data is not None and not self.skip_model_selection:
             model.score_and_cache_oof(val_data, store_val_score=True, store_predict_time=True)

         self._log_scores_and_times(model.val_score, model.fit_time, model.predict_time)
@@ -557,6 +562,17 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):

         logger.info(f"Models that will be trained: {list(m.name for m in models)}")

+        if self.skip_model_selection:
+            if len(models) > 1:
+                raise ValueError(
+                    "When `skip_model_selection=True`, only a single model must be provided via `hyperparameters` "
+                    f"but {len(models)} models were given"
+                )
+            if contains_searchspace(models[0].get_user_params()):
+                raise ValueError(
+                    "When `skip_model_selection=True`, model configuration should contain no search spaces."
+                )
+
         num_base_models = len(models)
         model_names_trained = []
         for i, model in enumerate(models):
@@ -646,7 +662,8 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
         try:
             best_model = self.get_model_best()
             logger.info(f"Best model: {best_model}")
-            logger.info(f"Best model score: {self.get_model_attribute(best_model, 'val_score'):.4f}")
+            if not self.skip_model_selection:
+                logger.info(f"Best model score: {self.get_model_attribute(best_model, 'val_score'):.4f}")
         except ValueError as e:
             logger.error(str(e))


src/autogluon/timeseries/trainer/auto_trainer.py
@@ -27,7 +27,8 @@ class AutoTimeSeriesTrainer(AbstractTimeSeriesTrainer):
             target=self.target,
             metadata=self.metadata,
             excluded_model_types=kwargs.get("excluded_model_types"),
-            multi_window=multi_window,
+            # if skip_model_selection = True, we skip backtesting
+            multi_window=multi_window and not self.skip_model_selection,
         )

     def fit(
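
The trainer changes above enforce the single-model constraint (and skip backtesting in auto_trainer.py) whenever model selection is skipped. A hedged illustration of the constraint, assuming the ValueError raised in the trainer propagates out of fit() (train_data as in the earlier sketches):

    from autogluon.timeseries import TimeSeriesPredictor

    try:
        TimeSeriesPredictor(prediction_length=24).fit(
            train_data,
            hyperparameters={"Naive": {}, "SeasonalNaive": {}},  # two models: not allowed here
            skip_model_selection=True,
        )
    except ValueError as err:
        # Expected message: "When `skip_model_selection=True`, only a single model must be provided ..."
        print(err)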

src/autogluon/timeseries/version.py
@@ -1,3 +1,3 @@
 """This is the autogluon version file."""
-__version__ = '1.0.1b20240325'
+__version__ = '1.0.1b20240326'
 __lite__ = False

src/autogluon.timeseries.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.0.1b20240325
+Version: 1.0.1b20240326
 Summary: AutoML for Image, Text, and Tabular Data
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community

src/autogluon.timeseries.egg-info/requires.txt
@@ -16,13 +16,13 @@ utilsforecast<0.0.11,>=0.0.10
 tqdm<5,>=4.38
 orjson~=3.9
 tensorboard<3,>=2.9
-autogluon.core[raytune]==1.0.1b20240325
-autogluon.common==1.0.1b20240325
-autogluon.tabular[catboost,lightgbm,xgboost]==1.0.1b20240325
+autogluon.core[raytune]==1.0.1b20240326
+autogluon.common==1.0.1b20240326
+autogluon.tabular[catboost,lightgbm,xgboost]==1.0.1b20240326

 [all]
-optimum[nncf,openvino]<1.18,>=1.17
 optimum[onnxruntime]<1.18,>=1.17
+optimum[nncf,openvino]<1.18,>=1.17

 [chronos-onnx]
 optimum[onnxruntime]<1.18,>=1.17