autogluon.timeseries 1.4.1b20251117__tar.gz → 1.4.1b20251206__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of autogluon.timeseries might be problematic.
- {autogluon_timeseries-1.4.1b20251117/src/autogluon.timeseries.egg-info → autogluon_timeseries-1.4.1b20251206}/PKG-INFO +23 -21
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/README.md +7 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/setup.py +7 -9
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/configs/hyperparameter_presets.py +2 -2
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/dataset/ts_dataframe.py +32 -34
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/learner.py +65 -33
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/metrics/__init__.py +4 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/metrics/abstract.py +8 -8
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/metrics/point.py +9 -9
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/metrics/quantile.py +4 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/__init__.py +2 -1
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/abstract/abstract_timeseries_model.py +52 -39
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/abstract/tunable.py +8 -8
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/autogluon_tabular/mlforecast.py +30 -26
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/autogluon_tabular/per_step.py +12 -10
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/autogluon_tabular/transforms.py +2 -2
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/chronos/__init__.py +4 -0
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/chronos/chronos2.py +353 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/chronos/model.py +23 -23
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/chronos/utils.py +4 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/__init__.py +6 -1
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/abstract.py +4 -5
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/array_based/__init__.py +3 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/array_based/abstract.py +14 -25
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/array_based/models.py +73 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/array_based/regressor/__init__.py +2 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/array_based/regressor/abstract.py +2 -6
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/array_based/regressor/linear_stacker.py +167 -0
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/array_based/regressor/per_quantile_tabular.py +94 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/array_based/regressor/tabular.py +21 -54
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/ensemble/weighted/greedy.py → autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/ensemble_selection.py +41 -61
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/per_item_greedy.py +162 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/weighted/abstract.py +1 -2
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/weighted/basic.py +6 -6
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/ensemble/weighted/greedy.py +57 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/gluonts/abstract.py +23 -23
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/gluonts/dataset.py +9 -9
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/local/__init__.py +15 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/local/abstract_local_model.py +13 -16
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/local/naive.py +2 -2
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/local/npts.py +1 -1
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/local/statsforecast.py +12 -12
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/multi_window/multi_window_model.py +18 -17
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/registry.py +3 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/attention.py +3 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/backbone.py +6 -6
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/rope.py +4 -9
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/models/toto/_internal/backbone/rotary_embedding_torch.py +342 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/scaler.py +2 -3
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/transformer.py +10 -10
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/dataset.py +2 -2
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/forecaster.py +8 -8
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/dataloader.py +4 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/hf_pretrained_model.py +2 -3
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/model.py +14 -14
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/predictor.py +286 -74
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/regressor.py +18 -23
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/splitter.py +2 -2
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/trainer/ensemble_composer.py +439 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/trainer/model_set_builder.py +9 -9
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/trainer/prediction_cache.py +16 -16
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/trainer/trainer.py +226 -123
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/trainer/utils.py +3 -4
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/transforms/covariate_scaler.py +7 -7
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/transforms/target_scaler.py +8 -8
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/local/__init__.py → autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/utils/constants.py +4 -16
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/datetime/lags.py +1 -3
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/datetime/seasonality.py +1 -3
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/features.py +9 -9
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/forecast.py +1 -2
- autogluon_timeseries-1.4.1b20251206/src/autogluon/timeseries/utils/timer.py +173 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/version.py +1 -1
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206/src/autogluon.timeseries.egg-info}/PKG-INFO +23 -21
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/SOURCES.txt +6 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/requires.txt +14 -15
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/chronos/__init__.py +0 -3
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/ensemble/array_based/__init__.py +0 -3
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/ensemble/array_based/models.py +0 -50
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/models/ensemble/array_based/regressor/per_quantile_tabular.py +0 -139
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/trainer/ensemble_composer.py +0 -250
- autogluon_timeseries-1.4.1b20251117/src/autogluon/timeseries/utils/timer.py +0 -68
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/LICENSE +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/NOTICE +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/setup.cfg +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/configs/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/configs/predictor_presets.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/dataset/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/metrics/utils.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/abstract/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/abstract/model_trial.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/autogluon_tabular/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/autogluon_tabular/utils.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/ensemble/weighted/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/gluonts/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/gluonts/models.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/multi_window/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/distribution.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/models/toto/_internal/backbone/kvcache.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/trainer/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/transforms/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/datetime/__init__.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/datetime/base.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/datetime/time_features.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon/timeseries/utils/warning_filters.py +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/dependency_links.txt +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/namespace_packages.txt +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/top_level.txt +0 -0
- {autogluon_timeseries-1.4.1b20251117 → autogluon_timeseries-1.4.1b20251206}/src/autogluon.timeseries.egg-info/zip-safe +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: autogluon.timeseries
-Version: 1.4.1b20251117
+Version: 1.4.1b20251206
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -23,15 +23,15 @@ Classifier: Operating System :: Microsoft :: Windows
 Classifier: Operating System :: POSIX
 Classifier: Operating System :: Unix
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Software Development
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Topic :: Scientific/Engineering :: Information Analysis
 Classifier: Topic :: Scientific/Engineering :: Image Recognition
-Requires-Python: >=3.9, <3.13
+Requires-Python: >=3.10, <3.14
 Description-Content-Type: text/markdown
 License-File: LICENSE
 License-File: NOTICE
@@ -39,10 +39,9 @@ Requires-Dist: joblib<1.7,>=1.2
 Requires-Dist: numpy<2.4.0,>=1.25.0
 Requires-Dist: scipy<1.17,>=1.5.4
 Requires-Dist: pandas<2.4.0,>=2.0.0
-Requires-Dist: torch<2.
-Requires-Dist: lightning<2.
-Requires-Dist:
-Requires-Dist: transformers[sentencepiece]<4.50,>=4.38.0
+Requires-Dist: torch<2.10,>=2.6
+Requires-Dist: lightning<2.6,>=2.5.1
+Requires-Dist: transformers[sentencepiece]<4.58,>=4.51.0
 Requires-Dist: accelerate<2.0,>=0.34.0
 Requires-Dist: gluonts<0.17,>=0.15.0
 Requires-Dist: networkx<4,>=3.0
@@ -53,23 +52,23 @@ Requires-Dist: coreforecast<0.0.17,>=0.0.12
 Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
-Requires-Dist:
+Requires-Dist: einops<1,>=0.7
+Requires-Dist: chronos-forecasting<3,>=2.2.0rc4
+Requires-Dist: peft<0.18,>=0.13.0
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core
-Requires-Dist: autogluon.common==1.4.1b20251117
-Requires-Dist: autogluon.features==1.4.1b20251117
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.1b20251117
+Requires-Dist: autogluon.core==1.4.1b20251206
+Requires-Dist: autogluon.common==1.4.1b20251206
+Requires-Dist: autogluon.features==1.4.1b20251206
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.1b20251206
 Provides-Extra: tests
 Requires-Dist: pytest; extra == "tests"
 Requires-Dist: ruff>=0.0.285; extra == "tests"
 Requires-Dist: flaky<4,>=3.7; extra == "tests"
 Requires-Dist: pytest-timeout<3,>=2.1; extra == "tests"
-Provides-Extra: toto
-Requires-Dist:
-Requires-Dist: rotary-embedding-torch<1,>=0.8; extra == "toto"
+Provides-Extra: ray
+Requires-Dist: autogluon.core[raytune]==1.4.1b20251206; extra == "ray"
 Provides-Extra: all
-Requires-Dist:
-Requires-Dist: rotary-embedding-torch<1,>=0.8; extra == "all"
+Requires-Dist: autogluon.core[raytune]==1.4.1b20251206; extra == "all"
 Dynamic: author
 Dynamic: classifier
 Dynamic: description
@@ -92,7 +91,7 @@ Dynamic: summary
 
 [](https://github.com/autogluon/autogluon/releases)
 [](https://anaconda.org/conda-forge/autogluon)
-[](https://pypi.org/project/autogluon/)
+[](https://pypi.org/project/autogluon/)
 [](https://pepy.tech/project/autogluon)
 [](./LICENSE)
 [](https://discord.gg/wjUmjqAc2N)
@@ -109,7 +108,7 @@ AutoGluon, developed by AWS AI, automates machine learning tasks enabling you to
 
 ## 💾 Installation
 
-AutoGluon is supported on Python 3.9 - 3.12 and is available on Linux, MacOS, and Windows.
+AutoGluon is supported on Python 3.10 - 3.13 and is available on Linux, MacOS, and Windows.
 
 You can install AutoGluon with:
 
@@ -132,8 +131,8 @@ predictions = predictor.predict("test.csv")
 | AutoGluon Task | Quickstart | API |
 |:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------:|
 | TabularPredictor | [](https://auto.gluon.ai/stable/tutorials/tabular/tabular-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.tabular.TabularPredictor.html) |
-| MultiModalPredictor | [](https://auto.gluon.ai/stable/tutorials/multimodal/multimodal_prediction/multimodal-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.multimodal.MultiModalPredictor.html) |
 | TimeSeriesPredictor | [](https://auto.gluon.ai/stable/tutorials/timeseries/forecasting-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.timeseries.TimeSeriesPredictor.html) |
+| MultiModalPredictor | [](https://auto.gluon.ai/stable/tutorials/multimodal/multimodal_prediction/multimodal-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.multimodal.MultiModalPredictor.html) |
 
 ## :mag: Resources
 
@@ -156,7 +155,10 @@ Below is a curated list of recent tutorials and talks on AutoGluon. A comprehens
 - [Benchmarking Multimodal AutoML for Tabular Data with Text Fields](https://datasets-benchmarks-proceedings.neurips.cc/paper/2021/file/9bf31c7ff062936a96d3c8bd1f8f2ff3-Paper-round2.pdf) (*NeurIPS*, 2021) ([BibTeX](CITING.md#autogluonmultimodal))
 - [XTab: Cross-table Pretraining for Tabular Transformers](https://proceedings.mlr.press/v202/zhu23k/zhu23k.pdf) (*ICML*, 2023)
 - [AutoGluon-TimeSeries: AutoML for Probabilistic Time Series Forecasting](https://arxiv.org/abs/2308.05566) (*AutoML Conf*, 2023) ([BibTeX](CITING.md#autogluontimeseries))
-- [TabRepo: A Large Scale Repository of Tabular Model Evaluations and its AutoML Applications](https://arxiv.org/pdf/2311.02971.pdf) (*
+- [TabRepo: A Large Scale Repository of Tabular Model Evaluations and its AutoML Applications](https://arxiv.org/pdf/2311.02971.pdf) (*AutoML Conf*, 2024)
+- [AutoGluon-Multimodal (AutoMM): Supercharging Multimodal AutoML with Foundation Models](https://arxiv.org/pdf/2404.16233) (*AutoML Conf*, 2024) ([BibTeX](CITING.md#autogluonmultimodal))
+- [Multi-layer Stack Ensembles for Time Series Forecasting](https://arxiv.org/abs/2511.15350) (*AutoML Conf*, 2025) ([BibTeX](CITING.md#autogluontimeseries))
+- [Chronos-2: From Univariate to Universal Forecasting](https://arxiv.org/abs/2510.15821) (*Arxiv*, 2025) ([BibTeX](CITING.md#autogluontimeseries))
 
 ### Articles
 - [AutoGluon-TimeSeries: Every Time Series Forecasting Model In One Library](https://towardsdatascience.com/autogluon-timeseries-every-time-series-forecasting-model-in-one-library-29a3bf6879db) (*Towards Data Science*, Jan 2024)

README.md

@@ -7,7 +7,7 @@
 
 [](https://github.com/autogluon/autogluon/releases)
 [](https://anaconda.org/conda-forge/autogluon)
-[](https://pypi.org/project/autogluon/)
+[](https://pypi.org/project/autogluon/)
 [](https://pepy.tech/project/autogluon)
 [](./LICENSE)
 [](https://discord.gg/wjUmjqAc2N)
@@ -24,7 +24,7 @@ AutoGluon, developed by AWS AI, automates machine learning tasks enabling you to
 
 ## 💾 Installation
 
-AutoGluon is supported on Python 3.9 - 3.12 and is available on Linux, MacOS, and Windows.
+AutoGluon is supported on Python 3.10 - 3.13 and is available on Linux, MacOS, and Windows.
 
 You can install AutoGluon with:
 
@@ -47,8 +47,8 @@ predictions = predictor.predict("test.csv")
 | AutoGluon Task | Quickstart | API |
 |:--------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------:|
 | TabularPredictor | [](https://auto.gluon.ai/stable/tutorials/tabular/tabular-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.tabular.TabularPredictor.html) |
-| MultiModalPredictor | [](https://auto.gluon.ai/stable/tutorials/multimodal/multimodal_prediction/multimodal-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.multimodal.MultiModalPredictor.html) |
 | TimeSeriesPredictor | [](https://auto.gluon.ai/stable/tutorials/timeseries/forecasting-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.timeseries.TimeSeriesPredictor.html) |
+| MultiModalPredictor | [](https://auto.gluon.ai/stable/tutorials/multimodal/multimodal_prediction/multimodal-quick-start.html) | [](https://auto.gluon.ai/stable/api/autogluon.multimodal.MultiModalPredictor.html) |
 
 ## :mag: Resources
 
@@ -71,7 +71,10 @@ Below is a curated list of recent tutorials and talks on AutoGluon. A comprehens
 - [Benchmarking Multimodal AutoML for Tabular Data with Text Fields](https://datasets-benchmarks-proceedings.neurips.cc/paper/2021/file/9bf31c7ff062936a96d3c8bd1f8f2ff3-Paper-round2.pdf) (*NeurIPS*, 2021) ([BibTeX](CITING.md#autogluonmultimodal))
 - [XTab: Cross-table Pretraining for Tabular Transformers](https://proceedings.mlr.press/v202/zhu23k/zhu23k.pdf) (*ICML*, 2023)
 - [AutoGluon-TimeSeries: AutoML for Probabilistic Time Series Forecasting](https://arxiv.org/abs/2308.05566) (*AutoML Conf*, 2023) ([BibTeX](CITING.md#autogluontimeseries))
-- [TabRepo: A Large Scale Repository of Tabular Model Evaluations and its AutoML Applications](https://arxiv.org/pdf/2311.02971.pdf) (*
+- [TabRepo: A Large Scale Repository of Tabular Model Evaluations and its AutoML Applications](https://arxiv.org/pdf/2311.02971.pdf) (*AutoML Conf*, 2024)
+- [AutoGluon-Multimodal (AutoMM): Supercharging Multimodal AutoML with Foundation Models](https://arxiv.org/pdf/2404.16233) (*AutoML Conf*, 2024) ([BibTeX](CITING.md#autogluonmultimodal))
+- [Multi-layer Stack Ensembles for Time Series Forecasting](https://arxiv.org/abs/2511.15350) (*AutoML Conf*, 2025) ([BibTeX](CITING.md#autogluontimeseries))
+- [Chronos-2: From Univariate to Universal Forecasting](https://arxiv.org/abs/2510.15821) (*Arxiv*, 2025) ([BibTeX](CITING.md#autogluontimeseries))
 
 ### Articles
 - [AutoGluon-TimeSeries: Every Time Series Forecasting Model In One Library](https://towardsdatascience.com/autogluon-timeseries-every-time-series-forecasting-model-in-one-library-29a3bf6879db) (*Towards Data Science*, Jan 2024)

setup.py

@@ -30,7 +30,6 @@ install_requires = [
     "pandas",  # version range defined in `core/_setup_utils.py`
     "torch",  # version range defined in `core/_setup_utils.py`
     "lightning",  # version range defined in `core/_setup_utils.py`
-    "pytorch_lightning",  # version range defined in `core/_setup_utils.py`
     "transformers[sentencepiece]",  # version range defined in `core/_setup_utils.py`
     "accelerate",  # version range defined in `core/_setup_utils.py`
     "gluonts>=0.15.0,<0.17",
@@ -42,10 +41,11 @@ install_requires = [
     "fugue>=0.9.0",  # prevent dependency clash with omegaconf
     "tqdm",  # version range defined in `core/_setup_utils.py`
     "orjson~=3.9",  # use faster JSON implementation in GluonTS
-    "
-
+    "einops>=0.7,<1",  # required by Chronos-2 and Toto
+    "chronos-forecasting>=2.2.0rc4,<3",
+    "peft>=0.13.0,<0.18",  # version range same as in chronos-forecasting[extras]
     "tensorboard>=2.9,<3",  # fixes https://github.com/autogluon/autogluon/issues/3612
-    f"autogluon.core
+    f"autogluon.core=={version}",
     f"autogluon.common=={version}",
     f"autogluon.features=={version}",
     f"autogluon.tabular[catboost,lightgbm,xgboost]=={version}",
@@ -58,14 +58,12 @@ extras_require = {
         "flaky>=3.7,<4",
         "pytest-timeout>=2.1,<3",
     ],
-    "
-    "
-        "rotary-embedding-torch>=0.8,<1",
+    "ray": [
+        f"autogluon.core[raytune]=={version}",
     ],
 }
 
-
-extras_require["all"] = list(set.union(*(set(extras_require[extra]) for extra in ["toto"])))
+extras_require["all"] = list(set.union(*(set(extras_require[extra]) for extra in ["ray"])))
 install_requires = ag.get_dependency_version_ranges(install_requires)
 
 if __name__ == "__main__":
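The setup.py changes above drop the old "toto" extra (a vendored rotary_embedding_torch.py appears in the file list instead) and introduce a "ray" extra. Below is a minimal sketch, not code from the package, of what the aggregated "all" extra evaluates to after this change; the hard-coded version string is only illustrative, since setup.py derives it from its `version` variable:

# Minimal sketch of the extras_require aggregation shown in the setup.py diff above.
# The version string is illustrative; setup.py takes it from `version`.
version = "1.4.1b20251206"

extras_require = {
    "tests": ["pytest", "ruff>=0.0.285", "flaky>=3.7,<4", "pytest-timeout>=2.1,<3"],
    "ray": [f"autogluon.core[raytune]=={version}"],
}

# "all" is the union of the extras listed in the comprehension -- after this change, just "ray".
extras_require["all"] = list(set.union(*(set(extras_require[extra]) for extra in ["ray"])))
print(extras_require["all"])  # ['autogluon.core[raytune]==1.4.1b20251206']

With this layout, installing the package with the "ray" (or "all") extra pulls in the Ray Tune dependencies through autogluon.core.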
src/autogluon/timeseries/configs/hyperparameter_presets.py

@@ -1,7 +1,7 @@
-from typing import Any
+from typing import Any
 
 
-def get_hyperparameter_presets() -> dict[str, dict[str,
+def get_hyperparameter_presets() -> dict[str, dict[str, dict[str, Any] | list[dict[str, Any]]]]:
     return {
         "very_light": {
             "Naive": {},

src/autogluon/timeseries/dataset/ts_dataframe.py

@@ -7,7 +7,7 @@ import reprlib
 from collections.abc import Iterable
 from itertools import islice
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Final,
+from typing import TYPE_CHECKING, Any, Final, Type, overload
 
 import numpy as np
 import pandas as pd
@@ -122,10 +122,10 @@ class TimeSeriesDataFrame(pd.DataFrame):
 
     def __init__(
         self,
-        data:
-        static_features:
-        id_column:
-        timestamp_column:
+        data: pd.DataFrame | str | Path | Iterable,
+        static_features: pd.DataFrame | str | Path | None = None,
+        id_column: str | None = None,
+        timestamp_column: str | None = None,
         num_cpus: int = -1,
         *args,
         **kwargs,
@@ -149,7 +149,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
         else:
             raise ValueError(f"data must be a pd.DataFrame, Iterable, string or Path (received {type(data)}).")
         super().__init__(data=data, *args, **kwargs)  # type: ignore
-        self._static_features:
+        self._static_features: pd.DataFrame | None = None
         if static_features is not None:
             self.static_features = self._construct_static_features(static_features, id_column=id_column)
 
@@ -168,8 +168,8 @@ class TimeSeriesDataFrame(pd.DataFrame):
     def _construct_tsdf_from_data_frame(
         cls,
         df: pd.DataFrame,
-        id_column:
-        timestamp_column:
+        id_column: str | None = None,
+        timestamp_column: str | None = None,
     ) -> pd.DataFrame:
         df = df.copy()
         if id_column is not None:
@@ -272,9 +272,9 @@ class TimeSeriesDataFrame(pd.DataFrame):
     def from_data_frame(
         cls,
         df: pd.DataFrame,
-        id_column:
-        timestamp_column:
-        static_features_df:
+        id_column: str | None = None,
+        timestamp_column: str | None = None,
+        static_features_df: pd.DataFrame | None = None,
     ) -> TimeSeriesDataFrame:
         """Construct a ``TimeSeriesDataFrame`` from a pandas DataFrame.
 
@@ -315,10 +315,10 @@ class TimeSeriesDataFrame(pd.DataFrame):
     @classmethod
     def from_path(
         cls,
-        path:
-        id_column:
-        timestamp_column:
-        static_features_path:
+        path: str | Path,
+        id_column: str | None = None,
+        timestamp_column: str | None = None,
+        static_features_path: str | Path | None = None,
     ) -> TimeSeriesDataFrame:
         """Construct a ``TimeSeriesDataFrame`` from a CSV or Parquet file.
 
@@ -396,8 +396,8 @@ class TimeSeriesDataFrame(pd.DataFrame):
     @classmethod
     def _construct_static_features(
         cls,
-        static_features:
-        id_column:
+        static_features: pd.DataFrame | str | Path,
+        id_column: str | None = None,
     ) -> pd.DataFrame:
         if isinstance(static_features, (str, Path)):
             static_features = load_pd.load(str(static_features))
@@ -421,7 +421,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
         return self._static_features
 
     @static_features.setter
-    def static_features(self, value:
+    def static_features(self, value: pd.DataFrame | None):
         # if the current item index is not a multiindex, then we are dealing with a single
         # item slice. this should only happen when the user explicitly requests only a
         # single item or during `slice_by_timestep`. In this case we do not set static features
@@ -454,7 +454,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
 
         self._static_features = value
 
-    def infer_frequency(self, num_items:
+    def infer_frequency(self, num_items: int | None = None, raise_if_irregular: bool = False) -> str:
         """Infer the time series frequency based on the timestamps of the observations.
 
         Parameters
@@ -570,7 +570,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
         return obj
 
     def __finalize__(  # noqa
-        self: TimeSeriesDataFrame, other, method:
+        self: TimeSeriesDataFrame, other, method: str | None = None, **kwargs
     ) -> TimeSeriesDataFrame:
         super().__finalize__(other=other, method=method, **kwargs)
         # when finalizing the copy/slice operation, we use the property setter to stay consistent
@@ -602,9 +602,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
         after = TimeSeriesDataFrame(data_after, static_features=self.static_features)
         return before, after
 
-    def slice_by_timestep(
-        self, start_index: Optional[int] = None, end_index: Optional[int] = None
-    ) -> TimeSeriesDataFrame:
+    def slice_by_timestep(self, start_index: int | None = None, end_index: int | None = None) -> TimeSeriesDataFrame:
         """Select a subsequence from each time series between start (inclusive) and end (exclusive) indices.
 
         This operation is equivalent to selecting a slice ``[start_index : end_index]`` from each time series, and then
@@ -907,8 +905,8 @@ class TimeSeriesDataFrame(pd.DataFrame):
         return super().sort_index(*args, **kwargs)  # type: ignore
 
     def get_model_inputs_for_scoring(
-        self, prediction_length: int, known_covariates_names:
-    ) -> tuple[TimeSeriesDataFrame,
+        self, prediction_length: int, known_covariates_names: list[str] | None = None
+    ) -> tuple[TimeSeriesDataFrame, TimeSeriesDataFrame | None]:
         """Prepare model inputs necessary to predict the last ``prediction_length`` time steps of each time series in the dataset.
 
         Parameters
@@ -938,8 +936,8 @@ class TimeSeriesDataFrame(pd.DataFrame):
     def train_test_split(
         self,
         prediction_length: int,
-        end_index:
-        suffix:
+        end_index: int | None = None,
+        suffix: str | None = None,
     ) -> tuple[TimeSeriesDataFrame, TimeSeriesDataFrame]:
         """Generate a train/test split from the given dataset.
 
@@ -984,7 +982,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
 
     def convert_frequency(
         self,
-        freq:
+        freq: str | pd.DateOffset,
         agg_numeric: str = "mean",
         agg_categorical: str = "first",
         num_cpus: int = -1,
@@ -1003,7 +1001,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
 
         Parameters
         ----------
-        freq :
+        freq : str | pd.DateOffset
            Frequency to which the data should be converted. See `pandas frequency aliases <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`_
            for supported values.
        agg_numeric : {"max", "min", "sum", "mean", "median", "first", "last"}, default = "mean"
@@ -1130,14 +1128,14 @@ class TimeSeriesDataFrame(pd.DataFrame):
     def reindex(*args, **kwargs) -> Self: ...  # type: ignore
 
     @overload
-    def __new__(cls, data: pd.DataFrame, static_features:
+    def __new__(cls, data: pd.DataFrame, static_features: pd.DataFrame | None = None) -> Self: ...  # type: ignore
     @overload
     def __new__(
         cls,
-        data:
-        static_features:
-        id_column:
-        timestamp_column:
+        data: pd.DataFrame | str | Path | Iterable,
+        static_features: pd.DataFrame | str | Path | None = None,
+        id_column: str | None = None,
+        timestamp_column: str | None = None,
         num_cpus: int = -1,
         *args,
         **kwargs,
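Most of the dataset, learner, and metrics changes in this release are a mechanical migration of type hints from `typing.Optional`/`typing.Union` to PEP 604 `X | None` unions, in line with the raised Python floor (>=3.10) in the metadata above. A small illustrative sketch, not code from the package, showing the before/after equivalence (the function names are made up):

from __future__ import annotations  # without this import, `X | Y` annotations need Python >= 3.10

from pathlib import Path
from typing import Optional, Union

import pandas as pd


# Old style (illustrative reconstruction of the truncated "-" lines above):
def load_old(path: Union[str, Path], id_column: Optional[str] = None) -> Optional[pd.DataFrame]:
    return None


# New style used throughout this release -- same meaning, written as PEP 604 unions:
def load_new(path: str | Path, id_column: str | None = None) -> pd.DataFrame | None:
    return None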
src/autogluon/timeseries/learner.py

@@ -1,7 +1,7 @@
 import logging
 import reprlib
 import time
-from typing import Any, Literal,
+from typing import Any, Literal, Type
 
 import pandas as pd
 
@@ -25,12 +25,12 @@ class TimeSeriesLearner(AbstractLearner):
         self,
         path_context: str,
         target: str = "target",
-        known_covariates_names:
+        known_covariates_names: list[str] | None = None,
         trainer_type: Type[TimeSeriesTrainer] = TimeSeriesTrainer,
-        eval_metric:
+        eval_metric: str | TimeSeriesScorer | None = None,
         prediction_length: int = 1,
         cache_predictions: bool = True,
-        ensemble_model_type:
+        ensemble_model_type: Type | None = None,
         **kwargs,
     ):
         super().__init__(path_context=path_context)
@@ -41,7 +41,7 @@ class TimeSeriesLearner(AbstractLearner):
         self.prediction_length = prediction_length
         self.quantile_levels = kwargs.get("quantile_levels", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
         self.cache_predictions = cache_predictions
-        self.freq:
+        self.freq: str | None = None
         self.ensemble_model_type = ensemble_model_type
 
         self.feature_generator = TimeSeriesFeatureGenerator(
@@ -55,14 +55,14 @@ class TimeSeriesLearner(AbstractLearner):
     def fit(
         self,
         train_data: TimeSeriesDataFrame,
-        hyperparameters:
-        val_data:
-        hyperparameter_tune_kwargs:
-        time_limit:
-        num_val_windows:
-        val_step_size:
-        refit_every_n_windows:
-        random_seed:
+        hyperparameters: str | dict,
+        val_data: TimeSeriesDataFrame | None = None,
+        hyperparameter_tune_kwargs: str | dict | None = None,
+        time_limit: float | None = None,
+        num_val_windows: int = 1,
+        val_step_size: int | None = None,
+        refit_every_n_windows: int | None = 1,
+        random_seed: int | None = None,
        **kwargs,
     ) -> None:
         self._time_limit = time_limit
@@ -95,7 +95,7 @@ class TimeSeriesLearner(AbstractLearner):
         )
 
         assert issubclass(self.trainer_type, TimeSeriesTrainer)
-        self.trainer:
+        self.trainer: TimeSeriesTrainer | None = self.trainer_type(**trainer_init_kwargs)
         self.trainer_path = self.trainer.path
         self.save()
 
@@ -122,9 +122,9 @@ class TimeSeriesLearner(AbstractLearner):
 
     def _align_covariates_with_forecast_index(
         self,
-        known_covariates:
+        known_covariates: TimeSeriesDataFrame | None,
         data: TimeSeriesDataFrame,
-    ) ->
+    ) -> TimeSeriesDataFrame | None:
         """Select the relevant item_ids and timestamps from the known_covariates dataframe.
 
         If some of the item_ids or timestamps are missing, an exception is raised.
@@ -163,10 +163,10 @@ class TimeSeriesLearner(AbstractLearner):
     def predict(
         self,
         data: TimeSeriesDataFrame,
-        known_covariates:
-        model:
+        known_covariates: TimeSeriesDataFrame | None = None,
+        model: str | AbstractTimeSeriesModel | None = None,
         use_cache: bool = True,
-        random_seed:
+        random_seed: int | None = None,
         **kwargs,
     ) -> TimeSeriesDataFrame:
         data = self.feature_generator.transform(data)
@@ -184,8 +184,8 @@ class TimeSeriesLearner(AbstractLearner):
     def score(
         self,
         data: TimeSeriesDataFrame,
-        model:
-        metric:
+        model: str | AbstractTimeSeriesModel | None = None,
+        metric: str | TimeSeriesScorer | None = None,
         use_cache: bool = True,
     ) -> float:
         data = self.feature_generator.transform(data)
@@ -194,8 +194,8 @@ class TimeSeriesLearner(AbstractLearner):
     def evaluate(
         self,
         data: TimeSeriesDataFrame,
-        model:
-        metrics:
+        model: str | None = None,
+        metrics: str | TimeSeriesScorer | list[str | TimeSeriesScorer] | None = None,
         use_cache: bool = True,
     ) -> dict[str, float]:
         data = self.feature_generator.transform(data)
@@ -203,15 +203,15 @@ class TimeSeriesLearner(AbstractLearner):
 
     def get_feature_importance(
         self,
-        data:
-        model:
-        metric:
-        features:
-        time_limit:
+        data: TimeSeriesDataFrame | None = None,
+        model: str | None = None,
+        metric: str | TimeSeriesScorer | None = None,
+        features: list[str] | None = None,
+        time_limit: float | None = None,
         method: Literal["naive", "permutation"] = "permutation",
         subsample_size: int = 50,
-        num_iterations:
-        random_seed:
+        num_iterations: int | None = None,
+        random_seed: int | None = None,
         relative_scores: bool = False,
         include_confidence_band: bool = True,
         confidence_level: float = 0.99,
@@ -272,9 +272,9 @@ class TimeSeriesLearner(AbstractLearner):
 
     def leaderboard(
         self,
-        data:
+        data: TimeSeriesDataFrame | None = None,
         extra_info: bool = False,
-        extra_metrics:
+        extra_metrics: list[str | TimeSeriesScorer] | None = None,
         use_cache: bool = True,
     ) -> pd.DataFrame:
         if data is not None:
@@ -301,7 +301,7 @@ class TimeSeriesLearner(AbstractLearner):
         return learner_info
 
     def persist_trainer(
-        self, models:
+        self, models: Literal["all", "best"] | list[str] = "all", with_ancestors: bool = False
     ) -> list[str]:
         """Loads models and trainer in memory so that they don't have to be
         loaded during predictions
@@ -329,3 +329,35 @@ class TimeSeriesLearner(AbstractLearner):
 
     def refit_full(self, model: str = "all") -> dict[str, str]:
         return self.load_trainer().refit_full(model=model)
+
+    def backtest_predictions(
+        self,
+        data: TimeSeriesDataFrame | None,
+        model_names: list[str],
+        num_val_windows: int | None = None,
+        val_step_size: int | None = None,
+        use_cache: bool = True,
+    ) -> dict[str, list[TimeSeriesDataFrame]]:
+        if data is not None:
+            data = self.feature_generator.transform(data)
+        return self.load_trainer().backtest_predictions(
+            model_names=model_names,
+            data=data,
+            num_val_windows=num_val_windows,
+            val_step_size=val_step_size,
+            use_cache=use_cache,
+        )
+
+    def backtest_targets(
+        self,
+        data: TimeSeriesDataFrame | None,
+        num_val_windows: int | None = None,
+        val_step_size: int | None = None,
+    ) -> list[TimeSeriesDataFrame]:
+        if data is not None:
+            data = self.feature_generator.transform(data)
+        return self.load_trainer().backtest_targets(
+            data=data,
+            num_val_windows=num_val_windows,
+            val_step_size=val_step_size,
+        )
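The hunk above adds `backtest_predictions` and `backtest_targets` to `TimeSeriesLearner`, returning one forecast frame per model and validation window plus the matching target slices. A hypothetical usage sketch follows; obtaining a fitted `learner` and a `val_data` frame is assumed here and is not part of the diff, and the model names are illustrative placeholders:

# Hypothetical sketch only: `learner` is assumed to be a fitted TimeSeriesLearner and
# `val_data` a TimeSeriesDataFrame; neither setup step is shown in the diff.
preds = learner.backtest_predictions(
    data=val_data,
    model_names=["DirectTabular", "RecursiveTabular"],  # placeholder model names
    num_val_windows=2,
)
targets = learner.backtest_targets(data=val_data, num_val_windows=2)

# preds maps model name -> list of per-window forecast TimeSeriesDataFrames,
# aligned with the per-window target TimeSeriesDataFrames in `targets`.
for window_idx, target_window in enumerate(targets):
    for model_name, window_forecasts in preds.items():
        print(model_name, window_idx, len(window_forecasts[window_idx]), len(target_window))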
src/autogluon/timeseries/metrics/__init__.py

@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from pprint import pformat
-from typing import Any,
+from typing import Any, Sequence, Type
 
 import numpy as np
 
@@ -54,10 +54,10 @@ EXPERIMENTAL_METRICS: dict[str, Type[TimeSeriesScorer]] = {
 
 
 def check_get_evaluation_metric(
-    eval_metric:
+    eval_metric: str | TimeSeriesScorer | Type[TimeSeriesScorer] | None,
     prediction_length: int,
-    seasonal_period:
-    horizon_weight:
+    seasonal_period: int | None = None,
+    horizon_weight: Sequence[float] | np.ndarray | None = None,
 ) -> TimeSeriesScorer:
     """Factory method for TimeSeriesScorer objects.
|