autogluon.timeseries 1.4.1b20250819__tar.gz → 1.4.1b20250902__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/PKG-INFO +1 -1
  2. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/configs/__init__.py +4 -0
  3. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/configs/hyperparameter_presets.py +62 -0
  4. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/configs/predictor_presets.py +84 -0
  5. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/dataset/ts_dataframe.py +22 -9
  6. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/learner.py +14 -14
  7. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/metrics/__init__.py +5 -5
  8. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/metrics/abstract.py +11 -12
  9. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/__init__.py +2 -0
  10. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/abstract/abstract_timeseries_model.py +43 -42
  11. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/abstract/tunable.py +6 -6
  12. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/autogluon_tabular/mlforecast.py +34 -30
  13. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/autogluon_tabular/per_step.py +13 -12
  14. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/model.py +11 -10
  15. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/pipeline/base.py +8 -8
  16. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/pipeline/chronos.py +12 -12
  17. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +12 -12
  18. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/pipeline/utils.py +12 -12
  19. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/ensemble/abstract.py +19 -19
  20. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/ensemble/basic.py +8 -8
  21. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/ensemble/greedy.py +13 -13
  22. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/gluonts/abstract.py +24 -24
  23. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/gluonts/dataset.py +2 -2
  24. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/gluonts/models.py +22 -7
  25. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/local/abstract_local_model.py +12 -12
  26. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/local/naive.py +4 -0
  27. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/local/npts.py +1 -0
  28. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/local/statsforecast.py +27 -11
  29. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/multi_window/multi_window_model.py +33 -22
  30. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/models/registry.py +65 -0
  31. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/predictor.py +37 -37
  32. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/regressor.py +13 -13
  33. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/splitter.py +6 -6
  34. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/trainer/__init__.py +3 -0
  35. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/trainer/model_set_builder.py +256 -0
  36. autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/trainer/prediction_cache.py +149 -0
  37. {autogluon.timeseries-1.4.1b20250819/src/autogluon/timeseries → autogluon.timeseries-1.4.1b20250902/src/autogluon/timeseries/trainer}/trainer.py +72 -128
  38. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/transforms/covariate_scaler.py +3 -3
  39. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/transforms/target_scaler.py +7 -7
  40. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/datetime/lags.py +2 -2
  41. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/datetime/time_features.py +2 -2
  42. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/features.py +33 -33
  43. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/version.py +1 -1
  44. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/PKG-INFO +1 -1
  45. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/SOURCES.txt +7 -3
  46. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/requires.txt +4 -4
  47. autogluon.timeseries-1.4.1b20250819/src/autogluon/timeseries/configs/__init__.py +0 -3
  48. autogluon.timeseries-1.4.1b20250819/src/autogluon/timeseries/configs/presets_configs.py +0 -79
  49. autogluon.timeseries-1.4.1b20250819/src/autogluon/timeseries/models/presets.py +0 -358
  50. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/setup.cfg +0 -0
  51. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/setup.py +0 -0
  52. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/__init__.py +0 -0
  53. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/dataset/__init__.py +0 -0
  54. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/evaluator.py +0 -0
  55. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/metrics/point.py +0 -0
  56. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/metrics/quantile.py +0 -0
  57. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/metrics/utils.py +0 -0
  58. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/abstract/__init__.py +0 -0
  59. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/abstract/model_trial.py +0 -0
  60. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/autogluon_tabular/__init__.py +0 -0
  61. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/autogluon_tabular/transforms.py +0 -0
  62. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/autogluon_tabular/utils.py +0 -0
  63. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/__init__.py +0 -0
  64. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/chronos/pipeline/__init__.py +0 -0
  65. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/ensemble/__init__.py +0 -0
  66. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/gluonts/__init__.py +0 -0
  67. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/local/__init__.py +0 -0
  68. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/models/multi_window/__init__.py +0 -0
  69. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/transforms/__init__.py +0 -0
  70. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/__init__.py +0 -0
  71. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/datetime/__init__.py +0 -0
  72. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/datetime/base.py +0 -0
  73. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/datetime/seasonality.py +0 -0
  74. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/forecast.py +0 -0
  75. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon/timeseries/utils/warning_filters.py +0 -0
  76. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/dependency_links.txt +0 -0
  77. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/namespace_packages.txt +0 -0
  78. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/top_level.txt +0 -0
  79. {autogluon.timeseries-1.4.1b20250819 → autogluon.timeseries-1.4.1b20250902}/src/autogluon.timeseries.egg-info/zip-safe +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: autogluon.timeseries
- Version: 1.4.1b20250819
+ Version: 1.4.1b20250902
  Summary: Fast and Accurate ML in 3 Lines of Code
  Home-page: https://github.com/autogluon/autogluon
  Author: AutoGluon Community
@@ -0,0 +1,4 @@
+ from .hyperparameter_presets import get_hyperparameter_presets
+ from .predictor_presets import get_predictor_presets
+
+ __all__ = ["get_hyperparameter_presets", "get_predictor_presets"]
@@ -0,0 +1,62 @@
+ from typing import Any, Union
+
+
+ def get_hyperparameter_presets() -> dict[str, dict[str, Union[dict[str, Any], list[dict[str, Any]]]]]:
+ return {
+ "very_light": {
+ "Naive": {},
+ "SeasonalNaive": {},
+ "ETS": {},
+ "Theta": {},
+ "RecursiveTabular": {"max_num_samples": 100_000},
+ "DirectTabular": {"max_num_samples": 100_000},
+ },
+ "light": {
+ "Naive": {},
+ "SeasonalNaive": {},
+ "ETS": {},
+ "Theta": {},
+ "RecursiveTabular": {},
+ "DirectTabular": {},
+ "TemporalFusionTransformer": {},
+ "Chronos": {"model_path": "bolt_small"},
+ },
+ "light_inference": {
+ "SeasonalNaive": {},
+ "DirectTabular": {},
+ "RecursiveTabular": {},
+ "TemporalFusionTransformer": {},
+ "PatchTST": {},
+ },
+ "default": {
+ "SeasonalNaive": {},
+ "AutoETS": {},
+ "NPTS": {},
+ "DynamicOptimizedTheta": {},
+ "RecursiveTabular": {},
+ "DirectTabular": {},
+ "TemporalFusionTransformer": {},
+ "PatchTST": {},
+ "DeepAR": {},
+ "Chronos": [
+ {
+ "ag_args": {"name_suffix": "ZeroShot"},
+ "model_path": "bolt_base",
+ },
+ {
+ "ag_args": {"name_suffix": "FineTuned"},
+ "model_path": "bolt_small",
+ "fine_tune": True,
+ "target_scaler": "standard",
+ "covariate_regressor": {"model_name": "CAT", "model_hyperparameters": {"iterations": 1_000}},
+ },
+ ],
+ "TiDE": {
+ "encoder_hidden_dim": 256,
+ "decoder_hidden_dim": 256,
+ "temporal_hidden_dim": 64,
+ "num_batches_per_epoch": 100,
+ "lr": 1e-4,
+ },
+ },
+ }
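The bundles above are the named values that the `hyperparameters` argument of `TimeSeriesPredictor.fit` can resolve to. A minimal usage sketch, assuming the public AutoGluon API; the toy data below is illustrative and not part of this diff:

    # Sketch: select a named hyperparameter bundle by passing its string key to fit().
    import pandas as pd
    from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

    raw = pd.DataFrame({
        "item_id": ["A"] * 30,
        "timestamp": pd.date_range("2024-01-01", periods=30, freq="D"),
        "target": list(range(30)),
    })
    train_data = TimeSeriesDataFrame.from_data_frame(raw)
    predictor = TimeSeriesPredictor(prediction_length=7)
    # "very_light" selects the Naive / SeasonalNaive / ETS / Theta / RecursiveTabular / DirectTabular bundle above.
    predictor.fit(train_data, hyperparameters="very_light")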
@@ -0,0 +1,84 @@
+ """Preset configurations for autogluon.timeseries Predictors"""
+
+ from typing import Any
+
+ from . import get_hyperparameter_presets
+
+ TIMESERIES_PRESETS_ALIASES = dict(
+ chronos="chronos_small",
+ best="best_quality",
+ high="high_quality",
+ medium="medium_quality",
+ bq="best_quality",
+ hq="high_quality",
+ mq="medium_quality",
+ )
+
+
+ def get_predictor_presets() -> dict[str, Any]:
+ hp_presets = get_hyperparameter_presets()
+
+ predictor_presets = dict(
+ best_quality={"hyperparameters": "default", "num_val_windows": 2},
+ high_quality={"hyperparameters": "default"},
+ medium_quality={"hyperparameters": "light"},
+ fast_training={"hyperparameters": "very_light"},
+ # Chronos-Bolt models
+ bolt_tiny={
+ "hyperparameters": {"Chronos": {"model_path": "bolt_tiny"}},
+ "skip_model_selection": True,
+ },
+ bolt_mini={
+ "hyperparameters": {"Chronos": {"model_path": "bolt_mini"}},
+ "skip_model_selection": True,
+ },
+ bolt_small={
+ "hyperparameters": {"Chronos": {"model_path": "bolt_small"}},
+ "skip_model_selection": True,
+ },
+ bolt_base={
+ "hyperparameters": {"Chronos": {"model_path": "bolt_base"}},
+ "skip_model_selection": True,
+ },
+ # Original Chronos models
+ chronos_tiny={
+ "hyperparameters": {"Chronos": {"model_path": "tiny"}},
+ "skip_model_selection": True,
+ },
+ chronos_mini={
+ "hyperparameters": {"Chronos": {"model_path": "mini"}},
+ "skip_model_selection": True,
+ },
+ chronos_small={
+ "hyperparameters": {"Chronos": {"model_path": "small"}},
+ "skip_model_selection": True,
+ },
+ chronos_base={
+ "hyperparameters": {"Chronos": {"model_path": "base"}},
+ "skip_model_selection": True,
+ },
+ chronos_large={
+ "hyperparameters": {"Chronos": {"model_path": "large", "batch_size": 8}},
+ "skip_model_selection": True,
+ },
+ chronos_ensemble={
+ "hyperparameters": {
+ "Chronos": {"model_path": "small"},
+ **hp_presets["light_inference"],
+ }
+ },
+ chronos_large_ensemble={
+ "hyperparameters": {
+ "Chronos": {"model_path": "large", "batch_size": 8},
+ **hp_presets["light_inference"],
+ }
+ },
+ )
+
+ # update with aliases
+ predictor_presets = {
+ **predictor_presets,
+ **{k: predictor_presets[v].copy() for k, v in TIMESERIES_PRESETS_ALIASES.items()},
+ }
+
+ return predictor_presets
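These predictor-level presets (and the aliases in `TIMESERIES_PRESETS_ALIASES`) back the `presets` argument of `TimeSeriesPredictor.fit`. A small sketch based only on the new module added in this diff; since the module is internal, the import path may change between releases:

    # Sketch: inspect how preset names and aliases resolve to fit() configurations.
    from autogluon.timeseries.configs import get_predictor_presets

    presets = get_predictor_presets()
    print(presets["bolt_small"])  # {"hyperparameters": {"Chronos": {"model_path": "bolt_small"}}, "skip_model_selection": True}
    print(presets["hq"] == presets["high_quality"])  # True -- "hq" is a copied alias of "high_quality"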
@@ -7,7 +7,7 @@ import reprlib
  from collections.abc import Iterable
  from itertools import islice
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, Union, overload
+ from typing import TYPE_CHECKING, Any, Optional, Type, Union, overload

  import numpy as np
  import pandas as pd
@@ -118,7 +118,7 @@ class TimeSeriesDataFrame(pd.DataFrame):

  """

- index: pd.MultiIndex
+ index: pd.MultiIndex # type: ignore
  _metadata = ["_static_features"]

  def __init__(
@@ -572,7 +572,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
  self.static_features = other._static_features
  return self

- def split_by_time(self, cutoff_time: pd.Timestamp) -> Tuple[TimeSeriesDataFrame, TimeSeriesDataFrame]:
+ def split_by_time(self, cutoff_time: pd.Timestamp) -> tuple[TimeSeriesDataFrame, TimeSeriesDataFrame]:
  """Split dataframe to two different ``TimeSeriesDataFrame`` s before and after a certain ``cutoff_time``.

  Parameters
@@ -900,15 +900,15 @@ class TimeSeriesDataFrame(pd.DataFrame):
  return super().sort_index(*args, **kwargs) # type: ignore

  def get_model_inputs_for_scoring(
- self, prediction_length: int, known_covariates_names: Optional[List[str]] = None
- ) -> Tuple[TimeSeriesDataFrame, Optional[TimeSeriesDataFrame]]:
+ self, prediction_length: int, known_covariates_names: Optional[list[str]] = None
+ ) -> tuple[TimeSeriesDataFrame, Optional[TimeSeriesDataFrame]]:
  """Prepare model inputs necessary to predict the last ``prediction_length`` time steps of each time series in the dataset.

  Parameters
  ----------
  prediction_length : int
  The forecast horizon, i.e., How many time steps into the future must be predicted.
- known_covariates_names : List[str], optional
+ known_covariates_names : list[str], optional
  Names of the dataframe columns that contain covariates known in the future.
  See ``known_covariates_names`` of :class:`~autogluon.timeseries.TimeSeriesPredictor` for more details.

@@ -933,7 +933,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
  prediction_length: int,
  end_index: Optional[int] = None,
  suffix: Optional[str] = None,
- ) -> Tuple[TimeSeriesDataFrame, TimeSeriesDataFrame]:
+ ) -> tuple[TimeSeriesDataFrame, TimeSeriesDataFrame]:
  """Generate a train/test split from the given dataset.

  This method can be used to generate splits for multi-window backtesting.
@@ -1083,7 +1083,7 @@ class TimeSeriesDataFrame(pd.DataFrame):
  iterable = iter(iterable)
  return iter(lambda: tuple(islice(iterable, size)), ())

- def resample_chunk(chunk: Iterable[Tuple[str, pd.DataFrame]]) -> pd.DataFrame:
+ def resample_chunk(chunk: Iterable[tuple[str, pd.DataFrame]]) -> pd.DataFrame:
  resampled_dfs = []
  for item_id, df in chunk:
  resampled_df = df.resample(offset, level=TIMESTAMP, **kwargs).agg(aggregation)
@@ -1124,8 +1124,21 @@ class TimeSeriesDataFrame(pd.DataFrame):

  @overload
  def __new__(cls, data: pd.DataFrame, static_features: Optional[pd.DataFrame] = None) -> Self: ... # type: ignore
+ @overload
+ def __new__(
+ cls,
+ data: Union[pd.DataFrame, str, Path, Iterable],
+ static_features: Optional[Union[pd.DataFrame, str, Path]] = None,
+ id_column: Optional[str] = None,
+ timestamp_column: Optional[str] = None,
+ num_cpus: int = -1,
+ *args,
+ **kwargs,
+ ) -> Self:
+ """This overload is needed since in pandas, during type checking, the default constructor resolves to __new__"""
+ ...

  @overload
- def __getitem__(self, items: List[str]) -> Self: ... # type: ignore
+ def __getitem__(self, items: list[str]) -> Self: ... # type: ignore
  @overload
  def __getitem__(self, item: str) -> pd.Series: ... # type: ignore
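The added `__new__` overload mirrors the runtime constructor signature for type checkers and does not change runtime behavior. A sketch of the call it describes, with illustrative column names:

    # Sketch: constructing a TimeSeriesDataFrame from a long-format pandas DataFrame.
    import pandas as pd
    from autogluon.timeseries import TimeSeriesDataFrame

    raw = pd.DataFrame({
        "series": ["A", "A", "A"],
        "date": pd.date_range("2024-01-01", periods=3, freq="D"),
        "target": [1.0, 2.0, 3.0],
    })
    # id_column / timestamp_column are forwarded to the constructor as in earlier releases.
    ts_df = TimeSeriesDataFrame(raw, id_column="series", timestamp_column="date")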
@@ -1,7 +1,7 @@
  import logging
  import reprlib
  import time
- from typing import Any, Dict, List, Literal, Optional, Type, Union
+ from typing import Any, Literal, Optional, Type, Union

  import pandas as pd

@@ -26,7 +26,7 @@ class TimeSeriesLearner(AbstractLearner):
  self,
  path_context: str,
  target: str = "target",
- known_covariates_names: Optional[List[str]] = None,
+ known_covariates_names: Optional[list[str]] = None,
  trainer_type: Type[TimeSeriesTrainer] = TimeSeriesTrainer,
  eval_metric: Union[str, TimeSeriesScorer, None] = None,
  prediction_length: int = 1,
@@ -56,7 +56,7 @@ class TimeSeriesLearner(AbstractLearner):
  def fit(
  self,
  train_data: TimeSeriesDataFrame,
- hyperparameters: Union[str, Dict],
+ hyperparameters: Union[str, dict],
  val_data: Optional[TimeSeriesDataFrame] = None,
  hyperparameter_tune_kwargs: Optional[Union[str, dict]] = None,
  time_limit: Optional[float] = None,
@@ -194,9 +194,9 @@ class TimeSeriesLearner(AbstractLearner):
  self,
  data: TimeSeriesDataFrame,
  model: Optional[str] = None,
- metrics: Optional[Union[str, TimeSeriesScorer, List[Union[str, TimeSeriesScorer]]]] = None,
+ metrics: Optional[Union[str, TimeSeriesScorer, list[Union[str, TimeSeriesScorer]]]] = None,
  use_cache: bool = True,
- ) -> Dict[str, float]:
+ ) -> dict[str, float]:
  data = self.feature_generator.transform(data)
  return self.load_trainer().evaluate(data=data, model=model, metrics=metrics, use_cache=use_cache)

@@ -205,7 +205,7 @@ class TimeSeriesLearner(AbstractLearner):
  data: Optional[TimeSeriesDataFrame] = None,
  model: Optional[str] = None,
  metric: Optional[Union[str, TimeSeriesScorer]] = None,
- features: Optional[List[str]] = None,
+ features: Optional[list[str]] = None,
  time_limit: Optional[float] = None,
  method: Literal["naive", "permutation"] = "permutation",
  subsample_size: int = 50,
@@ -273,7 +273,7 @@ class TimeSeriesLearner(AbstractLearner):
  self,
  data: Optional[TimeSeriesDataFrame] = None,
  extra_info: bool = False,
- extra_metrics: Optional[List[Union[str, TimeSeriesScorer]]] = None,
+ extra_metrics: Optional[list[Union[str, TimeSeriesScorer]]] = None,
  use_cache: bool = True,
  ) -> pd.DataFrame:
  if data is not None:
@@ -282,7 +282,7 @@ class TimeSeriesLearner(AbstractLearner):
  data, extra_info=extra_info, extra_metrics=extra_metrics, use_cache=use_cache
  )

- def get_info(self, include_model_info: bool = False, **kwargs) -> Dict[str, Any]:
+ def get_info(self, include_model_info: bool = False, **kwargs) -> dict[str, Any]:
  learner_info = super().get_info(include_model_info=include_model_info)
  trainer = self.load_trainer()
  trainer_info = trainer.get_info(include_model_info=include_model_info)
@@ -300,31 +300,31 @@ class TimeSeriesLearner(AbstractLearner):
  return learner_info

  def persist_trainer(
- self, models: Union[Literal["all", "best"], List[str]] = "all", with_ancestors: bool = False
- ) -> List[str]:
+ self, models: Union[Literal["all", "best"], list[str]] = "all", with_ancestors: bool = False
+ ) -> list[str]:
  """Loads models and trainer in memory so that they don't have to be
  loaded during predictions

  Returns
  -------
- list_of_models : List[str]
+ list_of_models
  List of models persisted in memory
  """
  self.trainer = self.load_trainer()
  return self.trainer.persist(models, with_ancestors=with_ancestors)

- def unpersist_trainer(self) -> List[str]:
+ def unpersist_trainer(self) -> list[str]:
  """Unloads models and trainer from memory. Models will have to be reloaded from disk
  when predicting.

  Returns
  -------
- list_of_models : List[str]
+ list_of_models
  List of models removed from memory
  """
  unpersisted_models = self.load_trainer().unpersist()
  self.trainer = None # type: ignore
  return unpersisted_models

- def refit_full(self, model: str = "all") -> Dict[str, str]:
+ def refit_full(self, model: str = "all") -> dict[str, str]:
  return self.load_trainer().refit_full(model=model)
@@ -1,7 +1,7 @@
  from __future__ import annotations

  from pprint import pformat
- from typing import Any, Dict, Optional, Sequence, Type, Union
+ from typing import Any, Optional, Sequence, Type, Union

  import numpy as np

@@ -28,7 +28,7 @@ __all__ = [

  DEFAULT_METRIC_NAME = "WQL"

- AVAILABLE_METRICS: Dict[str, Type[TimeSeriesScorer]] = {
+ AVAILABLE_METRICS: dict[str, Type[TimeSeriesScorer]] = {
  "MASE": MASE,
  "MAPE": MAPE,
  "SMAPE": SMAPE,
@@ -48,7 +48,7 @@ DEPRECATED_METRICS = {
  }

  # Experimental metrics that are not yet user facing
- EXPERIMENTAL_METRICS: Dict[str, Type[TimeSeriesScorer]] = {
+ EXPERIMENTAL_METRICS: dict[str, Type[TimeSeriesScorer]] = {
  "WCD": WCD,
  }

@@ -63,7 +63,7 @@ def check_get_evaluation_metric(

  Returns
  -------
- scorer :
+ scorer
  A `TimeSeriesScorer` object based on the provided `eval_metric`.

  `scorer.prediction_length` is always set to the `prediction_length` provided to this method.
@@ -75,7 +75,7 @@ def check_get_evaluation_metric(
  value of `horizon_weight` is kept.
  """
  scorer: TimeSeriesScorer
- metric_kwargs: Dict[str, Any] = dict(
+ metric_kwargs: dict[str, Any] = dict(
  prediction_length=prediction_length, seasonal_period=seasonal_period, horizon_weight=horizon_weight
  )
  if isinstance(eval_metric, TimeSeriesScorer):
@@ -1,8 +1,7 @@
  import warnings
- from typing import Optional, Sequence, Tuple, Union, overload
+ from typing import Optional, Sequence, Union, overload

  import numpy as np
- import numpy.typing as npt
  import pandas as pd

  from autogluon.timeseries import TimeSeriesDataFrame
@@ -200,14 +199,14 @@ class TimeSeriesScorer:
  @staticmethod
  def _get_point_forecast_score_inputs(
  data_future: TimeSeriesDataFrame, predictions: TimeSeriesDataFrame, target: str = "target"
- ) -> Tuple[pd.Series, pd.Series]:
+ ) -> tuple[pd.Series, pd.Series]:
  """Get inputs necessary to compute point forecast metrics.

  Returns
  -------
- y_true : pd.Series, shape [num_items * prediction_length]
+ y_true
  Target time series values during the forecast horizon.
- y_pred : pd.Series, shape [num_items * prediction_length]
+ y_pred
  Predicted time series values during the forecast horizon.
  """
  y_true = data_future[target]
@@ -217,16 +216,16 @@ class TimeSeriesScorer:
  @staticmethod
  def _get_quantile_forecast_score_inputs(
  data_future: TimeSeriesDataFrame, predictions: TimeSeriesDataFrame, target: str = "target"
- ) -> Tuple[pd.Series, pd.DataFrame, np.ndarray]:
+ ) -> tuple[pd.Series, pd.DataFrame, np.ndarray]:
  """Get inputs necessary to compute quantile forecast metrics.

  Returns
  -------
- y_true : pd.Series, shape [num_items * prediction_length]
+ y_true
  Target time series values during the forecast horizon.
- q_pred : pd.DataFrame, shape [num_items * prediction_length, num_quantiles]
+ q_pred
  Quantile forecast for each predicted quantile level. Column order corresponds to ``quantile_levels``.
- quantile_levels : np.ndarray, shape [num_quantiles]
+ quantile_levels
  Quantile levels for which the forecasts are generated (as floats).
  """
  quantile_columns = [col for col in predictions.columns if col != "mean"]
@@ -242,18 +241,18 @@ class TimeSeriesScorer:
  @staticmethod
  def check_get_horizon_weight(
  horizon_weight: Union[Sequence[float], np.ndarray], prediction_length: int
- ) -> npt.NDArray[np.float64]: ...
+ ) -> np.ndarray: ...

  @staticmethod
  def check_get_horizon_weight(
  horizon_weight: Union[Sequence[float], np.ndarray, None], prediction_length: int
- ) -> Optional[npt.NDArray[np.float64]]:
+ ) -> Optional[np.ndarray]:
  """Convert horizon_weight to a non-negative numpy array that sums up to prediction_length.
  Raises an exception if horizon_weight has an invalid shape or contains invalid values.

  Returns
  -------
- horizon_weight:
+ horizon_weight
  None if the input is None, otherwise a numpy array of shape [1, prediction_length].
  """
  if horizon_weight is None:
@@ -27,6 +27,7 @@ from .local import (
  ThetaModel,
  ZeroModel,
  )
+ from .registry import ModelRegistry

  __all__ = [
  "ADIDAModel",
@@ -43,6 +44,7 @@ __all__ = [
  "ETSModel",
  "IMAPAModel",
  "ChronosModel",
+ "ModelRegistry",
  "NPTSModel",
  "NaiveModel",
  "PatchTSTModel",