autogluon.timeseries 1.2.1b20250218-py3-none-any.whl → 1.2.1b20250220-py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -17,14 +17,6 @@ from autogluon.timeseries.utils.datetime import (
  # NOTE: We avoid imports for torch and lightning.pytorch at the top level and hide them inside class methods.
  # This is done to skip these imports during multiprocessing (which may cause bugs)
 
- # FIXME: introduces cpflows dependency. We exclude this model until a future release.
- # from gluonts.torch.model.mqf2 import MQF2MultiHorizonEstimator
-
- # FIXME: DeepNPTS does not implement the GluonTS PyTorch API, and does not use
- # PyTorch Lightning. We exclude this model until a future release.
- # from gluonts.torch.model.deep_npts import DeepNPTSEstimator
-
-
  logger = logging.getLogger(__name__)
 
 
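The NOTE kept as context above describes the deferred-import pattern these models rely on: torch and lightning.pytorch are imported inside methods rather than at module level, so worker processes that merely import this module never load them. A rough illustration of that pattern (class and method names below are illustrative, not the actual code in this file):

import logging

logger = logging.getLogger(__name__)


class LazyTorchModel:
    """Illustrative only: heavy frameworks are imported where they are used."""

    def _train(self):
        # Deferred imports: loaded only when training actually starts, so that
        # multiprocessing workers importing this module skip torch/lightning.
        import lightning.pytorch as pl
        import torch

        return torch.nn.Linear(8, 1), pl.Trainer(max_epochs=1)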
@@ -63,8 +55,8 @@ class DeepARModel(AbstractGluonTSModel):
      (if None, defaults to [min(50, (cat+1)//2) for cat in cardinality])
  max_cat_cardinality : int, default = 100
      Maximum number of dimensions to use when one-hot-encoding categorical known_covariates.
- distr_output : gluonts.torch.distributions.DistributionOutput, default = StudentTOutput()
-     Distribution to use to evaluate observations and sample predictions
+ distr_output : gluonts.torch.distributions.Output, default = StudentTOutput()
+     Distribution output object that defines how the model output is converted to a forecast, and how the loss is computed.
  scaling: bool, default = True
      If True, mean absolute scaling will be applied to each *context window* during training & prediction.
      Note that this is different from the `target_scaler` that is applied to the *entire time series*.
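The updated docstring reflects that `distr_output` accepts any `gluonts.torch.distributions.Output` object, which controls both how the network output is turned into a forecast and how the training loss is computed. A minimal usage sketch, not part of this diff: the synthetic dataset and the choice of `NegativeBinomialOutput` are assumptions made for illustration, with the option passed through AutoGluon's per-model hyperparameters.

import pandas as pd
from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor
from gluonts.torch.distributions import NegativeBinomialOutput

# Tiny synthetic dataset: one item, hourly timestamps, non-negative integer
# targets for which a count distribution is a natural choice.
df = pd.DataFrame({
    "item_id": "A",
    "timestamp": pd.date_range("2024-01-01", periods=200, freq="h"),
    "target": [i % 10 for i in range(200)],
})
train_data = TimeSeriesDataFrame.from_data_frame(df)

predictor = TimeSeriesPredictor(prediction_length=24)
predictor.fit(
    train_data,
    # Any gluonts.torch.distributions.Output object should be accepted here.
    hyperparameters={"DeepAR": {"distr_output": NegativeBinomialOutput()}},
)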
@@ -120,8 +112,8 @@ class SimpleFeedForwardModel(AbstractGluonTSModel):
      Number of time units that condition the predictions
  hidden_dimensions: List[int], default = [20, 20]
      Size of hidden layers in the feedforward network
- distr_output : gluonts.torch.distributions.DistributionOutput, default = StudentTOutput()
-     Distribution to fit.
+ distr_output : gluonts.torch.distributions.Output, default = StudentTOutput()
+     Distribution output object that defines how the model output is converted to a forecast, and how the loss is computed.
  batch_normalization : bool, default = False
      Whether to use batch normalization
  mean_scaling : bool, default = True
@@ -169,6 +161,8 @@ class TemporalFusionTransformerModel(AbstractGluonTSModel):
  ----------------
  context_length : int, default = max(64, 2 * prediction_length)
      Number of past values used for prediction.
+ distr_output : gluonts.torch.distributions.Output, default = QuantileOutput()
+     Distribution output object that defines how the model output is converted to a forecast, and how the loss is computed.
  disable_static_features : bool, default = False
      If True, static features won't be used by the model even if they are present in the dataset.
      If False, static features will be used by the model if they are present in the dataset.
@@ -235,6 +229,10 @@ class TemporalFusionTransformerModel(AbstractGluonTSModel):
          init_kwargs["past_dynamic_cardinalities"] = self.past_feat_dynamic_cat_cardinality
 
      init_kwargs.setdefault("time_features", get_time_features_for_frequency(self.freq))
+
+     # 'distr_output' and 'quantiles' shouldn't be included at the same time (otherwise an exception will be raised)
+     if "distr_output" in init_kwargs:
+         init_kwargs.pop("quantiles", None)
      return init_kwargs
 
 
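The guard added above means that when `distr_output` is supplied for the TFT model, any `quantiles` entry is dropped from the estimator kwargs rather than forwarded alongside it, since, per the comment, passing both would raise an exception. A small sketch of how this surfaces to users, reusing the `train_data` from the earlier DeepAR sketch (again an assumption for illustration, not part of the diff):

from autogluon.timeseries import TimeSeriesPredictor
from gluonts.torch.distributions import StudentTOutput

predictor = TimeSeriesPredictor(prediction_length=24)
predictor.fit(
    train_data,
    hyperparameters={
        "TemporalFusionTransformer": {
            # Overrides the default QuantileOutput; with the change above, a
            # "quantiles" entry would be removed from the estimator kwargs so
            # GluonTS never receives both arguments at once.
            "distr_output": StudentTOutput(),
        },
    },
)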
@@ -256,8 +254,8 @@ class DLinearModel(AbstractGluonTSModel):
      Number of time units that condition the predictions
  hidden_dimension: int, default = 20
      Size of hidden layers in the feedforward network
- distr_output : gluonts.torch.distributions.DistributionOutput, default = StudentTOutput()
-     Distribution to fit.
+ distr_output : gluonts.torch.distributions.Output, default = StudentTOutput()
+     Distribution output object that defines how the model output is converted to a forecast, and how the loss is computed.
  scaling : {"mean", "std", None}, default = "mean"
      Scaling applied to each *context window* during training & prediction.
      One of ``"mean"`` (mean absolute scaling), ``"std"`` (standardization), ``None`` (no scaling).
@@ -320,8 +318,8 @@ class PatchTSTModel(AbstractGluonTSModel):
      Number of attention heads in the Transformer encoder which must divide d_model.
  num_encoder_layers : int, default = 2
      Number of layers in the Transformer encoder.
- distr_output : gluonts.torch.distributions.DistributionOutput, default = StudentTOutput()
-     Distribution to fit.
+ distr_output : gluonts.torch.distributions.Output, default = StudentTOutput()
+     Distribution output object that defines how the model output is converted to a forecast, and how the loss is computed.
  scaling : {"mean", "std", None}, default = "mean"
      Scaling applied to each *context window* during training & prediction.
      One of ``"mean"`` (mean absolute scaling), ``"std"`` (standardization), ``None`` (no scaling).
@@ -1,4 +1,4 @@
  """This is the autogluon version file."""
 
- __version__ = "1.2.1b20250218"
+ __version__ = "1.2.1b20250220"
  __lite__ = False
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: autogluon.timeseries
- Version: 1.2.1b20250218
+ Version: 1.2.1b20250220
  Summary: Fast and Accurate ML in 3 Lines of Code
  Home-page: https://github.com/autogluon/autogluon
  Author: AutoGluon Community
@@ -55,9 +55,9 @@ Requires-Dist: fugue>=0.9.0
  Requires-Dist: tqdm<5,>=4.38
  Requires-Dist: orjson~=3.9
  Requires-Dist: tensorboard<3,>=2.9
- Requires-Dist: autogluon.core[raytune]==1.2.1b20250218
- Requires-Dist: autogluon.common==1.2.1b20250218
- Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250218
+ Requires-Dist: autogluon.core[raytune]==1.2.1b20250220
+ Requires-Dist: autogluon.common==1.2.1b20250220
+ Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250220
  Provides-Extra: all
  Provides-Extra: chronos-onnx
  Requires-Dist: optimum[onnxruntime]<1.20,>=1.17; extra == "chronos-onnx"
@@ -1,4 +1,4 @@
- autogluon.timeseries-1.2.1b20250218-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
+ autogluon.timeseries-1.2.1b20250220-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
  autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
  autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
  autogluon/timeseries/learner.py,sha256=PDAHFlos6q5JukwRE86tKoH0zxYf3nLzy7qfD_a5NYY,13849
@@ -6,7 +6,7 @@ autogluon/timeseries/predictor.py,sha256=HTE8a_R_9U0z-KlxyoELm-64BXNRzFu3mIEbTab
  autogluon/timeseries/regressor.py,sha256=dIXttb0SOGS8IAwZOMANNDc796spN0LMysGUvuKgskU,9623
  autogluon/timeseries/splitter.py,sha256=yzPca9p2bWV-_VJAptUyyzQsxu-uixAdpMoGQtDzMD4,3205
  autogluon/timeseries/trainer.py,sha256=L9FT5qERcqlWTgH9IgE6QsO0aBNj2nivRKF2Oy4UJOk,57250
- autogluon/timeseries/version.py,sha256=7H9aiBFc3yZ4VAfYTXCxWQ-TNBEjD1nB5tqyO9XpoOs,91
+ autogluon/timeseries/version.py,sha256=5_5isKRJ9Kd7emAdEPGzl72mtp4GDkqLCtc4C6IuV6k,91
  autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
  autogluon/timeseries/configs/presets_configs.py,sha256=cLat8ecLlWrI-SC5KLBDCX2SbVXaucemy2pjxJAtSY0,2543
  autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
@@ -38,7 +38,7 @@ autogluon/timeseries/models/ensemble/greedy_ensemble.py,sha256=UPEmNx-RSuqCXS7V0
  autogluon/timeseries/models/gluonts/__init__.py,sha256=asC1PTj4j9xMbilvk1IT1julnpeoKbv5ZNuAR6-DFgA,361
  autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=brf2lIMHH4a_AETwyOcOBVPWqWhLxr8iolJ3Z5AR8MA,30621
  autogluon/timeseries/models/gluonts/torch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- autogluon/timeseries/models/gluonts/torch/models.py,sha256=zTl5vzwamUuR00KszT7e56w2XxapufydsQMHOasxLyc,25318
+ autogluon/timeseries/models/gluonts/torch/models.py,sha256=xlhblFM4DG_GQVj_4dxqWntLh1fPMyRF63eXPs_Z098,25672
  autogluon/timeseries/models/local/__init__.py,sha256=e2UImoJhmj70E148IIObv90C_bHxgyLNk6YsS4p7pfs,701
  autogluon/timeseries/models/local/abstract_local_model.py,sha256=CYDvOXs7ZNzyz75gMOAKI1socB_qGep51FSPfzXMopA,11948
  autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F_VAp6-jb_5SUE,7249
@@ -58,11 +58,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
  autogluon/timeseries/utils/datetime/lags.py,sha256=gQDk5_zmsY5DUWDUpSaCKYkQ9nHKKY-LsywJQRAoYSk,5988
  autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
  autogluon/timeseries/utils/datetime/time_features.py,sha256=MjLi3zQ00uWWJtXH9oGX2GJkTbvjdSiuabSa4kcVuxE,2672
- autogluon.timeseries-1.2.1b20250218.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
- autogluon.timeseries-1.2.1b20250218.dist-info/METADATA,sha256=6Sn8Fh6lrl8zrz7eB1R0bWnV-PipqjmZLt7h_sGMhOM,12684
- autogluon.timeseries-1.2.1b20250218.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
- autogluon.timeseries-1.2.1b20250218.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- autogluon.timeseries-1.2.1b20250218.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
- autogluon.timeseries-1.2.1b20250218.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
- autogluon.timeseries-1.2.1b20250218.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- autogluon.timeseries-1.2.1b20250218.dist-info/RECORD,,
+ autogluon.timeseries-1.2.1b20250220.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+ autogluon.timeseries-1.2.1b20250220.dist-info/METADATA,sha256=Y8IV8UJYEepmgmBT_BjQ7Vh4ZLW3yExUzcQdtyVGSdk,12684
+ autogluon.timeseries-1.2.1b20250220.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+ autogluon.timeseries-1.2.1b20250220.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ autogluon.timeseries-1.2.1b20250220.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+ autogluon.timeseries-1.2.1b20250220.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+ autogluon.timeseries-1.2.1b20250220.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ autogluon.timeseries-1.2.1b20250220.dist-info/RECORD,,