autogluon.timeseries 1.1.0b20240414__py3-none-any.whl → 1.1.0b20240416__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of autogluon.timeseries has been flagged as potentially problematic; see the package's registry page for more details.

@@ -33,13 +33,13 @@ TIMESERIES_PRESETS_CONFIGS = dict(
33
33
  chronos_ensemble={
34
34
  "hyperparameters": {
35
35
  "Chronos": {"model_path": "small"},
36
- **get_default_hps("default"),
36
+ **get_default_hps("light_inference"),
37
37
  }
38
38
  },
39
39
  chronos_large_ensemble={
40
40
  "hyperparameters": {
41
41
  "Chronos": {"model_path": "large", "batch_size": 8},
42
- **get_default_hps("default"),
42
+ **get_default_hps("light_inference"),
43
43
  }
44
44
  },
45
45
  )
@@ -257,6 +257,10 @@ class AbstractMLForecastModel(AbstractTimeSeriesModel):
257
257
  if not df[col].isin([0, 1]).all():
258
258
  df[f"__scaled_{col}"] = df[col] / df[col].abs().groupby(df[ITEMID]).mean().reindex(df[ITEMID]).values
259
259
 
260
+ # Convert float64 to float32 to reduce memory usage
261
+ float64_cols = list(df.select_dtypes(include="float64"))
262
+ df[float64_cols] = df[float64_cols].astype("float32")
263
+
260
264
  # We assume that df is sorted by 'unique_id' inside `TimeSeriesPredictor._check_and_prepare_data_frame`
261
265
  return df.rename(columns=column_name_mapping)
262
266
 
@@ -230,8 +230,6 @@ def seasonal_naive_forecast(
230
230
  return arr[np.maximum.accumulate(idx)]
231
231
 
232
232
  forecast = {}
233
- # Convert to float64 since std computation can be unstable in float32
234
- target = target.astype(np.float64)
235
233
  # At least seasonal_period + 2 values are required to compute sigma for seasonal naive
236
234
  if len(target) > seasonal_period + 1 and seasonal_period > 1:
237
235
  if np.isnan(target[-(seasonal_period + 2) :]).any():
@@ -79,21 +79,23 @@ DEFAULT_MODEL_PRIORITY = dict(
79
79
  Average=100,
80
80
  SeasonalAverage=100,
81
81
  Zero=100,
82
- NPTS=90,
83
- ETS=90,
84
- CrostonSBA=90,
85
- Theta=80,
86
- DynamicOptimizedTheta=80,
87
- AutoETS=80,
88
- AutoARIMA=70,
89
- RecursiveTabular=60,
90
- Chronos=50,
91
- DirectTabular=50,
82
+ RecursiveTabular=90,
83
+ DirectTabular=85,
84
+ # All local models are grouped together to make sure that joblib parallel pool is reused
85
+ NPTS=80,
86
+ ETS=80,
87
+ CrostonSBA=80,
88
+ Theta=75,
89
+ DynamicOptimizedTheta=75,
90
+ AutoETS=70,
91
+ AutoARIMA=60,
92
+ Chronos=55,
93
+ # Models that can early stop are trained at the end
94
+ TemporalFusionTransformer=45,
92
95
  DeepAR=40,
93
- TemporalFusionTransformer=30,
94
- WaveNet=25,
95
- PatchTST=20,
96
+ PatchTST=30,
96
97
  # Models below are not included in any presets
98
+ WaveNet=25,
97
99
  AutoCES=10,
98
100
  ARIMA=10,
99
101
  ADIDA=10,
@@ -128,6 +130,13 @@ def get_default_hps(key):
128
130
  "DirectTabular": {},
129
131
  "TemporalFusionTransformer": {},
130
132
  },
133
+ "light_inference": {
134
+ "SeasonalNaive": {},
135
+ "DirectTabular": {},
136
+ "RecursiveTabular": {},
137
+ "TemporalFusionTransformer": {},
138
+ "PatchTST": {},
139
+ },
131
140
  "default": {
132
141
  "SeasonalNaive": {},
133
142
  "CrostonSBA": {},
@@ -143,6 +152,7 @@ def get_default_hps(key):
143
152
  "TemporalFusionTransformer": {},
144
153
  "PatchTST": {},
145
154
  "DeepAR": {},
155
+ "Chronos": {"model_path": "base"},
146
156
  },
147
157
  }
148
158
  return default_model_hps[key]
@@ -293,7 +293,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
293
293
  Preprocessed data in TimeSeriesDataFrame format.
294
294
  """
295
295
  df = self._to_data_frame(data, name=name)
296
- df = df.astype({self.target: "float32"})
296
+ df = df.astype({self.target: "float64"})
297
297
  # MultiIndex.is_monotonic_increasing checks if index is sorted by ["item_id", "timestamp"]
298
298
  if not df.index.is_monotonic_increasing:
299
299
  df = df.sort_index()
@@ -499,7 +499,7 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
499
499
  with reasonable training time.
500
500
  - ``"high_quality"``: All ML models available in AutoGluon + additional statistical models (``NPTS``, ``AutoETS``, ``AutoARIMA``, ``CrostonSBA``,
501
501
  ``DynamicOptimizedTheta``). Much more accurate than ``medium_quality``, but takes longer to train.
502
- - ``"best_quality"``: Same models as in ``"high_quality"`, but performs validation with multiple backtests. Usually better than ``high_quality``, but takes even longer to train.
502
+ - ``"best_quality"``: Same models as in ``"high_quality"``, but performs validation with multiple backtests. Usually better than ``high_quality``, but takes even longer to train.
503
503
 
504
504
  Available presets with the `Chronos <https://github.com/amazon-science/chronos-forecasting>`_ model:
505
505
 
@@ -507,8 +507,10 @@ class TimeSeriesPredictor(TimeSeriesPredictorDeprecatedMixin):
507
507
  See the documentation for ``ChronosModel`` or see `Hugging Face <https://huggingface.co/collections/amazon/chronos-models-65f1791d630a8d57cb718444>`_ for more information.
508
508
  Note that a GPU is required for model sizes ``small``, ``base`` and ``large``.
509
509
  - ``"chronos"``: alias for ``"chronos_small"``.
510
- - ``"chronos_ensemble"``: builds an ensemble of the models specified in ``"high_quality"`` and ``"chronos_small"``.
511
- - ``"chronos_large_ensemble"``: builds an ensemble of the models specified in ``"high_quality"`` and ``"chronos_large"``.
510
+ - ``"chronos_ensemble"``: builds an ensemble of seasonal naive, tree-based and deep learning models with fast inference
511
+ and ``"chronos_small"``.
512
+ - ``"chronos_large_ensemble"``: builds an ensemble of seasonal naive, tree-based and deep learning models
513
+ with fast inference and ``"chronos_large"``.
512
514
 
513
515
  Details for these presets can be found in ``autogluon/timeseries/configs/presets_configs.py``. If not
514
516
  provided, user-provided values for ``hyperparameters`` and ``hyperparameter_tune_kwargs`` will be used
@@ -613,7 +613,7 @@ class AbstractTimeSeriesTrainer(SimpleAbstractTrainer):
613
613
  break
614
614
 
615
615
  if random_seed is not None:
616
- seed_everything(random_seed)
616
+ seed_everything(random_seed + i)
617
617
 
618
618
  if contains_searchspace(model.get_user_params()):
619
619
  fit_log_message = f"Hyperparameter tuning model {model.name}. "
@@ -73,7 +73,7 @@ class ContinuousAndCategoricalFeatureGenerator(PipelineFeatureGenerator):
73
73
  Imputes missing categorical features with the most frequent value in the training set.
74
74
  """
75
75
 
76
- def __init__(self, verbosity: int = 0, minimum_cat_count=2, float_dtype: str = "float32", **kwargs):
76
+ def __init__(self, verbosity: int = 0, minimum_cat_count=2, float_dtype: str = "float64", **kwargs):
77
77
  generators = [
78
78
  CategoryFeatureGenerator(minimum_cat_count=minimum_cat_count, fillna="mode"),
79
79
  IdentityFeatureGenerator(infer_features_in_args={"valid_raw_types": [R_INT, R_FLOAT]}),
@@ -111,7 +111,7 @@ class ContinuousAndCategoricalFeatureGenerator(PipelineFeatureGenerator):
111
111
  class TimeSeriesFeatureGenerator:
112
112
  """Takes care of preprocessing for static_features and past/known covariates.
113
113
 
114
- All covariates & static features are converted into either float32 or categorical dtype.
114
+ All covariates & static features are converted into either float64 or categorical dtype.
115
115
 
116
116
  Missing values in the target column are left as-is but missing values in static features & covariates are imputed.
117
117
  Imputation logic is as follows:
@@ -121,16 +121,18 @@ class TimeSeriesFeatureGenerator:
121
121
  covariate values are missing, we fill them with the median of the training set.
122
122
  """
123
123
 
124
- def __init__(self, target: str, known_covariates_names: List[str], float_dtype: str = "float32"):
124
+ def __init__(self, target: str, known_covariates_names: List[str], float_dtype: str = "float64"):
125
125
  self.target = target
126
126
  self.float_dtype = float_dtype
127
127
  self._is_fit = False
128
128
  self.known_covariates_names = list(known_covariates_names)
129
129
  self.past_covariates_names = []
130
- self.known_covariates_pipeline = ContinuousAndCategoricalFeatureGenerator()
131
- self.past_covariates_pipeline = ContinuousAndCategoricalFeatureGenerator()
130
+ self.known_covariates_pipeline = ContinuousAndCategoricalFeatureGenerator(float_dtype=float_dtype)
131
+ self.past_covariates_pipeline = ContinuousAndCategoricalFeatureGenerator(float_dtype=float_dtype)
132
132
  # Cat features with cat_count=1 are fine in static_features since they are repeated for all time steps in a TS
133
- self.static_feature_pipeline = ContinuousAndCategoricalFeatureGenerator(minimum_cat_count=1)
133
+ self.static_feature_pipeline = ContinuousAndCategoricalFeatureGenerator(
134
+ minimum_cat_count=1, float_dtype=float_dtype
135
+ )
134
136
  self.covariate_metadata: CovariateMetadata = None
135
137
  self._train_covariates_real_median: Optional[pd.Series] = None
136
138
  self._train_static_real_median: Optional[pd.Series] = None
@@ -1,3 +1,3 @@
1
1
  """This is the autogluon version file."""
2
- __version__ = '1.1.0b20240414'
2
+ __version__ = '1.1.0b20240416'
3
3
  __lite__ = False
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: autogluon.timeseries
3
- Version: 1.1.0b20240414
3
+ Version: 1.1.0b20240416
4
4
  Summary: Fast and Accurate ML in 3 Lines of Code
5
5
  Home-page: https://github.com/autogluon/autogluon
6
6
  Author: AutoGluon Community
@@ -51,9 +51,9 @@ Requires-Dist: utilsforecast <0.0.11,>=0.0.10
51
51
  Requires-Dist: tqdm <5,>=4.38
52
52
  Requires-Dist: orjson ~=3.9
53
53
  Requires-Dist: tensorboard <3,>=2.9
54
- Requires-Dist: autogluon.core[raytune] ==1.1.0b20240414
55
- Requires-Dist: autogluon.common ==1.1.0b20240414
56
- Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost] ==1.1.0b20240414
54
+ Requires-Dist: autogluon.core[raytune] ==1.1.0b20240416
55
+ Requires-Dist: autogluon.common ==1.1.0b20240416
56
+ Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost] ==1.1.0b20240416
57
57
  Provides-Extra: all
58
58
  Requires-Dist: optimum[onnxruntime] <1.19,>=1.17 ; extra == 'all'
59
59
  Provides-Extra: chronos-onnx
@@ -1,12 +1,12 @@
1
- autogluon.timeseries-1.1.0b20240414-py3.8-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
1
+ autogluon.timeseries-1.1.0b20240416-py3.8-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
2
2
  autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
3
3
  autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
4
4
  autogluon/timeseries/learner.py,sha256=IYXpJSDyTzjZXjKL_SrTujt5Uke83mSJFA0sMj25_sM,13828
5
- autogluon/timeseries/predictor.py,sha256=9PGGTbmYfkl3JQ2n-9T0Ucyfdiyq6gX9WhMDZ1cM-9U,81823
5
+ autogluon/timeseries/predictor.py,sha256=ZwJ_ux4NgsVcUvO-t0hg5ftNsZt8BB8T0rnpkKC1Yv4,81910
6
6
  autogluon/timeseries/splitter.py,sha256=eghGwAAN2_cxGk5aJBILgjGWtLzjxJcytMy49gg_q18,3061
7
- autogluon/timeseries/version.py,sha256=Poy70LftZj-bttK37JGpP5-9yPZZyFx8O_OxUewqYXk,90
7
+ autogluon/timeseries/version.py,sha256=_IS4Ff7CNcJmTmqz6Q5eS4OFe9t-HGpvb--94UOdlPs,90
8
8
  autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
9
- autogluon/timeseries/configs/presets_configs.py,sha256=ZVV8BsnGnnHPgjBtJBqF-H35MYUdzRBQ8FP7zA3_11g,1949
9
+ autogluon/timeseries/configs/presets_configs.py,sha256=94-yL9teDHKs2irWjP3kpewI7FE1ChYCgEgz9XHJ6gc,1965
10
10
  autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
11
11
  autogluon/timeseries/dataset/ts_dataframe.py,sha256=QgCwzRx10CRzeWzQREX_zUDAl77-lkdNjM0bT3kK8NU,45595
12
12
  autogluon/timeseries/metrics/__init__.py,sha256=KzgXNj5or7RB_uadjgC8p5gxyV26zjj2hT58OmvnfmA,1875
@@ -15,12 +15,12 @@ autogluon/timeseries/metrics/point.py,sha256=xy8sKrBbuxZ7yTW21TDPayKnEj2FBj1AEse
15
15
  autogluon/timeseries/metrics/quantile.py,sha256=owMbOAJYwVyzdRkrJpuCGUXk937GU843QndCZyp5n9Y,3967
16
16
  autogluon/timeseries/metrics/utils.py,sha256=eJ63TCR-UwbeJ1c2Qm7B2q-8B3sFthPgiooEccrf2Kc,912
17
17
  autogluon/timeseries/models/__init__.py,sha256=HFjDOYKQWaGlgQWiLlOvfwE2dH0uDmeKJFC8GDL987c,1271
18
- autogluon/timeseries/models/presets.py,sha256=p36ROcuOnixgGsI1zBdr9VM-MH2pKCiJCS2Ofb4xT8o,11243
18
+ autogluon/timeseries/models/presets.py,sha256=W9H61MefSbxE-woh3ysgzTpc8eML40aa2l8q-AALBo0,11654
19
19
  autogluon/timeseries/models/abstract/__init__.py,sha256=wvDsQAZIV0N3AwBeMaGItoQ82trEfnT-nol2AAOIxBg,102
20
20
  autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=MvLF529b3fo0icgle-qmS0oce-ftiiQ1jPBLnY-39fk,23435
21
21
  autogluon/timeseries/models/abstract/model_trial.py,sha256=ENPg_7nsdxIvaNM0o0UShZ3x8jFlRmwRc5m0fGPC0TM,3720
22
22
  autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=r9i6jWcyeLHYClkcMSKRVsfrkBUMxpDrTATNTBc_qgQ,136
23
- autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=v-0-kTlWSm2BCRT60JvroKtoQefSDEsvtubqlRq3pmQ,31303
23
+ autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=4WbvCgfUCKbg7J5OJisSQK4LMiz8PtTnxU7nkGosmGY,31491
24
24
  autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=4-gTrBtizxeMVQlsuscugPqw9unaXWXhS1TVVssfzYY,2125
25
25
  autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
26
26
  autogluon/timeseries/models/chronos/model.py,sha256=0ZxOpGyx7MmXYDr9zeDt6-rIu50Bm7ssR9zTIvd6vmQ,14659
@@ -34,17 +34,17 @@ autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=X4KChuSVSoxLOcrto
34
34
  autogluon/timeseries/models/gluonts/torch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
35
  autogluon/timeseries/models/gluonts/torch/models.py,sha256=Eo_AI5bWpHx3_05lnates4tnessBrUrVkUAyGoAb0zk,19780
36
36
  autogluon/timeseries/models/local/__init__.py,sha256=JyckWWgMG1BTIWJqFTW6e1O-eb0LPPOwtXwmb1ErohQ,756
37
- autogluon/timeseries/models/local/abstract_local_model.py,sha256=5wvwt7d99kw-PTDnuT45uoCeXk6POjUArCAwUj8mSok,11836
37
+ autogluon/timeseries/models/local/abstract_local_model.py,sha256=JfjDXOSBgD_10JrIq5nWS038-4moRNI0001BLta8nRs,11723
38
38
  autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F_VAp6-jb_5SUE,7249
39
39
  autogluon/timeseries/models/local/npts.py,sha256=Bp74doKnfpGE8ywP4FWOCI_RwRMsmgocYDfGtq764DA,4143
40
40
  autogluon/timeseries/models/local/statsforecast.py,sha256=oDYKKM2LZXEQLhPLEgZZWhvSEC1iE1wBexpl8P-Cxwc,32991
41
41
  autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
42
42
  autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=HiujLv8MJ31fWxRM5iXG2PzobFn4Mus0nJPu0MP2Rw4,11374
43
43
  autogluon/timeseries/trainer/__init__.py,sha256=lxiOT-Gc6BEnr_yWQqra85kEngeM_wtH2SCaRbmC_qE,170
44
- autogluon/timeseries/trainer/abstract_trainer.py,sha256=2nPLskmbOGRzkj6ttX0tHVkj9h2Y72MHaZy7L78MBZQ,59100
44
+ autogluon/timeseries/trainer/abstract_trainer.py,sha256=HUyX6xz1R_MoXtZQ2Hci9gEZ7IGdjTD7d6zbJ7UW9Qg,59104
45
45
  autogluon/timeseries/trainer/auto_trainer.py,sha256=psJFZBwWWPlLjNwAgvO4OUJXsRW1sTN2YS9a4pdoeoE,3344
46
46
  autogluon/timeseries/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
- autogluon/timeseries/utils/features.py,sha256=kG1xin3VCjtLwfaDkxlGKaesS7Ah-hIvsUNdwToLxYY,19492
47
+ autogluon/timeseries/utils/features.py,sha256=hEir-2lU8fvHjt5r_LG9tLZEk5wNdRdeLRE7qF5z3_Y,19585
48
48
  autogluon/timeseries/utils/forecast.py,sha256=p0WKM9Q0nLAwwmCgYZI1zi9mCOWXWJfllEt2lPRQl4M,1882
49
49
  autogluon/timeseries/utils/warning_filters.py,sha256=HMXNDo9jOUdf9wvyp-Db55xTq_Ctj6uso7qPhngoJPQ,1964
50
50
  autogluon/timeseries/utils/datetime/__init__.py,sha256=bTMR8jLh1LW55vHjbOr1zvWRMF_PqbvxpS-cUcNIDWI,173
@@ -52,11 +52,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
52
52
  autogluon/timeseries/utils/datetime/lags.py,sha256=GoLtvcZ8oKb3QkoBJ9E59LSPLOP7Qjxrr2UmMSZgjyw,5909
53
53
  autogluon/timeseries/utils/datetime/seasonality.py,sha256=h_4w00iEytAz_N_EpCENQ8RCXy7KQITczrYjBgVqWkQ,764
54
54
  autogluon/timeseries/utils/datetime/time_features.py,sha256=PAXbYbQ0z_5GFbkxSNi41zLY_2-U3x0Ynm1m_WhdtGc,2572
55
- autogluon.timeseries-1.1.0b20240414.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
56
- autogluon.timeseries-1.1.0b20240414.dist-info/METADATA,sha256=WXC5Hg9xK24Vwmt8QBeX8YpwrIwbNAWHayVnb_Kzx6Q,12488
57
- autogluon.timeseries-1.1.0b20240414.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
58
- autogluon.timeseries-1.1.0b20240414.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
59
- autogluon.timeseries-1.1.0b20240414.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
60
- autogluon.timeseries-1.1.0b20240414.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
61
- autogluon.timeseries-1.1.0b20240414.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
62
- autogluon.timeseries-1.1.0b20240414.dist-info/RECORD,,
55
+ autogluon.timeseries-1.1.0b20240416.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
56
+ autogluon.timeseries-1.1.0b20240416.dist-info/METADATA,sha256=G7NbMJu3QDKgPDzo_V8xiqjw7wLxdcDl_OtPPu0Hslk,12488
57
+ autogluon.timeseries-1.1.0b20240416.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
58
+ autogluon.timeseries-1.1.0b20240416.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
59
+ autogluon.timeseries-1.1.0b20240416.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
60
+ autogluon.timeseries-1.1.0b20240416.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
61
+ autogluon.timeseries-1.1.0b20240416.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
62
+ autogluon.timeseries-1.1.0b20240416.dist-info/RECORD,,