autogluon.timeseries 1.2.1b20250204__py3-none-any.whl → 1.2.1b20250206__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- autogluon/timeseries/models/chronos/model.py +55 -42
- autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/METADATA +4 -4
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/RECORD +11 -11
- /autogluon.timeseries-1.2.1b20250204-py3.9-nspkg.pth → /autogluon.timeseries-1.2.1b20250206-py3.9-nspkg.pth +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/LICENSE +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/NOTICE +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/WHEEL +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/zip-safe +0 -0
autogluon/timeseries/models/chronos/model.py CHANGED
@@ -9,6 +9,7 @@ import numpy as np
 import pandas as pd
 
 from autogluon.common.loaders import load_pkl
+from autogluon.common.space import Space
 from autogluon.timeseries.dataset.ts_dataframe import TimeSeriesDataFrame
 from autogluon.timeseries.models.abstract import AbstractTimeSeriesModel
 from autogluon.timeseries.utils.warning_filters import disable_duplicate_logs, warning_filter
@@ -192,39 +193,11 @@ class ChronosModel(AbstractTimeSeriesModel):
         model_path_input = hyperparameters.get("model_path", self.default_model_path)
         self.model_path = MODEL_ALIASES.get(model_path_input, model_path_input)
 
-
-
-
-
-
-        # if the model requires a GPU, set the torch dtype to bfloat16
-        self.torch_dtype = hyperparameters.get("torch_dtype", self.default_torch_dtype)
-
-        self.data_loader_num_workers = hyperparameters.get("data_loader_num_workers", 0)
-        self.optimization_strategy: Optional[Literal["onnx", "openvino"]] = hyperparameters.get(
-            "optimization_strategy", None
-        )
-        if self.optimization_strategy is not None:
-            warnings.warn(
-                (
-                    "optimization_strategy is deprecated and will be removed in a future release. "
-                    "We recommend using Chronos-Bolt models for fast inference on the CPU."
-                ),
-                category=FutureWarning,
-                stacklevel=3,
-            )
-        self.context_length = hyperparameters.get("context_length")
-
-        if self.context_length is not None and self.context_length > self.maximum_context_length:
-            logger.info(
-                f"\tContext length {self.context_length} exceeds maximum context length {self.maximum_context_length}."
-                f"Context length will be set to {self.maximum_context_length}."
-            )
-            self.context_length = self.maximum_context_length
-
-        # we truncate the name to avoid long path errors on Windows
-        model_path_safe = str(model_path_input).replace("/", "__").replace(os.path.sep, "__")[-50:]
-        name = (name if name is not None else "Chronos") + f"[{model_path_safe}]"
+        name = name if name is not None else "Chronos"
+        if not isinstance(model_path_input, Space):
+            # we truncate the name to avoid long path errors on Windows
+            model_path_safe = str(model_path_input).replace("/", "__").replace(os.path.sep, "__")[-50:]
+            name += f"[{model_path_safe}]"
 
         super().__init__(
             path=path,
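With this change the constructor appends the model path to the model name only when model_path is a concrete value, so a search-space object no longer leaks into the name before it has been resolved. A minimal sketch of the kind of configuration this accommodates, assuming the Chronos hyperparameters may be given a Categorical search space for model_path (the dict shape and the aliases below are illustrative, not taken from the diff):

from autogluon.common import space

# Hypothetical hyperparameter dict, for illustration only.
chronos_hyperparameters = {
    "Chronos": {
        "model_path": space.Categorical("bolt_small", "bolt_base"),
    }
}
# With a concrete string the name becomes e.g. "Chronos[bolt_small]";
# with a Space it stays "Chronos" until a concrete value is chosen.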
@@ -332,6 +305,7 @@ class ChronosModel(AbstractTimeSeriesModel):
         self._model_pipeline = pipeline
 
     def persist(self) -> "ChronosModel":
+        # TODO: Check the model has been fit before persist
         self.load_model_pipeline()
         return self
 
@@ -344,6 +318,14 @@ class ChronosModel(AbstractTimeSeriesModel):
         """Gets params that are passed to the inner model."""
         init_args = super()._get_model_params().copy()
 
+        init_args.setdefault("batch_size", self.default_batch_size)
+        init_args.setdefault("num_samples", self.default_num_samples)
+        init_args.setdefault("device", None)
+        # if the model requires a GPU, set the torch dtype to bfloat16
+        init_args.setdefault("torch_dtype", self.default_torch_dtype)
+        init_args.setdefault("data_loader_num_workers", 0)
+        init_args.setdefault("context_length", None)
+        init_args.setdefault("optimization_strategy", None)
         init_args.setdefault("fine_tune", False)
         init_args.setdefault("keep_transformers_logs", False)
         init_args.setdefault("fine_tune_lr", 1e-5)
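Because the defaults are now applied in _get_model_params via dict.setdefault, user-supplied hyperparameters always take precedence and the defaults only fill in missing keys. A tiny standalone illustration of that pattern (the values are made up):

init_args = {"batch_size": 64}            # pretend this came from the user
init_args.setdefault("batch_size", 16)    # no-op: the user's value is kept
init_args.setdefault("num_samples", 20)   # missing key: filled with the default
assert init_args == {"batch_size": 64, "num_samples": 20}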
@@ -370,7 +352,7 @@ class ChronosModel(AbstractTimeSeriesModel):
             report_to="none",
             max_steps=init_args["fine_tune_steps"],
             gradient_accumulation_steps=1,
-            dataloader_num_workers=
+            dataloader_num_workers=init_args["data_loader_num_workers"],
             tf32=self._has_tf32(),
             save_only_model=True,
             prediction_loss_only=True,
@@ -389,6 +371,36 @@ class ChronosModel(AbstractTimeSeriesModel):
 
         return init_args
 
+    def _validate_and_assign_attributes(self, model_params: dict):
+        # we validate the params here because their values are concrete,
+        # unlike in the constructor where they may be a search space
+
+        # TODO: automatically determine batch size based on GPU / memory availability
+        self.batch_size = model_params["batch_size"]
+        self.num_samples = model_params["num_samples"]
+        self.device = model_params["device"]
+        self.torch_dtype = model_params["torch_dtype"]
+        self.data_loader_num_workers = model_params["data_loader_num_workers"]
+        self.optimization_strategy: Optional[Literal["onnx", "openvino"]] = model_params["optimization_strategy"]
+
+        if self.optimization_strategy is not None:
+            warnings.warn(
+                (
+                    "optimization_strategy is deprecated and will be removed in a future release. "
+                    "We recommend using Chronos-Bolt models for fast inference on the CPU."
+                ),
+                category=FutureWarning,
+                stacklevel=3,
+            )
+        self.context_length = model_params["context_length"]
+
+        if self.context_length is not None and self.context_length > self.maximum_context_length:
+            logger.info(
+                f"\tContext length {self.context_length} exceeds maximum context length {self.maximum_context_length}."
+                f"Context length will be set to {self.maximum_context_length}."
+            )
+            self.context_length = self.maximum_context_length
+
     def _fit(
         self,
         train_data: TimeSeriesDataFrame,
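As its leading comment says, the new _validate_and_assign_attributes runs at fit time, when the hyperparameter values are concrete rather than search spaces. Its context-length clamping can be sketched in isolation as follows; maximum_context_length is an attribute of ChronosModel, and 2048 below is only an assumed illustrative value, not taken from the diff:

from typing import Optional

def clamp_context_length(context_length: Optional[int], maximum_context_length: int = 2048) -> Optional[int]:
    # Values above the maximum are capped; None (i.e. "not set") passes through unchanged.
    if context_length is not None and context_length > maximum_context_length:
        return maximum_context_length
    return context_length

assert clamp_context_length(4096) == 2048
assert clamp_context_length(None) is None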
@@ -419,13 +431,14 @@ class ChronosModel(AbstractTimeSeriesModel):
 
         self._check_fit_params()
 
-
-
+        model_params = self._get_model_params()
+        self._validate_and_assign_attributes(model_params)
+        do_fine_tune = model_params["fine_tune"]
 
         if do_fine_tune:
             assert train_data is not None, "train_data cannot be None when fine_tune=True"
 
-        eval_during_fine_tune = val_data is not None and
+        eval_during_fine_tune = val_data is not None and model_params["eval_during_fine_tune"]
 
         if do_fine_tune:
             context_length = self._get_context_length(train_data)
@@ -461,7 +474,7 @@ class ChronosModel(AbstractTimeSeriesModel):
             else:
                 raise ValueError(f"Unsupported model pipeline: {type(self.model_pipeline)}")
 
-            fine_tune_trainer_kwargs =
+            fine_tune_trainer_kwargs = model_params["fine_tune_trainer_kwargs"]
             fine_tune_trainer_kwargs["use_cpu"] = str(self.model_pipeline.inner_model.device) == "cpu"
 
             if fine_tune_trainer_kwargs["use_cpu"]:
@@ -493,7 +506,7 @@ class ChronosModel(AbstractTimeSeriesModel):
                 # the original Chronos models otherwise the data is returned in ChronosBolt's format
                 tokenizer=getattr(self.model_pipeline, "tokenizer", None),
                 mode="training",
-            ).shuffle(
+            ).shuffle(model_params["fine_tune_shuffle_buffer_size"])
 
             callbacks = []
             if time_limit is not None:
@@ -503,8 +516,8 @@ class ChronosModel(AbstractTimeSeriesModel):
                 callbacks.append(EvaluateAndSaveFinalStepCallback())
                 # evaluate on a randomly-sampled subset
                 fine_tune_eval_max_items = (
-                    min(val_data.num_items,
-                    if
+                    min(val_data.num_items, model_params["fine_tune_eval_max_items"])
+                    if model_params["fine_tune_eval_max_items"] is not None
                     else val_data.num_items
                 )
 
@@ -548,7 +561,7 @@ class ChronosModel(AbstractTimeSeriesModel):
             logger.info(f"\tSaving fine-tuned model to {fine_tuned_ckpt_path}")
             self.model_pipeline.inner_model.save_pretrained(Path(self.path) / self.fine_tuned_ckpt_name)
 
-            if not
+            if not model_params["keep_transformers_logs"]:
                 logger.debug(f"Removing transformers_logs directory {output_dir}")
                 shutil.rmtree(output_dir)
 
autogluon/timeseries/version.py CHANGED

{autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.2.
+Version: 1.2.1b20250206
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -55,9 +55,9 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.2.
-Requires-Dist: autogluon.common==1.2.
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.
+Requires-Dist: autogluon.core[raytune]==1.2.1b20250206
+Requires-Dist: autogluon.common==1.2.1b20250206
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250206
 Provides-Extra: all
 Provides-Extra: chronos-onnx
 Requires-Dist: optimum[onnxruntime]<1.20,>=1.17; extra == "chronos-onnx"
{autogluon.timeseries-1.2.1b20250204.dist-info → autogluon.timeseries-1.2.1b20250206.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
-autogluon.timeseries-1.2.
+autogluon.timeseries-1.2.1b20250206-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
 autogluon/timeseries/learner.py,sha256=PDAHFlos6q5JukwRE86tKoH0zxYf3nLzy7qfD_a5NYY,13849
@@ -6,7 +6,7 @@ autogluon/timeseries/predictor.py,sha256=HTE8a_R_9U0z-KlxyoELm-64BXNRzFu3mIEbTab
 autogluon/timeseries/regressor.py,sha256=dIXttb0SOGS8IAwZOMANNDc796spN0LMysGUvuKgskU,9623
 autogluon/timeseries/splitter.py,sha256=yzPca9p2bWV-_VJAptUyyzQsxu-uixAdpMoGQtDzMD4,3205
 autogluon/timeseries/trainer.py,sha256=IAX6zJBpljdWvgm0gjlI9EiOzU3BMSIttNydzy66zHg,57333
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=HW8IYQYcNP5gBk-bKSY_rkK5vHCBYKLhFTjTRVYE_j4,91
 autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
 autogluon/timeseries/configs/presets_configs.py,sha256=cLat8ecLlWrI-SC5KLBDCX2SbVXaucemy2pjxJAtSY0,2543
 autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
@@ -26,7 +26,7 @@ autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=H2UlpnJcIIEi_
 autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=XVoy8KpvoeX38lHHAXq4Be9LCxKjxZ36SOFeSAICRFM,2524
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
-autogluon/timeseries/models/chronos/model.py,sha256=
+autogluon/timeseries/models/chronos/model.py,sha256=ijupqg4S6kRxdJWanNt6bnyPoGAaJluUHHmZpyQrfDE,31211
 autogluon/timeseries/models/chronos/pipeline/__init__.py,sha256=N-YZH9BGBoi99r5cznJe1zEEjwjIg7cOYIHZkKuJq44,247
 autogluon/timeseries/models/chronos/pipeline/base.py,sha256=14OAKHmio6LmO4mVom2mPGB0CvIrOjMGJzb-MVSAq-s,5596
 autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=uFJLsSb2WQiSrmDZ0g2mO-lhTFUlq7vplGRBXZ9_VBk,22591
@@ -58,11 +58,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=gQDk5_zmsY5DUWDUpSaCKYkQ9nHKKY-LsywJQRAoYSk,5988
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
 autogluon/timeseries/utils/datetime/time_features.py,sha256=MjLi3zQ00uWWJtXH9oGX2GJkTbvjdSiuabSa4kcVuxE,2672
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
+autogluon.timeseries-1.2.1b20250206.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.2.1b20250206.dist-info/METADATA,sha256=H-VEWEC46px1FIkn59ZDKYa_ZayhvGIEmLlnR0s9nv0,12662
+autogluon.timeseries-1.2.1b20250206.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.2.1b20250206.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.timeseries-1.2.1b20250206.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.2.1b20250206.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.2.1b20250206.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.2.1b20250206.dist-info/RECORD,,
The remaining files (the py3.9-nspkg.pth, LICENSE, NOTICE, WHEEL, namespace_packages.txt, top_level.txt, and zip-safe entries) are unchanged between the two versions.