autogluon.timeseries 1.4.1b20251218__py3-none-any.whl → 1.5.0b20251223__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of autogluon.timeseries might be problematic.
- autogluon/timeseries/configs/hyperparameter_presets.py +8 -9
- autogluon/timeseries/models/abstract/abstract_timeseries_model.py +0 -11
- autogluon/timeseries/models/autogluon_tabular/per_step.py +1 -1
- autogluon/timeseries/models/chronos/chronos2.py +2 -2
- autogluon/timeseries/models/chronos/model.py +1 -1
- autogluon/timeseries/models/ensemble/per_item_greedy.py +1 -1
- autogluon/timeseries/models/ensemble/weighted/greedy.py +2 -0
- autogluon/timeseries/models/local/statsforecast.py +1 -1
- autogluon/timeseries/predictor.py +14 -7
- autogluon/timeseries/trainer/trainer.py +0 -1
- autogluon/timeseries/version.py +1 -1
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/METADATA +7 -7
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/RECORD +20 -20
- /autogluon.timeseries-1.4.1b20251218-py3.11-nspkg.pth → /autogluon.timeseries-1.5.0b20251223-py3.11-nspkg.pth +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/WHEEL +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/licenses/LICENSE +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/licenses/NOTICE +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/namespace_packages.txt +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/top_level.txt +0 -0
- {autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/zip-safe +0 -0
autogluon/timeseries/configs/hyperparameter_presets.py
CHANGED
@@ -12,21 +12,13 @@ def get_hyperparameter_presets() -> dict[str, dict[str, dict[str, Any] | list[di
             "DirectTabular": {"max_num_samples": 100_000},
         },
         "light": {
-            "Naive": {},
             "SeasonalNaive": {},
             "ETS": {},
             "Theta": {},
             "RecursiveTabular": {},
             "DirectTabular": {},
             "TemporalFusionTransformer": {},
-            "
-        },
-        "light_inference": {
-            "SeasonalNaive": {},
-            "DirectTabular": {},
-            "RecursiveTabular": {},
-            "TemporalFusionTransformer": {},
-            "PatchTST": {},
+            "Chronos2": {"model_path": "autogluon/chronos-2-small"},
         },
         "default": {
             "SeasonalNaive": {},
@@ -35,6 +27,7 @@ def get_hyperparameter_presets() -> dict[str, dict[str, dict[str, Any] | list[di
             "RecursiveTabular": {},
             "DirectTabular": {},
             "TemporalFusionTransformer": {},
+            "DeepAR": {},
             "Chronos2": [
                 {},
                 {
@@ -44,5 +37,11 @@ def get_hyperparameter_presets() -> dict[str, dict[str, dict[str, Any] | list[di
                     "eval_during_fine_tune": True,
                 },
             ],
+            "Chronos": {
+                "ag_args": {"name_suffix": "WithRegressor"},
+                "model_path": "bolt_small",
+                "target_scaler": "standard",
+                "covariate_regressor": {"model_name": "CAT", "model_hyperparameters": {"iterations": 1000}},
+            },
         },
     }
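For reference, the updated "light" preset above corresponds to the explicit hyperparameters dict below. This is a hedged usage sketch, not code from the diff: it assumes TimeSeriesPredictor.fit still accepts a model-name-to-kwargs mapping, and the tiny synthetic dataset and horizon are placeholders only there so the snippet runs.

import pandas as pd

from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

# Tiny synthetic dataset so the example is self-contained (illustrative only).
df = pd.DataFrame({
    "item_id": ["A"] * 100,
    "timestamp": pd.date_range("2024-01-01", periods=100, freq="D"),
    "target": range(100),
})
train_data = TimeSeriesDataFrame.from_data_frame(df)

# Explicit equivalent of the new "light" preset from the diff above.
light_hyperparameters = {
    "SeasonalNaive": {},
    "ETS": {},
    "Theta": {},
    "RecursiveTabular": {},
    "DirectTabular": {},
    "TemporalFusionTransformer": {},
    "Chronos2": {"model_path": "autogluon/chronos-2-small"},
}

predictor = TimeSeriesPredictor(prediction_length=7)  # placeholder horizon
predictor.fit(train_data, hyperparameters=light_hyperparameters)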
autogluon/timeseries/models/abstract/abstract_timeseries_model.py
CHANGED
@@ -668,7 +668,6 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
         pass
 
     def _preprocess_time_limit(self, time_limit: float) -> float:
-        original_time_limit = time_limit
         max_time_limit_ratio = self._extra_ag_args.get("max_time_limit_ratio", self.default_max_time_limit_ratio)
         max_time_limit = self._extra_ag_args.get("max_time_limit")
 
@@ -677,16 +676,6 @@ class AbstractTimeSeriesModel(TimeSeriesModelBase, TimeSeriesTunable, metaclass=
         if max_time_limit is not None:
             time_limit = min(time_limit, max_time_limit)
 
-        if original_time_limit != time_limit:
-            time_limit_og_str = f"{original_time_limit:.2f}s" if original_time_limit is not None else "None"
-            time_limit_str = f"{time_limit:.2f}s" if time_limit is not None else "None"
-            logger.debug(
-                f"\tTime limit adjusted due to model hyperparameters: "
-                f"{time_limit_og_str} -> {time_limit_str} "
-                f"(ag.max_time_limit={max_time_limit}, "
-                f"ag.max_time_limit_ratio={max_time_limit_ratio}"
-            )
-
         return time_limit
 
     def _get_search_space(self):
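The change above only drops the debug log; the clamping itself stays. Below is a standalone sketch of that logic with made-up defaults. The ratio-scaling step is an assumption (it falls between the two hunks and is not shown), so treat this as illustration, not the library's implementation.

# Standalone sketch of the remaining time-limit preprocessing. The ratio scaling step
# is assumed (it sits between the two hunks above); the 0.9 default is made up.
def preprocess_time_limit(
    time_limit: float,
    extra_ag_args: dict | None = None,
    default_max_time_limit_ratio: float = 0.9,
) -> float:
    extra_ag_args = extra_ag_args or {}
    max_time_limit_ratio = extra_ag_args.get("max_time_limit_ratio", default_max_time_limit_ratio)
    max_time_limit = extra_ag_args.get("max_time_limit")

    time_limit = time_limit * max_time_limit_ratio  # assumed scaling step (not shown in the hunks)
    if max_time_limit is not None:
        time_limit = min(time_limit, max_time_limit)
    return time_limit


print(preprocess_time_limit(600.0, {"max_time_limit": 300.0}))  # 300.0: 600 * 0.9 = 540, capped at 300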
autogluon/timeseries/models/chronos/chronos2.py
CHANGED
@@ -83,7 +83,7 @@ class Chronos2Model(AbstractTimeSeriesModel):
     """
 
     ag_model_aliases = ["Chronos-2"]
-    ag_priority =
+    ag_priority = 75
     fine_tuned_ckpt_name: str = "fine-tuned-ckpt"
 
     _supports_known_covariates = True
@@ -147,7 +147,7 @@ class Chronos2Model(AbstractTimeSeriesModel):
         self.load_model_pipeline()
 
         # NOTE: This must be placed after load_model_pipeline to ensure that the loggers are available in loggerDict
-        self._update_transformers_loggers(logging.ERROR if verbosity <= 3 else logging.
+        self._update_transformers_loggers(logging.ERROR if verbosity <= 3 else logging.WARNING)
 
         if self.get_hyperparameter("fine_tune"):
             self._fine_tune(train_data, val_data, time_limit=time_limit, verbosity=verbosity)
autogluon/timeseries/models/chronos/model.py
CHANGED
@@ -471,7 +471,7 @@ class ChronosModel(AbstractTimeSeriesModel):
         for logger_name in logging.root.manager.loggerDict:
             if "transformers" in logger_name:
                 transformers_logger = logging.getLogger(logger_name)
-                transformers_logger.setLevel(logging.ERROR if verbosity <= 3 else logging.
+                transformers_logger.setLevel(logging.ERROR if verbosity <= 3 else logging.WARNING)
 
         self._check_fit_params()
         self._log_unused_hyperparameters()
autogluon/timeseries/models/ensemble/per_item_greedy.py
CHANGED
@@ -102,7 +102,7 @@ class PerItemGreedyEnsemble(AbstractTimeSeriesEnsembleModel):
         self.average_weight = self.average_weight[models_to_keep]
 
         weights_for_printing = {model: round(float(weight), 2) for model, weight in self.average_weight.items()}
-        logger.info(f"\tAverage ensemble weights: {pprint.pformat(weights_for_printing, width=
+        logger.info(f"\tAverage ensemble weights: {pprint.pformat(weights_for_printing, width=1000)}")
 
     def _split_predictions_per_item(
         self, predictions_per_window: dict[str, list[TimeSeriesDataFrame]]
autogluon/timeseries/models/ensemble/weighted/greedy.py
CHANGED
@@ -14,6 +14,8 @@ class GreedyEnsemble(AbstractWeightedTimeSeriesEnsembleModel):
     """Greedy ensemble selection algorithm that iteratively builds an ensemble by selecting models with
     replacement.
 
+    Also known as ``WeightedEnsemble`` for backward compatibility.
+
     This class implements the Ensemble Selection algorithm by Caruana et al. [Car2004]_, which starts
     with an empty ensemble and repeatedly adds the model that most improves the ensemble's validation
     performance. Models can be selected multiple times, allowing the algorithm to assign higher effective
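The docstring touched above describes Caruana-style ensemble selection. Below is a minimal, self-contained sketch of that idea; it is not AutoGluon's implementation, and plain MSE on a NumPy array stands in for the library's loss handling.

import numpy as np

def greedy_ensemble_selection(predictions: np.ndarray, y_val: np.ndarray, n_rounds: int = 100) -> np.ndarray:
    """Caruana-style selection with replacement.

    predictions: array of shape (n_models, n_timesteps) with validation forecasts.
    Returns normalized weights proportional to how often each model was picked.
    """
    n_models = predictions.shape[0]
    counts = np.zeros(n_models)
    running_sum = np.zeros_like(y_val, dtype=float)
    for round_idx in range(1, n_rounds + 1):
        # Try adding each model once more and keep the one that most reduces validation MSE.
        losses = [
            np.mean(((running_sum + predictions[m]) / round_idx - y_val) ** 2)
            for m in range(n_models)
        ]
        best = int(np.argmin(losses))
        counts[best] += 1
        running_sum += predictions[best]
    return counts / counts.sum()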
autogluon/timeseries/models/local/statsforecast.py
CHANGED
@@ -269,7 +269,7 @@ class AutoETSModel(AbstractProbabilisticStatsForecastModel):
         This significantly speeds up fitting and usually leads to no change in accuracy.
     """
 
-    ag_priority =
+    ag_priority = 60
    init_time_in_seconds = 0 # C++ models require no compilation
    allowed_local_model_args = [
        "damped",
autogluon/timeseries/predictor.py
CHANGED
@@ -10,6 +10,7 @@ from typing import Any, Literal, Type, cast, overload
 import numpy as np
 import pandas as pd
 
+from autogluon.common.utils.decorators import apply_presets
 from autogluon.common.utils.log_utils import (
     add_log_to_file,
     set_logger_verbosity,
@@ -17,7 +18,6 @@ from autogluon.common.utils.log_utils import (
 )
 from autogluon.common.utils.system_info import get_ag_system_info
 from autogluon.common.utils.utils import check_saved_predictor_version, setup_outputdir
-from autogluon.core.utils.decorators import apply_presets
 from autogluon.core.utils.loaders import load_pkl, load_str
 from autogluon.core.utils.savers import save_pkl, save_str
 from autogluon.timeseries import __version__ as current_ag_version
@@ -487,18 +487,23 @@ class TimeSeriesPredictor:
            Available presets:
 
            - ``"fast_training"``: Simple statistical and tree-based ML models. These models are fast to train but may not be very accurate.
-            - ``"medium_quality"``: Same models as above, plus deep learning models ``TemporalFusionTransformer`` and Chronos-
+            - ``"medium_quality"``: Same models as above, plus deep learning models ``TemporalFusionTransformer`` and Chronos-2 (small). Produces good forecasts with reasonable training time.
            - ``"high_quality"``: A mix of multiple DL, ML and statistical forecasting models available in AutoGluon that offers the best forecast accuracy. Much more accurate than ``medium_quality``, but takes longer to train.
            - ``"best_quality"``: Same models as in ``"high_quality"``, but performs validation with multiple backtests. Usually better than ``high_quality``, but takes even longer to train.
 
-            Available presets with the `Chronos-Bolt <https://github.com/amazon-science/chronos-forecasting>`_
+            Available presets with the `Chronos-2 and Chronos-Bolt <https://github.com/amazon-science/chronos-forecasting>`_ models:
 
+            - ``"chronos2"``: `Chronos-2 <https://huggingface.co/amazon/chronos-2>`_ base model for zero-shot forecasting.
+            - ``"chronos2_small"``: Smaller Chronos-2 model for faster zero-shot forecasting with lower memory footprint.
+            - ``"chronos2_ensemble"``: Ensemble combining zero-shot Chronos-2 base model with fine-tuned Chronos-2 small model for improved accuracy.
            - ``"bolt_{model_size}"``: where model size is one of ``tiny,mini,small,base``. Uses the Chronos-Bolt pretrained model for zero-shot forecasting.
-            See the documentation for ``ChronosModel`` or see `Hugging Face <https://huggingface.co/collections/amazon/chronos-models-65f1791d630a8d57cb718444>`_ for more information.
 
-
-
-
+            See the documentation for ``Chronos2`` and ``Chronos`` models in :ref:`Forecasting Time Series - Model Zoo <forecasting_model_zoo>`
+            or see `Hugging Face <https://huggingface.co/collections/amazon/chronos-models-65f1791d630a8d57cb718444>`_ for more information.
+
+            Exact definitions of all presets can be found in the source code
+            [`1 <https://github.com/autogluon/autogluon/blob/stable/timeseries/src/autogluon/timeseries/configs/predictor_presets.py>`_,
+            `2 <https://github.com/autogluon/autogluon/blob/stable/timeseries/src/autogluon/timeseries/configs/hyperparameter_presets.py>`_].
 
            If no ``presets`` are selected, user-provided values for ``hyperparameters`` will be used (defaulting to their
            default values specified below).
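A hedged usage sketch of the new Chronos-2 presets documented above. The CSV path, horizon, and time limit are placeholders, and the preset name is taken from the docstring in this hunk rather than verified against the preset registry.

from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

# "train.csv" must contain item_id, timestamp and target columns; values here are placeholders.
train_data = TimeSeriesDataFrame.from_path("train.csv")

predictor = TimeSeriesPredictor(prediction_length=24).fit(
    train_data,
    presets="chronos2_small",  # zero-shot Chronos-2 (small); see the preset list above
    time_limit=600,
)
forecast = predictor.predict(train_data)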
@@ -574,6 +579,8 @@ class TimeSeriesPredictor:
            * "bayes": Perform HPO with HyperOpt on GluonTS-backed models via Ray tune. Perform random search on other models.
            * "auto": alias for "bayes"
 
+            To enable HyperOpt, install the corresponding extra with ``pip install "autogluon.timeseries[ray]"``.
+
            The "scheduler" and "searcher" key are required when providing a dict.
 
            Example::
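For the dict form mentioned above (the "scheduler" and "searcher" keys are required), a hedged sketch follows. The trial count, the DeepAR search space, and the "local" scheduler value are illustrative assumptions, not the only accepted options; "bayes"/"auto" searchers need the ray extra noted in this hunk.

from autogluon.common import space
from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

train_data = TimeSeriesDataFrame.from_path("train.csv")  # placeholder path

predictor = TimeSeriesPredictor(prediction_length=24)
predictor.fit(
    train_data,
    hyperparameters={
        "DeepAR": {"hidden_size": space.Int(20, 100)},  # illustrative search space
    },
    hyperparameter_tune_kwargs={
        "num_trials": 5,
        "scheduler": "local",   # assumed scheduler name
        "searcher": "random",   # random search; no ray extra required
    },
)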
autogluon/timeseries/trainer/trainer.py
CHANGED
@@ -495,7 +495,6 @@ class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
                time_reserved_for_ensemble = min(
                    self.max_ensemble_time_limit, time_left / (num_base_models - i + 1)
                )
-                logger.debug(f"Reserving {time_reserved_for_ensemble:.1f}s for ensemble")
            else:
                time_reserved_for_ensemble = 0.0
            time_left_for_model = (time_left - time_reserved_for_ensemble) / (num_base_models - i)
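A quick worked example of the budgeting arithmetic in this hunk (only the debug log is removed), using made-up numbers: 600 s remaining, 4 base models left, i = 0, and a hypothetical 300 s ensemble cap.

time_left = 600.0
num_base_models = 4
i = 0
max_ensemble_time_limit = 300.0  # hypothetical cap

time_reserved_for_ensemble = min(max_ensemble_time_limit, time_left / (num_base_models - i + 1))
time_left_for_model = (time_left - time_reserved_for_ensemble) / (num_base_models - i)

print(time_reserved_for_ensemble)  # 120.0 -> 600 / 5
print(time_left_for_model)         # 120.0 -> (600 - 120) / 4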
autogluon/timeseries/version.py
CHANGED
{autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: autogluon.timeseries
-Version: 1.
+Version: 1.5.0b20251223
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -56,19 +56,19 @@ Requires-Dist: einops<1,>=0.7
 Requires-Dist: chronos-forecasting<2.4,>=2.2.2
 Requires-Dist: peft<0.18,>=0.13.0
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core==1.
-Requires-Dist: autogluon.common==1.
-Requires-Dist: autogluon.features==1.
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.
+Requires-Dist: autogluon.core==1.5.0b20251223
+Requires-Dist: autogluon.common==1.5.0b20251223
+Requires-Dist: autogluon.features==1.5.0b20251223
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.5.0b20251223
 Provides-Extra: tests
 Requires-Dist: pytest; extra == "tests"
 Requires-Dist: ruff>=0.0.285; extra == "tests"
 Requires-Dist: flaky<4,>=3.7; extra == "tests"
 Requires-Dist: pytest-timeout<3,>=2.1; extra == "tests"
 Provides-Extra: ray
-Requires-Dist: autogluon.core[raytune]==1.
+Requires-Dist: autogluon.core[raytune]==1.5.0b20251223; extra == "ray"
 Provides-Extra: all
-Requires-Dist: autogluon.core[raytune]==1.
+Requires-Dist: autogluon.core[raytune]==1.5.0b20251223; extra == "all"
 Dynamic: author
 Dynamic: classifier
 Dynamic: description
{autogluon_timeseries-1.4.1b20251218.dist-info → autogluon_timeseries-1.5.0b20251223.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-autogluon.timeseries-1.
+autogluon.timeseries-1.5.0b20251223-py3.11-nspkg.pth,sha256=kAlKxjI5mE3Pwwqphu2maN5OBQk8W8ew70e_qbI1c6A,482
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/learner.py,sha256=9kGn0ACGfbyZRlZmwkrgBbkwq7c2715yKDCh1EK3EWQ,14961
-autogluon/timeseries/predictor.py,sha256=
+autogluon/timeseries/predictor.py,sha256=egBu6YKVe8GRrLJ1khOKflzSu-VDSH5_mCCQuaXQNRY,106686
 autogluon/timeseries/regressor.py,sha256=HDdqi7MYRheW3uZy5c50sqVDAHap0ooyQBdOvKEKkWM,11718
 autogluon/timeseries/splitter.py,sha256=2rypDxDKkqOC2v5nPJ6m0cmHQTZ9D6qUFrQV1HC9lz4,2329
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=8JLrm5aNz-MCYrYXeYCWT8qjMtgup-QxkV_u6DNr9vQ,91
 autogluon/timeseries/configs/__init__.py,sha256=wiLBwxZkDTQBJkSJ9-xz3p_yJxX0dbHe108dS1P5O6A,183
-autogluon/timeseries/configs/hyperparameter_presets.py,sha256=
+autogluon/timeseries/configs/hyperparameter_presets.py,sha256=F38QMemh3LR4cT60xMZctI6O1XTOgNpVSleGOKMfhQQ,1586
 autogluon/timeseries/configs/predictor_presets.py,sha256=2CkUXIFtup5w5sQkIhoU5G84b9jiNfcUC0yEug3izGY,2327
 autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
 autogluon/timeseries/dataset/ts_dataframe.py,sha256=IOIkwV_VPV3JvilNt98gZ77gMHIpk-Ug-trDvqSk_Jg,52228
@@ -18,22 +18,22 @@ autogluon/timeseries/metrics/utils.py,sha256=_Nz6GLbs91WhqN1PoA53wD4xEEuPIQ0juV5
 autogluon/timeseries/models/__init__.py,sha256=zPdwxiveOTGU9658tDPMFXbflZ5fzd_AJdbCacbfZ0s,1375
 autogluon/timeseries/models/registry.py,sha256=dkuyKG5UK2xiGtXcsuyRDXrI-YC84zkPre8Z3wt9T_A,2115
 autogluon/timeseries/models/abstract/__init__.py,sha256=Htfkjjc3vo92RvyM8rIlQ0PLWt3jcrCKZES07UvCMV0,146
-autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=
+autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=80ShlklwAnGoMEKjMDjgoVrkGG6x2Eg3nOwbjJVMCqk,31909
 autogluon/timeseries/models/abstract/model_trial.py,sha256=xKD6Nw8hIqAq4HxNVcGUhr9BuEqzFn7FX0TenvZHU0Q,3753
 autogluon/timeseries/models/abstract/tunable.py,sha256=thl_wJjB9ao1T5NNF1RVH5k3yFqmao0irX-eUNqDs8k,7111
 autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=E5fZsdFPgVdyCVyj5bGmn_lQFlCMn2NvuRLBMcCFvhM,205
 autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=FJlYqMZJaltTlh54LMrDOgICgGanIymBI2F4OevVQ6A,36690
-autogluon/timeseries/models/autogluon_tabular/per_step.py,sha256=
+autogluon/timeseries/models/autogluon_tabular/per_step.py,sha256=q2V8dSyHns7gkcDwAIpczFse3vHHyYSm4LjJ4KICsWo,23360
 autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=AkXEInK4GocApU5GylECH01qgz5cLLLqC9apuN0eUbQ,2873
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=dIoAImmZc0dTlut4CZkJxcg1bpuHKZkS8x8Y6fBoUAY,113
-autogluon/timeseries/models/chronos/chronos2.py,sha256=
-autogluon/timeseries/models/chronos/model.py,sha256=
+autogluon/timeseries/models/chronos/chronos2.py,sha256=N8TW1EaYwqPqeRbntlkhz9L7uADBna0LDZPMYWH4w3c,16800
+autogluon/timeseries/models/chronos/model.py,sha256=npTcHR6nSi7lfCzZfVl_9G6iHJwIMRm3wfqjOAnBcIQ,33681
 autogluon/timeseries/models/chronos/utils.py,sha256=33_kycc7AVasS3c7-AuVFtqBTZzV_yszr-MpKe28S3M,14449
 autogluon/timeseries/models/ensemble/__init__.py,sha256=3_Vn6RHpjouthrEoXs1guKUpUX6JoUgMVCgxPt2pyLw,1302
 autogluon/timeseries/models/ensemble/abstract.py,sha256=gAaspq4f67MTfs7KW6ADVU0KfPeBKySPstCqUeC7JYs,4579
 autogluon/timeseries/models/ensemble/ensemble_selection.py,sha256=hepycVJTtbibzTKq5Sk04L_vUuYlLFItkSybaCc_Jv8,6366
-autogluon/timeseries/models/ensemble/per_item_greedy.py,sha256=
+autogluon/timeseries/models/ensemble/per_item_greedy.py,sha256=XaUEUO6fQD7Ck7X0882pvax5e3g55o_g8uMBnSP-c5M,7869
 autogluon/timeseries/models/ensemble/array_based/__init__.py,sha256=u4vGTH9gP6oATYKkxnvoiDZvc5rqfnfgrODHxIvHP7U,207
 autogluon/timeseries/models/ensemble/array_based/abstract.py,sha256=Oci1XEgFFTle0JF5Z8PhnMjG1iPrhhtunoKUPUPhTLw,10190
 autogluon/timeseries/models/ensemble/array_based/models.py,sha256=UOV3t3QH_j0AGg2y3gJIWZ5rS5tHI39z3yUJlhkEyA0,8603
@@ -45,7 +45,7 @@ autogluon/timeseries/models/ensemble/array_based/regressor/tabular.py,sha256=prH
 autogluon/timeseries/models/ensemble/weighted/__init__.py,sha256=_LipTsDnYvTFmjZWsb1Vrm-eALsVVfUlF2gOpcaqE2Q,206
 autogluon/timeseries/models/ensemble/weighted/abstract.py,sha256=meGVoSfPOjmEwTKGRTUQJ1N9bZtpewJ217TGqKNye04,1839
 autogluon/timeseries/models/ensemble/weighted/basic.py,sha256=KsFcdmhkjywqSYvx9rdWoFzjLO-czKsOj3CWuC61SS4,3715
-autogluon/timeseries/models/ensemble/weighted/greedy.py,sha256=
+autogluon/timeseries/models/ensemble/weighted/greedy.py,sha256=ziyudzlLDPLW_vGuh2U9uZ2YH0h478mRpM3H9q7nf4M,2657
 autogluon/timeseries/models/gluonts/__init__.py,sha256=YfyNYOkhhNsloA4MAavfmqKO29_q6o4lwPoV7L4_h7M,355
 autogluon/timeseries/models/gluonts/abstract.py,sha256=qJ60DSkzSI4E1kx5RGeGBehkiMvcAVGSUXYSpZXo8nk,27699
 autogluon/timeseries/models/gluonts/dataset.py,sha256=ApR-r4o0OV4jQ2hYUppJ4yjvWX02JoHod5O4acEKiHw,5074
@@ -54,7 +54,7 @@ autogluon/timeseries/models/local/__init__.py,sha256=TiKY7M6Foy8vtshfZiStEH58_XG
 autogluon/timeseries/models/local/abstract_local_model.py,sha256=7pbyE4vhXgoCEcHAhxpxBVCOEG-LSrBptGwjLXd-s8o,11335
 autogluon/timeseries/models/local/naive.py,sha256=w0XuMcgcTvTUEi2iXcd6BGvyHKB-kpqbv9c9iK4pMOA,7490
 autogluon/timeseries/models/local/npts.py,sha256=mKuDsGnaYV8QkIgGR8se-1pXb2JAxzafESt2g_21ENA,4530
-autogluon/timeseries/models/local/statsforecast.py,sha256=
+autogluon/timeseries/models/local/statsforecast.py,sha256=h_WpYGAEA761ehhZv6RZXsGC-WVr4BkPh1C8xUbcKuQ,33275
 autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
 autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=bv8_ux-7JXPwhbFXeBN893xQo6echCCMwqH4aEMK250,12937
 autogluon/timeseries/models/toto/__init__.py,sha256=rQaVjZJV5ZsJGC0jhQ6CA4nYeXdV1KtlyDz2i2usQnY,54
@@ -77,7 +77,7 @@ autogluon/timeseries/trainer/__init__.py,sha256=_tw3iioJfvtIV7wnjtEMv0yS8oabmCFx
 autogluon/timeseries/trainer/ensemble_composer.py,sha256=zGa8vocPQGsHf-7ti8DsHwjEA176FkCt7up2LwWCK4Y,19465
 autogluon/timeseries/trainer/model_set_builder.py,sha256=kROApbu10_ro-GVYlnx3oTKZj2TcNswWbOFB1QyBCOc,10737
 autogluon/timeseries/trainer/prediction_cache.py,sha256=KKs22UUGrVfQN_81IgzL7Bfc8tjWk3k6YW3uHURaSs0,5496
-autogluon/timeseries/trainer/trainer.py,sha256=
+autogluon/timeseries/trainer/trainer.py,sha256=vraCZtARPV3gNHlhktmDlbTr8Mn59H-JOIaXAdpTDw8,56057
 autogluon/timeseries/trainer/utils.py,sha256=7N4vRP6GFUlRAahxQ9PqppdIMFqMz3wpZ5u-_onR24M,588
 autogluon/timeseries/transforms/__init__.py,sha256=fKlT4pkJ_8Gl7IUTc3uSDzt2Xow5iH5w6fPB3ePNrTg,127
 autogluon/timeseries/transforms/covariate_scaler.py,sha256=CpTtokiE1uEg_RJa4kEUUuBwXZpPL11OC2fgCkRpGlQ,6986
@@ -93,11 +93,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=dijskkPDJXhXbRHGQZPhUFuEom3typKbOeET7cxkHGY,5965
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=-w3bULdkIZKP-JrO1ahHLyNCanLhejocHlasZShuwA0,802
 autogluon/timeseries/utils/datetime/time_features.py,sha256=kEOFls4Nzh8nO0Pcz1DwLsC_NA3hMI4JUlZI3kuvuts,2666
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
-autogluon_timeseries-1.
+autogluon_timeseries-1.5.0b20251223.dist-info/licenses/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon_timeseries-1.5.0b20251223.dist-info/licenses/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon_timeseries-1.5.0b20251223.dist-info/METADATA,sha256=RN3ZXM3arl4aTC8jFDaGt7BS-VTtZ1hRyGySOp8r43s,13425
+autogluon_timeseries-1.5.0b20251223.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+autogluon_timeseries-1.5.0b20251223.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon_timeseries-1.5.0b20251223.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon_timeseries-1.5.0b20251223.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon_timeseries-1.5.0b20251223.dist-info/RECORD,,