autogluon.timeseries 1.4.1b20250829__py3-none-any.whl → 1.4.1b20250905__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autogluon/timeseries/models/chronos/model.py +4 -1
- autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +50 -0
- autogluon/timeseries/trainer/prediction_cache.py +149 -0
- autogluon/timeseries/trainer/trainer.py +13 -69
- autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/METADATA +5 -5
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/RECORD +14 -13
- /autogluon.timeseries-1.4.1b20250829-py3.9-nspkg.pth → /autogluon.timeseries-1.4.1b20250905-py3.9-nspkg.pth +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/LICENSE +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/NOTICE +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/WHEEL +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/zip-safe +0 -0
autogluon/timeseries/models/chronos/model.py CHANGED

@@ -482,9 +482,12 @@ class ChronosModel(AbstractTimeSeriesModel):
 
             if self.prediction_length != fine_tune_prediction_length:
                 logger.debug(
-                    f"\
+                    f"\tChronos-Bolt models can only be fine-tuned with a maximum prediction_length of {model_prediction_length}. "
                     f"Fine-tuning prediction_length has been changed to {fine_tune_prediction_length}."
                 )
+            if self.quantile_levels != self.model_pipeline.quantiles:
+                self.model_pipeline.model.update_output_quantiles(self.quantile_levels)
+                logger.info(f"\tChronos-Bolt will be fine-tuned with quantile_levels={self.quantile_levels}")
         else:
             raise ValueError(f"Unsupported model pipeline: {type(self.model_pipeline)}")
 
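The new quantile check matters when a predictor is configured with non-default quantile_levels and Chronos-Bolt fine-tuning is enabled. A minimal sketch of how this code path would be exercised; the toy dataset and hyperparameter values are illustrative and assume the documented Chronos fine-tuning options, not anything taken from this diff:

import pandas as pd

from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

# Toy long-format dataset (illustrative only).
df = pd.DataFrame(
    {
        "item_id": ["A"] * 200 + ["B"] * 200,
        "timestamp": list(pd.date_range("2024-01-01", periods=200, freq="h")) * 2,
        "target": [float(i) for i in range(400)],
    }
)
train_data = TimeSeriesDataFrame.from_data_frame(df, id_column="item_id", timestamp_column="timestamp")

predictor = TimeSeriesPredictor(
    prediction_length=24,
    quantile_levels=[0.05, 0.5, 0.95],  # differs from the stock Chronos-Bolt output quantiles
)
# With fine_tune=True, the quantile check added above calls update_output_quantiles()
# so the Chronos-Bolt output head matches quantile_levels before fine-tuning starts.
predictor.fit(
    train_data,
    hyperparameters={"Chronos": {"model_path": "bolt_small", "fine_tune": True}},
)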
autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py CHANGED

@@ -371,6 +371,56 @@ class ChronosBoltModelForForecasting(T5PreTrainedModel):
 
         return decoder_outputs.last_hidden_state  # sequence_outputs, b x 1 x d_model
 
+    def update_output_quantiles(self, new_quantiles: list[float]) -> None:
+        """In-place updates model's output layer to support only the specified new quantiles by copying weights from closest existing quantiles."""
+        old_quantiles = self.chronos_config.quantiles
+        new_quantiles = sorted(new_quantiles)
+
+        if new_quantiles == old_quantiles:
+            return
+
+        self.chronos_config.quantiles = new_quantiles
+        self.num_quantiles = len(new_quantiles)
+        self.register_buffer("quantiles", torch.tensor(new_quantiles, dtype=self.dtype), persistent=False)
+
+        old_output_layer = self.output_patch_embedding
+        new_output_layer = ResidualBlock(
+            in_dim=self.config.d_model,
+            h_dim=self.config.d_ff,
+            out_dim=len(new_quantiles) * self.chronos_config.prediction_length,
+            act_fn_name=self.config.dense_act_fn,
+            dropout_p=self.config.dropout_rate,
+        )
+
+        # hidden_layer is shared across all quantiles
+        new_output_layer.hidden_layer.weight.data.copy_(old_output_layer.hidden_layer.weight.data)
+        if old_output_layer.hidden_layer.bias is not None:
+            new_output_layer.hidden_layer.bias.data.copy_(old_output_layer.hidden_layer.bias.data)
+
+        def copy_quantile_weights(src_idx: int, dst_idx: int):
+            """Copy weights for one quantile from src_idx to dst_idx"""
+            prediction_length = self.chronos_config.prediction_length
+            src_start, src_end = src_idx * prediction_length, (src_idx + 1) * prediction_length
+            dst_start, dst_end = dst_idx * prediction_length, (dst_idx + 1) * prediction_length
+
+            for layer_name in ["output_layer", "residual_layer"]:
+                old_layer_attr = getattr(old_output_layer, layer_name)
+                new_layer_attr = getattr(new_output_layer, layer_name)
+
+                new_layer_attr.weight[dst_start:dst_end] = old_layer_attr.weight[src_start:src_end]
+                if old_layer_attr.bias is not None:
+                    new_layer_attr.bias[dst_start:dst_end] = old_layer_attr.bias[src_start:src_end]
+
+        with torch.no_grad():
+            for new_idx, new_q in enumerate(new_quantiles):
+                closest_q = min(old_quantiles, key=lambda x: abs(x - new_q))
+                closest_idx = old_quantiles.index(closest_q)
+                copy_quantile_weights(closest_idx, new_idx)
+
+        self.output_patch_embedding = new_output_layer
+        self.config.chronos_config["quantiles"] = new_quantiles
+        self.chronos_config.quantiles = new_quantiles
+
 
 class ChronosBoltPipeline(BaseChronosPipeline):
     forecast_type: ForecastType = ForecastType.QUANTILES
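The update_output_quantiles method rebuilds the output ResidualBlock and seeds each new quantile's block of the output and residual layers from the closest existing quantile. A standalone sketch of that nearest-quantile mapping and block-wise slicing; the hidden size and prediction_length below are assumptions for illustration, not values from this diff:

import torch

old_quantiles = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]  # stock Chronos-Bolt quantiles
new_quantiles = [0.05, 0.5, 0.95]
prediction_length = 64  # assumption for the sketch
d_model = 512           # assumption for the sketch

# The output layer weight is laid out as one block of `prediction_length` rows per quantile.
old_weight = torch.randn(len(old_quantiles) * prediction_length, d_model)
new_weight = torch.empty(len(new_quantiles) * prediction_length, d_model)

with torch.no_grad():
    for new_idx, new_q in enumerate(new_quantiles):
        # Pick the closest existing quantile: 0.05 -> 0.1, 0.5 -> 0.5, 0.95 -> 0.9
        closest_idx = min(range(len(old_quantiles)), key=lambda i: abs(old_quantiles[i] - new_q))
        src = slice(closest_idx * prediction_length, (closest_idx + 1) * prediction_length)
        dst = slice(new_idx * prediction_length, (new_idx + 1) * prediction_length)
        new_weight[dst] = old_weight[src]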
autogluon/timeseries/trainer/prediction_cache.py ADDED

@@ -0,0 +1,149 @@
+import logging
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Any, Optional
+
+from autogluon.common.utils.utils import hash_pandas_df
+from autogluon.core.utils.loaders import load_pkl
+from autogluon.core.utils.savers import save_pkl
+from autogluon.timeseries import TimeSeriesDataFrame
+
+logger = logging.getLogger(__name__)
+
+
+class PredictionCache(ABC):
+    """A prediction cache is an abstract key-value store for time series predictions. The storage is keyed by
+    (data, known_covariates) pairs and stores (model_pred_dict, pred_time_dict) pair values. In this stored pair,
+    (model_pred_dict, pred_time_dict), both dictionaries are keyed by model names.
+    """
+
+    def __init__(self, root_path: str):
+        self.root_path = Path(root_path)
+
+    @abstractmethod
+    def get(
+        self, data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame]
+    ) -> tuple[dict[str, Optional[TimeSeriesDataFrame]], dict[str, float]]:
+        pass
+
+    @abstractmethod
+    def put(
+        self,
+        data: TimeSeriesDataFrame,
+        known_covariates: Optional[TimeSeriesDataFrame],
+        model_pred_dict: dict[str, Optional[TimeSeriesDataFrame]],
+        pred_time_dict: dict[str, float],
+    ) -> None:
+        pass
+
+    @abstractmethod
+    def clear(self) -> None:
+        pass
+
+
+def get_prediction_cache(use_cache: bool, root_path: str) -> PredictionCache:
+    if use_cache:
+        return FileBasedPredictionCache(root_path=root_path)
+    else:
+        return NoOpPredictionCache(root_path=root_path)
+
+
+def compute_dataset_hash(data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame] = None) -> str:
+    """Compute a unique string that identifies the time series dataset."""
+    combined_hash = hash_pandas_df(data) + hash_pandas_df(known_covariates) + hash_pandas_df(data.static_features)
+    return combined_hash
+
+
+class NoOpPredictionCache(PredictionCache):
+    """A dummy (no-op) prediction cache."""
+
+    def get(
+        self, data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame]
+    ) -> tuple[dict[str, Optional[TimeSeriesDataFrame]], dict[str, float]]:
+        return {}, {}
+
+    def put(
+        self,
+        data: TimeSeriesDataFrame,
+        known_covariates: Optional[TimeSeriesDataFrame],
+        model_pred_dict: dict[str, Optional[TimeSeriesDataFrame]],
+        pred_time_dict: dict[str, float],
+    ) -> None:
+        pass
+
+    def clear(self) -> None:
+        pass
+
+
+class FileBasedPredictionCache(PredictionCache):
+    """A file-backed cache of model predictions."""
+
+    _cached_predictions_filename = "cached_predictions.pkl"
+
+    @property
+    def path(self) -> Path:
+        return Path(self.root_path) / self._cached_predictions_filename
+
+    def get(
+        self, data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame]
+    ) -> tuple[dict[str, Optional[TimeSeriesDataFrame]], dict[str, float]]:
+        dataset_hash = compute_dataset_hash(data, known_covariates)
+        return self._get_cached_pred_dicts(dataset_hash)
+
+    def put(
+        self,
+        data: TimeSeriesDataFrame,
+        known_covariates: Optional[TimeSeriesDataFrame],
+        model_pred_dict: dict[str, Optional[TimeSeriesDataFrame]],
+        pred_time_dict: dict[str, float],
+    ) -> None:
+        dataset_hash = compute_dataset_hash(data, known_covariates)
+        self._save_cached_pred_dicts(dataset_hash, model_pred_dict, pred_time_dict)
+
+    def clear(self) -> None:
+        if self.path.exists():
+            logger.debug(f"Removing existing cached predictions file {self.path}")
+            self.path.unlink()
+
+    def _load_cached_predictions(self) -> dict[str, dict[str, dict[str, Any]]]:
+        if self.path.exists():
+            try:
+                cached_predictions = load_pkl.load(str(self.path))
+            except Exception:
+                cached_predictions = {}
+        else:
+            cached_predictions = {}
+        return cached_predictions
+
+    def _get_cached_pred_dicts(
+        self, dataset_hash: str
+    ) -> tuple[dict[str, Optional[TimeSeriesDataFrame]], dict[str, float]]:
+        """Load cached predictions for given dataset_hash from disk, if possible.
+
+        If loading fails for any reason, empty dicts are returned.
+        """
+        cached_predictions = self._load_cached_predictions()
+        if dataset_hash in cached_predictions:
+            try:
+                model_pred_dict = cached_predictions[dataset_hash]["model_pred_dict"]
+                pred_time_dict = cached_predictions[dataset_hash]["pred_time_dict"]
+                assert model_pred_dict.keys() == pred_time_dict.keys()
+                return model_pred_dict, pred_time_dict
+            except Exception:
+                logger.warning("Cached predictions are corrupted. Predictions will be made from scratch.")
+        return {}, {}
+
+    def _save_cached_pred_dicts(
+        self,
+        dataset_hash: str,
+        model_pred_dict: dict[str, Optional[TimeSeriesDataFrame]],
+        pred_time_dict: dict[str, float],
+    ) -> None:
+        cached_predictions = self._load_cached_predictions()
+        # Do not save results for models that failed
+        cached_predictions[dataset_hash] = {
+            "model_pred_dict": {k: v for k, v in model_pred_dict.items() if v is not None},
+            "pred_time_dict": {k: v for k, v in pred_time_dict.items() if v is not None},
+        }
+        save_pkl.save(str(self.path), object=cached_predictions)
+        logger.debug(f"Cached predictions saved to {self.path}")
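A minimal round trip through the new file-backed cache. The toy data and root_path are illustrative; the sketch assumes that hash_pandas_df accepts None for missing covariates and static features (as the compute_dataset_hash call above implies) and that save_pkl creates the target directory:

import pandas as pd

from autogluon.timeseries import TimeSeriesDataFrame
from autogluon.timeseries.trainer.prediction_cache import get_prediction_cache

df = pd.DataFrame(
    {
        "item_id": ["A"] * 5,
        "timestamp": pd.date_range("2024-01-01", periods=5, freq="D"),
        "target": [1.0, 2.0, 3.0, 4.0, 5.0],
    }
)
data = TimeSeriesDataFrame.from_data_frame(df, id_column="item_id", timestamp_column="timestamp")

cache = get_prediction_cache(use_cache=True, root_path="./cache_demo")  # FileBasedPredictionCache
# Values are keyed by model name; a real trainer stores forecast frames here, we reuse `data` as a stand-in.
cache.put(data=data, known_covariates=None, model_pred_dict={"Naive": data}, pred_time_dict={"Naive": 0.01})

model_pred_dict, pred_time_dict = cache.get(data=data, known_covariates=None)
assert set(model_pred_dict) == {"Naive"} and pred_time_dict["Naive"] == 0.01
cache.clear()  # deletes cached_predictions.pkl under ./cache_demo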
autogluon/timeseries/trainer/trainer.py CHANGED

@@ -12,7 +12,7 @@ import numpy as np
 import pandas as pd
 from tqdm import tqdm
 
-from autogluon.common.utils.utils import
+from autogluon.common.utils.utils import seed_everything
 from autogluon.core.trainer.abstract_trainer import AbstractTrainer
 from autogluon.core.utils.exceptions import TimeLimitExceeded
 from autogluon.core.utils.loaders import load_pkl
@@ -31,13 +31,12 @@ from autogluon.timeseries.utils.features import (
 from autogluon.timeseries.utils.warning_filters import disable_tqdm, warning_filter
 
 from .model_set_builder import TrainableModelSetBuilder, contains_searchspace
+from .prediction_cache import PredictionCache, get_prediction_cache
 
 logger = logging.getLogger("autogluon.timeseries.trainer")
 
 
 class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
-    _cached_predictions_filename = "cached_predictions.pkl"
-
     max_rel_importance_score: float = 1e5
     eps_abs_importance_score: float = 1e-5
     max_ensemble_time_limit: float = 600.0
@@ -92,12 +91,10 @@ class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
         assert isinstance(val_splitter, AbstractWindowSplitter), "val_splitter must be of type AbstractWindowSplitter"
         self.val_splitter = val_splitter
         self.refit_every_n_windows = refit_every_n_windows
-        self.cache_predictions = cache_predictions
         self.hpo_results = {}
 
-
-
-            self._cached_predictions_path.unlink()
+        self.prediction_cache: PredictionCache = get_prediction_cache(cache_predictions, self.path)
+        self.prediction_cache.clear()
 
     @property
     def path_pkl(self) -> str:
@@ -1055,9 +1052,10 @@ class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
         use_cache
             If False, will ignore the cache even if it's available.
         """
-        if
-
-
+        if use_cache:
+            model_pred_dict, pred_time_dict_marginal = self.prediction_cache.get(
+                data=data, known_covariates=known_covariates
+            )
         else:
             model_pred_dict = {}
             pred_time_dict_marginal: dict[str, Any] = {}
@@ -1093,9 +1091,11 @@ class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
 
         if len(failed_models) > 0 and raise_exception_if_failed:
             raise RuntimeError(f"Following models failed to predict: {failed_models}")
-
-
-
+
+        if use_cache:
+            self.prediction_cache.put(
+                data=data,
+                known_covariates=known_covariates,
                 model_pred_dict=model_pred_dict,
                 pred_time_dict=pred_time_dict_marginal,
             )
@@ -1114,62 +1114,6 @@ class TimeSeriesTrainer(AbstractTrainer[TimeSeriesModelBase]):
             pred_time_dict_total[model_name] += pred_time_dict_marginal[base_model]
         return dict(pred_time_dict_total)
 
-    @property
-    def _cached_predictions_path(self) -> Path:
-        return Path(self.path) / self._cached_predictions_filename
-
-    @staticmethod
-    def _compute_dataset_hash(
-        data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame] = None
-    ) -> str:
-        """Compute a unique string that identifies the time series dataset."""
-        combined_hash = hash_pandas_df(data) + hash_pandas_df(known_covariates) + hash_pandas_df(data.static_features)
-        return combined_hash
-
-    def _load_cached_predictions(self) -> dict[str, dict[str, dict[str, Any]]]:
-        """Load cached predictions from disk. If loading fails, an empty dictionary is returned."""
-        if self._cached_predictions_path.exists():
-            try:
-                cached_predictions = load_pkl.load(str(self._cached_predictions_path))
-            except Exception:
-                cached_predictions = {}
-        else:
-            cached_predictions = {}
-        return cached_predictions
-
-    def _get_cached_pred_dicts(
-        self, dataset_hash: str
-    ) -> tuple[dict[str, Optional[TimeSeriesDataFrame]], dict[str, float]]:
-        """Load cached predictions for given dataset_hash from disk, if possible.
-
-        If loading fails for any reason, empty dicts are returned.
-        """
-        cached_predictions = self._load_cached_predictions()
-        if dataset_hash in cached_predictions:
-            try:
-                model_pred_dict = cached_predictions[dataset_hash]["model_pred_dict"]
-                pred_time_dict = cached_predictions[dataset_hash]["pred_time_dict"]
-                assert model_pred_dict.keys() == pred_time_dict.keys()
-                return model_pred_dict, pred_time_dict
-            except Exception:
-                logger.warning("Cached predictions are corrupted. Predictions will be made from scratch.")
-        return {}, {}
-
-    def _save_cached_pred_dicts(
-        self,
-        dataset_hash: str,
-        model_pred_dict: dict[str, Optional[TimeSeriesDataFrame]],
-        pred_time_dict: dict[str, float],
-    ) -> None:
-        cached_predictions = self._load_cached_predictions()
-        # Do not save results for models that failed
-        cached_predictions[dataset_hash] = {
-            "model_pred_dict": {k: v for k, v in model_pred_dict.items() if v is not None},
-            "pred_time_dict": {k: v for k, v in pred_time_dict.items() if v is not None},
-        }
-        save_pkl.save(str(self._cached_predictions_path), object=cached_predictions)
-        logger.debug(f"Cached predictions saved to {self._cached_predictions_path}")
-
     def _merge_refit_full_data(
         self, train_data: TimeSeriesDataFrame, val_data: Optional[TimeSeriesDataFrame]
     ) -> TimeSeriesDataFrame:
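Taken together, the trainer hunks replace the inlined hashing and pickling helpers with a read-through/write-through pattern around self.prediction_cache. A simplified sketch of the resulting control flow; names follow the hunks above, the actual per-model prediction is elided, and this is not the trainer's literal implementation:

from typing import Optional

from autogluon.timeseries import TimeSeriesDataFrame
from autogluon.timeseries.trainer.prediction_cache import PredictionCache


def get_model_pred_dict_sketch(
    prediction_cache: PredictionCache,
    model_names: list[str],
    data: TimeSeriesDataFrame,
    known_covariates: Optional[TimeSeriesDataFrame] = None,
    use_cache: bool = True,
):
    # Read-through: start from whatever the cache already holds for this exact dataset.
    if use_cache:
        model_pred_dict, pred_time_dict = prediction_cache.get(data=data, known_covariates=known_covariates)
    else:
        model_pred_dict, pred_time_dict = {}, {}

    for model_name in model_names:
        if model_name not in model_pred_dict:
            # Cache miss: the real trainer predicts here and records the marginal prediction time.
            model_pred_dict[model_name], pred_time_dict[model_name] = None, 0.0

    # Write-through: persist the updated dicts (the file-backed put() drops None entries for failed models).
    if use_cache:
        prediction_cache.put(
            data=data,
            known_covariates=known_covariates,
            model_pred_dict=model_pred_dict,
            pred_time_dict=pred_time_dict,
        )
    return model_pred_dict, pred_time_dict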
autogluon/timeseries/version.py CHANGED
{autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.4.
+Version: 1.4.1b20250905
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -55,10 +55,10 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.4.
-Requires-Dist: autogluon.common==1.4.
-Requires-Dist: autogluon.features==1.4.
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.
+Requires-Dist: autogluon.core[raytune]==1.4.1b20250905
+Requires-Dist: autogluon.common==1.4.1b20250905
+Requires-Dist: autogluon.features==1.4.1b20250905
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.1b20250905
 Provides-Extra: all
 Provides-Extra: tests
 Requires-Dist: pytest; extra == "tests"
{autogluon.timeseries-1.4.1b20250829.dist-info → autogluon.timeseries-1.4.1b20250905.dist-info}/RECORD CHANGED

@@ -1,11 +1,11 @@
-autogluon.timeseries-1.4.
+autogluon.timeseries-1.4.1b20250905-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
 autogluon/timeseries/learner.py,sha256=eQrqFVOmL-2JC85LgCMkbyoLpKS02Dilg1T8RUeS_LI,13887
 autogluon/timeseries/predictor.py,sha256=7X4YsWYa3Xk2RI1Irf2O-c3-I82Zqhg-cgj8cj_4AoA,88427
 autogluon/timeseries/regressor.py,sha256=lc8Qr3-8v4oxajtCnV3sxpUaW6vxXXJOA6Kr-qVne4k,11926
 autogluon/timeseries/splitter.py,sha256=8ACkuCXeUhQGUx4jz_Vv17q814WrHJQeKvq2v4-oE6s,3158
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=IPQrSZMK4KFRJPrPfw9aEl6tbYRIUkJ_GZ7f_Y5oSxQ,91
 autogluon/timeseries/configs/__init__.py,sha256=wiLBwxZkDTQBJkSJ9-xz3p_yJxX0dbHe108dS1P5O6A,183
 autogluon/timeseries/configs/hyperparameter_presets.py,sha256=GbI2sd3uakWtaeaMyF7B5z_lmyfb6ToK6PZEUZTyG9w,2031
 autogluon/timeseries/configs/predictor_presets.py,sha256=B5HFHIelh91hhG0YYE5SJ7_14P7sylFAABgHX8n_53M,2712
@@ -28,11 +28,11 @@ autogluon/timeseries/models/autogluon_tabular/per_step.py,sha256=M5rhj_jjcQz27wP
 autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=aI1QJLJaOB5Xy2WA0jo6Jh25MRVyyZ8ONrqlV96kpw0,2735
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
-autogluon/timeseries/models/chronos/model.py,sha256
+autogluon/timeseries/models/chronos/model.py,sha256=-z6Y5Fyo5_X-U8BCeSZBhqQqaJaGBCNIAYDd5y6WaMQ,32614
 autogluon/timeseries/models/chronos/pipeline/__init__.py,sha256=bkTR0LSKIxAaKFOr9A0HSkCtnRdikDPUPp810WOKgxE,247
 autogluon/timeseries/models/chronos/pipeline/base.py,sha256=Us-TUpHSN3mM3ut05IVc2a9Q6KYq1n9pTb7JZG7b6kA,5546
 autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=bgow5FkHG7y5qWBXcggqXemnistJUfrl0lWFXcGXg5g,20197
-autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=
+autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=5zM8G6K9id7qrWhRT37z_xoPVE-BJXwms1SwjF0TBG4,23949
 autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=WYeCKFP5dxs4u09XTncBI2486VV22O1DiM9a3ZvZ1OE,12790
 autogluon/timeseries/models/ensemble/__init__.py,sha256=x2Y6dWk15XugTEWNUKq8U5z6nIjelo3UjpI-TfS13OE,159
 autogluon/timeseries/models/ensemble/abstract.py,sha256=wvtXNZTwiYpIurPkOYSzsi3XTRRx5guJLMYLmXTdOeQ,5695
@@ -51,7 +51,8 @@ autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdze
 autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=Hn-H2jLdeuB0_TxhAdununS8ti-iO-WSl3FOoxzcEJA,12369
 autogluon/timeseries/trainer/__init__.py,sha256=_tw3iioJfvtIV7wnjtEMv0yS8oabmCFxDnGRodYE7RI,72
 autogluon/timeseries/trainer/model_set_builder.py,sha256=s6tozfND3lLfst6Vxa_oP_wgCmDapyCJYFmCjkEn-es,10788
-autogluon/timeseries/trainer/
+autogluon/timeseries/trainer/prediction_cache.py,sha256=Vi6EbMiMheq_smA93U_MoMxYUV85RdPm0dvJFdsM8K4,5551
+autogluon/timeseries/trainer/trainer.py,sha256=LF2X5UNnrU8w5h_i09SphGWvGFvZ6KvPDq89Z3GzZZQ,54959
 autogluon/timeseries/transforms/__init__.py,sha256=fKlT4pkJ_8Gl7IUTc3uSDzt2Xow5iH5w6fPB3ePNrTg,127
 autogluon/timeseries/transforms/covariate_scaler.py,sha256=9lEfDS4wnVZohQNnm9OcAXr3voUl83RCnctKR3O66iU,7030
 autogluon/timeseries/transforms/target_scaler.py,sha256=kTQrXAsDHCnYuqfpaVuvefyTgyp_ylDpUIPz7pArjeY,6043
@@ -64,11 +65,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=rjJtdBU0M41R1jwfmvCbo045s-6XBjhGVnGBQJ9-U1E,5997
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
 autogluon/timeseries/utils/datetime/time_features.py,sha256=kEOFls4Nzh8nO0Pcz1DwLsC_NA3hMI4JUlZI3kuvuts,2666
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
+autogluon.timeseries-1.4.1b20250905.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.4.1b20250905.dist-info/METADATA,sha256=aYgm8hJDfpEySfHy1jBFoYUp2wd0OSsA7ZrCsFxRMnA,12463
+autogluon.timeseries-1.4.1b20250905.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.4.1b20250905.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.timeseries-1.4.1b20250905.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.4.1b20250905.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.4.1b20250905.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.4.1b20250905.dist-info/RECORD,,