autogluon.timeseries 1.4.1b20250926__py3-none-any.whl → 1.4.1b20250929__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- autogluon/timeseries/models/__init__.py +2 -0
- autogluon/timeseries/models/toto/__init__.py +3 -0
- autogluon/timeseries/models/toto/_internal/__init__.py +9 -0
- autogluon/timeseries/models/toto/_internal/backbone/__init__.py +3 -0
- autogluon/timeseries/models/toto/_internal/backbone/attention.py +197 -0
- autogluon/timeseries/models/toto/_internal/backbone/backbone.py +262 -0
- autogluon/timeseries/models/toto/_internal/backbone/distribution.py +70 -0
- autogluon/timeseries/models/toto/_internal/backbone/kvcache.py +136 -0
- autogluon/timeseries/models/toto/_internal/backbone/rope.py +94 -0
- autogluon/timeseries/models/toto/_internal/backbone/scaler.py +306 -0
- autogluon/timeseries/models/toto/_internal/backbone/transformer.py +333 -0
- autogluon/timeseries/models/toto/_internal/dataset.py +165 -0
- autogluon/timeseries/models/toto/_internal/forecaster.py +423 -0
- autogluon/timeseries/models/toto/dataloader.py +108 -0
- autogluon/timeseries/models/toto/hf_pretrained_model.py +119 -0
- autogluon/timeseries/models/toto/model.py +234 -0
- autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/METADATA +10 -5
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/RECORD +26 -11
- /autogluon.timeseries-1.4.1b20250926-py3.9-nspkg.pth → /autogluon.timeseries-1.4.1b20250929-py3.9-nspkg.pth +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/LICENSE +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/NOTICE +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/WHEEL +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/zip-safe +0 -0
autogluon/timeseries/models/toto/model.py ADDED
@@ -0,0 +1,234 @@
+import logging
+import os
+from typing import TYPE_CHECKING, Any, Optional, Sequence, Union
+
+import numpy as np
+import pandas as pd
+from typing_extensions import Self
+
+from autogluon.common.loaders import load_pkl
+from autogluon.timeseries import TimeSeriesDataFrame
+from autogluon.timeseries.models.abstract import AbstractTimeSeriesModel
+from autogluon.timeseries.utils.features import CovariateMetadata
+
+if TYPE_CHECKING:
+    from ._internal import TotoForecaster
+
+logger = logging.getLogger(__name__)
+
+
+class TotoModel(AbstractTimeSeriesModel):
+    """Toto (Time-Series-Optimized Transformer for Observability) [CohenKhwajaetal2025]_ pretrained time series forecasting model.
+
+    Toto is a 151M parameter model trained on over 1T data points from DataDog's internal observability systems, as well as
+    the GIFT-eval pretrain, Chronos pretraining, and synthetically generated time series corpora. It is a decoder-only
+    architecture that autoregressively outputs parametric distribution forecasts. More details can be found on
+    `Hugging Face <https://huggingface.co/Datadog/Toto-Open-Base-1.0>`_ and `GitHub <https://github.com/DataDog/toto>`_.
+
+    The AutoGluon implementation of Toto is based on a port of the original implementation. It is optimized for easy maintenance
+    with the rest of the AutoGluon model zoo, and does not feature some important optimizations such as xformers and flash-attention
+    available in the original model repository. The AutoGluon implementation of Toto requires a CUDA-compatible GPU.
+
+    References
+    ----------
+    .. [CohenKhwajaetal2025] Cohen, Ben, Khwaja, Emaad et al.
+        "This Time is Different: An Observability Perspective on Time Series Foundation Models."
+        https://arxiv.org/abs/2505.14766
+
+
+    Other Parameters
+    ----------------
+    model_path : str, default = "Datadog/Toto-Open-Base-1.0"
+        Model path used for the model, i.e., a HuggingFace transformers ``name_or_path``. Can be a
+        compatible model name on HuggingFace Hub or a local path to a model directory.
+    batch_size : int, default = 24
+        Size of batches used during inference.
+    num_samples : int, default = 256
+        Number of samples used during inference.
+    device : str, default = "cuda"
+        Device to use for inference. Toto requires a CUDA-compatible GPU to run.
+    context_length : int or None, default = 4096
+        The context length to use in the model. Shorter context lengths will decrease model accuracy, but result
+        in faster inference.
+    compile_model : bool, default = True
+        Whether to compile the model using torch.compile() for faster inference. May increase initial loading time
+        but can provide speedups during inference.
+    """
+
+    default_model_path: str = "Datadog/Toto-Open-Base-1.0"
+
+    def __init__(
+        self,
+        path: Optional[str] = None,
+        name: Optional[str] = None,
+        hyperparameters: Optional[dict[str, Any]] = None,
+        freq: Optional[str] = None,
+        prediction_length: int = 1,
+        covariate_metadata: Optional[CovariateMetadata] = None,
+        target: str = "target",
+        quantile_levels: Sequence[float] = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9),
+        eval_metric: Any = None,
+    ):
+        hyperparameters = hyperparameters if hyperparameters is not None else {}
+
+        self.model_path = hyperparameters.get("model_path", self.default_model_path)
+
+        super().__init__(
+            path=path,
+            name=name,
+            hyperparameters=hyperparameters,
+            freq=freq,
+            prediction_length=prediction_length,
+            covariate_metadata=covariate_metadata,
+            target=target,
+            quantile_levels=quantile_levels,
+            eval_metric=eval_metric,
+        )
+
+        self._forecaster: Optional[TotoForecaster] = None
+
+    def save(self, path: Optional[str] = None, verbose: bool = True) -> str:
+        forecaster = self._forecaster
+        self._forecaster = None
+        path = super().save(path=path, verbose=verbose)
+        self._forecaster = forecaster
+
+        return str(path)
+
+    @classmethod
+    def load(cls, path: str, reset_paths: bool = True, load_oof: bool = False, verbose: bool = True) -> Self:
+        model = load_pkl.load(path=os.path.join(path, cls.model_file_name), verbose=verbose)
+        if reset_paths:
+            model.set_contexts(path)
+
+        return model
+
+    def _is_gpu_available(self) -> bool:
+        import torch.cuda
+
+        return torch.cuda.is_available()
+
+    def get_minimum_resources(self, is_gpu_available: bool = False) -> dict[str, Union[int, float]]:
+        return {"num_cpus": 1, "num_gpus": 1}
+
+    def load_forecaster(self):
+        from ._internal import TotoForecaster
+        from .hf_pretrained_model import TotoConfig, TotoPretrainedModel
+
+        if not self._is_gpu_available():
+            raise RuntimeError(
+                f"{self.name} requires a GPU to run, but no GPU was detected. "
+                "Please make sure that you are using a computer with a CUDA-compatible GPU and "
+                "`import torch; torch.cuda.is_available()` returns `True`."
+            )
+
+        hyperparameters = self.get_hyperparameters()
+        pretrained_model = TotoPretrainedModel.from_pretrained(
+            self.model_path,
+            config=TotoConfig.from_pretrained(self.model_path),
+            device_map=hyperparameters["device"],
+        )
+
+        if hyperparameters["compile_model"]:
+            pretrained_model.model.compile()
+
+        self._forecaster = TotoForecaster(model=pretrained_model.model)
+
+    def persist(self) -> Self:
+        if self._forecaster is None:
+            self.load_forecaster()
+        return self
+
+    def _get_default_hyperparameters(self) -> dict:
+        return {
+            "batch_size": 24,
+            "num_samples": 256,
+            "device": "cuda",
+            "context_length": 4096,
+            "compile_model": True,
+        }
+
+    @property
+    def allowed_hyperparameters(self) -> list[str]:
+        return super().allowed_hyperparameters + [
+            "model_path",
+            "batch_size",
+            "num_samples",
+            "device",
+            "context_length",
+            "compile_model",
+        ]
+
+    def _more_tags(self) -> dict:
+        return {
+            "allow_nan": True,
+            "can_use_train_data": False,
+            "can_use_val_data": False,
+        }
+
+    def _fit(
+        self,
+        train_data: TimeSeriesDataFrame,
+        val_data: Optional[TimeSeriesDataFrame] = None,
+        time_limit: Optional[float] = None,
+        num_cpus: Optional[int] = None,
+        num_gpus: Optional[int] = None,
+        verbosity: int = 2,
+        **kwargs,
+    ) -> None:
+        self._check_fit_params()
+        self.load_forecaster()
+
+    def _predict(
+        self, data: TimeSeriesDataFrame, known_covariates: Optional[TimeSeriesDataFrame] = None, **kwargs
+    ) -> TimeSeriesDataFrame:
+        import torch
+
+        from .dataloader import TotoDataLoader, TotoInferenceDataset
+
+        hyperparameters = self.get_hyperparameters()
+
+        if self._forecaster is None:
+            self.load_forecaster()
+        assert self._forecaster, "Toto model failed to load"
+        device = self._forecaster.model.device
+
+        dataset = TotoInferenceDataset(
+            target_df=data.fill_missing_values("auto"),
+            max_context_length=hyperparameters["context_length"],
+        )
+        loader = TotoDataLoader(
+            dataset,
+            freq=self.freq,
+            batch_size=hyperparameters["batch_size"],
+            time_limit=kwargs.get("time_limit"),
+            device=device,
+        )
+
+        batch_means, batch_quantiles = [], []
+        with torch.inference_mode():
+            for masked_timeseries in loader:
+                forecast = self._forecaster.forecast(
+                    masked_timeseries,
+                    prediction_length=self.prediction_length,
+                    num_samples=hyperparameters["num_samples"],
+                    samples_per_batch=32,
+                )
+
+                batch_means.append(forecast.mean.cpu().numpy())
+                qs = np.array([forecast.quantile(q).cpu().numpy() for q in self.quantile_levels])
+                batch_quantiles.append(qs.squeeze(2).transpose(1, 2, 0))
+
+        df = pd.DataFrame(
+            np.concatenate(
+                [
+                    np.concatenate(batch_means, axis=0).reshape(-1, 1),
+                    np.concatenate(batch_quantiles, axis=0).reshape(-1, len(self.quantile_levels)),
+                ],
+                axis=1,
+            ),
+            columns=["mean"] + [str(q) for q in self.quantile_levels],
+            index=self.get_forecast_horizon_index(data),
+        )
+
+        return TimeSeriesDataFrame(df)
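The docstring above lists the hyperparameters the new model accepts (model_path, batch_size, num_samples, device, context_length, compile_model) and notes that it is a zero-shot pretrained model that needs a CUDA GPU. The snippet below is a minimal sketch, not part of the diff, of how the model might be selected through the existing TimeSeriesPredictor API: the registry key "Toto" is an assumption (the models/__init__.py change is not shown here), and the toy data frame is purely illustrative; the hyperparameter names and defaults come from the docstring.

import numpy as np
import pandas as pd
from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor

# Toy long-format data: one hourly series with item_id / timestamp / target columns.
df = pd.DataFrame(
    {
        "item_id": ["A"] * 200,
        "timestamp": pd.date_range("2025-01-01", periods=200, freq="h"),
        "target": np.arange(200, dtype=float),
    }
)
train_data = TimeSeriesDataFrame.from_data_frame(df, id_column="item_id", timestamp_column="timestamp")

# Requires a CUDA-compatible GPU; _fit only loads the pretrained weights (zero-shot).
predictor = TimeSeriesPredictor(prediction_length=24).fit(
    train_data,
    hyperparameters={
        "Toto": {  # assumed registry key for TotoModel
            "batch_size": 24,
            "num_samples": 256,
            "context_length": 4096,
            "compile_model": True,  # torch.compile() the backbone, per the docstring
        }
    },
)
predictions = predictor.predict(train_data)  # mean + quantile columns, as assembled in _predict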
autogluon/timeseries/version.py CHANGED

{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.4.1b20250926
+Version: 1.4.1b20250929
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -55,16 +55,21 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.4.1b20250926
-Requires-Dist: autogluon.common==1.4.1b20250926
-Requires-Dist: autogluon.features==1.4.1b20250926
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.1b20250926
+Requires-Dist: autogluon.core[raytune]==1.4.1b20250929
+Requires-Dist: autogluon.common==1.4.1b20250929
+Requires-Dist: autogluon.features==1.4.1b20250929
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.4.1b20250929
 Provides-Extra: all
+Requires-Dist: einops<1,>=0.7; extra == "all"
+Requires-Dist: rotary-embedding-torch<1,>=0.8; extra == "all"
 Provides-Extra: tests
 Requires-Dist: pytest; extra == "tests"
 Requires-Dist: ruff>=0.0.285; extra == "tests"
 Requires-Dist: flaky<4,>=3.7; extra == "tests"
 Requires-Dist: pytest-timeout<3,>=2.1; extra == "tests"
+Provides-Extra: toto
+Requires-Dist: einops<1,>=0.7; extra == "toto"
+Requires-Dist: rotary-embedding-torch<1,>=0.8; extra == "toto"

{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/RECORD CHANGED
@@ -1,11 +1,11 @@
-autogluon.timeseries-1.4.
+autogluon.timeseries-1.4.1b20250929-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
 autogluon/timeseries/learner.py,sha256=eQrqFVOmL-2JC85LgCMkbyoLpKS02Dilg1T8RUeS_LI,13887
 autogluon/timeseries/predictor.py,sha256=7X4YsWYa3Xk2RI1Irf2O-c3-I82Zqhg-cgj8cj_4AoA,88427
 autogluon/timeseries/regressor.py,sha256=lc8Qr3-8v4oxajtCnV3sxpUaW6vxXXJOA6Kr-qVne4k,11926
 autogluon/timeseries/splitter.py,sha256=8ACkuCXeUhQGUx4jz_Vv17q814WrHJQeKvq2v4-oE6s,3158
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=cQlaqPFj_K4DgifHiB0n9ZcX7eQipDbvlJ1jsoQCfsU,91
 autogluon/timeseries/configs/__init__.py,sha256=wiLBwxZkDTQBJkSJ9-xz3p_yJxX0dbHe108dS1P5O6A,183
 autogluon/timeseries/configs/hyperparameter_presets.py,sha256=GbI2sd3uakWtaeaMyF7B5z_lmyfb6ToK6PZEUZTyG9w,2031
 autogluon/timeseries/configs/predictor_presets.py,sha256=B5HFHIelh91hhG0YYE5SJ7_14P7sylFAABgHX8n_53M,2712
@@ -16,7 +16,7 @@ autogluon/timeseries/metrics/abstract.py,sha256=6jbluvHXfLc_cuK1Fx0ZYle2sR4WGG6Y
 autogluon/timeseries/metrics/point.py,sha256=sS__n_Em7m4CUaBu3PNWQ_dHw1YCOHbEyC15fhytFL8,18308
 autogluon/timeseries/metrics/quantile.py,sha256=x0cq44fXRoMiuI4BVQ7mpWk1YgrK4OwLTlJAhCHQ7Xg,4634
 autogluon/timeseries/metrics/utils.py,sha256=HuDe1BNe8yJU4f_DKM913nNrUueoRaw6zhxm1-S20s0,910
-autogluon/timeseries/models/__init__.py,sha256=
+autogluon/timeseries/models/__init__.py,sha256=9NY9mqYaZe_7XB70M6psHARH-Lpkfroj4toUUPO9BmI,1339
 autogluon/timeseries/models/registry.py,sha256=8n7W04ql0ckNQUzKcAW7bxreLI8wTAUTymACgLklH9M,2158
 autogluon/timeseries/models/abstract/__init__.py,sha256=Htfkjjc3vo92RvyM8rIlQ0PLWt3jcrCKZES07UvCMV0,146
 autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=97HOi7fRPxtx8Y9hq-xdJI-kLMp6Z-8LUSvcfBjXFsM,31978
@@ -49,6 +49,21 @@ autogluon/timeseries/models/local/npts.py,sha256=VRZk5tEJOIentt0tLM6lxyoU8US736n
 autogluon/timeseries/models/local/statsforecast.py,sha256=sZ6aEFzAyPNZX3rMULGWFht0Toapjb3EwHe5Rb76ZxA,33318
 autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
 autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=Hn-H2jLdeuB0_TxhAdununS8ti-iO-WSl3FOoxzcEJA,12369
+autogluon/timeseries/models/toto/__init__.py,sha256=rQaVjZJV5ZsJGC0jhQ6CA4nYeXdV1KtlyDz2i2usQnY,54
+autogluon/timeseries/models/toto/dataloader.py,sha256=A5WHhnAe0J7fPo2KKG43hYLSrtUBGNweuqxMmClu3_A,3598
+autogluon/timeseries/models/toto/hf_pretrained_model.py,sha256=Q8bVUaSlQVE4xFn_v7H0h_NFTxzHiM1V17KFytc50jk,4783
+autogluon/timeseries/models/toto/model.py,sha256=eP0SAoUjv9l_ExK4eoPl9ZZHW_MXa-OVLYxhj3f1bl4,8809
+autogluon/timeseries/models/toto/_internal/__init__.py,sha256=tKkiux9bD2Xu0AuVyTEx_sNOZutcluC7-d7tn7wsmec,193
+autogluon/timeseries/models/toto/_internal/dataset.py,sha256=xuAEOhoQNJGMoCxkLVLrgpdoOJuukAYbrSrnrkwFob0,6103
+autogluon/timeseries/models/toto/_internal/forecaster.py,sha256=UXiohiySn_Gs8kLheeVcVCO8qoEtYlEfMH1tukAOHsk,18520
+autogluon/timeseries/models/toto/_internal/backbone/__init__.py,sha256=hq5W62boH6HiEP8z3sHkI6_KM-Dd6TkDfWDm6DYE3J8,63
+autogluon/timeseries/models/toto/_internal/backbone/attention.py,sha256=HLUFoyqR8EqxUMT1BK-AjI4ClS8au35LcUo7Jx7Xhm0,9394
+autogluon/timeseries/models/toto/_internal/backbone/backbone.py,sha256=HUjpY2ZWed74UYKjp31erXF2ZHf3mmQMw_5_cCFeJGg,10104
+autogluon/timeseries/models/toto/_internal/backbone/distribution.py,sha256=8NXiaEVLuvjTW7L1t1RzooZFNERWv50zyLddbAwuYpo,2502
+autogluon/timeseries/models/toto/_internal/backbone/kvcache.py,sha256=QSVCrnbS2oD7wkJodZbP9XMVmrfCH6M3Zp44siF28Fg,5399
+autogluon/timeseries/models/toto/_internal/backbone/rope.py,sha256=Ghngo08DjHbwbyp6b-GXCyLeYR10dH-Y_RMOTYwIxPY,3527
+autogluon/timeseries/models/toto/_internal/backbone/scaler.py,sha256=opqyhHIZ6mPdPlrr3gA0qt9FFogIAYNDSq-P7CyQiqE,13728
+autogluon/timeseries/models/toto/_internal/backbone/transformer.py,sha256=5c-ngj4XHKlaedz1NkgdfQgqD2kUGkMn4mtGH_lTXsE,12410
 autogluon/timeseries/trainer/__init__.py,sha256=_tw3iioJfvtIV7wnjtEMv0yS8oabmCFxDnGRodYE7RI,72
 autogluon/timeseries/trainer/model_set_builder.py,sha256=s6tozfND3lLfst6Vxa_oP_wgCmDapyCJYFmCjkEn-es,10788
 autogluon/timeseries/trainer/prediction_cache.py,sha256=Vi6EbMiMheq_smA93U_MoMxYUV85RdPm0dvJFdsM8K4,5551
@@ -65,11 +80,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=rjJtdBU0M41R1jwfmvCbo045s-6XBjhGVnGBQJ9-U1E,5997
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
 autogluon/timeseries/utils/datetime/time_features.py,sha256=kEOFls4Nzh8nO0Pcz1DwLsC_NA3hMI4JUlZI3kuvuts,2666
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
-autogluon.timeseries-1.4.
+autogluon.timeseries-1.4.1b20250929.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.4.1b20250929.dist-info/METADATA,sha256=lksqXJKRyVZUDFaFzmpKC_dkxd_YrCZ24jpLfYEILSs,12702
+autogluon.timeseries-1.4.1b20250929.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.4.1b20250929.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.timeseries-1.4.1b20250929.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.4.1b20250929.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.4.1b20250929.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.4.1b20250929.dist-info/RECORD,,

/autogluon.timeseries-1.4.1b20250926-py3.9-nspkg.pth → /autogluon.timeseries-1.4.1b20250929-py3.9-nspkg.pth
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/LICENSE
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/NOTICE
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/WHEEL
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/namespace_packages.txt
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/top_level.txt
File without changes
{autogluon.timeseries-1.4.1b20250926.dist-info → autogluon.timeseries-1.4.1b20250929.dist-info}/zip-safe
File without changes
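The METADATA changes above declare einops and rotary-embedding-torch under a new "toto" extra (and under "all"), alongside the version-pin bumps. The snippet below is an illustrative sketch, not part of the wheel, of a pre-flight check for those optional runtime dependencies; the import names "einops" and "rotary_embedding_torch" are assumed from the distribution names in METADATA.

import importlib.util

# Check that the optional dependencies declared under the "toto" extra are importable.
missing = [
    pkg
    for pkg in ("einops", "rotary_embedding_torch")
    if importlib.util.find_spec(pkg) is None
]
if missing:
    raise ImportError(
        f"Missing optional dependencies {missing}; "
        'install them with: pip install "autogluon.timeseries[toto]"'
    )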