autogluon.timeseries 1.1.2b20241109__py3-none-any.whl → 1.1.2b20241112__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- autogluon/timeseries/dataset/ts_dataframe.py +5 -1
- autogluon/timeseries/models/abstract/abstract_timeseries_model.py +73 -5
- autogluon/timeseries/models/chronos/model.py +67 -38
- autogluon/timeseries/models/chronos/pipeline/__init__.py +11 -0
- autogluon/timeseries/models/chronos/pipeline/base.py +146 -0
- autogluon/timeseries/models/chronos/{pipeline.py → pipeline/chronos.py} +66 -102
- autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +511 -0
- autogluon/timeseries/models/chronos/{utils.py → pipeline/utils.py} +37 -1
- autogluon/timeseries/models/gluonts/abstract_gluonts.py +1 -0
- autogluon/timeseries/models/gluonts/torch/models.py +3 -0
- autogluon/timeseries/models/local/abstract_local_model.py +4 -1
- autogluon/timeseries/models/local/statsforecast.py +3 -0
- autogluon/timeseries/models/multi_window/multi_window_model.py +5 -0
- autogluon/timeseries/predictor.py +1 -1
- autogluon/timeseries/regressor.py +146 -0
- autogluon/timeseries/transforms/scaler.py +1 -1
- autogluon/timeseries/utils/warning_filters.py +20 -0
- autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/METADATA +5 -5
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/RECORD +27 -23
- /autogluon.timeseries-1.1.2b20241109-py3.8-nspkg.pth → /autogluon.timeseries-1.1.2b20241112-py3.8-nspkg.pth +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/LICENSE +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/NOTICE +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/WHEEL +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/zip-safe +0 -0
autogluon/timeseries/regressor.py ADDED
@@ -0,0 +1,146 @@
+from typing import Any, Dict, Optional
+
+import numpy as np
+import pandas as pd
+
+from autogluon.core.models import AbstractModel
+from autogluon.tabular.trainer.model_presets.presets import MODEL_TYPES as TABULAR_MODEL_TYPES
+from autogluon.timeseries.dataset.ts_dataframe import ITEMID, TimeSeriesDataFrame
+from autogluon.timeseries.utils.features import CovariateMetadata
+
+
+class CovariateRegressor:
+    """Predicts target values from the covariates for the same observation.
+
+    The model construct the feature matrix using known_covariates and static_features.
+
+    Parameters
+    ----------
+    model_name : str
+        Name of the tabular regression model. See `autogluon.tabular.trainer.model_presets.presets.MODEL_TYPES` for the
+        list of available models.
+    model_hyperparameters : dict or None
+        Hyperparameters passed to the tabular regression model.
+    eval_metric : str
+        Metric provided as `eval_metric` to the tabular regression model. Must be compatible with `problem_type="regression"`.
+    refit_during_predict : bool
+        If True, the model will be re-trained every time `fit_transform` is called. If False, the model will only be
+        trained the first time that `fit_transform` is called, and future calls to `fit_transform` will only perform a
+        `transform`.
+    max_num_samples : int or None
+        If not None, training dataset passed to regression model will contain at most this many rows.
+    metadata : CovariateMetadata
+        Metadata object describing the covariates available in the dataset.
+    target : str
+        Name of the target column.
+    validation_frac : float, optional
+        Fraction of observations that are reserved as the validation set during training (starting from the end of each
+        time series).
+    """
+
+    def __init__(
+        self,
+        model_name: str = "GBM",
+        model_hyperparameters: Optional[Dict[str, Any]] = None,
+        eval_metric: str = "mean_absolute_error",
+        refit_during_predict: bool = False,
+        max_num_samples: Optional[int] = 500_000,
+        metadata: Optional[CovariateMetadata] = None,
+        target: str = "target",
+        validation_fraction: Optional[float] = 0.1,
+    ):
+        if model_name not in TABULAR_MODEL_TYPES:
+            raise ValueError(
+                f"Tabular model {model_name} not supported. Available models: {list(TABULAR_MODEL_TYPES)}"
+            )
+        self.target = target
+        self.model_type = TABULAR_MODEL_TYPES[model_name]
+        self.model_name = model_name
+        self.model_hyperparameters = model_hyperparameters or {}
+        self.refit_during_predict = refit_during_predict
+        self.tabular_eval_metric = eval_metric
+        self.max_num_samples = max_num_samples
+        self.validation_fraction = validation_fraction
+        self.model: Optional[AbstractModel] = None
+        self.metadata = metadata or CovariateMetadata()
+
+    def is_fit(self) -> bool:
+        return self.model is not None
+
+    def fit(self, data: TimeSeriesDataFrame, time_limit: Optional[float] = None, **kwargs) -> "CovariateRegressor":
+        """Fit the tabular regressor on the target column using covariates as features."""
+        tabular_df = self._get_tabular_df(data, static_features=data.static_features, include_target=True)
+        tabular_df = tabular_df.query(f"{self.target}.notnull()")
+
+        median_ts_length = data.num_timesteps_per_item().median()
+        if self.validation_fraction is not None:
+            grouped_df = tabular_df.groupby(ITEMID)
+            val_size = max(int(self.validation_fraction * median_ts_length), 1)
+            train_df = self._subsample_df(grouped_df.head(-val_size))
+            val_df = self._subsample_df(grouped_df.tail(val_size))
+            X = train_df.drop(columns=[self.target])
+            y = train_df[self.target]
+            X_val = val_df.drop(columns=[self.target])
+            y_val = val_df[self.target]
+        else:
+            tabular_df = self._subsample_df(tabular_df)
+            X = tabular_df.drop(columns=[self.target])
+            y = tabular_df[self.target]
+            X_val = None
+            y_val = None
+
+        self.model = self.model_type(
+            problem_type="regression",
+            hyperparameters=self.model_hyperparameters,
+            eval_metric=self.tabular_eval_metric,
+        )
+        self.model.fit(X=X, y=y, X_val=X_val, y_val=y_val, time_limit=time_limit, **kwargs)
+        return self
+
+    def transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame:
+        """Subtract the tabular regressor predictions from the target column."""
+        y_pred = self._predict(data, static_features=data.static_features)
+        return data.assign(**{self.target: data[self.target] - y_pred})
+
+    def fit_transform(
+        self, data: TimeSeriesDataFrame, time_limit: Optional[float] = None, **kwargs
+    ) -> TimeSeriesDataFrame:
+        if not self.is_fit() or self.refit_during_predict:
+            self.fit(data=data, time_limit=time_limit, **kwargs)
+        return self.transform(data=data)
+
+    def inverse_transform(
+        self,
+        predictions: TimeSeriesDataFrame,
+        known_covariates: TimeSeriesDataFrame,
+        static_features: Optional[pd.DataFrame],
+    ) -> TimeSeriesDataFrame:
+        """Add the tabular regressor predictions to the target column."""
+        y_pred = self._predict(known_covariates, static_features=static_features)
+        return predictions.assign(**{col: predictions[col] + y_pred for col in predictions.columns})
+
+    def _predict(self, data: TimeSeriesDataFrame, static_features: Optional[pd.DataFrame]) -> np.ndarray:
+        """Construct the tabular features matrix and make predictions"""
+        tabular_df = self._get_tabular_df(data, static_features=static_features)
+        return self.model.predict(X=tabular_df)
+
+    def _get_tabular_df(
+        self,
+        data: TimeSeriesDataFrame,
+        static_features: Optional[pd.DataFrame] = None,
+        include_target: bool = False,
+    ) -> pd.DataFrame:
+        """Construct a tabular dataframe from known covariates and static features."""
+        available_columns = [ITEMID] + self.metadata.known_covariates
+        if include_target:
+            available_columns += [self.target]
+        tabular_df = pd.DataFrame(data).reset_index()[available_columns].astype({ITEMID: "category"})
+        if static_features is not None:
+            tabular_df = pd.merge(tabular_df, static_features, on=ITEMID)
+        return tabular_df
+
+    def _subsample_df(self, df: pd.DataFrame) -> pd.DataFrame:
+        """Randomly subsample the dataframe if it contains more than self.max_num_samples rows."""
+        if self.max_num_samples is not None and len(df) > self.max_num_samples:
+            df = df.sample(n=self.max_num_samples)
+        return df
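The new CovariateRegressor residualizes the target against the known covariates before the forecasting model sees the data, and its inverse_transform adds the regressor's predictions back onto each prediction column afterwards. Below is a minimal usage sketch, not part of the diff: the column names and toy data are illustrative, and the CovariateMetadata field name known_covariates_real is an assumption about the autogluon 1.1.x API rather than something shown above.

import pandas as pd

from autogluon.timeseries import TimeSeriesDataFrame
from autogluon.timeseries.regressor import CovariateRegressor
from autogluon.timeseries.utils.features import CovariateMetadata

# Toy long-format data with one known covariate ("price"); names and values are illustrative.
df = pd.DataFrame(
    {
        "item_id": ["A"] * 50 + ["B"] * 50,
        "timestamp": list(pd.date_range("2024-01-01", periods=50, freq="D")) * 2,
        "target": range(100),
        "price": [1.0, 2.0] * 50,
    }
)
data = TimeSeriesDataFrame.from_data_frame(df, id_column="item_id", timestamp_column="timestamp")

# Declare which columns are known covariates
# (assumes CovariateMetadata exposes a `known_covariates_real` field).
metadata = CovariateMetadata(known_covariates_real=["price"])

regressor = CovariateRegressor(model_name="GBM", metadata=metadata, target="target")
# fit_transform fits the tabular model and returns the data with the regressor's
# predictions subtracted from the target column (residualization).
residualized = regressor.fit_transform(data)
print(residualized.head())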
autogluon/timeseries/transforms/scaler.py CHANGED
@@ -85,7 +85,7 @@ class LocalMinMaxScaler(LocalTargetScaler):
     def _compute_loc_scale(self, target_series: pd.Series) -> Tuple[pd.Series, pd.Series]:
         stats = target_series.abs().groupby(level=ITEMID, sort=False).agg(["min", "max"])
         scale = (stats["max"] - stats["min"]).clip(lower=self.min_scale)
-        loc = stats["min"]
+        loc = stats["min"]
         return loc, scale
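For reference, the loc/scale computation in this hunk takes per-item statistics over the absolute values of the target, grouped by the item_id index level. A small standalone sketch with made-up numbers (the min_scale value of 1e-2 is an assumption, not taken from the diff):

import pandas as pd

# Per-item location/scale, computed as in the hunk above.
target = pd.Series(
    [1.0, -3.0, 2.0, 10.0, 20.0, 40.0],
    index=pd.MultiIndex.from_product(
        [["A", "B"], pd.date_range("2024-01-01", periods=3)], names=["item_id", "timestamp"]
    ),
)
stats = target.abs().groupby(level="item_id", sort=False).agg(["min", "max"])
scale = (stats["max"] - stats["min"]).clip(lower=1e-2)  # min_scale assumed to be 1e-2
loc = stats["min"]
print(loc.to_dict())    # {'A': 1.0, 'B': 10.0}
print(scale.to_dict())  # {'A': 2.0, 'B': 30.0}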
autogluon/timeseries/utils/warning_filters.py CHANGED
@@ -6,6 +6,7 @@ import os
 import re
 import sys
 import warnings
+from collections import Counter
 
 __all__ = ["warning_filter", "disable_root_logger", "disable_tqdm"]
 
@@ -70,3 +71,22 @@ def disable_stdout():
     sys.stdout = io.StringIO()
     yield
     sys.stdout = save_stdout
+
+
+class DuplicateLogFilter:
+    def __init__(self, max_count: int = 1):
+        self.messages = Counter()
+        self.max_count = max_count
+
+    def filter(self, record):
+        count = self.messages[record.msg]
+        self.messages[record.msg] += 1
+        return count < self.max_count
+
+
+@contextlib.contextmanager
+def disable_duplicate_logs(logger, max_count: int = 1):
+    log_filter = DuplicateLogFilter(max_count=max_count)
+    logger.addFilter(log_filter)
+    yield
+    logger.removeFilter(log_filter)
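The second hunk adds a DuplicateLogFilter and a disable_duplicate_logs context manager that temporarily attaches the filter to a logger, so each distinct message is emitted at most max_count times. A short usage sketch (the logger name and the message text are illustrative):

import logging

from autogluon.timeseries.utils.warning_filters import disable_duplicate_logs

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("example")

# Inside the context manager the filter deduplicates by record.msg;
# it is removed again when the block exits.
with disable_duplicate_logs(logger, max_count=1):
    for _ in range(5):
        logger.info("Series is too short for the chosen context length")  # logged once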
autogluon/timeseries/version.py CHANGED

{autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.1.2b20241109
+Version: 1.1.2b20241112
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -43,7 +43,7 @@ Requires-Dist: lightning<2.6,>=2.2
 Requires-Dist: pytorch-lightning
 Requires-Dist: transformers[sentencepiece]<5,>=4.38.0
 Requires-Dist: accelerate<1.0,>=0.32.0
-Requires-Dist: gluonts==0.
+Requires-Dist: gluonts==0.16.0
 Requires-Dist: networkx<4,>=3.0
 Requires-Dist: statsforecast<1.8,>=1.7.0
 Requires-Dist: mlforecast==0.13.4
@@ -53,9 +53,9 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.1.2b20241109
-Requires-Dist: autogluon.common==1.1.2b20241109
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.1.2b20241109
+Requires-Dist: autogluon.core[raytune]==1.1.2b20241112
+Requires-Dist: autogluon.common==1.1.2b20241112
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.1.2b20241112
 Provides-Extra: all
 Requires-Dist: optimum[onnxruntime]<1.20,>=1.17; extra == "all"
 Provides-Extra: chronos-onnx
{autogluon.timeseries-1.1.2b20241109.dist-info → autogluon.timeseries-1.1.2b20241112.dist-info}/RECORD CHANGED
@@ -1,14 +1,15 @@
-autogluon.timeseries-1.1.
+autogluon.timeseries-1.1.2b20241112-py3.8-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
 autogluon/timeseries/learner.py,sha256=3dUxI-U6TGfNtRQUzWTvBIo1GKeXYOhxIX_q7Fed9eA,14013
-autogluon/timeseries/predictor.py,sha256=
+autogluon/timeseries/predictor.py,sha256=R9m-TYmlA4WoJbdYEL_AnEM26EhRIclynOfSmpO7mBk,84926
+autogluon/timeseries/regressor.py,sha256=wcYbvE7kFopdscubfhIfeLI3ovxKe_fUVtt0b1zWdV0,6823
 autogluon/timeseries/splitter.py,sha256=eghGwAAN2_cxGk5aJBILgjGWtLzjxJcytMy49gg_q18,3061
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=lXKlufs6aLjs5PeI3KNuQ6bVkvRKxHvpSZbuJ-wwTeo,90
 autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
 autogluon/timeseries/configs/presets_configs.py,sha256=94-yL9teDHKs2irWjP3kpewI7FE1ChYCgEgz9XHJ6gc,1965
 autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
-autogluon/timeseries/dataset/ts_dataframe.py,sha256=
+autogluon/timeseries/dataset/ts_dataframe.py,sha256=UQ-iT2dGVJF57hlGkivbSEaBwf-5NP0Amohp4DccLUA,48492
 autogluon/timeseries/metrics/__init__.py,sha256=KzgXNj5or7RB_uadjgC8p5gxyV26zjj2hT58OmvnfmA,1875
 autogluon/timeseries/metrics/abstract.py,sha256=9xCFQ3NaR1C0hn01M7oBd72a_CiNV-w6QFcRjwUbKYI,8183
 autogluon/timeseries/metrics/point.py,sha256=xy8sKrBbuxZ7yTW21TDPayKnEj2FBj1AEseJxUdneqE,13399
@@ -17,49 +18,52 @@ autogluon/timeseries/metrics/utils.py,sha256=eJ63TCR-UwbeJ1c2Qm7B2q-8B3sFthPgioo
 autogluon/timeseries/models/__init__.py,sha256=MYD9JJ-wUDE5B6jW6E6LU2eXQ6vflfQBvqQJkdzJa3A,1189
 autogluon/timeseries/models/presets.py,sha256=ujNt_hft_5eNkh-Wj_Na9GBdBmI-JdnBnOEHq8X0qXc,11778
 autogluon/timeseries/models/abstract/__init__.py,sha256=wvDsQAZIV0N3AwBeMaGItoQ82trEfnT-nol2AAOIxBg,102
-autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=
+autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=kVbJHyDWXmBJDL_4mUhEvpTG_d85vEjW5Og57d5CNN0,28092
 autogluon/timeseries/models/abstract/model_trial.py,sha256=ENPg_7nsdxIvaNM0o0UShZ3x8jFlRmwRc5m0fGPC0TM,3720
 autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=r9i6jWcyeLHYClkcMSKRVsfrkBUMxpDrTATNTBc_qgQ,136
 autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=C1WVcuNlTcqo_qGm3v0uPpraO06mdVnBNeflPbCPjNQ,32861
 autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=FozTzwcp1QjevEhrMLXsJHy8fymOcq1146oX4Al60wg,2517
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
-autogluon/timeseries/models/chronos/model.py,sha256=
-autogluon/timeseries/models/chronos/pipeline.py,sha256=
-autogluon/timeseries/models/chronos/
+autogluon/timeseries/models/chronos/model.py,sha256=ULH7XCBcl93CQ4aKtY-RzhxlXyfb1bTfOmnAJWe2zLU,16589
+autogluon/timeseries/models/chronos/pipeline/__init__.py,sha256=N-YZH9BGBoi99r5cznJe1zEEjwjIg7cOYIHZkKuJq44,247
+autogluon/timeseries/models/chronos/pipeline/base.py,sha256=sVhADJ9fkJBL6p6UYw9zRb07Z1rGGHog95uOMrcs7dA,5044
+autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=cbDS7_9Z8Sn1NZdwr2DqqCqxm_zWejDxKHHRLCvLDNE,19284
+autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=vKXyz8lfIyOhobFp4kLkCQ61CWhm3BTXeluOq1EZaqU,19563
+autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=C8S-1gr_PMxLYes7SP7Ep0hOrA5k9jRjM4wJAcSNuCE,3519
 autogluon/timeseries/models/ensemble/__init__.py,sha256=kFr11Gmt7lQJu9Rr8HuIPphQN5l1TsoorfbJm_O3a_s,128
 autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py,sha256=tifETwmiEGt-YtQ9eNK7ojJ3fBvtFMUJvisbfkIJ7gw,3393
 autogluon/timeseries/models/ensemble/greedy_ensemble.py,sha256=5HvZuW5osgsZg3V69k82nKEOy_YgeH1JTfQa7F3cU7s,7220
 autogluon/timeseries/models/gluonts/__init__.py,sha256=asC1PTj4j9xMbilvk1IT1julnpeoKbv5ZNuAR6-DFgA,361
-autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=
+autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=tViaFXFOVjGQi2S6cUIW-ak0Evv7rKUm2QWsmpKDMEk,34076
 autogluon/timeseries/models/gluonts/torch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/timeseries/models/gluonts/torch/models.py,sha256=
+autogluon/timeseries/models/gluonts/torch/models.py,sha256=ASTMabSAzBRJ_s5Iqjp-9fZvt4aKN892Uz35kzqGMWM,25018
 autogluon/timeseries/models/local/__init__.py,sha256=e2UImoJhmj70E148IIObv90C_bHxgyLNk6YsS4p7pfs,701
-autogluon/timeseries/models/local/abstract_local_model.py,sha256=
+autogluon/timeseries/models/local/abstract_local_model.py,sha256=OxEkqzfAd5diQDUYStw2nI-X2lo3H8GcMLDJ6-1XL_Y,12417
 autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F_VAp6-jb_5SUE,7249
 autogluon/timeseries/models/local/npts.py,sha256=Bp74doKnfpGE8ywP4FWOCI_RwRMsmgocYDfGtq764DA,4143
-autogluon/timeseries/models/local/statsforecast.py,sha256=
+autogluon/timeseries/models/local/statsforecast.py,sha256=cFJ_A7LR2jTmFNGgMxt3xvEivQVYuV6bDCMii8-TKH0,32424
 autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
-autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=
+autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=aNS0W4bh9quaxgFRtyJesVziPKHvg4lfCDrAQutqEjk,12014
 autogluon/timeseries/trainer/__init__.py,sha256=lxiOT-Gc6BEnr_yWQqra85kEngeM_wtH2SCaRbmC_qE,170
 autogluon/timeseries/trainer/abstract_trainer.py,sha256=hZI4QcsFvU1gxP2yv_DRCIMlc6q02ptR7UDA9EgJPoM,60409
 autogluon/timeseries/trainer/auto_trainer.py,sha256=psJFZBwWWPlLjNwAgvO4OUJXsRW1sTN2YS9a4pdoeoE,3344
 autogluon/timeseries/transforms/__init__.py,sha256=lzDavxdgGIz5m_DmSpNa9ewNU9Evndam3YXfOEk6kwY,174
-autogluon/timeseries/transforms/scaler.py,sha256=
+autogluon/timeseries/transforms/scaler.py,sha256=jgj9-637zgDREJidNpavKIQbF0y6RB_zwPGKWAGa6lw,5344
 autogluon/timeseries/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/timeseries/utils/features.py,sha256=VvBQzaymSSzxI9khtcXbpir-qo1NWHe51O7F6ynyh_s,21943
 autogluon/timeseries/utils/forecast.py,sha256=p0WKM9Q0nLAwwmCgYZI1zi9mCOWXWJfllEt2lPRQl4M,1882
-autogluon/timeseries/utils/warning_filters.py,sha256=
+autogluon/timeseries/utils/warning_filters.py,sha256=JRitJwTsowvIXhNMEWjYIhLGgmXz1Kkj0J6ON3EVyS4,2508
 autogluon/timeseries/utils/datetime/__init__.py,sha256=bTMR8jLh1LW55vHjbOr1zvWRMF_PqbvxpS-cUcNIDWI,173
 autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbjy4DJ_YYOGuu9x4,1341
 autogluon/timeseries/utils/datetime/lags.py,sha256=GoLtvcZ8oKb3QkoBJ9E59LSPLOP7Qjxrr2UmMSZgjyw,5909
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=h_4w00iEytAz_N_EpCENQ8RCXy7KQITczrYjBgVqWkQ,764
 autogluon/timeseries/utils/datetime/time_features.py,sha256=PAXbYbQ0z_5GFbkxSNi41zLY_2-U3x0Ynm1m_WhdtGc,2572
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
-autogluon.timeseries-1.1.
+autogluon.timeseries-1.1.2b20241112.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.1.2b20241112.dist-info/METADATA,sha256=l1LD3PIyOjR4eUt0Y-j8AmPpj0u4w1SYh9kzhvkCuZ0,12388
+autogluon.timeseries-1.1.2b20241112.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.1.2b20241112.dist-info/WHEEL,sha256=bFJAMchF8aTQGUgMZzHJyDDMPTO3ToJ7x23SLJa1SVo,92
+autogluon.timeseries-1.1.2b20241112.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.1.2b20241112.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.1.2b20241112.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.1.2b20241112.dist-info/RECORD,,