autogluon.timeseries 1.2.1b20250303__py3-none-any.whl → 1.2.1b20250305__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autogluon/timeseries/models/abstract/abstract_timeseries_model.py +59 -131
- autogluon/timeseries/models/autogluon_tabular/mlforecast.py +3 -4
- autogluon/timeseries/models/autogluon_tabular/transforms.py +2 -2
- autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py +8 -0
- autogluon/timeseries/models/multi_window/multi_window_model.py +0 -5
- autogluon/timeseries/predictor.py +6 -1
- autogluon/timeseries/regressor.py +54 -6
- autogluon/timeseries/transforms/__init__.py +2 -13
- autogluon/timeseries/transforms/covariate_scaler.py +28 -34
- autogluon/timeseries/transforms/target_scaler.py +22 -5
- autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/METADATA +4 -4
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/RECORD +20 -20
- /autogluon.timeseries-1.2.1b20250303-py3.9-nspkg.pth → /autogluon.timeseries-1.2.1b20250305-py3.9-nspkg.pth +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/LICENSE +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/NOTICE +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/WHEEL +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/top_level.txt +0 -0
- {autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/zip-safe +0 -0
autogluon/timeseries/models/abstract/abstract_timeseries_model.py
CHANGED
@@ -5,6 +5,7 @@ import logging
 import os
 import re
 import time
+from abc import ABC, abstractmethod
 from contextlib import nullcontext
 from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
 
@@ -27,13 +28,8 @@ from autogluon.core.models import ModelBase
 from autogluon.core.utils.exceptions import TimeLimitExceeded
 from autogluon.timeseries.dataset import TimeSeriesDataFrame
 from autogluon.timeseries.metrics import TimeSeriesScorer, check_get_evaluation_metric
-from autogluon.timeseries.regressor import CovariateRegressor
-from autogluon.timeseries.transforms import (
-    CovariateScaler,
-    LocalTargetScaler,
-    get_covariate_scaler_from_name,
-    get_target_scaler_from_name,
-)
+from autogluon.timeseries.regressor import CovariateRegressor, get_covariate_regressor
+from autogluon.timeseries.transforms import CovariateScaler, TargetScaler, get_covariate_scaler, get_target_scaler
 from autogluon.timeseries.utils.features import CovariateMetadata
 from autogluon.timeseries.utils.forecast import get_forecast_horizon_index_ts_dataframe
 from autogluon.timeseries.utils.warning_filters import disable_stdout, warning_filter
@@ -120,9 +116,9 @@ def check_and_split_hyperparameters(
     return params, params_aux
 
 
-# TODO: refactor. remove params_aux, etc. make
+# TODO: refactor. remove params_aux, etc. make overrides and abstract
 # methods clear, change name to TimeSeriesModel, et al.
-class AbstractTimeSeriesModel(ModelBase):
+class AbstractTimeSeriesModel(ModelBase, ABC):
     """Abstract class for all `Model` objects in autogluon.timeseries.
 
     Parameters
@@ -212,19 +208,16 @@ class AbstractTimeSeriesModel(ModelBase):
         else:
             self.must_drop_median = False
 
+        self._user_params, self._user_params_aux = check_and_split_hyperparameters(hyperparameters)
         self._oof_predictions: Optional[List[TimeSeriesDataFrame]] = None
-        self.target_scaler: Optional[LocalTargetScaler] = None
-        self.covariate_scaler: Optional[CovariateScaler] = None
-        self.covariate_regressor: Optional[CovariateRegressor] = None
 
-
-        self.
+        self.params: Dict[str, Any] = {}
+        self.params_aux: Dict[str, Any] = {}
+        self._init_params_aux()
+        self._init_params()
+        self._is_initialized = True
 
-
-        self._user_params, self._user_params_aux = check_and_split_hyperparameters(hyperparameters)
-
-        self.params = {}
-        self.params_aux = {}
+        # TODO: remove the variables below
         self.nondefault_params: List[str] = []
 
         self.fit_time: Optional[float] = None  # Time taken to fit in seconds (Training data)
@@ -234,6 +227,11 @@ class AbstractTimeSeriesModel(ModelBase):
         )
         self.val_score: Optional[float] = None  # Score with eval_metric (Validation data)
 
+        self.target_scaler: Optional[TargetScaler]
+        self.covariate_scaler: Optional[CovariateScaler]
+        self.covariate_regressor: Optional[CovariateRegressor]
+        self._initialize_transforms_and_regressor()
+
     def __repr__(self) -> str:
         return self.name
 
@@ -263,9 +261,7 @@ class AbstractTimeSeriesModel(ModelBase):
         self._oof_predictions = None
 
         file_path = os.path.join(path, self.model_file_name)
-        _model = self.model
         save_pkl.save(path=file_path, object=self, verbose=verbose)
-        self.model = _model
 
         self._oof_predictions = oof_predictions
         return path
@@ -310,21 +306,6 @@ class AbstractTimeSeriesModel(ModelBase):
             self._oof_predictions = self.load_oof_predictions(self.path)
         return self._oof_predictions
 
-    def _get_default_auxiliary_params(self) -> dict:
-        return dict(
-            # ratio of given time_limit to use during fit(). If time_limit == 10 and max_time_limit_ratio=0.3,
-            # time_limit would be changed to 3.
-            max_time_limit_ratio=self.default_max_time_limit_ratio,
-            # max time_limit value during fit(). If the provided time_limit is greater than this value, it will be
-            # replaced by max_time_limit. Occurs after max_time_limit_ratio is applied.
-            max_time_limit=None,
-        )
-
-    # TODO: remove
-    @classmethod
-    def _get_default_ag_args(cls) -> dict:
-        return {}
-
     def _init_params(self):
         """Initializes model hyperparameters"""
         hyperparameters = self._user_params
@@ -342,26 +323,35 @@ class AbstractTimeSeriesModel(ModelBase):
         For documentation on some of the available options and their defaults, refer to `self._get_default_auxiliary_params`.
         """
         hyperparameters_aux = self._user_params_aux or {}
-
-
-
-
-
-
-
-
-
-
-
-        self.
-        self.
+        default_aux_params = dict(
+            # ratio of given time_limit to use during fit(). If time_limit == 10 and max_time_limit_ratio=0.3,
+            # time_limit would be changed to 3.
+            max_time_limit_ratio=self.default_max_time_limit_ratio,
+            # max time_limit value during fit(). If the provided time_limit is greater than this value, it will be
+            # replaced by max_time_limit. Occurs after max_time_limit_ratio is applied.
+            max_time_limit=None,
+        )
+        self.params_aux = {**default_aux_params, **hyperparameters_aux}
+
+    def _initialize_transforms_and_regressor(self) -> None:
+        self.target_scaler = get_target_scaler(self._get_model_params().get("target_scaler"), target=self.target)
+        self.covariate_scaler = get_covariate_scaler(
+            self._get_model_params().get("covariate_scaler"),
+            covariate_metadata=self.metadata,
+            use_static_features=self.supports_static_features,
+            use_known_covariates=self.supports_known_covariates,
+            use_past_covariates=self.supports_past_covariates,
+        )
+        self.covariate_regressor = get_covariate_regressor(
+            self._get_model_params().get("covariate_regressor"),
+            target=self.target,
+            covariate_metadata=self.metadata,
+        )
 
     def _get_model_params(self) -> dict:
         return self.params.copy()
 
     def get_params(self) -> dict:
-        # TODO: do not extract to AbstractModel if this is only used for getting a
-        # prototype of the object for HPO.
         hyperparameters = self._user_params.copy()
         if self._user_params_aux:
             hyperparameters[AG_ARGS_FIT] = self._user_params_aux.copy()
@@ -378,19 +368,6 @@ class AbstractTimeSeriesModel(ModelBase):
             target=self.target,
         )
 
-    @classmethod
-    def load_info(cls, path: str, load_model_if_required: bool = True) -> dict:
-        # TODO: remove?
-        load_path = os.path.join(path, cls.model_info_name)
-        try:
-            return load_pkl.load(path=load_path)
-        except:
-            if load_model_if_required:
-                model = cls.load(path=path, reset_paths=True)
-                return model.get_info()
-            else:
-                raise
-
     def get_info(self) -> dict:
         """
         Returns a dictionary of numerous fields describing the model.
@@ -410,6 +387,19 @@ class AbstractTimeSeriesModel(ModelBase):
         }
         return info
 
+    @classmethod
+    def load_info(cls, path: str, load_model_if_required: bool = True) -> dict:
+        # TODO: remove?
+        load_path = os.path.join(path, cls.model_info_name)
+        try:
+            return load_pkl.load(path=load_path)
+        except:
+            if load_model_if_required:
+                model = cls.load(path=path, reset_paths=True)
+                return model.get_info()
+            else:
+                raise
+
     def fit(  # type: ignore
         self,
         train_data: TimeSeriesDataFrame,
@@ -452,7 +442,6 @@ class AbstractTimeSeriesModel(ModelBase):
             The fitted model object
         """
         start_time = time.monotonic()
-        self.initialize()
 
         if self.target_scaler is not None:
             train_data = self.target_scaler.fit_transform(train_data)
@@ -525,6 +514,7 @@ class AbstractTimeSeriesModel(ModelBase):
 
         return time_limit
 
+    @abstractmethod
     def _fit(  # type: ignore
         self,
         train_data: TimeSeriesDataFrame,
@@ -539,8 +529,7 @@ class AbstractTimeSeriesModel(ModelBase):
         the model training logic, `fit` additionally implements other logic such as keeping
         track of the time limit, etc.
         """
-
-        raise NotImplementedError
+        pass
 
     # TODO: perform this check inside fit() ?
     def _check_fit_params(self):
@@ -556,57 +545,6 @@ class AbstractTimeSeriesModel(ModelBase):
         """List of hyperparameters allowed by the model."""
         return ["target_scaler", "covariate_regressor"]
 
-    def _create_target_scaler(self) -> Optional[LocalTargetScaler]:
-        """Create a LocalTargetScaler object based on the value of the `target_scaler` hyperparameter."""
-        # TODO: Add support for custom target transforms (e.g., Box-Cox, log1p, ...)
-        target_scaler_type = self._get_model_params().get("target_scaler")
-        if target_scaler_type is not None:
-            return get_target_scaler_from_name(target_scaler_type, target=self.target)
-        else:
-            return None
-
-    def _create_covariate_scaler(self) -> Optional[CovariateScaler]:
-        """Create a CovariateScaler object based on the value of the `covariate_scaler` hyperparameter."""
-        covariate_scaler_type = self._get_model_params().get("covariate_scaler")
-        if covariate_scaler_type is not None:
-            return get_covariate_scaler_from_name(
-                covariate_scaler_type,
-                metadata=self.metadata,
-                use_static_features=self.supports_static_features,
-                use_known_covariates=self.supports_known_covariates,
-                use_past_covariates=self.supports_past_covariates,
-            )
-        else:
-            return None
-
-    def _create_covariate_regressor(self) -> Optional[CovariateRegressor]:
-        """Create a CovariateRegressor object based on the value of the `covariate_regressor` hyperparameter."""
-        covariate_regressor = self._get_model_params().get("covariate_regressor")
-        if covariate_regressor is not None:
-            if len(self.metadata.known_covariates + self.metadata.static_features) == 0:
-                logger.info(
-                    "\tSkipping covariate_regressor since the dataset contains no covariates or static features."
-                )
-                return None
-            else:
-                if isinstance(covariate_regressor, str):
-                    return CovariateRegressor(covariate_regressor, target=self.target, metadata=self.metadata)
-                elif isinstance(covariate_regressor, dict):
-                    return CovariateRegressor(**covariate_regressor, target=self.target, metadata=self.metadata)
-                elif isinstance(covariate_regressor, CovariateRegressor):
-                    logger.warning(
-                        "\tUsing a custom covariate_regressor is experimental functionality that may break in the future!"
-                    )
-                    covariate_regressor.target = self.target
-                    covariate_regressor.metadata = self.metadata
-                    return covariate_regressor
-                else:
-                    raise ValueError(
-                        f"Invalid value for covariate_regressor {covariate_regressor} of type {type(covariate_regressor)}"
-                    )
-        else:
-            return None
-
     def predict(  # type: ignore
         self,
         data: Union[TimeSeriesDataFrame, Dict[str, Optional[TimeSeriesDataFrame]]],
@@ -682,6 +620,7 @@ class AbstractTimeSeriesModel(ModelBase):
         """For each item in the dataframe, get timestamps for the next `prediction_length` time steps into the future."""
         return get_forecast_horizon_index_ts_dataframe(data, prediction_length=self.prediction_length, freq=self.freq)
 
+    @abstractmethod
     def _predict(
         self,
         data: Union[TimeSeriesDataFrame, Dict[str, TimeSeriesDataFrame]],
@@ -689,7 +628,7 @@ class AbstractTimeSeriesModel(ModelBase):
         **kwargs,
     ) -> TimeSeriesDataFrame:
         """Private method for `predict`. See `predict` for documentation of arguments."""
-
+        pass
 
     def _score_with_predictions(
         self,
@@ -733,8 +672,6 @@ class AbstractTimeSeriesModel(ModelBase):
             The computed forecast evaluation score on the last `self.prediction_length`
             time steps of each time series.
         """
-        # TODO: align method signature in the new AbstractModel
-
         past_data, known_covariates = data.get_model_inputs_for_scoring(
             prediction_length=self.prediction_length, known_covariates_names=self.metadata.known_covariates
         )
@@ -795,8 +732,6 @@ class AbstractTimeSeriesModel(ModelBase):
             hyperparameter_tune_kwargs, default_num_trials=default_num_trials, time_limit=time_limit
         )
 
-        self.initialize()
-
         # we use k_fold=1 to circumvent autogluon.core logic to manage resources during parallelization
         # of different folds
         # FIXME: we pass in self which currently does not inherit from AbstractModel
@@ -845,7 +780,7 @@ class AbstractTimeSeriesModel(ModelBase):
             directory=self.path,
             minimum_cpu_per_trial=minimum_cpu_per_trial,
             minimum_gpu_per_trial=minimum_resources.get("num_gpus", 0),
-            model_estimate_memory_usage=None,
+            model_estimate_memory_usage=None,  # type: ignore
             adapter_type="timeseries",
         )
 
@@ -913,7 +848,6 @@ class AbstractTimeSeriesModel(ModelBase):
         **kwargs,
     ) -> Tuple[TimeSeriesDataFrame, Optional[TimeSeriesDataFrame]]:
         """Method that implements model-specific preprocessing logic."""
-        # TODO: move to new AbstractModel
         return data, known_covariates
 
     def persist(self) -> Self:
@@ -953,12 +887,6 @@ class AbstractTimeSeriesModel(ModelBase):
         if AG_ARGS_FIT not in params["hyperparameters"]:
             params["hyperparameters"][AG_ARGS_FIT] = dict()
 
-        # TODO: remove
-        # Increase memory limit by 25% to avoid memory restrictions during fit
-        params["hyperparameters"][AG_ARGS_FIT]["max_memory_usage_ratio"] = (
-            params["hyperparameters"][AG_ARGS_FIT].get("max_memory_usage_ratio", 1.0) * 1.25
-        )
-
         params["hyperparameters"].update(self.params_trained)
         params["name"] = params["name"] + REFIT_FULL_SUFFIX
         template = self.__class__(**params)
autogluon/timeseries/models/autogluon_tabular/mlforecast.py
CHANGED
@@ -88,6 +88,9 @@ class AbstractMLForecastModel(AbstractTimeSeriesModel):
         self._train_target_median: Optional[float] = None
         self._non_boolean_real_covariates: List[str] = []
 
+        # Do not create a scaler in the model, scaler will be passed to MLForecast
+        self.target_scaler = None
+
     @property
     def tabular_predictor_path(self) -> str:
         return os.path.join(self.path, "tabular_predictor")
@@ -420,10 +423,6 @@ class AbstractMLForecastModel(AbstractTimeSeriesModel):
     def _more_tags(self) -> dict:
         return {"allow_nan": True, "can_refit_full": True}
 
-    def _create_target_scaler(self):
-        # Do not create a scaler in the model, scaler will be passed to MLForecast
-        return None
-
 
 class DirectTabularModel(AbstractMLForecastModel):
     """Predict all future time series values simultaneously using TabularPredictor from AutoGluon-Tabular.
autogluon/timeseries/models/autogluon_tabular/transforms.py
CHANGED
@@ -13,7 +13,7 @@ from autogluon.timeseries.dataset.ts_dataframe import (
     TIMESTAMP,
     TimeSeriesDataFrame,
 )
-from autogluon.timeseries.transforms.target_scaler import LocalTargetScaler,
+from autogluon.timeseries.transforms.target_scaler import LocalTargetScaler, get_target_scaler
 
 from .utils import MLF_ITEMID, MLF_TIMESTAMP
 
@@ -31,7 +31,7 @@ class MLForecastScaler(BaseTargetTransform):
         return pd.DataFrame(ts_df).reset_index().rename(columns={ITEMID: self.id_col, TIMESTAMP: self.time_col})
 
     def fit_transform(self, df: pd.DataFrame) -> pd.DataFrame:
-        self.ag_scaler =
+        self.ag_scaler = get_target_scaler(name=self.scaler_type, target=self.target_col)
         transformed = self.ag_scaler.fit_transform(self._df_to_tsdf(df)).reset_index()
         return self._tsdf_to_df(transformed)
 
autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py
CHANGED
@@ -76,3 +76,11 @@ class AbstractTimeSeriesEnsembleModel(AbstractTimeSeriesModel):
         This method should be called after performing refit_full to point to the refitted base models, if necessary.
         """
         raise NotImplementedError
+
+    # TODO: remove
+    def _fit(*args, **kwargs):
+        pass
+
+    # TODO: remove
+    def _predict(*args, **kwargs):
+        pass
autogluon/timeseries/models/multi_window/multi_window_model.py
CHANGED
@@ -222,11 +222,6 @@ class MultiWindowBacktestingModel(AbstractTimeSeriesModel):
         # Do not initialize the target_scaler and covariate_regressor in the multi window model!
         pass
 
-    def initialize(self, **kwargs) -> dict:
-        super().initialize(**kwargs)
-        self.model_base.initialize(**kwargs)
-        return kwargs
-
     def _get_hpo_train_fn_kwargs(self, **train_fn_kwargs) -> dict:
         train_fn_kwargs["is_bagged_model"] = True
         train_fn_kwargs["init_params"]["model_base"] = self.model_base.__class__
autogluon/timeseries/predictor.py
CHANGED
@@ -10,7 +10,11 @@ from typing import Any, Dict, List, Literal, Optional, Type, Union, cast
 import numpy as np
 import pandas as pd
 
-from autogluon.common.utils.log_utils import
+from autogluon.common.utils.log_utils import (
+    add_log_to_file,
+    set_logger_verbosity,
+    warn_if_mlflow_autologging_is_enabled,
+)
 from autogluon.common.utils.system_info import get_ag_system_info
 from autogluon.common.utils.utils import check_saved_predictor_version, setup_outputdir
 from autogluon.core.utils.decorators import apply_presets
@@ -655,6 +659,7 @@ class TimeSeriesPredictor:
         if verbosity is None:
             verbosity = self.verbosity
         set_logger_verbosity(verbosity, logger=logger)
+        warn_if_mlflow_autologging_is_enabled(logger=logger)
 
         logger.info("Beginning AutoGluon training..." + (f" Time limit = {time_limit}s" if time_limit else ""))
         logger.info(f"AutoGluon will save models to '{self.path}'")
autogluon/timeseries/regressor.py
CHANGED
@@ -1,6 +1,6 @@
 import logging
 import time
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, Protocol, Union, overload, runtime_checkable
 
 import numpy as np
 import pandas as pd
@@ -13,7 +13,27 @@ from autogluon.timeseries.utils.features import CovariateMetadata
 logger = logging.getLogger(__name__)
 
 
-class CovariateRegressor:
+@runtime_checkable
+class CovariateRegressor(Protocol):
+    def is_fit(self) -> bool: ...
+
+    def fit(self, data: TimeSeriesDataFrame, time_limit: Optional[float] = None, **kwargs) -> "CovariateRegressor": ...
+
+    def transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
+
+    def fit_transform(
+        self, data: TimeSeriesDataFrame, time_limit: Optional[float] = None, **kwargs
+    ) -> TimeSeriesDataFrame: ...
+
+    def inverse_transform(
+        self,
+        predictions: TimeSeriesDataFrame,
+        known_covariates: TimeSeriesDataFrame,
+        static_features: Optional[pd.DataFrame],
+    ) -> TimeSeriesDataFrame: ...
+
+
+class GlobalCovariateRegressor(CovariateRegressor):
     """Predicts target values from the covariates for the same observation.
 
     The model construct the feature matrix using known_covariates and static_features.
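Because `CovariateRegressor` is now a `runtime_checkable` `Protocol`, `isinstance` checks pass for any object that exposes these five methods, whatever its base class. A small illustrative sketch (the `DummyRegressor` class below is made up for the example and is not part of autogluon):

```python
from autogluon.timeseries.regressor import CovariateRegressor


class DummyRegressor:
    """Structurally compatible with the CovariateRegressor protocol."""

    def is_fit(self) -> bool:
        return True

    def fit(self, data, time_limit=None, **kwargs):
        return self

    def transform(self, data):
        return data

    def fit_transform(self, data, time_limit=None, **kwargs):
        return data

    def inverse_transform(self, predictions, known_covariates, static_features):
        return predictions


# runtime_checkable protocols only check for the presence of the declared methods
print(isinstance(DummyRegressor(), CovariateRegressor))  # True
```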
@@ -33,7 +53,7 @@ class CovariateRegressor:
         `transform`.
     max_num_samples : int or None
         If not None, training dataset passed to regression model will contain at most this many rows.
-
+    covariate_metadata : CovariateMetadata
         Metadata object describing the covariates available in the dataset.
     target : str
         Name of the target column.
@@ -58,7 +78,7 @@ class CovariateRegressor:
         eval_metric: str = "mean_absolute_error",
         refit_during_predict: bool = False,
         max_num_samples: Optional[int] = 500_000,
-
+        covariate_metadata: Optional[CovariateMetadata] = None,
         target: str = "target",
         validation_fraction: Optional[float] = 0.1,
         fit_time_fraction: float = 0.5,
@@ -83,7 +103,7 @@ class CovariateRegressor:
 
         self.model: Optional[AbstractModel] = None
         self.disabled = False
-        self.
+        self.covariate_metadata = covariate_metadata or CovariateMetadata()
 
     def is_fit(self) -> bool:
         return self.model is not None
@@ -188,7 +208,7 @@ class CovariateRegressor:
         include_target: bool = False,
     ) -> pd.DataFrame:
         """Construct a tabular dataframe from known covariates and static features."""
-        available_columns = [ITEMID] + self.
+        available_columns = [ITEMID] + self.covariate_metadata.known_covariates
         if include_target:
             available_columns += [self.target]
         tabular_df = pd.DataFrame(data).reset_index()[available_columns].astype({ITEMID: "category"})
@@ -201,3 +221,31 @@ class CovariateRegressor:
         if self.max_num_samples is not None and len(df) > self.max_num_samples:
             df = df.sample(n=self.max_num_samples)
         return df
+
+
+@overload
+def get_covariate_regressor(covariate_regressor: None, target: str, covariate_metadata: CovariateMetadata) -> None: ...
+@overload
+def get_covariate_regressor(
+    covariate_regressor: Union[str, dict], target: str, covariate_metadata: CovariateMetadata
+) -> CovariateRegressor: ...
+def get_covariate_regressor(
+    covariate_regressor: Optional[Union[str, dict]], target: str, covariate_metadata: CovariateMetadata
+) -> Optional[CovariateRegressor]:
+    """Create a CovariateRegressor object based on the value of the `covariate_regressor` hyperparameter."""
+    if covariate_regressor is None:
+        return None
+    elif len(covariate_metadata.known_covariates + covariate_metadata.static_features) == 0:
+        logger.info("\tSkipping covariate_regressor since the dataset contains no covariates or static features.")
+        return None
+    else:
+        if isinstance(covariate_regressor, str):
+            return GlobalCovariateRegressor(covariate_regressor, target=target, covariate_metadata=covariate_metadata)
+        elif isinstance(covariate_regressor, dict):
+            return GlobalCovariateRegressor(
+                **covariate_regressor, target=target, covariate_metadata=covariate_metadata
+            )
+        else:
+            raise ValueError(
+                f"Invalid value for covariate_regressor {covariate_regressor} of type {type(covariate_regressor)}"
+            )
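The module-level `get_covariate_regressor` factory above encodes the resolution rules that previously lived in `AbstractTimeSeriesModel._create_covariate_regressor`: `None` (or a dataset with no covariates and no static features) yields `None`, a string is treated as a tabular model name, and a dict is unpacked into `GlobalCovariateRegressor` keyword arguments. A hedged usage sketch, assuming autogluon.tabular is installed (it is a dependency) and using a hypothetical known covariate named `"price"`:

```python
from autogluon.timeseries.regressor import GlobalCovariateRegressor, get_covariate_regressor
from autogluon.timeseries.utils.features import CovariateMetadata

metadata = CovariateMetadata(known_covariates_real=["price"])  # non-empty metadata

# None -> no covariate regressor
assert get_covariate_regressor(None, target="target", covariate_metadata=metadata) is None

# str -> model name passed through to GlobalCovariateRegressor
reg_from_str = get_covariate_regressor("GBM", target="target", covariate_metadata=metadata)

# dict -> unpacked as GlobalCovariateRegressor keyword arguments
reg_from_dict = get_covariate_regressor(
    {"max_num_samples": 100_000}, target="target", covariate_metadata=metadata
)

assert isinstance(reg_from_str, GlobalCovariateRegressor)
assert isinstance(reg_from_dict, GlobalCovariateRegressor)
```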
autogluon/timeseries/transforms/__init__.py
CHANGED
@@ -1,13 +1,2 @@
-from .covariate_scaler import (
-    CovariateScaler,
-    GlobalCovariateScaler,
-    get_covariate_scaler_from_name,
-)
-from .target_scaler import (
-    LocalStandardScaler,
-    LocalMinMaxScaler,
-    LocalMeanAbsScaler,
-    LocalRobustScaler,
-    LocalTargetScaler,
-    get_target_scaler_from_name
-)
+from .covariate_scaler import CovariateScaler, get_covariate_scaler
+from .target_scaler import TargetScaler, get_target_scaler
autogluon/timeseries/transforms/covariate_scaler.py
CHANGED
@@ -1,5 +1,5 @@
 import logging
-from typing import Dict, List, Literal, Optional
+from typing import Dict, List, Literal, Optional, Protocol, overload, runtime_checkable
 
 import numpy as np
 import pandas as pd
@@ -13,35 +13,20 @@ from autogluon.timeseries.utils.warning_filters import warning_filter
 logger = logging.getLogger(__name__)
 
 
-class CovariateScaler:
+@runtime_checkable
+class CovariateScaler(Protocol):
     """Apply scaling to covariates and static features.
 
     This can be helpful for deep learning models that assume that the inputs are normalized.
     """
 
-    def __init__(
-        self,
-        metadata: CovariateMetadata,
-        use_known_covariates: bool = True,
-        use_past_covariates: bool = True,
-        use_static_features: bool = True,
-        **kwargs,
-    ):
-        self.metadata = metadata
-        self.use_known_covariates = use_known_covariates
-        self.use_past_covariates = use_past_covariates
-        self.use_static_features = use_static_features
-
-    def fit_transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame:
-        raise NotImplementedError
+    def fit_transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
 
-    def transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame:
-        raise NotImplementedError
+    def transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
 
     def transform_known_covariates(
         self, known_covariates: Optional[TimeSeriesDataFrame] = None
-    ) -> Optional[TimeSeriesDataFrame]:
-        raise NotImplementedError
+    ) -> Optional[TimeSeriesDataFrame]: ...
 
 
 class GlobalCovariateScaler(CovariateScaler):
@@ -57,13 +42,16 @@ class GlobalCovariateScaler(CovariateScaler):
 
     def __init__(
         self,
-
+        covariate_metadata: CovariateMetadata,
         use_known_covariates: bool = True,
         use_past_covariates: bool = True,
         use_static_features: bool = True,
         skew_threshold: float = 0.99,
     ):
-
+        self.covariate_metadata = covariate_metadata
+        self.use_known_covariates = use_known_covariates
+        self.use_past_covariates = use_past_covariates
+        self.use_static_features = use_static_features
         self.skew_threshold = skew_threshold
         self._column_transformers: Optional[Dict[Literal["known", "past", "static"], ColumnTransformer]] = None
 
@@ -73,18 +61,18 @@ class GlobalCovariateScaler(CovariateScaler):
     def fit(self, data: TimeSeriesDataFrame) -> "GlobalCovariateScaler":
         self._column_transformers = {}
 
-        if self.use_known_covariates and len(self.
+        if self.use_known_covariates and len(self.covariate_metadata.known_covariates_real) > 0:
             self._column_transformers["known"] = self._get_transformer_for_columns(
-                data, columns=self.
+                data, columns=self.covariate_metadata.known_covariates_real
             )
-        if self.use_past_covariates and len(self.
+        if self.use_past_covariates and len(self.covariate_metadata.past_covariates_real) > 0:
             self._column_transformers["past"] = self._get_transformer_for_columns(
-                data, columns=self.
+                data, columns=self.covariate_metadata.past_covariates_real
             )
-        if self.use_static_features and len(self.
+        if self.use_static_features and len(self.covariate_metadata.static_features_real) > 0:
             assert data.static_features is not None
             self._column_transformers["static"] = self._get_transformer_for_columns(
-                data.static_features, columns=self.
+                data.static_features, columns=self.covariate_metadata.static_features_real
             )
 
         return self
@@ -100,15 +88,15 @@ class GlobalCovariateScaler(CovariateScaler):
         assert self._column_transformers is not None, "CovariateScaler must be fit before transform can be called"
 
         if "known" in self._column_transformers:
-            columns = self.
+            columns = self.covariate_metadata.known_covariates_real
             data[columns] = self._column_transformers["known"].transform(data[columns])
 
         if "past" in self._column_transformers:
-            columns = self.
+            columns = self.covariate_metadata.past_covariates_real
             data[columns] = self._column_transformers["past"].transform(data[columns])
 
         if "static" in self._column_transformers:
-            columns = self.
+            columns = self.covariate_metadata.static_features_real
             assert data.static_features is not None
 
             data.static_features[columns] = self._column_transformers["static"].transform(
@@ -122,7 +110,7 @@ class GlobalCovariateScaler(CovariateScaler):
         assert self._column_transformers is not None, "CovariateScaler must be fit before transform can be called"
 
         if "known" in self._column_transformers:
-            columns = self.
+            columns = self.covariate_metadata.known_covariates_real
             assert known_covariates is not None
 
             known_covariates = known_covariates.copy()
@@ -162,7 +150,13 @@ AVAILABLE_COVARIATE_SCALERS = {
 }
 
 
-
+@overload
+def get_covariate_scaler(name: None, **scaler_kwargs) -> None: ...
+@overload
+def get_covariate_scaler(name: Literal["global"], **scaler_kwargs) -> GlobalCovariateScaler: ...
+def get_covariate_scaler(name: Optional[Literal["global"]] = None, **scaler_kwargs) -> Optional[CovariateScaler]:
+    if name is None:
+        return None
     if name not in AVAILABLE_COVARIATE_SCALERS:
         raise KeyError(
             f"Covariate scaler type {name} not supported. Available scalers: {list(AVAILABLE_COVARIATE_SCALERS)}"
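The overloads above make the return type of `get_covariate_scaler` track its argument: `None` disables covariate scaling, while `"global"` (currently the only registered name) builds a `GlobalCovariateScaler`. A minimal sketch, where `"price"` is a hypothetical real-valued known covariate used to give the metadata something to scale:

```python
from autogluon.timeseries.transforms import get_covariate_scaler
from autogluon.timeseries.utils.features import CovariateMetadata

metadata = CovariateMetadata(known_covariates_real=["price"])

assert get_covariate_scaler(None) is None  # no scaling requested

scaler = get_covariate_scaler("global", covariate_metadata=metadata, use_static_features=False)
print(type(scaler).__name__)  # GlobalCovariateScaler
```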
autogluon/timeseries/transforms/target_scaler.py
CHANGED
@@ -1,12 +1,23 @@
-from typing import Literal, Optional, Tuple, Union
+from typing import Literal, Optional, Protocol, Tuple, Union, overload
 
 import numpy as np
 import pandas as pd
+from typing_extensions import Self
 
 from autogluon.timeseries.dataset.ts_dataframe import ITEMID, TimeSeriesDataFrame
 
 
-class LocalTargetScaler:
+class TargetScaler(Protocol):
+    def fit_transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
+
+    def fit(self, data: TimeSeriesDataFrame) -> Self: ...
+
+    def transform(self, data: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
+
+    def inverse_transform(self, predictions: TimeSeriesDataFrame) -> TimeSeriesDataFrame: ...
+
+
+class LocalTargetScaler(TargetScaler):
     """Applies an affine transformation (x - loc) / scale independently to each time series in the dataset."""
 
     def __init__(
@@ -123,10 +134,16 @@ AVAILABLE_TARGET_SCALERS = {
 }
 
 
-
-
-
+@overload
+def get_target_scaler(name: None, **scaler_kwargs) -> None: ...
+@overload
+def get_target_scaler(name: Literal["standard", "mean_abs", "min_max", "robust"], **scaler_kwargs) -> TargetScaler: ...
+def get_target_scaler(
+    name: Optional[Literal["standard", "mean_abs", "min_max", "robust"]], **scaler_kwargs
+) -> Optional[TargetScaler]:
     """Get LocalTargetScaler object from a string."""
+    if name is None:
+        return None
     if name not in AVAILABLE_TARGET_SCALERS:
         raise KeyError(f"Scaler type {name} not supported. Available scalers: {list(AVAILABLE_TARGET_SCALERS)}")
     return AVAILABLE_TARGET_SCALERS[name](**scaler_kwargs)
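Analogously, `get_target_scaler` maps `None` to `None` and the names `"standard"`, `"mean_abs"`, `"min_max"`, and `"robust"` to the corresponding local scalers. A rough end-to-end example on a toy dataset; the frame construction follows the usual `TimeSeriesDataFrame` long format, and the round trip through `inverse_transform` is expected to recover the original target values up to floating-point error:

```python
import pandas as pd

from autogluon.timeseries import TimeSeriesDataFrame
from autogluon.timeseries.transforms import get_target_scaler

df = TimeSeriesDataFrame.from_data_frame(
    pd.DataFrame(
        {
            "item_id": ["A"] * 4,
            "timestamp": pd.date_range("2024-01-01", periods=4, freq="D"),
            "target": [1.0, 2.0, 3.0, 4.0],
        }
    )
)

assert get_target_scaler(None) is None

scaler = get_target_scaler("standard", target="target")
scaled = scaler.fit_transform(df)            # per-item (x - mean) / std
restored = scaler.inverse_transform(scaled)  # undo the scaling
print(restored["target"].round(6).tolist())  # expected: [1.0, 2.0, 3.0, 4.0]
```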
autogluon/timeseries/version.py
CHANGED
{autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.timeseries
-Version: 1.2.1b20250303
+Version: 1.2.1b20250305
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -55,9 +55,9 @@ Requires-Dist: fugue>=0.9.0
 Requires-Dist: tqdm<5,>=4.38
 Requires-Dist: orjson~=3.9
 Requires-Dist: tensorboard<3,>=2.9
-Requires-Dist: autogluon.core[raytune]==1.2.1b20250303
-Requires-Dist: autogluon.common==1.2.1b20250303
-Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250303
+Requires-Dist: autogluon.core[raytune]==1.2.1b20250305
+Requires-Dist: autogluon.common==1.2.1b20250305
+Requires-Dist: autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250305
 Provides-Extra: all
 Provides-Extra: chronos-onnx
 Requires-Dist: optimum[onnxruntime]<1.20,>=1.17; extra == "chronos-onnx"
{autogluon.timeseries-1.2.1b20250303.dist-info → autogluon.timeseries-1.2.1b20250305.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-autogluon.timeseries-1.2.
+autogluon.timeseries-1.2.1b20250305-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/timeseries/__init__.py,sha256=_CrLLc1fkjen7UzWoO0Os8WZoHOgvZbHKy46I8v_4k4,304
 autogluon/timeseries/evaluator.py,sha256=l642tYfTHsl8WVIq_vV6qhgAFVFr9UuZD7gLra3A_Kc,250
 autogluon/timeseries/learner.py,sha256=PDAHFlos6q5JukwRE86tKoH0zxYf3nLzy7qfD_a5NYY,13849
-autogluon/timeseries/predictor.py,sha256=
-autogluon/timeseries/regressor.py,sha256=
+autogluon/timeseries/predictor.py,sha256=DgKNvDfduVyauR7MXQZk04JyT3fc5erXAGVp3XOwDt4,85288
+autogluon/timeseries/regressor.py,sha256=3MlTpP-M1ayTZ52UQDK0wIMMFUijPep-iEyftlDdKPg,11804
 autogluon/timeseries/splitter.py,sha256=yzPca9p2bWV-_VJAptUyyzQsxu-uixAdpMoGQtDzMD4,3205
 autogluon/timeseries/trainer.py,sha256=L9FT5qERcqlWTgH9IgE6QsO0aBNj2nivRKF2Oy4UJOk,57250
-autogluon/timeseries/version.py,sha256=
+autogluon/timeseries/version.py,sha256=hjp__CpU1tLt2wGIEfd4tVSKrES_x0vHTkZdLuoYGnE,91
 autogluon/timeseries/configs/__init__.py,sha256=BTtHIPCYeGjqgOcvqb8qPD4VNX-ICKOg6wnkew1cPOE,98
 autogluon/timeseries/configs/presets_configs.py,sha256=cLat8ecLlWrI-SC5KLBDCX2SbVXaucemy2pjxJAtSY0,2543
 autogluon/timeseries/dataset/__init__.py,sha256=UvnhAN5tjgxXTHoZMQDy64YMDj4Xxa68yY7NP4vAw0o,81
@@ -19,11 +19,11 @@ autogluon/timeseries/metrics/utils.py,sha256=HuDe1BNe8yJU4f_DKM913nNrUueoRaw6zhx
 autogluon/timeseries/models/__init__.py,sha256=MYD9JJ-wUDE5B6jW6E6LU2eXQ6vflfQBvqQJkdzJa3A,1189
 autogluon/timeseries/models/presets.py,sha256=GezDk-p591Mlhm5UTIjKKJqQE2mnWw9rdsDYKen4zJo,12478
 autogluon/timeseries/models/abstract/__init__.py,sha256=wvDsQAZIV0N3AwBeMaGItoQ82trEfnT-nol2AAOIxBg,102
-autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=
+autogluon/timeseries/models/abstract/abstract_timeseries_model.py,sha256=ouQJu75JydLOzWAroHWHzLhaRgkeOXOu0d5F1dh_4Yc,40598
 autogluon/timeseries/models/abstract/model_trial.py,sha256=ENPg_7nsdxIvaNM0o0UShZ3x8jFlRmwRc5m0fGPC0TM,3720
 autogluon/timeseries/models/autogluon_tabular/__init__.py,sha256=r9i6jWcyeLHYClkcMSKRVsfrkBUMxpDrTATNTBc_qgQ,136
-autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=
-autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=
+autogluon/timeseries/models/autogluon_tabular/mlforecast.py,sha256=HGuV6_63TnBK9RqVD-VUTbbBuxQG9lmKxo5kLQLTlug,33016
+autogluon/timeseries/models/autogluon_tabular/transforms.py,sha256=CVvNun8DKH7UQGyXU-iO2xmvBIHRQElw72gIrZ7QjkU,2504
 autogluon/timeseries/models/autogluon_tabular/utils.py,sha256=Fn3Vu_Q0PCtEUbtNgLp1xIblg7dOdpFlF3W5kLHgruI,63
 autogluon/timeseries/models/chronos/__init__.py,sha256=wT77HzTtmQxW3sw2k0mA5Ot6PSHivX-Uvn5fjM05EU4,60
 autogluon/timeseries/models/chronos/model.py,sha256=ijupqg4S6kRxdJWanNt6bnyPoGAaJluUHHmZpyQrfDE,31211
@@ -33,7 +33,7 @@ autogluon/timeseries/models/chronos/pipeline/chronos.py,sha256=uFJLsSb2WQiSrmDZ0
 autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py,sha256=2MJuik-YFgONZ3X2DciAph5So6ABys5ppQhBC81gLyk,20083
 autogluon/timeseries/models/chronos/pipeline/utils.py,sha256=dtDX5Pyu95bGv7qmqgfUc1iYowWPY84dnGN0uyqyHyQ,13131
 autogluon/timeseries/models/ensemble/__init__.py,sha256=kFr11Gmt7lQJu9Rr8HuIPphQN5l1TsoorfbJm_O3a_s,128
-autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py,sha256=
+autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py,sha256=LzL64JASiwkLsuFxGToXJGRItcMxq5_Ig2QP5Zm7SHw,3537
 autogluon/timeseries/models/ensemble/greedy_ensemble.py,sha256=UPEmNx-RSuqCXS7V093NEid_AwwEigM6AXMcZtof8vg,7230
 autogluon/timeseries/models/gluonts/__init__.py,sha256=asC1PTj4j9xMbilvk1IT1julnpeoKbv5ZNuAR6-DFgA,361
 autogluon/timeseries/models/gluonts/abstract_gluonts.py,sha256=brf2lIMHH4a_AETwyOcOBVPWqWhLxr8iolJ3Z5AR8MA,30621
@@ -45,10 +45,10 @@ autogluon/timeseries/models/local/naive.py,sha256=iwRcFMFmJKPWPbD9TWaIUS51oav69F
 autogluon/timeseries/models/local/npts.py,sha256=Bp74doKnfpGE8ywP4FWOCI_RwRMsmgocYDfGtq764DA,4143
 autogluon/timeseries/models/local/statsforecast.py,sha256=s3Byp7WAUy0Rnfl1qYMSIm44MKD9t8E732xuNLk_aao,32615
 autogluon/timeseries/models/multi_window/__init__.py,sha256=Bq7AT2Jxdd4WNqmjTdzeqgNiwn1NCyWp4tBIWaM-zfI,60
-autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=
-autogluon/timeseries/transforms/__init__.py,sha256=
-autogluon/timeseries/transforms/covariate_scaler.py,sha256=
-autogluon/timeseries/transforms/target_scaler.py,sha256=
+autogluon/timeseries/models/multi_window/multi_window_model.py,sha256=1WJfVUCmkf8DlZtIG5Hq6vPGKQ02YrpNXZEYLmiSskw,11836
+autogluon/timeseries/transforms/__init__.py,sha256=fkFc4Q1Dlh0vVRgO7nPD7BgNL9dOki8THPWFkfdIKkM,128
+autogluon/timeseries/transforms/covariate_scaler.py,sha256=G56PTHKqCFKiXRKLkLun7mN3-T09jxN-5oI1ISADJdQ,7042
+autogluon/timeseries/transforms/target_scaler.py,sha256=BeT1aP51Wq9EidxC0dVg6dHvampKafpG1uKu4ZaaJPs,6050
 autogluon/timeseries/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/timeseries/utils/features.py,sha256=FYgcv_i5fFfxGOW86GLLrLanmaIpA2OXoMwUwvPsdkI,22443
 autogluon/timeseries/utils/forecast.py,sha256=kMeP2KxrlX5Gk31D2BesuVIc0F4hEahvAcFpJHgcWG4,2082
@@ -58,11 +58,11 @@ autogluon/timeseries/utils/datetime/base.py,sha256=3NdsH3NDq4cVAOSoy3XpaNixyNlbj
 autogluon/timeseries/utils/datetime/lags.py,sha256=gQDk5_zmsY5DUWDUpSaCKYkQ9nHKKY-LsywJQRAoYSk,5988
 autogluon/timeseries/utils/datetime/seasonality.py,sha256=YK_2k8hvYIMW-sJPnjGWRtCnvIOthwA2hATB3nwVoD4,834
 autogluon/timeseries/utils/datetime/time_features.py,sha256=MjLi3zQ00uWWJtXH9oGX2GJkTbvjdSiuabSa4kcVuxE,2672
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
-autogluon.timeseries-1.2.
+autogluon.timeseries-1.2.1b20250305.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.timeseries-1.2.1b20250305.dist-info/METADATA,sha256=yPmePcrpFvseKU8tl3A7y22PYvTf5SfD_2lAahDWfU8,12684
+autogluon.timeseries-1.2.1b20250305.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.timeseries-1.2.1b20250305.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.timeseries-1.2.1b20250305.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.2.1b20250305.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.timeseries-1.2.1b20250305.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.timeseries-1.2.1b20250305.dist-info/RECORD,,