autogluon.timeseries 1.2.1b20250219__tar.gz → 1.2.1b20250221__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/PKG-INFO +1 -1
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/abstract/abstract_timeseries_model.py +342 -141
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/version.py +1 -1
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/PKG-INFO +1 -1
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/requires.txt +3 -3
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/setup.cfg +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/setup.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/configs/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/configs/presets_configs.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/dataset/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/dataset/ts_dataframe.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/evaluator.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/learner.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/metrics/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/metrics/abstract.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/metrics/point.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/metrics/quantile.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/metrics/utils.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/abstract/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/abstract/model_trial.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/autogluon_tabular/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/autogluon_tabular/mlforecast.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/autogluon_tabular/transforms.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/autogluon_tabular/utils.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/model.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/pipeline/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/pipeline/base.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/pipeline/chronos.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/pipeline/chronos_bolt.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/chronos/pipeline/utils.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/ensemble/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/ensemble/abstract_timeseries_ensemble.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/ensemble/greedy_ensemble.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/gluonts/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/gluonts/abstract_gluonts.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/gluonts/torch/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/gluonts/torch/models.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/local/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/local/abstract_local_model.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/local/naive.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/local/npts.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/local/statsforecast.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/multi_window/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/multi_window/multi_window_model.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/presets.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/predictor.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/regressor.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/splitter.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/trainer.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/transforms/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/transforms/covariate_scaler.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/transforms/target_scaler.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/datetime/__init__.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/datetime/base.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/datetime/lags.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/datetime/seasonality.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/datetime/time_features.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/features.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/forecast.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/utils/warning_filters.py +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/SOURCES.txt +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/dependency_links.txt +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/namespace_packages.txt +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/top_level.txt +0 -0
- {autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/zip-safe +0 -0
{autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon/timeseries/models/abstract/abstract_timeseries_model.py

@@ -1,9 +1,12 @@
+from __future__ import annotations
+
+import copy
 import logging
 import os
 import re
 import time
 from contextlib import nullcontext
-from typing import Any,
+from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
 
 import pandas as pd
 from typing_extensions import Self
@@ -11,10 +14,16 @@ from typing_extensions import Self
 from autogluon.common import space
 from autogluon.common.loaders import load_pkl
 from autogluon.common.savers import save_pkl
-from autogluon.
+from autogluon.common.utils.distribute_utils import DistributedContext
+from autogluon.common.utils.log_utils import DuplicateFilter
+from autogluon.common.utils.resource_utils import get_resource_manager
+from autogluon.common.utils.try_import import try_import_ray
+from autogluon.common.utils.utils import setup_outputdir
+from autogluon.core.constants import AG_ARG_PREFIX, AG_ARGS_FIT, REFIT_FULL_SUFFIX
+from autogluon.core.hpo.constants import CUSTOM_BACKEND, RAY_BACKEND
 from autogluon.core.hpo.exceptions import EmptySearchSpace
-from autogluon.core.hpo.executors import HpoExecutor, RayHpoExecutor
-from autogluon.core.models import
+from autogluon.core.hpo.executors import HpoExecutor, HpoExecutorFactory, RayHpoExecutor
+from autogluon.core.models import ModelBase
 from autogluon.core.utils.exceptions import TimeLimitExceeded
 from autogluon.timeseries.dataset import TimeSeriesDataFrame
 from autogluon.timeseries.metrics import TimeSeriesScorer, check_get_evaluation_metric
@@ -32,9 +41,88 @@ from autogluon.timeseries.utils.warning_filters import disable_stdout, warning_f
 from .model_trial import model_trial, skip_hpo
 
 logger = logging.getLogger(__name__)
+dup_filter = DuplicateFilter()
+logger.addFilter(dup_filter)
+
 
+# TODO: refactor and move to util. We do not need to use "params_aux" in time series
+def check_and_split_hyperparameters(
+    params: Optional[Dict[str, Any]] = None,
+    ag_args_fit: str = AG_ARGS_FIT,
+    ag_arg_prefix: str = AG_ARG_PREFIX,
+) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+    """
+    Given the user-specified hyperparameters, split into `params` and `params_aux`.
 
-class AbstractTimeSeriesModel(AbstractModel):
+    Parameters
+    ----------
+    params : Optional[Dict[str, Any]], default = None
+        The model hyperparameters dictionary
+    ag_args_fit : str, default = "ag_args_fit"
+        The params key to look for that contains params_aux.
+        If the key is present, the value is used for params_aux and popped from params.
+        If no such key is found, then initialize params_aux as an empty dictionary.
+    ag_arg_prefix : str, default = "ag."
+        The key prefix to look for that indicates a parameter is intended for params_aux.
+        If None, this logic is skipped.
+        If a key starts with this prefix, it is popped from params and added to params_aux with the prefix removed.
+        For example:
+            input: params={'ag.foo': 2, 'abc': 7}, params_aux={'bar': 3}, and ag_arg_prefix='ag.'
+            output: params={'abc': 7}, params_aux={'bar': 3, 'foo': 2}
+        In cases where the key is specified multiple times, the value of the key with the prefix will always take priority.
+        A warning will be logged if a key is present multiple times.
+        For example, given the most complex scenario:
+            input: params={'ag.foo': 1, 'foo': 2, 'ag_args_fit': {'ag.foo': 3, 'foo': 4}}
+            output: params={'foo': 2}, params_aux={'foo': 1}
+
+    Returns
+    -------
+    params, params_aux : (Dict[str, Any], Dict[str, Any])
+        params will contain the native model hyperparameters
+        params_aux will contain special auxiliary hyperparameters
+    """
+    params = copy.deepcopy(params) if params is not None else dict()
+    assert isinstance(params, dict), f"Invalid dtype of params! Expected dict, but got {type(params)}"
+    for k in params.keys():
+        if not isinstance(k, str):
+            logger.warning(
+                f"Warning: Specified hyperparameter key is not of type str: {k} (type={type(k)}). "
+                f"There might be a bug in your configuration."
+            )
+
+    params_aux = params.pop(ag_args_fit, dict())
+    if params_aux is None:
+        params_aux = dict()
+    assert isinstance(params_aux, dict), f"Invalid dtype of params_aux! Expected dict, but got {type(params_aux)}"
+    if ag_arg_prefix is not None:
+        param_aux_keys = list(params_aux.keys())
+        for k in param_aux_keys:
+            if isinstance(k, str) and k.startswith(ag_arg_prefix):
+                k_no_prefix = k[len(ag_arg_prefix) :]
+                if k_no_prefix in params_aux:
+                    logger.warning(
+                        f'Warning: hyperparameter "{k}" is present '
+                        f'in `ag_args_fit` as both "{k}" and "{k_no_prefix}". '
+                        f'Will use "{k}" and ignore "{k_no_prefix}".'
+                    )
+                params_aux[k_no_prefix] = params_aux.pop(k)
+        param_keys = list(params.keys())
+        for k in param_keys:
+            if isinstance(k, str) and k.startswith(ag_arg_prefix):
+                k_no_prefix = k[len(ag_arg_prefix) :]
+                if k_no_prefix in params_aux:
+                    logger.warning(
+                        f'Warning: hyperparameter "{k}" is present '
+                        f"in both `ag_args_fit` and `hyperparameters`. "
+                        f"Will use `hyperparameters` value."
+                    )
+                params_aux[k_no_prefix] = params.pop(k)
+    return params, params_aux
+
+
+# TODO: refactor. remove params_aux, etc. make class inherit from ABC, make overrides and abstract
+# methods clear, change name to TimeSeriesModel, et al.
+class AbstractTimeSeriesModel(ModelBase):
     """Abstract class for all `Model` objects in autogluon.timeseries.
 
     Parameters
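Note: the new `check_and_split_hyperparameters` helper is what `__init__` now uses to separate native model hyperparameters from `ag.`-prefixed auxiliary ones. A minimal usage sketch, with values taken from the docstring example (assumes this release is installed):

from autogluon.timeseries.models.abstract.abstract_timeseries_model import check_and_split_hyperparameters

# "ag."-prefixed keys move to params_aux with the prefix stripped; the rest stay in params.
params, params_aux = check_and_split_hyperparameters({"ag.foo": 2, "abc": 7})
assert params == {"abc": 7}
assert params_aux == {"foo": 2}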
@@ -67,49 +155,51 @@ class AbstractTimeSeriesModel(AbstractModel):
         If None, model defaults are used. This is identical to passing an empty dictionary.
     """
 
+    model_file_name = "model.pkl"
+    model_info_name = "info.pkl"
     _oof_filename = "oof.pkl"
+
     # TODO: For which models should we override this parameter?
     _covariate_regressor_fit_time_fraction: float = 0.5
     default_max_time_limit_ratio: float = 0.9
 
-    # TODO: This is a hack to override the AbstractModel method, which the HPO module
-    # also circumvents with an ugly None-check.
-    estimate_memory_usage: Callable = None  # type: ignore
-
     _supports_known_covariates: bool = False
     _supports_past_covariates: bool = False
     _supports_static_features: bool = False
 
     def __init__(
         self,
-        freq: Optional[str] = None,
-        prediction_length: int = 1,
         path: Optional[str] = None,
         name: Optional[str] = None,
+        hyperparameters: Optional[Dict[str, Any]] = None,
+        freq: Optional[str] = None,
+        prediction_length: int = 1,
         metadata: Optional[CovariateMetadata] = None,
+        target: str = "target",
+        quantile_levels: Sequence[float] = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9),
         eval_metric: Union[str, TimeSeriesScorer, None] = None,
         eval_metric_seasonal_period: Optional[int] = None,
-        hyperparameters: Optional[Dict[str, Union[int, float, str, space.Space]]] = None,
-        **kwargs,
     ):
-        name = name or re.sub(r"Model$", "", self.__class__.__name__)
-
-
-
-
-
-
-
+        self.name = name or re.sub(r"Model$", "", self.__class__.__name__)
+
+        self.path_root = path
+        if self.path_root is None:
+            path_suffix = self.name
+            # TODO: Would be ideal to not create dir, but still track that it is unique. However, this isn't possible to do without a global list of used dirs or using UUID.
+            path_cur = setup_outputdir(path=None, create_dir=True, path_suffix=path_suffix)
+            self.path_root = path_cur.rsplit(self.name, 1)[0]
+            logger.log(20, f"Warning: No path was specified for model, defaulting to: {self.path_root}")
+
+        self.path = os.path.join(self.path_root, self.name)
+
         self.eval_metric: TimeSeriesScorer = check_get_evaluation_metric(eval_metric)
         self.eval_metric_seasonal_period = eval_metric_seasonal_period
-        self.
-        self.conformalize = False
-        self.target: str = kwargs.get("target", "target")
+        self.target: str = target
         self.metadata = metadata or CovariateMetadata()
 
         self.freq: Optional[str] = freq
         self.prediction_length: int = prediction_length
-        self.quantile_levels =
+        self.quantile_levels: list[float] = list(quantile_levels)
 
         if not all(0 < q < 1 for q in self.quantile_levels):
             raise ValueError("Invalid quantile_levels specified. Quantiles must be between 0 and 1 (exclusive).")
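Note: the constructor rework above replaces the old `**kwargs` plumbing with explicit keywords (`hyperparameters`, `target`, `quantile_levels`) and inlines the output-path setup that previously came from `AbstractModel`. A construction sketch; `NaiveModel` is used purely as an illustrative concrete subclass:

from autogluon.timeseries.models import NaiveModel

model = NaiveModel(
    path="ag_models",                 # optional; a default output dir is created when omitted
    freq="D",
    prediction_length=7,
    target="target",                  # previously fetched via kwargs.get("target", "target")
    quantile_levels=[0.1, 0.5, 0.9],
    hyperparameters={},               # split internally by check_and_split_hyperparameters
)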
@@ -126,29 +216,69 @@ class AbstractTimeSeriesModel(AbstractModel):
         self.target_scaler: Optional[LocalTargetScaler] = None
         self.covariate_scaler: Optional[CovariateScaler] = None
         self.covariate_regressor: Optional[CovariateRegressor] = None
-
+
+        # TODO: remove the variables below
+        self.model = None
+
+        self._is_initialized = False
+        self._user_params, self._user_params_aux = check_and_split_hyperparameters(hyperparameters)
+
+        self.params = {}
+        self.params_aux = {}
+        self.nondefault_params: List[str] = []
+
+        self.fit_time: Optional[float] = None  # Time taken to fit in seconds (Training data)
+        self.predict_time: Optional[float] = None  # Time taken to predict in seconds (Validation data)
+        self.predict_1_time: Optional[float] = (
+            None  # Time taken to predict 1 row of data in seconds (with batch size `predict_1_batch_size` in params_aux)
+        )
+        self.val_score: Optional[float] = None  # Score with eval_metric (Validation data)
 
     def __repr__(self) -> str:
         return self.name
 
+    def rename(self, name: str) -> None:
+        if self.name is not None and len(self.name) > 0:
+            self.path = os.path.join(os.path.dirname(self.path), name)
+        else:
+            self.path = os.path.join(self.path, name)
+        self.name = name
+
+    def set_contexts(self, path_context):
+        self.path = path_context
+        self.path_root = self.path.rsplit(self.name, 1)[0]
+
     def save(self, path: Optional[str] = None, verbose=True) -> str:
+        if path is None:
+            path = self.path
+
         # Save self._oof_predictions as a separate file, not model attribute
         if self._oof_predictions is not None:
             save_pkl.save(
-                path=os.path.join(
+                path=os.path.join(path, "utils", self._oof_filename),
                 object=self._oof_predictions,
                 verbose=verbose,
             )
         oof_predictions = self._oof_predictions
         self._oof_predictions = None
-
+
+        file_path = os.path.join(path, self.model_file_name)
+        _model = self.model
+        save_pkl.save(path=file_path, object=self, verbose=verbose)
+        self.model = _model
+
         self._oof_predictions = oof_predictions
-        return
+        return path
 
     @classmethod
     def load(cls, path: str, reset_paths: bool = True, load_oof: bool = False, verbose: bool = True) -> Self:  # type: ignore
-
-        model =
+        file_path = os.path.join(path, cls.model_file_name)
+        model = load_pkl.load(path=file_path, verbose=verbose)
+        if reset_paths:
+            model.set_contexts(path)
+        if hasattr(model, "_compiler"):
+            if model._compiler is not None and not model._compiler.save_in_pkl:
+                model.model = model._compiler.load(path=path)
         if load_oof and model._oof_predictions is None:
             model._oof_predictions = cls.load_oof_predictions(path=path, verbose=verbose)
         return model
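Note: `save`/`load` now handle pickling directly instead of deferring to `AbstractModel`, writing the pickle to `model.pkl` (the new `model_file_name` class attribute) inside the model directory. A round-trip sketch using the hypothetical `model` from the constructor sketch above:

saved_dir = model.save()               # pickles self to <path>/model.pkl and returns the directory
restored = NaiveModel.load(saved_dir)  # unpickles; reset_paths=True re-points stored paths
assert restored.name == model.name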
@@ -181,7 +311,6 @@ class AbstractTimeSeriesModel(AbstractModel):
         return self._oof_predictions
 
     def _get_default_auxiliary_params(self) -> dict:
-        # TODO: refine to values that are absolutely necessary
         return dict(
             # ratio of given time_limit to use during fit(). If time_limit == 10 and max_time_limit_ratio=0.3,
             # time_limit would be changed to 3.
@@ -191,27 +320,45 @@ class AbstractTimeSeriesModel(AbstractModel):
             max_time_limit=None,
         )
 
-
-
-
+    # TODO: remove
+    @classmethod
+    def _get_default_ag_args(cls) -> dict:
+        return {}
+
+    def _init_params(self):
+        """Initializes model hyperparameters"""
+        hyperparameters = self._user_params
+        self.nondefault_params = []
+        if hyperparameters is not None:
+            self.params.update(hyperparameters)
+            # These are hyperparameters that user has specified.
+            self.nondefault_params = list(hyperparameters.keys())[:]
+        self.params_trained = {}
+
+    def _init_params_aux(self):
+        """
+        Initializes auxiliary hyperparameters.
+        These parameters are generally not model specific and can have a wide variety of effects.
+        For documentation on some of the available options and their defaults, refer to `self._get_default_auxiliary_params`.
+        """
+        hyperparameters_aux = self._user_params_aux or {}
+        self.params_aux = {**self._get_default_auxiliary_params(), **hyperparameters_aux}
 
+    def initialize(self) -> None:
         if not self._is_initialized:
             self._init_params_aux()
             self._init_params()
             self._initialize_transforms()
             self._is_initialized = True
 
-        # TODO: remove
-        kwargs.pop("feature_metadata", None)
-        kwargs.pop("num_classes", None)
-
-        return kwargs
-
     def _initialize_transforms(self) -> None:
         self.target_scaler = self._create_target_scaler()
         self.covariate_scaler = self._create_covariate_scaler()
         self.covariate_regressor = self._create_covariate_regressor()
 
+    def _get_model_params(self) -> dict:
+        return self.params.copy()
+
     def get_params(self) -> dict:
         # TODO: do not extract to AbstractModel if this is only used for getting a
         # prototype of the object for HPO.
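Note: `initialize` no longer takes or returns `**kwargs`; it merges defaults with user-specified values exactly once. The merge in `_init_params_aux` is a plain dict union where user values win, for example:

# Same semantics as _init_params_aux, shown with plain dicts
# (defaults here mirror _get_default_auxiliary_params).
defaults = {"max_time_limit_ratio": 0.9, "max_time_limit": None}
user_aux = {"max_time_limit_ratio": 0.5}  # e.g. supplied as {"ag.max_time_limit_ratio": 0.5}
params_aux = {**defaults, **user_aux}
assert params_aux == {"max_time_limit_ratio": 0.5, "max_time_limit": None}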
@@ -222,7 +369,6 @@ class AbstractTimeSeriesModel(AbstractModel):
         return dict(
             path=self.path_root,
             name=self.name,
-            problem_type=self.problem_type,
             eval_metric=self.eval_metric,
             hyperparameters=hyperparameters,
             freq=self.freq,
@@ -232,6 +378,19 @@ class AbstractTimeSeriesModel(AbstractModel):
             target=self.target,
         )
 
+    @classmethod
+    def load_info(cls, path: str, load_model_if_required: bool = True) -> dict:
+        # TODO: remove?
+        load_path = os.path.join(path, cls.model_info_name)
+        try:
+            return load_pkl.load(path=load_path)
+        except:
+            if load_model_if_required:
+                model = cls.load(path=path, reset_paths=True)
+                return model.get_info()
+            else:
+                raise
+
     def get_info(self) -> dict:
         """
         Returns a dictionary of numerous fields describing the model.
@@ -256,6 +415,7 @@ class AbstractTimeSeriesModel(AbstractModel):
         train_data: TimeSeriesDataFrame,
         val_data: Optional[TimeSeriesDataFrame] = None,
         time_limit: Optional[float] = None,
+        verbosity: int = 2,
         **kwargs,
     ) -> Self:
         """Fit timeseries model.
@@ -291,9 +451,8 @@ class AbstractTimeSeriesModel(AbstractModel):
         model: AbstractTimeSeriesModel
             The fitted model object
         """
-        # TODO: align method signature in new AbstractModel as fit(*args, **kwargs)
         start_time = time.monotonic()
-        self.initialize(
+        self.initialize()
 
         if self.target_scaler is not None:
             train_data = self.target_scaler.fit_transform(train_data)
@@ -308,7 +467,7 @@ class AbstractTimeSeriesModel(AbstractModel):
             self.covariate_regressor.fit(
                 train_data,
                 time_limit=covariate_regressor_time_limit,
-                verbosity=
+                verbosity=verbosity,
             )
 
         if self._get_tags()["can_use_train_data"]:
@@ -334,15 +493,12 @@ class AbstractTimeSeriesModel(AbstractModel):
             )
             raise TimeLimitExceeded
 
-        # TODO: disentangle fit_resources computation and validation from tabular logic
-        kwargs = self._preprocess_fit_resources(**kwargs)
-        self.validate_fit_resources(**kwargs)
-
         self._fit(
             train_data=train_data,
             val_data=val_data,
             time_limit=time_limit,
-
+            verbosity=verbosity,
+            **(self._get_system_resources() | kwargs),
         )
 
         return self
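Note: `fit` now gathers resources itself instead of calling `_preprocess_fit_resources`. The expression `**(self._get_system_resources() | kwargs)` uses PEP 584 dict union (Python >= 3.9), so caller-supplied values override the detected system resources:

# How the dict union resolves, with plain dicts:
system = {"num_cpus": 8, "num_gpus": 1}  # as returned by _get_system_resources()
overrides = {"num_gpus": 0}              # caller-supplied kwargs
merged = system | overrides
assert merged == {"num_cpus": 8, "num_gpus": 0}  # right operand wins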
@@ -369,6 +525,32 @@ class AbstractTimeSeriesModel(AbstractModel):
 
         return time_limit
 
+    def _fit(  # type: ignore
+        self,
+        train_data: TimeSeriesDataFrame,
+        val_data: Optional[TimeSeriesDataFrame] = None,
+        time_limit: Optional[float] = None,
+        num_cpus: Optional[int] = None,
+        num_gpus: Optional[int] = None,
+        verbosity: int = 2,
+        **kwargs,
+    ) -> None:
+        """Private method for `fit`. See `fit` for documentation of arguments. Apart from
+        the model training logic, `fit` additionally implements other logic such as keeping
+        track of the time limit, etc.
+        """
+        # TODO: Make the models respect `num_cpus` and `num_gpus` parameters
+        raise NotImplementedError
+
+    # TODO: perform this check inside fit() ?
+    def _check_fit_params(self):
+        # gracefully handle hyperparameter specifications if they are provided to fit instead
+        if any(isinstance(v, space.Space) for v in self.params.values()):
+            raise ValueError(
+                "Hyperparameter spaces provided to `fit`. Please provide concrete values "
+                "as hyperparameters when initializing or use `hyperparameter_tune` instead."
+            )
+
     @property
     def allowed_hyperparameters(self) -> List[str]:
         """List of hyperparameters allowed by the model."""
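Note: the `_fit` stub above (moved here from later in the file; see the matching removal below) is the contract concrete models implement. A minimal subclass sketch, with a hypothetical `_train_impl` helper standing in for real training logic:

from typing import Optional

from autogluon.timeseries import TimeSeriesDataFrame
from autogluon.timeseries.models.abstract import AbstractTimeSeriesModel

class MyForecaster(AbstractTimeSeriesModel):
    def _fit(
        self,
        train_data: TimeSeriesDataFrame,
        val_data: Optional[TimeSeriesDataFrame] = None,
        time_limit: Optional[float] = None,
        num_cpus: Optional[int] = None,  # injected by fit() from _get_system_resources()
        num_gpus: Optional[int] = None,
        verbosity: int = 2,
        **kwargs,
    ) -> None:
        self.model = self._train_impl(train_data, time_limit)  # hypothetical helper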
@@ -425,33 +607,6 @@ class AbstractTimeSeriesModel(AbstractModel):
         else:
             return None
 
-    def _fit(  # type: ignore
-        self,
-        train_data: TimeSeriesDataFrame,
-        val_data: Optional[TimeSeriesDataFrame] = None,
-        time_limit: Optional[float] = None,
-        num_cpus: Optional[int] = None,
-        num_gpus: Optional[int] = None,
-        verbosity: int = 2,
-        **kwargs,
-    ) -> None:
-        """Private method for `fit`. See `fit` for documentation of arguments. Apart from
-        the model training logic, `fit` additionally implements other logic such as keeping
-        track of the time limit, etc.
-        """
-        # TODO: will not be extracted to new AbstractModel
-
-        # TODO: Make the models respect `num_cpus` and `num_gpus` parameters
-        raise NotImplementedError
-
-    def _check_fit_params(self):
-        # gracefully handle hyperparameter specifications if they are provided to fit instead
-        if any(isinstance(v, space.Space) for v in self.params.values()):
-            raise ValueError(
-                "Hyperparameter spaces provided to `fit`. Please provide concrete values "
-                "as hyperparameters when initializing or use `hyperparameter_tune` instead."
-            )
-
     def predict(  # type: ignore
         self,
         data: Union[TimeSeriesDataFrame, Dict[str, Optional[TimeSeriesDataFrame]]],
@@ -480,8 +635,6 @@ class AbstractTimeSeriesModel(AbstractModel):
         data is given as a separate forecast item in the dictionary, keyed by the `item_id`s
         of input items.
         """
-        # TODO: align method signature in new AbstractModel as predict(*args, **kwargs)
-
         # TODO: the method signature is not aligned with the model interface in general as it allows dict
         assert isinstance(data, TimeSeriesDataFrame)
 
@@ -617,56 +770,40 @@ class AbstractTimeSeriesModel(AbstractModel):
     def _is_gpu_available(self) -> bool:
         return False
 
+    @staticmethod
+    def _get_system_resources() -> Dict[str, Any]:
+        resource_manager = get_resource_manager()
+        system_num_cpus = resource_manager.get_cpu_count()
+        system_num_gpus = resource_manager.get_gpu_count()
+        return {
+            "num_cpus": system_num_cpus,
+            "num_gpus": system_num_gpus,
+        }
+
     def hyperparameter_tune(
         self,
+        train_data: TimeSeriesDataFrame,
+        val_data: Optional[TimeSeriesDataFrame],
+        val_splitter: Any = None,
+        default_num_trials: Optional[int] = 1,
+        refit_every_n_windows: Optional[int] = 1,
         hyperparameter_tune_kwargs: Union[str, dict] = "auto",
-        hpo_executor: Optional[HpoExecutor] = None,
         time_limit: Optional[float] = None,
-
-
-
-
-        default_num_trials = kwargs.pop("default_num_trials", None)
-        hpo_executor.initialize(
-            hyperparameter_tune_kwargs, default_num_trials=default_num_trials, time_limit=time_limit
-        )
-
-        kwargs = self.initialize(time_limit=time_limit, **kwargs)
-
-        kwargs = self._preprocess_fit_resources(
-            parallel_hpo=hpo_executor.executor_type == "ray", silent=True, **kwargs
+    ) -> Tuple[Dict[str, Any], Any]:
+        hpo_executor = self._get_default_hpo_executor()
+        hpo_executor.initialize(
+            hyperparameter_tune_kwargs, default_num_trials=default_num_trials, time_limit=time_limit
         )
-        self.validate_fit_resources(**kwargs)
 
-
-        # used in trials, which results in unintended setting of num_gpus=0. We override this
-        # logic here, and set to minimum num_gpus to 1 if it is set to 0 when GPUs are available
-        kwargs["num_gpus"] = 0 if not self._is_gpu_available() else max(kwargs.get("num_gpus", 1), 1)
+        self.initialize()
 
         # we use k_fold=1 to circumvent autogluon.core logic to manage resources during parallelization
         # of different folds
-
+        # FIXME: we pass in self which currently does not inherit from AbstractModel
+        hpo_executor.register_resources(self, k_fold=1, **self._get_system_resources())  # type: ignore
 
-        # TODO: Clean up call to _hyperparameter_tune
-        return self._hyperparameter_tune(hpo_executor=hpo_executor, **kwargs)  # type: ignore
-
-    def persist(self) -> Self:
-        """Ask the model to persist its assets in memory, i.e., to predict with low latency. In practice
-        this is used for pretrained models that have to lazy-load model parameters to device memory at
-        prediction time.
-        """
-        return self
-
-    def _hyperparameter_tune(  # type: ignore
-        self,
-        train_data: TimeSeriesDataFrame,
-        val_data: TimeSeriesDataFrame,
-        hpo_executor: HpoExecutor,
-        **kwargs,
-    ):
-        # TODO: do not extract to new AbstractModel
         time_start = time.time()
-        logger.debug(f"\tStarting
+        logger.debug(f"\tStarting hyperparameter tuning for {self.name}")
         search_space = self._get_search_space()
 
         try:
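Note: `hyperparameter_tune` now has an explicit signature and builds its own executor via `_get_default_hpo_executor` (added further below) instead of receiving one. A call sketch, assuming `train_data`/`val_data` are prepared `TimeSeriesDataFrame` objects:

hpo_models, analysis = model.hyperparameter_tune(
    train_data=train_data,
    val_data=val_data,
    hyperparameter_tune_kwargs="auto",  # or a dict such as {"num_trials": 5, "searcher": "random", "scheduler": "local"}
    time_limit=600,
)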
@@ -674,35 +811,22 @@ class AbstractTimeSeriesModel(AbstractModel):
         except EmptySearchSpace:
             return skip_hpo(self, train_data, val_data, time_limit=hpo_executor.time_limit)
 
-        self.
-        directory = self.path
-        dataset_train_filename = "dataset_train.pkl"
-        train_path = os.path.join(self.path, dataset_train_filename)
-        save_pkl.save(path=train_path, object=train_data)
-
-        dataset_val_filename = "dataset_val.pkl"
-        val_path = os.path.join(self.path, dataset_val_filename)
-        save_pkl.save(path=val_path, object=val_data)
+        train_path, val_path = self._save_with_data(train_data, val_data)
 
-        fit_kwargs = dict(
-            val_splitter=kwargs.get("val_splitter"),
-            refit_every_n_windows=kwargs.get("refit_every_n_windows", 1),
-        )
         train_fn_kwargs = self._get_hpo_train_fn_kwargs(
             model_cls=self.__class__,
             init_params=self.get_params(),
             time_start=time_start,
             time_limit=hpo_executor.time_limit,
-            fit_kwargs=
+            fit_kwargs=dict(
+                val_splitter=val_splitter,
+                refit_every_n_windows=refit_every_n_windows,
+            ),
             train_path=train_path,
             val_path=val_path,
             hpo_executor=hpo_executor,
         )
 
-        model_estimate_memory_usage = None
-        if self.estimate_memory_usage is not None:
-            model_estimate_memory_usage = self.estimate_memory_usage(**kwargs)
-
         minimum_resources = self.get_minimum_resources(is_gpu_available=self._is_gpu_available())
         hpo_context = disable_stdout if isinstance(hpo_executor, RayHpoExecutor) else nullcontext
 
@@ -718,10 +842,10 @@ class AbstractTimeSeriesModel(AbstractModel):
             hpo_executor.execute(
                 model_trial=model_trial,
                 train_fn_kwargs=train_fn_kwargs,
-                directory=
+                directory=self.path,
                 minimum_cpu_per_trial=minimum_cpu_per_trial,
                 minimum_gpu_per_trial=minimum_resources.get("num_gpus", 0),
-                model_estimate_memory_usage=
+                model_estimate_memory_usage=None,
                 adapter_type="timeseries",
             )
 
@@ -734,6 +858,53 @@ class AbstractTimeSeriesModel(AbstractModel):
 
         return hpo_models, analysis
 
+    @property
+    def is_ensemble(self) -> bool:
+        """Return True if the model is an ensemble model or a container of multiple models."""
+        return self._get_model_base() is self
+
+    def _get_default_hpo_executor(self) -> HpoExecutor:
+        backend = (
+            self._get_model_base()._get_hpo_backend()
+        )  # If ensemble, will use the base model to determine backend
+        if backend == RAY_BACKEND:
+            try:
+                try_import_ray()
+            except Exception as e:
+                warning_msg = f"Will use custom hpo logic because ray import failed. Reason: {str(e)}"
+                dup_filter.attach_filter_targets(warning_msg)
+                logger.warning(warning_msg)
+                backend = CUSTOM_BACKEND
+        hpo_executor = HpoExecutorFactory.get_hpo_executor(backend)()  # type: ignore
+        return hpo_executor
+
+    def _get_model_base(self) -> AbstractTimeSeriesModel:
+        return self
+
+    def _get_hpo_backend(self) -> str:
+        """Choose which backend("ray" or "custom") to use for hpo"""
+        if DistributedContext.is_distributed_mode():
+            return RAY_BACKEND
+        return CUSTOM_BACKEND
+
+    def _get_search_space(self):
+        """Sets up default search space for HPO. Each hyperparameter which user did not specify is converted from
+        default fixed value to default search space.
+        """
+        params = self.params.copy()
+        return params
+
+    def _save_with_data(self, train_data, val_data):
+        self.set_contexts(os.path.abspath(self.path))
+        dataset_train_filename = "dataset_train.pkl"
+        train_path = os.path.join(self.path, dataset_train_filename)
+        save_pkl.save(path=train_path, object=train_data)
+
+        dataset_val_filename = "dataset_val.pkl"
+        val_path = os.path.join(self.path, dataset_val_filename)
+        save_pkl.save(path=val_path, object=val_data)
+        return train_path, val_path
+
     def preprocess(  # type: ignore
         self,
         data: TimeSeriesDataFrame,
@@ -745,9 +916,12 @@ class AbstractTimeSeriesModel(AbstractModel):
         # TODO: move to new AbstractModel
         return data, known_covariates
 
-    def
-
-
+    def persist(self) -> Self:
+        """Ask the model to persist its assets in memory, i.e., to predict with low latency. In practice
+        this is used for pretrained models that have to lazy-load model parameters to device memory at
+        prediction time.
+        """
+        return self
 
     def convert_to_refit_full_via_copy(self) -> Self:
         # save the model as a new model on disk
@@ -764,6 +938,33 @@ class AbstractTimeSeriesModel(AbstractModel):
 
         return refit_model
 
+    def convert_to_refit_full_template(self):
+        """
+        After calling this function, returned model should be able to be fit without X_val, y_val using the iterations trained by the original model.
+
+        Increase max_memory_usage_ratio by 25% to reduce the chance that the refit model will trigger NotEnoughMemoryError and skip training.
+        This can happen without the 25% increase since the refit model generally will use more training data and thus require more memory.
+        """
+        params = copy.deepcopy(self.get_params())
+
+        if "hyperparameters" not in params:
+            params["hyperparameters"] = dict()
+
+        if AG_ARGS_FIT not in params["hyperparameters"]:
+            params["hyperparameters"][AG_ARGS_FIT] = dict()
+
+        # TODO: remove
+        # Increase memory limit by 25% to avoid memory restrictions during fit
+        params["hyperparameters"][AG_ARGS_FIT]["max_memory_usage_ratio"] = (
+            params["hyperparameters"][AG_ARGS_FIT].get("max_memory_usage_ratio", 1.0) * 1.25
+        )
+
+        params["hyperparameters"].update(self.params_trained)
+        params["name"] = params["name"] + REFIT_FULL_SUFFIX
+        template = self.__class__(**params)
+
+        return template
+
     def get_user_params(self) -> dict:
         """Used to access user-specified parameters for the model before initialization."""
         if self._user_params is None:
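Note: `convert_to_refit_full_template` returns an unfit copy meant to be retrained on training and validation data combined, as its docstring describes. A usage sketch with a hypothetical combined dataset:

refit_template = model.convert_to_refit_full_template()  # name gets REFIT_FULL_SUFFIX appended
refit_template.fit(train_data=train_plus_val_data)       # hypothetical TimeSeriesDataFrame with all data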
{autogluon.timeseries-1.2.1b20250219 → autogluon.timeseries-1.2.1b20250221}/src/autogluon.timeseries.egg-info/requires.txt

@@ -17,9 +17,9 @@ fugue>=0.9.0
 tqdm<5,>=4.38
 orjson~=3.9
 tensorboard<3,>=2.9
-autogluon.core[raytune]==1.2.1b20250219
-autogluon.common==1.2.1b20250219
-autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250219
+autogluon.core[raytune]==1.2.1b20250221
+autogluon.common==1.2.1b20250221
+autogluon.tabular[catboost,lightgbm,xgboost]==1.2.1b20250221
 
 [all]