autogluon.tabular 1.3.2b20250723__py3-none-any.whl → 1.4.0b20250724__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of autogluon.tabular has been flagged as potentially problematic.
- autogluon/tabular/configs/hyperparameter_configs.py +2 -265
- autogluon/tabular/configs/presets_configs.py +47 -21
- autogluon/tabular/configs/zeroshot/zeroshot_portfolio_2023.py +0 -1
- autogluon/tabular/configs/zeroshot/zeroshot_portfolio_2025.py +309 -0
- autogluon/tabular/models/mitra/_internal/core/trainer_finetune.py +18 -6
- autogluon/tabular/models/mitra/_internal/data/dataset_finetune.py +8 -4
- autogluon/tabular/models/mitra/_internal/data/dataset_split.py +5 -1
- autogluon/tabular/models/mitra/_internal/models/tab2d.py +3 -0
- autogluon/tabular/models/mitra/mitra_model.py +72 -21
- autogluon/tabular/models/mitra/sklearn_interface.py +15 -13
- autogluon/tabular/models/tabicl/tabicl_model.py +3 -3
- autogluon/tabular/models/tabm/rtdl_num_embeddings.py +3 -0
- autogluon/tabular/models/tabm/tabm_reference.py +2 -0
- autogluon/tabular/models/tabpfnv2/tabpfnv2_model.py +15 -6
- autogluon/tabular/predictor/predictor.py +41 -1
- autogluon/tabular/trainer/abstract_trainer.py +2 -0
- autogluon/tabular/version.py +1 -1
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/METADATA +38 -16
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/RECORD +26 -25
- /autogluon.tabular-1.3.2b20250723-py3.9-nspkg.pth → /autogluon.tabular-1.4.0b20250724-py3.9-nspkg.pth +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/LICENSE +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/NOTICE +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/WHEEL +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/namespace_packages.txt +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/top_level.txt +0 -0
- {autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/zip-safe +0 -0
autogluon/tabular/models/mitra/sklearn_interface.py
CHANGED
@@ -1,3 +1,6 @@
+from __future__ import annotations
+
+import os
 import time
 from pathlib import Path
 import contextlib
@@ -76,6 +79,7 @@ class MitraBase(BaseEstimator):
         random_mirror_regression=RANDOM_MIRROR_REGRESSION,
         random_mirror_x=RANDOM_MIRROR_X,
         seed=SEED,
+        verbose=True,
     ):
         """
         Initialize the base Mitra model.
@@ -114,8 +118,11 @@ class MitraBase(BaseEstimator):
         self.trainers = []
         self.train_time = 0
         self.seed = seed
+        self.verbose = verbose

-        set_seed(self.seed)
+        # FIXME: set_seed was removed in v1.4 as quality and speed reduction was observed when setting seed.
+        # This should be investigated and fixed for v1.5
+        # set_seed(self.seed)

     def _create_config(self, task, dim_output, time_limit=None):
         cfg = ConfigRun(
@@ -183,6 +190,7 @@ class MitraBase(BaseEstimator):
         """Train the ensemble of models."""

         cfg, Tab2D = self._create_config(task, dim_output, time_limit)
+        rng = np.random.RandomState(cfg.seed)

         success = False
         while not (success and cfg.hyperparams["max_samples_support"] > 0 and cfg.hyperparams["max_samples_query"] > 0):
@@ -217,7 +225,7 @@
                 path_to_weights=Path(self.state_dict),
                 device=self.device,
             )
-            trainer = TrainerFinetune(cfg, model, n_classes=n_classes, device=self.device)
+            trainer = TrainerFinetune(cfg, model, n_classes=n_classes, device=self.device, rng=rng, verbose=self.verbose)

             start_time = time.time()
             trainer.train(X_train, y_train, X_valid, y_valid)
@@ -275,6 +283,7 @@ class MitraClassifier(MitraBase, ClassifierMixin):
         random_mirror_regression=RANDOM_MIRROR_REGRESSION,
         random_mirror_x=RANDOM_MIRROR_X,
         seed=SEED,
+        verbose=True,
     ):
         """Initialize the classifier."""
         super().__init__(
@@ -294,6 +303,7 @@ class MitraClassifier(MitraBase, ClassifierMixin):
             random_mirror_regression=random_mirror_regression,
             random_mirror_x=random_mirror_x,
             seed=seed,
+            verbose=verbose,
         )
         self.task = 'classification'

@@ -403,6 +413,7 @@ class MitraRegressor(MitraBase, RegressorMixin):
         random_mirror_regression=RANDOM_MIRROR_REGRESSION,
         random_mirror_x=RANDOM_MIRROR_X,
         seed=SEED,
+        verbose=True,
     ):
         """Initialize the regressor."""
         super().__init__(
@@ -422,6 +433,7 @@ class MitraRegressor(MitraBase, RegressorMixin):
             random_mirror_regression=random_mirror_regression,
             random_mirror_x=random_mirror_x,
             seed=seed,
+            verbose=verbose,
         )
         self.task = 'regression'

@@ -492,14 +504,4 @@ class MitraRegressor(MitraBase, RegressorMixin):
 @contextlib.contextmanager
 def mitra_deterministic_context():
     """Context manager to set deterministic settings only for Mitra operations."""
-
-    original_deterministic_algorithms_set = False
-
-    try:
-        torch.use_deterministic_algorithms(True)
-        original_deterministic_algorithms_set = True
-        yield
-
-    finally:
-        if original_deterministic_algorithms_set:
-            torch.use_deterministic_algorithms(False)
+    yield
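Usage sketch (not part of the diff): the new `verbose` flag on the scikit-learn style wrapper changed above can be used as below. The import path comes from this package's RECORD; the toy data and the assumption that the wrapper follows the usual scikit-learn fit/predict signature are mine.

    # Hedged sketch: quieting Mitra's fine-tuning logs via the new `verbose` flag.
    # Fitting downloads pretrained weights and benefits from a GPU.
    import numpy as np
    from autogluon.tabular.models.mitra.sklearn_interface import MitraClassifier

    X = np.random.rand(64, 4)           # toy features (assumption, not from the diff)
    y = np.random.randint(0, 2, 64)     # toy binary labels
    clf = MitraClassifier(verbose=False)  # verbose defaults to True per the diff
    clf.fit(X, y)
    preds = clf.predict(X)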
autogluon/tabular/models/tabicl/tabicl_model.py
CHANGED
@@ -98,8 +98,8 @@ class TabICLModel(AbstractModel):
         default_auxiliary_params = super()._get_default_auxiliary_params()
         default_auxiliary_params.update(
             {
-                "max_rows":
-                "max_features":
+                "max_rows": 30000,
+                "max_features": 2000,
             }
         )
         return default_auxiliary_params
@@ -147,7 +147,7 @@ class TabICLModel(AbstractModel):
         model_mem_estimate *= 1.3  # add 30% buffer

         # TODO: Observed memory spikes above expected values on large datasets, increasing mem estimate to compensate
-        model_mem_estimate *= 1.5
+        model_mem_estimate *= 2.0  # Note: 1.5 is not large enough, still gets OOM

         mem_estimate = model_mem_estimate + dataset_size_mem_est + baseline_overhead_mem_est

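The 30000-row / 2000-feature caps above are auxiliary parameters. If I read AutoGluon's conventions correctly, per-model auxiliary parameters can be overridden through `ag_args_fit`; the model key "TABICL" and the override mechanism are assumptions on my part, not something this diff confirms.

    # Hedged sketch (assumed API): raising TabICL's row cap via ag_args_fit.
    import numpy as np
    import pandas as pd
    from autogluon.tabular import TabularPredictor

    train_data = pd.DataFrame({
        "f1": np.random.rand(200),
        "f2": np.random.rand(200),
        "target": np.random.randint(0, 2, 200),
    })
    predictor = TabularPredictor(label="target").fit(
        train_data,
        hyperparameters={"TABICL": {"ag_args_fit": {"max_rows": 50000}}},  # key names assumed
    )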
autogluon/tabular/models/tabm/rtdl_num_embeddings.py
CHANGED
@@ -1,6 +1,8 @@
 # taken from https://github.com/yandex-research/rtdl-num-embeddings/blob/main/package/rtdl_num_embeddings.py
 """On Embeddings for Numerical Features in Tabular Deep Learning."""

+from __future__ import annotations
+
 __version__ = '0.0.12'

 __all__ = [
@@ -12,6 +14,7 @@ __all__ = [
     'compute_bins',
 ]

+
 import math
 import warnings
 from typing import Any, Literal, Optional, Union
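The `from __future__ import annotations` lines added here (and in sklearn_interface.py above) postpone annotation evaluation. My reading, not stated in the diff, is that this keeps newer typing syntax importable on older Python versions, as the toy function below shows.

    from __future__ import annotations

    def make_bins(n: int | None = None) -> list[float]:
        # With the future import, `int | None` and `list[float]` are stored as
        # strings and never evaluated, so this imports cleanly even on Python 3.9.
        return [] if n is None else [0.0] * n

    print(make_bins(3))  # [0.0, 0.0, 0.0]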
autogluon/tabular/models/tabpfnv2/tabpfnv2_model.py
CHANGED
@@ -119,12 +119,14 @@ class TabPFNV2Model(AbstractModel):
         super().__init__(**kwargs)
         self._feature_generator = None
         self._cat_features = None
+        self._cat_indices = None

     def _preprocess(self, X: pd.DataFrame, is_train=False, **kwargs) -> pd.DataFrame:
         X = super()._preprocess(X, **kwargs)
-        self._cat_indices = []

         if is_train:
+            self._cat_indices = []
+
             # X will be the training data.
             self._feature_generator = LabelEncoderFeatureGenerator(verbosity=0)
             self._feature_generator.fit(X=X)
@@ -136,10 +138,11 @@ class TabPFNV2Model(AbstractModel):
                 X=X
             )

-
-
-            self._cat_features
-
+        if is_train:
+            # Detect/set cat features and indices
+            if self._cat_features is None:
+                self._cat_features = self._feature_generator.features_in[:]
+            self._cat_indices = [X.columns.get_loc(col) for col in self._cat_features]

         return X

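A tiny standalone illustration of the categorical-index bookkeeping above (toy column names are mine): the positional indices are computed once from the training frame at fit time and stored on the model for reuse.

    import pandas as pd

    X = pd.DataFrame({"age": [31, 47], "color": ["red", "blue"], "income": [40.0, 52.5]})
    cat_features = ["color"]  # stand-in for the detected categorical columns
    cat_indices = [X.columns.get_loc(col) for col in cat_features]
    print(cat_indices)  # [1] -> positional index of "color"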
@@ -187,6 +190,12 @@ class TabPFNV2Model(AbstractModel):
         # logs "Built with PriorLabs-TabPFN"
         self._log_license(device=device)

+        if num_gpus == 0:
+            logger.log(
+                30,
+                f"\tWARNING: Running TabPFNv2 on CPU. This can be very slow. We recommend using a GPU instead."
+            )
+
         X = self.preprocess(X, is_train=True)

         hps = self._get_model_params()
@@ -366,7 +375,7 @@

         # Add some buffer to each term + 1 GB overhead to be safe
         return int(
-            model_mem + 4 * X_mem +
+            model_mem + 4 * X_mem + 2 * activation_mem + baseline_overhead_mem_est
         )

     @classmethod
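A back-of-the-envelope reading of the estimator above; only the formula's shape comes from the diff, every byte count below is a made-up placeholder.

    model_mem = 2 * 1024**3                   # placeholder: ~2 GiB of weights
    X_mem = 200 * 1024**2                     # placeholder: ~200 MiB dataset
    activation_mem = 500 * 1024**2            # placeholder: ~500 MiB activations
    baseline_overhead_mem_est = 1 * 1024**3   # the "+ 1 GB overhead" buffer
    mem_estimate = int(model_mem + 4 * X_mem + 2 * activation_mem + baseline_overhead_mem_est)
    print(f"{mem_estimate / 1024**3:.2f} GiB estimated")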
autogluon/tabular/predictor/predictor.py
CHANGED
@@ -1068,7 +1068,7 @@ class TabularPredictor:
             20,
             "No presets specified! To achieve strong results with AutoGluon, it is recommended to use the available presets. Defaulting to `'medium'`...\n"
             "\tRecommended Presets (For more details refer to https://auto.gluon.ai/stable/tutorials/tabular/tabular-essentials.html#presets):\n"
-            "\tpresets='experimental' : New in v1.
+            "\tpresets='experimental' : New in v1.4: Massively better than 'best' on datasets <10000 samples by using new models: TabPFNv2, TabICL, Mitra, and TabM. Absolute best accuracy. Requires a GPU. Recommended 64 GB CPU memory and 32+ GB GPU memory.\n"
             "\tpresets='best' : Maximize accuracy. Recommended for most users. Use in competitions and benchmarks.\n"
             "\tpresets='high' : Strong accuracy with fast inference speed.\n"
             "\tpresets='good' : Good accuracy with very fast inference speed.\n"
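Usage sketch for the preset described in the message above; the preset name comes straight from the log text, the toy DataFrame is mine, and the run itself needs a GPU plus the foundation-model dependencies.

    # Hedged sketch: opting into the new v1.4 experimental preset.
    import numpy as np
    import pandas as pd
    from autogluon.tabular import TabularPredictor

    train_data = pd.DataFrame({
        "f1": np.random.rand(500),
        "f2": np.random.rand(500),
        "target": np.random.randint(0, 2, 500),
    })
    predictor = TabularPredictor(label="target").fit(train_data, presets="experimental")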
@@ -1127,10 +1127,48 @@ class TabularPredictor:
         )
         infer_limit, infer_limit_batch_size = self._validate_infer_limit(infer_limit=infer_limit, infer_limit_batch_size=infer_limit_batch_size)

+        # TODO: Temporary for v1.4. Make this more extensible for v1.5 by letting users make their own dynamic hyperparameters.
+        dynamic_hyperparameters = kwargs["_experimental_dynamic_hyperparameters"]
+        if dynamic_hyperparameters:
+            logger.log(20, f"Experimental preset uses a dynamic portfolio based on dataset size...")
+            assert hyperparameters is None, f"hyperparameters must be unspecified when `_experimental_dynamic_hyperparameters=True`."
+            n_samples = len(train_data)
+            if n_samples > 30000:
+                data_size = "large"
+            else:
+                data_size = "small"
+            assert data_size in ["large", "small"]
+            if data_size == "large":
+                logger.log(20, f"\tDetected data size: large (>30000 samples), using `zeroshot` portfolio (identical to 'best_quality' preset).")
+                hyperparameters = "zeroshot"
+            else:
+                if "num_stack_levels" not in kwargs_orig:
+                    # disable stacking for tabfm portfolio
+                    num_stack_levels = 0
+                    kwargs["num_stack_levels"] = 0
+                logger.log(
+                    20,
+                    f"\tDetected data size: small (<=30000 samples), using `zeroshot_2025_tabfm` portfolio."
+                    f"\n\t\tNote: `zeroshot_2025_tabfm` portfolio requires a CUDA compatible GPU for best performance."
+                    f"\n\t\tMake sure you have all the relevant dependencies installed: "
+                    f"`pip install autogluon.tabular[tabarena]`."
+                    f"\n\t\tIt is strongly recommended to use a machine with 64+ GB memory "
+                    f"and a CUDA compatible GPU with 32+ GB vRAM when using this preset. "
+                    f"\n\t\tThis portfolio will download foundation model weights from HuggingFace during training. "
+                    f"Ensure you have an internet connection or have pre-downloaded the weights to use these models."
+                    f"\n\t\tThis portfolio was meta-learned with TabArena: https://tabarena.ai"
+                )
+                hyperparameters = "zeroshot_2025_tabfm"
+
         if hyperparameters is None:
             hyperparameters = "default"
         if isinstance(hyperparameters, str):
+            hyperparameters_str = hyperparameters
             hyperparameters = get_hyperparameter_config(hyperparameters)
+            logger.log(
+                20,
+                f"Using hyperparameters preset: hyperparameters='{hyperparameters_str}'",
+            )
         self._validate_hyperparameters(hyperparameters=hyperparameters)
         self.fit_hyperparameters_ = hyperparameters

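A standalone restatement of the size-based selection added above: the 30000-sample threshold and the portfolio names are taken from the diff, while the helper function itself is only illustrative.

    def select_portfolio(n_samples: int) -> str:
        # >30000 samples: the classic "zeroshot" portfolio (matches 'best_quality')
        # <=30000 samples: "zeroshot_2025_tabfm", the TabArena-meta-learned
        # foundation-model portfolio (stacking disabled unless the user set it)
        return "zeroshot" if n_samples > 30000 else "zeroshot_2025_tabfm"

    print(select_portfolio(100_000))  # zeroshot
    print(select_portfolio(5_000))    # zeroshot_2025_tabfm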
@@ -5042,6 +5080,8 @@ class TabularPredictor:
         learning_curves=False,
         test_data=None,
         raise_on_model_failure=False,
+        # experimental
+        _experimental_dynamic_hyperparameters=False,
     )
     kwargs, ds_valid_keys = self._sanitize_dynamic_stacking_kwargs(kwargs)
     kwargs = self._validate_fit_extra_kwargs(kwargs, extra_valid_keys=list(fit_kwargs_default.keys()) + ds_valid_keys)
autogluon/tabular/trainer/abstract_trainer.py
CHANGED
@@ -2131,6 +2131,8 @@ class AbstractTabularTrainer(AbstractTrainer[AbstractModel]):
         if isinstance(model, BaggedEnsembleModel) and not compute_score:
             # Do not perform OOF predictions when we don't compute a score.
             model_fit_kwargs["_skip_oof"] = True
+        if not isinstance(model, BaggedEnsembleModel):
+            model_fit_kwargs.setdefault("log_resources", True)

         model_fit_kwargs = dict(
             model=model,
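A two-line illustration of the `setdefault` pattern added above: resource logging is enabled by default for non-bagged model fits, but any value already present in `model_fit_kwargs` wins.

    model_fit_kwargs = {"log_resources": False}          # explicitly set elsewhere
    model_fit_kwargs.setdefault("log_resources", True)   # no-op: key already present
    print(model_fit_kwargs["log_resources"])             # False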
autogluon/tabular/version.py
CHANGED
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.tabular
-Version: 1.3.2b20250723
+Version: 1.4.0b20250724
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -41,20 +41,22 @@ Requires-Dist: scipy<1.17,>=1.5.4
 Requires-Dist: pandas<2.4.0,>=2.0.0
 Requires-Dist: scikit-learn<1.8.0,>=1.4.0
 Requires-Dist: networkx<4,>=3.0
-Requires-Dist: autogluon.core==1.3.2b20250723
-Requires-Dist: autogluon.features==1.3.2b20250723
+Requires-Dist: autogluon.core==1.4.0b20250724
+Requires-Dist: autogluon.features==1.4.0b20250724
 Provides-Extra: all
+Requires-Dist: transformers; extra == "all"
+Requires-Dist: huggingface-hub[torch]; extra == "all"
+Requires-Dist: numpy<2.3.0,>=1.25; extra == "all"
+Requires-Dist: autogluon.core[all]==1.4.0b20250724; extra == "all"
 Requires-Dist: spacy<3.9; extra == "all"
-Requires-Dist:
+Requires-Dist: fastai<2.9,>=2.3.1; extra == "all"
 Requires-Dist: torch<2.8,>=2.2; extra == "all"
-Requires-Dist: pytabkit<1.6,>=1.5; extra == "all"
 Requires-Dist: catboost<1.3,>=1.2; extra == "all"
-Requires-Dist: fastai<2.9,>=2.3.1; extra == "all"
-Requires-Dist: huggingface-hub[torch]; extra == "all"
-Requires-Dist: einops<0.9,>=0.7; extra == "all"
-Requires-Dist: numpy<2.3.0,>=1.25; extra == "all"
-Requires-Dist: lightgbm<4.7,>=4.0; extra == "all"
 Requires-Dist: xgboost<3.1,>=2.0; extra == "all"
+Requires-Dist: loguru; extra == "all"
+Requires-Dist: einx; extra == "all"
+Requires-Dist: lightgbm<4.7,>=4.0; extra == "all"
+Requires-Dist: omegaconf; extra == "all"
 Requires-Dist: blis<1.2.1,>=0.7.0; (platform_system == "Windows" and python_version == "3.9") and extra == "all"
 Provides-Extra: catboost
 Requires-Dist: numpy<2.3.0,>=1.25; extra == "catboost"
@@ -72,9 +74,11 @@ Provides-Extra: mitra
 Requires-Dist: loguru; extra == "mitra"
 Requires-Dist: einx; extra == "mitra"
 Requires-Dist: omegaconf; extra == "mitra"
+Requires-Dist: torch<2.8,>=2.2; extra == "mitra"
 Requires-Dist: transformers; extra == "mitra"
+Requires-Dist: huggingface-hub[torch]; extra == "mitra"
 Provides-Extra: ray
-Requires-Dist: autogluon.core[all]==1.3.2b20250723; extra == "ray"
+Requires-Dist: autogluon.core[all]==1.4.0b20250724; extra == "ray"
 Provides-Extra: realmlp
 Requires-Dist: pytabkit<1.6,>=1.5; extra == "realmlp"
 Provides-Extra: skex
@@ -85,6 +89,24 @@ Requires-Dist: onnxruntime<1.20.0,>=1.17.0; extra == "skl2onnx"
 Requires-Dist: onnxruntime-gpu<1.20.0,>=1.17.0; extra == "skl2onnx"
 Requires-Dist: onnx<1.18.0,>=1.13.0; platform_system != "Windows" and extra == "skl2onnx"
 Requires-Dist: onnx<1.16.2,>=1.13.0; platform_system == "Windows" and extra == "skl2onnx"
+Provides-Extra: tabarena
+Requires-Dist: tabpfn<2.2,>=2.0.9; extra == "tabarena"
+Requires-Dist: transformers; extra == "tabarena"
+Requires-Dist: huggingface-hub[torch]; extra == "tabarena"
+Requires-Dist: pytabkit<1.6,>=1.5; extra == "tabarena"
+Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tabarena"
+Requires-Dist: numpy<2.3.0,>=1.25; extra == "tabarena"
+Requires-Dist: autogluon.core[all]==1.4.0b20250724; extra == "tabarena"
+Requires-Dist: spacy<3.9; extra == "tabarena"
+Requires-Dist: fastai<2.9,>=2.3.1; extra == "tabarena"
+Requires-Dist: torch<2.8,>=2.2; extra == "tabarena"
+Requires-Dist: catboost<1.3,>=1.2; extra == "tabarena"
+Requires-Dist: xgboost<3.1,>=2.0; extra == "tabarena"
+Requires-Dist: loguru; extra == "tabarena"
+Requires-Dist: einx; extra == "tabarena"
+Requires-Dist: lightgbm<4.7,>=4.0; extra == "tabarena"
+Requires-Dist: omegaconf; extra == "tabarena"
+Requires-Dist: blis<1.2.1,>=0.7.0; (platform_system == "Windows" and python_version == "3.9") and extra == "tabarena"
 Provides-Extra: tabicl
 Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tabicl"
 Provides-Extra: tabm
@@ -96,16 +118,16 @@ Requires-Dist: torch<2.8,>=2.2; extra == "tabpfnmix"
 Requires-Dist: huggingface-hub[torch]; extra == "tabpfnmix"
 Requires-Dist: einops<0.9,>=0.7; extra == "tabpfnmix"
 Provides-Extra: tests
+Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tests"
 Requires-Dist: tabpfn<2.2,>=2.0.9; extra == "tests"
+Requires-Dist: pytabkit<1.6,>=1.5; extra == "tests"
+Requires-Dist: torch<2.8,>=2.2; extra == "tests"
+Requires-Dist: huggingface-hub[torch]; extra == "tests"
+Requires-Dist: einops<0.9,>=0.7; extra == "tests"
 Requires-Dist: imodels<2.1.0,>=1.3.10; extra == "tests"
 Requires-Dist: skl2onnx<1.18.0,>=1.15.0; extra == "tests"
 Requires-Dist: onnxruntime<1.20.0,>=1.17.0; extra == "tests"
 Requires-Dist: onnxruntime-gpu<1.20.0,>=1.17.0; extra == "tests"
-Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tests"
-Requires-Dist: loguru; extra == "tests"
-Requires-Dist: einx; extra == "tests"
-Requires-Dist: omegaconf; extra == "tests"
-Requires-Dist: transformers; extra == "tests"
 Requires-Dist: onnx<1.18.0,>=1.13.0; platform_system != "Windows" and extra == "tests"
 Requires-Dist: onnx<1.16.2,>=1.13.0; platform_system == "Windows" and extra == "tests"
 Provides-Extra: xgboost
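The new `tabarena` extra above matches the dependency set referenced by the predictor's small-data portfolio message (`pip install autogluon.tabular[tabarena]`). A quick, hedged way to check whether the foundation-model packages it pulls in are present (package list taken from the metadata above; module names assumed to match the distribution names):

    import importlib.util

    for pkg in ["tabpfn", "tabicl", "pytabkit", "transformers", "huggingface_hub"]:
        spec = importlib.util.find_spec(pkg)
        print(f"{pkg}: {'installed' if spec else 'missing'}")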
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/RECORD
RENAMED
@@ -1,13 +1,14 @@
-autogluon.tabular-1.
+autogluon.tabular-1.4.0b20250724-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/tabular/__init__.py,sha256=2OXpJCvENRHubBTYNIPpHX93WWuFZzsJBtTZbNVHVas,400
-autogluon/tabular/version.py,sha256=
+autogluon/tabular/version.py,sha256=sQbAMP9TVXNv7nE2ealK-zzy0cZUUxkxcLTpbykHhxU,91
 autogluon/tabular/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/configs/config_helper.py,sha256=JsdVGmpcYL88GPKBznPtqJ1sGaByOSvLn7KWU-HyVoQ,21085
 autogluon/tabular/configs/feature_generator_presets.py,sha256=EV5Ym8VW15q92MwOUpTi7wZFS2QooM51fLg3RdUsn-M,1223
-autogluon/tabular/configs/hyperparameter_configs.py,sha256=
-autogluon/tabular/configs/presets_configs.py,sha256=
+autogluon/tabular/configs/hyperparameter_configs.py,sha256=aQ1rrF8P0MX4Ic5M33O96JtKV-K7YpDrgJmWhYmEyug,6848
+autogluon/tabular/configs/presets_configs.py,sha256=hIT9CgdIol5y-YLKrcupXWySP_P9HZp_ky_vXmFrFWk,7682
 autogluon/tabular/configs/zeroshot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/configs/zeroshot/zeroshot_portfolio_2023.py,sha256=
+autogluon/tabular/configs/zeroshot/zeroshot_portfolio_2023.py,sha256=6yd84vPqOk-6sLCoM_e_PlphrR2NZUjliS7L1SMKMug,29777
+autogluon/tabular/configs/zeroshot/zeroshot_portfolio_2025.py,sha256=1EH54KvJT35xNVegh1SuvBWt0Qx07vQUdHyc10TwaOI,11966
 autogluon/tabular/experimental/__init__.py,sha256=PpkdMSv_pPZted1XRIuzcFWKjM-66VMUukTnCcoiW0s,100
 autogluon/tabular/experimental/_scikit_mixin.py,sha256=cKeCmtURAXZnhQGrkCBw5rmACCQF7biAWTT3qX8bM2Q,2281
 autogluon/tabular/experimental/_tabular_classifier.py,sha256=7lGoFdvkHiZS3VpcXo97q4ENV9qyIVDExlWkm0wzL3s,2527
@@ -68,8 +69,8 @@ autogluon/tabular/models/lr/hyperparameters/__init__.py,sha256=47DEQpj8HBSa-_TIm
 autogluon/tabular/models/lr/hyperparameters/parameters.py,sha256=Hr5YC13zjbt3CfCbzGj8iXUIuDn-Q7FvDT2uSuiSVlM,1414
 autogluon/tabular/models/lr/hyperparameters/searchspaces.py,sha256=Igywc-B6qJ9EBLdasrDhW-Ot5FGirIzbXLwv5HRe5Xo,276
 autogluon/tabular/models/mitra/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/models/mitra/mitra_model.py,sha256=
-autogluon/tabular/models/mitra/sklearn_interface.py,sha256=
+autogluon/tabular/models/mitra/mitra_model.py,sha256=ha-54y_LgEysUpcPr7dB76fETu9kEQ2oLUXteGYVVeE,11927
+autogluon/tabular/models/mitra/sklearn_interface.py,sha256=Znwx1uMagauu1DwcutM_kgGY8maQrxOE0KsP1uS46qE,18751
 autogluon/tabular/models/mitra/_internal/__init__.py,sha256=dN2dz1pGMgQTFiSf9oYbyq23iJUxV8QNlOX3qw3KUO4,35
 autogluon/tabular/models/mitra/_internal/config/__init__.py,sha256=Exu_Sx6-K-D5peDQ_TibsjZpqAALs2-9IXfq8hu1mwU,40
 autogluon/tabular/models/mitra/_internal/config/config_pretrain.py,sha256=CeaD96EcDX69LdcLTYGlFmYLdBNINEJXRMWmJ6LbhTg,6038
@@ -81,16 +82,16 @@ autogluon/tabular/models/mitra/_internal/core/get_loss.py,sha256=hv0t7zvyZ-DgA5P
 autogluon/tabular/models/mitra/_internal/core/get_optimizer.py,sha256=UgGO6lduVZTKZmYAmE207o2Dqs4e3_hyzaoSOQ0iK6A,3412
 autogluon/tabular/models/mitra/_internal/core/get_scheduler.py,sha256=2lzdAxDOYZNq76pmK-FjCOX5MX6cqUSMjqVu8BX9jfY,2238
 autogluon/tabular/models/mitra/_internal/core/prediction_metrics.py,sha256=fai0VnDm0mNjJzx8e1JXdB77PKQsmfbtn8zybD9_qD0,4394
-autogluon/tabular/models/mitra/_internal/core/trainer_finetune.py,sha256=
+autogluon/tabular/models/mitra/_internal/core/trainer_finetune.py,sha256=tI8sN9mv3PtEBdmDxcBgzderZ7YQdtn6MxtOWAc8or8,17908
 autogluon/tabular/models/mitra/_internal/data/__init__.py,sha256=u4ZTvTQNIHqqxilkVqTmYShI2jFMCOyMdv1GRExvtj0,42
 autogluon/tabular/models/mitra/_internal/data/collator.py,sha256=o2F7ODs_eUnV947lCQTx9RugrANidCdiwnZWtdVNJnE,2300
-autogluon/tabular/models/mitra/_internal/data/dataset_finetune.py,sha256=
-autogluon/tabular/models/mitra/_internal/data/dataset_split.py,sha256=
+autogluon/tabular/models/mitra/_internal/data/dataset_finetune.py,sha256=AYxyQ1NJZ3pAp6ny-Y_hqw_4VtyW5X1AABchf7pVsSM,4340
+autogluon/tabular/models/mitra/_internal/data/dataset_split.py,sha256=0uvfyiKrzipde4ZcCDwTE1E3zHelE8xbuNvCeL38J5c,2033
 autogluon/tabular/models/mitra/_internal/data/preprocessor.py,sha256=zx2pWrpDaGSSawPaj7ieRjFOtct_Fyh8LYjo_YtlNG0,13821
 autogluon/tabular/models/mitra/_internal/models/__init__.py,sha256=K0vh5pyrntXp-o7gWNgQ0ZvDbxgeQuRgb6u8ecdjFhA,45
 autogluon/tabular/models/mitra/_internal/models/base.py,sha256=PKpMPT5OT9JFnmYPnhzFUeZPwdNM1e-k97_gW8GZq0Y,468
 autogluon/tabular/models/mitra/_internal/models/embedding.py,sha256=74O6cGWhUyHxg4-wiQwy4sPeDYQze2ekI9H5mLUtSLg,6223
-autogluon/tabular/models/mitra/_internal/models/tab2d.py,sha256=
+autogluon/tabular/models/mitra/_internal/models/tab2d.py,sha256=TorZsQR7LE5QRq2EAq1iT2asLuuAHpgy-PXXrTMxgSs,25743
 autogluon/tabular/models/mitra/_internal/utils/__init__.py,sha256=0mhykAqjMmcEc8Y2od_DMPMk8f66LZHWM7qFdUrPddU,34
 autogluon/tabular/models/mitra/_internal/utils/set_seed.py,sha256=UnXzYfhmfT_tNAofKtLkKpwB9b6HVf9cpI4mKvoBuNM,340
 autogluon/tabular/models/realmlp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -103,12 +104,12 @@ autogluon/tabular/models/rf/compilers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
 autogluon/tabular/models/rf/compilers/native.py,sha256=HhaqQRkVuf9UEEJPsHcdYCmuWBMYtyqRwwB_N2qxG2M,1313
 autogluon/tabular/models/rf/compilers/onnx.py,sha256=pvaZWdl2JJaE2pFU0mFugzhnybePqe0x1-5oLOvogA0,4318
 autogluon/tabular/models/tabicl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/models/tabicl/tabicl_model.py,sha256=
+autogluon/tabular/models/tabicl/tabicl_model.py,sha256=ovytUwFdqpVelEp1cDhHczWQkG72icAnuXTW6GdBi_8,6087
 autogluon/tabular/models/tabm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/tabm/_tabm_internal.py,sha256=fRQ-s5PN94kWqf3LRDen7su_fd-d332YKxdms30FoZM,21066
-autogluon/tabular/models/tabm/rtdl_num_embeddings.py,sha256=
+autogluon/tabular/models/tabm/rtdl_num_embeddings.py,sha256=XssNMaUM0E0G8Grzl_VkVsLt2FcMf3I4cplfvQdVum0,30156
 autogluon/tabular/models/tabm/tabm_model.py,sha256=IQ4RHM1wnf9GHuEa1zDO_yWUPfmh5xUMEVtQ4EFeQRI,10152
-autogluon/tabular/models/tabm/tabm_reference.py,sha256=
+autogluon/tabular/models/tabm/tabm_reference.py,sha256=byyP6lcJjA4THbP1VDTgJkj62zyz2S3mEvxWB-kFROw,21944
 autogluon/tabular/models/tabpfnmix/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/tabpfnmix/tabpfnmix_model.py,sha256=7cLjAfstq6Xb-l2DxBdwtSAIanSJN2sMfKPtijDQwXo,16193
 autogluon/tabular/models/tabpfnmix/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -136,7 +137,7 @@ autogluon/tabular/models/tabpfnmix/_internal/models/foundation/foundation_transf
 autogluon/tabular/models/tabpfnmix/_internal/results/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/tabpfnmix/_internal/results/prediction_metrics.py,sha256=1tRPHyViSSLJ7BkQJi6wai-PwXJ56od86Dy1WWKWZq4,1743
 autogluon/tabular/models/tabpfnv2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/models/tabpfnv2/tabpfnv2_model.py,sha256=
+autogluon/tabular/models/tabpfnv2/tabpfnv2_model.py,sha256=e3M_JM92ebbOAk4raZSgrWxaVkGlbbMedlHJVmyXfJ4,14570
 autogluon/tabular/models/tabpfnv2/rfpfn/__init__.py,sha256=yE5XAhGxKEFV0JcelZ_JTQZIWGlVEVUQ9a-lxcH_Esc,585
 autogluon/tabular/models/tabpfnv2/rfpfn/configs.py,sha256=lzBY9kKOeBZACVrtRDPHF4ATs9g1rxyNnIs2CMjE20c,1175
 autogluon/tabular/models/tabpfnv2/rfpfn/scoring_utils.py,sha256=uvHsfvnnMdg4tP3_7zAilktkw7nr65LaqfVKXabXAow,6785
@@ -172,7 +173,7 @@ autogluon/tabular/models/xt/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 autogluon/tabular/models/xt/xt_model.py,sha256=qOHJ5h1lHI7uYJfbl0BWm-29R3MNp2WeZB9ptcq5Xis,1003
 autogluon/tabular/predictor/__init__.py,sha256=zCMgjxQlWpDWnr1l1xjBCiK3rWC3N3RoD8UXBnazT74,107
 autogluon/tabular/predictor/interpretable_predictor.py,sha256=5UeKgnMFsfY65tiO3kxfHBPr03lyswLrgdtjPhI0Y7Q,6934
-autogluon/tabular/predictor/predictor.py,sha256=
+autogluon/tabular/predictor/predictor.py,sha256=E4Z_2V0T3tBNuH8dxCm1pDpGkyGDs7rEgDE1SKOy0ow,359075
 autogluon/tabular/registry/__init__.py,sha256=vZpzX4Xve7bfA9crt5LxjgQv9PPfxbi1E1U6Im0Y_xU,93
 autogluon/tabular/registry/_ag_model_registry.py,sha256=Aa-o_KZZiroPBpvZozIBXOlWYvQJN-MVsl_Gl66gkE8,1550
 autogluon/tabular/registry/_model_registry.py,sha256=Rl8Q7BLzaif4hxNxJF20xGE02vrWwh2ZuUaTmA-UJnE,6824
@@ -181,18 +182,18 @@ autogluon/tabular/testing/fit_helper.py,sha256=0eTvPtqM8k8hlOUIHQiwTzik4juTjHQt1
 autogluon/tabular/testing/generate_datasets.py,sha256=nvcAmI-tOh5fwx_ZTx2aRa1n7CsXb96wbR-xqNy1C5w,3884
 autogluon/tabular/testing/model_fit_helper.py,sha256=ZjWpw2nyeFnsrccmkfQtx3qbA8HJx282XX2rwdS-LIs,3808
 autogluon/tabular/trainer/__init__.py,sha256=PW_PGL-tWoQzx3ES2S53bQEZOtsRWTYiM9QdOqsk0dI,38
-autogluon/tabular/trainer/abstract_trainer.py,sha256=
+autogluon/tabular/trainer/abstract_trainer.py,sha256=9FiBqOV2h8era6KfydFSqhTlh7RnHkvlvzqsZuij7nE,232527
 autogluon/tabular/trainer/auto_trainer.py,sha256=ZQgQKFT1iHzzun5o5ojdq5pSQmr9ctTkNhe2r9OPOr0,8731
 autogluon/tabular/trainer/model_presets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/trainer/model_presets/presets.py,sha256=hoWADaOG576Q_XLV1nY_ju1OWi7EJwHay4jjljqt_E0,16546
 autogluon/tabular/trainer/model_presets/presets_distill.py,sha256=MnFC2GJc6RmDBNAGbsO2XMfo3PjR8cUrZoilWW8gTYQ,3295
 autogluon/tabular/tuning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/tuning/feature_pruner.py,sha256=9iNku8gVbYEkjuKlyITPJDicsNkoraaQOlINQq9iZlQ,6877
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
-autogluon.tabular-1.
+autogluon.tabular-1.4.0b20250724.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.tabular-1.4.0b20250724.dist-info/METADATA,sha256=N8GqPqMCIoGwOwmffPt1scTY3FPCZdP3f3uds0Nm8PI,16071
+autogluon.tabular-1.4.0b20250724.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.tabular-1.4.0b20250724.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.tabular-1.4.0b20250724.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.tabular-1.4.0b20250724.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.tabular-1.4.0b20250724.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.tabular-1.4.0b20250724.dist-info/RECORD,,
/autogluon.tabular-1.3.2b20250723-py3.9-nspkg.pth → /autogluon.tabular-1.4.0b20250724-py3.9-nspkg.pth
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/LICENSE
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/NOTICE
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/WHEEL
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/namespace_packages.txt
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/top_level.txt
RENAMED
File without changes
{autogluon.tabular-1.3.2b20250723.dist-info → autogluon.tabular-1.4.0b20250724.dist-info}/zip-safe
RENAMED
File without changes