autogluon.tabular 1.4.1b20250909__py3-none-any.whl → 1.4.1b20251006__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autogluon/tabular/models/catboost/catboost_model.py +2 -0
- autogluon/tabular/models/knn/knn_model.py +6 -2
- autogluon/tabular/models/lgb/lgb_model.py +54 -15
- autogluon/tabular/models/lr/lr_model.py +4 -0
- autogluon/tabular/models/rf/rf_model.py +4 -0
- autogluon/tabular/models/xgboost/xgboost_model.py +7 -1
- autogluon/tabular/version.py +1 -1
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/METADATA +25 -25
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/RECORD +16 -16
- /autogluon.tabular-1.4.1b20250909-py3.9-nspkg.pth → /autogluon.tabular-1.4.1b20251006-py3.9-nspkg.pth +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/LICENSE +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/NOTICE +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/WHEEL +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/namespace_packages.txt +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/top_level.txt +0 -0
- {autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/zip-safe +0 -0
autogluon/tabular/models/catboost/catboost_model.py
CHANGED
@@ -314,6 +314,8 @@ class CatBoostModel(AbstractModel):
         max_memory_iters = math.floor(available_mem * max_memory_proportion / mem_usage_per_iter)

         final_iters = min(default_iters, min(max_memory_iters, estimated_iters_in_time))
+        if final_iters < 1:
+            raise TimeLimitExceeded
         return final_iters

     def _predict_proba(self, X, **kwargs):
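The hunk above adds a guard: when the iteration budget derived from memory and time limits collapses below one, CatBoost fitting now raises TimeLimitExceeded instead of attempting a zero-iteration fit. A minimal standalone sketch of that pattern, not AutoGluon's actual code (the TimeLimitExceeded class and compute_final_iters helper below are stand-ins; the real exception comes from AutoGluon's core utilities):

import math


class TimeLimitExceeded(Exception):
    """Signals that no training iterations fit in the remaining time/memory budget."""


def compute_final_iters(default_iters, available_mem, max_memory_proportion, mem_usage_per_iter, estimated_iters_in_time):
    # Cap iterations by how many fit in the allowed share of available memory.
    max_memory_iters = math.floor(available_mem * max_memory_proportion / mem_usage_per_iter)
    # Take the tightest of the default, memory, and time budgets.
    final_iters = min(default_iters, min(max_memory_iters, estimated_iters_in_time))
    if final_iters < 1:
        # New behavior in this release: bail out instead of training a 0-iteration model.
        raise TimeLimitExceeded
    return final_iters


# Example: a tiny memory budget collapses to 0 iterations and raises.
try:
    compute_final_iters(10000, available_mem=1e6, max_memory_proportion=0.1, mem_usage_per_iter=2e5, estimated_iters_in_time=500)
except TimeLimitExceeded:
    print("not enough budget for even one CatBoost iteration")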
autogluon/tabular/models/knn/knn_model.py
CHANGED
@@ -255,9 +255,13 @@ class KNNModel(AbstractModel):
         self._X_unused_index = [i for i in range(num_rows_max) if i not in idx]
         return self.model

-    def _get_maximum_resources(self) ->
+    def _get_maximum_resources(self) -> dict[str, int | float]:
         # use at most 32 cpus to avoid OpenBLAS error: https://github.com/autogluon/autogluon/issues/1020
-
+        # no GPU support
+        return {
+            "num_cpus": 32,
+            "num_gpus": 0,
+        }

     def _get_default_resources(self):
         # use at most 32 cpus to avoid OpenBLAS error: https://github.com/autogluon/autogluon/issues/1020
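This KNN change, like the matching LinearModel and RFModel hunks further down, makes _get_maximum_resources explicitly cap CPUs and declare zero GPUs. A rough sketch of how a caller might clamp a resource request against such a maximum; the clamp_resources helper is hypothetical, only the returned dict shape comes from the diff:

from __future__ import annotations


def get_maximum_resources() -> dict[str, int | float]:
    # Mirrors the new KNNModel behavior: at most 32 CPUs, no GPU support.
    return {"num_cpus": 32, "num_gpus": 0}


def clamp_resources(requested: dict[str, int | float]) -> dict[str, int | float]:
    # Hypothetical helper: never grant more than the model-declared maximum.
    maximum = get_maximum_resources()
    return {key: min(value, maximum.get(key, value)) for key, value in requested.items()}


print(clamp_resources({"num_cpus": 64, "num_gpus": 1}))  # {'num_cpus': 32, 'num_gpus': 0}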
autogluon/tabular/models/lgb/lgb_model.py
CHANGED
@@ -168,7 +168,7 @@ class LGBModel(AbstractModel):
             # Before enabling GPU, we should add code to detect that GPU-enabled version is installed and that a valid GPU exists.
             # GPU training heavily alters accuracy, often in a negative manner. We will have to be careful about when to use GPU.
             params["device"] = "gpu"
-            logger.log(20, f"\
+            logger.log(20, f"\tWarning: Training LightGBM with GPU. This may negatively impact model quality compared to CPU training.")
         logger.log(15, f"\tFitting {num_boost_round} rounds... Hyperparameters: {params}")

         if "num_threads" not in params:
@@ -305,16 +305,28 @@ class LGBModel(AbstractModel):
        try:
            self.model = train_lgb_model(early_stopping_callback_kwargs=early_stopping_callback_kwargs, **train_params)
        except LightGBMError:
-            if train_params["params"].get("device", "cpu")
+            if train_params["params"].get("device", "cpu") not in ["gpu", "cuda"]:
                raise
            else:
-
-
-
-
-
-
-
+                if train_params["params"]["device"] == "gpu":
+                    logger.warning(
+                        "Warning: GPU mode might not be installed for LightGBM, "
+                        "GPU training raised an exception. Falling back to CPU training..."
+                        "Refer to LightGBM GPU documentation: "
+                        "https://github.com/Microsoft/LightGBM/tree/master/python-package#build-gpu-version"
+                        "One possible method is:"
+                        "\tpip uninstall lightgbm -y"
+                        "\tpip install lightgbm --install-option=--gpu"
+                    )
+                elif train_params["params"]["device"] == "cuda":
+                    # Current blocker for using CUDA over GPU: https://github.com/microsoft/LightGBM/issues/6828
+                    # Note that device="cuda" works if AutoGluon (and therefore LightGBM) is installed via conda.
+                    logger.warning(
+                        "Warning: CUDA mode might not be installed for LightGBM, "
+                        "CUDA training raised an exception. Falling back to CPU training..."
+                        "Refer to LightGBM CUDA documentation: "
+                        "https://github.com/Microsoft/LightGBM/tree/master/python-package#build-cuda-version"
+                    )
                train_params["params"]["device"] = "cpu"
                self.model = train_lgb_model(early_stopping_callback_kwargs=early_stopping_callback_kwargs, **train_params)
        retrain = False
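The hunk above extends the existing GPU fallback to also cover device="cuda": if LightGBM raises while training on either accelerator device, a warning is logged and training is retried on CPU. A condensed sketch of that retry shape under stated assumptions (train_lgb_model and train_params are stand-ins for the objects the diff references, and a generic Exception replaces lightgbm.basic.LightGBMError to keep the example self-contained):

import logging

logger = logging.getLogger(__name__)


def fit_with_cpu_fallback(train_lgb_model, train_params):
    """Try the configured device first; on failure with gpu/cuda, retry on CPU."""
    try:
        return train_lgb_model(**train_params)
    except Exception:  # the real code catches LightGBMError specifically
        device = train_params["params"].get("device", "cpu")
        if device not in ["gpu", "cuda"]:
            raise  # CPU failures are real failures; re-raise them.
        logger.warning(f"{device} training raised an exception. Falling back to CPU training...")
        train_params["params"]["device"] = "cpu"
        return train_lgb_model(**train_params)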
@@ -515,17 +527,44 @@ class LGBModel(AbstractModel):
         default_auxiliary_params.update(extra_auxiliary_params)
         return default_auxiliary_params

-
+    @staticmethod
+    def _is_gpu_lgbm_installed():
         # Taken from https://github.com/microsoft/LightGBM/issues/3939
         try_import_lightgbm()
         import lightgbm

+        rng = np.random.RandomState(42)
+        data = rng.rand(25, 2)
+        label = rng.randint(2, size=25)
+
+        try:
+            train_data = lightgbm.Dataset(data, label=label)
+            params = {
+                "device": "gpu",
+                "verbose": -1,
+            }
+            gbm = lightgbm.train(params, num_boost_round=10, train_set=train_data)
+            return True
+        except Exception as e:
+            return False
+
+    @staticmethod
+    def _is_cuda_lgbm_installed():
+        # Taken from https://github.com/microsoft/LightGBM/issues/3939
+        try_import_lightgbm()
+        import lightgbm
+
+        rng = np.random.RandomState(42)
+        data = rng.rand(25, 2)
+        label = rng.randint(2, size=25)
+
         try:
-            data = np.random.rand(50, 2)
-            label = np.random.randint(2, size=50)
             train_data = lightgbm.Dataset(data, label=label)
-            params = {
-
+            params = {
+                "device": "cuda",
+                "verbose": -1,
+            }
+            gbm = lightgbm.train(params, num_boost_round=10, train_set=train_data)
             return True
         except Exception as e:
             return False
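The new _is_gpu_lgbm_installed / _is_cuda_lgbm_installed staticmethods probe device support by training a throwaway 25-row booster and treating any exception as "not installed". The same probe can be run outside AutoGluon as a plain function; this is an adaptation of the snippet in the diff, assuming lightgbm and numpy are importable:

import numpy as np


def lgbm_device_works(device: str) -> bool:
    """Return True if LightGBM can train a tiny model on `device` ('gpu' or 'cuda')."""
    try:
        import lightgbm

        rng = np.random.RandomState(42)
        data = rng.rand(25, 2)
        label = rng.randint(2, size=25)
        train_set = lightgbm.Dataset(data, label=label)
        # Any failure here (missing GPU build, no device, driver issues) means "not available".
        lightgbm.train({"device": device, "verbose": -1}, num_boost_round=10, train_set=train_set)
        return True
    except Exception:
        return False


if __name__ == "__main__":
    print("gpu build:", lgbm_device_works("gpu"))
    print("cuda build:", lgbm_device_works("cuda"))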
@@ -534,7 +573,7 @@ class LGBModel(AbstractModel):
        minimum_resources = {
            "num_cpus": 1,
        }
-        if is_gpu_available
+        if is_gpu_available:
            minimum_resources["num_gpus"] = 0.5
        return minimum_resources

autogluon/tabular/models/lr/lr_model.py
CHANGED
@@ -322,6 +322,10 @@ class LinearModel(AbstractModel):
     ) -> int:
         return 4 * get_approximate_df_mem_usage(X).sum()

+    def _get_maximum_resources(self) -> dict[str, int | float]:
+        # no GPU support
+        return {"num_gpus": 0}
+
     @classmethod
     def supported_problem_types(cls) -> list[str] | None:
         return ["binary", "multiclass", "regression"]
autogluon/tabular/models/rf/rf_model.py
CHANGED
@@ -370,6 +370,10 @@ class RFModel(AbstractModel):

         return self._convert_proba_to_unified_form(y_oof_pred_proba)

+    def _get_maximum_resources(self) -> dict[str, int | float]:
+        # no GPU support
+        return {"num_gpus": 0}
+
     def _get_default_auxiliary_params(self) -> dict:
         default_auxiliary_params = super()._get_default_auxiliary_params()
         extra_auxiliary_params = dict(
autogluon/tabular/models/xgboost/xgboost_model.py
CHANGED
@@ -186,12 +186,18 @@ class XGBoostModel(AbstractModel):
         from xgboost import XGBClassifier, XGBRegressor

         model_type = XGBClassifier if self.problem_type in PROBLEM_TYPES_CLASSIFICATION else XGBRegressor
-
+
         import warnings

         with warnings.catch_warnings():
             # FIXME: v1.1: Upgrade XGBoost to 2.0.1+ to avoid deprecation warnings from Pandas 2.1+ during XGBoost fit.
             warnings.simplefilter(action="ignore", category=FutureWarning)
+            if params.get("device", "cpu") == "cuda:0":
+                # verbosity=0 to hide UserWarning: Falling back to prediction using DMatrix due to mismatched devices.
+                # TODO: Find a way to hide this warning without setting verbosity=0
+                # ref: https://github.com/dmlc/xgboost/issues/9791
+                params["verbosity"] = 0
+            self.model = model_type(**params)
             self.model.fit(X=X, y=y, eval_set=eval_set, verbose=False, sample_weight=sample_weight)

         if generate_curves:
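The XGBoost hunk above now constructs the estimator inside the warnings context and sets verbosity=0 when the device is "cuda:0", silencing the "Falling back to prediction using DMatrix due to mismatched devices" UserWarning (dmlc/xgboost#9791). A small sketch of that parameter adjustment in isolation; everything except the device/verbosity handling is illustrative and the prepare_xgb_params name is hypothetical:

def prepare_xgb_params(params: dict) -> dict:
    """Copy params, muting XGBoost output when training on the first CUDA device."""
    params = dict(params)  # avoid mutating the caller's dict
    if params.get("device", "cpu") == "cuda:0":
        # Hide the mismatched-devices UserWarning (xgboost issue #9791)
        # by silencing XGBoost's logging entirely for this model.
        params["verbosity"] = 0
    return params


print(prepare_xgb_params({"n_estimators": 300, "device": "cuda:0"}))
print(prepare_xgb_params({"n_estimators": 300}))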
autogluon/tabular/version.py
CHANGED
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: autogluon.tabular
-Version: 1.4.1b20250909
+Version: 1.4.1b20251006
 Summary: Fast and Accurate ML in 3 Lines of Code
 Home-page: https://github.com/autogluon/autogluon
 Author: AutoGluon Community
@@ -41,23 +41,23 @@ Requires-Dist: scipy<1.17,>=1.5.4
 Requires-Dist: pandas<2.4.0,>=2.0.0
 Requires-Dist: scikit-learn<1.8.0,>=1.4.0
 Requires-Dist: networkx<4,>=3.0
-Requires-Dist: autogluon.core==1.4.1b20250909
-Requires-Dist: autogluon.features==1.4.1b20250909
+Requires-Dist: autogluon.core==1.4.1b20251006
+Requires-Dist: autogluon.features==1.4.1b20251006
 Provides-Extra: all
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: huggingface-hub[torch]; extra == "all"
-Requires-Dist: spacy<3.9; extra == "all"
+Requires-Dist: autogluon.core[all]==1.4.1b20251006; extra == "all"
+Requires-Dist: numpy<2.3.0,>=1.25; extra == "all"
 Requires-Dist: lightgbm<4.7,>=4.0; extra == "all"
-Requires-Dist: einx; extra == "all"
-Requires-Dist: xgboost<3.1,>=2.0; extra == "all"
-Requires-Dist: omegaconf; extra == "all"
-Requires-Dist: autogluon.core[all]==1.4.1b20250909; extra == "all"
 Requires-Dist: catboost<1.3,>=1.2; extra == "all"
-Requires-Dist:
+Requires-Dist: loguru; extra == "all"
+Requires-Dist: fastai<2.9,>=2.3.1; extra == "all"
 Requires-Dist: torch<2.8,>=2.6; extra == "all"
+Requires-Dist: xgboost<3.1,>=2.0; extra == "all"
+Requires-Dist: omegaconf; extra == "all"
+Requires-Dist: spacy<3.9; extra == "all"
+Requires-Dist: transformers; extra == "all"
+Requires-Dist: einx; extra == "all"
 Requires-Dist: einops<0.9,>=0.7; extra == "all"
-Requires-Dist:
+Requires-Dist: huggingface-hub[torch]; extra == "all"
 Requires-Dist: blis<1.2.1,>=0.7.0; (platform_system == "Windows" and python_version == "3.9") and extra == "all"
 Provides-Extra: catboost
 Requires-Dist: numpy<2.3.0,>=1.25; extra == "catboost"
@@ -82,7 +82,7 @@ Requires-Dist: transformers; extra == "mitra"
 Requires-Dist: huggingface-hub[torch]; extra == "mitra"
 Requires-Dist: einops<0.9,>=0.7; extra == "mitra"
 Provides-Extra: ray
-Requires-Dist: autogluon.core[all]==1.4.1b20250909; extra == "ray"
+Requires-Dist: autogluon.core[all]==1.4.1b20251006; extra == "ray"
 Provides-Extra: realmlp
 Requires-Dist: pytabkit<1.7,>=1.6; extra == "realmlp"
 Provides-Extra: skex
@@ -94,24 +94,24 @@ Requires-Dist: onnxruntime-gpu<1.20.0,>=1.17.0; extra == "skl2onnx"
 Requires-Dist: onnx<1.18.0,>=1.13.0; platform_system != "Windows" and extra == "skl2onnx"
 Requires-Dist: onnx<1.16.2,>=1.13.0; platform_system == "Windows" and extra == "skl2onnx"
 Provides-Extra: tabarena
-Requires-Dist:
-Requires-Dist: xgboost<3.1,>=2.0; extra == "tabarena"
-Requires-Dist: pytabkit<1.7,>=1.6; extra == "tabarena"
-Requires-Dist: numpy<2.3.0,>=1.25; extra == "tabarena"
-Requires-Dist: einx; extra == "tabarena"
-Requires-Dist: autogluon.core[all]==1.4.1b20250909; extra == "tabarena"
+Requires-Dist: autogluon.core[all]==1.4.1b20251006; extra == "tabarena"
 Requires-Dist: lightgbm<4.7,>=4.0; extra == "tabarena"
-Requires-Dist:
+Requires-Dist: pytabkit<1.7,>=1.6; extra == "tabarena"
+Requires-Dist: xgboost<3.1,>=2.0; extra == "tabarena"
 Requires-Dist: omegaconf; extra == "tabarena"
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: huggingface-hub[torch]; extra == "tabarena"
+Requires-Dist: spacy<3.9; extra == "tabarena"
+Requires-Dist: interpret-core<0.8,>=0.7.2; extra == "tabarena"
 Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tabarena"
+Requires-Dist: tabpfn<2.2,>=2.0.9; extra == "tabarena"
+Requires-Dist: einx; extra == "tabarena"
+Requires-Dist: numpy<2.3.0,>=1.25; extra == "tabarena"
 Requires-Dist: catboost<1.3,>=1.2; extra == "tabarena"
 Requires-Dist: torch<2.8,>=2.6; extra == "tabarena"
+Requires-Dist: transformers; extra == "tabarena"
 Requires-Dist: einops<0.9,>=0.7; extra == "tabarena"
-Requires-Dist: interpret-core<0.8,>=0.7.2; extra == "tabarena"
 Requires-Dist: loguru; extra == "tabarena"
+Requires-Dist: fastai<2.9,>=2.3.1; extra == "tabarena"
+Requires-Dist: huggingface-hub[torch]; extra == "tabarena"
 Requires-Dist: blis<1.2.1,>=0.7.0; (platform_system == "Windows" and python_version == "3.9") and extra == "tabarena"
 Provides-Extra: tabicl
 Requires-Dist: tabicl<0.2,>=0.1.3; extra == "tabicl"
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/RECORD
RENAMED
@@ -1,6 +1,6 @@
-autogluon.tabular-1.4.
+autogluon.tabular-1.4.1b20251006-py3.9-nspkg.pth,sha256=cQGwpuGPqg1GXscIwt-7PmME1OnSpD-7ixkikJ31WAY,554
 autogluon/tabular/__init__.py,sha256=2OXpJCvENRHubBTYNIPpHX93WWuFZzsJBtTZbNVHVas,400
-autogluon/tabular/version.py,sha256=
+autogluon/tabular/version.py,sha256=vVZUVsC-_j3dHtfP4Mqpe1c8X8CyPCQYUAA3dkds0UQ,91
 autogluon/tabular/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/configs/config_helper.py,sha256=Rby5gRhuY5IlZWdKbtsmzbSt948B97qxwQ2f1MbH_38,21070
 autogluon/tabular/configs/feature_generator_presets.py,sha256=EV5Ym8VW15q92MwOUpTi7wZFS2QooM51fLg3RdUsn-M,1223
@@ -27,7 +27,7 @@ autogluon/tabular/models/automm/automm_model.py,sha256=MoydDuPEd5atbUPlVDzWLTKLB
 autogluon/tabular/models/automm/ft_transformer.py,sha256=X-IEi5uKme7SoRcHnPjGTByzrjCB85I7RpB0hS36TLQ,3897
 autogluon/tabular/models/catboost/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/catboost/callbacks.py,sha256=QvyiynQoxjvfYaYwGNSF5N3gc_wqI9mi1nQiawL0EJ4,7194
-autogluon/tabular/models/catboost/catboost_model.py,sha256=
+autogluon/tabular/models/catboost/catboost_model.py,sha256=xDL5x4YG7BI79LbfCVdJz1IGLbMWdgNtnpL5TzCYHaA,18214
 autogluon/tabular/models/catboost/catboost_softclass_utils.py,sha256=UiW0SUb3hFueW5qYtQn6Sbk7Wg7BWN4jqKWeFtbMvgU,3919
 autogluon/tabular/models/catboost/catboost_utils.py,sha256=zJMIsbgyW_JH0eULhUeu_TWR0Qfmf34CnED7c7NvXBw,3899
 autogluon/tabular/models/catboost/hyperparameters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -57,18 +57,18 @@ autogluon/tabular/models/imodels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
 autogluon/tabular/models/imodels/imodels_models.py,sha256=89uQwbRAtqcUvPwYsKnER8SUMIbwkGZUd9spoG_mP10,4878
 autogluon/tabular/models/knn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/knn/_knn_loo_variants.py,sha256=-n2znYS7OBA0bZvtei6JZiEMRWp4GX-Qp64uheaHyhQ,4562
-autogluon/tabular/models/knn/knn_model.py,sha256=
+autogluon/tabular/models/knn/knn_model.py,sha256=I7wPRy38oD03f_3KN7Q_CyoJJucDPrPQyJqjgovmx8Q,14061
 autogluon/tabular/models/knn/knn_rapids_model.py,sha256=0FFApNZFH8nyrDqlBSUV7jO-2fLe0-h_UHp1GsyQJ8E,1550
 autogluon/tabular/models/knn/knn_utils.py,sha256=XU1cxVXp1BAoQnja2_KmSIn9_q9gZkjAya7-9b0uStk,7455
 autogluon/tabular/models/lgb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/lgb/callbacks.py,sha256=KJB1KmebA88qHT206KSfvm5NamGuv5lRzy7O9dOwW-M,12243
-autogluon/tabular/models/lgb/lgb_model.py,sha256=
+autogluon/tabular/models/lgb/lgb_model.py,sha256=UWkBeVkEvOXgKaZCRILyMyC8NfnA1UA3t-wuvWYy8pY,27677
 autogluon/tabular/models/lgb/lgb_utils.py,sha256=jzTDTzP-z7gcBGZyy1_0YkyTOLbU5DLeRqtil4FCZPI,7382
 autogluon/tabular/models/lgb/hyperparameters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/lgb/hyperparameters/parameters.py,sha256=LLEQ-Ns3HElWBsFJx3ogRV7L6qw_nXlcl7EyO0C0fVQ,1336
 autogluon/tabular/models/lgb/hyperparameters/searchspaces.py,sha256=tvNNR7niWz_B-PndYQXb6vVNABxSfBYRHj6ZVQJ1x2E,1930
 autogluon/tabular/models/lr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/models/lr/lr_model.py,sha256=
+autogluon/tabular/models/lr/lr_model.py,sha256=o4DM6-EvFolh6QNmInWwn7_0h1itbTAT38o0cVBRmCc,15886
 autogluon/tabular/models/lr/lr_preprocessing_utils.py,sha256=zkmVZtv05BQPDasVBz1J8LmXEfLgoggsv57s6cXuTMQ,1094
 autogluon/tabular/models/lr/lr_rapids_model.py,sha256=XIB1KCPPfBZMxTRC3Wc1Dsl5NTMQSM_m8Uc2igyTLX8,3939
 autogluon/tabular/models/lr/hyperparameters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -103,7 +103,7 @@ autogluon/tabular/models/mitra/_internal/utils/set_seed.py,sha256=UnXzYfhmfT_tNA
 autogluon/tabular/models/realmlp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/realmlp/realmlp_model.py,sha256=ASplFpuDmzm-PMjaG_V7swhAgcowr5qYZo8QcsHDltA,14740
 autogluon/tabular/models/rf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autogluon/tabular/models/rf/rf_model.py,sha256=
+autogluon/tabular/models/rf/rf_model.py,sha256=c6Ec-qCxWN-6IlllBj2Cp0qV9m95B8Ai6Ft2MOpW2AM,21917
 autogluon/tabular/models/rf/rf_quantile.py,sha256=2S8FE8po9lMnZaeKuVkzOUFOcdil46ZbFqm49OuvNZY,36460
 autogluon/tabular/models/rf/rf_rapids_model.py,sha256=3s-8M11dzCl_2Lu5iB3H8YjHLgyP_SElrm_4w_HfmqY,2028
 autogluon/tabular/models/rf/compilers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -170,7 +170,7 @@ autogluon/tabular/models/text_prediction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
 autogluon/tabular/models/text_prediction/text_prediction_v1_model.py,sha256=PBN7F98qgEAO6U76rV_hxZfAmKr_XpVKjElOdBvfX8c,1090
 autogluon/tabular/models/xgboost/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/xgboost/callbacks.py,sha256=PuRQUg3AEjgvFa-dpstRFoEVM9jHDe5W4XYSdDPRqoE,7009
-autogluon/tabular/models/xgboost/xgboost_model.py,sha256=
+autogluon/tabular/models/xgboost/xgboost_model.py,sha256=xzfvUKqmlv87n8I92D2QWH9EnCNQ3AKsOrlOEtrsdco,15796
 autogluon/tabular/models/xgboost/xgboost_utils.py,sha256=FVqZ8h4JAe_pifSvNx83cLZHwsuzTXylrrcan07AoNo,5757
 autogluon/tabular/models/xgboost/hyperparameters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/models/xgboost/hyperparameters/parameters.py,sha256=ay6bVVpiPzftbtz6TTS76w7j4vjDjzHFpuf2Bjf6Zu4,1673
@@ -195,11 +195,11 @@ autogluon/tabular/trainer/model_presets/presets.py,sha256=hoWADaOG576Q_XLV1nY_ju
 autogluon/tabular/trainer/model_presets/presets_distill.py,sha256=MnFC2GJc6RmDBNAGbsO2XMfo3PjR8cUrZoilWW8gTYQ,3295
 autogluon/tabular/tuning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autogluon/tabular/tuning/feature_pruner.py,sha256=9iNku8gVbYEkjuKlyITPJDicsNkoraaQOlINQq9iZlQ,6877
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
-autogluon.tabular-1.4.
+autogluon.tabular-1.4.1b20251006.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+autogluon.tabular-1.4.1b20251006.dist-info/METADATA,sha256=q0tY9w1D1oFzekDjODz7GsYkxnnfdYtc3KyYNAfTMzw,16451
+autogluon.tabular-1.4.1b20251006.dist-info/NOTICE,sha256=7nPQuj8Kp-uXsU0S5so3-2dNU5EctS5hDXvvzzehd7E,114
+autogluon.tabular-1.4.1b20251006.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+autogluon.tabular-1.4.1b20251006.dist-info/namespace_packages.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.tabular-1.4.1b20251006.dist-info/top_level.txt,sha256=giERA4R78OkJf2ijn5slgjURlhRPzfLr7waIcGkzYAo,10
+autogluon.tabular-1.4.1b20251006.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+autogluon.tabular-1.4.1b20251006.dist-info/RECORD,,
/autogluon.tabular-1.4.1b20250909-py3.9-nspkg.pth → /autogluon.tabular-1.4.1b20251006-py3.9-nspkg.pth
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/LICENSE
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/NOTICE
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/WHEEL
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/namespace_packages.txt
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/top_level.txt
RENAMED
File without changes
{autogluon.tabular-1.4.1b20250909.dist-info → autogluon.tabular-1.4.1b20251006.dist-info}/zip-safe
RENAMED
File without changes