unifiedbooster 0.4.2-py3-none-any.whl → 0.5.0-py3-none-any.whl
- unifiedbooster/gbdt.py +3 -1
- unifiedbooster/gbdt_classification.py +36 -36
- unifiedbooster/gbdt_regression.py +36 -36
- unifiedbooster/gpoptimization.py +12 -6
- {unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/METADATA +1 -1
- unifiedbooster-0.5.0.dist-info/RECORD +11 -0
- unifiedbooster-0.4.2.dist-info/RECORD +0 -11
- {unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/LICENSE +0 -0
- {unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/WHEEL +0 -0
- {unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/entry_points.txt +0 -0
- {unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/top_level.txt +0 -0
unifiedbooster/gbdt.py
CHANGED
@@ -90,7 +90,9 @@ class GBDT(BaseEstimator):
                 "depth": self.max_depth,
                 "verbose": self.verbose,
                 "random_seed": self.seed,
-                "
+                "boosting_type": "Plain",
+                "leaf_estimation_iterations": 1,
+                "bootstrap_type": "Bernoulli",
                 **kwargs,
             }
         elif self.model_type == "gradientboosting":
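For context, the three keys added to the CatBoost parameter dict are standard CatBoost constructor options (plain rather than ordered boosting, a single leaf-estimation iteration, Bernoulli row subsampling). Below is a minimal standalone sketch of what that configuration amounts to when passed directly to catboost; it is an illustration only, not the package's internal code, and the concrete values for depth, verbose, and random_seed are placeholders standing in for `self.max_depth`, `self.verbose`, and `self.seed`.

```python
# Illustrative only: roughly the CatBoost configuration implied by the new 0.5.0 defaults.
# Assumes catboost is installed; the exact dict built inside unifiedbooster/gbdt.py may
# contain more keys than this diff shows.
from catboost import CatBoostClassifier

params = {
    "depth": 3,                       # placeholder for self.max_depth
    "verbose": 0,                     # placeholder for self.verbose
    "random_seed": 123,               # placeholder for self.seed
    "boosting_type": "Plain",         # added in 0.5.0: plain (non-ordered) boosting
    "leaf_estimation_iterations": 1,  # added in 0.5.0: one gradient step per leaf
    "bootstrap_type": "Bernoulli",    # added in 0.5.0: Bernoulli row subsampling
}

clf = CatBoostClassifier(**params)
# clf.fit(X_train, y_train) would then train CatBoost with these settings.
```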
unifiedbooster/gbdt_classification.py
CHANGED
@@ -51,42 +51,42 @@ class GBDTClassifier(GBDT, ClassifierMixin):
 
     Examples:
 
-        … (36 removed lines not shown in this diff view)
+        ```python
+        import unifiedbooster as ub
+        from sklearn.datasets import load_iris
+        from sklearn.model_selection import train_test_split
+        from sklearn.metrics import accuracy_score
+
+        # Load dataset
+        iris = load_iris()
+        X, y = iris.data, iris.target
+
+        # Split dataset into training and testing sets
+        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+
+        # Initialize the unified regressor (example with XGBoost)
+        regressor1 = ub.GBDTClassifier(model_type='xgboost')
+        #regressor2 = ub.GBDTClassifier(model_type='catboost')
+        regressor3 = ub.GBDTClassifier(model_type='lightgbm')
+
+        # Fit the model
+        regressor1.fit(X_train, y_train)
+        #regressor2.fit(X_train, y_train)
+        regressor3.fit(X_train, y_train)
+
+        # Predict on the test set
+        y_pred1 = regressor1.predict(X_test)
+        #y_pred2 = regressor2.predict(X_test)
+        y_pred3 = regressor3.predict(X_test)
+
+        # Evaluate the model
+        accuracy1 = accuracy_score(y_test, y_pred1)
+        #accuracy2 = accuracy_score(y_test, y_pred2)
+        accuracy3 = accuracy_score(y_test, y_pred3)
+        print(f"Classification Accuracy xgboost: {accuracy1:.2f}")
+        #print(f"Classification Accuracy catboost: {accuracy2:.2f}")
+        print(f"Classification Accuracy lightgbm: {accuracy3:.2f}")
+        ```
     """
 
     def __init__(
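Given the CatBoost defaults added in gbdt.py above, the commented-out catboost variant of this docstring example should run the same way as the xgboost and lightgbm ones. A minimal sketch, assuming catboost is installed and using only the API shown in the example itself:

```python
import unifiedbooster as ub
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Same data setup as the docstring example above
X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Same pattern as the example, with the catboost backend enabled
clf_cb = ub.GBDTClassifier(model_type='catboost')
clf_cb.fit(X_train, y_train)
print(f"Classification Accuracy catboost: {accuracy_score(y_test, clf_cb.predict(X_test)):.2f}")
```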
unifiedbooster/gbdt_regression.py
CHANGED
@@ -51,42 +51,42 @@ class GBDTRegressor(GBDT, RegressorMixin):
 
     Examples:
 
-        … (36 removed lines not shown in this diff view)
+        ```python
+        import unifiedbooster as ub
+        from sklearn.datasets import fetch_california_housing
+        from sklearn.model_selection import train_test_split
+        from sklearn.metrics import mean_squared_error
+
+        # Load dataset
+        housing = fetch_california_housing()
+        X, y = housing.data, housing.target
+
+        # Split dataset into training and testing sets
+        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+
+        # Initialize the unified regressor (example with XGBoost)
+        regressor1 = ub.GBDTRegressor(model_type='xgboost')
+        #regressor2 = ub.GBDTRegressor(model_type='catboost')
+        regressor3 = ub.GBDTRegressor(model_type='lightgbm')
+
+        # Fit the model
+        regressor1.fit(X_train, y_train)
+        #regressor2.fit(X_train, y_train)
+        regressor3.fit(X_train, y_train)
+
+        # Predict on the test set
+        y_pred1 = regressor1.predict(X_test)
+        #y_pred2 = regressor2.predict(X_test)
+        y_pred3 = regressor3.predict(X_test)
+
+        # Evaluate the model
+        mse1 = mean_squared_error(y_test, y_pred1)
+        #mse2 = mean_squared_error(y_test, y_pred2)
+        mse3 = mean_squared_error(y_test, y_pred3)
+        print(f"Regression Mean Squared Error xgboost: {mse1:.2f}")
+        #print(f"Regression Mean Squared Error catboost: {mse2:.2f}")
+        print(f"Regression Mean Squared Error lightgbm: {mse3:.2f}")
+        ```
     """
 
     def __init__(
unifiedbooster/gpoptimization.py
CHANGED
@@ -325,9 +325,14 @@ def lazy_cross_val_optim(
     Examples:
 
         ```python
+        import os
         import unifiedbooster as ub
         from sklearn.datasets import load_breast_cancer
         from sklearn.model_selection import train_test_split
+        from sklearn.metrics import accuracy_score
+        from time import time
+
+        print(f"\n ----- Running: {os.path.basename(__file__)}... ----- \n")
 
         dataset = load_breast_cancer()
         X, y = dataset.data, dataset.target
@@ -335,25 +340,26 @@ def lazy_cross_val_optim(
             X, y, test_size=0.2, random_state=42
         )
 
-        … (1 removed line not shown in this diff view)
+        start = time()
+        res4 = ub.lazy_cross_val_optim(
             X_train,
             y_train,
-            X_test=
-            y_test=
+            X_test=X_test,
+            y_test=y_test,
             model_type="lightgbm",
             type_fit="classification",
             scoring="accuracy",
             n_estimators=100,
-            surrogate_obj=None,
             cv=5,
             n_jobs=None,
             n_init=10,
             n_iter=190,
             abs_tol=1e-3,
-            verbose=2,
             seed=123,
+            customize=False
        )
-        print(
+        print(f"Elapsed: {time()-start}")
+        print(res4)
         ```
     """
 
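The updated docstring example above only covers the classification path. Judging by the parameter names in that call, a regression run would presumably just swap `type_fit` and the scoring string; the sketch below keeps every argument shown in the diff and assumes that `scoring` accepts scikit-learn-style strings such as "neg_mean_squared_error", which is not something the diff itself confirms.

```python
# Hedged sketch of a regression call to lazy_cross_val_optim; argument names are
# taken from the docstring example above, "neg_mean_squared_error" is an assumed
# scikit-learn-style scoring string.
import unifiedbooster as ub
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split

X, y = fetch_california_housing(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

res = ub.lazy_cross_val_optim(
    X_train,
    y_train,
    X_test=X_test,
    y_test=y_test,
    model_type="xgboost",
    type_fit="regression",
    scoring="neg_mean_squared_error",
    n_estimators=100,
    cv=5,
    n_jobs=None,
    n_init=10,
    n_iter=190,
    abs_tol=1e-3,
    seed=123,
    customize=False
)
print(res)
```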
unifiedbooster-0.5.0.dist-info/RECORD
ADDED
@@ -0,0 +1,11 @@
+unifiedbooster/__init__.py,sha256=8FEkWCZ2tT8xcW46Z0X_BS9_r0kQWVAu37IncLq6QWU,301
+unifiedbooster/gbdt.py,sha256=u5Sjw-V8BlDS4LUo_SNOfuz66EFcJhP1Al6Es41R_X8,4932
+unifiedbooster/gbdt_classification.py,sha256=wifw86cUvsyiKSz8MTxIgH6j7Gd1voIxXUiJVsE68bk,4219
+unifiedbooster/gbdt_regression.py,sha256=YQIDtW4hV7DxHAHuoMMkD1aRy0dzVXxx2rwPu3InTA8,3710
+unifiedbooster/gpoptimization.py,sha256=S-yZI8qI_QZyoCqWj8MT0a2Djlo3YrYRjyXApLS9FXM,12831
+unifiedbooster-0.5.0.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
+unifiedbooster-0.5.0.dist-info/METADATA,sha256=mao-q4w_f26KVwKSy4ZPEJBZQIRARtXsWEN7t7JEwRw,955
+unifiedbooster-0.5.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+unifiedbooster-0.5.0.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
+unifiedbooster-0.5.0.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
+unifiedbooster-0.5.0.dist-info/RECORD,,
unifiedbooster-0.4.2.dist-info/RECORD
DELETED
@@ -1,11 +0,0 @@
-unifiedbooster/__init__.py,sha256=8FEkWCZ2tT8xcW46Z0X_BS9_r0kQWVAu37IncLq6QWU,301
-unifiedbooster/gbdt.py,sha256=1qVdOeoEyBxxbJ7HBHZegGJo2d2onXs73o8_JntOtN8,4819
-unifiedbooster/gbdt_classification.py,sha256=RLoM_lCmvEDrpNLRFlEzwKBA2oc0mkYUVKLFOTYAPrs,4099
-unifiedbooster/gbdt_regression.py,sha256=Eavj3mV5Lsjpx-d03GLgT8GrwEYuBmBEWkUyDPcJu_g,3590
-unifiedbooster/gpoptimization.py,sha256=xomHqQHu1wvG2wDdmErY8fYgB39pmNMo0-IvJdwEpoM,12606
-unifiedbooster-0.4.2.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
-unifiedbooster-0.4.2.dist-info/METADATA,sha256=FiWDX64O41lbiNDL406XjArYUcnoIKKAZjNdxkzbHGo,955
-unifiedbooster-0.4.2.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
-unifiedbooster-0.4.2.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
-unifiedbooster-0.4.2.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
-unifiedbooster-0.4.2.dist-info/RECORD,,
{unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/LICENSE
File without changes
{unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/WHEEL
File without changes
{unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/entry_points.txt
File without changes
{unifiedbooster-0.4.2.dist-info → unifiedbooster-0.5.0.dist-info}/top_level.txt
File without changes