unifiedbooster 0.3.0__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff shows the content changes between two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
unifiedbooster/__init__.py CHANGED
@@ -1,5 +1,12 @@
  from .gbdt import GBDT
  from .gbdt_classification import GBDTClassifier
  from .gbdt_regression import GBDTRegressor
+ from .gpoptimization import cross_val_optim, lazy_cross_val_optim

- __all__ = ["GBDT", "GBDTClassifier", "GBDTRegressor"]
+ __all__ = [
+     "GBDT",
+     "GBDTClassifier",
+     "GBDTRegressor",
+     "cross_val_optim",
+     "lazy_cross_val_optim",
+ ]
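With 0.5.0, `cross_val_optim` and `lazy_cross_val_optim` become part of the public API. A minimal sketch of the new surface, assuming only the names exported above (the dataset and the small search budget are illustrative, not from the diff):

```python
import unifiedbooster as ub
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

# Bayesian hyperparameter search, now reachable from the package top level
res = ub.cross_val_optim(
    X_train,
    y_train,
    model_type="lightgbm",
    type_fit="classification",
    n_init=5,
    n_iter=10,  # small budget for a quick smoke test
)
print(res.best_params)
```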
unifiedbooster/gbdt.py CHANGED
@@ -17,6 +17,9 @@ class GBDT(BaseEstimator):
      learning_rate: float
          shrinkage rate; used for reducing the gradient step

+     max_depth: int
+         maximum tree depth
+
      rowsample: float
          subsample ratio of the training instances

@@ -87,6 +90,8 @@ class GBDT(BaseEstimator):
              "depth": self.max_depth,
              "verbose": self.verbose,
              "random_seed": self.seed,
+             "boosting_type": "Plain",
+             "leaf_estimation_iterations": 1,
              "bootstrap_type": "Bernoulli",
              **kwargs,
          }
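The two new keys pin CatBoost to the plain boosting scheme with a single leaf-estimation step per iteration. For reference, a sketch of the equivalent standalone CatBoost configuration; the surrounding values are illustrative defaults, not taken from the wrapper:

```python
from catboost import CatBoostClassifier

# Illustrative equivalent of the params dict built above
params = {
    "learning_rate": 0.1,
    "depth": 3,                       # mapped from the wrapper's max_depth
    "verbose": 0,
    "random_seed": 123,
    "boosting_type": "Plain",         # new in 0.5.0
    "leaf_estimation_iterations": 1,  # new in 0.5.0
    "bootstrap_type": "Bernoulli",
}
clf = CatBoostClassifier(**params)
```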
unifiedbooster/gbdt_classification.py CHANGED
@@ -1,12 +1,18 @@
  from .gbdt import GBDT
  from sklearn.base import ClassifierMixin
- from xgboost import XGBClassifier

+ try:
+     from xgboost import XGBClassifier
+ except:
+     pass
  try:
      from catboost import CatBoostClassifier
  except:
-     print("catboost package can't be built")
- from lightgbm import LGBMClassifier
+     pass
+ try:
+     from lightgbm import LGBMClassifier
+ except:
+     pass
  from sklearn.ensemble import GradientBoostingClassifier


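All three third-party backends are now optional at import time, so a missing library no longer breaks `import unifiedbooster`. A sketch of the same guard in its more common variant, with an explicit exception class and a sentinel (the shipped code uses a bare `except` and no sentinel):

```python
try:
    from xgboost import XGBClassifier
except ImportError:
    XGBClassifier = None  # backend not installed

if XGBClassifier is None:
    print("xgboost unavailable; choose another model_type")
```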
@@ -25,6 +31,9 @@ class GBDTClassifier(GBDT, ClassifierMixin):
      learning_rate: float
          shrinkage rate; used for reducing the gradient step

+     max_depth: int
+         maximum tree depth
+
      rowsample: float
          subsample ratio of the training instances

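The newly documented `max_depth` is part of the unified constructor signature and maps onto each backend's own depth control (e.g. CatBoost's `depth`, as seen in gbdt.py above). A one-line illustration:

```python
import unifiedbooster as ub

# max_depth is forwarded to the selected backend
clf = ub.GBDTClassifier(model_type="lightgbm", max_depth=4)
```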
@@ -42,42 +51,42 @@ class GBDTClassifier(GBDT, ClassifierMixin):

      Examples:

      (whitespace-only hunk: the example block was re-indented in 0.5.0 and is shown once here; its content is unchanged)

      ```python
      import unifiedbooster as ub
      from sklearn.datasets import load_iris
      from sklearn.model_selection import train_test_split
      from sklearn.metrics import accuracy_score

      # Load dataset
      iris = load_iris()
      X, y = iris.data, iris.target

      # Split dataset into training and testing sets
      X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

      # Initialize the unified regressor (example with XGBoost)
      regressor1 = ub.GBDTClassifier(model_type='xgboost')
      #regressor2 = ub.GBDTClassifier(model_type='catboost')
      regressor3 = ub.GBDTClassifier(model_type='lightgbm')

      # Fit the model
      regressor1.fit(X_train, y_train)
      #regressor2.fit(X_train, y_train)
      regressor3.fit(X_train, y_train)

      # Predict on the test set
      y_pred1 = regressor1.predict(X_test)
      #y_pred2 = regressor2.predict(X_test)
      y_pred3 = regressor3.predict(X_test)

      # Evaluate the model
      accuracy1 = accuracy_score(y_test, y_pred1)
      #accuracy2 = accuracy_score(y_test, y_pred2)
      accuracy3 = accuracy_score(y_test, y_pred3)
      print(f"Classification Accuracy xgboost: {accuracy1:.2f}")
      #print(f"Classification Accuracy catboost: {accuracy2:.2f}")
      print(f"Classification Accuracy lightgbm: {accuracy3:.2f}")
      ```
      """

      def __init__(
unifiedbooster/gbdt_regression.py CHANGED
@@ -1,12 +1,18 @@
  from .gbdt import GBDT
  from sklearn.base import RegressorMixin
- from xgboost import XGBRegressor

+ try:
+     from xgboost import XGBRegressor
+ except:
+     pass
  try:
      from catboost import CatBoostRegressor
  except:
-     print("catboost package can't be built")
- from lightgbm import LGBMRegressor
+     pass
+ try:
+     from lightgbm import LGBMRegressor
+ except:
+     pass
  from sklearn.ensemble import GradientBoostingRegressor


@@ -25,6 +31,9 @@ class GBDTRegressor(GBDT, RegressorMixin):
      learning_rate: float
          shrinkage rate; used for reducing the gradient step

+     max_depth: int
+         maximum tree depth
+
      rowsample: float
          subsample ratio of the training instances

@@ -42,42 +51,42 @@ class GBDTRegressor(GBDT, RegressorMixin):

      Examples:

      (whitespace-only hunk: the example block was re-indented in 0.5.0 and is shown once here; its content is unchanged)

      ```python
      import unifiedbooster as ub
      from sklearn.datasets import fetch_california_housing
      from sklearn.model_selection import train_test_split
      from sklearn.metrics import mean_squared_error

      # Load dataset
      housing = fetch_california_housing()
      X, y = housing.data, housing.target

      # Split dataset into training and testing sets
      X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

      # Initialize the unified regressor (example with XGBoost)
      regressor1 = ub.GBDTRegressor(model_type='xgboost')
      #regressor2 = ub.GBDTRegressor(model_type='catboost')
      regressor3 = ub.GBDTRegressor(model_type='lightgbm')

      # Fit the model
      regressor1.fit(X_train, y_train)
      #regressor2.fit(X_train, y_train)
      regressor3.fit(X_train, y_train)

      # Predict on the test set
      y_pred1 = regressor1.predict(X_test)
      #y_pred2 = regressor2.predict(X_test)
      y_pred3 = regressor3.predict(X_test)

      # Evaluate the model
      mse1 = mean_squared_error(y_test, y_pred1)
      #mse2 = mean_squared_error(y_test, y_pred2)
      mse3 = mean_squared_error(y_test, y_pred3)
      print(f"Regression Mean Squared Error xgboost: {mse1:.2f}")
      #print(f"Regression Mean Squared Error catboost: {mse2:.2f}")
      print(f"Regression Mean Squared Error lightgbm: {mse3:.2f}")
      ```
      """

      def __init__(
unifiedbooster/gpoptimization.py ADDED
@@ -0,0 +1,428 @@
+ import GPopt as gp
+ import nnetsauce as ns
+ import numpy as np
+ from collections import namedtuple
+ from .gbdt_classification import GBDTClassifier
+ from .gbdt_regression import GBDTRegressor
+ from sklearn.model_selection import cross_val_score
+ from sklearn.base import ClassifierMixin, RegressorMixin
+ from sklearn.utils import all_estimators
+ from sklearn import metrics
+
+
+ def cross_val_optim(
+     X_train,
+     y_train,
+     X_test=None,
+     y_test=None,
+     model_type="xgboost",
+     type_fit="classification",
+     scoring="accuracy",
+     n_estimators=100,
+     surrogate_obj=None,
+     cv=5,
+     n_jobs=None,
+     n_init=10,
+     n_iter=190,
+     abs_tol=1e-3,
+     verbose=2,
+     seed=123,
+ ):
+     """Cross-validation function and hyperparameters' search
+
+     Parameters:
+
+         X_train: array-like,
+             Training vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         y_train: array-like,
+             Training vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         X_test: array-like,
+             Testing vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         y_test: array-like,
+             Testing vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         model_type: str
+             type of gradient boosting algorithm: 'xgboost', 'lightgbm',
+             'catboost', 'gradientboosting'
+
+         type_fit: str
+             "regression" or "classification"
+
+         scoring: str
+             scoring metric; see https://scikit-learn.org/stable/modules/model_evaluation.html#the-scoring-parameter-defining-model-evaluation-rules
+
+         n_estimators: int
+             maximum number of trees that can be built
+
+         surrogate_obj: an object;
+             An ML model for estimating the uncertainty around the objective function
+
+         cv: int;
+             number of cross-validation folds
+
+         n_jobs: int;
+             number of jobs for parallel execution
+
+         n_init: an integer;
+             number of points in the initial setting, when `x_init` and `y_init` are not provided
+
+         n_iter: an integer;
+             number of iterations of the minimization algorithm
+
+         abs_tol: a float;
+             tolerance for convergence of the optimizer (early stopping based on acquisition function)
+
+         verbose: int
+             controls verbosity
+
+         seed: int
+             reproducibility seed
+
+     Examples:
+
+         ```python
+         import unifiedbooster as ub
+         from sklearn.datasets import load_breast_cancer
+         from sklearn.model_selection import train_test_split
+
+         dataset = load_breast_cancer()
+         X, y = dataset.data, dataset.target
+         X_train, X_test, y_train, y_test = train_test_split(
+             X, y, test_size=0.2, random_state=42
+         )
+
+         res1 = ub.cross_val_optim(
+             X_train,
+             y_train,
+             X_test=None,
+             y_test=None,
+             model_type="lightgbm",
+             type_fit="classification",
+             scoring="accuracy",
+             n_estimators=100,
+             surrogate_obj=None,
+             cv=5,
+             n_jobs=None,
+             n_init=10,
+             n_iter=190,
+             abs_tol=1e-3,
+             verbose=2,
+             seed=123,
+         )
+         print(res1)
+         ```
+     """
+
+     def gbdt_cv(
+         X_train,
+         y_train,
+         model_type="xgboost",
+         n_estimators=100,
+         learning_rate=0.1,
+         max_depth=3,
+         rowsample=1.0,
+         colsample=1.0,
+         cv=5,
+         n_jobs=None,
+         type_fit="classification",
+         scoring="accuracy",
+         seed=123,
+     ):
+         if type_fit == "regression":
+             estimator = GBDTRegressor(
+                 model_type=model_type,
+                 n_estimators=n_estimators,
+                 learning_rate=learning_rate,
+                 max_depth=max_depth,
+                 rowsample=rowsample,
+                 colsample=colsample,
+                 verbose=0,
+                 seed=seed,
+             )
+         elif type_fit == "classification":
+             estimator = GBDTClassifier(
+                 model_type=model_type,
+                 n_estimators=n_estimators,
+                 learning_rate=learning_rate,
+                 max_depth=max_depth,
+                 rowsample=rowsample,
+                 colsample=colsample,
+                 verbose=0,
+                 seed=seed,
+             )
+         return -cross_val_score(
+             estimator,
+             X_train,
+             y_train,
+             scoring=scoring,
+             cv=cv,
+             n_jobs=n_jobs,
+             verbose=0,
+         ).mean()
+
+     # objective function for hyperparams tuning
+     def crossval_objective(xx):
+         return gbdt_cv(
+             X_train=X_train,
+             y_train=y_train,
+             model_type=model_type,
+             n_estimators=n_estimators,
+             learning_rate=10 ** xx[0],
+             max_depth=int(xx[1]),
+             rowsample=xx[2],
+             colsample=xx[3],
+             cv=cv,
+             n_jobs=n_jobs,
+             type_fit=type_fit,
+             scoring=scoring,
+             seed=seed,
+         )
+
+     if surrogate_obj is None:
+         gp_opt = gp.GPOpt(
+             objective_func=crossval_objective,
+             lower_bound=np.array([-6, 1, 0.5, 0.5]),
+             upper_bound=np.array([0, 16, 1.0, 1.0]),
+             params_names=[
+                 "learning_rate",
+                 "max_depth",
+                 "rowsample",
+                 "colsample",
+             ],
+             method="bayesian",
+             n_init=n_init,
+             n_iter=n_iter,
+             seed=seed,
+         )
+     else:
+         gp_opt = gp.GPOpt(
+             objective_func=crossval_objective,
+             lower_bound=np.array([-6, 1, 0.5, 0.5]),
+             upper_bound=np.array([0, 16, 1.0, 1.0]),
+             params_names=[
+                 "learning_rate",
+                 "max_depth",
+                 "rowsample",
+                 "colsample",
+             ],
+             acquisition="ucb",
+             method="splitconformal",
+             surrogate_obj=ns.PredictionInterval(
+                 obj=surrogate_obj, method="splitconformal"
+             ),
+             n_init=n_init,
+             n_iter=n_iter,
+             seed=seed,
+         )
+
+     res = gp_opt.optimize(verbose=verbose, abs_tol=abs_tol)
+     res.best_params["model_type"] = model_type
+     res.best_params["n_estimators"] = int(n_estimators)
+     res.best_params["learning_rate"] = 10 ** res.best_params["learning_rate"]
+     res.best_params["max_depth"] = int(res.best_params["max_depth"])
+     res.best_params["rowsample"] = res.best_params["rowsample"]
+     res.best_params["colsample"] = res.best_params["colsample"]
+
+     # out-of-sample error
+     if X_test is not None and y_test is not None:
+         if type_fit == "regression":
+             estimator = GBDTRegressor(**res.best_params, verbose=0, seed=seed)
+         elif type_fit == "classification":
+             estimator = GBDTClassifier(**res.best_params, verbose=0, seed=seed)
+         preds = estimator.fit(X_train, y_train).predict(X_test)
+         # check error on y_test
+         oos_err = getattr(metrics, scoring + "_score")(
+             y_true=y_test, y_pred=preds
+         )
+         result = namedtuple("result", res._fields + ("test_" + scoring,))
+         return result(*res, oos_err)
+     else:
+         return res
+
+
+ def lazy_cross_val_optim(
+     X_train,
+     y_train,
+     X_test=None,
+     y_test=None,
+     model_type="xgboost",
+     type_fit="classification",
+     scoring="accuracy",
+     customize=False,
+     n_estimators=100,
+     cv=5,
+     n_jobs=None,
+     n_init=10,
+     n_iter=190,
+     abs_tol=1e-3,
+     verbose=1,
+     seed=123,
+ ):
+     """Automated Cross-validation function and hyperparameters' search using multiple surrogates
+
+     Parameters:
+
+         X_train: array-like,
+             Training vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         y_train: array-like,
+             Training vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         X_test: array-like,
+             Testing vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         y_test: array-like,
+             Testing vectors, where rows is the number of samples
+             and columns is the number of features.
+
+         model_type: str
+             type of gradient boosting algorithm: 'xgboost', 'lightgbm',
+             'catboost', 'gradientboosting'
+
+         type_fit: str
+             "regression" or "classification"
+
+         scoring: str
+             scoring metric; see https://scikit-learn.org/stable/modules/model_evaluation.html#the-scoring-parameter-defining-model-evaluation-rules
+
+         customize: boolean
+             if True, the surrogate is transformed into a quasi-randomized network (default is False)
+
+         n_estimators: int
+             maximum number of trees that can be built
+
+         cv: int;
+             number of cross-validation folds
+
+         n_jobs: int;
+             number of jobs for parallel execution
+
+         n_init: an integer;
+             number of points in the initial setting, when `x_init` and `y_init` are not provided
+
+         n_iter: an integer;
+             number of iterations of the minimization algorithm
+
+         abs_tol: a float;
+             tolerance for convergence of the optimizer (early stopping based on acquisition function)
+
+         verbose: int
+             controls verbosity
+
+         seed: int
+             reproducibility seed
+
+     Examples:
+
+         ```python
+         import os
+         import unifiedbooster as ub
+         from sklearn.datasets import load_breast_cancer
+         from sklearn.model_selection import train_test_split
+         from sklearn.metrics import accuracy_score
+         from time import time
+
+         print(f"\n ----- Running: {os.path.basename(__file__)}... ----- \n")
+
+         dataset = load_breast_cancer()
+         X, y = dataset.data, dataset.target
+         X_train, X_test, y_train, y_test = train_test_split(
+             X, y, test_size=0.2, random_state=42
+         )
+
+         start = time()
+         res4 = ub.lazy_cross_val_optim(
+             X_train,
+             y_train,
+             X_test=X_test,
+             y_test=y_test,
+             model_type="lightgbm",
+             type_fit="classification",
+             scoring="accuracy",
+             n_estimators=100,
+             cv=5,
+             n_jobs=None,
+             n_init=10,
+             n_iter=190,
+             abs_tol=1e-3,
+             seed=123,
+             customize=False
+         )
+         print(f"Elapsed: {time()-start}")
+         print(res4)
+         ```
+     """
+
+     removed_regressors = [
+         "TheilSenRegressor",
+         "ARDRegression",
+         "CCA",
+         "GaussianProcessRegressor",
+         "GradientBoostingRegressor",
+         "HistGradientBoostingRegressor",
+         "IsotonicRegression",
+         "MultiOutputRegressor",
+         "MultiTaskElasticNet",
+         "MultiTaskElasticNetCV",
+         "MultiTaskLasso",
+         "MultiTaskLassoCV",
+         "OrthogonalMatchingPursuit",
+         "OrthogonalMatchingPursuitCV",
+         "PLSCanonical",
+         "PLSRegression",
+         "RadiusNeighborsRegressor",
+         "RegressorChain",
+         "StackingRegressor",
+         "VotingRegressor",
+     ]
+
+     results = []
+
+     for est in all_estimators():
+         if issubclass(est[1], RegressorMixin) and (
+             est[0] not in removed_regressors
+         ):
+             try:
+                 if customize == True:
+                     print(f"\n surrogate: CustomRegressor({est[0]})")
+                     surr_obj = ns.CustomRegressor(obj=est[1]())
+                 else:
+                     print(f"\n surrogate: {est[0]}")
+                     surr_obj = est[1]()
+                 res = cross_val_optim(
+                     X_train=X_train,
+                     y_train=y_train,
+                     X_test=X_test,
+                     y_test=y_test,
+                     model_type=model_type,
+                     n_estimators=n_estimators,
+                     surrogate_obj=surr_obj,
+                     cv=cv,
+                     n_jobs=n_jobs,
+                     type_fit=type_fit,
+                     scoring=scoring,
+                     n_init=n_init,
+                     n_iter=n_iter,
+                     abs_tol=abs_tol,
+                     verbose=verbose,
+                     seed=seed,
+                 )
+                 print(f"\n result: {res}")
+                 if customize == True:
+                     results.append((f"CustomRegressor({est[0]})", res))
+                 else:
+                     results.append((est[0], res))
+             except:
+                 pass
+
+     return results
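`cross_val_optim` rewrites `best_params` into constructor-ready keys (`model_type`, `n_estimators`, `learning_rate`, `max_depth`, `rowsample`, `colsample`), so the tuned model can be refit directly. A sketch under that assumption, mirroring the refit done inside the function itself; the dataset and small `n_iter` are illustrative:

```python
import unifiedbooster as ub
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

res = ub.cross_val_optim(X_train, y_train, type_fit="classification", n_iter=10)

# best_params already holds GBDTClassifier constructor arguments
final_clf = ub.GBDTClassifier(**res.best_params, verbose=0, seed=123)
final_clf.fit(X_train, y_train)
print(final_clf.score(X_test, y_test))
```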
unifiedbooster-0.5.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: unifiedbooster
- Version: 0.3.0
+ Version: 0.5.0
  Summary: Unified interface for Gradient Boosted Decision Trees
  Home-page: https://github.com/thierrymoudiki/unifiedbooster
  Author: T. Moudiki
@@ -24,5 +24,6 @@ Requires-Dist: xgboost
  Requires-Dist: lightgbm
  Requires-Dist: catboost
  Requires-Dist: GPopt
+ Requires-Dist: nnetsauce

  Unified interface for Gradient Boosted Decision Trees
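The new `nnetsauce` requirement comes from gpoptimization.py, which wraps user-supplied surrogates before handing them to GPopt. A minimal sketch of that wrapping; using Ridge as the surrogate is an illustrative choice, not from the diff:

```python
import nnetsauce as ns
from sklearn.linear_model import Ridge

# gpoptimization wraps the surrogate for split-conformal prediction intervals
surrogate = ns.PredictionInterval(obj=Ridge(), method="splitconformal")
```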
unifiedbooster-0.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+ unifiedbooster/__init__.py,sha256=8FEkWCZ2tT8xcW46Z0X_BS9_r0kQWVAu37IncLq6QWU,301
+ unifiedbooster/gbdt.py,sha256=u5Sjw-V8BlDS4LUo_SNOfuz66EFcJhP1Al6Es41R_X8,4932
+ unifiedbooster/gbdt_classification.py,sha256=wifw86cUvsyiKSz8MTxIgH6j7Gd1voIxXUiJVsE68bk,4219
+ unifiedbooster/gbdt_regression.py,sha256=YQIDtW4hV7DxHAHuoMMkD1aRy0dzVXxx2rwPu3InTA8,3710
+ unifiedbooster/gpoptimization.py,sha256=S-yZI8qI_QZyoCqWj8MT0a2Djlo3YrYRjyXApLS9FXM,12831
+ unifiedbooster-0.5.0.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
+ unifiedbooster-0.5.0.dist-info/METADATA,sha256=mao-q4w_f26KVwKSy4ZPEJBZQIRARtXsWEN7t7JEwRw,955
+ unifiedbooster-0.5.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ unifiedbooster-0.5.0.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
+ unifiedbooster-0.5.0.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
+ unifiedbooster-0.5.0.dist-info/RECORD,,
unifiedbooster-0.3.0.dist-info/RECORD DELETED
@@ -1,10 +0,0 @@
- unifiedbooster/__init__.py,sha256=3d8wQVXaeVIxqtk_STM6nvIGZiGTxKn9aAWjuwiDYuo,169
- unifiedbooster/gbdt.py,sha256=QCcWfXYfrOXdiSeygPEvVMjg9fVNjRaOnW9KsHK6bvo,4770
- unifiedbooster/gbdt_classification.py,sha256=UqZEOjDp_2hSm4jCxVoqz8vNQ-8JRW4Xn5CjFqPqRF4,4028
- unifiedbooster/gbdt_regression.py,sha256=ZNX5RJF-Wk2KJpOUD-lgNnqruDHZpzSTxdKeayv6iw0,3519
- unifiedbooster-0.3.0.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
- unifiedbooster-0.3.0.dist-info/METADATA,sha256=E0drgIWtoGZNF1lkxrj_zlbMxq8QmOPIW4iDY_GPKm0,930
- unifiedbooster-0.3.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- unifiedbooster-0.3.0.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
- unifiedbooster-0.3.0.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
- unifiedbooster-0.3.0.dist-info/RECORD,,