unifiedbooster-0.4.2-py3-none-any.whl → unifiedbooster-0.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
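The bulk of the diff is a new vendored `nonconformist` subpackage (visible in the RECORD hunk at the end): nonconformity error functions for classification and regression, conformalized quantile regression (CQR) error functions, a normalizer, and model-based nonconformity scorers used for conformal prediction. Brief usage sketches follow the two code hunks below.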
unifiedbooster/nonconformist/nc.py (new file)
@@ -0,0 +1,610 @@
+ #!/usr/bin/env python
+
+ """
+ Nonconformity functions.
+ """
+
+ # Authors: Henrik Linusson
+ # Yaniv Romano modified RegressorNc class to include CQR
+
+ from __future__ import division
+
+ import abc
+ import numpy as np
+ import sklearn.base
+ from .base import ClassifierAdapter, RegressorAdapter
+ from .base import OobClassifierAdapter, OobRegressorAdapter
+
+ # -----------------------------------------------------------------------------
+ # Error functions
+ # -----------------------------------------------------------------------------
+
+
+ class ClassificationErrFunc(object):
+     """Base class for classification model error functions."""
+
+     __metaclass__ = abc.ABCMeta
+
+     def __init__(self):
+         super(ClassificationErrFunc, self).__init__()
+
+     @abc.abstractmethod
+     def apply(self, prediction, y):
+         """Apply the nonconformity function.
+
+         Parameters
+         ----------
+         prediction : numpy array of shape [n_samples, n_classes]
+             Class probability estimates for each sample.
+
+         y : numpy array of shape [n_samples]
+             True output labels of each sample.
+
+         Returns
+         -------
+         nc : numpy array of shape [n_samples]
+             Nonconformity scores of the samples.
+         """
+         pass
+
+
+ class RegressionErrFunc(object):
+     """Base class for regression model error functions."""
+
+     __metaclass__ = abc.ABCMeta
+
+     def __init__(self):
+         super(RegressionErrFunc, self).__init__()
+
+     @abc.abstractmethod
+     def apply(self, prediction, y):  # , norm=None, beta=0):
+         """Apply the nonconformity function.
+
+         Parameters
+         ----------
+         prediction : numpy array of shape [n_samples]
+             Predicted outputs of each sample.
+
+         y : numpy array of shape [n_samples]
+             True output labels of each sample.
+
+         Returns
+         -------
+         nc : numpy array of shape [n_samples]
+             Nonconformity scores of the samples.
+         """
+         pass
+
+     @abc.abstractmethod
+     def apply_inverse(self, nc, significance):  # , norm=None, beta=0):
+         """Apply the inverse of the nonconformity function (i.e.,
+         calculate prediction interval).
+
+         Parameters
+         ----------
+         nc : numpy array of shape [n_calibration_samples]
+             Nonconformity scores obtained from the calibration set.
+
+         significance : float
+             Significance level in (0, 1).
+
+         Returns
+         -------
+         interval : numpy array of shape [n_samples, 2]
+             Minimum and maximum interval boundaries for each prediction.
+         """
+         pass
+
+
+ class InverseProbabilityErrFunc(ClassificationErrFunc):
+     r"""Calculates the probability of not predicting the correct class.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         1 - \hat{P}(y_i | x) \, .
+     """
+
+     def __init__(self):
+         super(InverseProbabilityErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         prob = np.zeros(y.size, dtype=np.float32)
+         for i, y_ in enumerate(y):
+             if y_ >= prediction.shape[1]:
+                 prob[i] = 0
+             else:
+                 prob[i] = prediction[i, int(y_)]
+         return 1 - prob
+
+
+ class MarginErrFunc(ClassificationErrFunc):
+     r"""
+     Calculates the margin error.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         0.5 - \dfrac{\hat{P}(y_i | x) - \max_{y \neq y_i} \hat{P}(y | x)}{2}
+     """
+
+     def __init__(self):
+         super(MarginErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         prob = np.zeros(y.size, dtype=np.float32)
+         for i, y_ in enumerate(y):
+             if y_ >= prediction.shape[1]:
+                 prob[i] = 0
+             else:
+                 prob[i] = prediction[i, int(y_)]
+                 # Mask the true class so the max below runs over the others.
+                 prediction[i, int(y_)] = -np.inf
+         return 0.5 - ((prob - prediction.max(axis=1)) / 2)
+
+
+ class AbsErrorErrFunc(RegressionErrFunc):
+     r"""Calculates absolute error nonconformity for regression problems.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         | y_i - \hat{y}_i |
+     """
+
+     def __init__(self):
+         super(AbsErrorErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         return np.abs(prediction - y)
+
+     def apply_inverse(self, nc, significance):
+         nc = np.sort(nc)[::-1]
+         border = int(np.floor(significance * (nc.size + 1))) - 1
+         # TODO: should probably warn against too few calibration examples
+         border = min(max(border, 0), nc.size - 1)
+         return np.vstack([nc[border], nc[border]])
+
+
+ class SignErrorErrFunc(RegressionErrFunc):
+     r"""Calculates signed error nonconformity for regression problems.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         y_i - \hat{y}_i
+
+     References
+     ----------
+     .. [1] Linusson, Henrik, Ulf Johansson, and Tuve Lofstrom.
+         Signed-error conformal regression. Pacific-Asia Conference on Knowledge
+         Discovery and Data Mining. Springer International Publishing, 2014.
+     """
+
+     def __init__(self):
+         super(SignErrorErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         return prediction - y
+
+     def apply_inverse(self, nc, significance):
+         err_high = -nc
+         err_low = nc
+
+         err_high = np.reshape(err_high, (nc.shape[0], 1))
+         err_low = np.reshape(err_low, (nc.shape[0], 1))
+
+         nc = np.concatenate((err_low, err_high), 1)
+
+         nc = np.sort(nc, 0)
+         index = int(np.ceil((1 - significance / 2) * (nc.shape[0] + 1))) - 1
+         index = min(max(index, 0), nc.shape[0] - 1)
+         return np.vstack([nc[index, 0], nc[index, 1]])
+
+
+ # CQR symmetric error function
+ class QuantileRegErrFunc(RegressionErrFunc):
+     r"""Calculates conformalized quantile regression error.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         \max\{\hat{q}_{low} - y, \, y - \hat{q}_{high}\}
+     """
+
+     def __init__(self):
+         super(QuantileRegErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         y_lower = prediction[:, 0]
+         y_upper = prediction[:, -1]
+         error_low = y_lower - y
+         error_high = y - y_upper
+         err = np.maximum(error_high, error_low)
+         return err
+
+     def apply_inverse(self, nc, significance):
+         nc = np.sort(nc, 0)
+         index = int(np.ceil((1 - significance) * (nc.shape[0] + 1))) - 1
+         index = min(max(index, 0), nc.shape[0] - 1)
+         return np.vstack([nc[index], nc[index]])
+
+
+ # CQR asymmetric error function
+ class QuantileRegAsymmetricErrFunc(RegressionErrFunc):
+     r"""Calculates conformalized quantile regression asymmetric error function.
+
+     For each correct output in ``y``, nonconformity is defined as
+
+     .. math::
+         E_{low} = \hat{q}_{low} - y
+         E_{high} = y - \hat{q}_{high}
+     """
+
+     def __init__(self):
+         super(QuantileRegAsymmetricErrFunc, self).__init__()
+
+     def apply(self, prediction, y):
+         y_lower = prediction[:, 0]
+         y_upper = prediction[:, -1]
+
+         error_high = y - y_upper
+         error_low = y_lower - y
+
+         err_high = np.reshape(error_high, (y_upper.shape[0], 1))
+         err_low = np.reshape(error_low, (y_lower.shape[0], 1))
+
+         return np.concatenate((err_low, err_high), 1)
+
+     def apply_inverse(self, nc, significance):
+         nc = np.sort(nc, 0)
+         index = int(np.ceil((1 - significance / 2) * (nc.shape[0] + 1))) - 1
+         index = min(max(index, 0), nc.shape[0] - 1)
+         return np.vstack([nc[index, 0], nc[index, 1]])
+
+
+ # -----------------------------------------------------------------------------
+ # Base nonconformity scorer
+ # -----------------------------------------------------------------------------
+ class BaseScorer(sklearn.base.BaseEstimator):
+     __metaclass__ = abc.ABCMeta
+
+     def __init__(self):
+         super(BaseScorer, self).__init__()
+
+     @abc.abstractmethod
+     def fit(self, x, y):
+         pass
+
+     @abc.abstractmethod
+     def score(self, x, y=None):
+         pass
+
+
+ class RegressorNormalizer(BaseScorer):
+     def __init__(self, base_model, normalizer_model, err_func):
+         super(RegressorNormalizer, self).__init__()
+         self.base_model = base_model
+         self.normalizer_model = normalizer_model
+         self.err_func = err_func
+
+     def fit(self, x, y):
+         residual_prediction = self.base_model.predict(x)
+         residual_error = np.abs(self.err_func.apply(residual_prediction, y))
+
+         ######################################################################
+         # Optional: use logarithmic function as in the original implementation
+         # available in https://github.com/donlnz/nonconformist
+         #
+         # CODE:
+         # residual_error += 0.00001  # Add small term to avoid log(0)
+         # log_err = np.log(residual_error)
+         ######################################################################
+
+         log_err = residual_error
+         self.normalizer_model.fit(x, log_err)
+
+     def score(self, x, y=None):
+         ######################################################################
+         # Optional: use logarithmic function as in the original implementation
+         # available in https://github.com/donlnz/nonconformist
+         #
+         # CODE:
+         # norm = np.exp(self.normalizer_model.predict(x))
+         ######################################################################
+
+         norm = np.abs(self.normalizer_model.predict(x))
+         return norm
+
+
+ class NcFactory(object):
+     @staticmethod
+     def create_nc(model, err_func=None, normalizer_model=None, oob=False):
+         if normalizer_model is not None:
+             normalizer_adapter = RegressorAdapter(normalizer_model)
+         else:
+             normalizer_adapter = None
+
+         if isinstance(model, sklearn.base.ClassifierMixin):
+             err_func = MarginErrFunc() if err_func is None else err_func
+             if oob:
+                 c = sklearn.base.clone(model)
+                 c.fit([[0], [1]], [0, 1])
+                 if hasattr(c, "oob_decision_function_"):
+                     adapter = OobClassifierAdapter(model)
+                 else:
+                     raise AttributeError(
+                         "Cannot use out-of-bag "
+                         "calibration with {}".format(model.__class__.__name__)
+                     )
+             else:
+                 adapter = ClassifierAdapter(model)
+
+             if normalizer_adapter is not None:
+                 normalizer = RegressorNormalizer(
+                     adapter, normalizer_adapter, err_func
+                 )
+                 return ClassifierNc(adapter, err_func, normalizer)
+             else:
+                 return ClassifierNc(adapter, err_func)
+
+         elif isinstance(model, sklearn.base.RegressorMixin):
+             err_func = AbsErrorErrFunc() if err_func is None else err_func
+             if oob:
+                 c = sklearn.base.clone(model)
+                 c.fit([[0], [1]], [0, 1])
+                 if hasattr(c, "oob_prediction_"):
+                     adapter = OobRegressorAdapter(model)
+                 else:
+                     raise AttributeError(
+                         "Cannot use out-of-bag "
+                         "calibration with {}".format(model.__class__.__name__)
+                     )
+             else:
+                 adapter = RegressorAdapter(model)
+
+             if normalizer_adapter is not None:
+                 normalizer = RegressorNormalizer(
+                     adapter, normalizer_adapter, err_func
+                 )
+                 return RegressorNc(adapter, err_func, normalizer)
+             else:
+                 return RegressorNc(adapter, err_func)
+
+
+ class BaseModelNc(BaseScorer):
+     """Base class for nonconformity scorers based on an underlying model.
+
+     Parameters
+     ----------
+     model : ClassifierAdapter or RegressorAdapter
+         Underlying model used for calculating nonconformity scores.
+
+     err_func : ClassificationErrFunc or RegressionErrFunc
+         Error function object.
+
+     normalizer : BaseScorer
+         Normalization model.
+
+     beta : float
+         Normalization smoothing parameter. As the beta-value increases,
+         the normalized nonconformity function approaches a non-normalized
+         equivalent.
+     """
+
+     def __init__(self, model, err_func, normalizer=None, beta=1e-6):
+         super(BaseModelNc, self).__init__()
+         self.err_func = err_func
+         self.model = model
+         self.normalizer = normalizer
+         self.beta = beta
+
+         # If we use sklearn.base.clone (e.g., during cross-validation),
+         # object references get jumbled, so we need to make sure that the
+         # normalizer has a reference to the proper model adapter, if applicable.
+         if self.normalizer is not None and hasattr(
+             self.normalizer, "base_model"
+         ):
+             self.normalizer.base_model = self.model
+
+         self.last_x, self.last_y = None, None
+         self.last_prediction = None
+         self.clean = False
+
+     def fit(self, x, y):
+         """Fits the underlying model of the nonconformity scorer.
+
+         Parameters
+         ----------
+         x : numpy array of shape [n_samples, n_features]
+             Inputs of examples for fitting the underlying model.
+
+         y : numpy array of shape [n_samples]
+             Outputs of examples for fitting the underlying model.
+
+         Returns
+         -------
+         None
+         """
+         self.model.fit(x, y)
+         if self.normalizer is not None:
+             self.normalizer.fit(x, y)
+         self.clean = False
+
+     def score(self, x, y=None):
+         """Calculates the nonconformity score of a set of samples.
+
+         Parameters
+         ----------
+         x : numpy array of shape [n_samples, n_features]
+             Inputs of examples for which to calculate a nonconformity score.
+
+         y : numpy array of shape [n_samples]
+             Outputs of examples for which to calculate a nonconformity score.
+
+         Returns
+         -------
+         nc : numpy array of shape [n_samples]
+             Nonconformity scores of samples.
+         """
+         prediction = self.model.predict(x)
+         n_test = x.shape[0]
+         if self.normalizer is not None:
+             norm = self.normalizer.score(x) + self.beta
+         else:
+             norm = np.ones(n_test)
+         if prediction.ndim > 1:  # CQR: quantile predictions are not normalized
+             ret_val = self.err_func.apply(prediction, y)
+         else:
+             ret_val = self.err_func.apply(prediction, y) / norm
+         return ret_val
+
+
+ # -----------------------------------------------------------------------------
+ # Classification nonconformity scorers
+ # -----------------------------------------------------------------------------
+ class ClassifierNc(BaseModelNc):
+     """Nonconformity scorer using an underlying class probability estimating
+     model.
+
+     Parameters
+     ----------
+     model : ClassifierAdapter
+         Underlying classification model used for calculating nonconformity
+         scores.
+
+     err_func : ClassificationErrFunc
+         Error function object.
+
+     normalizer : BaseScorer
+         Normalization model.
+
+     beta : float
+         Normalization smoothing parameter. As the beta-value increases,
+         the normalized nonconformity function approaches a non-normalized
+         equivalent.
+
+     Attributes
+     ----------
+     model : ClassifierAdapter
+         Underlying model object.
+
+     err_func : ClassificationErrFunc
+         Scorer function used to calculate nonconformity scores.
+
+     See also
+     --------
+     RegressorNc, NormalizedRegressorNc
+     """
+
+     def __init__(
+         self, model, err_func=MarginErrFunc(), normalizer=None, beta=1e-6
+     ):
+         super(ClassifierNc, self).__init__(model, err_func, normalizer, beta)
+
+
+ # -----------------------------------------------------------------------------
+ # Regression nonconformity scorers
+ # -----------------------------------------------------------------------------
+ class RegressorNc(BaseModelNc):
+     """Nonconformity scorer using an underlying regression model.
+
+     Parameters
+     ----------
+     model : RegressorAdapter
+         Underlying regression model used for calculating nonconformity scores.
+
+     err_func : RegressionErrFunc
+         Error function object.
+
+     normalizer : BaseScorer
+         Normalization model.
+
+     beta : float
+         Normalization smoothing parameter. As the beta-value increases,
+         the normalized nonconformity function approaches a non-normalized
+         equivalent.
+
+     Attributes
+     ----------
+     model : RegressorAdapter
+         Underlying model object.
+
+     err_func : RegressionErrFunc
+         Scorer function used to calculate nonconformity scores.
+
+     See also
+     --------
+     ProbEstClassifierNc, NormalizedRegressorNc
+     """
+
+     def __init__(
+         self, model, err_func=AbsErrorErrFunc(), normalizer=None, beta=1e-6
+     ):
+         super(RegressorNc, self).__init__(model, err_func, normalizer, beta)
+
+     def predict(self, x, nc, significance=None):
+         """Constructs prediction intervals for a set of test examples.
+
+         Predicts the output of each test pattern using the underlying model,
+         and applies the (partial) inverse nonconformity function to each
+         prediction, resulting in a prediction interval for each test pattern.
+
+         Parameters
+         ----------
+         x : numpy array of shape [n_samples, n_features]
+             Inputs of patterns for which to predict output values.
+
+         nc : numpy array of shape [n_calibration_samples]
+             Nonconformity scores obtained from the calibration set.
+
+         significance : float
+             Significance level (maximum allowed error rate) of predictions.
+             Should be a float between 0 and 1. If ``None``, then intervals for
+             all significance levels (0.01, 0.02, ..., 0.99) are output in a
+             3d-matrix.
+
+         Returns
+         -------
+         p : numpy array of shape [n_samples, 2] or [n_samples, 2, 99]
+             If significance is ``None``, then p contains the interval (minimum
+             and maximum boundaries) for each test pattern, and each significance
+             level (0.01, 0.02, ..., 0.99). If significance is a float between
+             0 and 1, then p contains the prediction intervals (minimum and
+             maximum boundaries) for the set of test patterns at the chosen
+             significance level.
+         """
+         n_test = x.shape[0]
+         prediction = self.model.predict(x)
+         if self.normalizer is not None:
+             norm = self.normalizer.score(x) + self.beta
+         else:
+             norm = np.ones(n_test)
+
+         if significance:
+             intervals = np.zeros((x.shape[0], 2))
+             err_dist = self.err_func.apply_inverse(nc, significance)
+             err_dist = np.hstack([err_dist] * n_test)
+             if prediction.ndim > 1:  # CQR
+                 intervals[:, 0] = prediction[:, 0] - err_dist[0, :]
+                 intervals[:, 1] = prediction[:, -1] + err_dist[1, :]
+             else:  # regular conformal prediction
+                 err_dist *= norm
+                 intervals[:, 0] = prediction - err_dist[0, :]
+                 intervals[:, 1] = prediction + err_dist[1, :]
+
+             return intervals
+         else:  # Not tested for CQR
+             significance = np.arange(0.01, 1.0, 0.01)
+             intervals = np.zeros((x.shape[0], 2, significance.size))
+
+             for i, s in enumerate(significance):
+                 err_dist = self.err_func.apply_inverse(nc, s)
+                 err_dist = np.hstack([err_dist] * n_test)
+                 err_dist *= norm
+
+                 intervals[:, 0, i] = prediction - err_dist[0, :]
+                 intervals[:, 1, i] = prediction + err_dist[1, :]
+
+             return intervals
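A reader's sketch (not part of the package) of how the pieces above fit together for plain split-conformal regression: `AbsErrorErrFunc.apply` scores calibration residuals, `apply_inverse` picks the relevant calibration quantile, and the non-CQR branch of `RegressorNc.predict` widens each point prediction by that quantile. Toy numbers throughout; only numpy is assumed.

```python
import numpy as np

significance = 0.1  # target miscoverage rate -> ~90% intervals

# Toy calibration set: point predictions and true targets.
pred_cal = np.full(10, 3.5)
y_cal = np.array([3.1, 2.9, 4.2, 3.8, 3.5, 4.0, 2.7, 3.3, 3.9, 3.6])

# AbsErrorErrFunc.apply: nonconformity = absolute residual.
nc = np.abs(pred_cal - y_cal)

# AbsErrorErrFunc.apply_inverse: take the appropriate order statistic
# of the descending-sorted scores, with the same index clamping as above.
nc_desc = np.sort(nc)[::-1]
border = int(np.floor(significance * (nc.size + 1))) - 1
border = min(max(border, 0), nc.size - 1)
q = nc_desc[border]

# RegressorNc.predict, non-CQR branch: symmetric interval per prediction.
pred_test = np.array([3.2, 4.1])
intervals = np.column_stack([pred_test - q, pred_test + q])
print(intervals)  # one [lower, upper] row per test prediction
```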
unifiedbooster/nonconformist/util.py (new file)
@@ -0,0 +1,9 @@
+ from __future__ import division
+ import numpy as np
+
+
+ def calc_p(ncal, ngt, neq, smoothing=False):
+     if smoothing:
+         return (ngt + (neq + 1) * np.random.uniform(0, 1)) / (ncal + 1)
+     else:
+         return (ngt + neq + 1) / (ncal + 1)
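`calc_p` turns calibration counts into a conformal p-value: of `ncal` calibration scores, `ngt` are strictly greater than the test score and `neq` are equal to it. A usage sketch with toy counts (the import path follows the RECORD below):

```python
from unifiedbooster.nonconformist.util import calc_p

# 100 calibration scores: 4 strictly greater than the test score, 1 tied.
p = calc_p(ncal=100, ngt=4, neq=1)
print(p)  # (4 + 1 + 1) / (100 + 1) ~= 0.0594

# With smoothing=True, ties are broken uniformly at random, which makes
# the p-values exactly uniform under exchangeability.
p_smooth = calc_p(ncal=100, ngt=4, neq=1, smoothing=True)
print(p_smooth)
```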
unifiedbooster-0.4.2.dist-info/METADATA → unifiedbooster-0.6.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: unifiedbooster
- Version: 0.4.2
+ Version: 0.6.0
  Summary: Unified interface for Gradient Boosted Decision Trees
  Home-page: https://github.com/thierrymoudiki/unifiedbooster
  Author: T. Moudiki
unifiedbooster-0.6.0.dist-info/RECORD (new file)
@@ -0,0 +1,19 @@
+ unifiedbooster/__init__.py,sha256=8FEkWCZ2tT8xcW46Z0X_BS9_r0kQWVAu37IncLq6QWU,301
+ unifiedbooster/gbdt.py,sha256=u5Sjw-V8BlDS4LUo_SNOfuz66EFcJhP1Al6Es41R_X8,4932
+ unifiedbooster/gbdt_classification.py,sha256=wifw86cUvsyiKSz8MTxIgH6j7Gd1voIxXUiJVsE68bk,4219
+ unifiedbooster/gbdt_regression.py,sha256=YQIDtW4hV7DxHAHuoMMkD1aRy0dzVXxx2rwPu3InTA8,3710
+ unifiedbooster/gpoptimization.py,sha256=GvOcJLNtI5PD7umbnuXEM_zmL9zpzXhWE4zBlPiksiY,15292
+ unifiedbooster/nonconformist/__init__.py,sha256=GFRHsqNvx7JrBhph5p9iMtVgcjOjyMYFyt6rwCbAbpg,568
+ unifiedbooster/nonconformist/acp.py,sha256=SrfBVCWjXvntkBJ7GXTFYE6i6NU3Pv-5ibwhpItDKDw,11553
+ unifiedbooster/nonconformist/base.py,sha256=0Iiuz_34KXZasbTkbXwXG2-1HiDbG7LpCQypTHuGtcs,3958
+ unifiedbooster/nonconformist/cp.py,sha256=YKiBFKwvaJbWnJcgi-saiVD_2ci-LBDHgytf70jHvFg,6174
+ unifiedbooster/nonconformist/evaluation.py,sha256=b24buhhW3v3CKRSi69WKCq9Sb38Unmjr8TAZr66Cdns,15906
+ unifiedbooster/nonconformist/icp.py,sha256=wqOaoy22KiF2ebVQOjp8MR-zvEjT0hE0NiMfeNZOQEw,15982
+ unifiedbooster/nonconformist/nc.py,sha256=_ED8Yn068Ivio9Xr0SjwKh4Ts5MfUACZFY40ObxPJ60,19644
+ unifiedbooster/nonconformist/util.py,sha256=UBKlAEb0mj9MVWBOKCRAq_OQP5Z53FMqWlTyo7RIg5Q,242
+ unifiedbooster-0.6.0.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
+ unifiedbooster-0.6.0.dist-info/METADATA,sha256=plXwSokDSPdwYPHo8WXgP1F8Tk_d3wjXMwiFwYCwi1I,955
+ unifiedbooster-0.6.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ unifiedbooster-0.6.0.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
+ unifiedbooster-0.6.0.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
+ unifiedbooster-0.6.0.dist-info/RECORD,,
unifiedbooster-0.4.2.dist-info/RECORD (removed)
@@ -1,11 +0,0 @@
- unifiedbooster/__init__.py,sha256=8FEkWCZ2tT8xcW46Z0X_BS9_r0kQWVAu37IncLq6QWU,301
- unifiedbooster/gbdt.py,sha256=1qVdOeoEyBxxbJ7HBHZegGJo2d2onXs73o8_JntOtN8,4819
- unifiedbooster/gbdt_classification.py,sha256=RLoM_lCmvEDrpNLRFlEzwKBA2oc0mkYUVKLFOTYAPrs,4099
- unifiedbooster/gbdt_regression.py,sha256=Eavj3mV5Lsjpx-d03GLgT8GrwEYuBmBEWkUyDPcJu_g,3590
- unifiedbooster/gpoptimization.py,sha256=xomHqQHu1wvG2wDdmErY8fYgB39pmNMo0-IvJdwEpoM,12606
- unifiedbooster-0.4.2.dist-info/LICENSE,sha256=3rWw63btcdqbC0XMnpzCQhxDP8Vx7yKkKS7EDgJiY_4,1061
- unifiedbooster-0.4.2.dist-info/METADATA,sha256=FiWDX64O41lbiNDL406XjArYUcnoIKKAZjNdxkzbHGo,955
- unifiedbooster-0.4.2.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- unifiedbooster-0.4.2.dist-info/entry_points.txt,sha256=OVNTsCzMYnaJ11WIByB7G8Lym_dj-ERKZyQxWFUcW30,59
- unifiedbooster-0.4.2.dist-info/top_level.txt,sha256=gOMxxpRtx8_nJXTWsXJDFkNeCsjSJQPs6aUXKK5_nI4,15
- unifiedbooster-0.4.2.dist-info/RECORD,,