rumale 0.18.2 → 0.18.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/lib/rumale.rb +1 -1
- data/lib/rumale/ensemble/ada_boost_classifier.rb +0 -21
- data/lib/rumale/ensemble/ada_boost_regressor.rb +0 -21
- data/lib/rumale/ensemble/extra_trees_classifier.rb +0 -12
- data/lib/rumale/ensemble/extra_trees_regressor.rb +0 -12
- data/lib/rumale/ensemble/gradient_boosting_classifier.rb +0 -23
- data/lib/rumale/ensemble/gradient_boosting_regressor.rb +0 -21
- data/lib/rumale/ensemble/random_forest_classifier.rb +0 -21
- data/lib/rumale/ensemble/random_forest_regressor.rb +0 -19
- data/lib/rumale/model_selection/grid_search_cv.rb +0 -23
- data/lib/rumale/multiclass/one_vs_rest_classifier.rb +0 -17
- data/lib/rumale/nearest_neighbors/k_neighbors_regressor.rb +4 -4
- data/lib/rumale/optimizer/ada_grad.rb +0 -15
- data/lib/rumale/optimizer/adam.rb +0 -19
- data/lib/rumale/optimizer/nadam.rb +0 -21
- data/lib/rumale/optimizer/rmsprop.rb +0 -17
- data/lib/rumale/optimizer/sgd.rb +0 -17
- data/lib/rumale/optimizer/yellow_fin.rb +0 -37
- data/lib/rumale/pairwise_metric.rb +5 -4
- data/lib/rumale/tree/decision_tree_classifier.rb +0 -23
- data/lib/rumale/tree/decision_tree_regressor.rb +0 -21
- data/lib/rumale/tree/extra_tree_classifier.rb +0 -12
- data/lib/rumale/tree/extra_tree_regressor.rb +0 -12
- data/lib/rumale/tree/gradient_tree_regressor.rb +0 -21
- data/lib/rumale/tree/node.rb +0 -31
- data/lib/rumale/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a3def8720c4043695c73b01ba991b8d394e8c684bf0fc74458c544d1d17d1512
+  data.tar.gz: c758d68350a6f97706aab6ecb1934af21aac0cbd9d4ca03cd8dc940c982411b3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 23202fa7a3a0f61d137575d5742a58e04ba4f84a912e287013465b5620db1b2b88f96fde72ea4a138c4f4386160c7005ef94c8288b8548576c588aa705febd61
+  data.tar.gz: 346cf325f959a52ad3b530f4d3a7488b1f313501ad0b244d9a43357e1cc636ef6a3f00781a6a7cdc763bc1e6df291ded3967fea61cedc4ce58a1ca368d76f9e5
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,13 @@
+# 0.18.3
+- Fix API documentation on [KNeighborsRegressor](https://yoshoku.github.io/rumale/doc/Rumale/NearestNeighbors/KNeighborsRegressor.html).
+- Refactor the [rbf_kernel](https://yoshoku.github.io/rumale/doc/Rumale/PairwiseMetric.html#rbf_kernel-class_method) method.
+- Delete unneeded marshal dump and load methods from
+  [Tree](https://yoshoku.github.io/rumale/doc/Rumale/Tree.html),
+  [Ensemble](https://yoshoku.github.io/rumale/doc/Rumale/Ensemble.html),
+  [Optimizer](https://yoshoku.github.io/rumale/doc/Rumale/Optimizer.html),
+  [OneVsRestClassifier](https://yoshoku.github.io/rumale/doc/Rumale/Multiclass/OneVsRestClassifier.html), and
+  [GridSearchCV](https://yoshoku.github.io/rumale/doc/Rumale/ModelSelection/GridSearchCV.html); this completes the removal of these methods.
+
 # 0.18.2
 - Change file composition of naive bayes classifiers.
 - Add classifier class for [ComplementNaiveBayes](https://yoshoku.github.io/rumale/doc/Rumale/NaiveBayes/ComplementNB.html).
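The deleted per-class marshal_dump/marshal_load methods essentially copied instance variables into a Hash and back, behavior that Ruby's built-in Marshal already provides for plain objects, so trained models remain serializable after this change. A minimal sketch of saving and restoring a model (the toy data, the file name, and the choice of RandomForestClassifier are illustrative assumptions, not taken from this diff):

    require 'rumale'

    # Hypothetical toy data: two well-separated classes with two features each.
    x = Numo::DFloat[[1.0, 2.0], [2.0, 1.0], [8.0, 9.0], [9.0, 8.0]]
    y = Numo::Int32[0, 0, 1, 1]

    model = Rumale::Ensemble::RandomForestClassifier.new(n_estimators: 10, random_seed: 1)
    model.fit(x, y)

    # Ruby's default object serialization handles the estimator and its Numo arrays.
    File.binwrite('rf.dat', Marshal.dump(model))
    restored = Marshal.load(File.binread('rf.dat'))
    p restored.predict(x)  # should reproduce the training labels on this toy data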
data/lib/rumale.rb
CHANGED
@@ -84,7 +84,7 @@ require 'rumale/decomposition/fast_ica'
 require 'rumale/manifold/tsne'
 require 'rumale/manifold/mds'
 require 'rumale/metric_learning/fisher_discriminant_analysis'
-require 'rumale/metric_learning/neighbourhood_component_analysis
+require 'rumale/metric_learning/neighbourhood_component_analysis'
 require 'rumale/neural_network/adam'
 require 'rumale/neural_network/base_mlp'
 require 'rumale/neural_network/mlp_regressor'
data/lib/rumale/ensemble/ada_boost_classifier.rb
CHANGED
@@ -171,27 +171,6 @@ module Rumale
         probs /= Numo::DFloat[sum_probs].transpose
         probs
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data about AdaBoostClassifier.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          classes: @classes,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @classes = obj[:classes]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
     end
   end
 end
data/lib/rumale/ensemble/ada_boost_regressor.rb
CHANGED
@@ -153,27 +153,6 @@ module Rumale
         sum_weight = @estimator_weights.sum
         predictions / sum_weight
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data about AdaBoostRegressor.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          estimator_weights: @estimator_weights,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @estimator_weights = obj[:estimator_weights]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
     end
   end
 end
data/lib/rumale/ensemble/extra_trees_classifier.rb
CHANGED
@@ -125,18 +125,6 @@ module Rumale
         super
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about ExtraTreesClassifier.
-      def marshal_dump
-        super
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        super
-      end
-
       private
 
       def plant_tree(rnd_seed)
data/lib/rumale/ensemble/extra_trees_regressor.rb
CHANGED
@@ -111,18 +111,6 @@ module Rumale
         super
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about ExtraTreesRegressor.
-      def marshal_dump
-        super
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        super
-      end
-
       private
 
       def plant_tree(rnd_seed)
data/lib/rumale/ensemble/gradient_boosting_classifier.rb
CHANGED
@@ -185,29 +185,6 @@ module Rumale
         Numo::Int32[*leaf_ids].transpose
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about GradientBoostingClassifier.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          classes: @classes,
-          base_predictions: @base_predictions,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @classes = obj[:classes]
-        @base_predictions = obj[:base_predictions]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def partial_fit(x, y, init_pred)
data/lib/rumale/ensemble/gradient_boosting_regressor.rb
CHANGED
@@ -149,27 +149,6 @@ module Rumale
         Numo::Int32[*leaf_ids].transpose
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about GradientBoostingRegressor.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          base_predictions: @base_predictions,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @base_predictions = obj[:base_predictions]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def partial_fit(x, y, init_pred)
data/lib/rumale/ensemble/random_forest_classifier.rb
CHANGED
@@ -162,27 +162,6 @@ module Rumale
         Numo::Int32[*Array.new(@params[:n_estimators]) { |n| @estimators[n].apply(x) }].transpose
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about RandomForestClassifier.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          classes: @classes,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @classes = obj[:classes]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def plant_tree(rnd_seed)
data/lib/rumale/ensemble/random_forest_regressor.rb
CHANGED
@@ -139,25 +139,6 @@ module Rumale
         Numo::Int32[*Array.new(@params[:n_estimators]) { |n| @estimators[n].apply(x) }].transpose
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about RandomForestRegressor.
-      def marshal_dump
-        { params: @params,
-          estimators: @estimators,
-          feature_importances: @feature_importances,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @estimators = obj[:estimators]
-        @feature_importances = obj[:feature_importances]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def plant_tree(rnd_seed)
data/lib/rumale/model_selection/grid_search_cv.rb
CHANGED
@@ -152,29 +152,6 @@ module Rumale
         @best_estimator.score(x, y)
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about GridSearchCV.
-      def marshal_dump
-        { params: @params,
-          cv_results: @cv_results,
-          best_score: @best_score,
-          best_params: @best_params,
-          best_index: @best_index,
-          best_estimator: @best_estimator }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @cv_results = obj[:cv_results]
-        @best_score = obj[:best_score]
-        @best_params = obj[:best_params]
-        @best_index = obj[:best_index]
-        @best_estimator = obj[:best_estimator]
-        nil
-      end
-
       private
 
       def valid_param_grid(grid)
data/lib/rumale/multiclass/one_vs_rest_classifier.rb
CHANGED
@@ -78,23 +78,6 @@ module Rumale
         decision_values = decision_function(x)
         Numo::Int32.asarray(Array.new(n_samples) { |n| @classes[decision_values[n, true].max_index] })
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data about OneVsRestClassifier.
-      def marshal_dump
-        { params: @params,
-          classes: @classes,
-          estimators: @estimators.map { |e| Marshal.dump(e) } }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @classes = obj[:classes]
-        @estimators = obj[:estimators].map { |e| Marshal.load(e) }
-        nil
-      end
     end
   end
 end
data/lib/rumale/nearest_neighbors/k_neighbors_regressor.rb
CHANGED
@@ -21,11 +21,11 @@ module Rumale
       # Return the prototypes for the nearest neighbor regressor.
       # If the metric is 'precomputed', that returns nil.
       # If the algorithm is 'vptree', that returns Rumale::NearestNeighbors::VPTree.
-      # @return [Numo::DFloat] (shape: [
+      # @return [Numo::DFloat] (shape: [n_training_samples, n_features])
       attr_reader :prototypes
 
       # Return the values of the prototypes
-      # @return [Numo::DFloat] (shape: [
+      # @return [Numo::DFloat] (shape: [n_training_samples, n_outputs])
       attr_reader :values
 
       # Create a new regressor with the nearest neighbor rule.
@@ -74,9 +74,9 @@ module Rumale
 
       # Predict values for samples.
       #
-      # @param x [Numo::DFloat] (shape: [
+      # @param x [Numo::DFloat] (shape: [n_testing_samples, n_features]) The samples to predict the values.
      #   If the metric is 'precomputed', x must be a square distance matrix (shape: [n_testing_samples, n_training_samples]).
-      # @return [Numo::DFloat] (shape: [
+      # @return [Numo::DFloat] (shape: [n_testing_samples, n_outputs]) Predicted values per sample.
       def predict(x)
         x = check_convert_sample_array(x)
         if @params[:metric] == 'precomputed' && x.shape[1] != @values.shape[0]
|
|
34
34
|
@moment += gradient**2
|
35
35
|
weight - (@params[:learning_rate] / (@moment**0.5 + 1.0e-8)) * gradient
|
36
36
|
end
|
37
|
-
|
38
|
-
# Dump marshal data.
|
39
|
-
# @return [Hash] The marshal data.
|
40
|
-
def marshal_dump
|
41
|
-
{ params: @params,
|
42
|
-
moment: @moment }
|
43
|
-
end
|
44
|
-
|
45
|
-
# Load marshal data.
|
46
|
-
# @return [nil]
|
47
|
-
def marshal_load(obj)
|
48
|
-
@params = obj[:params]
|
49
|
-
@moment = obj[:moment]
|
50
|
-
nil
|
51
|
-
end
|
52
37
|
end
|
53
38
|
end
|
54
39
|
end
|
data/lib/rumale/optimizer/adam.rb
CHANGED
@@ -48,25 +48,6 @@ module Rumale
 
         weight - @params[:learning_rate] * nm_fst_moment / (nm_sec_moment**0.5 + 1e-8)
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          fst_moment: @fst_moment,
-          sec_moment: @sec_moment,
-          iter: @iter }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @fst_moment = obj[:fst_moment]
-        @sec_moment = obj[:sec_moment]
-        @iter = obj[:iter]
-        nil
-      end
     end
   end
 end
data/lib/rumale/optimizer/nadam.rb
CHANGED
@@ -57,27 +57,6 @@ module Rumale
 
         weight - (@params[:learning_rate] / (nm_sec_moment**0.5 + 1e-8)) * ((1 - decay1_curr) * nm_gradient + decay1_next * nm_fst_moment)
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          fst_moment: @fst_moment,
-          sec_moment: @sec_moment,
-          decay1_prod: @decay1_prod,
-          iter: @iter }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @fst_moment = obj[:fst_moment]
-        @sec_moment = obj[:sec_moment]
-        @decay1_prod = obj[:decay1_prod]
-        @iter = obj[:iter]
-        nil
-      end
     end
   end
 end
data/lib/rumale/optimizer/rmsprop.rb
CHANGED
@@ -42,23 +42,6 @@ module Rumale
         @update = @params[:momentum] * @update - (@params[:learning_rate] / (@moment**0.5 + 1.0e-8)) * gradient
         weight + @update
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          moment: @moment,
-          update: @update }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @moment = obj[:moment]
-        @update = obj[:update]
-        nil
-      end
     end
   end
 end
data/lib/rumale/optimizer/sgd.rb
CHANGED
@@ -38,23 +38,6 @@ module Rumale
         @update = @params[:momentum] * @update - current_learning_rate * gradient
         weight + @update
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          iter: @iter,
-          update: @update }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @iter = obj[:iter]
-        @update = obj[:update]
-        nil
-      end
     end
   end
 end
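The context lines in the hunk above show SGD's momentum update: update = momentum * update - current_learning_rate * gradient, and the returned weight is weight + update. A minimal sketch of driving the optimizer by hand (the weight and gradient vectors are made up, and in practice an optimizer instance is normally passed to an estimator that accepts an optimizer: parameter rather than called directly):

    require 'rumale'

    opt = Rumale::Optimizer::SGD.new(learning_rate: 0.01, momentum: 0.9)
    weight   = Numo::DFloat[0.5, -0.3, 0.1]
    gradient = Numo::DFloat[0.2, 0.1, -0.4]
    weight = opt.call(weight, gradient)  # one momentum SGD step
    p weight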
data/lib/rumale/optimizer/yellow_fin.rb
CHANGED
@@ -96,43 +96,6 @@ module Rumale
         @curve_mean = @params[:decay] * @curve_mean + (1 - @params[:decay]) * grad_sqr
         @distance_mean = @params[:decay] * @distance_mean + (1 - @params[:decay]) * (@grad_norm_mean / @curve_mean)
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          smth_learning_rate: @smth_learning_rate,
-          smth_momentum: @smth_momentum,
-          grad_norms: @grad_norms,
-          grad_norm_min: @grad_norm_min,
-          grad_norm_max: @grad_norm_max,
-          grad_mean_sqr: @grad_mean_sqr,
-          grad_mean: @grad_mean,
-          grad_var: @grad_var,
-          grad_norm_mean: @grad_norm_mean,
-          curve_mean: @curve_mean,
-          distance_mean: @distance_mean,
-          update: @update }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @smth_learning_rate = obj[:smth_learning_rate]
-        @smth_momentum = obj[:smth_momentum]
-        @grad_norms = obj[:grad_norms]
-        @grad_norm_min = obj[:grad_norm_min]
-        @grad_norm_max = obj[:grad_norm_max]
-        @grad_mean_sqr = obj[:grad_mean_sqr]
-        @grad_mean = obj[:grad_mean]
-        @grad_var = obj[:grad_var]
-        @grad_norm_mean = obj[:grad_norm_mean]
-        @curve_mean = obj[:curve_mean]
-        @distance_mean = obj[:distance_mean]
-        @update = obj[:update]
-        nil
-      end
     end
   end
 end
data/lib/rumale/pairwise_metric.rb
CHANGED
@@ -61,12 +61,13 @@
     # @param gamma [Float] The parameter of rbf kernel, if nil it is 1 / n_features.
     # @return [Numo::DFloat] (shape: [n_samples_x, n_samples_x] or [n_samples_x, n_samples_y] if y is given)
     def rbf_kernel(x, y = nil, gamma = nil)
-
-
+      y_not_given = y.nil?
+      y = x if y_not_given
       x = Rumale::Validation.check_convert_sample_array(x)
-      y = Rumale::Validation.check_convert_sample_array(y)
+      y = Rumale::Validation.check_convert_sample_array(y) unless y_not_given
+      gamma ||= 1.0 / x.shape[1]
       Rumale::Validation.check_params_numeric(gamma: gamma)
-      Numo::NMath.exp(-gamma * squared_error(x, y)
+      Numo::NMath.exp(-gamma * squared_error(x, y))
     end
 
     # Calculate the linear kernel between x and y.
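After the refactoring, rbf_kernel computes K[i, j] = exp(-gamma * ||x_i - y_j||**2), using x for both sides when y is omitted and defaulting gamma to 1 / n_features. A minimal usage sketch with made-up random matrices:

    require 'rumale'

    x = Numo::DFloat.new(10, 4).rand
    y = Numo::DFloat.new(6, 4).rand

    k_xx = Rumale::PairwiseMetric.rbf_kernel(x)          # [10, 10], gamma defaults to 1.0 / 4
    k_xy = Rumale::PairwiseMetric.rbf_kernel(x, y, 0.5)  # [10, 6], explicit gamma
    p [k_xx.shape, k_xy.shape]                           # => [[10, 10], [10, 6]]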
data/lib/rumale/tree/decision_tree_classifier.rb
CHANGED
@@ -104,29 +104,6 @@ module Rumale
         Numo::DFloat[*(Array.new(x.shape[0]) { |n| predict_proba_at_node(@tree, x[n, true]) })]
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about DecisionTreeClassifier
-      def marshal_dump
-        { params: @params,
-          classes: @classes,
-          tree: @tree,
-          feature_importances: @feature_importances,
-          leaf_labels: @leaf_labels,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @classes = obj[:classes]
-        @tree = obj[:tree]
-        @feature_importances = obj[:feature_importances]
-        @leaf_labels = obj[:leaf_labels]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def predict_proba_at_node(node, sample)
data/lib/rumale/tree/decision_tree_regressor.rb
CHANGED
@@ -90,27 +90,6 @@ module Rumale
         @leaf_values.shape[1].nil? ? @leaf_values[apply(x)].dup : @leaf_values[apply(x), true].dup
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about DecisionTreeRegressor
-      def marshal_dump
-        { params: @params,
-          tree: @tree,
-          feature_importances: @feature_importances,
-          leaf_values: @leaf_values,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @tree = obj[:tree]
-        @feature_importances = obj[:feature_importances]
-        @leaf_values = obj[:leaf_values]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def stop_growing?(y)
data/lib/rumale/tree/extra_tree_classifier.rb
CHANGED
@@ -89,18 +89,6 @@ module Rumale
         super
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about ExtraTreeClassifier
-      def marshal_dump
-        super
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        super
-      end
-
       private
 
       def best_split(features, y, whole_impurity)
data/lib/rumale/tree/extra_tree_regressor.rb
CHANGED
@@ -76,18 +76,6 @@ module Rumale
         super
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about ExtraTreeRegressor
-      def marshal_dump
-        super
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        super
-      end
-
       private
 
       def best_split(features, y, whole_impurity)
data/lib/rumale/tree/gradient_tree_regressor.rb
CHANGED
@@ -118,27 +118,6 @@ module Rumale
         Numo::Int32[*(Array.new(x.shape[0]) { |n| apply_at_node(@tree, x[n, true]) })]
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about DecisionTreeRegressor
-      def marshal_dump
-        { params: @params,
-          tree: @tree,
-          feature_importances: @feature_importances,
-          leaf_weights: @leaf_weights,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @tree = obj[:tree]
-        @feature_importances = obj[:feature_importances]
-        @leaf_weights = obj[:leaf_weights]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def apply_at_node(node, sample)
data/lib/rumale/tree/node.rb
CHANGED
@@ -34,37 +34,6 @@ module Rumale
         @feature_id = feature_id
         @threshold = threshold
       end
-
-      # Dump marshal data.
-      # @return [Hash] The marshal data about Node
-      def marshal_dump
-        { depth: @depth,
-          impurity: @impurity,
-          n_samples: @n_samples,
-          probs: @probs,
-          leaf: @leaf,
-          leaf_id: @leaf_id,
-          left: @left,
-          right: @right,
-          feature_id: @feature_id,
-          threshold: @threshold }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @depth = obj[:depth]
-        @impurity = obj[:impurity]
-        @n_samples = obj[:n_samples]
-        @probs = obj[:probs]
-        @leaf = obj[:leaf]
-        @leaf_id = obj[:leaf_id]
-        @left = obj[:left]
-        @right = obj[:right]
-        @feature_id = obj[:feature_id]
-        @threshold = obj[:threshold]
-        nil
-      end
     end
   end
 end
data/lib/rumale/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rumale
 version: !ruby/object:Gem::Version
-  version: 0.18.2
+  version: 0.18.3
 platform: ruby
 authors:
 - yoshoku
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2020-
+date: 2020-04-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray