rumale 0.18.0 → 0.18.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/lib/rumale.rb +1 -0
- data/lib/rumale/decomposition/factor_analysis.rb +0 -22
- data/lib/rumale/decomposition/fast_ica.rb +0 -23
- data/lib/rumale/decomposition/nmf.rb +0 -17
- data/lib/rumale/decomposition/pca.rb +0 -19
- data/lib/rumale/evaluation_measure/function.rb +109 -0
- data/lib/rumale/manifold/mds.rb +0 -21
- data/lib/rumale/manifold/tsne.rb +0 -21
- data/lib/rumale/naive_bayes/naive_bayes.rb +0 -65
- data/lib/rumale/polynomial_model/factorization_machine_classifier.rb +0 -23
- data/lib/rumale/polynomial_model/factorization_machine_regressor.rb +0 -21
- data/lib/rumale/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1b344bc829bda6e2b5f60baba450f3c38d5f90ebf0c0cfccd02d2894189d540e
+  data.tar.gz: 91fcce138ced31e94363b6f137bd66b6e1637d1fb5d03a1b1b531a3e1d2a3502
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2d2176c440222ba9b1265ce97b575649670c4067da4ddd51529b219ba63d8e15852227843474ae40c8af328d7c366b5ee49f63dac5982c0e1db96157315dd256
+  data.tar.gz: 2dc2d4d16ed22e837e603cd2b7c50a06e3c57d3c5882f47df671ea096aadbab6cae8c0ca50d940f9545d59163fa3034a667d80df84f600900b2c29c7b519dded
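The values above are the SHA256/SHA512 digests that the gem package's checksums.yaml records for the metadata.gz and data.tar.gz archives bundled inside the .gem file. A minimal Ruby sketch for checking the data.tar.gz digest of a locally fetched gem against the SHA256 listed above; the `gem fetch` step and the local file name are assumptions, not part of this diff:

    # Assumes `gem fetch rumale -v 0.18.1` was run in the current directory.
    require 'digest'
    require 'rubygems/package'

    # data.tar.gz SHA256 taken from the checksums.yaml diff above.
    expected = '91fcce138ced31e94363b6f137bd66b6e1637d1fb5d03a1b1b531a3e1d2a3502'

    File.open('rumale-0.18.1.gem', 'rb') do |io|
      # A .gem file is a plain tar archive containing metadata.gz, data.tar.gz, and checksums.yaml.gz.
      Gem::Package::TarReader.new(io).each do |entry|
        next unless entry.full_name == 'data.tar.gz'
        actual = Digest::SHA256.hexdigest(entry.read)
        puts(actual == expected ? 'data.tar.gz checksum OK' : 'data.tar.gz checksum MISMATCH')
      end
    end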
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,13 @@
+# 0.18.1
+- Add [module function](https://yoshoku.github.io/rumale/doc/Rumale/EvaluationMeasure.html#classification_report-class_method) for generating summary of classification performance.
+- Delete marshal dump and load methods for documentation.
+  The marshal methods are written in estimator classes for indicating on API documentation that the learned model can be saved with Marshal.
+  Even without these methods, Marshal can save the learned model, so they are deleted sequentially.
+  - [Manifold](https://yoshoku.github.io/rumale/doc/Rumale/Manifold.html)
+  - [NaiveBayes](https://yoshoku.github.io/rumale/doc/Rumale/NaiveBayes.html)
+  - [PolynomialModel](https://yoshoku.github.io/rumale/doc/Rumale/PolynomialModel.html)
+  - [Decomposition](https://yoshoku.github.io/doc/Rumale/Decomposition.html)
+
 # 0.18.0
 - Add transformer class for [FisherDiscriminantAnalysis](https://yoshoku.github.io/rumale/doc/Rumale/MetricLearning/FisherDiscriminantAnalysis.html).
 - Add transformer class for [NeighbourhoodComponentAnalysis](https://yoshoku.github.io/rumale/doc/Rumale/MetricLearning/NeighbourhoodComponentAnalysis.html).
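The changelog note above relies on the fact that Ruby's default Marshal behaviour already serializes an object's instance variables, so the explicit marshal_dump/marshal_load definitions are redundant. A minimal sketch illustrating this with one of the affected classes; the toy data, the file name, and the choice of GaussianNB are illustrative, not part of the release:

    require 'rumale'

    # Illustrative toy data: four samples, two features, binary labels.
    x = Numo::DFloat[[1.0, 2.0], [1.5, 1.8], [5.0, 8.0], [6.0, 9.0]]
    y = Numo::Int32[0, 0, 1, 1]

    # GaussianNB is one of the classes whose explicit marshal methods were removed.
    estimator = Rumale::NaiveBayes::GaussianNB.new
    estimator.fit(x, y)

    # Default Marshal still serializes the learned model (params, class priors, means, variances).
    File.binwrite('gaussian_nb.model', Marshal.dump(estimator))
    restored = Marshal.load(File.binread('gaussian_nb.model'))

    # The restored estimator predicts the same labels as the original one.
    p restored.predict(x).to_a == estimator.predict(x).to_a # => true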
data/lib/rumale.rb
CHANGED
@@ -122,3 +122,4 @@ require 'rumale/evaluation_measure/normalized_mutual_information'
 require 'rumale/evaluation_measure/silhouette_score'
 require 'rumale/evaluation_measure/davies_bouldin_score'
 require 'rumale/evaluation_measure/calinski_harabasz_score'
+require 'rumale/evaluation_measure/function'
data/lib/rumale/decomposition/factor_analysis.rb
CHANGED
@@ -128,28 +128,6 @@ module Rumale
         @params[:n_components] == 1 ? z[true, 0].dup : z
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          mean: @mean,
-          noise_variance: @noise_variance,
-          components: @components,
-          loglike: @loglike,
-          n_iter: @n_iter }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @mean = obj[:mean]
-        @noise_variance = obj[:noise_variance]
-        @components = obj[:components]
-        @loglike = obj[:loglike]
-        @n_iter = obj[:n_iter]
-      end
-
       private
 
       def log_likelihood(cov_mat, factors, noise_vars)
data/lib/rumale/decomposition/fast_ica.rb
CHANGED
@@ -123,29 +123,6 @@ module Rumale
         x
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          components: @components,
-          mixing: @mixing,
-          n_iter: @n_iter,
-          mean: @mean,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @components = obj[:components]
-        @mixing = obj[:mixing]
-        @n_iter = obj[:n_iter]
-        @mean = obj[:mean]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def whitening(x, n_components)
data/lib/rumale/decomposition/nmf.rb
CHANGED
@@ -89,23 +89,6 @@ module Rumale
         z.dot(@components)
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          components: @components,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @components = obj[:components]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def partial_fit(x, update_comps = true)
data/lib/rumale/decomposition/pca.rb
CHANGED
@@ -125,25 +125,6 @@ module Rumale
         z.dot(c) + @mean
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          components: @components,
-          mean: @mean,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @components = obj[:components]
-        @mean = obj[:mean]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def orthogonalize(pcvec)
data/lib/rumale/evaluation_measure/function.rb
ADDED
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'rumale/validation'
+require 'rumale/evaluation_measure/accuracy'
+require 'rumale/evaluation_measure/precision_recall'
+
+module Rumale
+  module EvaluationMeasure
+    module_function
+
+    # Output a summary of classification performance for each class.
+    #
+    # @example
+    #   y_true = Numo::Int32[0, 1, 1, 2, 2, 2, 0]
+    #   y_pred = Numo::Int32[1, 1, 1, 0, 0, 2, 0]
+    #   puts Rumale::EvaluationMeasure.classification_report(y_true, y_pred)
+    #
+    #   #               precision    recall  f1-score   support
+    #   #
+    #   #            0       0.33      0.50      0.40         2
+    #   #            1       0.67      1.00      0.80         2
+    #   #            2       1.00      0.33      0.50         3
+    #   #
+    #   #     accuracy                           0.57         7
+    #   #    macro avg       0.67      0.61      0.57         7
+    #   # weighted avg       0.71      0.57      0.56         7
+    #
+    # @param y_true [Numo::Int32] (shape: [n_samples]) The ground truth labels.
+    # @param y_pred [Numo::Int32] (shape: [n_samples]) The predicted labels.
+    # @param target_name [Nil/Array] The label names.
+    # @param output_hash [Boolean] The flag indicating whether to output with Ruby Hash.
+    # @return [String/Hash] The summary of classification performance.
+    #   If output_hash is true, it returns the summary with Ruby Hash.
+    def classification_report(y_true, y_pred, target_name: nil, output_hash: false)
+      y_true = Rumale::Validation.check_convert_label_array(y_true)
+      y_pred = Rumale::Validation.check_convert_label_array(y_pred)
+      # calculate each evaluation measure.
+      supports = y_true.bincount
+      precisions = Rumale::EvaluationMeasure::PrecisionRecall.precision_each_class(y_true, y_pred)
+      recalls = Rumale::EvaluationMeasure::PrecisionRecall.recall_each_class(y_true, y_pred)
+      fscores = Rumale::EvaluationMeasure::PrecisionRecall.f_score_each_class(y_true, y_pred)
+      macro_precision = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_precision(y_true, y_pred)
+      macro_recall = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_recall(y_true, y_pred)
+      macro_fscore = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_f_score(y_true, y_pred)
+      accuracy = Rumale::EvaluationMeasure::Accuracy.new.score(y_true, y_pred)
+      sum_supports = supports.sum
+      weights = Numo::DFloat.cast(supports) / sum_supports
+      weighted_precision = (Numo::DFloat.cast(precisions) * weights).sum
+      weighted_recall = (Numo::DFloat.cast(recalls) * weights).sum
+      weighted_fscore = (Numo::DFloat.cast(fscores) * weights).sum
+      # output reults.
+      target_name ||= y_true.to_a.uniq.sort.map(&:to_s)
+      if output_hash
+        res = {}
+        target_name.each_with_index do |label, n|
+          res[label] = {
+            precision: precisions[n],
+            recall: recalls[n],
+            fscore: fscores[n],
+            support: supports[n]
+          }
+        end
+        res[:accuracy] = accuracy
+        res[:macro_avg] = {
+          precision: macro_precision,
+          recall: macro_recall,
+          fscore: macro_fscore,
+          support: sum_supports
+        }
+        res[:weighted_avg] = {
+          precision: weighted_precision,
+          recall: weighted_recall,
+          fscore: weighted_fscore,
+          support: sum_supports
+        }
+        res
+      else
+        width = ['weighted avg'.size, target_name.map(&:size).max].max
+        res = +''
+        res << "#{' ' * width}  precision    recall  f1-score   support\n"
+        res << "\n"
+        target_name.each_with_index do |label, n|
+          label_str = format("%##{width}s", label)
+          precision_str = format('%#10s', format('%.2f', precisions[n]))
+          recall_str = format('%#10s', format('%.2f', recalls[n]))
+          fscore_str = format('%#10s', format('%.2f', fscores[n]))
+          supports_str = format('%#10s', supports[n])
+          res << "#{label_str} #{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
+        end
+        res << "\n"
+        supports_str = format('%#10s', sum_supports)
+        accuracy_str = format('%#30s', format('%.2f', accuracy))
+        res << format("%##{width}s ", 'accuracy')
+        res << "#{accuracy_str}#{supports_str}\n"
+        precision_str = format('%#10s', format('%.2f', macro_precision))
+        recall_str = format('%#10s', format('%.2f', macro_recall))
+        fscore_str = format('%#10s', format('%.2f', macro_fscore))
+        res << format("%##{width}s ", 'macro avg')
+        res << "#{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
+        precision_str = format('%#10s', format('%.2f', weighted_precision))
+        recall_str = format('%#10s', format('%.2f', weighted_recall))
+        fscore_str = format('%#10s', format('%.2f', weighted_fscore))
+        res << format("%##{width}s ", 'weighted avg')
+        res << "#{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
+        res
+      end
+    end
+  end
+end
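The @example in the file added above shows the formatted string report; the same function can return a Ruby Hash when output_hash is true. A short usage sketch based on the signature in the diff; the target_name values are illustrative:

    require 'rumale'

    y_true = Numo::Int32[0, 1, 1, 2, 2, 2, 0]
    y_pred = Numo::Int32[1, 1, 1, 0, 0, 2, 0]

    # Hash report keyed by label name, plus :accuracy, :macro_avg, and :weighted_avg entries.
    report = Rumale::EvaluationMeasure.classification_report(
      y_true, y_pred, target_name: %w[setosa versicolor virginica], output_hash: true
    )
    p report['versicolor']        # => { precision: ..., recall: ..., fscore: ..., support: 2 }
    p report[:accuracy]           # overall accuracy (about 0.57 for this toy data)
    p report[:macro_avg][:fscore]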
data/lib/rumale/manifold/mds.rb
CHANGED
@@ -126,27 +126,6 @@ module Rumale
         @embedding.dup
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          embedding: @embedding,
-          stress: @stress,
-          n_iter: @n_iter,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @embedding = obj[:embedding]
-        @stress = obj[:stress]
-        @n_iter = obj[:n_iter]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def init_embedding(x)
data/lib/rumale/manifold/tsne.rb
CHANGED
@@ -126,27 +126,6 @@ module Rumale
         @embedding.dup
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data.
-      def marshal_dump
-        { params: @params,
-          embedding: @embedding,
-          kl_divergence: @kl_divergence,
-          n_iter: @n_iter,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @embedding = obj[:embedding]
-        @kl_divergence = obj[:kl_divergence]
-        @n_iter = obj[:n_iter]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def init_embedding(x)
data/lib/rumale/naive_bayes/naive_bayes.rb
CHANGED
@@ -103,29 +103,6 @@ module Rumale
         end
         Numo::DFloat[*log_likelihoods].transpose
       end
-
-      # Dump marshal data.
-      #
-      # @return [Hash] The marshal data about GaussianNB.
-      def marshal_dump
-        { params: @params,
-          classes: @classes,
-          class_priors: @class_priors,
-          means: @means,
-          variances: @variances }
-      end
-
-      # Load marshal data.
-      #
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @classes = obj[:classes]
-        @class_priors = obj[:class_priors]
-        @means = obj[:means]
-        @variances = obj[:variances]
-        nil
-      end
     end
 
     # MultinomialNB is a class that implements Multinomial Naive Bayes classifier.
@@ -193,27 +170,6 @@ module Rumale
         end
         Numo::DFloat[*log_likelihoods].transpose
       end
-
-      # Dump marshal data.
-      #
-      # @return [Hash] The marshal data about MultinomialNB.
-      def marshal_dump
-        { params: @params,
-          classes: @classes,
-          class_priors: @class_priors,
-          feature_probs: @feature_probs }
-      end
-
-      # Load marshal data.
-      #
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @classes = obj[:classes]
-        @class_priors = obj[:class_priors]
-        @feature_probs = obj[:feature_probs]
-        nil
-      end
     end
 
     # BernoulliNB is a class that implements Bernoulli Naive Bayes classifier.
@@ -289,27 +245,6 @@ module Rumale
         end
         Numo::DFloat[*log_likelihoods].transpose
       end
-
-      # Dump marshal data.
-      #
-      # @return [Hash] The marshal data about BernoulliNB.
-      def marshal_dump
-        { params: @params,
-          classes: @classes,
-          class_priors: @class_priors,
-          feature_probs: @feature_probs }
-      end
-
-      # Load marshal data.
-      #
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @classes = obj[:classes]
-        @class_priors = obj[:class_priors]
-        @feature_probs = obj[:feature_probs]
-        nil
-      end
     end
   end
 end
data/lib/rumale/polynomial_model/factorization_machine_classifier.rb
CHANGED
@@ -171,29 +171,6 @@ module Rumale
         probs
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about FactorizationMachineClassifier.
-      def marshal_dump
-        { params: @params,
-          factor_mat: @factor_mat,
-          weight_vec: @weight_vec,
-          bias_term: @bias_term,
-          classes: @classes,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @factor_mat = obj[:factor_mat]
-        @weight_vec = obj[:weight_vec]
-        @bias_term = obj[:bias_term]
-        @classes = obj[:classes]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def bin_decision_function(x, ex_x, factor, weight)
data/lib/rumale/polynomial_model/factorization_machine_regressor.rb
CHANGED
@@ -113,27 +113,6 @@ module Rumale
         linear_term + factor_term
       end
 
-      # Dump marshal data.
-      # @return [Hash] The marshal data about FactorizationMachineRegressor.
-      def marshal_dump
-        { params: @params,
-          factor_mat: @factor_mat,
-          weight_vec: @weight_vec,
-          bias_term: @bias_term,
-          rng: @rng }
-      end
-
-      # Load marshal data.
-      # @return [nil]
-      def marshal_load(obj)
-        @params = obj[:params]
-        @factor_mat = obj[:factor_mat]
-        @weight_vec = obj[:weight_vec]
-        @bias_term = obj[:bias_term]
-        @rng = obj[:rng]
-        nil
-      end
-
       private
 
       def loss_func(x, ex_x, y, factor, weight)
data/lib/rumale/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rumale
 version: !ruby/object:Gem::Version
-  version: 0.18.0
+  version: 0.18.1
 platform: ruby
 authors:
 - yoshoku
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2020-03-
+date: 2020-03-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray
@@ -223,6 +223,7 @@ files:
 - lib/rumale/evaluation_measure/davies_bouldin_score.rb
 - lib/rumale/evaluation_measure/explained_variance_score.rb
 - lib/rumale/evaluation_measure/f_score.rb
+- lib/rumale/evaluation_measure/function.rb
 - lib/rumale/evaluation_measure/log_loss.rb
 - lib/rumale/evaluation_measure/mean_absolute_error.rb
 - lib/rumale/evaluation_measure/mean_squared_error.rb