rumale 0.18.5 → 0.19.2

Files changed (93)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +15 -3
  3. data/.travis.yml +3 -3
  4. data/CHANGELOG.md +44 -0
  5. data/Gemfile +9 -0
  6. data/README.md +6 -44
  7. data/lib/rumale.rb +3 -0
  8. data/lib/rumale/base/base_estimator.rb +2 -0
  9. data/lib/rumale/clustering/dbscan.rb +5 -1
  10. data/lib/rumale/clustering/gaussian_mixture.rb +2 -0
  11. data/lib/rumale/clustering/hdbscan.rb +5 -3
  12. data/lib/rumale/clustering/k_means.rb +2 -1
  13. data/lib/rumale/clustering/k_medoids.rb +5 -1
  14. data/lib/rumale/clustering/mini_batch_k_means.rb +139 -0
  15. data/lib/rumale/clustering/power_iteration.rb +3 -1
  16. data/lib/rumale/clustering/single_linkage.rb +3 -1
  17. data/lib/rumale/clustering/snn.rb +2 -2
  18. data/lib/rumale/clustering/spectral_clustering.rb +2 -2
  19. data/lib/rumale/dataset.rb +2 -0
  20. data/lib/rumale/decomposition/factor_analysis.rb +3 -1
  21. data/lib/rumale/decomposition/fast_ica.rb +2 -2
  22. data/lib/rumale/decomposition/nmf.rb +1 -1
  23. data/lib/rumale/decomposition/pca.rb +25 -6
  24. data/lib/rumale/ensemble/ada_boost_classifier.rb +4 -1
  25. data/lib/rumale/ensemble/ada_boost_regressor.rb +4 -2
  26. data/lib/rumale/ensemble/extra_trees_classifier.rb +1 -1
  27. data/lib/rumale/ensemble/extra_trees_regressor.rb +1 -1
  28. data/lib/rumale/ensemble/gradient_boosting_classifier.rb +4 -4
  29. data/lib/rumale/ensemble/gradient_boosting_regressor.rb +4 -4
  30. data/lib/rumale/evaluation_measure/adjusted_rand_score.rb +1 -1
  31. data/lib/rumale/evaluation_measure/calinski_harabasz_score.rb +1 -1
  32. data/lib/rumale/evaluation_measure/davies_bouldin_score.rb +1 -1
  33. data/lib/rumale/evaluation_measure/function.rb +2 -1
  34. data/lib/rumale/evaluation_measure/mutual_information.rb +1 -1
  35. data/lib/rumale/evaluation_measure/normalized_mutual_information.rb +4 -2
  36. data/lib/rumale/evaluation_measure/precision_recall.rb +5 -0
  37. data/lib/rumale/evaluation_measure/purity.rb +1 -1
  38. data/lib/rumale/evaluation_measure/roc_auc.rb +3 -0
  39. data/lib/rumale/evaluation_measure/silhouette_score.rb +3 -1
  40. data/lib/rumale/feature_extraction/feature_hasher.rb +14 -1
  41. data/lib/rumale/feature_extraction/hash_vectorizer.rb +1 -0
  42. data/lib/rumale/feature_extraction/tfidf_transformer.rb +113 -0
  43. data/lib/rumale/kernel_approximation/nystroem.rb +1 -1
  44. data/lib/rumale/kernel_approximation/rbf.rb +1 -1
  45. data/lib/rumale/kernel_machine/kernel_fda.rb +1 -1
  46. data/lib/rumale/kernel_machine/kernel_pca.rb +1 -1
  47. data/lib/rumale/kernel_machine/kernel_ridge.rb +2 -0
  48. data/lib/rumale/kernel_machine/kernel_svc.rb +1 -1
  49. data/lib/rumale/linear_model/base_linear_model.rb +2 -0
  50. data/lib/rumale/linear_model/elastic_net.rb +3 -3
  51. data/lib/rumale/linear_model/lasso.rb +3 -3
  52. data/lib/rumale/linear_model/linear_regression.rb +2 -1
  53. data/lib/rumale/linear_model/logistic_regression.rb +3 -3
  54. data/lib/rumale/linear_model/ridge.rb +2 -1
  55. data/lib/rumale/linear_model/svc.rb +3 -3
  56. data/lib/rumale/linear_model/svr.rb +3 -3
  57. data/lib/rumale/manifold/mds.rb +3 -1
  58. data/lib/rumale/manifold/tsne.rb +6 -2
  59. data/lib/rumale/metric_learning/neighbourhood_component_analysis.rb +14 -1
  60. data/lib/rumale/model_selection/grid_search_cv.rb +1 -0
  61. data/lib/rumale/naive_bayes/bernoulli_nb.rb +1 -1
  62. data/lib/rumale/naive_bayes/multinomial_nb.rb +1 -1
  63. data/lib/rumale/nearest_neighbors/k_neighbors_classifier.rb +1 -0
  64. data/lib/rumale/nearest_neighbors/k_neighbors_regressor.rb +2 -0
  65. data/lib/rumale/nearest_neighbors/vp_tree.rb +1 -1
  66. data/lib/rumale/neural_network/adam.rb +2 -2
  67. data/lib/rumale/neural_network/base_mlp.rb +1 -0
  68. data/lib/rumale/optimizer/ada_grad.rb +4 -1
  69. data/lib/rumale/optimizer/adam.rb +4 -1
  70. data/lib/rumale/optimizer/nadam.rb +6 -1
  71. data/lib/rumale/optimizer/rmsprop.rb +5 -2
  72. data/lib/rumale/optimizer/sgd.rb +3 -0
  73. data/lib/rumale/optimizer/yellow_fin.rb +4 -1
  74. data/lib/rumale/pipeline/pipeline.rb +3 -0
  75. data/lib/rumale/polynomial_model/base_factorization_machine.rb +5 -0
  76. data/lib/rumale/polynomial_model/factorization_machine_classifier.rb +7 -2
  77. data/lib/rumale/polynomial_model/factorization_machine_regressor.rb +7 -2
  78. data/lib/rumale/preprocessing/l1_normalizer.rb +62 -0
  79. data/lib/rumale/preprocessing/l2_normalizer.rb +2 -1
  80. data/lib/rumale/preprocessing/one_hot_encoder.rb +3 -0
  81. data/lib/rumale/preprocessing/ordinal_encoder.rb +2 -0
  82. data/lib/rumale/preprocessing/polynomial_features.rb +1 -0
  83. data/lib/rumale/probabilistic_output.rb +4 -2
  84. data/lib/rumale/tree/base_decision_tree.rb +2 -0
  85. data/lib/rumale/tree/decision_tree_classifier.rb +1 -0
  86. data/lib/rumale/tree/extra_tree_classifier.rb +1 -1
  87. data/lib/rumale/tree/extra_tree_regressor.rb +1 -1
  88. data/lib/rumale/tree/gradient_tree_regressor.rb +5 -5
  89. data/lib/rumale/utils.rb +1 -0
  90. data/lib/rumale/validation.rb +7 -0
  91. data/lib/rumale/version.rb +1 -1
  92. data/rumale.gemspec +1 -13
  93. metadata +10 -133
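
The headline additions in this release are three new estimator files: clustering/mini_batch_k_means.rb, feature_extraction/tfidf_transformer.rb, and preprocessing/l1_normalizer.rb. A minimal usage sketch of the three — hedged: the keyword arguments follow Rumale's usual conventions and `samples`/`tf` are placeholder Numo arrays, not data from this diff:

    require 'rumale'

    samples = Numo::DFloat.new(100, 2).rand

    # MiniBatchKMeans: k-means updated on random mini-batches.
    analyzer = Rumale::Clustering::MiniBatchKMeans.new(n_clusters: 3, batch_size: 50, random_seed: 1)
    cluster_labels = analyzer.fit_predict(samples)

    # TfidfTransformer: rescales a term-frequency matrix by inverse document frequency.
    tf = Numo::DFloat[[1.0, 1.0, 0.0], [2.0, 0.0, 1.0]]
    tfidf = Rumale::FeatureExtraction::TfidfTransformer.new.fit_transform(tf)

    # L1Normalizer: scales each sample to unit L1 norm.
    normalized = Rumale::Preprocessing::L1Normalizer.new.fit_transform(samples)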
data/lib/rumale/clustering/power_iteration.rb

@@ -13,7 +13,7 @@ module Rumale
   # cluster_labels = analyzer.fit_predict(samples)
   #
   # *Reference*
- # - F. Lin and W W. Cohen, "Power Iteration Clustering," Proc. ICML'10, pp. 655--662, 2010.
+ # - Lin, F., and Cohen, W W., "Power Iteration Clustering," Proc. ICML'10, pp. 655--662, 2010.
   class PowerIteration
     include Base::BaseEstimator
     include Base::ClusterAnalyzer
@@ -71,6 +71,7 @@ module Rumale
   def fit(x, _y = nil)
     x = check_convert_sample_array(x)
     raise ArgumentError, 'Expect the input affinity matrix to be square.' if @params[:affinity] == 'precomputed' && x.shape[0] != x.shape[1]
+
     fit_predict(x)
     self
   end
@@ -107,6 +108,7 @@ module Rumale
     new_embedded_line /= new_embedded_line.abs.sum
     new_error = (new_embedded_line - embedded_line).abs
     break if (new_error - error).abs.max <= tol
+
     embedded_line = new_embedded_line
     error = new_error
   end

data/lib/rumale/clustering/single_linkage.rb

@@ -15,7 +15,7 @@ module Rumale
   # cluster_labels = analyzer.fit_predict(samples)
   #
   # *Reference*
- # - D. Mullner, "Modern hierarchical, agglomerative clustering algorithms," arXiv:1109.2378, 2011.
+ # - Mullner, D., "Modern hierarchical, agglomerative clustering algorithms," arXiv:1109.2378, 2011.
   class SingleLinkage
     include Base::BaseEstimator
     include Base::ClusterAnalyzer
@@ -54,6 +54,7 @@ module Rumale
   def fit(x, _y = nil)
     x = check_convert_sample_array(x)
     raise ArgumentError, 'Expect the input distance matrix to be square.' if @params[:metric] == 'precomputed' && x.shape[0] != x.shape[1]
+
     fit_predict(x)
     self
   end
@@ -66,6 +67,7 @@ module Rumale
   def fit_predict(x)
     x = check_convert_sample_array(x)
     raise ArgumentError, 'Expect the input distance matrix to be square.' if @params[:metric] == 'precomputed' && x.shape[0] != x.shape[1]
+
     distance_mat = @params[:metric] == 'precomputed' ? x : Rumale::PairwiseMetric.euclidean_distance(x)
     @labels = partial_fit(distance_mat)
   end

data/lib/rumale/clustering/snn.rb

@@ -13,8 +13,8 @@ module Rumale
   # cluster_labels = analyzer.fit_predict(samples)
   #
   # *Reference*
- # - L. Ertoz, M. Steinbach, and V. Kumar, "Finding Clusters of Different Sizes, Shapes, and Densities in Noisy, High Dimensional Data," Proc. SDM'03, pp. 47--58, 2003.
- # - M E. Houle, H-P. Kriegel, P. Kroger, E. Schubert, and A. Zimek, "Can Shared-Neighbor Distances Defeat the Curse of Dimensionality?," Proc. SSDBM'10, pp. 482--500, 2010.
+ # - Ertoz, L., Steinbach, M., and Kumar, V., "Finding Clusters of Different Sizes, Shapes, and Densities in Noisy, High Dimensional Data," Proc. SDM'03, pp. 47--58, 2003.
+ # - Houle, M E., Kriegel, H-P., Kroger, P., Schubert, E., and Zimek, A., "Can Shared-Neighbor Distances Defeat the Curse of Dimensionality?," Proc. SSDBM'10, pp. 482--500, 2010.
   class SNN < DBSCAN
     # Create a new cluster analyzer with Shared Neareset Neighbor method.
     #

data/lib/rumale/clustering/spectral_clustering.rb

@@ -16,8 +16,8 @@ module Rumale
   # cluster_labels = analyzer.fit_predict(samples)
   #
   # *Reference*
- # - A Y. Ng, M I. Jordan, and Y. Weiss, "On Spectral Clustering: Analyssi and an algorithm," Proc. NIPS'01, pp. 849--856, 2001.
- # - U von Luxburg, "A tutorial on spectral clustering," Statistics and Computing, Vol. 17 (4), pp. 395--416, 2007.
+ # - Ng, A Y., Jordan, M I., and Weiss, Y., "On Spectral Clustering: Analyssi and an algorithm," Proc. NIPS'01, pp. 849--856, 2001.
+ # - von Luxburg, U., "A tutorial on spectral clustering," Statistics and Computing, Vol. 17 (4), pp. 395--416, 2007.
   class SpectralClustering
     include Base::BaseEstimator
     include Base::ClusterAnalyzer

data/lib/rumale/dataset.rb

@@ -65,6 +65,7 @@ module Rumale
     Rumale::Validation.check_params_numeric_or_nil(noise: noise, random_seed: random_seed)
     raise ArgumentError, 'The number of samples must be more than 2.' if n_samples <= 1
     raise RangeError, 'The interval of factor is (0, 1).' if factor <= 0 || factor >= 1
+
     # initialize some variables.
     rs = random_seed
     rs ||= srand
@@ -101,6 +102,7 @@ module Rumale
     Rumale::Validation.check_params_boolean(shuffle: shuffle)
     Rumale::Validation.check_params_numeric_or_nil(noise: noise, random_seed: random_seed)
     raise ArgumentError, 'The number of samples must be more than 2.' if n_samples <= 1
+
     # initialize some variables.
     rs = random_seed
     rs ||= srand

data/lib/rumale/decomposition/factor_analysis.rb

@@ -14,7 +14,7 @@ module Rumale
   # representaion = decomposer.fit_transform(samples)
   #
   # *Reference*
- # - D. Barber, "Bayesian Reasoning and Machine Learning," Cambridge University Press, 2012.
+ # - Barber, D., "Bayesian Reasoning and Machine Learning," Cambridge University Press, 2012.
   class FactorAnalysis
     include Base::BaseEstimator
     include Base::Transformer
@@ -90,9 +90,11 @@ module Rumale
     @components = (sqrt_noise_variance.diag.dot(u) * scaler).transpose.dup
     @noise_variance = Numo::DFloat.maximum(sample_vars - @components.transpose.dot(@components).diagonal, 1e-12)
     next if @params[:tol].nil?
+
     new_loglike = log_likelihood(cov_mat, @components, @noise_variance)
     @loglike.push(new_loglike)
     break if (old_loglike - new_loglike).abs <= @params[:tol]
+
     old_loglike = new_loglike
   end

data/lib/rumale/decomposition/fast_ica.rb

@@ -14,8 +14,8 @@ module Rumale
   # source_data = transformer.fit_transform(observed_data)
   #
   # *Reference*
- # - A. Hyvarinen "Fast and Robust Fixed-Point Algorithms for Independent Component Analysis," IEEE Trans. Neural Networks, Vol. 10 (3), pp. 626--634, 1999.
- # - A. Hyvarinen and E. Oja, "Independent Component Analysis: Algorithms and Applications," Neural Networks, Vol. 13 (4-5), pp. 411--430, 2000.
+ # - Hyvarinen, A., "Fast and Robust Fixed-Point Algorithms for Independent Component Analysis," IEEE Trans. Neural Networks, Vol. 10 (3), pp. 626--634, 1999.
+ # - Hyvarinen, A., and Oja, E., "Independent Component Analysis: Algorithms and Applications," Neural Networks, Vol. 13 (4-5), pp. 411--430, 2000.
   class FastICA
     include Base::BaseEstimator
     include Base::Transformer

data/lib/rumale/decomposition/nmf.rb

@@ -13,7 +13,7 @@ module Rumale
   # representaion = decomposer.fit_transform(samples)
   #
   # *Reference*
- # - W. Xu, X. Liu, and Y.Gong, "Document Clustering Based On Non-negative Matrix Factorization," Proc. SIGIR' 03 , pp. 267--273, 2003.
+ # - Xu, W., Liu, X., and Gong, Y., "Document Clustering Based On Non-negative Matrix Factorization," Proc. SIGIR' 03 , pp. 267--273, 2003.
   class NMF
     include Base::BaseEstimator
     include Base::Transformer

data/lib/rumale/decomposition/pca.rb

@@ -9,7 +9,7 @@ module Rumale
   # PCA is a class that implements Principal Component Analysis.
   #
   # @example
- #   decomposer = Rumale::Decomposition::PCA.new(n_components: 2)
+ #   decomposer = Rumale::Decomposition::PCA.new(n_components: 2, solver: 'fpt')
   #   representaion = decomposer.fit_transform(samples)
   #
   #   # If Numo::Linalg is installed, you can specify 'evd' for the solver option.
@@ -17,8 +17,13 @@ module Rumale
   #   decomposer = Rumale::Decomposition::PCA.new(n_components: 2, solver: 'evd')
   #   representaion = decomposer.fit_transform(samples)
   #
+ #   # If Numo::Linalg is loaded and the solver option is not given,
+ #   # the solver option is choosen 'evd' automatically.
+ #   decomposer = Rumale::Decomposition::PCA.new(n_components: 2)
+ #   representaion = decomposer.fit_transform(samples)
+ #
   # *Reference*
- # - A. Sharma and K K. Paliwal, "Fast principal component analysis using fixed-point algorithm," Pattern Recognition Letters, 28, pp. 1151--1155, 2007.
+ # - Sharma, A., and Paliwal, K K., "Fast principal component analysis using fixed-point algorithm," Pattern Recognition Letters, 28, pp. 1151--1155, 2007.
   class PCA
     include Base::BaseEstimator
     include Base::Transformer
@@ -38,18 +43,24 @@ module Rumale
   # Create a new transformer with PCA.
   #
   # @param n_components [Integer] The number of principal components.
- # @param solver [String] The algorithm for the optimization ('fpt' or 'evd').
- #   'fpt' uses the fixed-point algorithm. 'evd' performs eigen value decomposition of the covariance matrix of samples.
+ # @param solver [String] The algorithm for the optimization ('auto', 'fpt' or 'evd').
+ #   'auto' chooses the 'evd' solver if Numo::Linalg is loaded. Otherwise, it chooses the 'fpt' solver.
+ #   'fpt' uses the fixed-point algorithm.
+ #   'evd' performs eigen value decomposition of the covariance matrix of samples.
   # @param max_iter [Integer] The maximum number of iterations. If solver = 'evd', this parameter is ignored.
   # @param tol [Float] The tolerance of termination criterion. If solver = 'evd', this parameter is ignored.
   # @param random_seed [Integer] The seed value using to initialize the random generator.
- def initialize(n_components: 2, solver: 'fpt', max_iter: 100, tol: 1.0e-4, random_seed: nil)
+ def initialize(n_components: 2, solver: 'auto', max_iter: 100, tol: 1.0e-4, random_seed: nil)
     check_params_numeric(n_components: n_components, max_iter: max_iter, tol: tol)
     check_params_string(solver: solver)
     check_params_numeric_or_nil(random_seed: random_seed)
     check_params_positive(n_components: n_components, max_iter: max_iter, tol: tol)
     @params = {}
-   @params[:solver] = solver != 'evd' ? 'fpt' : 'evd'
+   @params[:solver] = if solver == 'auto'
+                        load_linalg? ? 'evd' : 'fpt'
+                      else
+                        solver != 'evd' ? 'fpt' : 'evd'
+                      end
     @params[:n_components] = n_components
     @params[:max_iter] = max_iter
     @params[:tol] = tol
@@ -87,6 +98,7 @@ module Rumale
     @params[:max_iter].times do
       updated = orthogonalize(covariance_mat.dot(comp_vec))
       break if (updated.dot(comp_vec) - 1).abs < @params[:tol]
+
       comp_vec = updated
     end
     @components = @components.nil? ? comp_vec : Numo::NArray.vstack([@components, comp_vec])
@@ -127,6 +139,13 @@ module Rumale

   private

+ def load_linalg?
+   return false if defined?(Numo::Linalg).nil?
+   return false if Numo::Linalg::VERSION < '0.1.4'
+
+   true
+ end
+
   def orthogonalize(pcvec)
     unless @components.nil?
       delta = @components.dot(pcvec) * @components.transpose
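
Taken together, the PCA hunks above make solver default to 'auto', which resolves to eigenvalue decomposition whenever Numo::Linalg (>= 0.1.4) is loaded and otherwise falls back to the fixed-point algorithm. A minimal sketch of the new default path (`samples` is a placeholder array, not data from this diff):

    require 'numo/linalg/autoloader' # optional: when Numo::Linalg is loaded, 'auto' resolves to 'evd'
    require 'rumale'

    samples = Numo::DFloat.new(50, 5).rand

    decomposer = Rumale::Decomposition::PCA.new(n_components: 2) # solver: 'auto' is the new default
    low_dim = decomposer.fit_transform(samples)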
data/lib/rumale/ensemble/ada_boost_classifier.rb

@@ -19,7 +19,7 @@ module Rumale
   # results = estimator.predict(testing_samples)
   #
   # *Reference*
- # - J. Zhu, S. Rosset, H. Zou, and T.Hashie, "Multi-class AdaBoost," Technical Report No. 430, Department of Statistics, University of Michigan, 2005.
+ # - Zhu, J., Rosset, S., Zou, H., and Hashie, T., "Multi-class AdaBoost," Technical Report No. 430, Department of Statistics, University of Michigan, 2005.
   class AdaBoostClassifier
     include Base::BaseEstimator
     include Base::Classifier
@@ -105,6 +105,7 @@ module Rumale
     # Fit classfier.
     ids = Rumale::Utils.choice_ids(n_samples, observation_weights, sub_rng)
     break if y[ids].to_a.uniq.size != n_classes
+
     tree = Tree::DecisionTreeClassifier.new(
       criterion: @params[:criterion], max_depth: @params[:max_depth],
       max_leaf_nodes: @params[:max_leaf_nodes], min_samples_leaf: @params[:min_samples_leaf],
@@ -120,12 +121,14 @@ module Rumale
     @estimators.push(tree)
     @feature_importances += tree.feature_importances
     break if error.zero?
+
     # Update observation weights.
     log_proba = Numo::NMath.log(proba)
     observation_weights *= Numo::NMath.exp(-1.0 * (n_classes - 1).fdiv(n_classes) * (y_codes * log_proba).sum(1))
     observation_weights = observation_weights.clip(1.0e-15, nil)
     sum_observation_weights = observation_weights.sum
     break if sum_observation_weights.zero?
+
     observation_weights /= sum_observation_weights
   end
   @feature_importances /= @feature_importances.sum

data/lib/rumale/ensemble/ada_boost_regressor.rb

@@ -18,8 +18,7 @@ module Rumale
   # results = estimator.predict(testing_samples)
   #
   # *Reference*
- # - D. L. Shrestha and D. P. Solomatine, "Experiments with AdaBoost.RT, an Improved Boosting Scheme for Regression," Neural Computation 18 (7), pp. 1678--1710, 2006.
- #
+ # - Shrestha, D. L., and Solomatine, D. P., "Experiments with AdaBoost.RT, an Improved Boosting Scheme for Regression," Neural Computation 18 (7), pp. 1678--1710, 2006.
   class AdaBoostRegressor
     include Base::BaseEstimator
     include Base::Regressor
@@ -94,6 +93,7 @@ module Rumale
     check_sample_tvalue_size(x, y)
     # Check target values
     raise ArgumentError, 'Expect target value vector to be 1-D arrray' unless y.shape.size == 1
+
     # Initialize some variables.
     n_samples, n_features = x.shape
     @params[:max_features] = n_features unless @params[:max_features].is_a?(Integer)
@@ -118,6 +118,7 @@ module Rumale
     abs_err = ((p - y) / y).abs
     err = observation_weights[abs_err.gt(@params[:threshold])].sum
     break if err <= 0.0
+
     # Calculate weight.
     beta = err**@params[:exponent]
     weight = Math.log(1.fdiv(beta))
@@ -132,6 +133,7 @@ module Rumale
     observation_weights = observation_weights.clip(1.0e-15, nil)
     sum_observation_weights = observation_weights.sum
     break if sum_observation_weights.zero?
+
     observation_weights /= sum_observation_weights
   end
   @estimator_weights = Numo::DFloat.asarray(@estimator_weights)

data/lib/rumale/ensemble/extra_trees_classifier.rb

@@ -18,7 +18,7 @@ module Rumale
   # results = estimator.predict(testing_samples)
   #
   # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
   class ExtraTreesClassifier < RandomForestClassifier
     # Return the set of estimators.
     # @return [Array<ExtraTreeClassifier>]

data/lib/rumale/ensemble/extra_trees_regressor.rb

@@ -18,7 +18,7 @@ module Rumale
   # results = estimator.predict(testing_samples)
   #
   # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
   class ExtraTreesRegressor < RandomForestRegressor
     # Return the set of estimators.
     # @return [Array<ExtraTreeRegressor>]

data/lib/rumale/ensemble/gradient_boosting_classifier.rb

@@ -18,10 +18,10 @@ module Rumale
   # estimator.fit(training_samples, traininig_values)
   # results = estimator.predict(testing_samples)
   #
- # *reference*
- # - J H. Friedman, "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
- # - J H. Friedman, "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
- # - T. Chen and C. Guestrin, "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
+ # *Reference*
+ # - Friedman, J H., "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
+ # - Friedman, J H., "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
+ # - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
   #
   class GradientBoostingClassifier
     include Base::BaseEstimator

data/lib/rumale/ensemble/gradient_boosting_regressor.rb

@@ -17,10 +17,10 @@ module Rumale
   # estimator.fit(training_samples, traininig_values)
   # results = estimator.predict(testing_samples)
   #
- # *reference*
- # - J H. Friedman, "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
- # - J H. Friedman, "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
- # - T. Chen and C. Guestrin, "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
+ # *Reference*
+ # - Friedman, J H. "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
+ # - Friedman, J H. "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
+ # - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
   #
   class GradientBoostingRegressor
     include Base::BaseEstimator

data/lib/rumale/evaluation_measure/adjusted_rand_score.rb

@@ -11,7 +11,7 @@ module Rumale
   # puts evaluator.score(ground_truth, predicted)
   #
   # *Reference*
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance", J. Machine Learnig Research, Vol. 11, pp.2837--2854, 2010.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance", J. Machine Learnig Research, Vol. 11, pp.2837--2854, 2010.
   class AdjustedRandScore
     include Base::Evaluator

data/lib/rumale/evaluation_measure/calinski_harabasz_score.rb

@@ -11,7 +11,7 @@ module Rumale
   # puts evaluator.score(x, predicted)
   #
   # *Reference*
- # - T. Calinski and J. Harabsz, "A dendrite method for cluster analysis," Communication in Statistics, Vol. 3 (1), pp. 1--27, 1972.
+ # - Calinski, T., and Harabsz, J., "A dendrite method for cluster analysis," Communication in Statistics, Vol. 3 (1), pp. 1--27, 1972.
   class CalinskiHarabaszScore
     include Base::Evaluator

data/lib/rumale/evaluation_measure/davies_bouldin_score.rb

@@ -12,7 +12,7 @@ module Rumale
   # puts evaluator.score(x, predicted)
   #
   # *Reference*
- # - D L. Davies and D W. Bouldin, "A Cluster Separation Measure," IEEE Trans. Pattern Analysis and Machine Intelligence, Vol. PAMI-1, No. 2, pp. 224--227, 1979.
+ # - Davies, D L., and Bouldin, D W., "A Cluster Separation Measure," IEEE Trans. Pattern Analysis and Machine Intelligence, Vol. PAMI-1, No. 2, pp. 224--227, 1979.
   class DaviesBouldinScore
     include Base::Evaluator

data/lib/rumale/evaluation_measure/function.rb

@@ -86,7 +86,8 @@ module Rumale
     weighted_recall = (Numo::DFloat.cast(recalls) * weights).sum
     weighted_fscore = (Numo::DFloat.cast(fscores) * weights).sum
     # output reults.
-   target_name ||= classes.map(&:to_s)
+   target_name ||= classes
+   target_name.map!(&:to_s)
     if output_hash
       res = {}
       target_name.each_with_index do |label, n|
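
The target_name change above appears to sit in the classification_report helper of evaluation_measure/function.rb: the label list now falls back to the class list and is stringified in place. A short usage sketch, assuming that helper name and 1-D Numo::Int32 label arrays:

    require 'rumale'

    y_true = Numo::Int32[0, 0, 1, 1, 2, 2]
    y_pred = Numo::Int32[0, 1, 1, 1, 2, 0]

    # String report by default; output_hash: true returns a Hash keyed by the
    # stringified class labels, which is what the target_name change affects.
    puts Rumale::EvaluationMeasure.classification_report(y_true, y_pred)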
data/lib/rumale/evaluation_measure/mutual_information.rb

@@ -11,7 +11,7 @@ module Rumale
   # puts evaluator.score(ground_truth, predicted)
   #
   # *Reference*
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
   class MutualInformation
     include Base::Evaluator

data/lib/rumale/evaluation_measure/normalized_mutual_information.rb

@@ -12,8 +12,8 @@ module Rumale
   # puts evaluator.score(ground_truth, predicted)
   #
   # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
   class NormalizedMutualInformation
     include Base::Evaluator
@@ -28,8 +28,10 @@ module Rumale
     # calculate entropies.
     class_entropy = entropy(y_true)
     return 0.0 if class_entropy.zero?
+
     cluster_entropy = entropy(y_pred)
     return 0.0 if cluster_entropy.zero?
+
     # calculate mutual information.
     mi = MutualInformation.new
     mi.score(y_true, y_pred) / Math.sqrt(class_entropy * cluster_entropy)

data/lib/rumale/evaluation_measure/precision_recall.rb

@@ -14,6 +14,7 @@ module Rumale
     y_true.sort.to_a.uniq.map do |label|
       target_positions = y_pred.eq(label)
       next 0.0 if y_pred[target_positions].empty?
+
       n_true_positives = Numo::Int32.cast(y_true[target_positions].eq(y_pred[target_positions])).sum.to_f
       n_false_positives = Numo::Int32.cast(y_true[target_positions].ne(y_pred[target_positions])).sum.to_f
       n_true_positives / (n_true_positives + n_false_positives)
@@ -25,6 +26,7 @@ module Rumale
     y_true.sort.to_a.uniq.map do |label|
       target_positions = y_true.eq(label)
       next 0.0 if y_pred[target_positions].empty?
+
       n_true_positives = Numo::Int32.cast(y_true[target_positions].eq(y_pred[target_positions])).sum.to_f
       n_false_negatives = Numo::Int32.cast(y_true[target_positions].ne(y_pred[target_positions])).sum.to_f
       n_true_positives / (n_true_positives + n_false_negatives)
@@ -35,6 +37,7 @@ module Rumale
   def f_score_each_class(y_true, y_pred)
     precision_each_class(y_true, y_pred).zip(recall_each_class(y_true, y_pred)).map do |p, r|
       next 0.0 if p.zero? && r.zero?
+
       (2.0 * p * r) / (p + r)
     end
   end
@@ -44,6 +47,7 @@ module Rumale
     evaluated_values = y_true.sort.to_a.uniq.map do |label|
       target_positions = y_pred.eq(label)
       next [0.0, 0.0] if y_pred[target_positions].empty?
+
       n_true_positives = Numo::Int32.cast(y_true[target_positions].eq(y_pred[target_positions])).sum.to_f
       n_false_positives = Numo::Int32.cast(y_true[target_positions].ne(y_pred[target_positions])).sum.to_f
       [n_true_positives, n_true_positives + n_false_positives]
@@ -57,6 +61,7 @@ module Rumale
     evaluated_values = y_true.sort.to_a.uniq.map do |label|
       target_positions = y_true.eq(label)
       next 0.0 if y_pred[target_positions].empty?
+
       n_true_positives = Numo::Int32.cast(y_true[target_positions].eq(y_pred[target_positions])).sum.to_f
       n_false_negatives = Numo::Int32.cast(y_true[target_positions].ne(y_pred[target_positions])).sum.to_f
       [n_true_positives, n_true_positives + n_false_negatives]

data/lib/rumale/evaluation_measure/purity.rb

@@ -11,7 +11,7 @@ module Rumale
   # puts evaluator.score(ground_truth, predicted)
   #
   # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
   class Purity
     include Base::Evaluator

data/lib/rumale/evaluation_measure/roc_auc.rb

@@ -64,6 +64,7 @@ module Rumale
     y_score = Numo::DFloat.cast(y_score) unless y_score.is_a?(Numo::DFloat)
     raise ArgumentError, 'Expect y_true to be 1-D arrray.' unless y_true.shape[1].nil?
     raise ArgumentError, 'Expect y_score to be 1-D arrray.' unless y_score.shape[1].nil?
+
     labels = y_true.to_a.uniq
     if pos_label.nil?
       raise ArgumentError, 'y_true must be binary labels or pos_label must be specified if y_true is multi-label' unless labels.size == 2
@@ -96,8 +97,10 @@ module Rumale
     y = Numo::NArray.asarray(y) unless y.is_a?(Numo::NArray)
     raise ArgumentError, 'Expect x to be 1-D arrray.' unless x.shape[1].nil?
     raise ArgumentError, 'Expect y to be 1-D arrray.' unless y.shape[1].nil?
+
     n_samples = [x.shape[0], y.shape[0]].min
     raise ArgumentError, 'At least two points are required to calculate area under curve.' if n_samples < 2
+
     (0...n_samples).to_a.each_cons(2).map { |i, j| 0.5 * (x[i] - x[j]).abs * (y[i] + y[j]) }.reduce(&:+)
   end
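
The patched auc helper above computes the trapezoidal area over consecutive curve points and is reached through the ROCAUC evaluator's score method. A minimal sketch with hand-made labels and scores (placeholder values, not data from this diff):

    require 'rumale'

    y_true  = Numo::Int32[1, 1, 0, 0, 1]
    y_score = Numo::DFloat[0.9, 0.8, 0.7, 0.3, 0.2]

    evaluator = Rumale::EvaluationMeasure::ROCAUC.new
    puts evaluator.score(y_true, y_score) # area under the ROC curve, in [0, 1]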