rumale 0.18.5 → 0.18.6

Files changed (63)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/.travis.yml +3 -3
  4. data/CHANGELOG.md +4 -0
  5. data/Gemfile +7 -0
  6. data/README.md +1 -8
  7. data/lib/rumale/clustering/dbscan.rb +1 -1
  8. data/lib/rumale/clustering/hdbscan.rb +3 -3
  9. data/lib/rumale/clustering/k_means.rb +1 -1
  10. data/lib/rumale/clustering/k_medoids.rb +1 -1
  11. data/lib/rumale/clustering/power_iteration.rb +1 -1
  12. data/lib/rumale/clustering/single_linkage.rb +1 -1
  13. data/lib/rumale/clustering/snn.rb +2 -2
  14. data/lib/rumale/clustering/spectral_clustering.rb +2 -2
  15. data/lib/rumale/decomposition/factor_analysis.rb +1 -1
  16. data/lib/rumale/decomposition/fast_ica.rb +2 -2
  17. data/lib/rumale/decomposition/nmf.rb +1 -1
  18. data/lib/rumale/decomposition/pca.rb +1 -1
  19. data/lib/rumale/ensemble/ada_boost_classifier.rb +1 -1
  20. data/lib/rumale/ensemble/ada_boost_regressor.rb +1 -2
  21. data/lib/rumale/ensemble/extra_trees_classifier.rb +1 -1
  22. data/lib/rumale/ensemble/extra_trees_regressor.rb +1 -1
  23. data/lib/rumale/ensemble/gradient_boosting_classifier.rb +4 -4
  24. data/lib/rumale/ensemble/gradient_boosting_regressor.rb +4 -4
  25. data/lib/rumale/evaluation_measure/adjusted_rand_score.rb +1 -1
  26. data/lib/rumale/evaluation_measure/calinski_harabasz_score.rb +1 -1
  27. data/lib/rumale/evaluation_measure/davies_bouldin_score.rb +1 -1
  28. data/lib/rumale/evaluation_measure/mutual_information.rb +1 -1
  29. data/lib/rumale/evaluation_measure/normalized_mutual_information.rb +2 -2
  30. data/lib/rumale/evaluation_measure/purity.rb +1 -1
  31. data/lib/rumale/evaluation_measure/silhouette_score.rb +1 -1
  32. data/lib/rumale/kernel_approximation/nystroem.rb +1 -1
  33. data/lib/rumale/kernel_approximation/rbf.rb +1 -1
  34. data/lib/rumale/kernel_machine/kernel_fda.rb +1 -1
  35. data/lib/rumale/kernel_machine/kernel_pca.rb +1 -1
  36. data/lib/rumale/kernel_machine/kernel_svc.rb +1 -1
  37. data/lib/rumale/linear_model/elastic_net.rb +3 -3
  38. data/lib/rumale/linear_model/lasso.rb +3 -3
  39. data/lib/rumale/linear_model/linear_regression.rb +1 -1
  40. data/lib/rumale/linear_model/logistic_regression.rb +3 -3
  41. data/lib/rumale/linear_model/ridge.rb +1 -1
  42. data/lib/rumale/linear_model/svc.rb +3 -3
  43. data/lib/rumale/linear_model/svr.rb +3 -3
  44. data/lib/rumale/manifold/mds.rb +1 -1
  45. data/lib/rumale/manifold/tsne.rb +2 -2
  46. data/lib/rumale/naive_bayes/bernoulli_nb.rb +1 -1
  47. data/lib/rumale/naive_bayes/multinomial_nb.rb +1 -1
  48. data/lib/rumale/nearest_neighbors/vp_tree.rb +1 -1
  49. data/lib/rumale/neural_network/adam.rb +1 -1
  50. data/lib/rumale/optimizer/ada_grad.rb +1 -1
  51. data/lib/rumale/optimizer/adam.rb +1 -1
  52. data/lib/rumale/optimizer/nadam.rb +1 -1
  53. data/lib/rumale/optimizer/rmsprop.rb +2 -2
  54. data/lib/rumale/optimizer/yellow_fin.rb +1 -1
  55. data/lib/rumale/polynomial_model/factorization_machine_classifier.rb +2 -2
  56. data/lib/rumale/polynomial_model/factorization_machine_regressor.rb +2 -2
  57. data/lib/rumale/probabilistic_output.rb +2 -2
  58. data/lib/rumale/tree/extra_tree_classifier.rb +1 -1
  59. data/lib/rumale/tree/extra_tree_regressor.rb +1 -1
  60. data/lib/rumale/tree/gradient_tree_regressor.rb +4 -5
  61. data/lib/rumale/version.rb +1 -1
  62. data/rumale.gemspec +2 -12
  63. metadata +10 -108
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 90ea96f8e47e9710da8f9e3d2c0d88b45e5ec86f349fb4dc1d9058867750c88c
- data.tar.gz: dde564e3cb15d6abbbb35c2629d1b9c53c79a456f29a60a5cfe7780aa06dd47b
+ metadata.gz: 40d5504cf4463721f53a4202ed99ec3f015c571fbadf3a4a4c7e0ac6eb00c7a7
+ data.tar.gz: fae3bebad1e88aa166d9279e5f5a2de4ebbad5f79fd416fef68d33d4f66ba2c6
  SHA512:
- metadata.gz: aaaad994a319b406686962301ad28a67e9d92b3fd15930f9792eb0e6249342d98b08a822737771f19501928036cfdf64a41dede62e9a3fe114aefa13a77f72c6
- data.tar.gz: f71fe52af3418d4ff74641a6f651fda783eca32450c36023de276ed41f0e326e035f3996f9433e2514569f8bcb496276bc582d0b507a5c603c90164ad388fad4
+ metadata.gz: b9d32bc9bd5c5f37d27b06fcaa554c28f9a209debaaac4024c1c2a1f6fb367484ce760168f62a2d9e1ee24d9372ad9cccd1d36e7280f202734e5330105a995fa
+ data.tar.gz: c18470cb533df4f6315324942afc98b5c52f4b7f6246078f459987a3407b79ae60a42599f40bc6236d5adba3dc85799a091e0d7ae5e9a1a3fd9fc626206cbef2
data/.rubocop.yml CHANGED
@@ -7,7 +7,7 @@ AllCops:
  DisplayCopNames: true
  DisplayStyleGuide: true
  Exclude:
- - 'bin/*'
+ - 'ext/rumale/extconf.rb'
  - 'rumale.gemspec'
  - 'Rakefile'
  - 'Gemfile'
data/.travis.yml CHANGED
@@ -1,6 +1,6 @@
- os: linux
- dist: xenial
+ ---
  language: ruby
+ cache: bundler
  rvm:
  - '2.4'
  - '2.5'
@@ -14,4 +14,4 @@ addons:
  - liblapacke-dev

  before_install:
- - gem install bundler -v 2.0.2
+ - gem install bundler -v 2.1.4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+ # 0.18.6
+ - Fix some configuration files.
+ - Update API documentation.
+
  # 0.18.5
  - Add functions for calculation of cosine similarity and distance to [Rumale::PairwiseMetric](https://yoshoku.github.io/rumale/doc/Rumale/PairwiseMetric.html).
  - Refactor some codes with Rubocop.
data/Gemfile CHANGED
@@ -2,3 +2,10 @@ source 'https://rubygems.org'

  # Specify your gem's dependencies in rumale.gemspec
  gemspec
+
+ gem 'coveralls', '~> 0.8'
+ gem 'numo-linalg', '>= 0.1.4'
+ gem 'parallel', '>= 1.17.0'
+ gem 'rake', '~> 12.0'
+ gem 'rake-compiler', '~> 1.0'
+ gem 'rspec', '~> 3.0'
data/README.md CHANGED
@@ -200,7 +200,7 @@ $ ruby pipeline.rb
  5-CV mean accuracy: 99.6 %
  ```

- ## Speeding up
+ ## Speed up

  ### Numo::Linalg
  Loading the [Numo::Linalg](https://github.com/ruby-numo/numo-linalg) allows to perform matrix product of Numo::NArray using BLAS libraries.
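The Numo::Linalg speed-up this README section refers to amounts to requiring the BLAS-backed loader before Rumale. A minimal sketch, assuming the numo-linalg gem and a BLAS/LAPACK library are installed:

```ruby
# Load the BLAS/LAPACK backend first so Rumale's matrix products pick it up.
require 'numo/linalg/autoloader'
require 'rumale'
```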
@@ -259,13 +259,6 @@ When -1 is given to n_jobs parameter, all processors are used.
  estimator = Rumale::Ensemble::RandomForestClassifier.new(n_jobs: -1, random_seed: 1)
  ```

-
- ## Development
-
- After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
-
- To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
-
  ## Contributing

  Bug reports and pull requests are welcome on GitHub at https://github.com/yoshoku/rumale.
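As a complement to the n_jobs example above, a minimal end-to-end sketch; `samples` and `labels` are placeholder names for any Numo::DFloat training matrix and Numo::Int32 label vector, and the parallel gem is assumed to be installed for n_jobs to take effect:

```ruby
require 'rumale'

# n_jobs: -1 trains the decision trees on all available processors.
estimator = Rumale::Ensemble::RandomForestClassifier.new(n_jobs: -1, random_seed: 1)
estimator.fit(samples, labels)
predicted = estimator.predict(samples)
```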
data/lib/rumale/clustering/dbscan.rb CHANGED
@@ -13,7 +13,7 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - M. Ester, H-P. Kriegel, J. Sander, and X. Xu, "A density-based algorithm for discovering clusters in large spatial databases with noise," Proc. KDD' 96, pp. 266--231, 1996.
+ # - Ester, M., Kriegel, H-P., Sander, J., and Xu, X., "A density-based algorithm for discovering clusters in large spatial databases with noise," Proc. KDD' 96, pp. 266--231, 1996.
  class DBSCAN
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/hdbscan.rb CHANGED
@@ -15,9 +15,9 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - R J. G. B. Campello, D. Moulavi, A. Zimek, and J. Sander, "Hierarchical Density Estimates for Data Clustering, Visualization, and Outlier Detection," TKDD, Vol. 10 (1), pp. 5:1--5:51, 2015.
- # - R J. G. B. Campello, D. Moulavi, and J Sander, "Density-Based Clustering Based on Hierarchical Density Estimates," Proc. PAKDD'13, pp. 160--172, 2013.
- # - L. Lelis and J. Sander, "Semi-Supervised Density-Based Clustering," Proc. ICDM'09, pp. 842--847, 2009.
+ # - Campello, R J. G. B., Moulavi, D., Zimek, A., and Sander, J., "Hierarchical Density Estimates for Data Clustering, Visualization, and Outlier Detection," TKDD, Vol. 10 (1), pp. 5:1--5:51, 2015.
+ # - Campello, R J. G. B., Moulavi, D., and Sander, J., "Density-Based Clustering Based on Hierarchical Density Estimates," Proc. PAKDD'13, pp. 160--172, 2013.
+ # - Lelis, L., and Sander, J., "Semi-Supervised Density-Based Clustering," Proc. ICDM'09, pp. 842--847, 2009.
  class HDBSCAN
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/k_means.rb CHANGED
@@ -15,7 +15,7 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - D. Arthur and S. Vassilvitskii, "k-means++: the advantages of careful seeding," Proc. SODA'07, pp. 1027--1035, 2007.
+ # - Arthur, D., and Vassilvitskii, S., "k-means++: the advantages of careful seeding," Proc. SODA'07, pp. 1027--1035, 2007.
  class KMeans
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/k_medoids.rb CHANGED
@@ -13,7 +13,7 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - D. Arthur and S. Vassilvitskii, "k-means++: the advantages of careful seeding," Proc. SODA'07, pp. 1027--1035, 2007.
+ # - Arthur, D., and Vassilvitskii, S., "k-means++: the advantages of careful seeding," Proc. SODA'07, pp. 1027--1035, 2007.
  class KMedoids
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/power_iteration.rb CHANGED
@@ -13,7 +13,7 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - F. Lin and W W. Cohen, "Power Iteration Clustering," Proc. ICML'10, pp. 655--662, 2010.
+ # - Lin, F., and Cohen, W W., "Power Iteration Clustering," Proc. ICML'10, pp. 655--662, 2010.
  class PowerIteration
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/single_linkage.rb CHANGED
@@ -15,7 +15,7 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - D. Mullner, "Modern hierarchical, agglomerative clustering algorithms," arXiv:1109.2378, 2011.
+ # - Mullner, D., "Modern hierarchical, agglomerative clustering algorithms," arXiv:1109.2378, 2011.
  class SingleLinkage
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/clustering/snn.rb CHANGED
@@ -13,8 +13,8 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - L. Ertoz, M. Steinbach, and V. Kumar, "Finding Clusters of Different Sizes, Shapes, and Densities in Noisy, High Dimensional Data," Proc. SDM'03, pp. 47--58, 2003.
- # - M E. Houle, H-P. Kriegel, P. Kroger, E. Schubert, and A. Zimek, "Can Shared-Neighbor Distances Defeat the Curse of Dimensionality?," Proc. SSDBM'10, pp. 482--500, 2010.
+ # - Ertoz, L., Steinbach, M., and Kumar, V., "Finding Clusters of Different Sizes, Shapes, and Densities in Noisy, High Dimensional Data," Proc. SDM'03, pp. 47--58, 2003.
+ # - Houle, M E., Kriegel, H-P., Kroger, P., Schubert, E., and Zimek, A., "Can Shared-Neighbor Distances Defeat the Curse of Dimensionality?," Proc. SSDBM'10, pp. 482--500, 2010.
  class SNN < DBSCAN
  # Create a new cluster analyzer with Shared Neareset Neighbor method.
  #
data/lib/rumale/clustering/spectral_clustering.rb CHANGED
@@ -16,8 +16,8 @@ module Rumale
  # cluster_labels = analyzer.fit_predict(samples)
  #
  # *Reference*
- # - A Y. Ng, M I. Jordan, and Y. Weiss, "On Spectral Clustering: Analyssi and an algorithm," Proc. NIPS'01, pp. 849--856, 2001.
- # - U von Luxburg, "A tutorial on spectral clustering," Statistics and Computing, Vol. 17 (4), pp. 395--416, 2007.
+ # - Ng, A Y., Jordan, M I., and Weiss, Y., "On Spectral Clustering: Analyssi and an algorithm," Proc. NIPS'01, pp. 849--856, 2001.
+ # - von Luxburg, U., "A tutorial on spectral clustering," Statistics and Computing, Vol. 17 (4), pp. 395--416, 2007.
  class SpectralClustering
  include Base::BaseEstimator
  include Base::ClusterAnalyzer
data/lib/rumale/decomposition/factor_analysis.rb CHANGED
@@ -14,7 +14,7 @@ module Rumale
  # representaion = decomposer.fit_transform(samples)
  #
  # *Reference*
- # - D. Barber, "Bayesian Reasoning and Machine Learning," Cambridge University Press, 2012.
+ # - Barber, D., "Bayesian Reasoning and Machine Learning," Cambridge University Press, 2012.
  class FactorAnalysis
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/decomposition/fast_ica.rb CHANGED
@@ -14,8 +14,8 @@ module Rumale
  # source_data = transformer.fit_transform(observed_data)
  #
  # *Reference*
- # - A. Hyvarinen "Fast and Robust Fixed-Point Algorithms for Independent Component Analysis," IEEE Trans. Neural Networks, Vol. 10 (3), pp. 626--634, 1999.
- # - A. Hyvarinen and E. Oja, "Independent Component Analysis: Algorithms and Applications," Neural Networks, Vol. 13 (4-5), pp. 411--430, 2000.
+ # - Hyvarinen, A., "Fast and Robust Fixed-Point Algorithms for Independent Component Analysis," IEEE Trans. Neural Networks, Vol. 10 (3), pp. 626--634, 1999.
+ # - Hyvarinen, A., and Oja, E., "Independent Component Analysis: Algorithms and Applications," Neural Networks, Vol. 13 (4-5), pp. 411--430, 2000.
  class FastICA
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/decomposition/nmf.rb CHANGED
@@ -13,7 +13,7 @@ module Rumale
  # representaion = decomposer.fit_transform(samples)
  #
  # *Reference*
- # - W. Xu, X. Liu, and Y.Gong, "Document Clustering Based On Non-negative Matrix Factorization," Proc. SIGIR' 03 , pp. 267--273, 2003.
+ # - Xu, W., Liu, X., and Gong, Y., "Document Clustering Based On Non-negative Matrix Factorization," Proc. SIGIR' 03 , pp. 267--273, 2003.
  class NMF
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/decomposition/pca.rb CHANGED
@@ -18,7 +18,7 @@ module Rumale
  # representaion = decomposer.fit_transform(samples)
  #
  # *Reference*
- # - A. Sharma and K K. Paliwal, "Fast principal component analysis using fixed-point algorithm," Pattern Recognition Letters, 28, pp. 1151--1155, 2007.
+ # - Sharma, A., and Paliwal, K K., "Fast principal component analysis using fixed-point algorithm," Pattern Recognition Letters, 28, pp. 1151--1155, 2007.
  class PCA
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/ensemble/ada_boost_classifier.rb CHANGED
@@ -19,7 +19,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - J. Zhu, S. Rosset, H. Zou, and T.Hashie, "Multi-class AdaBoost," Technical Report No. 430, Department of Statistics, University of Michigan, 2005.
+ # - Zhu, J., Rosset, S., Zou, H., and Hashie, T., "Multi-class AdaBoost," Technical Report No. 430, Department of Statistics, University of Michigan, 2005.
  class AdaBoostClassifier
  include Base::BaseEstimator
  include Base::Classifier
data/lib/rumale/ensemble/ada_boost_regressor.rb CHANGED
@@ -18,8 +18,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - D. L. Shrestha and D. P. Solomatine, "Experiments with AdaBoost.RT, an Improved Boosting Scheme for Regression," Neural Computation 18 (7), pp. 1678--1710, 2006.
- #
+ # - Shrestha, D. L., and Solomatine, D. P., "Experiments with AdaBoost.RT, an Improved Boosting Scheme for Regression," Neural Computation 18 (7), pp. 1678--1710, 2006.
  class AdaBoostRegressor
  include Base::BaseEstimator
  include Base::Regressor
data/lib/rumale/ensemble/extra_trees_classifier.rb CHANGED
@@ -18,7 +18,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
  class ExtraTreesClassifier < RandomForestClassifier
  # Return the set of estimators.
  # @return [Array<ExtraTreeClassifier>]
data/lib/rumale/ensemble/extra_trees_regressor.rb CHANGED
@@ -18,7 +18,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
  class ExtraTreesRegressor < RandomForestRegressor
  # Return the set of estimators.
  # @return [Array<ExtraTreeRegressor>]
data/lib/rumale/ensemble/gradient_boosting_classifier.rb CHANGED
@@ -18,10 +18,10 @@ module Rumale
  # estimator.fit(training_samples, traininig_values)
  # results = estimator.predict(testing_samples)
  #
- # *reference*
- # - J H. Friedman, "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
- # - J H. Friedman, "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
- # - T. Chen and C. Guestrin, "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
+ # *Reference*
+ # - Friedman, J H., "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
+ # - Friedman, J H., "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
+ # - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
  #
  class GradientBoostingClassifier
  include Base::BaseEstimator
data/lib/rumale/ensemble/gradient_boosting_regressor.rb CHANGED
@@ -17,10 +17,10 @@ module Rumale
  # estimator.fit(training_samples, traininig_values)
  # results = estimator.predict(testing_samples)
  #
- # *reference*
- # - J H. Friedman, "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
- # - J H. Friedman, "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
- # - T. Chen and C. Guestrin, "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
+ # *Reference*
+ # - Friedman, J H. "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
+ # - Friedman, J H. "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
+ # - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
  #
  class GradientBoostingRegressor
  include Base::BaseEstimator
data/lib/rumale/evaluation_measure/adjusted_rand_score.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # puts evaluator.score(ground_truth, predicted)
  #
  # *Reference*
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance", J. Machine Learnig Research, Vol. 11, pp.2837--2854, 2010.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance", J. Machine Learnig Research, Vol. 11, pp.2837--2854, 2010.
  class AdjustedRandScore
  include Base::Evaluator

data/lib/rumale/evaluation_measure/calinski_harabasz_score.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # puts evaluator.score(x, predicted)
  #
  # *Reference*
- # - T. Calinski and J. Harabsz, "A dendrite method for cluster analysis," Communication in Statistics, Vol. 3 (1), pp. 1--27, 1972.
+ # - Calinski, T., and Harabsz, J., "A dendrite method for cluster analysis," Communication in Statistics, Vol. 3 (1), pp. 1--27, 1972.
  class CalinskiHarabaszScore
  include Base::Evaluator

data/lib/rumale/evaluation_measure/davies_bouldin_score.rb CHANGED
@@ -12,7 +12,7 @@ module Rumale
  # puts evaluator.score(x, predicted)
  #
  # *Reference*
- # - D L. Davies and D W. Bouldin, "A Cluster Separation Measure," IEEE Trans. Pattern Analysis and Machine Intelligence, Vol. PAMI-1, No. 2, pp. 224--227, 1979.
+ # - Davies, D L., and Bouldin, D W., "A Cluster Separation Measure," IEEE Trans. Pattern Analysis and Machine Intelligence, Vol. PAMI-1, No. 2, pp. 224--227, 1979.
  class DaviesBouldinScore
  include Base::Evaluator

data/lib/rumale/evaluation_measure/mutual_information.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # puts evaluator.score(ground_truth, predicted)
  #
  # *Reference*
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
  class MutualInformation
  include Base::Evaluator

data/lib/rumale/evaluation_measure/normalized_mutual_information.rb CHANGED
@@ -12,8 +12,8 @@ module Rumale
  # puts evaluator.score(ground_truth, predicted)
  #
  # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
- # - N X. Vinh, J. Epps, and J. Bailey, "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Vinh, N X., Epps, J., and Bailey, J., "Information Theoretic Measures for Clusterings Comparison: Variants, Properties, Normalization and Correction for Chance," J. Machine Learning Research, vol. 11, pp. 2837--1854, 2010.
  class NormalizedMutualInformation
  include Base::Evaluator

data/lib/rumale/evaluation_measure/purity.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # puts evaluator.score(ground_truth, predicted)
  #
  # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
  class Purity
  include Base::Evaluator

data/lib/rumale/evaluation_measure/silhouette_score.rb CHANGED
@@ -12,7 +12,7 @@ module Rumale
  # puts evaluator.score(x, predicted)
  #
  # *Reference*
- # - P J. Rousseuw, "Silhouettes: A graphical aid to the interpretation and validation of cluster analysis," Journal of Computational and Applied Mathematics, Vol. 20, pp. 53--65, 1987.
+ # - Rousseuw, P J., "Silhouettes: A graphical aid to the interpretation and validation of cluster analysis," Journal of Computational and Applied Mathematics, Vol. 20, pp. 53--65, 1987.
  class SilhouetteScore
  include Base::Evaluator

data/lib/rumale/kernel_approximation/nystroem.rb CHANGED
@@ -16,7 +16,7 @@ module Rumale
  # new_testing_samples = transformer.transform(testing_samples)
  #
  # *Reference*
- # 1. T. Yang, Y. Li, M. Mahdavi, R. Jin, and Z-H. Zhou, "Nystrom Method vs Random Fourier Features: A Theoretical and Empirical Comparison," Advances in NIPS'12, Vol. 1, pp. 476--484, 2012.
+ # - Yang, T., Li, Y., Mahdavi, M., Jin, R., and Zhou, Z-H., "Nystrom Method vs Random Fourier Features: A Theoretical and Empirical Comparison," Advances in NIPS'12, Vol. 1, pp. 476--484, 2012.
  class Nystroem
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/kernel_approximation/rbf.rb CHANGED
@@ -15,7 +15,7 @@ module Rumale
  # new_testing_samples = transformer.transform(testing_samples)
  #
  # *Refernce*:
- # 1. A. Rahimi and B. Recht, "Random Features for Large-Scale Kernel Machines," Proc. NIPS'07, pp.1177--1184, 2007.
+ # - Rahimi, A., and Recht, B., "Random Features for Large-Scale Kernel Machines," Proc. NIPS'07, pp.1177--1184, 2007.
  class RBF
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/kernel_machine/kernel_fda.rb CHANGED
@@ -18,7 +18,7 @@ module Rumale
  # mapped_test_samples = kfda.transform(kernel_mat_test)
  #
  # *Reference*
- # - Baudat, G. and Anouar, F., "Generalized Discriminant Analysis using a Kernel Approach," Neural Computation, vol. 12, pp. 2385--2404, 2000.
+ # - Baudat, G., and Anouar, F., "Generalized Discriminant Analysis using a Kernel Approach," Neural Computation, vol. 12, pp. 2385--2404, 2000.
  class KernelFDA
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/kernel_machine/kernel_pca.rb CHANGED
@@ -18,7 +18,7 @@ module Rumale
  # mapped_test_samples = kpca.transform(kernel_mat_test)
  #
  # *Reference*
- # - B. Scholkopf, A. Smola, and K-R. Muller, "Nonlinear Component Analysis as a Kernel Eigenvalue Problem," Neural Computation, Vol. 10 (5), pp. 1299--1319, 1998.
+ # - Scholkopf, B., Smola, A., and Muller, K-R., "Nonlinear Component Analysis as a Kernel Eigenvalue Problem," Neural Computation, Vol. 10 (5), pp. 1299--1319, 1998.
  class KernelPCA
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/kernel_machine/kernel_svc.rb CHANGED
@@ -24,7 +24,7 @@ module Rumale
  # results = estimator.predict(testing_kernel_matrix)
  #
  # *Reference*
- # 1. S. Shalev-Shwartz, Y. Singer, N. Srebro, and A. Cotter, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Mathematical Programming, vol. 127 (1), pp. 3--30, 2011.
+ # - Shalev-Shwartz, S., Singer, Y., Srebro, N., and Cotter, A., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Mathematical Programming, vol. 127 (1), pp. 3--30, 2011.
  class KernelSVC
  include Base::BaseEstimator
  include Base::Classifier
data/lib/rumale/linear_model/elastic_net.rb CHANGED
@@ -15,9 +15,9 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Shalev-Shwartz and Y. Singer, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
- # - Y. Tsuruoka, J. Tsujii, and S. Ananiadou, "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Shalev-Shwartz, S., and Singer, Y., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
+ # - Tsuruoka, Y., Tsujii, J., and Ananiadou, S., "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class ElasticNet < BaseSGD
  include Base::Regressor

data/lib/rumale/linear_model/lasso.rb CHANGED
@@ -15,9 +15,9 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Shalev-Shwartz and Y. Singer, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
- # - Y. Tsuruoka, J. Tsujii, and S. Ananiadou, "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Shalev-Shwartz, S., and Singer, Y., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
+ # - Tsuruoka, Y., Tsujii, J., and Ananiadou, S., "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class Lasso < BaseSGD
  include Base::Regressor

data/lib/rumale/linear_model/linear_regression.rb CHANGED
@@ -21,7 +21,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class LinearRegression < BaseSGD
  include Base::Regressor

data/lib/rumale/linear_model/logistic_regression.rb CHANGED
@@ -20,9 +20,9 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Shalev-Shwartz, Y. Singer, N. Srebro, and A. Cotter, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Mathematical Programming, vol. 127 (1), pp. 3--30, 2011.
- # - Y. Tsuruoka, J. Tsujii, and S. Ananiadou, "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Shalev-Shwartz, S., Singer, Y., Srebro, N., and Cotter, A., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Mathematical Programming, vol. 127 (1), pp. 3--30, 2011.
+ # - Tsuruoka, Y., Tsujii, J., and Ananiadou, S., "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class LogisticRegression < BaseSGD
  include Base::Classifier

data/lib/rumale/linear_model/ridge.rb CHANGED
@@ -21,7 +21,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class Ridge < BaseSGD
  include Base::Regressor

data/lib/rumale/linear_model/svc.rb CHANGED
@@ -22,9 +22,9 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Shalev-Shwartz and Y. Singer, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
- # - Y. Tsuruoka, J. Tsujii, and S. Ananiadou, "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Shalev-Shwartz, S., and Singer, Y., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
+ # - Tsuruoka, Y., Tsujii, J., and Ananiadou, S., "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class SVC < BaseSGD
  include Base::Classifier

data/lib/rumale/linear_model/svr.rb CHANGED
@@ -19,9 +19,9 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Shalev-Shwartz and Y. Singer, "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
- # - Y. Tsuruoka, J. Tsujii, and S. Ananiadou, "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
- # - L. Bottou, "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
+ # - Shalev-Shwartz, S., and Singer, Y., "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM," Proc. ICML'07, pp. 807--814, 2007.
+ # - Tsuruoka, Y., Tsujii, J., and Ananiadou, S., "Stochastic Gradient Descent Training for L1-regularized Log-linear Models with Cumulative Penalty," Proc. ACL'09, pp. 477--485, 2009.
+ # - Bottou, L., "Large-Scale Machine Learning with Stochastic Gradient Descent," Proc. COMPSTAT'10, pp. 177--186, 2010.
  class SVR < BaseSGD
  include Base::Regressor

data/lib/rumale/manifold/mds.rb CHANGED
@@ -16,7 +16,7 @@ module Rumale
  # representations = mds.fit_transform(samples)
  #
  # *Reference*
- # - P J. F. Groenen and M. van de Velden, "Multidimensional Scaling by Majorization: A Review," J. of Statistical Software, Vol. 73 (8), 2016.
+ # - Groenen, P J. F. and van de Velden, M., "Multidimensional Scaling by Majorization: A Review," J. of Statistical Software, Vol. 73 (8), 2016.
  class MDS
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/manifold/tsne.rb CHANGED
@@ -19,8 +19,8 @@ module Rumale
  # representations = tsne.fit_transform(samples)
  #
  # *Reference*
- # - L. van der Maaten and G. Hinton, "Visualizing data using t-SNE," J. of Machine Learning Research, vol. 9, pp. 2579--2605, 2008.
- # - Z. Yang, I. King, Z. Xu, and E. Oja, "Heavy-Tailed Symmetric Stochastic Neighbor Embedding," Proc. NIPS'09, pp. 2169--2177, 2009.
+ # - van der Maaten, L., and Hinton, G., "Visualizing data using t-SNE," J. of Machine Learning Research, vol. 9, pp. 2579--2605, 2008.
+ # - Yang, Z., King, I., Xu, Z., and Oja, E., "Heavy-Tailed Symmetric Stochastic Neighbor Embedding," Proc. NIPS'09, pp. 2169--2177, 2009.
  class TSNE
  include Base::BaseEstimator
  include Base::Transformer
data/lib/rumale/naive_bayes/bernoulli_nb.rb CHANGED
@@ -12,7 +12,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
  class BernoulliNB < BaseNaiveBayes
  # Return the class labels.
  # @return [Numo::Int32] (size: n_classes)
data/lib/rumale/naive_bayes/multinomial_nb.rb CHANGED
@@ -12,7 +12,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - C D. Manning, P. Raghavan, and H. Schutze, "Introduction to Information Retrieval," Cambridge University Press., 2008.
+ # - Manning, C D., Raghavan, P., and Schutze, H., "Introduction to Information Retrieval," Cambridge University Press., 2008.
  class MultinomialNB < BaseNaiveBayes
  # Return the class labels.
  # @return [Numo::Int32] (size: n_classes)
data/lib/rumale/nearest_neighbors/vp_tree.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # This class is used internally for k-nearest neighbor estimators.
  #
  # *Reference*
- # P N. Yianilos, "Data Structures and Algorithms for Nearest Neighbor Search in General Metric Spaces," Proc. SODA'93, pp. 311--321, 1993.
+ # - Yianilos, P N., "Data Structures and Algorithms for Nearest Neighbor Search in General Metric Spaces," Proc. SODA'93, pp. 311--321, 1993.
  class VPTree
  include Validation
  include Base::BaseEstimator
data/lib/rumale/neural_network/adam.rb CHANGED
@@ -11,7 +11,7 @@ module Rumale
  # Adam is a class that implements Adam optimizer.
  #
  # *Reference*
- # - D P. Kingma and J. Ba, "Adam: A Method for Stochastic Optimization," Proc. ICLR'15, 2015.
+ # - Kingma, D P., and Ba, J., "Adam: A Method for Stochastic Optimization," Proc. ICLR'15, 2015.
  class Adam
  include Base::BaseEstimator

data/lib/rumale/optimizer/ada_grad.rb CHANGED
@@ -8,7 +8,7 @@ module Rumale
  # AdaGrad is a class that implements AdaGrad optimizer.
  #
  # *Reference*
- # - J. Duchi, E Hazan, and Y. Singer, "Adaptive Subgradient Methods for Online Learning and Stochastic Optimization," J. Machine Learning Research, vol. 12, pp. 2121--2159, 2011.
+ # - Duchi, J., Hazan, E., and Singer, Y., "Adaptive Subgradient Methods for Online Learning and Stochastic Optimization," J. Machine Learning Research, vol. 12, pp. 2121--2159, 2011.
  class AdaGrad
  include Base::BaseEstimator
  include Validation
data/lib/rumale/optimizer/adam.rb CHANGED
@@ -8,7 +8,7 @@ module Rumale
  # Adam is a class that implements Adam optimizer.
  #
  # *Reference*
- # - D P. Kingma and J. Ba, "Adam: A Method for Stochastic Optimization," Proc. ICLR'15, 2015.
+ # - Kingma, D P., and Ba, J., "Adam: A Method for Stochastic Optimization," Proc. ICLR'15, 2015.
  class Adam
  include Base::BaseEstimator
  include Validation
data/lib/rumale/optimizer/nadam.rb CHANGED
@@ -9,7 +9,7 @@ module Rumale
  # Nadam is a class that implements Nadam optimizer.
  #
  # *Reference*
- # - T. Dozat, "Incorporating Nesterov Momentum into Adam," Tech. Repo. Stanford University, 2015.
+ # - Dozat, T., "Incorporating Nesterov Momentum into Adam," Tech. Repo. Stanford University, 2015.
  class Nadam
  include Base::BaseEstimator
  include Validation
data/lib/rumale/optimizer/rmsprop.rb CHANGED
@@ -8,8 +8,8 @@ module Rumale
  # RMSProp is a class that implements RMSProp optimizer.
  #
  # *Reference*
- # - I. Sutskever, J. Martens, G. Dahl, and G. Hinton, "On the importance of initialization and momentum in deep learning," Proc. ICML' 13, pp. 1139--1147, 2013.
- # - G. Hinton, N. Srivastava, and K. Swersky, "Lecture 6e rmsprop," Neural Networks for Machine Learning, 2012.
+ # - Sutskever, I., Martens, J., Dahl, G., and Hinton, G., "On the importance of initialization and momentum in deep learning," Proc. ICML' 13, pp. 1139--1147, 2013.
+ # - Hinton, G., Srivastava, N., and Swersky, K., "Lecture 6e rmsprop," Neural Networks for Machine Learning, 2012.
  class RMSProp
  include Base::BaseEstimator
  include Validation
data/lib/rumale/optimizer/yellow_fin.rb CHANGED
@@ -8,7 +8,7 @@ module Rumale
  # YellowFin is a class that implements YellowFin optimizer.
  #
  # *Reference*
- # - J. Zhang and I. Mitliagkas, "YellowFin and the Art of Momentum Tuning," CoRR abs/1706.03471, 2017.
+ # - Zhang, J., and Mitliagkas, I., "YellowFin and the Art of Momentum Tuning," CoRR abs/1706.03471, 2017.
  class YellowFin
  include Base::BaseEstimator
  include Validation
data/lib/rumale/polynomial_model/factorization_machine_classifier.rb CHANGED
@@ -19,8 +19,8 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Rendle, "Factorization Machines with libFM," ACM TIST, vol. 3 (3), pp. 57:1--57:22, 2012.
- # - S. Rendle, "Factorization Machines," Proc. ICDM'10, pp. 995--1000, 2010.
+ # - Rendle, S., "Factorization Machines with libFM," ACM TIST, vol. 3 (3), pp. 57:1--57:22, 2012.
+ # - Rendle, S., "Factorization Machines," Proc. ICDM'10, pp. 995--1000, 2010.
  class FactorizationMachineClassifier < BaseFactorizationMachine
  include Base::Classifier

data/lib/rumale/polynomial_model/factorization_machine_regressor.rb CHANGED
@@ -17,8 +17,8 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - S. Rendle, "Factorization Machines with libFM," ACM TIST, vol. 3 (3), pp. 57:1--57:22, 2012.
- # - S. Rendle, "Factorization Machines," Proc. ICDM'10, pp. 995--1000, 2010.
+ # - Rendle, S., "Factorization Machines with libFM," ACM TIST, vol. 3 (3), pp. 57:1--57:22, 2012.
+ # - Rendle, S., "Factorization Machines," Proc. ICDM'10, pp. 995--1000, 2010.
  class FactorizationMachineRegressor < BaseFactorizationMachine
  include Base::Regressor

data/lib/rumale/probabilistic_output.rb CHANGED
@@ -12,8 +12,8 @@ module Rumale
  # probs = 1 / (Numo::NMath.exp(params[0] * df + params[1]) + 1)
  #
  # *Reference*
- # 1. J C. Platt, "Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods," Adv. Large Margin Classifiers, pp. 61--74, 2000.
- # 1. H-T Lin, C-J Lin, and R C.Weng, "A Note on Platt's Probabilistic Outputs for Support Vector Machines," J. Machine Learning, Vol. 63 (3), pp. 267--276, 2007.
+ # - Platt, J C., "Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods," Adv. Large Margin Classifiers, pp. 61--74, 2000.
+ # - Lin, H-T., Lin, C-J., and Weng, R C., "A Note on Platt's Probabilistic Outputs for Support Vector Machines," J. Machine Learning, Vol. 63 (3), pp. 267--276, 2007.
  module ProbabilisticOutput
  class << self
  # Fit the probabilistic model for binary SVM outputs.
data/lib/rumale/tree/extra_tree_classifier.rb CHANGED
@@ -14,7 +14,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
  class ExtraTreeClassifier < DecisionTreeClassifier
  # Return the class labels.
  # @return [Numo::Int32] (size: n_classes)
data/lib/rumale/tree/extra_tree_regressor.rb CHANGED
@@ -14,7 +14,7 @@ module Rumale
  # results = estimator.predict(testing_samples)
  #
  # *Reference*
- # - P. Geurts, D. Ernst, and L. Wehenkel, "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
+ # - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
  class ExtraTreeRegressor < DecisionTreeRegressor
  # Return the importance for each feature.
  # @return [Numo::DFloat] (size: n_features)
data/lib/rumale/tree/gradient_tree_regressor.rb CHANGED
@@ -10,11 +10,10 @@ module Rumale
  # GradientTreeRegressor is a class that implements decision tree for regression with exact gredy algorithm.
  # This class is used internally for estimators with gradient tree boosting.
  #
- # *reference*
- # - J H. Friedman, "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
- # - J H. Friedman, "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
- # - T. Chen and C. Guestrin, "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
- #
+ # *Reference*
+ # - Friedman, J H., "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
+ # - Friedman, J H., "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
+ # - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
  class GradientTreeRegressor
  include Base::BaseEstimator
  include Base::Regressor
data/lib/rumale/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Rumale is a machine learning library in Ruby.
  module Rumale
  # The version of Rumale you are using.
- VERSION = '0.18.5'
+ VERSION = '0.18.6'
  end
data/rumale.gemspec CHANGED
@@ -44,17 +44,7 @@ Gem::Specification.new do |spec|
  'bug_tracker_uri' => 'https://github.com/yoshoku/rumale/issues'
  }

- spec.required_ruby_version = '>= 2.3'
-
  spec.add_runtime_dependency 'numo-narray', '>= 0.9.1'
- spec.add_runtime_dependency 'mopti', '~> 0.1'
- spec.add_runtime_dependency 'mmh3', '~> 0.1'
-
- spec.add_development_dependency 'bundler', '~> 2.0'
- spec.add_development_dependency 'coveralls', '~> 0.8'
- spec.add_development_dependency 'numo-linalg', '>= 0.1.4'
- spec.add_development_dependency 'parallel', '>= 1.17.0'
- spec.add_development_dependency 'rake', '~> 12.0'
- spec.add_development_dependency 'rake-compiler', '~> 1.0'
- spec.add_development_dependency 'rspec', '~> 3.0'
+ spec.add_runtime_dependency 'mopti', '>= 0.1.0'
+ spec.add_runtime_dependency 'mmh3', '>= 0.1.0'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rumale
  version: !ruby/object:Gem::Version
- version: 0.18.5
+ version: 0.18.6
  platform: ruby
  authors:
  - yoshoku
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2020-04-18 00:00:00.000000000 Z
+ date: 2020-05-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: numo-narray
@@ -26,130 +26,32 @@ dependencies:
  version: 0.9.1
  - !ruby/object:Gem::Dependency
  name: mopti
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.1'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.1'
- - !ruby/object:Gem::Dependency
- name: mmh3
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.1'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.1'
- - !ruby/object:Gem::Dependency
- name: bundler
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '2.0'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '2.0'
- - !ruby/object:Gem::Dependency
- name: coveralls
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.8'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '0.8'
- - !ruby/object:Gem::Dependency
- name: numo-linalg
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.1.4
- type: :development
+ version: 0.1.0
+ type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.1.4
+ version: 0.1.0
  - !ruby/object:Gem::Dependency
- name: parallel
+ name: mmh3
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.17.0
- type: :development
+ version: 0.1.0
+ type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.17.0
- - !ruby/object:Gem::Dependency
- name: rake
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '12.0'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '12.0'
- - !ruby/object:Gem::Dependency
- name: rake-compiler
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1.0'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '1.0'
- - !ruby/object:Gem::Dependency
- name: rspec
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '3.0'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '3.0'
+ version: 0.1.0
  description: |
  Rumale is a machine learning library in Ruby.
  Rumale provides machine learning algorithms with interfaces similar to Scikit-Learn in Python.
@@ -330,7 +232,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '2.3'
+ version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="