rumale 0.14.2 → 0.14.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 7fed739280b26a4afad6081eb1bb32fef9d5292f
-  data.tar.gz: 757d76214e895fd1dfebdf03b79be2e71b60a8e0
+  metadata.gz: d1df6dee93147a75173bc099cd68dd116e7729c1
+  data.tar.gz: da85a19ca4964ee95cf026a69f2610b0c5d2b92c
 SHA512:
-  metadata.gz: c76042f5fbaa269884191bd6856674fcf8c89f499d68e0cd4c5e653bdb16a041a647e25d5b2592d7fd840fe55a7da45c195b58cebaa8d81ca8b957088c2e97b9
-  data.tar.gz: a7d8220b679419e3f875e910b44a446dc571dee34e5a44296ef1a3506a65d18c96997d0f979a8ffb0d5195aabd5113ad301c9f9d60a6ea791ca02841be33669c
+  metadata.gz: 893ae704bf217de39ee1b4ccbb0601ffea3804252c992c5b4d79f9dc68171d6a7f0be8d6218af315cd540331bc6b91627d8c990cd53c10cf3d13e5a5123481c0
+  data.tar.gz: 5faf0ce1a7f38974a996534b0817557fea033bf0aea5a867f1fd2460db7153a09bc4ab7ddc233791d73be7c220be9da714830b8dcfbc20a9b366164ef48ea617
data/.rubocop.yml CHANGED
@@ -48,7 +48,7 @@ Metrics/ParameterLists:
 Security/MarshalLoad:
   Enabled: false
 
-Naming/UncommunicativeMethodParamName:
+Naming/MethodParameterName:
   Enabled: false
 
 Naming/ConstantName:
@@ -66,6 +66,9 @@ Layout/EmptyLineAfterGuardClause:
 RSpec/MultipleExpectations:
   Enabled: false
 
+RSpec/NestedGroups:
+  Max: 4
+
 RSpec/ExampleLength:
   Max: 40
 
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
+# 0.14.3
+- Fix documents of GradientBoosting, RandomForest, and ExtraTrees.
+- Refactor gaussian mixture clustering with Rubocop.
+- Refactor specs.
+
 # 0.14.2
 - Refactor extension codes of decision tree estimators.
 - Refactor specs.
data/README.md CHANGED
@@ -15,7 +15,8 @@ Logistic Regression, Ridge, Lasso, Factorization Machine,
 Multi-layer Perceptron,
 Naive Bayes, Decision Tree, Gradient Tree Boosting, Random Forest,
 K-Means, Gaussian Mixture Model, DBSCAN, Spectral Clustering,
-Mutidimensional Scaling, t-SNE, Principal Component Analysis, and Non-negative Matrix Factorization.
+Mutidimensional Scaling, t-SNE, Principal Component Analysis, Non-negative Matrix Factorization,
+and many other algorithms.
 
 This project was formerly known as "SVMKit".
 If you are using SVMKit, please install Rumale and replace `SVMKit` constants with `Rumale`.
@@ -229,9 +229,8 @@ module Rumale
       end
 
       def check_enable_linalg(method_name)
-        if (@params[:covariance_type] == 'full') && !enable_linalg?
-          raise "GaussianMixture##{method_name} requires Numo::Linalg when covariance_type is 'full' but that is not loaded."
-        end
+        return unless @params[:covariance_type] == 'full' && !enable_linalg?
+        raise "GaussianMixture##{method_name} requires Numo::Linalg when covariance_type is 'full' but that is not loaded."
       end
     end
   end
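
The hunk above rewrites check_enable_linalg as a guard clause; the behavior is unchanged: with covariance_type 'full' and Numo::Linalg not loaded, fitting still raises. A minimal sketch of that caller-visible behavior, assuming the public Rumale::Clustering::GaussianMixture API and an environment where Numo::Linalg is not installed (the data here is illustrative):

```ruby
# Sketch only: shows the error raised by the guard clause above when Numo::Linalg
# is not loaded and covariance_type is 'full'. Data and sizes are illustrative.
require 'rumale'

x = Numo::DFloat.new(20, 2).rand

gmm = Rumale::Clustering::GaussianMixture.new(n_clusters: 2, covariance_type: 'full')
begin
  gmm.fit(x)
rescue RuntimeError => e
  puts e.message
  # => GaussianMixture#fit requires Numo::Linalg when covariance_type is 'full' but that is not loaded.
end
```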
@@ -46,7 +46,7 @@ module Rumale
       #   If nil is given, number of leaves is not limited.
       # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
       # @param max_features [Integer] The number of features to consider when searching optimal split point.
-      #   If nil is given, split process considers all features.
+      #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
       # @param n_jobs [Integer] The number of jobs for running the fit method in parallel.
       #   If nil is given, the method does not execute in parallel.
       #   If zero or less is given, it becomes equal to the number of processors.
@@ -77,7 +77,7 @@ module Rumale
         check_sample_label_size(x, y)
         # Initialize some variables.
         n_features = x.shape[1]
-        @params[:max_features] = Math.sqrt(n_features).to_i unless @params[:max_features].is_a?(Integer)
+        @params[:max_features] = Math.sqrt(n_features).to_i if @params[:max_features].nil?
         @params[:max_features] = [[1, @params[:max_features]].max, n_features].min
         @classes = Numo::Int32.asarray(y.to_a.uniq.sort)
         sub_rng = @rng.dup
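
As the hunk above (and the matching regressor and extra-trees hunks below) shows, max_features now falls back to Math.sqrt(n_features).to_i only when it is nil, and is then clamped to the range 1..n_features. A small plain-Ruby illustration of that default, not library code:

```ruby
# Illustration of the default computed in the hunk above (plain Ruby, not library code).
n_features   = 64
max_features = nil

max_features = Math.sqrt(n_features).to_i if max_features.nil?  # => 8
max_features = [[1, max_features].max, n_features].min          # clamped to 1..n_features

puts max_features # => 8
```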
@@ -42,7 +42,7 @@ module Rumale
       #   If nil is given, number of leaves is not limited.
       # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
       # @param max_features [Integer] The number of features to consider when searching optimal split point.
-      #   If nil is given, split process considers all features.
+      #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
       # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
       #   If nil is given, the methods do not execute in parallel.
       #   If zero or less is given, it becomes equal to the number of processors.
@@ -73,7 +73,7 @@ module Rumale
         check_sample_tvalue_size(x, y)
         # Initialize some variables.
         n_features = x.shape[1]
-        @params[:max_features] = Math.sqrt(n_features).to_i unless @params[:max_features].is_a?(Integer)
+        @params[:max_features] = Math.sqrt(n_features).to_i if @params[:max_features].nil?
         @params[:max_features] = [[1, @params[:max_features]].max, n_features].min
         sub_rng = @rng.dup
         # Construct forest.
@@ -49,6 +49,7 @@ module Rumale
       # @param n_estimators [Integer] The numeber of trees for contructing classifier.
       # @param learning_rate [Float] The boosting learining rate
       # @param reg_lambda [Float] The L2 regularization term on weight.
+      # @param subsample [Float] The subsampling ratio of the training samples.
       # @param max_depth [Integer] The maximum depth of the tree.
       #   If nil is given, decision tree grows without concern for depth.
       # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
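
The added @param tag documents the subsample option of the gradient boosting estimators. A brief usage sketch, assuming the public Rumale::Ensemble::GradientBoostingClassifier constructor; the dataset and parameter values are illustrative:

```ruby
# Usage sketch for the subsample parameter documented above; the dataset is synthetic.
require 'rumale'

x = Numo::DFloat.new(100, 4).rand
y = Numo::Int32.cast(Array.new(100) { rand(2) })

estimator = Rumale::Ensemble::GradientBoostingClassifier.new(
  n_estimators: 50,
  learning_rate: 0.1,
  reg_lambda: 0.001,
  subsample: 0.8, # each tree is fitted on a random 80% subset of the training samples
  max_depth: 4,
  random_seed: 1
)
estimator.fit(x, y)
puts estimator.score(x, y)
```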
@@ -44,6 +44,7 @@ module Rumale
       # @param n_estimators [Integer] The numeber of trees for contructing regressor.
       # @param learning_rate [Float] The boosting learining rate
       # @param reg_lambda [Float] The L2 regularization term on weight.
+      # @param subsample [Float] The subsampling ratio of the training samples.
       # @param max_depth [Integer] The maximum depth of the tree.
       #   If nil is given, decision tree grows without concern for depth.
       # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
@@ -47,7 +47,7 @@ module Rumale
       #   If nil is given, number of leaves is not limited.
       # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
       # @param max_features [Integer] The number of features to consider when searching optimal split point.
-      #   If nil is given, split process considers all features.
+      #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
       # @param n_jobs [Integer] The number of jobs for running the fit method in parallel.
       #   If nil is given, the method does not execute in parallel.
       #   If zero or less is given, it becomes equal to the number of processors.
@@ -91,7 +91,7 @@ module Rumale
         check_sample_label_size(x, y)
         # Initialize some variables.
         n_samples, n_features = x.shape
-        @params[:max_features] = Math.sqrt(n_features).to_i unless @params[:max_features].is_a?(Integer)
+        @params[:max_features] = Math.sqrt(n_features).to_i if @params[:max_features].nil?
         @params[:max_features] = [[1, @params[:max_features]].max, n_features].min
         @classes = Numo::Int32.asarray(y.to_a.uniq.sort)
         sub_rng = @rng.dup
@@ -42,7 +42,7 @@ module Rumale
       #   If nil is given, number of leaves is not limited.
       # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
       # @param max_features [Integer] The number of features to consider when searching optimal split point.
-      #   If nil is given, split process considers all features.
+      #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
       # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
       #   If nil is given, the methods do not execute in parallel.
       #   If zero or less is given, it becomes equal to the number of processors.
@@ -85,7 +85,7 @@ module Rumale
         check_sample_tvalue_size(x, y)
         # Initialize some variables.
         n_samples, n_features = x.shape
-        @params[:max_features] = Math.sqrt(n_features).to_i unless @params[:max_features].is_a?(Integer)
+        @params[:max_features] = Math.sqrt(n_features).to_i if @params[:max_features].nil?
         @params[:max_features] = [[1, @params[:max_features]].max, n_features].min
         single_target = y.shape[1].nil?
         sub_rng = @rng.dup
@@ -3,5 +3,5 @@
 # Rumale is a machine learning library in Ruby.
 module Rumale
   # The version of Rumale you are using.
-  VERSION = '0.14.2'
+  VERSION = '0.14.3'
 end
data/rumale.gemspec CHANGED
@@ -20,7 +20,8 @@ Gem::Specification.new do |spec|
     Multi-layer Perceptron,
     Naive Bayes, Decision Tree, Gradient Tree Boosting, Random Forest,
     K-Means, Gaussian Mixture Model, DBSCAN, Spectral Clustering,
-    Mutidimensional Scaling, t-SNE, Principal Component Analysis, and Non-negative Matrix Factorization.
+    Mutidimensional Scaling, t-SNE, Principal Component Analysis, Non-negative Matrix Factorization,
+    and many other algorithms.
   MSG
   spec.homepage = 'https://github.com/yoshoku/rumale'
   spec.license = 'BSD-2-Clause'
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rumale
 version: !ruby/object:Gem::Version
-  version: 0.14.2
+  version: 0.14.3
 platform: ruby
 authors:
 - yoshoku
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-12-06 00:00:00.000000000 Z
+date: 2019-12-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray
@@ -130,7 +130,8 @@ description: |
   Multi-layer Perceptron,
   Naive Bayes, Decision Tree, Gradient Tree Boosting, Random Forest,
   K-Means, Gaussian Mixture Model, DBSCAN, Spectral Clustering,
-  Mutidimensional Scaling, t-SNE, Principal Component Analysis, and Non-negative Matrix Factorization.
+  Mutidimensional Scaling, t-SNE, Principal Component Analysis, Non-negative Matrix Factorization,
+  and many other algorithms.
 email:
 - yoshoku@outlook.com
 executables: []