rumale 0.8.3 → 0.8.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 38e459906ed25e84791a9d872e9536d633d9db2b
- data.tar.gz: ea80195eeacbaf3ed7fccaf828e3e809baa862a7
+ metadata.gz: 8e895ca9462569e1ec2b3b9c1cb985aebd7d4b19
+ data.tar.gz: 9374969bd955cada6ba54ec6edf1d7bc174a5102
  SHA512:
- metadata.gz: 639d266a1045d9ee1fbf37f770bf6171ff8afc5a8183441fbaf0283af7a17dc2e6e05ee44cb601c8b9ea301d5f46d914fc77601fd60345b904aff509f07e5277
- data.tar.gz: 7010cbf3f11f0139a0dda334cfce4f3fd8ffe059eb776f93e7aba32f91c3f517c8fe4723975ef2036e9981131d741345bab4158ce8354431a8a8b57000dacc2f
+ metadata.gz: 7e9eadf3404e74ee887007a1fb1df4e2b933f10394c1a61afdd8400492afcc4fbb40479c453983d2349db5d5c7cd52ab280a9ef28aa26d91b19cb41253bdb233
+ data.tar.gz: 674bf164a3f1be2971fa2f882999ed34a22773d15674d6c2c24da66af888be855ffd3daa1d66a08ee7ec4bc99c87e864ffe61ebb7d6356dfae78a0a20e97c3a9
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+ # 0.8.4
+ - Remove an unused parameter from Nadam.
+ - Fix the condition for stopping tree growth in the decision tree.
+
  # 0.8.3
  - Add optimizer class for AdaGrad.
  - Add evaluator class for ROC AUC.
data/README.md CHANGED
@@ -1,5 +1,7 @@
  # Rumale

+ ![Rumale](https://dl.dropboxusercontent.com/s/joxruk2720ur66o/rumale_header_400.png)
+
  [![Build Status](https://travis-ci.org/yoshoku/rumale.svg?branch=master)](https://travis-ci.org/yoshoku/rumale)
  [![Coverage Status](https://coveralls.io/repos/github/yoshoku/rumale/badge.svg?branch=master)](https://coveralls.io/github/yoshoku/rumale?branch=master)
  [![Gem Version](https://badge.fury.io/rb/rumale.svg)](https://badge.fury.io/rb/rumale)
@@ -9,7 +9,7 @@ module Rumale
  # Nadam is a class that implements Nadam optimizer.
  #
  # @example
- #   optimizer = Rumale::Optimizer::Nadam.new(learning_rate: 0.01, momentum: 0.9, decay1: 0.9, decay2: 0.999)
+ #   optimizer = Rumale::Optimizer::Nadam.new(learning_rate: 0.01, decay1: 0.9, decay2: 0.999)
  #   estimator = Rumale::LinearModel::LinearRegression.new(optimizer: optimizer, random_seed: 1)
  #   estimator.fit(samples, values)
  #
@@ -22,15 +22,13 @@ module Rumale
  # Create a new optimizer with Nadam
  #
  # @param learning_rate [Float] The initial value of learning rate.
- # @param momentum [Float] The initial value of momentum.
  # @param decay1 [Float] The smoothing parameter for the first moment.
  # @param decay2 [Float] The smoothing parameter for the second moment.
- def initialize(learning_rate: 0.01, momentum: 0.9, decay1: 0.9, decay2: 0.999)
-   check_params_float(learning_rate: learning_rate, momentum: momentum, decay1: decay1, decay2: decay2)
-   check_params_positive(learning_rate: learning_rate, momentum: momentum, decay1: decay1, decay2: decay2)
+ def initialize(learning_rate: 0.01, decay1: 0.9, decay2: 0.999)
+   check_params_float(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
+   check_params_positive(learning_rate: learning_rate, decay1: decay1, decay2: decay2)
    @params = {}
    @params[:learning_rate] = learning_rate
-   @params[:momentum] = momentum
    @params[:decay1] = decay1
    @params[:decay2] = decay2
    @fst_moment = nil
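The two hunks above (presumably from lib/rumale/optimizer/nadam.rb; the diff page does not show the path) drop the unused `momentum:` keyword from the constructor. A minimal usage sketch against the 0.8.4 signature; `samples` and `values` are placeholders, exactly as in the gem's own doc example:

```ruby
require 'rumale'

# momentum: is gone in 0.8.4; only the remaining keywords are accepted.
optimizer = Rumale::Optimizer::Nadam.new(learning_rate: 0.01, decay1: 0.9, decay2: 0.999)
estimator = Rumale::LinearModel::LinearRegression.new(optimizer: optimizer, random_seed: 1)
# estimator.fit(samples, values)  # samples/values: Numo::DFloat features and targets
```

Since the constructor takes only named keyword arguments, callers that still pass `momentum:` will get an ArgumentError (unknown keyword) and need to drop it when upgrading.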
@@ -67,21 +67,25 @@ module Rumale
  end

  def grow_node(depth, x, y, whole_impurity)
+   # initialize node.
+   n_samples, n_features = x.shape
+   node = Node.new(depth: depth, impurity: whole_impurity, n_samples: n_samples)
+
+   # terminate growing.
    unless @params[:max_leaf_nodes].nil?
      return nil if @n_leaves >= @params[:max_leaf_nodes]
    end

-   n_samples, n_features = x.shape
-   return nil if n_samples <= @params[:min_samples_leaf]
-
-   node = Node.new(depth: depth, impurity: whole_impurity, n_samples: n_samples)
-
-   return put_leaf(node, y) if stop_growing?(y)
+   return nil if n_samples < @params[:min_samples_leaf]
+   return put_leaf(node, y) if n_samples == @params[:min_samples_leaf]

    unless @params[:max_depth].nil?
      return put_leaf(node, y) if depth == @params[:max_depth]
    end

+   return put_leaf(node, y) if stop_growing?(y)
+
+   # calculate optimal parameters.
    feature_id, threshold, left_ids, right_ids, left_impurity, right_impurity, gain =
      rand_ids(n_features).map { |f_id| [f_id, *best_split(x[true, f_id], y, whole_impurity)] }.max_by(&:last)

@@ -111,7 +111,7 @@ module Rumale
  private

  def stop_growing?(y)
-   (y - y.mean(0)).sum.abs.zero?
+   y.to_a.uniq.size == 1
  end

  def put_leaf(node, y)
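The old purity test could report a mixed node as pure whenever the deviations from the mean cancel out. A small illustration with Numo (the array library rumale builds on); the label values are made up for the example:

```ruby
require 'numo/narray'

y = Numo::Int32[0, 1, 2]           # three different labels: clearly not a pure node
(y - y.mean(0)).sum.abs.zero?      # => true  -- old check wrongly says "stop growing"
y.to_a.uniq.size == 1              # => false -- new check keeps growing the tree
```

Any label vector symmetric around its mean (e.g. [0, 2, 1, 1]) triggered the same false stop, so affected trees could end up shallower than intended; the uniqueness check stops only when a single class or value remains.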
@@ -3,5 +3,5 @@
  # Rumale is a machine learning library in Ruby.
  module Rumale
    # The version of Rumale you are using.
-   VERSION = '0.8.3'
+   VERSION = '0.8.4'
  end
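The only change here is the version constant. A quick way to confirm which release is loaded after upgrading (a trivial check, not part of the diff):

```ruby
require 'rumale'

puts Rumale::VERSION  # => "0.8.4" once the new gem is installed
```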
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rumale
  version: !ruby/object:Gem::Version
-   version: 0.8.3
+   version: 0.8.4
  platform: ruby
  authors:
  - yoshoku
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2019-04-02 00:00:00.000000000 Z
+ date: 2019-04-20 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: numo-narray