ruby-dnn 0.9.2 → 0.9.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: dc2897efaefa857bc21c4a4237c4c8d6fac3ec508708ba0935874050f8dab7f9
-  data.tar.gz: 2eaea58d620043e47c197f1ace3ec94f8108d84dbe3883a0455b64f39c8d6f82
+  metadata.gz: 2926b7a9ca6f3766ee46bba676837bd4cb98d55220ba37e4ef12109ef244a444
+  data.tar.gz: 57db99b2200d3d0946c6bb886f97308f6b09e550f5ab66802f61a7d13bd9b8fe
 SHA512:
-  metadata.gz: e74b37d9d31af87cc833b4237b2f90051199a2020d645b733e5af2eb6f07b26b87debaac651818a95fca27ffeedb3c08a62c55191e3213b6b7a8c85762ba91fd
-  data.tar.gz: b4b25e79cd9ac57e464d70248178048473e4e6a26bcae34da9b5b63f109c336be5f3ccec700a23dc9dce427b7014be81b41ba547fa59ad3f5d9ebb5fa5d881c6
+  metadata.gz: 19c77e661bb6b208c279ce80d85a115572e124bad50dd97f3c8ec1097aa1bf4f245cef11bee58c3ffbc5c8c3a47f4aa6e9e3bb89db0ba8c1c7ff6ef2066764b3
+  data.tar.gz: 9a8d414fcb3401b883bde2cd54f6b320817ee66389e43971f62a4d9f85e1888db36842d704c3c2392f27f68266f6dcd8faf207723dfca25195651966e3713652
data/lib/dnn.rb CHANGED
@@ -18,6 +18,7 @@ require_relative "dnn/core/initializers"
 require_relative "dnn/core/layers"
 require_relative "dnn/core/activations"
 require_relative "dnn/core/losses"
+require_relative "dnn/core/regularizers"
 require_relative "dnn/core/cnn_layers"
 require_relative "dnn/core/rnn_layers"
 require_relative "dnn/core/optimizers"
data/lib/dnn/core/layers.rb CHANGED
@@ -135,34 +135,11 @@ module DNN
       @params[:bias] = @bias = Param.new
     end
 
-    def lasso
-      if @l1_lambda > 0
-        @l1_lambda * @weight.data.abs.sum
-      else
-        0
-      end
-    end
-
-    def ridge
-      if @l2_lambda > 0
-        0.5 * @l2_lambda * (@weight.data**2).sum
-      else
-        0
-      end
-    end
-
-    def d_lasso
-      if @l1_lambda > 0
-        dlasso = Xumo::SFloat.ones(*@weight.data.shape)
-        dlasso[@weight.data < 0] = -1
-        @weight.grad += @l1_lambda * dlasso
-      end
-    end
-
-    def d_ridge
-      if @l2_lambda > 0
-        @weight.grad += @l2_lambda * @weight.data
-      end
+    def regularizers
+      regularizers = []
+      regularizers << Lasso.new(@l1_lambda, @weight) if @l1_lambda > 0
+      regularizers << Ridge.new(@l2_lambda, @weight) if @l2_lambda > 0
+      regularizers
     end
 
     def to_hash(merge_hash)
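
The four inline penalty methods on Connection collapse into a single regularizers accessor that returns penalty objects for the loss to consume. A minimal sketch of that builder pattern, using hypothetical stand-in structs rather than the gem's real classes (which appear in the new regularizers.rb below):

    # Hypothetical stand-ins; the real Lasso/Ridge live in dnn/core/regularizers.rb.
    Lasso = Struct.new(:coeff, :param)
    Ridge = Struct.new(:coeff, :param)

    def regularizers_for(weight, l1_lambda: 0, l2_lambda: 0)
      regs = []
      regs << Lasso.new(l1_lambda, weight) if l1_lambda > 0
      regs << Ridge.new(l2_lambda, weight) if l2_lambda > 0
      regs  # empty when both lambdas are zero, so callers can iterate unconditionally
    end

    p regularizers_for(:w).size                                    # => 0
    p regularizers_for(:w, l1_lambda: 0.01, l2_lambda: 0.01).size  # => 2
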
data/lib/dnn/core/losses.rb CHANGED
@@ -3,19 +3,25 @@ module DNN
 
   class Loss
     def forward(out, y, layers)
-      regularize = layers.select { |layer| layer.is_a?(Connection) }
-                         .reduce(0) { |sum, layer| sum + layer.lasso + layer.ridge }
-      loss(out, y) + regularize
+      loss_value = loss(out, y)
+      regularizers = layers.select { |layer| layer.is_a?(Connection) }
+                           .map { |layer| layer.regularizers }.flatten
+
+      regularizers.each do |regularizer|
+        loss_value = regularizer.forward(loss_value)
+      end
+      loss_value
     end
 
     def backward(y)
       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward'")
     end
 
-    def d_regularize(layers)
+    def regularizes_backward(layers)
       layers.select { |layer| layer.is_a?(Connection) }.each do |layer|
-        layer.d_lasso
-        layer.d_ridge
+        layer.regularizers.each do |regularizer|
+          regularizer.backward
+        end
       end
     end
 
data/lib/dnn/core/model.rb CHANGED
@@ -256,7 +256,7 @@ module DNN
       loss_value = @loss.forward(out, y, get_all_layers)
       dout = @loss.backward(y)
       backward(dout)
-      @loss.d_regularize(get_all_layers)
+      @loss.regularizes_backward(get_all_layers)
       update
       loss_value
     end
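
Taken together with the Loss changes above: forward now threads the scalar loss through each penalty object's forward, which is simply a fold over the collected regularizers, and train_on_batch calls the renamed regularizes_backward after backward, so the penalty gradients are accumulated into the same Param grads before update hands them to the optimizer. (regularizes_backward is the gem's own spelling of the new name.) A self-contained sketch of the fold, with a stub Penalty class rather than the gem's API:

    # Stub: each penalty adds a fixed amount, the way Lasso/Ridge add their terms.
    Penalty = Struct.new(:amount) do
      def forward(x)
        x + amount
      end
    end

    penalties = [Penalty.new(0.25), Penalty.new(0.5)]

    loss_value = 1.0
    penalties.each { |r| loss_value = r.forward(loss_value) }
    p loss_value  # => 1.75

    # Equivalent reduce form:
    p penalties.reduce(1.0) { |acc, r| r.forward(acc) }  # => 1.75
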
data/lib/dnn/core/regularizers.rb ADDED
@@ -0,0 +1,36 @@
+module DNN
+
+  class Lasso
+    def initialize(l1_lambda, param)
+      @l1_lambda = l1_lambda
+      @param = param
+    end
+
+    def forward(x)
+      x + @l1_lambda * @param.data.abs.sum
+    end
+
+    def backward
+      dlasso = Xumo::SFloat.ones(*@param.data.shape)
+      dlasso[@param.data < 0] = -1
+      @param.grad += @l1_lambda * dlasso
+    end
+  end
+
+
+  class Ridge
+    def initialize(l2_lambda, param)
+      @l2_lambda = l2_lambda
+      @param = param
+    end
+
+    def forward(x)
+      x + 0.5 * @l2_lambda * (@param.data**2).sum
+    end
+
+    def backward
+      @param.grad += @l2_lambda * @param.data
+    end
+  end
+
+end
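
The new classes are small enough to sanity-check in isolation. A hedged sketch, assuming ruby-dnn 0.9.3 is installed (so Xumo resolves to Numo) and using a throwaway stand-in for the gem's Param, since Lasso and Ridge only ever touch a param's data and grad:

    require "dnn"

    # Throwaway stand-in: only .data and .grad are read or written.
    FakeParam = Struct.new(:data, :grad)

    w = FakeParam.new(Numo::SFloat[-1, 2], Numo::SFloat.zeros(2))

    lasso = DNN::Lasso.new(0.1, w)
    p lasso.forward(0.0)  # 0 + 0.1 * (|-1| + |2|) => ~0.3
    lasso.backward        # grad += 0.1 * sign mask of w.data
    p w.grad.to_a         # => approximately [-0.1, 0.1]

    ridge = DNN::Ridge.new(0.1, w)
    p ridge.forward(0.0)  # 0 + 0.5 * 0.1 * (1 + 4) => 0.25
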
data/lib/dnn/core/rnn_layers.rb CHANGED
@@ -9,7 +9,7 @@ module DNN
     attr_reader :num_nodes
     # @return [Bool] Maintain state between batches.
     attr_reader :stateful
-    # @return [Bool] Only the last of each cell of RNN is left.
+    # @return [Bool] Set to false to keep only the output of the last RNN cell.
     attr_reader :return_sequences
 
     def initialize(num_nodes,
@@ -82,38 +82,17 @@ module DNN
       @hidden.data = @hidden.data.fill(0) if @hidden.data
     end
 
-    def lasso
+    def regularizers
+      regularizers = []
       if @l1_lambda > 0
-        @l1_lambda * (@weight.data.abs.sum + @weight2.data.abs.sum)
-      else
-        0
+        regularizers << Lasso.new(@l1_lambda, @weight)
+        regularizers << Lasso.new(@l1_lambda, @weight2)
       end
-    end
-
-    def ridge
-      if @l2_lambda > 0
-        0.5 * (@l2_lambda * ((@weight.data**2).sum + (@weight2.data**2).sum))
-      else
-        0
-      end
-    end
-
-    def d_lasso
-      if @l1_lambda > 0
-        dlasso = Xumo::SFloat.ones(*@weight.data.shape)
-        dlasso[@weight.data < 0] = -1
-        @weight.grad += @l1_lambda * dlasso
-        dlasso2 = Xumo::SFloat.ones(*@weight2.data.shape)
-        dlasso2[@weight2.data < 0] = -1
-        @weight2.grad += @l1_lambda * dlasso2
-      end
-    end
-
-    def d_ridge
       if @l2_lambda > 0
-        @weight.grad += l2_lambda * @weight.data
-        @weight2.grad += l2_lambda * @weight2.data
+        regularizers << Ridge.new(@l2_lambda, @weight)
+        regularizers << Ridge.new(@l2_lambda, @weight2)
       end
+      regularizers
     end
 
     private
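
Unlike Connection, an RNN layer carries two weight params (the input kernel @weight and the recurrent kernel @weight2), so its regularizers method registers one penalty object per weight: with both lambdas non-zero it returns four entries, two Lasso and two Ridge. Note in passing that the removed d_ridge referenced l2_lambda without the @ sigil, unlike every sibling method; the refactor makes that inconsistency moot.
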
@@ -181,8 +160,8 @@ module DNN
             return_sequences: return_sequences,
             weight_initializer: weight_initializer,
             bias_initializer: bias_initializer,
-            l1_lambda: 0,
-            l2_lambda: 0)
+            l1_lambda: l1_lambda,
+            l2_lambda: l2_lambda)
       @activation = activation
     end
 
@@ -202,7 +181,7 @@ module DNN
       @weight_initializer.init_param(self, @weight2)
       @bias_initializer.init_param(self, @bias)
       @time_length.times do |t|
-        @layers << SimpleRNN_Dense.new(@weight, @weight2, @bias)
+        @layers << SimpleRNN_Dense.new(@weight, @weight2, @bias, @activation)
       end
     end
   end
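
The last two SimpleRNN hunks are behavioral fixes rather than refactoring. The constructor previously passed hard-coded zeros for l1_lambda and l2_lambda to super, silently discarding whatever the caller supplied; both values are now forwarded. And each per-timestep SimpleRNN_Dense cell is now built with the layer's @activation, so the configured activation presumably reaches the cells instead of a default.
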
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.9.2"
+  VERSION = "0.9.3"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.9.2
+  version: 0.9.3
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-05-06 00:00:00.000000000 Z
+date: 2019-05-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray
@@ -120,6 +120,7 @@ files:
 - lib/dnn/core/model.rb
 - lib/dnn/core/optimizers.rb
 - lib/dnn/core/param.rb
+- lib/dnn/core/regularizers.rb
 - lib/dnn/core/rnn_layers.rb
 - lib/dnn/core/utils.rb
 - lib/dnn/lib/cifar10.rb