ruby-dnn 0.9.2 → 0.9.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/dnn.rb +1 -0
- data/lib/dnn/core/layers.rb +5 -28
- data/lib/dnn/core/losses.rb +12 -6
- data/lib/dnn/core/model.rb +1 -1
- data/lib/dnn/core/regularizers.rb +36 -0
- data/lib/dnn/core/rnn_layers.rb +11 -32
- data/lib/dnn/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2926b7a9ca6f3766ee46bba676837bd4cb98d55220ba37e4ef12109ef244a444
+  data.tar.gz: 57db99b2200d3d0946c6bb886f97308f6b09e550f5ab66802f61a7d13bd9b8fe
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 19c77e661bb6b208c279ce80d85a115572e124bad50dd97f3c8ec1097aa1bf4f245cef11bee58c3ffbc5c8c3a47f4aa6e9e3bb89db0ba8c1c7ff6ef2066764b3
+  data.tar.gz: 9a8d414fcb3401b883bde2cd54f6b320817ee66389e43971f62a4d9f85e1888db36842d704c3c2392f27f68266f6dcd8faf207723dfca25195651966e3713652
|
data/lib/dnn.rb
CHANGED
@@ -18,6 +18,7 @@ require_relative "dnn/core/initializers"
 require_relative "dnn/core/layers"
 require_relative "dnn/core/activations"
 require_relative "dnn/core/losses"
+require_relative "dnn/core/regularizers"
 require_relative "dnn/core/cnn_layers"
 require_relative "dnn/core/rnn_layers"
 require_relative "dnn/core/optimizers"
data/lib/dnn/core/layers.rb
CHANGED
@@ -135,34 +135,11 @@ module DNN
       @params[:bias] = @bias = Param.new
     end
 
-    def lasso
-      if @l1_lambda > 0
-        @l1_lambda * @weight.data.abs.sum
-      else
-        0
-      end
-    end
-
-    def ridge
-      if @l2_lambda > 0
-        0.5 * @l2_lambda * (@weight.data**2).sum
-      else
-        0
-      end
-    end
-
-    def d_lasso
-      if @l1_lambda > 0
-        dlasso = Xumo::SFloat.ones(*@weight.data.shape)
-        dlasso[@weight.data < 0] = -1
-        @weight.grad += @l1_lambda * dlasso
-      end
-    end
-
-    def d_ridge
-      if @l2_lambda > 0
-        @weight.grad += @l2_lambda * @weight.data
-      end
+    def regularizers
+      regularizers = []
+      regularizers << Lasso.new(@l1_lambda, @weight) if @l1_lambda > 0
+      regularizers << Ridge.new(@l2_lambda, @weight) if @l2_lambda > 0
+      regularizers
     end
 
     def to_hash(merge_hash)
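The net effect of the layers.rb change: a Connection layer no longer computes penalty values and gradients itself through four methods (lasso, ridge, d_lasso, d_ridge); it only reports which regularizer objects apply to its weight, and the loss drives them. A minimal runnable sketch of that pattern, with a plain-Ruby stand-in for Param and a Float in place of an Xumo::SFloat array (DenseLike is hypothetical, not a library class):

# Stand-in for DNN::Param: a value plus its accumulated gradient.
Param = Struct.new(:data, :grad)

# Simplified Lasso operating on a single Float instead of an array.
class Lasso
  def initialize(l1_lambda, param)
    @l1_lambda = l1_lambda
    @param = param
  end

  # Add the L1 penalty to a running loss value.
  def forward(x)
    x + @l1_lambda * @param.data.abs
  end
end

class DenseLike
  def initialize(l1_lambda, weight)
    @l1_lambda = l1_lambda
    @weight = weight
  end

  # Mirrors the new Connection#regularizers: build the list on demand,
  # skipping any regularizer whose coefficient is zero.
  def regularizers
    regularizers = []
    regularizers << Lasso.new(@l1_lambda, @weight) if @l1_lambda > 0
    regularizers
  end
end

layer = DenseLike.new(0.01, Param.new(-2.0, 0.0))
p layer.regularizers.map { |r| r.forward(1.0) }  # => [1.02]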
data/lib/dnn/core/losses.rb
CHANGED
@@ -3,19 +3,25 @@ module DNN
 
   class Loss
     def forward(out, y, layers)
-      regularize = layers.select { |layer| layer.is_a?(Connection) }
-                         .reduce(0) { |sum, layer| sum + layer.lasso + layer.ridge }
-      loss(out, y) + regularize
+      loss_value = loss(out, y)
+      regularizers = layers.select { |layer| layer.is_a?(Connection) }
+                           .map { |layer| layer.regularizers }.flatten
+
+      regularizers.each do |regularizer|
+        loss_value = regularizer.forward(loss_value)
+      end
+      loss_value
     end
 
     def backward(y)
       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward'")
     end
 
-    def regularize(layers)
+    def regularizes_backward(layers)
       layers.select { |layer| layer.is_a?(Connection) }.each do |layer|
-        layer.d_lasso
-        layer.d_ridge
+        layer.regularizers.each do |regularizer|
+          regularizer.backward
+        end
       end
     end
 
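Loss#forward now folds the penalties into the scalar loss by threading loss_value through each regularizer's forward in turn, and regularizes_backward symmetrically lets each regularizer accumulate its own gradient. The fold itself is just this (plain lambdas standing in for regularizer objects; the numbers are made up for illustration):

loss_value = 0.5
# Two hypothetical penalty terms; each regularizer adds its own to the loss.
regularizers = [0.01, 0.002].map { |penalty| ->(x) { x + penalty } }

regularizers.each do |regularizer|
  loss_value = regularizer.call(loss_value)
end
p loss_value  # => 0.512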
data/lib/dnn/core/model.rb
CHANGED
data/lib/dnn/core/regularizers.rb
ADDED
@@ -0,0 +1,36 @@
+module DNN
+
+  class Lasso
+    def initialize(l1_lambda, param)
+      @l1_lambda = l1_lambda
+      @param = param
+    end
+
+    def forward(x)
+      x + @l1_lambda * @param.data.abs.sum
+    end
+
+    def backward
+      dlasso = Xumo::SFloat.ones(*@param.data.shape)
+      dlasso[@param.data < 0] = -1
+      @param.grad += @l1_lambda * dlasso
+    end
+  end
+
+
+  class Ridge
+    def initialize(l2_lambda, param)
+      @l2_lambda = l2_lambda
+      @param = param
+    end
+
+    def forward(x)
+      x + 0.5 * @l2_lambda * (@param.data**2).sum
+    end
+
+    def backward
+      @param.grad += @l2_lambda * @param.data
+    end
+  end
+
+end
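The two classes carry the standard penalty math: Lasso#forward adds l1 * sum(|w|) to the loss and Lasso#backward adds the subgradient l1 * sign(w) to the weight gradient (an array of ones with the sign flipped wherever the weight is negative, so sign(0) is treated as +1), while Ridge#forward adds 0.5 * l2 * sum(w**2) and Ridge#backward adds l2 * w. The same arithmetic checked on plain Ruby arrays (the library itself uses Xumo::SFloat):

l1_lambda = 0.5
l2_lambda = 0.25
weights = [0.5, -1.5, 2.0]

# Lasso forward: l1 * sum(|w|) is added to the loss.
p l1_lambda * weights.sum { |w| w.abs }       # => 2.0

# Lasso backward: l1 * sign(w), via the ones-with-flipped-sign trick.
dlasso = weights.map { |w| w < 0 ? -1.0 : 1.0 }
p dlasso.map { |d| l1_lambda * d }            # => [0.5, -0.5, 0.5]

# Ridge forward: 0.5 * l2 * sum(w**2) is added to the loss.
p 0.5 * l2_lambda * weights.sum { |w| w**2 }  # => 0.8125

# Ridge backward: l2 * w is added to the gradient.
p weights.map { |w| l2_lambda * w }           # => [0.125, -0.375, 0.5]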
data/lib/dnn/core/rnn_layers.rb
CHANGED
@@ -9,7 +9,7 @@ module DNN
     attr_reader :num_nodes
     # @return [Bool] Maintain state between batches.
     attr_reader :stateful
-    # @return [Bool]
+    # @return [Bool] Set the false, only the last of each cell of RNN is left.
     attr_reader :return_sequences
 
     def initialize(num_nodes,
@@ -82,38 +82,17 @@
       @hidden.data = @hidden.data.fill(0) if @hidden.data
     end
 
-    def lasso
+    def regularizers
+      regularizers = []
       if @l1_lambda > 0
-        @l1_lambda * (@weight.data.abs.sum + @weight2.data.abs.sum)
-      else
-        0
+        regularizers << Lasso.new(@l1_lambda, @weight)
+        regularizers << Lasso.new(@l1_lambda, @weight2)
       end
-    end
-
-    def ridge
-      if @l2_lambda > 0
-        0.5 * (@l2_lambda * ((@weight.data**2).sum + (@weight2.data**2).sum))
-      else
-        0
-      end
-    end
-
-    def d_lasso
-      if @l1_lambda > 0
-        dlasso = Xumo::SFloat.ones(*@weight.data.shape)
-        dlasso[@weight.data < 0] = -1
-        @weight.grad += @l1_lambda * dlasso
-        dlasso2 = Xumo::SFloat.ones(*@weight2.data.shape)
-        dlasso2[@weight2.data < 0] = -1
-        @weight2.grad += @l1_lambda * dlasso2
-      end
-    end
-
-    def d_ridge
       if @l2_lambda > 0
-        @weight.grad += @l2_lambda * @weight.data
-        @weight2.grad += @l2_lambda * @weight2.data
+        regularizers << Ridge.new(@l2_lambda, @weight)
+        regularizers << Ridge.new(@l2_lambda, @weight2)
       end
+      regularizers
     end
 
     private
@@ -181,8 +160,8 @@
             return_sequences: return_sequences,
             weight_initializer: weight_initializer,
             bias_initializer: bias_initializer,
-            l1_lambda:
-            l2_lambda:
+            l1_lambda: l1_lambda,
+            l2_lambda: l2_lambda)
       @activation = activation
     end
@@ -202,7 +181,7 @@
       @weight_initializer.init_param(self, @weight2)
       @bias_initializer.init_param(self, @bias)
       @time_length.times do |t|
-        @layers << SimpleRNN_Dense.new(@weight, @weight2, @bias)
+        @layers << SimpleRNN_Dense.new(@weight, @weight2, @bias, @activation)
       end
     end
   end
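A recurrent layer carries two weight matrices (the input-to-hidden @weight and the hidden-to-hidden @weight2), so its regularizers method pushes one Lasso and/or one Ridge per matrix; the last hunk separately fixes SimpleRNN to hand its @activation to each per-timestep SimpleRNN_Dense cell. A sketch of just the collection logic with stub objects (Reg and rnn_regularizers are hypothetical, for illustration only):

# Stub regularizer: records its kind, coefficient, and target parameter.
Reg = Struct.new(:kind, :coeff, :param)

def rnn_regularizers(l1_lambda, l2_lambda, weight, weight2)
  regularizers = []
  if l1_lambda > 0
    regularizers << Reg.new(:lasso, l1_lambda, weight)
    regularizers << Reg.new(:lasso, l1_lambda, weight2)
  end
  if l2_lambda > 0
    regularizers << Reg.new(:ridge, l2_lambda, weight)
    regularizers << Reg.new(:ridge, l2_lambda, weight2)
  end
  regularizers
end

p rnn_regularizers(0.01, 0.0, :weight, :weight2).map { |r| [r.kind, r.param] }
# => [[:lasso, :weight], [:lasso, :weight2]]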
data/lib/dnn/version.rb
CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.9.2"
+  VERSION = "0.9.3"
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.9.2
+  version: 0.9.3
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-05-
+date: 2019-05-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray
@@ -120,6 +120,7 @@ files:
 - lib/dnn/core/model.rb
 - lib/dnn/core/optimizers.rb
 - lib/dnn/core/param.rb
+- lib/dnn/core/regularizers.rb
 - lib/dnn/core/rnn_layers.rb
 - lib/dnn/core/utils.rb
 - lib/dnn/lib/cifar10.rb