ruby-dnn 0.5.10 → 0.5.11

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c7df0888afbde3ac66d5aaa66d7ef87f66edd58156945d4f1663552994e0d12f
-  data.tar.gz: 4c638c3f14f4cb0bea17ec31225c19aa2ca30b14a4e81c12bb8ee791137d3072
+  metadata.gz: 7042e521d632ba478fe494c644e5fa9c525f0dbc98ff168a39cafc0d7990db78
+  data.tar.gz: 624f38a50832523e0bbb42d7b392d7e77b0c926d6cd28258ba158ce50a4be56b
 SHA512:
-  metadata.gz: 4d3672bcb7f3763bd269d1adda133f4e90a8e1ab70f91bc5fb971eb18dfa346ebed16e1b88269fcb923d97fb2cdf131d680f74c487546ec0d121be8254a85e6f
-  data.tar.gz: 0f1f13a53ffb712ac97fea6cd9d06cb10d1076c032728cdc056f7e5489e8a97ac86e6000ce66a62858c1320441ac0b8562d3c47f44ec14eca0203995a80b48e5
+  metadata.gz: 6aa33554d27f1f1b126e4dc65a191d84f3e5f96648a0b14368fa456aa703214c60f568d37416b2d09478e6ea9f03dc819c61e18663f5c523dcdb3e47a20a9ae9
+  data.tar.gz: 12efc285e47702e6134518a13c582354329344b0e6a754f73e760794d1952b1d381848c534a4e0e571c26afc38ee98cb4e381b22040f26849bce4292aaf88997
data/API-Reference.ja.md CHANGED
@@ -2,7 +2,7 @@
 This is the API reference for ruby-dnn. It documents only the classes and methods needed to use the API.
 For implementation details, please refer to the source code.
 
-Last updated for version: 0.5.10
+Last updated for version: 0.5.11
 
 # module DNN
 The module that serves as the ruby-dnn namespace.
@@ -414,13 +414,16 @@ Float
 
 ## 【Instance methods】
 
-## def initialize(num_nodes, stateful: false, weight_initializer: nil, bias_initializer: nil, weight_decay: 0)
+## def initialize(num_nodes, stateful: false, return_sequences: true, weight_initializer: nil, bias_initializer: nil, weight_decay: 0)
 Constructor.
 ### arguments
 * Integer num_nodes
   Sets the number of nodes in the layer.
 * bool stateful
   If true, training can reuse the hidden-state values computed on the previous call.
+* bool return_sequences
+  If true, the layer outputs the hidden state at every time step.
+  If false, the layer outputs only the hidden state of the last time step.
 * Layer activation
   Sets the activation function used by the recurrent neural network.
   If nil is given, Tanh is used.
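
The new keyword mirrors the usual many-to-many vs. many-to-one distinction. A minimal usage sketch (the keyword and its default come from this diff; the SimpleRNN class name and constant namespace are assumptions, not shown in this changeset):

    require "dnn"

    # return_sequences: true (the default) keeps the whole sequence:
    # output shape is [time_length, num_nodes].
    rnn_seq  = DNN::SimpleRNN.new(128, return_sequences: true)

    # return_sequences: false keeps only the final hidden state:
    # output shape is [num_nodes], e.g. for a sequence classifier.
    rnn_last = DNN::SimpleRNN.new(128, return_sequences: false)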
@@ -15,12 +15,11 @@ module DNN
 
   class Tanh < Layer
     def forward(x)
-      @x = x
-      Xumo::NMath.tanh(x)
+      @out = Xumo::NMath.tanh(x)
     end
 
     def backward(dout)
-      dout * (1.0 / Xumo::NMath.cosh(@x)**2)
+      dout * (1 - @out**2)
     end
   end
 
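The two backward formulas are mathematically identical; the rewrite simply lets the layer cache its output instead of its input:

    \frac{d}{dx}\tanh(x) = \frac{1}{\cosh^2(x)} = 1 - \tanh^2(x)

Since forward already computes tanh(x) and stores it as @out, backward can evaluate dout * (1 - @out**2) directly, avoiding both the stored input and the extra cosh evaluation.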
@@ -38,6 +38,7 @@ module DNN
     def self.load_hash(hash)
       self.new(hash[:num_nodes],
                stateful: hash[:stateful],
+               return_sequences: hash[:return_sequences],
                activation: Util.load_hash(hash[:activation]),
                weight_initializer: Util.load_hash(hash[:weight_initializer]),
                bias_initializer: Util.load_hash(hash[:bias_initializer]),
@@ -46,6 +47,7 @@ module DNN
 
     def initialize(num_nodes,
                    stateful: false,
+                   return_sequences: true,
                    activation: nil,
                    weight_initializer: nil,
                    bias_initializer: nil,
@@ -53,6 +55,7 @@ module DNN
       super()
       @num_nodes = num_nodes
       @stateful = stateful
+      @return_sequences = return_sequences
       @activation = (activation || Tanh.new)
       @weight_initializer = (weight_initializer || RandomNormal.new)
       @bias_initializer = (bias_initializer || Zeros.new)
@@ -63,7 +66,7 @@ module DNN
 
     def forward(xs)
       @xs_shape = xs.shape
-      hs = Xumo::SFloat.zeros(xs.shape[0], *shape)
+      hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
       h = (@stateful && @h) ? @h : Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
       xs.shape[1].times do |t|
         x = xs[true, t, false]
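
The allocation change above is required, not cosmetic: shape now returns [@num_nodes] when return_sequences is false, so splatting it would silently drop the time axis of the hidden-state buffer. A standalone illustration (numo-narray only; the sizes are made up):

    require "numo/narray"

    batch, time_length, num_nodes = 2, 4, 3
    shape = [num_nodes]                          # what `shape` returns with return_sequences: false
    old_hs = Numo::SFloat.zeros(batch, *shape)   # shape [2, 3] -- time axis lost
    new_hs = Numo::SFloat.zeros(batch, time_length, num_nodes)  # shape [2, 4, 3]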
@@ -71,13 +74,18 @@ module DNN
         hs[true, t, false] = h
       end
       @h = h
-      hs
+      @return_sequences ? hs : h
     end
 
     def backward(dh2s)
       @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
       @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
       @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
+      unless @return_sequences
+        dh = dh2s
+        dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
+        dh2s[true, -1, false] = dh
+      end
       dxs = Xumo::SFloat.zeros(@xs_shape)
       dh = 0
       (0...dh2s.shape[1]).to_a.reverse.each do |t|
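
When return_sequences is false, the incoming gradient is 2-D ([batch, num_nodes]) rather than 3-D, so the hunk above widens it back to a full zero sequence with the gradient placed at the last time step; the existing backpropagation-through-time loop then runs unchanged. A standalone sketch of that expansion (numo-narray only; shapes are illustrative):

    require "numo/narray"

    batch, time_length, num_nodes = 2, 4, 3
    dh   = Numo::SFloat.ones(batch, num_nodes)              # gradient w.r.t. the last hidden state
    dh2s = Numo::SFloat.zeros(batch, time_length, num_nodes)
    dh2s[true, -1, false] = dh                              # only the last step receives gradient
    # The loop can now walk t from time_length - 1 down to 0 as before.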
@@ -89,7 +97,7 @@ module DNN
     end
 
     def shape
-      [@time_length, @num_nodes]
+      @return_sequences ? [@time_length, @num_nodes] : [@num_nodes]
     end
 
     def ridge
@@ -103,6 +111,7 @@ module DNN
     def to_hash
       super({num_nodes: @num_nodes,
              stateful: @stateful,
+             return_sequences: @return_sequences,
              activation: @activation.to_hash,
              weight_initializer: @weight_initializer.to_hash,
              bias_initializer: @bias_initializer.to_hash,
@@ -112,7 +121,7 @@ module DNN
     private
 
     def init_params
-      @time_length = prev_layer.shape[0]
+      @time_length = prev_layer.shape[0]
       num_prev_nodes = prev_layer.shape[1]
       @params[:weight] = Xumo::SFloat.new(num_prev_nodes, @num_nodes)
       @params[:weight2] = Xumo::SFloat.new(@num_nodes, @num_nodes)
@@ -128,8 +137,6 @@ module DNN
 
 
   class LSTM_Dense
-    include Xumo
-
     def initialize(params, grads)
       @params = params
       @grads = grads
@@ -148,8 +155,8 @@ module DNN
       a = x.dot(@params[:weight]) + h.dot(@params[:weight2]) + @params[:bias]
 
       @forget = @forget_sigmoid.forward(a[true, 0...num_nodes])
-      @g = @g_tanh.forward(a[(num_nodes * 2)...(num_nodes * 3)])
-      @in = @in_sigmoid.forward(a[true, num_nodes...(num_nodes * 2)])
+      @g = @g_tanh.forward(a[true, num_nodes...(num_nodes * 2)])
+      @in = @in_sigmoid.forward(a[true, (num_nodes * 2)...(num_nodes * 3)])
       @out = @out_sigmoid.forward(a[true, (num_nodes * 3)..-1])
 
       @cell2 = @forget * cell + @g * @in
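
This hunk fixes the fused gate slicing: `a` packs all four gate pre-activations side by side in 4 * num_nodes columns, and the old code both omitted the `true` row selector on the g slice and swapped the column ranges of g and the input gate. The corrected layout, as a runnable slice demo (numo-narray only; n is illustrative):

    require "numo/narray"

    n = 2                                  # num_nodes
    a = Numo::SFloat.new(1, 4 * n).seq     # stand-in for the fused pre-activations
    forget = a[true, 0...n]                # columns 0...n    -> forget gate (sigmoid)
    g      = a[true, n...(n * 2)]          # columns n...2n   -> cell candidate (tanh)
    input  = a[true, (n * 2)...(n * 3)]    # columns 2n...3n  -> input gate (sigmoid)
    out    = a[true, (n * 3)..-1]          # columns 3n...4n  -> output gate (sigmoid)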
@@ -192,6 +199,7 @@ module DNN
     def self.load_hash(hash)
       self.new(hash[:num_nodes],
                stateful: hash[:stateful],
+               return_sequences: hash[:return_sequences],
                weight_initializer: Util.load_hash(hash[:weight_initializer]),
                bias_initializer: Util.load_hash(hash[:bias_initializer]),
                weight_decay: hash[:weight_decay])
@@ -199,12 +207,14 @@ module DNN
 
     def initialize(num_nodes,
                    stateful: false,
+                   return_sequences: true,
                    weight_initializer: nil,
                    bias_initializer: nil,
                    weight_decay: 0)
       super()
       @num_nodes = num_nodes
       @stateful = stateful
+      @return_sequences = return_sequences
       @weight_initializer = (weight_initializer || RandomNormal.new)
       @bias_initializer = (bias_initializer || Zeros.new)
       @weight_decay = weight_decay
@@ -215,7 +225,7 @@ module DNN
 
     def forward(xs)
       @xs_shape = xs.shape
-      hs = Xumo::SFloat.zeros(xs.shape[0], *shape)
+      hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
       h = nil
       cell = nil
       if @stateful
@@ -231,13 +241,18 @@ module DNN
       end
       @h = h
       @cell = cell
-      hs
+      @return_sequences ? hs : h
     end
 
     def backward(dh2s)
       @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
       @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
       @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
+      unless @return_sequences
+        dh = dh2s
+        dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
+        dh2s[true, -1, false] = dh
+      end
       dxs = Xumo::SFloat.zeros(@xs_shape)
       dh = 0
       dcell = 0
@@ -250,7 +265,7 @@ module DNN
     end
 
     def shape
-      [@time_length, @num_nodes]
+      @return_sequences ? [@time_length, @num_nodes] : [@num_nodes]
     end
 
     def ridge
@@ -264,6 +279,7 @@ module DNN
     def to_hash
       super({num_nodes: @num_nodes,
              stateful: @stateful,
+             return_sequences: @return_sequences,
              weight_initializer: @weight_initializer.to_hash,
              bias_initializer: @bias_initializer.to_hash,
              weight_decay: @weight_decay})
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.5.10"
+  VERSION = "0.5.11"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.5.10
+  version: 0.5.11
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-08-12 00:00:00.000000000 Z
+date: 2018-08-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray