ruby-dnn 0.5.5 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c8f2e9e1ed8484a0c44c9f09f5215ff85cb05216f87965c5ce9691223d89247a
- data.tar.gz: 2974f71d2b573d584b9238a574b69721230f19a1ff3558a59852d94168471bdd
+ metadata.gz: 4e54e26acfc92ade6be07da5c62e340b8d8c021a69b390a3a3bba21853ee6f3d
+ data.tar.gz: b111ee117a07e554d7166512756caf0da329ca70a9241495b268761597a48d98
  SHA512:
- metadata.gz: 9ff44365ec20680b547161c43a525c5d76cc8b0f07c9394bd4e836e1ca188a327067126048b6788a1127e5d52ae3734f39f6f6e67d89985bcbdf98dac2086cc2
- data.tar.gz: a1cd8a85e491a9668f0f4401dde8fc0b35d4db8fc1d81bdad7c4a4cb9df5317dd816cd864717a6f9a7d0d2908715c43252dd08be8400aa3f8d9944ebc760757a
+ metadata.gz: 82877266a1b2e01478e545fbf04071bd12a4c8ab07b13c2f11dc413aa7a6842d50a75206e7359ead5fd13a734bc3eccf9e7fb0122af3b20f92a989574c7e0741
+ data.tar.gz: 69c4999978dd9b4c2b4d7d2fbc3d4d6784b6894e606cee50eaaa1e1235542ca46f188a2f69c3d9ff7efb61a1e238660c2529424d2b9642d16d573b30eecce587
data/API-Reference.ja.md CHANGED
@@ -2,7 +2,7 @@
  This is the API reference for ruby-dnn. It covers only the classes and methods required to use the API.
  For further details of the implementation, please refer to the source code.
 
- Last updated for version: 0.5.3
+ Last updated for version: 0.5.6
 
  # module DNN
  The module that serves as the namespace for ruby-dnn.
@@ -387,6 +387,45 @@ Array
  When specifying with an Array, use the form [Integer height, Integer width].
 
 
+ # class SimpleRNN < HasParamLayer
+ A layer class implementing a simple recurrent neural network.
+
+ ## 【Properties】
+
+ ## attr_reader :num_nodes
+ Integer
+ Returns the number of nodes in the layer.
+
+ ## attr_reader :stateful
+ bool
+ Returns whether the layer is stateful.
+
+ ## attr_reader :weight_decay
+ Float
+ Returns the weight decay coefficient.
+
+ ## 【Instance methods】
+
+ ## def initialize(num_nodes, stateful: false, activation: nil, weight_initializer: nil, bias_initializer: nil, weight_decay: 0)
+ Constructor.
+ ### arguments
+ * Integer num_nodes
+ Sets the number of nodes in the layer.
+ * bool stateful
+ If set to true, training uses the hidden-layer values computed in the previous step.
+ * Layer activation: nil
+ Sets the activation function used by the recurrent neural network.
+ If nil is given, Tanh is used.
+ * Initializer weight_initializer: nil
+ Sets the initializer used to initialize the weights.
+ If nil is given, the RandomNormal initializer is used.
+ * Initializer bias_initializer: nil
+ Sets the initializer used to initialize the biases.
+ If nil is given, the Zeros initializer is used.
+ * Float weight_decay: 0
+ Sets the weight decay coefficient.
+
+
  # class Flatten
  Flattens N-dimensional data.
 
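For orientation, here is a hypothetical usage sketch of the layer documented above. The `Model`, `InputLayer`, `Flatten`, and `Dense` names and the `<<` wiring are assumed from the rest of the gem's documentation; they are not part of this diff:

```ruby
require "dnn"

# Hypothetical sketch: feed 28 time steps of 28 features through the new
# SimpleRNN layer. activation: defaults to Tanh, and stateful: false resets
# the hidden state on every forward pass.
model = DNN::Model.new
model << DNN::Layers::InputLayer.new([28, 28])
model << DNN::Layers::SimpleRNN.new(64, stateful: false, weight_decay: 0.01)
model << DNN::Layers::Flatten.new
model << DNN::Layers::Dense.new(10)
```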
data/lib/dnn.rb CHANGED
@@ -11,7 +11,8 @@ require "dnn/core/error"
  require "dnn/core/model"
  require "dnn/core/initializers"
  require "dnn/core/layers"
- require "dnn/core/cnn_layers"
  require "dnn/core/activations"
+ require "dnn/core/cnn_layers"
+ require "dnn/core/rnn_layers"
  require "dnn/core/optimizers"
  require "dnn/core/util"
@@ -107,6 +107,39 @@ module DNN
        (@out - y).abs.sum / batch_size + ridge
      end
    end
+
+
+   class IdentityHuber < Layers::OutputLayer
+     def forward(x)
+       @out = x
+     end
+
+     def loss(y)
+       loss = loss_l1(y)
+       @loss = loss > 1 ? loss : loss_l2(y)
+     end
+
+     def backward(y)
+       dout = @out - y
+       if @loss > 1
+         dout[dout >= 0] = 1
+         dout[dout < 0] = -1
+       end
+       dout
+     end
+
+     private
+
+     def loss_l1(y)
+       batch_size = y.shape[0]
+       (@out - y).abs.sum / batch_size
+     end
+
+     def loss_l2(y)
+       batch_size = y.shape[0]
+       0.5 * ((@out - y)**2).sum / batch_size
+     end
+   end
 
 
    class SoftmaxWithLoss < Layers::OutputLayer
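The new `IdentityHuber` output layer switches between the two penalties per batch: it reports the mean absolute error when that error exceeds 1, and half the mean squared error otherwise, with `backward` clipping the gradient to ±1 in the linear regime. A plain-Ruby sketch of that piecewise rule (illustrative only, not the gem's API):

```ruby
# Huber-style loss over plain arrays: linear (L1) for large errors,
# quadratic (L2) for small ones.
def huber_like(out, y)
  batch_size = y.size.to_f
  l1 = out.zip(y).sum { |o, t| (o - t).abs } / batch_size
  l2 = 0.5 * out.zip(y).sum { |o, t| (o - t)**2 } / batch_size
  l1 > 1 ? l1 : l2
end

huber_like([0.5], [0.4]) # => 0.005 (quadratic branch)
huber_like([5.0], [0.0]) # => 5.0   (linear branch)
```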
@@ -130,6 +130,14 @@ module DNN
      [*@out_size, @num_filters]
    end
 
+   def ridge
+     if @weight_decay > 0
+       0.5 * @weight_decay * (@params[:weight]**2).sum
+     else
+       0
+     end
+   end
+
    def to_hash
      super({num_filters: @num_filters,
             filter_size: @filter_size,
@@ -142,6 +142,14 @@ module DNN
      [@num_nodes]
    end
 
+   def ridge
+     if @weight_decay > 0
+       0.5 * @weight_decay * (@params[:weight]**2).sum
+     else
+       0
+     end
+   end
+
    def to_hash
      super({num_nodes: @num_nodes,
             weight_initializer: @weight_initializer.to_hash,
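Both `ridge` methods added above report the same L2 penalty, 0.5 * weight_decay * Σw², which the refactored `ridge` in the next hunk now simply sums across layers instead of recomputing itself. A scalar sketch of the per-layer term (illustrative, not the gem's API):

```ruby
# L2 weight-decay penalty reported by a single layer.
def ridge_term(weights, weight_decay)
  return 0 unless weight_decay > 0
  0.5 * weight_decay * weights.sum { |w| w**2 }
end

ridge_term([0.3, -0.4], 0.01) # => 0.5 * 0.01 * 0.25 = 0.00125
```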
@@ -209,8 +217,8 @@ module DNN
    private
 
    def ridge
-     0.5 * @model.layers.select { |layer| layer.respond_to?(:weight_decay) }
-               .reduce(0) { |sum, layer| layer.weight_decay * (layer.params[:weight]**2).sum }
+     @model.layers.select { |layer| layer.respond_to?(:ridge) }
+                  .reduce(0) { |sum, layer| sum + layer.ridge }
    end
  end
 

data/lib/dnn/core/rnn_layers.rb ADDED
@@ -0,0 +1,104 @@
+ module DNN
+   module Layers
+
+     class SimpleRNN < HasParamLayer
+       include Initializers
+       include Activations
+
+       attr_reader :num_nodes
+       attr_reader :stateful
+       attr_reader :weight_decay
+
+       def self.load_hash(hash)
+         self.new(hash[:num_nodes],
+                  stateful: hash[:stateful],
+                  activation: Util.load_hash(hash[:activation]),
+                  weight_initializer: Util.load_hash(hash[:weight_initializer]),
+                  bias_initializer: Util.load_hash(hash[:bias_initializer]),
+                  weight_decay: hash[:weight_decay])
+       end
+
+       def initialize(num_nodes,
+                      stateful: false,
+                      activation: nil,
+                      weight_initializer: nil,
+                      bias_initializer: nil,
+                      weight_decay: 0)
+         super()
+         @num_nodes = num_nodes
+         @stateful = stateful
+         @activation = (activation || Tanh.new)
+         @weight_initializer = (weight_initializer || RandomNormal.new)
+         @bias_initializer = (bias_initializer || Zeros.new)
+         @weight_decay = weight_decay
+         @h = nil
+       end
+
+       def forward(xs)
+         @xs = xs
+         @hs = SFloat.zeros(xs.shape[0], *shape)
+         # Reuse the carried hidden state only when running stateful.
+         h = (@stateful && @h) ? @h : SFloat.zeros(xs.shape[0], @num_nodes)
+         xs.shape[1].times do |t|
+           x = xs[true, t, false]
+           h = x.dot(@params[:weight]) + h.dot(@params[:weight2]) + @params[:bias]
+           h = @activation.forward(h)
+           @hs[true, t, false] = h
+         end
+         @h = h
+         @hs
+       end
+
+       def backward(douts)
+         @grads[:weight] = SFloat.zeros(*@params[:weight].shape)
+         @grads[:weight2] = SFloat.zeros(*@params[:weight2].shape)
+         dxs = SFloat.zeros(@xs.shape)
+         # Walk the time steps in reverse, accumulating the weight gradients.
+         (0...douts.shape[1]).to_a.reverse.each do |t|
+           dout = douts[true, t, false]
+           x = @xs[true, t, false]
+           h = @hs[true, t, false]
+           dout = @activation.backward(dout)
+           @grads[:weight] += x.transpose.dot(dout)
+           @grads[:weight2] += h.transpose.dot(dout)
+           dxs[true, t, false] = dout.dot(@params[:weight].transpose)
+         end
+         @grads[:bias] = douts.sum(0).sum(0)
+         dxs
+       end
+
+       def shape
+         [@time_length, @num_nodes]
+       end
+
+       def ridge
+         if @weight_decay > 0
+           0.5 * (@weight_decay * (@params[:weight]**2).sum + @weight_decay * (@params[:weight2]**2).sum)
+         else
+           0
+         end
+       end
+
+       def to_hash
+         super({num_nodes: @num_nodes,
+                stateful: @stateful,
+                activation: @activation.to_hash,
+                weight_initializer: @weight_initializer.to_hash,
+                bias_initializer: @bias_initializer.to_hash,
+                weight_decay: @weight_decay})
+       end
+
+       private
+
+       def init_params
+         @time_length = prev_layer.shape[0]
+         num_prev_nodes = prev_layer.shape[1]
+         @params[:weight] = SFloat.new(num_prev_nodes, @num_nodes)
+         @params[:weight2] = SFloat.new(@num_nodes, @num_nodes)
+         @params[:bias] = SFloat.new(@num_nodes)
+         @weight_initializer.init_param(self, :weight)
+         @weight_initializer.init_param(self, :weight2)
+         @bias_initializer.init_param(self, :bias)
+       end
+     end
+
+   end
+ end
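The loop in `forward` above unrolls the classic simple-RNN recurrence h_t = tanh(x_t·W + h_(t-1)·W2 + b); with `stateful: true`, the final h is carried into the next call. A scalar sketch of that recurrence (illustrative only; the real layer operates on Numo SFloat matrices):

```ruby
# One hidden unit with scalar weights: returns the hidden state at each
# time step, mirroring what forward writes into @hs.
def simple_rnn_unroll(xs, w, w2, b, h = 0.0)
  xs.map { |x| h = Math.tanh(x * w + h * w2 + b) }
end

simple_rnn_unroll([1.0, 0.5, -0.5], 0.8, 0.3, 0.0)
# => hidden state after each of the three time steps
```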
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
  module DNN
-   VERSION = "0.5.5"
+   VERSION = "0.5.6"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ruby-dnn
  version: !ruby/object:Gem::Version
- version: 0.5.5
+ version: 0.5.6
  platform: ruby
  authors:
  - unagiootoro
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2018-08-05 00:00:00.000000000 Z
+ date: 2018-08-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: numo-narray
@@ -112,6 +112,7 @@ files:
  - lib/dnn/core/layers.rb
  - lib/dnn/core/model.rb
  - lib/dnn/core/optimizers.rb
+ - lib/dnn/core/rnn_layers.rb
  - lib/dnn/core/util.rb
  - lib/dnn/ext/cifar10_loader/cifar10_loader.c
  - lib/dnn/ext/cifar10_loader/extconf.rb