ruby-dnn 0.5.6 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4e54e26acfc92ade6be07da5c62e340b8d8c021a69b390a3a3bba21853ee6f3d
-  data.tar.gz: b111ee117a07e554d7166512756caf0da329ca70a9241495b268761597a48d98
+  metadata.gz: 0a3a2ea891bd75d6d2d3b97bc8e850c220c4118d87a1e7fcd47e1f9dd6943a08
+  data.tar.gz: 002d15961430e42b06b1b180718170b4e7e8f10b99b6569c312a550dc486a0e9
 SHA512:
-  metadata.gz: 82877266a1b2e01478e545fbf04071bd12a4c8ab07b13c2f11dc413aa7a6842d50a75206e7359ead5fd13a734bc3eccf9e7fb0122af3b20f92a989574c7e0741
-  data.tar.gz: 69c4999978dd9b4c2b4d7d2fbc3d4d6784b6894e606cee50eaaa1e1235542ca46f188a2f69c3d9ff7efb61a1e238660c2529424d2b9642d16d573b30eecce587
+  metadata.gz: ed91236978caff9d0def15edae2d743d03bb5e11b2546a8fec4f5d7cd3653af4b3f98d8fdf5cc2d93b10273dbebe84cca0f3b3fc7347afb846e489646dae51c2
+  data.tar.gz: e44ec8438481761ca2ba28ca6b6839abe4c97025a1985eb1be717bb85b9c44d11e71355a68370db1305412e67a3bcf5094c00fa05303f2ba2f33d5df5c324d87
data/API-Reference.ja.md CHANGED
@@ -2,7 +2,7 @@
 This is the API reference for ruby-dnn. It covers only the classes and methods required to use the API.
 If you need further details about the program, please refer to the source code.
 
-Last updated for version: 0.5.6
+Last updated for version: 0.5.7
 
 # module DNN
 The module that forms the ruby-dnn namespace.
@@ -335,8 +335,8 @@ When specifying an Array, use the form [Integer height, Integer width].
 Sets the strength of the L2 regularization term used for weight decay.
 
 
-# class MaxPool2D < Layer
-A layer that performs max pooling.
+# class Pool2D < Layer
+The superclass of all 2D pooling layers.
 
 ## 【Properties】
 
@@ -367,6 +367,14 @@ When specifying an Array, use the form [Integer height, Integer width].
 Performs zero padding.
 
 
+# class MaxPool2D < Pool2D
+A layer that performs max pooling.
+
+
+# class AvgPool2D < Pool2D
+A layer that performs average pooling.
+
+
 # class UnPool2D < Layer
 A layer that performs unpooling.
 
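For orientation (editorial sketch, not part of the released package): a minimal example of constructing the pooling layers documented above. The constructor keywords (`pool_size`, `strides:`, `padding:`) and the `load_hash` round-trip come from the `Pool2D` code later in this diff; the `require` path and the Array form of the sizes are assumptions.

```ruby
require "dnn"   # assumed entry point for the ruby-dnn gem

include DNN::Layers

# Pool2D#initialize(pool_size, strides: nil, padding: false), per this diff.
max_pool = MaxPool2D.new([2, 2], strides: [2, 2])
avg_pool = AvgPool2D.new([2, 2], padding: true)   # AvgPool2D is new in 0.5.7

# Both subclasses now restore themselves through the shared Pool2D.load_hash helper.
restored = MaxPool2D.load_hash({pool_size: [2, 2], strides: [2, 2], padding: false})
```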
@@ -158,16 +158,16 @@ module DNN
         @bias_initializer.init_param(self, :bias)
       end
     end
-
-
-    class MaxPool2D < Layer
+
+    #Super class of all pooling2D class.
+    class Pool2D < Layer
       include Conv2DModule
 
       attr_reader :pool_size
       attr_reader :strides
 
-      def self.load_hash(hash)
-        MaxPool2D.new(hash[:pool_size], strides: hash[:strides], padding: hash[:padding])
+      def self.load_hash(pool2d_class, hash)
+        pool2d_class.new(hash[:pool_size], strides: hash[:strides], padding: hash[:padding])
       end
 
       def initialize(pool_size, strides: nil, padding: false)
@@ -197,15 +197,10 @@ module DNN
         x = padding(x, @pad) if @padding
         @x_shape = x.shape
         col = im2col(x, *@out_size, *@pool_size, @strides)
-        col = col.reshape(x.shape[0] * @out_size.reduce(:*) * x.shape[3], @pool_size.reduce(:*))
-        @max_index = col.max_index(1)
-        col.max(1).reshape(x.shape[0], *@out_size, x.shape[3])
+        col.reshape(x.shape[0] * @out_size.reduce(:*) * x.shape[3], @pool_size.reduce(:*))
       end
 
-      def backward(dout)
-        dmax = SFloat.zeros(dout.size * @pool_size.reduce(:*))
-        dmax[@max_index] = dout.flatten
-        dcol = dmax.reshape(dout.shape[0..2].reduce(:*), dout.shape[3] * @pool_size.reduce(:*))
+      def backward(dcol)
         dx = col2im(dcol, @x_shape, *@out_size, *@pool_size, @strides)
         @padding ? back_padding(dx, @pad) : dx
       end
@@ -221,6 +216,49 @@ module DNN
                padding: @padding})
       end
     end
+
+
+    class MaxPool2D < Pool2D
+      def self.load_hash(hash)
+        Pool2D.load_hash(self, hash)
+      end
+
+      def forward(x)
+        col = super(x)
+        @max_index = col.max_index(1)
+        col.max(1).reshape(x.shape[0], *@out_size, x.shape[3])
+      end
+
+      def backward(dout)
+        dmax = SFloat.zeros(dout.size * @pool_size.reduce(:*))
+        dmax[@max_index] = dout.flatten
+        dcol = dmax.reshape(dout.shape[0..2].reduce(:*), dout.shape[3] * @pool_size.reduce(:*))
+        super(dcol)
+      end
+    end
+
+
+    class AvgPool2D < Pool2D
+      def self.load_hash(hash)
+        Pool2D.load_hash(self, hash)
+      end
+
+      def forward(x)
+        col = super(x)
+        col.mean(1).reshape(x.shape[0], *@out_size, x.shape[3])
+      end
+
+      def backward(dout)
+        row_length = @pool_size.reduce(:*)
+        dout /= row_length
+        davg = SFloat.zeros(dout.size, row_length)
+        row_length.times do |i|
+          davg[true, i] = dout.flatten
+        end
+        dcol = davg.reshape(dout.shape[0..2].reduce(:*), dout.shape[3] * @pool_size.reduce(:*))
+        super(dcol)
+      end
+    end
 
 
     class UnPool2D < Layer
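To make the new division of labor explicit (editorial commentary, not part of the diff): `Pool2D#forward` now returns an im2col matrix with one pooling window per row, each subclass reduces that matrix along axis 1 (`max` or `mean`), and each subclass's `backward` expands the incoming gradient back to the same matrix shape before `Pool2D#backward` runs `col2im`. A standalone sketch of the average-pooling gradient spread, assuming `SFloat` is `Numo::SFloat` as in the surrounding code:

```ruby
require "numo/narray"

# Four pooled outputs, each averaged over a 2x2 window (row_length = 4).
dout = Numo::SFloat[1, 2, 3, 4]
row_length = 4

# Mirrors AvgPool2D#backward: every position in a window receives dout / row_length.
dout = dout / row_length
davg = Numo::SFloat.zeros(dout.size, row_length)
row_length.times { |i| davg[true, i] = dout.flatten }

p davg   # each row holds its pooled gradient split evenly across the 4 window positions
```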
@@ -1,7 +1,5 @@
 module DNN
   class DNN_Error < StandardError; end
 
-  class DNN_TypeError < DNN_Error; end
-
-  class DNN_SharpError < DNN_Error; end
+  class DNN_ShapeError < DNN_Error; end
 end
@@ -69,7 +69,7 @@ module DNN
 
     def <<(layer)
       unless layer.is_a?(Layers::Layer)
-        raise DNN_TypeError.new("layer is not an instance of the DNN::Layers::Layer class.")
+        raise TypeError.new("layer is not an instance of the DNN::Layers::Layer class.")
       end
       @layers << layer
       self
@@ -77,7 +77,7 @@ module DNN
 
     def compile(optimizer)
       unless optimizer.is_a?(Optimizers::Optimizer)
-        raise DNN_TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
+        raise TypeError.new("optimizer is not an instance of the DNN::Optimizers::Optimizer class.")
       end
       @compiled = true
       layers_check
@@ -202,10 +202,10 @@ module DNN
 
     def layers_check
       unless @layers.first.is_a?(Layers::InputLayer)
-        raise DNN_Error.new("The first layer is not an InputLayer.")
+        raise TypeError.new("The first layer is not an InputLayer.")
       end
       unless @layers.last.is_a?(Layers::OutputLayer)
-        raise DNN_Error.new("The last layer is not an OutputLayer.")
+        raise TypeError.new("The last layer is not an OutputLayer.")
       end
     end
 
@@ -214,12 +214,12 @@ module DNN
         if layer.is_a?(Layers::Dense)
           prev_shape = layer.prev_layer.shape
           if prev_shape.length != 1
-            raise DNN_SharpError.new("layer index(#{i}) Dense: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 1 dimensional.")
+            raise DNN_ShapeError.new("layer index(#{i}) Dense: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 1 dimensional.")
           end
         elsif layer.is_a?(Layers::Conv2D) || layer.is_a?(Layers::MaxPool2D)
           prev_shape = layer.prev_layer.shape
           if prev_shape.length != 3
-            raise DNN_SharpError.new("layer index(#{i}) Conv2D: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 3 dimensional.")
+            raise DNN_ShapeError.new("layer index(#{i}) Conv2D: The shape of the previous layer is #{prev_shape}. The shape of the previous layer must be 3 dimensional.")
           end
         end
       end
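In practical terms (a hedged sketch, not taken from the diff itself): pushing a non-layer object or compiling with a bad optimizer now raises Ruby's built-in `TypeError`, and shape problems raise the renamed `DNN::DNN_ShapeError`. The `DNN::Model` and `SGD` names below are assumptions; the error classes and messages are the ones shown in this diff.

```ruby
require "dnn"

model = DNN::Model.new   # class name assumed

begin
  model << "not a layer"   # non-Layer objects now raise the standard TypeError
rescue TypeError => e
  puts e.message           # "layer is not an instance of the DNN::Layers::Layer class."
end

# Shape mismatches detected during compilation now raise DNN::DNN_ShapeError
# (previously misspelled DNN_SharpError); other structural problems raise TypeError.
begin
  model.compile(DNN::Optimizers::SGD.new)   # SGD assumed to be an Optimizers::Optimizer subclass
rescue DNN::DNN_ShapeError, TypeError => e
  puts e.message
end
```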
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.5.6"
+  VERSION = "0.5.7"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.5.6
+  version: 0.5.7
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-08-06 00:00:00.000000000 Z
+date: 2018-08-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray