ruby-dnn 0.13.4 → 0.14.0

@@ -83,7 +83,6 @@ module DNN
       end
     end
 
-
     class Conv2D < Connection
       include Conv2DUtils
 
@@ -114,7 +113,7 @@ module DNN
 
       def build(input_shape)
         unless input_shape.length == 3
-          raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 3 dimensional.")
+          raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
         end
         super
         prev_h, prev_w, num_prev_filters = *input_shape
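
Throughout this release, raise Error.new(message) is rewritten to the more idiomatic raise Error, message. The two forms are equivalent in Ruby; for example (the shape value is made up for illustration):

    begin
      raise DNN_ShapeError, "Input shape is [28, 28]. But input shape must be 3 dimensional."
    rescue DNN_ShapeError => e
      e.message  # same message as with DNN_ShapeError.new(...)
    end
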
@@ -122,12 +121,12 @@ module DNN
         @bias.data = Xumo::SFloat.new(@num_filters) if @bias
         init_weight_and_bias
         @pad_size = if @padding == true
-          calc_conv2d_padding_size(prev_h, prev_w, *@filter_size, @strides)
-        elsif @padding.is_a?(Array)
-          @padding
-        else
-          [0, 0]
-        end
+                      calc_conv2d_padding_size(prev_h, prev_w, *@filter_size, @strides)
+                    elsif @padding.is_a?(Array)
+                      @padding
+                    else
+                      [0, 0]
+                    end
         @out_size = calc_conv2d_out_size(prev_h, prev_w, *@filter_size, *@pad_size, @strides)
       end
 
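
The padding and output-size helpers referenced here follow the usual convolution size arithmetic. A quick illustrative check of the kind of numbers calc_conv2d_out_size is expected to produce, using the standard formula rather than the library's actual implementation (assumed equivalent):

    # Assumed standard formula: out = (in + 2 * pad - filter) / stride + 1
    def conv2d_out_size_sketch(in_size, filter, pad, stride)
      (in_size + 2 * pad - filter) / stride + 1
    end

    conv2d_out_size_sketch(28, 3, 0, 1)  # => 26
    conv2d_out_size_sketch(28, 3, 1, 1)  # => 28 (padding: true preserves the size at stride 1)
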
@@ -186,7 +185,6 @@ module DNN
       end
     end
 
-
     class Conv2DTranspose < Connection
       include Conv2DUtils
 
@@ -217,7 +215,7 @@ module DNN
 
      def build(input_shape)
        unless input_shape.length == 3
-          raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 3 dimensional.")
+          raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
        end
        super
        prev_h, prev_w, num_prev_filters = *input_shape
@@ -225,12 +223,12 @@ module DNN
         @bias.data = Xumo::SFloat.new(@num_filters) if @bias
         init_weight_and_bias
         @pad_size = if @padding == true
-          calc_conv2d_transpose_padding_size(prev_h, prev_w, *@filter_size, @strides)
-        elsif @padding.is_a?(Array)
-          @padding
-        else
-          [0, 0]
-        end
+                      calc_conv2d_transpose_padding_size(prev_h, prev_w, *@filter_size, @strides)
+                    elsif @padding.is_a?(Array)
+                      @padding
+                    else
+                      [0, 0]
+                    end
         @out_size = calc_conv2d_transpose_out_size(prev_h, prev_w, *@filter_size, *@pad_size, @strides)
       end
 
@@ -291,7 +289,6 @@ module DNN
       end
     end
 
-
     # Super class of all pooling2D class.
     class Pool2D < Layer
       include Conv2DUtils
@@ -308,27 +305,27 @@ module DNN
         super()
         @pool_size = pool_size.is_a?(Integer) ? [pool_size, pool_size] : pool_size
         @strides = if strides
-          strides.is_a?(Integer) ? [strides, strides] : strides
-        else
-          @pool_size.clone
-        end
+                     strides.is_a?(Integer) ? [strides, strides] : strides
+                   else
+                     @pool_size.clone
+                   end
         @padding = padding.is_a?(Integer) ? [padding, padding] : padding
       end
 
       def build(input_shape)
         unless input_shape.length == 3
-          raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 3 dimensional.")
+          raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
         end
         super
         prev_h, prev_w = input_shape[0..1]
         @num_channel = input_shape[2]
         @pad_size = if @padding == true
-          calc_conv2d_padding_size(prev_h, prev_w, *@pool_size, @strides)
-        elsif @padding.is_a?(Array)
-          @padding
-        else
-          [0, 0]
-        end
+                      calc_conv2d_padding_size(prev_h, prev_w, *@pool_size, @strides)
+                    elsif @padding.is_a?(Array)
+                      @padding
+                    else
+                      [0, 0]
+                    end
         @out_size = calc_conv2d_out_size(prev_h, prev_w, *@pool_size, *@pad_size, @strides)
       end
 
@@ -345,10 +342,8 @@ module DNN
       def load_hash(hash)
         initialize(hash[:pool_size], strides: hash[:strides], padding: hash[:padding])
       end
-
     end
 
-
     class MaxPool2D < Pool2D
       def forward(x)
         x = zero_padding(x, @pad_size) if @padding
@@ -368,7 +363,6 @@ module DNN
       end
     end
 
-
     class AvgPool2D < Pool2D
       def forward(x)
         x = zero_padding(x, @pad_size) if @padding
@@ -391,7 +385,6 @@ module DNN
       end
     end
 
-
     class UnPool2D < Layer
       include Conv2DUtils
 
@@ -405,7 +398,7 @@ module DNN
 
       def build(input_shape)
        unless input_shape.length == 3
-          raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 3 dimensional.")
+          raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
        end
        super
        prev_h, prev_w = input_shape[0..1]
@@ -21,9 +21,9 @@ module DNN
         @weight_regularizer = weight_regularizer
       end
 
-      def call(input)
+      def call(input_tensor)
         build unless built?
-        [forward(input), Link.new(nil, self)]
+        Tensor.new(forward(input_tensor.data), Link.new(nil, self))
       end
 
       def build
@@ -56,6 +56,24 @@ module DNN
         @weight_regularizer ? [@weight_regularizer] : []
       end
 
+      def to_proc
+        method(:call).to_proc
+      end
+
+      def >>(layer)
+        if RUBY_VERSION < "2.6.0"
+          raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+        end
+        to_proc >> layer
+      end
+
+      def <<(layer)
+        if RUBY_VERSION < "2.6.0"
+          raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+        end
+        to_proc << layer
+      end
+
       def to_hash
         super(input_shape: @input_shape, input_length: @input_length,
               weight_initializer: @weight_initializer.to_hash, weight_regularizer: @weight_regularizer&.to_hash)
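
These helpers hook the Embedding layer into Ruby 2.6+ Proc composition: layer_a >> layer_b is just to_proc >> layer_b, a Proc that calls layer_a and feeds the resulting Tensor to layer_b. A minimal sketch of that expansion (the two Dense instances are arbitrary stand-ins; any layers responding to call compose the same way):

    require "dnn"

    layer_a = DNN::Layers::Dense.new(8)
    layer_b = DNN::Layers::Dense.new(2)

    composed = layer_a.method(:call).to_proc >> layer_b  # what layer_a >> layer_b expands to
    # composed.call(tensor) is equivalent to layer_b.call(layer_a.call(tensor))
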
@@ -2,6 +2,4 @@ module DNN
   class DNN_Error < StandardError; end
 
   class DNN_ShapeError < DNN_Error; end
-
-  class DNN_UnknownEventError < DNN_Error; end
 end
@@ -6,7 +6,7 @@ module DNN
         return nil unless hash
         initializer_class = DNN.const_get(hash[:class])
         initializer = initializer_class.allocate
-        raise DNN_Error.new("#{initializer.class} is not an instance of #{self} class.") unless initializer.is_a?(self)
+        raise DNN_Error, "#{initializer.class} is not an instance of #{self} class." unless initializer.is_a?(self)
         initializer.load_hash(hash)
         initializer
       end
@@ -21,7 +21,7 @@ module DNN
       # @param [DNN::Layers::Layer] layer Layer that owns learning parameters.
       # @param [DNN::Param] param Learning parameter to be initialized.
       def init_param(layer, param)
-        raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'init_param'")
+        raise NotImplementedError, "Class '#{self.class.name}' has implement method 'init_param'"
       end
 
       def to_hash(merge_hash = nil)
@@ -35,14 +35,12 @@ module DNN
       end
     end
 
-
     class Zeros < Initializer
       def init_param(layer, param)
         param.data = param.data.fill(0)
       end
     end
 
-
    class Const < Initializer
       attr_reader :const
 
@@ -65,7 +63,6 @@ module DNN
       end
     end
 
-
     class RandomNormal < Initializer
       attr_reader :mean
       attr_reader :std
@@ -92,7 +89,6 @@ module DNN
       end
     end
 
-
     class RandomUniform < Initializer
       attr_reader :min
       attr_reader :max
@@ -119,7 +115,6 @@ module DNN
       end
     end
 
-
     class Xavier < Initializer
       def initialize(seed: true)
         super
@@ -132,7 +127,6 @@ module DNN
       end
     end
 
-
     class He < Initializer
       def initialize(seed: true)
         super
@@ -1,13 +1,18 @@
 module DNN
   # This class manages input datas and output datas together.
   class Iterator
+    attr_reader :num_datas
+    attr_reader :last_round_down
+
     # @param [Numo::SFloat] x_datas input datas.
     # @param [Numo::SFloat] y_datas output datas.
     # @param [Boolean] random Set true to return batches randomly. Setting false returns batches in order of index.
-    def initialize(x_datas, y_datas, random: true)
+    # @param [Boolean] last_round_down Set true to round down for last batch data when call foreach.
+    def initialize(x_datas, y_datas, random: true, last_round_down: false)
       @x_datas = x_datas
       @y_datas = y_datas
       @random = random
+      @last_round_down = last_round_down
       @num_datas = x_datas.is_a?(Array) ? x_datas[0].shape[0] : x_datas.shape[0]
       reset
     end
@@ -15,7 +20,7 @@ module DNN
     # Return the next batch.
     # @param [Integer] batch_size Required batch size.
     def next_batch(batch_size)
-      raise DNN_Error.new("This iterator has not next batch. Please call reset.") unless has_next?
+      raise DNN_Error, "This iterator has not next batch. Please call reset." unless has_next?
       if @indexes.length <= batch_size
         batch_indexes = @indexes
         @has_next = false
@@ -23,15 +28,15 @@ module DNN
         batch_indexes = @indexes.shift(batch_size)
       end
       x_batch = if @x_datas.is_a?(Array)
-        @x_datas.map { |datas| datas[batch_indexes, false] }
-      else
-        @x_datas[batch_indexes, false]
-      end
+                  @x_datas.map { |datas| datas[batch_indexes, false] }
+                else
+                  @x_datas[batch_indexes, false]
+                end
       y_batch = if @y_datas.is_a?(Array)
-        @y_datas.map { |datas| datas[batch_indexes, false] }
-      else
-        @y_datas[batch_indexes, false]
-      end
+                  @y_datas.map { |datas| datas[batch_indexes, false] }
+                else
+                  @y_datas[batch_indexes, false]
+                end
       [x_batch, y_batch]
     end
 
@@ -48,11 +53,10 @@ module DNN
     end
 
     def foreach(batch_size, &block)
-      step = 0
-      while has_next?
+      steps = @last_round_down ? @num_datas / batch_size : (@num_datas.to_f / batch_size).ceil
+      steps.times do |step|
        x_batch, y_batch = next_batch(batch_size)
        block.call(x_batch, y_batch, step)
-        step += 1
      end
      reset
    end
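
foreach now precomputes the number of steps instead of looping on has_next?, and last_round_down: true drops the trailing partial batch. A small usage sketch (the data and shapes are made up; only the Iterator API shown in this diff is used):

    require "dnn"

    x = Numo::SFloat.new(100, 10).rand
    y = Numo::SFloat.new(100, 1).rand

    # 100 samples, batch_size 32:
    #   default                -> 4 steps, the last batch holds the remaining 4 samples
    #   last_round_down: true  -> 100 / 32 = 3 steps, the 4 leftover samples are skipped
    iter = DNN::Iterator.new(x, y, random: true, last_round_down: true)
    iter.foreach(32) do |x_batch, y_batch, step|
      puts "step #{step}: #{x_batch.shape[0]} samples"
    end
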
@@ -7,15 +7,16 @@ module DNN
       attr_reader :input_shape
 
       def self.call(x, *args)
-        self.new(*args).(x)
+        new(*args).(x)
       end
 
       def self.from_hash(hash)
         return nil unless hash
         layer_class = DNN.const_get(hash[:class])
         layer = layer_class.allocate
-        raise DNN_Error.new("#{layer.class} is not an instance of #{self} class.") unless layer.is_a?(self)
+        raise DNN_Error, "#{layer.class} is not an instance of #{self} class." unless layer.is_a?(self)
         layer.load_hash(hash)
+        layer.name = hash[:name]&.to_sym
         layer
       end
 
@@ -25,13 +26,15 @@ module DNN
       end
 
       # Forward propagation and create a link.
-      # @param [Array] input Array of the form [x_input_data, prev_link].
-      def call(input)
-        x, prev_link = *input
+      # @param [Tensor] input_tensor Input tensor.
+      # @return [Tensor] Output tensor.
+      def call(input_tensor)
+        x = input_tensor.data
+        prev_link = input_tensor.link
         build(x.shape[1..-1]) unless built?
         y = forward(x)
         link = Link.new(prev_link, self)
-        [y, link]
+        Tensor.new(y, link)
       end
 
       # Build the layer.
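
Layer#call now exchanges Tensor objects (data plus backprop link) instead of [data, link] arrays. A hedged walk-through of what a caller sees; Dense and Tensor appear in this diff, while the two-argument Tensor.new(data, link) form and the nil link marking the start of the graph are assumptions taken from the lines above:

    require "dnn"

    dense = DNN::Layers::Dense.new(16)

    x_data = Numo::SFloat.new(8, 4).rand      # batch of 8 samples, 4 features
    x_tensor = DNN::Tensor.new(x_data, nil)   # assumed: Tensor.new(data, link)

    y_tensor = dense.call(x_tensor)           # builds the layer on first call, then forwards
    y_tensor.data                             # => Numo::SFloat of shape [8, 16]
    y_tensor.link                             # => Link used later for backward propagation
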
@@ -49,13 +52,13 @@ module DNN
       # Forward propagation.
       # @param [Numo::SFloat] x Input data.
       def forward(x)
-        raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'forward'")
+        raise NotImplementedError, "Class '#{self.class.name}' has implement method 'forward'"
       end
 
       # Backward propagation.
       # @param [Numo::SFloat] dy Differential value of output data.
       def backward(dy)
-        raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward'")
+        raise NotImplementedError, "Class '#{self.class.name}' has implement method 'backward'"
       end
 
       # Please reimplement this method as needed.
@@ -77,7 +80,6 @@ module DNN
       end
     end
 
-
     # This class is a superclass of all classes with learning parameters.
     class HasParamLayer < Layer
       # @return [Boolean] Setting false prevents learning of parameters.
@@ -90,15 +92,13 @@ module DNN
 
       # @return [Array] The parameters of the layer.
       def get_params
-        raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'get_params'")
+        raise NotImplementedError, "Class '#{self.class.name}' has implement method 'get_params'"
       end
     end
 
-
     class InputLayer < Layer
-      def self.call(input)
-        shape = input.is_a?(Array) ? input[0].shape : input.shape
-        self.new(shape[1..-1]).(input)
+      def self.call(input_tensor)
+        new(input_tensor.data.shape[1..-1]).(input_tensor)
       end
 
       # @param [Array] input_dim_or_shape Setting the shape or dimension of the input data.
@@ -107,16 +107,9 @@ module DNN
         @input_shape = input_dim_or_shape.is_a?(Array) ? input_dim_or_shape : [input_dim_or_shape]
       end
 
-      def call(input)
+      def call(input_tensor)
         build unless built?
-        if input.is_a?(Array)
-          x, prev_link = *input
-        else
-          x = input
-          prev_link = nil
-        end
-        link = prev_link ? Link.new(prev_link, self) : Link.new(nil, self)
-        [forward(x), link]
+        Tensor.new(forward(input_tensor.data), Link.new(input_tensor&.link, self))
       end
 
       def build
@@ -125,7 +118,7 @@ module DNN
 
       def forward(x)
         unless x.shape[1..-1] == @input_shape
-          raise DNN_ShapeError.new("The shape of x does not match the input shape. input shape is #{@input_shape}, but x shape is #{x.shape[1..-1]}.")
+          raise DNN_ShapeError, "The shape of x does not match the input shape. input shape is #{@input_shape}, but x shape is #{x.shape[1..-1]}."
         end
         x
       end
@@ -134,6 +127,24 @@ module DNN
         dy
       end
 
+      def to_proc
+        method(:call).to_proc
+      end
+
+      def >>(layer)
+        if RUBY_VERSION < "2.6.0"
+          raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+        end
+        to_proc >> layer
+      end
+
+      def <<(layer)
+        if RUBY_VERSION < "2.6.0"
+          raise DNN_Error, "Function composition is not supported before ruby version 2.6.0."
+        end
+        to_proc << layer
+      end
+
       def to_hash
         super(input_shape: @input_shape)
       end
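
Because InputLayer#>> returns a Proc and Proc#>> accepts anything responding to call, a whole forward pass can be chained into one callable even though only InputLayer and Embedding define the operator in this diff. A minimal sketch (Ruby 2.6+; the Tensor construction is the same assumption as above):

    require "dnn"
    include DNN::Layers

    model_fn = InputLayer.new(4) >> Dense.new(16) >> Dense.new(1)

    x = DNN::Tensor.new(Numo::SFloat.new(8, 4).rand, nil)  # assumed Tensor.new(data, link)
    y = model_fn.call(x)  # Tensor from the last Dense; y.data has shape [8, 1]
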
@@ -143,7 +154,6 @@ module DNN
       end
     end
 
-
     # It is a superclass of all connection layers.
     class Connection < HasParamLayer
       attr_reader :weight
@@ -206,7 +216,6 @@ module DNN
       end
     end
 
-
     class Dense < Connection
       attr_reader :num_nodes
 
@@ -224,7 +233,7 @@ module DNN
 
       def build(input_shape)
         unless input_shape.length == 1
-          raise DNN_ShapeError.new("Input shape is #{input_shape}. But input shape must be 1 dimensional.")
+          raise DNN_ShapeError, "Input shape is #{input_shape}. But input shape must be 1 dimensional."
         end
         super
         num_prev_nodes = input_shape[0]
@@ -266,7 +275,6 @@ module DNN
       end
     end
 
-
     class Flatten < Layer
       def forward(x)
         x.reshape(x.shape[0], *output_shape)
@@ -281,8 +289,9 @@ module DNN
       end
     end
 
-
     class Reshape < Layer
+      attr_reader :output_shape
+
       def initialize(output_shape)
         super()
         @output_shape = output_shape
@@ -296,10 +305,6 @@ module DNN
         dy.reshape(dy.shape[0], *@input_shape)
       end
 
-      def output_shape
-        @output_shape
-      end
-
       def to_hash
         super(output_shape: @output_shape)
       end
@@ -309,7 +314,6 @@ module DNN
       end
     end
 
-
     class Dropout < Layer
       attr_accessor :dropout_ratio
       attr_reader :use_scale