ruby-dnn 0.12.4 → 0.13.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 2835cd619265328ffa05c41515915aaeb54924ca4ddd4958edba992ea4feda89
-   data.tar.gz: 5ab01dbcf1219cdfac7b3a0e20bc5783fa15baaf82225ea1a9a2d076569cb7f1
+   metadata.gz: 5a0f021b01f03d45c51e52a1147b05703f8da8895380560d4add4fd3f2fc3d90
+   data.tar.gz: 2c5b3b04d23b0285a0acd1fbde3ca4f774948645d016c471264484f45230fa6f
  SHA512:
-   metadata.gz: 1335b77cf701501930e32f07a9faa027706b908d79a24b1b369228b45c5ef15a38ac476c6951d14b7ce398b1eb7789bf0c96566427b84fffb1ed892af7e43629
-   data.tar.gz: baf8ceeec91699a75a14bde79e815561189e26c9317e34f09580f90f45562120ba12540a7a64e42e562d4d26162490b6c2170721459b8e635731e900812a357b
+   metadata.gz: 33758acb29f83b8523accd213293c84d4b887591926de256e38bd41895f86358c80ee64f2876cd4fb679a9a2c71a64929780149a324dd80020f5b6d397c9a608
+   data.tar.gz: 6a8e95ca8c57a968a846562ad4fa921e382cef83c0839599651ab93ad2d7f6d6cb0c36ada945fb125c0b13e10aa1333030ce019b138146b00e19ed9882237ae7
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/cifar100"
+ require "dnn/datasets/cifar100"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/cifar10"
+ require "dnn/datasets/cifar10"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -68,9 +68,13 @@ class Discriminator < Model
    end
 
    def call(x, trainable = true)
-     has_param_layers.each do |layer|
-       layer.trainable = trainable
-     end
+     @l1.trainable = trainable
+     @l2.trainable = trainable
+     @l3.trainable = trainable
+     @l4.trainable = trainable
+     @l5.trainable = trainable
+     @l6.trainable = trainable
+
      x = InputLayer.new([28, 28, 1]).(x)
      x = @l1.(x)
      x = LeakyReLU.(x, 0.2)
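Note: the rewritten call above sets each layer's trainable flag explicitly instead of iterating has_param_layers. A hypothetical snippet of how the flag is typically used from the GAN training code (dis, gen_out and images are assumed names, not taken from this diff):

  # Freeze the discriminator while the generator is trained through it.
  d_out = dis.(gen_out, false)

  # Train the discriminator itself with its layers left trainable (the default).
  d_out = dis.(images)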
@@ -7,7 +7,7 @@ Image = DNN::Image
 
  batch_size = 100
 
- dcgan = Model.load("trained/dcgan_model_epoch20.marshal")
+ dcgan = DCGAN.load("trained/dcgan_model_epoch20.marshal")
  gen = dcgan.gen
 
  Numo::SFloat.srand(rand(1 << 31))
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/mnist"
+ require "dnn/datasets/mnist"
  require "numo/linalg/autoloader"
  require_relative "dcgan"
@@ -22,9 +22,11 @@ x_train = Numo::SFloat.cast(x_train)
  x_train = x_train / 127.5 - 1
 
  iter = DNN::Iterator.new(x_train, y_train)
+ num_batchs = x_train.shape[0] / batch_size
  (1..epochs).each do |epoch|
    puts "epoch: #{epoch}"
-   iter.foreach(batch_size) do |x_batch, y_batch, index|
+   num_batchs.times do |index|
+     x_batch, y_batch = iter.next_batch(batch_size)
      noise = Numo::SFloat.new(batch_size, 20).rand(-1, 1)
      images = gen.predict(noise)
      x = x_batch.concatenate(images)
@@ -37,5 +39,6 @@ iter = DNN::Iterator.new(x_train, y_train)
 
      puts "index: #{index}, dis_loss: #{dis_loss.mean}, dcgan_loss: #{dcgan_loss.mean}"
    end
+   iter.reset
    dcgan.save("trained/dcgan_model_epoch#{epoch}.marshal")
  end
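For reference, the epoch loop above now drives the iterator by hand instead of using Iterator#foreach. A condensed sketch of the new pattern (DNN::Iterator.new, next_batch and reset are taken from this diff; the rest is illustrative):

  iter = DNN::Iterator.new(x_train, y_train)
  num_batchs = x_train.shape[0] / batch_size
  (1..epochs).each do |epoch|
    num_batchs.times do
      x_batch, y_batch = iter.next_batch(batch_size)
      # ... run one training step on the batch ...
    end
    iter.reset  # rewind the iterator before the next epoch
  end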
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/iris"
+ require "dnn/datasets/iris"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/mnist"
+ require "dnn/datasets/mnist"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/mnist"
+ require "dnn/datasets/mnist"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/mnist"
+ require "dnn/datasets/mnist"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
@@ -1,5 +1,5 @@
  require "dnn"
- require "dnn/mnist"
+ require "dnn/datasets/mnist"
  # If you use numo/linalg then please uncomment out.
  # require "numo/linalg/autoloader"
data/lib/dnn.rb CHANGED
@@ -9,6 +9,7 @@ end
 
  require_relative "dnn/version"
  require_relative "dnn/core/error"
+ require_relative "dnn/core/global"
  require_relative "dnn/core/models"
  require_relative "dnn/core/param"
  require_relative "dnn/core/link"
@@ -24,4 +25,5 @@ require_relative "dnn/core/cnn_layers"
  require_relative "dnn/core/embedding"
  require_relative "dnn/core/rnn_layers"
  require_relative "dnn/core/optimizers"
+ require_relative "dnn/core/savers"
  require_relative "dnn/core/utils"
@@ -28,9 +28,8 @@ module DNN
    end
 
    def call(input)
-     x, *, learning_phase = *input
      build unless built?
-     [forward(x), Link.new(nil, self), learning_phase]
+     [forward(input), Link.new(nil, self)]
    end
 
    def build
@@ -2,4 +2,6 @@ module DNN
    class DNN_Error < StandardError; end
 
    class DNN_ShapeError < DNN_Error; end
+
+   class DNN_UnknownEventError < DNN_Error; end
  end
@@ -0,0 +1,9 @@
+ module DNN
+   def self.learning_phase
+     @learning_phase
+   end
+
+   def self.learning_phase=(bool)
+     @learning_phase = bool
+   end
+ end
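The new global flag replaces the learning_phase value that was previously threaded through every layer call (see the Dropout change below). A minimal sketch of how it gates training-only behaviour, assuming the flag is driven manually:

  # Illustrative only: with the flag on, layers such as Dropout apply their random mask.
  DNN.learning_phase = true
  # ... forward pass for a training step ...

  # With the flag off, the same layers run in inference mode.
  DNN.learning_phase = false
  # ... forward pass for prediction ...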
@@ -2,7 +2,7 @@ module DNN
  module Initializers
 
    class Initializer
-     # @param [Boolean | Integer] seed Seed of random number used for masking.
+     # @param [Boolean | Integer] seed Seed of random number used for initialize parameter.
      # Set true to determine seed as random.
      def initialize(seed: false)
        @seed = seed == true ? rand(1 << 31) : seed
@@ -8,7 +8,7 @@ module DNN
      @x_datas = x_datas
      @y_datas = y_datas
      @random = random
-     @num_datas = x_datas.shape[0]
+     @num_datas = x_datas.is_a?(Array) ? x_datas[0].shape[0] : x_datas.shape[0]
      reset
    end
@@ -22,8 +22,16 @@ module DNN
      else
        batch_indexes = @indexes.shift(batch_size)
      end
-     x_batch = @x_datas[batch_indexes, false]
-     y_batch = @y_datas[batch_indexes, false]
+     x_batch = if @x_datas.is_a?(Array)
+       @x_datas.map { |datas| datas[batch_indexes, false] }
+     else
+       @x_datas[batch_indexes, false]
+     end
+     y_batch = if @y_datas.is_a?(Array)
+       @y_datas.map { |datas| datas[batch_indexes, false] }
+     else
+       @y_datas[batch_indexes, false]
+     end
      [x_batch, y_batch]
    end
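Because the batch slicing now handles arrays, one iterator can feed multi-input models. A minimal sketch (shapes and variable names are assumptions, not part of this diff):

  # Two aligned input arrays are sliced with the same batch indexes.
  x1 = Numo::SFloat.new(1000, 10).rand
  x2 = Numo::SFloat.new(1000, 5).rand
  y  = Numo::SFloat.new(1000, 1).rand

  iter = DNN::Iterator.new([x1, x2], y)
  (x1_batch, x2_batch), y_batch = iter.next_batch(32)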
 
@@ -3,6 +3,7 @@ module DNN
 
    # Super class of all layer classes.
    class Layer
+     attr_accessor :name
      attr_reader :input_shape
 
      def self.call(x, *args)
@@ -14,13 +15,13 @@ module DNN
      end
 
      # Forward propagation and create a link.
-     # @param [Array] input Array of the form [x_input_data, prev_link, learning_phase].
+     # @param [Array] input Array of the form [x_input_data, prev_link].
      def call(input)
-       x, prev_link, learning_phase = *input
+       x, prev_link = *input
        build(x.shape[1..-1]) unless built?
        y = forward(x)
        link = Link.new(prev_link, self)
-       [y, link, learning_phase]
+       [y, link]
      end
 
      # Build the layer.
@@ -98,9 +99,14 @@ module DNN
 
      def call(input)
        build unless built?
-       x, prev_link, learning_phase = *input
+       if input.is_a?(Array)
+         x, prev_link = *input
+       else
+         x = input
+         prev_link = nil
+       end
        link = prev_link ? Link.new(prev_link, self) : Link.new(nil, self)
-       [forward(x), link, learning_phase]
+       [forward(x), link]
      end
 
      def build
@@ -310,16 +316,8 @@ module DNN
        @rnd = Random.new(@seed)
      end
 
-     def call(input)
-       x, prev_link, learning_phase = *input
-       build(x.shape[1..-1]) unless built?
-       y = forward(x, learning_phase)
-       link = Link.new(prev_link, self)
-       [y, link, learning_phase]
-     end
-
-     def forward(x, learning_phase)
-       if learning_phase
+     def forward(x)
+       if DNN.learning_phase
          Xumo::SFloat.srand(@rnd.rand(1 << 31))
          @mask = Xumo::SFloat.ones(*x.shape).rand < @dropout_ratio
          x[@mask] = 0
data/lib/dnn/core/link.rb CHANGED
@@ -7,6 +7,11 @@ module DNN
      @prev = prev
      @layer = layer
    end
+
+   def backward(dy)
+     dy = @layer.backward(dy)
+     @prev&.backward(dy)
+   end
  end
@@ -20,5 +25,11 @@ module DNN
      @prev2 = prev2
      @layer = layer
    end
+
+   def backward(dy)
+     dy1, dy2 = *@layer.backward(dy)
+     @prev1&.backward(dy1)
+     @prev2&.backward(dy2)
+   end
  end
 end
@@ -2,25 +2,37 @@ module DNN
  module Losses
 
    class Loss
-     def forward(y, t, layers)
+     def loss(y, t, layers = nil)
        unless y.shape == t.shape
          raise DNN_ShapeError.new("The shape of y does not match the t shape. y shape is #{y.shape}, but t shape is #{t.shape}.")
        end
-       loss_value = forward_loss(y, t)
-       regularizers = layers.select { |layer| layer.is_a?(Layers::Connection) }
-                            .map(&:regularizers).flatten
+       loss_value = forward(y, t)
+       loss_value += regularizers_forward(layers) if layers
+       loss_value
+     end
+
+     def forward(y, t)
+       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'forward'")
+     end
 
+     def backward(y, t)
+       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward'")
+     end
+
+     def regularizers_forward(layers)
+       loss_value = 0
+       regularizers = layers.select { |layer| layer.respond_to?(:regularizers) }
+                            .map(&:regularizers).flatten
        regularizers.each do |regularizer|
          loss_value = regularizer.forward(loss_value)
        end
        loss_value
      end
 
-     def backward(t, layers)
+     def regularizers_backward(layers)
        layers.select { |layer| layer.respond_to?(:regularizers) }.each do |layer|
          layer.regularizers.each(&:backward)
        end
-       backward_loss(t)
      end
 
      def to_hash(merge_hash = nil)
@@ -28,44 +40,28 @@ module DNN
        hash.merge!(merge_hash) if merge_hash
        hash
      end
-
-     private
-
-     def forward_loss(y, t)
-       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'forward_loss'")
-     end
-
-     def backward_loss(t)
-       raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'backward_loss'")
-     end
    end
 
    class MeanSquaredError < Loss
-     private
-
-     def forward_loss(y, t)
-       @y = y
+     def forward(y, t)
        batch_size = t.shape[0]
        0.5 * ((y - t) ** 2).sum / batch_size
      end
 
-     def backward_loss(t)
-       @y - t
+     def backward(y, t)
+       y - t
      end
    end
 
 
    class MeanAbsoluteError < Loss
-     private
-
-     def forward_loss(y, t)
-       @y = y
+     def forward(y, t)
        batch_size = t.shape[0]
        (y - t).abs.sum / batch_size
      end
 
-     def backward_loss(t)
-       dy = @y - t
+     def backward(y, t)
+       dy = y - t
        dy[dy >= 0] = 1
        dy[dy < 0] = -1
        dy
@@ -74,14 +70,12 @@ module DNN
 
 
    class Hinge < Loss
-     private
-
-     def forward_loss(y, t)
+     def forward(y, t)
        @a = 1 - y * t
        Xumo::SFloat.maximum(0, @a)
      end
 
-     def backward_loss(t)
+     def backward(y, t)
        a = Xumo::SFloat.ones(*@a.shape)
        a[@a <= 0] = 0
        a * -t
@@ -90,16 +84,13 @@ module DNN
 
 
    class HuberLoss < Loss
-     private
-
-     def forward_loss(y, t)
-       @y = y
-       loss_l1_value = loss_l1(t)
-       @loss_value = loss_l1_value > 1 ? loss_l1_value : loss_l2(t)
+     def forward(y, t)
+       loss_l1_value = loss_l1(y, t)
+       @loss_value = loss_l1_value > 1 ? loss_l1_value : loss_l2(y, t)
      end
 
-     def backward_loss(t)
-       dy = @y - t
+     def backward(y, t)
+       dy = y - t
        if @loss_value > 1
          dy[dy >= 0] = 1
          dy[dy < 0] = -1
@@ -107,14 +98,16 @@ module DNN
        dy
      end
 
-     def loss_l1(t)
+     private
+
+     def loss_l1(y, t)
        batch_size = t.shape[0]
-       (@y - t).abs.sum / batch_size
+       (y - t).abs.sum / batch_size
      end
 
-     def loss_l2(t)
+     def loss_l2(y, t)
        batch_size = t.shape[0]
-       0.5 * ((@y - t) ** 2).sum / batch_size
+       0.5 * ((y - t) ** 2).sum / batch_size
      end
    end
@@ -135,20 +128,18 @@ module DNN
        @eps = eps
      end
 
-     def to_hash
-       super(eps: @eps)
+     def forward(y, t)
+       @x = SoftmaxCrossEntropy.softmax(y)
+       batch_size = t.shape[0]
+       -(t * Xumo::NMath.log(@x + @eps)).sum / batch_size
      end
 
-     private
-
-     def forward_loss(y, t)
-       @y = SoftmaxCrossEntropy.softmax(y)
-       batch_size = t.shape[0]
-       -(t * Xumo::NMath.log(@y + @eps)).sum / batch_size
+     def backward(y, t)
+       @x - t
      end
 
-     def backward_loss(t)
-       @y - t
+     def to_hash
+       super(eps: @eps)
      end
    end
@@ -165,19 +156,17 @@ module DNN
        @eps = eps
      end
 
-     def to_hash
-       super(eps: @eps)
+     def forward(y, t)
+       @x = Activations::Sigmoid.new.forward(y)
+       -(t * Xumo::NMath.log(@x) + (1 - t) * Xumo::NMath.log(1 - @x))
      end
 
-     private
-
-     def forward_loss(y, t)
-       @y = Activations::Sigmoid.new.forward(y)
-       -(t * Xumo::NMath.log(@y) + (1 - t) * Xumo::NMath.log(1 - @y))
+     def backward(y, t)
+       @x - t
      end
 
-     def backward_loss(t)
-       @y - t
+     def to_hash
+       super(eps: @eps)
      end
    end
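Overall, forward_loss/backward_loss become the public forward/backward pair, and the new loss entry point optionally folds in regularizer penalties. A rough usage sketch of the 0.13.0 Loss API (y, t and layers are assumed variables, not taken from this diff):

  loss_fn = DNN::Losses::MeanSquaredError.new
  value = loss_fn.loss(y, t)           # same as loss_fn.forward(y, t)
  value = loss_fn.loss(y, t, layers)   # also adds each layer's regularizer penalty
  dy    = loss_fn.backward(y, t)       # gradient of the loss with respect to y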