ruby-dnn 1.1.0 → 1.1.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 62482330aab914fc53313fa7a1170a53ea3644e72dfe1cecb2fffca1160f6f7e
-  data.tar.gz: e21d0e28ba2c603179ece6fe9cb2c4722ea8bbaa85c72cc357f8b5fa691e33d0
+  metadata.gz: 97822807b84847cb2ad475bef5dd65329ae0c699847c577eb8b30a2be0425ec0
+  data.tar.gz: 6871849ea256e466f4d10e354c5e7698247be8328c17e540e5d4b35adf46ee27
 SHA512:
-  metadata.gz: 5f7a34efbba1465aa4e245b83928bbbe764d67708e9e6c3faef7030b13873dc27f775adbf8b32b0f3015280f1b3bbb600a1e4c4bff9c1a5be40387217efc2e26
-  data.tar.gz: 9bba32d0b4142df90ab07b75191bb7590fa000f38470f9d9149f4cd95c346f5ba90fbf4549d714dee5374d1024184cf45eb61bcd1ba3e10e1ab0aa5eda398011
+  metadata.gz: aa8fb779d8cec6e4acd1ce63a952f626eceae801ee7e16045a890412d8b9189e95d3798c9d840ab07de4afe517ebeac9230391e193b24cedb316927bda4726c9
+  data.tar.gz: 16aa82f8e009eca027e693adb3fb543046376f11539b881a65ef29dd484b8225b34c17ee3171d72a6a9b2a3712b8463630deac778f1a851830df6ed53b7feca8
data/README.md CHANGED
@@ -4,7 +4,7 @@
 
 ruby-dnn is a Ruby deep learning library. It supports fully connected neural networks, convolutional neural networks,
 and recurrent neural networks.
-Currently, you can get 99% accuracy with MNIST and 78% with CIFAR 10.
+Currently, you can get 99% accuracy with MNIST and 82% with CIFAR 10.
 
 ## Installation
 
@@ -43,7 +43,6 @@ model.setup(Adam.new, SoftmaxCrossEntropy.new)
 
 model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
 
-
 accuracy, loss = model.evaluate(x_test, y_test)
 puts "accuracy: #{accuracy}"
 puts "loss: #{loss}"
@@ -42,7 +42,7 @@ end
     out = net.(x, y)
     loss = lf.(out, y)
     loss.link.backward
-    puts "epoch: #{epoch}, step: #{step}, loss = #{loss.data}"
+    puts "epoch: #{epoch}, step: #{step}, loss = #{loss.data.to_f}"
     opt.update([w1, b1, w2, b2])
   end
 end
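
The added `.to_f` keeps the log line clean: `loss.data` is a Numo array, and interpolating it directly prints Numo's array representation rather than a bare number. A minimal illustration, assuming the loss reduces to a single element as in the example above:

```ruby
require "numo/narray"

loss = Numo::SFloat[0.1234]
puts "loss = #{loss}"      # interpolates Numo's array formatting
puts "loss = #{loss.to_f}" # interpolates a plain Ruby Float
```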
@@ -8,7 +8,7 @@ module DNN
       y = forward_node(x)
       link = Link.new(prev, self)
       prev.next = link if prev.is_a?(Link)
-      Tensor.new(y, link)
+      Tensor.convert(y, link)
     end
 
     def forward_node(x)
@@ -46,7 +46,7 @@ module DNN
     # @param [Tensor | Param] input Input tensor or param.
     # @return [Tensor] Output tensor.
     def call(input)
-      input = Tensor.new(input) if !input.is_a?(Tensor) && !input.is_a?(Param)
+      input = Tensor.convert(input) if !input.is_a?(Tensor) && !input.is_a?(Param)
       build(input.data.shape[1..-1]) unless built?
       forward(input)
     end
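
With `call` routing non-`Tensor` input through `Tensor.convert`, a layer instance can be applied directly to a raw Numo array and the wrapping happens on the way in. A minimal sketch (the choice of `ReLU` is illustrative), assuming `require "dnn"`:

```ruby
require "dnn"

relu = DNN::Layers::ReLU.new
out = relu.(Numo::SFloat[-1.0, 0.0, 2.0]) # raw NArray, wrapped by Tensor.convert
out.data                                  # => Numo::SFloat[0, 0, 2]
```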
@@ -1,5 +1,50 @@
 module DNN
   module Layers
+    module MathUtils
+      def self.align_ndim(shape1, shape2)
+        if shape1.length < shape2.length
+          shape2.length.times do |axis|
+            unless shape1[axis] == shape2[axis]
+              shape1.insert(axis, 1)
+            end
+          end
+        elsif shape1.length > shape2.length
+          shape1.length.times do |axis|
+            unless shape1[axis] == shape2[axis]
+              shape2.insert(axis, 1)
+            end
+          end
+        end
+        [shape1, shape2]
+      end
+
+      def self.broadcast_to(x, target_shape)
+        return x if x.shape == target_shape
+        x_shape, target_shape = align_ndim(x.shape, target_shape)
+        x = x.reshape(*x_shape)
+        x_shape.length.times do |axis|
+          unless x.shape[axis] == target_shape[axis]
+            tmp = x
+            (target_shape[axis] - 1).times do
+              x = x.concatenate(tmp, axis: axis)
+            end
+          end
+        end
+        x
+      end
+
+      def self.sum_to(x, target_shape)
+        return x if x.shape == target_shape
+        x_shape, target_shape = align_ndim(x.shape, target_shape)
+        x = x.reshape(*x_shape)
+        x_shape.length.times do |axis|
+          unless x.shape[axis] == target_shape[axis]
+            x = x.sum(axis: axis, keepdims: true)
+          end
+        end
+        x
+      end
+    end
 
     class Neg < Layer
       include LayerNode
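
`broadcast_to` and `sum_to` are inverses by design: `broadcast_to` tiles an array (by repeated `concatenate`) along every axis whose extent is 1 until it matches the target shape, while `sum_to` collapses those axes back with `keepdims: true`. A sketch of the expected behavior on plain Numo arrays:

```ruby
require "numo/narray"

x = Numo::SFloat[[1, 2, 3]]                    # shape (1, 3)
DNN::Layers::MathUtils.broadcast_to(x, [2, 3]) # => [[1, 2, 3], [1, 2, 3]]

dy = Numo::SFloat[[1, 1, 1], [2, 2, 2]]        # shape (2, 3)
DNN::Layers::MathUtils.sum_to(dy, [1, 3])      # => [[3, 3, 3]]
```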
@@ -17,11 +62,15 @@ module DNN
     include MergeLayerNode
 
     def forward_node(x1, x2)
+      @x1_shape = x1.shape
+      @x2_shape = x2.shape
       x1 + x2
     end
 
     def backward_node(dy)
-      [dy, dy]
+      dx1 = MathUtils.sum_to(dy, @x1_shape)
+      dx2 = MathUtils.sum_to(dy, @x2_shape)
+      [dx1, dx2]
     end
   end
 
@@ -29,11 +78,15 @@ module DNN
     include MergeLayerNode
 
     def forward_node(x1, x2)
+      @x1_shape = x1.shape
+      @x2_shape = x2.shape
       x1 - x2
     end
 
     def backward_node(dy)
-      [dy, -dy]
+      dx1 = MathUtils.sum_to(dy, @x1_shape)
+      dx2 = MathUtils.sum_to(-dy, @x2_shape)
+      [dx1, dx2]
     end
   end
 
@@ -46,7 +99,9 @@ module DNN
     end
 
     def backward_node(dy)
-      [dy * @x2, dy * @x1]
+      dx1 = MathUtils.sum_to(dy * @x2, @x1.shape)
+      dx2 = MathUtils.sum_to(dy * @x1, @x2.shape)
+      [dx1, dx2]
     end
   end
 
@@ -59,8 +114,8 @@ module DNN
     end
 
     def backward_node(dy)
-      dx1 = dy / @x2
-      dx2 = dy * -(@x1 / @x2**2)
+      dx1 = MathUtils.sum_to(dy / @x2, @x1.shape)
+      dx2 = MathUtils.sum_to(dy * -(@x1 / @x2**2), @x2.shape)
       [dx1, dx2]
     end
   end
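
All four arithmetic layers now apply the same rule: an operand that was broadcast in the forward pass gets its gradient summed back to its original shape, because every broadcast copy contributed to the output. Concretely, for a (4, 3) output gradient flowing into a (1, 3) bias-like operand:

```ruby
require "numo/narray"

dy  = Numo::SFloat.ones(4, 3)                   # upstream gradient, output shape
dx2 = DNN::Layers::MathUtils.sum_to(dy, [1, 3]) # => [[4, 4, 4]], summed over batch
```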
@@ -9,7 +9,7 @@ module DNN
     prev2 = (input2.is_a?(Tensor) ? input2.link : input2)
     y = forward_node(x1, x2)
     link = TwoInputLink.new(prev1, prev2, self)
-    Tensor.new(y, link)
+    Tensor.convert(y, link)
   end
 
   def backward(dy)
@@ -31,8 +31,8 @@ module DNN
   end
 
   def call(input1, input2)
-    input1 = Tensor.new(input1) if !input1.is_a?(Tensor) && !input1.is_a?(Param)
-    input2 = Tensor.new(input2) if !input2.is_a?(Tensor) && !input2.is_a?(Param)
+    input1 = Tensor.convert(input1) if !input1.is_a?(Tensor) && !input1.is_a?(Param)
+    input2 = Tensor.convert(input2) if !input2.is_a?(Tensor) && !input2.is_a?(Param)
     if input1.data.is_a?(Numo::NArray)
       build(input1.data.shape[1..-1]) unless built?
     else
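
Since both inputs are now funneled through `Tensor.convert`, a merge layer accepts any mix of `Tensor`, `Param`, NArray, or (with the scalar branch below) plain numeric arguments. A hedged sketch:

```ruby
y = DNN::Layers::Add.(Numo::SFloat[1, 2, 3], 2) # the 2 becomes a one-element tensor
y.data                                          # => Numo::SFloat[3, 4, 5]
```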
@@ -2,7 +2,7 @@ class Integer
   alias dnn__add +
   def +(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Add.(self, other)
+      DNN::Layers::Add.(DNN::Tensor.convert(self), other)
     else
       dnn__add(other)
     end
@@ -11,7 +11,7 @@ class Integer
   alias dnn__sub -
   def -(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Sub.(self, other)
+      DNN::Layers::Sub.(DNN::Tensor.convert(self), other)
     else
       dnn__sub(other)
     end
@@ -20,7 +20,7 @@ class Integer
   alias dnn__mul *
   def *(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Mul.(self, other)
+      DNN::Layers::Mul.(DNN::Tensor.convert(self), other)
     else
       dnn__mul(other)
     end
@@ -29,7 +29,7 @@ class Integer
   alias dnn__div /
   def /(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Div.(self, other)
+      DNN::Layers::Div.(DNN::Tensor.convert(self), other)
     else
       dnn__div(other)
     end
@@ -40,7 +40,7 @@ class Float
   alias dnn__add +
   def +(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Add.(self, other)
+      DNN::Layers::Add.(DNN::Tensor.convert(self), other)
     else
       dnn__add(other)
     end
@@ -49,7 +49,7 @@ class Float
   alias dnn__sub -
   def -(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Sub.(self, other)
+      DNN::Layers::Sub.(DNN::Tensor.convert(self), other)
     else
       dnn__sub(other)
     end
@@ -58,7 +58,7 @@ class Float
   alias dnn__mul *
   def *(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Mul.(self, other)
+      DNN::Layers::Mul.(DNN::Tensor.convert(self), other)
     else
       dnn__mul(other)
     end
@@ -67,7 +67,7 @@ class Float
   alias dnn__div /
   def /(other)
     if other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
-      DNN::Layers::Div.(self, other)
+      DNN::Layers::Div.(DNN::Tensor.convert(self), other)
     else
       dnn__div(other)
     end
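
Together, these patches fix scalar-on-the-left arithmetic: the `Integer` or `Float` receiver is wrapped with `DNN::Tensor.convert` before the layer call, so both operands of `Add`/`Sub`/`Mul`/`Div` are graph objects and the new `sum_to` machinery can route the scalar's gradient back to its one-element shape. A usage sketch, assuming `require "dnn"`:

```ruby
x = DNN::Tensor.convert(Numo::SFloat[1, 2, 3])
y = 2 * x + 1.5 # both scalars are converted to tensors before Mul/Add run
```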
@@ -38,18 +38,22 @@ module DNN
   end
 
   def +(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Add.(self, other)
   end
 
   def -(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Sub.(self, other)
   end
 
   def *(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Mul.(self, other)
   end
 
   def /(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Div.(self, other)
   end
 
@@ -3,11 +3,13 @@ module DNN
   attr_reader :data
   attr_accessor :link
 
-  def self.convert(inputs)
+  def self.convert(inputs, link = nil)
     if inputs.is_a?(Array)
-      inputs.map { |input| Tensor.new(input) }
+      inputs.map { |input| Tensor.new(input, link) }
+    elsif inputs.is_a?(Integer) || inputs.is_a?(Float)
+      Tensor.new(Xumo::SFloat[inputs], link)
     else
-      Tensor.new(inputs)
+      Tensor.new(inputs, link)
     end
   end
 
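`Tensor.convert` is now the single wrapping entry point, with three branches: an `Array` maps element-wise to tensors, a bare `Integer` or `Float` becomes a one-element `Xumo::SFloat` tensor, and anything else (typically an NArray) is wrapped directly. A sketch of each branch:

```ruby
DNN::Tensor.convert(Numo::SFloat[1, 2])                 # NArray -> Tensor
DNN::Tensor.convert(3)                                  # scalar -> Tensor of Xumo::SFloat[3]
DNN::Tensor.convert([Numo::SFloat[1], Numo::SFloat[2]]) # Array  -> [Tensor, Tensor]
```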
@@ -33,18 +35,22 @@ module DNN
   end
 
   def +(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Add.(self, other)
   end
 
   def -(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Sub.(self, other)
   end
 
   def *(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Mul.(self, other)
   end
 
   def /(other)
+    other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
     Layers::Div.(self, other)
   end
 
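And symmetrically for `Tensor` (and `Param` above) on the left: the raw right-hand operand is converted before the layer call. Sketch:

```ruby
x = DNN::Tensor.convert(Numo::SFloat[2, 4, 6])
y = x / 2 # the 2 is wrapped via Tensor.convert, then Layers::Div is applied
```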
@@ -21,6 +21,14 @@ module DNN
     dnn_class.from_hash(hash)
   end
 
+  # Broadcast to target shape.
+  # @param [Numo::SFloat] x Data to broadcast.
+  # @param [Array] target_shape Shape to broadcast to.
+  # @return [Numo::SFloat] Broadcasted data.
+  def self.broadcast_to(x, target_shape)
+    Layers::MathUtils.broadcast_to(x, target_shape)
+  end
+
   # Return the result of the sigmoid function.
   def self.sigmoid(x)
     Losses::SigmoidCrossEntropy.sigmoid(x)
@@ -15,6 +15,16 @@ pyfrom :"keras.models", import: :Sequential
 pyfrom :"keras.layers", import: [:Dense, :Dropout, :Conv2D, :Activation, :MaxPooling2D, :Flatten]
 pyfrom :"keras.layers.normalization", import: :BatchNormalization
 
+module DNN
+  module Layers
+    class Softmax < Layer
+      def forward(x)
+        Exp.(x) / Sum.(Exp.(x), axis: 1)
+      end
+    end
+  end
+end
+
 class DNNKerasModelConvertError < DNN::DNNError; end
 
 class KerasModelConvertor
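
This converter-local `Softmax` leans directly on the broadcasting work above: `Sum.(…, axis: 1)` yields a (batch, 1) tensor, the element-wise division broadcasts it back across the class axis, and `Div`'s new `sum_to` handles the denominator's gradient. A hedged usage sketch:

```ruby
x = DNN::Tensor.convert(Numo::SFloat[[1, 2, 3], [1, 1, 1]])
y = DNN::Layers::Softmax.new.(x)
# each row of y.data sums to 1; the second row is [1/3, 1/3, 1/3]
```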
@@ -34,18 +44,21 @@ class KerasModelConvertor
     unless @k_model.__class__.__name__ == "Sequential"
       raise DNNKerasModelConvertError.new("#{@k_model.__class__.__name__} models do not support convert.")
     end
-    dnn_model = DNN::Models::Sequential.new
-    @k_model.layers.each do |k_layer|
-      dnn_layer = layer_convert(k_layer)
-      dnn_model << dnn_layer if dnn_layer
-    end
+    layers = convert_layers(@k_model.layers)
     input_shape = @k_model.layers[0].input_shape.to_a[1..-1]
     input_layer = DNN::Layers::InputLayer.new(input_shape)
     input_layer.build(input_shape)
-    dnn_model.insert(0, input_layer)
+    layers.unshift(input_layer)
+    dnn_model = DNN::Models::Sequential.new(layers)
     dnn_model
   end
 
+  def convert_layers(k_layers)
+    k_layers.map do |k_layer|
+      layer_convert(k_layer)
+    end
+  end
+
   private
 
   def layer_convert(k_layer)
@@ -80,15 +93,15 @@ class KerasModelConvertor
 
   def convert_Activation(k_activation)
     activation_name = k_activation.get_config[:activation].to_s
-    case k_activation.get_config[:activation].to_s
+    activation = case k_activation.get_config[:activation].to_s
     when "sigmoid"
-      activation = DNN::Layers::Sigmoid.new
+      DNN::Layers::Sigmoid.new
     when "tanh"
-      activation = DNN::Layers::Tanh.new
+      DNN::Layers::Tanh.new
     when "relu"
-      activation = DNN::Layers::ReLU.new
+      DNN::Layers::ReLU.new
     when "softmax"
-      return nil
+      DNN::Layers::Softmax.new
     else
       raise DNNKerasModelConvertError.new("#{activation_name} activation do not support convert.")
     end
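
Two things changed here: the `case` is now used as an expression, assigning the chosen layer once instead of per branch, and `softmax` maps to the `Softmax` layer defined above instead of returning `nil` (which previously left the converted model one layer short). The expression-valued `case` pattern in isolation, with hypothetical names:

```ruby
def activation_for(name)
  case name
  when "relu"    then :relu_layer
  when "sigmoid" then :sigmoid_layer
  else raise ArgumentError, "#{name} not supported"
  end
end

activation_for("relu") # => :relu_layer
```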
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "1.1.0"
+  VERSION = "1.1.1"
 end
@@ -17,6 +17,7 @@ Gem::Specification.new do |spec|
 
   spec.add_dependency "numo-narray"
   spec.add_dependency "archive-tar-minitar"
+  spec.add_dependency "yard"
 
   # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
   # to allow pushing to a single host or delete this section to allow pushing to any host.
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 1.1.0
+  version: 1.1.1
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2020-02-02 00:00:00.000000000 Z
+date: 2020-02-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray
@@ -38,6 +38,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: yard
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement