ruby-dnn 0.6.2 → 0.6.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 530a8532898c2b8f934c257ff0d986407763fb3df432df8f6d061d31147b97d8
-  data.tar.gz: 333fb2f4e0cfd6c6aa78d3bea8f9159433acec613d8017218d24688ab1384d24
+  metadata.gz: 83bf68ae55435acfd33ccebf5798252ad670c7047f31de52c5b1951e0d0064f0
+  data.tar.gz: 63edb99dcafd2abb6b66f09a5fe44ada8e50f2674b56af2d3b3100980ba6e841
 SHA512:
-  metadata.gz: 9979622493c8056c75ea3c9a5561ffa6cb2a63147d7c230a43870125ae8a023d44ed9615b0fa48f169c29d19a3979d109570f46baf9122046be01080d10baf01
-  data.tar.gz: f9730237fcc631dab2109909508d73559dcbbcdad996578be7f1252776ab048cab0510b17e58a6662358dde6e8bd81356348c2576347b73aa8aaa0c0e4cd689b
+  metadata.gz: 78baa0705d6062f066d132f46c500277daf03957ddaa4e7530c9f75b8bfcc08d0d4ac6ffd3130d8cbf7e01b32ce6a1e60ab1b046af9ed187c2b658d6ee71bb8e
+  data.tar.gz: ae2930b1fcccd676f3e3273e0031c63888fe264d069c2e6c16aeb9c7def9a8bf5e05e2de2ee9265d2ce1e633c964e0b91843b3e2c0fa845428c77db09c9a05cd
data/README.md CHANGED
@@ -21,7 +21,9 @@ Or install it yourself as:
 
 ## Usage
 
-TODO: Write usage instructions here
+Currently, only simple documentation in Japanese is available.
+Please refer to the examples for basic usage.
+For more detailed information, please refer to the source code.
 
 ## Development
 
data/examples/mnist_lstm_example.rb ADDED
@@ -0,0 +1,36 @@
+require "dnn"
+require "dnn/lib/mnist"
+#require "numo/linalg/autoloader"
+
+include Numo
+include DNN::Layers
+include DNN::Activations
+include DNN::Optimizers
+Model = DNN::Model
+MNIST = DNN::MNIST
+
+x_train, y_train = MNIST.load_train
+x_test, y_test = MNIST.load_test
+
+x_train = Numo::SFloat.cast(x_train).reshape(x_train.shape[0], 28, 28)
+x_test = Numo::SFloat.cast(x_test).reshape(x_test.shape[0], 28, 28)
+
+x_train /= 255
+x_test /= 255
+
+y_train = DNN::Util.to_categorical(y_train, 10)
+y_test = DNN::Util.to_categorical(y_test, 10)
+
+model = Model.new
+
+model << InputLayer.new([28, 28])
+
+model << LSTM.new(200)
+model << LSTM.new(200, return_sequences: false)
+
+model << Dense.new(10)
+model << SoftmaxWithLoss.new
+
+model.compile(Adam.new)
+
+model.train(x_train, y_train, 10, batch_size: 100, test: [x_test, y_test])
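
A note on the stacked LSTMs in this new example: the first LSTM.new(200) keeps return_sequences at its default and hands the full hidden-state sequence (batch x 28 x 200) to the second, while return_sequences: false collapses the second layer's output to the last step's hidden state (batch x 200), which is what Dense.new(10) expects. A minimal sketch of inspecting the trained model, assuming Model#predict accepts a batch and returns the network output (an assumption; predict does not appear in this diff):

    # Sketch only: Model#predict is assumed to return one 10-way
    # softmax score vector per input image.
    out = model.predict(x_test[0...5, false])  # first 5 test images
    p out.shape                                # expected: [5, 10]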
data/lib/dnn.rb CHANGED
@@ -1,6 +1,9 @@
-require "numo/narray"
-
-Xumo = Numo
+if defined? Cumo
+  Xumo = Cumo
+else
+  require "numo/narray"
+  Xumo = Numo
+end
 
 Xumo::SFloat.srand(rand(2**64))
 
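This top-of-file change is 0.6.3's backend switch: if the Cumo constant is already defined when dnn.rb loads, Xumo aliases Cumo (GPU arrays); otherwise it requires and aliases Numo (CPU). A minimal sketch of opting into the GPU path, assuming the cumo gem is installed (an assumption; the diff only shows the dnn.rb side):

    # Load cumo first so "defined? Cumo" is true when dnn.rb runs.
    require "cumo"   # assumption: cumo gem installed and CUDA available
    require "dnn"

    p Xumo  # => Cumo when cumo was loaded first, Numo otherwise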
data/lib/dnn/core/rnn.rb CHANGED
@@ -17,15 +17,47 @@ module DNN
                    weight_initializer: nil,
                    bias_initializer: nil,
                    weight_decay: 0)
-      super()
-      @num_nodes = num_nodes
-      @stateful = stateful
-      @return_sequences = return_sequences
-      @weight_initializer = (weight_initializer || RandomNormal.new)
-      @bias_initializer = (bias_initializer || Zeros.new)
-      @weight_decay = weight_decay
-      @layers = []
-      @h = nil
+      super()
+      @num_nodes = num_nodes
+      @stateful = stateful
+      @return_sequences = return_sequences
+      @weight_initializer = (weight_initializer || RandomNormal.new)
+      @bias_initializer = (bias_initializer || Zeros.new)
+      @weight_decay = weight_decay
+      @layers = []
+      @h = nil
+    end
+
+    def forward(xs)
+      @xs_shape = xs.shape
+      hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
+      h = (@stateful && @h) ? @h : Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
+      xs.shape[1].times do |t|
+        x = xs[true, t, false]
+        h = @layers[t].forward(x, h)
+        hs[true, t, false] = h
+      end
+      @h = h
+      @return_sequences ? hs : h
+    end
+
+    def backward(dh2s)
+      @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
+      @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
+      @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
+      unless @return_sequences
+        dh = dh2s
+        dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
+        dh2s[true, -1, false] = dh
+      end
+      dxs = Xumo::SFloat.zeros(@xs_shape)
+      dh = 0
+      (0...dh2s.shape[1]).to_a.reverse.each do |t|
+        dh2 = dh2s[true, t, false]
+        dx, dh = @layers[t].backward(dh2 + dh)
+        dxs[true, t, false] = dx
+      end
+      dxs
     end
 
     def to_hash(merge_hash = nil)
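The forward/backward bodies hoisted into the base class here are plain backpropagation through time: forward caches the per-step cells in @layers and collects hidden states; backward, when return_sequences is false, routes the single incoming gradient to the last time step of an otherwise-zero gradient sequence, then walks the steps in reverse while threading the recurrent gradient dh into each step. A toy Numo sketch of just that gradient-routing step, with made-up sizes:

    require "numo/narray"

    # Toy version of the "unless @return_sequences" branch above.
    batch, time_length, num_nodes = 2, 3, 4
    dh   = Numo::SFloat.ones(batch, num_nodes)                # grad of last h
    dh2s = Numo::SFloat.zeros(batch, time_length, num_nodes)  # zeros elsewhere
    dh2s[true, -1, false] = dh                                # route to last step
    p dh2s[true, -1, false].sum  # => 8.0; earlier steps stay zero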
@@ -34,7 +66,6 @@ module DNN
       num_nodes: @num_nodes,
       stateful: @stateful,
       return_sequences: @return_sequences,
-      activation: @activation.to_hash,
       weight_initializer: @weight_initializer.to_hash,
       bias_initializer: @bias_initializer.to_hash,
       weight_decay: @weight_decay,
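The activation: entry leaves the base class hash because only some subclasses own an activation; those now merge it in themselves via super({activation: @activation.to_hash}), as a later hunk shows. A generic, self-contained Ruby sketch of that merge_hash pattern (illustrative names, not the gem's real classes):

    # Base serializes shared fields; subclasses merge in their own.
    class BaseLayer
      def to_hash(merge_hash = nil)
        hash = { num_nodes: 200, stateful: false }
        hash.merge!(merge_hash) if merge_hash
        hash
      end
    end

    class TanhLayer < BaseLayer
      def to_hash
        super({ activation: "Tanh" })  # subclass-only field
      end
    end

    p TanhLayer.new.to_hash
    # => {:num_nodes=>200, :stateful=>false, :activation=>"Tanh"}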
@@ -114,38 +145,6 @@ module DNN
       @activation = (activation || Tanh.new)
     end
 
-    def forward(xs)
-      @xs_shape = xs.shape
-      hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
-      h = (@stateful && @h) ? @h : Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
-      xs.shape[1].times do |t|
-        x = xs[true, t, false]
-        h = @layers[t].forward(x, h)
-        hs[true, t, false] = h
-      end
-      @h = h
-      @return_sequences ? hs : h
-    end
-
-    def backward(dh2s)
-      @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
-      @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
-      @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
-      unless @return_sequences
-        dh = dh2s
-        dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
-        dh2s[true, -1, false] = dh
-      end
-      dxs = Xumo::SFloat.zeros(@xs_shape)
-      dh = 0
-      (0...dh2s.shape[1]).to_a.reverse.each do |t|
-        dh2 = dh2s[true, t, false]
-        dx, dh = @layers[t].backward(dh2 + dh)
-        dxs[true, t, false] = dx
-      end
-      dxs
-    end
-
     def to_hash
       super({activation: @activation.to_hash})
     end
@@ -374,38 +373,6 @@ module DNN
       super
     end
 
-    def forward(xs)
-      @xs_shape = xs.shape
-      hs = Xumo::SFloat.zeros(xs.shape[0], @time_length, @num_nodes)
-      h = (@stateful && @h) ? @h : Xumo::SFloat.zeros(xs.shape[0], @num_nodes)
-      xs.shape[1].times do |t|
-        x = xs[true, t, false]
-        h = @layers[t].forward(x, h)
-        hs[true, t, false] = h
-      end
-      @h = h
-      @return_sequences ? hs : h
-    end
-
-    def backward(dh2s)
-      @grads[:weight] = Xumo::SFloat.zeros(*@params[:weight].shape)
-      @grads[:weight2] = Xumo::SFloat.zeros(*@params[:weight2].shape)
-      @grads[:bias] = Xumo::SFloat.zeros(*@params[:bias].shape)
-      unless @return_sequences
-        dh = dh2s
-        dh2s = Xumo::SFloat.zeros(dh.shape[0], @time_length, dh.shape[1])
-        dh2s[true, -1, false] = dh
-      end
-      dxs = Xumo::SFloat.zeros(@xs_shape)
-      dh = 0
-      (0...dh2s.shape[1]).to_a.reverse.each do |t|
-        dh2 = dh2s[true, t, false]
-        dx, dh = @layers[t].backward(dh2 + dh)
-        dxs[true, t, false] = dx
-      end
-      dxs
-    end
-
     private
 
     def init_params
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.6.2"
+  VERSION = "0.6.3"
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.6.2
+  version: 0.6.3
 platform: ruby
 authors:
 - unagiootoro
@@ -101,8 +101,9 @@ files:
 - bin/console
 - bin/setup
 - examples/cifar10_example.rb
+- examples/mnist_conv2d_example.rb
 - examples/mnist_example.rb
-- examples/mnist_example2.rb
+- examples/mnist_lstm_example.rb
 - examples/xor_example.rb
 - lib/dnn.rb
 - lib/dnn/core/activations.rb