ruby-dnn 0.15.0 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 6be46c1c89fbb1f1f2091c95f15ccab21473d52571ee47953a4c07446ea8544c
-  data.tar.gz: e01eb929c13b2a33a350ee42b8bb8584446722ac3ba3dc7e163f7f3429ec068a
+  metadata.gz: 8fb316d982b142cfc31201f65e89b99579bdd1669d226c42dbc562c7af3b2c2a
+  data.tar.gz: 1839e28107f410d0008a21d9de3670fd5880e1a526ec24eddc34562e18390958
 SHA512:
-  metadata.gz: cc5bd6608d17a90cb97f6f5530877e5441a2dc73fc18e7b821c33d606469d6a15c22d519570b8c8821282915de84b677b3527221b1eb076a145a85997a58d2d9
-  data.tar.gz: 721c578f41a7648dfc5c462a1b9a81f6b9604b50540e8fa4a58bb801adc8059ee7cff9f27d3c1b1a710ce0857e426ca2087f4902bf46d9c621870e3ae7cb664a
+  metadata.gz: fd07fbd6409079cca07d14d08051e87b0e4b43772128f46f8cfe62857d2a1053bcb5dc13964f992bcb637ae64e7430613541bb19699d1fe90b142f9f950acfb8
+  data.tar.gz: 575171f1247cf0aa84fc71aee2b7318db10b51a0772e8dc1647e8189e1af70c39bd275edca1e0de8c9b45793051fe0fe8d75bce7bb6ec2e32f11359710592aa3
data/README.md CHANGED
@@ -39,9 +39,9 @@ model << ReLU.new
 
 model << Dense.new(10)
 
-model.setup(RMSProp.new, SoftmaxCrossEntropy.new)
+model.setup(Adam.new, SoftmaxCrossEntropy.new)
 
-model.train(x_train, y_train, 10, batch_size: 100, test: [x_test, y_test])
+model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
 
 ```
 
 When creating a model in 'define by run' style:
@@ -70,7 +70,7 @@ model = MLP.new
 
 model.setup(Adam.new, SoftmaxCrossEntropy.new)
 
-model.train(x_train, y_train, 10, batch_size: 100, test: [x_test, y_test])
+model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
 
 ```
 
 Please refer to examples for basic usage.
@@ -80,7 +80,7 @@ If you want to know more detailed information, please refer to the source code.
 || Implemented classes |
 |:-----------|------------:|
 | Connections | Dense, Conv2D, Conv2DTranspose, Embedding, SimpleRNN, LSTM, GRU |
-| Activations | Sigmoid, Tanh, Softsign, Softplus, Swish, ReLU, LeakyReLU, ELU |
+| Activations | Sigmoid, Tanh, Softsign, Softplus, Swish, ReLU, LeakyReLU, ELU, Mish |
 | Basic | Flatten, Reshape, Dropout, BatchNormalization |
 | Pooling | MaxPool2D, AvgPool2D, GlobalAvgPool2D, UnPool2D |
 | Optimizers | SGD, Nesterov, AdaGrad, RMSProp, AdaDelta, RMSPropGraves, Adam, AdaBound |
data/lib/dnn/core/activations.rb CHANGED
@@ -134,5 +134,18 @@ module DNN
       end
     end
 
+    class Mish < Layer
+      def forward(x)
+        @x = x
+        x * Xumo::NMath.tanh(Softplus.new.forward(x))
+      end
+
+      def backward(dy)
+        omega = 4 * (@x + 1) + 4 * Xumo::NMath.exp(2 * @x) + Xumo::NMath.exp(3 * @x) + Xumo::NMath.exp(@x) * (4 * @x + 6)
+        delta = 2 * Xumo::NMath.exp(@x) + Xumo::NMath.exp(2 * @x) + 2
+        dy * (Xumo::NMath.exp(@x) * omega) / delta**2
+      end
+    end
+
   end
 end
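
Note: Mish is the activation mish(x) = x * tanh(softplus(x)), and the backward pass above uses its closed-form derivative e^x * omega / delta**2. Below is a minimal standalone sanity check, written in plain Numo rather than against the gem's classes, that the analytic gradient agrees with a forward-difference estimate of the kind the new Utils.numerical_grad (further down in this diff) uses. All names in the snippet are illustrative only.

    require "numo/narray"

    x = Numo::DFloat.new(8).rand(-3, 3)  # a few sample points

    softplus = ->(v) { Numo::NMath.log(1 + Numo::NMath.exp(v)) }
    mish = ->(v) { v * Numo::NMath.tanh(softplus.(v)) }

    # Analytic gradient: the same closed form as Mish#backward with dy = 1.
    omega = 4 * (x + 1) + 4 * Numo::NMath.exp(2 * x) + Numo::NMath.exp(3 * x) + Numo::NMath.exp(x) * (4 * x + 6)
    delta = 2 * Numo::NMath.exp(x) + Numo::NMath.exp(2 * x) + 2
    analytic = Numo::NMath.exp(x) * omega / delta**2

    # Forward-difference estimate, same scheme as Utils.numerical_grad.
    numeric = (mish.(x + 1e-7) - mish.(x)) / 1e-7

    puts (analytic - numeric).abs.max  # prints a value near zero
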
data/lib/dnn/core/models.rb CHANGED
@@ -158,7 +158,7 @@ module DNN
 
     # Start training by iterator.
     # Set up the model before using this method.
-    # @param [Iterator] train_iterator Iterator used for training.
+    # @param [DNN::Iterator] train_iterator Iterator used for training.
     # @param [Integer] epochs Number of training epochs.
     # @param [Integer] batch_size Batch size used for one training.
     # @param [Integer] initial_epoch Initial epoch.
@@ -260,10 +260,13 @@ module DNN
     # @return [Array] Returns the test data accuracy and mean loss in the form [accuracy, mean_loss].
     def evaluate(x, y, batch_size: 100)
       check_xy_type(x, y)
-      evaluate_by_iterator(Iterator.new(x, y, random: false))
+      evaluate_by_iterator(Iterator.new(x, y, random: false), batch_size: batch_size)
     end
 
-    # Evaluate model by iterator
+    # Evaluate model by iterator.
+    # @param [DNN::Iterator] test_iterator Iterator used for testing.
+    # @param [Integer] batch_size Batch size used for one test.
+    # @return [Array] Returns the test data accuracy and mean loss in the form [accuracy, mean_loss].
     def evaluate_by_iterator(test_iterator, batch_size: 100)
       num_test_datas = test_iterator.num_datas
       batch_size = batch_size >= num_test_datas[0] ? num_test_datas : batch_size
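
Note: evaluate now forwards its batch_size keyword to evaluate_by_iterator; previously the argument was accepted but dropped. A short usage sketch, assuming model, x_test, and y_test are set up as in the README example above:

    accuracy, mean_loss = model.evaluate(x_test, y_test, batch_size: 128)

    # Equivalent call through an explicit iterator:
    test_iterator = DNN::Iterator.new(x_test, y_test, random: false)
    accuracy, mean_loss = model.evaluate_by_iterator(test_iterator, batch_size: 128)
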
@@ -298,6 +301,7 @@ module DNN
     # Implement the process to compute the accuracy of this model.
     # @param [Numo::SFloat] x Input test data.
     # @param [Numo::SFloat] y Output test data.
+    # @return [Integer] Returns the test data accuracy.
     private def accuracy(x, y)
       if x.shape[1..-1] == [1]
         correct = 0
data/lib/dnn/core/utils.rb CHANGED
@@ -30,5 +30,10 @@ module DNN
     def self.softmax(x)
       Losses::SoftmaxCrossEntropy.softmax(x)
     end
+
+    # Perform numerical differentiation.
+    def self.numerical_grad(x, func)
+      (func.(x + 1e-7) - func.(x)) / 1e-7
+    end
   end
 end
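
Note: numerical_grad computes a one-sided (forward) difference quotient, (f(x + h) - f(x)) / h with h fixed at 1e-7. Assuming the enclosing module is DNN::Utils, as the self.softmax helper above suggests, a quick illustrative use with a hypothetical lambda:

    square = ->(v) { v**2 }
    DNN::Utils.numerical_grad(3.0, square)  # => approximately 6.0, since d/dv v**2 = 2v
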
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.15.0"
+  VERSION = "0.15.1"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.15.0
+  version: 0.15.1
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-11-16 00:00:00.000000000 Z
+date: 2019-11-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray