ruby-dnn 0.15.0 → 0.15.1
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +4 -4
- data/lib/dnn/core/layers/activations.rb +13 -0
- data/lib/dnn/core/models.rb +7 -3
- data/lib/dnn/core/utils.rb +5 -0
- data/lib/dnn/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8fb316d982b142cfc31201f65e89b99579bdd1669d226c42dbc562c7af3b2c2a
+  data.tar.gz: 1839e28107f410d0008a21d9de3670fd5880e1a526ec24eddc34562e18390958
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fd07fbd6409079cca07d14d08051e87b0e4b43772128f46f8cfe62857d2a1053bcb5dc13964f992bcb637ae64e7430613541bb19699d1fe90b142f9f950acfb8
+  data.tar.gz: 575171f1247cf0aa84fc71aee2b7318db10b51a0772e8dc1647e8189e1af70c39bd275edca1e0de8c9b45793051fe0fe8d75bce7bb6ec2e32f11359710592aa3
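These digests cover the two archives packed inside the published .gem file, metadata.gz and data.tar.gz. As a hedged sketch of how a reader could verify them locally against a fetched copy (`gem fetch ruby-dnn -v 0.15.1`), the snippet below recomputes the SHA256 of data.tar.gz; the local file name and the `expected` variable are our assumptions, not part of the gem.

```ruby
require "digest"
require "rubygems/package"

# Expected digest taken from the checksums.yaml diff above.
expected = "1839e28107f410d0008a21d9de3670fd5880e1a526ec24eddc34562e18390958"

# A .gem file is a tar archive containing metadata.gz and data.tar.gz.
File.open("ruby-dnn-0.15.1.gem", "rb") do |f|
  Gem::Package::TarReader.new(f) do |tar|
    tar.each do |entry|
      next unless entry.full_name == "data.tar.gz"
      actual = Digest::SHA256.hexdigest(entry.read)
      puts(actual == expected ? "data.tar.gz: OK" : "data.tar.gz: MISMATCH")
    end
  end
end
```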
data/README.md
CHANGED

@@ -39,9 +39,9 @@ model << ReLU.new
 
 model << Dense.new(10)
 
-model.setup(
+model.setup(Adam.new, SoftmaxCrossEntropy.new)
 
-model.train(x_train, y_train, 10, batch_size:
+model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
 ```
 
 When create a model with 'define by run' style:

@@ -70,7 +70,7 @@ model = MLP.new
 
 model.setup(Adam.new, SoftmaxCrossEntropy.new)
 
-model.train(x_train, y_train, 10, batch_size:
+model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
 ```
 
 Please refer to examples for basic usage.

@@ -80,7 +80,7 @@ If you want to know more detailed information, please refer to the source code.
 || Implemented classes |
 |:-----------|------------:|
 | Connections | Dense, Conv2D, Conv2DTranspose, Embedding, SimpleRNN, LSTM, GRU |
-| Activations | Sigmoid, Tanh, Softsign, Softplus, Swish, ReLU, LeakyReLU, ELU |
+| Activations | Sigmoid, Tanh, Softsign, Softplus, Swish, ReLU, LeakyReLU, ELU, Mish |
 | Basic | Flatten, Reshape, Dropout, BatchNormalization |
 | Pooling | MaxPool2D, AvgPool2D, GlobalAvgPool2D, UnPool2D |
 | Optimizers | SGD, Nesterov, AdaGrad, RMSProp, AdaDelta, RMSPropGraves, Adam, AdaBound |
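The two updated lines slot into the README's sequential-model walkthrough. For context, here is a minimal end-to-end sketch of that example; the MNIST loader path, the preprocessing helpers, and the layer sizes follow the gem's bundled examples but should be treated as assumptions rather than verbatim README content.

```ruby
require "dnn"
require "dnn/datasets/mnist" # loader path as in the gem's examples (assumption)

include DNN::Models
include DNN::Layers
include DNN::Optimizers
include DNN::Losses

# Load MNIST, flatten the images, and one-hot encode the labels.
x_train, y_train = DNN::MNIST.load_train
x_test, y_test = DNN::MNIST.load_test
x_train = Numo::SFloat.cast(x_train).reshape(x_train.shape[0], 784) / 255
x_test = Numo::SFloat.cast(x_test).reshape(x_test.shape[0], 784) / 255
y_train = DNN::Utils.to_categorical(y_train, 10, Numo::SFloat)
y_test = DNN::Utils.to_categorical(y_test, 10, Numo::SFloat)

# The sequential model from the README excerpt above.
model = Sequential.new
model << InputLayer.new(784)
model << Dense.new(256)
model << ReLU.new
model << Dense.new(10)

model.setup(Adam.new, SoftmaxCrossEntropy.new)
model.train(x_train, y_train, 10, batch_size: 128, test: [x_test, y_test])
```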
data/lib/dnn/core/layers/activations.rb
CHANGED

@@ -134,5 +134,18 @@ module DNN
       end
     end
 
+    class Mish < Layer
+      def forward(x)
+        @x = x
+        x * Xumo::NMath.tanh(Softplus.new.forward(x))
+      end
+
+      def backward(dy)
+        omega = 4 * (@x + 1) + 4 * Xumo::NMath.exp(2 * @x) + Xumo::NMath.exp(3 * @x) + Xumo::NMath.exp(@x) * (4 * @x + 6)
+        delta = 2 * Xumo::NMath.exp(@x) + Xumo::NMath.exp(2 * @x) + 2
+        dy * (Xumo::NMath.exp(@x) * omega) / delta**2
+      end
+    end
+
   end
 end
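The new Mish layer implements Mish(x) = x · tanh(softplus(x)) with softplus(x) = ln(1 + e^x), and its backward pass encodes the closed-form derivative Mish′(x) = e^x · ω / δ², where ω = 4(x + 1) + 4e^(2x) + e^(3x) + e^x(4x + 6) and δ = 2e^x + e^(2x) + 2 (for example, Mish′(0) = 15/25 = 0.6). As a sanity check, the self-contained sketch below compares that closed form against a central finite difference using Numo directly; the `mish`/`mish_grad` helper names are ours, not the gem's.

```ruby
require "numo/narray"

# Mish forward: f(x) = x * tanh(softplus(x)), softplus(x) = ln(1 + e^x).
def mish(x)
  x * Numo::NMath.tanh(Numo::NMath.log(1 + Numo::NMath.exp(x)))
end

# Closed-form derivative, mirroring the backward pass in the diff above.
def mish_grad(x)
  omega = 4 * (x + 1) + 4 * Numo::NMath.exp(2 * x) + Numo::NMath.exp(3 * x) +
          Numo::NMath.exp(x) * (4 * x + 6)
  delta = 2 * Numo::NMath.exp(x) + Numo::NMath.exp(2 * x) + 2
  (Numo::NMath.exp(x) * omega) / delta**2
end

x = Numo::DFloat.new(7).seq(-3) # [-3, -2, -1, 0, 1, 2, 3]
eps = 1e-5
numerical = (mish(x + eps) - mish(x - eps)) / (2 * eps)
p (mish_grad(x) - numerical).abs.max # expect roughly 1e-9 or smaller
```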
data/lib/dnn/core/models.rb
CHANGED

@@ -158,7 +158,7 @@ module DNN
 
     # Start training by iterator.
     # Setup the model before use this method.
-    # @param [Iterator] train_iterator Iterator used for training.
+    # @param [DNN::Iterator] train_iterator Iterator used for training.
     # @param [Integer] epochs Number of training.
     # @param [Integer] batch_size Batch size used for one training.
     # @param [Integer] initial_epoch Initial epoch.

@@ -260,10 +260,13 @@ module DNN
     # @return [Array] Returns the test data accuracy and mean loss in the form [accuracy, mean_loss].
     def evaluate(x, y, batch_size: 100)
       check_xy_type(x, y)
-      evaluate_by_iterator(Iterator.new(x, y, random: false))
+      evaluate_by_iterator(Iterator.new(x, y, random: false), batch_size: batch_size)
     end
 
-    # Evaluate model by iterator
+    # Evaluate model by iterator.
+    # @param [DNN::Iterator] test_iterator Iterator used for testing.
+    # @param [Integer] batch_size Batch size used for one test.
+    # @return [Array] Returns the test data accuracy and mean loss in the form [accuracy, mean_loss].
     def evaluate_by_iterator(test_iterator, batch_size: 100)
       num_test_datas = test_iterator.num_datas
       batch_size = batch_size >= num_test_datas[0] ? num_test_datas : batch_size

@@ -298,6 +301,7 @@ module DNN
     # Implement the process to accuracy this model.
     # @param [Numo::SFloat] x Input test data.
     # @param [Numo::SFloat] y Output test data.
+    # @return [Integer] Returns the test data accuracy.
     private def accuracy(x, y)
       if x.shape[1..-1] == [1]
         correct = 0
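The substantive fix in this file is that `evaluate` previously dropped its `batch_size` keyword when delegating, so evaluation always ran with the delegate's default of 100; it is now forwarded to `evaluate_by_iterator`. A hedged usage sketch, reusing `model`, `x_test`, and `y_test` from the README example above:

```ruby
# evaluate returns [accuracy, mean_loss] per the doc comments above.
accuracy, mean_loss = model.evaluate(x_test, y_test, batch_size: 64)
puts "accuracy: #{accuracy}, mean loss: #{mean_loss}"

# Equivalent explicit-iterator form; batch_size is now honored here too.
iter = DNN::Iterator.new(x_test, y_test, random: false)
accuracy, mean_loss = model.evaluate_by_iterator(iter, batch_size: 64)
```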
data/lib/dnn/core/utils.rb
CHANGED
data/lib/dnn/version.rb
CHANGED
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.15.0
+  version: 0.15.1
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-11-
+date: 2019-11-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray