ruby-dnn 0.13.1 → 0.13.2

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 58ef35a4277a86304c39350b743d308145da921fdc2db4308fa6ed208be47d93
-  data.tar.gz: b8238e52c849e222277284c20723c1a1ab72e70fed409ad1f2962a9ba93190b8
+  metadata.gz: d6410781490b2e9ce5ca1370bcca4905d4476bce2d49db64dda5f6b4a933f89f
+  data.tar.gz: e7ab72ce5b64a85c116ecbf81798aa6abd6f3356ca1a5e8512b2f0e6090b01c7
 SHA512:
-  metadata.gz: fd335af25c5d11745960364dc6096f18fbf78387358f5a1b24959328b001e87c3454132960133a74ba9cb81ac90f98e8d2663a9a9492b78d7a8558124471dc44
-  data.tar.gz: 132da9ac8fccee2c0de894543e2800b8066b75e746e4faed609594096e6d894a3938e69c0e1155056aef384dc522d39c647b9dfce8b9a5ceb659716f6ba961f0
+  metadata.gz: a08c6f35faccf3b18e61f04535fbf67ac6c961f57a52a75c82be573269cb1d0dd5e8d2d9d215a3c311919c84ceb82c94c0c78db04de2b872fa98078e475596e7
+  data.tar.gz: 221da0acff79523f23e881d5a53d511348162840bd79d27221d2ec65e0c1c508efc2090b79646711856c20cd4e407aa8a8d08a51650587083a2af1284cc92abf
@@ -74,10 +74,6 @@ module DNN
     class LeakyReLU < Layers::Layer
       attr_reader :alpha
 
-      def self.from_hash(hash)
-        self.new(hash[:alpha])
-      end
-
       # @param [Float] alpha The slope when the output value is negative.
       def initialize(alpha = 0.3)
         super()
@@ -100,16 +96,16 @@ module DNN
       def to_hash
         super(alpha: @alpha)
       end
+
+      def load_hash(hash)
+        initialize(hash[:alpha])
+      end
     end
 
 
     class ELU < Layers::Layer
       attr_reader :alpha
 
-      def self.from_hash(hash)
-        self.new(hash[:alpha])
-      end
-
       # @param [Float] alpha The slope when the output value is negative.
       def initialize(alpha = 1.0)
         super()
@@ -139,6 +135,10 @@ module DNN
       def to_hash
         super(alpha: @alpha)
       end
+
+      def load_hash(hash)
+        initialize(hash[:alpha])
+      end
     end
 
   end
@@ -92,17 +92,6 @@ module DNN
       attr_reader :strides
       attr_reader :padding
 
-      def self.from_hash(hash)
-        self.new(hash[:num_filters], hash[:filter_size],
-                 weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
-                 bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
-                 weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
-                 bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
-                 use_bias: hash[:use_bias],
-                 strides: hash[:strides],
-                 padding: hash[:padding])
-      end
-
       # @param [Integer] num_filters Number of filters.
       # @param [Array | Integer] filter_size Filter size. Filter size is of the form [height, width].
       # @param [Array | Integer] strides Stride length. Stride length is of the form [height, width].
@@ -184,6 +173,17 @@ module DNN
               strides: @strides,
              padding: @padding)
       end
+
+      def load_hash(hash)
+        initialize(hash[:num_filters], hash[:filter_size],
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+                   bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+                   use_bias: hash[:use_bias],
+                   strides: hash[:strides],
+                   padding: hash[:padding])
+      end
     end
 
 
@@ -195,17 +195,6 @@ module DNN
       attr_reader :strides
       attr_reader :padding
 
-      def self.from_hash(hash)
-        self.new(hash[:num_filters], hash[:filter_size],
-                 weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
-                 bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
-                 weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
-                 bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
-                 use_bias: hash[:use_bias],
-                 strides: hash[:strides],
-                 padding: hash[:padding])
-      end
-
       # @param [Integer] num_filters Number of filters.
       # @param [Array | Integer] filter_size Filter size. Filter size is of the form [height, width].
       # @param [Array | Integer] strides Stride length. Stride length is of the form [height, width].
@@ -289,6 +278,17 @@ module DNN
               strides: @strides,
               padding: @padding)
       end
+
+      def load_hash(hash)
+        initialize(hash[:num_filters], hash[:filter_size],
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+                   bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+                   use_bias: hash[:use_bias],
+                   strides: hash[:strides],
+                   padding: hash[:padding])
+      end
     end
 
 
@@ -300,10 +300,6 @@ module DNN
       attr_reader :strides
       attr_reader :padding
 
-      def self.from_hash(hash)
-        self.new(hash[:pool_size], strides: hash[:strides], padding: hash[:padding])
-      end
-
       # @param [Array | Integer] pool_size Pooling size. Pooling size is of the form [height, width].
       # @param [Array | Integer | NilClass] strides Stride length. Stride length is of the form [height, width].
       # If you set nil, treat pool_size as strides.
@@ -345,6 +341,11 @@ module DNN
               strides: @strides,
               padding: @padding)
       end
+
+      def load_hash(hash)
+        initialize(hash[:pool_size], strides: hash[:strides], padding: hash[:padding])
+      end
+
     end
 
 
@@ -396,10 +397,6 @@ module DNN
 
       attr_reader :unpool_size
 
-      def self.from_hash(hash)
-        self.new(hash[:unpool_size])
-      end
-
       # @param [Array | Integer] unpool_size Unpooling size. unpooling size is of the form [height, width].
       def initialize(unpool_size)
         super()
@@ -445,6 +442,10 @@ module DNN
       def to_hash
         super(unpool_size: @unpool_size)
       end
+
+      def load_hash(hash)
+        initialize(hash[:unpool_size])
+      end
     end
 
   end
@@ -7,12 +7,6 @@ module DNN
       attr_reader :weight_initializer
       attr_reader :weight_regularizer
 
-      def self.from_hash(hash)
-        self.new(hash[:input_shape], hash[:input_length],
-                 weight_initializer: DNN::Utils.hash_to_obj(hash[:weight_initializer]),
-                 weight_regularizer: DNN::Utils.hash_to_obj(hash[:weight_regularizer]))
-      end
-
       # @param [Integer | Array] input_dim_or_shape Set input data dimension or shape.
       # @param [Integer] input_length Set the time series length of input data.
       # @param [DNN::Initializers::Initializer] weight_initializer Weight initializer.
@@ -67,6 +61,12 @@ module DNN
                weight_initializer: @weight_initializer.to_hash, weight_regularizer: @weight_regularizer&.to_hash)
       end
 
+      def load_hash(hash)
+        initialize(hash[:input_shape], hash[:input_length],
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]))
+      end
+
       def get_params
         { weight: @weight }
       end
@@ -2,6 +2,15 @@ module DNN
   module Initializers
 
     class Initializer
+      def self.from_hash(hash)
+        return nil unless hash
+        initializer_class = DNN.const_get(hash[:class])
+        initializer = initializer_class.allocate
+        raise DNN_Error.new("#{initializer.class} is not an instance of #{self} class.") unless initializer.is_a?(self)
+        initializer.load_hash(hash)
+        initializer
+      end
+
       # @param [Boolean | Integer] seed Seed of random number used for initialize parameter.
       # Set true to determine seed as random.
       def initialize(seed: false)
@@ -20,6 +29,10 @@ module DNN
         hash.merge!(merge_hash) if merge_hash
         hash
       end
+
+      def load_hash(hash)
+        initialize
+      end
     end
 
 
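The pattern introduced here replaces the per-class `from_hash` factories removed throughout this release: the base class resolves the concrete class from `hash[:class]`, `allocate`s an instance without running its constructor, type-checks it, and lets the instance's `load_hash` re-run `initialize` with the stored arguments. A minimal round-trip sketch (assuming the gem is loaded with `require "dnn"`):

    require "dnn"

    # Serialize an initializer to a plain hash, then rebuild it through the
    # base class; the :class entry selects the subclass to allocate.
    init = DNN::Initializers::RandomNormal.new(0.0, 0.1)
    hash = init.to_hash   # includes class: "DNN::Initializers::RandomNormal", mean:, std:
    copy = DNN::Initializers::Initializer.from_hash(hash)
    copy.mean             # => 0.0
    copy.std              # => 0.1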
@@ -33,10 +46,6 @@ module DNN
     class Const < Initializer
       attr_reader :const
 
-      def self.from_hash(hash)
-        self.new(hash[:const])
-      end
-
       # @param [Float] const Constant value of initialization.
       def initialize(const)
         super()
@@ -50,6 +59,10 @@ module DNN
       def to_hash
         super(const: @const)
       end
+
+      def load_hash(hash)
+        initialize(hash[:const])
+      end
     end
 
 
@@ -57,10 +70,6 @@ module DNN
       attr_reader :mean
       attr_reader :std
 
-      def self.from_hash(hash)
-        self.new(hash[:mean], hash[:std], seed: hash[:seed])
-      end
-
       # @param [Float] mean Average of initialization value.
       # @param [Float] std Variance of initialization value.
       def initialize(mean = 0, std = 0.05, seed: true)
@@ -77,6 +86,10 @@ module DNN
       def to_hash
         super(mean: @mean, std: @std)
       end
+
+      def load_hash(hash)
+        initialize(hash[:mean], hash[:std], seed: hash[:seed])
+      end
     end
 
 
@@ -84,10 +97,6 @@ module DNN
       attr_reader :min
       attr_reader :max
 
-      def self.from_hash(hash)
-        self.new(hash[:min], hash[:max], seed: hash[:seed])
-      end
-
       # @param [Float] min Min of initialization value.
       # @param [Float] max Max of initialization value.
       def initialize(min = -0.05, max = 0.05, seed: true)
@@ -104,6 +113,10 @@ module DNN
       def to_hash
         super(min: @min, max: @max)
       end
+
+      def load_hash(hash)
+        initialize(hash[:min], hash[:max], seed: hash[:seed])
+      end
     end
 
 
@@ -10,8 +10,18 @@ module DNN
         self.new(*args).(x)
       end
 
+      def self.from_hash(hash)
+        return nil unless hash
+        layer_class = DNN.const_get(hash[:class])
+        layer = layer_class.allocate
+        raise DNN_Error.new("#{layer.class} is not an instance of #{self} class.") unless layer.is_a?(self)
+        layer.load_hash(hash)
+        layer
+      end
+
       def initialize
         @built = false
+        @name = nil
       end
 
       # Forward propagation and create a link.
@@ -57,10 +67,14 @@ module DNN
 
       # Layer to a hash.
       def to_hash(merge_hash = nil)
-        hash = { class: self.class.name }
+        hash = { class: self.class.name, name: @name }
         hash.merge!(merge_hash) if merge_hash
         hash
       end
+
+      def load_hash(hash)
+        initialize
+      end
     end
 
 
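`Layers::Layer` gains the same `from_hash`/`load_hash` pair, and `to_hash` now also records the layer's `@name` (initialized to nil). Because `from_hash` checks `layer.is_a?(self)`, deserializing through a base class doubles as a type check. A rough sketch, assuming the `LeakyReLU` from the activations hunks above lives under `DNN::Activations`:

    require "dnn"

    layer = DNN::Activations::LeakyReLU.new(0.2)
    hash = layer.to_hash   # => { class: "DNN::Activations::LeakyReLU", name: nil, alpha: 0.2 }

    copy = DNN::Layers::Layer.from_hash(hash)
    copy.class   # => DNN::Activations::LeakyReLU
    copy.alpha   # => 0.2
    # Passing this hash to, say, Losses::Loss.from_hash would raise DNN_Error,
    # because the allocated object is not a Loss.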
@@ -87,10 +101,6 @@ module DNN
         self.new(shape[1..-1]).(input)
       end
 
-      def self.from_hash(hash)
-        self.new(hash[:input_shape])
-      end
-
      # @param [Array] input_dim_or_shape Setting the shape or dimension of the input data.
       def initialize(input_dim_or_shape)
         super()
@@ -127,6 +137,10 @@ module DNN
       def to_hash
         super(input_shape: @input_shape)
       end
+
+      def load_hash(hash)
+        initialize(hash[:input_shape])
+      end
     end
 
 
@@ -196,15 +210,6 @@ module DNN
     class Dense < Connection
       attr_reader :num_nodes
 
-      def self.from_hash(hash)
-        self.new(hash[:num_nodes],
-                 weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
-                 bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
-                 weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
-                 bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
-                 use_bias: hash[:use_bias])
-      end
-
       # @param [Integer] num_nodes Number of nodes.
       def initialize(num_nodes,
                      weight_initializer: Initializers::RandomNormal.new,
@@ -250,6 +255,15 @@ module DNN
       def to_hash
         super(num_nodes: @num_nodes)
       end
+
+      def load_hash(hash)
+        initialize(hash[:num_nodes],
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+                   bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+                   use_bias: hash[:use_bias])
+      end
     end
 
 
@@ -269,10 +283,6 @@ module DNN
 
 
     class Reshape < Layer
-      def self.from_hash(hash)
-        self.new(hash[:output_shape])
-      end
-
       def initialize(output_shape)
         super()
         @output_shape = output_shape
@@ -293,6 +303,10 @@ module DNN
       def to_hash
         super(output_shape: @output_shape)
       end
+
+      def load_hash(hash)
+        initialize(hash[:output_shape])
+      end
     end
 
 
@@ -300,10 +314,6 @@ module DNN
       attr_accessor :dropout_ratio
       attr_reader :use_scale
 
-      def self.from_hash(hash)
-        self.new(hash[:dropout_ratio], seed: hash[:seed], use_scale: hash[:use_scale])
-      end
-
       # @param [Float] dropout_ratio Nodes dropout ratio.
       # @param [Integer] seed Seed of random number used for masking.
       # @param [Boolean] use_scale Set to true to scale the output according to the dropout ratio.
@@ -335,6 +345,10 @@ module DNN
       def to_hash
         super(dropout_ratio: @dropout_ratio, seed: @seed, use_scale: @use_scale)
       end
+
+      def load_hash(hash)
+        initialize(hash[:dropout_ratio], seed: hash[:seed], use_scale: hash[:use_scale])
+      end
     end
 
   end
@@ -2,6 +2,15 @@ module DNN
   module Losses
 
     class Loss
+      def self.from_hash(hash)
+        return nil unless hash
+        loss_class = DNN.const_get(hash[:class])
+        loss = loss_class.allocate
+        raise DNN_Error.new("#{loss.class} is not an instance of #{self} class.") unless loss.is_a?(self)
+        loss.load_hash(hash)
+        loss
+      end
+
       def loss(y, t, layers = nil)
         unless y.shape == t.shape
           raise DNN_ShapeError.new("The shape of y does not match the t shape. y shape is #{y.shape}, but t shape is #{t.shape}.")
@@ -40,6 +49,10 @@ module DNN
         hash.merge!(merge_hash) if merge_hash
         hash
       end
+
+      def load_hash(hash)
+        initialize
+      end
     end
 
     class MeanSquaredError < Loss
@@ -115,10 +128,6 @@ module DNN
     class SoftmaxCrossEntropy < Loss
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(eps: hash[:eps])
-      end
-
       def self.softmax(y)
         Xumo::NMath.exp(y) / Xumo::NMath.exp(y).sum(1, keepdims: true)
       end
@@ -141,16 +150,16 @@ module DNN
       def to_hash
         super(eps: @eps)
       end
+
+      def load_hash(hash)
+        initialize(eps: hash[:eps])
+      end
     end
 
 
     class SigmoidCrossEntropy < Loss
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(eps: hash[:eps])
-      end
-
       # @param [Float] eps Value to avoid nan.
       def initialize(eps: 1e-7)
         @eps = eps
@@ -168,6 +177,10 @@ module DNN
       def to_hash
         super(eps: @eps)
       end
+
+      def load_hash(hash)
+        initialize(eps: hash[:eps])
+      end
     end
 
   end
@@ -43,10 +43,6 @@ module DNN
     class Concatenate < MergeLayer
       attr_reader :axis
 
-      def self.from_hash(hash)
-        self.new(axis: hash[:axis])
-      end
-
       def initialize(axis: 1)
         super()
         @axis = axis
@@ -65,6 +61,10 @@ module DNN
       def to_hash
         super(axis: @axis)
       end
+
+      def load_hash(hash)
+        initialize(axis: hash[:axis])
+      end
     end
 
   end
@@ -10,10 +10,6 @@ module DNN
       attr_accessor :momentum
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(axis: hash[:axis], momentum: hash[:momentum])
-      end
-
       # @param [Integer] axis The axis to normalization.
       # @param [Float] momentum Exponential moving average of mean and variance.
      # @param [Float] eps Value to avoid division by zero.
@@ -68,6 +64,10 @@ module DNN
         super(axis: @axis, momentum: @momentum, eps: @eps)
       end
 
+      def load_hash(hash)
+        initialize(axis: hash[:axis], momentum: hash[:momentum])
+      end
+
       def get_params
         { gamma: @gamma, beta: @beta, running_mean: @running_mean, running_var: @running_var }
       end
@@ -6,8 +6,17 @@ module DNN
       attr_reader :status
       attr_accessor :clip_norm
 
+      def self.from_hash(hash)
+        return nil unless hash
+        optimizer_class = DNN.const_get(hash[:class])
+        optimizer = optimizer_class.allocate
+        raise DNN_Error.new("#{optimizer.class} is not an instance of #{self} class.") unless optimizer.is_a?(self)
+        optimizer.load_hash(hash)
+        optimizer
+      end
+
       def self.load(dumped)
-        opt = Utils.hash_to_obj(dumped[:hash])
+        opt = from_hash(dumped[:hash])
         dumped[:status].each do |key, state|
           state = state.clone
           opt.status[key] = state
@@ -56,6 +65,10 @@ module DNN
           param.grad *= rate
         end
       end
+
+      def load_hash(hash)
+        initialize(clip_norm: hash[:clip_norm])
+      end
     end
 
 
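`Optimizer.load` keeps its signature but now routes the `:hash` part of a dump through `from_hash` before restoring the per-parameter `status`. A sketch of a restore, where the `{ hash:, status: }` layout of `dumped` is inferred from the loader above (an empty status stands in for whatever the dump side produced):

    require "dnn"

    sgd = DNN::Optimizers::SGD.new(0.01, momentum: 0.9)
    dumped = { hash: sgd.to_hash, status: {} }   # hypothetical dump layout

    restored = DNN::Optimizers::Optimizer.load(dumped)
    restored.lr         # => 0.01
    restored.momentum   # => 0.9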
@@ -63,10 +76,6 @@ module DNN
       attr_accessor :lr
       attr_accessor :momentum
 
-      def self.from_hash(hash)
-        self.new(hash[:lr], momentum: hash[:momentum], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] lr Learning rate.
       # @param [Float] momentum Momentum coefficient.
       def initialize(lr = 0.01, momentum: 0, clip_norm: nil)
@@ -92,6 +101,10 @@ module DNN
           param.data -= amount
         end
       end
+
+      def load_hash(hash)
+        initialize(hash[:lr], momentum: hash[:momentum], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -115,10 +128,6 @@ module DNN
       attr_accessor :lr
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(hash[:lr], eps: hash[:eps], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] lr Learning rate.
       # @param [Float] eps Value to avoid division by zero.
       def initialize(lr = 0.01, eps: 1e-7, clip_norm: nil)
@@ -140,6 +149,10 @@ module DNN
       def to_hash
         super(lr: @lr, eps: @eps)
       end
+
+      def load_hash(hash)
+        initialize(hash[:lr], eps: hash[:eps], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -148,10 +161,6 @@ module DNN
       attr_accessor :alpha
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(hash[:lr], alpha: hash[:alpha], eps: hash[:eps], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] lr Learning rate.
       # @param [Float] alpha Moving average index of past slopes.
       # @param [Float] eps Value to avoid division by zero.
@@ -175,6 +184,10 @@ module DNN
           param.data -= (@lr / Xumo::NMath.sqrt(@g[param.name] + @eps)) * param.grad
         end
       end
+
+      def load_hash(hash)
+        initialize(hash[:lr], alpha: hash[:alpha], eps: hash[:eps], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -182,10 +195,6 @@ module DNN
       attr_accessor :rho
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(rho: hash[:rho], eps: hash[:eps], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] rho Moving average index of past slopes.
       # @param [Float] eps Value to avoid division by zero.
       def initialize(rho: 0.95, eps: 1e-6, clip_norm: nil)
@@ -211,6 +220,10 @@ module DNN
           param.data -= v
         end
       end
+
+      def load_hash(hash)
+        initialize(rho: hash[:rho], eps: hash[:eps], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -219,10 +232,6 @@ module DNN
       attr_accessor :alpha
       attr_accessor :eps
 
-      def self.from_hash(hash)
-        self.new(hash[:lr], alpha: hash[:alpha], eps: hash[:eps], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] lr Learning rate.
       # @param [Float] alpha Moving average index of past slopes.
       # @param [Float] eps Value to avoid division by zero.
@@ -249,6 +258,10 @@ module DNN
           param.data -= (@lr / Xumo::NMath.sqrt(@v[param.name] - @m[param.name] ** 2 + @eps)) * param.grad
         end
       end
+
+      def load_hash(hash)
+        initialize(hash[:lr], alpha: hash[:alpha], eps: hash[:eps], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -259,11 +272,6 @@ module DNN
       attr_accessor :eps
       attr_reader :amsgrad
 
-      def self.from_hash(hash)
-        self.new(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
-                 eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] alpha Value used to calculate learning rate.
       # @param [Float] beta1 Moving average index of beta1.
       # @param [Float] beta2 Moving average index of beta2.
@@ -307,6 +315,11 @@ module DNN
           end
         end
       end
+
+      def load_hash(hash)
+        initialize(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
+                   eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
+      end
     end
 
 
@@ -314,11 +327,6 @@ module DNN
       attr_accessor :final_lr
       attr_accessor :gamma
 
-      def self.from_hash(hash)
-        self.new(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
-                 final_lr: hash[:final_lr], gamma: hash[:gamma], eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
-      end
-
       # @param [Float] final_lr Final learning rate.
       # @param [Float] gamma Lower and upper range value.
       def initialize(alpha: 0.001, beta1: 0.9, beta2: 0.999, final_lr: 0.1, gamma: 0.001, eps: 1e-7, amsgrad: false, clip_norm: nil)
@@ -360,6 +368,11 @@ module DNN
         lr[lr > upper_bound] = upper_bound
         lr
       end
+
+      def load_hash(hash)
+        initialize(alpha: hash[:alpha], beta1: hash[:beta1], beta2: hash[:beta2],
+                   final_lr: hash[:final_lr], gamma: hash[:gamma], eps: hash[:eps], amsgrad: hash[:amsgrad], clip_norm: hash[:clip_norm])
+      end
     end
 
   end
@@ -4,6 +4,15 @@ module DNN
     class Regularizer
       attr_accessor :param
 
+      def self.from_hash(hash)
+        return nil unless hash
+        regularizer_class = DNN.const_get(hash[:class])
+        regularizer = regularizer_class.allocate
+        raise DNN_Error.new("#{regularizer.class} is not an instance of #{self} class.") unless regularizer.is_a?(self)
+        regularizer.load_hash(hash)
+        regularizer
+      end
+
       def forward(x)
         raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'forward'")
       end
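Note the `return nil unless hash` guard: regularizers are optional, so a nil slot in a serialized layer hash deserializes back to nil, which the `load_hash` methods of the connection layers above rely on. For instance:

    DNN::Regularizers::Regularizer.from_hash(nil)   # => nil
    l1 = DNN::Regularizers::L1.new(0.01)
    DNN::Regularizers::Regularizer.from_hash(l1.to_hash).l1_lambda   # => 0.01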
@@ -17,15 +26,15 @@ module DNN
         hash.merge!(merge_hash)
         hash
       end
+
+      def load_hash(hash)
+        raise NotImplementedError.new("Class '#{self.class.name}' has implement method 'load_hash'")
+      end
     end
 
     class L1 < Regularizer
       attr_accessor :l1_lambda
 
-      def self.from_hash(hash)
-        self.new(hash[:l1_lambda])
-      end
-
       # @param [Float] l1_lambda L1 regularizer coefficient.
       def initialize(l1_lambda = 0.01)
         @l1_lambda = l1_lambda
@@ -44,16 +53,16 @@ module DNN
       def to_hash
         super(l1_lambda: @l1_lambda)
       end
+
+      def load_hash(hash)
+        initialize(hash[:l1_lambda])
+      end
     end
 
 
     class L2 < Regularizer
       attr_accessor :l2_lambda
 
-      def self.from_hash(hash)
-        self.new(hash[:l2_lambda])
-      end
-
      # @param [Float] l2_lambda L2 regularizer coefficient.
       def initialize(l2_lambda = 0.01)
         @l2_lambda = l2_lambda
@@ -70,16 +79,16 @@ module DNN
       def to_hash
         super(l2_lambda: @l2_lambda)
       end
+
+      def load_hash(hash)
+        initialize(hash[:l2_lambda])
+      end
     end
 
     class L1L2 < Regularizer
       attr_accessor :l1_lambda
       attr_accessor :l2_lambda
 
-      def self.from_hash(hash)
-        self.new(hash[:l1_lambda], hash[:l2_lambda])
-      end
-
       # @param [Float] l1_lambda L1 regularizer coefficient.
       # @param [Float] l2_lambda L2 regularizer coefficient.
       def initialize(l1_lambda = 0.01, l2_lambda = 0.01)
@@ -103,6 +112,10 @@ module DNN
       def to_hash
         super(l1_lambda: l1_lambda, l2_lambda: l2_lambda)
       end
+
+      def load_hash(hash)
+        initialize(hash[:l1_lambda], hash[:l2_lambda])
+      end
     end
 
   end
@@ -11,19 +11,6 @@ module DNN
       attr_reader :recurrent_weight_initializer
       attr_reader :recurrent_weight_regularizer
 
-      def self.from_hash(hash)
-        self.new(hash[:num_nodes],
-                 stateful: hash[:stateful],
-                 return_sequences: hash[:return_sequences],
-                 weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
-                 recurrent_weight_initializer: Utils.hash_to_obj(hash[:recurrent_weight_initializer]),
-                 bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
-                 weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
-                 recurrent_weight_regularizer: Utils.hash_to_obj(hash[:recurrent_weight_regularizer]),
-                 bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
-                 use_bias: hash[:use_bias])
-      end
-
       # @param [Integer] num_nodes Number of nodes.
       # @param [Boolean] stateful Maintain state between batches.
       # @param [Boolean] return_sequences Set the false, only the last of each cell of RNN is left.
@@ -105,6 +92,19 @@ module DNN
         super(hash)
       end
 
+      def load_hash(hash)
+        initialize(hash[:num_nodes],
+                   stateful: hash[:stateful],
+                   return_sequences: hash[:return_sequences],
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
+                   bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+                   recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
+                   bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+                   use_bias: hash[:use_bias])
+      end
+
       def get_params
         { weight: @weight, recurrent_weight: @recurrent_weight, bias: @bias, hidden: @hidden }
       end
@@ -166,20 +166,6 @@ module DNN
     class SimpleRNN < RNN
       attr_reader :activation
 
-      def self.from_hash(hash)
-        self.new(hash[:num_nodes],
-                 stateful: hash[:stateful],
-                 return_sequences: hash[:return_sequences],
-                 activation: Utils.hash_to_obj(hash[:activation]),
-                 weight_initializer: Utils.hash_to_obj(hash[:weight_initializer]),
-                 recurrent_weight_initializer: Utils.hash_to_obj(hash[:recurrent_weight_initializer]),
-                 bias_initializer: Utils.hash_to_obj(hash[:bias_initializer]),
-                 weight_regularizer: Utils.hash_to_obj(hash[:weight_regularizer]),
-                 recurrent_weight_regularizer: Utils.hash_to_obj(hash[:recurrent_weight_regularizer]),
-                 bias_regularizer: Utils.hash_to_obj(hash[:bias_regularizer]),
-                 use_bias: hash[:use_bias])
-      end
-
       # @param [DNN::Layers::Layer] activation Activation function to use in a recurrent network.
       def initialize(num_nodes,
                      stateful: false,
@@ -220,6 +206,20 @@ module DNN
       def to_hash
         super(activation: @activation.to_hash)
       end
+
+      def load_hash(hash)
+        initialize(hash[:num_nodes],
+                   stateful: hash[:stateful],
+                   return_sequences: hash[:return_sequences],
+                   activation: Layers::Layer.from_hash(hash[:activation]),
+                   weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
+                   recurrent_weight_initializer: Initializers::Initializer.from_hash(hash[:recurrent_weight_initializer]),
+                   bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
+                   weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
+                   recurrent_weight_regularizer: Regularizers::Regularizer.from_hash(hash[:recurrent_weight_regularizer]),
+                   bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
+                   use_bias: hash[:use_bias])
+      end
     end
 
 
@@ -35,7 +35,7 @@ module DNN
       private def load_bin(bin)
         data = Marshal.load(Zlib::Inflate.inflate(bin))
         opt = Optimizers::Optimizer.load(data[:optimizer])
-        loss_func = Utils.hash_to_obj(data[:loss_func])
+        loss_func = Losses::Loss.from_hash(data[:loss_func])
         @model.setup(opt, loss_func)
         @model.predict1(Xumo::SFloat.zeros(*data[:input_shape]))
         set_all_params_data(data[:params])
@@ -47,8 +47,8 @@ module DNN
 
       def load_bin(bin)
         data = JSON.parse(bin, symbolize_names: true)
-        opt = Utils.hash_to_obj(data[:optimizer])
-        loss_func = Utils.hash_to_obj(data[:loss_func])
+        opt = Optimizers::Optimizer.from_hash(data[:optimizer])
+        loss_func = Losses::Loss.from_hash(data[:loss_func])
         @model.setup(opt, loss_func)
         @model.predict1(Xumo::SFloat.zeros(*data[:input_shape]))
         base64_to_params_data(data[:params])
@@ -18,10 +18,7 @@ module DNN
     def self.hash_to_obj(hash)
       return nil if hash == nil
       dnn_class = DNN.const_get(hash[:class])
-      if dnn_class.respond_to?(:from_hash)
-        return dnn_class.from_hash(hash)
-      end
-      dnn_class.new
+      dnn_class.from_hash(hash)
     end
 
     # Return the result of the sigmoid function.
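`Utils.hash_to_obj` survives as a thin compatibility wrapper: it resolves the class and delegates to its `from_hash`, which every serializable base class now defines, so existing call sites keep working. For example:

    hash = { class: "DNN::Regularizers::L1", l1_lambda: 0.01 }
    obj = DNN::Utils.hash_to_obj(hash)   # resolves L1, then calls L1.from_hash(hash)
    obj.l1_lambda                        # => 0.01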
data/lib/dnn/version.rb CHANGED
@@ -1,3 +1,3 @@
 module DNN
-  VERSION = "0.13.1"
+  VERSION = "0.13.2"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-dnn
 version: !ruby/object:Gem::Version
-  version: 0.13.1
+  version: 0.13.2
 platform: ruby
 authors:
 - unagiootoro
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-09-21 00:00:00.000000000 Z
+date: 2019-09-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: numo-narray