CooCoo 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +16 -0
- data/CooCoo.gemspec +47 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +88 -0
- data/README.md +123 -0
- data/Rakefile +81 -0
- data/bin/cuda-dev-info +25 -0
- data/bin/cuda-free +28 -0
- data/bin/cuda-free-trend +7 -0
- data/bin/ffi-gen +267 -0
- data/bin/spec_runner_html.sh +42 -0
- data/bin/trainer +198 -0
- data/bin/trend-cost +13 -0
- data/examples/char-rnn.rb +405 -0
- data/examples/cifar/cifar.rb +94 -0
- data/examples/img-similarity.rb +201 -0
- data/examples/math_ops.rb +57 -0
- data/examples/mnist.rb +365 -0
- data/examples/mnist_classifier.rb +293 -0
- data/examples/mnist_dream.rb +214 -0
- data/examples/seeds.rb +268 -0
- data/examples/seeds_dataset.txt +210 -0
- data/examples/t10k-images-idx3-ubyte +0 -0
- data/examples/t10k-labels-idx1-ubyte +0 -0
- data/examples/train-images-idx3-ubyte +0 -0
- data/examples/train-labels-idx1-ubyte +0 -0
- data/ext/buffer/Rakefile +50 -0
- data/ext/buffer/buffer.pre.cu +727 -0
- data/ext/buffer/matrix.pre.cu +49 -0
- data/lib/CooCoo.rb +1 -0
- data/lib/coo-coo.rb +18 -0
- data/lib/coo-coo/activation_functions.rb +344 -0
- data/lib/coo-coo/consts.rb +5 -0
- data/lib/coo-coo/convolution.rb +298 -0
- data/lib/coo-coo/core_ext.rb +75 -0
- data/lib/coo-coo/cost_functions.rb +91 -0
- data/lib/coo-coo/cuda.rb +116 -0
- data/lib/coo-coo/cuda/device_buffer.rb +240 -0
- data/lib/coo-coo/cuda/device_buffer/ffi.rb +109 -0
- data/lib/coo-coo/cuda/error.rb +51 -0
- data/lib/coo-coo/cuda/host_buffer.rb +117 -0
- data/lib/coo-coo/cuda/runtime.rb +157 -0
- data/lib/coo-coo/cuda/vector.rb +315 -0
- data/lib/coo-coo/data_sources.rb +2 -0
- data/lib/coo-coo/data_sources/xournal.rb +25 -0
- data/lib/coo-coo/data_sources/xournal/bitmap_stream.rb +197 -0
- data/lib/coo-coo/data_sources/xournal/document.rb +377 -0
- data/lib/coo-coo/data_sources/xournal/loader.rb +144 -0
- data/lib/coo-coo/data_sources/xournal/renderer.rb +101 -0
- data/lib/coo-coo/data_sources/xournal/saver.rb +99 -0
- data/lib/coo-coo/data_sources/xournal/training_document.rb +78 -0
- data/lib/coo-coo/data_sources/xournal/training_document/constants.rb +15 -0
- data/lib/coo-coo/data_sources/xournal/training_document/document_maker.rb +89 -0
- data/lib/coo-coo/data_sources/xournal/training_document/document_reader.rb +105 -0
- data/lib/coo-coo/data_sources/xournal/training_document/example.rb +37 -0
- data/lib/coo-coo/data_sources/xournal/training_document/sets.rb +76 -0
- data/lib/coo-coo/debug.rb +8 -0
- data/lib/coo-coo/dot.rb +129 -0
- data/lib/coo-coo/drawing.rb +4 -0
- data/lib/coo-coo/drawing/cairo_canvas.rb +100 -0
- data/lib/coo-coo/drawing/canvas.rb +68 -0
- data/lib/coo-coo/drawing/chunky_canvas.rb +101 -0
- data/lib/coo-coo/drawing/sixel.rb +214 -0
- data/lib/coo-coo/enum.rb +17 -0
- data/lib/coo-coo/from_name.rb +58 -0
- data/lib/coo-coo/fully_connected_layer.rb +205 -0
- data/lib/coo-coo/generation_script.rb +38 -0
- data/lib/coo-coo/grapher.rb +140 -0
- data/lib/coo-coo/image.rb +286 -0
- data/lib/coo-coo/layer.rb +67 -0
- data/lib/coo-coo/layer_factory.rb +26 -0
- data/lib/coo-coo/linear_layer.rb +59 -0
- data/lib/coo-coo/math.rb +607 -0
- data/lib/coo-coo/math/abstract_vector.rb +121 -0
- data/lib/coo-coo/math/functions.rb +39 -0
- data/lib/coo-coo/math/interpolation.rb +7 -0
- data/lib/coo-coo/network.rb +264 -0
- data/lib/coo-coo/neuron.rb +112 -0
- data/lib/coo-coo/neuron_layer.rb +168 -0
- data/lib/coo-coo/option_parser.rb +18 -0
- data/lib/coo-coo/platform.rb +17 -0
- data/lib/coo-coo/progress_bar.rb +11 -0
- data/lib/coo-coo/recurrence/backend.rb +99 -0
- data/lib/coo-coo/recurrence/frontend.rb +101 -0
- data/lib/coo-coo/sequence.rb +187 -0
- data/lib/coo-coo/shell.rb +2 -0
- data/lib/coo-coo/temporal_network.rb +291 -0
- data/lib/coo-coo/trainer.rb +21 -0
- data/lib/coo-coo/trainer/base.rb +67 -0
- data/lib/coo-coo/trainer/batch.rb +82 -0
- data/lib/coo-coo/trainer/batch_stats.rb +27 -0
- data/lib/coo-coo/trainer/momentum_stochastic.rb +59 -0
- data/lib/coo-coo/trainer/stochastic.rb +47 -0
- data/lib/coo-coo/transformer.rb +272 -0
- data/lib/coo-coo/vector_layer.rb +194 -0
- data/lib/coo-coo/version.rb +3 -0
- data/lib/coo-coo/weight_deltas.rb +23 -0
- data/prototypes/convolution.rb +116 -0
- data/prototypes/linear_drop.rb +51 -0
- data/prototypes/recurrent_layers.rb +79 -0
- data/www/images/screamer.png +0 -0
- data/www/images/screamer.xcf +0 -0
- data/www/index.html +82 -0
- metadata +373 -0
data/lib/coo-coo/math/abstract_vector.rb
@@ -0,0 +1,121 @@
module CooCoo
  module Math
    class AbstractVector
      def self.rand(length, range = nil)
        new(length) do |i|
          args = [ range ] if range
          Random.rand(*args)
        end
      end

      def self.zeros(length)
        new(length, 0.0)
      end

      def self.ones(length)
        new(length, 1.0)
      end

      def zero
        self.class.zeros(size)
      end

      def max
        minmax[1]
      end

      def min
        minmax[0]
      end

      def minmax
        each.minmax
      end

      def minmax_normalize(use_zeros = false)
        min, max = minmax
        delta = (max - min)
        if use_zeros && delta == 0.0
          zero
        else
          (self - min) / delta
        end
      end

      [ :log, :log2, :log10, :sqrt ].each do |op|
        define_method(op) do
          self.class[each.collect(&op)]
        end
      end

      def slice_2d(src_width, src_height, origin_x, origin_y, width, height, initial = 0.0)
        samples = height.times.collect do |y|
          py = origin_y + y

          width.times.collect do |x|
            px = origin_x + x
            if px >= 0 && px < src_width
              i = py * src_width + px
              if i >= 0 && i < size
                self[i]
              else
                initial
              end
            else
              initial
            end
          end
        end.flatten

        self.class[samples]
      end

      def set2d!(width, src, src_width, x, y)
        raise ArgumentError.new("src's size needs to be a multiple of the width") if src.kind_of?(self.class) && src.size % src_width > 0

        src.each_slice(src_width).with_index do |row, i|
          index = (y + i) * width + x
          next if index >= size
          row.each_with_index do |p, px|
            break if (x + px) >= width
            self[index.to_i + px] = p
          end
        end

        self
      end

      def collect_equal?(n)
        if n.respond_to?(:each)
          self.class[each.zip(n).collect { |a, b| a == b ? 1.0 : 0.0 }]
        else
          self.class[each.collect { |e| e == n ? 1.0 : 0.0 }]
        end
      end

      def collect_not_equal?(n)
        if n.respond_to?(:each)
          self.class[each.zip(n).collect { |a, b| a != b ? 1.0 : 0.0 }]
        else
          self.class[each.collect { |e| e != n ? 1.0 : 0.0 }]
        end
      end

      def collect_nan?
        self.class[each.collect { |e| e.nan? ? 1.0 : 0.0 }]
      end

      def nan?
        each.any?(&:nan?)
      end

      def collect_infinite?
        self.class[each.collect { |e| e.infinite? ? 1.0 : 0.0 }]
      end

      def infinite?
        each.any?(&:infinite?)
      end
    end
  end
end
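AbstractVector supplies only the generic helpers; element storage, each, [], size, and the arithmetic operators come from a concrete subclass. The sketch below is a hypothetical usage example, assuming the gem is installed and that CooCoo::Vector (lib/coo-coo/math.rb) is such a subclass, as its use in network.rb and neuron.rb suggests; the values in the comments are worked out from the definitions above, not captured output.

require 'coo-coo'

v = CooCoo::Vector[[ 3.0, 1.0, 2.0, 4.0 ]]

v.minmax                             # => [1.0, 4.0]
v.minmax_normalize.to_a              # each element becomes (x - min) / (max - min)

# slice_2d reads the vector as a row-major 2x2 grid; samples that fall outside
# the source are filled with `initial` (0.0 by default).
v.slice_2d(2, 2, 1, 0, 2, 2).to_a    # => [1.0, 0.0, 4.0, 0.0]

v.collect_equal?(4.0).to_a           # => [0.0, 0.0, 0.0, 1.0]
v.nan?                               # => false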
data/lib/coo-coo/math/functions.rb
@@ -0,0 +1,39 @@
module CooCoo
  module Math
    class << self
      def max(a, b)
        if a
          if b
            (a >= b) ? a : b
          else
            a
          end
        else
          b
        end
      end

      def min(a, b)
        if a
          if b
            (a <= b) ? a : b
          else
            a
          end
        else
          b
        end
      end

      def clamp(n, min, max)
        if n < min
          min
        elsif n > max
          max
        else
          n
        end
      end
    end
  end
end
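Unlike Ruby's built-in comparisons, these max and min tolerate nil operands (the non-nil argument wins), and clamp covers what Comparable#clamp (Ruby 2.4+) does for plain numerics. A minimal sketch, assuming the top-level require pulls in the math helpers as lib/coo-coo.rb appears to:

require 'coo-coo'

CooCoo::Math.max(3, 7)              # => 7
CooCoo::Math.max(3, nil)            # => 3   (nil falls back to the other operand)
CooCoo::Math.min(nil, 7)            # => 7
CooCoo::Math.clamp(1.5, 0.0, 1.0)   # => 1.0
CooCoo::Math.clamp(-2.0, 0.0, 1.0)  # => 0.0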
data/lib/coo-coo/network.rb
@@ -0,0 +1,264 @@
require 'yaml'
require 'coo-coo/consts'
require 'coo-coo/debug'
require 'coo-coo/core_ext'
require 'coo-coo/math'
require 'coo-coo/layer'
require 'coo-coo/enum'
require 'coo-coo/cost_functions'

module CooCoo
  class Network
    attr_reader :age, :activation_function
    attr_accessor :command, :comments

    def initialize
      @layers = Array.new
      @age = 0
      @command = [ $0 ] + ARGV
      yield(self) if block_given?
    end

    def num_inputs
      @layers.first.num_inputs
    end

    def num_outputs
      @layers.last.size
    end

    def num_layers
      @layers.size
    end

    def layers
      @layers
    end

    def layer_index(layer)
      @layers.find_index { |l| l.eql?(layer) }
    end

    def layer(new_layer)
      @layers << new_layer
      self
    end

    def activation_function
      unless @activation_function
        layer = @layers.find { |l| l.activation_function }
        @activation_function = layer.activation_function
      end

      @activation_function
    end

    def output_activation_function
      unless @output_activation_function
        layer = @layers.reverse.find { |l| l.activation_function }
        @output_activation_function = layer.activation_function
      end

      @output_activation_function
    end

    def prep_input(input)
      activation_function.prep_input(input)
    end

    def prep_output_target(target)
      output_activation_function.prep_output_target(target)
    end

    def final_output(outputs)
      outputs.last
    end

    def forward(input, hidden_state = nil, flattened = false, processed = false)
      unless flattened || input.kind_of?(CooCoo::Vector)
        input = CooCoo::Vector[input.to_a.flatten, num_inputs]
      end

      hidden_state ||= Hash.new

      output = if processed
                 input
               else
                 prep_input(input)
               end

      outputs = @layers.each_with_index.inject([]) do |acc, (layer, i)|
        #debug("Layer: #{i} #{layer.num_inputs} #{layer.size}")
        #debug("Input: #{input}")
        #debug("Weights: #{layer.neurons[0].weights}")
        output, hidden_state = layer.forward(output, hidden_state)
        acc << output
        #debug("Output: #{input}")
      end

      return outputs, hidden_state
    end

    def predict(input, hidden_state = nil, flattened = false, processed = false)
      hidden_state ||= Hash.new
      outputs, hidden_state = forward(input, hidden_state, flattened, processed)
      out = final_output(outputs)
      return out, hidden_state
    end

    def backprop(inputs, outputs, errors, hidden_state = nil)
      hidden_state ||= Hash.new
      d = @layers.reverse_each.each_with_index.inject([]) do |acc, (layer, i)|
        input = if i < (@layers.size - 1)
                  outputs[@layers.size - i - 2]
                else
                  prep_input(inputs) # TODO condition prep_input
                end
        #CooCoo.debug("#{self.class.name}.#{__method__}\t#{i} #{@layers.size - i - 1}\t#{input.size}\t#{outputs.size}")
        deltas, hidden_state = layer.backprop(input,
                                              outputs[@layers.size - i - 1],
                                              errors,
                                              hidden_state)
        errors = layer.transfer_error(deltas)
        acc.unshift(deltas)
      end

      return Sequence[d], hidden_state
    end

    def transfer_errors(deltas)
      @layers.zip(deltas).collect do |layer, delta|
        layer.transfer_error(delta)
      end
    end

    def update_weights!(input, outputs, deltas)
      adjust_weights!(weight_deltas(input, outputs, deltas))
      self
    end

    def adjust_weights!(deltas)
      @layers.each_with_index do |layer, i|
        layer.adjust_weights!(deltas[i])
      end

      @age += 1
      self
    end

    def weight_deltas(input, outputs, deltas)
      d = @layers.each_with_index.collect do |layer, i|
        inputs = if i != 0
                   outputs[i - 1]
                 else
                   prep_input(input)
                 end
        layer.weight_deltas(inputs, deltas[i])
      end

      d
    end

    def learn(input, expecting, rate, cost_function = CostFunctions::MeanSquare, hidden_state = nil)
      hidden_state ||= Hash.new
      output, hidden_state = forward(input, hidden_state)
      cost = cost_function.derivative(prep_input(expecting), output.last)
      deltas, hidden_state = backprop(input, output, cost, hidden_state)
      update_weights!(input, output, deltas * rate)
      return self, hidden_state
    rescue
      CooCoo.debug("Network#learn caught #{$!}", input, expecting)
      raise
    end

    def save(path)
      File.write_to(path) do |f|
        f.write(to_hash.to_yaml)
      end
    end

    def load!(path)
      yaml = YAML.load(File.read(path))
      raise RuntimeError.new("Invalid YAML definition in #{path}") if yaml.nil?

      update_from_hash!(yaml)

      self
    end

    def update_from_hash!(h)
      @layers = Array.new

      h[:layers].each do |layer_hash|
        @layers << CooCoo::LayerFactory.from_hash(layer_hash, self)
      end

      @age = h.fetch(:age, 0)
      @command = h.fetch(:command, nil)
      @comments = h.fetch(:comments) { Array.new }

      self
    end

    def to_hash
      { age: @age,
        command: @command,
        comments: @comments,
        layers: @layers.collect { |l| l.to_hash(self) }
      }
    end

    class << self
      def from_a(layers)
        self.new().update_from_a!(layers)
      end

      def from_hash(h)
        self.new.update_from_hash!(h)
      end

      def load(path)
        self.new().load!(path)
      end
    end
  end
end

if __FILE__ == $0
  SIZE = 10
  net = CooCoo::Network.new()
  net.layer(CooCoo::Layer.new(SIZE, SIZE / 2))
  #net.layer(CooCoo::Layer.new(3, 3))
  net.layer(CooCoo::Layer.new(SIZE / 2, SIZE / 2))
  net.layer(CooCoo::Layer.new(SIZE / 2, 2))

  inputs = 3.times.collect do |i|
    CooCoo::Vector.zeros(SIZE)
  end
  inputs[0][0] = 1.0
  inputs[1][2] = 1.0
  inputs[2][3] = 1.0
  targets = [ [ 1.0, 0.0 ],
              [ 0.0, 1.0 ],
              [ 0.0, 1.0 ]
            ].collect do |v|
    CooCoo::Vector[v]
  end

  ENV.fetch('LOOPS', 100).to_i.times do |i|
    targets.zip(inputs).each do |target, input|
      net.learn(input, target, 0.3)
    end
  end

  inputs.each.zip(targets) do |input, target|
    output, hidden_state = net.forward(input)
    err = (net.prep_input(target) - output.last)
    puts("#{input} -> #{target}\t#{err}")
    output.each_with_index do |o, i|
      puts("\tLayer #{i}:\t#{o}")
    end
  end

  puts(net.to_hash)
end
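A hedged end-to-end sketch using only methods shown above: build a network (mirroring the layer construction in the __FILE__ == $0 demo), train it with learn, then round-trip it through save and Network.load. The layer sizes, file name, and loop count are arbitrary illustrations, not values from the gem.

require 'coo-coo'

net = CooCoo::Network.new do |n|
  n.layer(CooCoo::Layer.new(4, 8))   # 4 inputs -> 8 neurons
  n.layer(CooCoo::Layer.new(8, 2))   # 8 inputs -> 2 outputs
end

input  = CooCoo::Vector[[ 1.0, 0.0, 0.0, 0.0 ]]
target = CooCoo::Vector[[ 1.0, 0.0 ]]

100.times { net.learn(input, target, 0.3) }   # default cost: CostFunctions::MeanSquare

out, _hidden = net.predict(input)             # predict returns only the last layer's output
puts("#{out} after #{net.age} weight updates")

net.save('tiny-net.yml')                      # YAML dump of Network#to_hash
copy = CooCoo::Network.load('tiny-net.yml')   # load -> load! -> update_from_hash!
puts(copy.num_layers)                         # => 2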
data/lib/coo-coo/neuron.rb
@@ -0,0 +1,112 @@
require 'coo-coo/consts'
require 'coo-coo/debug'
require 'coo-coo/math'
require 'coo-coo/enum'
require 'coo-coo/activation_functions'

module CooCoo
  class Neuron
    def initialize(num_inputs, activation_func = CooCoo.default_activation)
      @num_inputs = num_inputs
      @activation_func = activation_func
      @weights = @activation_func.initial_weights(num_inputs, 1)
      @bias = @activation_func.initial_bias(1)[0]
    end

    def to_hash
      { num_inputs: @num_inputs,
        weights: @weights.to_a,
        bias: @bias,
        f: @activation_func.name
      }
    end

    def update_from_hash!(h)
      @num_inputs = h.fetch(:num_inputs, h.fetch(:weights, []).size)
      @weights = CooCoo::Vector[h[:weights]]
      @activation_func = CooCoo::ActivationFunctions.from_name(h[:f] || CooCoo.default_activation.name)
      @bias = h.fetch(:bias, @activation_func.initial_bias(1)[0])
      self
    end

    def self.from_hash(h)
      self.new(h[:num_inputs] || h[:weights].size).update_from_hash!(h)
    end

    attr_reader :num_inputs
    attr_reader :weights
    attr_reader :bias

    def forward(input)
      transfer(activate(input))
    end

    def activate(input)
      (@weights * input).sum + @bias
    end

    def transfer(activation)
      @activation_func.call(activation)
    end

    def backprop(input, output, error)
      # Properly: error * @activation_func.derivative(activate(input), output)
      error * @activation_func.derivative(nil, output)
    end

    def transfer_error(delta)
      @weights * delta
    end

    def weight_deltas(inputs, delta)
      [ delta, inputs * delta ]
    rescue
      CooCoo.debug("#{$!}\n\t#{inputs.class}\t#{inputs}\n\t#{@weights.class}\t#{@weights}\n\t#{delta.class}\t#{delta}")
      raise
    end

    def update_weights!(inputs, delta)
      adjust_weights!(*weight_deltas(inputs, delta))
    end

    def adjust_weights!(bias_delta, weight_deltas)
      @bias -= bias_delta
      @weights -= weight_deltas
    end

    def ==(other)
      if other.kind_of?(self.class)
        num_inputs == other.num_inputs && @weights == other.weights
      else
        false
      end
    end
  end
end

if __FILE__ == $0
  require 'coo-coo/cost_functions'

  n = CooCoo::Neuron.from_hash({ f: ENV.fetch("ACTIVATION", "Logistic"),
                                 weights: [ 0.5, 0.5 ]
                               })
  inputs = [ CooCoo::Vector[[ 0.25, 0.75 ]], CooCoo::Vector[[ 0.0, 1.0 ]] ]
  targets = [ 0.0, 1.0 ]

  ENV.fetch('LOOPS', 100).to_i.times do |i|
    inputs.zip(targets).each do |input, target|
      puts("#{i}: #{input} -> #{target}")
      o = n.forward(input)
      err1 = CooCoo::CostFunctions::MeanSquare.derivative(target, o)
      puts("\tPre: #{input} * #{n.weights} = #{o}\t#{err1}\t#{CooCoo::CostFunctions::MeanSquare.call(target, o)}")
      delta = n.backprop(input, o, err1)
      puts("\tDelta: #{delta}")
      n.update_weights!(input, delta * 0.3)
      o = n.forward(input)
      err2 = CooCoo::CostFunctions::MeanSquare.derivative(target, o)
      puts("\tPost: #{input} * #{n.weights} = #{o}\t#{err2}")
      puts("\tChange in Cost: #{err2} - #{err1} = #{err2 - err1}")
      puts("")
    end
  end
end
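A worked forward pass through a single Neuron using only the methods above. The :bias key is set explicitly so the arithmetic is deterministic, and "Logistic" is assumed to be the usual sigmoid 1/(1 + e^-x); its actual definition lives in coo-coo/activation_functions.rb, which is not shown here.

require 'coo-coo'

n = CooCoo::Neuron.from_hash({ f: 'Logistic',
                               weights: [ 0.5, 0.5 ],
                               bias: 0.0 })
x = CooCoo::Vector[[ 0.25, 0.75 ]]

a = n.activate(x)   # (0.5 * 0.25) + (0.5 * 0.75) + 0.0 = 0.5
o = n.forward(x)    # transfer(0.5) ~= 0.6225 if Logistic is the standard sigmoid
puts("activation = #{a}, output = #{o}")

# transfer_error pushes a scalar delta back through the weights, yielding the
# per-input error a preceding layer would receive.
n.transfer_error(0.1).to_a   # => [0.05, 0.05]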