synaptical 0.0.1.pre.beta1

@@ -0,0 +1,78 @@
+ # frozen_string_literal: true
+
+ module Synaptical
+   # Representation of a connection between layers
+   class LayerConnection
+     attr_reader :id, :from, :to, :selfconnection, :type, :connections, :list,
+                 :size, :gatedfrom
+
+     def initialize(from, to, type, weights)
+       @id = self.class.uid
+       @from = from
+       @to = to
+       @selfconnection = to == from
+       @type = type
+       @connections = {}
+       @list = []
+       @size = 0
+       @gatedfrom = []
+
+       init_type
+
+       connect!(weights)
+     end
+
+     # Initialize connection type if not provided
+     def init_type
+       return unless type.nil?
+
+       @type = if from == to
+                 Synaptical::Layer::CONNECTION_TYPE[:ONE_TO_ONE]
+               else
+                 Synaptical::Layer::CONNECTION_TYPE[:ALL_TO_ALL]
+               end
+     end
+
+     # Connect the neurons in the two layers according to the connection type
+     def connect!(weights)
+       if type == Synaptical::Layer::CONNECTION_TYPE[:ALL_TO_ALL] ||
+          type == Synaptical::Layer::CONNECTION_TYPE[:ALL_TO_ELSE]
+         from.list.each do |from_neuron|
+           to.list.each do |to_neuron|
+             if type == Synaptical::Layer::CONNECTION_TYPE[:ALL_TO_ELSE] &&
+                from_neuron == to_neuron
+               next
+             end
+
+             connection = from_neuron.project(to_neuron, weights)
+             @connections[connection.id] = connection
+             list.push(connection)
+             @size = list.size
+           end
+         end
+       elsif type == Synaptical::Layer::CONNECTION_TYPE[:ONE_TO_ONE]
+         from.list.each_with_index do |from_neuron, idx|
+           to_neuron = to.list[idx]
+           connection = from_neuron.project(to_neuron, weights)
+
+           @connections[connection.id] = connection
+           list.push(connection)
+           @size = list.size
+         end
+       end
+
+       from.connected_to << self
+     end
+
+     class << self
+       attr_reader :connections
+
+       # Returns the next connection id in the sequence
+       def uid
+         @connections += 1
+       end
+     end
+
+     @connections = 0
+   end
+ end
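
For context, a LayerConnection is normally built through Layer#project rather than instantiated directly. A minimal sketch of how the pieces fit together, assuming Layer.new(size) and a Layer#project(layer) that delegates to this class (the Layer class is not part of this diff):

    input  = Synaptical::Layer.new(2)
    output = Synaptical::Layer.new(1)

    # With no explicit type, init_type falls back to ALL_TO_ALL for two
    # distinct layers, so every input neuron projects to every output neuron.
    connection = input.project(output)
    connection.size # => 2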
@@ -0,0 +1,124 @@
+ # frozen_string_literal: true
+
+ module Synaptical
+   # Representation of a network
+   class Network
+     Layers = Struct.new(:input, :hidden, :output)
+
+     attr_reader :optimized, :layers
+
+     def initialize(input:, hidden:, output:)
+       @layers = Layers.new(input, hidden, output)
+       @optimized = false
+     end
+
+     # Feed-forward activation of all the layers to produce an output
+     # @param input [Array<Numeric>] Input
+     #
+     # @return [Array<Numeric>] Output
+     def activate(input)
+       raise if optimized
+       layers.input.activate(input)
+       layers.hidden.each(&:activate)
+       layers.output.activate
+     end
+
+     # Back-propagate the error through the network
+     # @param rate [Float] Learning rate
+     # @param target [Array<Numeric>] Target values
+     def propagate(rate, target)
+       raise if optimized
+       layers.output.propagate(rate, target)
+       layers.hidden.reverse_each { |layer| layer.propagate(rate) }
+     end
+
+     # Project output onto another layer or network
+     # @param unit [Synaptical::Network, Synaptical::Layer] Object to project onto
+     # @param type [Object] Connection type, one of Layer::CONNECTION_TYPE.values
+     # @param weights [Numeric, nil] Initial weight for the new connections
+     def project(unit, type, weights)
+       raise if optimized
+       case unit
+       when Network
+         layers.output.project(unit.layers.input, type, weights)
+       when Layer
+         layers.output.project(unit, type, weights)
+       else
+         raise ArgumentError, 'Invalid argument'
+       end
+     end
+
+     def gate(connection, type)
+       raise if optimized
+       layers.output.gate(connection, type)
+     end
+
+     def clear
+       restore
+       ([layers.input, layers.output] + layers.hidden).each(&:clear)
+     end
+
+     def reset
+       restore
+       ([layers.input, layers.output] + layers.hidden).each(&:reset)
+     end
+
+     def optimize
+       raise 'TODO'
+     end
+
+     def restore
+       raise if optimized
+     end
+
+     # Return all neurons in all layers
+     #
+     # @return [Array<Hash>] A list of neurons and which layer they belong to
+     def neurons
+       layers.input.neurons.map { |n| { neuron: n, layer: 'input' } } +
+         layers.hidden.each_with_index.flat_map do |layer, i|
+           layer.neurons.map { |n| { neuron: n, layer: i } }
+         end +
+         layers.output.neurons.map { |n| { neuron: n, layer: 'output' } }
+     end
+
+     # Return number of inputs
+     #
+     # @return [Integer] Number of inputs
+     def inputs
+       layers.input.size
+     end
+
+     # Return number of outputs
+     #
+     # @return [Integer] Number of outputs
+     def outputs
+       layers.output.size
+     end
+
+     def set
+       raise 'TODO'
+     end
+
+     def set_optimize
+       raise 'TODO'
+     end
+
+     # Export the network as JSON
+     #
+     # @return [Hash] Hash ready for JSON serialization
+     def to_json
+       restore
+
+       Synaptical::Serializer::JSON.as_json(self)
+     end
+
+     def to_dot
+       raise 'TODO'
+     end
+
+     def from_json
+       raise 'TODO'
+     end
+   end
+ end
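
As a usage sketch, wiring three layers into a network and running one training step. Layer.new(size) and the no-argument form of Layer#project are assumptions here (the Layer class is not part of this diff); activate and propagate are the methods defined above:

    input  = Synaptical::Layer.new(2)
    hidden = Synaptical::Layer.new(3)
    output = Synaptical::Layer.new(1)

    input.project(hidden)
    hidden.project(output)

    network = Synaptical::Network.new(input: input, hidden: [hidden], output: output)

    # One training step: feed-forward, then back-propagate toward the target.
    network.activate([0, 1])
    network.propagate(0.3, [1])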
@@ -0,0 +1,323 @@
+ # frozen_string_literal: true
+
+ module Synaptical
+   # Representation of a neuron
+   class Neuron
+     CONNECTION_TYPES = %i[inputs projected gated].freeze
+
+     Connections = Struct.new(:inputs, :projected, :gated)
+     Connection = Struct.new(:type, :connection)
+     Error = Struct.new(:responsibility, :projected, :gated)
+     Trace = Struct.new(:elegibility, :extended, :influences)
+
+     attr_reader :id, :connections, :error, :trace, :state, :old, :activation,
+                 :selfconnection, :squash, :neighbors, :bias
+
+     # Creates an instance of a Neuron
+     def initialize
+       @id = self.class.uid
+       @connections = Connections.new({}, {}, {})
+       @error = Error.new(0.0, 0.0, 0.0)
+       @trace = Trace.new({}, {}, {})
+
+       @state = @old = @activation = 0.0
+       @selfconnection = Synaptical::Connection.new(self, self, 0.0)
+       @squash = Synaptical::Squash::Logistic
+       @neighbors = {}
+       @bias = rand * 0.2 - 0.1
+     end
+
+     # Activate the neuron
+     # @param input = nil [Numeric] input value
+     #
+     # @return [Numeric] output value
+     def activate(input = nil)
+       # Is the neuron in the input layer?
+       unless input.nil?
+         @activation = input
+         @derivative = 0
+         @bias = 0
+         return activation
+       end
+
+       @old = @state
+
+       # eq. 15.
+       @state = selfconnection.gain * selfconnection.weight * state + bias
+
+       connections.inputs.each_value do |conn|
+         @state += conn.from.activation * conn.weight * conn.gain
+       end
+
+       # eq. 16.
+       @activation = squash.call(@state)
+
+       # f'(s)
+       @derivative = squash.derivate(@activation)
+
+       # Update traces
+       influences = {}
+       trace.extended.each_key do |id|
+         neuron = @neighbors[id]
+
+         influence = neuron.selfconnection.gater == self ? neuron.old : 0
+
+         trace.influences[neuron.id].each do |incoming|
+           influence += incoming.weight * incoming.from.activation
+         end
+
+         influences[neuron.id] = influence
+       end
+
+       connections.inputs.each_value do |input_conn|
+         # elegibility trace - eq. 17
+         trace.elegibility[input_conn.id] =
+           selfconnection.gain *
+           selfconnection.weight *
+           trace.elegibility[input_conn.id] +
+           input_conn.gain *
+           input_conn.from.activation
+
+         trace.extended.each do |id, xtrace|
+           neuron = neighbors[id]
+           influence = influences[neuron.id]
+
+           xtrace[input_conn.id] =
+             neuron.selfconnection.gain *
+             neuron.selfconnection.weight *
+             xtrace[input_conn.id] +
+             @derivative *
+             trace.elegibility[input_conn.id] *
+             influence
+         end
+       end
+
+       # Update the gains of gated connections
+       connections.gated.each_value { |conn| conn.gain = @activation }
+
+       @activation
+     end
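+
+     # In the notation of the equations cited above, and assuming the default
+     # Logistic squash is the standard sigmoid, activate computes:
+     #
+     #   s_j = g_jj * w_jj * s_j(t-1) + b_j + sum_i(g_ij * w_ij * y_i)   eq. 15
+     #   y_j = squash(s_j) = 1 / (1 + e^(-s_j))                          eq. 16
+     #   f'(s_j) = y_j * (1 - y_j)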
+
+     # Back propagate the error
+     # @param rate [Float] Learning rate
+     # @param target = nil [Numeric] Target value
+     def propagate(rate = 0.1, target = nil)
+       error = 0.0
+
+       # Is the neuron in the output layer?
+       if !target.nil?
+         # Eq. 10.
+         @error.responsibility = @error.projected = target - @activation
+       else
+         # The rest of the neurons compute their error responsibilities by
+         # back-propagation
+         connections.projected.each_value do |connection|
+           neuron = connection.to
+
+           # Eq. 21.
+           error +=
+             neuron.error.responsibility * connection.gain * connection.weight
+         end
+
+         # Projected error responsibility
+         @error.projected = @derivative * error
+
+         error = 0.0
+         # Error responsibilities from all the connections gated by this neuron
+         trace.extended.each_key do |id|
+           neuron = @neighbors[id] # gated neuron
+           # If the gated neuron's selfconnection is gated by this neuron
+           influence = neuron.selfconnection.gater == self ? neuron.old : 0.0
+
+           # The index runs over all the connections to the gated neuron that
+           # are gated by this neuron
+           trace.influences[id].each do |incoming|
+             # Captures the effect that the input connections of this neuron
+             # have on a neuron whose inputs are gated by this neuron
+             influence += incoming.weight * incoming.from.activation
+           end
+
+           # Eq. 22.
+           error += neuron.error.responsibility * influence
+         end
+
+         # Gated error responsibility
+         @error.gated = @derivative * error
+
+         # Error responsibility - Eq. 23.
+         @error.responsibility = @error.projected + @error.gated
+       end
+
+       connections.inputs.each_value do |input_conn|
+         # Eq. 24
+         gradient = @error.projected * trace.elegibility[input_conn.id]
+         trace.extended.each do |id, xtrace|
+           neuron = neighbors[id]
+           gradient += neuron.error.responsibility * xtrace[input_conn.id]
+         end
+
+         # Adjust weights - aka. learn
+         input_conn.weight += rate * gradient
+       end
+
+       # Adjust bias
+       @bias += rate * @error.responsibility
+     end
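+
+     # The weight update implemented above is eq. 24: for each input
+     # connection ij,
+     #
+     #   dw_ij = rate * (error.projected * elegibility_ij +
+     #                   sum_k(error.responsibility_k * xtrace_k_ij))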
+
+     # Connect this neuron to another neuron
+     # @param neuron [Synaptical::Neuron] Other neuron
+     # @param weight = nil [Float] Weight
+     #
+     # @return [Synaptical::Connection] Connection
+     def project(neuron, weight = nil)
+       if neuron == self
+         selfconnection.weight = 1
+         return selfconnection
+       end
+
+       # Check if the connection already exists
+       connected = connected(neuron)
+       if connected && connected.type == :projected
+         # Update the existing connection
+         connected.connection.weight = weight unless weight.nil?
+         return connected.connection
+       else
+         connection = ::Synaptical::Connection.new(self, neuron, weight)
+       end
+
+       # Reference all the connections and traces
+       connections.projected[connection.id] = connection
+       neighbors[neuron.id] = neuron
+       neuron.connections.inputs[connection.id] = connection
+       neuron.trace.elegibility[connection.id] = 0
+
+       neuron.trace.extended.each do |_id, trace|
+         trace[connection.id] = 0
+       end
+
+       connection
+     end
+
+     # Add a connection to the gated list
+     # @param connection [Synaptical::Connection] Connection
+     def gate(connection)
+       connections.gated[connection.id] = connection
+
+       neuron = connection.to
+       unless trace.extended.key?(neuron.id)
+         # Extended trace
+         neighbors[neuron.id] = neuron
+         xtrace = trace.extended[neuron.id] = {}
+         neuron.connections.inputs.each_value do |input|
+           xtrace[input.id] = 0
+         end
+       end
+
+       # Keep track
+       if trace.influences.key?(neuron.id)
+         trace.influences[neuron.id] << connection
+       else
+         trace.influences[neuron.id] = [connection]
+       end
+
+       # Set gater
+       connection.gater = self
+     end
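+
+     # For example, if conn = a.project(b), then c.gate(conn) registers conn
+     # here and sets its gater to c; on c's next activation conn.gain becomes
+     # c.activation, so b effectively receives a.activation * conn.weight *
+     # c.activation (assuming Connection initializes gain to 1 until gated).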
+
+     # Returns whether the neuron is self connected
+     #
+     # @return [Boolean] true if self connected, false otherwise
+     def selfconnected?
+       !selfconnection.weight.zero?
+     end
+
+     # Returns the connection to another neuron, if any
+     # @param neuron [Synaptical::Neuron] Other neuron
+     #
+     # @return [Neuron::Connection, nil] Connection type and connection if
+     #   connected to the other neuron, nil otherwise
+     def connected(neuron)
+       result = Connection.new
+
+       if self == neuron
+         return nil unless selfconnected?
+         result.type = :selfconnection
+         result.connection = selfconnection
+         return result
+       end
+
+       CONNECTION_TYPES.each do |type|
+         connections.send(type).each_value do |connection|
+           next unless connection.to == neuron || connection.from == neuron
+           result.type = type
+           result.connection = connection
+           return result
+         end
+       end
+
+       nil
+     end
+
+     # Clears the context of the neuron, but keeps the connections
+     def clear
+       trace.elegibility.transform_values! { |_| 0 }
+       trace.extended.each_value do |ext|
+         ext.transform_values! { |_| 0 }
+       end
+
+       error.responsibility = error.projected = error.gated = 0
+     end
+
+     # Clears traces and randomizes connection weights
+     def reset
+       clear
+       CONNECTION_TYPES.map { |ct| connections.send(ct) }.each do |conn_group|
+         conn_group.each_value { |conn| conn.weight = rand * 0.2 - 0.1 }
+       end
+
+       @bias = rand * 0.2 - 0.1
+       @old = @state = @activation = 0
+     end
+
+     # Hard codes the behavior of the neuron into an optimized function
+     def optimize(_optimized, _layer)
+       raise 'TODO'
+     end
+
+     class << self
+       attr_reader :neurons
+
+       # Returns the next id in the sequence
+       #
+       # @return [Integer] Next neuron id
+       def uid
+         @neurons += 1
+       end
+
+       # Returns counts of all neurons and connections created so far
+       def quantity
+         { neurons: neurons, connections: Synaptical::Connection.connections }
+       end
+     end
+
+     @neurons = 0
+   end
+ end
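
Taken together, two raw neurons can learn a mapping with just these methods. A minimal sketch, assuming Synaptical::Connection and Synaptical::Squash::Logistic behave as referenced above:

    a = Synaptical::Neuron.new
    b = Synaptical::Neuron.new
    a.project(b)

    # Teach b to emit ~1.0 whenever a is fed 1.0.
    10_000.times do
      a.activate(1.0)
      b.activate
      b.propagate(0.3, 1.0) # learning rate, target
    end

    a.activate(1.0)
    b.activate # => close to 1.0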