neuronet 6.1.0 → 8.0.251113

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Arrayable avoids explicit `to_a` calls for common Array methods.
+   module Arrayable
+     def each(&blk) = to_a.each { blk[it] }
+     def each_with_index(&blk) = to_a.each_with_index { |n, i| blk[n, i] }
+     def [](index) = to_a[index]
+     def map(&) = to_a.map(&)
+     def size = to_a.size
+     def reverse = to_a.reverse
+   end
+ end
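
A minimal usage sketch (the `Trio` class is hypothetical, not part of the package): a class gains the Array-like methods above by defining only `to_a` and including the module.

    # Hypothetical example class; only to_a is defined by hand.
    class Trio
      include Neuronet::Arrayable
      def to_a = [1, 2, 3]
    end

    trio = Trio.new
    trio.map { |n| n * 2 } #=> [2, 4, 6]
    trio[0]                #=> 1
    trio.size              #=> 3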
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Backpropagate provides simple, clamp-limited weight/bias updates.
+   module Backpropagate
+     # Back-propagates errors, updating bias and connection weights.
+     # Clamps updates to [-max, +max].
+     # Recursively calls on connected neurons.
+     # rubocop: disable Metrics, Style
+     def backpropagate(error)
+       bmax = Config.bias_clamp
+       b = bias + error
+       self.bias = b.abs > bmax ? (b.positive? ? bmax : -bmax) : b
+
+       wmax = Config.weight_clamp
+       connections.each do |c|
+         n = c.neuron
+         w = c.weight + (n.activation * error)
+         c.weight = w.abs > wmax ? (w.positive? ? wmax : -wmax) : w
+         n.backpropagate(error)
+       end
+     end
+     # rubocop: enable Metrics, Style
+   end
+ end
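
To make the clamp arithmetic concrete (numbers chosen for illustration): with the default bias clamp of 18.0, a bias of 17.5 plus an error of 2.0 overshoots to 19.5 and is pinned back to 18.0.

    bmax = 18.0    # Config.bias_clamp default
    b = 17.5 + 2.0 # proposed new bias: 19.5
    b.abs > bmax ? (b.positive? ? bmax : -bmax) : b #=> 18.0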
@@ -0,0 +1,10 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Maximum magnitudes for biases and weights
+   module Config
+     class << self; attr_accessor :bias_clamp, :weight_clamp; end
+     self.bias_clamp = 18.0
+     self.weight_clamp = 9.0
+   end
+ end
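
Both clamps are plain module-level accessors, so they can be tightened or loosened before training; a sketch:

    Neuronet::Config.bias_clamp   = 9.0 # halve the default bias clamp
    Neuronet::Config.weight_clamp = 4.5 # halve the default weight clamp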
@@ -0,0 +1,9 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Connection is a lightweight struct for weighted neuron links.
+   Connection = Struct.new('Connection', :neuron, :weight) do
+     # Weighted activation value
+     def value = neuron.activation * weight
+   end
+ end
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Deep is a feed-forward network with distinct layer and neuron types.
+   class Deep
+     include NetworkStats
+     include Exportable
+     include Trainable
+     include Arrayable
+
+     # rubocop: disable Metrics
+     def initialize(*sizes, input_neuron: InputNeuron,
+                    middle_neuron: MiddleNeuron,
+                    output_neuron: OutputNeuron)
+       length = sizes.length
+       raise 'Need at least 3 layers' if length < 3
+
+       @input_layer = InputLayer.new(sizes.shift, input_neuron:)
+       @output_layer = OutputLayer.new(sizes.pop, output_neuron:)
+       @hidden_layers = sizes.map { MiddleLayer.new(it, middle_neuron:) }
+       previous = @input_layer
+       @hidden_layers.each do |layer|
+         layer.connect(previous)
+         previous = layer
+       end
+       @output_layer.connect(previous)
+     end
+     # rubocop: enable Metrics
+
+     attr_reader :input_layer, :hidden_layers, :output_layer
+
+     def set(values)
+       @input_layer.set(values)
+     end
+
+     def update
+       @hidden_layers.each(&:update)
+     end
+
+     def values
+       @output_layer.values
+     end
+
+     def *(other)
+       set(other)
+       update
+       values
+     end
+
+     def to_a = [@input_layer, *@hidden_layers, @output_layer]
+   end
+ end
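
A usage sketch (sizes chosen arbitrarily): a 3-4-4-2 network with two hidden layers, driven through the `*` operator defined above.

    deep = Neuronet::Deep.new(3, 4, 4, 2)
    deep * [0.1, 0.2, 0.3] #=> two raw output values (set, update, values)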
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Exportable serializes network biases and weights only.
+   # Human-readable, compact, excludes activations.
+   module Exportable
+     # Writes the serialized network (self) to writer.
+     # rubocop: disable Metrics
+     def export(writer)
+       sizes = map(&:size)
+       writer.puts "# #{self.class}"
+       # The first "float" is the number of layers in the FFN,
+       # written as a float just to be consistent:
+       writer.puts "#{sizes.size.to_f} #{sizes.join(' ')}"
+       each_with_index do |layer, i|
+         next if i.zero? # skip input layer
+
+         layer.each_with_index do |neuron, j|
+           writer.puts "# neuron = FFN[#{i}, #{j}]"
+           writer.puts "#{neuron.bias} #{i} #{j}"
+           neuron.connections.each_with_index do |connection, k|
+             writer.puts "#{connection.weight} #{i} #{j} #{k}"
+           end
+         end
+       end
+     end
+     # rubocop: enable Metrics
+
+     def export_to_file(filename) = File.open(filename, 'w') { export it }
+     def import_from_file(filename) = File.open(filename, 'r') { import it }
+
+     # Reads and validates serialized network from reader to set self.
+     # rubocop: disable Metrics
+     def import(reader)
+       gets_data = lambda do |reader|
+         line = reader.gets
+         line = reader.gets while line&.start_with?('#')
+         return nil unless line # guards against EOF, even mid-comments
+         fs, *is = line.strip.split
+         [fs.to_f, *is.map(&:to_i)]
+       end
+
+       size, *sizes = gets_data[reader]
+       raise 'Size/Sizes mismatch' unless size == sizes.size
+       raise 'Sizes mismatch' unless sizes == map(&:size)
+
+       each_with_index do |layer, i|
+         next if i.zero? # skip input layer
+
+         layer.each_with_index do |neuron, j|
+           bias, *indices = gets_data[reader]
+           raise "bad bias index: #{indices}" unless indices == [i, j]
+
+           neuron.bias = bias
+           neuron.connections.each_with_index do |connection, k|
+             weight, *indices = gets_data[reader]
+             raise "bad weight index: #{indices}" unless indices == [i, j, k]
+
+             connection.weight = weight
+           end
+         end
+       end
+       raise 'Expected end of file.' unless gets_data[reader].nil?
+     end
+     # rubocop: enable Metrics
+   end
+ end
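
A round-trip sketch using the file helpers above (the filename is illustrative):

    ff = Neuronet::FeedForward.new(3, 3, 3)
    ff.export_to_file('weights.txt')    # biases and weights only
    ff2 = Neuronet::FeedForward.new(3, 3, 3)
    ff2.import_from_file('weights.txt') # raises unless sizes and indices match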
@@ -0,0 +1,54 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # FeedForward is a fully connected neural network with >= 3 layers.
+   class FeedForward
+     # [NetworkStats](network_stats.rb)
+     include NetworkStats
+     # [Exportable](exportable.rb)
+     include Exportable
+     # [Trainable](trainable.rb)
+     include Trainable
+     # [Arrayable](arrayable.rb)
+     include Arrayable
+
+     # Example:
+     # ff = Neuronet::FeedForward.new(4, 8, 4)
+     def initialize(*sizes, full_neuron: Neuron)
+       length = sizes.length
+       raise 'Need at least 3 layers' if length < 3
+
+       @layers = Array.new(length) { Layer.new(sizes[it], full_neuron:) }
+       1.upto(length - 1) { @layers[it].connect(@layers[it - 1]) }
+       @input_layer = @layers[0]
+       @output_layer = @layers[-1]
+       @hidden_layers = @layers[1...-1]
+     end
+
+     attr_reader :input_layer, :hidden_layers, :output_layer
+
+     # Sets the input values
+     def set(values)
+       @input_layer.set(values)
+     end
+
+     # Updates hidden layers (input assumed set).
+     def update
+       @hidden_layers.each(&:update)
+     end
+
+     # Gets output
+     def values
+       @output_layer.values
+     end
+
+     # Forward pass: set input, update, return output.
+     def *(other)
+       set(other)
+       update
+       values
+     end
+
+     def to_a = @layers
+   end
+ end
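
Continuing the example in the comment above, a full forward pass in operator form:

    ff = Neuronet::FeedForward.new(4, 8, 4)
    ff * [1.0, 2.0, 3.0, 4.0] #=> four raw (pre-squash) output values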
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Input Layer
+   class InputLayer
+     include Arrayable
+
+     def initialize(length, input_neuron: InputNeuron)
+       @layer = Array.new(length) { input_neuron.new }
+       @endex = length - 1
+     end
+
+     def set(values)
+       0.upto(@endex) { @layer[it].set values[it] }
+     end
+
+     def to_a = @layer
+   end
+ end
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Input Neuron
+   class InputNeuron
+     include NeuronStats
+     include Squash
+
+     EMPTY = [].freeze
+
+     def initialize
+       @activation = 0.5
+     end
+
+     attr_reader :activation
+
+     def bias = nil
+     def connections = EMPTY
+     def value = nil
+
+     def set(value)
+       @activation = squash(value)
+     end
+
+     def backpropagate(_) = nil
+   end
+ end
@@ -0,0 +1,41 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Layer is a collection of neurons with array-like behavior.
+   class Layer
+     # [LayerPresets](layer_presets.rb)
+     include LayerPresets
+     # [Arrayable](arrayable.rb)
+     include Arrayable
+
+     # Creates a layer with `length` neurons.
+     def initialize(length, full_neuron: Neuron)
+       @layer = Array.new(length) { full_neuron.new }
+       @endex = length - 1
+     end
+
+     # Sets each neuron's activation from the values array.
+     # Allows the layer to be used as an input layer.
+     def set(values)
+       0.upto(@endex) { @layer[it].set values[it] }
+     end
+
+     # For each neuron in the layer, updates the neuron's activation.
+     def update = @layer.each(&:update)
+
+     # Fully connects this layer to another.
+     def connect(layer)
+       each do |neuron|
+         layer.each { neuron.connect(it) }
+       end
+     end
+
+     # Raw pre-squashed values of all neurons in the layer.
+     # Allows the layer to be used as an output layer.
+     def values
+       @layer.map(&:value)
+     end
+
+     def to_a = @layer
+   end
+ end
@@ -0,0 +1,53 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # LayerPresets initializes layer weights/biases for interpretable functions.
+   module LayerPresets
+     BZERO = 0.5 / (0.5 - Squash.squash(1.0))
+     WONE = -2.0 * BZERO
+
+     # Sets the layer to roughly mirror its input.
+     # Input should be the same size as the layer.
+     def mirror(sign = 1.0)
+       each_with_index do |neuron, index|
+         neuron.bias = sign * BZERO
+         neuron.connections[index].weight = sign * WONE
+       end
+     end
+
+     # Doubles up the input, both mirroring and anti-mirroring it.
+     # The layer should be twice the size of the input.
+     def antithesis
+       sign = 1.0
+       each_with_index do |neuron, index|
+         neuron.bias = sign * BZERO
+         neuron.connections[index / 2].weight = sign * WONE
+         sign = -sign
+       end
+     end
+
+     # Sums the two corresponding input neurons above each neuron in the layer.
+     # Input should be twice the size of the layer.
+     def synthesis(sign = 1.0)
+       semi = sign * WONE / 2.0
+       each_with_index do |neuron, index|
+         neuron.bias = sign * BZERO
+         j = index * 2
+         connections = neuron.connections
+         connections[j].weight = semi
+         connections[j + 1].weight = semi
+       end
+     end
+
+     # Sets the layer to average its input.
+     def average(sign = 1.0)
+       bias = sign * BZERO
+       each do |neuron|
+         neuron.bias = bias
+         connections = neuron.connections
+         weight = sign * WONE / connections.size
+         connections.each { it.weight = weight }
+       end
+     end
+   end
+ end
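
The Squash module is not shown in this diff; assuming the conventional logistic squash 1/(1 + e^-x), the constants work out to BZERO ≈ -2.164 and WONE ≈ 4.328, and a mirroring neuron recovers its raw input exactly at -1, 0, and 1:

    # Assumes Squash.squash is the logistic sigmoid (not shown in this diff).
    squash = ->(x) { 1.0 / (1.0 + Math.exp(-x)) }
    bzero  = 0.5 / (0.5 - squash[1.0]) #=> ~ -2.164
    wone   = -2.0 * bzero              #=> ~  4.328
    [-1.0, 0.0, 1.0].map { |x| bzero + (wone * squash[x]) } #=> ~ [-1.0, 0.0, 1.0]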
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Middle Layer
+   class MiddleLayer
+     include LayerPresets
+     include Arrayable
+
+     def initialize(length, middle_neuron: MiddleNeuron)
+       @layer = Array.new(length) { middle_neuron.new }
+     end
+
+     def update = @layer.each(&:update)
+
+     def connect(layer)
+       each do |neuron|
+         layer.each { neuron.connect(it) }
+       end
+     end
+
+     def to_a = @layer
+   end
+ end
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Middle Neuron
+   class MiddleNeuron
+     include NeuronStats
+     include Backpropagate
+     include Squash
+
+     def initialize
+       @activation = 0.5
+       @bias = 0.0
+       @connections = []
+     end
+
+     attr_accessor :bias
+     attr_reader :activation, :connections
+
+     def connect(neuron, weight = 0.0)
+       @connections << Connection.new(neuron, weight)
+     end
+
+     def value
+       @bias + @connections.sum(&:value)
+     end
+
+     def update
+       @activation = squash(value)
+     end
+   end
+ end
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Multi-Layer Perceptron (3 layers)
+   class MLP
+     include NetworkStats
+     include Exportable
+     include Trainable
+     include Arrayable
+
+     # rubocop: disable Metrics
+     def initialize(input_size, middle_size, output_size,
+                    input_neuron: InputNeuron,
+                    middle_neuron: MiddleNeuron,
+                    output_neuron: OutputNeuron)
+       @input_layer = InputLayer.new(input_size, input_neuron:)
+       @middle_layer = MiddleLayer.new(middle_size, middle_neuron:)
+       @output_layer = OutputLayer.new(output_size, output_neuron:)
+       @middle_layer.connect(@input_layer)
+       @output_layer.connect(@middle_layer)
+     end
+     # rubocop: enable Metrics
+
+     attr_reader :input_layer, :middle_layer, :output_layer
+
+     def set(values)
+       @input_layer.set(values)
+     end
+
+     def update
+       @middle_layer.update
+     end
+
+     def values
+       @output_layer.values
+     end
+
+     def *(other)
+       set(other)
+       update
+       values
+     end
+
+     def to_a = [@input_layer, @middle_layer, @output_layer]
+   end
+ end
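
A three-layer sketch parallel to the FeedForward example (sizes illustrative):

    mlp = Neuronet::MLP.new(2, 4, 1)
    mlp * [0.5, -0.5] #=> one raw output value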
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Network Stats
+   module NetworkStats
+     # See https://github.com/carlosjhr64/neuronet/wiki
+     # |𝝂| = 𝔪 + ¼√𝑁*𝔪' + ¼√𝑁*¼√𝑁'*𝔪" + ...
+     def expected_nju!
+       nju = 0.0
+       mult = 1.0
+       reverse[1..].each do |layer|
+         size = layer.size
+         mju = 1 + (0.5 * size)
+         nju += mult * mju
+         mult *= 0.25 * Math.sqrt(size)
+       end
+       @expected_nju = nju
+     end
+
+     def expected_nju
+       @expected_nju || expected_nju!
+     end
+
+     def njus
+       output_layer.map(&:nju)
+     end
+   end
+ end
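
A worked instance of the formula for a 3-3-3 network: `reverse[1..]` is [middle layer, input layer], each of size 3, so 𝔪 = 1 + 0.5·3 = 2.5 at both steps, and the multiplier after the first step is 0.25·√3 ≈ 0.433:

    ff = Neuronet::FeedForward.new(3, 3, 3)
    ff.expected_nju #=> 2.5 + (0.25 * Math.sqrt(3) * 2.5) ~ 3.58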
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Neuron represents a single node in a neural network.
+   # It holds @activation, @bias, and incoming @connections.
+   class Neuron
+     # [nju computation](neuron_stats.rb)
+     include NeuronStats
+     # [back-propagation of errors](backpropagate.rb)
+     include Backpropagate
+     # [squash/unsquash methods](squash.rb)
+     include Squash
+
+     # Initializes a neuron with default activation 0.5 and zero bias.
+     def initialize
+       @activation = 0.5
+       @bias = 0.0
+       @connections = [] # incoming connections
+     end
+
+     attr_accessor :bias # bias is settable
+     attr_reader :activation, :connections # activation is read-only
+
+     # Sets activation by applying squash to the raw input value.
+     def set(value)
+       @activation = squash(value)
+     end
+
+     # Creates a weighted connection to another neuron.
+     # See [Neuronet::Connection](connection.rb)
+     def connect(neuron, weight = 0.0)
+       @connections << Connection.new(neuron, weight)
+     end
+
+     # Computes the raw output value: bias + sum of incoming connection values.
+     def value
+       @bias + @connections.sum(&:value)
+     end
+
+     # Updates activation by squashing the current value (see above).
+     def update
+       @activation = squash(value)
+     end
+   end
+ end
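
A two-neuron sketch of the connect/set/update cycle (the weight is illustrative; the numeric comments assume the logistic squash):

    a = Neuronet::Neuron.new
    b = Neuronet::Neuron.new
    b.connect(a, 2.0) # b reads a's activation, weighted by 2.0
    a.set(0.0)        # a.activation = squash(0.0) = 0.5
    b.update          # b.activation = squash(0.0 + (0.5 * 2.0)) = squash(1.0)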
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # NeuronStats provides neuron-level analysis methods.
+   module NeuronStats
+     # Returns the total tally of parameters in the downstream
+     # network subgraph from this neuron.
+     # This includes the neuron's bias (1 parameter),
+     # the weights of its incoming connections (one per connection),
+     # and the sum of parameters from all downstream neurons.
+     # Parameters from a shared neuron are counted multiple times
+     # if accessed via multiple pathways,
+     # reflecting the total parameter influence through all paths.
+     # Returns 0 for a neuron with no downstream connections.
+     def downstream_params_tally
+       return 0 if (size = connections.size).zero?
+
+       1 + size + connections.sum { it.neuron.downstream_params_tally }
+     end
+
+     # Sum of activations + 1. It's a component of the sensitivity measure nju.
+     # See the [wiki](https://github.com/carlosjhr64/neuronet/wiki)
+     def mju = 1 + connections.sum { it.neuron.activation }
+
+     # Sensitivity measure nju:
+     #   𝒆 ~ 𝜀𝝁 + 𝑾 𝓑𝒂'𝒆'
+     #   𝝂 ≜ 𝒆/𝜀
+     #   𝝂 ~ 𝝁 + 𝑾 𝓑𝒂'𝝂'
+     # See the [wiki](https://github.com/carlosjhr64/neuronet/wiki)
+     # See also test/tc_epsilon:
+     # https://github.com/carlosjhr64/neuronet/blob/master/test/tc_epsilon
+     # rubocop: disable Metrics
+     def nju
+       return 0 if connections.empty?
+
+       mju + connections.sum do |connection|
+         n = connection.neuron
+         next 0.0 if (nju = n.nju).zero? || (a = n.activation).zero? || a >= 1.0
+
+         connection.weight * a * (1.0 - a) * nju
+       end
+     end
+     # rubocop: enable Metrics
+   end
+ end
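
A worked tally for a 2-2-1 network, assuming OutputNeuron (not shown in this diff) includes NeuronStats like the neuron classes above: each middle neuron counts 1 bias + 2 weights + 0 from the parameterless input neurons = 3, so the output neuron counts 1 + 2 + 3 + 3 = 9:

    mlp = Neuronet::MLP.new(2, 2, 1)
    mlp.output_layer[0].downstream_params_tally #=> 9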
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Noisy Backpropagate
+   module NoisyBackpropagate
+     # rubocop: disable Metrics, Style
+     def backpropagate(error)
+       bmax = Config.bias_clamp
+       b = bias + (error * (rand + rand))
+       self.bias = b.abs > bmax ? (b.positive? ? bmax : -bmax) : b
+
+       wmax = Config.weight_clamp
+       connections.each do |c|
+         n = c.neuron
+         w = c.weight + (n.activation * error * (rand + rand))
+         c.weight = w.abs > wmax ? (w.positive? ? wmax : -wmax) : w
+         n.backpropagate(error)
+       end
+     end
+     # rubocop: enable Metrics, Style
+   end
+ end
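
The noise factor `rand + rand` sums two uniform [0, 1) samples, giving a triangular distribution on [0, 2) with mean 1.0, so the updates match plain Backpropagate on average while varying per call:

    samples = Array.new(100_000) { rand + rand }
    samples.sum / samples.size #=> ~ 1.0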
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Noisy Middle Neuron
+   class NoisyMiddleNeuron < MiddleNeuron
+     include NoisyBackpropagate
+   end
+ end
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Noisy Neuron
+   class NoisyNeuron < Neuron
+     include NoisyBackpropagate
+   end
+ end
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Noisy Output Neuron
+   class NoisyOutputNeuron < OutputNeuron
+     include NoisyBackpropagate
+   end
+ end
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+   # Output Layer
+   class OutputLayer
+     include LayerPresets
+     include Arrayable
+
+     def initialize(length, output_neuron: OutputNeuron)
+       @layer = Array.new(length) { output_neuron.new }
+     end
+
+     def connect(layer)
+       each do |neuron|
+         layer.each { neuron.connect(it) }
+       end
+     end
+
+     def values
+       @layer.map(&:value)
+     end
+
+     def to_a = @layer
+   end
+ end