neuronet 7.0.230416 → 8.0.251113

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,111 +1,41 @@
  # frozen_string_literal: true

- # Neuronet module
  module Neuronet
- # Layer is an array of neurons.
- class Layer < Array
- # Length is the number of neurons in the layer.
- def initialize(length)
- super(length) { Neuron.new }
- end
-
- # This is where one enters the "real world" inputs.
- def set(inputs)
- 0.upto(length - 1) { self[_1].value = inputs[_1] || 0.0 }
- self
- end
-
- # Returns the real world values: [value, ...]
- def values
- map(&:value)
- end
-
- # Allows one to fully connect layers.
- def connect(layer = Layer.new(length), weights: [])
- # creates the neuron matrix...
- each_with_index do |neuron, i|
- weight = weights[i] || 0.0
- layer.each { neuron.connect(_1, weight:) }
- end
- # The layer is returned for chaining.
- layer
- end
-
- # Set layer to mirror input:
- # bias = BZERO.
- # weight = WONE
- # Input should be the same size as the layer. One can set sign to -1 to
- # anti-mirror. One can set sign to other than |1| to scale.
- def mirror(sign = 1)
- each_with_index do |neuron, index|
- neuron.bias = sign * Neuronet.bzero
- neuron.connections[index].weight = sign * Neuronet.wone
- end
- end
+ # Layer is a collection of neurons with array-like behavior.
+ class Layer
+ # [LayerPresets](layer_presets.rb)
+ include LayerPresets
+ # [Arrayable](arrayable.rb)
+ include Arrayable

- # Doubles up the input mirroring and anti-mirroring it. The layer should be
- # twice the size of the input.
- def antithesis
- sign = 1
- each_with_index do |n, i|
- n.connections[i / 2].weight = sign * Neuronet.wone
- n.bias = sign * Neuronet.bzero
- sign = -sign
- end
+ # Creates layer with `length` number of neurons.
+ def initialize(length, full_neuron: Neuron)
+ @layer = Array.new(length) { full_neuron.new }
+ @endex = length - 1
  end

- # Sums two corresponding input neurons above each neuron in the layer.
- # Input should be twice the size of the layer.
- def synthesis
- semi = Neuronet.wone / 2
- each_with_index do |n, i|
- j = i * 2
- c = n.connections
- n.bias = Neuronet.bzero
- c[j].weight = semi
- c[j + 1].weight = semi
- end
+ # Set each neuron's activation from values array.
+ # Allows the layer to be used as an input layer.
+ def set(values)
+ 0.upto(@endex) { @layer[it].set values[it] }
  end

- # Set layer to average input.
- def average(sign = 1)
- bias = sign * Neuronet.bzero
- each do |n|
- n.bias = bias
- weight = sign * Neuronet.wone / n.connections.length
- n.connections.each { _1.weight = weight }
- end
- end
+ # For each neuron in the layer, updates the neuron's activation.
+ def update = @layer.each(&:update)

- # updates layer with current values of the previous layer
- def partial
- each(&:partial)
- end
-
- def average_mju
- Neuronet.learning * sum { Neuron.mju(_1) } / length
- end
-
- # Takes the real world target for each neuron in this layer and
- # backpropagates the error to each neuron.
- def train(target, mju = nil)
- 0.upto(length - 1) do |index|
- neuron = self[index]
- error = (target[index] - neuron.value) /
- (mju || (Neuronet.learning * Neuron.mju(neuron)))
- neuron.backpropagate(error)
+ # Fully connects this layer to another.
+ def connect(layer)
+ each do |neuron|
+ layer.each { neuron.connect(it) }
  end
- self
  end

- # Layer inspects as "label:value,..."
- def inspect
- map(&:inspect).join(',')
+ # Raw pre-squashed values of all neurons in the layer.
+ # Allows the layer to be used as an output layer.
+ def values
+ @layer.map(&:value)
  end

- # Layer puts as "label,..."
- def to_s
- map(&:to_s).join(',')
- end
+ def to_a = @layer
  end
  end
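For orientation, here is a minimal sketch of the reworked Layer API, assuming the gem's other components (Neuron, Connection, Arrayable) behave as the rest of this diff shows; the sizes and variable names are illustrative only:

    require 'neuronet'

    input  = Neuronet::Layer.new(3)    # three default (full) neurons
    output = Neuronet::Layer.new(2)
    output.connect(input)              # fully connect; weights start at 0.0
    input.set([0.0, 1.0, -1.0])        # squash raw values into activations
    output.update                      # recompute each neuron's activation
    output.values                      # => [0.0, 0.0] while weights are zero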
@@ -0,0 +1,53 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # LayerPresets initializes layer weights/biases for interpretable functions.
+ module LayerPresets
+ BZERO = 0.5 / (0.5 - Squash.squash(1.0))
+ WONE = -2.0 * BZERO
+
+ # Set layer to roughly mirror its input.
+ # Input should be the same size as the layer.
+ def mirror(sign = 1.0)
+ each_with_index do |neuron, index|
+ neuron.bias = sign * BZERO
+ neuron.connections[index].weight = sign * WONE
+ end
+ end
+
+ # Doubles up the input, both mirroring and anti-mirroring it.
+ # The layer should be twice the size of the input.
+ def antithesis
+ sign = 1.0
+ each_with_index do |neuron, index|
+ neuron.bias = sign * BZERO
+ neuron.connections[index / 2].weight = sign * WONE
+ sign = -sign
+ end
+ end
+
+ # Sums two corresponding input neurons above each neuron in the layer.
+ # Input should be twice the size of the layer.
+ def synthesis(sign = 1.0)
+ semi = sign * WONE / 2.0
+ each_with_index do |neuron, index|
+ neuron.bias = sign * BZERO
+ j = index * 2
+ connections = neuron.connections
+ connections[j].weight = semi
+ connections[j + 1].weight = semi
+ end
+ end
+
+ # Set layer to average input.
+ def average(sign = 1.0)
+ bias = sign * BZERO
+ each do |neuron|
+ neuron.bias = bias
+ connections = neuron.connections
+ weight = sign * WONE / connections.size
+ connections.each { it.weight = weight }
+ end
+ end
+ end
+ end
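The BZERO/WONE constants are chosen so that a mirrored neuron reproduces its raw input exactly at the values 0 and 1. A quick check, assuming Squash.squash is the standard logistic 1/(1 + e^-x), as the constants imply:

    squash = ->(x) { 1.0 / (1.0 + Math.exp(-x)) }
    bzero  = 0.5 / (0.5 - squash[1.0])  # ≈ -2.164
    wone   = -2.0 * bzero               # ≈  4.328
    # A mirrored neuron outputs bias + weight * squash(input):
    mirror = ->(v) { bzero + (wone * squash[v]) }
    mirror[0.0] # => 0.0   (exact)
    mirror[1.0] # => 1.0   (exact)
    mirror[0.5] # => ~0.53 (hence it only "roughly" mirrors in between)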
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Middle Layer
+ class MiddleLayer
+ include LayerPresets
+ include Arrayable
+
+ def initialize(length, middle_neuron: MiddleNeuron)
+ @layer = Array.new(length) { middle_neuron.new }
+ end
+
+ def update = @layer.each(&:update)
+
+ def connect(layer)
+ each do |neuron|
+ layer.each { neuron.connect(it) }
+ end
+ end
+
+ def to_a = @layer
+ end
+ end
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Middle Neuron
+ class MiddleNeuron
+ include NeuronStats
+ include Backpropagate
+ include Squash
+
+ def initialize
+ @activation = 0.5
+ @bias = 0.0
+ @connections = []
+ end
+
+ attr_accessor :bias
+ attr_reader :activation, :connections
+
+ def connect(neuron, weight = 0.0)
+ @connections << Connection.new(neuron, weight)
+ end
+
+ def value
+ @bias + @connections.sum(&:value)
+ end
+
+ def update
+ @activation = squash(value)
+ end
+ end
+ end
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Multi-Layer Perceptron (3 layers)
+ class MLP
+ include NetworkStats
+ include Exportable
+ include Trainable
+ include Arrayable
+
+ # rubocop: disable Metrics
+ def initialize(input_size, middle_size, output_size,
+ input_neuron: InputNeuron,
+ middle_neuron: MiddleNeuron,
+ output_neuron: OutputNeuron)
+ @input_layer = InputLayer.new(input_size, input_neuron:)
+ @middle_layer = MiddleLayer.new(middle_size, middle_neuron:)
+ @output_layer = OutputLayer.new(output_size, output_neuron:)
+ @middle_layer.connect(@input_layer)
+ @output_layer.connect(@middle_layer)
+ end
+ # rubocop: enable Metrics
+
+ attr_reader :input_layer, :middle_layer, :output_layer
+
+ def set(values)
+ @input_layer.set(values)
+ end
+
+ def update
+ @middle_layer.update
+ end
+
+ def values
+ @output_layer.values
+ end
+
+ def *(other)
+ set(other)
+ update
+ values
+ end
+
+ def to_a = [@input_layer, @middle_layer, @output_layer]
+ end
+ end
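Putting the pieces together, a plausible forward pass with the new MLP; layer sizes are illustrative, and InputLayer/InputNeuron come from files not shown in this diff:

    mlp = Neuronet::MLP.new(3, 4, 2)
    mlp * [1.0, 0.0, -1.0]  # set + update + values => [0.0, 0.0] before training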
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Network Stats
+ module NetworkStats
+ # See https://github.com/carlosjhr64/neuronet/wiki
+ # |𝝂| = 𝔪 + ¼√𝑁*𝔪' + ¼√𝑁*¼√𝑁'*𝔪" + ...
+ def expected_nju!
+ nju = 0.0
+ mult = 1.0
+ reverse[1..].each do |layer|
+ size = layer.size
+ mju = 1 + (0.5 * size)
+ nju += mult * mju
+ mult *= 0.25 * Math.sqrt(size)
+ end
+ @expected_nju = nju
+ end
+
+ def expected_nju
+ @expected_nju || expected_nju!
+ end
+
+ def njus
+ output_layer.map(&:nju)
+ end
+ end
+ end
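A worked example of expected_nju! for the illustrative 3-4-2 MLP above, assuming Arrayable (not shown) delegates Enumerable-style methods like reverse over to_a. Since the output layer comes first in reverse, reverse[1..] visits the middle layer (size 4) and then the input layer (size 3):

    # 𝔪  = 1 + 0.5*4 = 3.0;  nju = 3.0;              mult = 0.25*√4 = 0.5
    # 𝔪' = 1 + 0.5*3 = 2.5;  nju = 3.0 + 0.5*2.5 = 4.25
    Neuronet::MLP.new(3, 4, 2).expected_nju # => 4.25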
@@ -1,146 +1,45 @@
  # frozen_string_literal: true

- # Neuronet module / Neuron class
  module Neuronet
- # A Neuron is capable of creating connections to other neurons. The
- # connections attribute is a list of the neuron's connections to other
- # neurons. A neuron's bias is it's kicker (or deduction) to it's activation
- # value, a sum of its connections values.
+ # Neuron represents a single node in a neural network.
+ # It holds @activation, @bias, and incoming @connections.
  class Neuron
- # For bookkeeping, each Neuron is given a label, starting with 'a' by
- # default.
- class << self; attr_accessor :label; end
- Neuron.label = 'a'
+ # [nju computation](neuron_stats.rb)
+ include NeuronStats
+ # [back-propagation of errors](backpropagate.rb)
+ include Backpropagate
+ # [squash/unsquash methods](squash.rb)
+ include Squash

- attr_reader :label, :activation, :connections
- attr_accessor :bias
-
- # The neuron's mu is the sum of the connections' mu(activation), plus one
- # for the bias:
- # 𝛍 := 1+∑𝐚'
- def mu
- return 0.0 if @connections.empty?
-
- 1 + @connections.sum(&:mu)
- end
-
- # Reference the library's wiki:
- # 𝒆ₕ ~ 𝜀(𝝁ₕ + 𝜧ₕⁱ𝝁ᵢ + 𝜧ₕⁱ𝜧ᵢʲ𝝁ⱼ + 𝜧ₕⁱ𝜧ᵢʲ𝜧ⱼᵏ𝝁ₖ + ...)
- # 𝜧ₕⁱ𝝁ᵢ is:
- # neuron.mju{ |connected_neuron| connected_neuron.mu }
- # 𝜧ₕⁱ𝜧ᵢʲ𝝁ⱼ is:
- # nh.mju{ |ni| ni.mju{ |nj| nj.mu }}
- def mju(&block)
- @connections.sum { _1.mju * block[_1.neuron] }
+ # Initializes a neuron with default activation 0.5 and zero bias.
+ def initialize
+ @activation = 0.5
+ @bias = 0.0
+ @connections = [] # incoming connections
  end

- # Full recursive implementation of mju:
- def self.mju(neuron)
- return 0.0 if neuron.connections.empty?
+ attr_accessor :bias # bias is settable
+ attr_reader :activation, :connections # activation is read-only

- neuron.mu + neuron.mju { |connected_neuron| Neuron.mju(connected_neuron) }
+ # Sets activation by applying squash to raw input value.
+ def set(value)
+ @activation = squash(value)
  end

- # 𝓓𝒗⌈𝒗 = (1-⌈𝒗)⌈𝒗 = (1-𝒂)𝒂 = 𝓑𝒂
- def derivative = Neuronet.derivative[@activation]
-
- # 𝝀 = 𝓑𝒂𝛍
- def lamda = derivative * mu
-
- # 𝜿 := 𝜧 𝝁' = 𝑾 𝓑𝒂'𝝁' = 𝑾 𝝀'
- # def kappa = mju(&:mu)
- def kappa = @connections.sum(&:kappa)
-
- # 𝜾 := 𝜧 𝜧' 𝝁" = 𝜧 𝜿'
- def iota = mju(&:kappa)
-
- # One can explicitly set the neuron's value, typically used to set the input
- # neurons. The given "real world" value is squashed into the neuron's
- # activation value.
- def value=(value)
- # If value is out of bounds, set it to the bound.
- if value.abs > Neuronet.maxv
- value = value.positive? ? Neuronet.maxv : -Neuronet.maxv
- end
- @activation = Neuronet.squash[value]
+ # Creates a weighted connection to another neuron.
+ # See [Neuronet::Connection](connection.rb)
+ def connect(neuron, weight = 0.0)
+ @connections << Connection.new(neuron, weight)
  end

- # The "real world" value of the neuron is the unsquashed activation value.
- def value = Neuronet.unsquash[@activation]
-
- # The initialize method sets the neuron's value, bias and connections.
- def initialize(value = 0.0, bias: 0.0, connections: [])
- self.value = value
- @connections = connections
- @bias = bias
- @label = Neuron.label
- Neuron.label = Neuron.label.next
+ # Computes (raw output) value: bias + sum of incoming connection values.
+ def value
+ @bias + @connections.sum(&:value)
  end

- # Updates the activation with the current value of bias and updated values
- # of connections.
+ # Updates activation by squashing the current value (see above).
  def update
- return @activation if @connections.empty?
-
- self.value = @bias + @connections.sum(&:update)
- @activation
+ @activation = squash(value)
  end
-
- # For when connections are already updated, Neuron#partial updates the
- # activation with the current values of bias and connections. It is not
- # always necessary to burrow all the way down to the terminal input neuron
- # to update the current neuron if it's connected neurons have all been
- # updated. The implementation should set it's algorithm to use partial
- # instead of update as update will most likely needlessly update previously
- # updated neurons.
- def partial
- return @activation if @connections.empty?
-
- self.value = @bias + @connections.sum(&:partial)
- @activation
- end
-
- # The backpropagate method modifies the neuron's bias in proportion to the
- # given error and passes on this error to each of its connection's
- # backpropagate method. While updates flows from input to output, back-
- # propagation of errors flows from output to input.
- def backpropagate(error)
- return self if @connections.empty?
-
- @bias += Neuronet.noise[error]
- if @bias.abs > Neuronet.maxb
- @bias = @bias.positive? ? Neuronet.maxb : -Neuronet.maxb
- end
- @connections.each { |connection| connection.backpropagate(error) }
- self
- end
-
- # Connects the neuron to another neuron. The default weight=0 means there
- # is no initial association. The connect method is how the implementation
- # adds a connection, the way to connect a neuron to another. To connect
- # "output" to "input", for example, it is:
- # input = Neuronet::Neuron.new
- # output = Neuronet::Neuron.new
- # output.connect(input)
- # Think "output" connects to "input".
- def connect(neuron = Neuron.new, weight: 0.0)
- @connections.push(Connection.new(neuron, weight:))
- # Note that we're returning the connected neuron:
- neuron
- end
-
- # Tacks on to neuron's inspect method to show the neuron's bias and
- # connections.
- def inspect
- fmt = Neuronet.format
- if @connections.empty?
- "#{@label}:#{fmt % value}"
- else
- "#{@label}:#{fmt % value}|#{[(fmt % @bias), *@connections].join('+')}"
- end
- end
-
- # A neuron plainly puts itself as it's label.
- def to_s = @label
  end
  end
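The old value=/value pair becomes set/value, and weight moves from a keyword to a positional argument. A small sketch, assuming Connection#value is weight * activation, consistent with the code shown here (connection.rb itself is not in this diff):

    input  = Neuronet::Neuron.new
    output = Neuronet::Neuron.new
    output.connect(input, 1.0)  # weight is now positional, not weight:
    input.set(1.0)              # activation = squash(1.0) ≈ 0.731
    output.value                # => 0.0 + 1.0 * 0.731 ≈ 0.731
    output.update               # activation = squash(0.731) ≈ 0.675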
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # NeuronStats provides network analysis methods.
+ module NeuronStats
+ # Returns the total tally of parameters in the downstream
+ # network subgraph from this neuron.
+ # This includes the neuron's bias (1 parameter),
+ # the weights of its incoming connections (one per connection),
+ # and the sum of parameters from all downstream neurons.
+ # Parameters from a shared neuron are counted multiple times
+ # if accessed via multiple pathways,
+ # reflecting the total parameter influence through all paths.
+ # Returns 0 for a neuron with no downstream connections.
+ def downstream_params_tally
+ return 0 if (size = connections.size).zero?
+
+ 1 + size + connections.sum { it.neuron.downstream_params_tally }
+ end
+
+ # Sum of activations + 1. It's a component of the sensitivity measure nju.
+ # See [wiki](https://github.com/carlosjhr64/neuronet/wiki)
+ def mju = 1 + connections.sum { it.neuron.activation }
+
+ # Sensitivity measure nju:
+ # 𝒆 ~ 𝜀𝝁 + 𝑾 𝓑𝒂'𝒆'
+ # 𝝂 ≜ 𝒆/𝜀
+ # 𝝂 ~ 𝝁 + 𝑾 𝓑𝒂'𝝂'
+ # See the [wiki](https://github.com/carlosjhr64/neuronet/wiki)
+ # See also test/tc_epsilon:
+ # https://github.com/carlosjhr64/neuronet/blob/master/test/tc_epsilon
+ # rubocop: disable Metrics
+ def nju
+ return 0 if connections.empty?
+
+ mju + connections.sum do |connection|
+ n = connection.neuron
+ next 0.0 if (nju = n.nju).zero? || (a = n.activation).zero? || a >= 1.0
+
+ connection.weight * a * (1.0 - a) * nju
+ end
+ end
+ # rubocop: enable Metrics
+ end
+ end
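To get a feel for downstream_params_tally, take the output neuron of an illustrative 3-4-1 network, assuming input neurons report no connections:

    mlp = Neuronet::MLP.new(3, 4, 1)
    out = mlp.output_layer.to_a.first
    # Each middle neuron: 1 bias + 3 weights + 0 downstream  = 4
    # Output neuron:      1 bias + 4 weights + 4 * 4 downstream = 21
    out.downstream_params_tally # => 21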
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Noisy Backpropagate
+ module NoisyBackpropagate
+ # rubocop: disable Metrics, Style
+ def backpropagate(error)
+ bmax = Config.bias_clamp
+ b = bias + (error * (rand + rand))
+ self.bias = b.abs > bmax ? (b.positive? ? bmax : -bmax) : b
+
+ wmax = Config.weight_clamp
+ connections.each do |c|
+ n = c.neuron
+ w = c.weight + (n.activation * error * (rand + rand))
+ c.weight = w.abs > wmax ? (w.positive? ? wmax : -wmax) : w
+ n.backpropagate(error)
+ end
+ end
+ # rubocop: enable Metrics, Style
+ end
+ end
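The (rand + rand) factor draws from a triangular distribution on [0, 2) with mean 1.0, so each noisy update matches the deterministic step in expectation. A quick sanity check:

    samples = Array.new(100_000) { rand + rand }
    samples.sum / samples.size # => ≈ 1.0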
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Noisy Middle Neuron
+ class NoisyMiddleNeuron < MiddleNeuron
+ include NoisyBackpropagate
+ end
+ end
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Noisy Neuron
+ class NoisyNeuron < Neuron
+ include NoisyBackpropagate
+ end
+ end
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Noisy Output Neuron
+ class NoisyOutputNeuron < OutputNeuron
+ include NoisyBackpropagate
+ end
+ end
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Output Layer
+ class OutputLayer
+ include LayerPresets
+ include Arrayable
+
+ def initialize(length, output_neuron: OutputNeuron)
+ @layer = Array.new(length) { output_neuron.new }
+ end
+
+ def connect(layer)
+ each do |neuron|
+ layer.each { neuron.connect(it) }
+ end
+ end
+
+ def values
+ @layer.map(&:value)
+ end
+
+ def to_a = @layer
+ end
+ end
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Output Neuron
+ class OutputNeuron
+ include NeuronStats
+ include Backpropagate
+
+ def initialize
+ @bias = 0.0
+ @connections = []
+ end
+
+ attr_accessor :bias
+ attr_reader :connections
+
+ def activation = nil
+
+ def connect(neuron, weight = 0.0)
+ @connections << Connection.new(neuron, weight)
+ end
+
+ def value
+ @bias + @connections.sum(&:value)
+ end
+ end
+ end
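Note that OutputNeuron never squashes and reports a nil activation, so the output layer is linear in its inputs' activations. For example, again assuming Connection#value is weight * activation:

    out = Neuronet::OutputNeuron.new
    mid = Neuronet::MiddleNeuron.new # activation starts at 0.5
    out.connect(mid, 2.0)
    out.value # => 1.0 (0.0 bias + 2.0 * 0.5); no squash applied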
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ module Neuronet
+ # Perceptron
+ class Perceptron
+ include NetworkStats
+ include Exportable
+ include Trainable
+ include Arrayable
+
+ def initialize(input_size, output_size,
+ input_neuron: InputNeuron, output_neuron: OutputNeuron)
+ @input_layer = InputLayer.new(input_size, input_neuron:)
+ @output_layer = OutputLayer.new(output_size, output_neuron:)
+ @output_layer.connect(@input_layer)
+ end
+
+ attr_reader :input_layer, :output_layer
+
+ def set(values)
+ @input_layer.set(values)
+ end
+
+ def values
+ @output_layer.values
+ end
+
+ def *(other)
+ set(other)
+ values
+ end
+
+ def to_a = [@input_layer, @output_layer]
+ end
+ end
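Finally, a minimal Perceptron run under the same assumptions; its * skips any update step because the output layer computes its values directly from the input activations:

    p = Neuronet::Perceptron.new(2, 1)
    p * [1.0, 1.0] # => [0.0] until trained (weights start at 0.0)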