simple_neural_network 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 4ddf41db0522abafd12d51c425246499522c36cd
+   data.tar.gz: 4b576ca87629cd9d98845b3d02c2c41e0a700cf1
+ SHA512:
+   metadata.gz: d11cff2d470e190617dc1ed71b3db48d6585b65b728e9c6490dcc4711535febc34c16130461bfedc1c871b63938906685c36ca34aef2e7882067eb5fdbff17ef
+   data.tar.gz: 6e69176cc45847c10af9d662d70964ca0ff5e868bc1a7dff32ff6ff7f9b0fc1cec62dfd6567f10206f33b5b7a951fc7e30e7a55aa7f7ec86b133c956f370b633
data/lib/layer.rb ADDED
@@ -0,0 +1,74 @@
+ require_relative "neuron"
+
+ class SimpleNeuralNetwork
+   class Layer
+     # Number of neurons
+     attr_accessor :size
+
+     attr_accessor :prev_layer
+     attr_accessor :next_layer
+
+     # List of #{size} neurons
+     attr_accessor :neurons
+
+     attr_accessor :network
+
+     def initialize(size, network)
+       @size = size
+       @neurons = []
+       @network = network
+
+       @prev_layer = nil
+       @next_layer = nil
+
+       populate_neurons
+     end
+
+     # The method that drives network output resolution.
+     # get_output calculates the array of neuron values for this layer.
+     # This is calculated by recursively fetching the output from the previous layer, then applying edge weight and bias rules.
+     # The first layer will fetch its values from @network.inputs
+     def get_output
+       if !prev_layer
+         # This is the first layer, so the output set is simply the network input set
+         @network.inputs
+       else
+         # Each neuron output value is calculated by:
+         # output[i] = (
+         #   (prev_layer_output[0] * prev_layer.neurons[0].edges[i])
+         #   + (prev_layer_output[1] * prev_layer.neurons[1].edges[i])
+         #   + ...
+         # ) + self.neurons[i].bias
+
+         prev_layer_output = prev_layer.get_output
+
+         # Generate the output values for the layer
+         (0..@size-1).map do |i|
+           value = 0
+
+           prev_layer_output.each_with_index do |output, index|
+             value += (output * prev_layer.neurons[index].edges[i])
+           end
+
+           value + @neurons[i].bias
+         end
+       end
+     end
+
+     def initialize_neuron_edges
+       return unless @next_layer
+
+       @neurons.each do |neuron|
+         neuron.initialize_edges(@next_layer.size)
+       end
+     end
+
+     private
+
+     def populate_neurons
+       @size.times do
+         @neurons << Neuron.new(layer: self)
+       end
+     end
+   end
+ end
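
As a sanity check on the formula in Layer#get_output, here is a minimal worked example in Ruby. It assumes a hypothetical 2-neuron previous layer feeding neuron 0 of the current layer; the weights, bias, and outputs are illustrative values, not taken from the gem:

  # output[i] = sum over j of (prev_output[j] * prev_layer.neurons[j].edges[i]) + neurons[i].bias
  prev_output       = [0.5, 0.25]   # values produced by the previous layer
  edges_to_neuron_0 = [2.0, 4.0]    # prev_layer.neurons[j].edges[0] for j = 0, 1
  bias              = 1.0           # this layer's neurons[0].bias

  value = prev_output[0] * edges_to_neuron_0[0] +
          prev_output[1] * edges_to_neuron_0[1] +
          bias
  # => 0.5 * 2.0 + 0.25 * 4.0 + 1.0 = 3.0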
data/lib/network.rb ADDED
@@ -0,0 +1,154 @@
+ require_relative "layer"
+ require 'json'
+
+ # To properly initialize a network:
+ # - Initialize the new Network object
+ # - Create layers using Network#create_layer
+ #   (This creates layers from left to right, input -> hidden layers -> output layer)
+
+ # Sample usage:
+ #
+ # network = SimpleNeuralNetwork::Network.new
+ #
+ # network.create_layer(neurons: 10)
+ # network.create_layer(neurons: 2)
+
+ # network.run([0.5]*10)
+
+ class SimpleNeuralNetwork
+   class Network
+     class InvalidInputError < StandardError; end
+     # An array of layers
+     attr_accessor :layers
+
+     attr_accessor :inputs
+
+     attr_writer :normalization_function
+
+     attr_accessor :edge_initialization_function
+     attr_accessor :neuron_bias_initialization_function
+
+     def initialize
+       @layers = []
+       @inputs = []
+
+       @normalization_function = method(:default_normalization_function)
+       @edge_initialization_function = method(:default_edge_initialization_function)
+       @neuron_bias_initialization_function = method(:default_neuron_bias_initialization_function)
+     end
+
+     # Run an input set against the neural network.
+     # Accepts an array of input values between 0 and 1.
+     # Input array length must be equal to the size of the first layer.
+     # Returns an array of outputs.
+     def run(inputs)
+       unless inputs.size == input_size && inputs.all? { |input| input >= 0 && input <= 1 }
+         raise InvalidInputError.new("Invalid input passed to Network#run")
+       end
+
+       @inputs = inputs
+
+       # Get output from the last layer. It recursively depends on the layers before it.
+       @layers[-1].get_output.map do |output|
+         @normalization_function.call(output)
+       end
+     end
+
+     # Returns the number of input nodes
+     def input_size
+       @layers[0].size
+     end
+
+     # Returns the number of output nodes
+     def output_size
+       @layers[-1].size
+     end
+
+     def create_layer(neurons:)
+       unless @layers.empty?
+         new_layer = Layer.new(neurons, self)
+         prev_layer = @layers.last
+
+         @layers << new_layer
+
+         new_layer.prev_layer = prev_layer
+         prev_layer.next_layer = new_layer
+
+         prev_layer.initialize_neuron_edges
+       else
+         @layers << Layer.new(neurons, self)
+       end
+     end
+
+     def reset_normalization_function
+       @normalization_function = method(:default_normalization_function)
+     end
+
+     # Serializes the neural network into a JSON string. This can later be deserialized back into a Network object.
+     # Useful for storing partially trained neural networks.
+     # Note: Currently does not serialize the bias init function, edge init function, or normalization function.
+     def serialize
+       {
+         layers: layers.map do |layer|
+           {
+             neurons: layer.neurons.map do |neuron|
+               {
+                 bias: neuron.bias.to_f,
+                 edges: neuron.edges.map(&:to_f)
+               }
+             end
+           }
+         end
+       }.to_json
+     end
+
+     # Deserialize a JSON neural network back into a Ruby object.
+     # Note that the normalization function will need to be reset.
+     # Normalization function serialization in the future would be cool.
+     def self.deserialize(string)
+       hash = JSON.parse(string)
+
+       network = Network.new
+
+       hash["layers"].each do |layer|
+         neurons_array = layer["neurons"]
+         layer = Layer.new(neurons_array.length, network)
+         network.layers << layer
+
+         layer.neurons.each_with_index do |neuron, index|
+           neuron_hash = neurons_array[index]
+
+           neuron.bias = neuron_hash["bias"].to_f
+           neuron.edges = neuron_hash["edges"].map(&:to_f)
+         end
+       end
+
+       network.layers.each_with_index do |layer, index|
+         unless index == 0
+           layer.prev_layer = network.layers[index - 1]
+         end
+
+         layer.next_layer = network.layers[index + 1]
+       end
+
+       network
+     end
+
+     private
+
+     # The default normalization function for the network output:
+     # the standard logistic sigmoid function
+     # f(x) = 1 / (1 + e^(-x))
+     def default_normalization_function(output)
+       1 / (1 + (Math::E ** (-1 * output)))
+     end
+
+     def default_edge_initialization_function
+       rand(-5..5)
+     end
+
+     def default_neuron_bias_initialization_function
+       0
+     end
+   end
+ end
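
For orientation, a short usage sketch of the Network API above, expanding on the sample in the file header comment. The layer sizes and input values are arbitrary illustrative choices:

  require "simple_neural_network"

  # Build a 3 -> 4 -> 2 network; create_layer wires edges from left to right.
  network = SimpleNeuralNetwork::Network.new
  network.create_layer(neurons: 3)
  network.create_layer(neurons: 4)
  network.create_layer(neurons: 2)

  # Inputs must match input_size and each value must lie between 0 and 1.
  outputs = network.run([0.2, 0.5, 0.9])

  # Round-trip through JSON. Custom normalization functions are not serialized,
  # so the restored network uses the default sigmoid.
  json = network.serialize
  restored = SimpleNeuralNetwork::Network.deserialize(json)
  restored.run([0.2, 0.5, 0.9])

Serialization stores only biases and edge weights, so the initialization functions and the normalization function fall back to their defaults after deserialize.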
data/lib/neuron.rb ADDED
@@ -0,0 +1,23 @@
+ class SimpleNeuralNetwork
+   class Neuron
+     attr_accessor :bias
+
+     # A neuron's edges connect it to the #{layer.next_layer.size} neurons of the next layer
+     attr_accessor :edges
+
+     def initialize(layer:)
+       @layer = layer
+       @bias = layer.network.neuron_bias_initialization_function.call
+       @edges = []
+     end
+
+     # A neuron should have one edge per neuron in the next layer
+     def initialize_edges(next_layer_size)
+       init_function = @layer.network.edge_initialization_function
+
+       next_layer_size.times do
+         @edges << init_function.call
+       end
+     end
+   end
+ end
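
Since each Neuron reads network.neuron_bias_initialization_function at construction time, and network.edge_initialization_function when the next layer is created, custom initializers have to be assigned before the layers are built. A sketch with hypothetical lambdas (illustrative, not part of the gem):

  network = SimpleNeuralNetwork::Network.new

  # Assign before create_layer; neurons and edges read these during layer construction.
  network.neuron_bias_initialization_function = -> { 0.1 }
  network.edge_initialization_function        = -> { rand(-1.0..1.0) }

  network.create_layer(neurons: 2)
  network.create_layer(neurons: 1)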
data/lib/simple_neural_network.rb ADDED
@@ -0,0 +1,3 @@
+ class SimpleNeuralNetwork
+   require "network"
+ end
metadata ADDED
@@ -0,0 +1,47 @@
+ --- !ruby/object:Gem::Specification
+ name: simple_neural_network
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Nathaniel Woodthorpe
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2018-03-11 00:00:00.000000000 Z
+ dependencies: []
+ description: A simple neural network implementation in Ruby.
+ email: njwoodthorpe@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/layer.rb
+ - lib/network.rb
+ - lib/neuron.rb
+ - lib/simple_neural_network.rb
+ homepage: https://github.com/d12/SimpleNeuralNetwork
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.5.2
+ signing_key:
+ specification_version: 4
+ summary: A simple neural network implementation in Ruby.
+ test_files: []