the_noggin 0.0.1

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: f2c52db539548af3d06e7ce7e291d44439f4610b
+   data.tar.gz: a03adc11a237bec24c7dd0ae8a59cd59ee9ec4b3
+ SHA512:
+   metadata.gz: 3608bb2ae571b06ad72c9ed67387c3c436f79b42e4933ef5bf5e9d988e5681afeefb4650c23c964b653faf975774f7e5a14b12caed4baad135be48f55f1cbaf7
+   data.tar.gz: 5eb96f0af2ae976f8fe1fb35a50fc6134f8e30afd2e92375e07eaaab637db1afebc78df51c9b380a22d0f9bb773ff2637a6b809e86e1b2290b53fa3033adaa78
data/.gitignore ADDED
@@ -0,0 +1,18 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
+ .idea
data/Gemfile ADDED
@@ -0,0 +1,3 @@
+ source 'https://rubygems.org'
+
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2015 Shawn
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,47 @@
+ # Noggin
+ Ruby neural network implementation that uses backpropagation and gradient descent for training.
+
+
+ ``` Ruby
+ network = Noggin::Network.new
+
+ network.train([
+   { input: [0, 0], output: 0 },
+   { input: [0, 1], output: 1 },
+   { input: [1, 0], output: 1 },
+   { input: [1, 1], output: 0 }
+ ])
+
+ network.run [0, 0] # 0.0163
+ network.run [0, 1] # 0.9573
+ network.run [1, 0] # 0.9702
+ network.run [1, 1] # 0.0142
+
+ ```
+
+ ## Options
+ ``` Ruby
+ Noggin::Network.new(
+   max_training_laps: 100000, # How many error-propagation passes to run when training
+   learning_rate: 0.1,        # How fast the network learns
+   hidden_layer_size: 1,      # Number of hidden layers
+   hidden_layer_node_size: 2  # Number of nodes in each hidden layer
+ )
+ ```
+
+ ## Print Network
+
+ ``` Ruby
+ network.pretty_print
+ ```
+ ```
+ ------ ------ --------------
+ | | -EDGE--(w: 0.438443, d: 0.01759) | | -EDGE--(w: 0.515923, d: 0.09704) | ed: 0.668486
+ | | -EDGE--(w: 0.746539, d: 0.013825) ------ | d: 0.148145
+ ------ ------ | e: 0.223437
+ ------ | | -EDGE--(w: 0.485781, d: 0.11099) | o: 0.668486
+ | | -EDGE--(w: 0.199745, d: 0.01759) ------ --------------
+ | | -EDGE--(w: 0.345684, d: 0.013825)
+ ------
+
+ ```
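The README's two snippets compose as you would expect. A minimal end-to-end sketch using only the public API shown above (the option values here are illustrative, not recommendations):

``` Ruby
require 'noggin'

# XOR, with explicit options instead of the defaults.
network = Noggin::Network.new(max_training_laps: 50000, learning_rate: 0.3)

network.train([
  { input: [0, 0], output: 0 },
  { input: [0, 1], output: 1 },
  { input: [1, 0], output: 1 },
  { input: [1, 1], output: 0 }
])

network.run [0, 1] # close to 1 after training
```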
data/Rakefile ADDED
@@ -0,0 +1 @@
+ require "bundler/gem_tasks"
data/lib/noggin/network.rb ADDED
@@ -0,0 +1,104 @@
+ module Noggin
+   class Network
+
+     attr_reader :input_nodes
+     attr_reader :output_node
+     attr_reader :layers
+     attr_reader :options
+
+     DEFAULTS = {
+       learning_rate: 0.2,
+       max_training_laps: 100000,
+       hidden_layer_size: 1,
+       hidden_layer_node_size: 2
+     }
+
+     def initialize **opts
+       @options = DEFAULTS.merge opts
+       @ready = false
+       @layers = []
+     end
+
+     def run input
+       update_input_nodes input
+       output_node.output
+     end
+
+     def train data_batch
+       init_network(data_batch) unless @ready
+       options[:max_training_laps].times do
+         data_batch.each do |batch|
+           propagate_error! batch[:input], batch[:output]
+         end
+       end
+       self
+     end
+
+     def propagate_error! input, expected
+       update_input_nodes input
+       output_node.expected = expected
+       input_nodes.each { |node| node.derivative_chain }
+       update_weights!
+     end
+
+     def update_weights!
+       edges.each do |edge|
+         edge.weight -= options[:learning_rate] * edge.derivative
+       end
+     end
+
+     def init_network data_batch
+       @input_nodes = Array.new(data_batch.first[:input].size) { Noggin::Node::Input.new }
+       @layers << @input_nodes
+       last_layer = @input_nodes
+       options[:hidden_layer_size].times do
+         new_layer = Array.new(options[:hidden_layer_node_size]) { Noggin::Node::Base.new }
+         @layers << new_layer
+         connect_layer last_layer, new_layer
+         last_layer = new_layer
+       end
+       @output_node = Noggin::Node::Output.new
+       @layers << [@output_node]
+       last_layer.each { |node| connect_nodes(node, output_node) }
+       @ready = true
+     end
+
+     def edges
+       edges = []
+       queue = [output_node]
+       until queue.empty?
+         node = queue.pop
+         node.origins.each do |edge|
+           edges << edge
+           queue << edge.origin
+         end
+       end
+       edges
+     end
+
+     def connect_nodes origin, dest
+       edge = Noggin::Node::Edge.new origin: origin, dest: dest
+       origin.dests << edge
+       dest.origins << edge
+     end
+
+     def connect_layer origins, dests
+       origins.each do |origin|
+         dests.each do |dest|
+           connect_nodes origin, dest
+         end
+       end
+     end
+
+     def update_input_nodes input
+       input_nodes.each_with_index do |node, i|
+         node.output = input[i]
+       end
+     end
+
+     def pretty_print
+       Noggin::PrettyPrinter.print_network layers
+     end
+
+   end
+ end
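The training loop above is plain gradient descent: each lap, `propagate_error!` runs one sample through the graph, the chain rule leaves the error derivative on every edge, and `update_weights!` steps each weight against its gradient. A standalone sketch of that update rule (not part of the gem; `Edge` here is a stand-in struct):

``` Ruby
# One gradient-descent step per edge: w <- w - learning_rate * dE/dw
Edge = Struct.new(:weight, :derivative)

def update_weights!(edges, learning_rate)
  edges.each { |edge| edge.weight -= learning_rate * edge.derivative }
end

edges = [Edge.new(0.5, 0.2), Edge.new(-0.3, -0.1)]
update_weights!(edges, 0.1)
edges.map(&:weight) # => [0.48, -0.29] (up to float rounding)
```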
data/lib/noggin/node/base.rb ADDED
@@ -0,0 +1,42 @@
+ module Noggin
+   module Node
+     class Base
+
+       attr_reader :origins
+       attr_reader :dests
+       attr_accessor :derivative
+
+       def initialize
+         @origins = []
+         @dests = []
+       end
+
+       def input
+         origins.inject(0) { |sum, edge| sum + edge.value }
+       end
+
+       def output
+         1 / ( 1 + Math.exp(-1 * input) )
+       end
+
+       def output_derivative
+         output * (1 - output)
+       end
+
+       def derivative_chain
+         # store the node's derivative via the accessor rather than a local variable
+         @derivative = output_derivative * dests.inject(0) { |sum, edge| sum + edge.derivative_chain }
+       end
+
+       def pretty_print
+         out = []
+         out << " ------"
+         dests.each do |edge|
+           out << "| | -EDGE--(#{edge.pretty_print}) "
+         end
+         out << " ------"
+         out
+       end
+
+     end
+   end
+ end
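`Base#output` is the logistic (sigmoid) activation and `output_derivative` is its standard derivative, sigma'(x) = sigma(x) * (1 - sigma(x)). A quick standalone check (plain Ruby, not part of the gem):

``` Ruby
def sigmoid(x)
  1.0 / (1 + Math.exp(-x))
end

def sigmoid_derivative(x)
  s = sigmoid(x)
  s * (1 - s)
end

sigmoid(0.0)            # => 0.5
sigmoid_derivative(0.0) # => 0.25
```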
data/lib/noggin/node/edge.rb ADDED
@@ -0,0 +1,35 @@
+ module Noggin
+   module Node
+     class Edge
+
+       attr_accessor :origin
+       attr_accessor :dest
+       attr_accessor :weight
+       attr_accessor :derivative
+
+       def initialize origin:, dest:, weight: rand(0.20...0.80)
+         @origin = origin
+         @dest = dest
+         @weight = weight
+       end
+
+       def input
+         origin.output
+       end
+
+       def value
+         origin.output * weight
+       end
+
+       def derivative_chain
+         # compute the downstream derivative once instead of walking the subtree twice
+         downstream = dest.derivative_chain
+         @derivative = input * downstream
+         @weight * downstream
+       end
+
+       def pretty_print
+         "w: #{@weight.round(6)}, d: #{@derivative.round(6)}"
+       end
+
+     end
+   end
+ end
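`Edge#derivative_chain` is the chain rule on a single connection: the gradient stored on the edge is the origin's activation times the downstream derivative, and the value handed back to the origin is the weight times that same downstream derivative. With hypothetical numbers:

``` Ruby
input      = 0.73 # origin node's activation
weight     = 0.44
downstream = 0.15 # derivative flowing back from the layers above

input * downstream  # ≈ 0.1095, stored as edge.derivative (dE/dw)
weight * downstream # ≈ 0.066, returned to the origin node
```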
data/lib/noggin/node/input.rb ADDED
@@ -0,0 +1,7 @@
+ module Noggin
+   module Node
+     class Input < Noggin::Node::Base
+       attr_accessor :output
+     end
+   end
+ end
data/lib/noggin/node/output.rb ADDED
@@ -0,0 +1,31 @@
+ module Noggin
+   module Node
+     class Output < Noggin::Node::Base
+
+       attr_accessor :expected
+
+       def error
+         0.5 * (expected - output)**2
+       end
+
+       def error_derivative
+         output - expected
+       end
+
+       def derivative_chain
+         output_derivative * error_derivative
+       end
+
+       def pretty_print
+         out = []
+         out << " --------------"
+         out << "| ed: #{error_derivative.round(6)}"
+         out << "| d: #{derivative_chain.round(6)}"
+         out << "| e: #{error.round(6)}"
+         out << "| o: #{output.round(6)}"
+         out << " --------------"
+         out
+       end
+     end
+   end
+ end
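The output node uses a squared-error loss, E = 1/2 * (expected - output)^2, whose derivative with respect to the output is (output - expected); `derivative_chain` multiplies that by the sigmoid derivative and feeds it back into the graph. Standalone check (plain Ruby, not part of the gem):

``` Ruby
def error(expected, output)
  0.5 * (expected - output)**2
end

def error_derivative(expected, output)
  output - expected
end

error(0, 0.9)            # ≈ 0.405
error_derivative(0, 0.9) # => 0.9
```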
data/lib/noggin/pretty_printer.rb ADDED
@@ -0,0 +1,26 @@
+ module Noggin
+   class PrettyPrinter
+     def self.print_network layers
+       grid = []
+       layers.each do |layer|
+         grid << (col = [])
+         layer.each do |node|
+           col << node.pretty_print
+         end
+         col.flatten!
+       end
+       grid[0].zip(*grid[1..-1]).each do |row|
+         row.each_with_index do |cell, col_i|
+           max_length = grid[col_i].max_by { |s| s.size }.size
+           if cell
+             room = max_length - cell.length
+             print cell
+             print " " * room
+             print " "
+           end
+         end
+         print "\n"
+       end
+     end
+   end
+ end
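`PrettyPrinter.print_network` lays the network out column by column: each layer's `pretty_print` lines form one column, `zip` turns the columns into rows (padding short columns with `nil`), and each cell is right-padded to its column's widest entry. The zip trick in isolation (illustrative data only):

``` Ruby
cols = [%w[a1 a2 a3], %w[b1 b2], %w[c1]]
cols[0].zip(*cols[1..-1])
# => [["a1", "b1", "c1"], ["a2", "b2", nil], ["a3", nil, nil]]
```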
data/lib/noggin/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Noggin
+   VERSION = '0.0.1'
+ end
data/lib/noggin.rb ADDED
@@ -0,0 +1,6 @@
+ require 'noggin/node/base'
+ require 'noggin/node/input'
+ require 'noggin/node/output'
+ require 'noggin/node/edge'
+ require 'noggin/network'
+ require 'noggin/pretty_printer'
data/spec/network_spec.rb ADDED
@@ -0,0 +1,37 @@
+ require_relative '../lib/noggin'
+
+ describe Noggin::Network do
+
+   subject { Noggin::Network.new(max_training_laps: 1, learning_rate: 0.1, hidden_layer_size: 1, hidden_layer_node_size: 1) }
+
+   let(:input_node) { subject.layers[0].first }
+   let(:hidden_node) { subject.layers[1].first }
+   let(:output_node) { subject.layers[2].first }
+
+   before do
+     subject.train [{ input: [1], output: 0 }]
+   end
+
+   it 'sets up the network graph according to settings' do
+     expect(input_node.dests.first.dest).to eq hidden_node
+     expect(hidden_node.dests.first.dest).to eq output_node
+   end
+
+   it 'sets hidden layer size' do
+     expect(subject.layers.size).to eq(3)
+   end
+
+   it 'sets hidden layer node size' do
+     expect(subject.layers[1].size).to eq(1)
+   end
+
+   it 'sets max training laps' do
+   end
+
+   it 'backpropagates error' do
+   end
+
+ end
metadata ADDED
@@ -0,0 +1,104 @@
+ --- !ruby/object:Gem::Specification
+ name: the_noggin
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Shawn
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2015-02-21 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.4'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.4'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.2'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.2'
+ description: Ruby Neural Network implementation using backpropagation and gradient
+   descent for training
+ email:
+ - shaw3257@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - ".gitignore"
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - lib/noggin.rb
+ - lib/noggin/network.rb
+ - lib/noggin/node/base.rb
+ - lib/noggin/node/edge.rb
+ - lib/noggin/node/input.rb
+ - lib/noggin/node/output.rb
+ - lib/noggin/pretty_printer.rb
+ - lib/noggin/version.rb
+ - spec/network_spec.rb
+ homepage: https://github.com/shaw3257/noggin
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.3
+ signing_key:
+ specification_version: 4
+ summary: Pass in training samples, and the network will try to find pathways that
+   lead to the least amount of error. The network is customizable in that it lets
+   you control the learning rate, hidden layer size and depth, and max training iterations.
+ test_files:
+ - spec/network_spec.rb