newral 0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. checksums.yaml +7 -0
  2. data/Gemfile +4 -0
  3. data/LICENSE +21 -0
  4. data/README.md +278 -0
  5. data/Rakefile +10 -0
  6. data/lib/newral.rb +53 -0
  7. data/lib/newral/bayes.rb +39 -0
  8. data/lib/newral/classifier/dendogram.rb +68 -0
  9. data/lib/newral/classifier/k_means_cluster.rb +45 -0
  10. data/lib/newral/classifier/node.rb +58 -0
  11. data/lib/newral/classifier/node_distance.rb +19 -0
  12. data/lib/newral/data/base.rb +153 -0
  13. data/lib/newral/data/cluster.rb +37 -0
  14. data/lib/newral/data/cluster_set.rb +38 -0
  15. data/lib/newral/data/csv.rb +23 -0
  16. data/lib/newral/data/idx.rb +48 -0
  17. data/lib/newral/error_calculation.rb +28 -0
  18. data/lib/newral/functions/base.rb +102 -0
  19. data/lib/newral/functions/block.rb +34 -0
  20. data/lib/newral/functions/gaussian.rb +41 -0
  21. data/lib/newral/functions/line.rb +52 -0
  22. data/lib/newral/functions/polynomial.rb +48 -0
  23. data/lib/newral/functions/radial_basis_function_network.rb +54 -0
  24. data/lib/newral/functions/ricker_wavelet.rb +13 -0
  25. data/lib/newral/functions/vector.rb +59 -0
  26. data/lib/newral/genetic/tree.rb +70 -0
  27. data/lib/newral/graphs/a_star.rb +12 -0
  28. data/lib/newral/graphs/cheapest_first.rb +11 -0
  29. data/lib/newral/graphs/edge.rb +24 -0
  30. data/lib/newral/graphs/graph.rb +63 -0
  31. data/lib/newral/graphs/node.rb +11 -0
  32. data/lib/newral/graphs/path.rb +50 -0
  33. data/lib/newral/graphs/tree_search.rb +60 -0
  34. data/lib/newral/networks/backpropagation_network.rb +68 -0
  35. data/lib/newral/networks/layer.rb +28 -0
  36. data/lib/newral/networks/network.rb +146 -0
  37. data/lib/newral/networks/perceptron.rb +84 -0
  38. data/lib/newral/networks/sigmoid.rb +55 -0
  39. data/lib/newral/probability.rb +42 -0
  40. data/lib/newral/probability_set.rb +108 -0
  41. data/lib/newral/q_learning/base.rb +90 -0
  42. data/lib/newral/tools.rb +135 -0
  43. data/lib/newral/training/gradient_descent.rb +36 -0
  44. data/lib/newral/training/greedy.rb +36 -0
  45. data/lib/newral/training/hill_climbing.rb +77 -0
  46. data/lib/newral/training/linear_regression.rb +30 -0
  47. data/lib/newral/training/linear_regression_matrix.rb +32 -0
  48. metadata +147 -0
module Newral
  module Graphs
    # A* search: orders the frontier by accumulated path cost plus a
    # straight-line distance heuristic towards the goal node.
    class AStar < TreeSearch

      # Measure used by TreeSearch#remove_choice: cost so far plus the
      # euclidean distance from the path's end to the goal's location.
      # (`euclidian_distance` is the spelling used by Newral::Tools.)
      def measure( path )
        heuristic = Newral::Tools.euclidian_distance( @end_node.location, path.end_node.location )
        path.cost + heuristic
      end

    end
  end
end
module Newral
  module Graphs
    # Uniform-cost ("cheapest first") search: the frontier is ordered purely
    # by the accumulated cost of each candidate path.
    class CheapestFirst < TreeSearch

      # Measure used by TreeSearch#remove_choice: the total path cost so far.
      def measure( path )
        path.cost
      end

    end
  end
end
module Newral
  module Graphs
    # An edge between two nodes, optionally directed, carrying an optional
    # cost and an arbitrary data payload.
    class Edge
      attr_accessor :start_node, :end_node, :directed, :cost, :data

      def initialize( key:nil, start_node: nil, end_node: nil, directed: false, cost: nil, data:nil )
        @key = key
        @start_node = start_node
        @end_node = end_node
        @directed = directed
        @cost = cost
        @data = data
      end

      # The explicit key if one was given; otherwise a key derived from the
      # node pair, e.g. "a=>b" (directed) or "a<=>b" (undirected).
      def key
        return @key if @key
        arrow = directed ? '=>' : '<=>'
        "#{ @start_node }#{ arrow }#{ @end_node }"
      end

      def to_s
        key
      end
    end
  end
end
module Newral
  module Graphs
    module Errors
      class UnknownNode < StandardError; end
    end

    # A simple graph: a list of nodes plus a list of Edge objects.
    class Graph
      attr_reader :nodes, :edges

      def initialize( nodes: [], edges: [] )
        @nodes = nodes
        @edges = edges
      end

      # Adds an edge. If an endpoint is given by name rather than as a node
      # object, it is resolved against the known nodes first.
      # Raises Errors::UnknownNode when an endpoint cannot be resolved.
      def add_edge( edge )
        unless @nodes.member?( edge.start_node ) && @nodes.member?( edge.end_node )
          # let´s try to find it by name
          @nodes.each do |node|
            edge.start_node = node if node.respond_to?( :name ) && node.name == edge.start_node
            edge.end_node = node if node.respond_to?( :name ) && node.name == edge.end_node
          end
          # fixed: was `raise Errors::UnkownNode` (typo), which would raise
          # NameError instead of the intended error class
          raise Errors::UnknownNode unless @nodes.member?( edge.start_node ) && @nodes.member?( edge.end_node )
        end
        @edges << edge
        self
      end

      def add_node( node )
        # fixed: was `@nodes < node`, which raises NoMethodError on Array
        @nodes << node
        self
      end

      def add_nodes( nodes )
        @nodes = @nodes + nodes
        self
      end

      def find_node_by_name( name )
        @nodes.find{ |node| node.name == name }
      end

      # we can also add edges like this: { 1 => 2, 2 => 5 }
      def add_edges( edges, directed: false )
        if edges.kind_of?( Hash )
          edges.each do |from, to|
            @edges << Edge.new( start_node: from, end_node: to, directed: directed )
          end
        else
          edges.each do |edge|
            add_edge edge
          end
        end
        self
      end

      # All edges leaving +node+ (directed) or touching it (undirected).
      def find_edges( node )
        @edges.collect do |edge|
          keep = edge.directed ? edge.start_node == node : edge.start_node == node || edge.end_node == node
          edge if keep
        end.compact
      end

    end
  end
end
module Newral
  module Graphs
    # A named graph node with an optional location (locations are used by
    # distance-based searches such as AStar).
    class Node
      attr_reader :name, :location

      def initialize( name: nil, location: nil )
        @name = name
        @location = location
      end
    end
  end
end
module Newral
  module Graphs
    module Errors
      class CanOnlyConnectToLastEdge < ::StandardError; end
      class CircularPath < ::StandardError; end
    end

    # An ordered chain of connected edges: each appended edge must start at
    # the node where the previous edge ended.
    class Path
      # edges:: seed edges (copied, so the caller's array is not mutated)
      # allow_circular_paths:: see NOTE(review) on #add_edge
      def initialize( edges:[], allow_circular_paths: true )
        @edges = edges.dup
        @allow_circular_paths = allow_circular_paths
      end

      attr_reader :edges

      # Appends +edge+ and returns self.
      # Raises CanOnlyConnectToLastEdge unless the new edge starts at the
      # current end node; raises CircularPath when the new edge's end node
      # already appears anywhere on the path.
      # NOTE(review): the CircularPath guard reads
      #   `unless @allow_circular_paths && !<end node already on path>`
      # which raises on ANY revisit even when allow_circular_paths is true,
      # and raises unconditionally when it is false. TreeSearch#run relies on
      # the revisit-raise for pruning (it rescues CircularPath), so the flag
      # semantics look inverted — confirm intent before changing.
      def add_edge( edge )
        last_edge = @edges.last
        raise Errors::CanOnlyConnectToLastEdge,[last_edge,edge] unless @edges.empty? || last_edge.end_node == edge.start_node
        raise Errors::CircularPath unless @allow_circular_paths && !@edges.index{|edge1| edge1.start_node == edge.end_node || edge1.end_node == edge.end_node }
        @edges << edge
        self
      end

      # Number of edges on the path.
      def length
        @edges.length
      end

      # Sum of the edge costs (0 for an empty path).
      def cost
        @edges.inject(0){ |value,edge| value+edge.cost }
      end

      # First node of the path; raises NoMethodError on an empty path.
      def start_node
        @edges.first.start_node
      end

      # Last node of the path; raises NoMethodError on an empty path.
      def end_node
        @edges.last.end_node
      end

      def to_s
        @edges.join(', ')
      end

    end
  end
end
module Newral
  module Graphs
    module Errors
      class FrontierEmpty < StandardError; end
    end
    # the algorithms are heavily inspired by the Udacity course from
    # Sebastian Thrun https://classroom.udacity.com/courses/cs271
    #
    # Generic best-first tree search over a Graph. Subclasses (AStar,
    # CheapestFirst) override #measure to change the frontier ordering; the
    # default measure is path length, i.e. breadth-first behaviour.
    class TreeSearch
      attr_reader :frontier

      # graph:: the Graph to search
      # start_node:: node the search starts from
      # end_node:: goal node
      def initialize( graph: nil, start_node: nil, end_node: nil )
        @graph = graph
        @start_node = start_node
        @end_node = end_node
        # seed the frontier with a zero-cost self-edge so every Path has a
        # well-defined end_node from the start
        path = Path.new(edges:[ Edge.new( start_node: start_node, end_node: start_node, directed: true, cost:0 )])
        # NOTE(review): `{end_node: 0 }` stores the literal symbol :end_node
        # as the key, not the @end_node object — presumably
        # `{ start_node => 0 }` was intended; verify before changing.
        @explored = {end_node: 0 }
        @frontier = [ path ]
      end

      # Repeatedly expands the best frontier path (per #measure) until a path
      # reaching @end_node is popped; returns that Path.
      # Circular extensions are skipped via Path's CircularPath error.
      # Raises Errors::FrontierEmpty if the frontier empties before the goal
      # is reached.
      def run

        while @frontier.length > 0
          path = remove_choice
          return path if path.end_node == @end_node
          edges = @graph.find_edges( path.end_node)
          # debug output left in by the author
          puts "no edges found for #{path.end_node.name} #{@graph.edges.length}" unless edges.length > 0
          edges.each do |edge|
            begin
              # undirected edges can be traversed from either side
              end_node = edge.start_node == path.end_node ? edge.end_node : edge.start_node
              new_edge = Edge.new( start_node: path.end_node, end_node: end_node, directed: true, cost: edge.cost )
              puts( "n:#{ new_edge.to_s } e:#{edge} n:#{end_node} s:#{path.end_node} #{edge.start_node == new_edge.end_node } #{edge.end_node}")
              new_path = Path.new(edges:path.edges).add_edge( new_edge )
              # NOTE(review): the comparison uses measure( path ) but the
              # stored value uses measure( new_path ) — likely both were
              # meant to be new_path; confirm before changing.
              if @explored[new_path.end_node].nil? || measure( path ) < @explored[new_path.end_node]
                @frontier << new_path
                @explored[ new_path.end_node ] = measure( new_path )
              end
            rescue Errors::CircularPath
              puts "circular #{ new_path.to_s }"
              # no need to check this path
            end
          end
        end
        raise Errors::FrontierEmpty
      end

      # Sorts the frontier descending by #measure and pops the best
      # (lowest-measure) path from the end.
      def remove_choice
        @frontier.sort! do |path1,path2|
          measure( path2 ) <=> measure( path1 ) # reverse
        end
        puts "frontier: #{@frontier.length}"
        @frontier.pop # pops shortest
      end

      # the standard approach is breath first
      def measure( path )
        path.length
      end

    end
  end
end
module Newral
  module Networks
    module Errors
      # fixed: was `class Errors::OnlyPossibleForHidden ...` inside this
      # module, which only resolved via fragile constant lookup; defining
      # the constant directly is equivalent and unambiguous
      class OnlyPossibleForHidden < StandardError; end
    end

    # A fixed two-layer (hidden + output) fully connected network trained
    # with backpropagation.
    class BackpropagationNetwork < Network

      # Builds the hidden and output layers and connects every hidden
      # neuron to every output neuron.
      def initialize( number_of_inputs:2, number_of_hidden:2, number_of_outputs:2 )
        super()
        add_layer "hidden" do
          number_of_hidden.times do |idx|
            add_neuron "hidden_#{idx}", weight_length:number_of_inputs
          end
        end

        add_layer "output" do
          number_of_outputs.times do |idx|
            add_neuron "output_#{idx}", weight_length:number_of_hidden
          end
        end

        # in this network all hidden neurons link to all output neurons
        @layers["hidden"].neurons.each do |hidden_neuron|
          @layers["output"].neurons.each do |output_neuron|
            output_neuron.add_input hidden_neuron
          end
        end
      end

      # gets an array of inputs and the corresponding expected outputs
      # first we update our output layer then our hidden layer
      # Returns the error reduction achieved by this training pass
      # (before - after, so positive means improvement).
      def train( input: [], output: [] )
        before_error = calculate_error( input: input,output: output )
        # fixed: the block parameter used to shadow the `input` argument
        input.each_with_index do |input_vector,idx|
          calculated_output = update_with_vector( input_vector )
          @layers["output"].neurons.each_with_index do |neuron,neuron_idx|
            neuron.adjust_weights( expected: output[ idx ][ neuron_idx ])
          end

          @layers["hidden"].neurons.each do |neuron|
            neuron.adjust_weights( expected: output[ idx ], layer: :hidden, output: calculated_output, weights_at_output_nodes: output_weights( neuron ))
          end
        end
        new_error = calculate_error( input: input,output: output )
        before_error-new_error
      end

      # gets the weights of the output neurons this input feeds to
      # this of course can be done much simpler (as its always the nth weight of the output neuron)
      # however we want to stay explicit
      # Raises Errors::OnlyPossibleForHidden for non-hidden neurons.
      def output_weights( neuron )
        raise Errors::OnlyPossibleForHidden unless @layers["hidden"].neurons.member?( neuron )
        weights = []
        @layers["output"].neurons.each do |output_neuron|
          output_neuron.inputs.each_with_index do |input,idx|
            weights << output_neuron.weights[ idx ] if input == neuron
          end
        end
        weights
      end

    end

  end
end
module Newral
  module Networks
    # A named group of neurons inside a Network; exposes the collected
    # weights, biases and outputs of its neurons.
    class Layer
      attr_reader :neurons, :identifier

      def initialize( identifier: nil )
        @identifier = identifier
        @neurons = []
      end

      # Appends a neuron to this layer.
      def add_neuron( neuron )
        @neurons << neuron
      end

      # Flat list of every weight of every neuron in the layer.
      def weights
        @neurons.map( &:weights ).flatten
      end

      # One bias per neuron, in insertion order.
      def biases
        @neurons.map( &:bias )
      end

      # One output per neuron, in insertion order.
      def outputs
        @neurons.map( &:output )
      end
    end
  end
end
module Newral
  module Networks
    module Errors
      class InvalidType < StandardError; end
      class IdentifierExists < StandardError; end
      class NotImplemented < StandardError; end
      # fixed: raised by #move but previously never defined, so #move with a
      # bad direction raised NameError instead of this error
      class InvalidDirection < StandardError; end
    end

    # A neural network: named neurons organised into named layers. Layers
    # are evaluated in insertion order (Ruby hashes preserve it).
    class Network
      attr_reader :output, :neurons, :layers

      def initialize
        @layers = {}
        @neurons = {}
        @layer_identifier = "input"
      end

      # DSL entry point, e.g.:
      #   Network.define { add_layer("hidden") { add_neuron "h0" } }
      def self.define( &block )
        layout = self.new
        layout.instance_eval( &block )
        layout
      end

      # Creates a layer and makes it the current target of #add_neuron; an
      # optional block is instance_eval'd to populate it.
      def add_layer( identifier, &block )
        @layer_identifier = identifier
        @layers[ identifier ] = Layer.new( identifier: identifier )
        self.instance_eval &block if block_given?
      end

      # Registers a neuron under +identifier+ in the current layer. Builds a
      # Perceptron or Sigmoid unless a ready-made neuron object is passed.
      # Raises IdentifierExists when redefining an identifier with new
      # configuration, InvalidType for an unknown +type+.
      def add_neuron( identifier, neuron: nil, weights: nil, bias: nil, weight_length: nil, type: 'sigmoid' )
        raise Errors::IdentifierExists if @neurons[ identifier ] && ( neuron || weights || bias )
        unless neuron
          neuron = case type.to_s
            when 'perceptron' then Perceptron.new( weights: weights, bias: bias, weight_length: weight_length )
            when 'sigmoid' then Sigmoid.new( weights: weights, bias: bias , weight_length: weight_length )
            else
              raise Errors::InvalidType
          end
        end

        @neurons[ identifier ] = neuron
        @layers[ @layer_identifier ].add_neuron( neuron )
      end

      # specify the identifiers of the two neurons to connect
      def connect( from: nil, to: nil )
        input_neuron = @neurons[ to ]
        output_neuron = @neurons[ from ]
        input_neuron.add_input( output_neuron )
      end

      # Feeds the input vector to every neuron of the first layer and
      # stores/returns their outputs.
      def update_first_layer_with_vector( input )
        layer = @layers.first
        @output = layer[1].neurons.collect do |n|
          n.update_with_vector input
        end
        @output
      end

      def update_neuron( identifier, input )
        @neurons[ identifier ].update_with_vector( input )
      end

      # Recomputes @output layer by layer, beginning at layer index +start+.
      def update_layers( start:0 )
        @layers.to_a[start..@layers.size].each do |layer |
          @output = layer[1].neurons.collect do |n|
            n.output
          end
        end
      end

      # Full forward pass: feed the vector into the first layer, then
      # propagate through the remaining layers.
      def update_with_vector( input )
        update_first_layer_with_vector( input )
        update_layers( start: 1)
        @output
      end

      # use this for simple networks were neurons are set by hand
      def update( &block )
        self.instance_eval( &block ) if block_given?
        update_layers
        @output
      end

      def output_of_neuron( identifier )
        @neurons[ identifier ].output
      end

      # Training is only implemented in subclasses (BackpropagationNetwork).
      def train( inputs: [], output: [] )
        raise Errors::NotImplemented, "Use Subclass Backpropagation Training"
      end

      # Sets weights[idx]/bias[idx] on the idx-th neuron of +layer+.
      def set_weights_and_bias( layer: 'hidden', weights: [], bias: [])
        @layers[layer].neurons.each_with_index do |neuron,idx|
          neuron.set_weights_and_bias( weights: weights[ idx ], bias: bias[idx])
        end
      end

      # by implementing these functions we can use a network for all
      # training algorithms (although this is really just a proove of concept as using Greedy for Neural Networks does not lead to great results)

      def calculate( input )
        update_with_vector( input )
      end

      # Half the mean squared error between calculated and expected outputs.
      def calculate_error( input: [],output: [] )
        expected_values = [] # output can be longer than input
        calculated_values = []
        input.each_with_index do |x,idx|
          calculated_values << calculate( x )
          expected_values << output[idx]
        end
        Newral::ErrorCalculation.mean_square( calculated_values, expected_values )/2
      end

      # Total number of tweakable directions (weights + biases) over all
      # neurons.
      def number_of_directions
        @neurons.sum{ |n| n[1].number_of_directions }
      end

      # Returns a deep copy of the network in which the neuron owning
      # +direction+ has been moved by +step+ along its local direction.
      # Raises Errors::InvalidDirection for an out-of-range direction.
      def move( direction: 0, step:0.01, step_percentage: nil )
        raise Errors::InvalidDirection if direction >= number_of_directions
        new_network = Marshal.load(Marshal.dump(self))
        idx = 0
        new_network.neurons.each do |key,neuron|
          if idx+neuron.number_of_directions-1 >= direction
            # fixed: was `meuron = neuron.dup.move(...)`, which moved a
            # throw-away copy and returned an unchanged network; move the
            # copied network's own neuron instead (assumes neuron#move
            # mutates in place, matching its use elsewhere — TODO confirm)
            neuron.move( direction: direction-idx, step: step, step_percentage: step_percentage)
            return new_network
          end
          idx = idx+neuron.number_of_directions
        end
        new_network
      end

      # Nudges every direction by a random step within the given range.
      def move_random( low_range: -0.9, high_range: 0.9 )
        number_of_directions.times do |direction|
          step = low_range+rand()*(high_range.to_f-low_range.to_f)
          move( direction: direction, step: step )
        end
        self
      end

    end
  end
end