newral 0.1

Files changed (48)
  1. checksums.yaml +7 -0
  2. data/Gemfile +4 -0
  3. data/LICENSE +21 -0
  4. data/README.md +278 -0
  5. data/Rakefile +10 -0
  6. data/lib/newral.rb +53 -0
  7. data/lib/newral/bayes.rb +39 -0
  8. data/lib/newral/classifier/dendogram.rb +68 -0
  9. data/lib/newral/classifier/k_means_cluster.rb +45 -0
  10. data/lib/newral/classifier/node.rb +58 -0
  11. data/lib/newral/classifier/node_distance.rb +19 -0
  12. data/lib/newral/data/base.rb +153 -0
  13. data/lib/newral/data/cluster.rb +37 -0
  14. data/lib/newral/data/cluster_set.rb +38 -0
  15. data/lib/newral/data/csv.rb +23 -0
  16. data/lib/newral/data/idx.rb +48 -0
  17. data/lib/newral/error_calculation.rb +28 -0
  18. data/lib/newral/functions/base.rb +102 -0
  19. data/lib/newral/functions/block.rb +34 -0
  20. data/lib/newral/functions/gaussian.rb +41 -0
  21. data/lib/newral/functions/line.rb +52 -0
  22. data/lib/newral/functions/polynomial.rb +48 -0
  23. data/lib/newral/functions/radial_basis_function_network.rb +54 -0
  24. data/lib/newral/functions/ricker_wavelet.rb +13 -0
  25. data/lib/newral/functions/vector.rb +59 -0
  26. data/lib/newral/genetic/tree.rb +70 -0
  27. data/lib/newral/graphs/a_star.rb +12 -0
  28. data/lib/newral/graphs/cheapest_first.rb +11 -0
  29. data/lib/newral/graphs/edge.rb +24 -0
  30. data/lib/newral/graphs/graph.rb +63 -0
  31. data/lib/newral/graphs/node.rb +11 -0
  32. data/lib/newral/graphs/path.rb +50 -0
  33. data/lib/newral/graphs/tree_search.rb +60 -0
  34. data/lib/newral/networks/backpropagation_network.rb +68 -0
  35. data/lib/newral/networks/layer.rb +28 -0
  36. data/lib/newral/networks/network.rb +146 -0
  37. data/lib/newral/networks/perceptron.rb +84 -0
  38. data/lib/newral/networks/sigmoid.rb +55 -0
  39. data/lib/newral/probability.rb +42 -0
  40. data/lib/newral/probability_set.rb +108 -0
  41. data/lib/newral/q_learning/base.rb +90 -0
  42. data/lib/newral/tools.rb +135 -0
  43. data/lib/newral/training/gradient_descent.rb +36 -0
  44. data/lib/newral/training/greedy.rb +36 -0
  45. data/lib/newral/training/hill_climbing.rb +77 -0
  46. data/lib/newral/training/linear_regression.rb +30 -0
  47. data/lib/newral/training/linear_regression_matrix.rb +32 -0
  48. metadata +147 -0
data/lib/newral/functions/base.rb
@@ -0,0 +1,102 @@
+ module Newral
+   module Functions
+     module Errors
+       class InvalidDirection < ::StandardError; end
+       class NotImplemented < ::StandardError; end
+     end
+
+     class Base
+       attr_accessor :center
+
+       def calculate
+         raise Errors::NotImplemented
+       end
+
+       # approximates the derivative (descent) of the function at a given input
+       def calculate_descent( input, difference: nil )
+         difference = (input/10000.0).abs unless difference
+         ( calculate( input+difference )-calculate( input ) )/difference
+       end
+
+       # finds a (local) minimum of the function via gradient descent
+       def find_minimum( start_input, max_iterations: 1000, threshold: 10**-9, learning_rate: 0.01 )
+         descent = calculate_descent( start_input )
+         iterations = 0
+         input = start_input
+         while descent.abs > threshold && iterations < max_iterations
+           input = input-descent.to_f*learning_rate
+           new_descent = calculate_descent( input )
+           learning_rate = learning_rate.to_f/10 if new_descent*descent < 0 # slow down if the descent changes sign
+           descent = new_descent
+           iterations = iterations+1
+         end
+         { input: input, descent: descent, learning_rate: learning_rate, output: calculate( input ), iterations: iterations }
+       end
+
+       def calculate_for_center_distance( vector1 )
+         calculate Newral::Tools.euclidian_distance( vector1, @center )
+       end
+
+       # any object that implements calculate, move and number_of_directions can be used by
+       # all training algorithms (like hill climbing);
+       # as shown in Networks::Network you do not need to derive from this base class
+
+       def self.create_random( low_range: -9, high_range: 9 )
+         raise Errors::NotImplemented
+       end
+
+       def number_of_directions
+         raise Errors::NotImplemented
+       end
+
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::NotImplemented
+       end
+
+       def move_several( directions: [], step: 0.01, step_percentage: nil )
+         directions.each do |direction|
+           move( direction: direction, step: step, step_percentage: step_percentage )
+         end
+         self
+       end
+
+       # moves every direction by a random step within the given range
+       def move_random( low_range: -0.9, high_range: 0.9 )
+         number_of_directions.times do |direction|
+           step = low_range+rand()*(high_range.to_f-low_range.to_f)
+           move( direction: direction, step: step )
+         end
+         self
+       end
+
+       def calculate_error( input: [], output: [] )
+         expected_values = [] # output can be longer than input
+         calculated_values = []
+         input.each_with_index do |x,idx|
+           calculated_values << calculate( x )
+           expected_values << output[idx]
+         end
+         Newral::ErrorCalculation.root_mean_square( calculated_values, expected_values )
+       end
+
+       def error_gradient_approximation( direction: nil, step: 0.01, input: nil, output: nil )
+         current_error = calculate_error( input: input, output: output )
+         new_pos = self.dup.move( direction: direction, step: step )
+         new_error = new_pos.calculate_error( input: input, output: output )
+         ( new_error-current_error )/step
+       end
+
+       # for general functions we can only estimate the gradient of the error
+       # by taking a small step in each direction
+       def move_with_gradient( input: [], output: [], learning_rate: 0.01, step: 0.01 )
+         number_of_directions.times do |direction|
+           error_gradient = error_gradient_approximation( direction: direction, step: step, input: input, output: output )
+           move( direction: direction, step: (-error_gradient*learning_rate) )
+         end
+         self
+       end
+     end
+   end
+ end
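
Usage note (not part of the diff): subclasses only need to implement calculate, move and number_of_directions; find_minimum and move_with_gradient then work generically. A minimal sketch, assuming the Polynomial class added later in this change set and illustrative numbers:

  require 'newral'

  # minimise x^2 - 4x + 1 (factors are highest power first), starting at x = 5
  f = Newral::Functions::Polynomial.new( factors: [1, -4, 1] )
  result = f.find_minimum( 5 )
  result[:input]  # ~2.0, where the descent is (almost) zero
  result[:output] # ~-3.0
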
data/lib/newral/functions/block.rb
@@ -0,0 +1,34 @@
+ module Newral
+   module Functions
+     module Errors
+       class NoBlock < ::StandardError; end
+     end
+
+     # as it is Ruby, we of course also offer the possibility of defining a function via a block
+     class Block < Base
+       attr_reader :calculate_block
+
+       # directions defaults to the number of params, so move works out of the box
+       def initialize( directions: nil, params: [], &block )
+         raise Errors::NoBlock unless block_given?
+         @calculate_block = block
+         @params = params
+         @directions = directions || params.size
+       end
+
+       def calculate( input )
+         @calculate_block.call input, @params
+       end
+
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::InvalidDirection unless direction >= 0 && direction < @directions
+         @params[direction] = ( step_percentage ? @params[direction]*(1+step_percentage.to_f/100) : @params[direction]+step )
+         self
+       end
+
+       def number_of_directions
+         @directions
+       end
+     end
+   end
+ end
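
Usage note (a sketch, not from the gem's documentation): the block receives the input and the params array, and directions falls back to params.size, so a quadratic with three tunable parameters could look like this:

  require 'newral'

  f = Newral::Functions::Block.new( params: [1.0, 0.0, 0.0] ) do |x, params|
    params[0]*x**2 + params[1]*x + params[2]
  end
  f.calculate( 3 )                  # => 9.0
  f.move( direction: 2, step: 0.5 ) # nudge the constant term
  f.calculate( 3 )                  # => 9.5
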
data/lib/newral/functions/gaussian.rb
@@ -0,0 +1,41 @@
+ module Newral
+   module Functions
+     class Gaussian < Base
+       attr_reader :factor, :center
+
+       def initialize( center: [0], factor: 1 )
+         @factor = factor
+         @center = center.dup
+       end
+
+       def calculate( input )
+         calculate_for_center_distance( [input] )
+       end
+
+       def calculate_for_center_distance( vector1 )
+         distance = Newral::Tools.euclidian_distance( vector1, @center )
+         Math.exp(-distance**2)*@factor
+       end
+
+       def self.create_random( low_range: -9, high_range: 9 )
+         self.new( center: [low_range+rand(high_range-low_range)], factor: low_range+rand(high_range-low_range) )
+       end
+
+       def number_of_directions
+         1+@center.size
+       end
+
+       # direction 0 moves the factor, directions 1..n move the center coordinates
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::InvalidDirection if direction >= number_of_directions
+         if direction == 0
+           @factor = ( step_percentage ? @factor*(1+step_percentage.to_f/100) : @factor+step )
+         else
+           @center = @center.dup
+           @center[direction-1] = step_percentage ? @center[direction-1]*(1+step_percentage.to_f/100) : @center[direction-1]+step
+         end
+         self
+       end
+     end
+   end
+ end
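
Usage note (a sketch with illustrative values): the Gaussian is a radial bump around center scaled by factor, i.e. calculate(x) = factor * exp(-distance(x, center)**2):

  require 'newral'

  g = Newral::Functions::Gaussian.new( center: [0], factor: 2 )
  g.calculate( 0 )                  # => 2.0, the peak at the center
  g.calculate( 1 )                  # => 2 * Math.exp(-1), roughly 0.74
  g.move( direction: 1, step: 0.5 ) # shifts the first center coordinate to 0.5
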
data/lib/newral/functions/line.rb
@@ -0,0 +1,52 @@
+ module Newral
+   module Functions
+     class Line < Base
+       attr_accessor :center
+
+       def initialize( factor: 1, bias: 0, center: nil )
+         @factor = factor
+         @bias = bias
+         @center = center.dup if center
+       end
+
+       def calculate( input )
+         @factor*input+@bias
+       end
+
+       def self.create_random( low_range: -9, high_range: 9 )
+         factor = low_range+rand(high_range-low_range)
+         bias = low_range+rand(high_range-low_range)
+         self.new( factor: factor, bias: bias )
+       end
+
+       def number_of_directions
+         2
+       end
+
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         case direction
+         when 0 then @bias = ( step_percentage ? @bias*(1+step_percentage.to_f/100) : @bias+step )
+         when 1 then @factor = ( step_percentage ? @factor*(1+step_percentage.to_f/100) : @factor+step )
+         else
+           raise Errors::InvalidDirection
+         end
+         self
+       end
+
+       # for a line the error gradient can be computed exactly:
+       #   bias_gradient   = -(2/N) * sum( y_i - (factor*x_i + bias) )
+       #   factor_gradient = -(2/N) * sum( x_i * (y_i - (factor*x_i + bias)) )
+       # the step argument is ignored here
+       def move_with_gradient( input: [], output: [], learning_rate: 0.01, step: nil )
+         bias_gradient = 0
+         factor_gradient = 0
+         input.each_with_index do |x,idx|
+           bias_gradient = bias_gradient-2.0/input.size*( output[idx]-( @factor*x+@bias ) )
+           factor_gradient = factor_gradient-2.0/input.size*x*( output[idx]-( @factor*x+@bias ) )
+         end
+         @bias = @bias - (learning_rate * bias_gradient)
+         @factor = @factor - (learning_rate * factor_gradient)
+         self
+       end
+     end
+   end
+ end
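
Usage note (a sketch, numbers are illustrative): because the line's error gradient is exact, repeated gradient steps fit simple data directly:

  require 'newral'

  line = Newral::Functions::Line.new( factor: 0, bias: 0 )
  x = [0, 1, 2, 3]
  y = [1, 3, 5, 7] # y = 2x + 1
  1000.times { line.move_with_gradient( input: x, output: y, learning_rate: 0.05 ) }
  line.calculate( 4 ) # ~9
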
data/lib/newral/functions/polynomial.rb
@@ -0,0 +1,48 @@
+ module Newral
+   module Functions
+     class Polynomial < Base
+       attr_reader :factors
+
+       # factors are ordered from the highest power down to the constant term
+       def initialize( factors: nil )
+         @factors = ( factors || [1] ).dup
+         @length = @factors.size
+       end
+
+       def calculate( input )
+         result = 0
+         @factors.each_with_index do |factor, idx|
+           result = result+input**(@length-idx-1)*factor
+         end
+         result
+       end
+
+       # calculates the descent (first derivative) at input
+       def calculate_descent( input )
+         descent = 0
+         @factors.each_with_index do |factor, idx|
+           descent = descent+input**(@length-idx-2)*factor*(@length-idx-1) if @length-idx-2 >= 0
+         end
+         descent
+       end
+
+       def self.create_random( length: 3, low_range: -9, high_range: 9 )
+         factors = []
+         length.times do
+           factors << low_range+rand(high_range-low_range)
+         end
+         self.new( factors: factors )
+       end
+
+       def number_of_directions
+         @factors.size
+       end
+
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::InvalidDirection if direction >= number_of_directions
+         @factors = @factors.dup
+         @factors[direction] = step_percentage ? @factors[direction]*(1+step_percentage.to_f/100) : @factors[direction]+step
+         self
+       end
+     end
+   end
+ end
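
Usage note (a sketch with illustrative values): factors run from the highest power down to the constant term, so [2, 0, -1] stands for 2x^2 - 1:

  require 'newral'

  p = Newral::Functions::Polynomial.new( factors: [2, 0, -1] )
  p.calculate( 3 )                # => 17 (2*9 - 1)
  p.calculate_descent( 3 )        # => 12 (derivative 4x at x = 3)
  p.move( direction: 2, step: 1 )
  p.calculate( 3 )                # => 18 (constant term is now 0)
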
data/lib/newral/functions/radial_basis_function_network.rb
@@ -0,0 +1,54 @@
+ module Newral
+   module Functions
+     class RadialBasisFunctionNetwork < Base
+       attr_reader :weights, :functions
+
+       def initialize( centers: [], weights: [], bias_weight: 1, klass: Newral::Functions::Gaussian )
+         @klass = klass
+         @weights = weights.dup
+         @centers = centers.dup
+         @functions = []
+         @weights.each_with_index do |weight,idx|
+           @functions << klass.new( center: centers[idx], factor: weight )
+         end
+         @functions << Line.new( factor: 0, bias: bias_weight ) if bias_weight != 0
+       end
+
+       def calculate( input )
+         result = 0
+         @functions.each do |function|
+           result = result + function.calculate( input )
+         end
+         result.to_f
+       end
+
+       def self.create_random( length: 3, low_range: -9, high_range: 9, klass: Newral::Functions::Gaussian )
+         weights = []
+         centers = []
+         length.times do
+           weights << low_range+rand(high_range-low_range)
+           centers << [low_range+rand(high_range-low_range)]
+         end
+         self.new( centers: centers, weights: weights, bias_weight: low_range+rand(high_range-low_range), klass: klass )
+       end
+
+       def number_of_directions
+         @weights.size+@centers.collect{ |c| c.size }.sum
+       end
+
+       # the first directions move the weights, the remaining ones move the center coordinates
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::InvalidDirection if direction >= number_of_directions
+         if direction < @weights.size
+           @weights = @weights.dup
+           @weights[direction] = step_percentage ? @weights[direction]*(1+step_percentage.to_f/100) : @weights[direction]+step
+         else
+           mod = @centers.first.size
+           center_idx = ( direction-@weights.size )/mod
+           value_idx = ( direction-@weights.size )%mod
+           @centers = @centers.dup
+           @centers[center_idx][value_idx] = step_percentage ? @centers[center_idx][value_idx]*(1+step_percentage.to_f/100) : @centers[center_idx][value_idx]+step
+         end
+         self
+       end
+     end
+   end
+ end
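
Usage note (a sketch with illustrative values): the network sums its basis functions plus a constant bias line, e.g. with two Gaussian bumps:

  require 'newral'

  rbf = Newral::Functions::RadialBasisFunctionNetwork.new(
    centers: [[-1], [1]], weights: [1, 2], bias_weight: 0.5 )
  rbf.calculate( 1 )       # => 2.5 + Math.exp(-4), roughly 2.52
  rbf.number_of_directions # => 4 (two weights plus two center coordinates)
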
data/lib/newral/functions/ricker_wavelet.rb
@@ -0,0 +1,13 @@
+ module Newral
+   module Functions
+     class RickerWavelet < Gaussian
+       # also known as the "Mexican hat" (sombrero) function
+
+       def calculate_for_center_distance( vector1 )
+         distance = Newral::Tools.euclidian_distance( vector1, @center )
+         (1-distance**2)*Math.exp(-distance.to_f**2/2)*@factor
+       end
+     end
+   end
+ end
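
Usage note (a sketch with illustrative values): at the center the wavelet equals the factor, and it dips below zero further out:

  require 'newral'

  w = Newral::Functions::RickerWavelet.new( center: [0], factor: 1 )
  w.calculate( 0 ) # => 1.0
  w.calculate( 2 ) # => (1 - 4) * Math.exp(-2), roughly -0.41
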
data/lib/newral/functions/vector.rb
@@ -0,0 +1,59 @@
+ module Newral
+   module Functions
+     class Vector < Base
+       attr_accessor :vector, :bias
+
+       def initialize( vector: [1,1], bias: 0 )
+         @vector = vector
+         @bias = bias
+       end
+
+       def calculate( input )
+         @vector.zip( input ).map{ |x, y| x * y }.sum + @bias
+       end
+
+       def self.create_random( length: 2, low_range: -9, high_range: 9 )
+         vector = []
+         length.times do
+           vector << low_range+rand(high_range-low_range)
+         end
+         self.new( vector: vector, bias: low_range+rand(high_range-low_range) )
+       end
+
+       def number_of_directions
+         @vector.size+1
+       end
+
+       def move( direction: 0, step: 0.01, step_percentage: nil )
+         raise Errors::InvalidDirection if direction >= number_of_directions
+         if direction < @vector.size
+           @vector[direction] = step_percentage ? @vector[direction]*(1+step_percentage.to_f/100) : @vector[direction]+step
+         else
+           @bias = step_percentage ? @bias*(1+step_percentage.to_f/100) : @bias+step
+         end
+         self
+       end
+
+       # the error gradient can be computed exactly, so the step argument is ignored here
+       def move_with_gradient( input: [], output: [], learning_rate: 0.01, step: nil )
+         bias_gradient = 0
+         vector_gradient = [0]*@vector.length
+         input.each_with_index do |input_vector,idx|
+           prediction = @vector.zip( input_vector ).map{ |x, y| x * y }.sum + @bias
+           bias_gradient = bias_gradient-2.0/input.size*( output[idx]-prediction )
+           vector_gradient.each_with_index do |v,idx_2|
+             vector_gradient[idx_2] = v-2.0/input.size*input_vector[idx_2]*( output[idx]-prediction )
+           end
+         end
+         @bias = @bias - (learning_rate * bias_gradient)
+         new_vector = []
+         @vector.each_with_index do |value,idx|
+           new_vector << value - (learning_rate * vector_gradient[idx])
+         end
+         @vector = new_vector
+         self
+       end
+     end
+   end
+ end
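
Usage note (a sketch, numbers are illustrative): Vector is the multi-dimensional analogue of Line, and move_with_gradient performs one exact least-squares gradient step per call:

  require 'newral'

  v = Newral::Functions::Vector.new( vector: [0, 0], bias: 0 )
  input  = [[0, 0], [1, 0], [0, 1], [1, 1]]
  output = [1, 3, 2, 4] # 2*x1 + 1*x2 + 1
  2000.times { v.move_with_gradient( input: input, output: output, learning_rate: 0.1 ) }
  v.calculate( [2, 2] ) # ~7
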
data/lib/newral/genetic/tree.rb
@@ -0,0 +1,70 @@
+ module Newral
+   module Genetic
+     class Tree
+       attr_reader :value, :left_child, :right_child, :sub_node_count
+       attr_accessor :parent
+
+       # operands mapped to their arity
+       OPERANDS = {
+         '+' => 2,
+         '*' => 2,
+         '/' => 2,
+         '-' => 2,
+         'pow' => 2,
+         'sqrt' => 1
+       }
+
+       def initialize( parent: nil, value: nil, right_child: nil, left_child: nil )
+         @parent = parent
+         @left_child = left_child
+         @right_child = right_child
+         update_node_count
+         @value = value
+       end
+
+       def update_node_count
+         @sub_node_count = ( @left_child && 1 ).to_i+( @left_child && @left_child.sub_node_count ).to_i+( @right_child && 1 ).to_i+( @right_child && @right_child.sub_node_count ).to_i
+         @parent.update_node_count if @parent
+         @sub_node_count
+       end
+
+       def node_count
+         @sub_node_count+1
+       end
+
+       def set_child_trees( left_child: nil, right_child: nil, force: false )
+         @left_child = left_child if left_child || force
+         @right_child = right_child if right_child || force
+         @right_child.parent = self if @right_child
+         @left_child.parent = self if @left_child
+         update_node_count
+         self
+       end
+
+       # inner nodes evaluate their operand, leaves simply return their value
+       def eval
+         case @value
+         when '+' then @left_child.eval+@right_child.eval
+         when '-' then @left_child.eval-@right_child.eval
+         when '*' then @left_child.eval*@right_child.eval
+         when '/' then @left_child.eval/@right_child.eval
+         when 'pow' then @left_child.eval**@right_child.eval
+         when 'sqrt' then @left_child.eval**0.5
+         else
+           @value
+         end
+       end
+
+       # builds a full tree of the given depth with random operands on the
+       # inner nodes and random terminal nodes on the leaves
+       def self.full_tree( depth: 3, allowed_operands: OPERANDS.keys, terminal_nodes: [] )
+         if depth > 1
+           value = allowed_operands[ rand(allowed_operands.size) ]
+           tree = Tree.new( value: value )
+           tree.set_child_trees( left_child: Tree.full_tree( depth: depth-1, allowed_operands: allowed_operands, terminal_nodes: terminal_nodes ) )
+           if OPERANDS[value] == 2
+             tree.set_child_trees( right_child: Tree.full_tree( depth: depth-1, allowed_operands: allowed_operands, terminal_nodes: terminal_nodes ) )
+           end
+           tree
+         else
+           Tree.new( value: terminal_nodes[rand(terminal_nodes.size)] )
+         end
+       end
+     end
+   end
+ end
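
Usage note (a sketch with illustrative values): leaves hold plain numbers, inner nodes hold operands, so (3 + 4) * 2 becomes:

  require 'newral'

  sum = Newral::Genetic::Tree.new( value: '+' )
  sum.set_child_trees( left_child: Newral::Genetic::Tree.new( value: 3 ),
                       right_child: Newral::Genetic::Tree.new( value: 4 ) )
  expr = Newral::Genetic::Tree.new( value: '*' )
  expr.set_child_trees( left_child: sum, right_child: Newral::Genetic::Tree.new( value: 2 ) )
  expr.eval       # => 14
  expr.node_count # => 5
  # random trees for genetic programming:
  Newral::Genetic::Tree.full_tree( depth: 2, terminal_nodes: [1, 2, 3] ).eval
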