ruby_nn 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 7350371a3d431b9a7c1fee174ef6f7a5f8296b416a89b068e2c2ee87648db786
+   data.tar.gz: b1a0957ca5b3af89982d51191ca2a8739c358ad061f7cda928676e5ff3078da7
+ SHA512:
+   metadata.gz: 7e34c2a2cf69311061b014d456d5f4be48cd852f1146e3456d46c43ec9def7c592108d9367f264d3df68e99a43a10b39d9c5a9d208ea49fdc7260563186fa9d7
+   data.tar.gz: 50f86c5e3cc3b8a8fbf56102f257dcd8ba9cd1161e0afa264fdf2a5cd261cfa6dc60ce8e7f3b51d44045360cc865b0c49e99b26cb7f3e46f18b8dc55959873cd
lib/neural_network.rb ADDED
@@ -0,0 +1,200 @@
+ require 'json' # to_json is used in #save_weights
+ require 'neural_network_error'
+ module RubyNN
+   class NeuralNetwork
+     attr_reader :layer_parameters, :alpha, :error
+
+     def initialize(layer_parameters, alpha = 0.001)
+       @alpha = alpha
+       @weight_matrix = []
+       @layer_parameters = layer_parameters
+       @error = 0
+     end
+
+     # Builds the weight matrix from random values, one sub-matrix per pair of adjacent layers.
+     def initialize_weights
+       weights = []
+       weight_counts.reduce(0, :+).times { weights << rand }
+       layer_parameters[0..-2].each_with_index do |layer, i|
+         @weight_matrix[i] = find_weights(i, weights)
+       end
+       @weight_matrix
+     end
+
+     # Memoized offsets marking where each layer's weights begin in the flat weights array.
+     def offsets
+       if @offsets
+         @offsets
+       else
+         @offsets = [0]
+         weight_count_size = weight_counts.size
+
+         weight_counts.each_with_index do |weight_count, i|
+           if weight_count_size > i + 1
+             @offsets << @offsets.last + weight_count
+           end
+         end
+         @offsets
+       end
+     end
+
+     def set_weights(weight_matrix)
+       @weight_matrix = weight_matrix
+     end
+
+     # Memoized count of weights between each pair of adjacent layers.
+     def weight_counts
+       if @weight_counts
+         @weight_counts
+       else
+         @weight_counts = []
+         layer_parameters.each_with_index do |count, i|
+           if layer_parameters[i + 1]
+             @weight_counts << (layer_parameters[i] * layer_parameters[i + 1])
+           end
+         end
+         @weight_counts
+       end
+     end
+
+     # Forward pass: feed the input through each weight layer, applying leaky ReLU to hidden layers.
+     def calculate_prediction(input)
+       predictions = []
+       layer_parameters[0..-2].each_with_index do |layer, i|
+         input_value = i == 0 ? input : predictions[i - 1]
+         prediction_vector = multiply_vector(input_value, @weight_matrix[i])
+         prediction_vector = leaky_relu(prediction_vector) if layer_parameters[0..-2][i + 1]
+         predictions << prediction_vector
+       end
+       predictions
+     end
+
+     def weighted_sum(input, weights)
+       total_weight = 0
+       raise NeuralNetworkError, 'arrays are not equal length' if input.size != weights.size
+       input.size.times do |index|
+         total_weight += input[index] * weights[index]
+       end
+       total_weight
+     end
+
+     def multiply_vector(input, weight_matrix)
+       predictions = []
+       weight_matrix.size.times do |index|
+         predictions[index] = weighted_sum(input, weight_matrix[index])
+       end
+       predictions
+     end
+
+     # Slices one layer's weights out of the flat array and shapes them into rows.
+     def find_weights(i, weights)
+       weight_amount, offset, slice_value = weight_counts[i], offsets[i], layer_parameters[i]
+       weights[(offset)...(offset + weight_amount)].each_slice(slice_value).to_a
+     end
+
+     def train(input, target_output)
+       predictions = calculate_prediction(input)
+       back_propagate(predictions, target_output)
+     end
+
+     # Walks the layers in reverse, computing a delta set for each and adjusting its weights.
+     def back_propagate(predictions, target_output)
+       reversed_weight_matrix = @weight_matrix.reverse
+       last_weighted = []
+       predictions.reverse.each_with_index do |prediction, i|
+         delta_set = find_deltas(prediction, target_output) if i == 0
+         if i != 0
+           delta_set = back_propagation_multiplyer(last_weighted, relu_derivative(prediction))
+         end
+         weighted = multiply_vector(delta_set, reversed_weight_matrix[i].transpose)
+         last_weighted = weighted
+         update_weights(delta_set, i)
+       end
+     end
+
+     def save_weights(filename)
+       File.open(filename, "w") do |f|
+         f.write(@weight_matrix.to_json)
+       end
+       puts 'saved weights to ' + filename
+     end
+
+     # Output-layer deltas: the difference between each prediction and its target outcome.
+     def find_deltas(predictions, outcomes)
+       deltas = []
+       predictions.size.times do |index|
+         delta = predictions[index] - outcomes[index]
+         deltas[index] = delta
+         @error = delta ** 2
+       end
+
+       deltas
+     end
+
+     # Gradient step: subtract alpha * delta from each weight in layer i (counted from the output),
+     # keeping the update only when the adjusted weight stays positive.
+     def update_weights(weighted_deltas, i)
+       reversed_weight_matrix = @weight_matrix.reverse
+       reversed_weight_matrix[i].size.times do |index|
+         reversed_weight_matrix[i][index].size.times do |count|
+           weight = reversed_weight_matrix[i][index][count]
+           adjusted_value = (weight - (@alpha * weighted_deltas[index]))
+           reversed_weight_matrix[i][index][count] = adjusted_value if adjusted_value > 0
+         end
+       end
+     end
+
+     def calculate_deltas(input, deltas)
+       weighted_deltas = []
+       deltas.each { weighted_deltas.push([]) }
+
+       deltas.size.times do |index|
+         input.size.times do |count|
+           weighted_deltas[index][count] = input[count] * deltas[index]
+         end
+       end
+
+       weighted_deltas
+     end
+
+     def leaky_relu(input)
+       input.map { |value| value > 0 ? value : 0.0001 }
+     end
+
+     def relu_derivative(output)
+       output.map { |value| value > 0 ? 1 : 0.0001 }
+     end
+
+     def calculate_outcomes(abstraction)
+       first = 0.0
+       second = 0.0
+       third = 0.0
+       abstraction.setups.each do |setup|
+         white_wins = setup.outcomes[:white_wins].to_f
+         black_wins = setup.outcomes[:black_wins].to_f
+         draws = setup.outcomes[:draws].to_f
+
+         if setup.position_signature[-1] == 'w'
+           first += white_wins
+           second += black_wins
+         else
+           second += black_wins
+           first += white_wins
+         end
+
+         third = draws
+       end
+
+       [first, second, third]
+     end
+
+     def back_propagation_multiplyer(v1, v2)
+       v1.zip(v2).map { |set| set[0] * set[1] }
+     end
+
+     def softmax(vector)
+       sum = vector.sum.to_f
+       vector.map do |value|
+         if value == 0
+           0
+         else
+           value / sum
+         end
+       end
+     end
+
+     def get_weights
+       @weight_matrix
+     end
+   end
+ end
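
For orientation, here is a minimal usage sketch of the class above. It is not shipped with the gem; the layer sizes, learning rate, sample data, iteration count, and output filename are illustrative assumptions, chosen only to show the call sequence rather than to demonstrate convergence.

require 'ruby_nn'

# Illustrative network: 2 inputs, one hidden layer of 4 neurons, 1 output.
network = RubyNN::NeuralNetwork.new([2, 4, 1], 0.01)
network.initialize_weights

# Made-up training pairs of [input, target_output].
samples = [
  [[0.0, 1.0], [1.0]],
  [[1.0, 0.0], [1.0]],
  [[0.0, 0.0], [0.0]]
]

500.times do
  samples.each { |input, target| network.train(input, target) }
end

# calculate_prediction returns every layer's activations; the last entry is the output vector.
puts network.calculate_prediction([1.0, 0.0]).last.inspect
network.save_weights('weights.json')

Weights written by save_weights can later be restored by parsing the JSON file and passing the result to set_weights.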
lib/neural_network_error.rb ADDED
@@ -0,0 +1,4 @@
+ module RubyNN
+   class NeuralNetworkError < StandardError
+   end
+ end
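
As a small sketch (not part of the gem) of where this error surfaces: NeuralNetwork#weighted_sum raises it whenever an input vector and a weight row differ in length.

require 'ruby_nn'

network = RubyNN::NeuralNetwork.new([2, 2])
begin
  network.weighted_sum([1.0, 2.0], [0.5]) # mismatched lengths
rescue RubyNN::NeuralNetworkError => e
  puts e.message # => "arrays are not equal length"
end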
lib/ruby_nn.rb ADDED
@@ -0,0 +1 @@
+ require 'neural_network'
metadata ADDED
@@ -0,0 +1,46 @@
+ --- !ruby/object:Gem::Specification
+ name: ruby_nn
+ version: !ruby/object:Gem::Version
+   version: 0.2.0
+ platform: ruby
+ authors:
+ - Charles Ellison
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2010-04-28 00:00:00.000000000 Z
+ dependencies: []
+ description: 'This is a configurable feed forward mlp neural network. Documentation:
+   https://github.com/chadellison/ruby_nn'
+ email: chad.ellison0123@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/neural_network.rb
+ - lib/neural_network_error.rb
+ - lib/ruby_nn.rb
+ homepage: https://rubygems.org/gems/ruby_nn
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.0.3
+ signing_key:
+ specification_version: 4
+ summary: A neural network built in ruby
+ test_files: []
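
Assuming the standard RubyGems workflow (not part of the gem's own files), the package can be installed with gem install ruby_nn or pinned in a Gemfile:

# Gemfile (illustrative)
gem 'ruby_nn', '0.2.0'

require 'ruby_nn' then loads lib/ruby_nn.rb, which in turn requires the NeuralNetwork class.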