deepbeige 0.2.0 → 0.2.1

Files changed (7)
  1. data/VERSION +1 -1
  2. data/deep_beige.rb +43 -33
  3. data/deepbeige.gemspec +2 -2
  4. data/main.rb +11 -1
  5. data/neural_net.rb +48 -5
  6. data/node.rb +25 -21
  7. metadata +3 -3
data/VERSION CHANGED
@@ -1 +1 @@
- 0.2.0
+ 0.2.1
data/deep_beige.rb CHANGED
@@ -75,40 +75,15 @@ class DeepBeige
   puts "Evolving Generation #{generation_number}"
   player_number = 0
   @population.each do |neuralnet|
- player1 = DeepBeige.new
- player1.neural_net = neuralnet
- player1.game_name = @game_name
-
- 5.times do
- game = game.class.new
- game.quiet = true
- opponent_number = rand(@population.count)
- #puts "#{player_number} versus opponent #{opponent_number}"
- opponent_net = @population[opponent_number]
- player2 = DeepBeige.new
- player2.neural_net = opponent_net
- player2.game_name = @game_name
-
- players = [player1,player2]
- table = Table.new game, players
- table.quiet = true
- table.play_game
- if game.drawn?
- players.each do |player|
- scores[player.id] +=1
- end
-
- elsif game.won?
- winner = players[game.winner]
- players.each do |player|
- if player.id == winner.id
- scores[player.id] +=2
- else
- scores[player.id] -=2
- end
- end
- end
+ player = DeepBeige.new
+ player.neural_net = neuralnet
+ player.game_name = game.name
 
+ 5.times do
+ play_as_player game.name, player, 1, scores
+ end
+ 5.times do
+ play_as_player game.name, player, 2, scores
   end
   player_number += 1
   end
@@ -208,6 +183,41 @@ private
   end
   save_population name
   end
+
+ private
+ def play_as_player game_name, player, player_number, scores
+ game = game_from_name game_name
+ game.quiet = true
+ opponent_number = rand(@population.count)
+ #puts "#{player_number} versus opponent #{opponent_number}"
+ opponent_net = @population[opponent_number]
+ opponent = DeepBeige.new
+ opponent.neural_net = opponent_net
+ opponent.game_name = @game_name
+ players = [player, opponent]
+ if player_number == 2
+ player = [opponent,player]
+ end
+
+ table = Table.new game, players
+ table.quiet = true
+ table.play_game
+ if game.drawn?
+ players.each do |player|
+ scores[player.id] +=1
+ end
+
+ elsif game.won?
+ winner = players[game.winner]
+ players.each do |player|
+ if player.id == winner.id
+ scores[player.id] +=2
+ else
+ scores[player.id] -=2
+ end
+ end
+ end
+ end
   end
 
 
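Note: the inline match loop is now factored into the private play_as_player helper, and each network plays five games seated as player 1 and five as player 2, so fitness no longer favours whichever seat moves first. A rough standalone sketch of the scoring rule that helper applies (the method name apply_result is illustrative, not part of the gem; game.drawn?, game.won?, game.winner and player.id are the calls used in the diff):

    # +1 to both ids on a draw, +2 to the winner and -2 to the loser otherwise;
    # `scores` is assumed to be a Hash defaulting to 0.
    def apply_result(scores, players, game)
      if game.drawn?
        players.each { |p| scores[p.id] += 1 }
      elsif game.won?
        winner = players[game.winner]
        players.each { |p| scores[p.id] += (p.id == winner.id ? 2 : -2) }
      end
    end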
data/deepbeige.gemspec CHANGED
@@ -5,11 +5,11 @@
 
   Gem::Specification.new do |s|
   s.name = %q{deepbeige}
- s.version = "0.2.0"
+ s.version = "0.2.1"
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
   s.authors = ["David Bochenski"]
- s.date = %q{2010-09-04}
+ s.date = %q{2010-09-05}
   s.description = %q{An AI learning program that plays board games}
   s.email = %q{david@bochenski.co.uk}
   s.extra_rdoc_files = [
data/main.rb CHANGED
@@ -27,7 +27,11 @@ def player_vs_deepbeige db, game
   #ok so now I'm interested in playing my best creation
   db.start_game game.name
   me = Human.new game
- play_game game, me, db, []
+ if which_player == "1"
+ play_game game, me, db, []
+ else
+ play_game game, db, me, []
+ end
   end
 
   def player_vs_player game
@@ -36,6 +40,12 @@ def player_vs_player game
   play_game game, p1,p2, []
   end
 
+ def which_player
+ puts
+ puts "Would you like to be player 1 or 2?"
+ gets.chop
+ end
+
   def options
   puts
   puts "What would you like to do?"
data/neural_net.rb CHANGED
@@ -9,6 +9,7 @@ class NeuralNet
   def initialize
   @id = UUID.new.to_s.split(':')[1].chop
   @network = []
+ @sigma = 0.05
   end
 
   def evaluate
@@ -46,7 +47,7 @@ class NeuralNet
   @network = []
   input_nodes = []
   inputs.times do
- input_nodes << Node.new
+ input_nodes << Node.new(@sigma)
   end
   if input_nodes.count > 0
   @network << input_nodes
@@ -54,31 +55,34 @@ class NeuralNet
   (tiers - 2).times do
   tier = []
   10.times do
- tier << Node.new
+ tier << Node.new(@sigma)
   end
   @network << tier
   end
 
   output_nodes = []
   outputs.times do
- output_nodes << Node.new
+ output_nodes << Node.new(@sigma)
   end
   if output_nodes.count >0
   @network << output_nodes
   end
   link_tiers
+ recalculate_tau
   end
 
   def fingerprint
   topline = ""
   fingerprint = ""
+ sigma = ""
+ tau = ""
   @network.each do |tier|
   topline << "#{tier.count},"
   tier.each do |node|
   fingerprint << node.fingerprint
   end
   end
- topline.chop + "\n" + fingerprint
+ topline.chop + "\n" + @sigma.to_s + "\n" + @tau.to_s + "\n" + fingerprint
   end
 
   def reload fingerprint
@@ -86,6 +90,10 @@ class NeuralNet
   i = 0
   tiers = fingerprint[i].split(',').to_a
   i += 1
+ @sigma = fingerprint[i].to_f
+ i += 1
+ @tau = fingerprint[i].to_f
+ i += 1
 
   @network = []
   tiers.each do |tier|
@@ -93,7 +101,7 @@ class NeuralNet
   tier.to_i.times do
   node_fingerprint = fingerprint[i]
   i += 1
- node = Node.new
+ node = Node.new(@sigma)
   node.reload node_fingerprint
   nodes << node
   end
@@ -110,8 +118,12 @@ class NeuralNet
   #the ability to mutate the number of
   #nodes and their configuration
   #focussing instead on simple node weight mutation
+
+ #first we mutate sigma
+ @sigma = @sigma * Math.exp(@tau * gaussian_random)
   @network.each do |tier|
   tier.each do |node|
+ node.sigma = @sigma
   node.mutate
   end
   end
@@ -120,6 +132,8 @@
 
   def clone
   clone = NeuralNet.new
+ clone.sigma = self.sigma
+ clone.tau = self.tau
   #iterate in through each tier
   @network.each do |tier|
   nodes = []
@@ -155,6 +169,8 @@ class NeuralNet
   end
 
 protected
+ attr_accessor :sigma, :tau
+
   def link_tiers
   #first cut lets link every node on a tier to each node on the subsequent tier
   i = 1
@@ -171,4 +187,31 @@ protected
   i +=1
   end
   end
+
+ private
+ #This is a constant related to the size of our network
+ #and the number of connections it contains
+ def recalculate_tau
+ number_of_variables = 0
+ @network.each do |tier|
+ tier.each do |node|
+ number_of_variables += 1 + node.weights.count
+ end
+ end
+ @tau = 1 / (Math.sqrt(2 * Math.sqrt(number_of_variables)))
+ end
+
+ def gaussian_random
+ u1 = u2 = w = g1 = g2 = 0 # declare
+ begin
+ u1 = 2 * rand - 1
+ u2 = 2 * rand - 1
+ w = u1 * u1 + u2 * u2
+ end while w >= 1
+
+ w = Math::sqrt( ( -2 * Math::log(w)) / w )
+ g2 = u1 * w;
+ g1 = u2 * w;
+ # g1 is returned
+ end
   end
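Note: the mutation scheme in neural_net.rb now follows the log-normal self-adaptation rule familiar from evolution strategies: a single network-wide step size sigma is itself mutated each generation as sigma' = sigma * exp(tau * N(0,1)), with tau = 1 / sqrt(2 * sqrt(n)) where n counts every mutable parameter (one bias plus the weights per node), and the drifted sigma is pushed into every node before its weights are perturbed. A self-contained sketch of just that update (the Box-Muller sampler below stands in for the gem's gaussian_random; the value of n is hypothetical):

    # Log-normal self-adaptation of the global step size, as introduced in this release.
    def standard_normal
      # basic Box-Muller transform; 1 - rand keeps the log argument in (0, 1]
      Math.sqrt(-2 * Math.log(1 - rand)) * Math.cos(2 * Math::PI * rand)
    end

    n     = 127                                # hypothetical count of biases + weights
    tau   = 1 / Math.sqrt(2 * Math.sqrt(n))    # as computed by recalculate_tau
    sigma = 0.05                               # the new default set in initialize
    sigma *= Math.exp(tau * standard_normal)   # sigma drifts before node weights mutate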
data/node.rb CHANGED
@@ -1,16 +1,18 @@
   class Node
   attr_reader :forward_nodes, :output_value
- attr_accessor :input_value, :deviation, :weights
- def initialize
+ attr_accessor :input_value, :sigma, :weights, :bias
+ def initialize sigma
+ @input_value = 0
   @forward_nodes = {}
   @weights =[]
- @input_value = 0
- @deviation = rand / 4
+ @bias = 0
+ @sigma = sigma
   end
- #take our input value, pass it through our sigmoid function (tanh)
- #and then pass on our output value to each of our forward nodes
+ #take our input value (sum of weighted outputs of backward connected nodes)
+ #, subtract the bias and pass the result through our sigmoid function (tanh)
+ # finally then pass on our output value to each of our forward nodes
   def evaluate
- @output_value = Math.tanh(@input_value)
+ @output_value = Math.tanh(@input_value - @bias)
   #p "output value #{@output_value}"
   @forward_nodes.each do |node, weight|
   #p "weight #{weight} old input #{node.input_value}"
@@ -22,7 +24,7 @@ class Node
 
   def attach_forward_node node, sequence
   if @weights.count <= sequence
- @weights << rand
+ @weights << ((rand * 0.4 ) - 0.2) #sampled from a uniform distribution in range ± 0.2
   end
 
   @forward_nodes[node] = @weights[sequence]
@@ -35,7 +37,7 @@ class Node
   def mutate
   new_weights = []
   @weights.each do |weight|
- new_weights << gaussian_random * @deviation + weight # new_random_number = gaussian_rand * standard_deviation + average
+ new_weights << weight + @sigma * gaussian_random # new_random_number = average + standard_deviation * gaussian_rand
   end
   @weights = new_weights
   if @forward_values
@@ -46,17 +48,18 @@ class Node
   end
   end
 
- #and now mutate the deviation
- @deviation = (gaussian_random * @deviation)/2 + @deviation
- end
-
- def breed
+ #mutate the bias
+ @bias = @bias + gaussian_random * @sigma
+
+ self
   end
-
+
   def clone
- clone = Node.new
+ clone = Node.new(self.sigma)
+
+ clone.sigma = self.sigma
+ clone.bias = self.bias
 
- clone.deviation = self.deviation
   @weights.each do |weight|
   clone.weights << weight
   end
@@ -71,7 +74,7 @@ class Node
   end
 
   def fingerprint
- fingerprint = "#{@deviation}:"
+ fingerprint = "#{@sigma.to_s}:#{@bias.to_s}:"
   @weights.each do |weight|
   fingerprint += "#{weight.to_s},"
   end
@@ -81,9 +84,10 @@ class Node
   def reload fingerprint
   self.detatch_all_forward_nodes
   @weights = []
- self.deviation = fingerprint.split(':')[0].to_f
- if fingerprint.split(":").count == 2
- fingerprint.split(":")[1].split(',').each do |weight|
+ self.sigma = fingerprint.split(':')[0].to_f
+ self.bias = fingerprint.split(':')[1].to_f
+ if fingerprint.split(":").count == 3
+ fingerprint.split(":")[2].split(',').each do |weight|
   @weights << weight.to_f
   end
   end
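Note: node.rb replaces the per-node deviation with the shared sigma handed down from the network, adds a bias that is subtracted before the tanh activation, and starts new connection weights uniformly in ±0.2. A minimal sketch of the per-node behaviour after this change (standalone helper methods for illustration only, not the gem's Node class):

    # activation with the new bias term: output = tanh(sum_of_weighted_inputs - bias)
    def node_output(input_sum, bias)
      Math.tanh(input_sum - bias)
    end

    # weight perturbation: new = old + sigma * N(0,1), sigma shared across the network
    def mutate_weight(weight, sigma, gauss)
      weight + sigma * gauss
    end

    puts node_output(0.75, 0.1)           # => about 0.572
    puts mutate_weight(0.2, 0.05, 0.3)    # => 0.215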
metadata CHANGED
@@ -5,8 +5,8 @@ version: !ruby/object:Gem::Version
   segments:
   - 0
   - 2
- - 0
- version: 0.2.0
+ - 1
+ version: 0.2.1
   platform: ruby
   authors:
   - David Bochenski
@@ -14,7 +14,7 @@ autorequire:
   bindir: bin
   cert_chain: []
 
- date: 2010-09-04 00:00:00 +01:00
+ date: 2010-09-05 00:00:00 +01:00
   default_executable:
   dependencies:
   - !ruby/object:Gem::Dependency