ruby-fann 0.7.7 → 0.7.8

Sign up to get free protection for your applications and to get access to all the features.
data/History.txt CHANGED
@@ -1,33 +1,41 @@
1
- == 0.0.1 2007-12-18
1
+ == 0.7.8 2008-03-25
2
2
 
3
3
  * 1 major enhancement:
4
- * Initial release
4
+ * training_callback(args) will be automatically called during training if implemented on subclass. See README for details.
5
5
 
6
- == 0.7.2 2007-12-18
6
+ == 0.7.7 2008-01-23
7
7
 
8
- * 1 major enhancement:
9
- * Include docs
8
+ * 1 minor enhancement:
9
+ * Cull bias neuron(s) from get_neurons function
10
10
 
11
- == 0.7.3 2007-12-19
11
+ == 0.7.6 2008-01-14
12
12
 
13
- * 1 minor enhancement
14
- * Directives to build native extensions
13
+ * 1 minor bug fix:
14
+ * Exception (instead of pukage) if zero-length array given on training data
15
+
16
+ == 0.7.5 2007-12-21
17
+
18
+ * 3 minor enhancements:
19
+ * Fixed rdoc to use actual parameter names
20
+ * Minor code cleanup & symbol fixage
21
+ * Nicer error messages for windows users
15
22
 
16
23
  == 0.7.4 2007-12-19
17
24
 
18
- * 1 minor enhancement
19
- * Nicer message if FANN not installed
25
+ * 1 minor enhancement:
26
+ * Nicer message if FANN not installed
20
27
 
21
- == 0.7.5 2007-12-21
22
- * 2 minor enhancements
23
- * Fixed rdoc to use actual parameter names
24
- * Minor code cleanup & symbol fixage
25
- * Nicer message for windows users
28
+ == 0.7.3 2007-12-19
26
29
 
27
- == 0.7.6 2007-01-14
28
- * 1 minor bug fix
29
- * Exception (instead of pukage) if zero-length array given on training data
30
+ * 1 minor enhancement:
31
+ * Directives to build native extensions
30
32
 
31
- == 0.7.7 2007-01-23
32
- * 1 minor enhncement
33
- * Cull bias neuron(s) from get_neurons function
33
+ == 0.7.2 2007-12-18
34
+
35
+ * 1 major enhancement:
36
+ * Include docs
37
+
38
+ == 0.0.1 2007-12-18
39
+
40
+ * 1 major enhancement:
41
+ * Initial release
data/README.txt CHANGED
@@ -11,3 +11,21 @@ Bindings to use FANN from within ruby/rails environment. Fann is a is a free op
11
11
  fann = RubyFann::Standard.new(:num_inputs=>5, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
12
12
  fann.train_on_data(train, 1000, 10, 0.1)
13
13
  outputs = fann.run([3.0, 2.0, 3.0])
14
+
15
+ == Now implements a callback method
16
+ This callback function can be called during training when using train_on_data, train_on_file or cascadetrain_on_data.
17
+
18
+ It is very useful for doing custom things during training. It is recommended to use this function when implementing custom training procedures, or when visualizing the training in a GUI etc. The args which the callback function takes are the parameters given to train_on_data, plus an epochs parameter which tells how many epochs the training has taken so far.
19
+
20
+ The callback method should return an integer; if the callback function returns -1, the training will terminate.
21
+
22
+ The callback (training_callback) will be automatically called if it is implemented on your subclass as follows:
23
+
24
+ <code>
25
+ class MyFann < RubyFann::Standard
26
+ def training_callback(args)
27
+ puts "ARGS: #{args.inspect}"
28
+ 0
29
+ end
30
+ end
31
+ </code>
@@ -181,6 +181,49 @@ static VALUE fann_training_data_allocate (VALUE klass)
181
181
  }
182
182
 
183
183
 
184
+ // static VALUE invoke_training_callback(VALUE self)
185
+ // {
186
+ // VALUE callback = rb_funcall(self, rb_intern("training_callback"), 0);
187
+ // return callback;
188
+ // }
189
+
190
+ // static int FANN_API internal_callback(struct fann *ann, struct fann_train_data *train,
191
+ // unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error, unsigned int epochs)
192
+
193
+ static int FANN_API fann_training_callback(struct fann *ann, struct fann_train_data *train,
194
+ unsigned int max_epochs, unsigned int epochs_between_reports,
195
+ float desired_error, unsigned int epochs)
196
+ {
197
+ VALUE self = (VALUE)fann_get_user_data(ann);
198
+ VALUE args = rb_hash_new();
199
+
200
+ // Set attributes on hash & push on array:
201
+ VALUE max_epochs_sym = ID2SYM(rb_intern("max_epochs"));
202
+ VALUE epochs_between_reports_sym = ID2SYM(rb_intern("epochs_between_reports"));
203
+ VALUE desired_error_sym = ID2SYM(rb_intern("desired_error"));
204
+ VALUE epochs_sym = ID2SYM(rb_intern("epochs"));
205
+
206
+ rb_hash_aset(args, max_epochs_sym, INT2NUM(max_epochs));
207
+ rb_hash_aset(args, epochs_between_reports_sym, INT2NUM(epochs_between_reports));
208
+ rb_hash_aset(args, desired_error_sym, rb_float_new(desired_error));
209
+ rb_hash_aset(args, epochs_sym, INT2NUM(epochs));
210
+
211
+ VALUE callback = rb_funcall(self, rb_intern("training_callback"), 1, args);
212
+
213
+ if (TYPE(callback)!=T_FIXNUM)
214
+ {
215
+ rb_raise (rb_eRuntimeError, "Callback method must return an integer (-1 to stop training).");
216
+ }
217
+
218
+ int status = NUM2INT(callback);
219
+ if (status==-1)
220
+ {
221
+ printf("Callback method returned -1; training will stop.\n");
222
+ }
223
+
224
+ return status;
225
+ }
226
+
184
227
  /** call-seq: new(hash) -> new ruby-fann neural network object
185
228
 
186
229
  Initialization routine for both standard, shortcut & filename forms of FANN:
@@ -204,7 +247,7 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
204
247
  VALUE num_inputs = rb_hash_aref(hash, ID2SYM(rb_intern("num_inputs")));
205
248
  VALUE num_outputs = rb_hash_aref(hash, ID2SYM(rb_intern("num_outputs")));
206
249
  VALUE hidden_neurons = rb_hash_aref(hash, ID2SYM(rb_intern("hidden_neurons")));
207
-
250
+ printf("initializing\n\n\n");
208
251
  struct fann* ann;
209
252
  if (TYPE(filename)==T_STRING)
210
253
  {
@@ -243,14 +286,32 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
243
286
  int i;
244
287
  for (i=1; i<=num_layers-2; i++) {
245
288
  layers[i]=NUM2UINT(RARRAY(hidden_neurons)->ptr[i-1]);
246
- printf("Setting layer [%d] to [%d]\n", i, layers[i]);
247
289
  }
248
290
 
249
291
  ann = fann_create_standard_array(num_layers, layers);
250
292
  printf("Created RubyFann::Standard [%d].\n", ann);
251
293
  }
252
-
294
+
253
295
  DATA_PTR(self) = ann;
296
+
297
+ printf("Checking for callback...");
298
+
299
+ //int callback = rb_protect(invoke_training_callback, (self), &status);
300
+ // VALUE callback = rb_funcall(DATA_PTR(self), "training_callback", 0);
301
+ if(rb_respond_to(self, rb_intern("training_callback")))
302
+ {
303
+ printf("found(%d).\n", ann->callback);
304
+ fann_set_callback(ann, &fann_training_callback);
305
+ fann_set_user_data(ann, self);
306
+ printf("found(%d).\n", ann->callback);
307
+ }
308
+ else
309
+ {
310
+ printf("none found.\n");
311
+ }
312
+
313
+
314
+ //DATA_PTR(self) = ann;
254
315
  return (VALUE)ann;
255
316
  }
256
317
 
@@ -45,7 +45,7 @@ module RubyFann
45
45
 
46
46
  # Add nodes:
47
47
  neurons.each do |neuron|
48
- fillcolor = "bisque2" # : "khaki3"
48
+ fillcolor = "transparent" # : "khaki3"
49
49
  layer = neuron[:layer]
50
50
  fillcolor = case layer
51
51
  when 0: @input_layer_color
@@ -55,7 +55,8 @@ module RubyFann
55
55
 
56
56
  #puts "adding neuron with #{neuron[:value]}"
57
57
  node_id = neuron.object_id.to_s
58
- label = (layer==0) ? ("%d-%0.3f-%0.3f" % [neuron[:layer], neuron[:value], neuron[:sum]]) : ("%d-%0.3f-%0.3f" % [neuron[:layer], neuron[:value], neuron[:sum]])
58
+ # label = (layer==0) ? ("%d-%0.3f-%0.3f" % [neuron[:layer], neuron[:value], neuron[:sum]]) : ("%d-%0.3f-%0.3f" % [neuron[:layer], neuron[:value], neuron[:sum]])
59
+ label = (layer==0 || layer==max_layer) ? ("%0.3f" % neuron[:value]) : ("%0.3f" % rand) #neuron[:sum])
59
60
  graph_node_hash[node_id] = graph_viz.add_node(
60
61
  node_id,
61
62
  :label=>label,
@@ -2,7 +2,7 @@ module RubyFann #:nodoc:
2
2
  module VERSION #:nodoc:
3
3
  MAJOR = 0
4
4
  MINOR = 7
5
- TINY = 7
5
+ TINY = 8
6
6
 
7
7
  STRING = [MAJOR, MINOR, TINY].join('.')
8
8
  end
data/neurotica1.png CHANGED
Binary file
data/neurotica2.vrml CHANGED
@@ -4,9 +4,9 @@ Group { children [
4
4
  scale 0.028 0.028 0.028
5
5
  children [
6
6
  Background { skyColor 1.000 1.000 1.000 }
7
- # node 2898230
7
+ # node 201760
8
8
  Transform {
9
- translation 6.000 6.000 89.000
9
+ translation 6.000 46.000 85.000
10
10
  scale 2.000 2.000 2.000
11
11
  children [
12
12
  Transform {
@@ -24,9 +24,9 @@ Transform {
24
24
  }
25
25
  ]
26
26
  }
27
- # node 2897300
27
+ # node 200210
28
28
  Transform {
29
- translation 28.000 6.000 56.000
29
+ translation 50.000 6.000 30.000
30
30
  scale 2.000 2.000 2.000
31
31
  children [
32
32
  Transform {
@@ -44,9 +44,44 @@ Transform {
44
44
  }
45
45
  ]
46
46
  }
47
- # node 2897070
47
+ # edge 201760 -> 200210
48
+ Group { children [
48
49
  Transform {
49
- translation 50.000 6.000 0.000
50
+ children [
51
+ Shape {
52
+ geometry Cylinder {
53
+ bottom FALSE top FALSE
54
+ height 38.822 radius 1.000 }
55
+ appearance Appearance {
56
+ material Material {
57
+ ambientIntensity 0.33
58
+ diffuseColor 0.000 0.000 1.000
59
+ }
60
+ }
61
+ }
62
+ Transform {
63
+ translation 0 24.411 0
64
+ children [
65
+ Shape {
66
+ geometry Cone {bottomRadius 3.500 height 10.000 }
67
+ appearance Appearance {
68
+ material Material {
69
+ ambientIntensity 0.33
70
+ diffuseColor 0.000 0.000 1.000
71
+ }
72
+ }
73
+ }
74
+ ]
75
+ }
76
+ ]
77
+ center 0 5.000 0
78
+ rotation -0.000 0 -22.000 -3.975
79
+ translation 24.000 17.000 0.000
80
+ }
81
+ ] }
82
+ # node 201430
83
+ Transform {
84
+ translation 28.000 46.000 32.000
50
85
  scale 2.000 2.000 2.000
51
86
  children [
52
87
  Transform {
@@ -64,9 +99,44 @@ Transform {
64
99
  }
65
100
  ]
66
101
  }
67
- # node 2895200
102
+ # edge 201430 -> 200210
103
+ Group { children [
104
+ Transform {
105
+ children [
106
+ Shape {
107
+ geometry Cylinder {
108
+ bottom FALSE top FALSE
109
+ height 25.133 radius 1.000 }
110
+ appearance Appearance {
111
+ material Material {
112
+ ambientIntensity 0.33
113
+ diffuseColor 0.000 0.000 1.000
114
+ }
115
+ }
116
+ }
68
117
  Transform {
69
- translation 72.000 6.000 96.000
118
+ translation 0 17.566 0
119
+ children [
120
+ Shape {
121
+ geometry Cone {bottomRadius 3.500 height 10.000 }
122
+ appearance Appearance {
123
+ material Material {
124
+ ambientIntensity 0.33
125
+ diffuseColor 0.000 0.000 1.000
126
+ }
127
+ }
128
+ }
129
+ ]
130
+ }
131
+ ]
132
+ center 0 5.000 0
133
+ rotation -0.000 0 -11.000 -3.644
134
+ translation 35.000 17.000 0.000
135
+ }
136
+ ] }
137
+ # node 201330
138
+ Transform {
139
+ translation 50.000 46.000 63.000
70
140
  scale 2.000 2.000 2.000
71
141
  children [
72
142
  Transform {
@@ -84,9 +154,44 @@ Transform {
84
154
  }
85
155
  ]
86
156
  }
87
- # node 2893280
157
+ # edge 201330 -> 200210
158
+ Group { children [
159
+ Transform {
160
+ children [
161
+ Shape {
162
+ geometry Cylinder {
163
+ bottom FALSE top FALSE
164
+ height 19.035 radius 1.000 }
165
+ appearance Appearance {
166
+ material Material {
167
+ ambientIntensity 0.33
168
+ diffuseColor 0.000 0.000 1.000
169
+ }
170
+ }
171
+ }
88
172
  Transform {
89
- translation 94.000 6.000 47.000
173
+ translation 0 14.518 0
174
+ children [
175
+ Shape {
176
+ geometry Cone {bottomRadius 3.500 height 10.000 }
177
+ appearance Appearance {
178
+ material Material {
179
+ ambientIntensity 0.33
180
+ diffuseColor 0.000 0.000 1.000
181
+ }
182
+ }
183
+ }
184
+ ]
185
+ }
186
+ ]
187
+ center 0 5.000 0
188
+ rotation -0.000 0 1.000 -3.142
189
+ translation 46.000 17.000 0.000
190
+ }
191
+ ] }
192
+ # node 200520
193
+ Transform {
194
+ translation 72.000 46.000 55.000
90
195
  scale 2.000 2.000 2.000
91
196
  children [
92
197
  Transform {
@@ -104,9 +209,44 @@ Transform {
104
209
  }
105
210
  ]
106
211
  }
107
- # node 2892270
212
+ # edge 200520 -> 200210
213
+ Group { children [
214
+ Transform {
215
+ children [
216
+ Shape {
217
+ geometry Cylinder {
218
+ bottom FALSE top FALSE
219
+ height 24.018 radius 1.000 }
220
+ appearance Appearance {
221
+ material Material {
222
+ ambientIntensity 0.33
223
+ diffuseColor 0.000 0.000 1.000
224
+ }
225
+ }
226
+ }
227
+ Transform {
228
+ translation 0 17.009 0
229
+ children [
230
+ Shape {
231
+ geometry Cone {bottomRadius 3.500 height 10.000 }
232
+ appearance Appearance {
233
+ material Material {
234
+ ambientIntensity 0.33
235
+ diffuseColor 0.000 0.000 1.000
236
+ }
237
+ }
238
+ }
239
+ ]
240
+ }
241
+ ]
242
+ center 0 5.000 0
243
+ rotation -0.000 0 11.000 -3.644
244
+ translation 57.000 17.000 0.000
245
+ }
246
+ ] }
247
+ # node 200290
108
248
  Transform {
109
- translation 116.000 6.000 38.000
249
+ translation 94.000 46.000 53.000
110
250
  scale 2.000 2.000 2.000
111
251
  children [
112
252
  Transform {
@@ -124,6 +264,41 @@ Transform {
124
264
  }
125
265
  ]
126
266
  }
267
+ # edge 200290 -> 200210
268
+ Group { children [
269
+ Transform {
270
+ children [
271
+ Shape {
272
+ geometry Cylinder {
273
+ bottom FALSE top FALSE
274
+ height 37.531 radius 1.000 }
275
+ appearance Appearance {
276
+ material Material {
277
+ ambientIntensity 0.33
278
+ diffuseColor 0.000 0.000 1.000
279
+ }
280
+ }
281
+ }
282
+ Transform {
283
+ translation 0 23.765 0
284
+ children [
285
+ Shape {
286
+ geometry Cone {bottomRadius 3.500 height 10.000 }
287
+ appearance Appearance {
288
+ material Material {
289
+ ambientIntensity 0.33
290
+ diffuseColor 0.000 0.000 1.000
291
+ }
292
+ }
293
+ }
294
+ ]
295
+ }
296
+ ]
297
+ center 0 5.000 0
298
+ rotation -0.000 0 22.000 -3.975
299
+ translation 68.000 17.000 0.000
300
+ }
301
+ ] }
127
302
  ] }
128
- Viewpoint {position 2.259 0.222 7.273}
303
+ Viewpoint {position 1.852 0.963 7.072}
129
304
  ] }
@@ -5,23 +5,26 @@ require 'ruby_fann/neural_network'
5
5
  require 'ruby_fann/neurotica'
6
6
 
7
7
  class NeuroticaTest < Test::Unit::TestCase
8
- def test_basic_output
9
-
8
+ def test_basic_output
9
+ neurotica = RubyFann::Neurotica.new
10
+
10
11
  train = RubyFann::TrainData.new(
11
12
  :inputs=>[[0.3, 0.4, 0.5, 1.0, -1.0], [0.1, 0.2, 0.3, 1.0, 1.0], [0.6, 0.74, 0.58, -1.0, -1.0], [0.109, 0.677, 0.21, -1.0, 1.0]],
12
13
  :desired_outputs=>[[0.7, 0.4, 0.9], [0.8, -0.2, -0.5], [-0.33, 0.34, -0.22], [0.129, -0.87, 0.25]])
13
14
 
14
- neural_net = RubyFann::Standard.new(:num_inputs=>3, :hidden_neurons=>[4, 7, 9, 3, 5], :num_outputs=>3)
15
+ neural_net = RubyFann::Standard.new(:num_inputs=>3, :hidden_neurons=>[4, 2, 1], :num_outputs=>3)
15
16
  neural_net.train_on_data(train, 100, 20, 0.01)
16
17
 
18
+ neurotica.graph(neural_net, "neurotica1.png")
19
+
17
20
  # train = RubyFann::TrainData.new(:inputs=>[[0.3, 0.4, 0.5], [0.1, 0.2, 0.3]], :desired_outputs=>[[0.7], [0.8]])
18
21
  neural_net = RubyFann::Shortcut.new(:num_inputs=>3, :num_outputs=>3)
19
22
  neural_net.cascadetrain_on_data(train, 5, 10, 0.1)
23
+ neural_net.train_on_data(train, 5, 10, 0.1)
20
24
 
21
25
 
22
- neurotica = RubyFann::Neurotica.new
23
26
 
24
- neurotica.graph(neural_net, "neurotica1.png")
27
+ neurotica.graph(neural_net, "neurotica2.png")
25
28
  end
26
29
 
27
30
  def test_3d_output
@@ -8,6 +8,21 @@ class MyShortcut < RubyFann::Shortcut
8
8
  end
9
9
  end
10
10
 
11
+ class MyFann < RubyFann::Standard
12
+ attr_accessor :callback_invoked
13
+ # def initialize
14
+ # super(:num_inputs=>5, :num_outputs=>1)
15
+ # end
16
+ def training_callback(args)
17
+ puts "ARGS: #{args.inspect}"
18
+ @callback_invoked=true
19
+ 0
20
+ end
21
+ end
22
+
23
+
24
+
25
+
11
26
  class RubyFannTest < Test::Unit::TestCase
12
27
  def test_create_standard
13
28
  fann = RubyFann::Standard.new(:num_inputs=>1, :hidden_neurons=>[3, 4, 3, 4], :num_outputs=>1)
@@ -130,12 +145,37 @@ class RubyFannTest < Test::Unit::TestCase
130
145
 
131
146
  def test_train_on_data
132
147
  train = RubyFann::TrainData.new(:inputs=>[[0.3, 0.4, 0.5], [0.1, 0.2, 0.3]], :desired_outputs=>[[0.7], [0.8]])
133
- fann = RubyFann::Standard.new(:num_inputs=>5, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
148
+ fann = RubyFann::Standard.new(:num_inputs=>3, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
134
149
  fann.train_on_data(train, 1000, 10, 0.1)
135
150
  outputs = fann.run([3.0, 2.0, 3.0])
136
151
  puts "OUTPUT FROM RUN WAS #{outputs.inspect}"
137
152
  end
138
153
 
154
+ def test_train_callback
155
+ puts "train callback"
156
+ train = RubyFann::TrainData.new(:inputs=>[[0.3, 0.4, 0.5], [0.1, 0.2, 0.3]], :desired_outputs=>[[0.7], [0.8]])
157
+ fann = MyFann.new(:num_inputs=>3, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
158
+
159
+ assert(!fann.callback_invoked)
160
+ fann.train_on_data(train, 1000, 1, 0.01)
161
+ assert(fann.callback_invoked)
162
+ end
163
+
164
+ def test_train_bug
165
+ require 'rubygems'
166
+ require 'ruby_fann/neural_network'
167
+ training_data = RubyFann::TrainData.new(
168
+ :inputs=>[[0.3, 0.4, 0.5], [0.1, 0.2, 0.3]],
169
+ :desired_outputs=>[[0.7], [0.8]])
170
+
171
+ fann = RubyFann::Standard.new(
172
+ :num_inputs=>3,
173
+ :hidden_neurons=>[2, 8, 4, 3, 4],
174
+ :num_outputs=>1)
175
+
176
+ fann.train_on_data(training_data, 1000, 1, 0.1)
177
+ end
178
+
139
179
  def test_activation_function
140
180
  fann = RubyFann::Standard.new(:num_inputs=>5, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
141
181
  fann.set_activation_function(:linear, 1, 2)
data/website/index.html CHANGED
@@ -33,7 +33,7 @@
33
33
  <h1>ruby-fann</h1>
34
34
  <div id="version" class="clickable" onclick='document.location = "http://rubyforge.org/projects/ruby-fann"; return false'>
35
35
  <p>Get Version</p>
36
- <a href="http://rubyforge.org/projects/ruby-fann" class="numbers">0.7.7</a>
36
+ <a href="http://rubyforge.org/projects/ruby-fann" class="numbers">0.7.8</a>
37
37
  </div>
38
38
  <p><em>Bindings to use <a href="http://leenissen.dk/fann/"><span class="caps">FANN</span></a> (Fast Artificial Neural Network) from within ruby/rails environment.</em></p>
39
39
 
@@ -62,9 +62,9 @@
62
62
 
63
63
  <ul>
64
64
  <li>Fann 2.1 or greater (preferably in /usr/local/lib). </li>
65
- <li>Ruby 1.8.6 or greater. Windows Ruby should be built natively in Cygwin/MingW. I haven&#8217;t had much luck with any native extensions and the one click installer.</li>
65
+ <li>Ruby 1.8.6 or greater. Windows Ruby should be built natively in Cygwin/MingW. The OneClick installer seems to have problems with any gem that has a native component.</li>
66
66
  <li>gnu make tools or equiv for native code in ext (tested on linux, mac os x, and windows with <a href="http://www.cygwin.com/">Cygwin</a>).</li>
67
- <li>graphviz and ruby-graphviz is required for <a href="http://ruby-fann.rubyforge.org/rdoc/classes/RubyFann/Neurotica.html">Neurotica</a> (<em>experimental</em>) graphical output.</li>
67
+ <li>graphviz and ruby-graphviz are required for <a href="http://ruby-fann.rubyforge.org/rdoc/classes/RubyFann/Neurotica.html">Neurotica</a> (<em>experimental</em>) graphical output.</li>
68
68
  </ul>
69
69
 
70
70
 
@@ -100,6 +100,30 @@
100
100
  </pre></p>
101
101
 
102
102
 
103
+ <h2>Now implements a callback method</h2>
104
+
105
+
106
+ <p>This callback function can be called during training when using train_on_data, train_on_file or cascadetrain_on_data.</p>
107
+
108
+
109
+ <p>It is very useful for doing custom things during training. It is recommended to use this function when implementing custom training procedures, or when visualizing the training in a <span class="caps">GUI</span> etc. The args which the callback function takes are the parameters given to train_on_data, plus an epochs parameter which tells how many epochs the training has taken so far.</p>
110
+
111
+
112
+ <p>The callback method should return an integer, if the callback function returns -1, the training will terminate.</p>
113
+
114
+
115
+ <p>The callback (training_callback) will be automatically called if it is implemented on your subclass as follows:</p>
116
+
117
+
118
+ <pre>
119
+ class MyFann &lt; RubyFann::Standard
120
+ def training_callback(args)
121
+ puts "ARGS: #{args.inspect}"
122
+ 0
123
+ end
124
+ end
125
+ </pre>
126
+
103
127
  <h2>Forum</h2>
104
128
 
105
129
 
@@ -126,7 +150,7 @@
126
150
 
127
151
  <p>Comments are welcome. Send an email to <a href="mailto:steven@7bpeople.com">Steven Miers</a> email via the <a href="http://groups.google.com/group/ruby_fann">forum</a></p>
128
152
  <p class="coda">
129
- <a href="steven@7bpeople.com">Steven Miers</a>, 8th January 2008<br>
153
+ <a href="steven@7bpeople.com">Steven Miers</a>, 25th March 2008<br>
130
154
  </p>
131
155
  </div>
132
156
 
data/website/index.txt CHANGED
@@ -17,9 +17,9 @@ h2. Installing
17
17
  h2. Requirements:
18
18
 
19
19
  * Fann 2.1 or greater (preferably in /usr/local/lib).
20
- * Ruby 1.8.6 or greater. Windows Ruby should be built natively in Cygwin/MingW. I haven't had much luck with any native extensions and the one click installer.
20
+ * Ruby 1.8.6 or greater. Windows Ruby should be built natively in Cygwin/MingW. The OneClick installer seems to have problems with any gem that has a native component.
21
21
  * gnu make tools or equiv for native code in ext (tested on linux, mac os x, and windows with "Cygwin":http://www.cygwin.com/).
22
- * graphviz and ruby-graphviz is required for "Neurotica":http://ruby-fann.rubyforge.org/rdoc/classes/RubyFann/Neurotica.html (_experimental_) graphical output.
22
+ * graphviz and ruby-graphviz are required for "Neurotica":http://ruby-fann.rubyforge.org/rdoc/classes/RubyFann/Neurotica.html (_experimental_) graphical output.
23
23
 
24
24
  h2. Unit Tests
25
25
 
@@ -49,6 +49,25 @@ fann.train_on_data(training_data, 1000, 1, 0.1)
49
49
  outputs = fann.run([0.7, 0.9, 0.2])
50
50
  </pre>
51
51
 
52
+ h2. Now implements a callback method
53
+
54
+ This callback function can be called during training when using train_on_data, train_on_file or cascadetrain_on_data.
55
+
56
+ It is very useful for doing custom things during training. It is recommended to use this function when implementing custom training procedures, or when visualizing the training in a GUI etc. The args which the callback function takes are the parameters given to train_on_data, plus an epochs parameter which tells how many epochs the training has taken so far.
57
+
58
+ The callback method should return an integer, if the callback function returns -1, the training will terminate.
59
+
60
+ The callback (training_callback) will be automatically called if it is implemented on your subclass as follows:
61
+
62
+ <pre>
63
+ class MyFann < RubyFann::Standard
64
+ def training_callback(args)
65
+ puts "ARGS: #{args.inspect}"
66
+ 0
67
+ end
68
+ end
69
+ </pre>
70
+
52
71
  h2. Forum
53
72
 
54
73
  "http://groups.google.com/group/ruby_fann":http://groups.google.com/group/ruby_fann
data/xor_cascade.net CHANGED
@@ -30,5 +30,5 @@ cascade_activation_steepnesses_count=4
30
30
  cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
31
31
  layer_sizes=3 1 1 1
32
32
  scale_included=0
33
- neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 14, 1.00000000000000000000e+00) (4, 3, 1.00000000000000000000e+00) (5, 5, 5.00000000000000000000e-01)
34
- connections (connected_to_neuron, weight)=(0, 1.86109735793926978076e+00) (1, 1.53457642869428334542e+00) (2, 1.57379632519913137401e+00) (0, -1.63855205202685857979e-02) (1, -4.37714446815499758081e-02) (2, -1.79293283324296588432e+01) (3, -3.03416410098376498894e-01) (0, 1.77185261840735880368e-01) (1, 1.41681920756182133658e-01) (2, 2.22790414769918176940e-01) (3, 4.00602073083754319782e+01) (4, -7.43623769268710044233e-02)
33
+ neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 8, 7.50000000000000000000e-01) (4, 3, 1.00000000000000000000e+00) (5, 5, 5.00000000000000000000e-01)
34
+ connections (connected_to_neuron, weight)=(0, 1.17487274070837233175e+00) (1, 5.72167203598355156302e-01) (2, -9.47632295698525028982e-04) (0, -6.26256319600163036121e-03) (1, -5.45082622451080987813e-03) (2, -1.71164041899664596258e+01) (3, 4.09645059009046297316e-01) (0, 1.14152533557484198168e-01) (1, 5.04601192810541165912e-01) (2, 1.60318449249861605388e-01) (3, 6.18285862204485852089e+01) (4, 6.55252632598424034072e-01)
data/xor_float.net CHANGED
@@ -31,4 +31,4 @@ cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000
31
31
  layer_sizes=3 4 2
32
32
  scale_included=0
33
33
  neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (0, 5, 1.00000000000000000000e+00) (4, 5, 1.00000000000000000000e+00) (0, 5, 1.00000000000000000000e+00)
34
- connections (connected_to_neuron, weight)=(0, 2.82674709981290295246e+00) (1, 1.83285679199681794671e+00) (2, 1.79351510444946171674e+00) (0, 1.30190337108136544586e+00) (1, 2.12157151948249156348e+00) (2, -1.35178282494634038891e+00) (0, -1.40745923743137546502e+00) (1, 1.66503130606607463449e+00) (2, -2.25816505377594189952e+00) (3, 3.96208974962423310373e+00) (4, -4.07142363835370790781e+00) (5, 2.94122805959320210434e+00) (6, -8.99181418321290504281e-01)
34
+ connections (connected_to_neuron, weight)=(0, -1.16530225097567696757e+00) (1, 1.28392029697533782695e+00) (2, -1.86287924359784518025e+00) (0, -1.54401822430982371692e+00) (1, -2.14060738181401077895e+00) (2, 1.32833706012518493189e+00) (0, 2.19704260703105402897e+00) (1, 1.68408615028061281471e+00) (2, 1.42573306421817536105e+00) (3, 2.17038801863726327213e+00) (4, 4.85232503890282362846e+00) (5, 4.87237610174419533138e+00) (6, -2.15603708927686987806e+00)
metadata CHANGED
@@ -3,8 +3,8 @@ rubygems_version: 0.9.2
3
3
  specification_version: 1
4
4
  name: ruby-fann
5
5
  version: !ruby/object:Gem::Version
6
- version: 0.7.7
7
- date: 2008-01-23 00:00:00 -06:00
6
+ version: 0.7.8
7
+ date: 2008-03-25 00:00:00 -05:00
8
8
  summary: Bindings to use FANN from within ruby/rails environment.
9
9
  require_paths:
10
10
  - lib