ruby-fann 0.7.8 → 0.7.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/History.txt CHANGED
@@ -1,3 +1,10 @@
+ == 0.7.9 2008-08-15
+
+ * 1 major bug fix:
+ * Fix set_activation_function for correct # of args (2 instead of 1)
+ * 1 minor enhancement
+ * Provide support for fann_get_activation_function
+
  == 0.7.8 2007-03-25

  * 1 major enhancement:
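
The changelog entries above correspond to the code changes later in this diff: set_activation_function_layer is now registered with two arguments (the activation symbol plus a layer index), and a get_activation_function reader is added. A minimal Ruby sketch of the corrected call, mirroring the test introduced in this release and assuming the gem is already loaded:

    # Network shape taken from the new test_activation_function_layer test.
    fann = RubyFann::Standard.new(:num_inputs => 5,
                                  :hidden_neurons => [2, 8, 4, 3, 4],
                                  :num_outputs => 1)

    # 0.7.8 registered this method with a single argument; 0.7.9 takes the
    # activation symbol and the layer index (the input layer counts as layer 0).
    fann.set_activation_function_layer(:linear, 1)
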
@@ -300,7 +300,6 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
  // VALUE callback = rb_funcall(DATA_PTR(self), "training_callback", 0);
  if(rb_respond_to(self, rb_intern("training_callback")))
  {
- printf("found(%d).\n", ann->callback);
  fann_set_callback(ann, &fann_training_callback);
  fann_set_user_data(ann, self);
  printf("found(%d).\n", ann->callback);
@@ -450,8 +449,10 @@ static VALUE set_activation_function_hidden(VALUE self, VALUE activation_func)
  :linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
  :sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
  :elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
- :sin, :cos*/
+ :sin, :cos

+ It is not possible to set activation functions for the neurons in the input layer.
+ */
  static VALUE set_activation_function_layer(VALUE self, VALUE activation_func, VALUE layer)
  {
  Check_Type(activation_func, T_SYMBOL);
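
The rdoc comment above lists the activation symbols that set_activation_function_layer accepts and notes that the input layer cannot be targeted. Judging from the test added further down in this diff, an unrecognized symbol is rejected with a RuntimeError; a small sketch of both cases:

    fann = RubyFann::Standard.new(:num_inputs => 5,
                                  :hidden_neurons => [2, 8, 4, 3, 4],
                                  :num_outputs => 1)

    # Any symbol from the documented list is accepted for a hidden layer.
    fann.set_activation_function_layer(:sigmoid_symmetric, 1)

    # A symbol outside the list is rejected before it reaches FANN.
    begin
      fann.set_activation_function_layer(:fake, 1)
    rescue RuntimeError => e
      puts "rejected: #{e.message}"
    end
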
@@ -461,6 +462,23 @@ static VALUE set_activation_function_layer(VALUE self, VALUE activation_func, VA
  fann_set_activation_function_layer(f, sym_to_activation_function(activation_func), NUM2INT(layer));
  }

+ /** call-seq: get_activation_function(layer) -> return value
+
+ Get the activation function for neuron number *neuron* in layer number *layer*,
+ counting the input layer as layer 0.
+
+ It is not possible to get activation functions for the neurons in the input layer.
+ */
+ static VALUE get_activation_function(VALUE self, VALUE layer, VALUE neuron)
+ {
+ Check_Type(layer, T_FIXNUM);
+ Check_Type(neuron, T_FIXNUM);
+ struct fann* f;
+ Data_Get_Struct(self, struct fann, f);
+ fann_type val = fann_get_activation_function(f, NUM2INT(layer), NUM2INT(neuron));
+ return activation_function_to_sym(val);
+ }
+
  /** call-seq: set_activation_function_output(activation_func)

  Set the activation function for the output layer. activation_func must be one of the following symbols:
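
The new get_activation_function above is the read-side counterpart: it takes a layer index and a neuron index and maps the FANN enum back to one of the documented symbols. A sketch of the round trip, following the assertion in the new test:

    fann = RubyFann::Standard.new(:num_inputs => 5,
                                  :hidden_neurons => [2, 8, 4, 3, 4],
                                  :num_outputs => 1)

    fann.set_activation_function_layer(:linear, 1)

    # Layer 1 is the first hidden layer (the input layer is layer 0);
    # neuron 0 is the first neuron in that layer.
    fann.get_activation_function(1, 0)   # => :linear
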
@@ -1479,7 +1497,8 @@ void Init_neural_network ()
  rb_define_method(m_rb_fann_standard_class, "init_weights", init_weights, 1);
  rb_define_method(m_rb_fann_standard_class, "set_activation_function", set_activation_function, 3);
  rb_define_method(m_rb_fann_standard_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
- rb_define_method(m_rb_fann_standard_class, "set_activation_function_layer", set_activation_function_layer, 1);
+ rb_define_method(m_rb_fann_standard_class, "set_activation_function_layer", set_activation_function_layer, 2);
+ rb_define_method(m_rb_fann_standard_class, "get_activation_function", get_activation_function, 2);
  rb_define_method(m_rb_fann_standard_class, "set_activation_function_output", set_activation_function_output, 1);
  rb_define_method(m_rb_fann_standard_class, "get_activation_steepness", get_activation_steepness, 2);
  rb_define_method(m_rb_fann_standard_class, "set_activation_steepness", set_activation_steepness, 3);
@@ -1577,7 +1596,8 @@ void Init_neural_network ()
  rb_define_method(m_rb_fann_shortcut_class, "init_weights", init_weights, 1);
  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function", set_activation_function, 3);
  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
- rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_layer", set_activation_function_layer, 1);
+ rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_layer", set_activation_function_layer, 2);
+ rb_define_method(m_rb_fann_standard_class, "get_activation_function", get_activation_function, 2);
  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_output", set_activation_function_output, 1);
  rb_define_method(m_rb_fann_shortcut_class, "get_activation_steepness", get_activation_steepness, 2);
  rb_define_method(m_rb_fann_shortcut_class, "set_activation_steepness", set_activation_steepness, 3);
@@ -2,7 +2,7 @@ module RubyFann #:nodoc:
  module VERSION #:nodoc:
  MAJOR = 0
  MINOR = 7
- TINY = 8
+ TINY = 9

  STRING = [MAJOR, MINOR, TINY].join('.')
  end
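
With TINY bumped above, the STRING constant joins to "0.7.9", which is the version the gem now reports. A trivial check, assuming the conventional lib/ruby_fann/version.rb location for this file:

    require 'ruby_fann/version'   # assumed require path

    RubyFann::VERSION::STRING   # => "0.7.9"
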
data/neurotica1.png CHANGED
Binary file
data/neurotica2.vrml CHANGED
@@ -4,9 +4,9 @@ Group { children [
  scale 0.028 0.028 0.028
  children [
  Background { skyColor 1.000 1.000 1.000 }
- # node 201760
+ # node 1994420
  Transform {
- translation 6.000 46.000 85.000
+ translation 6.000 46.000 52.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -24,9 +24,9 @@ Transform {
  }
  ]
  }
- # node 200210
+ # node 1994000
  Transform {
- translation 50.000 6.000 30.000
+ translation 50.000 6.000 98.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -44,7 +44,7 @@ Transform {
  }
  ]
  }
- # edge 201760 -> 200210
+ # edge 1994420 -> 1994000
  Group { children [
  Transform {
  children [
@@ -79,9 +79,9 @@ Transform {
  translation 24.000 17.000 0.000
  }
  ] }
- # node 201430
+ # node 1994200
  Transform {
- translation 28.000 46.000 32.000
+ translation 28.000 46.000 37.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -99,7 +99,7 @@ Transform {
  }
  ]
  }
- # edge 201430 -> 200210
+ # edge 1994200 -> 1994000
  Group { children [
  Transform {
  children [
@@ -134,9 +134,9 @@ Transform {
  translation 35.000 17.000 0.000
  }
  ] }
- # node 201330
+ # node 1994150
  Transform {
- translation 50.000 46.000 63.000
+ translation 50.000 46.000 20.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -154,7 +154,7 @@ Transform {
  }
  ]
  }
- # edge 201330 -> 200210
+ # edge 1994150 -> 1994000
  Group { children [
  Transform {
  children [
@@ -189,9 +189,9 @@ Transform {
  translation 46.000 17.000 0.000
  }
  ] }
- # node 200520
+ # node 1994100
  Transform {
- translation 72.000 46.000 55.000
+ translation 72.000 46.000 6.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -209,7 +209,7 @@ Transform {
  }
  ]
  }
- # edge 200520 -> 200210
+ # edge 1994100 -> 1994000
  Group { children [
  Transform {
  children [
@@ -244,9 +244,9 @@ Transform {
  translation 57.000 17.000 0.000
  }
  ] }
- # node 200290
+ # node 1994050
  Transform {
- translation 94.000 46.000 53.000
+ translation 94.000 46.000 55.000
  scale 2.000 2.000 2.000
  children [
  Transform {
@@ -264,7 +264,7 @@ Transform {
  }
  ]
  }
- # edge 200290 -> 200210
+ # edge 1994050 -> 1994000
  Group { children [
  Transform {
  children [
@@ -300,5 +300,5 @@ Transform {
  }
  ] }
  ] }
- Viewpoint {position 1.852 0.963 7.072}
+ Viewpoint {position 1.852 0.963 6.184}
  ] }
@@ -188,6 +188,13 @@ class RubyFannTest < Test::Unit::TestCase
  assert_raises(RuntimeError) { fann.set_activation_function_hidden(:fake) }
  fann.set_activation_function_hidden(:elliot)
  end
+
+ def test_activation_function_layer
+ fann = RubyFann::Standard.new(:num_inputs=>5, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
+ fann.set_activation_function_layer(:linear, 1)
+ assert_raises(RuntimeError) { fann.set_activation_function_layer(:fake, 0) }
+ assert_equal(:linear, fann.get_activation_function(1, 0))
+ end

  def test_activation_function_output
  fann = RubyFann::Standard.new(:num_inputs=>5, :hidden_neurons=>[2, 8, 4, 3, 4], :num_outputs=>1)
data/website/index.html CHANGED
@@ -33,7 +33,7 @@
  <h1>ruby-fann</h1>
  <div id="version" class="clickable" onclick='document.location = "http://rubyforge.org/projects/ruby-fann"; return false'>
  <p>Get Version</p>
- <a href="http://rubyforge.org/projects/ruby-fann" class="numbers">0.7.8</a>
+ <a href="http://rubyforge.org/projects/ruby-fann" class="numbers">0.7.9</a>
  </div>
  <p><em>Bindings to use <a href="http://leenissen.dk/fann/"><span class="caps">FANN</span></a> (Fast Artificial Neural Network) from within ruby/rails environment.</em></p>

@@ -57,6 +57,12 @@
  </ul>


+ <ul>
+ <li>Some Mac <span class="caps">OS X</span> users (possibly that are using, or have used, MacPorts) need to use <strong>sudo env <span class="caps">ARCHFLAGS</span>=&#8221;-arch i386&#8221;</strong> in front of the install command:
+ <pre class='syntax'><span class="ident">sudo</span> <span class="ident">env</span> <span class="constant">ARCHFLAGS</span><span class="punct">=&quot;</span><span class="string">-arch i386</span><span class="punct">&quot;</span> <span class="ident">gem</span> <span class="ident">install</span> <span class="ident">ruby</span><span class="punct">-</span><span class="ident">fann</span></pre></li>
+ </ul>
+
+
  <h2>Requirements:</h2>


@@ -150,7 +156,7 @@ end

  <p>Comments are welcome. Send an email to <a href="mailto:steven@7bpeople.com">Steven Miers</a> email via the <a href="http://groups.google.com/group/ruby_fann">forum</a></p>
  <p class="coda">
- <a href="steven@7bpeople.com">Steven Miers</a>, 25th March 2008<br>
+ <a href="steven@7bpeople.com">Steven Miers</a>, 9th April 2008<br>
  </p>
  </div>

data/website/index.txt CHANGED
@@ -14,6 +14,9 @@ h2. Installing
  * Then, install ruby-fann:
  <pre syntax="ruby">[sudo] gem install ruby-fann</pre>

+ * Some Mac OS X users (possibly that are using, or have used, MacPorts) need to use *sudo env ARCHFLAGS="-arch i386"* in front of the install command:
+ <pre syntax="ruby">sudo env ARCHFLAGS="-arch i386" gem install ruby-fann</pre>
+
  h2. Requirements:

  * Fann 2.1 or greater (preferably in /usr/local/lib).
data/xor_cascade.net CHANGED
@@ -30,5 +30,5 @@ cascade_activation_steepnesses_count=4
  cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
  layer_sizes=3 1 1 1
  scale_included=0
- neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 8, 7.50000000000000000000e-01) (4, 3, 1.00000000000000000000e+00) (5, 5, 5.00000000000000000000e-01)
- connections (connected_to_neuron, weight)=(0, 1.17487274070837233175e+00) (1, 5.72167203598355156302e-01) (2, -9.47632295698525028982e-04) (0, -6.26256319600163036121e-03) (1, -5.45082622451080987813e-03) (2, -1.71164041899664596258e+01) (3, 4.09645059009046297316e-01) (0, 1.14152533557484198168e-01) (1, 5.04601192810541165912e-01) (2, 1.60318449249861605388e-01) (3, 6.18285862204485852089e+01) (4, 6.55252632598424034072e-01)
+ neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 15, 7.50000000000000000000e-01) (4, 3, 1.00000000000000000000e+00) (5, 5, 5.00000000000000000000e-01)
+ connections (connected_to_neuron, weight)=(0, 2.10286480710118617310e+00) (1, 2.04373163145531488993e+00) (2, 4.63108727828299038265e-01) (0, 8.37135096431359040925e-03) (1, 4.53412101729364080960e-03) (2, -1.60546889498873532887e+01) (3, -5.16049715334957539992e-02) (0, 8.95993205212286980554e-01) (1, 3.52223488932136952201e-01) (2, -5.61956417965705057327e-01) (3, 4.08175747120339238450e+01) (4, -5.73087151679878559207e-01)
data/xor_float.net CHANGED
@@ -31,4 +31,4 @@ cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000
  layer_sizes=3 4 2
  scale_included=0
  neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (3, 5, 1.00000000000000000000e+00) (0, 5, 1.00000000000000000000e+00) (4, 5, 1.00000000000000000000e+00) (0, 5, 1.00000000000000000000e+00)
- connections (connected_to_neuron, weight)=(0, -1.16530225097567696757e+00) (1, 1.28392029697533782695e+00) (2, -1.86287924359784518025e+00) (0, -1.54401822430982371692e+00) (1, -2.14060738181401077895e+00) (2, 1.32833706012518493189e+00) (0, 2.19704260703105402897e+00) (1, 1.68408615028061281471e+00) (2, 1.42573306421817536105e+00) (3, 2.17038801863726327213e+00) (4, 4.85232503890282362846e+00) (5, 4.87237610174419533138e+00) (6, -2.15603708927686987806e+00)
+ connections (connected_to_neuron, weight)=(0, 2.36087072243863227428e+00) (1, 2.23300336307513402900e+00) (2, 1.96336343675226854266e+00) (0, 2.50794112269722768715e+00) (1, 2.44626718636426510045e+00) (2, -1.88110670483874753423e+00) (0, 1.92424255389214260425e+00) (1, 1.85908978706197380859e+00) (2, 1.86489077619511700590e+00) (3, 3.92070597842786483511e+00) (4, -4.51811730870642414004e+00) (5, 6.57427812243734366326e-01) (6, -4.31244371074561883006e+00)
metadata CHANGED
@@ -1,34 +1,30 @@
  --- !ruby/object:Gem::Specification
- rubygems_version: 0.9.2
- specification_version: 1
  name: ruby-fann
  version: !ruby/object:Gem::Version
- version: 0.7.8
- date: 2008-03-25 00:00:00 -05:00
- summary: Bindings to use FANN from within ruby/rails environment.
- require_paths:
- - lib
- - ext
- email: steven@7bpeople.com
- homepage: http://ruby-fann.rubyforge.org
- rubyforge_project: ruby-fann
- description: Bindings to use FANN from within ruby/rails environment.
- autorequire:
- default_executable:
- bindir: bin
- has_rdoc: true
- required_ruby_version: !ruby/object:Gem::Version::Requirement
- requirements:
- - - ">"
- - !ruby/object:Gem::Version
- version: 0.0.0
- version:
+ version: 0.7.9
  platform: ruby
- signing_key:
- cert_chain:
- post_install_message:
  authors:
  - Steven Miers
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2008-08-15 00:00:00 -05:00
+ default_executable:
+ dependencies: []
+
+ description: Bindings to use FANN from within ruby/rails environment.
+ email: steven@7bpeople.com
+ executables: []
+
+ extensions:
+ - ext/ruby_fann/extconf.rb
+ extra_rdoc_files:
+ - History.txt
+ - License.txt
+ - Manifest.txt
+ - README.txt
+ - website/index.txt
  files:
  - History.txt
  - License.txt
@@ -69,25 +65,36 @@ files:
  - xor.train
  - xor_cascade.net
  - xor_float.net
- test_files:
- - test/test_helper.rb
- - test/test_neurotica.rb
- - test/test_ruby_fann.rb
- - test/test_ruby_fann_functional.rb
+ has_rdoc: true
+ homepage: http://ruby-fann.rubyforge.org
+ post_install_message:
  rdoc_options:
  - --main
  - README.txt
- extra_rdoc_files:
- - History.txt
- - License.txt
- - Manifest.txt
- - README.txt
- - website/index.txt
- executables: []
-
- extensions:
- - ext/ruby_fann/extconf.rb
+ require_paths:
+ - lib
+ - ext
+ required_ruby_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: "0"
+ version:
+ required_rubygems_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: "0"
+ version:
  requirements: []

- dependencies: []
-
+ rubyforge_project: ruby-fann
+ rubygems_version: 1.0.1
+ signing_key:
+ specification_version: 2
+ summary: Bindings to use FANN from within ruby/rails environment.
+ test_files:
+ - test/test_helper.rb
+ - test/test_neurotica.rb
+ - test/test_ruby_fann.rb
+ - test/test_ruby_fann_functional.rb