ruby-fann 0.7.10 → 1.0.0

Files changed (52)
  1. data/History.txt +6 -1
  2. data/License.txt +1 -1
  3. data/Manifest.txt +22 -1
  4. data/README.txt +0 -1
  5. data/Rakefile +0 -0
  6. data/config/hoe.rb +0 -0
  7. data/config/requirements.rb +0 -0
  8. data/ext/ruby_fann/MANIFEST +0 -0
  9. data/ext/ruby_fann/Makefile +36 -28
  10. data/ext/ruby_fann/doublefann.c +30 -0
  11. data/ext/ruby_fann/doublefann.h +33 -0
  12. data/ext/ruby_fann/extconf.rb +9 -5
  13. data/ext/ruby_fann/fann.c +1552 -0
  14. data/ext/ruby_fann/fann_activation.h +144 -0
  15. data/ext/ruby_fann/fann_augment.h +0 -0
  16. data/ext/ruby_fann/fann_cascade.c +1031 -0
  17. data/ext/ruby_fann/fann_cascade.h +503 -0
  18. data/ext/ruby_fann/fann_data.h +799 -0
  19. data/ext/ruby_fann/fann_error.c +204 -0
  20. data/ext/ruby_fann/fann_error.h +161 -0
  21. data/ext/ruby_fann/fann_internal.h +148 -0
  22. data/ext/ruby_fann/fann_io.c +762 -0
  23. data/ext/ruby_fann/fann_io.h +100 -0
  24. data/ext/ruby_fann/fann_train.c +962 -0
  25. data/ext/ruby_fann/fann_train.h +1203 -0
  26. data/ext/ruby_fann/fann_train_data.c +1231 -0
  27. data/ext/ruby_fann/neural_network.c +0 -0
  28. data/lib/ruby_fann/neurotica.rb +0 -0
  29. data/lib/ruby_fann/version.rb +3 -3
  30. data/lib/ruby_fann.rb +0 -0
  31. data/neurotica1.png +0 -0
  32. data/neurotica2.vrml +18 -18
  33. data/setup.rb +0 -0
  34. data/tasks/deployment.rake +0 -0
  35. data/tasks/environment.rake +0 -0
  36. data/tasks/website.rake +0 -0
  37. data/test/test.train +0 -0
  38. data/test/test_helper.rb +0 -0
  39. data/test/test_neurotica.rb +0 -0
  40. data/test/test_ruby_fann.rb +0 -0
  41. data/test/test_ruby_fann_functional.rb +0 -0
  42. data/verify.train +0 -0
  43. data/website/index.html +42 -92
  44. data/website/index.txt +0 -0
  45. data/website/javascripts/rounded_corners_lite.inc.js +0 -0
  46. data/website/stylesheets/screen.css +0 -0
  47. data/website/template.rhtml +0 -0
  48. data/xor.train +0 -0
  49. data/xor_cascade.net +2 -2
  50. data/xor_float.net +1 -1
  51. metadata +22 -6
  52. data/log/debug.log +0 -0
data/ext/ruby_fann/fann.c
@@ -0,0 +1,1552 @@
/*
Fast Artificial Neural Network Library (fann)
Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/

#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <time.h>
#include <math.h>

#include "config.h"
#include "fann.h"

FANN_EXTERNAL struct fann *FANN_API fann_create_standard(unsigned int num_layers, ...)
{
    struct fann *ann;
    va_list layer_sizes;
    int i;
    unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

    if(layers == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    va_start(layer_sizes, num_layers);
    for(i = 0; i < (int) num_layers; i++)
    {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = fann_create_standard_array(num_layers, layers);

    free(layers);

    return ann;
}

FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers,
                                                               const unsigned int *layers)
{
    return fann_create_sparse_array(1, num_layers, layers);
}
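Both creation entry points funnel into fann_create_sparse_array with a connection rate of 1, so the variadic and array forms are interchangeable. A minimal usage sketch (illustrative only, not part of the diff; the 2-3-1 layout is an arbitrary choice and fann.h is assumed included):

    /* a 2-3-1 fully connected network, built with either form */
    unsigned int layers[3] = { 2, 3, 1 };
    struct fann *ann;

    ann = fann_create_standard(3, 2, 3, 1);        /* variadic form */
    if(ann != NULL)
        fann_destroy(ann);

    ann = fann_create_standard_array(3, layers);   /* array form, same topology */
    if(ann != NULL)
        fann_destroy(ann);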

FANN_EXTERNAL struct fann *FANN_API fann_create_sparse(float connection_rate,
                                                       unsigned int num_layers, ...)
{
    struct fann *ann;
    va_list layer_sizes;
    int i;
    unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

    if(layers == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    va_start(layer_sizes, num_layers);
    for(i = 0; i < (int) num_layers; i++)
    {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = fann_create_sparse_array(connection_rate, num_layers, layers);

    free(layers);

    return ann;
}

FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
                                                             unsigned int num_layers,
                                                             const unsigned int *layers)
{
    struct fann_layer *layer_it, *last_layer, *prev_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
#ifdef DEBUG
    unsigned int prev_layer_size;
#endif
    unsigned int num_neurons_in, num_neurons_out, i, j;
    unsigned int min_connections, max_connections, num_connections;
    unsigned int connections_per_neuron, allocated_connections;
    unsigned int random_number, found_connection, tmp_con;

#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif
    if(connection_rate > 1)
    {
        connection_rate = 1;
    }

    /* seed random */
#ifndef FANN_NO_SEED
    fann_seed_rand();
#endif

    /* allocate the general structure */
    ann = fann_allocate_structure(num_layers);
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
    decimal_point = ann->decimal_point;
    multiplier = ann->multiplier;
    fann_update_stepwise(ann);
#endif

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        /* we do not allocate room here, but we make sure that
         * last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;   /* +1 for bias */
        ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
    }

    ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
    ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating network with connection rate %f\n", connection_rate);
    printf("input\n");
    printf("  layer       : %d neurons, 1 bias\n",
           ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

    num_neurons_in = ann->num_input;
    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
    {
        num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
        /* If each neuron in a layer must be connected to at least one neuron
         * in the previous layer and one neuron in the next layer, and the
         * bias neuron must be connected to all neurons in the next layer,
         * then this is the minimum number of connections. */
        min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
        max_connections = num_neurons_in * num_neurons_out;   /* not calculating bias */
        num_connections = fann_max(min_connections,
                                   (unsigned int) (0.5 + (connection_rate * max_connections)) +
                                   num_neurons_out);

        connections_per_neuron = num_connections / num_neurons_out;
        allocated_connections = 0;
        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++)
        {
            layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
            allocated_connections += connections_per_neuron;
            layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

            layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
            layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
            layer_it->first_neuron[i].activation_steepness = 0.5;
#endif

            if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
            {
                layer_it->first_neuron[i].last_con++;
                allocated_connections++;
            }
        }

        /* the bias neuron also gets a (empty) connection range */
        layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
        layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

        ann->total_connections += num_connections;

        /* used in the next run of the loop */
        num_neurons_in = num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

    if(connection_rate >= 1)
    {
#ifdef DEBUG
        prev_layer_size = ann->num_input + 1;
#endif
        prev_layer = ann->first_layer;
        last_layer = ann->last_layer;
        for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
        {
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                tmp_con = neuron_it->last_con - 1;
                for(i = neuron_it->first_con; i != tmp_con; i++)
                {
                    ann->weights[i] = (fann_type) fann_random_weight();
                    /* these connections are still initialized for fully connected
                     * networks, to allow operations that are not optimized for
                     * fully connected networks to work. */
                    ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
                }

                /* bias weight */
                ann->weights[tmp_con] = (fann_type) fann_random_bias_weight();
                ann->connections[tmp_con] = prev_layer->first_neuron + (tmp_con - neuron_it->first_con);
            }
#ifdef DEBUG
            prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
            prev_layer = layer_it;
#ifdef DEBUG
            printf("  layer       : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
        }
    }
    else
    {
        /* make connections for a network that is not fully connected */

        /* Generally, what we do is first connect all the input neurons to an
         * output neuron, respecting the number of available input neurons for
         * each output neuron. Then we go through all the output neurons and
         * connect the rest of their connections to input neurons that they
         * are not already connected to.
         */

        /* All the connections are cleared by calloc, because we want to
         * be able to see which connections are already connected */

        for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
        {
            num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
            num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;

            /* first connect the bias neuron */
            bias_neuron = (layer_it - 1)->last_neuron - 1;
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                ann->connections[neuron_it->first_con] = bias_neuron;
                ann->weights[neuron_it->first_con] = (fann_type) fann_random_bias_weight();
            }

            /* then connect all neurons in the input layer */
            last_neuron = (layer_it - 1)->last_neuron - 1;
            for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                /* pick a random neuron in the output layer that has space
                 * for more connections */
                do
                {
                    random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
                    random_neuron = layer_it->first_neuron + random_number;
                    /* checks the last space in the connections array for room */
                }
                while(ann->connections[random_neuron->last_con - 1]);

                /* find an empty space in the connection array and connect */
                for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
                {
                    if(ann->connections[i] == NULL)
                    {
                        ann->connections[i] = neuron_it;
                        ann->weights[i] = (fann_type) fann_random_weight();
                        break;
                    }
                }
            }

            /* then connect the rest of the unconnected neurons */
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                /* find empty space in the connection array and connect */
                for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
                {
                    /* continue if already connected */
                    if(ann->connections[i] != NULL)
                        continue;

                    do
                    {
                        found_connection = 0;
                        random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
                        random_neuron = (layer_it - 1)->first_neuron + random_number;

                        /* check to see if this connection is already there */
                        for(j = neuron_it->first_con; j < i; j++)
                        {
                            if(random_neuron == ann->connections[j])
                            {
                                found_connection = 1;
                                break;
                            }
                        }
                    }
                    while(found_connection);

                    /* we have found a neuron that is not already
                     * connected to us, connect it */
                    ann->connections[i] = random_neuron;
                    ann->weights[i] = (fann_type) fann_random_weight();
                }
            }

#ifdef DEBUG
            printf("  layer       : %d neurons, 1 bias\n", num_neurons_out);
#endif
        }

        /* TODO it would be nice to have the randomly created
         * connections sorted for smoother memory access.
         */
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}
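The connection_rate argument scales the number of connections between the fully connected maximum (1.0) and the minimum described in the comment above; values above 1 are clamped. A usage sketch (layer sizes are arbitrary):

    /* a 10-20-5 network with roughly half of the possible connections */
    struct fann *sparse = fann_create_sparse(0.5f, 3, 10, 20, 5);

    if(sparse != NULL)
        fann_destroy(sparse);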

FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut(unsigned int num_layers, ...)
{
    struct fann *ann;
    int i;
    va_list layer_sizes;
    unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

    if(layers == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    va_start(layer_sizes, num_layers);
    for(i = 0; i < (int) num_layers; i++)
    {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = fann_create_shortcut_array(num_layers, layers);

    free(layers);

    return ann;
}

FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_layers,
                                                               const unsigned int *layers)
{
    struct fann_layer *layer_it, *layer_it2, *last_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *neuron_it2 = 0;
    unsigned int i;
    unsigned int num_neurons_in, num_neurons_out;

#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif
    /* seed random */
#ifndef FANN_NO_SEED
    fann_seed_rand();
#endif

    /* allocate the general structure */
    ann = fann_allocate_structure(num_layers);
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    ann->connection_rate = 1;
    ann->network_type = FANN_NETTYPE_SHORTCUT;
#ifdef FIXEDFANN
    decimal_point = ann->decimal_point;
    multiplier = ann->multiplier;
    fann_update_stepwise(ann);
#endif

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        /* we do not allocate room here, but we make sure that
         * last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++];
        if(layer_it == ann->first_layer)
        {
            /* there is a bias neuron in the first layer */
            layer_it->last_neuron++;
        }

        ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
    }

    ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron;
    ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating fully shortcut connected network.\n");
    printf("input\n");
    printf("  layer       : %d neurons, 1 bias\n",
           ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

    num_neurons_in = ann->num_input;
    last_layer = ann->last_layer;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        num_neurons_out = layer_it->last_neuron - layer_it->first_neuron;

        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++)
        {
            layer_it->first_neuron[i].first_con = ann->total_connections;
            ann->total_connections += num_neurons_in + 1;
            layer_it->first_neuron[i].last_con = ann->total_connections;

            layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
            layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
            layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
        }

#ifdef DEBUG
        printf("  layer       : %d neurons, 0 bias\n", num_neurons_out);
#endif
        /* used in the next run of the loop */
        num_neurons_in += num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

    /* Connections are created from all neurons to all neurons in later layers */
    num_neurons_in = ann->num_input + 1;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
        {
            i = neuron_it->first_con;
            for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++)
            {
                for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron;
                    neuron_it2++)
                {
                    ann->weights[i] = (fann_type) fann_random_weight();
                    ann->connections[i] = neuron_it2;
                    i++;
                }
            }
        }
        num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}
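In a shortcut network each neuron is connected to every neuron in all earlier layers (including the inputs), not just the previous layer, and only the first layer carries a bias. A small sketch with arbitrary sizes:

    /* 2 inputs, 4 hidden, 1 output; the output also sees the inputs directly */
    struct fann *shortcut = fann_create_shortcut(3, 2, 4, 1);

    if(shortcut != NULL)
        fann_destroy(shortcut);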

FANN_EXTERNAL fann_type *FANN_API fann_run(struct fann * ann, fann_type * input)
{
    struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
    unsigned int i, num_connections, num_input, num_output;
    fann_type neuron_sum, *output;
    fann_type *weights;
    struct fann_layer *layer_it, *last_layer;
    unsigned int activation_function;
    fann_type steepness;

    /* store some variables locally for fast access */
    struct fann_neuron *first_neuron = ann->first_layer->first_neuron;

#ifdef FIXEDFANN
    int multiplier = ann->multiplier;
    unsigned int decimal_point = ann->decimal_point;

    /* values used for the stepwise linear sigmoid function */
    fann_type r1 = 0, r2 = 0, r3 = 0, r4 = 0, r5 = 0, r6 = 0;
    fann_type v1 = 0, v2 = 0, v3 = 0, v4 = 0, v5 = 0, v6 = 0;

    fann_type last_steepness = 0;
    unsigned int last_activation_function = 0;
#else
    fann_type max_sum;
#endif

    /* first set the input */
    num_input = ann->num_input;
    for(i = 0; i != num_input; i++)
    {
#ifdef FIXEDFANN
        if(fann_abs(input[i]) > multiplier)
        {
            printf
                ("Warning input number %d is out of range -%d - %d with value %d, integer overflow may occur.\n",
                 i, multiplier, multiplier, input[i]);
        }
#endif
        first_neuron[i].value = input[i];
    }
    /* Set the bias neuron in the input layer */
#ifdef FIXEDFANN
    (ann->first_layer->last_neuron - 1)->value = multiplier;
#else
    (ann->first_layer->last_neuron - 1)->value = 1;
#endif

    last_layer = ann->last_layer;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        last_neuron = layer_it->last_neuron;
        for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
        {
            if(neuron_it->first_con == neuron_it->last_con)
            {
                /* bias neurons */
#ifdef FIXEDFANN
                neuron_it->value = multiplier;
#else
                neuron_it->value = 1;
#endif
                continue;
            }

            activation_function = neuron_it->activation_function;
            steepness = neuron_it->activation_steepness;

            neuron_sum = 0;
            num_connections = neuron_it->last_con - neuron_it->first_con;
            weights = ann->weights + neuron_it->first_con;

            if(ann->connection_rate >= 1)
            {
                if(ann->network_type == FANN_NETTYPE_SHORTCUT)
                {
                    neurons = ann->first_layer->first_neuron;
                }
                else
                {
                    neurons = (layer_it - 1)->first_neuron;
                }

                /* unrolled loop start */
                i = num_connections & 3;   /* same as modulo 4 */
                switch (i)
                {
                    case 3:
                        neuron_sum += fann_mult(weights[2], neurons[2].value);
                    case 2:
                        neuron_sum += fann_mult(weights[1], neurons[1].value);
                    case 1:
                        neuron_sum += fann_mult(weights[0], neurons[0].value);
                    case 0:
                        break;
                }

                for(; i != num_connections; i += 4)
                {
                    neuron_sum +=
                        fann_mult(weights[i], neurons[i].value) +
                        fann_mult(weights[i + 1], neurons[i + 1].value) +
                        fann_mult(weights[i + 2], neurons[i + 2].value) +
                        fann_mult(weights[i + 3], neurons[i + 3].value);
                }
                /* unrolled loop end */

                /*
                 * for(i = 0;i != num_connections; i++){
                 *     printf("%f += %f*%f, ", neuron_sum, weights[i], neurons[i].value);
                 *     neuron_sum += fann_mult(weights[i], neurons[i].value);
                 * }
                 */
            }
            else
            {
                neuron_pointers = ann->connections + neuron_it->first_con;

                i = num_connections & 3;   /* same as modulo 4 */
                switch (i)
                {
                    case 3:
                        neuron_sum += fann_mult(weights[2], neuron_pointers[2]->value);
                    case 2:
                        neuron_sum += fann_mult(weights[1], neuron_pointers[1]->value);
                    case 1:
                        neuron_sum += fann_mult(weights[0], neuron_pointers[0]->value);
                    case 0:
                        break;
                }

                for(; i != num_connections; i += 4)
                {
                    neuron_sum +=
                        fann_mult(weights[i], neuron_pointers[i]->value) +
                        fann_mult(weights[i + 1], neuron_pointers[i + 1]->value) +
                        fann_mult(weights[i + 2], neuron_pointers[i + 2]->value) +
                        fann_mult(weights[i + 3], neuron_pointers[i + 3]->value);
                }
            }

#ifdef FIXEDFANN
            neuron_it->sum = fann_mult(steepness, neuron_sum);

            if(activation_function != last_activation_function || steepness != last_steepness)
            {
                switch (activation_function)
                {
                    case FANN_SIGMOID:
                    case FANN_SIGMOID_STEPWISE:
                        r1 = ann->sigmoid_results[0];
                        r2 = ann->sigmoid_results[1];
                        r3 = ann->sigmoid_results[2];
                        r4 = ann->sigmoid_results[3];
                        r5 = ann->sigmoid_results[4];
                        r6 = ann->sigmoid_results[5];
                        v1 = ann->sigmoid_values[0] / steepness;
                        v2 = ann->sigmoid_values[1] / steepness;
                        v3 = ann->sigmoid_values[2] / steepness;
                        v4 = ann->sigmoid_values[3] / steepness;
                        v5 = ann->sigmoid_values[4] / steepness;
                        v6 = ann->sigmoid_values[5] / steepness;
                        break;
                    case FANN_SIGMOID_SYMMETRIC:
                    case FANN_SIGMOID_SYMMETRIC_STEPWISE:
                        r1 = ann->sigmoid_symmetric_results[0];
                        r2 = ann->sigmoid_symmetric_results[1];
                        r3 = ann->sigmoid_symmetric_results[2];
                        r4 = ann->sigmoid_symmetric_results[3];
                        r5 = ann->sigmoid_symmetric_results[4];
                        r6 = ann->sigmoid_symmetric_results[5];
                        v1 = ann->sigmoid_symmetric_values[0] / steepness;
                        v2 = ann->sigmoid_symmetric_values[1] / steepness;
                        v3 = ann->sigmoid_symmetric_values[2] / steepness;
                        v4 = ann->sigmoid_symmetric_values[3] / steepness;
                        v5 = ann->sigmoid_symmetric_values[4] / steepness;
                        v6 = ann->sigmoid_symmetric_values[5] / steepness;
                        break;
                    case FANN_THRESHOLD:
                        break;
                }
            }

            switch (activation_function)
            {
                case FANN_SIGMOID:
                case FANN_SIGMOID_STEPWISE:
                    neuron_it->value =
                        (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6, 0,
                                                  multiplier, neuron_sum);
                    break;
                case FANN_SIGMOID_SYMMETRIC:
                case FANN_SIGMOID_SYMMETRIC_STEPWISE:
                    neuron_it->value =
                        (fann_type) fann_stepwise(v1, v2, v3, v4, v5, v6, r1, r2, r3, r4, r5, r6,
                                                  -multiplier, multiplier, neuron_sum);
                    break;
                case FANN_THRESHOLD:
                    neuron_it->value = (fann_type) ((neuron_sum < 0) ? 0 : multiplier);
                    break;
                case FANN_THRESHOLD_SYMMETRIC:
                    neuron_it->value = (fann_type) ((neuron_sum < 0) ? -multiplier : multiplier);
                    break;
                case FANN_LINEAR:
                    neuron_it->value = neuron_sum;
                    break;
                case FANN_LINEAR_PIECE:
                    neuron_it->value = (fann_type) ((neuron_sum < 0) ? 0 : (neuron_sum > multiplier) ? multiplier : neuron_sum);
                    break;
                case FANN_LINEAR_PIECE_SYMMETRIC:
                    neuron_it->value = (fann_type) ((neuron_sum < -multiplier) ? -multiplier : (neuron_sum > multiplier) ? multiplier : neuron_sum);
                    break;
                case FANN_ELLIOT:
                case FANN_ELLIOT_SYMMETRIC:
                case FANN_GAUSSIAN:
                case FANN_GAUSSIAN_SYMMETRIC:
                case FANN_GAUSSIAN_STEPWISE:
                case FANN_SIN_SYMMETRIC:
                case FANN_COS_SYMMETRIC:
                    fann_error((struct fann_error *) ann, FANN_E_CANT_USE_ACTIVATION);
                    break;
            }
            last_steepness = steepness;
            last_activation_function = activation_function;
#else
            neuron_sum = fann_mult(steepness, neuron_sum);

            max_sum = 150 / steepness;
            if(neuron_sum > max_sum)
                neuron_sum = max_sum;
            else if(neuron_sum < -max_sum)
                neuron_sum = -max_sum;

            neuron_it->sum = neuron_sum;

            fann_activation_switch(activation_function, neuron_sum, neuron_it->value);
#endif
        }
    }

    /* set the output */
    output = ann->output;
    num_output = ann->num_output;
    neurons = (ann->last_layer - 1)->first_neuron;
    for(i = 0; i != num_output; i++)
    {
        output[i] = neurons[i].value;
    }
    return ann->output;
}
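fann_run returns a pointer into ann->output, so the result is owned by the network and is overwritten by the next call; copy it if it must survive. A minimal inference sketch (the network here is untrained, so the output value is arbitrary):

    struct fann *ann = fann_create_standard(3, 2, 3, 1);
    fann_type input[2] = { -1.0f, 1.0f };
    fann_type *out;

    out = fann_run(ann, input);
    printf("output: %f\n", out[0]);   /* out aliases ann->output */
    fann_destroy(ann);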

FANN_EXTERNAL void FANN_API fann_destroy(struct fann *ann)
{
    if(ann == NULL)
        return;
    fann_safe_free(ann->weights);
    fann_safe_free(ann->connections);
    fann_safe_free(ann->first_layer->first_neuron);
    fann_safe_free(ann->first_layer);
    fann_safe_free(ann->output);
    fann_safe_free(ann->train_errors);
    fann_safe_free(ann->train_slopes);
    fann_safe_free(ann->prev_train_slopes);
    fann_safe_free(ann->prev_steps);
    fann_safe_free(ann->prev_weights_deltas);
    fann_safe_free(ann->errstr);
    fann_safe_free(ann->cascade_activation_functions);
    fann_safe_free(ann->cascade_activation_steepnesses);

#ifndef FIXEDFANN
    fann_safe_free( ann->scale_mean_in );
    fann_safe_free( ann->scale_deviation_in );
    fann_safe_free( ann->scale_new_min_in );
    fann_safe_free( ann->scale_factor_in );

    fann_safe_free( ann->scale_mean_out );
    fann_safe_free( ann->scale_deviation_out );
    fann_safe_free( ann->scale_new_min_out );
    fann_safe_free( ann->scale_factor_out );
#endif

    fann_safe_free(ann);
}

FANN_EXTERNAL void FANN_API fann_randomize_weights(struct fann *ann, fann_type min_weight,
                                                   fann_type max_weight)
{
    fann_type *last_weight;
    fann_type *weights = ann->weights;

    last_weight = weights + ann->total_connections;
    for(; weights != last_weight; weights++)
    {
        *weights = (fann_type) (fann_rand(min_weight, max_weight));
    }

#ifndef FIXEDFANN
    if(ann->prev_train_slopes != NULL)
    {
        fann_clear_train_arrays(ann);
    }
#endif
}
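Weights are drawn uniformly from [min_weight, max_weight], and any accumulated RPROP/quickprop training state is cleared so stale slopes do not leak into the next training run. For example:

    fann_randomize_weights(ann, -0.1f, 0.1f);   /* uniform weights in [-0.1, 0.1] */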

FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
{
    struct fann_layer *layer_it;
    struct fann_neuron *neuron_it;
    unsigned int i;
    int value;
    char *neurons;
    unsigned int num_neurons = fann_get_total_neurons(ann) - fann_get_num_output(ann);

    neurons = (char *) malloc(num_neurons + 1);
    if(neurons == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return;
    }
    neurons[num_neurons] = 0;

    printf("Layer / Neuron ");
    for(i = 0; i < num_neurons; i++)
    {
        printf("%d", i % 10);
    }
    printf("\n");

    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
    {
        for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
        {
            memset(neurons, (int) '.', num_neurons);
            for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
            {
                if(ann->weights[i] < 0)
                {
#ifdef FIXEDFANN
                    value = (int) ((ann->weights[i] / (double) ann->multiplier) - 0.5);
#else
                    value = (int) ((ann->weights[i]) - 0.5);
#endif
                    if(value < -25)
                        value = -25;
                    neurons[ann->connections[i] - ann->first_layer->first_neuron] = (char)('a' - value);
                }
                else
                {
#ifdef FIXEDFANN
                    value = (int) ((ann->weights[i] / (double) ann->multiplier) + 0.5);
#else
                    value = (int) ((ann->weights[i]) + 0.5);
#endif
                    if(value > 25)
                        value = 25;
                    neurons[ann->connections[i] - ann->first_layer->first_neuron] = (char)('A' + value);
                }
            }
            printf("L %3d / N %4d %s\n", layer_it - ann->first_layer,
                   neuron_it - ann->first_layer->first_neuron, neurons);
        }
    }

    free(neurons);
}

/* Initialize the weights using Widrow + Nguyen's algorithm.
 */
FANN_EXTERNAL void FANN_API fann_init_weights(struct fann *ann, struct fann_train_data *train_data)
{
    fann_type smallest_inp, largest_inp;
    unsigned int dat = 0, elem, num_connect, num_hidden_neurons;
    struct fann_layer *layer_it;
    struct fann_neuron *neuron_it, *last_neuron, *bias_neuron;

#ifdef FIXEDFANN
    unsigned int multiplier = ann->multiplier;
#endif
    float scale_factor;

    for(smallest_inp = largest_inp = train_data->input[0][0]; dat < train_data->num_data; dat++)
    {
        for(elem = 0; elem < train_data->num_input; elem++)
        {
            if(train_data->input[dat][elem] < smallest_inp)
                smallest_inp = train_data->input[dat][elem];
            if(train_data->input[dat][elem] > largest_inp)
                largest_inp = train_data->input[dat][elem];
        }
    }

    num_hidden_neurons =
        ann->total_neurons - (ann->num_input + ann->num_output +
                              (ann->last_layer - ann->first_layer));
    scale_factor =
        (float) (pow((double) (0.7f * (double) num_hidden_neurons),
                     (double) (1.0f / (double) ann->num_input)) /
                 (double) (largest_inp - smallest_inp));

#ifdef DEBUG
    printf("Initializing weights with scale factor %f\n", scale_factor);
#endif
    bias_neuron = ann->first_layer->last_neuron - 1;
    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
    {
        last_neuron = layer_it->last_neuron;

        if(ann->network_type == FANN_NETTYPE_LAYER)
        {
            bias_neuron = (layer_it - 1)->last_neuron - 1;
        }

        for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
        {
            for(num_connect = neuron_it->first_con; num_connect < neuron_it->last_con;
                num_connect++)
            {
                if(bias_neuron == ann->connections[num_connect])
                {
#ifdef FIXEDFANN
                    ann->weights[num_connect] =
                        (fann_type) fann_rand(-scale_factor, scale_factor * multiplier);
#else
                    ann->weights[num_connect] = (fann_type) fann_rand(-scale_factor, scale_factor);
#endif
                }
                else
                {
#ifdef FIXEDFANN
                    ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor * multiplier);
#else
                    ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor);
#endif
                }
            }
        }
    }

#ifndef FIXEDFANN
    if(ann->prev_train_slopes != NULL)
    {
        fann_clear_train_arrays(ann);
    }
#endif
}
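The scale factor above is the Widrow-Nguyen term scale_factor = (0.7 * h)^(1/n) / (max_in - min_in), where h is the number of hidden neurons and n the number of inputs, so the initialization adapts to the range of the training inputs. A usage sketch (xor.train ships in this gem; fann_read_train_from_file and fann_destroy_train are declared in fann_train.h):

    struct fann *ann = fann_create_standard(3, 2, 3, 1);
    struct fann_train_data *data = fann_read_train_from_file("xor.train");

    if(data != NULL)
    {
        fann_init_weights(ann, data);   /* data-aware alternative to fann_randomize_weights */
        fann_destroy_train(data);
    }
    fann_destroy(ann);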

FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
{
    struct fann_layer *layer_it;
#ifndef FIXEDFANN
    unsigned int i;
#endif

    printf("Input layer                          :%4d neurons, 1 bias\n", ann->num_input);
    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer - 1; layer_it++)
    {
        if(ann->network_type == FANN_NETTYPE_SHORTCUT)
        {
            printf("  Hidden layer                       :%4d neurons, 0 bias\n",
                   layer_it->last_neuron - layer_it->first_neuron);
        }
        else
        {
            printf("  Hidden layer                       :%4d neurons, 1 bias\n",
                   layer_it->last_neuron - layer_it->first_neuron - 1);
        }
    }
    printf("Output layer                         :%4d neurons\n", ann->num_output);
    printf("Total neurons and biases             :%4d\n", fann_get_total_neurons(ann));
    printf("Total connections                    :%4d\n", ann->total_connections);
    printf("Connection rate                      :%8.3f\n", ann->connection_rate);
    printf("Network type                         :   %s\n", FANN_NETTYPE_NAMES[ann->network_type]);
#ifdef FIXEDFANN
    printf("Decimal point                        :%4d\n", ann->decimal_point);
    printf("Multiplier                           :%4d\n", ann->multiplier);
#else
    printf("Training algorithm                   :   %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
    printf("Training error function              :   %s\n", FANN_ERRORFUNC_NAMES[ann->train_error_function]);
    printf("Training stop function               :   %s\n", FANN_STOPFUNC_NAMES[ann->train_stop_function]);
#endif
#ifdef FIXEDFANN
    printf("Bit fail limit                       :%4d\n", ann->bit_fail_limit);
#else
    printf("Bit fail limit                       :%8.3f\n", ann->bit_fail_limit);
    printf("Learning rate                        :%8.3f\n", ann->learning_rate);
    printf("Learning momentum                    :%8.3f\n", ann->learning_momentum);
    printf("Quickprop decay                      :%11.6f\n", ann->quickprop_decay);
    printf("Quickprop mu                         :%8.3f\n", ann->quickprop_mu);
    printf("RPROP increase factor                :%8.3f\n", ann->rprop_increase_factor);
    printf("RPROP decrease factor                :%8.3f\n", ann->rprop_decrease_factor);
    printf("RPROP delta min                      :%8.3f\n", ann->rprop_delta_min);
    printf("RPROP delta max                      :%8.3f\n", ann->rprop_delta_max);
    printf("Cascade output change fraction       :%11.6f\n", ann->cascade_output_change_fraction);
    printf("Cascade candidate change fraction    :%11.6f\n", ann->cascade_candidate_change_fraction);
    printf("Cascade output stagnation epochs     :%4d\n", ann->cascade_output_stagnation_epochs);
    printf("Cascade candidate stagnation epochs  :%4d\n", ann->cascade_candidate_stagnation_epochs);
    printf("Cascade max output epochs            :%4d\n", ann->cascade_max_out_epochs);
    printf("Cascade max candidate epochs         :%4d\n", ann->cascade_max_cand_epochs);
    printf("Cascade weight multiplier            :%8.3f\n", ann->cascade_weight_multiplier);
    printf("Cascade candidate limit              :%8.3f\n", ann->cascade_candidate_limit);
    for(i = 0; i < ann->cascade_activation_functions_count; i++)
        printf("Cascade activation functions[%d]      :   %s\n", i,
               FANN_ACTIVATIONFUNC_NAMES[ann->cascade_activation_functions[i]]);
    for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
        printf("Cascade activation steepnesses[%d]    :%8.3f\n", i,
               ann->cascade_activation_steepnesses[i]);

    printf("Cascade candidate groups             :%4d\n", ann->cascade_num_candidate_groups);
    printf("Cascade no. of candidates            :%4d\n", fann_get_cascade_num_candidates(ann));

    /* TODO: dump scale parameters */
#endif
}

FANN_GET(unsigned int, num_input)
FANN_GET(unsigned int, num_output)

FANN_EXTERNAL unsigned int FANN_API fann_get_total_neurons(struct fann *ann)
{
    if(ann->network_type)
    {
        return ann->total_neurons;
    }
    else
    {
        /* -1, because there is always an unused bias neuron in the last layer */
        return ann->total_neurons - 1;
    }
}

FANN_GET(unsigned int, total_connections)

FANN_EXTERNAL enum fann_nettype_enum FANN_API fann_get_network_type(struct fann *ann)
{
    /* Currently two types: LAYER = 0, SHORTCUT = 1 */
    /* Enum network_types must be set to match the return values */
    return ann->network_type;
}

FANN_EXTERNAL float FANN_API fann_get_connection_rate(struct fann *ann)
{
    return ann->connection_rate;
}

FANN_EXTERNAL unsigned int FANN_API fann_get_num_layers(struct fann *ann)
{
    return ann->last_layer - ann->first_layer;
}

FANN_EXTERNAL void FANN_API fann_get_layer_array(struct fann *ann, unsigned int *layers)
{
    struct fann_layer *layer_it;

    for (layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++) {
        unsigned int count = layer_it->last_neuron - layer_it->first_neuron;
        /* Remove the bias from the count of neurons. */
        switch (fann_get_network_type(ann)) {
            case FANN_NETTYPE_LAYER: {
                --count;
                break;
            }
            case FANN_NETTYPE_SHORTCUT: {
                /* The bias in the first layer is reused for all layers */
                if (layer_it == ann->first_layer)
                    --count;
                break;
            }
            default: {
                /* Unknown network type, assume no bias present */
                break;
            }
        }
        *layers++ = count;
    }
}

FANN_EXTERNAL void FANN_API fann_get_bias_array(struct fann *ann, unsigned int *bias)
{
    struct fann_layer *layer_it;

    for (layer_it = ann->first_layer; layer_it != ann->last_layer; ++layer_it, ++bias) {
        switch (fann_get_network_type(ann)) {
            case FANN_NETTYPE_LAYER: {
                /* Report one bias in each layer except the last */
                if (layer_it != ann->last_layer-1)
                    *bias = 1;
                else
                    *bias = 0;
                break;
            }
            case FANN_NETTYPE_SHORTCUT: {
                /* The bias in the first layer is reused for all layers */
                if (layer_it == ann->first_layer)
                    *bias = 1;
                else
                    *bias = 0;
                break;
            }
            default: {
                /* Unknown network type, assume no bias present */
                *bias = 0;
                break;
            }
        }
    }
}

FANN_EXTERNAL void FANN_API fann_get_connection_array(struct fann *ann, struct fann_connection *connections)
{
    struct fann_neuron *first_neuron;
    struct fann_layer *layer_it;
    struct fann_neuron *neuron_it;
    unsigned int index;
    unsigned int source_index;
    unsigned int destination_index;

    first_neuron = ann->first_layer->first_neuron;

    source_index = 0;
    destination_index = 0;

    /* The following assumes that the last unused bias has no connections */

    /* for each layer */
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++){
        /* for each neuron */
        for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++){
            /* for each connection */
            for (index = neuron_it->first_con; index < neuron_it->last_con; index++){
                /* Assign the source, destination and weight */
                connections->from_neuron = ann->connections[source_index] - first_neuron;
                connections->to_neuron = destination_index;
                connections->weight = ann->weights[source_index];

                connections++;
                source_index++;
            }
            destination_index++;
        }
    }
}
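fann_get_connection_array fills a caller-allocated buffer; fann_get_total_connections gives the required element count. A sketch:

    unsigned int n = fann_get_total_connections(ann);
    struct fann_connection *cons =
        (struct fann_connection *) malloc(n * sizeof(struct fann_connection));

    if(cons != NULL)
    {
        fann_get_connection_array(ann, cons);
        /* each entry exposes cons[i].from_neuron, cons[i].to_neuron, cons[i].weight */
        free(cons);
    }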

FANN_EXTERNAL void FANN_API fann_set_weight_array(struct fann *ann,
    struct fann_connection *connections, unsigned int num_connections)
{
    unsigned int index;

    for (index = 0; index < num_connections; index++) {
        fann_set_weight(ann, connections[index].from_neuron,
                        connections[index].to_neuron, connections[index].weight);
    }
}

FANN_EXTERNAL void FANN_API fann_set_weight(struct fann *ann,
    unsigned int from_neuron, unsigned int to_neuron, fann_type weight)
{
    struct fann_neuron *first_neuron;
    struct fann_layer *layer_it;
    struct fann_neuron *neuron_it;
    unsigned int index;
    unsigned int source_index;
    unsigned int destination_index;

    first_neuron = ann->first_layer->first_neuron;

    source_index = 0;
    destination_index = 0;

    /* Find the connection with a simple brute-force search through the
       network for one or more matching connections, to minimize data
       structure dependencies. Nothing is done if the connection does not
       already exist in the network. */

    /* for each layer */
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++){
        /* for each neuron */
        for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++){
            /* for each connection */
            for (index = neuron_it->first_con; index < neuron_it->last_con; index++){
                /* If the source and destination neurons match, assign the weight */
                if (((int)from_neuron == ann->connections[source_index] - first_neuron) &&
                    (to_neuron == destination_index))
                {
                    ann->weights[source_index] = weight;
                }
                source_index++;
            }
            destination_index++;
        }
    }
}

FANN_GET_SET(void *, user_data)

#ifdef FIXEDFANN

FANN_GET(unsigned int, decimal_point)
FANN_GET(unsigned int, multiplier)

/* INTERNAL FUNCTION
   Adjust the stepwise functions (if used)
 */
void fann_update_stepwise(struct fann *ann)
{
    unsigned int i = 0;

    /* Calculate the parameters for the stepwise linear
     * sigmoid function fixed point.
     * Using a rewritten sigmoid function.
     * results 0.005, 0.05, 0.25, 0.75, 0.95, 0.995
     */
    ann->sigmoid_results[0] = fann_max((fann_type) (ann->multiplier / 200.0 + 0.5), 1);
    ann->sigmoid_results[1] = fann_max((fann_type) (ann->multiplier / 20.0 + 0.5), 1);
    ann->sigmoid_results[2] = fann_max((fann_type) (ann->multiplier / 4.0 + 0.5), 1);
    ann->sigmoid_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 4.0 + 0.5), ann->multiplier - 1);
    ann->sigmoid_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 20.0 + 0.5), ann->multiplier - 1);
    ann->sigmoid_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 200.0 + 0.5), ann->multiplier - 1);

    ann->sigmoid_symmetric_results[0] = fann_max((fann_type) ((ann->multiplier / 100.0) - ann->multiplier - 0.5),
                                                 (fann_type) (1 - (fann_type) ann->multiplier));
    ann->sigmoid_symmetric_results[1] = fann_max((fann_type) ((ann->multiplier / 10.0) - ann->multiplier - 0.5),
                                                 (fann_type) (1 - (fann_type) ann->multiplier));
    ann->sigmoid_symmetric_results[2] = fann_max((fann_type) ((ann->multiplier / 2.0) - ann->multiplier - 0.5),
                                                 (fann_type) (1 - (fann_type) ann->multiplier));
    ann->sigmoid_symmetric_results[3] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 2.0 + 0.5),
                                                 ann->multiplier - 1);
    ann->sigmoid_symmetric_results[4] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 10.0 + 0.5),
                                                 ann->multiplier - 1);
    ann->sigmoid_symmetric_results[5] = fann_min(ann->multiplier - (fann_type) (ann->multiplier / 100.0 + 1.0),
                                                 ann->multiplier - 1);

    for(i = 0; i < 6; i++)
    {
        ann->sigmoid_values[i] =
            (fann_type) (((log(ann->multiplier / (float) ann->sigmoid_results[i] - 1) *
                           (float) ann->multiplier) / -2.0) * (float) ann->multiplier);
        ann->sigmoid_symmetric_values[i] =
            (fann_type) (((log((ann->multiplier - (float) ann->sigmoid_symmetric_results[i]) /
                               ((float) ann->sigmoid_symmetric_results[i] + ann->multiplier)) *
                           (float) ann->multiplier) / -2.0) * (float) ann->multiplier);
    }
}
#endif

/* INTERNAL FUNCTION
   Allocates the main structure and sets some default values.
 */
struct fann *fann_allocate_structure(unsigned int num_layers)
{
    struct fann *ann;

    if(num_layers < 2)
    {
#ifdef DEBUG
        printf("less than 2 layers - ABORTING.\n");
#endif
        return NULL;
    }

    /* allocate and initialize the main network structure */
    ann = (struct fann *) malloc(sizeof(struct fann));
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    ann->errno_f = FANN_E_NO_ERROR;
    ann->error_log = fann_default_error_log;
    ann->errstr = NULL;
    ann->learning_rate = 0.7f;
    ann->learning_momentum = 0.0;
    ann->total_neurons = 0;
    ann->total_connections = 0;
    ann->num_input = 0;
    ann->num_output = 0;
    ann->train_errors = NULL;
    ann->train_slopes = NULL;
    ann->prev_steps = NULL;
    ann->prev_train_slopes = NULL;
    ann->prev_weights_deltas = NULL;
    ann->training_algorithm = FANN_TRAIN_RPROP;
    ann->num_MSE = 0;
    ann->MSE_value = 0;
    ann->num_bit_fail = 0;
    ann->bit_fail_limit = (fann_type)0.35;
    ann->network_type = FANN_NETTYPE_LAYER;
    ann->train_error_function = FANN_ERRORFUNC_TANH;
    ann->train_stop_function = FANN_STOPFUNC_MSE;
    ann->callback = NULL;
    ann->user_data = NULL;   /* User is responsible for deallocation */
    ann->weights = NULL;
    ann->connections = NULL;
    ann->output = NULL;
#ifndef FIXEDFANN
    ann->scale_mean_in = NULL;
    ann->scale_deviation_in = NULL;
    ann->scale_new_min_in = NULL;
    ann->scale_factor_in = NULL;
    ann->scale_mean_out = NULL;
    ann->scale_deviation_out = NULL;
    ann->scale_new_min_out = NULL;
    ann->scale_factor_out = NULL;
#endif

    /* variables used for cascade correlation (reasonable defaults) */
    ann->cascade_output_change_fraction = 0.01f;
    ann->cascade_candidate_change_fraction = 0.01f;
    ann->cascade_output_stagnation_epochs = 12;
    ann->cascade_candidate_stagnation_epochs = 12;
    ann->cascade_num_candidate_groups = 2;
    ann->cascade_weight_multiplier = (fann_type)0.4;
    ann->cascade_candidate_limit = (fann_type)1000.0;
    ann->cascade_max_out_epochs = 150;
    ann->cascade_max_cand_epochs = 150;
    ann->cascade_candidate_scores = NULL;
    ann->cascade_activation_functions_count = 10;
    ann->cascade_activation_functions =
        (enum fann_activationfunc_enum *)calloc(ann->cascade_activation_functions_count,
                                                sizeof(enum fann_activationfunc_enum));
    if(ann->cascade_activation_functions == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        free(ann);
        return NULL;
    }

    ann->cascade_activation_functions[0] = FANN_SIGMOID;
    ann->cascade_activation_functions[1] = FANN_SIGMOID_SYMMETRIC;
    ann->cascade_activation_functions[2] = FANN_GAUSSIAN;
    ann->cascade_activation_functions[3] = FANN_GAUSSIAN_SYMMETRIC;
    ann->cascade_activation_functions[4] = FANN_ELLIOT;
    ann->cascade_activation_functions[5] = FANN_ELLIOT_SYMMETRIC;
    ann->cascade_activation_functions[6] = FANN_SIN_SYMMETRIC;
    ann->cascade_activation_functions[7] = FANN_COS_SYMMETRIC;
    ann->cascade_activation_functions[8] = FANN_SIN;
    ann->cascade_activation_functions[9] = FANN_COS;

    ann->cascade_activation_steepnesses_count = 4;
    ann->cascade_activation_steepnesses =
        (fann_type *)calloc(ann->cascade_activation_steepnesses_count,
                            sizeof(fann_type));
    if(ann->cascade_activation_steepnesses == NULL)
    {
        fann_safe_free(ann->cascade_activation_functions);
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        free(ann);
        return NULL;
    }

    ann->cascade_activation_steepnesses[0] = (fann_type)0.25;
    ann->cascade_activation_steepnesses[1] = (fann_type)0.5;
    ann->cascade_activation_steepnesses[2] = (fann_type)0.75;
    ann->cascade_activation_steepnesses[3] = (fann_type)1.0;

    /* Variables for use with Quickprop training (reasonable defaults) */
    ann->quickprop_decay = (float) -0.0001;
    ann->quickprop_mu = 1.75;

    /* Variables for use with RPROP training (reasonable defaults) */
    ann->rprop_increase_factor = (float) 1.2;
    ann->rprop_decrease_factor = 0.5;
    ann->rprop_delta_min = 0.0;
    ann->rprop_delta_max = 50.0;
    ann->rprop_delta_zero = 0.1;

    fann_init_error_data((struct fann_error *) ann);

#ifdef FIXEDFANN
    /* these values are only boring defaults, and should really
     * never be used, since the real values are always loaded from a file. */
    ann->decimal_point = 8;
    ann->multiplier = 256;
#endif

    /* allocate room for the layers */
    ann->first_layer = (struct fann_layer *) calloc(num_layers, sizeof(struct fann_layer));
    if(ann->first_layer == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        free(ann);
        return NULL;
    }

    ann->last_layer = ann->first_layer + num_layers;

    return ann;
}

/* INTERNAL FUNCTION
   Allocates room for the scaling parameters.
 */
int fann_allocate_scale(struct fann *ann)
{
    /* TODO: this should only be allocated when needed */
#ifndef FIXEDFANN
    unsigned int i = 0;
#define SCALE_ALLOCATE( what, where, default_value )          \
    ann->what##_##where = (float *)calloc(                    \
        ann->num_##where##put,                                \
        sizeof( float )                                       \
    );                                                        \
    if( ann->what##_##where == NULL )                         \
    {                                                         \
        fann_error( NULL, FANN_E_CANT_ALLOCATE_MEM );         \
        fann_destroy( ann );                                  \
        return 1;                                             \
    }                                                         \
    for( i = 0; i < ann->num_##where##put; i++ )              \
        ann->what##_##where[ i ] = ( default_value );

    SCALE_ALLOCATE( scale_mean,      in,  0.0 )
    SCALE_ALLOCATE( scale_deviation, in,  1.0 )
    SCALE_ALLOCATE( scale_new_min,   in,  -1.0 )
    SCALE_ALLOCATE( scale_factor,    in,  1.0 )

    SCALE_ALLOCATE( scale_mean,      out, 0.0 )
    SCALE_ALLOCATE( scale_deviation, out, 1.0 )
    SCALE_ALLOCATE( scale_new_min,   out, -1.0 )
    SCALE_ALLOCATE( scale_factor,    out, 1.0 )
#undef SCALE_ALLOCATE
#endif
    return 0;
}
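For readability: SCALE_ALLOCATE pastes its arguments into field and count names, so SCALE_ALLOCATE( scale_mean, in, 0.0 ) builds ann->scale_mean_in and ann->num_input, expanding to roughly:

    ann->scale_mean_in = (float *) calloc(ann->num_input, sizeof(float));
    if(ann->scale_mean_in == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        fann_destroy(ann);
        return 1;
    }
    for(i = 0; i < ann->num_input; i++)
        ann->scale_mean_in[i] = 0.0;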

/* INTERNAL FUNCTION
   Allocates room for the neurons.
 */
void fann_allocate_neurons(struct fann *ann)
{
    struct fann_layer *layer_it;
    struct fann_neuron *neurons;
    unsigned int num_neurons_so_far = 0;
    unsigned int num_neurons = 0;

    /* all the neurons are allocated in one long array (calloc clears mem) */
    neurons = (struct fann_neuron *) calloc(ann->total_neurons, sizeof(struct fann_neuron));
    ann->total_neurons_allocated = ann->total_neurons;

    if(neurons == NULL)
    {
        fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
        return;
    }

    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        num_neurons = layer_it->last_neuron - layer_it->first_neuron;
        layer_it->first_neuron = neurons + num_neurons_so_far;
        layer_it->last_neuron = layer_it->first_neuron + num_neurons;
        num_neurons_so_far += num_neurons;
    }

    ann->output = (fann_type *) calloc(num_neurons, sizeof(fann_type));
    if(ann->output == NULL)
    {
        fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
        return;
    }
}

/* INTERNAL FUNCTION
   Allocate room for the connections.
 */
void fann_allocate_connections(struct fann *ann)
{
    ann->weights = (fann_type *) calloc(ann->total_connections, sizeof(fann_type));
    if(ann->weights == NULL)
    {
        fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
        return;
    }
    ann->total_connections_allocated = ann->total_connections;

    /* TODO make special cases for all places where the connections array
     * is used, so that it is not needed for fully connected networks.
     */
    ann->connections =
        (struct fann_neuron **) calloc(ann->total_connections_allocated,
                                       sizeof(struct fann_neuron *));
    if(ann->connections == NULL)
    {
        fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
        return;
    }
}

/* INTERNAL FUNCTION
   Seed the random function.
 */
void fann_seed_rand()
{
#ifndef _WIN32
    FILE *fp = fopen("/dev/urandom", "r");
    unsigned int foo;
    struct timeval t;

    if(!fp)
    {
        gettimeofday(&t, NULL);
        foo = t.tv_usec;
#ifdef DEBUG
        printf("unable to open /dev/urandom\n");
#endif
    }
    else
    {
        fread(&foo, sizeof(foo), 1, fp);
        fclose(fp);
    }
    srand(foo);
#else
    /* COMPAT_TIME REPLACEMENT */
    srand(GetTickCount());
#endif
}