ruby-fann 0.7.10 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. data/History.txt +6 -1
  2. data/License.txt +1 -1
  3. data/Manifest.txt +22 -1
  4. data/README.txt +0 -1
  5. data/Rakefile +0 -0
  6. data/config/hoe.rb +0 -0
  7. data/config/requirements.rb +0 -0
  8. data/ext/ruby_fann/MANIFEST +0 -0
  9. data/ext/ruby_fann/Makefile +36 -28
  10. data/ext/ruby_fann/doublefann.c +30 -0
  11. data/ext/ruby_fann/doublefann.h +33 -0
  12. data/ext/ruby_fann/extconf.rb +9 -5
  13. data/ext/ruby_fann/fann.c +1552 -0
  14. data/ext/ruby_fann/fann_activation.h +144 -0
  15. data/ext/ruby_fann/fann_augment.h +0 -0
  16. data/ext/ruby_fann/fann_cascade.c +1031 -0
  17. data/ext/ruby_fann/fann_cascade.h +503 -0
  18. data/ext/ruby_fann/fann_data.h +799 -0
  19. data/ext/ruby_fann/fann_error.c +204 -0
  20. data/ext/ruby_fann/fann_error.h +161 -0
  21. data/ext/ruby_fann/fann_internal.h +148 -0
  22. data/ext/ruby_fann/fann_io.c +762 -0
  23. data/ext/ruby_fann/fann_io.h +100 -0
  24. data/ext/ruby_fann/fann_train.c +962 -0
  25. data/ext/ruby_fann/fann_train.h +1203 -0
  26. data/ext/ruby_fann/fann_train_data.c +1231 -0
  27. data/ext/ruby_fann/neural_network.c +0 -0
  28. data/lib/ruby_fann/neurotica.rb +0 -0
  29. data/lib/ruby_fann/version.rb +3 -3
  30. data/lib/ruby_fann.rb +0 -0
  31. data/neurotica1.png +0 -0
  32. data/neurotica2.vrml +18 -18
  33. data/setup.rb +0 -0
  34. data/tasks/deployment.rake +0 -0
  35. data/tasks/environment.rake +0 -0
  36. data/tasks/website.rake +0 -0
  37. data/test/test.train +0 -0
  38. data/test/test_helper.rb +0 -0
  39. data/test/test_neurotica.rb +0 -0
  40. data/test/test_ruby_fann.rb +0 -0
  41. data/test/test_ruby_fann_functional.rb +0 -0
  42. data/verify.train +0 -0
  43. data/website/index.html +42 -92
  44. data/website/index.txt +0 -0
  45. data/website/javascripts/rounded_corners_lite.inc.js +0 -0
  46. data/website/stylesheets/screen.css +0 -0
  47. data/website/template.rhtml +0 -0
  48. data/xor.train +0 -0
  49. data/xor_cascade.net +2 -2
  50. data/xor_float.net +1 -1
  51. metadata +22 -6
  52. data/log/debug.log +0 -0
data/ext/ruby_fann/fann_io.c (new file)
@@ -0,0 +1,762 @@
+ /*
+ Fast Artificial Neural Network Library (fann)
+ Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <stdarg.h>
+ #include <string.h>
+
+ #include "config.h"
+ #include "fann.h"
+
+ /* Create a network from a configuration file.
+  */
+ FANN_EXTERNAL struct fann *FANN_API fann_create_from_file(const char *configuration_file)
+ {
+     struct fann *ann;
+     FILE *conf = fopen(configuration_file, "r");
+
+     if(!conf)
+     {
+         fann_error(NULL, FANN_E_CANT_OPEN_CONFIG_R, configuration_file);
+         return NULL;
+     }
+     ann = fann_create_from_fd(conf, configuration_file);
+     fclose(conf);
+     return ann;
+ }
+
+ /* Save the network.
+  */
+ FANN_EXTERNAL int FANN_API fann_save(struct fann *ann, const char *configuration_file)
+ {
+     return fann_save_internal(ann, configuration_file, 0);
+ }
+
+ /* Save the network as fixed point data.
+  */
+ FANN_EXTERNAL int FANN_API fann_save_to_fixed(struct fann *ann, const char *configuration_file)
+ {
+     return fann_save_internal(ann, configuration_file, 1);
+ }
+
+ /* INTERNAL FUNCTION
+    Used to save the network to a file.
+  */
+ int fann_save_internal(struct fann *ann, const char *configuration_file, unsigned int save_as_fixed)
+ {
+     int retval;
+     FILE *conf = fopen(configuration_file, "w+");
+
+     if(!conf)
+     {
+         fann_error((struct fann_error *) ann, FANN_E_CANT_OPEN_CONFIG_W, configuration_file);
+         return -1;
+     }
+     retval = fann_save_internal_fd(ann, conf, configuration_file, save_as_fixed);
+     fclose(conf);
+     return retval;
+ }
+
+ /* INTERNAL FUNCTION
+    Used to save the network to a file descriptor.
+  */
+ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configuration_file,
+                           unsigned int save_as_fixed)
+ {
+     struct fann_layer *layer_it;
+     int calculated_decimal_point = 0;
+     struct fann_neuron *neuron_it, *first_neuron;
+     fann_type *weights;
+     struct fann_neuron **connected_neurons;
+     unsigned int i = 0;
+
+ #ifndef FIXEDFANN
+     /* variables for use when saving floats as fixed point variables */
+     unsigned int decimal_point = 0;
+     unsigned int fixed_multiplier = 0;
+     fann_type max_possible_value = 0;
+     unsigned int bits_used_for_max = 0;
+     fann_type current_max_value = 0;
+ #endif
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         /* save the version information */
+         fprintf(conf, FANN_FIX_VERSION "\n");
+     }
+     else
+     {
+         /* save the version information */
+         fprintf(conf, FANN_FLO_VERSION "\n");
+     }
+ #else
+     /* save the version information */
+     fprintf(conf, FANN_FIX_VERSION "\n");
+ #endif
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         /* calculate the maximal possible shift value */
+
+         for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
+         {
+             for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
+             {
+                 /* look at all connections to each neuron, and see how high a value we can get */
+                 current_max_value = 0;
+                 for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
+                 {
+                     current_max_value += fann_abs(ann->weights[i]);
+                 }
+
+                 if(current_max_value > max_possible_value)
+                 {
+                     max_possible_value = current_max_value;
+                 }
+             }
+         }
+
+         for(bits_used_for_max = 0; max_possible_value >= 1; bits_used_for_max++)
+         {
+             max_possible_value /= 2.0;
+         }
+
+         /* The maximum number of bits we shift the fixed point is the number
+          * of bits in an integer, minus one for the sign, one for the minus
+          * in stepwise, and minus the bits used for the maximum.
+          * This is divided by two, to allow multiplication of two fixed
+          * point numbers.
+          */
+         calculated_decimal_point = (sizeof(int) * 8 - 2 - bits_used_for_max) / 2;
+
+         if(calculated_decimal_point < 0)
+         {
+             decimal_point = 0;
+         }
+         else
+         {
+             decimal_point = calculated_decimal_point;
+         }
+
+         fixed_multiplier = 1 << decimal_point;
+
+ #ifdef DEBUG
+         printf("calculated_decimal_point=%d, decimal_point=%u, bits_used_for_max=%u\n",
+                calculated_decimal_point, decimal_point, bits_used_for_max);
+ #endif
+
+         /* save the decimal_point on a separate line */
+         fprintf(conf, "decimal_point=%u\n", decimal_point);
+     }
+ #else
+     /* save the decimal_point on a separate line */
+     fprintf(conf, "decimal_point=%u\n", ann->decimal_point);
+
+ #endif
+
+     /* Save network parameters */
+     fprintf(conf, "num_layers=%u\n", ann->last_layer - ann->first_layer);
+     fprintf(conf, "learning_rate=%f\n", ann->learning_rate);
+     fprintf(conf, "connection_rate=%f\n", ann->connection_rate);
+     fprintf(conf, "network_type=%u\n", ann->network_type);
+
+     fprintf(conf, "learning_momentum=%f\n", ann->learning_momentum);
+     fprintf(conf, "training_algorithm=%u\n", ann->training_algorithm);
+     fprintf(conf, "train_error_function=%u\n", ann->train_error_function);
+     fprintf(conf, "train_stop_function=%u\n", ann->train_stop_function);
+     fprintf(conf, "cascade_output_change_fraction=%f\n", ann->cascade_output_change_fraction);
+     fprintf(conf, "quickprop_decay=%f\n", ann->quickprop_decay);
+     fprintf(conf, "quickprop_mu=%f\n", ann->quickprop_mu);
+     fprintf(conf, "rprop_increase_factor=%f\n", ann->rprop_increase_factor);
+     fprintf(conf, "rprop_decrease_factor=%f\n", ann->rprop_decrease_factor);
+     fprintf(conf, "rprop_delta_min=%f\n", ann->rprop_delta_min);
+     fprintf(conf, "rprop_delta_max=%f\n", ann->rprop_delta_max);
+     fprintf(conf, "rprop_delta_zero=%f\n", ann->rprop_delta_zero);
+     fprintf(conf, "cascade_output_stagnation_epochs=%u\n", ann->cascade_output_stagnation_epochs);
+     fprintf(conf, "cascade_candidate_change_fraction=%f\n", ann->cascade_candidate_change_fraction);
+     fprintf(conf, "cascade_candidate_stagnation_epochs=%u\n", ann->cascade_candidate_stagnation_epochs);
+     fprintf(conf, "cascade_max_out_epochs=%u\n", ann->cascade_max_out_epochs);
+     fprintf(conf, "cascade_max_cand_epochs=%u\n", ann->cascade_max_cand_epochs);
+     fprintf(conf, "cascade_num_candidate_groups=%u\n", ann->cascade_num_candidate_groups);
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         fprintf(conf, "bit_fail_limit=%u\n", (int) floor((ann->bit_fail_limit * fixed_multiplier) + 0.5));
+         fprintf(conf, "cascade_candidate_limit=%u\n", (int) floor((ann->cascade_candidate_limit * fixed_multiplier) + 0.5));
+         fprintf(conf, "cascade_weight_multiplier=%u\n", (int) floor((ann->cascade_weight_multiplier * fixed_multiplier) + 0.5));
+     }
+     else
+ #endif
+     {
+         fprintf(conf, "bit_fail_limit=" FANNPRINTF "\n", ann->bit_fail_limit);
+         fprintf(conf, "cascade_candidate_limit=" FANNPRINTF "\n", ann->cascade_candidate_limit);
+         fprintf(conf, "cascade_weight_multiplier=" FANNPRINTF "\n", ann->cascade_weight_multiplier);
+     }
+
+     fprintf(conf, "cascade_activation_functions_count=%u\n", ann->cascade_activation_functions_count);
+     fprintf(conf, "cascade_activation_functions=");
+     for(i = 0; i < ann->cascade_activation_functions_count; i++)
+         fprintf(conf, "%u ", ann->cascade_activation_functions[i]);
+     fprintf(conf, "\n");
+
+     fprintf(conf, "cascade_activation_steepnesses_count=%u\n", ann->cascade_activation_steepnesses_count);
+     fprintf(conf, "cascade_activation_steepnesses=");
+     for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+     {
+ #ifndef FIXEDFANN
+         if(save_as_fixed)
+             fprintf(conf, "%u ", (int) floor((ann->cascade_activation_steepnesses[i] * fixed_multiplier) + 0.5));
+         else
+ #endif
+             fprintf(conf, FANNPRINTF " ", ann->cascade_activation_steepnesses[i]);
+     }
+     fprintf(conf, "\n");
+
+     fprintf(conf, "layer_sizes=");
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         /* the number of neurons in the layers (in the last layer, there is always one too many neurons, because of an unused bias) */
+         fprintf(conf, "%u ", layer_it->last_neuron - layer_it->first_neuron);
+     }
+     fprintf(conf, "\n");
+
+ #ifndef FIXEDFANN
+     /* 2.1 */
+ #define SCALE_SAVE( what, where ) \
+     fprintf( conf, #what "_" #where "=" ); \
+     for( i = 0; i < ann->num_##where##put; i++ ) \
+         fprintf( conf, "%f ", ann->what##_##where[ i ] ); \
+     fprintf( conf, "\n" );
+
+     if(!save_as_fixed)
+     {
+         if(ann->scale_mean_in != NULL)
+         {
+             fprintf(conf, "scale_included=1\n");
+             SCALE_SAVE( scale_mean, in )
+             SCALE_SAVE( scale_deviation, in )
+             SCALE_SAVE( scale_new_min, in )
+             SCALE_SAVE( scale_factor, in )
+
+             SCALE_SAVE( scale_mean, out )
+             SCALE_SAVE( scale_deviation, out )
+             SCALE_SAVE( scale_new_min, out )
+             SCALE_SAVE( scale_factor, out )
+         }
+         else
+             fprintf(conf, "scale_included=0\n");
+     }
+ #undef SCALE_SAVE
+ #endif
+
+     /* 2.0 */
+     fprintf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         /* the neurons */
+         for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
+         {
+ #ifndef FIXEDFANN
+             if(save_as_fixed)
+             {
+                 fprintf(conf, "(%u, %u, %u) ", neuron_it->last_con - neuron_it->first_con,
+                         neuron_it->activation_function,
+                         (int) floor((neuron_it->activation_steepness * fixed_multiplier) + 0.5));
+             }
+             else
+             {
+                 fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
+                         neuron_it->activation_function, neuron_it->activation_steepness);
+             }
+ #else
+             fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
+                     neuron_it->activation_function, neuron_it->activation_steepness);
+ #endif
+         }
+     }
+     fprintf(conf, "\n");
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     /* Now save all the connections.
+      * We only need to save the source and the weight,
+      * since the destination is given by the order.
+      *
+      * The weight is not saved binary due to differences
+      * in binary definition of floating point numbers.
+      * Especially an iPAQ does not use the same binary
+      * representation as an i386 machine.
+      */
+     fprintf(conf, "connections (connected_to_neuron, weight)=");
+     for(i = 0; i < ann->total_connections; i++)
+     {
+ #ifndef FIXEDFANN
+         if(save_as_fixed)
+         {
+             /* save the connection "(source weight) " */
+             fprintf(conf, "(%u, %d) ",
+                     connected_neurons[i] - first_neuron,
+                     (int) floor((weights[i] * fixed_multiplier) + 0.5));
+         }
+         else
+         {
+             /* save the connection "(source weight) " */
+             fprintf(conf, "(%u, " FANNPRINTF ") ", connected_neurons[i] - first_neuron, weights[i]);
+         }
+ #else
+         /* save the connection "(source weight) " */
+         fprintf(conf, "(%u, " FANNPRINTF ") ", connected_neurons[i] - first_neuron, weights[i]);
+ #endif
+
+     }
+     fprintf(conf, "\n");
+
+     return calculated_decimal_point;
+ }
+
+ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file);
+
+ #define fann_scanf(type, name, val) \
+ { \
+     if(fscanf(conf, name "=" type "\n", val) != 1) \
+     { \
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
+         fann_destroy(ann); \
+         return NULL; \
+     } \
+ }
+
+ /* INTERNAL FUNCTION
+    Create a network from a configuration file descriptor.
+  */
+ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
+ {
+     unsigned int num_layers, layer_size, input_neuron, i, num_connections;
+ #ifdef FIXEDFANN
+     unsigned int decimal_point, multiplier;
+ #else
+     unsigned int scale_included;
+ #endif
+     struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
+     fann_type *weights;
+     struct fann_layer *layer_it;
+     struct fann *ann = NULL;
+
+     char *read_version;
+
+     read_version = (char *) calloc(strlen(FANN_CONF_VERSION "\n"), 1);
+     if(read_version == NULL)
+     {
+         fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
+         return NULL;
+     }
+
+     fread(read_version, 1, strlen(FANN_CONF_VERSION "\n"), conf); /* reads version */
+
+     /* compares the version information */
+     if(strncmp(read_version, FANN_CONF_VERSION "\n", strlen(FANN_CONF_VERSION "\n")) != 0)
+     {
+ #ifdef FIXEDFANN
+         if(strncmp(read_version, "FANN_FIX_1.1\n", strlen("FANN_FIX_1.1\n")) == 0)
+         {
+ #else
+         if(strncmp(read_version, "FANN_FLO_1.1\n", strlen("FANN_FLO_1.1\n")) == 0)
+         {
+ #endif
+             free(read_version);
+             return fann_create_from_fd_1_1(conf, configuration_file);
+         }
+
+ #ifndef FIXEDFANN
+         /* Maintain compatibility with the 2.0 version that doesn't have scale parameters. */
+         if(strncmp(read_version, "FANN_FLO_2.0\n", strlen("FANN_FLO_2.0\n")) != 0 &&
+            strncmp(read_version, "FANN_FLO_2.1\n", strlen("FANN_FLO_2.1\n")) != 0)
+ #else
+         if(strncmp(read_version, "FANN_FIX_2.0\n", strlen("FANN_FIX_2.0\n")) != 0 &&
+            strncmp(read_version, "FANN_FIX_2.1\n", strlen("FANN_FIX_2.1\n")) != 0)
+ #endif
+         {
+             free(read_version);
+             fann_error(NULL, FANN_E_WRONG_CONFIG_VERSION, configuration_file);
+
+             return NULL;
+         }
+     }
+
+     free(read_version);
+
+ #ifdef FIXEDFANN
+     fann_scanf("%u", "decimal_point", &decimal_point);
+     multiplier = 1 << decimal_point;
+ #endif
+
+     fann_scanf("%u", "num_layers", &num_layers);
+
+     ann = fann_allocate_structure(num_layers);
+     if(ann == NULL)
+     {
+         return NULL;
+     }
+
+     fann_scanf("%f", "learning_rate", &ann->learning_rate);
+     fann_scanf("%f", "connection_rate", &ann->connection_rate);
+     fann_scanf("%u", "network_type", (unsigned int *)&ann->network_type);
+     fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
+     fann_scanf("%u", "training_algorithm", (unsigned int *)&ann->training_algorithm);
+     fann_scanf("%u", "train_error_function", (unsigned int *)&ann->train_error_function);
+     fann_scanf("%u", "train_stop_function", (unsigned int *)&ann->train_stop_function);
+     fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
+     fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
+     fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
+     fann_scanf("%f", "rprop_increase_factor", &ann->rprop_increase_factor);
+     fann_scanf("%f", "rprop_decrease_factor", &ann->rprop_decrease_factor);
+     fann_scanf("%f", "rprop_delta_min", &ann->rprop_delta_min);
+     fann_scanf("%f", "rprop_delta_max", &ann->rprop_delta_max);
+     fann_scanf("%f", "rprop_delta_zero", &ann->rprop_delta_zero);
+     fann_scanf("%u", "cascade_output_stagnation_epochs", &ann->cascade_output_stagnation_epochs);
+     fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
+     fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
+     fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
+     fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
+     fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);
+
+     fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
+     fann_scanf(FANNSCANF, "cascade_candidate_limit", &ann->cascade_candidate_limit);
+     fann_scanf(FANNSCANF, "cascade_weight_multiplier", &ann->cascade_weight_multiplier);
+
+
+     fann_scanf("%u", "cascade_activation_functions_count", &ann->cascade_activation_functions_count);
+
+     /* reallocate mem */
+     ann->cascade_activation_functions =
+         (enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions,
+             ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
+     if(ann->cascade_activation_functions == NULL)
+     {
+         fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     fscanf(conf, "cascade_activation_functions=");
+     for(i = 0; i < ann->cascade_activation_functions_count; i++)
+         fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]);
+
+     fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);
+
+     /* reallocate mem */
+     ann->cascade_activation_steepnesses =
+         (fann_type *)realloc(ann->cascade_activation_steepnesses,
+             ann->cascade_activation_steepnesses_count * sizeof(fann_type));
+     if(ann->cascade_activation_steepnesses == NULL)
+     {
+         fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     fscanf(conf, "cascade_activation_steepnesses=");
+     for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+         fscanf(conf, FANNSCANF " ", &ann->cascade_activation_steepnesses[i]);
+
+ #ifdef FIXEDFANN
+     ann->decimal_point = decimal_point;
+     ann->multiplier = multiplier;
+ #endif
+
+ #ifdef FIXEDFANN
+     fann_update_stepwise(ann);
+ #endif
+
+ #ifdef DEBUG
+     printf("creating network with %d layers\n", num_layers);
+     printf("input\n");
+ #endif
+
+     fscanf(conf, "layer_sizes=");
+     /* determine how many neurons there should be in each layer */
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         if(fscanf(conf, "%u ", &layer_size) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, "layer_sizes", configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         /* we do not allocate room here, but we make sure that
+          * last_neuron - first_neuron is the number of neurons */
+         layer_it->first_neuron = NULL;
+         layer_it->last_neuron = layer_it->first_neuron + layer_size;
+         ann->total_neurons += layer_size;
+ #ifdef DEBUG
+         if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
+         {
+             printf(" layer : %d neurons, 0 bias\n", layer_size);
+         }
+         else
+         {
+             printf(" layer : %d neurons, 1 bias\n", layer_size - 1);
+         }
+ #endif
+     }
+
+     ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
+     ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
+     if(ann->network_type == FANN_NETTYPE_LAYER)
+     {
+         /* one too many (bias) in the output layer */
+         ann->num_output--;
+     }
+
+ #ifndef FIXEDFANN
+ #define SCALE_LOAD( what, where ) \
+     fscanf( conf, #what "_" #where "=" ); \
+     for(i = 0; i < ann->num_##where##put; i++) \
+     { \
+         if(fscanf( conf, "%f ", (float *)&ann->what##_##where[ i ] ) != 1) \
+         { \
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, #what "_" #where, configuration_file); \
+             fann_destroy(ann); \
+             return NULL; \
+         } \
+     }
+
+     if(fscanf(conf, "scale_included=%u\n", &scale_included) == 1 && scale_included == 1)
+     {
+         fann_allocate_scale(ann);
+         SCALE_LOAD( scale_mean, in )
+         SCALE_LOAD( scale_deviation, in )
+         SCALE_LOAD( scale_new_min, in )
+         SCALE_LOAD( scale_factor, in )
+
+         SCALE_LOAD( scale_mean, out )
+         SCALE_LOAD( scale_deviation, out )
+         SCALE_LOAD( scale_new_min, out )
+         SCALE_LOAD( scale_factor, out )
+     }
+ #undef SCALE_LOAD
+ #endif
+
+     /* allocate room for the actual neurons */
+     fann_allocate_neurons(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     last_neuron = (ann->last_layer - 1)->last_neuron;
+     fscanf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
+     for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
+     {
+         if(fscanf
+            (conf, "(%u, %u, " FANNSCANF ") ", &num_connections, (unsigned int *)&neuron_it->activation_function,
+             &neuron_it->activation_steepness) != 3)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         neuron_it->first_con = ann->total_connections;
+         ann->total_connections += num_connections;
+         neuron_it->last_con = ann->total_connections;
+     }
+
+     fann_allocate_connections(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     fscanf(conf, "connections (connected_to_neuron, weight)=");
+     for(i = 0; i < ann->total_connections; i++)
+     {
+         if(fscanf(conf, "(%u, " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         connected_neurons[i] = first_neuron + input_neuron;
+     }
+
+ #ifdef DEBUG
+     printf("output\n");
+ #endif
+     return ann;
+ }
+
+
+ /* INTERNAL FUNCTION
+    Create a network from a configuration file descriptor. (backward compatible read of version 1.1 files)
+  */
+ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file)
+ {
+     unsigned int num_layers, layer_size, input_neuron, i, network_type, num_connections;
+     unsigned int activation_function_hidden, activation_function_output;
+ #ifdef FIXEDFANN
+     unsigned int decimal_point, multiplier;
+ #endif
+     fann_type activation_steepness_hidden, activation_steepness_output;
+     float learning_rate, connection_rate;
+     struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
+     fann_type *weights;
+     struct fann_layer *layer_it;
+     struct fann *ann;
+
+ #ifdef FIXEDFANN
+     if(fscanf(conf, "%u\n", &decimal_point) != 1)
+     {
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, "decimal_point", configuration_file);
+         return NULL;
+     }
+     multiplier = 1 << decimal_point;
+ #endif
+
+     if(fscanf(conf, "%u %f %f %u %u %u " FANNSCANF " " FANNSCANF "\n", &num_layers, &learning_rate,
+               &connection_rate, &network_type, &activation_function_hidden,
+               &activation_function_output, &activation_steepness_hidden,
+               &activation_steepness_output) != 8)
+     {
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, "parameters", configuration_file);
+         return NULL;
+     }
+
+     ann = fann_allocate_structure(num_layers);
+     if(ann == NULL)
+     {
+         return NULL;
+     }
+     ann->connection_rate = connection_rate;
+     ann->network_type = (enum fann_nettype_enum)network_type;
+     ann->learning_rate = learning_rate;
+
+ #ifdef FIXEDFANN
+     ann->decimal_point = decimal_point;
+     ann->multiplier = multiplier;
+ #endif
+
+ #ifdef FIXEDFANN
+     fann_update_stepwise(ann);
+ #endif
+
+ #ifdef DEBUG
+     printf("creating network with learning rate %f\n", learning_rate);
+     printf("input\n");
+ #endif
+
+     /* determine how many neurons there should be in each layer */
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         if(fscanf(conf, "%u ", &layer_size) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         /* we do not allocate room here, but we make sure that
+          * last_neuron - first_neuron is the number of neurons */
+         layer_it->first_neuron = NULL;
+         layer_it->last_neuron = layer_it->first_neuron + layer_size;
+         ann->total_neurons += layer_size;
+ #ifdef DEBUG
+         if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
+         {
+             printf(" layer : %d neurons, 0 bias\n", layer_size);
+         }
+         else
+         {
+             printf(" layer : %d neurons, 1 bias\n", layer_size - 1);
+         }
+ #endif
+     }
+
+     ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
+     ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
+     if(ann->network_type == FANN_NETTYPE_LAYER)
+     {
+         /* one too many (bias) in the output layer */
+         ann->num_output--;
+     }
+
+     /* allocate room for the actual neurons */
+     fann_allocate_neurons(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     last_neuron = (ann->last_layer - 1)->last_neuron;
+     for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
+     {
+         if(fscanf(conf, "%u ", &num_connections) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         neuron_it->first_con = ann->total_connections;
+         ann->total_connections += num_connections;
+         neuron_it->last_con = ann->total_connections;
+     }
+
+     fann_allocate_connections(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     for(i = 0; i < ann->total_connections; i++)
+     {
+         if(fscanf(conf, "(%u " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         connected_neurons[i] = first_neuron + input_neuron;
+     }
+
+     fann_set_activation_steepness_hidden(ann, activation_steepness_hidden);
+     fann_set_activation_steepness_output(ann, activation_steepness_output);
+     fann_set_activation_function_hidden(ann, (enum fann_activationfunc_enum)activation_function_hidden);
+     fann_set_activation_function_output(ann, (enum fann_activationfunc_enum)activation_function_output);
+
+ #ifdef DEBUG
+     printf("output\n");
+ #endif
+     return ann;
+ }
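
Editor's note on the new file: the two public entry points above, fann_save() and fann_create_from_file(), round-trip the text format that fann_save_internal_fd() writes, which is a version string (FANN_FLO_2.1 for the float build), key=value parameter lines, the layer sizes, one (num_inputs, activation_function, activation_steepness) tuple per neuron, and (source, weight) pairs for every connection. For the fixed-point path, the shift calculation works out as, for example: with 32-bit ints and a largest absolute weight sum just under 8, bits_used_for_max is 3, so calculated_decimal_point = (32 - 2 - 3) / 2 = 13 fractional bits. The following is a minimal round-trip sketch, not part of the package diff, assuming the bundled double build (doublefann.c/doublefann.h in the file list, compiled roughly as `gcc demo.c doublefann.c -lm`) and an illustrative file name demo.net:

    /* Save a freshly created network to disk and load it back. */
    #include <stdio.h>
    #include "doublefann.h"

    int main(void)
    {
        /* 2-3-1 layout, the XOR shape used by data/xor.train */
        struct fann *ann = fann_create_standard(3, 2, 3, 1);
        fann_type input[2] = { -1.0, 1.0 };
        fann_type *output;

        if(ann == NULL)
            return 1;

        /* fann_save() -> fann_save_internal() -> fann_save_internal_fd() */
        if(fann_save(ann, "demo.net") == -1)
            return 1;
        fann_destroy(ann);

        /* fann_create_from_file() re-reads the file via fann_create_from_fd(),
         * falling back to fann_create_from_fd_1_1() for FANN 1.1 era files */
        ann = fann_create_from_file("demo.net");
        if(ann == NULL)
            return 1;

        output = fann_run(ann, input);
        printf("output: %f\n", output[0]);
        fann_destroy(ann);
        return 0;
    }

Because the weights are written as text rather than binary (see the comment above the connection loop), a demo.net produced on one architecture loads unchanged on another, which is why the xor_float.net and xor_cascade.net fixtures in this gem can ship as plain files.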