moo_fann 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,802 @@
+ /*
+ Fast Artificial Neural Network Library (fann)
+ Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <stdarg.h>
+ #include <string.h>
+
+ #include "config.h"
+ #include "fann.h"
+ #include "fann_data.h"
+
+ /* Create a network from a configuration file.
+  */
+ FANN_EXTERNAL struct fann *FANN_API fann_create_from_file(const char *configuration_file)
+ {
+     struct fann *ann;
+     FILE *conf = fopen(configuration_file, "r");
+
+     if(!conf)
+     {
+         fann_error(NULL, FANN_E_CANT_OPEN_CONFIG_R, configuration_file);
+         return NULL;
+     }
+     ann = fann_create_from_fd(conf, configuration_file);
+     fclose(conf);
+     return ann;
+ }
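+
+ /* [Editor's note: a minimal usage sketch, not part of the original source.
+  * It assumes a previously saved network file ("xor.net" is a hypothetical
+  * name) and uses fann_run() and fann_destroy() from the public fann.h API:
+  *
+  *     struct fann *net = fann_create_from_file("xor.net");
+  *     if(net != NULL)
+  *     {
+  *         fann_type input[2] = {1, 0};
+  *         fann_type *output = fann_run(net, input);
+  *         printf("output: %f\n", (double) output[0]);
+  *         fann_destroy(net);
+  *     }
+  */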
+
+ /* Save the network.
+  */
+ FANN_EXTERNAL int FANN_API fann_save(struct fann *ann, const char *configuration_file)
+ {
+     return fann_save_internal(ann, configuration_file, 0);
+ }
+
+ /* Save the network as fixed point data.
+  */
+ FANN_EXTERNAL int FANN_API fann_save_to_fixed(struct fann *ann, const char *configuration_file)
+ {
+     return fann_save_internal(ann, configuration_file, 1);
+ }
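+
+ /* [Editor's note: an editorial sketch of the fixed point workflow, not part
+  * of the original source; the file names and the threshold are illustrative.
+  * A network trained with the floating point library is written out with
+  * fann_save_to_fixed(); the return value is the calculated fixed point
+  * position (see fann_save_internal_fd() below), and a small value means
+  * little fractional precision is left for the weights:
+  *
+  *     struct fann *net = fann_create_from_file("trained_float.net");
+  *     int decimal_point = fann_save_to_fixed(net, "trained_fixed.net");
+  *     if(decimal_point < 6)
+  *         fprintf(stderr, "warning: only %d fractional bits\n", decimal_point);
+  *     fann_destroy(net);
+  */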
+
+ /* INTERNAL FUNCTION
+    Used to save the network to a file.
+  */
+ int fann_save_internal(struct fann *ann, const char *configuration_file, unsigned int save_as_fixed)
+ {
+     int retval;
+     FILE *conf = fopen(configuration_file, "w+");
+
+     if(!conf)
+     {
+         fann_error((struct fann_error *) ann, FANN_E_CANT_OPEN_CONFIG_W, configuration_file);
+         return -1;
+     }
+     retval = fann_save_internal_fd(ann, conf, configuration_file, save_as_fixed);
+     fclose(conf);
+     return retval;
+ }
+
+ /* INTERNAL FUNCTION
+    Used to save the network to a file descriptor.
+  */
+ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configuration_file,
+                           unsigned int save_as_fixed)
+ {
+     struct fann_layer *layer_it;
+     int calculated_decimal_point = 0;
+     struct fann_neuron *neuron_it, *first_neuron;
+     fann_type *weights;
+     struct fann_neuron **connected_neurons;
+     unsigned int i = 0;
+
+ #ifndef FIXEDFANN
+     /* variables for use when saving floats as fixed point variables */
+     unsigned int decimal_point = 0;
+     unsigned int fixed_multiplier = 0;
+     fann_type max_possible_value = 0;
+     unsigned int bits_used_for_max = 0;
+     fann_type current_max_value = 0;
+ #endif
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         /* save the version information */
+         fprintf(conf, FANN_FIX_VERSION "\n");
+     }
+     else
+     {
+         /* save the version information */
+         fprintf(conf, FANN_FLO_VERSION "\n");
+     }
+ #else
+     /* save the version information */
+     fprintf(conf, FANN_FIX_VERSION "\n");
+ #endif
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         /* calculate the maximal possible shift value */
+
+         for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
+         {
+             for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
+             {
+                 /* look at all connections to each neuron, and see how high a value we can get */
+                 current_max_value = 0;
+                 for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
+                 {
+                     current_max_value += fann_abs(ann->weights[i]);
+                 }
+
+                 if(current_max_value > max_possible_value)
+                 {
+                     max_possible_value = current_max_value;
+                 }
+             }
+         }
+
+         for(bits_used_for_max = 0; max_possible_value >= 1; bits_used_for_max++)
+         {
+             max_possible_value /= 2.0;
+         }
+
+         /* The maximum number of bits we can shift the fixed point is the
+          * number of bits in an integer, minus one for the sign, one for the
+          * minus in the stepwise activation function, and minus the bits used
+          * for the maximum. This is divided by two, to allow multiplication
+          * of two fixed point numbers.
+          */
+         calculated_decimal_point = (sizeof(int) * 8 - 2 - bits_used_for_max) / 2;
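+
+         /* [Editor's note: a worked example of the line above, not part of the
+          * original source. With 32-bit ints and a largest summed weight
+          * magnitude of, say, 3.2, the halving loop gives bits_used_for_max = 2
+          * (3.2 -> 1.6 -> 0.8), so calculated_decimal_point = (32 - 2 - 2) / 2
+          * = 14, and fixed_multiplier (set just below) = 1 << 14 = 16384. */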
+
+         if(calculated_decimal_point < 0)
+         {
+             decimal_point = 0;
+         }
+         else
+         {
+             decimal_point = calculated_decimal_point;
+         }
+
+         fixed_multiplier = 1 << decimal_point;
+
+ #ifdef DEBUG
+         printf("calculated_decimal_point=%d, decimal_point=%u, bits_used_for_max=%u\n",
+                calculated_decimal_point, decimal_point, bits_used_for_max);
+ #endif
+
+         /* save the decimal_point on a separate line */
+         fprintf(conf, "decimal_point=%u\n", decimal_point);
+     }
+ #else
+     /* save the decimal_point on a separate line */
+     fprintf(conf, "decimal_point=%u\n", ann->decimal_point);
+ #endif
+
+     /* Save network parameters */
+     fprintf(conf, "num_layers=%d\n", (int)(ann->last_layer - ann->first_layer));
+     fprintf(conf, "learning_rate=%f\n", ann->learning_rate);
+     fprintf(conf, "connection_rate=%f\n", ann->connection_rate);
+     fprintf(conf, "network_type=%u\n", ann->network_type);
+
+     fprintf(conf, "learning_momentum=%f\n", ann->learning_momentum);
+     fprintf(conf, "training_algorithm=%u\n", ann->training_algorithm);
+     fprintf(conf, "train_error_function=%u\n", ann->train_error_function);
+     fprintf(conf, "train_stop_function=%u\n", ann->train_stop_function);
+     fprintf(conf, "cascade_output_change_fraction=%f\n", ann->cascade_output_change_fraction);
+     fprintf(conf, "quickprop_decay=%f\n", ann->quickprop_decay);
+     fprintf(conf, "quickprop_mu=%f\n", ann->quickprop_mu);
+     fprintf(conf, "rprop_increase_factor=%f\n", ann->rprop_increase_factor);
+     fprintf(conf, "rprop_decrease_factor=%f\n", ann->rprop_decrease_factor);
+     fprintf(conf, "rprop_delta_min=%f\n", ann->rprop_delta_min);
+     fprintf(conf, "rprop_delta_max=%f\n", ann->rprop_delta_max);
+     fprintf(conf, "rprop_delta_zero=%f\n", ann->rprop_delta_zero);
+     fprintf(conf, "cascade_output_stagnation_epochs=%u\n", ann->cascade_output_stagnation_epochs);
+     fprintf(conf, "cascade_candidate_change_fraction=%f\n", ann->cascade_candidate_change_fraction);
+     fprintf(conf, "cascade_candidate_stagnation_epochs=%u\n", ann->cascade_candidate_stagnation_epochs);
+     fprintf(conf, "cascade_max_out_epochs=%u\n", ann->cascade_max_out_epochs);
+     fprintf(conf, "cascade_min_out_epochs=%u\n", ann->cascade_min_out_epochs);
+     fprintf(conf, "cascade_max_cand_epochs=%u\n", ann->cascade_max_cand_epochs);
+     fprintf(conf, "cascade_min_cand_epochs=%u\n", ann->cascade_min_cand_epochs);
+     fprintf(conf, "cascade_num_candidate_groups=%u\n", ann->cascade_num_candidate_groups);
+
+ #ifndef FIXEDFANN
+     if(save_as_fixed)
+     {
+         fprintf(conf, "bit_fail_limit=%u\n", (int) floor((ann->bit_fail_limit * fixed_multiplier) + 0.5));
+         fprintf(conf, "cascade_candidate_limit=%u\n", (int) floor((ann->cascade_candidate_limit * fixed_multiplier) + 0.5));
+         fprintf(conf, "cascade_weight_multiplier=%u\n", (int) floor((ann->cascade_weight_multiplier * fixed_multiplier) + 0.5));
+     }
+     else
+ #endif
+     {
+         fprintf(conf, "bit_fail_limit=" FANNPRINTF "\n", ann->bit_fail_limit);
+         fprintf(conf, "cascade_candidate_limit=" FANNPRINTF "\n", ann->cascade_candidate_limit);
+         fprintf(conf, "cascade_weight_multiplier=" FANNPRINTF "\n", ann->cascade_weight_multiplier);
+     }
+
+     fprintf(conf, "cascade_activation_functions_count=%u\n", ann->cascade_activation_functions_count);
+     fprintf(conf, "cascade_activation_functions=");
+     for(i = 0; i < ann->cascade_activation_functions_count; i++)
+         fprintf(conf, "%u ", ann->cascade_activation_functions[i]);
+     fprintf(conf, "\n");
+
+     fprintf(conf, "cascade_activation_steepnesses_count=%u\n", ann->cascade_activation_steepnesses_count);
+     fprintf(conf, "cascade_activation_steepnesses=");
+     for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+     {
+ #ifndef FIXEDFANN
+         if(save_as_fixed)
+             fprintf(conf, "%u ", (int) floor((ann->cascade_activation_steepnesses[i] * fixed_multiplier) + 0.5));
+         else
+ #endif
+             fprintf(conf, FANNPRINTF " ", ann->cascade_activation_steepnesses[i]);
+     }
+     fprintf(conf, "\n");
+
+     fprintf(conf, "layer_sizes=");
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         /* the number of neurons in the layers (in the last layer there is
+          * always one neuron too many, because of an unused bias) */
+         fprintf(conf, "%d ", (int)(layer_it->last_neuron - layer_it->first_neuron));
+     }
+     fprintf(conf, "\n");
+
+ #ifndef FIXEDFANN
+     /* 2.1 */
+ #define SCALE_SAVE( what, where ) \
+     fprintf( conf, #what "_" #where "=" ); \
+     for( i = 0; i < ann->num_##where##put; i++ ) \
+         fprintf( conf, "%f ", ann->what##_##where[ i ] ); \
+     fprintf( conf, "\n" );
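+
+ /* [Editor's note] For example, SCALE_SAVE( scale_mean, in ) expands, via the
+  * # stringizing and ## token pasting operators, to:
+  *
+  *     fprintf( conf, "scale_mean" "_" "in" "=" );
+  *     for( i = 0; i < ann->num_input; i++ )
+  *         fprintf( conf, "%f ", ann->scale_mean_in[ i ] );
+  *     fprintf( conf, "\n" );
+  */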
+
+     if(!save_as_fixed)
+     {
+         if(ann->scale_mean_in != NULL)
+         {
+             fprintf(conf, "scale_included=1\n");
+             SCALE_SAVE( scale_mean, in )
+             SCALE_SAVE( scale_deviation, in )
+             SCALE_SAVE( scale_new_min, in )
+             SCALE_SAVE( scale_factor, in )
+
+             SCALE_SAVE( scale_mean, out )
+             SCALE_SAVE( scale_deviation, out )
+             SCALE_SAVE( scale_new_min, out )
+             SCALE_SAVE( scale_factor, out )
+         }
+         else
+             fprintf(conf, "scale_included=0\n");
+     }
+ #undef SCALE_SAVE
+ #endif
+
+     /* 2.0 */
+     fprintf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         /* the neurons */
+         for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
+         {
+ #ifndef FIXEDFANN
+             if(save_as_fixed)
+             {
+                 fprintf(conf, "(%u, %u, %u) ", neuron_it->last_con - neuron_it->first_con,
+                         neuron_it->activation_function,
+                         (int) floor((neuron_it->activation_steepness * fixed_multiplier) + 0.5));
+             }
+             else
+             {
+                 fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
+                         neuron_it->activation_function, neuron_it->activation_steepness);
+             }
+ #else
+             fprintf(conf, "(%u, %u, " FANNPRINTF ") ", neuron_it->last_con - neuron_it->first_con,
+                     neuron_it->activation_function, neuron_it->activation_steepness);
+ #endif
+         }
+     }
+     fprintf(conf, "\n");
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     /* Now save all the connections.
+      * We only need to save the source and the weight,
+      * since the destination is given by the order.
+      *
+      * The weight is not saved in binary, due to differences
+      * in the binary representation of floating point numbers;
+      * in particular, an iPAQ does not use the same binary
+      * representation as an i386 machine.
+      */
+     fprintf(conf, "connections (connected_to_neuron, weight)=");
+     for(i = 0; i < ann->total_connections; i++)
+     {
+ #ifndef FIXEDFANN
+         if(save_as_fixed)
+         {
+             /* save the connection "(source weight) " */
+             fprintf(conf, "(%d, %d) ",
+                     (int)(connected_neurons[i] - first_neuron),
+                     (int) floor((weights[i] * fixed_multiplier) + 0.5));
+         }
+         else
+         {
+             /* save the connection "(source weight) " */
+             fprintf(conf, "(%d, " FANNPRINTF ") ", (int)(connected_neurons[i] - first_neuron), weights[i]);
+         }
+ #else
+         /* save the connection "(source weight) " */
+         fprintf(conf, "(%d, " FANNPRINTF ") ", (int)(connected_neurons[i] - first_neuron), weights[i]);
+ #endif
+     }
+     fprintf(conf, "\n");
+
+     return calculated_decimal_point;
+ }
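+
+ /* [Editor's note: an illustrative excerpt of the text format produced above;
+  * the values are made up, not taken from a real file. A floating point save
+  * of a fully connected 2-3-1 network (bias neurons included in the counts)
+  * would begin roughly like:
+  *
+  *     FANN_FLO_2.1
+  *     num_layers=3
+  *     learning_rate=0.700000
+  *     ...
+  *     layer_sizes=3 4 2
+  *     neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000) ...
+  *     connections (connected_to_neuron, weight)=(0, 0.12345678) (1, -0.23456789) ...
+  */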
+
+ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file);
+
+ #define fann_scanf(type, name, val) \
+ { \
+     if(fscanf(conf, name "=" type "\n", val) != 1) \
+     { \
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
+         fann_destroy(ann); \
+         return NULL; \
+     } \
+ }
+
+ #define fann_skip(name) \
+ { \
+     if(fscanf(conf, name) != 0) \
+     { \
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
+         fann_destroy(ann); \
+         return NULL; \
+     } \
+ }
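+
+ /* [Editor's note] For example, fann_scanf("%u", "num_layers", &num_layers)
+  * expands to a block that runs
+  *
+  *     fscanf(conf, "num_layers" "=" "%u" "\n", &num_layers)
+  *
+  * and, on a failed match, reports FANN_E_CANT_READ_CONFIG, destroys the
+  * half-built network and returns NULL from the enclosing function. Both
+  * macros deliberately rely on the local variables conf, ann and
+  * configuration_file of the function they are expanded in. */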
+
+ /* INTERNAL FUNCTION
+    Create a network from a configuration file descriptor.
+  */
+ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
+ {
+     unsigned int num_layers, layer_size, input_neuron, i, num_connections;
+     unsigned int tmpVal;
+ #ifdef FIXEDFANN
+     unsigned int decimal_point, multiplier;
+ #else
+     unsigned int scale_included;
+ #endif
+     struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
+     fann_type *weights;
+     struct fann_layer *layer_it;
+     struct fann *ann = NULL;
+
+     char *read_version;
+
+     read_version = (char *) calloc(strlen(FANN_CONF_VERSION "\n"), 1);
+     if(read_version == NULL)
+     {
+         fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
+         return NULL;
+     }
+
+     /* a short read means the version header is missing or truncated */
+     if(fread(read_version, 1, strlen(FANN_CONF_VERSION "\n"), conf) != strlen(FANN_CONF_VERSION "\n"))
+     {
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, "FANN_VERSION", configuration_file);
+         free(read_version);
+         return NULL;
+     }
+
+     /* compares the version information */
+     if(strncmp(read_version, FANN_CONF_VERSION "\n", strlen(FANN_CONF_VERSION "\n")) != 0)
+     {
+ #ifdef FIXEDFANN
+         if(strncmp(read_version, "FANN_FIX_1.1\n", strlen("FANN_FIX_1.1\n")) == 0)
+         {
+ #else
+         if(strncmp(read_version, "FANN_FLO_1.1\n", strlen("FANN_FLO_1.1\n")) == 0)
+         {
+ #endif
+             free(read_version);
+             return fann_create_from_fd_1_1(conf, configuration_file);
+         }
+
+ #ifndef FIXEDFANN
+         /* Maintain compatibility with the 2.0 version that doesn't have scale parameters. */
+         if(strncmp(read_version, "FANN_FLO_2.0\n", strlen("FANN_FLO_2.0\n")) != 0 &&
+            strncmp(read_version, "FANN_FLO_2.1\n", strlen("FANN_FLO_2.1\n")) != 0)
+ #else
+         if(strncmp(read_version, "FANN_FIX_2.0\n", strlen("FANN_FIX_2.0\n")) != 0 &&
+            strncmp(read_version, "FANN_FIX_2.1\n", strlen("FANN_FIX_2.1\n")) != 0)
+ #endif
+         {
+             free(read_version);
+             fann_error(NULL, FANN_E_WRONG_CONFIG_VERSION, configuration_file);
+             return NULL;
+         }
+     }
+
+     free(read_version);
+
+ #ifdef FIXEDFANN
+     fann_scanf("%u", "decimal_point", &decimal_point);
+     multiplier = 1 << decimal_point;
+ #endif
+
+     fann_scanf("%u", "num_layers", &num_layers);
+
+     ann = fann_allocate_structure(num_layers);
+     if(ann == NULL)
+     {
+         return NULL;
+     }
+
+     fann_scanf("%f", "learning_rate", &ann->learning_rate);
+     fann_scanf("%f", "connection_rate", &ann->connection_rate);
+     fann_scanf("%u", "network_type", &tmpVal);
+     ann->network_type = (enum fann_nettype_enum)tmpVal;
+     fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
+     fann_scanf("%u", "training_algorithm", &tmpVal);
+     ann->training_algorithm = (enum fann_train_enum)tmpVal;
+     fann_scanf("%u", "train_error_function", &tmpVal);
+     ann->train_error_function = (enum fann_errorfunc_enum)tmpVal;
+     fann_scanf("%u", "train_stop_function", &tmpVal);
+     ann->train_stop_function = (enum fann_stopfunc_enum)tmpVal;
+     fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
+     fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
+     fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
+     fann_scanf("%f", "rprop_increase_factor", &ann->rprop_increase_factor);
+     fann_scanf("%f", "rprop_decrease_factor", &ann->rprop_decrease_factor);
+     fann_scanf("%f", "rprop_delta_min", &ann->rprop_delta_min);
+     fann_scanf("%f", "rprop_delta_max", &ann->rprop_delta_max);
+     fann_scanf("%f", "rprop_delta_zero", &ann->rprop_delta_zero);
+     fann_scanf("%u", "cascade_output_stagnation_epochs", &ann->cascade_output_stagnation_epochs);
+     fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
+     fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
+     fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
+     fann_scanf("%u", "cascade_min_out_epochs", &ann->cascade_min_out_epochs);
+     fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
+     fann_scanf("%u", "cascade_min_cand_epochs", &ann->cascade_min_cand_epochs);
+     fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);
+
+     fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
+     fann_scanf(FANNSCANF, "cascade_candidate_limit", &ann->cascade_candidate_limit);
+     fann_scanf(FANNSCANF, "cascade_weight_multiplier", &ann->cascade_weight_multiplier);
+
+     fann_scanf("%u", "cascade_activation_functions_count", &ann->cascade_activation_functions_count);
+
+     /* reallocate mem */
+     ann->cascade_activation_functions =
+         (enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions,
+                                                  ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
+     if(ann->cascade_activation_functions == NULL)
+     {
+         fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     fann_skip("cascade_activation_functions=");
+     for(i = 0; i < ann->cascade_activation_functions_count; i++)
+     {
+         if(fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]) != 1)
+         {
+             fann_error(NULL, FANN_E_CANT_READ_CONFIG, "cascade_activation_functions", configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+     }
+
+     fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);
+
+     /* reallocate mem */
+     ann->cascade_activation_steepnesses =
+         (fann_type *)realloc(ann->cascade_activation_steepnesses,
+                              ann->cascade_activation_steepnesses_count * sizeof(fann_type));
+     if(ann->cascade_activation_steepnesses == NULL)
+     {
+         fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     fann_skip("cascade_activation_steepnesses=");
+     for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+     {
+         if(fscanf(conf, FANNSCANF " ", &ann->cascade_activation_steepnesses[i]) != 1)
+         {
+             fann_error(NULL, FANN_E_CANT_READ_CONFIG, "cascade_activation_steepnesses", configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+     }
+
+ #ifdef FIXEDFANN
+     ann->decimal_point = decimal_point;
+     ann->multiplier = multiplier;
+ #endif
+
+ #ifdef FIXEDFANN
+     fann_update_stepwise(ann);
+ #endif
+
+ #ifdef DEBUG
+     printf("creating network with %d layers\n", num_layers);
+     printf("input\n");
+ #endif
+
+     fann_skip("layer_sizes=");
+     /* determine how many neurons there should be in each layer */
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         if(fscanf(conf, "%u ", &layer_size) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, "layer_sizes", configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         /* we do not allocate room here, but we make sure that
+          * last_neuron - first_neuron is the number of neurons */
+         layer_it->first_neuron = NULL;
+         layer_it->last_neuron = layer_it->first_neuron + layer_size;
+         ann->total_neurons += layer_size;
+ #ifdef DEBUG
+         if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
+         {
+             printf(" layer : %d neurons, 0 bias\n", layer_size);
+         }
+         else
+         {
+             printf(" layer : %d neurons, 1 bias\n", layer_size - 1);
+         }
+ #endif
+     }
+
+     ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
+     ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
+     if(ann->network_type == FANN_NETTYPE_LAYER)
+     {
+         /* one too many (bias) in the output layer */
+         ann->num_output--;
+     }
+
+ #ifndef FIXEDFANN
+ #define SCALE_LOAD( what, where ) \
+     fann_skip( #what "_" #where "=" ); \
+     for(i = 0; i < ann->num_##where##put; i++) \
+     { \
+         if(fscanf( conf, "%f ", (float *)&ann->what##_##where[ i ] ) != 1) \
+         { \
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, #what "_" #where, configuration_file); \
+             fann_destroy(ann); \
+             return NULL; \
+         } \
+     }
+
+     if(fscanf(conf, "scale_included=%u\n", &scale_included) == 1 && scale_included == 1)
+     {
+         fann_allocate_scale(ann);
+         SCALE_LOAD( scale_mean, in )
+         SCALE_LOAD( scale_deviation, in )
+         SCALE_LOAD( scale_new_min, in )
+         SCALE_LOAD( scale_factor, in )
+
+         SCALE_LOAD( scale_mean, out )
+         SCALE_LOAD( scale_deviation, out )
+         SCALE_LOAD( scale_new_min, out )
+         SCALE_LOAD( scale_factor, out )
+     }
+ #undef SCALE_LOAD
+ #endif
+
+     /* allocate room for the actual neurons */
+     fann_allocate_neurons(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     last_neuron = (ann->last_layer - 1)->last_neuron;
+     fann_skip("neurons (num_inputs, activation_function, activation_steepness)=");
+     for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
+     {
+         if(fscanf(conf, "(%u, %u, " FANNSCANF ") ", &num_connections, &tmpVal,
+                   &neuron_it->activation_steepness) != 3)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         neuron_it->activation_function = (enum fann_activationfunc_enum)tmpVal;
+         neuron_it->first_con = ann->total_connections;
+         ann->total_connections += num_connections;
+         neuron_it->last_con = ann->total_connections;
+     }
+
+     fann_allocate_connections(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     fann_skip("connections (connected_to_neuron, weight)=");
+     for(i = 0; i < ann->total_connections; i++)
+     {
+         if(fscanf(conf, "(%u, " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         connected_neurons[i] = first_neuron + input_neuron;
+     }
+
+ #ifdef DEBUG
+     printf("output\n");
+ #endif
+     return ann;
+ }
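+
+ /* [Editor's note: an editorial sketch of inspecting a network loaded through
+  * the reader above, using public getters from fann.h; "xor.net" is a
+  * hypothetical file name:
+  *
+  *     struct fann *net = fann_create_from_file("xor.net");
+  *     if(net != NULL)
+  *     {
+  *         printf("%u inputs, %u outputs, %u connections\n",
+  *                fann_get_num_input(net), fann_get_num_output(net),
+  *                fann_get_total_connections(net));
+  *         fann_destroy(net);
+  *     }
+  */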
+
+ /* INTERNAL FUNCTION
+    Create a network from a configuration file descriptor.
+    (backward compatible read of version 1.1 files)
+  */
+ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file)
+ {
+     unsigned int num_layers, layer_size, input_neuron, i, network_type, num_connections;
+     unsigned int activation_function_hidden, activation_function_output;
+ #ifdef FIXEDFANN
+     unsigned int decimal_point, multiplier;
+ #endif
+     fann_type activation_steepness_hidden, activation_steepness_output;
+     float learning_rate, connection_rate;
+     struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
+     fann_type *weights;
+     struct fann_layer *layer_it;
+     struct fann *ann;
+
+ #ifdef FIXEDFANN
+     if(fscanf(conf, "%u\n", &decimal_point) != 1)
+     {
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, "decimal_point", configuration_file);
+         return NULL;
+     }
+     multiplier = 1 << decimal_point;
+ #endif
+
+     if(fscanf(conf, "%u %f %f %u %u %u " FANNSCANF " " FANNSCANF "\n", &num_layers, &learning_rate,
+               &connection_rate, &network_type, &activation_function_hidden,
+               &activation_function_output, &activation_steepness_hidden,
+               &activation_steepness_output) != 8)
+     {
+         fann_error(NULL, FANN_E_CANT_READ_CONFIG, "parameters", configuration_file);
+         return NULL;
+     }
+
+     ann = fann_allocate_structure(num_layers);
+     if(ann == NULL)
+     {
+         return NULL;
+     }
+     ann->connection_rate = connection_rate;
+     ann->network_type = (enum fann_nettype_enum)network_type;
+     ann->learning_rate = learning_rate;
+
+ #ifdef FIXEDFANN
+     ann->decimal_point = decimal_point;
+     ann->multiplier = multiplier;
+ #endif
+
+ #ifdef FIXEDFANN
+     fann_update_stepwise(ann);
+ #endif
+
+ #ifdef DEBUG
+     printf("creating network with learning rate %f\n", learning_rate);
+     printf("input\n");
+ #endif
+
+     /* determine how many neurons there should be in each layer */
+     for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
+     {
+         if(fscanf(conf, "%u ", &layer_size) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         /* we do not allocate room here, but we make sure that
+          * last_neuron - first_neuron is the number of neurons */
+         layer_it->first_neuron = NULL;
+         layer_it->last_neuron = layer_it->first_neuron + layer_size;
+         ann->total_neurons += layer_size;
+ #ifdef DEBUG
+         if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
+         {
+             printf(" layer : %d neurons, 0 bias\n", layer_size);
+         }
+         else
+         {
+             printf(" layer : %d neurons, 1 bias\n", layer_size - 1);
+         }
+ #endif
+     }
+
+     ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
+     ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
+     if(ann->network_type == FANN_NETTYPE_LAYER)
+     {
+         /* one too many (bias) in the output layer */
+         ann->num_output--;
+     }
+
+     /* allocate room for the actual neurons */
+     fann_allocate_neurons(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     last_neuron = (ann->last_layer - 1)->last_neuron;
+     for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
+     {
+         if(fscanf(conf, "%u ", &num_connections) != 1)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         neuron_it->first_con = ann->total_connections;
+         ann->total_connections += num_connections;
+         neuron_it->last_con = ann->total_connections;
+     }
+
+     fann_allocate_connections(ann);
+     if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
+     {
+         fann_destroy(ann);
+         return NULL;
+     }
+
+     connected_neurons = ann->connections;
+     weights = ann->weights;
+     first_neuron = ann->first_layer->first_neuron;
+
+     for(i = 0; i < ann->total_connections; i++)
+     {
+         if(fscanf(conf, "(%u " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
+         {
+             fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
+             fann_destroy(ann);
+             return NULL;
+         }
+         connected_neurons[i] = first_neuron + input_neuron;
+     }
+
+     fann_set_activation_steepness_hidden(ann, activation_steepness_hidden);
+     fann_set_activation_steepness_output(ann, activation_steepness_output);
+     fann_set_activation_function_hidden(ann, (enum fann_activationfunc_enum)activation_function_hidden);
+     fann_set_activation_function_output(ann, (enum fann_activationfunc_enum)activation_function_output);
+
+ #ifdef DEBUG
+     printf("output\n");
+ #endif
+     return ann;
+ }
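+
+ /* [Editor's note: a save/load round-trip sketch, not part of the original
+  * source; assumes the floating point build and the public creation API from
+  * fann.h, with a hypothetical file name:
+  *
+  *     struct fann *net = fann_create_standard(3, 2, 3, 1);
+  *     fann_type input[2] = {0, 1};
+  *     fann_type before = fann_run(net, input)[0];
+  *
+  *     fann_save(net, "roundtrip.net");
+  *     fann_destroy(net);
+  *
+  *     net = fann_create_from_file("roundtrip.net");
+  *     fann_type after = fann_run(net, input)[0];
+  *     fann_destroy(net);
+  *
+  * The two outputs agree only to the precision printed by the writer, since
+  * weights are stored as decimal text rather than bit-exact binary (see the
+  * comment in fann_save_internal_fd() above). */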