ruby-fann 1.2.5 → 1.2.6
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +8 -8
- data/README.md +6 -1
- data/ext/ruby_fann/config.h +4 -61
- data/ext/ruby_fann/doublefann.c +1 -1
- data/ext/ruby_fann/doublefann.h +1 -1
- data/ext/ruby_fann/fann.c +279 -28
- data/ext/ruby_fann/fann.h +11 -1
- data/ext/ruby_fann/fann_activation.h +1 -1
- data/ext/ruby_fann/fann_cascade.c +27 -10
- data/ext/ruby_fann/fann_cascade.h +55 -1
- data/ext/ruby_fann/fann_data.h +28 -3
- data/ext/ruby_fann/fann_error.c +7 -1
- data/ext/ruby_fann/fann_error.h +6 -2
- data/ext/ruby_fann/fann_internal.h +7 -3
- data/ext/ruby_fann/fann_io.c +67 -27
- data/ext/ruby_fann/fann_io.h +1 -1
- data/ext/ruby_fann/fann_train.c +86 -1
- data/ext/ruby_fann/fann_train.h +108 -1
- data/ext/ruby_fann/fann_train_data.c +144 -132
- data/lib/ruby_fann/version.rb +1 -1
- metadata +2 -2
data/ext/ruby_fann/fann.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -266,6 +266,16 @@ FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_
 FANN_EXTERNAL void FANN_API fann_destroy(struct fann *ann);
 
 
+/* Function: fann_copy
+   Creates a copy of a fann structure.
+
+   Data in the user data <fann_set_user_data> is not copied, but the user data pointer is copied.
+
+   This function appears in FANN >= 2.2.0.
+*/
+FANN_EXTERNAL struct fann * FANN_API fann_copy(struct fann *ann);
+
+
 /* Function: fann_run
    Will run input through the neural network, returning an array of outputs, the number of which being
    equal to the number of neurons in the output layer.
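As a quick illustration of the fann_copy declaration added above, the following standalone sketch clones a network and runs both copies. The layer sizes, the input values, and the use of the gem's bundled doublefann.h header are assumptions for the example, not part of the diff.

#include <stdio.h>
#include "doublefann.h"   /* bundled with the gem; defines fann_type and includes fann.h */

int main(void)
{
    /* hypothetical 2-3-1 network, just to have something to copy */
    struct fann *ann = fann_create_standard(3, 2, 3, 1);
    struct fann *clone = fann_copy(ann);   /* new in FANN >= 2.2.0 */

    fann_type input[2] = { 0.0, 1.0 };
    printf("original: %f, clone: %f\n", fann_run(ann, input)[0], fann_run(clone, input)[0]);

    /* the clone is independent; only the user-data pointer is shared */
    fann_destroy(clone);
    fann_destroy(ann);
    return 0;
}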
data/ext/ruby_fann/fann_activation.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
data/ext/ruby_fann/fann_cascade.c
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -76,7 +76,7 @@ FANN_EXTERNAL void FANN_API fann_cascadetrain_on_data(struct fann *ann, struct f
 	{
 		printf
 			("Neurons %3d. Current error: %.6f. Total error:%8.4f. Epochs %5d. Bit fail %3d",
-			 i, error, ann->MSE_value, total_epochs, ann->num_bit_fail);
+			 i-1, error, ann->MSE_value, total_epochs, ann->num_bit_fail);
 		if((ann->last_layer-2) != ann->first_layer)
 		{
 			printf(". candidate steepness %.2f. function %s",
@@ -147,6 +147,7 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
 	float backslide_improvement = -1.0e20f;
 	unsigned int i;
 	unsigned int max_epochs = ann->cascade_max_out_epochs;
+	unsigned int min_epochs = ann->cascade_min_out_epochs;
 	unsigned int stagnation = max_epochs;
 
 	/* TODO should perhaps not clear all arrays */
@@ -177,7 +178,11 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
 
 		/* After any significant change, set a new goal and
 		 * allow a new quota of epochs to reach it */
-
+
+		if((target_improvement >= 0 &&
+			(error_improvement > target_improvement || error_improvement < backslide_improvement)) ||
+		(target_improvement < 0 &&
+			(error_improvement < target_improvement || error_improvement > backslide_improvement)))
 		{
 			/*printf("error_improvement=%f, target_improvement=%f, backslide_improvement=%f, stagnation=%d\n", error_improvement, target_improvement, backslide_improvement, stagnation); */
 
@@ -187,7 +192,7 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
 		}
 
 		/* No improvement in allotted period, so quit */
-		if(i >= stagnation)
+		if(i >= stagnation && i >= min_epochs)
 		{
 			return i + 1;
 		}
@@ -199,7 +204,7 @@ int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float des
 float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data)
 {
 	unsigned int i;
-
+
 	fann_reset_MSE(ann);
 
 	for(i = 0; i < data->num_data; i++)
@@ -215,6 +220,11 @@ float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data)
 			fann_update_weights_irpropm(ann, (ann->last_layer - 1)->first_neuron->first_con,
 										ann->total_connections);
 			break;
+		case FANN_TRAIN_SARPROP:
+			fann_update_weights_sarprop(ann, ann->sarprop_epoch, (ann->last_layer - 1)->first_neuron->first_con,
+										ann->total_connections);
+			++(ann->sarprop_epoch);
+			break;
 		case FANN_TRAIN_QUICKPROP:
 			fann_update_weights_quickprop(ann, data->num_data,
 										  (ann->last_layer - 1)->first_neuron->first_con,
@@ -414,9 +424,8 @@ int fann_initialize_candidates(struct fann *ann)
 		}
 	}
 
-	/* Some
-	scale_factor = (float) 2.
-	(double) (1.0f / (double) ann->num_input)));
+	/* Some code to do semi Widrow + Nguyen initialization */
+	scale_factor = (float) (2.0 * pow(0.7f * (float)num_hidden_neurons, 1.0f / (float) ann->num_input));
 	if(scale_factor > 8)
 		scale_factor = 8;
 	else if(scale_factor < 0.5)
@@ -487,6 +496,7 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data)
 	fann_type backslide_cand_score = -1.0e20f;
 	unsigned int i;
 	unsigned int max_epochs = ann->cascade_max_cand_epochs;
+	unsigned int min_epochs = ann->cascade_min_cand_epochs;
 	unsigned int stagnation = max_epochs;
 
 	if(ann->cascade_candidate_scores == NULL)
@@ -527,7 +537,7 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data)
 		}
 
 		/* No improvement in allotted period, so quit */
-		if(i >= stagnation)
+		if(i >= stagnation && i >= min_epochs)
 		{
 #ifdef CASCADE_DEBUG
 			printf("Stagnation with %d epochs, best candidate score %f, real score: %f\n", i + 1,
@@ -661,6 +671,11 @@ void fann_update_candidate_weights(struct fann *ann, unsigned int num_data)
 			fann_update_weights_irpropm(ann, first_cand->first_con,
 										last_cand->last_con + ann->num_output);
 			break;
+		case FANN_TRAIN_SARPROP:
+			/* TODO: increase epoch? */
+			fann_update_weights_sarprop(ann, ann->sarprop_epoch, first_cand->first_con,
+										last_cand->last_con + ann->num_output);
+			break;
 		case FANN_TRAIN_QUICKPROP:
 			fann_update_weights_quickprop(ann, num_data, first_cand->first_con,
 										  last_cand->last_con + ann->num_output);
@@ -755,7 +770,7 @@ fann_type fann_train_candidates_epoch(struct fann *ann, struct fann_train_data *
 	}
 
 	ann->cascade_best_candidate = ann->total_neurons + best_candidate + 1;
-#ifdef
+#ifdef CASCADE_DEBUG
 	printf("Best candidate[%d]: with score %f, real score: %f\n", best_candidate,
 		   ann->MSE_value - best_score, best_score);
 #endif
@@ -973,6 +988,8 @@ FANN_GET_SET(fann_type, cascade_weight_multiplier)
 FANN_GET_SET(fann_type, cascade_candidate_limit)
 FANN_GET_SET(unsigned int, cascade_max_out_epochs)
 FANN_GET_SET(unsigned int, cascade_max_cand_epochs)
+FANN_GET_SET(unsigned int, cascade_min_out_epochs)
+FANN_GET_SET(unsigned int, cascade_min_cand_epochs)
 
 FANN_GET(unsigned int, cascade_activation_functions_count)
 FANN_GET(enum fann_activationfunc_enum *, cascade_activation_functions)
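The repaired initialization line in fann_initialize_candidates() is the semi Widrow + Nguyen factor 2 * (0.7 * num_hidden_neurons)^(1 / num_input), which the following lines clamp to [0.5, 8]. A standalone sketch with made-up sizes shows the magnitude involved; the neuron counts are assumptions for illustration only.

#include <math.h>
#include <stdio.h>

int main(void)
{
    unsigned int num_hidden_neurons = 8;  /* hypothetical candidate count */
    unsigned int num_input = 2;           /* hypothetical input count */

    float scale_factor =
        (float) (2.0 * pow(0.7f * (float) num_hidden_neurons, 1.0f / (float) num_input));

    /* 2 * sqrt(0.7 * 8) = 2 * sqrt(5.6) ~ 4.73, well inside the [0.5, 8] clamp */
    printf("scale_factor = %f\n", scale_factor);
    return 0;
}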
data/ext/ruby_fann/fann_cascade.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -309,6 +309,33 @@ FANN_EXTERNAL void FANN_API fann_set_cascade_max_out_epochs(struct fann *ann,
 													unsigned int cascade_max_out_epochs);
 
 
+/* Function: fann_get_cascade_min_out_epochs
+
+   The minimum out epochs determines the minimum number of epochs the output connections
+   must be trained after adding a new candidate neuron.
+
+   The default min out epochs is 50
+
+   See also:
+   		<fann_set_cascade_min_out_epochs>
+
+   This function appears in FANN >= 2.2.0.
+ */
+FANN_EXTERNAL unsigned int FANN_API fann_get_cascade_min_out_epochs(struct fann *ann);
+
+
+/* Function: fann_set_cascade_min_out_epochs
+
+   Sets the minimum out epochs.
+
+   See also:
+   		<fann_get_cascade_min_out_epochs>
+
+   This function appears in FANN >= 2.2.0.
+ */
+FANN_EXTERNAL void FANN_API fann_set_cascade_min_out_epochs(struct fann *ann,
+													unsigned int cascade_min_out_epochs);
+
 /* Function: fann_get_cascade_max_cand_epochs
 
    The maximum candidate epochs determines the maximum number of epochs the input
@@ -337,6 +364,33 @@ FANN_EXTERNAL void FANN_API fann_set_cascade_max_cand_epochs(struct fann *ann,
 													unsigned int cascade_max_cand_epochs);
 
 
+/* Function: fann_get_cascade_min_cand_epochs
+
+   The minimum candidate epochs determines the minimum number of epochs the input
+   connections to the candidates may be trained before adding a new candidate neuron.
+
+   The default min candidate epochs is 50
+
+   See also:
+   		<fann_set_cascade_min_cand_epochs>
+
+   This function appears in FANN >= 2.2.0.
+ */
+FANN_EXTERNAL unsigned int FANN_API fann_get_cascade_min_cand_epochs(struct fann *ann);
+
+
+/* Function: fann_set_cascade_min_cand_epochs
+
+   Sets the min candidate epochs.
+
+   See also:
+   		<fann_get_cascade_min_cand_epochs>
+
+   This function appears in FANN >= 2.2.0.
+ */
+FANN_EXTERNAL void FANN_API fann_set_cascade_min_cand_epochs(struct fann *ann,
+													unsigned int cascade_min_cand_epochs);
+
 /* Function: fann_get_cascade_num_candidates
 
   The number of candidates used during training (calculated by multiplying <fann_get_cascade_activation_functions_count>,
data/ext/ruby_fann/fann_data.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -77,7 +77,8 @@ enum fann_train_enum
 	FANN_TRAIN_INCREMENTAL = 0,
 	FANN_TRAIN_BATCH,
 	FANN_TRAIN_RPROP,
-	FANN_TRAIN_QUICKPROP
+	FANN_TRAIN_QUICKPROP,
+	FANN_TRAIN_SARPROP
 };
 
 /* Constant: FANN_TRAIN_NAMES
@@ -95,7 +96,8 @@ static char const *const FANN_TRAIN_NAMES[] = {
 	"FANN_TRAIN_INCREMENTAL",
 	"FANN_TRAIN_BATCH",
 	"FANN_TRAIN_RPROP",
-	"FANN_TRAIN_QUICKPROP"
+	"FANN_TRAIN_QUICKPROP",
+	"FANN_TRAIN_SARPROP"
 };
 
 /* Enums: fann_activationfunc_enum
@@ -649,6 +651,14 @@ struct fann
 	 */
 	unsigned int cascade_max_cand_epochs;
 
+	/* Minimum epochs to train the output neurons during cascade training
+	 */
+	unsigned int cascade_min_out_epochs;
+
+	/* Minimum epochs to train the candidate neurons during cascade training
+	 */
+	unsigned int cascade_min_cand_epochs;
+
 	/* An array consisting of the activation functions used when doing
 	 * cascade training.
 	 */
@@ -716,6 +726,21 @@ struct fann
 	/* The initial stepsize */
 	float rprop_delta_zero;
 
+	/* Defines how much the weights are constrained to smaller values at the beginning */
+	float sarprop_weight_decay_shift;
+
+	/* Decides if the stepsize is too big with regard to the error */
+	float sarprop_step_error_threshold_factor;
+
+	/* Defines how much the stepsize is influenced by the error */
+	float sarprop_step_error_shift;
+
+	/* Defines how much the epoch influences weight decay and noise */
+	float sarprop_temperature;
+
+	/* Current training epoch */
+	unsigned int sarprop_epoch;
+
 	/* Used to contain the slope errors used during batch training
 	 * Is allocated during first training session,
 	 * which means that if we do not train, it is never allocated.
data/ext/ruby_fann/fann_error.c
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -174,6 +174,12 @@ void fann_error(struct fann_error *errdat, const enum fann_errno_enum errno_f, .
 		case FANN_E_SCALE_NOT_PRESENT:
 			sprintf(errstr, "Scaling parameters not present.\n");
 			break;
+		case FANN_E_INPUT_NO_MATCH:
+			vsprintf(errstr, "The number of input neurons in the ann (%d) and data (%d) don't match\n", ap);
+			break;
+		case FANN_E_OUTPUT_NO_MATCH:
+			vsprintf(errstr, "The number of output neurons in the ann (%d) and data (%d) don't match\n", ap);
+			break;
 	}
 	va_end(ap);
 
data/ext/ruby_fann/fann_error.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -60,6 +60,8 @@ struct fann_error;
 	FANN_E_TRAIN_DATA_SUBSET - Trying to take subset which is not within the training set
 	FANN_E_INDEX_OUT_OF_BOUND - Index is out of bound
 	FANN_E_SCALE_NOT_PRESENT - Scaling parameters not present
+	FANN_E_INPUT_NO_MATCH - The number of input neurons in the ann and data don't match
+	FANN_E_OUTPUT_NO_MATCH - The number of output neurons in the ann and data don't match
 */
 enum fann_errno_enum
 {
@@ -81,7 +83,9 @@ enum fann_errno_enum
 	FANN_E_CANT_USE_TRAIN_ALG,
 	FANN_E_TRAIN_DATA_SUBSET,
 	FANN_E_INDEX_OUT_OF_BOUND,
-	FANN_E_SCALE_NOT_PRESENT
+	FANN_E_SCALE_NOT_PRESENT,
+	FANN_E_INPUT_NO_MATCH,
+	FANN_E_OUTPUT_NO_MATCH
 };
 
 /* Group: Error Handling */
data/ext/ruby_fann/fann_internal.h
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -90,11 +90,13 @@ void fann_update_weights_batch(struct fann *ann, unsigned int num_data, unsigned
 							   unsigned int past_end);
 void fann_update_weights_irpropm(struct fann *ann, unsigned int first_weight,
 								 unsigned int past_end);
+void fann_update_weights_sarprop(struct fann *ann, unsigned int epoch, unsigned int first_weight,
+								 unsigned int past_end);
 
 void fann_clear_train_arrays(struct fann *ann);
 
-fann_type fann_activation(struct fann * ann, unsigned int activation_function, fann_type steepness,
-						  fann_type value);
+fann_type fann_activation(struct fann * ann, unsigned int activation_function, fann_type steepness,
+						  fann_type value);
 
 fann_type fann_activation_derived(unsigned int activation_function,
 								  fann_type steepness, fann_type value, fann_type sum);
@@ -111,6 +113,7 @@ int fann_train_candidates(struct fann *ann, struct fann_train_data *data);
 fann_type fann_train_candidates_epoch(struct fann *ann, struct fann_train_data *data);
 
 void fann_install_candidate(struct fann *ann);
+int fann_check_input_output_sizes(struct fann *ann, struct fann_train_data *data);
 
 int fann_initialize_candidates(struct fann *ann);
 
@@ -123,6 +126,7 @@ int fann_allocate_scale(struct fann *ann);
 #define fann_min(x, y) (((x) < (y)) ? (x) : (y))
 #define fann_safe_free(x) {if(x) { free(x); x = NULL; }}
 #define fann_clip(x, lo, hi) (((x) < (lo)) ? (lo) : (((x) > (hi)) ? (hi) : (x)))
+#define fann_exp2(x) exp(0.69314718055994530942*(x))
 /*#define fann_clip(x, lo, hi) (x)*/
 
 #define fann_rand(min_value, max_value) (((float)(min_value))+(((float)(max_value)-((float)(min_value)))*rand()/(RAND_MAX+1.0f)))
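The fann_exp2 macro added here computes 2^x through the identity 2^x = e^(x * ln 2); the long constant is ln 2. A one-line sanity check, standalone and illustrative only:

#include <math.h>
#include <stdio.h>

#define fann_exp2(x) exp(0.69314718055994530942*(x))  /* as defined in the hunk above */

int main(void)
{
    printf("%f %f\n", fann_exp2(3.0), pow(2.0, 3.0));  /* both print 8.000000 */
    return 0;
}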
data/ext/ruby_fann/fann_io.c
CHANGED
@@ -1,6 +1,6 @@
 /*
 Fast Artificial Neural Network Library (fann)
-Copyright (C) 2003 Steffen Nissen (
+Copyright (C) 2003-2012 Steffen Nissen (sn@leenissen.dk)
 
 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
@@ -24,6 +24,7 @@
 
 #include "config.h"
 #include "fann.h"
+#include "fann_data.h"
 
 /* Create a network from a configuration file.
 */
@@ -174,7 +175,7 @@ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configurati
 #endif
 
 	/* Save network parameters */
-	fprintf(conf, "num_layers=%
+	fprintf(conf, "num_layers=%d\n", (int)(ann->last_layer - ann->first_layer));
 	fprintf(conf, "learning_rate=%f\n", ann->learning_rate);
 	fprintf(conf, "connection_rate=%f\n", ann->connection_rate);
 	fprintf(conf, "network_type=%u\n", ann->network_type);
@@ -195,7 +196,9 @@ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configurati
 	fprintf(conf, "cascade_candidate_change_fraction=%f\n", ann->cascade_candidate_change_fraction);
 	fprintf(conf, "cascade_candidate_stagnation_epochs=%u\n", ann->cascade_candidate_stagnation_epochs);
 	fprintf(conf, "cascade_max_out_epochs=%u\n", ann->cascade_max_out_epochs);
+	fprintf(conf, "cascade_min_out_epochs=%u\n", ann->cascade_min_out_epochs);
 	fprintf(conf, "cascade_max_cand_epochs=%u\n", ann->cascade_max_cand_epochs);
+	fprintf(conf, "cascade_min_cand_epochs=%u\n", ann->cascade_min_cand_epochs);
 	fprintf(conf, "cascade_num_candidate_groups=%u\n", ann->cascade_num_candidate_groups);
 
 #ifndef FIXEDFANN
@@ -236,7 +239,7 @@ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configurati
 	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
 	{
 		/* the number of neurons in the layers (in the last layer, there is always one too many neurons, because of an unused bias) */
-		fprintf(conf, "%
+		fprintf(conf, "%d ", (int)(layer_it->last_neuron - layer_it->first_neuron));
 	}
 	fprintf(conf, "\n");
 
@@ -316,18 +319,18 @@ int fann_save_internal_fd(struct fann *ann, FILE * conf, const char *configurati
 		if(save_as_fixed)
 		{
 			/* save the connection "(source weight) " */
-			fprintf(conf, "(%
-					connected_neurons[i] - first_neuron,
+			fprintf(conf, "(%d, %d) ",
+					(int)(connected_neurons[i] - first_neuron),
 					(int) floor((weights[i] * fixed_multiplier) + 0.5));
 		}
 		else
 		{
 			/* save the connection "(source weight) " */
-			fprintf(conf, "(%
+			fprintf(conf, "(%d, " FANNPRINTF ") ", (int)(connected_neurons[i] - first_neuron), weights[i]);
 		}
 #else
 		/* save the connection "(source weight) " */
-		fprintf(conf, "(%
+		fprintf(conf, "(%d, " FANNPRINTF ") ", (int)(connected_neurons[i] - first_neuron), weights[i]);
 #endif
 
 	}
@@ -348,12 +351,23 @@ struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file
 	} \
 }
 
+#define fann_skip(name) \
+{ \
+	if(fscanf(conf, name) != 0) \
+	{ \
+		fann_error(NULL, FANN_E_CANT_READ_CONFIG, name, configuration_file); \
+		fann_destroy(ann); \
+		return NULL; \
+	} \
+}
+
 /* INTERNAL FUNCTION
    Create a network from a configuration file descriptor.
 */
 struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 {
 	unsigned int num_layers, layer_size, input_neuron, i, num_connections;
+	unsigned int tmpVal;
 #ifdef FIXEDFANN
 	unsigned int decimal_point, multiplier;
 #else
@@ -373,7 +387,11 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 		return NULL;
 	}
 
-	fread(read_version, 1, strlen(FANN_CONF_VERSION "\n"), conf)
+	if(fread(read_version, 1, strlen(FANN_CONF_VERSION "\n"), conf) == 1)
+	{
+		fann_error(NULL, FANN_E_CANT_READ_CONFIG, "FANN_VERSION", configuration_file);
+		return NULL;
+	}
 
 	/* compares the version information */
 	if(strncmp(read_version, FANN_CONF_VERSION "\n", strlen(FANN_CONF_VERSION "\n")) != 0)
@@ -408,11 +426,11 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 	free(read_version);
 
 #ifdef FIXEDFANN
-
+	fann_scanf("%u", "decimal_point", &decimal_point);
 	multiplier = 1 << decimal_point;
 #endif
 
-
+	fann_scanf("%u", "num_layers", &num_layers);
 
 	ann = fann_allocate_structure(num_layers);
 	if(ann == NULL)
@@ -420,13 +438,17 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 		return NULL;
 	}
 
-
-
-
+	fann_scanf("%f", "learning_rate", &ann->learning_rate);
+	fann_scanf("%f", "connection_rate", &ann->connection_rate);
+	fann_scanf("%u", "network_type", &tmpVal);
+	ann->network_type = (enum fann_nettype_enum)tmpVal;
 	fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
-	fann_scanf("%u", "training_algorithm",
-
-	fann_scanf("%u", "
+	fann_scanf("%u", "training_algorithm", &tmpVal);
+	ann->training_algorithm = (enum fann_train_enum)tmpVal;
+	fann_scanf("%u", "train_error_function", &tmpVal);
+	ann->train_error_function = (enum fann_errorfunc_enum)tmpVal;
+	fann_scanf("%u", "train_stop_function", &tmpVal);
+	ann->train_stop_function = (enum fann_stopfunc_enum)tmpVal;
 	fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
 	fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
 	fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
@@ -439,7 +461,9 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 	fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
 	fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
 	fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
+	fann_scanf("%u", "cascade_min_out_epochs", &ann->cascade_min_out_epochs);
 	fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
+	fann_scanf("%u", "cascade_min_cand_epochs", &ann->cascade_min_cand_epochs);
 	fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);
 
 	fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
@@ -460,10 +484,18 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 		return NULL;
 	}
 
-
+
+	fann_skip("cascade_activation_functions=");
 	for(i = 0; i < ann->cascade_activation_functions_count; i++)
-
-
+	{
+		if(fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]) != 1)
+		{
+			fann_error(NULL, FANN_E_CANT_READ_CONFIG, "cascade_activation_functions", configuration_file);
+			fann_destroy(ann);
+			return NULL;
+		}
+	}
+
 	fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);
 
 	/* reallocate mem */
@@ -477,9 +509,16 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 		return NULL;
 	}
 
-
-	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
-
+	fann_skip("cascade_activation_steepnesses=");
+	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
+	{
+		if(fscanf(conf, FANNSCANF" ", &ann->cascade_activation_steepnesses[i]) != 1)
+		{
+			fann_error(NULL, FANN_E_CANT_READ_CONFIG, "cascade_activation_steepnesses", configuration_file);
+			fann_destroy(ann);
+			return NULL;
+		}
+	}
 
 #ifdef FIXEDFANN
 	ann->decimal_point = decimal_point;
@@ -495,7 +534,7 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 	printf("input\n");
 #endif
 
-
+	fann_skip("layer_sizes=");
 	/* determine how many neurons there should be in each layer */
 	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
 	{
@@ -532,7 +571,7 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 
 #ifndef FIXEDFANN
 #define SCALE_LOAD( what, where ) \
-
+	fann_skip( #what "_" #where "=" ); \
 	for(i = 0; i < ann->num_##where##put; i++) \
 	{ \
 		if(fscanf( conf, "%f ", (float *)&ann->what##_##where[ i ] ) != 1) \
@@ -568,17 +607,18 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 	}
 
 	last_neuron = (ann->last_layer - 1)->last_neuron;
-
+	fann_skip("neurons (num_inputs, activation_function, activation_steepness)=");
 	for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
 	{
 		if(fscanf
-		   (conf, "(%u, %u, " FANNSCANF ") ", &num_connections,
+		   (conf, "(%u, %u, " FANNSCANF ") ", &num_connections, &tmpVal,
 			&neuron_it->activation_steepness) != 3)
 		{
 			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
 			fann_destroy(ann);
 			return NULL;
 		}
+		neuron_it->activation_function = (enum fann_activationfunc_enum)tmpVal;
 		neuron_it->first_con = ann->total_connections;
 		ann->total_connections += num_connections;
 		neuron_it->last_con = ann->total_connections;
@@ -595,7 +635,7 @@ struct fann *fann_create_from_fd(FILE * conf, const char *configuration_file)
 	weights = ann->weights;
 	first_neuron = ann->first_layer->first_neuron;
 
-
+	fann_skip("connections (connected_to_neuron, weight)=");
 	for(i = 0; i < ann->total_connections; i++)
 	{
 		if(fscanf(conf, "(%u, " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)