ruby-fann 1.4.2 → 2.0.1
- checksums.yaml +4 -4
- data/ext/ruby_fann/fann_augment.h +68 -68
- data/ext/ruby_fann/fann_train.h +268 -268
- data/ext/ruby_fann/ruby_fann.c +609 -512
- data/lib/ruby_fann/version.rb +3 -3
- metadata +4 -3
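Most of the churn in ruby_fann.c below is a whitespace/brace-style normalization pass, plus a handful of behavioral fixes (a missing `continue;` for bias neurons, a length check on `desired_outputs`, restored error messages). For orientation, the Ruby-facing API this C extension implements can be exercised as in the minimal sketch below, assembled from the call-seq doc comments in the diff; the XOR data values and training parameters are illustrative, not from the source:

  require 'ruby_fann'

  # Standard three-layer net: 2 inputs, one hidden layer of 3 neurons, 1 output.
  fann = RubyFann::Standard.new(:num_inputs => 2, :hidden_neurons => [3], :num_outputs => 1)

  # Each :inputs sub-array pairs positionally with a :desired_outputs sub-array.
  train = RubyFann::TrainData.new(
    :inputs          => [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]],
    :desired_outputs => [[0.0], [1.0], [1.0], [0.0]])

  # Arguments: train_data, max_epochs, epochs_between_reports, desired_error.
  fann.train_on_data(train, 1000, 100, 0.01)

  fann.run([1.0, 0.0])  # => Array<Float>, one value per output neuron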
data/ext/ruby_fann/ruby_fann.c
CHANGED
@@ -9,96 +9,133 @@ static VALUE m_rb_fann_standard_class;
|
|
9
9
|
static VALUE m_rb_fann_shortcut_class;
|
10
10
|
static VALUE m_rb_fann_train_data_class;
|
11
11
|
|
12
|
-
#define RETURN_FANN_INT(fn)
|
13
|
-
struct fann*
|
14
|
-
Data_Get_Struct
|
15
|
-
return INT2NUM(fn(f));
|
16
|
-
|
17
|
-
#define SET_FANN_INT(attr_name, fann_fn)
|
18
|
-
Check_Type(attr_name, T_FIXNUM);
|
19
|
-
struct fann*
|
20
|
-
Data_Get_Struct(self, struct fann, f); \
|
21
|
-
fann_fn(f, NUM2INT(attr_name));
|
22
|
-
return 0;
|
23
|
-
|
24
|
-
#define RETURN_FANN_UINT(fn)
|
25
|
-
struct fann*
|
26
|
-
Data_Get_Struct
|
27
|
-
return
|
28
|
-
|
29
|
-
#define SET_FANN_UINT(attr_name, fann_fn)
|
30
|
-
Check_Type(attr_name, T_FIXNUM);
|
31
|
-
struct fann*
|
32
|
-
Data_Get_Struct(self, struct fann, f); \
|
33
|
-
fann_fn(f, NUM2UINT(attr_name));
|
34
|
-
return 0;
|
12
|
+
#define RETURN_FANN_INT(fn) \
|
13
|
+
struct fann *f; \
|
14
|
+
Data_Get_Struct(self, struct fann, f); \
|
15
|
+
return INT2NUM(fn(f));
|
16
|
+
|
17
|
+
#define SET_FANN_INT(attr_name, fann_fn) \
|
18
|
+
Check_Type(attr_name, T_FIXNUM); \
|
19
|
+
struct fann *f; \
|
20
|
+
Data_Get_Struct(self, struct fann, f); \
|
21
|
+
fann_fn(f, NUM2INT(attr_name)); \
|
22
|
+
return 0;
|
23
|
+
|
24
|
+
#define RETURN_FANN_UINT(fn) \
|
25
|
+
struct fann *f; \
|
26
|
+
Data_Get_Struct(self, struct fann, f); \
|
27
|
+
return rb_int_new(fn(f));
|
28
|
+
|
29
|
+
#define SET_FANN_UINT(attr_name, fann_fn) \
|
30
|
+
Check_Type(attr_name, T_FIXNUM); \
|
31
|
+
struct fann *f; \
|
32
|
+
Data_Get_Struct(self, struct fann, f); \
|
33
|
+
fann_fn(f, NUM2UINT(attr_name)); \
|
34
|
+
return 0;
|
35
35
|
|
36
36
|
// Converts float return values to a double with same precision, avoids floating point errors.
|
37
|
-
#define RETURN_FANN_FLT(fn)
|
38
|
-
struct fann*
|
39
|
-
Data_Get_Struct
|
40
|
-
char buffy[20];
|
41
|
-
sprintf(buffy, "%0.6g", fn(f));
|
42
|
-
return rb_float_new(atof(buffy));
|
43
|
-
|
44
|
-
#define SET_FANN_FLT(attr_name, fann_fn)
|
45
|
-
Check_Type(attr_name, T_FLOAT);
|
46
|
-
struct fann*
|
47
|
-
Data_Get_Struct(self, struct fann, f); \
|
48
|
-
fann_fn(f, NUM2DBL(attr_name));
|
49
|
-
return self;
|
50
|
-
|
51
|
-
#define RETURN_FANN_DBL(fn)
|
52
|
-
struct fann*
|
53
|
-
Data_Get_Struct
|
54
|
-
return rb_float_new(fn(f));
|
37
|
+
#define RETURN_FANN_FLT(fn) \
|
38
|
+
struct fann *f; \
|
39
|
+
Data_Get_Struct(self, struct fann, f); \
|
40
|
+
char buffy[20]; \
|
41
|
+
sprintf(buffy, "%0.6g", fn(f)); \
|
42
|
+
return rb_float_new(atof(buffy));
|
43
|
+
|
44
|
+
#define SET_FANN_FLT(attr_name, fann_fn) \
|
45
|
+
Check_Type(attr_name, T_FLOAT); \
|
46
|
+
struct fann *f; \
|
47
|
+
Data_Get_Struct(self, struct fann, f); \
|
48
|
+
fann_fn(f, NUM2DBL(attr_name)); \
|
49
|
+
return self;
|
50
|
+
|
51
|
+
#define RETURN_FANN_DBL(fn) \
|
52
|
+
struct fann *f; \
|
53
|
+
Data_Get_Struct(self, struct fann, f); \
|
54
|
+
return rb_float_new(fn(f));
|
55
55
|
|
56
56
|
#define SET_FANN_DBL SET_FANN_FLT
|
57
57
|
|
58
58
|
// Convert ruby symbol to corresponding FANN enum type for activation function:
|
59
59
|
enum fann_activationfunc_enum sym_to_activation_function(VALUE activation_func)
|
60
60
|
{
|
61
|
-
ID id=SYM2ID(activation_func);
|
61
|
+
ID id = SYM2ID(activation_func);
|
62
62
|
enum fann_activationfunc_enum activation_function;
|
63
|
-
if(id==rb_intern("linear"))
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
63
|
+
if (id == rb_intern("linear"))
|
64
|
+
{
|
65
|
+
activation_function = FANN_LINEAR;
|
66
|
+
}
|
67
|
+
else if (id == rb_intern("threshold"))
|
68
|
+
{
|
69
|
+
activation_function = FANN_THRESHOLD;
|
70
|
+
}
|
71
|
+
else if (id == rb_intern("threshold_symmetric"))
|
72
|
+
{
|
73
|
+
activation_function = FANN_THRESHOLD_SYMMETRIC;
|
74
|
+
}
|
75
|
+
else if (id == rb_intern("sigmoid"))
|
76
|
+
{
|
77
|
+
activation_function = FANN_SIGMOID;
|
78
|
+
}
|
79
|
+
else if (id == rb_intern("sigmoid_stepwise"))
|
80
|
+
{
|
81
|
+
activation_function = FANN_SIGMOID_STEPWISE;
|
82
|
+
}
|
83
|
+
else if (id == rb_intern("sigmoid_symmetric"))
|
84
|
+
{
|
85
|
+
activation_function = FANN_SIGMOID_SYMMETRIC;
|
86
|
+
}
|
87
|
+
else if (id == rb_intern("sigmoid_symmetric_stepwise"))
|
88
|
+
{
|
89
|
+
activation_function = FANN_SIGMOID_SYMMETRIC_STEPWISE;
|
90
|
+
}
|
91
|
+
else if (id == rb_intern("gaussian"))
|
92
|
+
{
|
93
|
+
activation_function = FANN_GAUSSIAN;
|
94
|
+
}
|
95
|
+
else if (id == rb_intern("gaussian_symmetric"))
|
96
|
+
{
|
97
|
+
activation_function = FANN_GAUSSIAN_SYMMETRIC;
|
98
|
+
}
|
99
|
+
else if (id == rb_intern("gaussian_stepwise"))
|
100
|
+
{
|
101
|
+
activation_function = FANN_GAUSSIAN_STEPWISE;
|
102
|
+
}
|
103
|
+
else if (id == rb_intern("elliot"))
|
104
|
+
{
|
105
|
+
activation_function = FANN_ELLIOT;
|
106
|
+
}
|
107
|
+
else if (id == rb_intern("elliot_symmetric"))
|
108
|
+
{
|
109
|
+
activation_function = FANN_ELLIOT_SYMMETRIC;
|
110
|
+
}
|
111
|
+
else if (id == rb_intern("linear_piece"))
|
112
|
+
{
|
113
|
+
activation_function = FANN_LINEAR_PIECE;
|
114
|
+
}
|
115
|
+
else if (id == rb_intern("linear_piece_symmetric"))
|
116
|
+
{
|
117
|
+
activation_function = FANN_LINEAR_PIECE_SYMMETRIC;
|
118
|
+
}
|
119
|
+
else if (id == rb_intern("sin_symmetric"))
|
120
|
+
{
|
121
|
+
activation_function = FANN_SIN_SYMMETRIC;
|
122
|
+
}
|
123
|
+
else if (id == rb_intern("cos_symmetric"))
|
124
|
+
{
|
125
|
+
activation_function = FANN_COS_SYMMETRIC;
|
126
|
+
}
|
127
|
+
else if (id == rb_intern("sin"))
|
128
|
+
{
|
129
|
+
activation_function = FANN_SIN;
|
130
|
+
}
|
131
|
+
else if (id == rb_intern("cos"))
|
132
|
+
{
|
133
|
+
activation_function = FANN_COS;
|
134
|
+
}
|
135
|
+
else
|
136
|
+
{
|
100
137
|
rb_raise(rb_eRuntimeError, "Unrecognized activation function: [%s]", rb_id2name(SYM2ID(activation_func)));
|
101
|
-
}
|
138
|
+
}
|
102
139
|
return activation_function;
|
103
140
|
}
|
104
141
|
|
@@ -106,142 +143,177 @@ enum fann_activationfunc_enum sym_to_activation_function(VALUE activation_func)
|
|
106
143
|
VALUE activation_function_to_sym(enum fann_activationfunc_enum fn)
|
107
144
|
{
|
108
145
|
VALUE activation_function;
|
109
|
-
|
110
|
-
if(fn==FANN_LINEAR)
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
146
|
+
|
147
|
+
if (fn == FANN_LINEAR)
|
148
|
+
{
|
149
|
+
activation_function = ID2SYM(rb_intern("linear"));
|
150
|
+
}
|
151
|
+
else if (fn == FANN_THRESHOLD)
|
152
|
+
{
|
153
|
+
activation_function = ID2SYM(rb_intern("threshold"));
|
154
|
+
}
|
155
|
+
else if (fn == FANN_THRESHOLD_SYMMETRIC)
|
156
|
+
{
|
157
|
+
activation_function = ID2SYM(rb_intern("threshold_symmetric"));
|
158
|
+
}
|
159
|
+
else if (fn == FANN_SIGMOID)
|
160
|
+
{
|
161
|
+
activation_function = ID2SYM(rb_intern("sigmoid"));
|
162
|
+
}
|
163
|
+
else if (fn == FANN_SIGMOID_STEPWISE)
|
164
|
+
{
|
165
|
+
activation_function = ID2SYM(rb_intern("sigmoid_stepwise"));
|
166
|
+
}
|
167
|
+
else if (fn == FANN_SIGMOID_SYMMETRIC)
|
168
|
+
{
|
169
|
+
activation_function = ID2SYM(rb_intern("sigmoid_symmetric"));
|
170
|
+
}
|
171
|
+
else if (fn == FANN_SIGMOID_SYMMETRIC_STEPWISE)
|
172
|
+
{
|
173
|
+
activation_function = ID2SYM(rb_intern("sigmoid_symmetric_stepwise"));
|
174
|
+
}
|
175
|
+
else if (fn == FANN_GAUSSIAN)
|
176
|
+
{
|
177
|
+
activation_function = ID2SYM(rb_intern("gaussian"));
|
178
|
+
}
|
179
|
+
else if (fn == FANN_GAUSSIAN_SYMMETRIC)
|
180
|
+
{
|
181
|
+
activation_function = ID2SYM(rb_intern("gaussian_symmetric"));
|
182
|
+
}
|
183
|
+
else if (fn == FANN_GAUSSIAN_STEPWISE)
|
184
|
+
{
|
185
|
+
activation_function = ID2SYM(rb_intern("gaussian_stepwise"));
|
186
|
+
}
|
187
|
+
else if (fn == FANN_ELLIOT)
|
188
|
+
{
|
189
|
+
activation_function = ID2SYM(rb_intern("elliot"));
|
190
|
+
}
|
191
|
+
else if (fn == FANN_ELLIOT_SYMMETRIC)
|
192
|
+
{
|
193
|
+
activation_function = ID2SYM(rb_intern("elliot_symmetric"));
|
194
|
+
}
|
195
|
+
else if (fn == FANN_LINEAR_PIECE)
|
196
|
+
{
|
197
|
+
activation_function = ID2SYM(rb_intern("linear_piece"));
|
198
|
+
}
|
199
|
+
else if (fn == FANN_LINEAR_PIECE_SYMMETRIC)
|
200
|
+
{
|
201
|
+
activation_function = ID2SYM(rb_intern("linear_piece_symmetric"));
|
202
|
+
}
|
203
|
+
else if (fn == FANN_SIN_SYMMETRIC)
|
204
|
+
{
|
205
|
+
activation_function = ID2SYM(rb_intern("sin_symmetric"));
|
206
|
+
}
|
207
|
+
else if (fn == FANN_COS_SYMMETRIC)
|
208
|
+
{
|
209
|
+
activation_function = ID2SYM(rb_intern("cos_symmetric"));
|
210
|
+
}
|
211
|
+
else if (fn == FANN_SIN)
|
212
|
+
{
|
213
|
+
activation_function = ID2SYM(rb_intern("sin"));
|
214
|
+
}
|
215
|
+
else if (fn == FANN_COS)
|
216
|
+
{
|
217
|
+
activation_function = ID2SYM(rb_intern("cos"));
|
218
|
+
}
|
219
|
+
else
|
220
|
+
{
|
147
221
|
rb_raise(rb_eRuntimeError, "Unrecognized activation function: [%d]", fn);
|
148
|
-
}
|
222
|
+
}
|
149
223
|
return activation_function;
|
150
224
|
}
|
151
225
|
|
152
|
-
|
153
226
|
// Unused for now:
|
154
|
-
static void fann_mark
|
227
|
+
static void fann_mark(struct fann *ann) {}
|
155
228
|
|
156
229
|
// #define DEBUG 1
|
157
230
|
|
158
231
|
// Free memory associated with FANN:
|
159
|
-
static void fann_free
|
232
|
+
static void fann_free(struct fann *ann)
|
160
233
|
{
|
161
|
-
|
234
|
+
fann_destroy(ann);
|
162
235
|
// ("Destroyed FANN network [%d].\n", ann);
|
163
236
|
}
|
164
237
|
|
165
238
|
// Free memory associated with FANN Training data:
|
166
|
-
static void fann_training_data_free
|
239
|
+
static void fann_training_data_free(struct fann_train_data *train_data)
|
167
240
|
{
|
168
|
-
|
241
|
+
fann_destroy_train(train_data);
|
169
242
|
// printf("Destroyed Training data [%d].\n", train_data);
|
170
243
|
}
|
171
244
|
|
172
|
-
// Create wrapper, but don't allocate anything...do that in
|
245
|
+
// Create wrapper, but don't allocate anything...do that in
|
173
246
|
// initialize, so we can construct with args:
|
174
|
-
static VALUE fann_allocate
|
247
|
+
static VALUE fann_allocate(VALUE klass)
|
175
248
|
{
|
176
|
-
return Data_Wrap_Struct
|
249
|
+
return Data_Wrap_Struct(klass, fann_mark, fann_free, 0);
|
177
250
|
}
|
178
251
|
|
179
|
-
// Create wrapper, but don't allocate annything...do that in
|
252
|
+
// Create wrapper, but don't allocate annything...do that in
|
180
253
|
// initialize, so we can construct with args:
|
181
|
-
static VALUE fann_training_data_allocate
|
254
|
+
static VALUE fann_training_data_allocate(VALUE klass)
|
182
255
|
{
|
183
|
-
return Data_Wrap_Struct
|
256
|
+
return Data_Wrap_Struct(klass, fann_mark, fann_training_data_free, 0);
|
184
257
|
}
|
185
258
|
|
186
|
-
|
187
|
-
// static VALUE invoke_training_callback(VALUE self)
|
259
|
+
// static VALUE invoke_training_callback(VALUE self)
|
188
260
|
// {
|
189
261
|
// VALUE callback = rb_funcall(self, rb_intern("training_callback"), 0);
|
190
262
|
// return callback;
|
191
263
|
// }
|
192
264
|
|
193
|
-
// static int FANN_API internal_callback(struct fann *ann, struct fann_train_data *train,
|
265
|
+
// static int FANN_API internal_callback(struct fann *ann, struct fann_train_data *train,
|
194
266
|
// unsigned int max_epochs, unsigned int epochs_between_reports, float desired_error, unsigned int epochs)
|
195
267
|
|
196
268
|
static int FANN_API fann_training_callback(struct fann *ann, struct fann_train_data *train,
|
197
|
-
|
198
|
-
|
269
|
+
unsigned int max_epochs, unsigned int epochs_between_reports,
|
270
|
+
float desired_error, unsigned int epochs)
|
199
271
|
{
|
200
272
|
VALUE self = (VALUE)fann_get_user_data(ann);
|
201
273
|
VALUE args = rb_hash_new();
|
202
|
-
|
274
|
+
|
203
275
|
// Set attributes on hash & push on array:
|
204
276
|
VALUE max_epochs_sym = ID2SYM(rb_intern("max_epochs"));
|
205
277
|
VALUE epochs_between_reports_sym = ID2SYM(rb_intern("epochs_between_reports"));
|
206
278
|
VALUE desired_error_sym = ID2SYM(rb_intern("desired_error"));
|
207
279
|
VALUE epochs_sym = ID2SYM(rb_intern("epochs"));
|
208
|
-
|
280
|
+
|
209
281
|
rb_hash_aset(args, max_epochs_sym, INT2NUM(max_epochs));
|
210
282
|
rb_hash_aset(args, epochs_between_reports_sym, INT2NUM(epochs_between_reports));
|
211
283
|
rb_hash_aset(args, desired_error_sym, rb_float_new(desired_error));
|
212
284
|
rb_hash_aset(args, epochs_sym, INT2NUM(epochs));
|
213
|
-
|
285
|
+
|
214
286
|
VALUE callback = rb_funcall(self, rb_intern("training_callback"), 1, args);
|
215
|
-
|
216
|
-
if (TYPE(callback)!=T_FIXNUM)
|
287
|
+
|
288
|
+
if (TYPE(callback) != T_FIXNUM)
|
217
289
|
{
|
218
|
-
rb_raise
|
290
|
+
rb_raise(rb_eRuntimeError, "Callback method must return an integer (-1 to stop training).");
|
219
291
|
}
|
220
292
|
|
221
|
-
int status = NUM2INT(callback);
|
222
|
-
if (status
|
293
|
+
int status = NUM2INT(callback);
|
294
|
+
if (status == -1)
|
223
295
|
{
|
224
296
|
printf("Callback method returned -1; training will stop.\n");
|
225
297
|
}
|
226
|
-
|
298
|
+
|
227
299
|
return status;
|
228
300
|
}
|
229
301
|
|
230
|
-
/** call-seq: new(hash) -> new ruby-fann neural network object
|
302
|
+
/** call-seq: new(hash) -> new ruby-fann neural network object
|
231
303
|
|
232
304
|
Initialization routine for both standard, shortcut & filename forms of FANN:
|
233
305
|
|
234
306
|
Standard Initialization:
|
235
307
|
RubyFann::Standard.new(:num_inputs=>1, :hidden_neurons=>[3, 4, 3, 4], :num_outputs=>1)
|
236
|
-
|
308
|
+
|
237
309
|
Shortcut Initialization (e.g., for use in cascade training):
|
238
|
-
RubyFann::Shortcut.new(:num_inputs=>5, :num_outputs=>1)
|
239
|
-
|
310
|
+
RubyFann::Shortcut.new(:num_inputs=>5, :num_outputs=>1)
|
311
|
+
|
240
312
|
File Initialization
|
241
|
-
RubyFann::Standard.new(:filename=>'xor_float.net')
|
242
|
-
|
243
|
-
|
244
|
-
|
313
|
+
RubyFann::Standard.new(:filename=>'xor_float.net')
|
314
|
+
|
315
|
+
|
316
|
+
|
245
317
|
*/
|
246
318
|
static VALUE fann_initialize(VALUE self, VALUE hash)
|
247
319
|
{
|
@@ -250,24 +322,24 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
|
|
250
322
|
VALUE num_inputs = rb_hash_aref(hash, ID2SYM(rb_intern("num_inputs")));
|
251
323
|
VALUE num_outputs = rb_hash_aref(hash, ID2SYM(rb_intern("num_outputs")));
|
252
324
|
VALUE hidden_neurons = rb_hash_aref(hash, ID2SYM(rb_intern("hidden_neurons")));
|
253
|
-
|
254
|
-
struct fann*
|
255
|
-
if (TYPE(filename)==T_STRING)
|
325
|
+
// printf("initializing\n\n\n");
|
326
|
+
struct fann *ann;
|
327
|
+
if (TYPE(filename) == T_STRING)
|
256
328
|
{
|
257
329
|
// Initialize with file:
|
258
330
|
// train_data = fann_read_train_from_file(StringValuePtr(filename));
|
259
331
|
// DATA_PTR(self) = train_data;
|
260
332
|
ann = fann_create_from_file(StringValuePtr(filename));
|
261
|
-
|
262
|
-
}
|
263
|
-
else if(rb_obj_is_kind_of(self, m_rb_fann_shortcut_class))
|
333
|
+
// printf("Created RubyFann::Standard [%d] from file [%s].\n", ann, StringValuePtr(filename));
|
334
|
+
}
|
335
|
+
else if (rb_obj_is_kind_of(self, m_rb_fann_shortcut_class))
|
264
336
|
{
|
265
337
|
// Initialize as shortcut, suitable for cascade training:
|
266
|
-
//ann = fann_create_shortcut_array(num_layers, layers);
|
338
|
+
// ann = fann_create_shortcut_array(num_layers, layers);
|
267
339
|
Check_Type(num_inputs, T_FIXNUM);
|
268
340
|
Check_Type(num_outputs, T_FIXNUM);
|
269
|
-
|
270
|
-
ann = fann_create_shortcut(2, NUM2INT(num_inputs), NUM2INT(num_outputs));
|
341
|
+
|
342
|
+
ann = fann_create_shortcut(2, NUM2INT(num_inputs), NUM2INT(num_outputs));
|
271
343
|
// printf("Created RubyFann::Shortcut [%d].\n", ann);
|
272
344
|
}
|
273
345
|
else
|
@@ -276,32 +348,31 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
|
|
276
348
|
Check_Type(num_inputs, T_FIXNUM);
|
277
349
|
Check_Type(hidden_neurons, T_ARRAY);
|
278
350
|
Check_Type(num_outputs, T_FIXNUM);
|
279
|
-
|
351
|
+
|
280
352
|
// Initialize layers:
|
281
|
-
unsigned int num_layers=
|
353
|
+
unsigned int num_layers = RARRAY_LEN(hidden_neurons) + 2;
|
282
354
|
unsigned int layers[num_layers];
|
283
355
|
|
284
356
|
// Input:
|
285
|
-
layers[0]=NUM2INT(num_inputs);
|
357
|
+
layers[0] = NUM2INT(num_inputs);
|
286
358
|
// Output:
|
287
|
-
layers[num_layers-1]=NUM2INT(num_outputs);
|
359
|
+
layers[num_layers - 1] = NUM2INT(num_outputs);
|
288
360
|
// Hidden:
|
289
361
|
unsigned int i;
|
290
|
-
for (i=1; i<=num_layers-2; i++)
|
291
|
-
|
362
|
+
for (i = 1; i <= num_layers - 2; i++)
|
363
|
+
{
|
364
|
+
layers[i] = NUM2INT(RARRAY_PTR(hidden_neurons)[i - 1]);
|
292
365
|
}
|
293
|
-
|
294
|
-
|
295
|
-
// printf("Created RubyFann::Standard [%d].\n", ann);
|
296
|
-
}
|
366
|
+
ann = fann_create_standard_array(num_layers, layers);
|
367
|
+
}
|
297
368
|
|
298
369
|
DATA_PTR(self) = ann;
|
299
|
-
|
370
|
+
|
300
371
|
// printf("Checking for callback...");
|
301
|
-
|
302
|
-
//int callback = rb_protect(invoke_training_callback, (self), &status);
|
303
|
-
//
|
304
|
-
if(rb_respond_to(self, rb_intern("training_callback")))
|
372
|
+
|
373
|
+
// int callback = rb_protect(invoke_training_callback, (self), &status);
|
374
|
+
// VALUE callback = rb_funcall(DATA_PTR(self), "training_callback", 0);
|
375
|
+
if (rb_respond_to(self, rb_intern("training_callback")))
|
305
376
|
{
|
306
377
|
fann_set_callback(ann, &fann_training_callback);
|
307
378
|
fann_set_user_data(ann, self);
|
@@ -311,14 +382,14 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
|
|
311
382
|
{
|
312
383
|
// printf("none found.\n");
|
313
384
|
}
|
314
|
-
|
315
|
-
return (VALUE)ann;
|
385
|
+
|
386
|
+
return (VALUE)ann;
|
316
387
|
}
|
317
388
|
|
318
389
|
/** call-seq: new(hash) -> new ruby-fann training data object (RubyFann::TrainData)
|
319
|
-
|
390
|
+
|
320
391
|
Initialize in one of the following forms:
|
321
|
-
|
392
|
+
|
322
393
|
# This is a flat file with training data as described in FANN docs.
|
323
394
|
RubyFann::TrainData.new(:filename => 'path/to/training_file.train')
|
324
395
|
OR
|
@@ -327,75 +398,79 @@ static VALUE fann_initialize(VALUE self, VALUE hash)
|
|
327
398
|
# All sub-arrays on inputs should be of same length
|
328
399
|
# All sub-arrays on desired_outputs should be of same length
|
329
400
|
# Sub-arrays on inputs & desired_outputs can be different sizes from one another
|
330
|
-
RubyFann::TrainData.new(:inputs=>[[0.2, 0.3, 0.4], [0.8, 0.9, 0.7]], :desired_outputs=>[[3.14], [6.33]])
|
401
|
+
RubyFann::TrainData.new(:inputs=>[[0.2, 0.3, 0.4], [0.8, 0.9, 0.7]], :desired_outputs=>[[3.14], [6.33]])
|
331
402
|
*/
|
332
403
|
static VALUE fann_train_data_initialize(VALUE self, VALUE hash)
|
333
404
|
{
|
334
|
-
struct fann_train_data*
|
405
|
+
struct fann_train_data *train_data;
|
335
406
|
Check_Type(hash, T_HASH);
|
336
|
-
|
407
|
+
|
337
408
|
VALUE filename = rb_hash_aref(hash, ID2SYM(rb_intern("filename")));
|
338
409
|
VALUE inputs = rb_hash_aref(hash, ID2SYM(rb_intern("inputs")));
|
339
410
|
VALUE desired_outputs = rb_hash_aref(hash, ID2SYM(rb_intern("desired_outputs")));
|
340
411
|
|
341
|
-
if (TYPE(filename)==T_STRING)
|
412
|
+
if (TYPE(filename) == T_STRING)
|
342
413
|
{
|
343
414
|
train_data = fann_read_train_from_file(StringValuePtr(filename));
|
344
415
|
DATA_PTR(self) = train_data;
|
345
|
-
}
|
346
|
-
else if (TYPE(inputs)==T_ARRAY)
|
416
|
+
}
|
417
|
+
else if (TYPE(inputs) == T_ARRAY)
|
347
418
|
{
|
348
|
-
if (TYPE(desired_outputs)!=T_ARRAY)
|
419
|
+
if (TYPE(desired_outputs) != T_ARRAY)
|
349
420
|
{
|
350
|
-
rb_raise
|
421
|
+
rb_raise(rb_eRuntimeError, "[desired_outputs] must be present when [inputs] used.");
|
351
422
|
}
|
352
423
|
|
353
424
|
if (RARRAY_LEN(inputs) < 1)
|
354
425
|
{
|
355
|
-
rb_raise
|
426
|
+
rb_raise(rb_eRuntimeError, "[inputs] must contain at least one value.");
|
427
|
+
}
|
428
|
+
|
429
|
+
if (RARRAY_LEN(desired_outputs) < 1)
|
430
|
+
{
|
431
|
+
rb_raise(rb_eRuntimeError, "[desired_outputs] must contain at least one value.");
|
356
432
|
}
|
357
433
|
|
358
434
|
// The data is here, start constructing:
|
359
|
-
if(RARRAY_LEN(inputs) != RARRAY_LEN(desired_outputs))
|
435
|
+
if (RARRAY_LEN(inputs) != RARRAY_LEN(desired_outputs))
|
360
436
|
{
|
361
|
-
rb_raise
|
362
|
-
rb_eRuntimeError,
|
363
|
-
"Number of inputs must match number of outputs: (%d != %d)",
|
364
|
-
(int)RARRAY_LEN(inputs),
|
437
|
+
rb_raise(
|
438
|
+
rb_eRuntimeError,
|
439
|
+
"Number of inputs must match number of outputs: (%d != %d)",
|
440
|
+
(int)RARRAY_LEN(inputs),
|
365
441
|
(int)RARRAY_LEN(desired_outputs));
|
366
442
|
}
|
367
443
|
|
368
|
-
train_data = fann_create_train_from_rb_ary(inputs, desired_outputs);
|
369
|
-
DATA_PTR(self) = train_data;
|
370
|
-
}
|
371
|
-
else
|
444
|
+
train_data = fann_create_train_from_rb_ary(inputs, desired_outputs);
|
445
|
+
DATA_PTR(self) = train_data;
|
446
|
+
}
|
447
|
+
else
|
372
448
|
{
|
373
|
-
rb_raise
|
449
|
+
rb_raise(rb_eRuntimeError, "Must construct with a filename(string) or inputs/desired_outputs(arrays). All args passed via hash with symbols as keys.");
|
374
450
|
}
|
375
|
-
|
451
|
+
|
376
452
|
return (VALUE)train_data;
|
377
453
|
}
|
378
454
|
|
379
|
-
|
380
455
|
/** call-seq: save(filename)
|
381
456
|
|
382
|
-
Save to given filename
|
457
|
+
Save to given filename
|
383
458
|
*/
|
384
459
|
static VALUE training_save(VALUE self, VALUE filename)
|
385
460
|
{
|
386
|
-
Check_Type(filename, T_STRING);
|
387
|
-
struct fann_train_data*
|
388
|
-
Data_Get_Struct
|
461
|
+
Check_Type(filename, T_STRING);
|
462
|
+
struct fann_train_data *t;
|
463
|
+
Data_Get_Struct(self, struct fann_train_data, t);
|
389
464
|
fann_save_train(t, StringValuePtr(filename));
|
390
|
-
return self;
|
465
|
+
return self;
|
391
466
|
}
|
392
467
|
|
393
|
-
/** Shuffles training data, randomizing the order.
|
468
|
+
/** Shuffles training data, randomizing the order.
|
394
469
|
This is recommended for incremental training, while it will have no influence during batch training.*/
|
395
470
|
static VALUE shuffle(VALUE self)
|
396
471
|
{
|
397
|
-
struct fann_train_data*
|
398
|
-
Data_Get_Struct
|
472
|
+
struct fann_train_data *t;
|
473
|
+
Data_Get_Struct(self, struct fann_train_data, t);
|
399
474
|
fann_shuffle_train_data(t);
|
400
475
|
return self;
|
401
476
|
}
|
@@ -403,27 +478,27 @@ static VALUE shuffle(VALUE self)
|
|
403
478
|
/** Length of training data*/
|
404
479
|
static VALUE length_train_data(VALUE self)
|
405
480
|
{
|
406
|
-
struct fann_train_data*
|
407
|
-
Data_Get_Struct
|
408
|
-
return(UINT2NUM(fann_length_train_data(t)));
|
481
|
+
struct fann_train_data *t;
|
482
|
+
Data_Get_Struct(self, struct fann_train_data, t);
|
483
|
+
return (UINT2NUM(fann_length_train_data(t)));
|
409
484
|
return self;
|
410
485
|
}
|
411
486
|
|
412
487
|
/** call-seq: set_activation_function(activation_func, layer, neuron)
|
413
488
|
|
414
|
-
Set the activation function for neuron number *neuron* in layer number *layer*,
|
489
|
+
Set the activation function for neuron number *neuron* in layer number *layer*,
|
415
490
|
counting the input layer as layer 0. activation_func must be one of the following symbols:
|
416
|
-
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
417
|
-
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
418
|
-
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
491
|
+
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
492
|
+
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
493
|
+
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
419
494
|
:sin, :cos*/
|
420
495
|
static VALUE set_activation_function(VALUE self, VALUE activation_func, VALUE layer, VALUE neuron)
|
421
496
|
{
|
422
497
|
Check_Type(activation_func, T_SYMBOL);
|
423
498
|
Check_Type(layer, T_FIXNUM);
|
424
499
|
Check_Type(neuron, T_FIXNUM);
|
425
|
-
|
426
|
-
struct fann*
|
500
|
+
|
501
|
+
struct fann *f;
|
427
502
|
Data_Get_Struct(self, struct fann, f);
|
428
503
|
fann_set_activation_function(f, sym_to_activation_function(activation_func), NUM2INT(layer), NUM2INT(neuron));
|
429
504
|
return self;
|
@@ -432,14 +507,14 @@ static VALUE set_activation_function(VALUE self, VALUE activation_func, VALUE la
|
|
432
507
|
/** call-seq: set_activation_function_hidden(activation_func)
|
433
508
|
|
434
509
|
Set the activation function for all of the hidden layers. activation_func must be one of the following symbols:
|
435
|
-
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
436
|
-
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
437
|
-
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
510
|
+
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
511
|
+
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
512
|
+
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
438
513
|
:sin, :cos*/
|
439
514
|
static VALUE set_activation_function_hidden(VALUE self, VALUE activation_func)
|
440
515
|
{
|
441
516
|
Check_Type(activation_func, T_SYMBOL);
|
442
|
-
struct fann*
|
517
|
+
struct fann *f;
|
443
518
|
Data_Get_Struct(self, struct fann, f);
|
444
519
|
fann_set_activation_function_hidden(f, sym_to_activation_function(activation_func));
|
445
520
|
return self;
|
@@ -447,37 +522,37 @@ static VALUE set_activation_function_hidden(VALUE self, VALUE activation_func)
|
|
447
522
|
|
448
523
|
/** call-seq: set_activation_function_layer(activation_func, layer)
|
449
524
|
|
450
|
-
Set the activation function for all the neurons in the layer number *layer*,
|
525
|
+
Set the activation function for all the neurons in the layer number *layer*,
|
451
526
|
counting the input layer as layer 0. activation_func must be one of the following symbols:
|
452
|
-
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
453
|
-
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
454
|
-
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
527
|
+
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
528
|
+
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
529
|
+
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
455
530
|
:sin, :cos
|
456
|
-
|
531
|
+
|
457
532
|
It is not possible to set activation functions for the neurons in the input layer.
|
458
|
-
*/
|
533
|
+
*/
|
459
534
|
static VALUE set_activation_function_layer(VALUE self, VALUE activation_func, VALUE layer)
|
460
535
|
{
|
461
536
|
Check_Type(activation_func, T_SYMBOL);
|
462
537
|
Check_Type(layer, T_FIXNUM);
|
463
|
-
struct fann*
|
538
|
+
struct fann *f;
|
464
539
|
Data_Get_Struct(self, struct fann, f);
|
465
540
|
fann_set_activation_function_layer(f, sym_to_activation_function(activation_func), NUM2INT(layer));
|
466
541
|
return self;
|
467
542
|
}
|
468
543
|
|
469
|
-
/** call-seq: get_activation_function(layer) -> return value
|
470
|
-
|
471
|
-
Get the activation function for neuron number *neuron* in layer number *layer*,
|
472
|
-
counting the input layer as layer 0.
|
544
|
+
/** call-seq: get_activation_function(layer) -> return value
|
473
545
|
|
474
|
-
|
546
|
+
Get the activation function for neuron number *neuron* in layer number *layer*,
|
547
|
+
counting the input layer as layer 0.
|
548
|
+
|
549
|
+
It is not possible to get activation functions for the neurons in the input layer.
|
475
550
|
*/
|
476
551
|
static VALUE get_activation_function(VALUE self, VALUE layer, VALUE neuron)
|
477
552
|
{
|
478
553
|
Check_Type(layer, T_FIXNUM);
|
479
554
|
Check_Type(neuron, T_FIXNUM);
|
480
|
-
struct fann*
|
555
|
+
struct fann *f;
|
481
556
|
Data_Get_Struct(self, struct fann, f);
|
482
557
|
fann_type val = fann_get_activation_function(f, NUM2INT(layer), NUM2INT(neuron));
|
483
558
|
return activation_function_to_sym(val);
|
@@ -486,29 +561,29 @@ static VALUE get_activation_function(VALUE self, VALUE layer, VALUE neuron)
|
|
486
561
|
/** call-seq: set_activation_function_output(activation_func)
|
487
562
|
|
488
563
|
Set the activation function for the output layer. activation_func must be one of the following symbols:
|
489
|
-
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
490
|
-
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
491
|
-
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
564
|
+
:linear, :threshold, :threshold_symmetric, :sigmoid, :sigmoid_stepwise, :sigmoid_symmetric,
|
565
|
+
:sigmoid_symmetric_stepwise, :gaussian, :gaussian_symmetric, :gaussian_stepwise, :elliot,
|
566
|
+
:elliot_symmetric, :linear_piece, :linear_piece_symmetric, :sin_symmetric, :cos_symmetric,
|
492
567
|
:sin, :cos*/
|
493
568
|
|
494
569
|
static VALUE set_activation_function_output(VALUE self, VALUE activation_func)
|
495
570
|
{
|
496
571
|
Check_Type(activation_func, T_SYMBOL);
|
497
|
-
struct fann*
|
572
|
+
struct fann *f;
|
498
573
|
Data_Get_Struct(self, struct fann, f);
|
499
574
|
fann_set_activation_function_output(f, sym_to_activation_function(activation_func));
|
500
575
|
return self;
|
501
576
|
}
|
502
577
|
|
503
|
-
/** call-seq: get_activation_steepness(layer, neuron) -> return value
|
504
|
-
|
505
|
-
Get the activation steepness for neuron number neuron in layer number layer, counting the input layer as layer 0.
|
578
|
+
/** call-seq: get_activation_steepness(layer, neuron) -> return value
|
579
|
+
|
580
|
+
Get the activation steepness for neuron number neuron in layer number layer, counting the input layer as layer 0.
|
506
581
|
*/
|
507
582
|
static VALUE get_activation_steepness(VALUE self, VALUE layer, VALUE neuron)
|
508
583
|
{
|
509
584
|
Check_Type(layer, T_FIXNUM);
|
510
585
|
Check_Type(neuron, T_FIXNUM);
|
511
|
-
struct fann*
|
586
|
+
struct fann *f;
|
512
587
|
Data_Get_Struct(self, struct fann, f);
|
513
588
|
fann_type val = fann_get_activation_steepness(f, NUM2INT(layer), NUM2INT(neuron));
|
514
589
|
return rb_float_new(val);
|
@@ -516,21 +591,21 @@ static VALUE get_activation_steepness(VALUE self, VALUE layer, VALUE neuron)
|
|
516
591
|
|
517
592
|
/** call-seq: set_activation_steepness(steepness, layer, neuron)
|
518
593
|
|
519
|
-
Set the activation steepness for neuron number {neuron} in layer number {layer},
|
594
|
+
Set the activation steepness for neuron number {neuron} in layer number {layer},
|
520
595
|
counting the input layer as layer 0.*/
|
521
596
|
static VALUE set_activation_steepness(VALUE self, VALUE steepness, VALUE layer, VALUE neuron)
|
522
597
|
{
|
523
598
|
Check_Type(steepness, T_FLOAT);
|
524
599
|
Check_Type(layer, T_FIXNUM);
|
525
600
|
Check_Type(neuron, T_FIXNUM);
|
526
|
-
|
527
|
-
struct fann*
|
601
|
+
|
602
|
+
struct fann *f;
|
528
603
|
Data_Get_Struct(self, struct fann, f);
|
529
604
|
fann_set_activation_steepness(f, NUM2DBL(steepness), NUM2INT(layer), NUM2INT(neuron));
|
530
605
|
return self;
|
531
606
|
}
|
532
607
|
|
533
|
-
/** call-seq: set_activation_steepness_hidden(arg) -> return value
|
608
|
+
/** call-seq: set_activation_steepness_hidden(arg) -> return value
|
534
609
|
|
535
610
|
Set the activation steepness in all of the hidden layers.*/
|
536
611
|
static VALUE set_activation_steepness_hidden(VALUE self, VALUE steepness)
|
@@ -540,14 +615,14 @@ static VALUE set_activation_steepness_hidden(VALUE self, VALUE steepness)
|
|
540
615
|
|
541
616
|
/** call-seq: set_activation_steepness_layer(steepness, layer)
|
542
617
|
|
543
|
-
Set the activation steepness all of the neurons in layer number *layer*,
|
618
|
+
Set the activation steepness all of the neurons in layer number *layer*,
|
544
619
|
counting the input layer as layer 0.*/
|
545
620
|
static VALUE set_activation_steepness_layer(VALUE self, VALUE steepness, VALUE layer)
|
546
621
|
{
|
547
622
|
Check_Type(steepness, T_FLOAT);
|
548
623
|
Check_Type(layer, T_FIXNUM);
|
549
|
-
|
550
|
-
struct fann*
|
624
|
+
|
625
|
+
struct fann *f;
|
551
626
|
Data_Get_Struct(self, struct fann, f);
|
552
627
|
fann_set_activation_steepness_layer(f, NUM2DBL(steepness), NUM2INT(layer));
|
553
628
|
return self;
|
@@ -575,8 +650,8 @@ static VALUE set_bit_fail_limit(VALUE self, VALUE bit_fail_limit)
|
|
575
650
|
SET_FANN_FLT(bit_fail_limit, fann_set_bit_fail_limit);
|
576
651
|
}
|
577
652
|
|
578
|
-
/** The decay is a small negative valued number which is the factor that the weights
|
579
|
-
should become smaller in each iteration during quickprop training. This is used
|
653
|
+
/** The decay is a small negative valued number which is the factor that the weights
|
654
|
+
should become smaller in each iteration during quickprop training. This is used
|
580
655
|
to make sure that the weights do not become too high during training.*/
|
581
656
|
static VALUE get_quickprop_decay(VALUE self)
|
582
657
|
{
|
@@ -591,8 +666,8 @@ static VALUE set_quickprop_decay(VALUE self, VALUE quickprop_decay)
|
|
591
666
|
SET_FANN_FLT(quickprop_decay, fann_set_quickprop_decay);
|
592
667
|
}
|
593
668
|
|
594
|
-
/** The mu factor is used to increase and decrease the step-size during quickprop training.
|
595
|
-
The mu factor should always be above 1, since it would otherwise decrease the step-size
|
669
|
+
/** The mu factor is used to increase and decrease the step-size during quickprop training.
|
670
|
+
The mu factor should always be above 1, since it would otherwise decrease the step-size
|
596
671
|
when it was suppose to increase it. */
|
597
672
|
static VALUE get_quickprop_mu(VALUE self)
|
598
673
|
{
|
@@ -607,7 +682,7 @@ static VALUE set_quickprop_mu(VALUE self, VALUE quickprop_mu)
|
|
607
682
|
SET_FANN_FLT(quickprop_mu, fann_set_quickprop_mu);
|
608
683
|
}
|
609
684
|
|
610
|
-
/** The increase factor is a value larger than 1, which is used to
|
685
|
+
/** The increase factor is a value larger than 1, which is used to
|
611
686
|
increase the step-size during RPROP training.*/
|
612
687
|
static VALUE get_rprop_increase_factor(VALUE self)
|
613
688
|
{
|
@@ -681,27 +756,27 @@ static VALUE set_rprop_delta_zero(VALUE self, VALUE rprop_delta_zero)
|
|
681
756
|
/** Return array of bias(es)*/
|
682
757
|
static VALUE get_bias_array(VALUE self)
|
683
758
|
{
|
684
|
-
struct fann*
|
759
|
+
struct fann *f;
|
685
760
|
unsigned int num_layers;
|
686
|
-
Data_Get_Struct
|
761
|
+
Data_Get_Struct(self, struct fann, f);
|
687
762
|
num_layers = fann_get_num_layers(f);
|
688
763
|
unsigned int layers[num_layers];
|
689
|
-
fann_get_bias_array(f, layers);
|
690
|
-
|
764
|
+
fann_get_bias_array(f, layers);
|
765
|
+
|
691
766
|
// Create ruby array & set outputs:
|
692
767
|
VALUE arr;
|
693
768
|
arr = rb_ary_new();
|
694
769
|
unsigned int i;
|
695
|
-
for (i=0; i<num_layers; i++)
|
770
|
+
for (i = 0; i < num_layers; i++)
|
696
771
|
{
|
697
772
|
rb_ary_push(arr, INT2NUM(layers[i]));
|
698
773
|
}
|
699
|
-
|
774
|
+
|
700
775
|
return arr;
|
701
776
|
}
|
702
777
|
|
703
|
-
/** The number of fail bits; means the number of output neurons which differ more
|
704
|
-
than the bit fail limit (see <fann_get_bit_fail_limit>, <fann_set_bit_fail_limit>).
|
778
|
+
/** The number of fail bits; means the number of output neurons which differ more
|
779
|
+
than the bit fail limit (see <fann_get_bit_fail_limit>, <fann_set_bit_fail_limit>).
|
705
780
|
The bits are counted in all of the training data, so this number can be higher than
|
706
781
|
the number of training data.*/
|
707
782
|
static VALUE get_bit_fail(VALUE self)
|
@@ -715,7 +790,7 @@ static VALUE get_connection_rate(VALUE self)
|
|
715
790
|
RETURN_FANN_INT(fann_get_connection_rate);
|
716
791
|
}
|
717
792
|
|
718
|
-
/** call-seq: get_neurons(layer) -> return value
|
793
|
+
/** call-seq: get_neurons(layer) -> return value
|
719
794
|
|
720
795
|
Return array<hash> where each array element is a hash
|
721
796
|
representing a neuron. It contains the following keys:
|
@@ -724,19 +799,19 @@ static VALUE get_connection_rate(VALUE self)
|
|
724
799
|
:sum=float -- The sum of the inputs multiplied with the weights
|
725
800
|
:value=float -- The value of the activation fuction applied to the sum
|
726
801
|
:connections=array<int> -- indices of connected neurons(inputs)
|
727
|
-
|
802
|
+
|
728
803
|
This could be done more elegantly (e.g., defining more ruby ext classes).
|
729
804
|
This method does not directly correlate to anything in FANN, and accesses
|
730
|
-
structs that are not guaranteed to not change.
|
805
|
+
structs that are not guaranteed to not change.
|
731
806
|
*/
|
732
807
|
static VALUE get_neurons(VALUE self, VALUE layer)
|
733
808
|
{
|
734
809
|
struct fann_layer *layer_it;
|
735
810
|
struct fann_neuron *neuron_it;
|
736
|
-
|
737
|
-
struct fann*
|
811
|
+
|
812
|
+
struct fann *f;
|
738
813
|
unsigned int i;
|
739
|
-
Data_Get_Struct
|
814
|
+
Data_Get_Struct(self, struct fann, f);
|
740
815
|
|
741
816
|
VALUE neuron_array = rb_ary_new();
|
742
817
|
VALUE activation_function_sym = ID2SYM(rb_intern("activation_function"));
|
@@ -746,22 +821,23 @@ static VALUE get_neurons(VALUE self, VALUE layer)
|
|
746
821
|
VALUE value_sym = ID2SYM(rb_intern("value"));
|
747
822
|
VALUE connections_sym = ID2SYM(rb_intern("connections"));
|
748
823
|
unsigned int layer_num = 0;
|
749
|
-
|
750
|
-
|
751
|
-
|
752
|
-
for(layer_it = f->first_layer; layer_it != f->last_layer; layer_it++)
|
824
|
+
|
825
|
+
int nuke_bias_neuron = (fann_get_network_type(f) == FANN_NETTYPE_LAYER);
|
826
|
+
for (layer_it = f->first_layer; layer_it != f->last_layer; layer_it++)
|
753
827
|
{
|
754
|
-
for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
|
828
|
+
for (neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
|
755
829
|
{
|
756
|
-
if (nuke_bias_neuron && (neuron_it==(layer_it->last_neuron)-1))
|
830
|
+
if (nuke_bias_neuron && (neuron_it == (layer_it->last_neuron) - 1))
|
831
|
+
continue;
|
757
832
|
// Create array of connection indicies:
|
758
833
|
VALUE connection_array = rb_ary_new();
|
759
|
-
for (i = neuron_it->first_con; i < neuron_it->last_con; i++)
|
760
|
-
|
834
|
+
for (i = neuron_it->first_con; i < neuron_it->last_con; i++)
|
835
|
+
{
|
836
|
+
rb_ary_push(connection_array, INT2NUM(f->connections[i] - f->first_layer->first_neuron));
|
761
837
|
}
|
762
838
|
|
763
839
|
VALUE neuron = rb_hash_new();
|
764
|
-
|
840
|
+
|
765
841
|
// Set attributes on hash & push on array:
|
766
842
|
rb_hash_aset(neuron, activation_function_sym, activation_function_to_sym(neuron_it->activation_function));
|
767
843
|
rb_hash_aset(neuron, activation_steepness_sym, rb_float_new(neuron_it->activation_steepness));
|
@@ -769,47 +845,46 @@ static VALUE get_neurons(VALUE self, VALUE layer)
|
|
769
845
|
rb_hash_aset(neuron, sum_sym, rb_float_new(neuron_it->sum));
|
770
846
|
rb_hash_aset(neuron, value_sym, rb_float_new(neuron_it->value));
|
771
847
|
rb_hash_aset(neuron, connections_sym, connection_array);
|
772
|
-
|
773
|
-
rb_ary_push(neuron_array, neuron);
|
848
|
+
|
849
|
+
rb_ary_push(neuron_array, neuron);
|
774
850
|
}
|
775
851
|
++layer_num;
|
776
852
|
}
|
777
853
|
|
778
|
-
|
779
|
-
|
780
|
-
|
781
|
-
|
782
|
-
|
783
|
-
|
784
|
-
|
785
|
-
|
786
|
-
|
787
|
-
|
788
|
-
|
854
|
+
// switch (fann_get_network_type(ann)) {
|
855
|
+
// case FANN_NETTYPE_LAYER: {
|
856
|
+
// /* Report one bias in each layer except the last */
|
857
|
+
// if (layer_it != ann->last_layer-1)
|
858
|
+
// *bias = 1;
|
859
|
+
// else
|
860
|
+
// *bias = 0;
|
861
|
+
// break;
|
862
|
+
// }
|
863
|
+
// case FANN_NETTYPE_SHORTCUT: {
|
789
864
|
|
790
|
-
return neuron_array;
|
865
|
+
return neuron_array;
|
791
866
|
}
|
792
867
|
|
793
868
|
/** Get list of layers in array format where each element contains number of neurons in that layer*/
|
794
869
|
static VALUE get_layer_array(VALUE self)
|
795
870
|
{
|
796
|
-
struct fann*
|
871
|
+
struct fann *f;
|
797
872
|
unsigned int num_layers;
|
798
|
-
Data_Get_Struct
|
873
|
+
Data_Get_Struct(self, struct fann, f);
|
799
874
|
num_layers = fann_get_num_layers(f);
|
800
875
|
unsigned int layers[num_layers];
|
801
|
-
fann_get_layer_array(f, layers);
|
802
|
-
|
876
|
+
fann_get_layer_array(f, layers);
|
877
|
+
|
803
878
|
// Create ruby array & set outputs:
|
804
879
|
VALUE arr;
|
805
880
|
arr = rb_ary_new();
|
806
881
|
unsigned int i;
|
807
|
-
for (i=0; i<num_layers; i++)
|
882
|
+
for (i = 0; i < num_layers; i++)
|
808
883
|
{
|
809
884
|
rb_ary_push(arr, INT2NUM(layers[i]));
|
810
885
|
}
|
811
|
-
|
812
|
-
|
886
|
+
|
887
|
+
return arr;
|
813
888
|
}
|
814
889
|
|
815
890
|
/** Reads the mean square error from the network.*/
|
@@ -819,33 +894,33 @@ static VALUE get_MSE(VALUE self)
|
|
819
894
|
}
|
820
895
|
|
821
896
|
/** Resets the mean square error from the network.
|
822
|
-
This function also resets the number of bits that fail.*/
|
897
|
+
This function also resets the number of bits that fail.*/
|
823
898
|
static VALUE reset_MSE(VALUE self)
|
824
899
|
{
|
825
|
-
struct fann*
|
826
|
-
Data_Get_Struct
|
900
|
+
struct fann *f;
|
901
|
+
Data_Get_Struct(self, struct fann, f);
|
827
902
|
fann_reset_MSE(f);
|
828
|
-
return self;
|
903
|
+
return self;
|
829
904
|
}
|
830
905
|
|
831
906
|
/** Get the type of network. Returns as ruby symbol (one of :shortcut, :layer)*/
|
832
907
|
static VALUE get_network_type(VALUE self)
|
833
908
|
{
|
834
|
-
struct fann*
|
909
|
+
struct fann *f;
|
835
910
|
enum fann_nettype_enum net_type;
|
836
911
|
VALUE ret_val;
|
837
|
-
Data_Get_Struct
|
912
|
+
Data_Get_Struct(self, struct fann, f);
|
838
913
|
|
839
914
|
net_type = fann_get_network_type(f);
|
840
|
-
|
841
|
-
if(net_type==FANN_NETTYPE_LAYER)
|
915
|
+
|
916
|
+
if (net_type == FANN_NETTYPE_LAYER)
|
842
917
|
{
|
843
918
|
ret_val = ID2SYM(rb_intern("layer")); // (rb_str_new2("FANN_NETTYPE_LAYER"));
|
844
919
|
}
|
845
|
-
else if(net_type==FANN_NETTYPE_SHORTCUT)
|
920
|
+
else if (net_type == FANN_NETTYPE_SHORTCUT)
|
846
921
|
{
|
847
922
|
ret_val = ID2SYM(rb_intern("shortcut")); // (rb_str_new2("FANN_NETTYPE_SHORTCUT"));
|
848
|
-
}
|
923
|
+
}
|
849
924
|
return ret_val;
|
850
925
|
}
|
851
926
|
|
@@ -854,7 +929,7 @@ static VALUE get_num_input(VALUE self)
|
|
854
929
|
{
|
855
930
|
RETURN_FANN_INT(fann_get_num_input);
|
856
931
|
}
|
857
|
-
|
932
|
+
|
858
933
|
/** Get the number of layers in the network.*/
|
859
934
|
static VALUE get_num_layers(VALUE self)
|
860
935
|
{
|
@@ -886,43 +961,48 @@ static VALUE get_total_neurons(VALUE self)
|
|
886
961
|
static VALUE set_train_error_function(VALUE self, VALUE train_error_function)
|
887
962
|
{
|
888
963
|
Check_Type(train_error_function, T_SYMBOL);
|
889
|
-
|
890
|
-
ID id=SYM2ID(train_error_function);
|
964
|
+
|
965
|
+
ID id = SYM2ID(train_error_function);
|
891
966
|
enum fann_errorfunc_enum fann_train_error_function;
|
892
967
|
|
893
|
-
if(id==rb_intern("linear"))
|
894
|
-
|
895
|
-
|
896
|
-
|
897
|
-
|
968
|
+
if (id == rb_intern("linear"))
|
969
|
+
{
|
970
|
+
fann_train_error_function = FANN_ERRORFUNC_LINEAR;
|
971
|
+
}
|
972
|
+
else if (id == rb_intern("tanh"))
|
973
|
+
{
|
974
|
+
fann_train_error_function = FANN_ERRORFUNC_TANH;
|
975
|
+
}
|
976
|
+
else
|
977
|
+
{
|
898
978
|
rb_raise(rb_eRuntimeError, "Unrecognized train error function: [%s]", rb_id2name(SYM2ID(train_error_function)));
|
899
|
-
}
|
979
|
+
}
|
900
980
|
|
901
|
-
struct fann*
|
902
|
-
Data_Get_Struct
|
981
|
+
struct fann *f;
|
982
|
+
Data_Get_Struct(self, struct fann, f);
|
903
983
|
fann_set_train_error_function(f, fann_train_error_function);
|
904
|
-
return self;
|
984
|
+
return self;
|
905
985
|
}
|
906
986
|
|
907
987
|
/** Returns the error function used during training. One of the following symbols:
|
908
|
-
:linear, :tanh*/
|
988
|
+
:linear, :tanh*/
|
909
989
|
static VALUE get_train_error_function(VALUE self)
|
910
990
|
{
|
911
|
-
struct fann*
|
991
|
+
struct fann *f;
|
912
992
|
enum fann_errorfunc_enum train_error;
|
913
993
|
VALUE ret_val;
|
914
|
-
Data_Get_Struct
|
994
|
+
Data_Get_Struct(self, struct fann, f);
|
915
995
|
|
916
996
|
train_error = fann_get_train_error_function(f);
|
917
|
-
|
918
|
-
if(train_error==FANN_ERRORFUNC_LINEAR)
|
997
|
+
|
998
|
+
if (train_error == FANN_ERRORFUNC_LINEAR)
|
919
999
|
{
|
920
|
-
ret_val = ID2SYM(rb_intern("linear"));
|
1000
|
+
ret_val = ID2SYM(rb_intern("linear"));
|
921
1001
|
}
|
922
|
-
else
|
1002
|
+
else
|
923
1003
|
{
|
924
|
-
ret_val = ID2SYM(rb_intern("tanh"));
|
925
|
-
}
|
1004
|
+
ret_val = ID2SYM(rb_intern("tanh"));
|
1005
|
+
}
|
926
1006
|
return ret_val;
|
927
1007
|
}
|
928
1008
|
|
@@ -933,113 +1013,133 @@ static VALUE get_train_error_function(VALUE self)
|
|
933
1013
|
static VALUE set_training_algorithm(VALUE self, VALUE train_error_function)
|
934
1014
|
{
|
935
1015
|
Check_Type(train_error_function, T_SYMBOL);
|
936
|
-
|
937
|
-
ID id=SYM2ID(train_error_function);
|
1016
|
+
|
1017
|
+
ID id = SYM2ID(train_error_function);
|
938
1018
|
enum fann_train_enum fann_train_algorithm;
|
939
1019
|
|
940
|
-
if(id==rb_intern("incremental"))
|
941
|
-
|
942
|
-
|
943
|
-
|
944
|
-
|
945
|
-
|
946
|
-
|
947
|
-
|
948
|
-
|
1020
|
+
if (id == rb_intern("incremental"))
|
1021
|
+
{
|
1022
|
+
fann_train_algorithm = FANN_TRAIN_INCREMENTAL;
|
1023
|
+
}
|
1024
|
+
else if (id == rb_intern("batch"))
|
1025
|
+
{
|
1026
|
+
fann_train_algorithm = FANN_TRAIN_BATCH;
|
1027
|
+
}
|
1028
|
+
else if (id == rb_intern("rprop"))
|
1029
|
+
{
|
1030
|
+
fann_train_algorithm = FANN_TRAIN_RPROP;
|
1031
|
+
}
|
1032
|
+
else if (id == rb_intern("quickprop"))
|
1033
|
+
{
|
1034
|
+
fann_train_algorithm = FANN_TRAIN_QUICKPROP;
|
1035
|
+
}
|
1036
|
+
else
|
1037
|
+
{
|
949
1038
|
rb_raise(rb_eRuntimeError, "Unrecognized training algorithm function: [%s]", rb_id2name(SYM2ID(train_error_function)));
|
950
|
-
}
|
1039
|
+
}
|
951
1040
|
|
952
|
-
struct fann*
|
953
|
-
Data_Get_Struct
|
1041
|
+
struct fann *f;
|
1042
|
+
Data_Get_Struct(self, struct fann, f);
|
954
1043
|
fann_set_training_algorithm(f, fann_train_algorithm);
|
955
|
-
return self;
|
1044
|
+
return self;
|
956
1045
|
}
|
957
1046
|
|
958
1047
|
/** Returns the training algorithm. One of the following symbols:
|
959
1048
|
:incremental, :batch, :rprop, :quickprop */
|
960
1049
|
static VALUE get_training_algorithm(VALUE self)
|
961
1050
|
{
|
962
|
-
struct fann*
|
1051
|
+
struct fann *f;
|
963
1052
|
enum fann_train_enum fann_train_algorithm;
|
964
1053
|
VALUE ret_val;
|
965
|
-
Data_Get_Struct
|
1054
|
+
Data_Get_Struct(self, struct fann, f);
|
966
1055
|
|
967
1056
|
fann_train_algorithm = fann_get_training_algorithm(f);
|
968
|
-
|
969
|
-
if(fann_train_algorithm==FANN_TRAIN_INCREMENTAL)
|
1057
|
+
|
1058
|
+
if (fann_train_algorithm == FANN_TRAIN_INCREMENTAL)
|
1059
|
+
{
|
970
1060
|
ret_val = ID2SYM(rb_intern("incremental"));
|
971
|
-
}
|
972
|
-
|
973
|
-
|
974
|
-
ret_val = ID2SYM(rb_intern("
|
975
|
-
}
|
976
|
-
|
977
|
-
|
1061
|
+
}
|
1062
|
+
else if (fann_train_algorithm == FANN_TRAIN_BATCH)
|
1063
|
+
{
|
1064
|
+
ret_val = ID2SYM(rb_intern("batch"));
|
1065
|
+
}
|
1066
|
+
else if (fann_train_algorithm == FANN_TRAIN_RPROP)
|
1067
|
+
{
|
1068
|
+
ret_val = ID2SYM(rb_intern("rprop"));
|
1069
|
+
}
|
1070
|
+
else if (fann_train_algorithm == FANN_TRAIN_QUICKPROP)
|
1071
|
+
{
|
1072
|
+
ret_val = ID2SYM(rb_intern("quickprop"));
|
1073
|
+
}
|
978
1074
|
return ret_val;
|
979
1075
|
}
|
980
1076
|
|
981
|
-
/** call-seq: set_train_stop_function(train_stop_function) -> return value
|
1077
|
+
/** call-seq: set_train_stop_function(train_stop_function) -> return value
|
982
1078
|
|
983
1079
|
Set the training stop function. One of the following symbols:
|
984
1080
|
:mse, :bit */
|
985
1081
|
static VALUE set_train_stop_function(VALUE self, VALUE train_stop_function)
|
986
1082
|
{
|
987
1083
|
Check_Type(train_stop_function, T_SYMBOL);
|
988
|
-
ID id=SYM2ID(train_stop_function);
|
1084
|
+
ID id = SYM2ID(train_stop_function);
|
989
1085
|
enum fann_stopfunc_enum fann_train_stop_function;
|
990
1086
|
|
991
|
-
if(id==rb_intern("mse"))
|
992
|
-
|
993
|
-
|
994
|
-
|
995
|
-
|
1087
|
+
if (id == rb_intern("mse"))
|
1088
|
+
{
|
1089
|
+
fann_train_stop_function = FANN_STOPFUNC_MSE;
|
1090
|
+
}
|
1091
|
+
else if (id == rb_intern("bit"))
|
1092
|
+
{
|
1093
|
+
fann_train_stop_function = FANN_STOPFUNC_BIT;
|
1094
|
+
}
|
1095
|
+
else
|
1096
|
+
{
|
996
1097
|
rb_raise(rb_eRuntimeError, "Unrecognized stop function: [%s]", rb_id2name(SYM2ID(train_stop_function)));
|
997
|
-
}
|
1098
|
+
}
|
998
1099
|
|
999
|
-
struct fann*
|
1000
|
-
Data_Get_Struct
|
1100
|
+
struct fann *f;
|
1101
|
+
Data_Get_Struct(self, struct fann, f);
|
1001
1102
|
fann_set_train_stop_function(f, fann_train_stop_function);
|
1002
|
-
return self;
|
1103
|
+
return self;
|
1003
1104
|
}
|
1004
1105
|
|
1005
1106
|
/** Returns the training stop function. One of the following symbols:
|
1006
1107
|
:mse, :bit */
|
1007
1108
|
static VALUE get_train_stop_function(VALUE self)
|
1008
1109
|
{
|
1009
|
-
struct fann*
|
1110
|
+
struct fann *f;
|
1010
1111
|
enum fann_stopfunc_enum train_stop;
|
1011
1112
|
VALUE ret_val;
|
1012
|
-
Data_Get_Struct
|
1113
|
+
Data_Get_Struct(self, struct fann, f);
|
1013
1114
|
|
1014
1115
|
train_stop = fann_get_train_stop_function(f);
|
1015
|
-
|
1016
|
-
if(train_stop==FANN_STOPFUNC_MSE)
|
1116
|
+
|
1117
|
+
if (train_stop == FANN_STOPFUNC_MSE)
|
1017
1118
|
{
|
1018
1119
|
ret_val = ID2SYM(rb_intern("mse")); // (rb_str_new2("FANN_NETTYPE_LAYER"));
|
1019
1120
|
}
|
1020
1121
|
else // if(train_stop==FANN_STOPFUNC_BIT)
|
1021
1122
|
{
|
1022
1123
|
ret_val = ID2SYM(rb_intern("bit")); // (rb_str_new2("FANN_NETTYPE_SHORTCUT"));
|
1023
|
-
}
|
1124
|
+
}
|
1024
1125
|
return ret_val;
|
1025
1126
|
}
|
1026
1127
|
|
1027
|
-
|
1028
|
-
/** Will print the connections of the ann in a compact matrix,
|
1128
|
+
/** Will print the connections of the ann in a compact matrix,
|
1029
1129
|
for easy viewing of the internals of the ann. */
|
1030
1130
|
static VALUE print_connections(VALUE self)
|
1031
1131
|
{
|
1032
|
-
struct fann*
|
1033
|
-
Data_Get_Struct
|
1132
|
+
struct fann *f;
|
1133
|
+
Data_Get_Struct(self, struct fann, f);
|
1034
1134
|
fann_print_connections(f);
|
1035
|
-
return self;
|
1135
|
+
return self;
|
1036
1136
|
}
|
1037
1137
|
|
1038
1138
|
/** Print current NN parameters to stdout */
|
1039
1139
|
static VALUE print_parameters(VALUE self)
|
1040
1140
|
{
|
1041
|
-
struct fann*
|
1042
|
-
Data_Get_Struct
|
1141
|
+
struct fann *f;
|
1142
|
+
Data_Get_Struct(self, struct fann, f);
|
1043
1143
|
fann_print_parameters(f);
|
1044
1144
|
return Qnil;
|
1045
1145
|
}
|
@@ -1051,64 +1151,63 @@ static VALUE randomize_weights(VALUE self, VALUE min_weight, VALUE max_weight)
|
|
1051
1151
|
{
|
1052
1152
|
Check_Type(min_weight, T_FLOAT);
|
1053
1153
|
Check_Type(max_weight, T_FLOAT);
|
1054
|
-
struct fann*
|
1055
|
-
Data_Get_Struct
|
1154
|
+
struct fann *f;
|
1155
|
+
Data_Get_Struct(self, struct fann, f);
|
1056
1156
|
fann_randomize_weights(f, NUM2DBL(min_weight), NUM2DBL(max_weight));
|
1057
|
-
return self;
|
1157
|
+
return self;
|
1058
1158
|
}
|
1059
1159
|
|
1060
|
-
/** call-seq: run(inputs) -> return value
|
1160
|
+
/** call-seq: run(inputs) -> return value
|
1061
1161
|
|
1062
|
-
Run neural net on array<Float> of inputs with current parameters.
|
1162
|
+
Run neural net on array<Float> of inputs with current parameters.
|
1063
1163
|
Returns array<Float> as output */
|
1064
|
-
static VALUE run
|
1164
|
+
static VALUE run(VALUE self, VALUE inputs)
|
1065
1165
|
{
|
1066
1166
|
Check_Type(inputs, T_ARRAY);
|
1067
1167
|
|
1068
|
-
|
1168
|
+
struct fann *f;
|
1069
1169
|
unsigned int i;
|
1070
|
-
fann_type*
|
1071
|
-
|
1170
|
+
fann_type *outputs;
|
1171
|
+
|
1072
1172
|
// Convert inputs to type needed for NN:
|
1073
|
-
unsigned int len =
|
1173
|
+
unsigned int len = RARRAY_LEN(inputs);
|
1074
1174
|
fann_type fann_inputs[len];
|
1075
|
-
for (i=0; i<len; i++)
|
1175
|
+
for (i = 0; i < len; i++)
|
1076
1176
|
{
|
1077
1177
|
fann_inputs[i] = NUM2DBL(RARRAY_PTR(inputs)[i]);
|
1078
1178
|
}
|
1079
|
-
|
1080
|
-
|
1179
|
+
|
1081
1180
|
// Obtain NN & run method:
|
1082
|
-
|
1181
|
+
Data_Get_Struct(self, struct fann, f);
|
1083
1182
|
outputs = fann_run(f, fann_inputs);
|
1084
1183
|
|
1085
1184
|
// Create ruby array & set outputs:
|
1086
1185
|
VALUE arr;
|
1087
1186
|
arr = rb_ary_new();
|
1088
|
-
unsigned int output_len=fann_get_num_output(f);
|
1089
|
-
for (i=0; i<output_len; i++)
|
1090
|
-
{
|
1187
|
+
unsigned int output_len = fann_get_num_output(f);
|
1188
|
+
for (i = 0; i < output_len; i++)
|
1189
|
+
{
|
1091
1190
|
rb_ary_push(arr, rb_float_new(outputs[i]));
|
1092
1191
|
}
|
1093
|
-
|
1094
|
-
|
1192
|
+
|
1193
|
+
return arr;
|
1095
1194
|
}
|
1096
1195
|
|
1097
|
-
/** call-seq: init_weights(train_data) -> return value
|
1196
|
+
/** call-seq: init_weights(train_data) -> return value
|
1098
1197
|
|
1099
1198
|
Initialize the weights using Widrow + Nguyen's algorithm. */
|
1100
1199
|
static VALUE init_weights(VALUE self, VALUE train_data)
|
1101
1200
|
{
|
1102
|
-
|
1201
|
+
|
1103
1202
|
Check_Type(train_data, T_DATA);
|
1104
|
-
|
1105
|
-
struct fann* f;
|
1106
|
-
struct fann_train_data* t;
|
1107
|
-
Data_Get_Struct (self, struct fann, f);
|
1108
|
-
Data_Get_Struct (train_data, struct fann_train_data, t);
|
1109
1203
|
|
1110
|
-
|
1111
|
-
|
1204
|
+
struct fann *f;
|
1205
|
+
struct fann_train_data *t;
|
1206
|
+
Data_Get_Struct(self, struct fann, f);
|
1207
|
+
Data_Get_Struct(train_data, struct fann_train_data, t);
|
1208
|
+
|
1209
|
+
fann_init_weights(f, t);
|
1210
|
+
return self;
|
1112
1211
|
}
|
1113
1212
|
|
1114
1213
|
/** call-seq: train(input, expected_output)
|
@@ -1121,21 +1220,23 @@ static VALUE train(VALUE self, VALUE input, VALUE expected_output)
|
|
1121
1220
|
Check_Type(input, T_ARRAY);
|
1122
1221
|
Check_Type(expected_output, T_ARRAY);
|
1123
1222
|
|
1124
|
-
struct fann*
|
1223
|
+
struct fann *f;
|
1125
1224
|
Data_Get_Struct(self, struct fann, f);
|
1126
1225
|
|
1127
|
-
unsigned int num_input =
|
1128
|
-
unsigned int num_output =
|
1226
|
+
unsigned int num_input = RARRAY_LEN(input);
|
1227
|
+
unsigned int num_output = RARRAY_LEN(expected_output);
|
1129
1228
|
|
1130
1229
|
fann_type data_input[num_input], data_output[num_output];
|
1131
1230
|
|
1132
1231
|
unsigned int i;
|
1133
1232
|
|
1134
|
-
for (i = 0; i < num_input; i++)
|
1233
|
+
for (i = 0; i < num_input; i++)
|
1234
|
+
{
|
1135
1235
|
data_input[i] = NUM2DBL(RARRAY_PTR(input)[i]);
|
1136
1236
|
}
|
1137
1237
|
|
1138
|
-
for (i = 0; i < num_output; i++)
|
1238
|
+
for (i = 0; i < num_output; i++)
|
1239
|
+
{
|
1139
1240
|
data_output[i] = NUM2DBL(RARRAY_PTR(expected_output)[i]);
|
1140
1241
|
}
|
1141
1242
|
|
@@ -1157,42 +1258,42 @@ static VALUE train_on_data(VALUE self, VALUE train_data, VALUE max_epochs, VALUE
   Check_Type(max_epochs, T_FIXNUM);
   Check_Type(epochs_between_reports, T_FIXNUM);
   Check_Type(desired_error, T_FLOAT);
-
-  struct fann*
-  struct fann_train_data*
-  Data_Get_Struct
-  Data_Get_Struct
+
+  struct fann *f;
+  struct fann_train_data *t;
+  Data_Get_Struct(self, struct fann, f);
+  Data_Get_Struct(train_data, struct fann_train_data, t);

   unsigned int fann_max_epochs = NUM2INT(max_epochs);
   unsigned int fann_epochs_between_reports = NUM2INT(epochs_between_reports);
-  float fann_desired_error = NUM2DBL(desired_error);
+  float fann_desired_error = NUM2DBL(desired_error);
   fann_train_on_data(f, t, fann_max_epochs, fann_epochs_between_reports, fann_desired_error);
   return rb_int_new(0);
 }

-/** call-seq: train_epoch(train_data) -> return value
+/** call-seq: train_epoch(train_data) -> return value

   Train one epoch with a set of training data, created with RubyFann::TrainData.new */
 static VALUE train_epoch(VALUE self, VALUE train_data)
 {
   Check_Type(train_data, T_DATA);
-  struct fann*
-  struct fann_train_data*
-  Data_Get_Struct
-  Data_Get_Struct
+  struct fann *f;
+  struct fann_train_data *t;
+  Data_Get_Struct(self, struct fann, f);
+  Data_Get_Struct(train_data, struct fann_train_data, t);
   return rb_float_new(fann_train_epoch(f, t));
 }

-/** call-seq: test_data(train_data) -> return value
+/** call-seq: test_data(train_data) -> return value

   Test a set of training data and calculates the MSE for the training data. */
 static VALUE test_data(VALUE self, VALUE train_data)
 {
   Check_Type(train_data, T_DATA);
-  struct fann*
-  struct fann_train_data*
-  Data_Get_Struct
-  Data_Get_Struct
+  struct fann *f;
+  struct fann_train_data *t;
+  Data_Get_Struct(self, struct fann, f);
+  Data_Get_Struct(train_data, struct fann_train_data, t);
   return rb_float_new(fann_test_data(f, t));
 }

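`train_on_data` runs full training until `max_epochs` or `desired_error` is reached, while `train_epoch` and `test_data` both return the MSE as a Float. Continuing the sketch (epoch counts and the error target are arbitrary):

  fann.train_on_data(train, 1000, 10, 0.01) # max_epochs, epochs_between_reports, desired_error
  mse = fann.train_epoch(train)             # one epoch, returns the resulting MSE
  mse = fann.test_data(train)               # computes MSE without training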
@@ -1204,7 +1305,7 @@ static VALUE test_data(VALUE self, VALUE train_data)
   // Data_Get_Struct (self, struct fann, f);
   // return INT2NUM(fann_get_decimal_point(f));
   // }
-
+
   // returns the multiplier that fix point data is multiplied with.

   // Only available in fixed-point mode, which we don't need:
@@ -1228,19 +1329,19 @@ static VALUE cascadetrain_on_data(VALUE self, VALUE train_data, VALUE max_neuron
   Check_Type(max_neurons, T_FIXNUM);
   Check_Type(neurons_between_reports, T_FIXNUM);
   Check_Type(desired_error, T_FLOAT);
-
-  struct fann*
-  struct fann_train_data*
-  Data_Get_Struct
-  Data_Get_Struct
+
+  struct fann *f;
+  struct fann_train_data *t;
+  Data_Get_Struct(self, struct fann, f);
+  Data_Get_Struct(train_data, struct fann_train_data, t);

   unsigned int fann_max_neurons = NUM2INT(max_neurons);
   unsigned int fann_neurons_between_reports = NUM2INT(neurons_between_reports);
   float fann_desired_error = NUM2DBL(desired_error);
-
+
   fann_cascadetrain_on_data(f, t, fann_max_neurons, fann_neurons_between_reports, fann_desired_error);
-  return self;
-}
+  return self;
+}

 /** The cascade output change fraction is a number between 0 and 1 */
 static VALUE get_cascade_output_change_fraction(VALUE self)
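`cascadetrain_on_data` grows the network by adding candidate neurons rather than training a fixed topology, so it is typically used with the `Shortcut` class registered later in this file. A sketch (all numeric arguments are illustrative):

  cascade = RubyFann::Shortcut.new(num_inputs: 2, num_outputs: 1)
  cascade.cascadetrain_on_data(train, 30, 5, 0.01) # max_neurons, neurons_between_reports, desired_error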
@@ -1256,7 +1357,7 @@ static VALUE set_cascade_output_change_fraction(VALUE self, VALUE cascade_output
   SET_FANN_FLT(cascade_output_change_fraction, fann_set_cascade_output_change_fraction);
 }

-/** The number of cascade output stagnation epochs determines the number of epochs training is allowed to
+/** The number of cascade output stagnation epochs determines the number of epochs training is allowed to
   continue without changing the MSE by a fraction of <get_cascade_output_change_fraction>. */
 static VALUE get_cascade_output_stagnation_epochs(VALUE self)
 {
@@ -1265,7 +1366,7 @@ static VALUE get_cascade_output_stagnation_epochs(VALUE self)

 /** call-seq: set_cascade_output_stagnation_epochs(cascade_output_stagnation_epochs)

-  The number of cascade output stagnation epochs determines the number of epochs training is allowed to
+  The number of cascade output stagnation epochs determines the number of epochs training is allowed to
   continue without changing the MSE by a fraction of <get_cascade_output_change_fraction>. */
 static VALUE set_cascade_output_stagnation_epochs(VALUE self, VALUE cascade_output_stagnation_epochs)
 {
@@ -1300,7 +1401,7 @@ static VALUE get_cascade_candidate_stagnation_epochs(VALUE self)
 static VALUE set_cascade_candidate_stagnation_epochs(VALUE self, VALUE cascade_candidate_stagnation_epochs)
 {
   SET_FANN_UINT(cascade_candidate_stagnation_epochs, fann_set_cascade_candidate_stagnation_epochs);
-}
+}

 /** The weight multiplier is a parameter which is used to multiply the weights from the candidate neuron
   before adding the neuron to the neural network. This parameter is usually between 0 and 1, and is used
@@ -1352,7 +1453,7 @@ static VALUE set_cascade_max_out_epochs(VALUE self, VALUE cascade_max_out_epochs
   SET_FANN_UINT(cascade_max_out_epochs, fann_set_cascade_max_out_epochs);
 }

-/** The maximum candidate epochs determines the maximum number of epochs the input
+/** The maximum candidate epochs determines the maximum number of epochs the input
   connections to the candidates may be trained before adding a new candidate neuron. */
 static VALUE get_cascade_max_cand_epochs(VALUE self)
 {
@@ -1361,7 +1462,7 @@ static VALUE get_cascade_max_cand_epochs(VALUE self)

 /** call-seq: set_cascade_max_cand_epochs(cascade_max_cand_epochs)

-  The maximum candidate epochs determines the maximum number of epochs the input
+  The maximum candidate epochs determines the maximum number of epochs the input
   connections to the candidates may be trained before adding a new candidate neuron. */
 static VALUE set_cascade_max_cand_epochs(VALUE self, VALUE cascade_max_cand_epochs)
 {
@@ -1383,18 +1484,18 @@ static VALUE get_cascade_activation_functions_count(VALUE self)

 /** The learning rate is used to determine how aggressive training should be for some of the
   training algorithms (:incremental, :batch, :quickprop).
-  Do however note that it is not used in :rprop.
+  Do however note that it is not used in :rprop.
   The default learning rate is 0.7. */
 static VALUE get_learning_rate(VALUE self)
 {
   RETURN_FANN_FLT(fann_get_learning_rate);
 }

-/** call-seq: set_learning_rate(learning_rate) -> return value
+/** call-seq: set_learning_rate(learning_rate) -> return value

   The learning rate is used to determine how aggressive training should be for some of the
   training algorithms (:incremental, :batch, :quickprop).
-  Do however note that it is not used in :rprop.
+  Do however note that it is not used in :rprop.
   The default learning rate is 0.7. */
 static VALUE set_learning_rate(VALUE self, VALUE learning_rate)
 {
@@ -1407,8 +1508,8 @@ static VALUE get_learning_momentum(VALUE self)
   RETURN_FANN_FLT(fann_get_learning_momentum);
 }

-/** call-seq: set_learning_momentum(learning_momentum) -> return value
-
+/** call-seq: set_learning_momentum(learning_momentum) -> return value
+
   Set the learning momentum. */
 static VALUE set_learning_momentum(VALUE self, VALUE learning_momentum)
 {
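Per the doc comments, the learning rate (default 0.7) affects :incremental, :batch, and :quickprop but not :rprop; momentum is likewise only meaningful for momentum-based updates. A sketch of tuning both (values are arbitrary):

  fann.set_training_algorithm(:incremental)
  fann.set_learning_rate(0.3)     # must be a Float, per SET_FANN_FLT's Check_Type
  fann.set_learning_momentum(0.1)
  fann.get_learning_rate          # => 0.3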
@@ -1422,35 +1523,35 @@ static VALUE set_learning_momentum(VALUE self, VALUE learning_momentum)
 static VALUE set_cascade_activation_functions(VALUE self, VALUE cascade_activation_functions)
 {
   Check_Type(cascade_activation_functions, T_ARRAY);
-  struct fann*
-  Data_Get_Struct
-
-  unsigned
+  struct fann *f;
+  Data_Get_Struct(self, struct fann, f);
+
+  unsigned long cnt = RARRAY_LEN(cascade_activation_functions);
   enum fann_activationfunc_enum fann_activation_functions[cnt];
   unsigned int i;
-  for (i=0; i<cnt; i++)
+  for (i = 0; i < cnt; i++)
   {
     fann_activation_functions[i] = sym_to_activation_function(RARRAY_PTR(cascade_activation_functions)[i]);
   }
-
+
   fann_set_cascade_activation_functions(f, fann_activation_functions, cnt);
-  return self;
+  return self;
 }

 /** The cascade activation functions is an array of the different activation functions used by
   the candidates. The default is [:sigmoid, :sigmoid_symmetric, :gaussian, :gaussian_symmetric, :elliot, :elliot_symmetric] */
 static VALUE get_cascade_activation_functions(VALUE self)
 {
-  struct fann*
-  Data_Get_Struct
+  struct fann *f;
+  Data_Get_Struct(self, struct fann, f);
   unsigned int cnt = fann_get_cascade_activation_functions_count(f);
-  enum fann_activationfunc_enum*
+  enum fann_activationfunc_enum *fann_functions = fann_get_cascade_activation_functions(f);

   // Create ruby array & set outputs:
   VALUE arr;
   arr = rb_ary_new();
   unsigned int i;
-  for (i=0; i<cnt; i++)
+  for (i = 0; i < cnt; i++)
   {
     rb_ary_push(arr, activation_function_to_sym(fann_functions[i]));
   }
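Candidate activation functions are passed as Ruby symbols and mapped through `sym_to_activation_function`; the getter maps back to symbols. For example, restricting cascade candidates to two functions (an arbitrary choice):

  fann.set_cascade_activation_functions([:sigmoid, :sigmoid_symmetric])
  fann.get_cascade_activation_functions  # => [:sigmoid, :sigmoid_symmetric]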
@@ -1486,17 +1587,17 @@ static VALUE set_cascade_num_candidate_groups(VALUE self, VALUE cascade_num_cand
 static VALUE set_cascade_activation_steepnesses(VALUE self, VALUE cascade_activation_steepnesses)
 {
   Check_Type(cascade_activation_steepnesses, T_ARRAY);
-  struct fann*
-  Data_Get_Struct
-
-  unsigned int cnt =
+  struct fann *f;
+  Data_Get_Struct(self, struct fann, f);
+
+  unsigned int cnt = RARRAY_LEN(cascade_activation_steepnesses);
   fann_type fann_activation_steepnesses[cnt];
   unsigned int i;
-  for (i=0; i<cnt; i++)
+  for (i = 0; i < cnt; i++)
   {
     fann_activation_steepnesses[i] = NUM2DBL(RARRAY_PTR(cascade_activation_steepnesses)[i]);
   }
-
+
   fann_set_cascade_activation_steepnesses(f, fann_activation_steepnesses, cnt);
   return self;
 }
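The steepness list works the same way but takes plain Floats instead of symbols. Sketch (the values shown are FANN's usual defaults):

  fann.set_cascade_activation_steepnesses([0.25, 0.5, 0.75, 1.0])
  fann.get_cascade_activation_steepnesses  # => [0.25, 0.5, 0.75, 1.0]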
@@ -1505,16 +1606,16 @@ static VALUE set_cascade_activation_steepnesses(VALUE self, VALUE cascade_activa
   the candidates. */
 static VALUE get_cascade_activation_steepnesses(VALUE self)
 {
-  struct fann*
-  Data_Get_Struct
-  fann_type*
+  struct fann *f;
+  Data_Get_Struct(self, struct fann, f);
+  fann_type *fann_steepnesses = fann_get_cascade_activation_steepnesses(f);
   unsigned int cnt = fann_get_cascade_activation_steepnesses_count(f);

   // Create ruby array & set outputs:
   VALUE arr;
   arr = rb_ary_new();
   unsigned int i;
-  for (i=0; i<cnt; i++)
+  for (i = 0; i < cnt; i++)
   {
     rb_ary_push(arr, rb_float_new(fann_steepnesses[i]));
   }
@@ -1527,28 +1628,28 @@ static VALUE get_cascade_activation_steepnesses(VALUE self)
   Save the entire network to configuration file with given name */
 static VALUE nn_save(VALUE self, VALUE filename)
 {
-  struct fann*
-  Data_Get_Struct
+  struct fann *f;
+  Data_Get_Struct(self, struct fann, f);
   int status = fann_save(f, StringValuePtr(filename));
   return INT2NUM(status);
 }

 /** Initializes class under RubyFann module/namespace. */
-void Init_ruby_fann
+void Init_ruby_fann()
 {
   // RubyFann module/namespace:
-  m_rb_fann_module = rb_define_module
+  m_rb_fann_module = rb_define_module("RubyFann");

   // Standard NN class:
-  m_rb_fann_standard_class = rb_define_class_under
-  rb_define_alloc_func
+  m_rb_fann_standard_class = rb_define_class_under(m_rb_fann_module, "Standard", rb_cObject);
+  rb_define_alloc_func(m_rb_fann_standard_class, fann_allocate);
   rb_define_method(m_rb_fann_standard_class, "initialize", fann_initialize, 1);
   rb_define_method(m_rb_fann_standard_class, "init_weights", init_weights, 1);
-  rb_define_method(m_rb_fann_standard_class, "set_activation_function", set_activation_function, 3);
-  rb_define_method(m_rb_fann_standard_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
-  rb_define_method(m_rb_fann_standard_class, "set_activation_function_layer", set_activation_function_layer, 2);
-  rb_define_method(m_rb_fann_standard_class, "get_activation_function", get_activation_function, 2);
-  rb_define_method(m_rb_fann_standard_class, "set_activation_function_output", set_activation_function_output, 1);
+  rb_define_method(m_rb_fann_standard_class, "set_activation_function", set_activation_function, 3);
+  rb_define_method(m_rb_fann_standard_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
+  rb_define_method(m_rb_fann_standard_class, "set_activation_function_layer", set_activation_function_layer, 2);
+  rb_define_method(m_rb_fann_standard_class, "get_activation_function", get_activation_function, 2);
+  rb_define_method(m_rb_fann_standard_class, "set_activation_function_output", set_activation_function_output, 1);
   rb_define_method(m_rb_fann_standard_class, "get_activation_steepness", get_activation_steepness, 2);
   rb_define_method(m_rb_fann_standard_class, "set_activation_steepness", set_activation_steepness, 3);
   rb_define_method(m_rb_fann_standard_class, "set_activation_steepness_hidden", set_activation_steepness_hidden, 1);
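`save` writes the whole network to a FANN configuration file and returns `fann_save`'s status code (0 on success). Sketch (the filename is arbitrary; reloading by passing the saved file to the constructor's filename option is assumed to work as in the gem's README):

  fann.save("sample_net.net")                              # => 0
  copy = RubyFann::Standard.new(filename: "sample_net.net")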
@@ -1578,14 +1679,14 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_standard_class, "get_connection_rate", get_connection_rate, 0);
   rb_define_method(m_rb_fann_standard_class, "get_layer_array", get_layer_array, 0);
   rb_define_method(m_rb_fann_standard_class, "get_network_type", get_network_type, 0);
-  rb_define_method(m_rb_fann_standard_class, "get_neurons", get_neurons, 0);
+  rb_define_method(m_rb_fann_standard_class, "get_neurons", get_neurons, 0);
   rb_define_method(m_rb_fann_standard_class, "get_num_input", get_num_input, 0);
   rb_define_method(m_rb_fann_standard_class, "get_num_layers", get_num_layers, 0);
-  rb_define_method(m_rb_fann_standard_class, "get_num_output", get_num_output, 0);
+  rb_define_method(m_rb_fann_standard_class, "get_num_output", get_num_output, 0);
   rb_define_method(m_rb_fann_standard_class, "get_total_connections", get_total_connections, 0);
   rb_define_method(m_rb_fann_standard_class, "get_total_neurons", get_total_neurons, 0);
   // rb_define_method(m_rb_fann_standard_class, "get_train_error_function", get_train_error_function, 0);
-  // rb_define_method(m_rb_fann_standard_class, "set_train_error_function", set_train_error_function, 1);
+  // rb_define_method(m_rb_fann_standard_class, "set_train_error_function", set_train_error_function, 1);
   rb_define_method(m_rb_fann_standard_class, "print_connections", print_connections, 0);
   rb_define_method(m_rb_fann_standard_class, "print_parameters", print_parameters, 0);
   rb_define_method(m_rb_fann_standard_class, "randomize_weights", randomize_weights, 2);
@@ -1593,7 +1694,7 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_standard_class, "train", train, 2);
   rb_define_method(m_rb_fann_standard_class, "train_on_data", train_on_data, 4);
   rb_define_method(m_rb_fann_standard_class, "train_epoch", train_epoch, 1);
-  rb_define_method(m_rb_fann_standard_class, "test_data", test_data, 1);
+  rb_define_method(m_rb_fann_standard_class, "test_data", test_data, 1);
   rb_define_method(m_rb_fann_standard_class, "get_MSE", get_MSE, 0);
   rb_define_method(m_rb_fann_standard_class, "get_bit_fail", get_bit_fail, 0);
   rb_define_method(m_rb_fann_standard_class, "reset_MSE", reset_MSE, 0);
@@ -1603,8 +1704,7 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_standard_class, "set_learning_momentum", set_learning_momentum, 1);
   rb_define_method(m_rb_fann_standard_class, "get_training_algorithm", get_training_algorithm, 0);
   rb_define_method(m_rb_fann_standard_class, "set_training_algorithm", set_training_algorithm, 1);
-
-
+
   // Cascade functions:
   rb_define_method(m_rb_fann_standard_class, "cascadetrain_on_data", cascadetrain_on_data, 4);
   rb_define_method(m_rb_fann_standard_class, "get_cascade_output_change_fraction", get_cascade_output_change_fraction, 0);
@@ -1630,25 +1730,24 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_standard_class, "get_cascade_activation_steepnesses_count", get_cascade_activation_steepnesses_count, 0);
   rb_define_method(m_rb_fann_standard_class, "get_cascade_activation_steepnesses", get_cascade_activation_steepnesses, 0);
   rb_define_method(m_rb_fann_standard_class, "set_cascade_activation_steepnesses", set_cascade_activation_steepnesses, 1);
-  rb_define_method(m_rb_fann_standard_class, "get_cascade_num_candidate_groups", get_cascade_num_candidate_groups, 0);
-  rb_define_method(m_rb_fann_standard_class, "set_cascade_num_candidate_groups", set_cascade_num_candidate_groups, 1);
+  rb_define_method(m_rb_fann_standard_class, "get_cascade_num_candidate_groups", get_cascade_num_candidate_groups, 0);
+  rb_define_method(m_rb_fann_standard_class, "set_cascade_num_candidate_groups", set_cascade_num_candidate_groups, 1);
   rb_define_method(m_rb_fann_standard_class, "save", nn_save, 1);

-
   // Uncomment for fixed-point mode (also recompile fann). Probably not going to be needed:
-  //rb_define_method(clazz, "get_decimal_point", get_decimal_point, 0);
-  //rb_define_method(clazz, "get_multiplier", get_multiplier, 0);
-
+  // rb_define_method(clazz, "get_decimal_point", get_decimal_point, 0);
+  // rb_define_method(clazz, "get_multiplier", get_multiplier, 0);
+
   // Shortcut NN class (duplicated from above so that rdoc generation tools can find the methods:):
-  m_rb_fann_shortcut_class = rb_define_class_under
-  rb_define_alloc_func
+  m_rb_fann_shortcut_class = rb_define_class_under(m_rb_fann_module, "Shortcut", rb_cObject);
+  rb_define_alloc_func(m_rb_fann_shortcut_class, fann_allocate);
   rb_define_method(m_rb_fann_shortcut_class, "initialize", fann_initialize, 1);
   rb_define_method(m_rb_fann_shortcut_class, "init_weights", init_weights, 1);
-  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function", set_activation_function, 3);
-  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
-  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_layer", set_activation_function_layer, 2);
-  rb_define_method(m_rb_fann_shortcut_class, "get_activation_function", get_activation_function, 2);
-  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_output", set_activation_function_output, 1);
+  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function", set_activation_function, 3);
+  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_hidden", set_activation_function_hidden, 1);
+  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_layer", set_activation_function_layer, 2);
+  rb_define_method(m_rb_fann_shortcut_class, "get_activation_function", get_activation_function, 2);
+  rb_define_method(m_rb_fann_shortcut_class, "set_activation_function_output", set_activation_function_output, 1);
   rb_define_method(m_rb_fann_shortcut_class, "get_activation_steepness", get_activation_steepness, 2);
   rb_define_method(m_rb_fann_shortcut_class, "set_activation_steepness", set_activation_steepness, 3);
   rb_define_method(m_rb_fann_shortcut_class, "set_activation_steepness_hidden", set_activation_steepness_hidden, 1);
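As the comment notes, `Shortcut` re-registers the same methods as `Standard` purely so rdoc can document both classes; the behavioral difference is the shortcut-connected topology the initializer builds, which cascade training then extends. Sketch:

  shortcut = RubyFann::Shortcut.new(num_inputs: 2, num_outputs: 1)
  shortcut.cascadetrain_on_data(train, 20, 5, 0.01)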
@@ -1678,14 +1777,14 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_shortcut_class, "get_connection_rate", get_connection_rate, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_layer_array", get_layer_array, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_network_type", get_network_type, 0);
-  rb_define_method(m_rb_fann_shortcut_class, "get_neurons", get_neurons, 0);
+  rb_define_method(m_rb_fann_shortcut_class, "get_neurons", get_neurons, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_num_input", get_num_input, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_num_layers", get_num_layers, 0);
-  rb_define_method(m_rb_fann_shortcut_class, "get_num_output", get_num_output, 0);
+  rb_define_method(m_rb_fann_shortcut_class, "get_num_output", get_num_output, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_total_connections", get_total_connections, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_total_neurons", get_total_neurons, 0);
   // rb_define_method(m_rb_fann_shortcut_class, "get_train_error_function", get_train_error_function, 0);
-  // rb_define_method(m_rb_fann_shortcut_class, "set_train_error_function", set_train_error_function, 1);
+  // rb_define_method(m_rb_fann_shortcut_class, "set_train_error_function", set_train_error_function, 1);
   rb_define_method(m_rb_fann_shortcut_class, "print_connections", print_connections, 0);
   rb_define_method(m_rb_fann_shortcut_class, "print_parameters", print_parameters, 0);
   rb_define_method(m_rb_fann_shortcut_class, "randomize_weights", randomize_weights, 2);
@@ -1693,7 +1792,7 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_shortcut_class, "train", train, 2);
   rb_define_method(m_rb_fann_shortcut_class, "train_on_data", train_on_data, 4);
   rb_define_method(m_rb_fann_shortcut_class, "train_epoch", train_epoch, 1);
-  rb_define_method(m_rb_fann_shortcut_class, "test_data", test_data, 1);
+  rb_define_method(m_rb_fann_shortcut_class, "test_data", test_data, 1);
   rb_define_method(m_rb_fann_shortcut_class, "get_MSE", get_MSE, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_bit_fail", get_bit_fail, 0);
   rb_define_method(m_rb_fann_shortcut_class, "reset_MSE", reset_MSE, 0);
@@ -1703,7 +1802,7 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_shortcut_class, "set_learning_momentum", set_learning_momentum, 1);
   rb_define_method(m_rb_fann_shortcut_class, "get_training_algorithm", get_training_algorithm, 0);
   rb_define_method(m_rb_fann_shortcut_class, "set_training_algorithm", set_training_algorithm, 1);
-
+
   // Cascade functions:
   rb_define_method(m_rb_fann_shortcut_class, "cascadetrain_on_data", cascadetrain_on_data, 4);
   rb_define_method(m_rb_fann_shortcut_class, "get_cascade_output_change_fraction", get_cascade_output_change_fraction, 0);
@@ -1729,19 +1828,17 @@ void Init_ruby_fann ()
   rb_define_method(m_rb_fann_shortcut_class, "get_cascade_activation_steepnesses_count", get_cascade_activation_steepnesses_count, 0);
   rb_define_method(m_rb_fann_shortcut_class, "get_cascade_activation_steepnesses", get_cascade_activation_steepnesses, 0);
   rb_define_method(m_rb_fann_shortcut_class, "set_cascade_activation_steepnesses", set_cascade_activation_steepnesses, 1);
-  rb_define_method(m_rb_fann_shortcut_class, "get_cascade_num_candidate_groups", get_cascade_num_candidate_groups, 0);
-  rb_define_method(m_rb_fann_shortcut_class, "set_cascade_num_candidate_groups", set_cascade_num_candidate_groups, 1);
+  rb_define_method(m_rb_fann_shortcut_class, "get_cascade_num_candidate_groups", get_cascade_num_candidate_groups, 0);
+  rb_define_method(m_rb_fann_shortcut_class, "set_cascade_num_candidate_groups", set_cascade_num_candidate_groups, 1);
   rb_define_method(m_rb_fann_shortcut_class, "save", nn_save, 1);
-

   // TrainData NN class:
-  m_rb_fann_train_data_class = rb_define_class_under
-  rb_define_alloc_func
+  m_rb_fann_train_data_class = rb_define_class_under(m_rb_fann_module, "TrainData", rb_cObject);
+  rb_define_alloc_func(m_rb_fann_train_data_class, fann_training_data_allocate);
   rb_define_method(m_rb_fann_train_data_class, "initialize", fann_train_data_initialize, 1);
   rb_define_method(m_rb_fann_train_data_class, "length", length_train_data, 0);
-  rb_define_method(m_rb_fann_train_data_class, "shuffle", shuffle, 0);
+  rb_define_method(m_rb_fann_train_data_class, "shuffle", shuffle, 0);
   rb_define_method(m_rb_fann_train_data_class, "save", training_save, 1);
-
+
   // printf("Initialized Ruby Bindings for FANN.\n");
 }
-
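Finally, `TrainData` wraps `fann_train_data` and, per the registrations above, offers `length`, `shuffle`, and `save`. Sketch (data values are illustrative):

  train = RubyFann::TrainData.new(inputs: [[0.2, 0.3], [0.8, 0.9]],
                                  desired_outputs: [[0.7], [0.1]])
  train.length               # => 2
  train.shuffle              # reorder the patterns in place
  train.save("sample.train") # write FANN's training-data file format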