tlearn 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/ext/tlearn/weights.c ADDED
@@ -0,0 +1,116 @@
+ #include <stdio.h>
+
+ extern int nn; /* number of nodes */
+ extern int ni; /* number of inputs */
+ extern int nt; /* nn + ni + 1 */
+
+ extern long tsweeps; /* total sweeps */
+ extern long sweep; /* current sweep */
+
+ extern float **wt; /* (nn x nt): weights */
+ extern float **dwt; /* (nn x nt) delta weight at time t */
+ extern float **winc; /* (nn x nt) accumulated weight increment*/
+
+ extern char loadfile[]; /* .wts file to start with */
+ extern char root[]; /* root file name */
+
+
+ save_wts()
+ {
+
+     FILE *fp;
+     FILE *fopen();
+
+     register int i;
+     register int j;
+
+     float *w;
+     float **wp;
+
+     char file[128];
+
+ #ifdef ibmpc
+     /*
+      * if running under DOS, probably can't have filenames
+      * with more than 8 chars total, or multiple "."s, so
+      * "fileroot.nnnnn.wts" becomes "fileroot_nnnnn.wts"
+      */
+     sprintf(file, "%s_%ld.wts", root, sweep);
+ #else
+     sprintf(file, "%s.%ld.wts", root, sweep);
+ #endif
+     if ((fp=fopen(file, "w+")) == NULL) {
+         perror("Can't open .wts file\n");
+         exit(1);
+     }
+     fprintf(fp, "NETWORK CONFIGURED BY TLEARN\n");
+     fprintf(fp, "# weights after %ld sweeps\n", sweep);
+     fprintf(fp, "# WEIGHTS\n");
+
+     /* to each node */
+     wp = wt;
+     for (i = 0; i < nn; i++, wp++){
+         fprintf(fp, "# TO NODE %d\n",i+1);
+         w = *wp;
+         /* from each bias, input, and node */
+         for (j = 0; j < nt; j++,w++){
+             fprintf(fp,"%f\n",*w);
+         }
+     }
+
+     fflush(fp);
+     fclose(fp);
+
+     return;
+ }
+
+ load_wts()
+ {
+
+     FILE *fp;
+     FILE *fopen();
+
+     register int i;
+     register int j;
+
+     register float *w;
+     register float *wi;
+     register float *dw;
+     register float **wp;
+
+     int tmp;
+
+     char mode[10];
+
+     if ((fp=fopen(loadfile, "r")) == NULL) {
+         perror(loadfile);
+         exit(1);
+     }
+     fscanf(fp, "NETWORK CONFIGURED BY %s\n", mode);
+     if (strcmp(mode, "TLEARN") != 0) {
+         printf("Saved weights not for tlearn-configured network\n");
+         exit(1);
+     }
+     fscanf(fp, "# weights after %ld sweeps\n", &tsweeps);
+     fscanf(fp, "# WEIGHTS\n");
+
+     /* to each of nn nodes */
+     wp = wt;
+     for (i = 0; i < nn; i++, wp++){
+         fscanf(fp, "# TO NODE %d\n",&tmp);
+         w = *wp;
+         dw = *(dwt+i);
+         wi = *(winc+i);
+         /* from each bias, input, and node */
+         for (j = 0; j < nt; j++, w++){
+             fscanf(fp,"%f\n",w);
+             *dw = 0.;
+             *wi = 0.;
+         }
+     }
+
+     return;
+
+ }
+
+
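
The .wts layout written by save_wts() and read back by load_wts() is plain text: a "NETWORK CONFIGURED BY TLEARN" line, a "# weights after N sweeps" line, a "# WEIGHTS" line, and then, for each of the nn nodes, a "# TO NODE i" header followed by nt floats (bias, inputs, then nodes), one per line. As a rough illustration only — this helper is not part of the gem — the same layout could be parsed from Ruby like so:

    # Hypothetical helper, not shipped with the gem: collects the per-node
    # weight vectors from a .wts file produced by save_wts().
    def read_wts(path)
      weights = []
      File.readlines(path).each do |line|
        case line
        when /\A# TO NODE/ then weights << []   # start a new node's weight vector
        when /\ANETWORK/, /\A#/ then next       # skip the remaining header lines
        else weights.last << line.to_f unless line.strip.empty?
        end
      end
      weights                                   # nn arrays of nt floats each
    end
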
data/lib/tlearn.rb ADDED
@@ -0,0 +1,17 @@
+ $:.unshift(File.dirname(__FILE__) + '/lib')
+
+ require 'tlearn/training_data'
+ require 'tlearn/fitness_data'
+ require 'tlearn/config'
+ require 'tlearn/run_tlearn'
+
+ require 'tlearn/run'
+
+ def tlearn_extension
+   File.exists?(File.dirname(__FILE__) + '/tlearn.so') ? 'tlearn.so' : 'tlearn.bundle'
+ end
+
+ require tlearn_extension
+
+ module TLearn
+ end
data/lib/tlearn/config.rb ADDED
@@ -0,0 +1,101 @@
+ module TLearn
+   class Config
+     WORKING_DIR = File.dirname(__FILE__) + '/../../data'
+     TLEARN_NAMESPACE = 'evaluator'
+     NUMBER_OF_RESET_TIMEPOINTS = 3497
+     DEFAULT_NUMBER_OF_SWEEPS = 1333000
+
+     def initialize(config)
+       @connections_config = config[:connections] || {}
+       @special_config = config[:special] || {}
+       @nodes_config = config[:nodes] || {}
+     end
+
+     def setup_config(training_data)
+       File.open("#{file_root}.cf", "w") {|f| f.write(evaulator_config(training_data))}
+     end
+
+     def setup_fitness_data(data)
+       File.open("#{file_root}.data", "w") {|f| f.write(build_data(data))}
+     end
+
+     def setup_training_data(training_data)
+       File.open("#{file_root}.reset", "w") {|f| f.write(build_reset_config(training_data))}
+       File.open("#{file_root}.data", "w") {|f| f.write(build_data(training_data))}
+       File.open("#{file_root}.teach", "w") {|f| f.write(build_teach_data(training_data))}
+     end
+
+     def number_of_sweeps
+       DEFAULT_NUMBER_OF_SWEEPS
+     end
+
+     def file_root
+       "#{WORKING_DIR}/#{TLEARN_NAMESPACE}"
+     end
+
+     private
+
+     def connections_ranges_to_strings(connections_config)
+       connections_config.map{|hash| {hash.keys[0].to_s.gsub('..','-') => hash.values[0]}}
+     end
+
+     def evaulator_config(training_data)
+       nodes_config = {
+         :nodes => @nodes_config[:number_of_nodes],
+         :inputs => training_data.no_of_inputs,
+         :outputs => training_data.no_of_outputs,
+         :output_nodes => @nodes_config[:output_nodes]
+       }
+
+       @connections_config = connections_ranges_to_strings(@connections_config)
+
+       output_nodes = nodes_config.delete(:output_nodes)
+       node_config_strings = nodes_config.map{|key,value| "#{key.to_s.gsub('_',' ')} = #{value}" }
+       node_config_strings << "output nodes are #{output_nodes}"
+
+       connection_config_strings = @connections_config.map{|mapping| "#{mapping.keys[0]} from #{mapping.values[0]}" }
+       connection_config_strings = ["groups = #{0}"] + connection_config_strings
+
+
+       config = <<EOS
+ NODES:
+ #{node_config_strings.join("\n")}
+ CONNECTIONS:
+ #{connection_config_strings.join("\n")}
+ SPECIAL:
+ #{@special_config.map{|key,value| "#{key} = #{value}" }.join("\n")}
+ EOS
+     end
+
+     def build_data(training_data)
+       data_file = <<EOS
+ distributed
+ #{training_data.no_of_data_values}
+ #{training_data.data.map{|d| d.join(" ")}.join("\n")}
+ EOS
+     end
+
+     def build_teach_data(training_data)
+       data_strings = training_data.output_data.map{|d| d.join(" ")}
+       data_strings = data_strings.each_with_index.map{|data, index| "#{index} #{data}" }
+
+       teach_file = <<EOS
+ distributed
+ #{training_data.no_of_data_values}
+ #{data_strings.join("\n")}
+ EOS
+     end
+
+     def build_reset_config(training_data)
+       reset_points = training_data.reset_points
+       reset_file = <<EOS
+ #{reset_points.join("\n")}
+ EOS
+     end
+
+     def number_of_outputs(training_data)
+       training_data.values[0].length
+     end
+
+   end
+ end
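
Config.new above accepts a hash with three optional keys: :nodes (whose :number_of_nodes and :output_nodes entries become the NODES: section, with the input and output counts taken from the training data), :connections (an array of single-entry hashes whose Range keys are rewritten from 1..5 to 1-5 form for the CONNECTIONS: section), and :special (flat pairs emitted verbatim as key = value lines). A minimal sketch with made-up node counts, ranges and SPECIAL settings:

    # Illustrative values only; the numbers and ranges here are assumptions,
    # not defaults shipped with the gem.
    config = TLearn::Config.new(
      :nodes       => {:number_of_nodes => 10, :output_nodes => '5-10'},
      :connections => [{1..10 => 'i1-i2'}, {1..4 => '5-10'}],
      :special     => {:selected => '1-10', :weight_limit => 1.00}
    )
    config.file_root   # => ".../data/evaluator", prefix for the generated .cf/.data/.teach/.reset files
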
data/lib/tlearn/fitness_data.rb ADDED
@@ -0,0 +1,24 @@
+ module TLearn
+   class FitnessData
+     def initialize(data)
+       @data = data
+     end
+
+     def reset_points
+       [0, 2]
+     end
+
+     def data
+       [@data]
+     end
+
+     def no_of_data_values
+       1
+     end
+
+     def no_of_inputs
+       @data.length
+     end
+
+   end
+ end
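
FitnessData simply wraps one input vector so it can be written out through the same Config#setup_fitness_data path used for training data. A small made-up example:

    # Illustrative only: a single input vector to score against a trained network.
    fitness = TLearn::FitnessData.new([1, 0])
    fitness.data                # => [[1, 0]]
    fitness.no_of_data_values   # => 1
    fitness.reset_points        # => [0, 2]
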
data/lib/tlearn/run.rb ADDED
@@ -0,0 +1,29 @@
+ module TLearn
+   class Run
+
+     def initialize(config, out=STDOUT)
+       @config = config
+       @out = out
+     end
+
+     def train(data, number_of_sweeps = nil)
+       run_tlearn = RunTLearn.new(@config)
+
+       results = run_tlearn.train(TrainingData.new(data), number_of_sweeps)
+
+       if results
+         results.each{|r| @out.puts(r.inspect)}
+         results
+       else
+         @out.puts("[Error] Training failed")
+       end
+     end
+
+     def fitness(data, number_of_sweeps = nil)
+       run_tlearn = RunTLearn.new(@config)
+
+       run_tlearn.fitness(FitnessData.new(data), number_of_sweeps)
+     end
+
+   end
+ end
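
Run is the high-level entry point: train wraps its argument in TrainingData, delegates to RunTLearn, prints each learned weight row to the output stream and returns the rows (or reports "[Error] Training failed"), while fitness wraps a single input vector in FitnessData. A hedged usage sketch, with the network layout, data and sweep count invented purely for illustration:

    # Hypothetical session; layout, data and sweep count are made up.
    run = TLearn::Run.new({:nodes       => {:number_of_nodes => 3, :output_nodes => '3'},
                           :connections => [{1..3 => 'i1-i2'}]})

    # Two training sequences, each an array of {input_vector => output_vector} pairs.
    weights = run.train([[{[0, 0] => [0]}, {[0, 1] => [1]}],
                         [{[1, 1] => [0]}]], 200)

    # Score a single input vector once a .wts file exists.
    run.fitness([1, 0], 200) if weights
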
data/lib/tlearn/run_tlearn.rb ADDED
@@ -0,0 +1,68 @@
+ module TLearn
+   class RunTLearn
+     class UntrainedError < Exception; end;
+
+     def initialize(config = {})
+       @config = Config.new(config)
+     end
+
+     def fitness(data, number_of_sweeps = @config.number_of_sweeps)
+       raise UntrainedError.new("Train me first!") unless network_trained?
+
+       clear_previous_fitness_session
+
+       @config.setup_fitness_data(data)
+
+       execute_tlearn_fitness(number_of_sweeps)
+     end
+
+     def train(training_data, number_of_sweeps = @config.number_of_sweeps)
+       clear_entire_training_data
+
+       @config.setup_config(training_data)
+       @config.setup_training_data(training_data)
+
+       execute_tlearn_train(number_of_sweeps)
+
+       if training_successful?(number_of_sweeps)
+         weights = File.read("#{Config::WORKING_DIR}/#{Config::TLEARN_NAMESPACE}.#{number_of_sweeps}.wts").split("\n")
+         `cp #{Config::WORKING_DIR}/#{Config::TLEARN_NAMESPACE}.#{number_of_sweeps}.wts #{Config::WORKING_DIR}/#{Config::TLEARN_NAMESPACE}.wts`
+         weights.map{|line| line.split("\t").map{|number| number.strip}}
+       else
+         false
+       end
+     end
+
+     private
+
+     def file_root
+       "#{File.expand_path(Config::WORKING_DIR)}/#{Config::TLEARN_NAMESPACE}"
+     end
+
+     def clear_previous_fitness_session
+       FileUtils.rm_f("#{file_root}.output")
+       FileUtils.rm_f("#{file_root}.reset")
+     end
+
+     def clear_entire_training_data
+       FileUtils.rm_f(Dir.glob("#{Config::WORKING_DIR}/*"))
+     end
+
+     def training_successful?(number_of_sweeps)
+       File.exists?("#{file_root}.#{number_of_sweeps}.wts")
+     end
+
+     def network_trained?
+       File.exists?("#{file_root}.wts")
+     end
+
+     def execute_tlearn_fitness(number_of_sweeps)
+       TLearnExt.fitness({:sweeps => number_of_sweeps, :file_root => file_root})
+     end
+
+     def execute_tlearn_train(number_of_sweeps)
+       TLearnExt.train({:sweeps => number_of_sweeps, :file_root => file_root})
+     end
+
+   end
+ end
data/lib/tlearn/training_data.rb ADDED
@@ -0,0 +1,41 @@
+ module TLearn
+   class TrainingData
+     def initialize(data)
+       @data_list = data
+     end
+
+     def reset_points
+       sum = 0
+       reset_times = @data_list.map do |data|
+         sum = sum + data.length
+         sum
+       end
+       [sum, 0] + reset_times
+     end
+
+     def output_data
+       @data_list.reduce([]){|all_data, data| all_data + data.
+         reduce([]){|data_list, data_hash| data_list << data_hash.values[0] }
+       }
+     end
+
+     def data
+       @data_list.reduce([]){|all_data, data| all_data + data.
+         reduce([]){|data_list, data_hash| data_list << data_hash.keys[0] }
+       }
+     end
+
+     def no_of_data_values
+       @data_list.map{|data| data.length}.reduce(&:+)
+     end
+
+     def no_of_inputs
+       @data_list[0][0].keys[0].length
+     end
+
+     def no_of_outputs
+       @data_list[0][0].values[0].length
+     end
+
+   end
+ end
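
To make the expected shape concrete: the data handed to TrainingData (and to Run#train) is an array of sequences, each sequence an array of single-entry hashes mapping an input vector to an output vector. A made-up example and the values the accessors above would return:

    # Two sequences of {input => output} pairs, invented for illustration.
    data = TLearn::TrainingData.new([[{[0, 0] => [0]}, {[0, 1] => [1]}],
                                     [{[1, 1] => [0]}]])

    data.no_of_inputs       # => 2    (length of the first input vector)
    data.no_of_outputs      # => 1    (length of the first output vector)
    data.no_of_data_values  # => 3    (total pairs across all sequences)
    data.reset_points       # => [3, 0, 2, 3]  (total, 0, then cumulative sequence lengths)
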
metadata ADDED
@@ -0,0 +1,64 @@
+ --- !ruby/object:Gem::Specification
+ name: tlearn
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ prerelease:
+ platform: ruby
+ authors:
+ - Joseph Wilk
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2012-10-19 00:00:00.000000000 Z
+ dependencies: []
+ description:
+ email:
+ - joe@josephwilk.net
+ executables: []
+ extensions:
+ - ext/tlearn/extconf.rb
+ extra_rdoc_files: []
+ files:
+ - lib/tlearn/config.rb
+ - lib/tlearn/fitness_data.rb
+ - lib/tlearn/run.rb
+ - lib/tlearn/run_tlearn.rb
+ - lib/tlearn/training_data.rb
+ - lib/tlearn.rb
+ - ext/tlearn/activate.c
+ - ext/tlearn/arrays.c
+ - ext/tlearn/compute.c
+ - ext/tlearn/Exp/exp.c
+ - ext/tlearn/getopt.c
+ - ext/tlearn/parse.c
+ - ext/tlearn/subs.c
+ - ext/tlearn/tlearn.c
+ - ext/tlearn/tlearn_ext.c
+ - ext/tlearn/update.c
+ - ext/tlearn/weights.c
+ - ext/tlearn/extconf.rb
+ homepage:
+ licenses: []
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.24
+ signing_key:
+ specification_version: 3
+ summary: ruby bindings for tlearn
+ test_files: []