t_learn 0.1.1.0 → 0.1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: b6597d2b7b9978e0c84e5e87920fb4f3fb765887
-  data.tar.gz: 8c54a272723b8640a8d89e0e8937f84a1ab1a279
+  metadata.gz: bcf9731528ca98ed43cfe5f2a7686707def2e6a9
+  data.tar.gz: a90853e05203740a90c3c5ce6cf9fb13f0ab36bf
 SHA512:
-  metadata.gz: 12134be65f46bbe5d9dec8aaaee3b90c8e3c02f9602f30419a75e7a74cb1c1683ca73725cbcbc081e16918114f357b292e5ceadd5fdd4117de9dcf5ae105641a
-  data.tar.gz: 1198ca3aaa4551929242a30e0afa117676702d26ebccc6d7eda6bcadebbfda9a1fb3a9aa7e2150d257a0f9f9a06ae3c3294cd37b2573a2459206d2b68ebc68b4
+  metadata.gz: 7b25db6487317f85009cb795bec1060e71cdca91804ce74f013e8933d3dfc6f1128c65d02829cac5956b31747cc159907f92fecdb7a910ef18219c95759aa8a5
+  data.tar.gz: 86dfa12d25b29010c0e6ccebbb120e2f28062e70bdece7e43d830e79d503038fd808553218f0a8d621f11f800be22cd2e30b8e940c5c752b7ba28e008e60a403
.rake_tasks~ ADDED
@@ -0,0 +1,7 @@
+build
+clean
+clobber
+install
+install:local
+release[remote]
+spec
data/README.md CHANGED
@@ -24,25 +24,23 @@ Or install it yourself as:
 respect for keras.
 
 ``` ruby
-def test_fun(x)
-  return ( x*0.9 ).round(2)
-end
-
 
 model = TLearn::FeedForwardNeuralNetwork.new
+
 model.add_layer(2)
-model.add_layer(5)
+model.add_layer(3)
 model.add_layer(1)
 
-x_train = [[0.1, 1.0],[0.2, 1.0], [0.4, 1.0], [0.6, 1.0]]
+x_train = [[0.0, 0.0],[0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
+y_train = [[ 0.0 ], [ 1.0 ],[ 1.0 ],[ 0.0 ]]
+model.fit(x_train, y_train, 50000)
 
-y_train = [[ test_fun(x_train[0][0]) ], [ test_fun(x_train[1][0]) ],[ test_fun(x_train[2][0]) ],[ test_fun(x_train[3][0]) ]]
-model.fit(x_train, y_train, 500000)
+x_test = x_train
+y_test = y_train
 
-x_test = [[0.1, 1.0],[0.2, 1.0], [0.4, 1.0], [0.6, 1.0]]
-y_test = [[ test_fun(x_train[0][0]) ], [ test_fun(x_train[1][0]) ],[ test_fun(x_train[2][0]) ],[ test_fun(x_train[3][0]) ]]
+err_rate = model.evaluate(x_test, y_test)
 
-model.evaluate(x_test, y_test)
+puts "err rate: #{err_rate}%"
 
 ```
 
@@ -51,10 +49,11 @@ model.evaluate(x_test, y_test)
 
 ```
 
-x [0.1, 1.0], y 0.09 , output 0.22505163646378912
-x [0.2, 1.0], y 0.18 , output 0.2817288022885251
-x [0.4, 1.0], y 0.36 , output 0.3699200581887254
-x [0.6, 1.0], y 0.54 , output 0.42524180537036876
+x [0.0, 0.0], y [0.0] , output [0.03286460161620565]
+x [0.0, 1.0], y [1.0] , output [0.9733866321804969]
+x [1.0, 0.0], y [1.0] , output [0.9731963536942299]
+x [1.0, 1.0], y [0.0] , output [0.014481150692655216]
+err rate: 2.5190691608533524%
 
 ```
 
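The new README example also pairs naturally with the `err_list` accessor introduced in this release: as the library diff below shows, `fit` now pushes one accumulated error value per epoch. A minimal sketch of checking convergence that way, assuming the gem is loaded with `require "t_learn"`:

``` ruby
require "t_learn"

model = TLearn::FeedForwardNeuralNetwork.new
model.add_layer(2)
model.add_layer(3)
model.add_layer(1)

# XOR training set from the updated README
x_train = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
y_train = [[0.0], [1.0], [1.0], [0.0]]
model.fit(x_train, y_train, 50_000)

# err_list holds one accumulated error per epoch, so a tail that is
# much smaller than the head suggests training converged.
puts "first epoch err: #{model.err_list.first}"
puts "last epoch err:  #{model.err_list.last}"
```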
@@ -7,19 +7,21 @@ module TLearn
   # ==
   #
   class FeedForwardNeuralNetwork
-    attr_accessor :layer_list, :layer_size, :link_list, :node_id, :learning_rate
-    def initialize(learning_rate=0.3)
+    attr_accessor :layer_list, :layer_size, :link_list, :node_id, :learning_rate, :err_list, :threshold
+    def initialize(learning_rate=0.1, threshold=0.0)
       @layer_size = 0 #layer iterator
       @layer_list = Array.new
       @link_list = Hash.new
       @node_id = 0
       @learning_rate = learning_rate
+      @err_list = Array.new
+      @threshold = threshold
     end
 
     def add_layer(node_num)
       node_list = Array.new()
       node_num.times do |num|
-        node = Node.new(0.2)
+        node = Node.new(0.0,"sig", @threshold)
         node.set_id(@node_id)
         node_list.push(node)
         @node_id += 1
@@ -40,7 +42,7 @@ module TLearn
     def connect_nodes
       @layer_list[@layer_size - 1].each do |from_node|
         @layer_list[@layer_size].each do |to_node|
-          @link_list["#{from_node.id}_#{to_node.id}"] = 0.0
+          @link_list["#{from_node.id}_#{to_node.id}"] = rand(-1.0...1.0)
         end
       end
     end
@@ -54,11 +56,15 @@ module TLearn
     def fit(x_train, y_train, epoch)
       # input teacher_datas
       epoch.times do
+        epoch_err = 0.0
         x_train.zip(y_train).each do |x, y|
+          x, y = x_train.zip(y_train).sample
+          # puts "x #{x}, y #{y}"
           propagation(x)
-          # back_propagation
+          epoch_err += calc_ave_err(y)
           back_propagation(y)
         end
+        @err_list.push(epoch_err)
       end
     end
 
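Note that the rewritten inner loop still iterates over `x_train.zip(y_train)`, but its first statement immediately overwrites `x, y` with a random pair, so an epoch is effectively `x_train.size` draws with replacement rather than one ordered pass. A behaviorally equivalent, more explicit sketch:

``` ruby
epoch.times do
  epoch_err = 0.0
  pairs = x_train.zip(y_train)
  pairs.size.times do
    x, y = pairs.sample          # stochastic draw, with replacement
    propagation(x)
    epoch_err += calc_ave_err(y)
    back_propagation(y)
  end
  @err_list.push(epoch_err)
end
```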
@@ -75,12 +81,22 @@ module TLearn
          @layer_list[layer_num].each do |from_node|
            sum_all_from_node += @link_list["#{from_node.id}_#{to_node.id}"] * from_node.w
          end
-          to_node.update_w(sum_all_from_node)
+          to_node.update_w(sum_all_from_node + 1.0)
        end
      end
    end
 
 
+    def calc_ave_err(y)
+      sum_err = 0.0
+      @layer_list[@layer_size - 1].each_with_index do |node, i|
+        sum_err += calc_err(node.w,y[i]).abs
+      end
+      ave_err = (sum_err)/y.size
+      return ave_err
+    end
+
+
     #
     # ===
     #
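Two behavioral changes land here: `propagation` now feeds `sum_all_from_node + 1.0` into `update_w`, giving every node a constant bias of 1.0 on its weighted input, and the new `calc_ave_err` reports the mean absolute error over the output layer. A hypothetical standalone restatement of that error measure (the name `mean_abs_err` is ours, not the gem's):

``` ruby
# Mean absolute difference between output activations and targets,
# mirroring calc_ave_err: sum |y_i - o_i| over the output layer,
# then divide by the number of outputs.
def mean_abs_err(outputs, targets)
  outputs.zip(targets).sum { |o, t| (t - o).abs } / targets.size.to_f
end

mean_abs_err([0.97, 0.03], [1.0, 0.0]) # => 0.03
```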
@@ -88,56 +104,62 @@ module TLearn
     #
     def back_propagation(y)
       delta = {}
-      ( @layer_size - 1).downto(1) do |layer_num|
+      ( @layer_size - 1).downto(0) do |layer_num|
         if ( @layer_size - 1) == layer_num # if output layer
-          @layer_list[layer_num].each_with_index do |to_node, i|
-            @layer_list[layer_num - 1].each do |from_node|
-              delta["#{from_node.id}_#{to_node.id}"] = - calc_err(to_node.w,y[i]) * to_node.w * (1.0 - to_node.w)
-              # puts "delta[#{from_node}_#{to_node}] #{delta['#{from_node}_#{to_node}']}"
-              delta_weight = -1.0 * @learning_rate * delta["#{from_node.id}_#{to_node.id}"] * to_node.w
-              @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + delta_weight ;
-            end
+          @layer_list[layer_num].each_with_index do |output_node, i|
+            delta["#{output_node.id}"] = -1.0 * calc_err(y[i], output_node.w) * output_node.w * (1.0 -output_node.w)
           end
         else
-          @layer_list[layer_num].each do |to_node|
-            @layer_list[layer_num - 1].each do |from_node|
-              delta["#{from_node.id}_#{to_node.id}"] = calc_delta(delta,layer_num, to_node) * to_node.w * (1.0 - to_node.w)
-              delta_weight = -1.0 * @learning_rate * delta["#{from_node.id}_#{to_node.id}"] * to_node.w
-              @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + delta_weight
+          @layer_list[layer_num].each do |from_node|
+            # update the link weights
+            @layer_list[layer_num + 1].each do |to_node|
+              update_weight = -1.0 * @learning_rate * delta["#{to_node.id}"] * from_node.w
+              @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + update_weight
             end
+            # update this layer's delta values
+            delta["#{from_node.id}"] = calc_delta(delta,layer_num, from_node) * from_node.w * (1.0 - from_node.w)
           end
         end
       end
     end
 
-    def calc_err(w, teacher_data)
-      return (teacher_data - w )
+    def calc_err(teacher_data, w)
+      return (teacher_data -w)
     end
 
     def calc_delta(delta,layer_i, from_node)
       sum = 0.0
       @layer_list[layer_i+1].each do |to_node|
-        sum += delta["#{from_node.id}_#{to_node.id}"] * from_node.w
+        sum += delta["#{to_node.id}"] * @link_list["#{from_node.id}_#{to_node.id}"]
       end
       return sum
     end
 
     def evaluate(x_test, y_test)
       # compare teacher_datas and output of nn
-      sum = 0.0
+      sum_err = 0.0
       x_test.zip(y_test).each do |x, y|
         propagation(x)
-        @layer_list[@layer_size -1].zip(y).each do |output, y_|
-          puts "x #{x}, y #{y_} , output #{output.w}"
-          sum += 1 if output.w == y_
+        output = []
+        err = 0.0
+        @layer_list[@layer_size -1].zip(y).each do |o, y_f|
+          output.push(o.w)
+          err += (y_f - o.w).abs
         end
+        sum_err += (err/y_test[0].size)
+        puts "x #{x}, y #{y} , output #{output}"
       end
-      return (sum/y_test.size) * 100.0
+      return (sum_err/y_test.size) * 100.0
+      # return 0.0
+    end
+
+    def get_output_layer
+      return @layer_list[@layer_size-1]
     end
 
     class Node
       attr_accessor :w,:active_function, :threshold, :id
-      def initialize(w = 0.0, active_function = "sig", threshold = 0.5)
+      def initialize(w = 0.0, active_function = "sig", threshold = 0.0)
         @w = w
         @threshold = threshold
         @active_function = active_function
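Keying `delta` by node id rather than by link is what turns this rewrite into the standard delta rule for a sigmoid network. Writing `o` for a node's activation (stored, somewhat confusingly, in `Node#w`), eta for `@learning_rate`, and `w_jk` for `@link_list["#{j}_#{k}"]`, the hunk computes:

```
\delta_j^{\mathrm{out}} = -(y_j - o_j)\, o_j (1 - o_j)
\delta_j^{\mathrm{hidden}} = \Big( \sum_k \delta_k\, w_{jk} \Big)\, o_j (1 - o_j)
\Delta w_{ij} = -\eta\, \delta_j\, o_i
```

`evaluate` also changes meaning in this release: it previously counted exact output matches (an accuracy percentage) and now returns the mean absolute error as a percentage, which is what the README's `err rate` line prints.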
@@ -158,8 +180,7 @@ module TLearn
       end
 
       def sigmoid_fun(x, a=1)
-        result= (1.0/(1.0+Math.exp(-1.0 * a * x))) ;
-        return result
+        return (1.0/(1.0+Math.exp(-1.0 * a * x)))
       end
     end
   end
@@ -1,3 +1,3 @@
 module TLearn
-  VERSION = "0.1.1.0"
+  VERSION = "0.1.1.2"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: t_learn
 version: !ruby/object:Gem::Version
-  version: 0.1.1.0
+  version: 0.1.1.2
 platform: ruby
 authors:
 - Tcom242242
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2016-12-27 00:00:00.000000000 Z
+date: 2017-01-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -60,6 +60,7 @@ extensions: []
 extra_rdoc_files: []
 files:
 - ".gitignore"
+- ".rake_tasks~"
 - ".rspec"
 - ".travis.yml"
 - CODE_OF_CONDUCT.md