t_learn 0.1.0.1 → 0.1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 749c02d42d8f70120b364673f13e01e8fcca764e
-   data.tar.gz: 9b476e3d0c694fd77503150190a0a8127af3d9ca
+   metadata.gz: b6597d2b7b9978e0c84e5e87920fb4f3fb765887
+   data.tar.gz: 8c54a272723b8640a8d89e0e8937f84a1ab1a279
  SHA512:
-   metadata.gz: e22afcd74b8809c322eb9f9c9a920705005c1519a5a47e1ae20157934f800f48d5741ffdd804ff6df96ba651e5a87195629e133b3312f8780e6e32853e7b130a
-   data.tar.gz: d4c7e03f7ca8d8b3d75b76c4113e60406124e72b98db21db644546f8440909ea6449efd712e6e103af7ac7e2a9827e26a2a1c9a870cb6d276b8ae522a50231b5
+   metadata.gz: 12134be65f46bbe5d9dec8aaaee3b90c8e3c02f9602f30419a75e7a74cb1c1683ca73725cbcbc081e16918114f357b292e5ceadd5fdd4117de9dcf5ae105641a
+   data.tar.gz: 1198ca3aaa4551929242a30e0afa117676702d26ebccc6d7eda6bcadebbfda9a1fb3a9aa7e2150d257a0f9f9a06ae3c3294cd37b2573a2459206d2b68ebc68b4
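These digests are over the metadata.gz and data.tar.gz members inside the released .gem archive. A hedged Ruby sketch for reproducing the new SHA512 values locally (assumes the archive was fetched beforehand with `gem fetch t_learn -v 0.1.1.0`, which writes t_learn-0.1.1.0.gem to the current directory):

``` ruby
require "digest"
require "rubygems/package"

# A .gem file is a tar archive; hash the two members whose digests are listed above.
tar = Gem::Package::TarReader.new(File.open("t_learn-0.1.1.0.gem", "rb"))
tar.each do |entry|
  next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
  puts "#{entry.full_name}: #{Digest::SHA512.hexdigest(entry.read)}"
end
tar.close
```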
data/README.md CHANGED
@@ -1,6 +1,5 @@
  # TLearn
-
- my machine learning library.
+ This is my hobby machine learning library.
  I will add machine learning items.
 
  ## Installation
@@ -20,9 +19,52 @@ Or install it yourself as:
      $ gem install t_learn
 
  ## Usage
- ### hop filed net
+
+ ### simple feedforward_neural_network
+ respect for keras.
 
  ``` ruby
+ def test_fun(x)
+   return ( x*0.9 ).round(2)
+ end
+
+
+ model = TLearn::FeedForwardNeuralNetwork.new
+ model.add_layer(2)
+ model.add_layer(5)
+ model.add_layer(1)
+
+ x_train = [[0.1, 1.0],[0.2, 1.0], [0.4, 1.0], [0.6, 1.0]]
+
+ y_train = [[ test_fun(x_train[0][0]) ], [ test_fun(x_train[1][0]) ],[ test_fun(x_train[2][0]) ],[ test_fun(x_train[3][0]) ]]
+ model.fit(x_train, y_train, 500000)
+
+ x_test = [[0.1, 1.0],[0.2, 1.0], [0.4, 1.0], [0.6, 1.0]]
+ y_test = [[ test_fun(x_train[0][0]) ], [ test_fun(x_train[1][0]) ],[ test_fun(x_train[2][0]) ],[ test_fun(x_train[3][0]) ]]
+
+ model.evaluate(x_test, y_test)
+
+ ```
+
+ ### result
+ ....
+
+ ```
+
+ x [0.1, 1.0], y 0.09 , output 0.22505163646378912
+ x [0.2, 1.0], y 0.18 , output 0.2817288022885251
+ x [0.4, 1.0], y 0.36 , output 0.3699200581887254
+ x [0.6, 1.0], y 0.54 , output 0.42524180537036876
+
+ ```
+
+
+ ### hop filed net
+ sample
+ ``` ruby
+
+ require "t_learn"
+
  data = [1.0, 1.0, -1.0, -1.0, 1.0] # teacher data
  hop_field_net = TLearn::HopFieldNet.new(0.0, data)
  hop_field_net.memorize
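For reference, the y values in the README's result block above are just test_fun applied to the first element of each x row, so the target column can be reproduced on its own (plain Ruby, independent of the gem):

``` ruby
# Reproduce the target column from the README's result block.
def test_fun(x)
  (x * 0.9).round(2)
end

[0.1, 0.2, 0.4, 0.6].each { |x| puts test_fun(x) }
# => 0.09, 0.18, 0.36, 0.54
```

Note also that evaluate (defined in the new class below) counts a sample as correct only when the network output equals the target exactly, so its percentage return value will typically be 0.0 for continuous sigmoid outputs; the per-sample lines it prints are the informative part.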
data/lib/t_learn/feedforward_neural_network.rb ADDED
@@ -0,0 +1,174 @@
+ #!/usr/bin/ruby
+ # -*- encoding: utf-8 -*-
+
+ module TLearn
+
+   #
+   # ==
+   #
+   class FeedForwardNeuralNetwork
+     attr_accessor :layer_list, :layer_size, :link_list, :node_id, :learning_rate
+     def initialize(learning_rate=0.3)
+       @layer_size = 0 #layer iterator
+       @layer_list = Array.new
+       @link_list = Hash.new
+       @node_id = 0
+       @learning_rate = learning_rate
+     end
+
+     def add_layer(node_num)
+       node_list = Array.new()
+       node_num.times do |num|
+         node = Node.new(0.2)
+         node.set_id(@node_id)
+         node_list.push(node)
+         @node_id += 1
+       end
+
+       @layer_list.push(node_list)
+       # connect link
+       if @layer_size != 0 # if not first layer
+         # connect link to @layer_size - 1 layer
+         connect_nodes
+       end
+       @layer_size += 1
+     end
+
+     #
+     # === connect_nodes
+     #
+     def connect_nodes
+       @layer_list[@layer_size - 1].each do |from_node|
+         @layer_list[@layer_size].each do |to_node|
+           @link_list["#{from_node.id}_#{to_node.id}"] = 0.0
+         end
+       end
+     end
+
+     #
+     # ===
+     #
+     # @param x_train Array
+     # @param y_train Array
+     #
+     def fit(x_train, y_train, epoch)
+       # input teacher_datas
+       epoch.times do
+         x_train.zip(y_train).each do |x, y|
+           propagation(x)
+           # back_propagation
+           back_propagation(y)
+         end
+       end
+     end
+
+     def propagation(x)
+       # input data
+       @layer_list[0].each_with_index do |node, i|
+         node.input (x[i])
+       end
+       @layer_size.times do |layer_num|
+         if layer_num != (@layer_size-1)
+           # puts "layernum #{layer_num}"
+           @layer_list[layer_num + 1].each do |to_node|
+             sum_all_from_node = 0.0
+             @layer_list[layer_num].each do |from_node|
+               sum_all_from_node += @link_list["#{from_node.id}_#{to_node.id}"] * from_node.w
+             end
+             to_node.update_w(sum_all_from_node)
+           end
+         end
+       end
+     end
+
+     #
+     # ===
+     #
+     # @param y Array teacher_data
+     #
+     def back_propagation(y)
+       delta = {}
+       ( @layer_size - 1).downto(1) do |layer_num|
+         if ( @layer_size - 1) == layer_num # if output layer
+           @layer_list[layer_num].each_with_index do |to_node, i|
+             @layer_list[layer_num - 1].each do |from_node|
+               delta["#{from_node.id}_#{to_node.id}"] = - calc_err(to_node.w,y[i]) * to_node.w * (1.0 - to_node.w)
+               # puts "delta[#{from_node}_#{to_node}] #{delta['#{from_node}_#{to_node}']}"
+               delta_weight = -1.0 * @learning_rate * delta["#{from_node.id}_#{to_node.id}"] * to_node.w
+               @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + delta_weight ;
+             end
+           end
+         else
+           @layer_list[layer_num].each do |to_node|
+             @layer_list[layer_num - 1].each do |from_node|
+               delta["#{from_node.id}_#{to_node.id}"] = calc_delta(delta,layer_num, to_node) * to_node.w * (1.0 - to_node.w)
+               delta_weight = -1.0 * @learning_rate * delta["#{from_node.id}_#{to_node.id}"] * to_node.w
+               @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + delta_weight
+             end
+           end
+         end
+       end
+     end
+
+     def calc_err(w, teacher_data)
+       return (teacher_data - w )
+     end
+
+     def calc_delta(delta,layer_i, from_node)
+       sum = 0.0
+       @layer_list[layer_i+1].each do |to_node|
+         sum += delta["#{from_node.id}_#{to_node.id}"] * from_node.w
+       end
+       return sum
+     end
+
+     def evaluate(x_test, y_test)
+       # compare teacher_datas and output of nn
+       sum = 0.0
+       x_test.zip(y_test).each do |x, y|
+         propagation(x)
+         @layer_list[@layer_size -1].zip(y).each do |output, y_|
+           puts "x #{x}, y #{y_} , output #{output.w}"
+           sum += 1 if output.w == y_
+         end
+       end
+       return (sum/y_test.size) * 100.0
+     end
+
+     class Node
+       attr_accessor :w,:active_function, :threshold, :id
+       def initialize(w = 0.0, active_function = "sig", threshold = 0.5)
+         @w = w
+         @threshold = threshold
+         @active_function = active_function
+       end
+
+       def set_id(id)
+         @id = id
+       end
+
+       # it can use input fase
+       def input(w)
+         @w = w
+       end
+
+       def update_w(input)
+         # update by sigmoid
+         @w = sigmoid_fun(input)
+       end
+
+       def sigmoid_fun(x, a=1)
+         result= (1.0/(1.0+Math.exp(-1.0 * a * x))) ;
+         return result
+       end
+     end
+   end
+ end
+
+
+ #
+ # for standalone execution
+ #
+ if($0 == __FILE__) then
+ end
+
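To make the update rule in back_propagation easier to follow, here is a standalone sketch of one output-layer weight update exactly as the code above computes it (plain Ruby; o stands for the destination node's post-sigmoid value w, y for the teacher value, eta for learning_rate; the numbers are made up):

``` ruby
# One output-layer update, mirroring back_propagation:
#   delta        = -(y - o) * o * (1 - o)   # calc_err returns (y - o)
#   delta_weight = -eta * delta * o         # note: multiplied by the destination
#   link        += delta_weight             #       node's activation, as in the code
eta  = 0.3   # learning_rate
o    = 0.6   # destination node output (Node#w after sigmoid_fun)
y    = 0.9   # teacher value
link = 0.0   # current link weight

delta        = -(y - o) * o * (1 - o)
delta_weight = -eta * delta * o
link += delta_weight
puts link.round(5)   # => 0.01296 (positive correction, since the target exceeds the output)
```

The hidden-layer branch has the same shape, with calc_delta's sum over downstream deltas standing in for the direct error term.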
data/lib/t_learn/version.rb CHANGED
@@ -1,3 +1,3 @@
  module TLearn
-   VERSION = "0.1.0.1"
+   VERSION = "0.1.1.0"
  end
data/lib/t_learn.rb CHANGED
@@ -1,5 +1,6 @@
  require "t_learn/version"
  require "t_learn/hop_field_net"
+ require "t_learn/feedforward_neural_network"
 
  module TLearn
    # Your code goes here...
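With this require in place, the new class loads from a single `require "t_learn"`. A minimal smoke-test sketch of the constructor and add_layer, based only on the code added in this release (layer sizes are arbitrary):

``` ruby
require "t_learn"

model = TLearn::FeedForwardNeuralNetwork.new  # default learning_rate is 0.3
model.add_layer(2)  # input layer
model.add_layer(3)  # hidden layer
model.add_layer(1)  # output layer

puts model.layer_size      # => 3
puts model.link_list.size  # => 9 (2*3 + 3*1 fully connected links)
```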
data/t_learn.gemspec CHANGED
@@ -7,7 +7,7 @@ Gem::Specification.new do |spec|
    spec.name = "t_learn"
    spec.version = TLearn::VERSION
    spec.authors = ["Tcom242242"]
-   spec.email = ["katsuo242242@gmail.com"]
+   spec.email = ["okano565656@gmail.com"]
 
    spec.summary = %q{my machine learning lib}
    spec.description = %q{my machine learning lib}
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: t_learn
  version: !ruby/object:Gem::Version
-   version: 0.1.0.1
+   version: 0.1.1.0
  platform: ruby
  authors:
  - Tcom242242
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2016-12-22 00:00:00.000000000 Z
+ date: 2016-12-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: bundler
@@ -54,7 +54,7 @@ dependencies:
      version: '3.0'
  description: my machine learning lib
  email:
- - katsuo242242@gmail.com
+ - okano565656@gmail.com
  executables: []
  extensions: []
  extra_rdoc_files: []
@@ -71,6 +71,7 @@ files:
  - bin/setup
  - lib/t_learn.rb
  - lib/t_learn/em.rb
+ - lib/t_learn/feedforward_neural_network.rb
  - lib/t_learn/hop_field_net.rb
  - lib/t_learn/version.rb
  - t_learn.gemspec