t_nn 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: a940f773ac9b387e51a7576b260df901b63b6b8c
4
+ data.tar.gz: d1aa2afc9610677c0e274bef30b6c6e73f91d585
5
+ SHA512:
6
+ metadata.gz: c997c47c726e991d44e0d2eae8ac93abb0daad6afd9161dcb144df04c9dc7cb4e2d8af0d425ed037a27780dbf04f97af23346035c986da4f5104405080d39c82
7
+ data.tar.gz: c7790b31206887edced932d7c1b5dd87da21b5ba2b4de1878b20cc20fee77e6f7459cfb0d0878e45a6a0fa9dff1369bdec2d1edc75827d3d7f95ce658f28ab42
data/.gitignore ADDED
@@ -0,0 +1,9 @@
1
+ /.bundle/
2
+ /.yardoc
3
+ /Gemfile.lock
4
+ /_yardoc/
5
+ /coverage/
6
+ /doc/
7
+ /pkg/
8
+ /spec/reports/
9
+ /tmp/
data/.rspec ADDED
@@ -0,0 +1,2 @@
1
+ --format documentation
2
+ --color
data/.travis.yml ADDED
@@ -0,0 +1,5 @@
1
+ sudo: false
2
+ language: ruby
3
+ rvm:
4
+ - 2.3.0
5
+ before_install: gem install bundler -v 1.13.1
@@ -0,0 +1,74 @@
1
+ # Contributor Covenant Code of Conduct
2
+
3
+ ## Our Pledge
4
+
5
+ In the interest of fostering an open and welcoming environment, we as
6
+ contributors and maintainers pledge to making participation in our project and
7
+ our community a harassment-free experience for everyone, regardless of age, body
8
+ size, disability, ethnicity, gender identity and expression, level of experience,
9
+ nationality, personal appearance, race, religion, or sexual identity and
10
+ orientation.
11
+
12
+ ## Our Standards
13
+
14
+ Examples of behavior that contributes to creating a positive environment
15
+ include:
16
+
17
+ * Using welcoming and inclusive language
18
+ * Being respectful of differing viewpoints and experiences
19
+ * Gracefully accepting constructive criticism
20
+ * Focusing on what is best for the community
21
+ * Showing empathy towards other community members
22
+
23
+ Examples of unacceptable behavior by participants include:
24
+
25
+ * The use of sexualized language or imagery and unwelcome sexual attention or
26
+ advances
27
+ * Trolling, insulting/derogatory comments, and personal or political attacks
28
+ * Public or private harassment
29
+ * Publishing others' private information, such as a physical or electronic
30
+ address, without explicit permission
31
+ * Other conduct which could reasonably be considered inappropriate in a
32
+ professional setting
33
+
34
+ ## Our Responsibilities
35
+
36
+ Project maintainers are responsible for clarifying the standards of acceptable
37
+ behavior and are expected to take appropriate and fair corrective action in
38
+ response to any instances of unacceptable behavior.
39
+
40
+ Project maintainers have the right and responsibility to remove, edit, or
41
+ reject comments, commits, code, wiki edits, issues, and other contributions
42
+ that are not aligned to this Code of Conduct, or to ban temporarily or
43
+ permanently any contributor for other behaviors that they deem inappropriate,
44
+ threatening, offensive, or harmful.
45
+
46
+ ## Scope
47
+
48
+ This Code of Conduct applies both within project spaces and in public spaces
49
+ when an individual is representing the project or its community. Examples of
50
+ representing a project or community include using an official project e-mail
51
+ address, posting via an official social media account, or acting as an appointed
52
+ representative at an online or offline event. Representation of a project may be
53
+ further defined and clarified by project maintainers.
54
+
55
+ ## Enforcement
56
+
57
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be
58
+ reported by contacting the project team at okano565656@gmail.com. All
59
+ complaints will be reviewed and investigated and will result in a response that
60
+ is deemed necessary and appropriate to the circumstances. The project team is
61
+ obligated to maintain confidentiality with regard to the reporter of an incident.
62
+ Further details of specific enforcement policies may be posted separately.
63
+
64
+ Project maintainers who do not follow or enforce the Code of Conduct in good
65
+ faith may face temporary or permanent repercussions as determined by other
66
+ members of the project's leadership.
67
+
68
+ ## Attribution
69
+
70
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71
+ available at [http://contributor-covenant.org/version/1/4][version]
72
+
73
+ [homepage]: http://contributor-covenant.org
74
+ [version]: http://contributor-covenant.org/version/1/4/
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source 'https://rubygems.org'
2
+
3
+ # Specify your gem's dependencies in t_nn.gemspec
4
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2017 tcom
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in
13
+ all copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,100 @@
1
+ # TNn
2
+
3
+ Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/t_nn`. To experiment with that code, run `bin/console` for an interactive prompt.
4
+
5
+ TODO: Delete this and the text above, and describe your gem
6
+
7
+ ## Installation
8
+
9
+ Add this line to your application's Gemfile:
10
+
11
+ ```ruby
12
+ gem 't_nn'
13
+ ```
14
+
15
+ And then execute:
16
+
17
+ $ bundle
18
+
19
+ Or install it yourself as:
20
+
21
+ $ gem install t_nn
22
+
23
+ ## Usage
24
+ ### simple feedforward_neural_network
25
+ Inspired by the Keras API.
26
+
27
+ ``` ruby
28
+
29
+ require "t_nn"
30
+
31
+ model = TNN::FeedForwardNeuralNetwork.new(learning_rate=0.1)
32
+
33
+ model.add_layer(node_num=2)
34
+ model.add_layer(node_num=3)
35
+ model.add_layer(node_num=1)
36
+
37
+ x_train = [[0.0, 0.0],[0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
38
+ y_train = [[ 0.0 ], [ 1.0 ],[ 1.0 ],[ 0.0 ]]
39
+ model.fit(x_train, y_train, epoch=50000)
40
+
41
+ x_test = x_train
42
+ y_test = y_train
43
+
44
+ err_rate = model.evaluate(x_test, y_test)
45
+
46
+ puts "err rate: #{err_rate}%"
47
+
48
+ # p x_test[0]
49
+ # model.propagation(x_text[0])
50
+ # puts model.get_output_layer[1].w
51
+
52
+ ```
53
+
54
+ ### result
55
+ ....
56
+
57
+ ```
58
+
59
+ x [0.0, 0.0], y [0.0] , output [0.03286460161620565]
60
+ x [0.0, 1.0], y [1.0] , output [0.9733866321804969]
61
+ x [1.0, 0.0], y [1.0] , output [0.9731963536942299]
62
+ x [1.0, 1.0], y [0.0] , output [0.014481150692655216]
63
+ err rate: 2.5190691608533524%
64
+
65
+ ```
66
+
67
+
68
+ ### Hopfield net
69
+ sample
70
+ ``` ruby
71
+
72
+ require "t_nn"
73
+
74
+ data = [1.0, 1.0, -1.0, -1.0, 1.0] # teacher data
75
+ hop_field_net = TNN::HopFieldNetwork.new(0.0, data)
76
+ hop_field_net.memorize
77
+ noisedData = TNN.add_noise_data(data, 0.0) # make test data
78
+ puts "======[before]======"
79
+ puts "#{TNN.evaluate(data, noisedData)}%"
80
+ hop_field_net.remember(noisedData)
81
+ puts "======[after]======"
82
+ puts "#{ TNN.evaluate(data,hop_field_net.nodes) }%"
83
+
84
+ ```
85
+
86
+ ## Development
87
+
88
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
89
+
90
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
91
+
92
+ ## Contributing
93
+
94
+ Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/t_nn. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
95
+
96
+
97
+ ## License
98
+
99
+ The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
100
+
data/Rakefile ADDED
@@ -0,0 +1,6 @@
1
+ require "bundler/gem_tasks"
2
+ require "rspec/core/rake_task"
3
+
4
+ RSpec::Core::RakeTask.new(:spec)
5
+
6
+ task :default => :spec
data/bin/console ADDED
@@ -0,0 +1,14 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ require "bundler/setup"
4
+ require "t_nn"
5
+
6
+ # You can add fixtures and/or initialization code here to make experimenting
7
+ # with your gem easier. You can also use a different console, if you like.
8
+
9
+ # (If you use this, don't forget to add pry to your Gemfile!)
10
+ # require "pry"
11
+ # Pry.start
12
+
13
+ require "irb"
14
+ IRB.start
data/bin/setup ADDED
@@ -0,0 +1,8 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+ IFS=$'\n\t'
4
+ set -vx
5
+
6
+ bundle install
7
+
8
+ # Do any other automated setup that you need to do here
@@ -0,0 +1,189 @@
1
+ #!/usr/bin/ruby
2
+ # -*- encoding: utf-8 -*-
3
+
4
+ module TNN
5
+
6
+ #
7
+ # ==
8
+ #
9
+ class FeedForwardNeuralNetwork
10
+ attr_accessor :layer_list, :layer_size, :link_list, :node_id, :learning_rate, :err_list, :threshold
11
+ def initialize(learning_rate=0.1, threshold=0.0)
12
+ @layer_size = 0 #layer iterator
13
+ @layer_list = Array.new
14
+ @link_list = Hash.new
15
+ @node_id = 0
16
+ @learning_rate = learning_rate
17
+ @err_list = Array.new
18
+ @threshold = threshold
19
+ end
20
+
21
+ def add_layer(node_num)
22
+ node_list = Array.new()
23
+ node_num.times do |num|
24
+ node = Node.new(0.0,"sig", @threshold)
25
+ node.set_id(@node_id)
26
+ node_list.push(node)
27
+ @node_id += 1
28
+ end
29
+
30
+ @layer_list.push(node_list)
31
+ # connect link
32
+ if @layer_size != 0 # if not first layer
33
+ # connect link to @layer_size - 1 layer
34
+ connect_nodes
35
+ end
36
+ @layer_size += 1
37
+ end
38
+
39
+ #
40
+ # === connect_nodes
41
+ #
42
+ def connect_nodes
43
+ @layer_list[@layer_size - 1].each do |from_node|
44
+ @layer_list[@layer_size].each do |to_node|
45
+ @link_list["#{from_node.id}_#{to_node.id}"] = rand(-1.0...1.0)
46
+ end
47
+ end
48
+ end
49
+
50
+ #
51
+ # ===
52
+ #
53
+ # @param x_train Array
54
+ # @param y_train Array
55
+ #
56
+ def fit(x_train, y_train, epoch)
57
+ # input teacher_datas
58
+ epoch.times do
59
+ epoch_err = 0.0
60
+ x_train.zip(y_train).each do |x, y|
61
+ x, y = x_train.zip(y_train).sample
62
+ # puts "x #{x}, y #{y}"
63
+ propagation(x)
64
+ epoch_err += calc_ave_err(y)
65
+ back_propagation(y)
66
+ end
67
+ @err_list.push(epoch_err)
68
+ end
69
+ end
70
+
71
+ def propagation(x)
72
+ # input data
73
+ @layer_list[0].each_with_index do |node, i|
74
+ node.input (x[i])
75
+ end
76
+ @layer_size.times do |layer_num|
77
+ if layer_num != (@layer_size-1)
78
+ # puts "layernum #{layer_num}"
79
+ @layer_list[layer_num + 1].each do |to_node|
80
+ sum_all_from_node = 0.0
81
+ @layer_list[layer_num].each do |from_node|
82
+ sum_all_from_node += @link_list["#{from_node.id}_#{to_node.id}"] * from_node.w
83
+ end
84
+ to_node.update_w(sum_all_from_node + 1.0)
85
+ end
86
+ end
87
+ end
88
+ end
89
+
90
+ def calc_ave_err(y)
91
+ sum_err = 0.0
92
+ @layer_list[@layer_size - 1].each_with_index do |node, i|
93
+ sum_err += calc_err(node.w,y[i]).abs
94
+ end
95
+ ave_err = (sum_err)/y.size
96
+ return ave_err
97
+ end
98
+
99
+
100
+ #
101
+ # ===
102
+ #
103
+ # @param y Array teacher_data
104
+ #
105
+ def back_propagation(y)
106
+ delta = {}
107
+ ( @layer_size - 1).downto(0) do |layer_num|
108
+ if ( @layer_size - 1) == layer_num # if output layer
109
+ @layer_list[layer_num].each_with_index do |output_node, i|
110
+ delta["#{output_node.id}"] = -1.0 * calc_err(y[i], output_node.w) * output_node.w * (1.0 -output_node.w)
111
+ end
112
+ else
113
+ @layer_list[layer_num].each do |from_node|
114
+ # リンクの更新
115
+ @layer_list[layer_num + 1].each do |to_node|
116
+ update_weight = -1.0 * @learning_rate * delta["#{to_node.id}"] * from_node.w
117
+ @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + update_weight
118
+ end
119
+ # その層のdeltaの更新
120
+ delta["#{from_node.id}"] = calc_delta(delta,layer_num, from_node) * from_node.w * (1.0 - from_node.w)
121
+ end
122
+ end
123
+ end
124
+ end
125
+
126
+ def calc_err(teacher_data, w)
127
+ return (teacher_data -w)
128
+ end
129
+
130
+ def calc_delta(delta,layer_i, from_node)
131
+ sum = 0.0
132
+ @layer_list[layer_i+1].each do |to_node|
133
+ sum += delta["#{to_node.id}"] * @link_list["#{from_node.id}_#{to_node.id}"]
134
+ end
135
+ return sum
136
+ end
137
+
138
+ def evaluate(x_test, y_test)
139
+ # compare teacher_datas and output of nn
140
+ sum_err = 0.0
141
+ x_test.zip(y_test).each do |x, y|
142
+ propagation(x)
143
+ output = []
144
+ err = 0.0
145
+ @layer_list[@layer_size -1].zip(y).each do |o, y_f|
146
+ output.push(o.w)
147
+ err += (y_f - o.w).abs
148
+ end
149
+ sum_err += (err/y_test[0].size)
150
+ puts "x #{x}, y #{y} , output #{output}"
151
+ end
152
+ return (sum_err/y_test.size) * 100.0
153
+ # return 0.0
154
+ end
155
+
156
+ def get_output_layer
157
+ return @layer_list[@layer_size-1]
158
+ end
159
+
160
+ class Node
161
+ attr_accessor :w,:active_function, :threshold, :id
162
+ def initialize(w = 0.0, active_function = "sig", threshold = 0.0)
163
+ @w = w
164
+ @threshold = threshold
165
+ @active_function = active_function
166
+ end
167
+
168
+ def set_id(id)
169
+ @id = id
170
+ end
171
+
172
+ # it can use input fase
173
+ def input(w)
174
+ @w = w
175
+ end
176
+
177
+ def update_w(input)
178
+ # update by sigmoid
179
+ @w = sigmoid_fun(input)
180
+ end
181
+
182
+ def sigmoid_fun(x, a=1)
183
+ return (1.0/(1.0+Math.exp(-1.0 * a * x)))
184
+ end
185
+ end
186
+ end
187
+ end
188
+
189
+
@@ -0,0 +1,134 @@
1
+ #!/usr/bin/ruby
2
+ # -*- encoding: utf-8 -*-
3
+
4
+
5
+ module TNN
6
+ class HopFieldNetwork
7
+ attr_accessor :net,:train_datas,:threshold ,:nodes,:dim,:is_train
8
+
9
+ def initialize(threshold=nil, data)
10
+ if threshold == nil
11
+ @threshold = 0.0
12
+ else
13
+ @threshold = threshold
14
+ end
15
+ @train_datas=Array.new
16
+ load_train_data(data)
17
+ @nodes = Array.new(@train_datas[0].length, 1.0)
18
+ @dim = @train_datas[0].length
19
+ @net = Array.new(@dim**2,0.0)
20
+
21
+ end
22
+
23
+ def memorize
24
+ @nodes.length.times do |node_id|
25
+ @nodes.length.times do |node2_id|
26
+ sum = 0.0
27
+ @train_datas.each do |train_data|
28
+ sum += train_data[node_id] * train_data[node2_id] if(node_id != node2_id)
29
+ end
30
+ @net[node_id * @dim + node2_id] = sum
31
+ @net[node2_id*@dim + node_id] = sum
32
+ end
33
+ end
34
+ end
35
+
36
+ #
37
+ # ===
38
+ #
39
+ # @param [Array] datas datas which has noise
40
+ #
41
+ def remember(datas)
42
+ @nodes = datas
43
+ e = energy
44
+ loop do
45
+ @nodes.each_with_index do |node,node_id|
46
+ internal_w = calc_connected_factor(node_id)
47
+ update_external_w(node_id,internal_w)
48
+ end
49
+ new_e = energy
50
+ break if (e == new_e)
51
+ e = new_e
52
+ end
53
+ puts "energy : #{energy}"
54
+ end
55
+
56
+ def calc_connected_factor(target_node_id)
57
+ sum = 0.0
58
+ @nodes.each_with_index do |node,node_id|
59
+ sum += @net[target_node_id*@dim + node_id] * node if (target_node_id != node_id )
60
+ end
61
+ return sum
62
+ end
63
+
64
+ def update_external_w(node_id,i_w)
65
+ if i_w >= @threshold
66
+ @nodes[node_id] = 1.0
67
+ else
68
+ @nodes[node_id] = -1.0
69
+ end
70
+ end
71
+
72
+ #
73
+ # calc energy function
74
+ #
75
+ def energy
76
+ sum = 0.0
77
+ @nodes.each_with_index do |node,node_id|
78
+ @nodes.each_with_index do |node2,node2_id|
79
+ sum += @net[node2_id*@dim + node_id] * node * node2 if ( node != node2)
80
+ end
81
+ end
82
+ sum2 = 0.0
83
+ @nodes.each do |node|
84
+ sum2 += @threshold * node
85
+ end
86
+
87
+ result = (-1.0/2.0)*sum + sum2
88
+ return result
89
+ end
90
+
91
+
92
+ def load_train_data(data)
93
+ @train_datas.push(data)
94
+ end
95
+
96
+ end
97
+
98
+ #
99
+ # === add noise to sample datas
100
+ # @param data Array data to which we want to add noise
101
+ # @param noise_rate float rate of noise
102
+ #
103
+ def TNN.add_noise_data(data,noise_rate)
104
+ data_with_noise = Marshal.load(Marshal.dump(data))
105
+ data.size.times do |n|
106
+ if rand <= noise_rate
107
+ if data_with_noise[n] == -1.0
108
+ data_with_noise[n] = 1.0
109
+ else
110
+ data_with_noise[n] = -1.0
111
+ end
112
+ end
113
+ end
114
+
115
+ return data_with_noise
116
+ end
117
+
118
+ #
119
+ # === evaluate predicted data against teacher data
120
+ #
121
+ def TNN.evaluate(teacher_data,data)
122
+ dominator = 0.0
123
+ molecule = 0.0
124
+ teacher_data.zip(data).each do |td,d|
125
+ dominator += 1
126
+ molecule += 1 if td == d
127
+ end
128
+
129
+ return (molecule/dominator)*100
130
+ end
131
+
132
+ end
133
+
134
+
@@ -0,0 +1,3 @@
1
+ module TNn
2
+ VERSION = "0.0.1"
3
+ end
data/lib/t_nn.rb ADDED
@@ -0,0 +1,7 @@
1
+ require "t_nn/version"
2
+ require "t_nn/feedforward_neural_network"
3
+ require "t_nn/hop_field_network"
4
+
5
+ module TNn
6
+ # Your code goes here...
7
+ end
data/t_nn.gemspec ADDED
@@ -0,0 +1,36 @@
1
+ # coding: utf-8
2
+ lib = File.expand_path('../lib', __FILE__)
3
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
+ require 't_nn/version'
5
+
6
+ Gem::Specification.new do |spec|
7
+ spec.name = "t_nn"
8
+ spec.version = TNn::VERSION
9
+ spec.authors = ["tcom"]
10
+ spec.email = ["okano565656@gmail.com"]
11
+
12
+ spec.summary = %q{my nn lib}
13
+ spec.description = %q{my nn lib}
14
+ spec.homepage = "https://github.com/Tcom242242/t_nn"
15
+ spec.license = "MIT"
16
+
17
+ # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
18
+ # to allow pushing to a single host or delete this section to allow pushing to any host.
19
+ if spec.respond_to?(:metadata)
20
+ # spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
21
+ else
22
+ raise "RubyGems 2.0 or newer is required to protect against " \
23
+ "public gem pushes."
24
+ end
25
+
26
+ spec.files = `git ls-files -z`.split("\x0").reject do |f|
27
+ f.match(%r{^(test|spec|features)/})
28
+ end
29
+ spec.bindir = "exe"
30
+ spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
31
+ spec.require_paths = ["lib"]
32
+
33
+ spec.add_development_dependency "bundler", "~> 1.13"
34
+ spec.add_development_dependency "rake", "~> 10.0"
35
+ spec.add_development_dependency "rspec", "~> 3.0"
36
+ end
metadata ADDED
@@ -0,0 +1,101 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: t_nn
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ platform: ruby
6
+ authors:
7
+ - tcom
8
+ autorequire:
9
+ bindir: exe
10
+ cert_chain: []
11
+ date: 2017-01-06 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: bundler
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '1.13'
20
+ type: :development
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '1.13'
27
+ - !ruby/object:Gem::Dependency
28
+ name: rake
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '10.0'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '10.0'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rspec
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '3.0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '3.0'
55
+ description: my nn lib
56
+ email:
57
+ - okano565656@gmail.com
58
+ executables: []
59
+ extensions: []
60
+ extra_rdoc_files: []
61
+ files:
62
+ - ".gitignore"
63
+ - ".rspec"
64
+ - ".travis.yml"
65
+ - CODE_OF_CONDUCT.md
66
+ - Gemfile
67
+ - LICENSE.txt
68
+ - README.md
69
+ - Rakefile
70
+ - bin/console
71
+ - bin/setup
72
+ - lib/t_nn.rb
73
+ - lib/t_nn/feedforward_neural_network.rb
74
+ - lib/t_nn/hop_field_network.rb
75
+ - lib/t_nn/version.rb
76
+ - t_nn.gemspec
77
+ homepage: https://github.com/Tcom242242/t_nn
78
+ licenses:
79
+ - MIT
80
+ metadata: {}
81
+ post_install_message:
82
+ rdoc_options: []
83
+ require_paths:
84
+ - lib
85
+ required_ruby_version: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - ">="
88
+ - !ruby/object:Gem::Version
89
+ version: '0'
90
+ required_rubygems_version: !ruby/object:Gem::Requirement
91
+ requirements:
92
+ - - ">="
93
+ - !ruby/object:Gem::Version
94
+ version: '0'
95
+ requirements: []
96
+ rubyforge_project:
97
+ rubygems_version: 2.5.1
98
+ signing_key:
99
+ specification_version: 4
100
+ summary: my nn lib
101
+ test_files: []