rann 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: b474ce98e49bb5067d2fc8ddba98a9c6f887c618
+   data.tar.gz: feb50a3a4804494c0a4c2551e7a2b3807b89dd69
+ SHA512:
+   metadata.gz: fc4f352df3b64af7d6b4c513d0aefd343ec35b4b6b599e7154cdbb7eefd06365023c2192c1736e3d7298a6895b388db3047387cfb4e52e7136f97eb09a320e5d
+   data.tar.gz: eec1f19c738594b4b29c5de6da0268d249c4eaaddec7f243919f96ee73331025e571a52d215e894255c79f2451b55de5eb43caaa53a91d898048548190ee355e
data/.gitignore ADDED
@@ -0,0 +1,8 @@
+ /.bundle/
+ /.yardoc
+ /_yardoc/
+ /coverage/
+ /doc/
+ /pkg/
+ /spec/reports/
+ /tmp/
data/.travis.yml ADDED
@@ -0,0 +1,5 @@
+ sudo: false
+ language: ruby
+ rvm:
+   - 2.3.1
+ before_install: gem install bundler -v 1.16.0
data/CHANGES.md ADDED
@@ -0,0 +1,3 @@
+ - Basic classes required for most neural network designs and backprop.
+
+   *Michael Campbell*
data/Gemfile ADDED
@@ -0,0 +1,5 @@
+ source "https://rubygems.org"
+
+ git_source(:github){ |repo_name| "https://github.com/#{repo_name}" }
+
+ gemspec
data/Gemfile.lock ADDED
@@ -0,0 +1,26 @@
+ PATH
+   remote: .
+   specs:
+     rann (0.1.0)
+       parallel (~> 1.12.0)
+       ruby-graphviz (~> 1.2.3)
+
+ GEM
+   remote: https://rubygems.org/
+   specs:
+     minitest (5.10.3)
+     parallel (1.12.0)
+     rake (10.5.0)
+     ruby-graphviz (1.2.3)
+
+ PLATFORMS
+   ruby
+
+ DEPENDENCIES
+   bundler (~> 1.16)
+   minitest (~> 5.0)
+   rake (~> 10.0)
+   rann!
+
+ BUNDLED WITH
+    1.16.0
data/LICENCE ADDED
@@ -0,0 +1,201 @@
+                                  Apache License
+                            Version 2.0, January 2004
+                         http://www.apache.org/licenses/
+
+    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+    1. Definitions.
+
+       "License" shall mean the terms and conditions for use, reproduction,
+       and distribution as defined by Sections 1 through 9 of this document.
+
+       "Licensor" shall mean the copyright owner or entity authorized by
+       the copyright owner that is granting the License.
+
+       "Legal Entity" shall mean the union of the acting entity and all
+       other entities that control, are controlled by, or are under common
+       control with that entity. For the purposes of this definition,
+       "control" means (i) the power, direct or indirect, to cause the
+       direction or management of such entity, whether by contract or
+       otherwise, or (ii) ownership of fifty percent (50%) or more of the
+       outstanding shares, or (iii) beneficial ownership of such entity.
+
+       "You" (or "Your") shall mean an individual or Legal Entity
+       exercising permissions granted by this License.
+
+       "Source" form shall mean the preferred form for making modifications,
+       including but not limited to software source code, documentation
+       source, and configuration files.
+
+       "Object" form shall mean any form resulting from mechanical
+       transformation or translation of a Source form, including but
+       not limited to compiled object code, generated documentation,
+       and conversions to other media types.
+
+       "Work" shall mean the work of authorship, whether in Source or
+       Object form, made available under the License, as indicated by a
+       copyright notice that is included in or attached to the work
+       (an example is provided in the Appendix below).
+
+       "Derivative Works" shall mean any work, whether in Source or Object
+       form, that is based on (or derived from) the Work and for which the
+       editorial revisions, annotations, elaborations, or other modifications
+       represent, as a whole, an original work of authorship. For the purposes
+       of this License, Derivative Works shall not include works that remain
+       separable from, or merely link (or bind by name) to the interfaces of,
+       the Work and Derivative Works thereof.
+
+       "Contribution" shall mean any work of authorship, including
+       the original version of the Work and any modifications or additions
+       to that Work or Derivative Works thereof, that is intentionally
+       submitted to Licensor for inclusion in the Work by the copyright owner
+       or by an individual or Legal Entity authorized to submit on behalf of
+       the copyright owner. For the purposes of this definition, "submitted"
+       means any form of electronic, verbal, or written communication sent
+       to the Licensor or its representatives, including but not limited to
+       communication on electronic mailing lists, source code control systems,
+       and issue tracking systems that are managed by, or on behalf of, the
+       Licensor for the purpose of discussing and improving the Work, but
+       excluding communication that is conspicuously marked or otherwise
+       designated in writing by the copyright owner as "Not a Contribution."
+
+       "Contributor" shall mean Licensor and any individual or Legal Entity
+       on behalf of whom a Contribution has been received by Licensor and
+       subsequently incorporated within the Work.
+
+    2. Grant of Copyright License. Subject to the terms and conditions of
+       this License, each Contributor hereby grants to You a perpetual,
+       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+       copyright license to reproduce, prepare Derivative Works of,
+       publicly display, publicly perform, sublicense, and distribute the
+       Work and such Derivative Works in Source or Object form.
+
+    3. Grant of Patent License. Subject to the terms and conditions of
+       this License, each Contributor hereby grants to You a perpetual,
+       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+       (except as stated in this section) patent license to make, have made,
+       use, offer to sell, sell, import, and otherwise transfer the Work,
+       where such license applies only to those patent claims licensable
+       by such Contributor that are necessarily infringed by their
+       Contribution(s) alone or by combination of their Contribution(s)
+       with the Work to which such Contribution(s) was submitted. If You
+       institute patent litigation against any entity (including a
+       cross-claim or counterclaim in a lawsuit) alleging that the Work
+       or a Contribution incorporated within the Work constitutes direct
+       or contributory patent infringement, then any patent licenses
+       granted to You under this License for that Work shall terminate
+       as of the date such litigation is filed.
+
+    4. Redistribution. You may reproduce and distribute copies of the
+       Work or Derivative Works thereof in any medium, with or without
+       modifications, and in Source or Object form, provided that You
+       meet the following conditions:
+
+       (a) You must give any other recipients of the Work or
+           Derivative Works a copy of this License; and
+
+       (b) You must cause any modified files to carry prominent notices
+           stating that You changed the files; and
+
+       (c) You must retain, in the Source form of any Derivative Works
+           that You distribute, all copyright, patent, trademark, and
+           attribution notices from the Source form of the Work,
+           excluding those notices that do not pertain to any part of
+           the Derivative Works; and
+
+       (d) If the Work includes a "NOTICE" text file as part of its
+           distribution, then any Derivative Works that You distribute must
+           include a readable copy of the attribution notices contained
+           within such NOTICE file, excluding those notices that do not
+           pertain to any part of the Derivative Works, in at least one
+           of the following places: within a NOTICE text file distributed
+           as part of the Derivative Works; within the Source form or
+           documentation, if provided along with the Derivative Works; or,
+           within a display generated by the Derivative Works, if and
+           wherever such third-party notices normally appear. The contents
+           of the NOTICE file are for informational purposes only and
+           do not modify the License. You may add Your own attribution
+           notices within Derivative Works that You distribute, alongside
+           or as an addendum to the NOTICE text from the Work, provided
+           that such additional attribution notices cannot be construed
+           as modifying the License.
+
+       You may add Your own copyright statement to Your modifications and
+       may provide additional or different license terms and conditions
+       for use, reproduction, or distribution of Your modifications, or
+       for any such Derivative Works as a whole, provided Your use,
+       reproduction, and distribution of the Work otherwise complies with
+       the conditions stated in this License.
+
+    5. Submission of Contributions. Unless You explicitly state otherwise,
+       any Contribution intentionally submitted for inclusion in the Work
+       by You to the Licensor shall be under the terms and conditions of
+       this License, without any additional terms or conditions.
+       Notwithstanding the above, nothing herein shall supersede or modify
+       the terms of any separate license agreement you may have executed
+       with Licensor regarding such Contributions.
+
+    6. Trademarks. This License does not grant permission to use the trade
+       names, trademarks, service marks, or product names of the Licensor,
+       except as required for reasonable and customary use in describing the
+       origin of the Work and reproducing the content of the NOTICE file.
+
+    7. Disclaimer of Warranty. Unless required by applicable law or
+       agreed to in writing, Licensor provides the Work (and each
+       Contributor provides its Contributions) on an "AS IS" BASIS,
+       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+       implied, including, without limitation, any warranties or conditions
+       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+       PARTICULAR PURPOSE. You are solely responsible for determining the
+       appropriateness of using or redistributing the Work and assume any
+       risks associated with Your exercise of permissions under this License.
+
+    8. Limitation of Liability. In no event and under no legal theory,
+       whether in tort (including negligence), contract, or otherwise,
+       unless required by applicable law (such as deliberate and grossly
+       negligent acts) or agreed to in writing, shall any Contributor be
+       liable to You for damages, including any direct, indirect, special,
+       incidental, or consequential damages of any character arising as a
+       result of this License or out of the use or inability to use the
+       Work (including but not limited to damages for loss of goodwill,
+       work stoppage, computer failure or malfunction, or any and all
+       other commercial damages or losses), even if such Contributor
+       has been advised of the possibility of such damages.
+
+    9. Accepting Warranty or Additional Liability. While redistributing
+       the Work or Derivative Works thereof, You may choose to offer,
+       and charge a fee for, acceptance of support, warranty, indemnity,
+       or other liability obligations and/or rights consistent with this
+       License. However, in accepting such obligations, You may act only
+       on Your own behalf and on Your sole responsibility, not on behalf
+       of any other Contributor, and only if You agree to indemnify,
+       defend, and hold each Contributor harmless for any liability
+       incurred by, or claims asserted against, such Contributor by reason
+       of your accepting any such warranty or additional liability.
+
+    END OF TERMS AND CONDITIONS
+
+    APPENDIX: How to apply the Apache License to your work.
+
+       To apply the Apache License to your work, attach the following
+       boilerplate notice, with the fields enclosed by brackets "[]"
+       replaced with your own identifying information. (Don't include
+       the brackets!) The text should be enclosed in the appropriate
+       comment syntax for the file format. We also recommend that a
+       file or class name and description of purpose be included on the
+       same "printed page" as the copyright notice for easier
+       identification within third-party archives.
+
+    Copyright 2017 Michael Campbell
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
data/README.md ADDED
@@ -0,0 +1,59 @@
+ # RANN
+
+ This library provides objects and algorithms for designing, processing and
+ training Artificial Neural Networks in Ruby.
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+ ```ruby
+ gem 'rann'
+ ```
+
+ And then execute:
+
+     $ bundle
+
+ Or install it yourself as:
+
+     $ gem install rann
+
+ ## Usage
+
+ See examples/
+
+ To run an example:
+
+ ```
+ git clone https://github.com/mikecmpbll/rann.git
+ cd rann
+ bin/setup
+ ruby examples/xor.rb
+ ```
+
+ ## TODO
+
+ So much. So much.
+
+ - Convenience methods for setting up standard network topologies, crucially,
+   layers
+ - Batch normalization/drop out/early stopping
+ - Hyperparameter optimisation
+ - Other adaptive learning rate algorithms (Adadelta, Adam, etc?)
+ - Explore matrix operations and other ways to optimise performance of algorithms
+ - RPROP?
+ - Use enumerable-statistics gem?
+ - Speed up by adding a reduce step to the parallel gem?
+ - More examples
+ - Tests
+
+ ## Development
+
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
+
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+
+ ## Contributing
+
+ Bug reports and pull requests are welcome on GitHub at https://github.com/mikecmpbll/rann.
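For a quick feel of the API before digging into `examples/`, here is a condensed sketch distilled from the bundled `examples/xor.rb` below (same classes and calls, with the training loop reduced to a single `run_batch` step):

```ruby
require "rann"

# two inputs; three hidden neurons, each expecting 3 incoming values
# (2 inputs + bias); one sigmoid output expecting 3 (one per hidden)
inputs  = Array.new(2){ |i| RANN::Neuron.new "input #{i}", 0, :input }
hiddens = Array.new(3){ |i| RANN::Neuron.new "hidden #{i}", 3 }
bias    = RANN::Neuron.new "bias", 0, :bias
output  = RANN::Neuron.new "output", 3, :output, :sig

connections = []
hiddens.each do |h|
  inputs.each{ |i| connections.push RANN::Connection.new i, h }
  connections.push RANN::Connection.new bias, h
  connections.push RANN::Connection.new h, output
end

network  = RANN::Network.new connections
backprop = RANN::Backprop.new network

# one stochastic gradient step on the sample [0, 1] -> [1]
error = backprop.run_batch [[0.to_d, 1.to_d]], [[1.to_d]], processes: 0
puts error.to_f
```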
data/Rakefile ADDED
@@ -0,0 +1,10 @@
+ require "bundler/gem_tasks"
+ require "rake/testtask"
+
+ Rake::TestTask.new :test do |t|
+   t.libs << "test"
+   t.libs << "lib"
+   t.test_files = FileList["test/**/*_test.rb"]
+ end
+
+ task default: :test
data/bin/console ADDED
@@ -0,0 +1,14 @@
+ #!/usr/bin/env ruby
+
+ require "bundler/setup"
+ require "rann"
+
+ # You can add fixtures and/or initialization code here to make experimenting
+ # with your gem easier. You can also use a different console, if you like.
+
+ # (If you use this, don't forget to add pry to your Gemfile!)
+ # require "pry"
+ # Pry.start
+
+ require "irb"
+ IRB.start(__FILE__)
data/bin/setup ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+ IFS=$'\n\t'
+ set -vx
+
+ bundle install
+
+ # Do any other automated setup that you need to do here
data/examples/xor.rb ADDED
@@ -0,0 +1,46 @@
+ require "bundler/setup"
+ require "rann"
+
+ # inputs
+ inputs = Array.new(2){ |i| RANN::Neuron.new "input #{i}", 0, :input }
+
+ # hidden layer
+ hiddens = Array.new(3){ |i| RANN::Neuron.new "hidden #{i}", 3 }
+ bias = RANN::Neuron.new "bias", 0, :bias
+
+ # output layer
+ output = RANN::Neuron.new "output", 3, :output, :sig
+
+ # connect it all
+ connections = []
+ hiddens.each do |h|
+   inputs.each do |i|
+     connections.push RANN::Connection.new i, h
+   end
+   connections.push RANN::Connection.new bias, h
+   connections.push RANN::Connection.new h, output
+ end
+
+ network  = RANN::Network.new connections
+ backprop = RANN::Backprop.new network
+
+ inputs  = [[0,0],[0,1],[1,0],[1,1]]
+ targets = [[0],[1],[1],[0]]
+
+ i = 0
+ loop do
+   i += 1
+   sample_index = (rand * inputs.size).to_i
+
+   avg_error =
+     backprop.run_batch(
+       [inputs[sample_index].map(&:to_d)],
+       [targets[sample_index].map(&:to_d)],
+       processes: 0,
+       checking: false
+     )
+
+   puts "iteration #{i} error: #{avg_error.to_f}"
+
+   break if avg_error < 0.0001
+ end
data/lib/rann/backprop.rb ADDED
@@ -0,0 +1,222 @@
+ require "parallel"
+ require "rann/gradient_checker"
+ require "rann/util/array_ext"
+
+ module RANN
+   class Backprop
+     include Util::ArrayExt
+
+     ACTIVATION_DERIVATIVES = {
+       relu:   ->(x){ x > 0 ? 1.to_d : 0.to_d },
+       sig:    ->(x){ x.mult(1 - x, 10) },
+       linear: ->(_){ 1.to_d },
+       tanh:   ->(x){ 1 - x.power(2, 10) },
+       step:   ->(_){ 0.to_d },
+     }
+
+     DECAY                              = BigDecimal.new('0.9')
+     MASTER_STEP_SIZE                   = BigDecimal.new('0.01')
+     FUDGE_FACTOR                       = BigDecimal.new('0.00000001')
+     LEARNING_RATE                      = BigDecimal.new('0.01')
+     FRICTION                           = BigDecimal.new('0.8')
+     NUM_ITERATIONS_BEFORE_LR_ANNEALING = BigDecimal.new('10')
+
+     attr_accessor :network, :lr, :velocities
+
+     def initialize network, restore = {}
+       @network             = network
+       @connections_hash    = network.connections.each.with_object({}){ |c, h| h[c.id] = c }
+       @lr                  = LEARNING_RATE
+       @friction            = FRICTION
+       @velocities          = Hash.new(BigDecimal.new('0'))
+       @historical_gradient = (restore[:historical_gradient] || {}).tap{ |h| h.default = 0.to_d }
+       @historical_update   = Hash.new(MASTER_STEP_SIZE)
+       @batch_count         = BigDecimal.new('0')
+     end
+
+     def run_batch(inputs, targets, opts = {})
+       @batch_count += 1
+
+       batch_size      = inputs.size
+       avg_gradients   = Hash.new{ |h, k| h[k] = 0 }
+       avg_batch_error = 0
+
+       # force longer bits of work per iteration, to maximise CPU usage: less
+       # marshalling etc. samples are zipped with their targets before grouping
+       # so that each sample stays paired with its own target.
+       grouped_samples = in_groups inputs.zip(targets), [1, opts[:processes]].max * 10, false
+       grouped_results =
+         Parallel.map grouped_samples, in_processes: opts[:processes] do |samples|
+           group_avg_gradients = Hash.new{ |h, k| h[k] = 0.to_d }
+           group_avg_error     = 0.to_d
+
+           samples.each do |input, target|
+             gradients, error = Backprop.run_single(network, input, target)
+
+             gradients.each do |cid, g|
+               group_avg_gradients[cid] += g.div batch_size, 10
+             end
+             group_avg_error += error.div batch_size, 10
+           end
+
+           group_avg_gradients.default_proc = nil
+           [group_avg_gradients, group_avg_error]
+         end
+
+       grouped_results.each do |group_avg_gradients, group_avg_error|
+         avg_gradients.merge!(group_avg_gradients){ |_, o, n| o + n }
+         avg_batch_error += group_avg_error
+       end
+
+       if opts[:checking]
+         # check assumes batchsize 1 for now
+         sorted_gradients = avg_gradients.values_at(*network.connections.map(&:id))
+         if GradientChecker.check network, inputs.first, targets.first, sorted_gradients
+           puts "gradient valid"
+         else
+           puts "gradient INVALID"
+         end
+       end
+
+       avg_gradients.each do |con_id, gradient|
+         con = @connections_hash[con_id]
+         next if con.locked?
+
+         update = adagrad gradient, con.id
+
+         con.weight += update
+       end
+
+       avg_batch_error
+     end
+
+     def self.run_single network, inputs, targets
+       states = []
+       inputs = [inputs] if inputs.flatten == inputs
+
+       # run the data into the network. (feed forward)
+       # all but last
+       (inputs.size - 1).times do |timestep|
+         network.evaluate inputs[timestep]
+         states[timestep] = network.reset!
+       end
+       # last
+       outputs = network.evaluate inputs.last
+       states[inputs.size - 1] = network.reset!
+
+       # calculate error
+       error = mse targets, outputs
+
+       # backward pass with unravelling for recurrent networks
+       deltas = Hash.new{ |h, k| h[k] = Hash.new(0.to_d) }
+
+       # outputs first
+       network.output_neurons.each.with_index do |o, i|
+         activation_derivative = ACTIVATION_DERIVATIVES[o.activation_function]
+
+         deltas[0][o.id] = mse_delta(targets[i], outputs[i], activation_derivative)
+       end
+
+       # remove this push mechanism, shouldn't be necessary and uses extra memory.
+       incoming_deltas = Hash.new{ |h, k| h[k] = Hash.new{ |h2, k2| h2[k2] = [] } }
+       # each timestep backwards through time
+       (inputs.size - 1).downto 0 do |t|
+         network.output_neurons.each do |o|
+           traverse from: o, network: network, timestep: t, deltas: deltas do |other, con|
+             if other.context?
+               this_t = t - 1
+               other  = o
+             else
+               this_t = t
+             end
+
+             incoming_deltas[this_t][other.id] <<
+               deltas[t][o.id].mult(con.weight, 10)
+
+             if incoming_deltas[this_t][other.id].size == network.connections_from(other).size
+               sum_of_deltas = incoming_deltas[this_t][other.id].reduce(:+)
+
+               deltas[this_t][other.id] =
+                 ACTIVATION_DERIVATIVES[other.activation_function]
+                   .call(states[this_t][other.id])
+                   .mult(sum_of_deltas, 10)
+             end
+           end
+         end
+       end
+
+       gradients = {}
+
+       network.connections.each_with_index do |con, i|
+         gradients[con.id] = 0.to_d
+         next if con.output_neuron.context?
+
+         (inputs.size - 1).downto 0 do |t|
+           if nd = deltas[t][con.output_neuron.id]
+             gradient =
+               if con.input_neuron.context?
+                 t == 0 ? 0.to_d : nd.mult(states[t - 1][con.input_neuron.id], 10)
+               else
+                 nd.mult states[t][con.input_neuron.id], 10
+               end
+
+             gradients[con.id] += gradient
+           end
+         end
+       end
+
+       reset! network
+       [gradients, error]
+     end
+
+     def state
+       { historical_gradient: @historical_gradient }
+     end
+
+     def self.reset! network
+       network.reset!
+       network.neurons.select(&:context?).each{ |n| n.value = 0.to_d }
+     end
+
+     def adagrad avg_grad, cid
+       @historical_gradient[cid] = DECAY.mult(@historical_gradient[cid], 10) + (1 - DECAY).mult(avg_grad.power(2, 10), 10)
+
+       avg_grad.mult(-@lr.div((FUDGE_FACTOR + @historical_gradient[cid]).sqrt(10), 10), 10)
+     end
+
+     def self.mse targets, outputs
+       total_squared_error = 0.to_d
+
+       targets.size.times do |i|
+         total_squared_error += (targets[i] - outputs[i]).power(2, 10).div(2, 10)
+       end
+
+       total_squared_error
+     end
+
+     def self.mse_delta target, actual, activation_derivative
+       step_one = actual - target
+       step_two = activation_derivative.call actual
+
+       step_one.mult step_two, 10
+     end
+
+     def self.traverse from:, network:, timestep:, deltas:, &block
+       # halt traversal if we've reached the next timestep.
+       return if from.context?
+
+       bptt_connecting_to(from, network, timestep, deltas).each do |n, c|
+         yield n, c
+
+         traverse from: n, network: network, timestep: timestep, deltas: deltas, &block
+       end
+     end
+
+     def self.bptt_connecting_to neuron, network, timestep, deltas
+       network.connections_to(neuron).each.with_object [] do |c, a|
+         unless c.input_neuron.input? || deltas[timestep].key?(c.input_neuron.id)
+           a << [c.input_neuron, c]
+         end
+       end
+     end
+   end
+ end
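One note on the weight update above: despite the method name `adagrad`, the `DECAY` factor makes `@historical_gradient` an exponentially decaying average of squared gradients rather than a monotone sum, so the rule is closer to RMSProp. A minimal plain-Float restatement of the same arithmetic (the gem itself works in `BigDecimal`):

```ruby
# RMSProp-style step, mirroring Backprop#adagrad above with Floats.
decay, lr, eps = 0.9, 0.01, 1e-8
historical = Hash.new(0.0)

update = lambda do |grad, key|
  # decaying average of squared gradients
  historical[key] = decay * historical[key] + (1 - decay) * grad**2
  # step against the gradient, scaled by the root of the accumulator
  -lr * grad / Math.sqrt(eps + historical[key])
end

w = 0.5
10.times { w += update.call(0.2, :w) }
puts w # the weight decreases, since the gradient is positive
```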
data/lib/rann/connection.rb ADDED
@@ -0,0 +1,67 @@
+ require "securerandom"
+ require "bigdecimal"
+ require "bigdecimal/util"
+
+ module RANN
+   class Connection
+     attr_accessor *%i(
+       output_neuron
+       input_neuron
+       weight
+       processed
+       enabled
+       id
+     )
+
+     def initialize input_neuron, output_neuron, weight = nil
+       @id            = SecureRandom.hex
+       @output_neuron = output_neuron
+       @input_neuron  = input_neuron
+       @weight        = weight || initial_weight
+       @processed     = false
+       @enabled       = true
+       @locked        = false
+     end
+
+     def process
+       if processable? && !processed?
+         out_value = input_neuron.value.mult weight, 10
+         output_neuron.push_value! out_value
+         @processed = true
+       end
+     end
+
+     def neurons
+       [output_neuron, input_neuron]
+     end
+
+     def processable?
+       input_neuron.value
+     end
+
+     def enabled?
+       enabled
+     end
+
+     def processed?
+       processed
+     end
+
+     def locked?
+       @locked
+     end
+
+     def reset!
+       @processed = false
+     end
+
+     private
+     def initial_weight
+       if output_neuron.context?
+         1.to_d
+       else
+         rand.to_d 10
+       end
+     end
+   end
+ end
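To see the processing model in isolation: a connection multiplies its source neuron's value by its weight and pushes the product into the destination neuron, which activates once it has received one value per declared incoming connection. A small sketch (the neuron names are just for illustration):

```ruby
require "rann"

a = RANN::Neuron.new "a", 0, :input
b = RANN::Neuron.new "b", 1, :standard, :linear # expects exactly one incoming value
c = RANN::Connection.new a, b, "0.5".to_d

a.value = 2.to_d
c.process
puts b.value.to_f # => 1.0, i.e. 2 x 0.5 through the linear activation
```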
data/lib/rann/gradient_checker.rb ADDED
@@ -0,0 +1,55 @@
+ module RANN
+   class GradientChecker
+     EPSILON = 10.to_d.power(-4, 10)
+
+     def self.check network, inputs, targets, dvec
+       gradapprox = []
+
+       network.params.size.times do |i|
+         thetaplus     = network.params.dup
+         thetaplus[i]  = thetaplus[i] + EPSILON
+         thetaminus    = network.params.dup
+         thetaminus[i] = thetaminus[i] - EPSILON
+
+         network.impose thetaplus
+         outputs = network.evaluate inputs
+         error_thetaplus = error outputs, targets
+         network.reset!
+
+         network.impose thetaminus
+         outputs = network.evaluate inputs
+         error_thetaminus = error outputs, targets
+         network.reset!
+
+         gradapprox[i] = (error_thetaplus - error_thetaminus).div(EPSILON.mult(2, 10), 10)
+       end
+
+       gradapprox.each.with_index.all?{ |ga, i| in_epsilon? ga, dvec[i] }
+     end
+
+     def self.error outputs, targets
+       total_squared_error = 0.to_d
+
+       targets.size.times do |i|
+         total_squared_error += (targets[i] - outputs[i]).power(2, 10).div(2, 10)
+       end
+
+       total_squared_error
+     end
+
+     def self.in_epsilon? exp, act, epsilon = 0.001
+       # delta = [exp.abs, act.abs].min * epsilon
+       delta = epsilon
+       n     = (exp - act).abs
+       msg   = "Expected |#{exp} - #{act}| (#{n}) to be <= #{delta}"
+
+       if delta >= n
+         true
+       else
+         puts msg
+
+         false
+       end
+     end
+   end
+ end
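The checker estimates each partial derivative by central difference, (E(θᵢ + ε) − E(θᵢ − ε)) / 2ε, and compares it against the analytic gradient from backprop. The simplest way to invoke it is through the `checking:` option of `Backprop#run_batch` (which, per the comment in that method, assumes a batch of one sample); with `backprop` built as in the earlier XOR sketch:

```ruby
# prints "gradient valid" or "gradient INVALID" for this sample
backprop.run_batch [[0.to_d, 1.to_d]], [[1.to_d]], processes: 0, checking: true
```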
data/lib/rann/locked_connection.rb ADDED
@@ -0,0 +1,10 @@
+ require "rann/connection"
+
+ module RANN
+   class LockedConnection < Connection
+     def initialize(*)
+       super
+       @locked = true
+     end
+   end
+ end
data/lib/rann/lstm.rb ADDED
@@ -0,0 +1,86 @@
+ require "rann/network"
+ require "rann/neuron"
+ require "rann/product_neuron"
+ require "rann/connection"
+ require "rann/locked_connection"
+
+ module RANN
+   class LSTM
+     attr_reader :network, :inputs, :outputs, :name
+
+     def initialize name
+       @name    = name
+       @network = RANN::Network.new
+       @inputs  = []
+       @outputs = []
+     end
+
+     def init
+       # the input-gate neuron is named `i` below, so the cell index gets its
+       # own name to avoid being clobbered mid-block.
+       @inputs.each.with_index do |input, index|
+         f = RANN::Neuron.new("LSTM #{name} F #{index}", 3, :standard, :sig).tap{ |n| @network.add n }
+         i = RANN::Neuron.new("LSTM #{name} I #{index}", 4, :standard, :sig).tap{ |n| @network.add n }
+         g = RANN::Neuron.new("LSTM #{name} G #{index}", 3, :standard, :tanh).tap{ |n| @network.add n }
+         o = RANN::Neuron.new("LSTM #{name} O #{index}", 3, :standard, :sig).tap{ |n| @network.add n }
+         bias_f = RANN::Neuron.new("LSTM #{name} Bias F #{index}", 0, :bias).tap do |n|
+           @network.add n
+           n.value = 1.to_d
+         end
+         bias_i = RANN::Neuron.new("LSTM #{name} Bias I #{index}", 0, :bias).tap do |n|
+           @network.add n
+           n.value = 1.to_d
+         end
+         bias_g = RANN::Neuron.new("LSTM #{name} Bias G #{index}", 0, :bias).tap do |n|
+           @network.add n
+           n.value = 1.to_d
+         end
+         bias_o = RANN::Neuron.new("LSTM #{name} Bias O #{index}", 0, :bias).tap do |n|
+           @network.add n
+           n.value = 1.to_d
+         end
+         memory_product   = RANN::ProductNeuron.new("LSTM #{name} Mem Product #{index}", 2, :standard, :linear).tap{ |n| @network.add n }
+         i_g_product      = RANN::ProductNeuron.new("LSTM #{name} Hidden 2/3 Product #{index}", 2, :standard, :linear).tap{ |n| @network.add n }
+         memory_standard  = RANN::Neuron.new("LSTM #{name} Mem Standard #{index}", 2, :standard, :linear).tap{ |n| @network.add n }
+         memory_tanh      = RANN::Neuron.new("LSTM #{name} Mem Tanh #{index}", 1, :standard, :tanh).tap{ |n| @network.add n }
+         memory_o_product = RANN::ProductNeuron.new("LSTM #{name} Mem/Hidden 4 Product #{index}", 2, :standard, :linear).tap{ |n| @network.add n }
+         output           = RANN::Neuron.new("LSTM #{name} Output #{index}", 1, :standard, :linear).tap{ |n| @network.add n }
+         @outputs << output
+         memory_context = RANN::Neuron.new("LSTM #{name} Mem Context #{index}", 1, :context).tap{ |n| @network.add n }
+         output_context = RANN::Neuron.new("LSTM #{name} Output Context #{index}", 1, :context).tap{ |n| @network.add n }
+
+         @network.add RANN::LockedConnection.new input, f, 1
+         @network.add RANN::LockedConnection.new input, i, 1
+         @network.add RANN::LockedConnection.new input, g, 1
+         @network.add RANN::LockedConnection.new input, o, 1
+         @network.add RANN::LockedConnection.new f, memory_product, 1
+         @network.add RANN::LockedConnection.new i, i_g_product, 1
+         @network.add RANN::LockedConnection.new g, i_g_product, 1
+         @network.add RANN::LockedConnection.new i_g_product, memory_standard, 1
+         @network.add RANN::LockedConnection.new memory_product, memory_standard, 1
+         @network.add RANN::LockedConnection.new memory_standard, memory_tanh, 1
+         @network.add RANN::LockedConnection.new o, memory_o_product, 1
+         @network.add RANN::LockedConnection.new memory_tanh, memory_o_product, 1
+         @network.add RANN::LockedConnection.new memory_o_product, output, 1
+         @network.add RANN::LockedConnection.new memory_standard, memory_context, 1
+         @network.add RANN::Connection.new memory_context, memory_product
+         @network.add RANN::Connection.new memory_context, i
+         @network.add RANN::LockedConnection.new memory_o_product, output_context, 1
+         @network.add RANN::Connection.new output_context, f
+         @network.add RANN::Connection.new output_context, i
+         @network.add RANN::Connection.new output_context, g
+         @network.add RANN::Connection.new output_context, o
+         @network.add RANN::Connection.new bias_f, f
+         @network.add RANN::Connection.new bias_i, i
+         @network.add RANN::Connection.new bias_g, g
+         @network.add RANN::Connection.new bias_o, o
+       end
+     end
+
+     def add_input neuron
+       input = RANN::Neuron.new "LSTM #{name} Input #{neuron.name}", 0, :standard, :linear
+       @network.add input
+       @inputs << input
+       connection = RANN::Connection.new neuron, input
+       @network.add connection
+     end
+   end
+ end
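For orientation, `init` wires one full cell per input: forget/input/candidate/output gates (`f`/`i`/`g`/`o`), product neurons for the gating multiplications, and two context neurons carrying the cell memory and output across timesteps. Below is a rough sketch of plugging a cell between an input and an output neuron; nothing in the bundled examples exercises this composition, so treat it as an untested illustration (the `recalculate_neuron_connection_counts!` call compensates for the cell's input neurons being created with a connection count of 0):

```ruby
require "rann"
require "rann/lstm"

in_n  = RANN::Neuron.new "in", 0, :input
out_n = RANN::Neuron.new "out", 1, :output, :sig

lstm = RANN::LSTM.new "l1"
lstm.add_input in_n
lstm.init

network = RANN::Network.new
network.add in_n, out_n
network.add lstm.network # merges the cell's neurons and connections
network.add RANN::Connection.new lstm.outputs.first, out_n
network.recalculate_neuron_connection_counts!

network.evaluate [1.to_d] # one timestep forward
```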
data/lib/rann/network.rb ADDED
@@ -0,0 +1,171 @@
+ require "graphviz"
+ require "yaml"
+
+ module RANN
+   class Network
+     UnconnectedNetworkError = Class.new(StandardError)
+
+     attr_accessor *%i(
+       neurons
+       input_neurons
+       hidden_neurons
+       output_neurons
+       connections
+       structure
+     )
+
+     def initialize connections = []
+       @connections    = connections
+       @neurons        = connections.flat_map(&:neurons).uniq
+       @input_neurons  = @neurons.select &:input?
+       @output_neurons = @neurons.select &:output?
+       @hidden_neurons = @neurons - @input_neurons - @output_neurons
+     end
+
+     def impose weights
+       connections.each.with_index do |c, i|
+         c.weight = weights[i]
+       end
+     end
+
+     def params
+       connections.map(&:weight)
+     end
+
+     def evaluate input
+       input_neurons.each.with_index do |neuron, i|
+         neuron.value = input[i]
+       end
+
+       # use some proper graph traversal, rather than this crude blanketing?
+       # would probably be easier to detect circular dependency this way too?
+       begin
+         i = 0
+         until output_neurons.all?{ |neuron| neuron.value }
+           i += 1
+           connections.each do |connection|
+             next if !connection.enabled?
+
+             connection.process
+           end
+           raise UnconnectedNetworkError if i > 5_000
+         end
+       rescue UnconnectedNetworkError
+         visualise
+         raise
+       end
+
+       outputs
+     end
+
+     def visualise
+       # Create a new graph
+       g = GraphViz.new(:G, type: :digraph)
+
+       # Create nodes
+       missing_nodes = connections.each.with_object([]) do |c, o|
+         o << c.output_neuron unless neurons.include? c.output_neuron
+         o << c.input_neuron unless neurons.include? c.input_neuron
+       end
+
+       graph_nodes = neurons.each.with_object({}) do |n, h|
+         h[n] = g.add_nodes("#{n.name}: #{n.value&.to_f&.round(5)}")
+       end
+
+       # Create edges between the nodes
+       connections.each do |c|
+         g.add_edges(
+           graph_nodes[c.input_neuron],
+           graph_nodes[c.output_neuron],
+           color: c.processed? ? "#ff0000" : "#000000",
+           label: c.weight.to_f.round(5)
+         )
+       end
+
+       # Generate output image
+       g.output png: "nnet.png"
+       `open nnet.png`
+     end
+
+     def dump_weights
+       File.write "nn_weights_dump_#{DateTime.now.strftime('%Y-%m-%d-%H-%M-%S')}.yml", params.to_yaml
+     end
+
+     def outputs
+       output_neurons.map &:value
+     end
+
+     def state
+       neurons.each.with_object({}){ |n, s| s[n.id] = n.value }
+     end
+
+     def connections_to neuron
+       @connections_to = {} unless defined? @connections_to
+
+       @connections_to[neuron] ||= connections.select{ |con| con.output_neuron == neuron }
+     end
+
+     def connections_from neuron
+       @connections_from = {} unless defined? @connections_from
+
+       @connections_from[neuron] ||= connections.select{ |con| con.input_neuron == neuron }
+     end
+
+     def add *features
+       features.each do |feature|
+         case feature
+         when Neuron
+           case feature.type
+           when :input
+             @input_neurons << feature
+           when :output
+             @output_neurons << feature
+           else
+             @hidden_neurons << feature
+           end
+
+           @neurons << feature
+         when Connection
+           @connections << feature
+         when Network
+           add *feature.neurons
+           add *feature.connections
+         end
+       end
+     end
+
+     def remove *features
+       features.each do |feature|
+         case feature
+         when Neuron
+           case feature.type
+           when :input
+             raise "trying to remove an input neuron ..."
+           when :output
+             raise "trying to remove an output neuron ..."
+           else
+             @hidden_neurons.delete feature
+           end
+
+           @neurons.delete feature
+         when Connection
+           @connections.delete feature
+         end
+       end
+     end
+
+     def reset!
+       state.tap do
+         neurons.each{ |neuron| neuron.reset! }
+         connections.each{ |connection| connection.reset! }
+       end
+     end
+
+     def recalculate_neuron_connection_counts!
+       neurons.each do |neuron|
+         neuron.connection_count = connections.count{ |c| c.output_neuron == neuron }
+       end
+     end
+   end
+ end
data/lib/rann/neuron.rb ADDED
@@ -0,0 +1,83 @@
+ require "securerandom"
+ require "bigdecimal"
+ require "bigdecimal/util"
+
+ module RANN
+   class Neuron
+     ACTIVATION_FUNCTIONS = {
+       sig:    ->(v){ 1.to_d.div(1 + (Math::E ** -v), 10) },
+       tanh:   ->(v){ Math.tanh(v).to_d(10) },
+       relu:   ->(v){ [0.to_d, v].max },
+       linear: ->(v){ v },
+       step:   ->(v){ v > 0.5 ? 1.to_d : 0.to_d },
+     }
+
+     attr_accessor *%i(
+       activation_function
+       value
+       incoming
+       connection_count
+       type
+       name
+       id
+     )
+
+     def initialize name, connection_count, type = :standard, af = nil
+       @id                  = SecureRandom.hex
+       @connection_count    = connection_count
+       @type                = type
+       @incoming            = []
+       @activation_function = af || initial_activation_function
+       @name                = name
+
+       set_default_value!
+     end
+
+     def push_value! value
+       incoming << value
+       set_value! if incoming.size == connection_count
+     end
+
+     def set_value!
+       intermediate = incoming.reduce :+
+       self.value = ACTIVATION_FUNCTIONS[activation_function].call intermediate
+     end
+
+     def reset!
+       set_default_value!
+       @incoming.clear
+     end
+
+     def increment_connection_count!
+       @connection_count += 1
+     end
+
+     def decrement_connection_count!
+       @connection_count -= 1
+     end
+
+     %i(input output context bias standard).each do |t|
+       define_method "#{t}?" do
+         type == t
+       end
+     end
+
+     private
+     def set_default_value!
+       self.value =
+         if context?
+           value || 0.to_d
+         elsif bias?
+           1.to_d
+         end
+     end
+
+     def initial_activation_function
+       if standard? || context?
+         :relu
+       else
+         :linear
+       end
+     end
+   end
+ end
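A neuron only fires once it has received one value per declared incoming connection, which is why `connection_count` has to agree with the actual wiring (`Network#recalculate_neuron_connection_counts!` exists for exactly this). For instance:

```ruby
require "rann"

n = RANN::Neuron.new "demo", 2, :standard, :sig

n.push_value! "0.5".to_d  # 1 of 2 values received: no activation yet
n.value                   # => nil
n.push_value! "0.25".to_d # 2 of 2: sums the inputs, applies the sigmoid
n.value.to_f              # => sigmoid(0.75), roughly 0.679
```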
data/lib/rann/product_neuron.rb ADDED
@@ -0,0 +1,10 @@
+ require "rann/neuron"
+
+ module RANN
+   class ProductNeuron < Neuron
+     def set_value!
+       intermediate = incoming.reduce{ |i, m| m.mult i, 10 }
+       self.value = ACTIVATION_FUNCTIONS[activation_function].call intermediate
+     end
+   end
+ end
data/lib/rann/util/array_ext.rb ADDED
@@ -0,0 +1,56 @@
+ module RANN
+   module Util
+     module ArrayExt
+       # Method `in_groups` adapted from
+       # activesupport/lib/active_support/core_ext/array/grouping.rb under MIT
+       # licence. Original licence printed below.
+       #
+       # Copyright (c) 2005-2017 David Heinemeier Hansson
+
+       # Permission is hereby granted, free of charge, to any person obtaining
+       # a copy of this software and associated documentation files (the
+       # "Software"), to deal in the Software without restriction, including
+       # without limitation the rights to use, copy, modify, merge, publish,
+       # distribute, sublicense, and/or sell copies of the Software, and to
+       # permit persons to whom the Software is furnished to do so, subject to
+       # the following conditions:
+
+       # The above copyright notice and this permission notice shall be
+       # included in all copies or substantial portions of the Software.
+
+       # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+       # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+       # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+       # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+       # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+       # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+       # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+       def in_groups array, number, fill_with = nil
+         # size.div number gives minor group size;
+         # size % number gives how many objects need extra accommodation;
+         # each group holds either division or division + 1 items.
+         division = array.size.div number
+         modulo   = array.size % number
+
+         # create a new array avoiding dup
+         groups = []
+         start  = 0
+
+         number.times do |index|
+           length = division + (modulo > 0 && modulo > index ? 1 : 0)
+           groups << last_group = array.slice(start, length)
+           last_group << fill_with if fill_with != false &&
+                                      modulo > 0 && length == division
+           start += length
+         end
+
+         if block_given?
+           groups.each{ |g| yield(g) }
+         else
+           groups
+         end
+       end
+     end
+   end
+ end
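`in_groups` splits an array into `number` groups whose sizes differ by at most one, padding short groups with `fill_with` unless it is `false`; `Backprop#run_batch` uses it to hand each worker a chunk of samples. For example:

```ruby
require "rann"
include RANN::Util::ArrayExt

in_groups((1..5).to_a, 3, false) # => [[1, 2], [3, 4], [5]]
in_groups((1..5).to_a, 3)        # => [[1, 2], [3, 4], [5, nil]]
```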
data/lib/rann/version.rb ADDED
@@ -0,0 +1,3 @@
+ module RANN
+   VERSION = "0.1.0"
+ end
data/lib/rann.rb ADDED
@@ -0,0 +1,9 @@
+ require "bigdecimal"
+ require "bigdecimal/util"
+ require "rann/version"
+ require "rann/network"
+ require "rann/neuron"
+ require "rann/product_neuron"
+ require "rann/connection"
+ require "rann/locked_connection"
+ require "rann/backprop"
data/rann.gemspec ADDED
@@ -0,0 +1,29 @@
+
+ lib = File.expand_path("../lib", __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require "rann/version"
+
+ Gem::Specification.new do |spec|
+   spec.name        = "rann"
+   spec.version     = RANN::VERSION
+   spec.authors     = ["Michael Campbell"]
+   spec.email       = ["mike@ydd.io"]
+
+   spec.summary     = %q{Ruby Artificial Neural Networks}
+   spec.description = %q{Library for working with neural networks in Ruby.}
+   spec.homepage    = "https://github.com/mikecmpbll/rann"
+
+   spec.files = `git ls-files -z`.split("\x0").reject do |f|
+     f.match(%r{^(test|spec|features)/})
+   end
+   spec.bindir        = "exe"
+   spec.executables   = spec.files.grep(%r{^exe/}){ |f| File.basename(f) }
+   spec.require_paths = ["lib"]
+
+   spec.add_runtime_dependency "parallel", "~> 1.12.0"
+   spec.add_runtime_dependency "ruby-graphviz", "~> 1.2.3"
+
+   spec.add_development_dependency "bundler", "~> 1.16"
+   spec.add_development_dependency "rake", "~> 10.0"
+   spec.add_development_dependency "minitest", "~> 5.0"
+ end
metadata ADDED
@@ -0,0 +1,136 @@
+ --- !ruby/object:Gem::Specification
+ name: rann
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Michael Campbell
+ autorequire:
+ bindir: exe
+ cert_chain: []
+ date: 2017-11-08 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: parallel
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 1.12.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 1.12.0
+ - !ruby/object:Gem::Dependency
+   name: ruby-graphviz
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 1.2.3
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 1.2.3
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.16'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.16'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.0'
+ - !ruby/object:Gem::Dependency
+   name: minitest
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.0'
+ description: Library for working with neural networks in Ruby.
+ email:
+ - mike@ydd.io
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - ".gitignore"
+ - ".travis.yml"
+ - CHANGES.md
+ - Gemfile
+ - Gemfile.lock
+ - LICENCE
+ - README.md
+ - Rakefile
+ - bin/console
+ - bin/setup
+ - examples/xor.rb
+ - lib/rann.rb
+ - lib/rann/backprop.rb
+ - lib/rann/connection.rb
+ - lib/rann/gradient_checker.rb
+ - lib/rann/locked_connection.rb
+ - lib/rann/lstm.rb
+ - lib/rann/network.rb
+ - lib/rann/neuron.rb
+ - lib/rann/product_neuron.rb
+ - lib/rann/util/array_ext.rb
+ - lib/rann/version.rb
+ - rann.gemspec
+ homepage: https://github.com/mikecmpbll/rann
+ licenses: []
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.5.1
+ signing_key:
+ specification_version: 4
+ summary: Ruby Artificial Neural Networks
+ test_files: []