brainy 2.0.1-java

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 8bd9b3d723d372a31e94f3ae82400de2b04467fb
+   data.tar.gz: 2e20aff692a4e0566b58c0fcbc8b533f90b27a4c
+ SHA512:
+   metadata.gz: 4c7962fbcedeadd153df7b02627dca1ca86759d3b13217ca0f6eb2507f0704e425e47d3978067ef882552bfecb414f557d9a6a73c33d0e87074a846e9d712e1a
+   data.tar.gz: f2574f3a69dc530d33f1b827b261f7626651a7f7dd97d91ade352ec49ac68b9ab2c7e6df0cfbf25bdd682c67fa3054f96d1ca32e36d8f0d65fedd482380771c2
data/LICENSE ADDED
@@ -0,0 +1,8 @@
+ The MIT License (MIT)
+ Copyright (c) 2016 Robert Scott Reis
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,32 @@
+ # Brainy - An Artificial Neural Network [![Build Status](https://travis-ci.org/EvilScott/brainy.svg?branch=master)](https://travis-ci.org/EvilScott/brainy)
+
+ Brainy is an [Artificial Neural Network (ANN)](https://en.wikipedia.org/wiki/Artificial_neural_network) using the
+ [Backpropagation](https://en.wikipedia.org/wiki/Backpropagation) algorithm. It was originally created as part of
+ the [Neural NFL](https://github.com/EvilScott/neuralnfl) project, but was broken out into its own gem for reuse.
+
+ _NOTE_: As of v2.0 Brainy is _JRuby only_. This decision was made for performance: it lets Brainy leverage the fast linear algebra
+ routines in the [JBLAS](http://jblas.org/) Java library.
+
+ ### Usage
+ From [examples/sin.rb](https://github.com/EvilScott/brainy/blob/master/examples/sin.rb):
+ ```ruby
+ # Example approximating the sine function
+ require_relative '../lib/brainy'
+ net = Brainy::Network.new(1, 3, 1, learning_rate: 1.0)
+
+ # training
+ 4000.times do
+   i = rand(0..(Math::PI/2))
+   o = Math.sin(i)
+   net.train!([i], [o])
+ end
+
+ # testing
+ mse = 1000.times.map do
+   i = rand(0..(Math::PI/2))
+   o = Math.sin(i)
+   (o - net.evaluate([i]).first) ** 2
+ end.reduce(:+) / 1000
+
+ puts "your MSE: #{ mse.round(6) }" # smaller is better
+ ```
data/lib/brainy/gaussian.rb ADDED
@@ -0,0 +1,11 @@
+ module Brainy
+   java_import java.util.Random
+
+   class Gaussian
+     @rand = Random.new
+
+     def self.next
+       @rand.nextGaussian
+     end
+   end
+ end
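`Brainy::Gaussian` wraps `java.util.Random#nextGaussian` behind a single class method; `Network` (below) uses it for its default weight initialization (`weight_init: lambda { Gaussian.next * 0.1 }`). A minimal sketch of the same pattern, assuming a JRuby session with the gem installed:

```ruby
require 'brainy' # assumes JRuby with the brainy gem available

# Draw a handful of standard-normal samples and scale them down,
# mirroring Network's default weight_init lambda.
initial_weights = 5.times.map { Brainy::Gaussian.next * 0.1 }
puts initial_weights.inspect # small values scattered around 0.0; varies per run
```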
data/lib/brainy/jblas.rb ADDED
@@ -0,0 +1,66 @@
+ module Brainy
+   java_import org.jblas.DoubleMatrix
+
+   class JMatrix
+     attr_accessor :java_matrix
+
+     def initialize(data)
+       if data.is_a?(DoubleMatrix)
+         @java_matrix = data
+       elsif data.first.is_a?(Array)
+         @java_matrix = DoubleMatrix.new(data.to_java(Java::double[]))
+       else
+         @java_matrix = DoubleMatrix.new(data.to_java(Java::double))
+       end
+     end
+
+     def self.build(row_count, column_count, &block) # TODO: refactor for performance (!!!)
+       JMatrix.new(row_count.times.map do |row|
+         column_count.times.map { |col| block.yield(row, col) }
+       end)
+     end
+
+     def *(x)
+       if x.is_a? JMatrix
+         JMatrix.new(@java_matrix.mmul(x.java_matrix))
+       else
+         JMatrix.new(@java_matrix.mul(x))
+       end
+     end
+
+     def -(mat)
+       JMatrix.new(@java_matrix.sub(mat.java_matrix))
+     end
+
+     def to_a
+       return @java_matrix.getRow(0).toArray.to_a if rows == 1
+       return @java_matrix.getColumn(0).toArray.to_a if columns == 1
+       @java_matrix.rowsAsList.toArray.map { |row| row.toArray.to_a }
+     end
+
+     def row_vectors
+       @java_matrix.rowsAsList.toArray.map { |row| JMatrix.new(row) }
+     end
+
+     def rows
+       @java_matrix.rows
+     end
+
+     def columns
+       @java_matrix.columns
+     end
+
+     def map(&block)
+       return to_a.map { |a| a.to_a.map(&block) } if to_a.first.is_a?(Array)
+       to_a.map(&block)
+     end
+
+     def each_with_index(&block)
+       to_a.each_with_index(&block)
+     end
+
+     def [](idx)
+       @java_matrix.get(idx)
+     end
+   end
+ end
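`JMatrix` is a thin wrapper over `org.jblas.DoubleMatrix`: a nested Array becomes a matrix, a flat Array becomes a column vector, `*` dispatches to JBLAS `mmul` or `mul`, and `to_a` converts back to plain Ruby arrays. A short sketch exercising those methods (JRuby with the gem installed; the printed values follow from the definitions above):

```ruby
require 'brainy'

mat = Brainy::JMatrix.new([[1.0, 2.0], [3.0, 4.0]]) # 2x2 matrix
vec = Brainy::JMatrix.new([1.0, 1.0])               # 2-element column vector

puts (mat * vec).to_a.inspect # matrix-vector product => [3.0, 7.0]
puts (mat * 2.0).to_a.inspect # scalar product        => [[2.0, 4.0], [6.0, 8.0]]

# Build a matrix element-by-element from (row, col) indices.
eye = Brainy::JMatrix.build(2, 2) { |row, col| row == col ? 1.0 : 0.0 }
puts eye.to_a.inspect         # => [[1.0, 0.0], [0.0, 1.0]]
```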
data/lib/brainy/network.rb ADDED
@@ -0,0 +1,80 @@
+ module Brainy
+   class Network
+     attr_accessor :layers
+
+     def initialize(input_count, hidden_count, output_count, options = {})
+       options = default_options.update(options)
+       @learning_rate = options[:learning_rate]
+       @momentum = options[:momentum]
+       @activate = options[:activate]
+       @activate_prime = options[:activate_prime]
+       @weight_init = options[:weight_init]
+       @layers = [
+         JMatrix.build(hidden_count, input_count + 1) { @weight_init.call },
+         JMatrix.build(output_count, hidden_count + 1) { @weight_init.call }
+       ]
+       @last_changes = []
+     end
+
+     def default_options
+       {
+         learning_rate: 0.25,
+         momentum: 0.9,
+         activate: lambda { |x| 1 / (1 + Math.exp(-1 * x)) },
+         activate_prime: lambda { |x| x * (1 - x) },
+         weight_init: lambda { Gaussian.next * 0.1 }
+       }
+     end
+
+     def evaluate(inputs)
+       @layers.reduce(inputs) do |input, layer|
+         (layer * JMatrix.new(input.to_a + [1.0])).map(&@activate)
+       end
+     end
+
+     def train!(inputs, expected)
+       inputs = JMatrix.new(inputs + [1.0])
+       hidden_outs = JMatrix.new((@layers.first * inputs).map(&@activate).to_a + [1.0])
+       output_outs = (@layers.last * hidden_outs).map(&@activate)
+       output_deltas = get_output_deltas(expected, output_outs)
+       hidden_deltas = get_hidden_deltas(hidden_outs, @layers.last, output_deltas)
+       changes = [get_weight_change(inputs, hidden_deltas), get_weight_change(hidden_outs, output_deltas)]
+       @layers.length.times do |idx|
+         @layers[idx] -= changes[idx]
+         @layers[idx] -= (@last_changes[idx] * @momentum) unless @last_changes[idx].nil?
+       end
+       @last_changes = changes
+     end
+
+     def get_output_deltas(expected, output)
+       expected.zip(output.to_a).map do |expect, out|
+         (out - expect) * @activate_prime.call(out)
+       end
+     end
+
+     def get_hidden_deltas(hidden_outs, output_nodes, output_deltas)
+       hidden_outs.to_a.slice(0...-1).each_with_index.map do |out, index|
+         output_nodes.row_vectors.zip(output_deltas)
+           .map { |weights, delta| weights[index] * delta }
+           .reduce(:+) * @activate_prime.call(out)
+       end
+     end
+
+     def get_weight_change(inputs, deltas)
+       JMatrix.build(deltas.count, inputs.to_a.count) do |row, col|
+         @learning_rate * deltas[row] * inputs[col]
+       end
+     end
+
+     def serialize
+       YAML.dump({ layers: layers.map(&:to_a) })
+     end
+
+     def self.from_serialized(dump, options = {})
+       layer_values = YAML.load(dump.class == File ? dump : File.open(dump))[:layers]
+       net = Network.new(1, 1, 1, options)
+       net.layers = layer_values.map { |vals| JMatrix.new(vals) }
+       net
+     end
+   end
+ end
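`Network#serialize` dumps the layer weights as YAML, and `Network.from_serialized` rebuilds a network from either a `File` object or a file path, so a trained net can be persisted between runs. A round-trip sketch under those assumptions (the file name `net.yml` is illustrative; a 2-3-2 topology is used so each layer serializes as a nested array, since `to_a` flattens single-row matrices):

```ruby
require 'brainy'

net = Brainy::Network.new(2, 3, 2)
100.times { net.train!([rand, rand], [rand, rand]) } # toy training pass

File.write('net.yml', net.serialize)                  # persist the weights

restored = Brainy::Network.from_serialized('net.yml') # reload them later
puts restored.evaluate([0.5, 0.5]).inspect             # matches the original net's output
```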
data/lib/brainy/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Brainy
+   VERSION = '2.0.1'
+ end
data/lib/brainy.rb ADDED
@@ -0,0 +1,9 @@
+ require 'yaml'
+ require 'java'
+
+ require_relative 'jar/jblas-1.2.4.jar'
+ require_relative 'brainy/gaussian'
+ require_relative 'brainy/jblas'
+ require_relative 'brainy/network'
+
+ module Brainy; end
data/lib/jar/jblas-1.2.4.jar ADDED
Binary file
data/spec/lib/brainy/gaussian_spec.rb ADDED
@@ -0,0 +1,12 @@
+ require_relative '../../spec_helper'
+
+ module Brainy
+   describe Gaussian do
+     describe '.next' do
+       it 'returns a random number from the normal distribution' do
+         expect(Gaussian.next).to be_a Float
+       end
+     end
+
+   end
+ end
data/spec/lib/brainy/jblas_spec.rb ADDED
@@ -0,0 +1,62 @@
+ require_relative '../../spec_helper'
+
+ module Brainy
+   describe JMatrix do
+     let (:mat_data) { [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] }
+     let (:vec_data) { [1.0, 2.0] }
+     let (:mat) { JMatrix.new(mat_data) }
+     let (:vec) { JMatrix.new(vec_data) }
+
+     describe '.build' do
+       it 'builds a new JMatrix' do
+         built_mat = JMatrix.build(3, 2) { 0 }
+         expect(built_mat).to be_a JMatrix
+         expect(built_mat.rows).to eq 3
+         expect(built_mat.columns).to eq 2
+       end
+     end
+
+     describe '#*' do
+       context 'when the second param is a matrix' do
+         it 'returns a new matrix from matrix multiplication' do
+           result = mat * vec
+           expect(result.to_a).to eq [5.0, 11.0, 17.0]
+         end
+       end
+       context 'when the second param is a scalar' do
+         it 'returns a new matrix from scalar multiplication' do
+           result = mat * 2.0
+           expect(result.to_a).to eq mat_data.map { |x| x.map { |y| y * 2.0 }}
+         end
+       end
+     end
+
+     describe '#-' do
+       it 'returns the difference between the two matrices' do
+         result = JMatrix.new([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) - JMatrix.new([[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]])
+         expect(result.to_a).to eq [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]
+       end
+     end
+
+     describe '#to_a' do
+       it 'returns the matrix data as an array' do
+         expect(mat.to_a).to eq mat_data
+         expect(vec.to_a).to eq vec_data
+         expect(JMatrix.new([[1.0], [2.0]]).to_a).to eq [1.0, 2.0]
+       end
+     end
+
+     describe '#row_vectors' do
+       it 'returns an array of row vectors' do
+         expect(mat.row_vectors.map(&:to_a)).to eq mat_data
+       end
+     end
+
+     describe '#[]' do
+       it 'returns the specified element of the matrix' do
+         expect(vec[0]).to eq 1.0
+         expect(JMatrix.new([[1.0], [2.0]])[0]).to eq 1.0
+       end
+     end
+   end
+ end
data/spec/lib/brainy/network_spec.rb ADDED
@@ -0,0 +1,62 @@
+ require_relative '../../spec_helper'
+
+ module Brainy
+   describe Network do
+     let (:net) { Network.new(4, 3, 2) }
+
+     describe '#initialize' do
+       it 'creates a network with random weights' do
+         weights = net.layers.flatten.map(&:to_a).flatten
+         expect(weights.count).to eq weights.uniq.count
+       end
+     end
+
+     describe '#evaluate' do
+       it 'evaluates the network for a set of inputs' do
+         net.instance_variable_set(:@layers, [
+           JMatrix.new([
+             [0.1, 0.2, 0.3, 0.4, 0.5],
+             [0.5, 0.6, 0.7, 0.8, 0.9],
+             [0.9, 0.1, 0.2, 0.3, 0.4]
+           ]),
+           JMatrix.new([
+             [0.1, 0.2, 0.3, 0.4],
+             [0.4, 0.5, 0.6, 0.5]
+           ])
+         ])
+         out = net.evaluate([0.1, 0.3, 0.5, 0.7]).map { |x| x.round(6) }
+         expect(out).to eq [0.702451, 0.839256]
+       end
+     end
+
+     describe '#get_output_deltas' do
+       it 'provides deltas for the output layer' do
+         expected, output = [0.4, 0.6], [0.3, 0.8]
+         deltas = net.get_output_deltas(expected, output)
+         expect(deltas.map { |x| x.round(6) }).to eq [-0.021, 0.032]
+       end
+     end
+
+     describe '#get_hidden_deltas' do
+       it 'provides deltas for the hidden layer' do
+         hidden_outs, output_deltas = [0.9, 0.8, 0.7, 1.0], [0.6, 0.4]
+         output_nodes = JMatrix.new([[0.2, 0.3, 0.4], [0.4, 0.3, 0.2]])
+         deltas = net.get_hidden_deltas(hidden_outs, output_nodes, output_deltas)
+         expect(deltas.map { |x| x.round(6) }).to eq [0.0252, 0.048, 0.0672]
+       end
+     end
+
+     describe '#get_weight_change' do
+       it 'updates the hidden weights' do
+         inputs, deltas = [0.2, 0.3, 0.4, 0.5], [0.7, 0.6, 0.5]
+         change = net.get_weight_change(inputs, deltas)
+         expected = [
+           [0.035, 0.0525, 0.07, 0.0875],
+           [0.03, 0.045, 0.06, 0.075],
+           [0.025, 0.0375, 0.05, 0.0625],
+         ]
+         expect(change.map { |x| x.round(6) }).to eq expected
+       end
+     end
+   end
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,3 @@
+ require 'bundler/setup'
+ Bundler.setup(:test)
+ require_relative '../lib/brainy'
metadata ADDED
@@ -0,0 +1,83 @@
+ --- !ruby/object:Gem::Specification
+ name: brainy
+ version: !ruby/object:Gem::Version
+   version: 2.0.1
+ platform: java
+ authors:
+ - Robert Scott Reis
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2016-04-15 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.2'
+   name: rspec
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '3.2'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.4'
+   name: rake
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '10.4'
+ description: Brainy is an Artificial Neural Network (ANN) using the Backpropagation algorithm.
+ email: reis.robert.s@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - LICENSE
+ - README.md
+ - lib/brainy.rb
+ - lib/brainy/gaussian.rb
+ - lib/brainy/jblas.rb
+ - lib/brainy/network.rb
+ - lib/brainy/version.rb
+ - lib/jar/jblas-1.2.4.jar
+ - spec/lib/brainy/gaussian_spec.rb
+ - spec/lib/brainy/jblas_spec.rb
+ - spec/lib/brainy/network_spec.rb
+ - spec/spec_helper.rb
+ homepage: https://github.com/EvilScott/brainy
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.8
+ signing_key:
+ specification_version: 4
+ summary: An Artificial Neural Network
+ test_files: []