brains 0.1.0-java → 0.1.1-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 0d640546a10c8984b876f7f95feeab5cddc10f39
-   data.tar.gz: ed2c32d65a0cd4cda1ca00667f821b95e5cc70c7
+   metadata.gz: 555149c0570a03a1ef8af988f8114013457cc2ca
+   data.tar.gz: 01fb0792ebd5b148ea5ab0da4f43cf5da4d482a4
  SHA512:
-   metadata.gz: 95bb48840ee28b6d10f527b0b2efc876db9da382669e5e66027305bd92651846ec9343a217dba1fef4a9d95366cb240d1aeb08a8c73aa98062e58785b81291ec
-   data.tar.gz: 77f3df827145cd2314238a5eb0c09f9f52bbf016dd3a6cc9786c472fe5298f7d2e014edd3642bdc1155f15c0bf630176b6317ac45779d16e5cbf52a07c03b222
+   metadata.gz: 96f733e7575451619ae2c354921059cb4c2b06524d69d14433b0aeb3b202054caf79cf6ea42153e69e14d16a8c3e551de004629b4189d24e648e39a92c49807f
+   data.tar.gz: ee83b4c978701cfd4095cd0d8de754e8e371fe0e543648d8f1cb4c625c265cd52d9ab3de4b3a14212f5b3f27ea0bfc6d15b530ce10a55d0b1651c37f43a4de39
data/README.md CHANGED
@@ -2,7 +2,9 @@
  
  A Feedforward neural network toolkit for JRuby. Easily add machine learning features
  to your ruby application using this Gem. Though there are faster native C implementations
- this java backend provides a balance of performance and ease of use.
+ available (e.g. FANN), we need something that is simple, beginner-friendly, and just works.
+
+ This Java-based implementation provides a balance of performance and ease of use.
  
  ## Installation
  
@@ -23,6 +25,12 @@ Or install it yourself as:
  
      $ gem install brains
  
+ ## Features
+
+ * Customizable network parameters depending on requirements
+ * Fast (a bit slower than FANN but significantly faster than a pure Ruby implementation)
+ * NN backend implemented in Java, which allows for a platform-agnostic implementation
+
  ## Usage
  
  The brains gem contains facilities for training and using the feedforward neural network
@@ -38,9 +46,8 @@ require 'brains'
  
  # Build a 3 layer network: 4 input neurons, 4 hidden neurons, 3 output neurons
  # Bias neurons are automatically added to input + hidden layers; no need to specify these
- # 5 = 4 in one hidden layer + 1 output neuron (input neurons not counted)
  
- nn = Brains::Net.create(2, 1, 5, { neurons_per_layer: 4 })
+ nn = Brains::Net.create(2, 1, 1, { neurons_per_layer: 4 }) # 2 inputs, 1 output, 1 hidden layer
  nn.randomize_weights
  ```
  
@@ -133,6 +140,10 @@ You can then save it to a file. You can then load it back using load()
  
  ```ruby
  nn = Brains::Net.load(saved_state)
+
+
+ # use
+ nn.feed([0.9, 0.9])
  ```
  
  For other samples please take a look at the example folder.
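
Taken together, the README hunks above document the 0.1.1 calling convention. A minimal end-to-end sketch, assuming only the API shown in this diff (`create`, `randomize_weights`, `optimize`, `feed`) and illustrative data values:

```ruby
#!/usr/bin/env ruby
require 'brains'

# 2 inputs, 1 output, 1 hidden layer of 4 neurons (0.1.1 signature)
nn = Brains::Net.create(2, 1, 1, { neurons_per_layer: 4 })
nn.randomize_weights

# XOR truth table in the [[inputs], [outputs]] format used by the bundled examples
training_data = [
  [[0.9, 0.9], [0.1]],
  [[0.9, 0.1], [0.9]],
  [[0.1, 0.9], [0.9]],
  [[0.1, 0.1], [0.1]]
]

# target error 0.01, up to 1,000,000 epochs; the block receives progress updates
result = nn.optimize(training_data, 0.01, 1_000_000) { |i, error| puts "#{i} #{error}" }
puts "trained in #{result[:iterations]} iterations (error #{result[:error]})"

p nn.feed([0.9, 0.1]) # should approach [0.9]
```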
data/example/colors.rb ADDED
@@ -0,0 +1,45 @@
+ #!/usr/bin/env ruby
+
+ require 'brains'
+
+ # This neural network will identify the main color name based on rgb values
+
+ label_encodings = {
+   "Red" => [1, 0, 0],
+   "Green" => [0, 1, 0],
+   "Blue" => [0, 0, 1]
+ }
+ # 0000ff
+ training_data = [
+   [[0xf0/0xff.to_f, 0xf8.to_f/0xff.to_f, 1], [0, 0, 1]],
+   [[0x00/0xff.to_f, 0x00.to_f/0xff.to_f, 1], [0, 0, 1]],
+   [[0x00/0xff.to_f, 0x00.to_f/0xff.to_f, 0x8b.to_f/0xff.to_f], [0, 0, 1]]
+ ]
+
+ nn = Brains::Net.create(2, 1, 9, { neurons_per_layer: 4 })
+ nn.randomize_weights
+
+
+ # test on untrained data
+ # 0000ee
+ test_data = [
+   [0x00/0xff.to_f, 0x00.to_f/0xff.to_f, 0xee.to_f/0xff.to_f],
+ ]
+
+ results = test_data.collect { |item|
+   nn.feed(item)
+ }
+
+ p results
+
+ result = nn.optimize(training_data, 0.01, 1_000 ) { |i, error|
+   puts "#{i} #{error}"
+ }
+
+ puts "after training"
+
+ results = test_data.collect { |item|
+   nn.feed(item)
+ }
+
+ p results
data/example/iris.rb CHANGED
@@ -50,7 +50,7 @@ end
  
  # Build a 3 layer network: 4 input neurons, 4 hidden neurons, 3 output neurons
  # Bias neurons are automatically added to input + hidden layers; no need to specify these
- nn = Brains::Net.create(4, 3, 7, { neurons_per_layer: 4 })
+ nn = Brains::Net.create(4, 3, 1, { neurons_per_layer: 4 })
  nn.randomize_weights
  
  prediction_success = -> (actual, ideal) {
data/example/xor.rb CHANGED
@@ -7,7 +7,7 @@ require 'brains'
  # Bias neurons are automatically added to input + hidden layers; no need to specify these
  # 5 = 4 in one hidden layer + 1 output neuron (input neurons not counted)
  
- nn = Brains::Net.create(2, 1, 5, { neurons_per_layer: 4 })
+ nn = Brains::Net.create(2, 1, 1, { neurons_per_layer: 4 })
  nn.randomize_weights
  
  # A B A XOR B
@@ -37,7 +37,7 @@ results = test_data.collect { |item|
  
  p results
  
- result = nn.optimize(training_data, 0.01, 1_000 ) { |i, error|
+ result = nn.optimize(training_data, 0.01, 1_000_000 ) { |i, error|
    puts "#{i} #{error}"
  }
  
Binary file
data/lib/brains/net.rb CHANGED
@@ -4,13 +4,16 @@ module Brains
  class Net
    attr_accessor :nn, :config
  
-   def self.create(input, output, total, opts = {})
-     config = com.dayosoft.nn.NeuralNet::Config.new(input, output, total)
+   def self.create(input, output, total_hidden_layers = 1, opts = {})
+     neurons_per_layer = opts[:neurons_per_layer] || 5
+
+     config = com.dayosoft.nn.NeuralNet::Config.new(input, output, total_hidden_layers * neurons_per_layer + output)
      config.bias = opts[:bias] || 1.0
      config.outputBias = opts[:output_bias] || 1.0
      config.learningRate = opts[:learning_rate] || 0.1
-     config.neuronsPerLayer = opts[:neurons_per_layer] || 5
+     config.neuronsPerLayer = neurons_per_layer
      config.momentumFactor = opts[:momentum_factor] || 0.5
+     config.backPropagationAlgorithm = opt_t_back_alg(opts[:train_method] || :standard)
      config.activationFunctionType = opt_to_func(opts[:activation_function] || :htan)
      config.outputActivationFunctionType = opt_to_func(opts[:activation_function] || :sigmoid)
      config.errorFormula = opt_to_error_func(opts[:activation_function] || :mean_squared)
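
The revised constructor derives the total neuron count from a hidden-layer count rather than taking it directly, which is why the bundled examples change their third argument. A small worked sketch of that arithmetic, assuming the defaults shown in the hunk above:

```ruby
# 0.1.1: the third argument is the number of hidden layers; the neuron total
# passed to the Java Config is total_hidden_layers * neurons_per_layer + output.
# e.g. the README/xor example: 1 hidden layer * 4 neurons + 1 output = 5,
# which matches the literal 5 previously passed in 0.1.0.
nn = Brains::Net.create(2, 1, 1, { neurons_per_layer: 4 }) # was create(2, 1, 5, ...) in 0.1.0
```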
@@ -38,7 +41,7 @@ module Brains
      @nn.dumpWeights.to_a.map(&:to_a)
    end
  
-   def optimize(test_cases, target_error = 0.01, max_epoch = 1_000_000_000, is_batch = false, &callback)
+   def optimize(test_cases, target_error = 0.01, max_epoch = 1_000_000_000, callback_interval = 1000, &callback)
      inputs = []
      outputs = []
  
@@ -47,7 +50,8 @@ module Brains
        outputs << item[1].to_java(Java::double)
      end
  
-     result = @nn.optimize(java.util.ArrayList.new(inputs), java.util.ArrayList.new(outputs), target_error, max_epoch, is_batch, callback)
+     result = @nn.optimize(java.util.ArrayList.new(inputs), java.util.ArrayList.new(outputs), target_error, max_epoch,
+       callback_interval, callback)
      { iterations: result.first, error: result.second }
    end
  
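
The fourth positional argument of `optimize` changes meaning in this hunk: the old `is_batch` flag becomes a callback reporting interval (default 1000). A hedged usage sketch, continuing from the earlier example; the interval of 500 is illustrative:

```ruby
# report progress every 500 epochs instead of the default 1000
result = nn.optimize(training_data, 0.01, 1_000_000, 500) { |epoch, error|
  puts "epoch #{epoch}: error #{error}"
}
puts "#{result[:iterations]} iterations, final error #{result[:error]}"
```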
@@ -91,6 +95,15 @@ module Brains
        end
      end
  
+   def self.opt_t_back_alg(func)
+     case func
+     when :standard
+       com.dayosoft.nn.NeuralNet::Config::STANDARD_BACKPROPAGATION
+     when :rprop
+       com.dayosoft.nn.NeuralNet::Config::RPROP_BACKPROPAGATION
+     end
+   end
+
    def self.opt_to_error_func(func)
      case func
      when :mean_squared
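
The new `opt_t_back_alg` helper is consumed by `create` via the `:train_method` option (see the first hunk in this file); only `:standard` and `:rprop` are mapped. A minimal sketch of selecting resilient backpropagation, assuming the option names exactly as they appear in this diff:

```ruby
nn = Brains::Net.create(4, 3, 1, {
  neurons_per_layer: 6,
  train_method: :rprop # maps to Config::RPROP_BACKPROPAGATION; default is :standard
})
nn.randomize_weights
```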
@@ -1,3 +1,3 @@
  module Brains
-   VERSION = "0.1.0"
+   VERSION = "0.1.1"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: brains
  version: !ruby/object:Gem::Version
-   version: 0.1.0
+   version: 0.1.1
  platform: java
  authors:
  - Joseph Emmanuel Dayo
@@ -11,47 +11,47 @@ cert_chain: []
  date: 2017-02-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
+   name: bundler
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.12'
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
          version: '1.12'
-   name: bundler
    prerelease: false
    type: :development
+ - !ruby/object:Gem::Dependency
+   name: rake
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.12'
- - !ruby/object:Gem::Dependency
+         version: '10.0'
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
          version: '10.0'
-   name: rake
    prerelease: false
    type: :development
+ - !ruby/object:Gem::Dependency
+   name: rspec
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '10.0'
- - !ruby/object:Gem::Dependency
+         version: '3.0'
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
          version: '3.0'
-   name: rspec
    prerelease: false
    type: :development
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '3.0'
  description: A feedforward neural network library for JRuby
  email:
  - joseph.dayo@gmail.com
@@ -70,6 +70,7 @@ files:
  - bin/console
  - bin/setup
  - brains.gemspec
+ - example/colors.rb
  - example/iris.rb
  - example/xor.rb
  - iris.data