db_mlp 0.0.7 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,10 +1,8 @@
1
1
  = Multi-Layer Perceptron Neural Network
2
2
 
3
- This is a sqlite backed version of my previous MLP.
3
+ This is a Multi-Layer Perceptron Neural Network that uses early stopping to prevent itself from overfitting.
4
4
 
5
- This version also provides training validation to prevent the MLP from overfitting.
6
-
7
- This is first release and because of that it's a bit slow, I'll probably try out using Memcached or something else as its data store.
5
+ It also saves its state so that you can train the network and then re-use it whenever you want.
8
6
 
9
7
  == Install
10
8
 
@@ -15,13 +13,13 @@ This is first release and because of that it's a bit slow, I'll probably try out
15
13
 
16
14
  require 'rubygems'
17
15
  require 'db_mlp'
18
-
16
+
19
17
  a = DBMLP.new(path_to_db, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
20
18
 
21
19
  training = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
22
20
  testing = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
23
21
  validation = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
24
-
22
+
25
23
  a.train(training, testing, validation, number_of_training_iterations)
26
24
 
27
25
  puts "Test data"
@@ -30,7 +28,13 @@ This is first release and because of that it's a bit slow, I'll probably try out
30
28
  puts "[1,0] = > #{a.feed_forward([1,0]).inspect}"
31
29
  puts "[1,1] = > #{a.feed_forward([1,1]).inspect}"
32
30
 
33
- You can also tell the network what iterations you would like it to perform validation on:
31
+ After training has finished, the network is saved to the specified file path. When you want to re-use the network, just call:
32
+
33
+ a = DBMLP.load(path_to_db)
34
+
35
+ a.feed_forward([0,1])
36
+
37
+ You can also tell the network which iterations you would like it to perform validation on:
34
38
 
35
39
  DBMLP.new(path_to_db, :hidden_layers => [2],
36
40
  :output_nodes => 1,
@@ -51,7 +55,7 @@ If you want it to, the MLP can produce a test report. The basic idea is that at
51
55
  The above example produces these times (3000 iterations)
52
56
 
53
57
         user     system      total        real
54
- DBMLP 9.460000 0.150000 9.610000 ( 10.322743)
58
+ DBMLP 0.870000 0.000000 0.870000 (0.877338)
55
59
 
56
60
  == Copyright
57
61
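Putting the snippets above together, here is a minimal end-to-end sketch (the db path and the XOR data sets are placeholders; any writable file path will do):

  require 'rubygems'
  require 'db_mlp'

  path_to_db = File.expand_path('xor_network.txt') # any writable file path

  # 2 inputs, one hidden layer of 2 neurons, 1 output node
  a = DBMLP.new(path_to_db, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)

  data = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
  a.train(data, data, data, 3000) # training, testing, validation, iterations

  # Training saves the network to path_to_db, so it can be reloaded later
  b = DBMLP.load(path_to_db)
  puts b.feed_forward([0,1]).inspect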
 
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.7
1
+ 0.0.8
Binary file
@@ -1,22 +1,15 @@
1
- require 'rubygems'
2
- require 'benchmarker'
3
1
  require 'benchmark'
4
2
  require File.dirname(__FILE__) + '/../lib/db_mlp'
5
-
6
- Benchmarker.go('lib') do
7
-
8
- db = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/data.rdb"
9
3
 
4
+ db = File.dirname(File.expand_path(__FILE__)) + "/data.txt"
10
5
 
11
- training = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
12
- testing = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
13
- validation = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
14
-
15
- Benchmark.bm do |x|
16
- x.report do
17
- a = DBMLP.new(db, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
18
- a.train(training, testing, validation, 10)
19
- end
20
- end
6
+ training = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
7
+ testing = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
8
+ validation = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
21
9
 
22
- end
10
+ Benchmark.bm do |x|
11
+ x.report do
12
+ a = DBMLP.new(db, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
13
+ a.train(training, testing, validation, 3000)
14
+ end
15
+ end
@@ -5,11 +5,11 @@
5
5
 
6
6
  Gem::Specification.new do |s|
7
7
  s.name = %q{db_mlp}
8
- s.version = "0.0.7"
8
+ s.version = "0.0.8"
9
9
 
10
10
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
11
11
  s.authors = ["reddavis"]
12
- s.date = %q{2009-11-11}
12
+ s.date = %q{2010-01-05}
13
13
  s.description = %q{Database backed Multi-Layer Perceptron Neural Network in Ruby}
14
14
  s.email = %q{reddavis@gmail.com}
15
15
  s.extra_rdoc_files = [
@@ -23,26 +23,27 @@ Gem::Specification.new do |s|
23
23
  "README.rdoc",
24
24
  "Rakefile",
25
25
  "VERSION",
26
- "benchmarks/data.rdb",
26
+ "benchmarks/data.txt",
27
27
  "benchmarks/mlp_benchmark.rb",
28
28
  "db_mlp.gemspec",
29
29
  "examples/backpropagation_example.rb",
30
30
  "examples/data.rdb",
31
+ "examples/data.txt",
31
32
  "examples/patterns_with_base_noise.rb",
32
33
  "examples/patterns_with_noise.rb",
33
34
  "examples/training_patterns.rb",
34
35
  "examples/xor.rb",
35
36
  "lib/db_mlp.rb",
36
- "lib/models/neuron.rb",
37
- "lib/modules/create_test_results.rb",
38
- "lib/modules/db.rb",
39
- "lib/modules/test_results_parser.rb",
40
- "lib/modules/training.rb",
37
+ "lib/db_mlp/network.rb",
38
+ "lib/db_mlp/neuron.rb",
39
+ "lib/db_mlp/test_results.rb",
40
+ "lib/db_mlp/test_results_parser.rb",
41
+ "lib/db_mlp/training.rb",
41
42
  "profiling/profile.rb",
42
- "test/db/test.txt",
43
- "test/db/test_results_test/results.txt",
43
+ "test/db/db.txt",
44
44
  "test/helper.rb",
45
- "test/test_db_mlp.rb"
45
+ "test/test_db_mlp.rb",
46
+ "test/test_neuron.rb"
46
47
  ]
47
48
  s.homepage = %q{http://github.com/reddavis/dbmlp}
48
49
  s.rdoc_options = ["--charset=UTF-8"]
@@ -52,6 +53,7 @@ Gem::Specification.new do |s|
52
53
  s.test_files = [
53
54
  "test/helper.rb",
54
55
  "test/test_db_mlp.rb",
56
+ "test/test_neuron.rb",
55
57
  "examples/backpropagation_example.rb",
56
58
  "examples/patterns_with_base_noise.rb",
57
59
  "examples/patterns_with_noise.rb",
Binary file
@@ -1,7 +1,7 @@
1
1
  require File.expand_path(File.dirname(__FILE__) + '/../lib/db_mlp')
2
2
  require 'benchmark'
3
3
 
4
- db = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/data.rdb"
4
+ db = File.dirname(File.expand_path(__FILE__)) + "/data.txt"
5
5
  a = DBMLP.new(db, :hidden_layers => [2],
6
6
  :output_nodes => 1,
7
7
  :inputs => 2,
@@ -1,29 +1,42 @@
1
- require 'rubygems'
2
- require 'datamapper'
3
- require File.expand_path(File.dirname(__FILE__) + '/models/neuron')
4
- require File.expand_path(File.dirname(__FILE__) + '/modules/create_test_results')
5
- require File.expand_path(File.dirname(__FILE__) + '/modules/db')
6
- require File.expand_path(File.dirname(__FILE__) + '/modules/training')
7
- require File.expand_path(File.dirname(__FILE__) + '/modules/test_results_parser')
1
+ require File.expand_path(File.dirname(__FILE__) + '/db_mlp/neuron')
2
+ require File.expand_path(File.dirname(__FILE__) + '/db_mlp/test_results')
3
+ require File.expand_path(File.dirname(__FILE__) + '/db_mlp/training')
4
+ require File.expand_path(File.dirname(__FILE__) + '/db_mlp/test_results_parser')
5
+ require File.expand_path(File.dirname(__FILE__) + '/db_mlp/network')
8
6
 
9
7
  class DBMLP
10
- include DB
8
+ include Network
11
9
  include Training
12
- include CreateTestResults
10
+ include TestResults
13
11
  include TestResultsParser
14
-
12
+
13
+ class << self
14
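+ # Read the marshalled network from db_path and rebuild the DBMLP instance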
+ def load(db_path)
15
+ data = ""
16
+ File.open(db_path) do |f|
17
+ while line = f.gets
18
+ data << line
19
+ end
20
+ end
21
+ Marshal.load(data)
22
+ end
23
+ end
24
+
15
25
  def initialize(db_path, options={})
16
26
  @input_size = options[:inputs]
17
27
  @hidden_layers = options[:hidden_layers]
18
- @number_of_output_nodes = options[:output_nodes]
19
- @verbose = options[:verbose] || false
28
+ @output_nodes = options[:output_nodes]
29
+ @verbose = options[:verbose]
20
30
  @validate_every = options[:validate_every] || 200
21
- connect_to_db(db_path)
22
- setup_network
31
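+ # The file the network will be marshalled to when #save is called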
+ @db_path = db_path
32
+
33
+ @network = setup_network
23
34
  end
24
35
 
25
36
  def feed_forward(input)
26
37
  @network.each_with_index do |layer, layer_index|
38
+ # We go through each layer, taking the previous layer's outputs and using them
39
+ # as the next layer's inputs
27
40
  layer.each do |neuron|
28
41
  if layer_index == 0
29
42
  neuron.fire(input)
@@ -33,18 +46,26 @@ class DBMLP
33
46
  end
34
47
  end
35
48
  end
36
- @network.last.map {|x| x.last_output}
49
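+ # Return the outputs of the output layer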
+ last_outputs
37
50
  end
38
51
 
39
52
  def train(training, testing, validations, n=3000, report_path=nil)
40
53
  train_and_cross_validate(training, validations, n)
54
+ # Create a test report if a report path was given
41
55
  create_test_report(testing, report_path) unless report_path.nil?
56
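+ # Persist the trained network so it can be re-used later with DBMLP.load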
+ save
42
57
  end
43
58
 
44
59
  def inspect
45
60
  @network
46
61
  end
47
62
 
63
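+ # Marshal the whole network to the db path given at initialisation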
+ def save
64
+ File.open(@db_path, 'w+') do |f|
65
+ f.write(Marshal.dump(self))
66
+ end
67
+ end
68
+
48
69
  private
49
70
 
50
71
  def last_outputs
@@ -0,0 +1,32 @@
1
+ module Network
2
+ private
3
+
4
+ # Creates a network from left to right (output nodes on the right)
5
+ def setup_network
6
+ hidden_layers << output_layer
7
+ end
8
+
9
+ def hidden_layers
10
+ network = []
11
+ @hidden_layers.each_with_index do |neurons, index|
12
+ # Number of inputs: the raw input size for the first hidden layer, otherwise the previous layer's size
13
+ if index == 0
14
+ inputs = @input_size
15
+ else
16
+ inputs = network.last.size
17
+ end
18
+
19
+ layer = []
20
+ neurons.times { layer << Neuron.new(inputs, index) }
21
+ network << layer
22
+ end
23
+ network
24
+ end
25
+
26
+ def output_layer
27
+ nodes = []
28
+ inputs = @hidden_layers.last
29
+ @output_nodes.times {|n| nodes << Neuron.new(inputs, n) }
30
+ nodes
31
+ end
32
+ end
@@ -1,27 +1,24 @@
1
1
  class Neuron
2
- include DataMapper::Resource
3
- property :id, Serial
4
- property :layer_index, Integer, :index => true
5
- property :last_output, Float
6
- property :db_weights, String
7
- property :delta, Float
2
+
3
+ attr_accessor :delta
4
+
5
+ attr_reader :layer_index, :last_output
8
6
 
9
7
  def initialize(number_of_inputs, layer_index)
10
8
  create_weights(number_of_inputs)
11
- self.layer_index = layer_index
9
+ @layer_index = layer_index
12
10
  end
13
11
 
14
12
  def fire(input)
15
- self.last_output = activation_function(input)
13
+ @last_output = activation_function(input)
16
14
  end
17
15
 
18
16
  def update_weight(inputs, training_rate)
19
- inputs << -1 # Add the bias
20
- new_weights = weights
17
+ inputs << -1 # Add the bias node
18
+
21
19
  weights.each_index do |i|
22
- new_weights[i] += training_rate * delta * inputs[i]
20
+ weights[i] += training_rate * delta * inputs[i]
23
21
  end
24
- self.db_weights = new_weights.join(',')
25
22
  end
26
23
 
27
24
  def inspect
@@ -29,7 +26,7 @@ class Neuron
29
26
  end
30
27
 
31
28
  def weights
32
- db_weights.split(',').map {|x| x.to_f}
29
+ @weights ||= []
33
30
  end
34
31
 
35
32
  private
@@ -37,7 +34,6 @@ class Neuron
37
34
  def activation_function(input)
38
35
  sum = 0
39
36
  input.each_with_index do |n, index|
40
- # puts "index:#{index} weight: #{@weights[index]} input: #{n} input_size: #{input.size}"
41
37
  sum += weights[index] * n
42
38
  end
43
39
  sum += weights.last * -1 #bias node
@@ -50,13 +46,11 @@ class Neuron
50
46
  end
51
47
 
52
48
  def create_weights(number_of_inputs)
53
- # Create random weights between 0 & 1
49
+ # Create random weights between -1 & 1
54
50
  # Plus another one for the bias node
55
- weights = []
56
51
  (number_of_inputs + 1).times do
57
52
  weights << (rand > 0.5 ? -rand : rand)
58
53
  end
59
- self.db_weights = weights.join(',')
60
54
  end
61
55
 
62
56
  end
@@ -1,7 +1,8 @@
1
- module CreateTestResults
1
+ module TestResults
2
2
 
3
3
  private
4
4
 
5
+ # Create a tab-separated file
5
6
  def create_test_report(test_examples, report_path)
6
7
  results = []
7
8
  results << "ID\tAttributes\tTarget\tResults\tError" # Add the headers
@@ -21,6 +22,7 @@ module CreateTestResults
21
22
  end
22
23
  end
23
24
 
25
+ # Calculates sum-of-squares error
24
26
  def calculate_error(targets)
25
27
  outputs = last_outputs
26
28
  sum = 0
@@ -5,6 +5,9 @@ module TestResultsParser
5
5
 
6
6
  module Parser
7
7
 
8
+ # This goes through the test results file created by calling
9
+ # #create_test_report. It then tells you how accurate the
10
+ # classification has been on the testing data.
8
11
  def parse_test_results(filepath, error_limit=0.05)
9
12
  total, correct = 0.0, 0.0
10
13
  File.open(filepath) do |f|
@@ -3,36 +3,44 @@ module Training
3
3
  private
4
4
 
5
5
  def train_and_cross_validate(training, validations, n)
6
- errors = []
7
6
  1.upto(n) do |i|
8
7
  if i % @validate_every == 0
9
8
  print_message("Validating at #{i}")
10
- if validate(validations)
9
+
10
+ if validates?(validations)
11
11
  print_message("Stopping at #{i}")
12
12
  break
13
13
  end
14
14
  end
15
+
15
16
  print_message("Iteration #{i}/#{n}")
16
- training = training.sort_by { rand } #shaken or stirred?
17
+
18
+ # Shuffle the training data so each iteration sees the examples in a different order
19
+ training = training.sort_by { rand }
20
+
17
21
  training.each do |t|
18
22
  input, target = t[0], t[1]
19
23
  training_process(input, target)
20
24
  end
21
- end
22
- save_all_neurons
25
+ end #1.upto
23
26
  end
24
27
 
25
- def validate(validations)
26
- @validations ||= []
27
- sum = 0
28
+ # We check whether the validation error has increased since the last check.
29
+ # If it has, we stop training early as we don't want to overfit the data
30
+ def validates?(validations)
31
+ validation = 0
32
+
28
33
  validations.each do |v|
29
34
  input, target = v[0], v[1]
30
35
  feed_forward(input)
31
- sum += calculate_error(target)
36
+ validation += calculate_error(target)
37
+ end
38
+
39
+ error_increased = !@last_validation.nil? && (validation > @last_validation)
40
+ @last_validation = validation
41
+ if error_increased
42
+ true
32
43
  end
33
- @validations << sum
34
- return false if @validations.size < 2
35
- @validations[-1] > @validations[-2] ? true : false
36
44
  end
37
45
 
38
46
  def training_process(input, targets)
@@ -42,12 +50,6 @@ module Training
42
50
  update_weights(input)
43
51
  end
44
52
 
45
- def save_all_neurons
46
- @network.each do |layer|
47
- layer.each {|n| n.save!}
48
- end
49
- end
50
-
51
53
  def update_weights(input)
52
54
  reversed_network = @network.reverse
53
55
  reversed_network.each_with_index do |layer, layer_index|
@@ -60,6 +62,8 @@ module Training
60
62
  end
61
63
 
62
64
  def update_output_weights(layer, layer_index, input)
65
+ # If we have no hidden layer, just use the input, otherwise take
66
+ # the outputs of the last hidden layer
63
67
  inputs = @hidden_layers.empty? ? input : @network[-2].map {|x| x.last_output}
64
68
  layer.each do |neuron|
65
69
  neuron.update_weight(inputs, 0.25)
@@ -67,8 +71,10 @@ module Training
67
71
  end
68
72
 
69
73
  def update_hidden_weights(layer, layer_index, original_input)
74
+ # If we're on the first hidden layer, we want to use the original input
70
75
  if layer_index == (@network.size - 1)
71
76
  inputs = original_input.clone
77
+ # Otherwise we use the outputs of the previous layer (the one nearer the input)
72
78
  else
73
79
  inputs = @network.reverse[layer_index+1].map {|x| x.last_output}
74
80
  end
@@ -98,8 +104,8 @@ module Training
98
104
  def compute_hidden_deltas(layer, targets, previous_layer)
99
105
  layer.each_with_index do |neuron, neuron_index|
100
106
  error = 0
101
- previous_layer.each do |output_neuron|
102
- error += output_neuron.delta * output_neuron.weights[neuron_index]
107
+ previous_layer.each do |previous_layer_neuron|
108
+ error += previous_layer_neuron.delta * previous_layer_neuron.weights[neuron_index]
103
109
  end
104
110
  output = neuron.last_output
105
111
  neuron.delta = output * (1 - output) * error
@@ -2,7 +2,7 @@ require File.dirname(__FILE__) + '/../lib/db_mlp'
2
2
  require 'rubygems'
3
3
  require 'ruby-prof'
4
4
 
5
- db = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/../benchmarks/data.rdb"
5
+ db = File.dirname(File.expand_path(__FILE__)) + "/../benchmarks/data.txt"
6
6
 
7
7
  a = DBMLP.new(db, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
8
8
 
Binary file
@@ -1,45 +1,18 @@
1
1
  require 'helper'
2
2
 
3
- class TestDBMLP < Test::Unit::TestCase
4
- context "Testing Report" do
5
- setup do
6
- set_data_variables
7
- db_path = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/db/data.rdb"
8
- @test_results_path = File.dirname(File.expand_path(__FILE__)) + '/db/test_results.txt'
9
- a = DBMLP.new(db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
10
- a.train(@training, @testing, @validation, 1, @test_results_path)
11
- end
12
-
13
- should "create a test results .txt file" do
14
- assert File.exists?(@test_results_path)
15
- end
16
-
17
- should "contain some text" do
18
- File.open(@test_results_path, 'r+') do |file|
19
- assert !file.readlines.empty?
20
- end
21
- end
22
- end
23
-
3
+ class TestDBMLP < Test::Unit::TestCase
24
4
  context "DBMLP Instance" do
25
5
  setup do
26
6
  set_data_variables
27
- @db_path = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/db/data.rdb"
7
+ @db_path = saved_db_path
28
8
  end
29
9
 
30
- should "contain 4 layers" do
10
+ should "contain 4 layers (including output layer)" do
31
11
  a = DBMLP.new(@db_path, :hidden_layers => [2, 2, 2], :output_nodes => 2, :inputs => 2)
32
12
  assert_equal 4, a.inspect.size
33
13
  end
34
14
 
35
- should "contain saved 3 layers" do
36
- DBMLP.new(@db_path, :hidden_layers => [2, 2], :output_nodes => 2, :inputs => 2)
37
- b = Neuron.all.map {|x| x.layer_index}.uniq.size
38
- assert_equal 3, b
39
- end
40
-
41
15
  should "contain 1 output node" do
42
- DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes =>4, :inputs => 2)
43
16
  a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
44
17
  assert_equal 1, a.inspect.last.size
45
18
  end
@@ -59,91 +32,34 @@ class TestDBMLP < Test::Unit::TestCase
59
32
  assert_kind_of Array, a.feed_forward([0,1])
60
33
  end
61
34
 
62
- should "save its neurons deltas" do
63
- a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
64
- a.train(@training, @testing, @validation, 1)
65
- b = Neuron.all(:delta.not => nil)
66
- assert !b.empty?
67
- end
68
-
69
- should "save its output neurons weights" do
70
- a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
71
- before = Neuron.first(:layer_index => -1).weights.inject([]) do |array, n|
72
- array << n
73
- end
74
-
75
- a.train(@training, @testing, @validation, 1)
76
-
77
- after = Neuron.first(:layer_index => -1).weights.inject([]) do |array, n|
78
- array << n
79
- end
80
- assert_not_equal before, after
81
- end
82
-
83
- should "update its hidden neurons weights" do
35
+ should "set its neurons deltas" do
84
36
  a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
85
- before = Neuron.first(:layer_index => 0).weights.inject([]) do |array, n|
86
- array << n
87
- end
88
-
89
37
  a.train(@training, @testing, @validation, 1)
90
- after = Neuron.first(:layer_index => 0).weights.inject([]) do |array, n|
91
- array << n
92
- end
93
- assert_not_equal before, after
38
+ b = a.inspect.flatten.map {|x| x.delta}.delete_if {|x| !x.nil?}
39
+ assert b.empty?
94
40
  end
95
41
  end
96
42
 
97
- context "DB for a new mlp" do
98
- setup do
99
- db_path = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/db/data.rdb"
100
- @a = DBMLP.new(db_path, :hidden_layers => [2, 2], :output_nodes => 2, :inputs => 2)
101
- end
102
-
103
- should "save 6 neurons" do
104
- assert_equal 6, Neuron.count
105
- end
106
-
107
- should "save 2 hidden neurons in the first hidden layer" do
108
- assert_equal 2, Neuron.count(:layer_index => 0)
109
- end
110
- end
111
-
112
- context "Neuron" do
43
+ context "Network Structure" do
113
44
  setup do
114
- @db_path = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/db/data.rdb"
45
+ @db_path = saved_db_path
115
46
  end
116
-
117
- should "have 2 weights on output neuron" do
118
- a = DBMLP.new(@db_path, :hidden_layers => [1], :output_nodes => 1, :inputs => 2)
119
- assert_equal 2, a.inspect.last.last.weights.size
120
- end
121
-
122
- should "have saved 2 weights on output neuron" do
123
- a = DBMLP.new(@db_path, :hidden_layers => [1], :output_nodes => 1, :inputs => 2)
124
- assert_equal 2, Neuron.first(:layer_index => -1).weights.size
125
- end
126
-
47
+
127
48
  should "have 3 weights on output neuron" do
128
49
  a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
129
50
  assert_equal 3, a.inspect.last.last.weights.size
130
51
  end
131
52
 
132
- should "have saved 3 weights on output neuron" do
53
+ should "have saved 2 neurons on the first hidden layer" do
133
54
  a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
134
- assert_equal 3, Neuron.first(:layer_index => -1).weights.size
135
- end
136
-
137
- should "create a hidden neuron with 3 weights" do
138
- a = DBMLP.new(@db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
139
- assert_equal 3, a.inspect.first.last.weights.size
55
+ assert_equal 2, a.inspect[0].size
140
56
  end
141
57
  end
142
58
 
143
59
  context "Validations" do
144
60
  setup do
145
61
  $stdout = StringIO.new
146
- @db_path = "sqlite3://#{File.dirname(File.expand_path(__FILE__))}/db/data.rdb"
62
+ @db_path = saved_db_path
147
63
  set_data_variables
148
64
  end
149
65
 
@@ -159,25 +75,61 @@ class TestDBMLP < Test::Unit::TestCase
159
75
  assert_equal 2, output.size
160
76
  end
161
77
  end
162
-
163
- context "Testing Results Parser" do
164
- setup do
165
- @test_results = File.dirname(__FILE__) + '/db/test_results_test/results.txt'
166
- end
167
-
168
- should "return 100%" do
169
- result = DBMLP.parse_test_results(@test_results, 1)
170
- assert_equal 100, result
171
- end
172
-
173
- should "return 50%" do
174
- result = DBMLP.parse_test_results(@test_results, 0.00002)
175
- assert_equal 50, result
176
- end
177
- end
78
+
79
+ context "Testing Report" do
80
+ setup do
81
+ set_data_variables
82
+ db_path = saved_db_path
83
+ @test_results_path = File.dirname(File.expand_path(__FILE__)) + '/db/test_results.txt'
84
+ a = DBMLP.new(db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
85
+ a.train(@training, @testing, @validation, 1, @test_results_path)
86
+ end
87
+
88
+ should "create a test results .txt file" do
89
+ assert File.exists?(@test_results_path)
90
+ end
91
+
92
+ should "contain some text" do
93
+ File.open(@test_results_path, 'r+') do |file|
94
+ assert !file.readlines.empty?
95
+ end
96
+ end
97
+ end
98
+
99
+ context "IO" do
100
+ context "Save" do
101
+ setup do
102
+ db_path = saved_db_path
103
+ FileUtils.rm(db_path, :force => true)
104
+ @a = DBMLP.new(db_path, :hidden_layers => [2], :output_nodes => 1, :inputs => 2)
105
+ end
106
+
107
+ should "create a file" do
108
+ @a.save
109
+ assert File.exists?(saved_db_path)
110
+ end
111
+ end
112
+
113
+ context "Load" do
114
+ setup do
115
+ @db_path = saved_db_path
116
+ FileUtils.rm(@db_path, :force => true)
117
+ DBMLP.new(@db_path, :hidden_layers => [8], :output_nodes => 1, :inputs => 2).save
118
+ end
119
+
120
+ should "create a file" do
121
+ a = DBMLP.load(@db_path)
122
+ assert_equal 8, a.inspect[0].size
123
+ end
124
+ end
125
+ end
178
126
 
179
127
  private
180
128
 
129
+ def saved_db_path
130
+ File.expand_path(File.dirname(__FILE__) + '/db/db.txt')
131
+ end
132
+
181
133
  def set_data_variables
182
134
  @training = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
183
135
  @testing = [[[0,0], [0]], [[0,1], [1]], [[1,0], [1]], [[1,1], [0]]]
@@ -0,0 +1,35 @@
1
+ require 'helper'
2
+
3
+ class TestNeuron < Test::Unit::TestCase
4
+ context "Initialization" do
5
+ should "set initial weights" do
6
+ a = create_neuron
7
+ assert !a.weights.empty?
8
+ assert_equal 4, a.weights.size # + Bias node
9
+ end
10
+ end
11
+
12
+ context "Weight Update" do
13
+ should "change the weight of the neuron" do
14
+ a = create_neuron
15
+ before = a.weights.clone
16
+ a.delta = 0.9
17
+ a.update_weight([1,2,3], 0.5)
18
+ assert_not_equal before, a.weights
19
+ end
20
+ end
21
+
22
+ context "Fire" do
23
+ should "change last_output" do
24
+ a = create_neuron
25
+ a.fire([1,2,3])
26
+ assert a.last_output
27
+ end
28
+ end
29
+
30
+ private
31
+
32
+ def create_neuron(weights=3, layer_index=0)
33
+ Neuron.new(weights, layer_index)
34
+ end
35
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: db_mlp
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.7
4
+ version: 0.0.8
5
5
  platform: ruby
6
6
  authors:
7
7
  - reddavis
@@ -9,7 +9,7 @@ autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
11
 
12
- date: 2009-11-11 00:00:00 +00:00
12
+ date: 2010-01-05 00:00:00 +00:00
13
13
  default_executable:
14
14
  dependencies: []
15
15
 
@@ -29,26 +29,27 @@ files:
29
29
  - README.rdoc
30
30
  - Rakefile
31
31
  - VERSION
32
- - benchmarks/data.rdb
32
+ - benchmarks/data.txt
33
33
  - benchmarks/mlp_benchmark.rb
34
34
  - db_mlp.gemspec
35
35
  - examples/backpropagation_example.rb
36
36
  - examples/data.rdb
37
+ - examples/data.txt
37
38
  - examples/patterns_with_base_noise.rb
38
39
  - examples/patterns_with_noise.rb
39
40
  - examples/training_patterns.rb
40
41
  - examples/xor.rb
41
42
  - lib/db_mlp.rb
42
- - lib/models/neuron.rb
43
- - lib/modules/create_test_results.rb
44
- - lib/modules/db.rb
45
- - lib/modules/test_results_parser.rb
46
- - lib/modules/training.rb
43
+ - lib/db_mlp/network.rb
44
+ - lib/db_mlp/neuron.rb
45
+ - lib/db_mlp/test_results.rb
46
+ - lib/db_mlp/test_results_parser.rb
47
+ - lib/db_mlp/training.rb
47
48
  - profiling/profile.rb
48
- - test/db/test.txt
49
- - test/db/test_results_test/results.txt
49
+ - test/db/db.txt
50
50
  - test/helper.rb
51
51
  - test/test_db_mlp.rb
52
+ - test/test_neuron.rb
52
53
  has_rdoc: true
53
54
  homepage: http://github.com/reddavis/dbmlp
54
55
  licenses: []
@@ -80,6 +81,7 @@ summary: Database backed Multi-Layer Perceptron Neural Network in Ruby
80
81
  test_files:
81
82
  - test/helper.rb
82
83
  - test/test_db_mlp.rb
84
+ - test/test_neuron.rb
83
85
  - examples/backpropagation_example.rb
84
86
  - examples/patterns_with_base_noise.rb
85
87
  - examples/patterns_with_noise.rb
@@ -1,67 +0,0 @@
1
- module DB
2
-
3
- private
4
-
5
- def setup_network
6
- @network = []
7
- if new_mlp?
8
- wipe_db!
9
- # Hidden Layers
10
- @hidden_layers.each_with_index do |number_of_neurons, index|
11
- layer = []
12
- inputs = index == 0 ? @input_size : @hidden_layers[index-1]#.size
13
- number_of_neurons.times { layer << Neuron.new(inputs, index) }
14
- @network << layer
15
- layer.each {|x| x.save!}
16
- end
17
- # Output layer
18
- inputs = @hidden_layers.empty? ? @input_size : @hidden_layers.last
19
- layer = []
20
- @number_of_output_nodes.times { layer << Neuron.new(inputs, -1)}
21
- @network << layer
22
- layer.each {|x| x.save!}
23
- else
24
- # Problematic area???
25
- @hidden_layers.each_index do |index|
26
- layer = Neuron.all(:layer_index => index, :order => [:id.asc])
27
- @network << layer
28
- end
29
- layer = Neuron.all(:layer_index => -1, :order => [:id.asc])
30
- @network << layer
31
- end
32
- end
33
-
34
- def wipe_db!
35
- DataMapper.auto_migrate!
36
- end
37
-
38
- # Only one mlp per DB, so if this mlp's shape is diff
39
- # to whats in the db then we empty and create a new one
40
- # if its the same then we carry on as we left off
41
- def new_mlp?
42
- new_mlp = false
43
- # Check hidden_layers
44
- @hidden_layers.each_index do |i|
45
- if Neuron.count(:layer_index => i) != @hidden_layers[i]
46
- new_mlp = true
47
- end
48
- end
49
- # Check output layer
50
- if Neuron.count(:layer_index => -1) != @number_of_output_nodes
51
- new_mlp = true
52
- end
53
-
54
- if Neuron.count != (@hidden_layers.size + 1)
55
- new_mlp = true
56
- end
57
- new_mlp
58
- end
59
-
60
- def connect_to_db(db_path)
61
- # DataMapper::Logger.new(STDOUT, :debug)
62
- # DataObjects::Sqlite3.logger = DataObjects::Logger.new(STDOUT, 0)
63
- DataMapper.setup(:default, db_path)
64
- DataMapper.auto_upgrade!
65
- end
66
-
67
- end
File without changes
@@ -1,5 +0,0 @@
1
- ID Attributes Target Results Error
2
- 0 [0, 0] [0] [0.685402162886773] 0.001
3
- 1 [0, 1] [1] [0.682897049007785] 0.00002
4
- 2 [1, 0] [1] [0.675313520548373] 0.00001235
5
- 3 [1, 1] [0] [0.673899630689015] 0.00000000009