rubygrad 1.1.2 → 1.2.1

This diff shows the content changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
Files changed (4):
  1. checksums.yaml +4 -4
  2. data/lib/nn.rb +56 -19
  3. data/mlp_example.rb +15 -6
  4. metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: d2e2b978ae8290e86a85ad55bdda2c86ceed5fb38d19f8713e6104ce5d1c447d
-  data.tar.gz: 5da44a923ca6d8ed8bcbc55a7a8ac937b1d71d1b8b560854320290d853b2450d
+  metadata.gz: 8f74bc1d833eda69afa375ab9cbdbca37fd201a3635e9c517264b1255102c613
+  data.tar.gz: 72461737f3c24099121b35ddbf26300898ce825b1b18625b7c899ac50941fa18
 SHA512:
-  metadata.gz: 68f06f445e60e400c863485ee63c3e919a39df86dc7da2f9c5febdb57c4bcf5475877fbca9356a32d0e9cd5130e2ec290ff910a4f155a7565cb08d0040ac7993
-  data.tar.gz: c0947a0b25cd505594e7d30a1d5da7e48c47f5899e6683047251ba33b720db7ed9456e251826f02ec68947007cf88db802d4ee1c0630c2edaebf4746e2794be5
+  metadata.gz: e7e42b0f1f0e0895635ec37cccfc950d302690cddc53c432f4a3ce8970b9b3e9f80b58e2dc1d7136ab35cf49277e56ec929eaa473466257aca152611dcaee0d4
+  data.tar.gz: 6d5ba4a6b70f91e7909c6b878c99a8bd8534ddec279f70ac7a0afe395a7c849b9b687101ef0304ccd50fd234e38109be27e1280d63e998cfe6a0f9396315d5f0
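
These digests cover the two archives inside the published .gem file (a .gem is a tar archive containing metadata.gz and data.tar.gz). As a minimal Ruby sketch, assuming you have already unpacked rubygrad-1.2.1.gem into the current directory, the SHA256 lines can be reproduced with the standard digest library:

    require "digest"

    # Digest the two archives extracted from the .gem tarball;
    # the output should match the SHA256 entries above for 1.2.1.
    puts Digest::SHA256.file("metadata.gz").hexdigest
    puts Digest::SHA256.file("data.tar.gz").hexdigest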
data/lib/nn.rb CHANGED
@@ -2,12 +2,14 @@ require_relative "value.rb"
 
 class Neuron
 
-    def initialize(number_of_inputs)
+    def initialize(number_of_inputs, activation_function)
         @initial_weights = Array.new(number_of_inputs) { rand(-1.0..1.0) }
         @initial_bias = rand(-1.0..1.0)
 
         @weights = @initial_weights.map { |w| Value.new(w) }
         @bias = Value.new(@initial_bias)
+
+        @activation_function = activation_function
     end
 
     def reset_params
@@ -30,7 +32,7 @@ class Neuron
         self.weights + [self.bias]
     end
 
-    def calc(inputs, activation)
+    def calc(inputs)
         # xw + b
         n = self.weights.size
         raise "Wrong number of inputs! #{inputs.size} expected #{n}" unless n == inputs.size
@@ -38,25 +40,26 @@ class Neuron
         n.times do |index|
             sum += self.weights[index] * inputs[index]
         end
-        if activation == :tanh
+        if @activation_function == :tanh
             sum.tanh
-        elsif activation == :relu
+        elsif @activation_function == :relu
             sum.relu
-        elsif activation == :sigmoid
+        elsif @activation_function == :sigmoid
             sum.sigmoid
         else
-            raise "Unsupported activation function: #{activation}"
+            raise "Unsupported activation function: #{@activation_function}"
         end
     end
 end
 
 class Layer
 
-    def initialize(number_of_inputs, number_of_outputs)
-        @neurons = Array.new(number_of_outputs) { Neuron.new(number_of_inputs) }
+    def initialize(number_of_inputs, number_of_outputs, activation_function)
+        @neurons = Array.new(number_of_outputs) { Neuron.new(number_of_inputs, activation_function) }
+        @activation_function = activation_function
     end
 
-    attr_reader :neurons
+    attr_reader :neurons, :activation_function
 
     def parameters
         params = []
@@ -68,10 +71,10 @@ class Layer
         self.neurons.each { |n| n.reset_params }
     end
 
-    def calc(inputs, activation)
+    def calc(inputs)
         outs = []
         self.neurons.each do |neuron|
-            outs << neuron.calc(inputs, activation)
+            outs << neuron.calc(inputs)
         end
         outs
     end
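
The Neuron and Layer changes above move the activation function from each calc call to the constructor, so a network's activations are fixed when it is built. A minimal sketch of the new call shape (input values here are arbitrary):

    n = Neuron.new(3, :tanh)            # activation is bound at construction
    out = n.calc([2.0, 3.0, -1.0])      # no activation argument anymore

    layer = Layer.new(3, 4, :relu)      # all 4 neurons in this layer use :relu
    outs = layer.calc([2.0, 3.0, -1.0]) # returns one output per neuron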
@@ -80,18 +83,52 @@ end
 class MLP
 
     def initialize(*layers_config)
-        number_of_layers = layers_config.size
+
+        number_of_layers = layers_config.size - 1 # last param is the activation function
+
+        act = layers_config.last
+
+        if !act.is_a?(Symbol) and !act.is_a?(Array)
+            raise "Activation function must be passed as the last parameter: #{act.class} expected Symbol or Array of Symbols"
+        end
+
+        single_activation_function = nil
+
+        if act.is_a?(Symbol)
+
+            single_activation_function = act
+
+        else # is Array
+
+            if not act.all? { |item| item.is_a?(Symbol) }
+                raise "Array with activation functions must contain symbols: #{act}"
+            end
+
+            if act.size == 1
+                single_activation_function = act.first
+            elsif act.size != number_of_layers - 1
+                raise "Array size does not match number of layers with activation functions: #{act.size} expected #{number_of_layers - 1}"
+            end
+        end
+
         @layers = Array.new(number_of_layers - 1) # input layer is not really a layer object
         (number_of_layers - 1).times do |i|
-            @layers[i] = Layer.new(layers_config[i], layers_config[i + 1])
+            @layers[i] = Layer.new(layers_config[i], layers_config[i + 1], single_activation_function.nil? ? act[i] : single_activation_function)
         end
+
         @layers_config = layers_config
     end
 
     attr_reader :layers
 
     def inspect
-        "MLP(#{@layers_config.join(", ")})"
+        lay = @layers_config[0..-2].join(", ") # slice to remove last element
+        act = @layers_config.last.inspect
+        "MLP(#{lay}, #{act})"
+    end
+
+    def to_s
+        inspect
     end
 
     def parameters
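
Per the validation above, the last argument to MLP.new must now be either a single Symbol (one activation shared by every layer) or an Array of Symbols with one entry per non-input layer. A short sketch of the accepted and rejected forms for a 3-4-4-1 network:

    MLP.new(3, 4, 4, 1, :tanh)                  # one activation for all 3 layers
    MLP.new(3, 4, 4, 1, [:relu, :relu, :tanh])  # one activation per layer
    MLP.new(3, 4, 4, 1, [:tanh])                # size-1 array acts like a single Symbol
    MLP.new(3, 4, 4, 1, [:relu, :tanh])         # raises: 2 given, 3 expected
    MLP.new(3, 4, 4, 1)                         # raises: last parameter must be the activation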
@@ -102,11 +139,11 @@ class MLP
 
     def show_params(in_words = false)
         if in_words
-            n = @layers_config[0]
+            n = @layers_config.first
            puts "Layer 0: (#{n} input#{n > 1 ? "s" : ""})"
            self.layers.each_with_index do |layer, i|
                n = layer.neurons.size
-                puts "Layer #{i + 1}: (#{n} neuron#{n > 1 ? "s" : ""})"
+                puts "Layer #{i + 1}: (#{n} neuron#{n > 1 ? "s" : ""}, #{layer.activation_function.inspect} activation)"
                layer.neurons.each_with_index do |neuron, ii|
                    n = neuron.weights.size
                    puts "\tNeuron #{ii + 1}: (#{n} weight#{n > 1 ? "s" : ""})"
@@ -116,7 +153,7 @@ class MLP
                end
            end
        else
-            n = @layers_config[0]
+            n = @layers_config.first
            self.layers.each_with_index do |layer, i|
                n = layer.neurons.size
                puts "["
@@ -146,10 +183,10 @@ class MLP
         self.parameters.each { |p| p.grad = 0.0 }
     end
 
-    def calc(inputs, activation)
+    def calc(inputs)
         out = inputs
         self.layers.each do |layer|
-            out = layer.calc(out, activation) # chain the results forward, layer by layer
+            out = layer.calc(out) # chain the results forward, layer by layer
         end
         out.size == 1 ? out[0] : out # for convenience
     end
data/mlp_example.rb CHANGED
@@ -1,14 +1,22 @@
-require_relative 'lib/nn.rb'
+require 'rubygrad'
 
-nn = MLP.new(3, 4, 4, 1)
+# Build a Multi Layer Perceptron with 4 layers
+# First Layer (Layer 0) => Input Layer => 3 Neurons => 3 Inputs
+# Second Layer (Layer 1) => Hidden Layer => 4 Neurons
+# Third Layer (Layer 2) => Hidden Layer => 4 Neurons
+# Fourth Layer (Layer 3) => Output Layer => 1 Neuron => 1 Output
+nn = MLP.new(3, 4, 4, 1, :tanh)
 
+# 4 input samples
 x_inputs = [
     [2.0, 3.0, -1.0],
     [3.0, -1.0, 0.5],
     [0.5, 1.0, 1.0],
     [1.0, 1.0, -1.0]
 ]
-y_expected = [1.0, -1.0, -1.0, 1.0] # desired
+
+# expected output for each of the 4 inputs above
+y_expected = [1.0, -1.0, -1.0, 1.0]
 
 passes = 2000
 learning_rate = 0.2
@@ -20,7 +28,7 @@ _loss_format = "%.#{_loss_precision}f"
 (0...passes).each do |pass|
 
     # forward pass (calculate output)
-    y_calculated = x_inputs.map { |x| nn.calc(x, :tanh) }
+    y_calculated = x_inputs.map { |x| nn.calc(x) }
 
     # loss function (check how good the neural net is)
     loss = 0.0
@@ -38,6 +46,7 @@ _loss_format = "%.#{_loss_precision}f"
     break if loss.value == 0 # just for fun and just in case
 end
 
-y_calculated = x_inputs.map { |x| nn.calc(x, :tanh) }
+y_calculated = x_inputs.map { |x| nn.calc(x) }
 puts
-puts y_calculated
+puts "Final NN results:"
+y_calculated.each_with_index { |y_c, i| puts "Output: #{y_c} => Expected: #{y_expected[i]}" }
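
Taken together, a minimal usage sketch of the 1.2.1 API, using only methods shown in this diff (the per-layer activation array is optional):

    require 'rubygrad'

    nn = MLP.new(3, 4, 4, 1, [:relu, :relu, :tanh])
    puts nn                          # to_s delegates to inspect => MLP(3, 4, 4, 1, [:relu, :relu, :tanh])
    nn.show_params(true)             # now also reports each layer's activation
    out = nn.calc([2.0, 3.0, -1.0])  # forward pass; no activation argument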
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rubygrad
 version: !ruby/object:Gem::Version
-  version: 1.1.2
+  version: 1.2.1
 platform: ruby
 authors:
 - Sergio Oliveira Jr
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-03-21 00:00:00.000000000 Z
+date: 2023-03-22 00:00:00.000000000 Z
 dependencies: []
 description:
 email: sergio.oliveira.jr@gmail.com