simple_neural_network 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. checksums.yaml +4 -4
  2. data/lib/layer.rb +3 -1
  3. data/lib/network.rb +20 -8
  4. metadata +1 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: c118791ad07a0945cbc340ca50e05e5817ce1bd0
4
- data.tar.gz: c1b921e8d031942788fec9148b06f8b7175a727e
3
+ metadata.gz: 38af9f9ff92a917ee9d8468adfae6f0d747219a7
4
+ data.tar.gz: 4aafd03d59079c099d3aaa0c595fc8abf48034ba
5
5
  SHA512:
6
- metadata.gz: 2e70cbbee76e460546772478ff0d1e8c6e5e037bf546b0870c9b31dd36dbb3722de2f37f71793d8984f6b4db5012c0b96a3696e2e47e294ab6d89a7d63741249
7
- data.tar.gz: a386118b62a17c49949191d39a65a3ae7a4844d6b1ee175448d7ea4272b9db257ad383a09032ff4b728be240664d733d35ab605bbd07214f836236b121860d4b
6
+ metadata.gz: 3b419df2647c65d17ab71758a001d78498fd7c8fccbcf71547040b795b1521d6863e0c7807768a82ee84811afd7bdd17619f4f767df9c96a90062103203e9cb9
7
+ data.tar.gz: 435dcc47fa8333ba5a82a8142cbfa57558cdce2302078468e034890a1e057d5dcaba0a996a60feca17de332cacb8228f180bcf3967f4429903c0be77866335fa
data/lib/layer.rb CHANGED
@@ -30,7 +30,7 @@ class SimpleNeuralNetwork
30
30
  # get_output calculates the array of neuron values for this layer.
31
31
  # This is calculated by recursively fetching the output from the previous layer, then applying edge/node weight and bias rules.
32
32
  # The first layer will fetch it's values from @network.inputs
33
- def get_output
33
+ def get_output(normalize: @network.hidden_layer_normalization_function)
34
34
  if !prev_layer
35
35
  # This is the first layer, so the output set is simply the network input set
36
36
  @network.inputs
@@ -47,6 +47,8 @@ class SimpleNeuralNetwork
47
47
  result = (edge_matrix.dot(prev_output_matrix)).each_with_index.map do |val, i|
48
48
  val + @neurons[i].bias
49
49
  end
50
+
51
+ result.map {|item| normalize.call(item) }
50
52
  end
51
53
  end
52
54
 
data/lib/network.rb CHANGED
@@ -23,7 +23,8 @@ class SimpleNeuralNetwork
23
23
 
24
24
  attr_accessor :inputs
25
25
 
26
- attr_accessor :normalization_function
26
+ attr_accessor :hidden_layer_normalization_function
27
+ attr_accessor :output_normalization_function
27
28
 
28
29
  attr_accessor :edge_initialization_function
29
30
  attr_accessor :neuron_bias_initialization_function
@@ -32,7 +33,9 @@ class SimpleNeuralNetwork
32
33
  @layers = []
33
34
  @inputs = []
34
35
 
35
- @normalization_function = method(:default_normalization_function)
36
+ @hidden_layer_normalization_function = method(:default_hidden_layer_normalization_function)
37
+ @output_normalization_function = method(:default_output_normalization_function)
38
+
36
39
  @edge_initialization_function = method(:default_edge_initialization_function)
37
40
  @neuron_bias_initialization_function = method(:default_neuron_bias_initialization_function)
38
41
  end
@@ -53,9 +56,7 @@ class SimpleNeuralNetwork
53
56
  @inputs = inputs
54
57
 
55
58
  # Get output from last layer. It recursively depends on layers before it.
56
- @layers[-1].get_output.map do |output|
57
- (@normalization_function || method(:default_normalization_function)).call(output)
58
- end
59
+ @layers[-1].get_output(normalize: output_normalization_function)
59
60
  end
60
61
 
61
62
  # Returns the number of input nodes
@@ -84,8 +85,9 @@ class SimpleNeuralNetwork
84
85
  end
85
86
  end
86
87
 
87
- def reset_normalization_function
88
- @normalization_function = method(:default_normalization_function)
88
+ def reset_normalization_functions
89
+ @output_normalization_function = method(:default_output_normalization_function)
90
+ @hidden_layer_normalization_function = method(:default_hidden_layer_normalization_function)
89
91
  end
90
92
 
91
93
  def clear_edge_caches
@@ -149,10 +151,20 @@ class SimpleNeuralNetwork
149
151
  # The default normalization function for the network output
150
152
  # The standard logistic sigmoid function
151
153
  # f(x) = 1 / (1 + e^(-x))
152
- def default_normalization_function(output)
154
+ def default_output_normalization_function(output)
153
155
  1 / (1 + (Math::E ** (-1 * output)))
154
156
  end
155
157
 
158
+ # The default hidden layer normalization function
159
+ # The ReLU function
160
+ def default_hidden_layer_normalization_function(output)
161
+ if output < 0
162
+ 0
163
+ else
164
+ output
165
+ end
166
+ end
167
+
156
168
  def default_edge_initialization_function
157
169
  rand(-5..5)
158
170
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: simple_neural_network
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Nathaniel Woodthorpe