simple_neural_network 0.1.0 → 0.2.0
- checksums.yaml +4 -4
- data/lib/layer.rb +3 -1
- data/lib/network.rb +20 -8
- metadata +1 -1
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 38af9f9ff92a917ee9d8468adfae6f0d747219a7
+  data.tar.gz: 4aafd03d59079c099d3aaa0c595fc8abf48034ba
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3b419df2647c65d17ab71758a001d78498fd7c8fccbcf71547040b795b1521d6863e0c7807768a82ee84811afd7bdd17619f4f767df9c96a90062103203e9cb9
+  data.tar.gz: 435dcc47fa8333ba5a82a8142cbfa57558cdce2302078468e034890a1e057d5dcaba0a996a60feca17de332cacb8228f180bcf3967f4429903c0be77866335fa
data/lib/layer.rb
CHANGED

@@ -30,7 +30,7 @@ class SimpleNeuralNetwork
     # get_output calculates the array of neuron values for this layer.
     # This is calculated by recursively fetching the output from the previous layer, then applying edge/node weight and bias rules.
     # The first layer will fetch it's values from @network.inputs
-    def get_output
+    def get_output(normalize: @network.hidden_layer_normalization_function)
       if !prev_layer
         # This is the first layer, so the output set is simply the network input set
         @network.inputs
@@ -47,6 +47,8 @@ class SimpleNeuralNetwork
         result = (edge_matrix.dot(prev_output_matrix)).each_with_index.map do |val, i|
           val + @neurons[i].bias
         end
+
+        result.map {|item| normalize.call(item) }
       end
     end
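With this change, Layer#get_output runs each neuron's weighted sum (plus bias) through a normalization callable before returning it, defaulting to the network's hidden_layer_normalization_function. A minimal Ruby sketch of that final mapping step, using a standalone array and an illustrative ReLU-style lambda (the values and the relu name are hypothetical, not part of the gem):

# Stand-in for the edge-weighted sums plus biases computed above.
pre_activation = [-1.2, 0.0, 3.4]

# Hypothetical normalization callable, the same shape as what get_output's
# normalize: keyword expects (anything responding to #call).
relu = ->(x) { x < 0 ? 0 : x }

# Mirrors the new `result.map {|item| normalize.call(item) }` line.
activated = pre_activation.map { |item| relu.call(item) }
# => [0, 0.0, 3.4]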
data/lib/network.rb
CHANGED

@@ -23,7 +23,8 @@ class SimpleNeuralNetwork
 
     attr_accessor :inputs
 
-    attr_accessor :normalization_function
+    attr_accessor :hidden_layer_normalization_function
+    attr_accessor :output_normalization_function
 
     attr_accessor :edge_initialization_function
     attr_accessor :neuron_bias_initialization_function
@@ -32,7 +33,9 @@ class SimpleNeuralNetwork
       @layers = []
       @inputs = []
 
-      @normalization_function = method(:default_normalization_function)
+      @hidden_layer_normalization_function = method(:default_hidden_layer_normalization_function)
+      @output_normalization_function = method(:default_output_normalization_function)
+
       @edge_initialization_function = method(:default_edge_initialization_function)
       @neuron_bias_initialization_function = method(:default_neuron_bias_initialization_function)
     end
@@ -53,9 +56,7 @@ class SimpleNeuralNetwork
       @inputs = inputs
 
      # Get output from last layer. It recursively depends on layers before it.
-      @layers[-1].get_output
-      (@normalization_function || method(:default_normalization_function)).call(output)
-      end
+      @layers[-1].get_output(normalize: output_normalization_function)
     end
 
     # Returns the number of input nodes
@@ -84,8 +85,9 @@ class SimpleNeuralNetwork
       end
     end
 
-    def reset_normalization_function
-      @normalization_function = method(:default_normalization_function)
+    def reset_normalization_functions
+      @output_normalization_function = method(:default_output_normalization_function)
+      @hidden_layer_normalization_function = method(:default_hidden_layer_normalization_function)
     end
 
     def clear_edge_caches
@@ -149,10 +151,20 @@ class SimpleNeuralNetwork
     # The default normalization function for the network output
     # The standard logistic sigmoid function
     # f(x) = 1 / (1 + e^(-x))
-    def default_normalization_function(output)
+    def default_output_normalization_function(output)
       1 / (1 + (Math::E ** (-1 * output)))
     end
 
+    # The default hidden layer normalization function
+    # The ReLU function
+    def default_hidden_layer_normalization_function(output)
+      if output < 0
+        0
+      else
+        output
+      end
+    end
+
     def default_edge_initialization_function
       rand(-5..5)
     end
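Taken together, 0.2.0 splits the single normalization hook into one function for hidden layers (defaulting to ReLU) and one for the network output (defaulting to the logistic sigmoid). A minimal usage sketch, assuming the class is exposed as SimpleNeuralNetwork::Network and that the top-level require matches the gem name; the diff itself only shows the enclosing class SimpleNeuralNetwork context:

# Assumed require path, inferred from the gem name.
require "simple_neural_network"

# Assumed constant name for the class defined in data/lib/network.rb.
network = SimpleNeuralNetwork::Network.new

# Override only the output normalization; hidden layers keep the ReLU default.
network.output_normalization_function = ->(value) { Math.tanh(value) }

# Restore both defaults: sigmoid for the output, ReLU for hidden layers.
network.reset_normalization_functions

Because the hooks default to Method objects and are invoked via #call, any object responding to #call (a lambda, proc, or method reference) can be swapped in for either role.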