synaptical 0.0.1.pre.beta1

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 2eb79f8db08a2545b01bee1be288ba3f3b0887cf5dab92abbff912dee9206726
4
+ data.tar.gz: ffc9a05fff7c76dfbf8e5c1d4c5b24d5b238320ae23fa5ef885da1c0e56ce336
5
+ SHA512:
6
+ metadata.gz: 4d60328ad93e877c08df009a8057b87085119d234db531fdb43e82ef58be77305bb6d9620bc4a2dc9c33e2bdc61416fb5d7c2fc8bb2658d65b1a3aaa8e2b0b29
7
+ data.tar.gz: 98c8d2cc964b1f2ba6f396c00b711227230fa03669dbe8083334f70ce0ba7dd09ca2141e54c931685514a183b0ca8fc73b1e58aef947cc8c4ad47004b58a5e10
@@ -0,0 +1,11 @@
1
+ /.bundle/
2
+ /.yardoc
3
+ /_yardoc/
4
+ /coverage/
5
+ /doc/
6
+ /pkg/
7
+ /spec/reports/
8
+ /tmp/
9
+
10
+ # rspec failure tracking
11
+ .rspec_status
data/.rspec ADDED
@@ -0,0 +1,3 @@
1
+ --format documentation
2
+ --color
3
+ --require spec_helper
@@ -0,0 +1 @@
1
+ synaptical
@@ -0,0 +1 @@
1
+ 2.5.1
@@ -0,0 +1,7 @@
1
+ ---
2
+ sudo: false
3
+ language: ruby
4
+ cache: bundler
5
+ rvm:
6
+ - 2.5.1
7
+ before_install: gem install bundler -v 1.16.3
data/Gemfile ADDED
@@ -0,0 +1,14 @@
1
# frozen_string_literal: true

source 'https://rubygems.org'

# Allow the `gem ..., github: 'user/repo'` shorthand for GitHub-hosted gems.
git_source(:github) { |repo_name| "https://github.com/#{repo_name}" }

# Specify your gem's dependencies in synaptical.gemspec
gemspec

# Benchmarking/profiling tools used only by the Rakefile's :benchmark and
# profile:* tasks; not needed at runtime.
group :development do
  gem 'benchmark-ips'
  gem 'benchmark-memory'
  gem 'hotch'
end
@@ -0,0 +1,47 @@
1
+ PATH
2
+ remote: .
3
+ specs:
4
+ synaptical (0.0.1.pre.beta1)
5
+
6
+ GEM
7
+ remote: https://rubygems.org/
8
+ specs:
9
+ allocation_tracer (0.6.3)
10
+ benchmark-ips (2.7.2)
11
+ benchmark-memory (0.1.2)
12
+ memory_profiler (~> 0.9)
13
+ diff-lcs (1.3)
14
+ hotch (0.5.0)
15
+ allocation_tracer (~> 0.6.3)
16
+ stackprof (~> 0.2.10)
17
+ memory_profiler (0.9.11)
18
+ rake (10.5.0)
19
+ rspec (3.7.0)
20
+ rspec-core (~> 3.7.0)
21
+ rspec-expectations (~> 3.7.0)
22
+ rspec-mocks (~> 3.7.0)
23
+ rspec-core (3.7.1)
24
+ rspec-support (~> 3.7.0)
25
+ rspec-expectations (3.7.0)
26
+ diff-lcs (>= 1.2.0, < 2.0)
27
+ rspec-support (~> 3.7.0)
28
+ rspec-mocks (3.7.0)
29
+ diff-lcs (>= 1.2.0, < 2.0)
30
+ rspec-support (~> 3.7.0)
31
+ rspec-support (3.7.1)
32
+ stackprof (0.2.12)
33
+
34
+ PLATFORMS
35
+ ruby
36
+
37
+ DEPENDENCIES
38
+ benchmark-ips
39
+ benchmark-memory
40
+ bundler (~> 1.16)
41
+ hotch
42
+ rake (~> 10.0)
43
+ rspec (~> 3.0)
44
+ synaptical!
45
+
46
+ BUNDLED WITH
47
+ 1.16.3
@@ -0,0 +1,101 @@
1
+ # Synaptical
2
+
3
+ Synaptical is a Ruby port of [synaptic.js](https://github.com/cazala/synaptic)
4
+
5
+ **NOTE: This is work in progress and some components of synaptic.js are still missing**
6
+
7
+ ## Installation
8
+
9
+ Add this line to your application's Gemfile:
10
+
11
+ ```ruby
12
+ gem 'synaptical'
13
+ ```
14
+
15
+ And then execute:
16
+
17
+ $ bundle
18
+
19
+ Or install it yourself as:
20
+
21
+ $ gem install synaptical
22
+
23
+ ## Usage
24
+
25
+ Usage is identical to [synaptic.js](https://github.com/cazala/synaptic).
26
+
27
+ Example network trained to solve a XOR gate:
28
+
29
+ ```ruby
30
+ input_layer = Synaptical::Layer.new(2)
31
+ hidden_layer = Synaptical::Layer.new(3)
32
+ output_layer = Synaptical::Layer.new(1)
33
+
34
+ input_layer.project(hidden_layer)
35
+ hidden_layer.project(output_layer)
36
+
37
+ network = Synaptical::Network.new(
38
+ input: input_layer,
39
+ hidden: [hidden_layer],
40
+ output: output_layer
41
+ )
42
+
43
+ learning_rate = 0.3
44
+
45
+ 10_000.times do
46
+ network.activate([0, 0])
47
+ network.propagate(learning_rate, [0])
48
+
49
+ network.activate([0, 1])
50
+ network.propagate(learning_rate, [1])
51
+
52
+ network.activate([1, 0])
53
+ network.propagate(learning_rate, [1])
54
+
55
+ network.activate([1, 1])
56
+ network.propagate(learning_rate, [0])
57
+ end
58
+
59
+ network.activate([0, 0])
60
+ # => [0.00020797967275049887]
61
+ network.activate([0, 1])
62
+ # => [0.9991989909682668]
63
+ network.activate([1, 0])
64
+ # => [0.9992882541963027]
65
+ network.activate([1, 1])
66
+ # => [0.0011764423621223423]
67
+ ```
68
+
69
+ or create the network with the Perceptron architect and the Trainer:
70
+
71
+ ```ruby
72
+
73
+ network = Synaptical::Architect::Perceptron.new(2, 3, 1)
74
+ trainer = Synaptical::Trainer.new(network)
75
+ trainer.train([
76
+ { input: [0, 0], output: [0] },
77
+ { input: [0, 1], output: [1] },
78
+ { input: [1, 0], output: [1] },
79
+ { input: [1, 1], output: [0] }
80
+ ])
81
+
82
+ network.activate([0, 0])
83
+ # => [0.04564830744951351]
84
+ network.activate([0, 1])
85
+ # => [0.9590894310802323]
86
+ network.activate([1, 0])
87
+ # => [0.9112358846059638]
88
+ network.activate([1, 1])
89
+ # => [0.0832359653922508]
90
+
91
+ ```
92
+
93
+ ## Development
94
+
95
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
96
+
97
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
98
+
99
+ ## Contributing
100
+
101
+ Bug reports and pull requests are welcome on GitHub at https://github.com/wallin/synaptical.
@@ -0,0 +1,96 @@
1
# frozen_string_literal: true

require 'bundler/gem_tasks'
require 'rspec/core/rake_task'
require 'synaptical'

RSpec::Core::RakeTask.new(:spec)

task default: :spec

# Builds the small 2-3-1 feed-forward XOR network used by the benchmark
# and profiling tasks below.
#
# @return [Synaptical::Network] freshly wired network
def create_network
  input_layer = Synaptical::Layer.new(2)
  hidden_layer = Synaptical::Layer.new(3)
  output_layer = Synaptical::Layer.new(1)

  input_layer.project(hidden_layer)
  hidden_layer.project(output_layer)

  Synaptical::Network.new(
    input: input_layer, hidden: [hidden_layer], output: output_layer
  )
end

# Runs one training epoch over the full XOR truth table.
# @param network [Synaptical::Network] network to train in place
def train(network)
  learning_rate = 0.3

  network.activate([0, 0])
  network.propagate(learning_rate, [0])

  network.activate([0, 1])
  network.propagate(learning_rate, [1])

  network.activate([1, 0])
  network.propagate(learning_rate, [1])

  network.activate([1, 1])
  network.propagate(learning_rate, [0])
end

# Activates the network on a random XOR input pair.
# @param network [Synaptical::Network] network to activate
def activate(network)
  network.activate([rand(2), rand(2)])
end

task :benchmark do
  # Benchmark gems are dev-only; load them lazily so other tasks work
  # without them installed.
  require 'benchmark/ips'
  require 'benchmark/memory'

  network = create_network

  # NOTE: inside the lambdas, `train(network)`/`activate(network)` are the
  # method calls above, not the local lambda variables.
  train = lambda do |x|
    x.report('training') { train(network) }
    x.compare!
  end

  activate = lambda do |x|
    x.report('activate') { activate(network) }
    x.compare!
  end

  Benchmark.ips(&train)
  Benchmark.memory(&train)
  Benchmark.ips(&activate)
  Benchmark.memory(&activate)
end

namespace :profile do
  task :training do
    # FIX: previously these requires ran at Rakefile load time (namespace
    # body), so ANY rake invocation failed when the optional dev profiler
    # `hotch` was missing. Load lazily inside the task, mirroring :benchmark.
    require 'hotch'
    require 'hotch/memory'

    network = create_network
    Hotch() do
      10_000.times { train(network) }
    end

    Hotch.memory do
      10_000.times { train(network) }
    end
  end

  task :activate do
    require 'hotch'
    require 'hotch/memory'

    network = create_network
    # Pre-train so activation is profiled on a converged network.
    10_000.times { train(network) }
    Hotch() do
      10_000.times { activate(network) }
    end

    Hotch.memory do
      10_000.times { activate(network) }
    end
  end
end
@@ -0,0 +1,15 @@
1
#!/usr/bin/env ruby
# frozen_string_literal: true

# Interactive console for experimenting with the synaptical gem.
require 'bundler/setup'
require 'synaptical'

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require 'irb'
IRB.start(__FILE__)
@@ -0,0 +1,8 @@
1
#!/usr/bin/env bash
# Project bootstrap script: installs gem dependencies.
# Strict mode: exit on error, on unset variables, and on pipeline failures.
set -euo pipefail
IFS=$'\n\t'
# Echo each command as it runs, for easier troubleshooting.
set -vx

bundle install

# Do any other automated setup that you need to do here
@@ -0,0 +1,18 @@
1
# frozen_string_literal: true

# Load every component of the library in dependency order: squash
# (activation) functions first, then the core network classes, then the
# architect/cost/serializer helpers that build on them.
%w[
  synaptical/squash/logistic
  synaptical/squash/tanh
  synaptical/connection
  synaptical/layer
  synaptical/layer_connection
  synaptical/network
  synaptical/neuron
  synaptical/trainer
  synaptical/version
  synaptical/architect/perceptron
  synaptical/cost/mse
  synaptical/serializer/json
].each { |file| require file }

# Root namespace for the Synaptical library.
module Synaptical; end
@@ -0,0 +1,32 @@
1
# frozen_string_literal: true

module Synaptical
  module Architect
    # Feed-forward multi-layer perceptron assembled from fully connected
    # Synaptical::Layer instances.
    class Perceptron < Network
      # Creates a new perceptron network
      # @param *layers [Integer] Each integer in the args represent a layer of that size
      #
      # @raise [ArgumentError] if fewer than three layer sizes are given
      # @return [Synaptical::Network] The created network
      def initialize(*layers)
        raise ArgumentError, 'not enough layers (minimum 3)' if layers.size < 3

        # First size is the input layer, last is the output layer; any sizes
        # in between become hidden layers chained input -> ... -> output.
        input = Synaptical::Layer.new(layers.shift)
        output = Synaptical::Layer.new(layers.pop)

        previous = input
        hidden = layers.map do |size|
          current = Synaptical::Layer.new(size)
          previous.project(current)
          previous = current
          current
        end
        previous.project(output)

        super(input: input, hidden: hidden, output: output)
      end
    end
  end
end
@@ -0,0 +1,31 @@
1
# frozen_string_literal: true

module Synaptical
  # Representation of a connection between two neurons
  class Connection
    attr_reader :id, :from, :to, :gain, :gater
    attr_accessor :weight

    class << self
      # Running count of connections created; doubles as the id sequence.
      attr_reader :connections

      # Returns the next unique connection id.
      #
      # @return [Integer] monotonically increasing id
      def uid
        @connections += 1
      end
    end

    # Class-level counter backing .uid (class instance variable, not @@).
    @connections = 0

    # Creates a connection between two neurons
    # @param from [Synaptical::Neuron] First neuron
    # @param to [Synaptical::Neuron] Second neuron
    # @param weight = nil [Float] connection weight
    def initialize(from, to, weight = nil)
      @id = self.class.uid
      @from = from
      @to = to
      # Explicit nil check (not ||) so a caller-supplied weight of 0.0 is
      # kept; only a missing weight gets a random value in (-0.1, 0.1).
      @weight = weight
      @weight = rand * 0.2 - 0.1 if @weight.nil?
      @gain = 1.0
    end
  end
end
@@ -0,0 +1,21 @@
1
# frozen_string_literal: true

module Synaptical
  module Cost
    # Mean square error
    module Mse
      class << self
        # Calculates mean square error for a series of results
        # @param target [Array<Numeric>] Desired values
        # @param output [Array<Numeric>] Output values from prediction
        #
        # @return [Float] Combined mean square error
        def call(target, output)
          squared_error = target.zip(output).sum { |expected, actual| (expected - actual)**2 }
          squared_error.fdiv(output.size)
        end
      end
    end
  end
end
@@ -0,0 +1,143 @@
1
# frozen_string_literal: true

module Synaptical
  # Representation of a layer in a network
  class Layer
    # Labels describing how two layers are wired together.
    CONNECTION_TYPE = {
      ALL_TO_ALL: 'ALL TO ALL',
      ONE_TO_ONE: 'ONE TO ONE',
      ALL_TO_ELSE: 'ALL TO ELSE'
    }.freeze

    # Labels describing how a gated connection is driven.
    GATE_TYPE = {
      INPUT: 'INPUT',
      OUTPUT: 'OUTPUT',
      ONE_TO_ONE: 'ONE TO ONE'
    }.freeze

    attr_reader :list, :connected_to, :size

    # Creates a new layer with a given size
    # @param size [Integer] Size of layer
    def initialize(size)
      @size = size
      @connected_to = []
      @list = Array.new(size).map { Synaptical::Neuron.new }
    end

    # Activates the neurons in the layer
    # @param input [Array<Numeric>] Array of inputs with same size as the layer
    #
    # @raise [ArgumentError] if input size mismatch layer size
    #
    # @return [Array<Numeric>] Array of output values with same size as the layer
    def activate(input = nil)
      if input.nil?
        list.map(&:activate)
      else
        raise ArgumentError unless input.size == size
        list.zip(input).map { |neuron, value| neuron.activate(value) }
      end
    end

    # Propagates the error on all the neurons of the layer
    # @param rate [Float] Learning rate
    # @param target = nil [Array<Numeric>] Target value
    #
    # @raise [ArgumentError] if target size mismatch layer size
    def propagate(rate, target = nil)
      if target.nil?
        list.each { |neuron| neuron.propagate(rate) }
      else
        raise ArgumentError unless target.size == size
        list.zip(target).each { |neuron, value| neuron.propagate(rate, value) }
      end
    end

    # Projects a connection from this layer to another one
    # @param layer [Synaptical::Layer, Synaptical::Network] Network/Layer to project to
    # @param type = nil [String] Connection type (one of CONNECTION_TYPE)
    # @param weight = nil [Float] Initial connection weight
    #
    # @raise [ArgumentError] if layer is already connected
    def project(layer, type = nil, weight = nil)
      # Projecting onto a network targets its input layer.
      layer = layer.layers.input if layer.is_a?(Network)

      raise ArgumentError if connected(layer)

      LayerConnection.new(self, layer, type, weight)
    end

    # Gates a connection between two layers
    # @param connection [LayerConnection] Layer connection
    # @param type [String] Gate type (one of GATE_TYPE)
    def gate(_connection, _type)
      raise 'TODO'
    end

    # Returns whether the whole layer is self-connected or not
    #
    # @return [Boolean] true if self-connected, false otherwise
    def selfconnected?
      list.all?(&:selfconnected?)
    end

    # Returns how the layer is connected to another layer
    # @param layer [Synaptical::Layer] Other layer
    #
    # @return [String, false] a CONNECTION_TYPE label describing the
    #   connection, or false when not connected (not a plain Boolean)
    def connected(layer)
      # Check if ALL to ALL connection
      connections = 0
      list.each do |from|
        layer.list.each do |to|
          connected = from.connected(to)
          connections += 1 if connected&.type == :projected
        end
      end

      return Layer::CONNECTION_TYPE[:ALL_TO_ALL] if connections == size * layer.size

      # Check if ONE to ONE connection
      connections = 0
      list.each_with_index do |from, idx|
        to = layer.list[idx]
        connected = from.connected(to)
        connections += 1 if connected&.type == :projected
      end

      return Layer::CONNECTION_TYPE[:ONE_TO_ONE] if connections == size

      false
    end

    # Clears all the neurons in the layer
    def clear
      list.each(&:clear)
    end

    # Resets all the neurons in the layer
    def reset
      list.each(&:reset)
    end

    # Returns all the neurons in the layer
    #
    # @return [Array<Synaptical::Neuron>] List of neurons in the layer
    alias neurons list

    # Adds a neuron to the layer
    # @param neuron [Synaptical::Neuron] The new neuron
    def add(neuron = Neuron.new)
      list << neuron
      # BUG FIX: was `size += 1`, which creates a new nil local variable
      # (`size = size + 1`) and raises NoMethodError instead of updating
      # the reader-backed @size instance variable.
      @size += 1
    end

    # Configure layer options
    #
    # @return [Hash] Hash with options
    def set(_options)
      raise 'TODO'
    end
  end
end