differentiation 0.2.0 → 0.2.1

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: ac6ecb62757c2673747b141af828086090e2cc42addb0ae5445503908f108f47
-  data.tar.gz: 39b075aca09020073e28b34aced45567b41a92bd9ddd755e2de60515ecd4299e
+  metadata.gz: 56dda61629d3925285ac54fafbf2ba9dea87171ad88062fced1c655bc40bb174
+  data.tar.gz: 6c67cbe07765683e1b2cda12d3e0217497035ef2a2fda50a3e62e981c37304ed
 SHA512:
-  metadata.gz: 6971cae699fbcdb58f86d36cb1e92908dec436d270d4f99d04c1480826674dbc016955510bf3172b96a924691c2fb9cdee2f9c895939742f239465ce24100fb3
-  data.tar.gz: e7d079380efccc25f93be4331f9e5ee18df5bb78b3f70cfc678ec43720439c77f76639bdff178627f8c99a0557c17a2e80a5a6078af8e6616c769c757c6a3db7
+  metadata.gz: e1fad5d9d8f66808baf9f5477fd24706c73cbf6a61d978775ec1d5e1f2bad514b85b4340536780ade19aeff940e92cc454903b9b0bfe0a35635dacb3aff8bc1c
+  data.tar.gz: 837a1274dbabb47e2ffcc203f05a673a815b716235f578ef21171e76fc8029427783991b4d3e619bfb98eb18a81576e30f559152d0da68cf7864c29ed3cb0b98
examples/xor_perceptron.rb ADDED
@@ -0,0 +1,57 @@
+require "matrix"
+require "differentiation"
+
+Dataset = {
+  inputs: [
+    Matrix[[0.0], [0.0]],
+    Matrix[[0.0], [1.0]],
+    Matrix[[1.0], [0.0]],
+    Matrix[[1.0], [1.0]],
+  ],
+  targets: [
+    0.0,
+    1.0,
+    1.0,
+    0.0
+  ]
+}
+
+def sigmoid(x, a=1.0)
+  1.0 / (1.0 + Math.exp(-a * x))
+end
+
+def mlp(input, w1, b1, w2, b2)
+  x = w1 * input + b1
+  x = x.map{|i| sigmoid(i) }
+  x = w2 * x + b2
+  sigmoid(x[0, 0])
+end
+
+differential def loss(w1, b1, w2, b2)
+  l = 0.0
+  Dataset[:inputs].zip(Dataset[:targets]) do |x, y|
+    l += (y - mlp(x, w1, b1, w2, b2)) ** 2
+  end
+  l
+end
+
+weights1 = Matrix.build(2, 2){ (rand() - 0.5) * 0.5 }
+bias1 = Matrix.build(2, 1) { 0.0 }
+weights2 = Matrix.build(1, 2){ (rand() - 0.5) * 0.5 }
+bias2 = Matrix.build(1, 1) { 0.0 }
+
+rate = 2.0
+
+10001.times do |step|
+  _loss = loss(weights1, bias1, weights2, bias2)
+  puts "%5d\t%.05f" % [step, _loss] if step % 1000 == 0
+  gw1, gb1, gw2, gb2 = _loss.gradients(:w1, :b1, :w2, :b2)
+  weights1 -= gw1.map{|v| v * rate }
+  bias1 -= gb1.map{|v| v * rate }
+  weights2 -= gw2.map{|v| v * rate }
+  bias2 -= gb2.map{|v| v * rate }
+end
+
+Dataset[:inputs].each do |x|
+  puts "#{[x[0,0], x[1, 0]]} => #{mlp(x, weights1, bias1, weights2, bias2)}"
+end
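Note: the new example trains a 2-2-1 multilayer perceptron to learn XOR by plain gradient descent. The loss method is declared with the gem's differential helper, and the returned dual number's gradients method yields one partial derivative per named argument. A minimal sketch of that API in isolation (the expected output is an assumption worked out from the derivative rules, not output captured from this release):

    require "differentiation"

    # differential marks the method so calls return a DualNumber
    # carrying derivatives with respect to each named argument.
    differential def f(x)
      x * x + 3.0 * x
    end

    y = f(2.0)
    g, = y.gradients(:x)  # one gradient per requested argument name
    puts g                # expected 7.0: d/dx (x**2 + 3x) = 2x + 3 at x = 2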
lib/differentiation/dual_number.rb CHANGED
@@ -92,6 +92,14 @@ module Differentiation
       end
     end
 
+    def +@
+      self
+    end
+
+    def -@
+      self * -1
+    end
+
     def +(other)
       if other.is_a?(DualNumber)
         n = @n + other.n
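The new unary +@ and -@ operators let expressions such as -x work when x is a DualNumber; -@ delegates to self * -1, so the derivative is negated along with the value. A small sketch of what this enables (the expected result is an assumption, not output from this release):

    differential def f(x)
      -x + 2.0
    end

    f(3.0).gradients(:x)  # expected [-1.0]: d/dx (-x + 2) = -1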
@@ -134,7 +142,7 @@ module Differentiation
     def /(other)
       if other.is_a?(DualNumber)
         n = @n / other.n
-        diff = ->(var) { (self.derivative(var) / other) + (@n * other.derivative(var)) / (other.n ** 2) }
+        diff = ->(var) { (self.derivative(var) / other.n) - (@n * other.derivative(var)) / (other.n ** 2) }
         named_variables = @named_variables.merge(other.named_variables)
       else
         n = @n / other
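This change corrects the quotient rule for dividing one dual number by another: d(f/g) = f'/g - f*g'/g**2. The previous lambda added the second term instead of subtracting it, and divided by the dual number other rather than its plain value other.n. A quick check under the fixed rule (expected values are assumptions computed by hand):

    differential def q(a, b)
      a / b
    end

    # Both arguments are dual numbers here, so this exercises the fixed branch.
    q(1.0, 2.0).gradients(:a, :b)  # expected [0.5, -0.25]
    # d(a/b)/da = 1/b = 0.5; d(a/b)/db = -a/b**2 = -0.25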
lib/differentiation/ext/math.rb ADDED
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module DualMath
+  def sin(x)
+    if x.is_a?(Differentiation::DualNumber)
+      Differentiation::DualNumber.new(super(x.n), ->(var){ cos(x.n) * x.derivative(var) }, named_variables: x.named_variables)
+    else
+      super(x)
+    end
+  end
+
+  def cos(x)
+    if x.is_a?(Differentiation::DualNumber)
+      Differentiation::DualNumber.new(super(x.n), ->(var){ -1.0 * sin(x.n) * x.derivative(var) }, named_variables: x.named_variables)
+    else
+      super(x)
+    end
+  end
+
+  def tan(x)
+    if x.is_a?(Differentiation::DualNumber)
+      Differentiation::DualNumber.new(super(x.n), ->(var){ ((1.0 / cos(x.n)) ** 2) * x.derivative(var) }, named_variables: x.named_variables)
+    else
+      super(x)
+    end
+  end
+
+  def exp(x)
+    if x.is_a?(Differentiation::DualNumber)
+      Differentiation::DualNumber.new(super(x.n), ->(var){ exp(x.n) * x.derivative(var) }, named_variables: x.named_variables)
+    else
+      super(x)
+    end
+  end
+end
+
+module Math
+  prepend DualMath
+end
+Math.singleton_class.prepend(DualMath)
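The new extension prepends DualMath both into the Math module (covering code that mixes Math in) and onto its singleton class (covering direct Math.sin-style calls), so sin, cos, tan, and exp propagate derivatives via the chain rule, e.g. d/dx sin(x) = cos(x) * x'. A small usage sketch (the expected value is an assumption, not output from this release):

    differential def f(x)
      Math.sin(x)
    end

    f(0.0).gradients(:x)  # expected [1.0]: d/dx sin(x) = cos(x), and cos(0) = 1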
lib/differentiation/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Differentiation
-  VERSION = "0.2.0"
+  VERSION = "0.2.1"
 end
lib/differentiation.rb CHANGED
@@ -73,4 +73,4 @@ end
 require "differentiation/ext/kernel"
 require "differentiation/ext/integer"
 require "differentiation/ext/float"
-
+require "differentiation/ext/math"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: differentiation
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.2.1
 platform: ruby
 authors:
 - nagachika
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-04-12 00:00:00.000000000 Z
+date: 2019-04-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -68,11 +68,13 @@ files:
 - bin/console
 - bin/setup
 - differentiation.gemspec
+- examples/xor_perceptron.rb
 - lib/differentiation.rb
 - lib/differentiation/dual_number.rb
 - lib/differentiation/ext/float.rb
 - lib/differentiation/ext/integer.rb
 - lib/differentiation/ext/kernel.rb
+- lib/differentiation/ext/math.rb
 - lib/differentiation/version.rb
 homepage: https://github.com/nagachika/differentiation
 licenses: []
@@ -93,7 +95,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.7.6
+rubygems_version: 2.7.6.2
 signing_key:
 specification_version: 4
 summary: Make Ruby Differentiable.