daimond 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/daimond/tensor.rb ADDED
@@ -0,0 +1,241 @@
+ require 'numo/narray'
+ begin; require_relative 'rust_backend'; rescue LoadError; end # the Rust backend is optional
+
+ module Daimond
+   class Tensor
+     attr_accessor :data, :grad, :prev, :op, :_backward, :label
+
+     def initialize(data, prev: [], op: nil, label: nil)
+       @data = data.is_a?(Numo::DFloat) ? data : Numo::DFloat[*data]
+       @grad = Numo::DFloat.zeros(*@data.shape)
+       @prev = prev
+       @op = op
+       @label = label
+       @_backward = lambda {} # empty function by default
+     end
+
+     def shape
+       @data.shape
+     end
+
+     def +(other)
+       other = other.is_a?(Tensor) ? other : Tensor.new(other)
+       left = self
+       right = other
+       out = Tensor.new(@data + other.data, prev: [self, other], op: '+')
+
+       out._backward = lambda do
+         grad = out.grad
+
+         # for left (may be broadcast, though usually it is not here)
+         if grad.shape.length > left.shape.length
+           left.grad += grad.sum(axis: 0)
+         else
+           left.grad += grad
+         end
+
+         # for right (bias): sum over the batch dimension
+         if grad.shape.length > right.shape.length
+           right.grad += grad.sum(axis: 0)
+         else
+           right.grad += grad
+         end
+       end
+
+       out
+     end
+
+     def -(other)
+       other = other.is_a?(Tensor) ? other : Tensor.new(other)
+       left = self
+       right = other
+       out = Tensor.new(@data - other.data, prev: [self, other], op: '-')
+
+       out._backward = lambda do
+         grad = out.grad
+         left.grad += grad
+
+         if grad.shape.length > right.shape.length
+           right.grad -= grad.sum(axis: 0)
+         else
+           right.grad -= grad
+         end
+       end
+
+       out
+     end
+
+     def *(other) # element-wise
+       other = other.is_a?(Tensor) ? other : Tensor.new(other)
+       left = self
+       right = other
+       out = Tensor.new(@data * other.data, prev: [self, other], op: '*')
+
+       out._backward = lambda do
+         grad = out.grad
+         left.grad += right.data * grad
+         right.grad += left.data * grad
+       end
+
+       out
+     end
+
+     def dot(other)
+       other = other.is_a?(Tensor) ? other : Tensor.new(other)
+
+       inner_dim = @data.shape[1]
+       out_dim = other.data.shape[1]
+
+       # global counters for debugging
+       $rust_count ||= 0
+       $ruby_count ||= 0
+
+       rust_available = defined?(Daimond::Rust)
+       condition = (inner_dim > 100 || out_dim > 50)
+
+       if rust_available && condition
+         begin
+           rust_a = Daimond::Rust::Tensor.from_array(@data.to_a)
+           rust_b = Daimond::Rust::Tensor.from_array(other.data.to_a)
+           rust_result = rust_a.matmul(rust_b)
+
+           out = Tensor.new(Numo::DFloat[*rust_result.to_a], prev: [self, other], op: 'dot')
+           out._backward = lambda do
+             grad = out.grad
+             self.grad += grad.dot(other.data.transpose)
+             other.grad += self.data.transpose.dot(grad)
+           end
+
+           $rust_count += 1
+           out
+         rescue => e
+           $ruby_count += 1
+
+           # fallback to the pure-Ruby path
+           out = Tensor.new(@data.dot(other.data), prev: [self, other], op: 'dot')
+           out._backward = lambda do
+             grad = out.grad
+             self.grad += grad.dot(other.data.transpose)
+             other.grad += self.data.transpose.dot(grad)
+           end
+           out
+         end
+       else
+         $ruby_count += 1
+         # Ruby version
+         out = Tensor.new(@data.dot(other.data), prev: [self, other], op: 'dot')
+         out._backward = lambda do
+           grad = out.grad
+           self.grad += grad.dot(other.data.transpose)
+           other.grad += self.data.transpose.dot(grad)
+         end
+         out
+       end
+     end
+
+     def relu
+       out_data = @data.map { |x| x > 0 ? x : 0.0 }
+       out = Tensor.new(out_data, prev: [self], op: 'relu')
+       input = self
+
+       out._backward = lambda do
+         grad = out.grad
+         mask = input.data.map { |x| x > 0 ? 1.0 : 0.0 }
+         input.grad += mask * grad
+       end
+
+       out
+     end
+
+     def sigmoid
+       input = self
+       s = @data.map { |x| 1.0 / (1.0 + Math.exp(-x)) }
+       out = Tensor.new(s, prev: [self], op: 'sigmoid')
+
+       out._backward = lambda do
+         grad = out.grad
+         input.grad += (out.data * (1.0 - out.data)) * grad
+       end
+
+       out
+     end
+
+     def sum
+       input = self
+       out = Tensor.new(Numo::DFloat[@data.sum], prev: [self], op: 'sum')
+
+       out._backward = lambda do
+         input.grad += Numo::DFloat.ones(*input.shape) * out.grad[0]
+       end
+
+       out
+     end
+
+     def mean
+       input = self
+       out = Tensor.new(Numo::DFloat[@data.mean], prev: [self], op: 'mean')
+       n = @data.size
+
+       out._backward = lambda do
+         input.grad += Numo::DFloat.ones(*input.shape) * (out.grad[0] / n)
+       end
+
+       out
+     end
+
+     def backward!
+       # topological sort of the graph
+       topo = []
+       visited = []
+
+       build_topo = lambda do |v|
+         return if visited.include?(v)
+         visited << v
+         v.prev.each { |child| build_topo.call(child) }
+         topo << v
+       end
+
+       build_topo.call(self)
+
+       self.grad = Numo::DFloat[1.0] # seed gradient
+
+       # walk in reverse order (from the loss back to the inputs)
+       topo.reverse.each do |node|
+         node._backward.call
+       end
+     end
+
+     def to_s
+       "Tensor(shape=#{shape}, mean=#{@data.mean.round(4)})"
+     end
+
+     alias inspect to_s
+
+     def self.randn(*shape)
+       data = Numo::DFloat.new(*shape).rand_norm
+       Tensor.new(data)
+     end
+
+     def self.zeros(*shape)
+       Tensor.new(Numo::DFloat.zeros(*shape))
+     end
+
+     def softmax
+       input = self
+       # numerical stability: subtract the row-wise max
+       max_val = @data.max(axis: 1).reshape(@data.shape[0], 1)
+       exp_data = Numo::NMath.exp(@data - max_val)
+       sum_exp = exp_data.sum(axis: 1).reshape(@data.shape[0], 1)
+       out_data = exp_data / sum_exp
+
+       out = Tensor.new(out_data, prev: [self], op: 'softmax')
+
+       # simplified backward (intended to be paired with CrossEntropy)
+       out._backward = lambda do
+         input.grad += out.grad
+       end
+
+       out
+     end
+   end
+ end
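
The Tensor class above implements reverse-mode autodiff in the micrograd style: every operation records its inputs in `prev` and a `_backward` closure, and `backward!` replays those closures in reverse topological order. A minimal usage sketch (not from the package; it assumes the gem and numo-narray are installed):

require 'daimond'

a = Daimond::Tensor.new([[1.0, 2.0], [3.0, 4.0]])
b = Daimond::Tensor.new([[0.5, 0.5], [0.5, 0.5]])

loss = (a * b).sum   # element-wise product, reduced to a scalar
loss.backward!       # seeds loss.grad with 1.0, then replays each _backward

p a.grad             # d(sum(a*b))/da == b.data
p b.grad             # d(sum(a*b))/db == a.data
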
data/lib/daimond/utils/training_logger.rb ADDED
@@ -0,0 +1,111 @@
+ require 'csv'
+
+ module Daimond
+   module Utils
+     class TrainingLogger
+       def initialize(filename = 'training_log.csv')
+         @filename = filename
+         @history = []
+         @start_time = Time.now
+
+         CSV.open(@filename, 'w') do |csv|
+           csv << ['epoch', 'loss', 'accuracy', 'time_elapsed']
+         end
+       end
+
+       def log(epoch, loss, accuracy)
+         elapsed = Time.now - @start_time
+         @history << {epoch: epoch, loss: loss, accuracy: accuracy, time: elapsed}
+
+         CSV.open(@filename, 'a') do |csv|
+           csv << [epoch, loss, accuracy, elapsed.round(2)]
+         end
+       end
+
+       def plot_loss(width: 60, height: 10)
+         return if @history.empty?
+
+         losses = @history.map { |h| h[:loss] }
+         min_loss = losses.min
+         max_loss = losses.max
+         range = max_loss - min_loss
+         range = 1.0 if range == 0
+
+         puts "\n📉 Loss Curve:"
+         puts "-" * (width + 10)
+
+         height.times do |row|
+           y_val = max_loss - (row * range / height)
+           line = sprintf("%6.3f |", y_val)
+           line = line.ljust(width + 9) # important: pad to the full plot width
+
+           @history.each_with_index do |h, i|
+             x_pos = (i * (width - 1) / [@history.size - 1, 1].max).to_i
+             idx = 8 + x_pos
+
+             if idx < line.length && (h[:loss] - y_val).abs < (range / height / 2)
+               line[idx] = "●"
+             end
+           end
+
+           puts line
+         end
+
+         puts " +" + "-" * width
+         puts " Epoch: 1" + " " * (width - 10) + "#{@history.size}"
+       end
+
+       def plot_accuracy(width: 60, height: 10)
+         return if @history.empty?
+
+         accs = @history.map { |h| h[:accuracy] }
+         min_acc = [accs.min, 0].min
+         max_acc = [accs.max, 100].max
+         range = max_acc - min_acc
+         range = 100 if range == 0
+
+         puts "\n📈 Accuracy Curve:"
+         puts "-" * (width + 10)
+
+         height.times do |row|
+           y_val = min_acc + ((height - row) * range / height)
+           line = sprintf("%6.1f%%|", y_val)
+           line = line.ljust(width + 9) # important: pad to the full plot width
+
+           @history.each_with_index do |h, i|
+             x_pos = (i * (width - 1) / [@history.size - 1, 1].max).to_i
+             idx = 8 + x_pos
+
+             if idx < line.length && (h[:accuracy] - y_val).abs < (range / height / 2)
+               line[idx] = "★"
+             end
+           end
+
+           puts line
+         end
+
+         puts " +" + "-" * width
+         puts " Epoch: 1" + " " * (width - 10) + "#{@history.size}"
+       end
+
+       def summary
+         return if @history.empty?
+
+         first = @history.first
+         last = @history.last
+
+         puts "\n📊 Training Summary:"
+         puts "=" * 50
+         puts "Duration: #{(last[:time] / 60).round(1)} minutes"
+         puts "Epochs: #{last[:epoch]}"
+         puts "Initial Loss: #{first[:loss].round(4)}"
+         puts "Final Loss: #{last[:loss].round(4)}"
+         puts "Initial Acc: #{first[:accuracy].round(2)}%"
+         puts "Final Acc: #{last[:accuracy].round(2)}%"
+         puts "Improvement: +#{(last[:accuracy] - first[:accuracy]).round(2)}%"
+         puts "=" * 50
+         puts "Log saved to: #{@filename}"
+       end
+     end
+   end
+ end
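
TrainingLogger appends one CSV row per epoch and can render rough ASCII loss and accuracy curves in the terminal. A hypothetical usage sketch (the metric values are made up for illustration; the require path follows the file layout in the gem metadata below):

require 'daimond/utils/training_logger'

logger = Daimond::Utils::TrainingLogger.new('demo_log.csv')

10.times do |epoch|
  fake_loss = 2.0 / (epoch + 1)   # pretend the loss decays
  fake_acc  = 50.0 + epoch * 4.0  # pretend the accuracy climbs
  logger.log(epoch + 1, fake_loss, fake_acc)
end

logger.plot_loss      # ● markers, one column per epoch
logger.plot_accuracy  # ★ markers
logger.summary        # duration, first/last loss and accuracy
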
data/lib/daimond/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Daimond
+   VERSION = "0.1.0"
+ end
data/lib/daimond.rb ADDED
@@ -0,0 +1,40 @@
+ require 'numo/narray'
+
+ require_relative 'daimond/tensor'
+ require_relative 'daimond/nn/module'
+ require_relative 'daimond/nn/linear'
+ require_relative 'daimond/nn/functional'
+ require_relative 'daimond/optim/sgd'
+ require_relative 'daimond/loss/mse'
+ require_relative 'daimond/loss/cross_entropy'
+ require_relative 'daimond/data/mnist'
+ require_relative 'daimond/data/data_loader'
+ require_relative 'daimond/nn/conv2d'
+ require_relative 'daimond/nn/max_pool2d'
+ require_relative 'daimond/nn/flatten'
+ require_relative 'daimond/optim/adam'
+ require_relative 'daimond/nn/conv2d_rust'
+ require_relative 'daimond/nn/max_pool2d_rust'
+ require_relative 'daimond/version'
+ begin
+   require_relative 'daimond/rust_bridge'
+ rescue LoadError
+   # the Rust backend is optional
+ end
+
+ module Daimond
+
+   def self.randn(*args)
+     Tensor.randn(*args)
+   end
+
+   def self.zeros(*args)
+     Tensor.zeros(*args)
+   end
+ end
+
+ begin
+   require_relative 'daimond/rust_backend'
+ rescue LoadError
+   # Rust backend optional
+ end
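
The top-level module simply delegates to Tensor's class-method constructors, so either spelling works. A minimal sketch (assuming the gem is installed):

require 'daimond'

w = Daimond.randn(784, 128)  # same as Daimond::Tensor.randn(784, 128)
b = Daimond.zeros(1, 128)    # same as Daimond::Tensor.zeros(1, 128)

puts w  # => Tensor(shape=[784, 128], mean=...)
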
metadata ADDED
@@ -0,0 +1,134 @@
+ --- !ruby/object:Gem::Specification
+ name: daimond
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Hudzone
+ autorequire:
+ bindir: exe
+ cert_chain: []
+ date: 2026-01-31 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: numo-narray
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.9'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.9'
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.0'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '13.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '13.0'
+ - !ruby/object:Gem::Dependency
+   name: rake-compiler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ description: dAImond is a PyTorch-inspired deep learning framework for Ruby featuring
+   automatic differentiation, neural networks, and a high-performance Rust backend
+   for tensor operations. Achieves 89%+ accuracy on MNIST.
+ email:
+ - your-email@example.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CONTRIBUTIONG.md
+ - README.ja.md
+ - README.md
+ - README.ru.md
+ - ext/daimond_rust/Cargo.lock
+ - ext/daimond_rust/Cargo.toml
+ - ext/daimond_rust/build.rs
+ - ext/daimond_rust/src/lib.rs
+ - lib/daimond.rb
+ - lib/daimond/autograd.rb
+ - lib/daimond/data/data_loader.rb
+ - lib/daimond/data/mnist.rb
+ - lib/daimond/loss/cross_entropy.rb
+ - lib/daimond/loss/mse.rb
+ - lib/daimond/nn/conv2d.rb
+ - lib/daimond/nn/conv2d_rust.rb
+ - lib/daimond/nn/flatten.rb
+ - lib/daimond/nn/functional.rb
+ - lib/daimond/nn/linear.rb
+ - lib/daimond/nn/max_pool2d.rb
+ - lib/daimond/nn/max_pool2d_rust.rb
+ - lib/daimond/nn/module.rb
+ - lib/daimond/optim/adam.rb
+ - lib/daimond/optim/sgd.rb
+ - lib/daimond/rust/daimond_rust.bundle
+ - lib/daimond/rust_backend.rb
+ - lib/daimond/rust_bridge.rb
+ - lib/daimond/tensor.rb
+ - lib/daimond/utils/training_logger.rb
+ - lib/daimond/version.rb
+ homepage: https://github.com/Hudzone/daimond-ml
+ licenses:
+ - MIT
+ metadata:
+   homepage_uri: https://github.com/Hudzone/daimond-ml
+   source_code_uri: https://github.com/Hudzone/daimond-ml
+   bug_tracker_uri: https://github.com/Hudzone/daimond-ml/issues
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 2.7.0
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.2.3
+ signing_key:
+ specification_version: 4
+ summary: Deep Learning framework for Ruby with Rust backend
+ test_files: []
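
For context, a hypothetical Gemfile entry derived from the gemspec above (the project setup is illustrative, not part of the package):

source 'https://rubygems.org'

gem 'daimond', '0.1.0'  # runtime dependency: numo-narray ~> 0.9; needs Ruby >= 2.7.0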