grydra 1.0.0 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,183 @@
1
module GRYDRA
  module Utils
    # Generators for runnable example scripts plus small structure/diagnostic
    # helpers used by the top-level GRYDRA convenience API.
    module Examples
      # Suggest a network structure for the given input/output sizes.
      #
      # @param inputs [Integer] number of input neurons
      # @param outputs [Integer] number of output neurons (default 1)
      # @return [Array<Array<Integer>>] e.g. [[inputs, hidden..., outputs]]
      def self.suggest_structure(inputs, outputs = 1)
        # Two heuristic hidden-layer sizes; uniq collapses them when equal.
        hidden = [(inputs + outputs) * 2, (inputs + outputs)].uniq
        [[inputs, *hidden, outputs]]
      end

      # Train once per supported normalization type (:max, :zscore) and report
      # completion. Intended as a smoke test of the pipeline, not a benchmark.
      def self.test_all_normalizations(inputs, outputs, structures)
        %i[max zscore].each do |type|
          puts "\n🧪 Testing normalization: #{type}"
          network = Networks::EasyNetwork.new(false)
          network.train_numerical(inputs, outputs, structures, 0.1, 5000, type)
          puts " ✓ Training completed with #{type} normalization"
        end
      end

      # Write example number `num_example` (1..12) to
      # "<path>/<filename>.<extension>". Prints a warning and returns nil when
      # the example number is unknown.
      def self.generate_example(num_example, filename = 'example', extension = 'rb', path = Dir.pwd)
        content = get_example_content(num_example)

        unless content
          puts "\e[1;35m⚠️ Available examples are from 1 to 12\e[0m"
          return
        end

        # FIX: interpolate the requested filename — previously this wrote a
        # literal, corrupted "#(unknown).<ext>" file name instead of
        # "#{filename}.#{extension}" (the success message below already used
        # `filename` correctly).
        File.write(File.join(path, "#{filename}.#{extension}"), content)
        puts "✅ Example generated and saved to \e[33m#{File.join(path, filename)}.#{extension}\e[0m"
      end

      # NOTE(review): `private` has no effect on `def self.` methods, so the
      # helpers below remain publicly callable. Use `private_class_method` if
      # they should truly be hidden — left unchanged here to avoid breaking
      # any external callers.
      private

      # Map an example number to its source text; nil when out of range.
      def self.get_example_content(num)
        examples = {
          1 => example_1_basic_training,
          2 => example_2_load_and_predict,
          3 => example_3_xor_problem,
          4 => example_4_temperature_conversion,
          5 => example_5_zscore_normalization,
          6 => example_6_weight_prediction,
          7 => example_7_advanced_weight,
          8 => example_8_load_model,
          9 => example_9_product_pricing,
          10 => example_10_cross_validation,
          11 => example_11_text_processing,
          12 => example_12_classification_metrics
        }
        examples[num]
      end

      def self.example_1_basic_training
        <<~RUBY
          require 'grydra'

          # Training data
          training_data = [
            { name: "Company 1", num_employees: 5, is_new: false, site: true, label: 0 },
            { name: "Company 2", num_employees: 4, is_new: true, site: false, label: 0 },
            { name: "Company 3", num_employees: 4, is_new: false, site: false, label: 1 },
            { name: "Company 4", num_employees: 20, is_new: false, site: false, label: 1 },
            { name: "Company 5", num_employees: 60, is_new: false, site: false, label: 1 }
          ]

          # Create and train model
          model = GRYDRA::Networks::EasyNetwork.new(true, true)
          model.configure_adam_optimizer(0.001, 0.9, 0.999)

          model.train_hashes(
            training_data,
            [:num_employees, :is_new, :site],
            :label,
            [[3, 4, 1]],
            0.05,
            12000,
            :max,
            lambda_l2: 0.001,
            dropout: true,
            dropout_rate: 0.3
          )

          # Save model
          GRYDRA::Utils::Persistence.save_model(model, "company_model")
          puts "✅ Training completed!"
        RUBY
      end

      def self.example_2_load_and_predict
        <<~RUBY
          require 'grydra'

          # Load model
          model = GRYDRA::Utils::Persistence.load_model("company_model")

          # New data
          new_data = [
            { name: "New Company A", num_employees: 12, is_new: true, site: true },
            { name: "New Company B", num_employees: 50, is_new: false, site: false }
          ]

          # Predict
          predictions = model.predict_hashes(new_data, [:num_employees, :is_new, :site])

          new_data.each_with_index do |company, i|
            prediction = predictions[i].first.round(3)
            label = prediction >= 0.5 ? 'Label 1 (Yes)' : 'Label 0 (No)'
            puts "Company: \#{company[:name]} → Prediction: \#{prediction} (\#{label})"
          end
        RUBY
      end

      def self.example_3_xor_problem
        <<~RUBY
          require 'grydra'

          # XOR problem
          network = GRYDRA::Networks::MainNetwork.new(true)
          network.add_subnet([2, 3, 1], [:tanh, :tanh])
          network.add_subnet([2, 4, 1], [:sigmoid, :sigmoid])

          inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
          outputs = [[0], [1], [1], [0]]

          network.train_subnets(
            [
              {input: inputs, output: outputs},
              {input: inputs, output: outputs}
            ],
            0.9,
            6000,
            batch_size: 1,
            patience: 100,
            decay: 0.995
          )

          puts "\\n📊 XOR Evaluation:"
          inputs.each do |input|
            output = network.combine_results(input)
            puts "Input: \#{input} => Output: \#{output.map { |v| v.round(3) }}"
          end
        RUBY
      end

      # Add more examples as needed...
      def self.example_4_temperature_conversion
        "# Temperature conversion example - See documentation"
      end

      def self.example_5_zscore_normalization
        "# Z-score normalization example - See documentation"
      end

      def self.example_6_weight_prediction
        "# Weight prediction example - See documentation"
      end

      def self.example_7_advanced_weight
        "# Advanced weight prediction - See documentation"
      end

      def self.example_8_load_model
        "# Load and use saved model - See documentation"
      end

      def self.example_9_product_pricing
        "# Product pricing example - See documentation"
      end

      def self.example_10_cross_validation
        "# Cross-validation example - See documentation"
      end

      def self.example_11_text_processing
        "# Text processing with TF-IDF - See documentation"
      end

      def self.example_12_classification_metrics
        "# Classification metrics example - See documentation"
      end
    end
  end
end
@@ -0,0 +1,94 @@
1
module GRYDRA
  module Utils
    # Marshal-based persistence for trained models and text vocabularies.
    #
    # SECURITY NOTE(review): Marshal.load can reconstruct arbitrary objects;
    # only load .net / _vocab.bin files produced by a trusted source.
    module Persistence
      # Persist `model` as "<path>/<name>.net"; when a `vocabulary` is given,
      # it is persisted alongside the model via save_vocabulary.
      def self.save_model(model, name, path = Dir.pwd, vocabulary = nil)
        destination = File.join(path, "#{name}.net")
        File.open(destination, 'wb') { |io| Marshal.dump(model, io) }
        puts "Model saved to '#{destination}'"

        save_vocabulary(vocabulary, name, path) if vocabulary
      end

      # Load and return a model previously written by save_model.
      def self.load_model(name, path = Dir.pwd)
        # File.open with a block returns the block's value (the loaded model).
        File.open(File.join(path, "#{name}.net"), 'rb') { |io| Marshal.load(io) }
      end

      # Persist `vocabulary` as "<path>/<name>_vocab.bin".
      def self.save_vocabulary(vocabulary, name, path = Dir.pwd)
        destination = File.join(path, "#{name}_vocab.bin")
        File.open(destination, 'wb') { |io| Marshal.dump(vocabulary, io) }
        puts "Vocabulary saved to '#{destination}'"
      end

      # Load and return a vocabulary previously written by save_vocabulary.
      def self.load_vocabulary(name, path = Dir.pwd)
        File.open(File.join(path, "#{name}_vocab.bin"), 'rb') { |io| Marshal.load(io) }
      end

      # Report whether `model` is a recognized network instance.
      # Prints a diagnostic line and returns true/false.
      def self.validate_model(model)
        if model.nil?
          puts "ERROR: model is nil"
          return false
        end

        known = model.is_a?(Networks::EasyNetwork) || model.is_a?(Networks::MainNetwork)
        if known
          puts "Valid model of type #{model.class}"
        else
          puts "WARNING: The loaded model is not a known instance (#{model.class})"
        end
        known
      end

      # Print a per-subnet summary (structure, activations, parameter counts)
      # for a model exposing #subnets; otherwise fall back to validate_model.
      # When `input_test` is given, each subnet is also evaluated on it.
      def self.summary_model(model, input_test: nil)
        puts "\nModel Summary:"
        puts "=" * 60

        # Unwrap wrapper objects that expose the real network via #network.
        model = model.network if model.respond_to?(:network) && model.network.respond_to?(:subnets)

        unless model.respond_to?(:subnets)
          validate_model(model)
          return
        end

        total_params = 0

        model.subnets.each.with_index(1) do |subnet, number|
          puts "\nSubnet ##{number}:"
          sizes = subnet.layers.map { |layer| layer.neurons.size }
          hidden_activations = subnet.layers[0...-1].map(&:activation)
          output_function = subnet.layers.last.activation

          subnet_params = count_parameters(subnet)
          total_params += subnet_params

          puts "  Structure: #{sizes.inspect}"
          puts "  Hidden activations: #{hidden_activations.inspect}"
          puts "  Output activation: #{output_function.inspect}"
          puts "  Parameters: #{subnet_params}"

          next unless input_test

          begin
            output = subnet.calculate_outputs(input_test)
            puts "  Test output: #{output.map { |v| v.round(4) }.inspect}"
          rescue StandardError => e
            puts "  Error calculating output: #{e.message}"
          end
        end

        puts "\n" + "=" * 60
        puts "Total parameters: #{total_params}"
      end

      # Weights plus one bias per neuron, summed over every layer.
      def self.count_parameters(subnet)
        subnet.layers.sum do |layer|
          layer.neurons.sum { |neuron| neuron.weights.size + 1 }
        end
      end
    end
  end
end
@@ -0,0 +1,105 @@
1
module GRYDRA
  module Utils
    # Console (ASCII) visualization helpers: error-history bar charts,
    # architecture diagrams and gradient statistics.
    module Visualization
      # Print a bar-chart history of training errors.
      #
      # @param errors [Array<Numeric>] per-epoch error values (oldest first)
      # @param print_every [Integer] print every Nth epoch (first/last always)
      # @param bar_width [Integer] max bar length in characters
      # @param delta_min [Float] skip epochs whose error barely changed
      def self.plot_error(errors, print_every: 5, bar_width: 40, delta_min: 0.001)
        # Robustness: an empty/nil history previously crashed on
        # errors.last.round (NoMethodError on nil).
        if errors.nil? || errors.empty?
          puts "\nTraining Error History"
          puts 'No error history to plot'
          return
        end

        max_error = errors.max
        first_error = errors.first
        # Float divisors avoid Integer truncation and ZeroDivisionError when
        # the history is all zeros (e.g. integer 0 errors).
        scale = max_error.zero? ? 1.0 : max_error.to_f
        base = first_error.zero? ? 1.0 : first_error.to_f

        puts "\nTraining Error History"
        puts '=' * (bar_width + 40)

        last_printed = nil
        errors.each_with_index do |error, i|
          epoch = i + 1
          # Always show the first and last epoch; sample the rest.
          next unless epoch == 1 || epoch == errors.size || epoch % print_every == 0

          # Suppress rows whose error is nearly unchanged (except the last).
          if last_printed && (last_printed - error).abs < delta_min && epoch != errors.size
            next
          end

          bar_length = [(bar_width * error / scale).round, 1].max
          bar = '=' * bar_length
          improvement_pct = ((first_error - error) / base) * 100
          improvement_str = improvement_pct >= 0 ? "+#{improvement_pct.round(2)}%" : "#{improvement_pct.round(2)}%"

          puts "Epoch #{epoch.to_s.ljust(4)} | #{bar.ljust(bar_width)} | Error: #{error.round(6)} | Improvement: #{improvement_str}"
          last_printed = error
        end

        puts '=' * (bar_width + 40)
        puts "Initial error: #{first_error.round(6)}, Final error: #{errors.last.round(6)}"
        improvement = ((first_error - errors.last) / base * 100).round(2)
        puts "Total improvement: #{improvement}%\n"
      end

      # Print an ASCII diagram of a model's architecture. Models exposing
      # #subnets are drawn subnet by subnet; anything else is drawn directly.
      def self.plot_architecture_ascii(model)
        puts "\nNetwork Architecture:"
        puts '=' * 60

        if model.respond_to?(:subnets)
          model.subnets.each_with_index do |subnet, idx|
            puts "\n Subnet #{idx + 1}:"
            plot_individual_network(subnet)
          end
        else
          plot_individual_network(model)
        end

        puts '=' * 60
      end

      # Draw one network's layers as rows of 'O' neurons with arrows between.
      # Assumes `network` exposes #layers of objects with #neurons/#activation.
      def self.plot_individual_network(network)
        network.layers.each_with_index do |layer, i|
          neurons = layer.neurons.size
          activation = layer.activation || :linear
          # Weights plus one bias per neuron.
          params = layer.neurons.sum { |n| n.weights.size + 1 }

          symbols = if neurons <= 10
                      'O' * neurons
                    else
                      'O' * 8 + "... (#{neurons} neurons)"
                    end

          puts " Layer #{i + 1}: #{symbols}"
          puts "   Neurons: #{neurons}, Activation: #{activation}, Parameters: #{params}"
          puts "   |" unless i == network.layers.size - 1
          puts "   v" unless i == network.layers.size - 1
        end
      end

      # Collect |delta| for every neuron with a gradient and return summary
      # statistics. Checks #layers before #subnets, so a model exposing both
      # is treated as a flat network.
      #
      # @return [Hash] :average, :maximum, :minimum, :deviation,
      #   :total_parameters — or {message: ...} when no gradients exist.
      def self.analyze_gradients(model)
        gradients = []

        if model.respond_to?(:layers)
          model.layers.each do |layer|
            layer.neurons.each do |neuron|
              gradients << neuron.delta.abs if neuron.delta
            end
          end
        elsif model.respond_to?(:subnets)
          model.subnets.each do |subnet|
            subnet.layers.each do |layer|
              layer.neurons.each do |neuron|
                gradients << neuron.delta.abs if neuron.delta
              end
            end
          end
        end

        return { message: 'No gradients to analyze' } if gradients.empty?

        average = gradients.sum / gradients.size.to_f
        {
          average: average,
          maximum: gradients.max,
          minimum: gradients.min,
          deviation: Math.sqrt(gradients.map { |g| (g - average)**2 }.sum / gradients.size.to_f),
          total_parameters: gradients.size
        }
      end
    end
  end
end
@@ -0,0 +1,3 @@
1
module GRYDRA
  # Current gem release, following semantic versioning.
  VERSION = '2.0.1'
end
data/lib/grydra.rb CHANGED
@@ -1,2 +1,162 @@
1
- require "gr/version"
2
- require "gr/core"
1
# frozen_string_literal: true

require 'set'

# GRYDRA - Complete Neural Network Library for Ruby
#
# Loads every sub-module and exposes a flat, backward-compatible API of
# module-level functions that forward to the namespaced implementations.
module GRYDRA
  # Core modules
  require 'grydra/version'
  require 'grydra/activations'
  require 'grydra/initializers'
  require 'grydra/regularization'
  require 'grydra/normalization'
  require 'grydra/metrics'
  require 'grydra/optimizers'
  require 'grydra/losses'
  require 'grydra/callbacks'

  # Layers
  require 'grydra/layers/base'
  require 'grydra/layers/dense'
  require 'grydra/layers/conv'
  require 'grydra/layers/lstm'

  # Networks
  require 'grydra/networks/neuron'
  require 'grydra/networks/neural_network'
  require 'grydra/networks/main_network'
  require 'grydra/networks/easy_network'

  # Preprocessing
  require 'grydra/preprocessing/data'
  require 'grydra/preprocessing/text'
  require 'grydra/preprocessing/pca'

  # Training
  require 'grydra/training/cross_validation'
  require 'grydra/training/hyperparameter_search'

  # Utils
  require 'grydra/utils/visualization'
  require 'grydra/utils/persistence'
  require 'grydra/utils/examples'
  require 'grydra/documentation'

  # Backward compatibility aliases
  class << self
    # Activation functions
    def tanh(x) = Activations.tanh(x)
    def derivative_tanh(x) = Activations.derivative_tanh(x)
    def relu(x) = Activations.relu(x)
    def derivative_relu(x) = Activations.derivative_relu(x)
    def sigmoid(x) = Activations.sigmoid(x)
    def derivative_sigmoid(x) = Activations.derivative_sigmoid(x)
    def softmax(vector) = Activations.softmax(vector)
    def leaky_relu(x, alpha = 0.01) = Activations.leaky_relu(x, alpha)
    def derivative_leaky_relu(x, alpha = 0.01) = Activations.derivative_leaky_relu(x, alpha)
    def swish(x) = Activations.swish(x)
    def derivative_swish(x) = Activations.derivative_swish(x)
    def gelu(x) = Activations.gelu(x)
    def derivative_gelu(x) = Activations.derivative_gelu(x)

    # Regularization
    def apply_dropout(outputs, dropout_rate = 0.5, training = true)
      Regularization.apply_dropout(outputs, dropout_rate, training)
    end
    def l1_regularization(weights, lambda_l1) = Regularization.l1_regularization(weights, lambda_l1)
    def l2_regularization(weights, lambda_l2) = Regularization.l2_regularization(weights, lambda_l2)

    # Initializers
    def xavier_init(num_inputs) = Initializers.xavier_init(num_inputs)
    def he_init(num_inputs) = Initializers.he_init(num_inputs)

    # Normalization
    def zscore_normalize(data) = Normalization.zscore_normalize(data)
    def zscore_denormalize(normalized, means, std_devs) = Normalization.zscore_denormalize(normalized, means, std_devs)
    def min_max_normalize(data, min_val = 0, max_val = 1) = Normalization.min_max_normalize(data, min_val, max_val)
    def normalize_multiple(data, max_values, method = :max) = Normalization.normalize_multiple(data, max_values, method)
    def calculate_max_values(data, method = :max) = Normalization.calculate_max_values(data, method)

    # Metrics
    def mse(predictions, actuals) = Metrics.mse(predictions, actuals)
    def mae(predictions, actuals) = Metrics.mae(predictions, actuals)
    def precision(tp, fp) = Metrics.precision(tp, fp)
    def recall(tp, fn) = Metrics.recall(tp, fn)
    def f1(precision, recall) = Metrics.f1(precision, recall)
    def confusion_matrix(predictions, actuals, threshold = 0.5) = Metrics.confusion_matrix(predictions, actuals, threshold)
    def auc_roc(predictions, actuals) = Metrics.auc_roc(predictions, actuals)
    def accuracy(predictions, actuals, threshold = 0.5) = Metrics.accuracy(predictions, actuals, threshold)

    # Preprocessing
    def split_data(data_x, data_y, training_ratio = 0.8, seed = nil)
      Preprocessing::Data.split_data(data_x, data_y, training_ratio, seed)
    end
    def generate_synthetic_data(n_samples, n_features, noise = 0.1, seed = nil)
      Preprocessing::Data.generate_synthetic_data(n_samples, n_features, noise, seed)
    end
    def convert_hashes_to_vectors(array_hashes, keys)
      Preprocessing::Data.convert_hashes_to_vectors(array_hashes, keys)
    end
    def pca(data, components = 2) = Preprocessing::PCA.pca(data, components)

    # Text preprocessing
    def create_vocabulary(texts) = Preprocessing::Text.create_vocabulary(texts)
    def create_advanced_vocabulary(texts, min_frequency = 1, max_words = nil)
      Preprocessing::Text.create_advanced_vocabulary(texts, min_frequency, max_words)
    end
    def vectorize_text(text, vocabulary) = Preprocessing::Text.vectorize_text(text, vocabulary)
    def vectorize_text_tfidf(text, vocabulary, corpus_frequencies)
      Preprocessing::Text.vectorize_text_tfidf(text, vocabulary, corpus_frequencies)
    end
    def normalize_with_vocabulary(data, vocabulary) = Preprocessing::Text.normalize_with_vocabulary(data, vocabulary)

    # Training
    def cross_validation(data_input, data_output, k_folds = 5, &block)
      Training::CrossValidation.cross_validation(data_input, data_output, k_folds, &block)
    end
    def hyperparameter_search(data_x, data_y, param_grid, &block)
      Training::HyperparameterSearch.hyperparameter_search(data_x, data_y, param_grid, &block)
    end

    # Visualization
    # FIX: Utils::Visualization.plot_error takes keyword arguments
    # (print_every:, bar_width:, delta_min:); forwarding them positionally
    # raised ArgumentError. The flat API keeps its positional signature for
    # backward compatibility and forwards as keywords.
    def plot_error(errors, print_every = 5, bar_width = 40, delta_min = 0.001)
      Utils::Visualization.plot_error(errors, print_every: print_every, bar_width: bar_width, delta_min: delta_min)
    end
    def plot_architecture_ascii(model) = Utils::Visualization.plot_architecture_ascii(model)
    def analyze_gradients(model) = Utils::Visualization.analyze_gradients(model)

    # Persistence
    def save_model(model, name, path = Dir.pwd, vocabulary = nil)
      Utils::Persistence.save_model(model, name, path, vocabulary)
    end
    def load_model(name, path = Dir.pwd) = Utils::Persistence.load_model(name, path)
    def save_vocabulary(vocabulary, name, path = Dir.pwd)
      Utils::Persistence.save_vocabulary(vocabulary, name, path)
    end
    def load_vocabulary(name, path = Dir.pwd) = Utils::Persistence.load_vocabulary(name, path)
    def validate_model(model) = Utils::Persistence.validate_model(model)
    # FIX: Utils::Persistence.summary_model declares `input_test:` as a
    # keyword argument; the previous positional forward raised ArgumentError.
    def summary_model(model, input_test = nil) = Utils::Persistence.summary_model(model, input_test: input_test)

    # Examples and documentation
    def suggest_structure(inputs, outputs = 1) = Utils::Examples.suggest_structure(inputs, outputs)
    def test_all_normalizations(inputs, outputs, structures)
      Utils::Examples.test_all_normalizations(inputs, outputs, structures)
    end
    def generate_example(num_example, filename = 'example', extension = 'rb', path = Dir.pwd)
      Utils::Examples.generate_example(num_example, filename, extension, path)
    end
    def describe_method(class_name, method_name) = Documentation.describe_method(class_name, method_name)
    def list_methods_available = Documentation.list_methods_available
  end

  # Legacy class aliases for backward compatibility
  AdamOptimizer = Optimizers::Adam
  Neuron = Networks::Neuron
  Layer = Layers::Base
  DenseLayer = Layers::Dense
  ConvLayer = Layers::Conv
  LSTMLayer = Layers::LSTM
  NeuralNetwork = Networks::NeuralNetwork
  MainNetwork = Networks::MainNetwork
  EasyNetwork = Networks::EasyNetwork
end