desiru 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +1 -0
- data/.rubocop.yml +55 -0
- data/CLAUDE.md +22 -0
- data/Gemfile +36 -0
- data/Gemfile.lock +255 -0
- data/LICENSE +21 -0
- data/README.md +343 -0
- data/Rakefile +18 -0
- data/desiru.gemspec +44 -0
- data/examples/README.md +55 -0
- data/examples/async_processing.rb +135 -0
- data/examples/few_shot_learning.rb +66 -0
- data/examples/graphql_api.rb +190 -0
- data/examples/graphql_integration.rb +114 -0
- data/examples/rag_retrieval.rb +80 -0
- data/examples/simple_qa.rb +31 -0
- data/examples/typed_signatures.rb +45 -0
- data/lib/desiru/async_capable.rb +170 -0
- data/lib/desiru/cache.rb +116 -0
- data/lib/desiru/configuration.rb +40 -0
- data/lib/desiru/field.rb +171 -0
- data/lib/desiru/graphql/data_loader.rb +210 -0
- data/lib/desiru/graphql/executor.rb +115 -0
- data/lib/desiru/graphql/schema_generator.rb +301 -0
- data/lib/desiru/jobs/async_predict.rb +52 -0
- data/lib/desiru/jobs/base.rb +53 -0
- data/lib/desiru/jobs/batch_processor.rb +71 -0
- data/lib/desiru/jobs/optimizer_job.rb +45 -0
- data/lib/desiru/models/base.rb +112 -0
- data/lib/desiru/models/raix_adapter.rb +210 -0
- data/lib/desiru/module.rb +204 -0
- data/lib/desiru/modules/chain_of_thought.rb +106 -0
- data/lib/desiru/modules/predict.rb +142 -0
- data/lib/desiru/modules/retrieve.rb +199 -0
- data/lib/desiru/optimizers/base.rb +130 -0
- data/lib/desiru/optimizers/bootstrap_few_shot.rb +212 -0
- data/lib/desiru/program.rb +106 -0
- data/lib/desiru/registry.rb +74 -0
- data/lib/desiru/signature.rb +322 -0
- data/lib/desiru/version.rb +5 -0
- data/lib/desiru.rb +67 -0
- metadata +184 -0
@@ -0,0 +1,190 @@ (new file, 190 added lines; per the file listing above this is data/examples/graphql_api.rb)
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'bundler/setup'
+require 'desiru'
+require 'desiru/graphql/schema_generator'
+require 'desiru/graphql/executor'
+
+# Configure Desiru
+Desiru.configure do |config|
+  # Use a mock model for demonstration
+  config.default_model = Desiru::Models::RaixAdapter.new
+end
+
+# Create some example modules
+class AnswerQuestion < Desiru::Module
+  def initialize
+    signature = Desiru::Signature.new(
+      "question: string -> answer: string, confidence: float",
+      descriptions: {
+        question: "The question to answer",
+        answer: "The answer to the question",
+        confidence: "Confidence score between 0 and 1"
+      }
+    )
+    super(signature)
+  end
+
+  def forward(question:)
+    # Simulate processing
+    {
+      answer: "The answer to '#{question}' is 42.",
+      confidence: 0.95
+    }
+  end
+end
+
+class SummarizeText < Desiru::Module
+  def initialize
+    signature = Desiru::Signature.new(
+      "text: string, max_words: int -> summary: string, word_count: int",
+      descriptions: {
+        text: "The text to summarize",
+        max_words: "Maximum words in summary",
+        summary: "The summarized text",
+        word_count: "Actual word count of summary"
+      }
+    )
+    super(signature)
+  end
+
+  def forward(text:, max_words:)
+    words = text.split.take(max_words)
+    summary = words.join(' ') + (words.length < text.split.length ? '...' : '')
+
+    {
+      summary: summary,
+      word_count: words.length
+    }
+  end
+end
+
+class ClassifySentiment < Desiru::Module
+  def initialize
+    signature = Desiru::Signature.new(
+      "text: string -> sentiment: Literal['positive', 'negative', 'neutral'], reasoning: string"
+    )
+    super(signature)
+  end
+
+  def forward(text:)
+    # Simple rule-based sentiment for demo
+    positive_words = %w[good great excellent amazing wonderful]
+    negative_words = %w[bad terrible awful horrible poor]
+
+    text_lower = text.downcase
+    positive_count = positive_words.count { |word| text_lower.include?(word) }
+    negative_count = negative_words.count { |word| text_lower.include?(word) }
+
+    if positive_count > negative_count
+      sentiment = 'positive'
+      reasoning = "Found #{positive_count} positive indicators"
+    elsif negative_count > positive_count
+      sentiment = 'negative'
+      reasoning = "Found #{negative_count} negative indicators"
+    else
+      sentiment = 'neutral'
+      reasoning = "No strong sentiment indicators found"
+    end
+
+    { sentiment: sentiment, reasoning: reasoning }
+  end
+end
+
+# Set up GraphQL schema
+puts "Setting up GraphQL schema..."
+generator = Desiru::GraphQL::SchemaGenerator.new
+
+# Register modules
+generator.register_module(:answerQuestion, AnswerQuestion.new)
+generator.register_module(:summarizeText, SummarizeText.new)
+generator.register_module(:classifySentiment, ClassifySentiment.new)
+
+# Generate schema
+schema = generator.generate_schema
+
+# Create executor with batch loading
+executor = Desiru::GraphQL::Executor.new(schema, data_loader: generator.data_loader)
+
+# Example queries
+puts "\n=== Example 1: Simple Question ==="
+query1 = <<~GRAPHQL
+  query {
+    answerQuestion(question: "What is the meaning of life?") {
+      answer
+      confidence
+    }
+  }
+GRAPHQL
+
+result1 = executor.execute(query1)
+puts "Query: #{query1}"
+puts "Result: #{result1.to_h}"
+
+puts "\n=== Example 2: Text Summarization ==="
+query2 = <<~GRAPHQL
+  query {
+    summarizeText(
+      text: "Ruby is a dynamic, open source programming language with a focus on simplicity and productivity. It has an elegant syntax that is natural to read and easy to write."
+      maxWords: 10
+    ) {
+      summary
+      wordCount
+    }
+  }
+GRAPHQL
+
+result2 = executor.execute(query2)
+puts "Query: #{query2}"
+puts "Result: #{result2.to_h}"
+
+puts "\n=== Example 3: Sentiment Classification ==="
+query3 = <<~GRAPHQL
+  query {
+    classifySentiment(text: "This framework is absolutely amazing and wonderful!") {
+      sentiment
+      reasoning
+    }
+  }
+GRAPHQL
+
+result3 = executor.execute(query3)
+puts "Query: #{query3}"
+puts "Result: #{result3.to_h}"
+
+puts "\n=== Example 4: Batch Query ==="
+batch_query = <<~GRAPHQL
+  query {
+    positive: classifySentiment(text: "I love this great product") {
+      sentiment
+      reasoning
+    }
+    negative: classifySentiment(text: "This is terrible and awful") {
+      sentiment
+      reasoning
+    }
+    neutral: classifySentiment(text: "It exists and works") {
+      sentiment
+      reasoning
+    }
+  }
+GRAPHQL
+
+result4 = executor.execute(batch_query)
+puts "Query: #{batch_query}"
+puts "Result: #{result4.to_h}"
+
+# Demonstrate batch execution of multiple queries
+puts "\n=== Example 5: Batch Execution ==="
+queries = [
+  { query: 'query { answerQuestion(question: "What is Ruby?") { answer } }' },
+  { query: 'query { answerQuestion(question: "What is GraphQL?") { answer } }' },
+  { query: 'query { answerQuestion(question: "What is Desiru?") { answer } }' }
+]
+
+results = executor.execute_batch(queries)
+puts "Executing #{queries.length} queries in batch..."
+results.each_with_index do |result, i|
+  puts "Query #{i + 1}: #{result.to_h}"
+end
@@ -0,0 +1,114 @@ (new file, 114 added lines; per the file listing above this is data/examples/graphql_integration.rb)
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'bundler/setup'
+require 'desiru'
+require 'desiru/graphql/schema_generator'
+
+# Example: GraphQL Integration with Desiru
+# This demonstrates how to generate GraphQL schemas from Desiru signatures
+
+# Configure Desiru
+Desiru.configure do |config|
+  config.default_model = Desiru::Models::RaixAdapter.new(
+    client: 'openai',
+    model: 'gpt-3.5-turbo'
+  )
+end
+
+# Create a schema generator
+generator = Desiru::GraphQL::SchemaGenerator.new
+
+# Register multiple Desiru signatures as GraphQL operations
+generator.register_signature(
+  'translateText',
+  Desiru::Signature.new(
+    'text: string, target_language: string, source_language?: string -> translation: string, confidence: float'
+  )
+)
+
+generator.register_signature(
+  'analyzeSentiment',
+  Desiru::Signature.new(
+    "text: string -> sentiment: Literal['positive', 'negative', 'neutral'], score: float"
+  )
+)
+
+generator.register_signature(
+  'summarizeBatch',
+  Desiru::Signature.new(
+    'documents: list[string], max_length: int -> summaries: list[string], total_words: int'
+  )
+)
+
+# Generate the GraphQL schema
+schema = generator.generate_schema
+
+# Example GraphQL query execution
+puts "GraphQL Schema generated with operations:"
+schema.query.fields.each do |name, field|
+  puts " - #{name}: #{field.description}"
+end
+
+# Example queries you can run:
+puts "\nExample GraphQL queries:"
+puts <<~GRAPHQL
+  # Translation query
+  query {
+    translateText(text: "Hello world", targetLanguage: "es") {
+      translation
+      confidence
+    }
+  }
+
+  # Sentiment analysis query#{' '}
+  query {
+    analyzeSentiment(text: "This framework is amazing!") {
+      sentiment
+      score
+    }
+  }
+
+  # Batch summarization query
+  query {
+    summarizeBatch(
+      documents: ["Long document 1...", "Long document 2..."],
+      maxLength: 100
+    ) {
+      summaries
+      totalWords
+    }
+  }
+GRAPHQL
+
+# Execute a sample query
+result = schema.execute(<<~GRAPHQL)
+  query {
+    analyzeSentiment(text: "GraphQL integration with Desiru is fantastic!") {
+      sentiment
+      score
+    }
+  }
+GRAPHQL
+
+puts "\nQuery result:"
+puts result.to_h.inspect
+
+# Integration with GraphQL servers
+puts "\n\nTo use this schema with a GraphQL server (e.g., graphql-ruby with Rails):"
+puts <<~RUBY
+  # In your GraphQL controller:
+  class GraphQLController < ApplicationController
+    def execute
+      result = DesiruSchema.execute(
+        params[:query],
+        variables: params[:variables],
+        context: { current_user: current_user }
+      )
+      render json: result
+    end
+  end
+
+  # Where DesiruSchema is your generated schema
+  DesiruSchema = generator.generate_schema
+RUBY
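Beyond the Rails snippet the example prints, the same generated schema can be served from a plain Rack endpoint. The sketch below is illustrative only: `generate_schema` and `Schema.execute` come from the example and graphql-ruby, while the `config.ru` layout, the JSON body handling, and the `DesiruSchema` constant name are assumptions, not part of the gem.

# config.ru (sketch): POST a JSON body of {"query": ..., "variables": ...}
require 'json'

DesiruSchema = generator.generate_schema # `generator` built as in the example above

run lambda { |env|
  request = Rack::Request.new(env)
  body = request.body.read
  payload = body.empty? ? {} : JSON.parse(body)
  result = DesiruSchema.execute(payload['query'], variables: payload['variables'])
  [200, { 'content-type' => 'application/json' }, [JSON.generate(result.to_h)]]
}

Run it with `rackup` and any HTTP client that can POST JSON.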
@@ -0,0 +1,80 @@ (new file, 80 added lines; per the file listing above this is data/examples/rag_retrieval.rb)
+# frozen_string_literal: true
+
+require_relative '../lib/desiru'
+
+# Example: Using the Retrieve module for RAG (Retrieval Augmented Generation)
+
+# Create a dummy model for demonstration
+class DummyModel
+  def complete(prompt, **_options)
+    { content: "Answer based on retrieved context: #{prompt[:user]}" }
+  end
+end
+
+# Initialize the Retrieve module
+retrieve = Desiru::Retrieve.new(model: DummyModel.new)
+
+# Add some documents to the knowledge base
+documents = [
+  'Desiru is a Ruby implementation of DSPy for programming language models.',
+  'DSPy (Declarative Self-improving Language Programs) enables systematic optimization of LM prompts and weights.',
+  'Ruby is a dynamic, open source programming language with a focus on simplicity and productivity.',
+  'The Retrieve module implements vector search for RAG (Retrieval Augmented Generation) applications.',
+  'RAG combines retrieval from a knowledge base with language model generation for more accurate responses.',
+  'Vector embeddings enable semantic search by representing text as high-dimensional numerical vectors.',
+  'The InMemoryBackend stores documents and embeddings in memory for fast prototyping.',
+  'Production systems might use vector databases like Pinecone, Weaviate, or Qdrant.'
+]
+
+puts "Adding #{documents.size} documents to the retrieval index..."
+retrieve.add_documents(documents)
+puts "Index now contains #{retrieve.document_count} documents\n\n"
+
+# Perform some searches
+queries = [
+  { query: 'What is Desiru?', k: 3 },
+  { query: 'vector search implementation', k: 2 },
+  { query: 'Ruby programming', k: 4 }
+]
+
+queries.each do |params|
+  puts "Query: '#{params[:query]}' (top #{params[:k]} results)"
+  puts '-' * 50
+
+  result = retrieve.call(**params)
+
+  result.documents.each_with_index do |doc, idx|
+    score = result.scores[idx]
+    puts "#{idx + 1}. [Score: #{score.round(3)}] #{doc}"
+  end
+
+  puts "\n"
+end
+
+# Example: Using custom embeddings
+puts "Example with custom embeddings:"
+puts '-' * 50
+
+# Create simple one-hot encoded embeddings for demonstration
+custom_docs = %w[apple banana cherry]
+custom_embeddings = [
+  [1.0, 0.0, 0.0], # apple
+  [0.0, 1.0, 0.0], # banana
+  [0.0, 0.0, 1.0]  # cherry
+]
+
+# Create a new retrieve instance with custom backend
+custom_retrieve = Desiru::Retrieve.new(
+  model: DummyModel.new,
+  backend: Desiru::Modules::InMemoryBackend.new(distance_metric: :euclidean)
+)
+
+custom_retrieve.add_documents(custom_docs, embeddings: custom_embeddings)
+
+# Search with a custom query embedding # Closer to apple
+results = custom_retrieve.backend.search('custom', k: 3)
+
+puts "Custom embedding search results:"
+results.each_with_index do |result, idx|
+  puts "#{idx + 1}. #{result[:document]} (distance: #{result[:score].round(3)})"
+end
@@ -0,0 +1,31 @@ (new file, 31 added lines; per the file listing above this is data/examples/simple_qa.rb)
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'bundler/setup'
+require 'desiru'
+
+# Configure Desiru with OpenAI model
+Desiru.configure do |config|
+  config.default_model = Desiru::Models::RaixAdapter.new(
+    provider: :openai,
+    model: 'gpt-3.5-turbo',
+    api_key: ENV['OPENAI_API_KEY'] || raise('Please set OPENAI_API_KEY environment variable')
+  )
+end
+
+# Create a simple question-answering module
+qa = Desiru::Modules::Predict.new('question -> answer')
+
+# Ask a question
+result = qa.call(question: 'What is the capital of France?')
+puts 'Question: What is the capital of France?'
+puts "Answer: #{result.answer}"
+
+# Create a Chain of Thought module for more complex reasoning
+cot = Desiru::Modules::ChainOfThought.new('question -> answer')
+
+# Ask a more complex question
+result = cot.call(question: 'Two dice are tossed. What is the probability that the sum equals two?')
+puts "\nQuestion: Two dice are tossed. What is the probability that the sum equals two?"
+puts "Answer: #{result.answer}"
+puts "Reasoning: #{result.reasoning}" if result.respond_to?(:reasoning)
@@ -0,0 +1,45 @@ (new file, 45 added lines; its content and line count match data/examples/typed_signatures.rb in the listing above)
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'bundler/setup'
+require 'desiru'
+
+# Configure Desiru
+Desiru.configure do |config|
+  config.default_model = Desiru::Models::RaixAdapter.new(
+    provider: :openai,
+    model: 'gpt-3.5-turbo',
+    api_key: ENV['OPENAI_API_KEY'] || raise('Please set OPENAI_API_KEY environment variable')
+  )
+end
+
+# Create a module with typed signature and descriptions
+summarizer = Desiru::Modules::Predict.new(
+  'document: string, max_length: int -> summary: string, key_points: list',
+  descriptions: {
+    'document' => 'The text document to summarize',
+    'max_length' => 'Maximum number of words in the summary',
+    'summary' => 'A concise summary of the document',
+    'key_points' => 'List of key points from the document'
+  }
+)
+
+# Test document
+document = <<~TEXT
+  Ruby is a dynamic, open source programming language with a focus on simplicity and productivity.
+  It has an elegant syntax that is natural to read and easy to write. Ruby was created by Yukihiro
+  Matsumoto in the mid-1990s. The language emphasizes the principle of least surprise, meaning that
+  the language should behave in a way that minimizes confusion for experienced users.
+TEXT
+
+# Generate summary
+result = summarizer.call(document: document, max_length: 50)
+
+puts 'Original Document:'
+puts document
+puts "\nSummary (max #{result.to_h[:max_length] || 50} words):"
+puts result.summary
+puts "\nKey Points:"
+result.key_points.each_with_index do |point, i|
+  puts "#{i + 1}. #{point}"
+end
@@ -0,0 +1,170 @@ (new file, 170 added lines; per the file listing above this is data/lib/desiru/async_capable.rb)
+# frozen_string_literal: true
+
+require 'securerandom'
+require 'redis'
+require 'json'
+
+module Desiru
+  module AsyncCapable
+    def call_async(inputs = {})
+      job_id = SecureRandom.uuid
+
+      Desiru::Jobs::AsyncPredict.perform_async(
+        job_id,
+        self.class.name,
+        signature.to_s,
+        inputs,
+        {
+          'model_class' => model.class.name,
+          'model_config' => model_config,
+          'config' => config,
+          'demos' => demos
+        }
+      )
+
+      AsyncResult.new(job_id)
+    end
+
+    def call_batch_async(inputs_array)
+      batch_id = SecureRandom.uuid
+
+      Desiru::Jobs::BatchProcessor.perform_async(
+        batch_id,
+        self.class.name,
+        signature.to_s,
+        inputs_array,
+        {
+          'model_class' => model.class.name,
+          'model_config' => model_config,
+          'config' => config,
+          'demos' => demos
+        }
+      )
+
+      BatchResult.new(batch_id)
+    end
+
+    private
+
+    def model_config
+      return {} unless model.respond_to?(:to_config)
+
+      model.to_config
+    end
+  end
+
+  class AsyncResult
+    attr_reader :job_id
+
+    def initialize(job_id)
+      @job_id = job_id
+      @redis = Redis.new(url: Desiru.configuration.redis_url || ENV.fetch('REDIS_URL', nil))
+    end
+
+    def ready?
+      result = fetch_result
+      !result.nil?
+    end
+
+    def success?
+      result = fetch_result
+      result && result[:success]
+    end
+
+    def failed?
+      result = fetch_result
+      result && !result[:success]
+    end
+
+    def result
+      data = fetch_result
+      return nil unless data
+
+      raise ModuleError, "Async job failed: #{data[:error]}" unless data[:success]
+
+      ModuleResult.new(data[:result], metadata: { async: true, job_id: job_id })
+    end
+
+    def error
+      data = fetch_result
+      return nil unless data && !data[:success]
+
+      {
+        message: data[:error],
+        class: data[:error_class]
+      }
+    end
+
+    def status
+      status_data = fetch_status
+      return 'pending' unless status_data
+
+      status_data[:status] || 'pending'
+    end
+
+    def progress
+      status_data = fetch_status
+      return nil unless status_data
+
+      status_data[:progress]
+    end
+
+    def wait(timeout: 60, poll_interval: 0.5)
+      start_time = Time.now
+
+      while Time.now - start_time < timeout
+        return result if ready?
+
+        sleep poll_interval
+      end
+
+      raise TimeoutError, "Async result not ready after #{timeout} seconds"
+    end
+
+    private
+
+    def fetch_result
+      raw = @redis.get("desiru:results:#{job_id}")
+      return nil unless raw
+
+      JSON.parse(raw, symbolize_names: true)
+    end
+
+    def fetch_status
+      raw = @redis.get("desiru:status:#{job_id}")
+      return nil unless raw
+
+      JSON.parse(raw, symbolize_names: true)
+    end
+  end
+
+  class BatchResult < AsyncResult
+    def results
+      data = fetch_result
+      return [] unless data && data[:results]
+
+      data[:results].map do |item|
+        ModuleResult.new(item[:result], metadata: { batch_index: item[:index] }) if item[:success]
+      end
+    end
+
+    def errors
+      data = fetch_result
+      return [] unless data && data[:errors]
+
+      data[:errors]
+    end
+
+    def stats
+      data = fetch_result
+      return {} unless data
+
+      {
+        total: data[:total],
+        successful: data[:successful],
+        failed: data[:failed],
+        success_rate: data[:total].positive? ? data[:successful].to_f / data[:total] : 0
+      }
+    end
+  end
+end
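AsyncCapable pairs with the job classes listed above (Desiru::Jobs::AsyncPredict, Desiru::Jobs::BatchProcessor) to give modules a fire-and-poll interface: `call_async` enqueues a job and returns an AsyncResult that reads result and status keys from Redis. The following is a minimal usage sketch, not gem documentation; it assumes a Desiru module class mixes in Desiru::AsyncCapable (Predict is used purely for illustration), a job worker is running, and Redis is reachable via `Desiru.configuration.redis_url` or `REDIS_URL`.

# Usage sketch only; whether Predict includes AsyncCapable is not visible in this diff.
qa = Desiru::Modules::Predict.new('question -> answer')

async = qa.call_async(question: 'What is the capital of France?')
puts async.status                        # 'pending' until the worker writes a status key

module_result = async.wait(timeout: 30)  # polls ready? every 0.5s; raises TimeoutError on expiry
puts module_result.inspect if async.success?

`call_batch_async` follows the same pattern, returning a BatchResult whose `results`, `errors`, and `stats` methods read the aggregate payload written by the batch job.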