flow_nodes 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.qlty.yml +40 -0
- data/.rspec +3 -0
- data/.rubocop.yml +53 -0
- data/CHANGELOG.md +59 -0
- data/CODE_OF_CONDUCT.md +132 -0
- data/LICENSE.txt +21 -0
- data/README.md +315 -0
- data/Rakefile +12 -0
- data/examples/advanced_workflow.rb +299 -0
- data/examples/batch_processing.rb +108 -0
- data/examples/chatbot.rb +91 -0
- data/examples/llm_calendar_parser.rb +429 -0
- data/examples/llm_content_processor.rb +603 -0
- data/examples/llm_document_analyzer.rb +276 -0
- data/examples/simple_llm_example.rb +166 -0
- data/examples/workflow.rb +158 -0
- data/lib/flow_nodes/async_batch_flow.rb +16 -0
- data/lib/flow_nodes/async_batch_node.rb +15 -0
- data/lib/flow_nodes/async_flow.rb +49 -0
- data/lib/flow_nodes/async_node.rb +48 -0
- data/lib/flow_nodes/async_parallel_batch_flow.rb +17 -0
- data/lib/flow_nodes/async_parallel_batch_node.rb +18 -0
- data/lib/flow_nodes/base_node.rb +117 -0
- data/lib/flow_nodes/batch_flow.rb +16 -0
- data/lib/flow_nodes/batch_node.rb +15 -0
- data/lib/flow_nodes/conditional_transition.rb +17 -0
- data/lib/flow_nodes/flow.rb +65 -0
- data/lib/flow_nodes/node.rb +54 -0
- data/lib/flow_nodes/version.rb +5 -0
- data/lib/flow_nodes.rb +20 -0
- data/sig/flow_nodes.rbs +4 -0
- metadata +82 -0
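All of the example scripts in this release follow the same wiring pattern: subclass FlowNodes::Node, implement exec (plus optional prep and post hooks), connect nodes with >> for a default transition or with `node - :symbol >> other` for symbol-based routing, then wrap the chain in a FlowNodes::Flow. As a quick orientation before the file diffs below, here is a minimal sketch of that pattern; the node names, the :greeted symbol, and the params are invented for illustration and are not part of the gem:

require "flow_nodes"  # assumed entry point, provided by data/lib/flow_nodes.rb

class GreetNode < FlowNodes::Node
  def exec(params)
    puts "Hello, #{params[:name]}!"
    :greeted                  # the returned symbol selects the outgoing transition
  end
end

class DoneNode < FlowNodes::Node
  def exec(_params)
    puts "Done."
    nil                       # returning nil ends the flow
  end
end

greet = GreetNode.new
done = DoneNode.new
greet - :greeted >> done      # symbol-routed transition; `greet >> done` would be the default route

flow = FlowNodes::Flow.new(start: greet)
flow.set_params(name: "Ada")
flow.run({})                  # shared state hash, threaded through prep/post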
data/examples/llm_document_analyzer.rb
@@ -0,0 +1,276 @@
# frozen_string_literal: true

require_relative "../lib/flow_nodes"
require "json"
require "securerandom"

# Simple LLM Document Analysis Pipeline
# This demonstrates the core patterns for LLM integration with FlowNodes:
# 1. Document ingestion
# 2. LLM analysis with proper error handling
# 3. Result formatting and delivery
# 4. Symbol-based flow control

module LLMDocumentAnalyzer
  # Mock LLM service
  class LLMService
    def self.analyze_document(content, analysis_type = "summary")
      case analysis_type
      when "summary"
        {
          summary: content.split('.').first(2).join('.') + '.',
          word_count: content.split.length,
          key_themes: ["productivity", "analysis", "automation"]
        }
      when "sentiment"
        {
          sentiment: content.include?("good") ? "positive" : "neutral",
          confidence: 0.85,
          emotional_tone: "professional"
        }
      when "extract_entities"
        {
          people: content.scan(/\b[A-Z][a-z]+ [A-Z][a-z]+\b/),
          organizations: content.scan(/\b[A-Z][a-z]+ Inc\.|Corp\.|LLC\b/),
          dates: content.scan(/\d{4}-\d{2}-\d{2}/)
        }
      else
        { error: "Unknown analysis type: #{analysis_type}" }
      end
    end
  end

  class DocumentIngestionNode < FlowNodes::Node
    def prep(state)
      puts "📄 Starting document ingestion..."
      state[:start_time] = Time.now
      nil
    end

    def exec(params)
      puts "📊 Loading document: #{params[:document_path] || 'sample.txt'}"

      # Mock document content
      document = {
        id: SecureRandom.hex(4),
        content: "This is a sample document for analysis. It contains good information about productivity and automation tools. The document discusses various approaches to streamline workflows and improve efficiency.",
        metadata: {
          title: "Sample Document",
          author: "John Smith",
          created_at: "2024-01-15",
          word_count: 31
        }
      }

      puts "✅ Document loaded successfully"

      # Store document for next node
      @params.merge!(document: document)

      :document_loaded
    end

    def post(state, params, result)
      puts "📈 Document ingestion completed"
    end
  end

  class LLMAnalysisNode < FlowNodes::Node
    def initialize(analysis_type: "summary")
      super(max_retries: 3, wait: 1)
      @analysis_type = analysis_type
    end

    def prep(state)
      puts "🤖 Starting LLM analysis: #{@analysis_type}"
      state[:llm_start] = Time.now
      nil
    end

    def exec(params)
      document = params[:document]

      unless document
        puts "❌ No document found in params"
        return :missing_document
      end

      puts "🧠 Analyzing document: #{document[:id]}"

      # Call LLM service
      analysis_result = LLMService.analyze_document(document[:content], @analysis_type)

      puts "✅ LLM analysis completed"

      # Store analysis result
      @params.merge!(analysis: analysis_result)

      :analysis_completed
    end

    def post(state, params, result)
      duration = Time.now - state[:llm_start]
      puts "📈 LLM analysis completed in #{duration.round(3)}s"
    end

    def exec_fallback(params, exception)
      puts "⚠️ LLM analysis failed: #{exception.message}"

      # Fallback analysis
      @params.merge!(analysis: { error: "LLM service unavailable", fallback: true })

      :analysis_failed
    end
  end

  class ResultFormattingNode < FlowNodes::Node
    def initialize(format: "json")
      super()
      @format = format
    end

    def prep(state)
      puts "📝 Formatting results as #{@format}"
      nil
    end

    def exec(params)
      document = params[:document]
      analysis = params[:analysis]

      formatted_result = case @format
      when "json"
        JSON.pretty_generate({
          document: document,
          analysis: analysis,
          processed_at: Time.now
        })
      when "summary"
        """
        Document Analysis Results
        ========================

        Document: #{document[:metadata][:title]}
        Author: #{document[:metadata][:author]}
        Analysis Type: summary

        Results:
        #{analysis.map { |k, v| "#{k}: #{v}" }.join("\n")}

        Processed at: #{Time.now}
        """
      else
        "Analysis completed for document #{document[:id]}"
      end

      puts "✅ Results formatted successfully"

      @params.merge!(formatted_result: formatted_result)

      :results_formatted
    end

    def post(state, params, result)
      puts "📈 Results formatting completed"
    end
  end

  class OutputDeliveryNode < FlowNodes::Node
    def prep(state)
      puts "📤 Preparing output delivery"
      nil
    end

    def exec(params)
      puts "🚀 Delivering results..."

      # Display results
      puts "\n" + "="*50
      puts "📋 DOCUMENT ANALYSIS RESULTS"
      puts "="*50
      puts params[:formatted_result]
      puts "="*50

      puts "\n✅ Analysis pipeline completed successfully!"

      nil # End flow
    end

    def post(state, params, result)
      if state[:start_time]
        total_duration = Time.now - state[:start_time]
        puts "📈 Total pipeline duration: #{total_duration.round(3)}s"
      end
    end
  end

  class ErrorHandlerNode < FlowNodes::Node
    def prep(state)
      puts "🚨 Handling analysis error"
      nil
    end

    def exec(params)
      puts "❌ LLM analysis failed, providing fallback response"

      # Simple fallback
      @params.merge!(
        formatted_result: "Document analysis failed. Please try again later."
      )

      :error_handled
    end

    def post(state, params, result)
      puts "🔧 Error handling completed"
    end
  end
end

# Demo showing LLM document analysis
if $PROGRAM_NAME == __FILE__
  puts "🤖 LLM DOCUMENT ANALYSIS PIPELINE"
  puts "=" * 45

  # Create nodes
  ingestion = LLMDocumentAnalyzer::DocumentIngestionNode.new
  analysis = LLMDocumentAnalyzer::LLMAnalysisNode.new(analysis_type: "summary")
  formatting = LLMDocumentAnalyzer::ResultFormattingNode.new(format: "summary")
  delivery = LLMDocumentAnalyzer::OutputDeliveryNode.new
  error_handler = LLMDocumentAnalyzer::ErrorHandlerNode.new

  # Connect with symbol-based routing
  ingestion - :document_loaded >> analysis
  analysis - :analysis_completed >> formatting
  analysis - :analysis_failed >> error_handler
  formatting - :results_formatted >> delivery
  error_handler - :error_handled >> delivery

  # Create flow
  flow = FlowNodes::Flow.new(start: ingestion)

  # Test successful analysis
  puts "\n📋 SCENARIO 1: Successful Document Analysis"
  puts "-" * 40
  state = { pipeline_id: SecureRandom.hex(4) }
  flow.set_params(document_path: "sample_document.txt")
  flow.run(state)

  # Test with different analysis type
  puts "\n📋 SCENARIO 2: Sentiment Analysis"
  puts "-" * 40
  sentiment_analysis = LLMDocumentAnalyzer::LLMAnalysisNode.new(analysis_type: "sentiment")
  json_formatting = LLMDocumentAnalyzer::ResultFormattingNode.new(format: "json")

  # Create new flow for sentiment analysis
  ingestion - :document_loaded >> sentiment_analysis
  sentiment_analysis - :analysis_completed >> json_formatting
  json_formatting - :results_formatted >> delivery

  flow2 = FlowNodes::Flow.new(start: ingestion)
  state2 = { pipeline_id: SecureRandom.hex(4) }
  flow2.set_params(document_path: "sentiment_test.txt")
  flow2.run(state2)

  puts "\n🎯 All document analysis scenarios completed!"
end
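LLMAnalysisNode above also shows the retry hooks: it passes max_retries: and wait: to the node constructor and defines exec_fallback(params, exception). The base node implementation (data/lib/flow_nodes/node.rb) is not included in this diff, so the exact semantics are inferred; a minimal sketch under that reading, with FlakyNode and RemoteService invented for illustration:

require "flow_nodes"

# Stand-in for an unreliable external call; purely illustrative.
module RemoteService
  def self.call(payload)
    raise "timeout" if rand < 0.5
    "ok: #{payload}"
  end
end

class FlakyNode < FlowNodes::Node
  def initialize
    super(max_retries: 3, wait: 1)  # inferred: retry exec up to 3 times, pausing ~1s between attempts
  end

  def exec(params)
    RemoteService.call(params[:payload])  # may raise on transient failures
    :done
  end

  def exec_fallback(_params, exception)
    # Inferred: called once retries are exhausted, instead of letting the exception propagate.
    puts "Giving up: #{exception.message}"
    :failed  # route to an error-handling branch, as the example does with :analysis_failed
  end
end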
data/examples/simple_llm_example.rb
@@ -0,0 +1,166 @@
# frozen_string_literal: true

require_relative "../lib/flow_nodes"

# Simple LLM Integration Example
# This demonstrates the core patterns for LLM workflows with FlowNodes

module SimpleLLMExample
  # Mock LLM service
  class LLMService
    def self.process(text, operation)
      case operation
      when "summarize"
        "Summary: #{text.split('.').first}."
      when "classify"
        text.downcase.include?("error") ? "error_report" : "general_content"
      when "extract_keywords"
        text.scan(/\b\w{4,}\b/).uniq.first(3).join(", ")
      else
        "Processed: #{text}"
      end
    end
  end

  class TextInputNode < FlowNodes::Node
    def exec(params)
      puts "📄 Processing text input..."

      # Simulate getting text from params
      text = params[:text] || "This is a sample document about productivity tools. Error handling is important."

      puts "📝 Text received: #{text[0..50]}..."

      # Return text for next node
      text
    end
  end

  class LLMProcessorNode < FlowNodes::Node
    def initialize(operation: "summarize")
      super()
      @operation = operation
    end

    def exec(text)
      puts "🤖 Processing with LLM operation: #{@operation}"

      # Call LLM service
      result = LLMService.process(text, @operation)

      puts "✅ LLM processing completed"

      # Return result
      result
    end
  end

  class OutputNode < FlowNodes::Node
    def exec(result)
      puts "📤 Delivering result:"
      puts "Result: #{result}"

      nil # End flow
    end
  end

  # Conditional node that routes based on classification
  class ClassificationRouterNode < FlowNodes::Node
    def exec(text)
      puts "🔍 Classifying content..."

      classification = LLMService.process(text, "classify")

      puts "📋 Classification: #{classification}"

      # Return symbol for routing
      classification.to_sym
    end
  end

  class ErrorHandlerNode < FlowNodes::Node
    def exec(text)
      puts "🚨 Handling error content..."
      puts "Error analysis: #{text[0..100]}"

      nil # End flow
    end
  end

  class GeneralProcessorNode < FlowNodes::Node
    def exec(text)
      puts "📊 Processing general content..."

      keywords = LLMService.process(text, "extract_keywords")
      summary = LLMService.process(text, "summarize")

      puts "Keywords: #{keywords}"
      puts "Summary: #{summary}"

      nil # End flow
    end
  end
end

# Demo showing different LLM workflow patterns
if $PROGRAM_NAME == __FILE__
  puts "🤖 SIMPLE LLM WORKFLOW EXAMPLES"
  puts "=" * 40

  # Example 1: Basic LLM Pipeline
  puts "\n📋 EXAMPLE 1: Basic LLM Processing"
  puts "-" * 30

  input_node = SimpleLLMExample::TextInputNode.new
  llm_processor = SimpleLLMExample::LLMProcessorNode.new(operation: "summarize")
  output_node = SimpleLLMExample::OutputNode.new

  # Connect nodes
  input_node >> llm_processor >> output_node

  # Create and run flow
  flow = FlowNodes::Flow.new(start: input_node)
  flow.set_params(text: "This is a comprehensive document about artificial intelligence and machine learning applications. The technology shows great promise for automation.")
  flow.run(nil)

  # Example 2: Conditional LLM Routing
  puts "\n📋 EXAMPLE 2: Conditional LLM Routing"
  puts "-" * 30

  input_node = SimpleLLMExample::TextInputNode.new
  classifier = SimpleLLMExample::ClassificationRouterNode.new
  error_handler = SimpleLLMExample::ErrorHandlerNode.new
  general_processor = SimpleLLMExample::GeneralProcessorNode.new

  # Connect with conditional routing
  input_node >> classifier
  classifier - :error_report >> error_handler
  classifier - :general_content >> general_processor

  # Test with error content
  flow = FlowNodes::Flow.new(start: input_node)
  flow.set_params(text: "System error occurred during processing. Database connection failed.")
  flow.run(nil)

  # Test with general content
  flow.set_params(text: "Today's productivity tips focus on time management and workflow optimization.")
  flow.run(nil)

  # Example 3: Multi-step LLM Processing
  puts "\n📋 EXAMPLE 3: Multi-step LLM Processing"
  puts "-" * 30

  input_node = SimpleLLMExample::TextInputNode.new
  summarizer = SimpleLLMExample::LLMProcessorNode.new(operation: "summarize")
  keyword_extractor = SimpleLLMExample::LLMProcessorNode.new(operation: "extract_keywords")
  output_node = SimpleLLMExample::OutputNode.new

  # Chain multiple LLM operations
  input_node >> summarizer >> keyword_extractor >> output_node

  flow = FlowNodes::Flow.new(start: input_node)
  flow.set_params(text: "Artificial intelligence and machine learning are transforming modern business operations. Companies are investing heavily in automation technologies to improve efficiency and reduce costs.")
  flow.run(nil)

  puts "\n🎯 All LLM workflow examples completed!"
end
data/examples/workflow.rb
@@ -0,0 +1,158 @@
# frozen_string_literal: true

require_relative "../lib/flow_nodes"
require "securerandom"

module WorkflowDemo
  class DataValidationNode < FlowNodes::Node
    def prep(state)
      puts "🔍 Starting validation process..."
      state[:validation_start] = Time.now
      nil # Use node's own params
    end

    def exec(data)
      puts "📝 Validating data: #{data.inspect}"
      return :invalid if data.nil? || data.empty?
      return :invalid unless data.is_a?(Hash)
      return :invalid unless data.key?(:email) && data.key?(:name)

      :valid
    end

    def post(state, params, result)
      duration = Time.now - state[:validation_start]
      puts "✅ Validation completed in #{duration.round(3)}s with result: #{result}"
      state[:validation_result] = result
    end
  end

  class ProcessDataNode < FlowNodes::Node
    def prep(state)
      puts "⚙️ Preparing data processing..."
      state[:processing_start] = Time.now
      # Transform params - add processing metadata
      @params.merge({
        processing_id: SecureRandom.hex(8),
        processed_at: Time.now
      })
    end

    def exec(data)
      puts "⚡ Processing data for #{data[:name]} (#{data[:email]})"
      puts "📊 Processing ID: #{data[:processing_id]}"

      # Simulate processing work
      sleep(0.1)

      data[:processed] = true
      data[:processed_at] = Time.now
      :success
    end

    def post(state, params, result)
      duration = Time.now - state[:processing_start]
      puts "✅ Processing completed in #{duration.round(3)}s"
      state[:processed_count] = (state[:processed_count] || 0) + 1
    end
  end

  class SendEmailNode < FlowNodes::Node
    def prep(state)
      puts "📧 Preparing email service..."
      state[:email_attempts] = (state[:email_attempts] || 0) + 1
      nil
    end

    def exec(data)
      puts "📤 Sending welcome email to #{data[:email]}"
      puts "📊 Processing ID: #{data[:processing_id]}"

      # Simulate email sending
      sleep(0.1)

      :email_sent
    end

    def post(state, params, result)
      puts "✅ Email sent successfully"
      state[:emails_sent] = (state[:emails_sent] || 0) + 1
      state[:last_email_sent] = Time.now
    end
  end

  class ErrorHandlerNode < FlowNodes::Node
    def prep(state)
      puts "🚨 Error handling activated..."
      state[:error_count] = (state[:error_count] || 0) + 1
      nil
    end

    def exec(data)
      puts "❌ Error: Invalid data received - #{data}"
      puts "📊 Total errors handled: #{@params[:error_count] || 0}"
      :error_handled
    end

    def post(state, params, result)
      puts "🔧 Error handling completed"
      state[:last_error_handled] = Time.now
    end
  end

  class CompletionNode < FlowNodes::Node
    def prep(state)
      puts "🎯 Finalizing workflow..."
      state[:completion_start] = Time.now
      nil
    end

    def exec(data)
      puts "🎉 Workflow completed successfully for #{data[:name]}"
      puts "📊 Processing ID: #{data[:processing_id]}"
      puts "📈 Final data: #{data}"
      nil # End the flow
    end

    def post(state, params, result)
      duration = Time.now - state[:completion_start]
      puts "✅ Workflow finalized in #{duration.round(3)}s"
      puts "📊 Final state: #{state}"
    end
  end
end

# Demo script
if $PROGRAM_NAME == __FILE__
  # Create nodes
  validator = WorkflowDemo::DataValidationNode.new
  processor = WorkflowDemo::ProcessDataNode.new
  emailer = WorkflowDemo::SendEmailNode.new
  error_handler = WorkflowDemo::ErrorHandlerNode.new
  completion = WorkflowDemo::CompletionNode.new

  # Connect the workflow using symbols
  validator - :valid >> processor
  validator - :invalid >> error_handler
  processor - :success >> emailer
  emailer - :email_sent >> completion

  # Create flow
  flow = FlowNodes::Flow.new(start: validator)

  # Test with valid data
  puts "=== Testing with valid data ==="
  state = { workflow_id: SecureRandom.hex(4) }
  flow.set_params({ email: "user@example.com", name: "John Doe" })
  flow.run(state)

  puts "\n=== Testing with invalid data ==="
  state = { workflow_id: SecureRandom.hex(4) }
  flow.set_params({ invalid: "data" })
  flow.run(state)

  puts "\n=== Testing with nil data ==="
  state = { workflow_id: SecureRandom.hex(4) }
  flow.set_params(nil)
  flow.run(state)
end
data/lib/flow_nodes/async_batch_flow.rb
@@ -0,0 +1,16 @@
# frozen_string_literal: true

module FlowNodes
  # An async flow that processes a batch of items sequentially.
  class AsyncBatchFlow < AsyncFlow
    protected

    def _run_async(s)
      batch_params = prep_async(s) || []
      batch_params.each do |item_params|
        _orch_async(s, params: @params.merge(item_params))
      end
      post_async(s, batch_params, nil)
    end
  end
end
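Reading _run_async above, prep_async looks like the intended override point for batch flows: each hash it returns is merged into the flow's params and the node chain is orchestrated once per item. A hypothetical subclass illustrating that reading; RowBatchFlow and the :rows key are invented:

require "flow_nodes"

class RowBatchFlow < FlowNodes::AsyncBatchFlow
  protected

  # One params hash per item; the inner flow runs once for each.
  def prep_async(state)
    (state[:rows] || []).map { |row| { row: row } }
  end
end

# Usage sketch:
#   flow = RowBatchFlow.new(start: first_node)   # any Node/AsyncNode chain
#   flow.run_async(rows: [{ id: 1 }, { id: 2 }])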
data/lib/flow_nodes/async_batch_node.rb
@@ -0,0 +1,15 @@
# frozen_string_literal: true

module FlowNodes
  # An async node that processes a batch of items sequentially.
  class AsyncBatchNode < AsyncNode
    protected

    def _exec_async(items)
      return [] if items.nil?

      items_array = items.is_a?(Array) ? items : [items]
      items_array.map { |item| super(item) }
    end
  end
end
data/lib/flow_nodes/async_flow.rb
@@ -0,0 +1,49 @@
# frozen_string_literal: true

module FlowNodes
  # A flow that can orchestrate both synchronous and asynchronous nodes.
  class AsyncFlow < Flow
    def run(s) = run_async(s)

    def run_async(s)
      _run_async(s)
    end

    def _run(_s)
      raise "Use run_async for AsyncFlow."
    end

    protected

    def prep_async(_s) = nil
    def post_async(_s, _p, _e) = nil

    def _run_async(s)
      prepared_params = prep_async(s)
      result = _orch_async(s, params: prepared_params || @params)
      post_async(s, prepared_params, result)
      result
    end

    # @note Asynchronous operations use Ruby threads and are subject to the GVL.
    #   This is best suited for I/O-bound tasks, not CPU-bound parallel processing.
    def _orch_async(state, params: nil)
      raise "Flow has no start node" unless @start_node

      current_node = @start_node.dup
      flow_params = params ? params.dup : @params.dup
      last_result = nil

      while current_node
        current_node.set_params(flow_params)
        last_result = if current_node.is_a?(AsyncNode)
                        current_node.send(:_run_async, state)
                      else
                        current_node._run(state)
                      end
        current_node = get_next_node(current_node, last_result)&.dup
      end
      last_result
    end
  end
end
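Because _orch_async dispatches on the node class, an AsyncFlow can drive plain synchronous nodes alongside AsyncNode subclasses, and AsyncFlow#run simply delegates to run_async. A small usage sketch under those assumptions; PingNode and the host param are illustrative:

require "flow_nodes"

class PingNode < FlowNodes::Node
  def exec(params)
    puts "pinging #{params[:host]}"   # stands in for I/O-bound work (see the GVL note above)
    nil                               # returning nil ends the flow
  end
end

flow = FlowNodes::AsyncFlow.new(start: PingNode.new)
flow.set_params(host: "example.com")
flow.run_async({})                    # AsyncFlow#run delegates here as well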