flow_nodes 0.1.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.qlty.yml +40 -0
- data/.rspec +3 -0
- data/.rubocop.yml +53 -0
- data/CHANGELOG.md +59 -0
- data/CODE_OF_CONDUCT.md +132 -0
- data/LICENSE.txt +21 -0
- data/README.md +315 -0
- data/Rakefile +12 -0
- data/examples/advanced_workflow.rb +299 -0
- data/examples/batch_processing.rb +108 -0
- data/examples/chatbot.rb +91 -0
- data/examples/llm_calendar_parser.rb +429 -0
- data/examples/llm_content_processor.rb +603 -0
- data/examples/llm_document_analyzer.rb +276 -0
- data/examples/simple_llm_example.rb +166 -0
- data/examples/workflow.rb +158 -0
- data/lib/flow_nodes/async_batch_flow.rb +16 -0
- data/lib/flow_nodes/async_batch_node.rb +15 -0
- data/lib/flow_nodes/async_flow.rb +49 -0
- data/lib/flow_nodes/async_node.rb +48 -0
- data/lib/flow_nodes/async_parallel_batch_flow.rb +17 -0
- data/lib/flow_nodes/async_parallel_batch_node.rb +18 -0
- data/lib/flow_nodes/base_node.rb +117 -0
- data/lib/flow_nodes/batch_flow.rb +16 -0
- data/lib/flow_nodes/batch_node.rb +15 -0
- data/lib/flow_nodes/conditional_transition.rb +17 -0
- data/lib/flow_nodes/flow.rb +65 -0
- data/lib/flow_nodes/node.rb +54 -0
- data/lib/flow_nodes/version.rb +5 -0
- data/lib/flow_nodes.rb +20 -0
- data/sig/flow_nodes.rbs +4 -0
- metadata +82 -0
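The example that makes up the bulk of this release, data/examples/llm_calendar_parser.rb, is reproduced below. It builds on one small wiring pattern: subclass FlowNodes::Node, return a symbol from exec, chain nodes with the - and >> operators, and drive them with FlowNodes::Flow. The minimal sketch that follows is not part of the gem; the GreetNode/FarewellNode names are hypothetical, and the only call not taken directly from the example is require "flow_nodes" in place of the example's relative require, on the assumption that the gem is installed.

    require "flow_nodes"

    # Hypothetical nodes illustrating the pattern used in the example below.
    class GreetNode < FlowNodes::Node
      def exec(params)
        puts "Hello, #{params[:name]}!"
        @params.merge!({ greeted_at: Time.now }) # stash data for downstream nodes
        :greeted                                 # symbol used for flow routing
      end
    end

    class FarewellNode < FlowNodes::Node
      def exec(params)
        puts "Goodbye, #{params[:name]}!"
        nil # end of flow
      end
    end

    greet = GreetNode.new
    farewell = FarewellNode.new
    greet - :greeted >> farewell # symbol-based transition

    flow = FlowNodes::Flow.new(start: greet)
    flow.set_params({ name: "Ada" })
    flow.run({}) # shared state hash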
data/examples/llm_calendar_parser.rb (new file)
@@ -0,0 +1,429 @@

# frozen_string_literal: true

require_relative "../lib/flow_nodes"
require "json"
require "time"
require "securerandom"

# Example: Calendar Data Processing with LLM Integration
# This demonstrates parsing calendar data, extracting insights with LLM,
# and formatting results - a common MCP (Model Context Protocol) pattern

module LLMCalendarParser
  # Mock LLM service - in real usage, this would call OpenAI, Claude, etc.
  class MockLLMService
    def self.call(prompt, context = {})
      # Simulate LLM processing with realistic responses
      if prompt.include?("extract key information")
        extract_calendar_info(context[:events])
      elsif prompt.include?("suggest optimizations")
        suggest_optimizations(context[:events])
      elsif prompt.include?("format as")
        format_events(context[:events], context[:format])
      else
        "I understand you want me to process calendar data."
      end
    end

    private

    def self.extract_calendar_info(events)
      busy_hours = events.map { |e| e[:duration] || 1 }.sum
      conflicts = events.select { |e| e[:title].downcase.include?("conflict") }.length

      {
        summary: "Found #{events.length} events totaling #{busy_hours} hours",
        busy_hours: busy_hours,
        conflicts: conflicts,
        busiest_day: events.group_by { |e| e[:date] }.max_by { |_, v| v.length }&.first,
        meeting_types: events.map { |e| e[:type] }.uniq.compact
      }
    end

    def self.suggest_optimizations(events)
      suggestions = []

      # Back-to-back meetings
      if events.any? { |e| e[:title].include?("Back-to-back") }
        suggestions << "Consider adding buffer time between meetings"
      end

      # Too many meetings in one day
      busy_days = events.group_by { |e| e[:date] }.select { |_, v| v.length > 5 }
      if busy_days.any?
        suggestions << "Consider redistributing meetings from busy days: #{busy_days.keys.join(', ')}"
      end

      # Long meetings
      long_meetings = events.select { |e| (e[:duration] || 1) > 2 }
      if long_meetings.any?
        suggestions << "Review necessity of long meetings: #{long_meetings.map { |e| e[:title] }.join(', ')}"
      end

      suggestions.empty? ? ["Schedule looks well optimized!"] : suggestions
    end

    def self.format_events(events, format)
      return "No events to format" if events.nil? || events.empty?

      case format
      when "executive_summary"
        total_time = events.sum { |e| e[:duration] || 1 }
        "Executive Summary: #{events.length} meetings scheduled, #{total_time} total hours"
      when "daily_agenda"
        events.group_by { |e| e[:date] }.map do |date, day_events|
          "#{date}: #{day_events.length} events (#{day_events.sum { |e| e[:duration] || 1 }}h)"
        end.join("\n")
      when "json"
        JSON.pretty_generate(events)
      else
        events.map { |e| "#{e[:date]} #{e[:time]}: #{e[:title]}" }.join("\n")
      end
    end
  end

  class CalendarDataIngestionNode < FlowNodes::Node
    def prep(state)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Starting calendar data ingestion..."
      state[:ingestion_start] = Time.now
      state[:source] = "calendar_api"
      nil
    end

    def exec(params)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Ingesting calendar data from #{params[:source] || 'default source'}..."

      # Simulate calendar data ingestion
      sleep(0.1)

      # Mock calendar events
      events = [
        {
          id: "evt_1",
          title: "Team Standup",
          date: "2024-01-15",
          time: "09:00",
          duration: 0.5,
          type: "recurring",
          attendees: ["alice@company.com", "bob@company.com"]
        },
        {
          id: "evt_2",
          title: "Project Planning Session",
          date: "2024-01-15",
          time: "10:00",
          duration: 2,
          type: "planning",
          attendees: ["alice@company.com", "charlie@company.com"]
        },
        {
          id: "evt_3",
          title: "Back-to-back Client Call",
          date: "2024-01-15",
          time: "14:00",
          duration: 1,
          type: "external",
          attendees: ["client@external.com"]
        },
        {
          id: "evt_4",
          title: "Code Review",
          date: "2024-01-16",
          time: "11:00",
          duration: 1,
          type: "technical",
          attendees: ["dev@company.com"]
        }
      ]

      puts "[#{Time.now.strftime('%H:%M:%S')}] Ingested #{events.length} calendar events"

      # Store events in params for next node
      @params.merge!({ events: events })

      # Return symbol for flow routing
      :data_ingested
    end

    def post(state, params, result)
      duration = Time.now - state[:ingestion_start]
      puts "[#{Time.now.strftime('%H:%M:%S')}] Ingestion completed in #{duration.round(3)}s"
      state[:ingestion_duration] = duration
    end
  end

  class LLMAnalysisNode < FlowNodes::Node
    def initialize(analysis_type: "extract")
      super(max_retries: 3, wait: 1)
      @analysis_type = analysis_type
    end

    def prep(state)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Starting LLM analysis: #{@analysis_type}..."
      state[:analysis_start] = Time.now
      state[:llm_calls] = (state[:llm_calls] || 0) + 1

      # Add analysis metadata to params
      @params.merge({
        analysis_type: @analysis_type,
        analysis_id: SecureRandom.hex(6),
        timestamp: Time.now
      })
    end

    def exec(calendar_data)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Analyzing calendar data with LLM..."
      puts "Analysis ID: #{calendar_data[:analysis_id]}"

      # Construct LLM prompt based on analysis type
      prompt = case @analysis_type
               when "extract"
                 "Please extract key information from this calendar data: meeting count, total hours, conflicts, and patterns."
               when "optimize"
                 "Please suggest optimizations for this calendar schedule to improve productivity and reduce meeting fatigue."
               when "insights"
                 "Please provide strategic insights about this calendar data: time allocation, meeting patterns, and recommendations."
               else
                 "Please analyze this calendar data and provide relevant insights."
               end

      # Call LLM service
      events = calendar_data[:events] || []
      llm_response = MockLLMService.call(prompt, { events: events })

      puts "[#{Time.now.strftime('%H:%M:%S')}] LLM analysis completed"

      # Merge LLM response with calendar data
      merged_data = calendar_data.merge({
        llm_analysis: llm_response,
        analysis_type: @analysis_type,
        processed_at: Time.now
      })

      # Store in params for next node
      @params.merge!(merged_data)

      # Return symbol for routing
      :llm_analysis_completed
    end

    def post(state, params, result)
      duration = Time.now - state[:analysis_start]
      puts "[#{Time.now.strftime('%H:%M:%S')}] LLM analysis completed in #{duration.round(3)}s"
      state[:analysis_duration] = duration
    end

    def exec_fallback(params, exception)
      puts "[#{Time.now.strftime('%H:%M:%S')}] LLM analysis failed: #{exception.message}"
      puts "Using fallback analysis..."

      # Fallback to simple analysis
      params.merge({
        llm_analysis: "Analysis unavailable - LLM service error",
        analysis_type: "fallback",
        error: exception.message
      })
    end
  end

  class DataFormattingNode < FlowNodes::Node
    def initialize(format: "json")
      super()
      @format = format
    end

    def prep(state)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Formatting data as #{@format}..."
      state[:formatting_start] = Time.now
      nil
    end

    def exec(analyzed_data)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Formatting analysis results..."

      # Use LLM for formatting if needed
      formatted_output = case @format
                         when "executive_summary"
                           format_executive_summary(analyzed_data)
                         when "daily_agenda"
                           format_daily_agenda(analyzed_data)
                         when "json"
                           format_json(analyzed_data)
                         else
                           format_default(analyzed_data)
                         end

      puts "[#{Time.now.strftime('%H:%M:%S')}] Data formatted successfully"

      # Store formatted data in params for next node
      formatted_data = analyzed_data.merge({
        formatted_output: formatted_output,
        format: @format,
        formatted_at: Time.now
      })

      @params.merge!(formatted_data)

      # Return symbol for routing
      :data_formatted
    end

    def post(state, params, result)
      duration = Time.now - state[:formatting_start]
      puts "[#{Time.now.strftime('%H:%M:%S')}] Formatting completed in #{duration.round(3)}s"
      state[:formatting_duration] = duration
    end

    private

    def format_executive_summary(data)
      MockLLMService.call("format as executive_summary", { events: data[:events] })
    end

    def format_daily_agenda(data)
      MockLLMService.call("format as daily_agenda", { events: data[:events] })
    end

    def format_json(data)
      JSON.pretty_generate(data)
    end

    def format_default(data)
      """
      Calendar Analysis Results
      =========================

      Analysis Type: #{data[:analysis_type]}
      Processed At: #{data[:processed_at]}

      LLM Analysis:
      #{data[:llm_analysis]}

      Raw Data:
      #{data[:events]&.length || 0} events processed
      """
    end
  end

  class OutputDeliveryNode < FlowNodes::Node
    def initialize(delivery_method: "console")
      super()
      @delivery_method = delivery_method
    end

    def prep(state)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Preparing output delivery via #{@delivery_method}..."
      state[:delivery_start] = Time.now
      nil
    end

    def exec(formatted_data)
      puts "[#{Time.now.strftime('%H:%M:%S')}] Delivering formatted output..."

      case @delivery_method
      when "console"
        deliver_to_console(formatted_data)
      when "file"
        deliver_to_file(formatted_data)
      when "api"
        deliver_to_api(formatted_data)
      else
        puts "Output: #{formatted_data[:formatted_output]}"
      end

      puts "[#{Time.now.strftime('%H:%M:%S')}] Output delivered successfully"

      nil # End of flow
    end

    def post(state, params, result)
      duration = Time.now - state[:delivery_start]

      puts "[#{Time.now.strftime('%H:%M:%S')}] Delivery completed in #{duration.round(3)}s"

      if state[:ingestion_start]
        total_duration = Time.now - state[:ingestion_start]
        puts "[#{Time.now.strftime('%H:%M:%S')}] Total pipeline duration: #{total_duration.round(3)}s"
      end

      puts "Pipeline Statistics:"
      puts "  - Ingestion: #{state[:ingestion_duration]&.round(3)}s"
      puts "  - LLM Analysis: #{state[:analysis_duration]&.round(3)}s"
      puts "  - Formatting: #{state[:formatting_duration]&.round(3)}s"
      puts "  - Delivery: #{duration.round(3)}s"
      puts "  - Total LLM Calls: #{state[:llm_calls] || 0}"
    end

    private

    def deliver_to_console(data)
      puts "\n" + "=" * 60
      puts "CALENDAR ANALYSIS RESULTS"
      puts "=" * 60
      puts data[:formatted_output]
      puts "=" * 60
    end

    def deliver_to_file(data)
      filename = "calendar_analysis_#{Time.now.strftime('%Y%m%d_%H%M%S')}.txt"
      File.write(filename, data[:formatted_output])
      puts "Results saved to: #{filename}"
    end

    def deliver_to_api(data)
      puts "Sending to API endpoint..."
      # Simulate API delivery
      sleep(0.1)
      puts "API delivery completed"
    end
  end
end

# Demo script showing different LLM workflows
if $PROGRAM_NAME == __FILE__
  puts "LLM CALENDAR PROCESSING PIPELINE"
  puts "=" * 50

  # Create shared state for the entire pipeline
  state = {
    pipeline_id: SecureRandom.hex(4),
    user_id: "user_123"
  }

  # Scenario 1: Basic extraction and formatting
  puts "\nSCENARIO 1: Calendar Data Extraction & Analysis"
  puts "-" * 40

  ingestion = LLMCalendarParser::CalendarDataIngestionNode.new
  analysis = LLMCalendarParser::LLMAnalysisNode.new(analysis_type: "extract")
  formatting = LLMCalendarParser::DataFormattingNode.new(format: "executive_summary")
  delivery = LLMCalendarParser::OutputDeliveryNode.new(delivery_method: "console")

  # Connect with symbol-based routing
  ingestion - :data_ingested >> analysis
  analysis - :llm_analysis_completed >> formatting
  formatting - :data_formatted >> delivery

  flow = FlowNodes::Flow.new(start: ingestion)
  flow.set_params({ source: "google_calendar_api" })
  flow.run(state)

  # Scenario 2: Optimization suggestions
  puts "\nSCENARIO 2: Calendar Optimization Suggestions"
  puts "-" * 40

  state[:pipeline_id] = SecureRandom.hex(4)

  optimization_analysis = LLMCalendarParser::LLMAnalysisNode.new(analysis_type: "optimize")
  daily_formatting = LLMCalendarParser::DataFormattingNode.new(format: "daily_agenda")

  ingestion - :data_ingested >> optimization_analysis
  optimization_analysis - :llm_analysis_completed >> daily_formatting
  daily_formatting - :data_formatted >> delivery

  flow2 = FlowNodes::Flow.new(start: ingestion)
  flow2.set_params({ source: "outlook_calendar_api" })
  flow2.run(state)

  puts "\nAll calendar processing scenarios completed!"
end
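Because the demo block is guarded by if $PROGRAM_NAME == __FILE__, the file can also be run directly; from an unpacked copy of the gem, something like ruby examples/llm_calendar_parser.rb should print both scenarios (the examples/ location is inferred from the file's require_relative "../lib/flow_nodes").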