durable_workflow 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.claude/todo/01.amend.md +133 -0
- data/.claude/todo/02.amend.md +444 -0
- data/.claude/todo/phase-1-core/01-GEMSPEC.md +193 -0
- data/.claude/todo/phase-1-core/02-TYPES.md +462 -0
- data/.claude/todo/phase-1-core/03-EXECUTION.md +551 -0
- data/.claude/todo/phase-1-core/04-STEPS.md +603 -0
- data/.claude/todo/phase-1-core/05-PARSER.md +719 -0
- data/.claude/todo/phase-1-core/todo.md +574 -0
- data/.claude/todo/phase-2-runtime/01-STORAGE.md +641 -0
- data/.claude/todo/phase-2-runtime/02-RUNNERS.md +511 -0
- data/.claude/todo/phase-3-extensions/01-EXTENSION-SYSTEM.md +298 -0
- data/.claude/todo/phase-3-extensions/02-AI-PLUGIN.md +936 -0
- data/.claude/todo/phase-3-extensions/todo.md +262 -0
- data/.claude/todo/phase-4-ai-rework/01-DEPENDENCIES.md +107 -0
- data/.claude/todo/phase-4-ai-rework/02-CONFIGURATION.md +123 -0
- data/.claude/todo/phase-4-ai-rework/03-TOOL-REGISTRY.md +237 -0
- data/.claude/todo/phase-4-ai-rework/04-MCP-SERVER.md +432 -0
- data/.claude/todo/phase-4-ai-rework/05-MCP-CLIENT.md +333 -0
- data/.claude/todo/phase-4-ai-rework/06-EXECUTORS.md +397 -0
- data/.claude/todo/phase-4-ai-rework/todo.md +265 -0
- data/.claude/todo/phase-5-validation/.DS_Store +0 -0
- data/.claude/todo/phase-5-validation/01-TEST-GAPS.md +615 -0
- data/.claude/todo/phase-5-validation/01-TESTS.md +2378 -0
- data/.claude/todo/phase-5-validation/02-EXAMPLES-SIMPLE.md +744 -0
- data/.claude/todo/phase-5-validation/02-EXAMPLES.md +1857 -0
- data/.claude/todo/phase-5-validation/03-EXAMPLE-SUPPORT-AGENT.md +95 -0
- data/.claude/todo/phase-5-validation/04-EXAMPLE-ORDER-FULFILLMENT.md +94 -0
- data/.claude/todo/phase-5-validation/05-EXAMPLE-DATA-PIPELINE.md +145 -0
- data/.env.example +3 -0
- data/.rubocop.yml +64 -0
- data/0.3.amend.md +89 -0
- data/CHANGELOG.md +5 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/Gemfile +22 -0
- data/Gemfile.lock +192 -0
- data/LICENSE.txt +21 -0
- data/README.md +39 -0
- data/Rakefile +16 -0
- data/durable_workflow.gemspec +43 -0
- data/examples/approval_request.rb +106 -0
- data/examples/calculator.rb +154 -0
- data/examples/file_search_demo.rb +77 -0
- data/examples/hello_workflow.rb +57 -0
- data/examples/item_processor.rb +96 -0
- data/examples/order_fulfillment/Gemfile +6 -0
- data/examples/order_fulfillment/README.md +84 -0
- data/examples/order_fulfillment/run.rb +85 -0
- data/examples/order_fulfillment/services.rb +146 -0
- data/examples/order_fulfillment/workflow.yml +188 -0
- data/examples/parallel_fetch.rb +102 -0
- data/examples/service_integration.rb +137 -0
- data/examples/support_agent/Gemfile +6 -0
- data/examples/support_agent/README.md +91 -0
- data/examples/support_agent/config/claude_desktop.json +12 -0
- data/examples/support_agent/mcp_server.rb +49 -0
- data/examples/support_agent/run.rb +67 -0
- data/examples/support_agent/services.rb +113 -0
- data/examples/support_agent/workflow.yml +286 -0
- data/lib/durable_workflow/core/condition.rb +45 -0
- data/lib/durable_workflow/core/engine.rb +145 -0
- data/lib/durable_workflow/core/executors/approval.rb +51 -0
- data/lib/durable_workflow/core/executors/assign.rb +18 -0
- data/lib/durable_workflow/core/executors/base.rb +90 -0
- data/lib/durable_workflow/core/executors/call.rb +76 -0
- data/lib/durable_workflow/core/executors/end.rb +19 -0
- data/lib/durable_workflow/core/executors/halt.rb +24 -0
- data/lib/durable_workflow/core/executors/loop.rb +118 -0
- data/lib/durable_workflow/core/executors/parallel.rb +77 -0
- data/lib/durable_workflow/core/executors/registry.rb +34 -0
- data/lib/durable_workflow/core/executors/router.rb +26 -0
- data/lib/durable_workflow/core/executors/start.rb +61 -0
- data/lib/durable_workflow/core/executors/transform.rb +71 -0
- data/lib/durable_workflow/core/executors/workflow.rb +32 -0
- data/lib/durable_workflow/core/parser.rb +189 -0
- data/lib/durable_workflow/core/resolver.rb +61 -0
- data/lib/durable_workflow/core/schema_validator.rb +47 -0
- data/lib/durable_workflow/core/types/base.rb +41 -0
- data/lib/durable_workflow/core/types/condition.rb +25 -0
- data/lib/durable_workflow/core/types/configs.rb +103 -0
- data/lib/durable_workflow/core/types/entry.rb +26 -0
- data/lib/durable_workflow/core/types/results.rb +41 -0
- data/lib/durable_workflow/core/types/state.rb +95 -0
- data/lib/durable_workflow/core/types/step_def.rb +15 -0
- data/lib/durable_workflow/core/types/workflow_def.rb +43 -0
- data/lib/durable_workflow/core/types.rb +29 -0
- data/lib/durable_workflow/core/validator.rb +318 -0
- data/lib/durable_workflow/extensions/ai/ai.rb +149 -0
- data/lib/durable_workflow/extensions/ai/configuration.rb +41 -0
- data/lib/durable_workflow/extensions/ai/executors/agent.rb +150 -0
- data/lib/durable_workflow/extensions/ai/executors/file_search.rb +52 -0
- data/lib/durable_workflow/extensions/ai/executors/guardrail.rb +152 -0
- data/lib/durable_workflow/extensions/ai/executors/handoff.rb +33 -0
- data/lib/durable_workflow/extensions/ai/executors/mcp.rb +47 -0
- data/lib/durable_workflow/extensions/ai/mcp/adapter.rb +73 -0
- data/lib/durable_workflow/extensions/ai/mcp/client.rb +77 -0
- data/lib/durable_workflow/extensions/ai/mcp/rack_app.rb +66 -0
- data/lib/durable_workflow/extensions/ai/mcp/server.rb +122 -0
- data/lib/durable_workflow/extensions/ai/tool_registry.rb +63 -0
- data/lib/durable_workflow/extensions/ai/types.rb +213 -0
- data/lib/durable_workflow/extensions/ai.rb +6 -0
- data/lib/durable_workflow/extensions/base.rb +77 -0
- data/lib/durable_workflow/runners/adapters/inline.rb +42 -0
- data/lib/durable_workflow/runners/adapters/sidekiq.rb +69 -0
- data/lib/durable_workflow/runners/async.rb +100 -0
- data/lib/durable_workflow/runners/stream.rb +126 -0
- data/lib/durable_workflow/runners/sync.rb +40 -0
- data/lib/durable_workflow/storage/active_record.rb +148 -0
- data/lib/durable_workflow/storage/redis.rb +133 -0
- data/lib/durable_workflow/storage/sequel.rb +144 -0
- data/lib/durable_workflow/storage/store.rb +43 -0
- data/lib/durable_workflow/utils.rb +25 -0
- data/lib/durable_workflow/version.rb +5 -0
- data/lib/durable_workflow.rb +70 -0
- data/sig/durable_workflow.rbs +4 -0
- metadata +275 -0
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'json'

module DurableWorkflow
  module Storage
    # ActiveRecord-backed store. JSON-ish columns are persisted as serialized
    # strings via +to_json+ and symbolized again on read.
    #
    # Assumes two tables exist:
    #   workflow_executions: id (uuid), workflow_id, status, input (json), ctx (json),
    #                        current_step, result (json), recover_to, halt_data (json),
    #                        error (text), created_at, updated_at
    #   workflow_entries: id (uuid), execution_id, step_id, step_type, action,
    #                     duration_ms, input (json), output (json), error, timestamp
    class ActiveRecord < Store
      # @param execution_class [Class] AR model for the executions table
      # @param entry_class [Class] AR model for the entries table
      def initialize(execution_class:, entry_class:)
        @execution_class = execution_class
        @entry_class = entry_class
      end

      # Upserts the execution row; returns the execution unchanged.
      def save(execution)
        record = @execution_class.find_or_initialize_by(id: execution.id)
        record.assign_attributes(
          workflow_id: execution.workflow_id,
          status: execution.status.to_s,
          input: execution.input.to_json,
          ctx: execution.ctx.to_json,
          current_step: execution.current_step,
          result: execution.result&.to_json,
          recover_to: execution.recover_to,
          halt_data: execution.halt_data&.to_json,
          error: execution.error
        )
        record.save!
        execution
      end

      # Loads one execution by id, or nil when unknown.
      def load(execution_id)
        record = @execution_class.find_by(id: execution_id)
        record && build_execution(record)
      end

      # Appends an audit entry row; returns the entry.
      def record(entry)
        @entry_class.create!(
          id: entry.id,
          execution_id: entry.execution_id,
          step_id: entry.step_id,
          step_type: entry.step_type,
          action: entry.action.to_s,
          duration_ms: entry.duration_ms,
          input: entry.input&.to_json,
          output: entry.output&.to_json,
          error: entry.error,
          timestamp: entry.timestamp
        )
        entry
      end

      # All audit entries for an execution, oldest first.
      def entries(execution_id)
        @entry_class.where(execution_id:).order(:timestamp).map do |r|
          Core::Entry.new(
            id: r.id,
            execution_id: r.execution_id,
            step_id: r.step_id,
            step_type: r.step_type,
            action: r.action.to_sym,
            duration_ms: r.duration_ms,
            input: parse_json(r.input),
            output: parse_json(r.output),
            error: r.error,
            timestamp: r.timestamp
          )
        end
      end

      # Finds executions, newest first, optionally filtered by workflow/status.
      def find(workflow_id: nil, status: nil, limit: 100)
        scope = @execution_class.all
        scope = scope.where(workflow_id:) if workflow_id
        scope = scope.where(status: status.to_s) if status
        scope.limit(limit).order(created_at: :desc).map { |record| build_execution(record) }
      end

      # Deletes an execution and its entries; true if the execution existed.
      def delete(execution_id)
        record = @execution_class.find_by(id: execution_id)
        return false unless record

        @entry_class.where(execution_id:).delete_all
        record.destroy
        true
      end

      # Ids of stored executions, optionally scoped to one workflow.
      def execution_ids(workflow_id: nil, limit: 1000)
        scope = @execution_class.all
        scope = scope.where(workflow_id:) if workflow_id
        scope.limit(limit).pluck(:id)
      end

      private

      # Maps an AR record to a Core::Execution. Shared by #load and #find
      # (previously duplicated verbatim in both).
      def build_execution(record)
        Core::Execution.new(
          id: record.id,
          workflow_id: record.workflow_id,
          status: record.status.to_sym,
          input: parse_json(record.input) || {},
          ctx: parse_json(record.ctx) || {},
          current_step: record.current_step,
          result: parse_json(attr_or_nil(record, :result)),
          recover_to: attr_or_nil(record, :recover_to),
          halt_data: parse_json(attr_or_nil(record, :halt_data)),
          error: attr_or_nil(record, :error),
          created_at: record.created_at,
          updated_at: record.updated_at
        )
      end

      # Reads an optional column, returning nil when the model omits it.
      def attr_or_nil(record, name)
        record.respond_to?(name) ? record.public_send(name) : nil
      end

      # Parses a JSON string; hashes get deep-symbolized keys. Returns nil
      # for blank or unparseable input.
      def parse_json(str)
        return nil if str.nil? || str.empty?

        result = JSON.parse(str)
        result.is_a?(Hash) ? DurableWorkflow::Utils.deep_symbolize(result) : result
      rescue JSON::ParserError
        nil
      end

      # Kept for backward compatibility; was a byte-identical copy of
      # parse_json in the original.
      alias parse_json_any parse_json
    end
  end
end
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'json'
require 'redis'

module DurableWorkflow
  module Storage
    # Redis-backed store. Executions and their entry lists expire after
    # +ttl+ seconds; a per-workflow set indexes execution ids for lookups.
    class Redis < Store
      PREFIX = 'durable_workflow'

      # @param redis [::Redis, nil] existing client; built from +url+ when nil
      # @param url [String, nil] connection URL used only when +redis+ is nil
      # @param ttl [Integer] key lifetime in seconds (default: 7 days)
      def initialize(redis: nil, url: nil, ttl: 86_400 * 7)
        @redis = redis || ::Redis.new(url:)
        @ttl = ttl
      end

      # Serializes and stores the execution with TTL and indexes it under
      # its workflow id; returns the execution.
      def save(execution)
        key = exec_key(execution.id)
        data = serialize_execution(execution)
        @redis.setex(key, @ttl, data)
        index_add(execution)
        execution
      end

      # Loads one execution by id, or nil when absent/expired.
      def load(execution_id)
        data = @redis.get(exec_key(execution_id))
        data ? deserialize_execution(data) : nil
      end

      # Appends an audit entry and refreshes the list's TTL; returns the entry.
      def record(entry)
        key = entries_key(entry.execution_id)
        data = serialize_entry(entry)
        @redis.rpush(key, data)
        @redis.expire(key, @ttl)
        entry
      end

      # All audit entries for an execution, in insertion order.
      def entries(execution_id)
        key = entries_key(execution_id)
        @redis.lrange(key, 0, -1).map { deserialize_entry(_1) }
      end

      # Finds up to +limit+ executions. Id selection is delegated to
      # #execution_ids (this logic was previously duplicated here). Note the
      # +status+ filter is applied after loading at most +limit+ ids, so
      # fewer than +limit+ matches may be returned even when more exist.
      def find(workflow_id: nil, status: nil, limit: 100)
        ids = execution_ids(workflow_id:, limit:)
        results = ids.filter_map { load(_1) }
        results = results.select { _1.status == status } if status
        results.first(limit)
      end

      # Deletes an execution, its entries, and its index membership;
      # true if the execution existed.
      def delete(execution_id)
        execution = load(execution_id)
        return false unless execution

        @redis.del(exec_key(execution_id))
        @redis.del(entries_key(execution_id))
        index_remove(execution)
        true
      end

      # Execution ids: from the workflow index set when scoped,
      # otherwise discovered via SCAN over execution keys.
      def execution_ids(workflow_id: nil, limit: 1000)
        if workflow_id
          @redis.smembers(index_key(workflow_id)).first(limit)
        else
          scan_execution_ids(limit)
        end
      end

      private

      def exec_key(id)
        "#{PREFIX}:exec:#{id}"
      end

      def entries_key(id)
        "#{PREFIX}:entries:#{id}"
      end

      def index_key(wf_id)
        "#{PREFIX}:idx:#{wf_id}"
      end

      def index_add(execution)
        @redis.sadd(index_key(execution.workflow_id), execution.id)
      end

      def index_remove(execution)
        @redis.srem(index_key(execution.workflow_id), execution.id)
      end

      # Iterates SCAN cursors until +limit+ ids are collected or the scan
      # completes (cursor back to '0').
      # NOTE(review): extracts the id as the segment after the last ':',
      # which assumes execution ids never contain ':' — holds for UUIDs;
      # confirm if other id formats are allowed.
      def scan_execution_ids(limit)
        ids = []
        cursor = '0'
        pattern = "#{PREFIX}:exec:*"

        loop do
          cursor, keys = @redis.scan(cursor, match: pattern, count: 100)
          ids.concat(keys.map { _1.split(':').last })
          break if cursor == '0' || ids.size >= limit
        end

        ids.first(limit)
      end

      def serialize_execution(execution)
        JSON.generate(execution.to_h)
      end

      def deserialize_execution(json)
        Core::Execution.from_h(symbolize(JSON.parse(json)))
      end

      def serialize_entry(entry)
        JSON.generate(entry.to_h)
      end

      def deserialize_entry(json)
        Core::Entry.from_h(symbolize(JSON.parse(json)))
      end

      # Recursively converts hash keys to symbols in decoded JSON.
      def symbolize(obj)
        case obj
        when Hash then obj.transform_keys(&:to_sym).transform_values { symbolize(_1) }
        when Array then obj.map { symbolize(_1) }
        else obj
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'json'
require 'sequel'

module DurableWorkflow
  module Storage
    # Sequel-backed store targeting PostgreSQL jsonb columns.
    #
    # Tables:
    #   workflow_executions: id (uuid pk), workflow_id, status, input (jsonb), ctx (jsonb),
    #                        current_step, result (jsonb), recover_to, halt_data (jsonb),
    #                        error (text), created_at, updated_at
    #   workflow_entries: id (uuid pk), execution_id (fk), step_id, step_type, action,
    #                     duration_ms, input (jsonb), output (jsonb), error, timestamp
    class Sequel < Store
      # @param db [Sequel::Database] connected database
      # @param executions_table [Symbol] executions table name
      # @param entries_table [Symbol] entries table name
      def initialize(db:, executions_table: :workflow_executions, entries_table: :workflow_entries)
        @db = db
        @executions = db[executions_table]
        @entries = db[entries_table]
      end

      # Inserts or updates the execution row; returns the execution.
      def save(execution)
        now = Time.now
        data = {
          workflow_id: execution.workflow_id,
          status: execution.status.to_s,
          input: ::Sequel.pg_jsonb(execution.input),
          ctx: ::Sequel.pg_jsonb(execution.ctx),
          current_step: execution.current_step,
          result: execution.result ? ::Sequel.pg_jsonb(execution.result) : nil,
          recover_to: execution.recover_to,
          halt_data: execution.halt_data ? ::Sequel.pg_jsonb(execution.halt_data) : nil,
          error: execution.error,
          updated_at: now
        }

        if @executions.where(id: execution.id).any?
          @executions.where(id: execution.id).update(data)
        else
          @executions.insert(data.merge(id: execution.id, created_at: now))
        end

        execution
      end

      # Loads one execution by id, or nil if absent.
      def load(execution_id)
        row = @executions.where(id: execution_id).first
        row && build_execution(row)
      end

      # Appends an audit entry row; returns the entry.
      def record(entry)
        @entries.insert(
          id: entry.id,
          execution_id: entry.execution_id,
          step_id: entry.step_id,
          step_type: entry.step_type,
          action: entry.action.to_s,
          duration_ms: entry.duration_ms,
          input: entry.input ? ::Sequel.pg_jsonb(entry.input) : nil,
          output: entry.output ? ::Sequel.pg_jsonb(entry.output) : nil,
          error: entry.error,
          timestamp: entry.timestamp
        )
        entry
      end

      # All audit entries for an execution, oldest first.
      def entries(execution_id)
        @entries.where(execution_id:).order(:timestamp).map do |row|
          Core::Entry.new(
            id: row[:id],
            execution_id: row[:execution_id],
            step_id: row[:step_id],
            step_type: row[:step_type],
            action: row[:action].to_sym,
            duration_ms: row[:duration_ms],
            input: symbolize(row[:input]),
            output: symbolize(row[:output]),
            error: row[:error],
            timestamp: row[:timestamp]
          )
        end
      end

      # Finds executions, newest first, optionally filtered by workflow/status.
      def find(workflow_id: nil, status: nil, limit: 100)
        scope = @executions
        scope = scope.where(workflow_id:) if workflow_id
        scope = scope.where(status: status.to_s) if status
        scope.order(::Sequel.desc(:created_at)).limit(limit).map { |row| build_execution(row) }
      end

      # Deletes an execution and its entries; true if the execution existed.
      def delete(execution_id)
        count = @executions.where(id: execution_id).delete
        @entries.where(execution_id:).delete
        count.positive?
      end

      # Ids of stored executions, optionally scoped to one workflow.
      def execution_ids(workflow_id: nil, limit: 1000)
        scope = @executions
        scope = scope.where(workflow_id:) if workflow_id
        scope.limit(limit).select_map(:id)
      end

      private

      # Maps a row hash to a Core::Execution. Shared by #load and #find
      # (previously duplicated verbatim in both).
      def build_execution(row)
        Core::Execution.new(
          id: row[:id],
          workflow_id: row[:workflow_id],
          status: row[:status].to_sym,
          input: symbolize(row[:input] || {}),
          ctx: symbolize(row[:ctx] || {}),
          current_step: row[:current_step],
          result: symbolize(row[:result]),
          recover_to: row[:recover_to],
          halt_data: symbolize(row[:halt_data]),
          error: row[:error],
          created_at: row[:created_at],
          updated_at: row[:updated_at]
        )
      end

      # Recursively converts hash keys to symbols in decoded jsonb values.
      def symbolize(obj)
        case obj
        when Hash then obj.transform_keys(&:to_sym).transform_values { symbolize(_1) }
        when Array then obj.map { symbolize(_1) }
        else obj
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
# frozen_string_literal: true

module DurableWorkflow
  module Storage
    # Abstract base class for storage backends. Every method raises
    # NotImplementedError here; concrete stores (ActiveRecord, Redis,
    # Sequel, ...) must implement the full interface below.
    class Store
      # Persist execution state; implementations return the execution.
      # (Parameter renamed from `state` to `execution` for consistency
      # with every subclass.)
      def save(execution)
        raise NotImplementedError
      end

      # Load execution state by ID; implementations return nil when absent.
      def load(execution_id)
        raise NotImplementedError
      end

      # Record an audit entry; implementations return the entry.
      def record(entry)
        raise NotImplementedError
      end

      # Audit entries for an execution, ordered by timestamp.
      def entries(execution_id)
        raise NotImplementedError
      end

      # Find executions, optionally filtered by workflow id and status.
      def find(workflow_id: nil, status: nil, limit: 100)
        raise NotImplementedError
      end

      # Delete an execution (and its entries); implementations return a
      # boolean indicating whether anything was deleted.
      def delete(execution_id)
        raise NotImplementedError
      end

      # List execution IDs (for cleanup, admin), optionally scoped.
      def execution_ids(workflow_id: nil, limit: 1000)
        raise NotImplementedError
      end
    end
  end
end
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# frozen_string_literal: true

module DurableWorkflow
  module Utils
    module_function

    # Recursively converts Hash keys to symbols throughout nested
    # hashes and arrays; non-collection values pass through untouched.
    def deep_symbolize(obj)
      case obj
      when Hash
        obj.transform_keys(&:to_sym).transform_values { deep_symbolize(_1) }
      when Array
        obj.map { deep_symbolize(_1) }
      else
        obj
      end
    end

    # Indifferent access - handles both symbol and string keys.
    # Symbol key wins when both forms are present.
    #
    # Uses Hash#key? so stored falsey values are returned faithfully:
    # the previous `hash[sym] || hash[str] || default` chain incorrectly
    # returned +default+ for a stored +false+ (or +nil+).
    def fetch(hash, key, default = nil)
      return default unless hash.is_a?(Hash)

      sym = key.to_sym
      return hash[sym] if hash.key?(sym)

      str = key.to_s
      return hash[str] if hash.key?(str)

      default
    end
  end
end
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'securerandom'
|
|
4
|
+
require 'time'
|
|
5
|
+
require_relative 'durable_workflow/version'
|
|
6
|
+
|
|
7
|
+
module DurableWorkflow
  # Root error type for the gem.
  class Error < StandardError
  end

  # Raised for missing/invalid configuration.
  class ConfigError < Error
  end

  # Raised when a workflow definition fails validation.
  class ValidationError < Error
  end

  # Raised when workflow execution fails.
  class ExecutionError < Error
  end

  class << self
    attr_accessor :config

    # Memoizes the global Config, yields it for mutation when a block is
    # given, and returns it.
    def configure(&block)
      self.config ||= Config.new
      block&.call(config)
      config
    end

    # Parses +source+ into a workflow definition, validates it, and
    # returns the validated definition.
    def load(source)
      definition = Core::Parser.parse(source)
      Core::Validator.validate!(definition)
      definition
    end

    # Global id => workflow definition map.
    def registry
      @registry ||= {}
    end

    # Adds +workflow+ to the registry under its id; returns the workflow.
    def register(workflow)
      registry.store(workflow.id, workflow)
    end

    # Sends a formatted log line to the configured logger, if one is set.
    def log(level, msg, **data)
      logger = config&.logger
      logger&.send(level, "[DurableWorkflow] #{msg} #{data}")
    end
  end

  # Global configuration container: storage backend, service resolver
  # and logger.
  Config = Struct.new(:store, :service_resolver, :logger, keyword_init: true)
end
|
|
43
|
+
|
|
44
|
+
# Core (always loaded)
|
|
45
|
+
require_relative 'durable_workflow/utils'
|
|
46
|
+
require_relative 'durable_workflow/core/types'
|
|
47
|
+
require_relative 'durable_workflow/core/parser'
|
|
48
|
+
require_relative 'durable_workflow/core/validator'
|
|
49
|
+
require_relative 'durable_workflow/core/resolver'
|
|
50
|
+
require_relative 'durable_workflow/core/condition'
|
|
51
|
+
require_relative 'durable_workflow/core/schema_validator'
|
|
52
|
+
require_relative 'durable_workflow/core/executors/registry'
|
|
53
|
+
require_relative 'durable_workflow/core/executors/base'
|
|
54
|
+
|
|
55
|
+
# Load all core executors
|
|
56
|
+
Dir[File.join(__dir__, 'durable_workflow/core/executors/*.rb')].each { |f| require f }
|
|
57
|
+
|
|
58
|
+
require_relative 'durable_workflow/core/engine'
|
|
59
|
+
|
|
60
|
+
# Storage (no default - must be configured)
|
|
61
|
+
require_relative 'durable_workflow/storage/store'
|
|
62
|
+
|
|
63
|
+
# Runners
|
|
64
|
+
require_relative 'durable_workflow/runners/sync'
|
|
65
|
+
require_relative 'durable_workflow/runners/async'
|
|
66
|
+
require_relative 'durable_workflow/runners/stream'
|
|
67
|
+
require_relative 'durable_workflow/runners/adapters/inline'
|
|
68
|
+
|
|
69
|
+
# Extensions (base only - specific extensions loaded separately)
|
|
70
|
+
require_relative 'durable_workflow/extensions/base'
|