durable_workflow 0.1.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.claude/todo/01.amend.md +133 -0
- data/.claude/todo/02.amend.md +444 -0
- data/.claude/todo/phase-1-core/01-GEMSPEC.md +193 -0
- data/.claude/todo/phase-1-core/02-TYPES.md +462 -0
- data/.claude/todo/phase-1-core/03-EXECUTION.md +551 -0
- data/.claude/todo/phase-1-core/04-STEPS.md +603 -0
- data/.claude/todo/phase-1-core/05-PARSER.md +719 -0
- data/.claude/todo/phase-1-core/todo.md +574 -0
- data/.claude/todo/phase-2-runtime/01-STORAGE.md +641 -0
- data/.claude/todo/phase-2-runtime/02-RUNNERS.md +511 -0
- data/.claude/todo/phase-3-extensions/01-EXTENSION-SYSTEM.md +298 -0
- data/.claude/todo/phase-3-extensions/02-AI-PLUGIN.md +936 -0
- data/.claude/todo/phase-3-extensions/todo.md +262 -0
- data/.claude/todo/phase-4-ai-rework/01-DEPENDENCIES.md +107 -0
- data/.claude/todo/phase-4-ai-rework/02-CONFIGURATION.md +123 -0
- data/.claude/todo/phase-4-ai-rework/03-TOOL-REGISTRY.md +237 -0
- data/.claude/todo/phase-4-ai-rework/04-MCP-SERVER.md +432 -0
- data/.claude/todo/phase-4-ai-rework/05-MCP-CLIENT.md +333 -0
- data/.claude/todo/phase-4-ai-rework/06-EXECUTORS.md +397 -0
- data/.claude/todo/phase-4-ai-rework/todo.md +265 -0
- data/.claude/todo/phase-5-validation/.DS_Store +0 -0
- data/.claude/todo/phase-5-validation/01-TEST-GAPS.md +615 -0
- data/.claude/todo/phase-5-validation/01-TESTS.md +2378 -0
- data/.claude/todo/phase-5-validation/02-EXAMPLES-SIMPLE.md +744 -0
- data/.claude/todo/phase-5-validation/02-EXAMPLES.md +1857 -0
- data/.claude/todo/phase-5-validation/03-EXAMPLE-SUPPORT-AGENT.md +95 -0
- data/.claude/todo/phase-5-validation/04-EXAMPLE-ORDER-FULFILLMENT.md +94 -0
- data/.claude/todo/phase-5-validation/05-EXAMPLE-DATA-PIPELINE.md +145 -0
- data/.env.example +3 -0
- data/.rubocop.yml +64 -0
- data/0.3.amend.md +89 -0
- data/CHANGELOG.md +5 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/Gemfile +22 -0
- data/Gemfile.lock +192 -0
- data/LICENSE.txt +21 -0
- data/README.md +39 -0
- data/Rakefile +16 -0
- data/durable_workflow.gemspec +43 -0
- data/examples/approval_request.rb +106 -0
- data/examples/calculator.rb +154 -0
- data/examples/file_search_demo.rb +77 -0
- data/examples/hello_workflow.rb +57 -0
- data/examples/item_processor.rb +96 -0
- data/examples/order_fulfillment/Gemfile +6 -0
- data/examples/order_fulfillment/README.md +84 -0
- data/examples/order_fulfillment/run.rb +85 -0
- data/examples/order_fulfillment/services.rb +146 -0
- data/examples/order_fulfillment/workflow.yml +188 -0
- data/examples/parallel_fetch.rb +102 -0
- data/examples/service_integration.rb +137 -0
- data/examples/support_agent/Gemfile +6 -0
- data/examples/support_agent/README.md +91 -0
- data/examples/support_agent/config/claude_desktop.json +12 -0
- data/examples/support_agent/mcp_server.rb +49 -0
- data/examples/support_agent/run.rb +67 -0
- data/examples/support_agent/services.rb +113 -0
- data/examples/support_agent/workflow.yml +286 -0
- data/lib/durable_workflow/core/condition.rb +45 -0
- data/lib/durable_workflow/core/engine.rb +145 -0
- data/lib/durable_workflow/core/executors/approval.rb +51 -0
- data/lib/durable_workflow/core/executors/assign.rb +18 -0
- data/lib/durable_workflow/core/executors/base.rb +90 -0
- data/lib/durable_workflow/core/executors/call.rb +76 -0
- data/lib/durable_workflow/core/executors/end.rb +19 -0
- data/lib/durable_workflow/core/executors/halt.rb +24 -0
- data/lib/durable_workflow/core/executors/loop.rb +118 -0
- data/lib/durable_workflow/core/executors/parallel.rb +77 -0
- data/lib/durable_workflow/core/executors/registry.rb +34 -0
- data/lib/durable_workflow/core/executors/router.rb +26 -0
- data/lib/durable_workflow/core/executors/start.rb +61 -0
- data/lib/durable_workflow/core/executors/transform.rb +71 -0
- data/lib/durable_workflow/core/executors/workflow.rb +32 -0
- data/lib/durable_workflow/core/parser.rb +189 -0
- data/lib/durable_workflow/core/resolver.rb +61 -0
- data/lib/durable_workflow/core/schema_validator.rb +47 -0
- data/lib/durable_workflow/core/types/base.rb +41 -0
- data/lib/durable_workflow/core/types/condition.rb +25 -0
- data/lib/durable_workflow/core/types/configs.rb +103 -0
- data/lib/durable_workflow/core/types/entry.rb +26 -0
- data/lib/durable_workflow/core/types/results.rb +41 -0
- data/lib/durable_workflow/core/types/state.rb +95 -0
- data/lib/durable_workflow/core/types/step_def.rb +15 -0
- data/lib/durable_workflow/core/types/workflow_def.rb +43 -0
- data/lib/durable_workflow/core/types.rb +29 -0
- data/lib/durable_workflow/core/validator.rb +318 -0
- data/lib/durable_workflow/extensions/ai/ai.rb +149 -0
- data/lib/durable_workflow/extensions/ai/configuration.rb +41 -0
- data/lib/durable_workflow/extensions/ai/executors/agent.rb +150 -0
- data/lib/durable_workflow/extensions/ai/executors/file_search.rb +52 -0
- data/lib/durable_workflow/extensions/ai/executors/guardrail.rb +152 -0
- data/lib/durable_workflow/extensions/ai/executors/handoff.rb +33 -0
- data/lib/durable_workflow/extensions/ai/executors/mcp.rb +47 -0
- data/lib/durable_workflow/extensions/ai/mcp/adapter.rb +73 -0
- data/lib/durable_workflow/extensions/ai/mcp/client.rb +77 -0
- data/lib/durable_workflow/extensions/ai/mcp/rack_app.rb +66 -0
- data/lib/durable_workflow/extensions/ai/mcp/server.rb +122 -0
- data/lib/durable_workflow/extensions/ai/tool_registry.rb +63 -0
- data/lib/durable_workflow/extensions/ai/types.rb +213 -0
- data/lib/durable_workflow/extensions/ai.rb +6 -0
- data/lib/durable_workflow/extensions/base.rb +77 -0
- data/lib/durable_workflow/runners/adapters/inline.rb +42 -0
- data/lib/durable_workflow/runners/adapters/sidekiq.rb +69 -0
- data/lib/durable_workflow/runners/async.rb +100 -0
- data/lib/durable_workflow/runners/stream.rb +126 -0
- data/lib/durable_workflow/runners/sync.rb +40 -0
- data/lib/durable_workflow/storage/active_record.rb +148 -0
- data/lib/durable_workflow/storage/redis.rb +133 -0
- data/lib/durable_workflow/storage/sequel.rb +144 -0
- data/lib/durable_workflow/storage/store.rb +43 -0
- data/lib/durable_workflow/utils.rb +25 -0
- data/lib/durable_workflow/version.rb +5 -0
- data/lib/durable_workflow.rb +70 -0
- data/sig/durable_workflow.rbs +4 -0
- metadata +275 -0
data/lib/durable_workflow/core/executors/end.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class End < Base
+        FINISHED = '__FINISHED__'
+        Registry.register('end', self)
+
+        def call(state)
+          raw = config.result || state.ctx.dup
+          result = resolve(state, raw)
+          state = store(state, :result, result)
+          continue(state, next_step: FINISHED, output: result)
+        end
+      end
+    end
+  end
+end
data/lib/durable_workflow/core/executors/halt.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Halt < Base
+        Registry.register('halt', self)
+
+        def call(state)
+          extra_data = resolve(state, config.data)
+
+          halt(state,
+               data: {
+                 reason: resolve(state, config.reason) || 'Halted',
+                 halted_at: Time.now.iso8601,
+                 **extra_data
+               },
+               resume_step: config.resume_step || next_step,
+               prompt: resolve(state, config.reason))
+        end
+      end
+    end
+  end
+end
data/lib/durable_workflow/core/executors/loop.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Loop < Base
+        Registry.register('loop', self)
+        MAX_ITER = 100
+
+        def call(state)
+          config.over ? foreach_loop(state) : while_loop(state)
+        end
+
+        private
+
+        def foreach_loop(state)
+          collection = resolve(state, config.over)
+          raise ExecutionError, "Loop 'over' must be array" unless collection.is_a?(Array)
+
+          item_key = config.as
+          index_key = config.index_as
+          max = config.max
+          raise ExecutionError, "Collection exceeds max (#{max})" if collection.size > max
+
+          results = []
+          collection.each_with_index do |item, i|
+            state = store(state, item_key, item)
+            state = store(state, index_key, i)
+            outcome = execute_body(state)
+
+            # Bubble up halts
+            return outcome if outcome.result.is_a?(HaltResult)
+
+            state = outcome.state
+            results << outcome.result.output
+          end
+
+          state = cleanup(state, item_key, index_key)
+          state = store(state, config.output, results)
+          continue(state)
+        end
+
+        def while_loop(state)
+          cond = config.while
+          max = config.max
+          results = []
+          i = 0
+
+          while ConditionEvaluator.match?(state, cond)
+            i += 1
+            if i > max
+              return config.on_exhausted ? continue(state, next_step: config.on_exhausted) : raise(ExecutionError, 'Loop exceeded max')
+            end
+
+            state = store(state, :iteration, i)
+            outcome = execute_body(state)
+
+            # Bubble up halts
+            return outcome if outcome.result.is_a?(HaltResult)
+
+            state = outcome.state
+            results << outcome.result.output
+            break if state.ctx[:break_loop]
+          end
+
+          state = cleanup(state, :iteration, :break_loop)
+          state = store(state, config.output, results)
+          continue(state)
+        end
+
+        def execute_body(state)
+          body = config.do
+          result = nil
+
+          body.each do |step_def|
+            executor = Registry[step_def.type]
+            raise ExecutionError, "Unknown step type: #{step_def.type}" unless executor
+
+            start_time = Time.now
+            outcome = executor.new(step_def).call(state)
+            duration = ((Time.now - start_time) * 1000).to_i
+
+            record_nested_entry(state, step_def, outcome, duration)
+
+            # Bubble up halts
+            return outcome if outcome.result.is_a?(HaltResult)
+
+            state = outcome.state
+            result = outcome.result
+          end
+
+          StepOutcome.new(state:, result: result || ContinueResult.new)
+        end
+
+        def record_nested_entry(state, step_def, outcome, duration)
+          wf_store = DurableWorkflow.config&.store
+          return unless wf_store
+
+          wf_store.record(Entry.new(
+            id: SecureRandom.uuid,
+            execution_id: state.execution_id,
+            step_id: "#{step.id}:#{step_def.id}",
+            step_type: step_def.type,
+            action: outcome.result.is_a?(HaltResult) ? :halted : :completed,
+            duration_ms: duration,
+            output: outcome.result.output,
+            timestamp: Time.now
+          ))
+        end
+
+        def cleanup(state, *keys)
+          new_ctx = state.ctx.except(*keys)
+          state.with(ctx: new_ctx)
+        end
+      end
+    end
+  end
+end
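For orientation, the two loop modes above consume configs shaped roughly as follows. This is a hypothetical sketch: the field names come straight from the config.* reads in the executor, but the concrete values, the '$items' reference, and the condition operator are invented.

# Hypothetical config shapes implied by the loop executor above; values are illustrative.
foreach_cfg = {
  over: '$items',          # must resolve to an Array
  as: :item,               # ctx key holding the current element
  index_as: :index,        # ctx key holding the current index
  max: 100,                # hard cap on collection size
  output: :item_results,   # ctx key that receives the collected outputs
  do: []                   # nested step definitions, run once per element
}

while_cfg = {
  while: { field: 'retries', op: '...', value: 3 }, # operator vocabulary lives in ConditionEvaluator
  max: 10,
  on_exhausted: 'give_up', # jump target instead of raising once max is exceeded
  output: :attempts,
  do: []
}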
data/lib/durable_workflow/core/executors/parallel.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+begin
+  require 'async'
+  require 'async/barrier'
+rescue LoadError
+  # async gem not available - parallel executor will fail at runtime if used
+end
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Parallel < Base
+        Registry.register('parallel', self)
+
+        def call(state)
+          branches = config.branches
+          return continue(state) if branches.empty?
+
+          raise ExecutionError, "Parallel executor requires 'async' gem. Add it to your Gemfile." unless defined?(Async)
+
+          wait_mode = config.wait
+          required = case wait_mode
+                     when 'all' then branches.size
+                     when 'any' then 1
+                     when Integer then [wait_mode, branches.size].min
+                     else branches.size
+                     end
+
+          outcomes = Array.new(branches.size)
+          errors = []
+
+          Sync do
+            barrier = Async::Barrier.new
+
+            begin
+              branches.each_with_index do |branch, i|
+                barrier.async do
+                  executor = Registry[branch.type]
+                  raise ExecutionError, "Unknown branch type: #{branch.type}" unless executor
+
+                  outcomes[i] = executor.new(branch).call(state)
+                rescue StandardError => e
+                  errors << { branch: branch.id, error: e.message }
+                  outcomes[i] = nil
+                end
+              end
+
+              if wait_mode == 'any'
+                barrier.wait { break if outcomes.compact.size >= required }
+              else
+                barrier.wait
+              end
+            ensure
+              barrier.stop
+            end
+          end
+
+          raise ExecutionError, "Parallel failed: #{errors.size} errors" if wait_mode == 'all' && errors.any?
+          raise ExecutionError, 'Insufficient completions' if outcomes.compact.size < required
+
+          # Merge contexts from all branches
+          # Strategy: last-write-wins (branch processed later overwrites earlier values)
+          merged_ctx = outcomes.compact.reduce(state.ctx) do |ctx, outcome|
+            ctx.merge(outcome.state.ctx)
+          end
+
+          results = outcomes.map { _1&.result&.output }
+          final_state = state.with(ctx: merged_ctx)
+          final_state = store(final_state, config.output, results)
+
+          continue(final_state, output: results)
+        end
+      end
+    end
+  end
+end
data/lib/durable_workflow/core/executors/registry.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Registry
+        @executors = {}
+
+        class << self
+          def register(type, klass)
+            @executors[type.to_s] = klass
+          end
+
+          def [](type)
+            @executors[type.to_s]
+          end
+
+          def types
+            @executors.keys
+          end
+
+          def registered?(type)
+            @executors.key?(type.to_s)
+          end
+        end
+      end
+
+      # Convenience method for registration
+      def self.register(type)
+        ->(klass) { Registry.register(type, klass) }
+      end
+    end
+  end
+end
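The registry above is the extension point for custom step types. The sketch below follows the pattern the built-in executors use (register inside the class body, implement call(state)); the 'notify' type and its delivery logic are invented, and it assumes Base exposes the resolve/store/continue helpers seen in the other executors in this diff.

module MyApp
  class NotifyExecutor < DurableWorkflow::Core::Executors::Base
    DurableWorkflow::Core::Executors::Registry.register('notify', self)

    def call(state)
      # For a type without a registered config class, the parser leaves config as a plain Hash.
      message = resolve(state, config[:message])
      # ... deliver the message to your own service here ...
      state = store(state, :notified, true)
      continue(state, output: message)
    end
  end
end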
data/lib/durable_workflow/core/executors/router.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Router < Base
+        Registry.register('router', self)
+
+        def call(state)
+          routes = config.routes
+          default = config.default
+
+          route = ConditionEvaluator.find_route(state, routes)
+
+          if route
+            continue(state, next_step: route.target)
+          elsif default
+            continue(state, next_step: default)
+          else
+            raise ExecutionError, "No matching route and no default for '#{step.id}'"
+          end
+        end
+      end
+    end
+  end
+end
data/lib/durable_workflow/core/executors/start.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Start < Base
+        Registry.register('start', self)
+
+        def call(state)
+          validate_inputs!(state)
+          state = apply_defaults(state)
+          state = store(state, :input, state.input)
+          continue(state)
+        end
+
+        private
+
+        def workflow_inputs(state)
+          DurableWorkflow.registry[state.workflow_id]&.inputs || []
+        end
+
+        def validate_inputs!(state)
+          workflow_inputs(state).each do |input_def|
+            value = state.input[input_def.name.to_sym]
+
+            raise ValidationError, "Missing required input: #{input_def.name}" if input_def.required && value.nil?
+
+            next if value.nil?
+
+            validate_type!(input_def.name, value, input_def.type)
+          end
+        end
+
+        def validate_type!(name, value, type)
+          valid = case type
+                  when 'string' then value.is_a?(String)
+                  when 'integer' then value.is_a?(Integer)
+                  when 'number' then value.is_a?(Numeric)
+                  when 'boolean' then [true, false].include?(value)
+                  when 'array' then value.is_a?(Array)
+                  when 'object' then value.is_a?(Hash)
+                  else true
+                  end
+
+          raise ValidationError, "Input '#{name}' must be #{type}, got #{value.class}" unless valid
+        end
+
+        def apply_defaults(state)
+          updates = {}
+          workflow_inputs(state).each do |input_def|
+            key = input_def.name.to_sym
+            updates[key] = input_def.default if state.input[key].nil? && !input_def.default.nil?
+          end
+          return state if updates.empty?
+
+          state.with(input: state.input.merge(updates))
+        end
+      end
+    end
+  end
+end
data/lib/durable_workflow/core/executors/transform.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class Transform < Base
+        Registry.register('transform', self)
+
+        OPS = {
+          'map' => lambda { |d, a|
+            if d.is_a?(Array)
+              d.map { |i| a.is_a?(String) ? Transform.dig(i, a) : i }
+            else
+              d
+            end
+          },
+          'select' => ->(d, a) { d.is_a?(Array) ? d.select { |i| Transform.match?(i, a) } : d },
+          'reject' => ->(d, a) { d.is_a?(Array) ? d.reject { |i| Transform.match?(i, a) } : d },
+          'pluck' => ->(d, a) { d.is_a?(Array) ? d.map { |i| Transform.dig(i, a) } : d },
+          'first' => ->(d, a) { d.is_a?(Array) ? d.first(a || 1) : d },
+          'last' => ->(d, a) { d.is_a?(Array) ? d.last(a || 1) : d },
+          'flatten' => ->(d, a) { d.is_a?(Array) ? d.flatten(a || 1) : d },
+          'compact' => ->(d, _) { d.is_a?(Array) ? d.compact : d },
+          'uniq' => ->(d, _) { d.is_a?(Array) ? d.uniq : d },
+          'reverse' => ->(d, _) { d.is_a?(Array) ? d.reverse : d },
+          'sort' => lambda { |d, a|
+            if d.is_a?(Array)
+              a ? d.sort_by { |i| Transform.dig(i, a) } : d.sort
+            else
+              d
+            end
+          },
+          'count' => ->(d, _) { d.respond_to?(:size) ? d.size : 1 },
+          'sum' => lambda { |d, a|
+            if d.is_a?(Array)
+              a ? d.sum { |i| Transform.dig(i, a).to_f } : d.sum(&:to_f)
+            else
+              d
+            end
+          },
+          'keys' => ->(d, _) { d.is_a?(Hash) ? d.keys : [] },
+          'values' => ->(d, _) { d.is_a?(Hash) ? d.values : [] },
+          'pick' => ->(d, a) { d.is_a?(Hash) ? d.slice(*Array(a).map(&:to_sym)) : d },
+          'omit' => ->(d, a) { d.is_a?(Hash) ? d.except(*Array(a).map(&:to_sym)) : d },
+          'merge' => ->(d, a) { d.is_a?(Hash) && a.is_a?(Hash) ? d.merge(a) : d }
+        }.freeze
+
+        def call(state)
+          input = config.input ? resolve(state, "$#{config.input}") : state.ctx.dup
+          expr = config.expression
+
+          result = expr.reduce(input) do |data, (op, arg)|
+            OPS.fetch(op.to_s) { ->(d, _) { d } }.call(data, arg)
+          end
+
+          state = store(state, config.output, result)
+          continue(state, output: result)
+        end
+
+        def self.dig(obj, key)
+          keys = key.to_s.split('.')
+          keys.reduce(obj) { |o, k| o.is_a?(Hash) ? Utils.fetch(o, k) : nil }
+        end
+
+        def self.match?(obj, conditions)
+          conditions.all? { |k, v| dig(obj, k) == v }
+        end
+      end
+    end
+  end
+end
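To make the reduce in Transform#call concrete, here is a standalone sketch of the same pipeline idea over invented data. It inlines simplified versions of select/pluck/sum with plain hash access in place of Transform.dig/Utils.fetch, so it runs without the gem loaded; the data and expression are made up.

orders = [
  { status: 'paid',    total: 30 },
  { status: 'pending', total: 10 },
  { status: 'paid',    total: 5 }
]

expression = { select: { status: 'paid' }, pluck: :total, sum: nil }

# Each [op, arg] pair is applied in order to the running result.
result = expression.reduce(orders) do |data, (op, arg)|
  case op.to_s
  when 'select' then data.select { |i| arg.all? { |k, v| i[k] == v } }
  when 'pluck'  then data.map { |i| i[arg] }
  when 'sum'    then data.sum(&:to_f)
  else data # unknown ops fall through unchanged, mirroring OPS.fetch's default
  end
end

result # => 35.0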
data/lib/durable_workflow/core/executors/workflow.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module DurableWorkflow
+  module Core
+    module Executors
+      class SubWorkflow < Base
+        Registry.register('workflow', self)
+
+        def call(state)
+          child_wf = DurableWorkflow.registry[config.workflow_id]
+          raise ExecutionError, "Workflow not found: #{config.workflow_id}" unless child_wf
+
+          input = resolve(state, config.input) || {}
+
+          result = with_timeout(config.timeout) do
+            Engine.new(child_wf, store: DurableWorkflow.config&.store).run(input:)
+          end
+
+          case result.status
+          when :completed
+            state = store(state, config.output, result.output)
+            continue(state, output: result.output)
+          when :halted
+            halt(state, data: result.halt.data, resume_step: step.id, prompt: result.halt.prompt)
+          when :failed
+            raise ExecutionError, "Sub-workflow failed: #{result.error}"
+          end
+        end
+      end
+    end
+  end
+end
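SubWorkflow above also shows the Engine contract by example: run(input:) returns a result exposing status, output, halt, and error. A hedged sketch of driving a registered workflow directly the same way; the 'order_fulfillment' id and the input hash are illustrative.

# Look up a registered workflow and run it with the configured store.
wf = DurableWorkflow.registry['order_fulfillment']
engine = DurableWorkflow::Core::Engine.new(wf, store: DurableWorkflow.config&.store)
result = engine.run(input: { order_id: 42 })

case result.status
when :completed then result.output
when :halted    then result.halt.prompt # paused, e.g. awaiting approval; resume later
when :failed    then raise result.error.to_s
end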
data/lib/durable_workflow/core/parser.rb
@@ -0,0 +1,189 @@
+# frozen_string_literal: true
+
+require 'yaml'
+
+module DurableWorkflow
+  module Core
+    class Parser
+      # Hook system for extensions
+      @before_hooks = []
+      @after_hooks = []
+      @config_transformers = {}
+
+      class << self
+        attr_reader :before_hooks, :after_hooks, :config_transformers
+
+        def parse(source)
+          new.parse(source)
+        end
+
+        # Register a before-parse hook (receives raw YAML hash)
+        def before_parse(&block)
+          @before_hooks << block
+        end
+
+        # Register an after-parse hook (receives WorkflowDef, can return modified)
+        def after_parse(&block)
+          @after_hooks << block
+        end
+
+        # Register a config transformer for a step type
+        def transform_config(type, &block)
+          @config_transformers[type.to_s] = block
+        end
+
+        # Reset hooks (for testing)
+        def reset_hooks!
+          @before_hooks = []
+          @after_hooks = []
+          @config_transformers = {}
+        end
+      end
+
+      def parse(source)
+        yaml = load_yaml(source)
+
+        # Run before hooks
+        self.class.before_hooks.each { |hook| yaml = hook.call(yaml) || yaml }
+
+        workflow = build_workflow(yaml)
+
+        # Run after hooks - pass both workflow and raw yaml for extension data
+        self.class.after_hooks.each { |hook| workflow = hook.call(workflow, yaml) || workflow }
+
+        workflow
+      end
+
+      private
+
+      def load_yaml(source)
+        raw = case source
+              when Hash then source
+              when String
+                source.include?("\n") ? YAML.safe_load(source) : YAML.load_file(source)
+              else
+                raise Error, "Invalid source: #{source.class}"
+              end
+        DurableWorkflow::Utils.deep_symbolize(raw)
+      end
+
+      def build_workflow(y)
+        WorkflowDef.new(
+          id: y.fetch(:id),
+          name: y.fetch(:name),
+          version: y[:version],
+          description: y[:description],
+          timeout: y[:timeout],
+          inputs: parse_inputs(y[:inputs]),
+          steps: parse_steps(y.fetch(:steps)),
+          extensions: {} # Extensions populate this via after_parse hooks
+        )
+      end
+
+      def parse_inputs(inputs)
+        return [] unless inputs
+
+        inputs.map do |name, cfg|
+          cfg ||= {}
+          InputDef.new(
+            name: name.to_s,
+            type: cfg[:type],
+            required: cfg.fetch(:required, true),
+            default: cfg[:default],
+            description: cfg[:description]
+          )
+        end
+      end
+
+      def parse_steps(steps)
+        steps.map { parse_step(_1) }
+      end
+
+      def parse_step(s)
+        type = s.fetch(:type)
+        raw_config = extract_config(s)
+        config = build_typed_config(type, raw_config)
+
+        StepDef.new(
+          id: s.fetch(:id),
+          type:,
+          config:,
+          next_step: s[:next],
+          on_error: s[:on_error]
+        )
+      rescue Dry::Struct::Error => e
+        raise ValidationError, "Invalid config for step '#{s[:id]}': #{e.message}"
+      end
+
+      def build_typed_config(type, raw_config)
+        # Check for extension transformer first
+        if (transformer = self.class.config_transformers[type])
+          raw_config = transformer.call(raw_config)
+        end
+
+        # Find config class from registry
+        config_class = Core.config_registry[type]
+
+        config_class ? config_class.new(raw_config) : raw_config
+      end
+
+      def extract_config(s)
+        base = s.except(:id, :type, :next, :on_error)
+
+        case s[:type]
+        when 'call'
+          # Rename method -> method_name to avoid collision with Ruby's Object#method
+          base[:method_name] = base.delete(:method) if base.key?(:method)
+          # Handle output with schema
+          base[:output] = parse_output(base[:output]) if base[:output]
+        when 'router'
+          base[:routes] = parse_routes(base[:routes])
+        when 'loop'
+          base[:while] = parse_condition(base[:while]) if base[:while]
+          base[:do] = base[:do]&.map { parse_step(_1) }
+        when 'parallel'
+          base[:branches] = base[:branches]&.map { parse_step(_1) }
+        end
+
+        base
+      end
+
+      def parse_output(output)
+        case output
+        when Hash
+          if output.key?(:key) || output.key?(:schema)
+            OutputConfig.new(
+              key: output[:key] || output[:name],
+              schema: output[:schema]
+            )
+          else
+            output
+          end
+        when String, Symbol
+          output.to_sym
+        else
+          output
+        end
+      end
+
+      def parse_routes(routes)
+        return [] unless routes
+
+        routes.map do |r|
+          Route.new(
+            field: r.dig(:when, :field),
+            op: r.dig(:when, :op),
+            value: r.dig(:when, :value),
+            target: r[:then]
+          )
+        end
+      end
+
+      def parse_condition(c)
+        return nil unless c
+
+        Condition.new(field: c[:field], op: c[:op], value: c[:value])
+      end
+    end
+  end
+end
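The hook API above is how extensions plug into parsing. A minimal sketch, assuming the gem is loaded; the default-version tweak and the 'notify' config normalisation are invented examples, not part of the package.

# before_parse sees the raw symbolized hash; the hook's return value replaces it
# (nil keeps the, possibly mutated, original).
DurableWorkflow::Core::Parser.before_parse do |yaml|
  yaml[:version] ||= '1'
  yaml
end

# transform_config rewrites a step type's raw config before any typed struct is built.
DurableWorkflow::Core::Parser.transform_config('notify') do |cfg|
  { channel: '#general' }.merge(cfg)
end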