floe 0.14.0 → 0.15.1
- checksums.yaml +4 -4
- data/.codeclimate.yml +21 -1
- data/CHANGELOG.md +26 -1
- data/README.md +8 -0
- data/examples/map.asl +46 -0
- data/examples/parallel.asl +32 -0
- data/lib/floe/cli.rb +46 -13
- data/lib/floe/container_runner/docker.rb +4 -4
- data/lib/floe/container_runner/kubernetes.rb +4 -1
- data/lib/floe/logging.rb +5 -1
- data/lib/floe/runner.rb +0 -2
- data/lib/floe/version.rb +1 -1
- data/lib/floe/workflow/branch.rb +8 -0
- data/lib/floe/workflow/choice_rule/data.rb +90 -43
- data/lib/floe/workflow/context.rb +5 -1
- data/lib/floe/workflow/item_processor.rb +14 -0
- data/lib/floe/workflow/state.rb +11 -4
- data/lib/floe/workflow/states/child_workflow_mixin.rb +57 -0
- data/lib/floe/workflow/states/choice.rb +5 -6
- data/lib/floe/workflow/states/map.rb +116 -2
- data/lib/floe/workflow/states/parallel.rb +65 -2
- data/lib/floe/workflow/states/retry_catch_mixin.rb +57 -0
- data/lib/floe/workflow/states/task.rb +3 -49
- data/lib/floe/workflow.rb +12 -35
- data/lib/floe/workflow_base.rb +108 -0
- data/lib/floe.rb +9 -2
- data/tools/step_functions +110 -0
- metadata +10 -3
- data/sig/floe.rbs +0 -4
data/lib/floe/workflow/states/child_workflow_mixin.rb
ADDED
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Floe
+  class Workflow
+    module States
+      module ChildWorkflowMixin
+        def run_nonblock!(context)
+          start(context) unless context.state_started?
+          step_nonblock!(context)
+          return Errno::EAGAIN unless ready?(context)
+
+          finish(context) if ended?(context)
+        end
+
+        def finish(context)
+          if success?(context)
+            result = each_child_context(context).map(&:output)
+            context.output = process_output(context, result)
+          else
+            error = parse_error(context)
+            retry_state!(context, error) || catch_error!(context, error) || fail_workflow!(context, error)
+          end
+
+          super
+        end
+
+        def ready?(context)
+          !context.state_started? || each_child_workflow(context).any? { |wf, ctx| wf.step_nonblock_ready?(ctx) }
+        end
+
+        def wait_until(context)
+          each_child_workflow(context).filter_map { |wf, ctx| wf.wait_until(ctx) }.min
+        end
+
+        def waiting?(context)
+          each_child_workflow(context).any? { |wf, ctx| wf.waiting?(ctx) }
+        end
+
+        def running?(context)
+          !ended?(context)
+        end
+
+        def ended?(context)
+          each_child_context(context).all?(&:ended?)
+        end
+
+        def success?(context)
+          each_child_context(context).none?(&:failed?)
+        end
+
+        def each_child_context(context)
+          context.state[child_context_key].map { |ctx| Context.new(ctx) }
+        end
+      end
+    end
+  end
+end
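For orientation, the mixin above drives a set of child workflows through `run_nonblock!`/`finish` and expects the including state to supply three hooks: `child_context_key` (where child contexts live in `context.state`), `each_child_workflow` (the `[workflow, context]` pairs to step), and `parse_error`. A minimal, hypothetical sketch of that contract follows; the `FanOut` class and its `child_workflow_for` helper are not part of the gem (Map and Parallel further down are the real implementations).

```ruby
# Hypothetical state sketching the ChildWorkflowMixin contract.
module Floe
  class Workflow
    module States
      class FanOut < Floe::Workflow::State
        include ChildWorkflowMixin
        include InputOutputMixin  # finish() calls process_output
        include RetryCatchMixin   # finish() calls retry_state!/catch_error!/fail_workflow!

        private

        # Key under context.state holding this state's child contexts.
        def child_context_key
          "FanOutContext"
        end

        # [child_workflow, child_context] pairs for the mixin to step.
        # child_workflow_for is a made-up helper standing in for whatever
        # the state uses to build its children (e.g. an ItemProcessor).
        def each_child_workflow(context)
          each_child_context(context).map { |ctx| [child_workflow_for(ctx), ctx] }
        end

        # Error surfaced when one of the children fails.
        def parse_error(context)
          each_child_context(context).detect(&:failed?)&.output || {"Error" => "States.Error"}
        end
      end
    end
  end
end
```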
data/lib/floe/workflow/states/choice.rb
CHANGED
@@ -9,10 +9,9 @@ module Floe
       def initialize(workflow, name, payload)
         super
 
-
-
-        @choices = payload["Choices"].map.with_index { |choice, i| ChoiceRule.build(workflow, name + ["Choices", i.to_s], choice) }
+        @choices = payload["Choices"]&.map&.with_index { |choice, i| ChoiceRule.build(workflow, name + ["Choices", i.to_s], choice) }
         @default = payload["Default"]
+        validate_state!(workflow)
 
         @input_path = Path.new(payload.fetch("InputPath", "$"))
         @output_path = Path.new(payload.fetch("OutputPath", "$"))
@@ -45,12 +44,12 @@ module Floe
       end
 
       def validate_state_choices!
-        missing_field_error!("Choices")
-        invalid_field_error!("Choices", nil, "must be a non-empty array") unless
+        missing_field_error!("Choices") if @choices.nil?
+        invalid_field_error!("Choices", nil, "must be a non-empty array") unless @choices.kind_of?(Array) && !@choices.empty?
       end
 
       def validate_state_default!(workflow)
-        invalid_field_error!("Default",
+        invalid_field_error!("Default", @default, "is not found in \"States\"") if @default && !workflow_state?(@default, workflow)
       end
     end
   end
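As a quick illustration of what the tightened validation above now expects (the payload is illustrative, not taken from the gem's examples): `Choices` must be a non-empty array, and `Default`, if given, must name a state defined under the workflow's `States`.

```ruby
# Illustrative Choice state payload; "Approved" and "Rejected" are assumed
# to exist elsewhere in the workflow's "States" section.
choice_payload = {
  "Type"    => "Choice",
  "Choices" => [
    {"Variable" => "$.status", "StringEquals" => "approved", "Next" => "Approved"}
  ],
  "Default" => "Rejected"
}
```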
data/lib/floe/workflow/states/map.rb
CHANGED
@@ -4,9 +4,123 @@ module Floe
   class Workflow
     module States
       class Map < Floe::Workflow::State
-
+        include ChildWorkflowMixin
+        include InputOutputMixin
+        include NonTerminalMixin
+        include RetryCatchMixin
+
+        attr_reader :end, :next, :parameters, :input_path, :output_path, :result_path,
+                    :result_selector, :retry, :catch, :item_processor, :items_path,
+                    :item_reader, :item_selector, :item_batcher, :result_writer,
+                    :max_concurrency, :tolerated_failure_percentage, :tolerated_failure_count
+
+        def initialize(workflow, name, payload)
+          super
+
+          missing_field_error!("InputProcessor") if payload["ItemProcessor"].nil?
+
+          @next = payload["Next"]
+          @end = !!payload["End"]
+          @parameters = PayloadTemplate.new(payload["Parameters"]) if payload["Parameters"]
+          @input_path = Path.new(payload.fetch("InputPath", "$"))
+          @output_path = Path.new(payload.fetch("OutputPath", "$"))
+          @result_path = ReferencePath.new(payload.fetch("ResultPath", "$"))
+          @result_selector = PayloadTemplate.new(payload["ResultSelector"]) if payload["ResultSelector"]
+          @retry = payload["Retry"].to_a.map { |retrier| Retrier.new(retrier) }
+          @catch = payload["Catch"].to_a.map { |catcher| Catcher.new(catcher) }
+          @item_processor = ItemProcessor.new(payload["ItemProcessor"], name)
+          @items_path = ReferencePath.new(payload.fetch("ItemsPath", "$"))
+          @item_reader = payload["ItemReader"]
+          @item_selector = payload["ItemSelector"]
+          @item_batcher = payload["ItemBatcher"]
+          @result_writer = payload["ResultWriter"]
+          @max_concurrency = payload["MaxConcurrency"]&.to_i
+          @tolerated_failure_percentage = payload["ToleratedFailurePercentage"]&.to_i
+          @tolerated_failure_count = payload["ToleratedFailureCount"]&.to_i
+
+          validate_state!(workflow)
+        end
+
+        def process_input(context)
+          input = super
+          items_path.value(context, input)
+        end
+
+        def start(context)
           super
-
+
+          input = process_input(context)
+
+          context.state["ItemProcessorContext"] = input.map { |item| Context.new({"Execution" => {"Id" => context.execution["Id"]}}, :input => item.to_json).to_h }
+        end
+
+        def end?
+          @end
+        end
+
+        def success?(context)
+          contexts = each_child_context(context)
+          num_failed = contexts.count(&:failed?)
+          total = contexts.count
+
+          return true if num_failed.zero? || total.zero?
+          return false if tolerated_failure_count.nil? && tolerated_failure_percentage.nil?
+
+          # Some have failed, check the tolerated_failure thresholds to see if
+          # we should fail the whole state.
+          #
+          # If either ToleratedFailureCount or ToleratedFailurePercentage are breached
+          # then the whole state is considered failed.
+          count_tolerated = tolerated_failure_count.nil? || num_failed < tolerated_failure_count
+          pct_tolerated = tolerated_failure_percentage.nil? || tolerated_failure_percentage == 100 ||
+                          ((100 * num_failed / total.to_f) < tolerated_failure_percentage)
+
+          count_tolerated && pct_tolerated
+        end
+
+        private
+
+        def step_nonblock!(context)
+          each_child_context(context).each do |ctx|
+            # If this iteration isn't already running and we can't start any more
+            next if !ctx.started? && concurrency_exceeded?(context)
+
+            item_processor.run_nonblock(ctx) if item_processor.step_nonblock_ready?(ctx)
+          end
+        end
+
+        def each_child_workflow(context)
+          each_child_context(context).map do |ctx|
+            [item_processor, Context.new(ctx)]
+          end
+        end
+
+        def concurrency_exceeded?(context)
+          max_concurrency && num_running(context) >= max_concurrency
+        end
+
+        def num_running(context)
+          each_child_context(context).count(&:running?)
+        end
+
+        def parse_error(context)
+          # If ToleratedFailureCount or ToleratedFailurePercentage is present
+          # then use States.ExceedToleratedFailureThreshold otherwise
+          # take the error from the first failed state
+          if tolerated_failure_count || tolerated_failure_percentage
+            {"Error" => "States.ExceedToleratedFailureThreshold"}
+          else
+            each_child_context(context).detect(&:failed?)&.output || {"Error" => "States.Error"}
+          end
+        end
+
+        def child_context_key
+          "ItemProcessorContext"
+        end
+
+        def validate_state!(workflow)
+          validate_state_next!(workflow)
+          invalid_field_error!("MaxConcurrency", @max_concurrency, "must be greater than 0") if @max_concurrency && @max_concurrency <= 0
         end
       end
     end
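To make the tolerated-failure logic in `Map#success?` concrete, here is a standalone rendition of the same arithmetic with made-up numbers: 10 items, 2 failures, no `ToleratedFailureCount`, and a `ToleratedFailurePercentage` of 25 still counts as success because only 20% failed.

```ruby
# Standalone sketch of Map#success?'s threshold check (numbers are made up).
num_failed = 2
total      = 10
tolerated_failure_count      = nil
tolerated_failure_percentage = 25

count_tolerated = tolerated_failure_count.nil? || num_failed < tolerated_failure_count
pct_tolerated   = tolerated_failure_percentage.nil? || tolerated_failure_percentage == 100 ||
                  ((100 * num_failed / total.to_f) < tolerated_failure_percentage)

puts count_tolerated && pct_tolerated # => true (20% failed, below the 25% threshold)
```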
data/lib/floe/workflow/states/parallel.rb
CHANGED
@@ -4,9 +4,72 @@ module Floe
   class Workflow
     module States
       class Parallel < Floe::Workflow::State
-
+        include ChildWorkflowMixin
+        include InputOutputMixin
+        include NonTerminalMixin
+        include RetryCatchMixin
+
+        attr_reader :end, :next, :parameters, :input_path, :output_path, :result_path,
+                    :result_selector, :retry, :catch, :branches
+
+        def initialize(workflow, name, payload)
+          super
+
+          missing_field_error!("Branches") if payload["Branches"].nil?
+
+          @next = payload["Next"]
+          @end = !!payload["End"]
+          @parameters = PayloadTemplate.new(payload["Parameters"]) if payload["Parameters"]
+          @input_path = Path.new(payload.fetch("InputPath", "$"))
+          @output_path = Path.new(payload.fetch("OutputPath", "$"))
+          @result_path = ReferencePath.new(payload.fetch("ResultPath", "$"))
+          @result_selector = PayloadTemplate.new(payload["ResultSelector"]) if payload["ResultSelector"]
+          @retry = payload["Retry"].to_a.map { |retrier| Retrier.new(retrier) }
+          @catch = payload["Catch"].to_a.map { |catcher| Catcher.new(catcher) }
+          @branches = payload["Branches"].map { |branch| Branch.new(branch) }
+
+          validate_state!(workflow)
+        end
+
+        def start(context)
           super
-
+
+          input = process_input(context)
+
+          context.state["BranchContext"] = branches.map { |_branch| Context.new({"Execution" => {"Id" => context.execution["Id"]}}, :input => input.to_json).to_h }
+        end
+
+        def end?
+          @end
+        end
+
+        private
+
+        def step_nonblock!(context)
+          each_child_workflow(context).each do |wf, ctx|
+            wf.run_nonblock(ctx) if wf.step_nonblock_ready?(ctx)
+          end
+        end
+
+        def each_child_workflow(context)
+          branches.filter_map.with_index do |branch, i|
+            ctx = context.state.dig("BranchContext", i)
+            next if ctx.nil?
+
+            [branch, Context.new(ctx)]
+          end
+        end
+
+        def parse_error(context)
+          each_child_context(context).detect(&:failed?)&.output || {"Error" => "States.Error"}
+        end
+
+        def child_context_key
+          "BranchContext"
+        end
+
+        def validate_state!(workflow)
+          validate_state_next!(workflow)
         end
       end
     end
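For reference, a hedged sketch of the kind of payload the new Parallel state consumes: each entry in `Branches` is a small state machine (wrapped in the new `Branch` class), and every branch is seeded with the same input via `BranchContext`. The branch and state names below are illustrative.

```ruby
# Illustrative Parallel state payload with two branches.
parallel_payload = {
  "Type"     => "Parallel",
  "End"      => true,
  "Branches" => [
    {
      "StartAt" => "Greet",
      "States"  => {"Greet" => {"Type" => "Pass", "Result" => "hello", "End" => true}}
    },
    {
      "StartAt" => "Answer",
      "States"  => {"Answer" => {"Type" => "Pass", "Result" => 42, "End" => true}}
    }
  ]
}
```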
data/lib/floe/workflow/states/retry_catch_mixin.rb
ADDED
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+module Floe
+  class Workflow
+    module States
+      module RetryCatchMixin
+        def find_retrier(error)
+          self.retry.detect { |r| r.match_error?(error) }
+        end
+
+        def find_catcher(error)
+          self.catch.detect { |c| c.match_error?(error) }
+        end
+
+        def retry_state!(context, error)
+          retrier = find_retrier(error["Error"]) if error
+          return if retrier.nil?
+
+          # If a different retrier is hit reset the context
+          if !context["State"].key?("RetryCount") || context["State"]["Retrier"] != retrier.error_equals
+            context["State"]["RetryCount"] = 0
+            context["State"]["Retrier"] = retrier.error_equals
+          end
+
+          context["State"]["RetryCount"] += 1
+
+          return if context["State"]["RetryCount"] > retrier.max_attempts
+
+          wait_until!(context, :seconds => retrier.sleep_duration(context["State"]["RetryCount"]))
+          context.next_state = context.state_name
+          context.output = error
+          context.logger.info("Running state: [#{long_name}] with input [#{context.json_input}] got error[#{context.json_output}]...Retry - delay: #{wait_until(context)}")
+          true
+        end
+
+        def catch_error!(context, error)
+          catcher = find_catcher(error["Error"]) if error
+          return if catcher.nil?
+
+          context.next_state = catcher.next
+          context.output = catcher.result_path.set(context.input, error)
+          context.logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...CatchError - next state: [#{context.next_state}] output: [#{context.json_output}]")
+
+          true
+        end
+
+        def fail_workflow!(context, error)
+          # next_state is nil, and will be set to nil again in super
+          # keeping in here for completeness
+          context.next_state = nil
+          context.output = error
+          context.logger.error("Running state: [#{long_name}] with input [#{context.json_input}]...Complete workflow - output: [#{context.json_output}]")
+        end
+      end
+    end
+  end
+end
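The extracted mixin reads standard ASL `Retry`/`Catch` definitions: retriers are matched by `ErrorEquals` and applied via `max_attempts`/`sleep_duration`, while catchers route the error to their `Next` state through `result_path`. A hedged sketch of such a Task fragment (the resource and state names are made up):

```ruby
# Illustrative Task-state payload exercising the Retry/Catch handling above.
task_payload = {
  "Type"     => "Task",
  "Resource" => "docker://example-image:latest",
  "Retry"    => [
    {"ErrorEquals" => ["States.Timeout"], "MaxAttempts" => 2, "IntervalSeconds" => 1}
  ],
  "Catch"    => [
    {"ErrorEquals" => ["States.ALL"], "Next" => "HandleFailure", "ResultPath" => "$.error"}
  ],
  "Next"     => "AfterTask"
}
```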
data/lib/floe/workflow/states/task.rb
CHANGED
@@ -6,6 +6,7 @@ module Floe
       class Task < Floe::Workflow::State
         include InputOutputMixin
         include NonTerminalMixin
+        include RetryCatchMixin
 
         attr_reader :credentials, :end, :heartbeat_seconds, :next, :parameters,
                     :result_selector, :resource, :timeout_seconds, :retry, :catch,
@@ -60,7 +61,8 @@ module Floe
         end
 
         def running?(context)
-          return true
+          return true if waiting?(context)
+          return false if finished?(context)
 
           runner.status!(context.state["RunnerContext"])
           runner.running?(context.state["RunnerContext"])
@@ -82,54 +84,6 @@ module Floe
           runner.success?(context.state["RunnerContext"])
         end
 
-        def find_retrier(error)
-          self.retry.detect { |r| r.match_error?(error) }
-        end
-
-        def find_catcher(error)
-          self.catch.detect { |c| c.match_error?(error) }
-        end
-
-        def retry_state!(context, error)
-          retrier = find_retrier(error["Error"]) if error
-          return if retrier.nil?
-
-          # If a different retrier is hit reset the context
-          if !context["State"].key?("RetryCount") || context["State"]["Retrier"] != retrier.error_equals
-            context["State"]["RetryCount"] = 0
-            context["State"]["Retrier"] = retrier.error_equals
-          end
-
-          context["State"]["RetryCount"] += 1
-
-          return if context["State"]["RetryCount"] > retrier.max_attempts
-
-          wait_until!(context, :seconds => retrier.sleep_duration(context["State"]["RetryCount"]))
-          context.next_state = context.state_name
-          context.output = error
-          logger.info("Running state: [#{long_name}] with input [#{context.json_input}] got error[#{context.json_output}]...Retry - delay: #{wait_until(context)}")
-          true
-        end
-
-        def catch_error!(context, error)
-          catcher = find_catcher(error["Error"]) if error
-          return if catcher.nil?
-
-          context.next_state = catcher.next
-          context.output = catcher.result_path.set(context.input, error)
-          logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...CatchError - next state: [#{context.next_state}] output: [#{context.json_output}]")
-
-          true
-        end
-
-        def fail_workflow!(context, error)
-          # next_state is nil, and will be set to nil again in super
-          # keeping in here for completeness
-          context.next_state = nil
-          context.output = error
-          logger.error("Running state: [#{long_name}] with input [#{context.json_input}]...Complete workflow - output: [#{context.json_output}]")
-        end
-
         def parse_error(output)
           return if output.nil?
           return output if output.kind_of?(Hash)
data/lib/floe/workflow.rb
CHANGED
@@ -4,9 +4,8 @@ require "securerandom"
 require "json"
 
 module Floe
-  class Workflow
+  class Workflow < Floe::WorkflowBase
     include Logging
-    include ValidationMixin
 
     class << self
       def load(path_or_io, context = nil, credentials = {}, name = nil)
@@ -19,7 +18,6 @@ module Floe
 
      def wait(workflows, timeout: nil, &block)
        workflows = [workflows] if workflows.kind_of?(self)
-        logger.info("checking #{workflows.count} workflows...")
 
        run_until = Time.now.utc + timeout if timeout.to_i > 0
        ready = []
@@ -66,29 +64,20 @@ module Floe
            event, data = queue.pop
            break if event.nil?
 
-
-
-            # If the event is for one of our workflows set the updated runner_context
-            workflows.each do |workflow|
-              next unless workflow.context.state.dig("RunnerContext", "container_ref") == runner_context["container_ref"]
-
-              workflow.context.state["RunnerContext"] = runner_context
-            end
-
-            break if queue.empty?
+            # break out of the loop if the event is for one of our workflows
+            break if queue.empty? || workflows.detect { |wf| wf.execution_id == data["execution_id"] }
          end
        ensure
          sleep_thread&.kill
        end
 
-        logger.info("checking #{workflows.count} workflows...Complete - #{ready.count} ready")
        ready
      ensure
        wait_thread&.kill
      end
    end
 
-    attr_reader :
+    attr_reader :comment, :context
 
    def initialize(payload, context = nil, credentials = nil, name = nil)
      payload = JSON.parse(payload) if payload.kind_of?(String)
@@ -99,20 +88,10 @@ module Floe
      # caller should really put credentials into context and not pass that variable
      context.credentials = credentials if credentials
 
-
-      @
-      @payload = payload
-      @context = context
-      @comment = payload["Comment"]
-      @start_at = payload["StartAt"]
-
-      # NOTE: Everywhere else we include our name (i.e.: parent name) when building the child name.
-      # When creating the states, we are dropping our name (i.e.: the workflow name)
-      @states = payload["States"].to_a.map { |state_name, state| State.build!(self, ["States", state_name], state) }
+      @context = context
+      @comment = payload["Comment"]
 
-
-
-      @states_by_name = @states.each_with_object({}) { |state, result| result[state.short_name] = state }
+      super(payload, name)
    rescue Floe::Error
      raise
    rescue => err
@@ -185,7 +164,7 @@ module Floe
 
    # NOTE: Expecting the context to be initialized (via start_workflow) before this
    def current_state
-
+      states_by_name[context.state_name]
    end
 
    # backwards compatibility. Caller should access directly from context
@@ -193,14 +172,12 @@ module Floe
      @context.credentials
    end
 
-
-
-    def validate_workflow
-      missing_field_error!("States") if @states.empty?
-      missing_field_error!("StartAt") if @start_at.nil?
-      invalid_field_error!("StartAt", @start_at, "is not found in \"States\"") unless workflow_state?(@start_at, self)
+    def execution_id
+      @context.execution["Id"]
    end
 
+    private
+
    def step!
      next_state = {"Name" => context.next_state, "Guid" => SecureRandom.uuid, "PreviousStateGuid" => context.state["Guid"]}
 
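A brief, hedged sketch of the new `execution_id` reader, which the simplified `wait` loop above uses to match queue events to workflows; the context payload and file path are illustrative, and the `Context.new` call mirrors the shape used elsewhere in this diff.

```ruby
# Illustrative: execution_id simply exposes context.execution["Id"].
context  = Floe::Workflow::Context.new({"Execution" => {"Id" => "exec-123"}}, :input => "{}")
workflow = Floe::Workflow.load("examples/parallel.asl", context)

workflow.execution_id # => "exec-123"
```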
data/lib/floe/workflow_base.rb
ADDED
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+module Floe
+  class WorkflowBase
+    include ValidationMixin
+
+    attr_reader :name, :payload, :start_at, :states, :states_by_name
+
+    def initialize(payload, name = nil)
+      # NOTE: this is a string, and states use an array
+      @name = name || "State Machine"
+      @payload = payload
+      @start_at = payload["StartAt"]
+
+      # NOTE: Everywhere else we include our name (i.e.: parent name) when building the child name.
+      # When creating the states, we are dropping our name (i.e.: the workflow name)
+      @states = payload["States"].to_a.map { |state_name, state| Floe::Workflow::State.build!(self, ["States", state_name], state) }
+      @states_by_name = @states.to_h { |state| [state.short_name, state] }
+
+      validate_workflow!
+    end
+
+    def run(context)
+      run_nonblock(context) until context.ended?
+    end
+
+    def run_nonblock(context)
+      start_workflow(context)
+      loop while step_nonblock(context) == 0 && !context.ended?
+      self
+    end
+
+    def step_nonblock(context)
+      return Errno::EPERM if context.ended?
+
+      result = current_state(context).run_nonblock!(context)
+      return result if result != 0
+
+      context.state_history << context.state
+      context.next_state ? step!(context) : end_workflow!(context)
+
+      result
+    end
+
+    def step_nonblock_ready?(context)
+      !context.started? || current_state(context).ready?(context)
+    end
+
+    def waiting?(context)
+      current_state(context)&.waiting?(context)
+    end
+
+    def wait_until(context)
+      current_state(context)&.wait_until(context)
+    end
+
+    def start_workflow(context)
+      return if context.state_name
+
+      context.state["Name"] = start_at
+      context.state["Input"] = context.execution["Input"].dup
+
+      context.execution["StartTime"] = Time.now.utc.iso8601
+
+      self
+    end
+
+    def current_state(context)
+      states_by_name[context.state_name]
+    end
+
+    def end?(context)
+      context.ended?
+    end
+
+    def output(context)
+      context.output.to_json if end?(context)
+    end
+
+    private
+
+    def step!(context)
+      next_state = {"Name" => context.next_state}
+
+      # if rerunning due to an error (and we are using Retry)
+      if context.state_name == context.next_state && context.failed? && context.state.key?("Retrier")
+        next_state.merge!(context.state.slice("RetryCount", "Input", "Retrier"))
+      else
+        next_state["Input"] = context.output
+      end
+
+      context.state = next_state
+    end
+
+    # Avoiding State#running? because that is potentially expensive.
+    # State#run_nonblock! already called running? via State#ready? and
+    # called State#finished -- which is what Context#state_finished? is detecting
+    def end_workflow!(context)
+      context.execution["EndTime"] = context.state["FinishedTime"]
+    end
+
+    def validate_workflow!
+      missing_field_error!("States") if @states.empty?
+      missing_field_error!("StartAt") if @start_at.nil?
+      invalid_field_error!("StartAt", @start_at, "is not found in \"States\"") unless workflow_state?(@start_at, self)
+    end
+  end
+end
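A hedged sketch of driving a workflow through the base API defined above, using only methods shown in `WorkflowBase`; it assumes `workflow` is a `Floe::Workflow` (which now inherits these methods without overriding their signatures) and `context` is an initialized `Floe::Workflow::Context`.

```ruby
# Nonblocking drive loop built from the WorkflowBase methods above.
workflow.start_workflow(context)

until context.ended?
  if workflow.step_nonblock_ready?(context)
    # step_nonblock returns 0 on progress, or an Errno (e.g. EAGAIN) while a state is busy
    workflow.step_nonblock(context)
  else
    sleep(0.1) # arbitrary poll interval for this sketch
  end
end

puts workflow.output(context)
```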
data/lib/floe.rb
CHANGED
@@ -8,8 +8,11 @@ require_relative "floe/logging"
 require_relative "floe/runner"
 
 require_relative "floe/validation_mixin"
+require_relative "floe/workflow_base"
 require_relative "floe/workflow"
+# mixins used by workflow components
 require_relative "floe/workflow/error_matcher_mixin"
+require_relative "floe/workflow/branch"
 require_relative "floe/workflow/catcher"
 require_relative "floe/workflow/choice_rule"
 require_relative "floe/workflow/choice_rule/not"
@@ -17,6 +20,7 @@ require_relative "floe/workflow/choice_rule/or"
 require_relative "floe/workflow/choice_rule/and"
 require_relative "floe/workflow/choice_rule/data"
 require_relative "floe/workflow/context"
+require_relative "floe/workflow/item_processor"
 require_relative "floe/workflow/intrinsic_function"
 require_relative "floe/workflow/intrinsic_function/parser"
 require_relative "floe/workflow/intrinsic_function/transformer"
@@ -25,11 +29,14 @@ require_relative "floe/workflow/payload_template"
 require_relative "floe/workflow/reference_path"
 require_relative "floe/workflow/retrier"
 require_relative "floe/workflow/state"
+# mixins used by states
+require_relative "floe/workflow/states/child_workflow_mixin"
+require_relative "floe/workflow/states/input_output_mixin"
+require_relative "floe/workflow/states/non_terminal_mixin"
+require_relative "floe/workflow/states/retry_catch_mixin"
 require_relative "floe/workflow/states/choice"
 require_relative "floe/workflow/states/fail"
-require_relative "floe/workflow/states/input_output_mixin"
 require_relative "floe/workflow/states/map"
-require_relative "floe/workflow/states/non_terminal_mixin"
 require_relative "floe/workflow/states/parallel"
 require_relative "floe/workflow/states/pass"
 require_relative "floe/workflow/states/succeed"