floe 0.15.0 → 0.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +16 -1
- data/floe.gemspec +1 -0
- data/lib/floe/cli.rb +31 -5
- data/lib/floe/container_runner/docker.rb +4 -4
- data/lib/floe/container_runner/kubernetes.rb +4 -1
- data/lib/floe/logging.rb +5 -1
- data/lib/floe/runner.rb +0 -2
- data/lib/floe/version.rb +1 -1
- data/lib/floe/workflow/choice_rule/data.rb +32 -37
- data/lib/floe/workflow/context.rb +5 -1
- data/lib/floe/workflow/item_batcher.rb +62 -0
- data/lib/floe/workflow/state.rb +11 -4
- data/lib/floe/workflow/states/child_workflow_mixin.rb +1 -2
- data/lib/floe/workflow/states/map.rb +19 -4
- data/lib/floe/workflow/states/retry_catch_mixin.rb +3 -3
- data/lib/floe/workflow/states/task.rb +2 -1
- data/lib/floe/workflow.rb +0 -2
- data/lib/floe.rb +1 -0
- data/renovate.json +2 -3
- metadata +18 -7
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2d592d8fe18169e547f72b096e1135e93775f15027614f7c9dcdd97714896b44
+  data.tar.gz: dcad7afbd661be7f53566c143d48f9abd945ec15ceff24ed38d4c6ae14c34b3d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1246c428448b2631a994095a0919cd533c1c5b52d814c13fab6ab1a9a54599388e6611eef3e472dd5cf330b43987e3b38e2833bba5dc5914cd14ea7e9bf22a47
+  data.tar.gz: 576450ba34cdb2d263792c88b72f8710cc420ade67d28228f075917cb1ae6b9d27b90a6e9a278243960d3e562f58228d159214f06ec2fdfb9b895ee642ef9c7a
data/CHANGELOG.md
CHANGED
@@ -4,6 +4,18 @@ This project adheres to [Semantic Versioning](http://semver.org/).
 
 ## [Unreleased]
 
+## [0.16.0] - 2025-04-08
+### Added
+- Add Map state ItemBatcher/ItemSelector support ([#294](https://github.com/ManageIQ/floe/pull/294))
+
+### Fixed
+- Fix JSON.parse exception expectation ([#300](https://github.com/ManageIQ/floe/pull/300))
+
+## [0.15.1] - 2024-11-21
+### Fixed
+- Fix Map/Parallel States checking container_runner#status! of finished states ([#296](https://github.com/ManageIQ/floe/pull/296))
+- Fix child workflow mixin tight loop ([#297](https://github.com/ManageIQ/floe/pull/297))
+
 ## [0.15.0] - 2024-10-28
 ### Added
 - Add WorkflowBase base class for Workflow ([#279](https://github.com/ManageIQ/floe/pull/279))
@@ -267,7 +279,10 @@ This project adheres to [Semantic Versioning](http://semver.org/).
 ### Added
 - Initial release
 
-[Unreleased]: https://github.com/ManageIQ/floe/compare/v0.
+[Unreleased]: https://github.com/ManageIQ/floe/compare/v0.16.0...HEAD
+[0.16.0]: https://github.com/ManageIQ/floe/compare/v0.15.1...v0.16.0
+[0.15.1]: https://github.com/ManageIQ/floe/compare/v0.15.0...v0.15.1
+[0.15.0]: https://github.com/ManageIQ/floe/compare/v0.14.0...v0.15.0
 [0.14.0]: https://github.com/ManageIQ/floe/compare/v0.13.1...v0.14.0
 [0.13.1]: https://github.com/ManageIQ/floe/compare/v0.13.0...v0.13.1
 [0.13.0]: https://github.com/ManageIQ/floe/compare/v0.12.0...v0.13.0
data/floe.gemspec
CHANGED
@@ -37,6 +37,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency "kubeclient", "~>4.7"
   spec.add_dependency "optimist", "~>3.0"
   spec.add_dependency "parslet", "~>2.0"
+  spec.add_dependency "json", "~>2.10"
 
   spec.add_development_dependency "manageiq-style", ">= 1.5.2"
   spec.add_development_dependency "rake", "~> 13.0"
data/lib/floe/cli.rb
CHANGED
@@ -1,9 +1,12 @@
+require "floe"
+require "floe/container_runner"
+
 module Floe
   class CLI
+    include Logging
+
     def initialize
       require "optimist"
-      require "floe"
-      require "floe/container_runner"
       require "logger"
 
       Floe.logger = Logger.new($stdout)
@@ -20,12 +23,22 @@ module Floe
         create_workflow(workflow, opts[:context], input, credentials)
       end
 
-
+      output_streams = create_loggers(workflows, opts[:segment_output])
+
+      logger.info("Checking #{workflows.count} workflows...")
+      ready = Floe::Workflow.wait(workflows, &:run_nonblock)
+      logger.info("Checking #{workflows.count} workflows...Complete - #{ready.count} ready")
 
       # Display status
       workflows.each do |workflow|
-
-
+        if workflows.size > 1
+          logger.info("")
+          logger.info("#{workflow.name}#{" (#{workflow.status})" unless workflow.context.success?}")
+          logger.info("===")
+        end
+
+        logger.info(output_streams[workflow].string) if output_streams[workflow]
+        logger.info(workflow.output)
       end
 
       workflows.all? { |workflow| workflow.context.success? }
@@ -49,6 +62,7 @@ module Floe
       opt :context, "JSON payload of the Context", :type => :string
       opt :credentials, "JSON payload with Credentials", :type => :string
       opt :credentials_file, "Path to a file with Credentials", :type => :string
+      opt :segment_output, "Segment output by each worker", :default => false
 
       Floe::ContainerRunner.cli_options(self)
 
@@ -89,5 +103,17 @@ module Floe
       context = Floe::Workflow::Context.new(context_payload, :input => input, :credentials => credentials)
       Floe::Workflow.load(workflow, context)
     end
+
+    def create_loggers(workflows, segment_output)
+      if workflows.size == 1 || !segment_output
+        # no extra work necessary
+        {}
+      else
+        workflows.each_with_object({}) do |workflow, h|
+          workflow.context.logger = Logger.new(output = StringIO.new)
+          h[workflow] = output
+        end
+      end
+    end
   end
 end
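The CLI changes above route each workflow's log lines to its own buffer when output segmentation is requested (the Optimist flag declared by `opt :segment_output`), then drive everything through `Floe::Workflow.wait`. A minimal sketch of the same pattern outside the CLI, assuming `workflows` is already an array of loaded `Floe::Workflow` objects (variable names here are illustrative):

```ruby
require "floe"
require "stringio"
require "logger"

# Give each workflow its own in-memory log, as CLI#create_loggers does above.
outputs = workflows.each_with_object({}) do |wf, h|
  wf.context.logger = Logger.new(buffer = StringIO.new)
  h[wf] = buffer
end

# Run all workflows to completion, then print each one's segmented log and output.
Floe::Workflow.wait(workflows, &:run_nonblock)
workflows.each { |wf| puts outputs[wf].string, wf.output }
```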
data/lib/floe/container_runner/docker.rb
CHANGED
@@ -30,7 +30,7 @@ module Floe
        end
 
        begin
-         runner_context["container_ref"] = run_container(image, env, execution_id, runner_context["secrets_ref"])
+         runner_context["container_ref"] = run_container(image, env, execution_id, runner_context["secrets_ref"], context.logger)
          runner_context
        rescue AwesomeSpawn::CommandResultError => err
          cleanup(runner_context)
@@ -123,7 +123,7 @@ module Floe
 
      attr_reader :network
 
-     def run_container(image, env, execution_id, secrets_file)
+     def run_container(image, env, execution_id, secrets_file, logger)
        params = run_container_params(image, env, execution_id, secrets_file)
 
        logger.debug("Running #{AwesomeSpawn.build_command_line(self.class::DOCKER_COMMAND, params)}")
@@ -182,8 +182,8 @@ module Floe
 
      def inspect_container(container_id)
        JSON.parse(docker!("inspect", container_id).output).first
-     rescue
-
+     rescue AwesomeSpawn::CommandResultError => err
+       raise Floe::ExecutionError.new("Failed to get status for container #{container_id}: #{err}")
      end
 
      def delete_container(container_id)
data/lib/floe/container_runner/kubernetes.rb
CHANGED
@@ -155,6 +155,8 @@ module Floe
 
      def pod_info(pod_name)
        kubeclient.get_pod(pod_name, namespace)
+     rescue Kubeclient::HttpError => err
+       raise Floe::ExecutionError.new("Failed to get status for pod #{namespace}/#{pod_name}: #{err}")
      end
 
      def pod_running?(context)
@@ -285,7 +287,8 @@ module Floe
        code = notice.object&.code
        reason = notice.object&.reason
 
-
+       # This feels like a global concern and not an end user's concern
+       Floe.logger.warn("Received [#{code} #{reason}], [#{message}]")
 
        true
      end
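Both container runners now fail loudly when they cannot look up a container or pod: `inspect_container` and `pod_info` convert AwesomeSpawn/Kubeclient errors into `Floe::ExecutionError` with a descriptive message. A hedged sketch of handling that from calling code; `runner` and `runner_context` are placeholders here, not names taken from this diff:

```ruby
begin
  runner.status!(runner_context)
rescue Floe::ExecutionError => err
  # e.g. "Failed to get status for pod default/my-pod: ..." per the messages above
  Floe.logger.error(err.message)
end
```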
data/lib/floe/logging.rb
CHANGED
data/lib/floe/runner.rb
CHANGED
data/lib/floe/version.rb
CHANGED
data/lib/floe/workflow/choice_rule/data.rb
CHANGED
@@ -11,7 +11,7 @@ module Floe
         # e.g.: (String)(LessThan)(Path), (Numeric)(GreaterThanEquals)()
         OPERATION = /^(#{(TYPES - %w[Null Present]).join("|")})(#{COMPARES.join("|")})(Path)?$/.freeze
 
-        attr_reader :variable, :compare_key, :type, :compare_predicate, :path
+        attr_reader :variable, :compare_key, :operation, :type, :compare_predicate, :path
 
         def initialize(_workflow, _name, payload)
           super
@@ -25,39 +25,7 @@ module Floe
 
           lhs = variable_value(context, input)
           rhs = compare_value(context, input)
-
-          case compare_key
-          when "IsNull" then is_null?(lhs, rhs)
-          when "IsNumeric" then is_numeric?(lhs, rhs)
-          when "IsString" then is_string?(lhs, rhs)
-          when "IsBoolean" then is_boolean?(lhs, rhs)
-          when "IsTimestamp" then is_timestamp?(lhs, rhs)
-          when "StringEquals", "StringEqualsPath",
-               "NumericEquals", "NumericEqualsPath",
-               "BooleanEquals", "BooleanEqualsPath",
-               "TimestampEquals", "TimestampEqualsPath"
-            lhs == rhs
-          when "StringLessThan", "StringLessThanPath",
-               "NumericLessThan", "NumericLessThanPath",
-               "TimestampLessThan", "TimestampLessThanPath"
-            lhs < rhs
-          when "StringGreaterThan", "StringGreaterThanPath",
-               "NumericGreaterThan", "NumericGreaterThanPath",
-               "TimestampGreaterThan", "TimestampGreaterThanPath"
-            lhs > rhs
-          when "StringLessThanEquals", "StringLessThanEqualsPath",
-               "NumericLessThanEquals", "NumericLessThanEqualsPath",
-               "TimestampLessThanEquals", "TimestampLessThanEqualsPath"
-            lhs <= rhs
-          when "StringGreaterThanEquals", "StringGreaterThanEqualsPath",
-               "NumericGreaterThanEquals", "NumericGreaterThanEqualsPath",
-               "TimestampGreaterThanEquals", "TimestampGreaterThanEqualsPath"
-            lhs >= rhs
-          when "StringMatches"
-            lhs.match?(Regexp.escape(rhs).gsub('\*', '.*?'))
-          else
-            raise Floe::InvalidWorkflowError, "Invalid choice [#{compare_key}]"
-          end
+          send(operation, lhs, rhs)
         end
 
         private
@@ -112,26 +80,53 @@ module Floe
         # rubocop:enable Naming/PredicateName
         # rubocop:enable Style/OptionalBooleanParameter
 
+        def op_equals?(lhs, rhs)
+          lhs == rhs
+        end
+
+        def op_lessthan?(lhs, rhs)
+          lhs < rhs
+        end
+
+        def op_greaterthan?(lhs, rhs)
+          lhs > rhs
+        end
+
+        def op_lessthanequals?(lhs, rhs)
+          lhs <= rhs
+        end
+
+        def op_greaterthanequals?(lhs, rhs)
+          lhs >= rhs
+        end
+
+        def op_matches?(lhs, rhs)
+          lhs.match?(Regexp.escape(rhs).gsub('\*', '.*?'))
+        end
+
         # parse the compare key at initialization time
         def parse_compare_key
           payload.each_key do |key|
             # e.g. (String)(GreaterThan)(Path)
             if (match_values = OPERATION.match(key))
               @compare_key = key
-              @type,
+              @type, operator, @path = match_values.captures
+              @operation = "op_#{operator.downcase}?".to_sym
               @compare_predicate = parse_predicate(type)
               break
             end
             # e.g. (Is)(String)
-            if TYPE_CHECK.match
+            if (match_value = TYPE_CHECK.match(key))
               @compare_key = key
+              _operator, type = match_value.captures
              # type: nil means no runtime type checking.
              @type = @path = nil
+              @operation = "is_#{type.downcase}?".to_sym
              @compare_predicate = parse_predicate("Boolean")
              break
            end
          end
-          parser_error!("requires a compare key")
+          parser_error!("requires a compare key") if compare_key.nil? || operation.nil?
        end
 
        # parse predicate at initilization time
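The former case statement is replaced by method dispatch: `parse_compare_key` captures the type, comparator, and optional `Path` suffix from the compare key, derives an `op_*?` method name, and `true?` simply `send`s it. A rough illustration of that decomposition, assuming `TYPES` and `COMPARES` hold the usual ASL type and comparator names (those constants are not part of this diff):

```ruby
TYPES    = %w[String Numeric Boolean Timestamp Null Present]                         # assumed
COMPARES = %w[Equals LessThan GreaterThan LessThanEquals GreaterThanEquals Matches]  # assumed
OPERATION = /^(#{(TYPES - %w[Null Present]).join("|")})(#{COMPARES.join("|")})(Path)?$/

type, operator, path = OPERATION.match("NumericGreaterThanEqualsPath").captures
# type => "Numeric", operator => "GreaterThanEquals", path => "Path"
"op_#{operator.downcase}?" # => "op_greaterthanequals?", the method invoked via send
```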
data/lib/floe/workflow/context.rb
CHANGED
@@ -3,11 +3,13 @@
 module Floe
   class Workflow
     class Context
+      include Logging
+
       attr_accessor :credentials
 
       # @param context [Json|Hash] (default, create another with input and execution params)
       # @param input [Hash] (default: {})
-      def initialize(context = nil, input: nil, credentials: {})
+      def initialize(context = nil, input: nil, credentials: {}, logger: nil)
         context = JSON.parse(context) if context.kind_of?(String)
         input = JSON.parse(input || "{}")
 
@@ -20,6 +22,8 @@ module Floe
         self["Task"] ||= {}
 
         @credentials = credentials || {}
+
+        self.logger = logger if logger
       rescue JSON::ParserError => err
         raise Floe::InvalidExecutionInput, "Invalid State Machine Execution Input: #{err}: was expecting (JSON String, Number, Array, Object or token 'null', 'true' or 'false')"
       end
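A Context can now be constructed with its own logger, which the state code below uses in place of the global `Floe.logger`. A minimal sketch (the buffer name is illustrative):

```ruby
require "floe"
require "stringio"
require "logger"

buffer  = StringIO.new
context = Floe::Workflow::Context.new(nil, :input => '{"foo": 1}', :logger => Logger.new(buffer))
# State log lines emitted while running a workflow with this context end up in
# buffer.string instead of the global Floe.logger.
```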
data/lib/floe/workflow/item_batcher.rb
ADDED
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Floe
+  class Workflow
+    class ItemBatcher
+      include ValidationMixin
+
+      attr_reader :name, :batch_input, :max_items_per_batch, :max_items_per_batch_path, :max_input_bytes_per_batch, :max_input_bytes_per_batch_path
+
+      def initialize(payload, name)
+        @name = name
+
+        @batch_input = PayloadTemplate.new(payload["BatchInput"]) if payload["BatchInput"]
+        @max_items_per_batch = payload["MaxItemsPerBatch"]
+        @max_input_bytes_per_batch = payload["MaxInputBytesPerBatch"]
+
+        @max_items_per_batch_path = ReferencePath.new(payload["MaxItemsPerBatchPath"]) if payload["MaxItemsPerBatchPath"]
+        @max_input_bytes_per_batch_path = ReferencePath.new(payload["MaxInputBytesPerBatchPath"]) if payload["MaxInputBytesPerBatchPath"]
+
+        validate!
+      end
+
+      def value(context, input, state_input = nil)
+        state_input ||= input
+
+        output = batch_input ? batch_input.value(context, state_input) : {}
+
+        input.each_slice(max_items(context, state_input)).map do |batch|
+          output.merge("Items" => batch)
+        end
+      end
+
+      private
+
+      def max_items(context, state_input)
+        return max_items_per_batch if max_items_per_batch
+        return if max_items_per_batch_path.nil?
+
+        result = max_items_per_batch_path.value(context, state_input)
+        raise runtime_field_error!("MaxItemsPerBatchPath", result, "must be a positive integer") if result.nil? || !result.kind_of?(Integer) || result <= 0
+
+        result
+      end
+
+      def validate!
+        if [max_items_per_batch, max_items_per_batch_path].all?(&:nil?)
+          parser_error!("must have one of \"MaxItemsPerBatch\", \"MaxItemsPerBatchPath\"")
+        end
+
+        parser_error!("must not specify both \"MaxItemsPerBatch\" and \"MaxItemsPerBatchPath\"") if max_items_per_batch && max_items_per_batch_path
+        parser_error!("must not specify both \"MaxInputBytesPerBatch\" and \"MaxInputBytesPerBatchPath\"") if max_input_bytes_per_batch && max_input_bytes_per_batch_path
+
+        if max_items_per_batch && (!max_items_per_batch.kind_of?(Integer) || max_items_per_batch <= 0)
+          invalid_field_error!("MaxItemsPerBatch", max_items_per_batch, "must be a positive integer")
+        end
+        if max_input_bytes_per_batch && (!max_input_bytes_per_batch.kind_of?(Integer) || max_input_bytes_per_batch <= 0)
+          invalid_field_error!("MaxInputBytesPerBatch", max_input_bytes_per_batch, "must be a positive integer")
+        end
+      end
+    end
+  end
+end
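On its own, the new class slices a Map state's items into batches, optionally merging in a `BatchInput` payload template. A minimal sketch of its behavior based on the code above (the name array passed for error reporting is illustrative):

```ruby
require "floe"

batcher = Floe::Workflow::ItemBatcher.new({"MaxItemsPerBatch" => 2}, ["Map", "ItemBatcher"])
context = Floe::Workflow::Context.new

batcher.value(context, [1, 2, 3, 4, 5])
# => [{"Items"=>[1, 2]}, {"Items"=>[3, 4]}, {"Items"=>[5]}]
```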
data/lib/floe/workflow/state.rb
CHANGED
@@ -3,7 +3,6 @@
 module Floe
   class Workflow
     class State
-      include Logging
       include ValidationMixin
 
       class << self
@@ -56,14 +55,22 @@ module Floe
        mark_started(context)
      end
 
+     def started?(context)
+       context.state_started?
+     end
+
      def finish(context)
        mark_finished(context)
      end
 
+     def finished?(context)
+       context.state_finished?
+     end
+
      def mark_started(context)
        context.state["EnteredTime"] = Time.now.utc.iso8601
 
-       logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...")
+       context.logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...")
      end
 
      def mark_finished(context)
@@ -74,7 +81,7 @@ module Floe
        context.state["Duration"] = finished_time - entered_time
 
        level = context.failed? ? :error : :info
-       logger.public_send(level, "Running state: [#{long_name}] with input [#{context.json_input}]...Complete #{context.next_state ? "- next state [#{context.next_state}]" : "workflow -"} output: [#{context.json_output}]")
+       context.logger.public_send(level, "Running state: [#{long_name}] with input [#{context.json_input}]...Complete #{context.next_state ? "- next state [#{context.next_state}]" : "workflow -"} output: [#{context.json_output}]")
 
        0
      end
@@ -88,7 +95,7 @@ module Floe
      end
 
      def ready?(context)
-       !context
+       !started?(context) || !running?(context)
      end
 
      def running?(context)
data/lib/floe/workflow/states/child_workflow_mixin.rb
CHANGED
@@ -6,8 +6,7 @@ module Floe
      module ChildWorkflowMixin
        def run_nonblock!(context)
          start(context) unless context.state_started?
-
-         step_nonblock!(context) while running?(context)
+         step_nonblock!(context)
          return Errno::EAGAIN unless ready?(context)
 
          finish(context) if ended?(context)
data/lib/floe/workflow/states/map.rb
CHANGED
@@ -31,8 +31,8 @@ module Floe
          @item_processor = ItemProcessor.new(payload["ItemProcessor"], name)
          @items_path = ReferencePath.new(payload.fetch("ItemsPath", "$"))
          @item_reader = payload["ItemReader"]
-         @item_selector = payload["ItemSelector"]
-         @item_batcher = payload["ItemBatcher"]
+         @item_selector = PayloadTemplate.new(payload["ItemSelector"]) if payload["ItemSelector"]
+         @item_batcher = ItemBatcher.new(payload["ItemBatcher"], name + ["ItemBatcher"]) if payload["ItemBatcher"]
          @result_writer = payload["ResultWriter"]
          @max_concurrency = payload["MaxConcurrency"]&.to_i
          @tolerated_failure_percentage = payload["ToleratedFailurePercentage"]&.to_i
@@ -43,7 +43,9 @@ module Floe
 
        def process_input(context)
          input = super
-         items_path.value(context, input)
+         input = items_path.value(context, input)
+         input = item_batcher.value(context, input, context.state["Input"]) if item_batcher
+         input
        end
 
        def start(context)
@@ -51,7 +53,20 @@ module Floe
 
          input = process_input(context)
 
-         context.state["ItemProcessorContext"] = input.map
+         context.state["ItemProcessorContext"] = input.map.with_index do |item, index|
+           item_processor_context = {
+             "Execution" => {
+               "Id" => context.execution["Id"]
+             },
+             "Map" => {
+               "Item" => {"Index" => index, "Value" => item}
+             }
+           }
+
+           item_processor_input = item_selector ? item_selector.value(item_processor_context, context.state["Input"]) : item
+
+           Context.new(item_processor_context, :input => item_processor_input.to_json).to_h
+         end
        end
 
        def end?
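Put together, a Map state can now declare `ItemBatcher` and `ItemSelector`, and each item-processor execution sees its batch through the `$$.Map.Item` context built in `Map#start` above. A hypothetical state fragment, written as a Ruby hash rather than raw ASL JSON; the field values are illustrative, not taken from this diff:

```ruby
map_state = {
  "Type"          => "Map",
  "ItemsPath"     => "$.items",
  "ItemBatcher"   => {"MaxItemsPerBatch" => 10},
  "ItemSelector"  => {
    "Index.$" => "$$.Map.Item.Index",  # index injected per item above
    "Batch.$" => "$$.Map.Item.Value"   # with ItemBatcher, each value is {"Items" => [...]}
  },
  "ItemProcessor" => {
    "StartAt" => "Work",
    "States"  => {"Work" => {"Type" => "Pass", "End" => true}}
  },
  "End"           => true
}
```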
data/lib/floe/workflow/states/retry_catch_mixin.rb
CHANGED
@@ -29,7 +29,7 @@ module Floe
          wait_until!(context, :seconds => retrier.sleep_duration(context["State"]["RetryCount"]))
          context.next_state = context.state_name
          context.output = error
-         logger.info("Running state: [#{long_name}] with input [#{context.json_input}] got error[#{context.json_output}]...Retry - delay: #{wait_until(context)}")
+         context.logger.info("Running state: [#{long_name}] with input [#{context.json_input}] got error[#{context.json_output}]...Retry - delay: #{wait_until(context)}")
          true
        end
 
@@ -39,7 +39,7 @@ module Floe
 
          context.next_state = catcher.next
          context.output = catcher.result_path.set(context.input, error)
-         logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...CatchError - next state: [#{context.next_state}] output: [#{context.json_output}]")
+         context.logger.info("Running state: [#{long_name}] with input [#{context.json_input}]...CatchError - next state: [#{context.next_state}] output: [#{context.json_output}]")
 
          true
        end
@@ -49,7 +49,7 @@ module Floe
          # keeping in here for completeness
          context.next_state = nil
          context.output = error
-         logger.error("Running state: [#{long_name}] with input [#{context.json_input}]...Complete workflow - output: [#{context.json_output}]")
+         context.logger.error("Running state: [#{long_name}] with input [#{context.json_input}]...Complete workflow - output: [#{context.json_output}]")
        end
      end
    end
data/lib/floe/workflow/states/task.rb
CHANGED
@@ -61,7 +61,8 @@ module Floe
        end
 
        def running?(context)
-         return true
+         return true if waiting?(context)
+         return false if finished?(context)
 
          runner.status!(context.state["RunnerContext"])
          runner.running?(context.state["RunnerContext"])
data/lib/floe/workflow.rb
CHANGED
@@ -18,7 +18,6 @@ module Floe
 
      def wait(workflows, timeout: nil, &block)
        workflows = [workflows] if workflows.kind_of?(self)
-       logger.info("Checking #{workflows.count} workflows...")
 
        run_until = Time.now.utc + timeout if timeout.to_i > 0
        ready = []
@@ -72,7 +71,6 @@ module Floe
          sleep_thread&.kill
        end
 
-       logger.info("Checking #{workflows.count} workflows...Complete - #{ready.count} ready")
        ready
      ensure
        wait_thread&.kill
data/lib/floe.rb
CHANGED
@@ -20,6 +20,7 @@ require_relative "floe/workflow/choice_rule/or"
 require_relative "floe/workflow/choice_rule/and"
 require_relative "floe/workflow/choice_rule/data"
 require_relative "floe/workflow/context"
+require_relative "floe/workflow/item_batcher"
 require_relative "floe/workflow/item_processor"
 require_relative "floe/workflow/intrinsic_function"
 require_relative "floe/workflow/intrinsic_function/parser"
data/renovate.json
CHANGED
metadata
CHANGED
@@ -1,14 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: floe
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.16.0
 platform: ruby
 authors:
 - ManageIQ Developers
-autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2025-04-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: awesome_spawn
@@ -94,6 +93,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '2.0'
+- !ruby/object:Gem::Dependency
+  name: json
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.10'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.10'
 - !ruby/object:Gem::Dependency
   name: manageiq-style
   requirement: !ruby/object:Gem::Requirement
@@ -165,7 +178,6 @@ dependencies:
     - !ruby/object:Gem::Version
       version: '0'
 description: Floe is a runner for Amazon States Language workflows.
-email:
 executables:
 - floe
 extensions: []
@@ -213,6 +225,7 @@ files:
 - lib/floe/workflow/intrinsic_function.rb
 - lib/floe/workflow/intrinsic_function/parser.rb
 - lib/floe/workflow/intrinsic_function/transformer.rb
+- lib/floe/workflow/item_batcher.rb
 - lib/floe/workflow/item_processor.rb
 - lib/floe/workflow/path.rb
 - lib/floe/workflow/payload_template.rb
@@ -243,7 +256,6 @@ metadata:
   homepage_uri: https://github.com/ManageIQ/floe
   source_code_uri: https://github.com/ManageIQ/floe
   changelog_uri: https://github.com/ManageIQ/floe/blob/master/CHANGELOG.md
-post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -258,8 +270,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.6.6
 specification_version: 4
 summary: Floe is a runner for Amazon States Language workflows.
 test_files: []