aws-flow 2.3.1 → 2.4.0
This diff shows the changes between publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +8 -8
- data/aws-flow.gemspec +3 -2
- data/bin/aws-flow-ruby +1 -1
- data/bin/aws-flow-utils +5 -0
- data/lib/aws/decider.rb +7 -0
- data/lib/aws/decider/async_retrying_executor.rb +1 -1
- data/lib/aws/decider/data_converter.rb +161 -0
- data/lib/aws/decider/decider.rb +27 -14
- data/lib/aws/decider/flow_defaults.rb +28 -0
- data/lib/aws/decider/implementation.rb +0 -1
- data/lib/aws/decider/options.rb +2 -2
- data/lib/aws/decider/starter.rb +207 -0
- data/lib/aws/decider/task_poller.rb +4 -4
- data/lib/aws/decider/utilities.rb +38 -0
- data/lib/aws/decider/version.rb +1 -1
- data/lib/aws/decider/worker.rb +8 -7
- data/lib/aws/decider/workflow_definition_factory.rb +1 -1
- data/lib/aws/runner.rb +146 -65
- data/lib/aws/templates.rb +4 -0
- data/lib/aws/templates/activity.rb +69 -0
- data/lib/aws/templates/base.rb +87 -0
- data/lib/aws/templates/default.rb +146 -0
- data/lib/aws/templates/starter.rb +256 -0
- data/lib/aws/utils.rb +270 -0
- data/spec/aws/decider/integration/activity_spec.rb +7 -1
- data/spec/aws/decider/integration/data_converter_spec.rb +39 -0
- data/spec/aws/decider/integration/integration_spec.rb +12 -5
- data/spec/aws/decider/integration/options_spec.rb +23 -9
- data/spec/aws/decider/integration/starter_spec.rb +209 -0
- data/spec/aws/decider/unit/data_converter_spec.rb +276 -0
- data/spec/aws/decider/unit/decider_spec.rb +1360 -1386
- data/spec/aws/decider/unit/options_spec.rb +21 -22
- data/spec/aws/decider/unit/retry_spec.rb +8 -0
- data/spec/aws/decider/unit/starter_spec.rb +159 -0
- data/spec/aws/runner/integration/runner_integration_spec.rb +2 -3
- data/spec/aws/runner/unit/runner_unit_spec.rb +128 -38
- data/spec/aws/templates/unit/activity_spec.rb +89 -0
- data/spec/aws/templates/unit/base_spec.rb +72 -0
- data/spec/aws/templates/unit/default_spec.rb +141 -0
- data/spec/aws/templates/unit/starter_spec.rb +271 -0
- data/spec/spec_helper.rb +9 -11
- metadata +41 -4
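
The headline addition in 2.4.0 is the one-call starter API exercised by the new `starter_spec.rb` shown below: `AWS::Flow::start` wraps a single activity invocation in a default workflow template. Here is a minimal sketch of how a caller might use it, based on the calls and assertions in the spec; the class, method, task-list, and domain names are placeholders, not part of the gem:

```ruby
require 'aws/decider'

# Hypothetical activity class; all names here are placeholders.
class HelloActivity
  extend AWS::Flow::Activities

  activity :hello do
    {
      version: "1.0",
      default_task_list: "hello_tasks",
      default_task_start_to_close_timeout: 60
    }
  end

  def hello(input)
    "Hello, #{input[:name]}!"
  end
end

# Starts a workflow execution that runs HelloActivity.hello under the
# default workflow template in the given SWF domain. Per the spec's
# assertions, the default template retries the activity up to 3 times
# (exponential_retry maximum_attempts: 3).
AWS::Flow::start("HelloActivity.hello", { name: "World" }, { domain: "TestDomain" })
```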
data/spec/aws/decider/integration/starter_spec.rb

```diff
@@ -0,0 +1,209 @@
+require_relative 'setup'
+
+describe "AWS::Flow" do
+
+  before(:all) do
+    @domain = get_test_domain
+    @domain.workflow_executions.each { |x| x.terminate }
+    puts @domain.inspect
+  end
+
+  context "#start" do
+
+    it "starts default workflow with the correct activity type" do
+      AWS::Flow::start("StarterTestActivity.foo", {input: "Hello"}, {domain: @domain.name})
+
+      until @domain.workflow_executions.count.count > 0
+        sleep 2
+      end
+
+      @domain.workflow_executions.each do |x|
+        x.execution_start_to_close_timeout.should == FlowConstants.defaults[:execution_start_to_close_timeout].to_i
+        x.workflow_type.name.should == "#{FlowConstants.defaults[:prefix_name]}.#{FlowConstants.defaults[:execution_method]}"
+        x.workflow_type.version.should == "#{FlowConstants.defaults[:version]}"
+        x.tags.should include('StarterTestActivity.foo')
+
+        data_converter = FlowConstants.defaults[:data_converter]
+        input = data_converter.load(x.events.first.attributes[:input]).first
+
+        root = input[:definition]
+        root.result_step.should be_nil
+        root.should be_kind_of(AWS::Flow::Templates::RootTemplate)
+
+        activity = root.step
+        activity.should be_kind_of(AWS::Flow::Templates::ActivityTemplate)
+        activity.name.should == "foo"
+        activity.options.should include(
+          version: "1.0",
+          prefix_name: "StarterTestActivity",
+          exponential_retry: {
+            maximum_attempts: 3
+          }
+        )
+
+        input[:args].should include(input: "Hello")
+        x.terminate
+      end
+
+    end
+
+    it "starts default workflow with the correct activity type with overriden options" do
+
+      options = {
+        execution_start_to_close_timeout: 100,
+        task_list: "bar",
+        version: "2.0",
+        tag_list: ['overriden_test'],
+        wait: true,
+        domain: @domain.name
+      }
+
+      executor = ForkingExecutor.new
+      executor.execute { AWS::Flow::start("StarterTestActivity.foo", {input: "Hello"}, options) }
+
+      executor.shutdown(1)
+
+      until @domain.workflow_executions.count.count > 0
+        sleep 2
+      end
+
+      @domain.workflow_executions.each do |x|
+        x.execution_start_to_close_timeout.should == 100
+        x.workflow_type.name.should == "#{FlowConstants.defaults[:prefix_name]}.#{FlowConstants.defaults[:execution_method]}"
+        x.workflow_type.version.should == "#{FlowConstants.defaults[:version]}"
+        x.tags.should include('overriden_test', 'StarterTestActivity.foo')
+
+        data_converter = FlowConstants.defaults[:data_converter]
+        attrs = x.events.first.attributes
+
+        input = data_converter.load(x.events.first.attributes[:input]).first
+
+        root = input[:definition]
+        root.should be_kind_of(AWS::Flow::Templates::RootTemplate)
+        root.result_step.should_not be_nil
+        result = root.result_step
+        result.should be_kind_of(AWS::Flow::Templates::ActivityTemplate)
+
+        activity = root.step
+        activity.should be_kind_of(AWS::Flow::Templates::ActivityTemplate)
+        activity.name.should == "foo"
+        activity.options.should include(
+          version: "2.0",
+          prefix_name: "StarterTestActivity",
+          task_list: "bar",
+          exponential_retry: {
+            maximum_attempts: 3
+          }
+        )
+
+        input[:args].should include(input: "Hello")
+        x.terminate
+      end
+
+    end
+
+  end
+
+  context "#start_workflow" do
+    before(:all) do
+      class StartWorkflowTest
+        extend AWS::Flow::Workflows
+        workflow :start do
+          {
+            version: "1.0",
+            default_task_list: "foo",
+            default_execution_start_to_close_timeout: 60
+          }
+        end
+      end
+      AWS::Flow::WorkflowWorker.new(@domain.client, @domain, nil, StartWorkflowTest).register
+
+    end
+
+    it "starts a regular workflow correctly" do
+      options = {
+        version: "1.0",
+        domain: @domain.name,
+        execution_start_to_close_timeout: 100,
+        tag_list: ["Test1"]
+      }
+      AWS::Flow::start_workflow("StartWorkflowTest.start", "some input", options)
+
+      until @domain.workflow_executions.count.count > 0
+        sleep 2
+      end
+
+      @domain.workflow_executions.tagged("Test1").each do |x|
+        x.execution_start_to_close_timeout.should == 100
+        x.workflow_type.name.should == "StartWorkflowTest.start"
+        x.workflow_type.version.should == "1.0"
+
+        data_converter = FlowConstants.defaults[:data_converter]
+        input = data_converter.load(x.events.first.attributes[:input]).first
+        input.should == "some input"
+
+        x.terminate
+      end
+
+    end
+
+    it "starts a workflow with type passed in through options" do
+      options = {
+        version: "1.0",
+        prefix_name: "StartWorkflowTest",
+        execution_method: "start",
+        domain: @domain.name,
+        execution_start_to_close_timeout: 100,
+        tag_list: ["Test2"]
+      }
+      AWS::Flow::start_workflow(nil, "some input", options)
+
+      until @domain.workflow_executions.count.count > 0
+        sleep 2
+      end
+
+      @domain.workflow_executions.tagged("Test2").each do |x|
+        x.execution_start_to_close_timeout.should == 100
+        x.workflow_type.name.should == "StartWorkflowTest.start"
+        x.workflow_type.version.should == "1.0"
+
+        data_converter = FlowConstants.defaults[:data_converter]
+        input = data_converter.load(x.events.first.attributes[:input]).first
+        input.should == "some input"
+
+        x.terminate
+      end
+
+    end
+
+    it "starts workflow with from_options option correctly" do
+      options = {
+        from_class: "StartWorkflowTest",
+        domain: @domain.name,
+        tag_list: ["Test3"]
+      }
+
+      AWS::Flow::start_workflow(nil, "some input", options)
+
+      until @domain.workflow_executions.count.count > 0
+
+        sleep 2
+      end
+
+      @domain.workflow_executions.tagged("Test3").each do |x|
+        x.execution_start_to_close_timeout.should == 60
+        x.workflow_type.name.should == "StartWorkflowTest.start"
+        x.workflow_type.version.should == "1.0"
+
+        data_converter = FlowConstants.defaults[:data_converter]
+        input = data_converter.load(x.events.first.attributes[:input]).first
+        input.should == "some input"
+
+        x.terminate
+      end
+
+    end
+
+  end
+
+end
```
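The `#start_workflow` specs above exercise three ways of naming the workflow type. A sketch of the three call styles, mirroring the spec's `StartWorkflowTest` class; the class, domain, and task-list names are placeholders:

```ruby
require 'aws/decider'

# Placeholder workflow class, analogous to StartWorkflowTest in the spec.
class MyWorkflow
  extend AWS::Flow::Workflows

  workflow :start do
    {
      version: "1.0",
      default_task_list: "my_task_list",
      default_execution_start_to_close_timeout: 60
    }
  end
end

# 1. Name the type as "Class.method"; other options passed explicitly.
AWS::Flow::start_workflow("MyWorkflow.start", "some input",
  domain: "TestDomain", version: "1.0",
  execution_start_to_close_timeout: 100)

# 2. Pass the type through options instead of the first argument.
AWS::Flow::start_workflow(nil, "some input",
  domain: "TestDomain", prefix_name: "MyWorkflow",
  execution_method: "start", version: "1.0",
  execution_start_to_close_timeout: 100)

# 3. Derive name, version, and defaults from the registered class.
AWS::Flow::start_workflow(nil, "some input",
  domain: "TestDomain", from_class: "MyWorkflow")
```
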
data/spec/aws/decider/unit/data_converter_spec.rb

```diff
@@ -0,0 +1,276 @@
+require_relative 'setup'
+
+describe YAMLDataConverter do
+
+  let(:converter) {YAMLDataConverter.new}
+
+  %w{syck psych}.each do |engine|
+    describe "ensures that x == load(dump(x)) is true using #{engine}" do
+      before :all do
+        YAML::ENGINE.yamler = engine
+      end
+
+      {
+        Fixnum => 5,
+        String => "Hello World",
+        Hash => {:test => "good"},
+        Array => ["Hello", "World", 5],
+        Symbol => :test,
+        NilClass => nil,
+      }.each_pair do |klass, exemplar|
+        it "tests #{klass}" do
+          1.upto(10).each do |i|
+            converted_exemplar = exemplar
+            i.times {converted_exemplar = converter.dump converted_exemplar}
+            i.times {converted_exemplar = converter.load converted_exemplar}
+            converted_exemplar.should == exemplar
+          end
+        end
+      end
+
+      it 'loads exception backtraces correctly' do
+        exemplar = Exception.new('exception')
+        exemplar.set_backtrace(caller)
+        converted_exemplar = converter.load(converter.dump(exemplar))
+        converted_exemplar.should == exemplar
+      end
+    end
+  end
+end
+
+describe S3DataConverter do
+
+  before(:all) do
+    @bucket = ENV['AWS_SWF_BUCKET_NAME']
+  end
+  after(:all) do
+    if @bucket
+      ENV['AWS_SWF_BUCKET_NAME'] = @bucket
+    else
+      ENV.delete('AWS_SWF_BUCKET_NAME')
+    end
+  end
+
+  let(:obj) { double }
+
+  before(:each) do
+    S3DataConverter.conv = nil
+    allow(AWS::S3).to receive(:new).and_return(obj)
+    allow(obj).to receive(:buckets).and_return(obj)
+    allow(obj).to receive(:[]).and_return(obj)
+    allow(obj).to receive(:exists?).and_return(true)
+  end
+
+  it "should not be used when AWS_SWF_BUCKET_NAME ENV variable is not set" do
+    ENV['AWS_SWF_BUCKET_NAME'] = nil
+    FlowConstants.data_converter.should be_kind_of(YAMLDataConverter)
+  end
+
+  it "should be used when AWS_SWF_BUCKET_NAME ENV variable is set" do
+    ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+    FlowConstants.data_converter.should be_kind_of(S3DataConverter)
+  end
+
+  it "uses YAMLDataConverter internally" do
+    ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+    FlowConstants.data_converter.converter.should be_kind_of(YAMLDataConverter)
+  end
+
+  context "#put_to_s3" do
+
+    it "writes string to s3" do
+      ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+      allow(obj).to receive(:objects).and_return(obj)
+      allow(obj).to receive(:create) do |filename, string|
+        string.should == "foo"
+      end
+
+      converter = FlowConstants.data_converter
+      converter.send(:put_to_s3, "foo")
+    end
+  end
+
+  context "#get_from_s3" do
+
+    it "reads data from s3" do
+      ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+      allow(obj).to receive(:objects).at_least(:once).and_return(obj)
+      allow(obj).to receive(:[]).and_return(obj)
+      allow(obj).to receive(:read).and_return("foo")
+
+      converter = FlowConstants.data_converter
+      converter.send(:get_from_s3, "foo_filename").should == "foo"
+    end
+
+  end
+
+  context "#dump, #load" do
+
+    it "dumps and loads regular sized input correctly" do
+      ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+      expect_any_instance_of(S3DataConverter).not_to receive(:put_to_s3)
+      converter = S3DataConverter.converter
+      list = {
+        input: "asdf",
+        output: "ddd",
+        test: 123,
+      }
+      s3_link = converter.dump(list)
+      converter.load(s3_link).should == list
+    end
+
+    it "dumps large input correctly" do
+      ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+      expect_any_instance_of(S3DataConverter).to receive(:put_to_s3) do |str|
+        conv = YAMLDataConverter.new
+        conv.load(str).should include(
+          input: "asdf",
+          test: "a"*33000
+        )
+      end
+      converter = S3DataConverter.converter
+      list = {
+        input: "asdf",
+        test: "a"*33000,
+      }
+      converter.dump(list)
+    end
+
+    it "loads large input correctly" do
+      ENV['AWS_SWF_BUCKET_NAME'] = 'foo'
+      list = {
+        input: "asdf",
+        test: "a"*33000,
+      }
+      expect_any_instance_of(S3DataConverter).to receive(:get_from_s3) do |filename|
+        YAMLDataConverter.new.dump(list)
+      end
+      converter = S3DataConverter.converter
+      filename = YAMLDataConverter.new.dump(s3_filename: "foo")
+      converter.load(filename).should == list
+    end
+  end
+
+  context "#cache" do
+
+    context "#write" do
+
+      it "ensures basic cache read/write works" do
+
+        converter = S3DataConverter.converter
+        msg = "a"*33000
+
+        allow(obj).to receive(:objects).and_return(obj)
+        allow(obj).to receive(:create)
+
+        s3_link = converter.dump(msg)
+        key = YAMLDataConverter.new.load(s3_link)
+
+        converter.cache[key[:s3_filename]].should_not be_nil
+        converter.cache[key[:s3_filename]].should == YAMLDataConverter.new.dump(msg)
+
+        data = converter.load(s3_link)
+        data.should == msg
+
+      end
+
+      it "ensures eviction" do
+
+        converter = S3DataConverter.converter
+        msg = "a"*33000
+
+        allow(obj).to receive(:objects).and_return(obj)
+        allow(obj).to receive(:create)
+
+        first = YAMLDataConverter.new.load(converter.dump(msg))
+
+        # Add 1000 more entries to evict the first one
+        (1..1000).each { |x| converter.dump(msg) }
+
+        # Ensure cache doesn't contain the first entry
+        converter.cache[first[:s3_filename]].should be_nil
+
+      end
+
+    end
+
+    context "#hit" do
+
+      it "returns the entry and doesn't call S3" do
+
+        converter = S3DataConverter.converter
+        msg = "a"*33000
+
+        allow(obj).to receive(:objects).and_return(obj)
+        allow(obj).to receive(:create)
+
+        s3_link = converter.dump(msg)
+
+        # The following line confirms the file is not read from S3.
+        expect(obj).not_to receive(:read)
+
+        # Ensure the entry is correct
+        converter.load(s3_link).should == msg
+
+      end
+
+      it "ensures lru behavior of cache" do
+
+        converter = S3DataConverter.converter
+        msg = "a"*33000
+
+        allow(obj).to receive(:objects).and_return(obj)
+        allow(obj).to receive(:create)
+
+        first = YAMLDataConverter.new.load(converter.dump(msg))
+
+        (1..999).each { |x| converter.dump(msg) }
+
+        # Use the first entry to bring it at the front of the queue
+        converter.cache[first[:s3_filename]]
+
+        # Add a few more entries to the cache
+        converter.dump(msg)
+        converter.dump(msg)
+
+        # Ensure cache still contains the entry
+        converter.cache[first[:s3_filename]].should_not be_nil
+
+      end
+
+    end
+
+    context "#miss" do
+
+      it "calls S3 to get the object and adds it to the cache" do
+        converter = S3DataConverter.converter
+        s3_link = { s3_filename: "foo" }
+        s3_link = YAMLDataConverter.new.dump(s3_link)
+
+        # This following 2 lines confirm that we call S3 in case of a cache miss
+        allow(obj).to receive(:objects).and_return(obj)
+        expect(obj).to receive(:read).and_return("bar")
+
+        # Expect the cache to get populated with a new entry
+        expect(converter.cache).to receive(:[]=).with("foo", "bar")
+
+        ret = converter.load(s3_link)
+      end
+
+    end
+
+    it "tests max size" do
+      converter = S3DataConverter.converter
+      msg = "a"*33000
+
+      allow(obj).to receive(:objects).and_return(obj)
+      allow(obj).to receive(:create)
+
+      (1..1010).each { |x| converter.dump(msg) }
+
+      converter.cache.cache.to_a.size.should == 1000
+      converter.cache.cache.clear
+    end
+
+  end
+end
```
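
The `S3DataConverter` specs above use 33,000-character payloads, just over SWF's 32,768-character input limit, to force the offload path: `dump` uploads the serialized payload to S3 and returns a YAML-encoded `{ s3_filename: ... }` link, and `load` resolves that link through a bounded cache of 1,000 entries with least-recently-used eviction. A toy LRU cache illustrating the behavior the `#cache` specs assert; this is an illustration, not the gem's actual implementation:

```ruby
# Minimal LRU cache sketch: bounded size, oldest-entry eviction,
# and reads refreshing an entry's recency.
class SimpleLruCache
  def initialize(max_size)
    @max_size = max_size
    @store = {}  # Ruby hashes preserve insertion order
  end

  def [](key)
    return nil unless @store.key?(key)
    value = @store.delete(key)
    @store[key] = value  # re-insert to mark as most recently used
  end

  def []=(key, value)
    @store.delete(key)
    @store[key] = value
    @store.shift if @store.size > @max_size  # evict the oldest entry
  end

  def size
    @store.size
  end
end

cache = SimpleLruCache.new(1000)
1010.times { |i| cache["file_#{i}"] = "payload" }
cache.size       # => 1000; the ten oldest entries were evicted
cache["file_0"]  # => nil (evicted), matching the "ensures eviction" spec
```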