aws-flow 1.3.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +15 -0
- data/aws-flow.gemspec +1 -0
- data/lib/aws/decider/activity.rb +8 -6
- data/lib/aws/decider/async_decider.rb +1 -0
- data/lib/aws/decider/async_retrying_executor.rb +3 -3
- data/lib/aws/decider/decider.rb +16 -14
- data/lib/aws/decider/executor.rb +35 -22
- data/lib/aws/decider/flow_defaults.rb +28 -14
- data/lib/aws/decider/generic_client.rb +3 -4
- data/lib/aws/decider/options.rb +91 -117
- data/lib/aws/decider/state_machines.rb +1 -0
- data/lib/aws/decider/utilities.rb +15 -0
- data/lib/aws/decider/version.rb +1 -1
- data/lib/aws/decider/worker.rb +14 -8
- data/lib/aws/decider/workflow_client.rb +16 -11
- data/lib/aws/runner.rb +43 -39
- data/spec/aws/decider/integration/activity_spec.rb +345 -0
- data/spec/aws/{integration → decider/integration}/integration_spec.rb +818 -1183
- data/spec/aws/decider/integration/setup.rb +3 -0
- data/spec/aws/decider/unit/activity_spec.rb +233 -0
- data/spec/aws/decider/unit/async_retrying_executor_spec.rb +131 -0
- data/spec/aws/{unit → decider/unit}/decider_spec.rb +171 -718
- data/spec/aws/decider/unit/executor_spec.rb +123 -0
- data/spec/aws/decider/unit/flow_defaults_spec.rb +62 -0
- data/spec/aws/decider/unit/misc_spec.rb +101 -0
- data/spec/aws/decider/unit/options_spec.rb +289 -0
- data/spec/aws/decider/unit/retry_spec.rb +217 -0
- data/spec/aws/{unit → decider/unit}/rubyflow.rb +0 -0
- data/spec/aws/decider/unit/setup.rb +3 -0
- data/spec/aws/decider/unit/worker_spec.rb +325 -0
- data/spec/aws/decider/unit/workflow_client_spec.rb +83 -0
- data/spec/aws/{unit → flow}/async_backtrace_spec.rb +0 -0
- data/spec/aws/{unit → flow}/async_scope_spec.rb +0 -0
- data/spec/aws/{unit → flow}/begin_rescue_ensure_spec.rb +1 -0
- data/spec/aws/{unit → flow}/external_task_spec.rb +0 -0
- data/spec/aws/{unit → flow}/factories.rb +0 -0
- data/spec/aws/{unit → flow}/fiber_condition_variable_spec.rb +0 -0
- data/spec/aws/{unit → flow}/fiber_spec.rb +0 -0
- data/spec/aws/{unit → flow}/flow_spec.rb +0 -0
- data/spec/aws/{unit → flow}/future_spec.rb +0 -0
- data/spec/aws/{unit → flow}/simple_dfa_spec.rb +0 -0
- data/spec/aws/{integration → runner/integration}/runner_integration_spec.rb +16 -43
- data/spec/aws/{unit → runner/unit}/runner_unit_spec.rb +18 -18
- data/spec/spec_helper.rb +264 -2
- metadata +37 -28
- data/spec/aws/unit/executor_spec.rb +0 -49
- data/spec/aws/unit/options_spec.rb +0 -293
- data/spec/aws/unit/preinclude_tests.rb +0 -149
@@ -334,6 +334,7 @@ module AWS
            :tag_list => @attributes[:tag_list]
          }
        }
+       result[:start_child_workflow_execution_decision_attributes].delete(:task_list) if options.task_list.nil?
        #TODO Figure out what control is
        to_add = options.get_options([:execution_start_to_close_timeout, :task_start_to_close_timeout, :child_policy, :tag_list, :input])
        result[attribute_name].merge!(to_add)
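The added guard strips `:task_list` from the child-workflow decision attributes when no task list was set, so SWF falls back to the type's registered default instead of receiving an explicit nil. A minimal generic sketch of that pattern (the hash here is illustrative, not the gem's real decision structure):

```ruby
# Drop the key entirely rather than sending an explicit nil,
# so the service-side default applies.
attrs = { :task_list => nil, :tag_list => ["t1"] }
attrs.delete(:task_list) if attrs[:task_list].nil?
p attrs # => {:tag_list=>["t1"]}
```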
data/lib/aws/decider/utilities.rb
CHANGED
@@ -58,6 +58,21 @@ module AWS
       youngest.get_full_options
     end

+    # @api private
+    def self.client_options_from_method_name(method_name, options)
+      client_options = options.dup
+      if method_name.nil?
+        client_options.precursors = options.precursors.empty? ? [] : [options.precursors.first]
+      else
+        client_options.precursors = options.precursors.select { |x| x.name.split(".").last.to_sym == method_name }
+      end
+
+      unless options.precursors.empty?
+        client_options.precursors.map!(&:options)
+      end
+      client_options
+    end
+

     # @api private
     def self.interpret_block_for_options(option_class, block, use_defaults = false)
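The new helper filters a client's option precursors by execution method: given a method name it keeps only the precursors whose name ends in that method; given none it falls back to the first precursor. A standalone sketch of the selection rule, using a Struct as a stand-in for the gem's option objects:

```ruby
# Stand-in for the gem's option precursor objects (illustrative only).
Precursor = Struct.new(:name, :options)

precursors = [
  Precursor.new("MyWorkflow.start",     { version: "1.0" }),
  Precursor.new("MyWorkflow.reprocess", { version: "2.0" }),
]

# With a method name: keep precursors whose trailing ".method" segment matches.
selected = precursors.select { |x| x.name.split(".").last.to_sym == :reprocess }
p selected.map(&:options) # => [{:version=>"2.0"}]

# Without a method name: fall back to the first precursor, or none at all.
fallback = precursors.empty? ? [] : [precursors.first]
p fallback.map(&:options) # => [{:version=>"1.0"}]
```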
data/lib/aws/decider/version.rb
CHANGED
data/lib/aws/decider/worker.rb
CHANGED
@@ -40,7 +40,6 @@ module AWS
       @service = service
       @domain = domain
       @task_list = task_list_to_poll
-      @options = Utilities::interpret_block_for_options(WorkerOptions, block)
       if args
         args.each { |klass_or_instance| add_implementation(klass_or_instance) }
       end
@@ -65,6 +64,10 @@ module AWS
         downcase
       end

+      def resolve_default_task_list(name)
+        name == FlowConstants.use_worker_task_list ? @task_list : name
+      end
+
     end

     module GenericTypeModule
@@ -137,7 +140,7 @@ module AWS
     end

     def set_workflow_implementation_types(workflow_implementation_types)
-      workflow_implementation_types.each {|type|
+      workflow_implementation_types.each {|type| add_workflow_implementation(type)}
     end

     def add_implementation(workflow_class)
@@ -154,7 +157,7 @@ module AWS
       options = workflow_type.options
       execution_method = options.execution_method
       version = workflow_type.version
-      registration_options =
+      registration_options = options.get_registration_options
       implementation_options = nil
       get_state_method = workflow_class.get_state_method
       signals = workflow_class.signals
@@ -181,9 +184,10 @@ module AWS
           :version => version
         }
       )
+
       if options.default_task_list
         workflow_hash.merge!(
-
+          :default_task_list => {:name => resolve_default_task_list(options.default_task_list)}
         )
       end
       @workflow_type_options << workflow_hash
@@ -378,11 +382,13 @@ module AWS
         :version => activity_type.version
       }

-      option_hash.merge!(options.
+      option_hash.merge!(options.get_registration_options)

-
-
-
+      if options.default_task_list
+        option_hash.merge!(
+          :default_task_list => {:name => resolve_default_task_list(options.default_task_list)}
+        )
+      end

       @activity_type_options << option_hash
     end
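`resolve_default_task_list` lets a type declare "use whatever task list the worker polls on": if the declared default equals the `FlowConstants.use_worker_task_list` sentinel, registration substitutes the worker's own task list. A minimal sketch of the substitution (the sentinel string below is assumed for illustration; the real value lives in flow_defaults.rb):

```ruby
# Assumed sentinel for illustration; the gem reads it from
# FlowConstants.use_worker_task_list.
USE_WORKER_TASK_LIST = "USE_WORKER_TASK_LIST"

def resolve_default_task_list(worker_task_list, name)
  name == USE_WORKER_TASK_LIST ? worker_task_list : name
end

puts resolve_default_task_list("prod_tasks", USE_WORKER_TASK_LIST) # => prod_tasks
puts resolve_default_task_list("prod_tasks", "custom_tasks")       # => custom_tasks
```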
data/lib/aws/decider/workflow_client.rb
CHANGED
@@ -189,10 +189,14 @@ module AWS
     # A hash of {StartWorkflowOptions} to use for this workflow execution.
     #
     def start_execution(*input, &block)
+      start_execution_method(nil, *input, &block)
+    end
+
+    def start_execution_method(method_name, *input, &block)
       if Utilities::is_external
-        self.start_external_workflow(input, &block)
+        self.start_external_workflow(method_name, input, &block)
       else
-        self.start_internal_workflow(input, &block)
+        self.start_internal_workflow(method_name, input, &block)
       end
     end

@@ -239,8 +243,6 @@ module AWS
       workflow_execution = workflow_execution.workflow_execution if workflow_execution.respond_to? :workflow_execution
       options.signal_name ||= signal_name.to_s
       options.workflow_id ||= workflow_execution.workflow_id.get.to_s
-      execution_method = options.execution_method || @options.execution_method
-      raise "You haven't specified an execution method!" if execution_method.nil?
       Utilities::merge_all_options(options)
       open_request = OpenRequestInfo.new
       decision_id = @decision_helper.get_next_id(:Signal)
@@ -264,13 +266,15 @@ module AWS

     # Called by {#start_execution}.
     # @api private
-    def start_internal_workflow(input = NoInput.new, &block)
+    def start_internal_workflow(method_name, input = NoInput.new, &block)
       get_decision_context
       options = Utilities::interpret_block_for_options(StartWorkflowOptions, block)
+      client_options = Utilities::client_options_from_method_name(method_name, @options)
+      options = Utilities::merge_all_options(client_options, options)
+
       workflow_id_future, run_id_future = Future.new, Future.new
       minimal_domain = MinimalDomain.new(@domain.name.to_s)
       output = WorkflowFuture.new(AWS::Flow::MinimalWorkflowExecution.new(minimal_domain, workflow_id_future, run_id_future))
-      options = Utilities::merge_all_options(@options, options)
       new_options = StartWorkflowOptions.new(options)
       open_request = OpenRequestInfo.new
       workflow_id = new_options.workflow_id
@@ -337,9 +341,11 @@ module AWS

     # Called by {#start_execution}.
     # @api private
-    def start_external_workflow(input = NoInput.new, &block)
+    def start_external_workflow(method_name, input = NoInput.new, &block)
       options = Utilities::interpret_block_for_options(StartWorkflowOptions, block)
-
+      client_options = Utilities::client_options_from_method_name(method_name, @options)
+      options = Utilities::merge_all_options(client_options, options)
+
       @converter ||= YAMLDataConverter.new
       # Basically, we want to avoid the special "NoInput, but allow stuff like nil in"
       if ! (input.class <= NoInput || input.empty?)
@@ -349,8 +355,7 @@ module AWS
         execution_method = @options.execution_method
         version = @options.version
       else
-
-        workflow_type = @workflow_class.workflows.first
+        workflow_type = method_name.nil? ? @workflow_class.workflows.first : @workflow_class.workflows.select { |x| x.options.execution_method.to_sym == method_name }.first
         execution_method = workflow_type.options.execution_method
         version = workflow_type.version
       end
@@ -381,7 +386,7 @@ module AWS

     def method_missing(method_name, *args, &block)
       if is_execution_method(method_name)
-
+        start_execution_method(method_name, *args, &block)
       else
         super(method_name, *args, &block)
       end
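Together with `method_missing`, these changes let a client start a specific execution method by name instead of always using the class's first declared workflow. A hypothetical usage sketch (class and names invented for illustration; a real service client, domain, and AWS credentials are assumed):

```ruby
require 'aws/decider'

# Hypothetical workflow class declaring two execution methods.
class MyWorkflows
  extend AWS::Flow::Workflows
  workflow :start, :reprocess do
    { version: "1.0", default_execution_start_to_close_timeout: 300 }
  end
  def start; end
  def reprocess; end
end

swf = AWS::SimpleWorkflow.new           # needs configured AWS credentials
domain = swf.domains['ExampleDomain']   # assumed existing domain

client = AWS::Flow::workflow_client(swf.client, domain) { { from_class: "MyWorkflows" } }

client.start_execution # uses the first declared workflow (:start)
client.reprocess       # method_missing routes to start_execution_method(:reprocess, ...)
```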
data/lib/aws/runner.rb
CHANGED
@@ -17,17 +17,14 @@ module AWS

   # Example of the format:
   # {
-  #   "
-  #
-  #
-  #
-  #
-  #   //, ... can add more
-  #   ],
+  #   "domain":
+  #     {
+  #       "name": <name_of_the_domain>,
+  #       "retention_in_days": <days>
+  #     }
   #   "activity_workers": [
   #
   #     {
-  #       "domain": <name_of_the_domain>,
   #       "task_list": <name_of_the_task_list>,
   #       "activity_classes": [ <name_of_class_containing_the_activities_to_be_worked_on> ],
   #       "number_of_workers": <number_of_activity_workers_to_spawn>,
@@ -37,7 +34,6 @@ module AWS
   #   ],
   #   "workflow_workers": [
   #     {
-  #       "domain": <name_of_the_domain>,
   #       "task_list": <name_of_the_task_list>,
   #       "workflow_classes": [ <name_of_class_containing_the_workflows_to_be_worked_on> ],
   #       "number_of_workers": <number_of_workflow_workers_to_spawn>
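Under the new layout the domain is configured once at the top level rather than repeated per worker. A minimal config in that shape (domain, task list, and class names invented for illustration), parsed here only to show it is well-formed JSON:

```ruby
require 'json'

config = JSON.parse(<<-CONFIG)
  {
    "domain": { "name": "ExampleDomain", "retention_in_days": 10 },
    "activity_workers": [
      { "task_list": "example_tasks", "activity_classes": ["ExampleActivities"], "number_of_workers": 2 }
    ],
    "workflow_workers": [
      { "task_list": "example_tasks", "workflow_classes": ["ExampleWorkflows"], "number_of_workers": 1 }
    ]
  }
CONFIG

puts config['domain']['name'] # => ExampleDomain
```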
@@ -56,19 +52,24 @@ module AWS
    # }


-    # registers the
-    def self.
+    # registers the domain if it is not
+    def self.setup_domain(json_config)

      swf = create_service_client(json_config)

-      json_config['
-
-
-
-
-
-
+      domain = json_config['domain']
+      # If retention period is not provided, default it to 7 days
+      retention = domain['retention_in_days'] || FlowConstants::RETENTION_DEFAULT
+
+      begin
+        swf.client.register_domain({
+          name: domain['name'],
+          workflow_execution_retention_period_in_days: retention.to_s
+        })
+      rescue AWS::SimpleWorkflow::Errors::DomainAlreadyExistsFault => e
+        # possible log an INFO/WARN if the domain already exists.
      end
+      return AWS::SimpleWorkflow::Domain.new( domain['name'] )
    end

    def self.set_process_name(name)
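`setup_domain` is idempotent: registration is attempted on every run and `DomainAlreadyExistsFault` is swallowed, so the runner no longer requires a pre-created domain. A hypothetical call, assuming the runner lives at `AWS::Flow::Runner` and AWS credentials are configured:

```ruby
# Retention omitted here, so FlowConstants::RETENTION_DEFAULT applies.
config = { 'domain' => { 'name' => 'ExampleDomain' } }
domain = AWS::Flow::Runner.setup_domain(config) # performs a live SWF call
puts domain.name # safe to re-run; an existing domain is not an error
```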
@@ -106,7 +107,8 @@ module AWS

    def self.spawn_and_start_workers(json_fragment, process_name, worker)
      workers = []
-      json_fragment['number_of_workers']
+      num_of_workers = json_fragment['number_of_workers'] || FlowConstants::NUM_OF_WORKERS_DEFAULT
+      num_of_workers.times do
        workers << fork do
          set_process_name(process_name)
          worker.start()
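`spawn_and_start_workers` now defaults the worker count when the config omits `number_of_workers`, then forks that many child processes. The underlying pattern, reduced to plain Ruby (helper name and worker body invented for illustration; Unix only):

```ruby
# Fork `count` child processes, run the given work in each, collect the pids.
def spawn_workers(count, name, &work)
  Array.new(count) do
    fork do
      $PROGRAM_NAME = name # same idea as the runner's set_process_name
      work.call
    end
  end
end

pids = spawn_workers(2, "example-worker") { sleep 0.1 }
pids.each { |pid| Process.wait(pid) }
```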
@@ -149,23 +151,25 @@ module AWS
      end
    end

-    def self.start_activity_workers(swf, config_path, json_config)
+    def self.start_activity_workers(swf, domain = nil, config_path, json_config)
      workers = []
      # load all classes for the activities
-      load_files(config_path, json_config, {:
-                   :
+      load_files(config_path, json_config, {config_key: 'activity_paths',
+                   default_file: File.join('flow', 'activities.rb')})
+      domain = setup_domain(json_config) if domain.nil?

      # TODO: logger
      # start the workers for each spec
      json_config['activity_workers'].each do |w|
-
-
+        # If number of forks is not provided, it will automatically default to 20
+        # within the ActivityWorker
+        fork_count = w['number_of_forks_per_worker']
        task_list = expand_task_list(w['task_list'])

        # create a worker
-        worker = ActivityWorker.new(swf.client, domain, task_list, *w['activities']) {{ :
-        add_implementations(worker, w, {:
-                              :
+        worker = ActivityWorker.new(swf.client, domain, task_list, *w['activities']) {{ max_workers: fork_count }}
+        add_implementations(worker, w, {config_key: 'activity_classes',
+                              clazz: AWS::Flow::Activities})

        # start as many workers as desired in child processes
        workers << spawn_and_start_workers(w, "activity-worker", worker)
@@ -174,22 +178,22 @@ module AWS
      return workers
    end

-    def self.start_workflow_workers(swf, config_path, json_config)
+    def self.start_workflow_workers(swf, domain = nil, config_path, json_config)
      workers = []
      # load all the classes for the workflows
-      load_files(config_path, json_config, {:
-                   :
+      load_files(config_path, json_config, {config_key: 'workflow_paths',
+                   default_file: File.join('flow', 'workflows.rb')})
+      domain = setup_domain(json_config) if domain.nil?

      # TODO: logger
      # start the workers for each spec
      json_config['workflow_workers'].each do |w|
-        domain = AWS::SimpleWorkflow::Domain.new( w['domain'] )
        task_list = expand_task_list(w['task_list'])

        # create a worker
        worker = WorkflowWorker.new(swf.client, domain, task_list, *w['workflows'])
-        add_implementations(worker, w, {:
-                              :
+        add_implementations(worker, w, {config_key: 'workflow_classes',
+                              clazz: AWS::Flow::Workflows})

        # start as many workers as desired in child processes
        workers << spawn_and_start_workers(w, "workflow-worker", worker)
@@ -201,7 +205,7 @@ module AWS
    def self.create_service_client(json_config)
      # set the UserAgent prefix for all clients
      if json_config['user_agent_prefix'] then
-        AWS.config(:
+        AWS.config(user_agent_prefix: json_config['user_agent_prefix'])
      end

      swf = AWS::SimpleWorkflow.new
@@ -211,14 +215,14 @@ module AWS
    # this will start all the workers and return an array of pids for the worker
    # processes
    #
-    def self.start_workers(config_path, json_config)
+    def self.start_workers(domain = nil, config_path, json_config)

      workers = []

      swf = create_service_client(json_config)

-      workers << start_activity_workers(swf, config_path, json_config)
-      workers << start_workflow_workers(swf, config_path, json_config)
+      workers << start_activity_workers(swf, domain, config_path, json_config)
+      workers << start_workflow_workers(swf, domain, config_path, json_config)

      # needed to avoid returning nested arrays based on the calls above
      workers.flatten!
@@ -279,8 +283,8 @@ module AWS
    config_path = options[:file]
    config = load_config_json( config_path )
    add_dir_to_load_path( Pathname.new(config_path).dirname )
-
-    workers = start_workers(config_path, config)
+    domain = setup_domain(config)
+    workers = start_workers(domain, config_path, config)
    setup_signal_handling(workers)

    # hang there until killed: this process is used to relay signals to children
data/spec/aws/decider/integration/activity_spec.rb
ADDED
@@ -0,0 +1,345 @@
+require_relative 'setup'
+
+describe Activities do
+  before(:all) do
+    @swf, @domain = setup_swf
+  end
+
+  it "ensures that a real activity will get scheduled" do
+    task_list = "activity_task_list"
+    class Blah
+      extend AWS::Flow::Activities
+    end
+    class BasicActivity
+      extend AWS::Flow::Activities
+
+      activity :run_activity1 do
+        {
+          default_task_heartbeat_timeout: 60,
+          version: "1",
+          default_task_list: "activity_task_list",
+          default_task_schedule_to_close_timeout: 60,
+          default_task_schedule_to_start_timeout: 30,
+          default_task_start_to_close_timeout: 30,
+        }
+      end
+      def run_activity1; end
+    end
+    class BasicWorkflow
+      extend AWS::Flow::Workflows
+      workflow :start do
+        {
+          version: "1.0",
+          default_task_start_to_close_timeout: 30,
+          default_execution_start_to_close_timeout: 300,
+          default_child_policy: "REQUEST_CANCEL",
+          default_task_list: "activity_task_list"
+        }
+      end
+      activity_client(:activity) { { from_class: "BasicActivity" } }
+      def start
+        activity.run_activity1
+      end
+    end
+
+    worker = WorkflowWorker.new(@domain.client, @domain, task_list, BasicWorkflow)
+    activity_worker = ActivityWorker.new(@domain.client, @domain, task_list, BasicActivity)
+    workflow_type_name = "BasicWorkflow.start"
+    worker.register
+    activity_worker.register
+    workflow_type, _ = @domain.workflow_types.page(:per_page => 1000).select {|x| x.name == workflow_type_name}
+
+    workflow_execution = workflow_type.start_execution
+
+    worker.run_once
+    activity_worker.run_once
+    worker.run_once
+    wait_for_execution(workflow_execution)
+
+    workflow_execution.events.map(&:event_type).should ==
+      ["WorkflowExecutionStarted", "DecisionTaskScheduled", "DecisionTaskStarted", "DecisionTaskCompleted", "ActivityTaskScheduled", "ActivityTaskStarted", "ActivityTaskCompleted", "DecisionTaskScheduled", "DecisionTaskStarted", "DecisionTaskCompleted", "WorkflowExecutionCompleted"]
+  end
+
+  it "tests to see what two activities look like" do
+
+    class DoubleActivity
+      extend AWS::Flow::Activities
+      activity :run_activity1, :run_activity2 do
+        {
+          default_task_heartbeat_timeout: 60,
+          version: "1.0",
+          default_task_list: "double_activity_task_list",
+          default_task_schedule_to_close_timeout: 60,
+          default_task_schedule_to_start_timeout: 30,
+          default_task_start_to_close_timeout: 30,
+          exponential_retry: {
+            retries_per_exception: {
+              ActivityTaskTimedOutException: Float::INFINITY
+            }
+          }
+        }
+      end
+      def run_activity1; end
+      def run_activity2; end
+    end
+
+    class DoubleWorkflow
+      extend AWS::Flow::Workflows
+      workflow(:start) do
+        {
+          version: "1.0",
+          default_task_start_to_close_timeout: 30,
+          default_execution_start_to_close_timeout: 300,
+          default_child_policy: "REQUEST_CANCEL",
+          default_task_list: "double_activity_task_list"
+        }
+      end
+      activity_client(:activity) { { from_class: "DoubleActivity" } }
+      def start
+        activity.send_async(:run_activity1)
+        activity.run_activity2
+      end
+    end
+
+    task_list = "double_activity_task_list"
+
+    worker = WorkflowWorker.new(@domain.client, @domain, task_list, DoubleWorkflow)
+    activity_worker = ActivityWorker.new(@domain.client, @domain, task_list, DoubleActivity)
+    workflow_type_name = "DoubleWorkflow.start"
+    worker.register
+    activity_worker.register
+    workflow_id = "basic_activity_workflow"
+
+    run_id = @swf.client.start_workflow_execution(
+      :workflow_type => {
+        :name => workflow_type_name,
+        :version => "1.0"
+      },
+      :workflow_id => workflow_id,
+      :domain => @domain.name.to_s
+    )
+    workflow_execution = AWS::SimpleWorkflow::WorkflowExecution.new(@domain, workflow_id, run_id["runId"])
+    @forking_executor = ForkingExecutor.new(:max_workers => 3)
+    @forking_executor.execute { worker.start }
+    @forking_executor.execute { activity_worker.start }
+    wait_for_execution(workflow_execution)
+
+    workflow_history = workflow_execution.events.map(&:event_type)
+    workflow_history.count("ActivityTaskCompleted").should == 2
+    workflow_history.count("WorkflowExecutionCompleted").should == 1
+  end
+
+  it "tests to see that two subsequent activities are supported" do
+    task_list = "subsequent_activity_task_list"
+    workflow_tasklist = "subsequent_workflow_task_list"
+    class SubsequentActivity
+      extend AWS::Flow::Activities
+
+      activity :run_activity1, :run_activity2 do
+        {
+          default_task_heartbeat_timeout: 300,
+          version: "1.2",
+          default_task_list: "subsequent_activity_task_list",
+          default_task_schedule_to_close_timeout: 60,
+          default_task_schedule_to_start_timeout: 30,
+          default_task_start_to_close_timeout: 30,
+        }
+      end
+      def run_activity1; end
+      def run_activity2; end
+    end
+    class SubsequentWorkflow
+      extend AWS::Flow::Workflows
+      workflow :start do
+        {
+          version: "1.2",
+          default_task_start_to_close_timeout: 30,
+          default_execution_start_to_close_timeout: 300,
+          default_child_policy: "REQUEST_CANCEL",
+          default_task_list: "subsequent_workflow_task_list"
+        }
+      end
+      activity_client(:activity) { { from_class: "SubsequentActivity" } }
+      def start
+        activity.run_activity1
+        activity.run_activity2
+      end
+    end
+
+    worker = WorkflowWorker.new(@domain.client, @domain, workflow_tasklist, SubsequentWorkflow)
+    activity_worker = ActivityWorker.new(@domain.client, @domain, task_list, SubsequentActivity)
+    worker.register
+    activity_worker.register
+
+    client = AWS::Flow::workflow_client(@domain.client, @domain) { { from_class: "SubsequentWorkflow" } }
+
+    workflow_execution = client.start_execution
+
+    @forking_executor = ForkingExecutor.new(:max_workers => 3)
+    @forking_executor.execute { worker.start }
+    @forking_executor.execute { activity_worker.start }
+
+    wait_for_execution(workflow_execution)
+    workflow_execution.events.map(&:event_type).count("WorkflowExecutionCompleted").should == 1
+  end
+
+  it "tests a much larger workflow" do
+    class LargeActivity
+      extend AWS::Flow::Activities
+
+      activity :run_activity1, :run_activity2, :run_activity3, :run_activity4 do
+        {
+          default_task_heartbeat_timeout: 60,
+          version: "1.0",
+          default_task_list: "large_activity_task_list",
+          default_task_schedule_to_close_timeout: 10,
+          default_task_schedule_to_start_timeout: 5,
+          default_task_start_to_close_timeout: 5,
+          exponential_retry: {
+            retries_per_exception: {
+              ActivityTaskTimedOutException => Float::INFINITY,
+            }
+          }
+        }
+      end
+      def run_activity1
+        "My name is Ozymandias - 1"
+      end
+      def run_activity2
+        "King of Kings! - 2 "
+      end
+      def run_activity3
+        "Look on my works, ye mighty - 3"
+      end
+      def run_activity4
+        "And Despair! - 4"
+      end
+    end
+
+    class LargeWorkflow
+      extend AWS::Flow::Workflows
+      workflow :start do
+        {
+          version: "1.0",
+          default_task_start_to_close_timeout: 30,
+          default_execution_start_to_close_timeout: 300,
+          default_child_policy: "REQUEST_CANCEL",
+          default_task_list: "large_activity_task_list"
+        }
+      end
+      activity_client(:activity) { { from_class: "LargeActivity" } }
+      def start
+        activity.send_async(:run_activity1)
+        activity.send_async(:run_activity2)
+        activity.send_async(:run_activity3)
+        activity.run_activity4
+      end
+    end
+
+    task_list = "large_activity_task_list"
+    worker = WorkflowWorker.new(@swf.client, @domain, task_list, LargeWorkflow)
+    activity_worker = ActivityWorker.new(@swf.client, @domain, task_list, LargeActivity)
+    worker.register
+    activity_worker.register
+
+    workflow_type_name = "LargeWorkflow.start"
+    workflow_type, _ = @domain.workflow_types.page(:per_page => 1000).select {|x| x.name == workflow_type_name}
+
+    workflow_execution = workflow_type.start_execution
+
+    @forking_executor = ForkingExecutor.new(:max_workers => 5)
+    @forking_executor.execute { activity_worker.start }
+
+    @forking_executor.execute { worker.start }
+
+    wait_for_execution(workflow_execution)
+
+    @forking_executor.shutdown(1)
+    workflow_history = workflow_execution.events.map(&:event_type)
+    workflow_history.count("WorkflowExecutionCompleted").should == 1
+    workflow_history.count("ActivityTaskCompleted").should == 4
+  end
+
+  context "Github issue 57" do
+
+    before(:all) do
+
+      class GithubIssue57TestActivity
+        extend AWS::Flow::Activities
+        activity :not_retryable do
+          {
+            version: 1.0,
+            default_task_list: "github_57_activity_tasklist",
+            default_task_schedule_to_start_timeout: 30,
+            default_task_start_to_close_timeout: 30,
+          }
+        end
+
+        activity :retryable do
+          {
+            version: 1.0,
+            default_task_list: "github_57_activity_tasklist",
+            default_task_schedule_to_start_timeout: 30,
+            default_task_start_to_close_timeout: 30,
+            exponential_retry: {
+              maximum_attempts: 3,
+            },
+          }
+        end
+
+        def not_retryable
+          raise 'blah'
+        end
+        def retryable
+          raise 'asdf'
+        end
+      end
+
+      class GithubIssue57TestWorkflow
+        extend AWS::Flow::Workflows
+
+        workflow :test do
+          {
+            version: 1.0,
+            default_task_list: "github_57_workflow_tasklist",
+            default_execution_start_to_close_timeout: 300,
+            default_task_start_to_close_timeout: 30
+          }
+        end
+
+        activity_client(:client) { { from_class: "GithubIssue57TestActivity" } }
+
+        def test
+          client.not_retryable
+        end
+      end
+    end
+
+
+    it "ensures _options method returns an array of type ActivityType" do
+      GithubIssue57TestActivity._options.size == 2
+      GithubIssue57TestActivity._options.each { |x| x.should be_an ActivityType }
+    end
+
+    it "ensures the activity gets set with the right options" do
+      worker = WorkflowWorker.new(@domain.client, @domain, "github_57_workflow_tasklist", GithubIssue57TestWorkflow)
+      activity_worker = ActivityWorker.new(@domain.client, @domain, "github_57_activity_tasklist", GithubIssue57TestActivity)
+      worker.register
+      activity_worker.register
+      client = workflow_client(@domain.client, @domain) { { from_class: GithubIssue57TestWorkflow } }
+
+      workflow_execution = client.start_execution
+
+      @forking_executor = ForkingExecutor.new(:max_workers => 3)
+      @forking_executor.execute { worker.start }
+      @forking_executor.execute { activity_worker.start }
+
+      wait_for_execution(workflow_execution)
+
+      history_events = workflow_execution.events.map(&:event_type)
+      history_events.last.should == "WorkflowExecutionFailed"
+      history_events.count("ActivityTaskFailed").should == 1
+    end
+  end
+end
+