aws-flow 1.0.6 → 1.0.7

data/Gemfile CHANGED
@@ -5,4 +5,5 @@ gemspec
  group :test do
  gem "rspec", "1.3.0"
  gem "rake"
+ gem "factory_girl"
  end
@@ -77,7 +77,12 @@ module AWS
  ensure
  @instance._activity_execution_context = nil
  end
- return @converter.dump result
+ converted_result = @converter.dump(result)
+ # We are going to have to convert this object into a string to submit it, and that's where the 32k limit will be enforced, so it's valid to turn the object to a string and check the size of the result
+ if converted_result.to_s.size > 32768
+ return @converter.dump("The result was too large, so we could not serialize it correctly. You can find the full result in the ActivityTaskPoller logs."), result, true
+ end
+ return converted_result, result, false
  end

  end
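For context, ActivityDefinition#execute now returns three values (the serialized result, the original result object, and a too-large flag) instead of the dumped string alone. A minimal sketch of how a caller might consume that triple; the local names definition, service, token, and logger are illustrative assumptions, not the gem's API:

    converted, original, too_large = definition.execute(input, context)
    if too_large
      # `converted` is only a short placeholder message; the full value survives in `original`
      logger.warn "Activity result exceeded 32768 characters, failing the task instead"
    else
      service.respond_activity_task_completed(:task_token => token, :result => converted)
    end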
@@ -85,10 +85,13 @@ module AWS
  result[:return_on_start] = true
  result
  end
- # Otherwise, it will expect an options object passed in, and will do things on that object. So make our new Proc do that, and add an option
+ # Otherwise, it will expect an options object passed in, and will do
+ # things on that object. So make our new Proc do that, and add an
+ # option
  else modified_options = Proc.new do |x|
  result = block.call(x)
- # We need to copy the hash to make sure that we don't mutate it
+ # Same as the above dup, we'll copy to avoid any possible mutation
+ # of inputted objects
  result = result.dup
  result.return_on_start = true
  result
@@ -72,9 +72,10 @@ module AWS
  end
  @service.respond_decision_task_completed(task_completed_request)
  rescue AWS::SimpleWorkflow::Errors::UnknownResourceFault => e
- # Log stuff
  @logger.debug "Error in the poller, #{e}"
  @logger.debug "The error class in #{e.class}"
+ rescue Exception => e
+ @logger.debug "Error in the poller, #{e}"
  end
  end
  end
@@ -97,16 +98,18 @@ module AWS
  context = ActivityExecutionContext.new(@service, @domain, task)
  activity_implementation = @activity_definition_map[activity_type]
  raise "This activity worker was told to work on activity type #{activity_type.name}, but this activity worker only knows how to work on #{@activity_definition_map.keys.map(&:name).join' '}" unless activity_implementation
- output = activity_implementation.execute(task.input, context)
+ output, original_result, too_large = activity_implementation.execute(task.input, context)
  @logger.debug "Responding on task_token #{task.task_token} for task #{task}"
- if output.length > 32768
- output = output.slice(0..32767)
- respond_activity_task_failed_with_retry(task.task_token, "You cannot send a response with a result greater thank 32768. Please reduce the response size. The first part of the output is included in the details field.", output )
+ if too_large
+ @logger.warn "The output of this activity was too large (greater than 2^15), and therefore aws-flow could not return it to SWF. aws-flow is now attempting to mark this activity as failed. For reference, the result was #{original_result}"
+ respond_activity_task_failed_with_retry(task.task_token, "An activity cannot send a response with a result larger than 32768 characters. Please reduce the response size. A truncated prefix output is included in the details field.", output)
  elsif ! activity_implementation.execution_options.manual_completion
  @service.respond_activity_task_completed(:task_token => task.task_token, :result => output)
  end
  rescue ActivityFailureException => e
+ @logger.debug "The activity failed, with original output of #{original_result} and dataconverted result of #{output}. aws-flow will now attempt to fail it."
  respond_activity_task_failed_with_retry(task.task_token, e.message, e.details)
+
  end
  #TODO all the completion stuffs
  end
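The reason the poller no longer truncates and re-sends the oversized output: SWF caps both the completion result and the failure details, so the failure response itself has to stay small. A rough sketch of the intended flow; the constant name is an assumption, and definition/service stand in for the poller's activity definition and SWF client:

    MAX_SWF_PAYLOAD = 32_768  # SWF limit on result/details strings (assumed name)
    converted, original, too_large = definition.execute(task.input, context)
    if too_large
      reason  = "Result larger than #{MAX_SWF_PAYLOAD} characters; see the ActivityTaskPoller logs"
      details = converted.to_s.slice(0, MAX_SWF_PAYLOAD)   # keep the failure call itself under the cap
      respond_activity_task_failed_with_retry(task.task_token, reason, details)
    else
      service.respond_activity_task_completed(:task_token => task.task_token, :result => converted)
    end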
@@ -136,7 +139,10 @@ module AWS
  end

  def process_single_task(task)
- @service = AWS::SimpleWorkflow.new.client.with_http_handler(AWS::Core::Http::NetHttpHandler.new(AWS.config.to_h))
+ previous_config = @service.config.to_h
+ previous_config.delete(:http_handler)
+ @service = AWS::SimpleWorkflow.new(previous_config).client
+ @service = @service.with_http_handler(AWS::Core::Http::NetHttpHandler.new(previous_config))
  begin
  begin
  execute(task)
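The rebuilt client matters because each forked activity process needs its own HTTP connection, and because the worker may have been constructed against a non-default client (for example the secondary account used in the new spec further down). Instead of reaching for the global AWS.config, the code copies the current client's configuration, drops the old :http_handler so its sockets are not shared across the fork, and attaches a fresh NetHttpHandler. A condensed sketch of the same idea, assuming an aws-sdk v1 style client object named client:

    config = client.config.to_h
    config.delete(:http_handler)                      # don't reuse sockets across forked processes
    fresh  = AWS::SimpleWorkflow.new(config).client   # same credentials/region as the original client
    fresh  = fresh.with_http_handler(AWS::Core::Http::NetHttpHandler.new(config))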
@@ -39,7 +39,6 @@ module AWS
  future
  end

-
  # @!visibility private
  def self.merge_all_options(*args)
  args.compact!
@@ -16,7 +16,7 @@
  module AWS
  module Flow
  def self.version
- "1.0.6"
+ "1.0.7"
  end
  end
  end
@@ -42,6 +42,18 @@ module AWS
  @task_list = task_list_to_poll
  @options = Utilities::interpret_block_for_options(WorkerOptions, block)
  args.each { |klass_or_instance| add_implementation(klass_or_instance) } if args
+ @shutting_down = false
+ %w{ TERM INT }.each do |signal|
+ Signal.trap(signal) do
+ if @shutting_down
+ @executor.shutdown(0) if @executor
+ Kernel.exit! 1
+ else
+ @shutting_down = true
+ @shutdown_first_time_function.call if @shutdown_first_time_function
+ end
+ end
+ end
  end

  # @!visibility private
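The worker constructor above installs a two-stage handler for TERM and INT: the first signal flips @shutting_down and runs an optional first-time hook (subclasses such as ActivityWorker set @shutdown_first_time_function to drain their executor), while a second signal forces an immediate exit with a non-zero status. A standalone sketch of the same pattern outside the gem, with the hook reduced to a lambda:

    graceful = lambda { puts "finishing the current task, then exiting" }
    shutting_down = false
    %w{ TERM INT }.each do |signal|
      Signal.trap(signal) do
        if shutting_down   # second Ctrl-C: stop waiting and bail out hard
          Kernel.exit! 1
        else               # first Ctrl-C: remember we are stopping and run the drain hook once
          shutting_down = true
          graceful.call
        end
      end
    end
    sleep   # stand-in for the worker's polling loop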
@@ -113,6 +125,8 @@ module AWS
  @workflow_definition_map = {}
  @workflow_type_options = []
  super(service, domain, task_list, *args)
+
+
  end

  def set_workflow_implementation_types(workflow_implementation_types)
@@ -196,6 +210,7 @@ module AWS
  def run_once(should_register = false, poller = nil)
  register if should_register
  poller = WorkflowTaskPoller.new(@service, @domain, DecisionTaskHandler.new(@workflow_definition_map, @options), @task_list, @options) if poller.nil?
+ Kernel.exit if @shutting_down
  poller.poll_and_process_single_task
  end
  end
@@ -235,22 +250,11 @@ module AWS
  max_workers = @options.execution_workers if @options
  max_workers = 20 if (max_workers.nil? || max_workers.zero?)
  @executor = ForkingExecutor.new(:max_workers => max_workers, :logger => @logger)
- super(service, domain, task_list, *args)
-
- @shutting_down = false
- %w{ TERM INT }.each do |signal|
- Signal.trap(signal) do
- if @shutting_down
- @executor.shutdown 0
- Kernel.exit! 1
- else
- @shutting_down = true
- @executor.shutdown Float::INFINITY
- Kernel.exit
- end
- end
+ @shutdown_first_time_function = lambda do
+ @executor.shutdown Float::INFINITY
+ Kernel.exit
  end
-
+ super(service, domain, task_list, *args)
  end

  # Adds an Activity implementation to this ActivityWorker.
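Read together with the base-class trap above, ActivityWorker's hook gives the two signals different executor behavior: the first interrupt calls @executor.shutdown Float::INFINITY and exits normally, while a second interrupt goes through the base handler's @executor.shutdown(0) / Kernel.exit! 1 path. A compressed restatement of those two paths; the meaning of the numeric argument (how long to wait for forked activities) is inferred from this usage rather than documented here:

    executor = ForkingExecutor.new(:max_workers => 5)
    drain_then_exit = lambda { executor.shutdown Float::INFINITY; Kernel.exit }  # first signal: wait for children
    kill_now        = lambda { executor.shutdown 0; Kernel.exit! 1 }             # second signal: don't wait, fail fast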
@@ -60,6 +60,7 @@ class FakeDomain
  def page; FakePage.new(@workflow_type_object); end
  def workflow_executions; FakeWorkflowExecutionCollecton.new; end
  def name; "fake_domain"; end
+
  end


@@ -193,11 +194,11 @@ describe ActivityDefinition do
  end
  it "ensures that an activity definition can handle one argument" do
  activity_definition = ActivityDefinition.new(MyActivity.new, :test_one_argument, nil , nil, TrivialConverter.new)
- activity_definition.execute(5, nil).should == 5
+ activity_definition.execute(5, nil).first.should == 5
  end
  it "ensures that you can get the activity context " do
  activity_definition = ActivityDefinition.new(MyActivity.new, :test_getting_context, nil , nil, TrivialConverter.new)
- (activity_definition.execute(nil, ActivityExecutionContext.new(nil, nil, nil)).is_a? ActivityExecutionContext).should == true
+ (activity_definition.execute(nil, ActivityExecutionContext.new(nil, nil, nil)).first.is_a? ActivityExecutionContext).should == true
  end
  it "ensures that the activity context gets unset after the execute" do
  activity_definition = ActivityDefinition.new(MyActivity.new, :test_getting_context, nil , nil, TrivialConverter.new)
@@ -210,11 +211,12 @@ describe ActivityDefinition do
  end
  it "ensures that an activity definition can handle multiple arguments" do
  activity_definition = ActivityDefinition.new(MyActivity.new, :test_three_arguments, nil , nil, TrivialConverter.new)
- activity_definition.execute([1,2,3], nil).should == 6
+
+ activity_definition.execute([1,2,3], nil).first.should == 6
  end
  it "ensures that an activity definition can handle no arguments" do
  activity_definition = ActivityDefinition.new(MyActivity.new, :test_no_arguments, nil , nil, TrivialConverter.new)
- activity_definition.execute(nil, nil).should == :no_arguments
+ activity_definition.execute(nil, nil).first.should == :no_arguments
  end
  end

@@ -232,6 +234,7 @@ describe WorkflowDefinitionFactory do
  def multiple_arguments(arg1, arg2, arg3)
  arg3
  end
+
  end
  class WorkflowDefinition
  attr_accessor :decision_helper, :workflow_method, :converter
@@ -272,7 +275,7 @@ describe WorkflowDefinitionFactory do
  end
  end
  end
- p
+
  describe ForkingExecutor do
  it "makes sure that forking executors basic execute works" do
  test_file_name = "ForkingExecutorTestFile"
@@ -1310,7 +1313,6 @@ describe "Misc tests" do
  File.unlink(test_file_name)
  end

-
  it "ensures that using send_async doesn't mutate the original hash" do
  class GenericClientTest < GenericClient
  def call_options(*args, &options)
@@ -1370,7 +1372,18 @@ describe FlowConstants do
  end

  end
+ class TestWorkflow
+ extend Workflows
+ workflow :entry_point do
+ {
+ :execution_start_to_close_timeout => 30, :version => "1"
+ }
+ end
+ def entry_point

+ end
+
+ end
  class TestActivity
  extend Activity

@@ -1399,41 +1412,106 @@ class TestActivityWorker < ActivityWorker
  end
  end

+ class FakeTaskPoller < WorkflowTaskPoller
+ def get_decision_tasks
+ nil
+ end
+ end
+ def dumb_fib(n)
+ n < 1 ? 1 : dumb_fib(n - 1) + dumb_fib(n - 2)
+ end
+ describe WorkflowWorker do
+ it "will test whether WorkflowWorker shuts down cleanly when an interrupt is received" do
+ task_list = "TestWorkflow_tasklist"
+ service = FakeServiceClient.new
+ workflow_type_object = double("workflow_type", :name => "TestWorkflow.entry_point", :start_execution => "" )
+ domain = FakeDomain.new(workflow_type_object)
+ workflow_worker = WorkflowWorker.new(service, domain, task_list)
+ workflow_worker.add_workflow_implementation(TestWorkflow)
+ pid = fork do
+ loop do
+ workflow_worker.run_once(true, FakeTaskPoller.new(service, domain, nil, task_list, nil))
+ end
+ end
+ # Send an interrupt to the child process
+ Process.kill("INT", pid)
+ # Adding a sleep to let things get setup correctly (not ideal but going with
+ # this for now)
+ sleep 5
+ return_pid, status = Process.wait2(pid, Process::WNOHANG)
+ Process.kill("KILL", pid) if return_pid.nil?
+ return_pid.should_not be nil
+ status.success?.should be_true
+ end
+
+ it "will test whether WorkflowWorker dies cleanly when two interrupts are received" do
+ class FakeTaskPoller
+ def poll_and_process_single_task
+ dumb_fib(5000)
+ end
+ end
+ task_list = "TestWorkflow_tasklist"
+ service = FakeServiceClient.new
+ workflow_type_object = double("workflow_type", :name => "TestWorkflow.entry_point", :start_execution => "" )
+ domain = FakeDomain.new(workflow_type_object)
+ workflow_worker = WorkflowWorker.new(service, domain, task_list)
+ workflow_worker.add_workflow_implementation(TestWorkflow)
+ pid = fork do
+ loop do
+ workflow_worker.run_once(true, FakeTaskPoller.new(service, domain, nil, task_list, nil))
+ end
+ end
+ # Send an interrupt to the child process
+ sleep 3
+ 2.times { Process.kill("INT", pid); sleep 2 }
+ return_pid, status = Process.wait2(pid, Process::WNOHANG)
+
+ Process.kill("KILL", pid) if return_pid.nil?
+ return_pid.should_not be nil
+ status.success?.should be_false
+ end
+
+ end
  describe ActivityWorker do

- # it "will test whether the ActivityWorker shuts down cleanly when an interrupt is received" do
-
- # task_list = "TestWorkflow_tasklist"
- # service = FakeServiceClient.new
- # workflow_type_object = double("workflow_type", :name => "TestWorkflow.entry_point", :start_execution => "" )
- # domain = FakeDomain.new(workflow_type_object)
- # forking_executor = ForkingExecutor.new
- # activity_worker = TestActivityWorker.new(service, domain, task_list, forking_executor) { {:logger => FakeLogger.new} }
-
- # activity_worker.add_activities_implementation(TestActivity)
- # # Starts the activity worker in a forked process. Also, attaches an at_exit
- # # handler to the process. When the process exits, the handler checks whether
- # # the executor's internal is_shutdown variable is set correctly or not.
- # pid = fork do
- # at_exit {
- # activity_worker.executor.is_shutdown.should == true
- # }
- # activity_worker.start true
- # end
- # # Adding a sleep to let things get setup correctly (not ideal but going with
- # # this for now)
- # #sleep 1
- # # Send an interrupt to the child process
- # Process.kill("INT", pid)
- # status = Process.waitall
- # status[0][1].success?.should be_true
- # end
+ class FakeDomain
+ def activity_tasks
+ sleep 30
+ end
+ end
+ it "will test whether the ActivityWorker shuts down cleanly when an interrupt is received" do
+
+ task_list = "TestWorkflow_tasklist"
+ service = FakeServiceClient.new
+ workflow_type_object = double("workflow_type", :name => "TestWorkflow.entry_point", :start_execution => "" )
+ domain = FakeDomain.new(workflow_type_object)
+ forking_executor = ForkingExecutor.new
+ activity_worker = TestActivityWorker.new(service, domain, task_list, forking_executor) { {:logger => FakeLogger.new} }
+ activity_worker.add_activities_implementation(TestActivity)
+ # Starts the activity worker in a forked process. Also, attaches an at_exit
+ # handler to the process. When the process exits, the handler checks whether
+ # the executor's internal is_shutdown variable is set correctly or not.
+ pid = fork do
+ at_exit {
+ activity_worker.executor.is_shutdown.should == true
+ }
+ activity_worker.start true
+ end
+ # Send an interrupt to the child process
+ Process.kill("INT", pid)
+ # Adding a sleep to let things get setup correctly (not ideal but going with
+ # this for now)
+ sleep 5
+ return_pid, status = Process.wait2(pid, Process::WNOHANG)
+ Process.kill("KILL", pid) if return_pid.nil?
+ return_pid.should_not be nil
+
+ status.success?.should be_true
+ end

  # This method will take a long time to run, allowing us to test our shutdown
  # scenarios
- def dumb_fib(n)
- n < 1 ? 1 : dumb_fib(n - 1) + dumb_fib(n - 2)
- end
+

  it "will test whether the ActivityWorker shuts down immediately if two or more interrupts are received" do
  task_list = "TestWorkflow_tasklist"
@@ -13,10 +13,15 @@
  # permissions and limitations under the License.
  ##

+
  require 'aws/flow'
  include AWS::Flow::Core


+ require 'factory_girl'
+ require 'aws/flow'
+ include AWS::Flow::Core
+
  class FlowFactory

  attr_accessor :async_scope
@@ -42,33 +47,9 @@ class FlowFactory
  return @async_scope
  end

- end
-
- class WorkflowGenerator
- class << self
- def generate_workflow(domain, options = {})
-
- name = options[:name] || "default_name"
- version = options[:version] || "1"
- task_list = options[:task_list] || "default_task_list"
- child_policy = options[:child_policy] || :request_cancel
- task_start_to_close = options[:task_start_to_close] || 3600
- default_execution_timeout = options[:default_execution_timeout] || 24 * 3600
-
-
- target_workflow = domain.workflow_types.page.select { |x| x.name == name}
- if target_workflow.length == 0
- workflow_type = domain.workflow_types.create(name, version,
- :default_task_list => task_list,
- :default_child_policy => child_policy,
- :default_task_start_to_close_timeout => task_start_to_close,
- :default_execution_start_to_close_timeout => default_execution_timeout)
- else
- workflow_type = target_workflow.first
- end
+ def generate_daemon_task(options = {})
+ scope = generate_scope
+ task = DaemonTask.new

- return workflow_type
- end
  end
-
  end
@@ -43,17 +43,23 @@ def setup_swf
  File.open(file_name, 'w+') {|f| f.write(last_run)}
  current_date = Time.now.strftime("%d-%m-%Y")
  config_file = File.open('credentials.cfg') { |f| f.read }
- config = YAML.load(config_file).first
- AWS.config(config)
- swf = AWS::SimpleWorkflow.new
+ if config_file.include? ":Primary"
+ yaml_config = YAML.load(config_file)
+ swf = AWS::SimpleWorkflow.new(yaml_config[:Primary])
+ secondary_swf = AWS::SimpleWorkflow.new(yaml_config[:Secondary])
+ else
+ config = YAML.load(config_file).first
+ AWS.config(config)
+ swf = AWS::SimpleWorkflow.new
+ secondary_swf = nil
+ end
  $RUBYFLOW_DECIDER_DOMAIN = "rubyflow_decider_domain_#{current_date}-#{last_run}"
  begin
  domain = swf.domains.create($RUBYFLOW_DECIDER_DOMAIN, "10")
  rescue AWS::SimpleWorkflow::Errors::DomainAlreadyExistsFault => e
  domain = swf.domains[$RUBYFLOW_DECIDER_DOMAIN]
  end
-
- return swf, domain, $RUBYFLOW_DECIDER_DOMAIN
+ return swf, domain, $RUBYFLOW_DECIDER_DOMAIN, secondary_swf
  end


@@ -76,8 +82,8 @@ describe "RubyFlowDecider" do
  version "1"
  # TODO more of the stuff from the proposal
  end
- @swf, @domain, $RUBYFLOW_DECIDER_DOMAIN = setup_swf
- $swf, $domain = @swf, @domain
+ @swf, @domain, $RUBYFLOW_DECIDER_DOMAIN, @swf_secondary = setup_swf
+ $swf, $domain, $swf_secondary = @swf, @domain, @swf_secondary
  # If there are any outstanding decision tasks before we start the test, that
  # could really mess things up, and make the tests non-idempotent. So lets
  # clear those out
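setup_swf (changed a few hunks above) now returns a second client built from a :Secondary section of credentials.cfg, which this spec setup captures as @swf_secondary / $swf_secondary. A hedged sketch of the idea; the option keys inside each section, and how the YAML keys end up loading as the symbols :Primary/:Secondary, depend on the YAML engine and are assumptions here rather than something this diff specifies:

    require 'yaml'
    require 'aws-sdk'
    # Assumed shape: a hash with :Primary and :Secondary entries, each holding
    # options that AWS::SimpleWorkflow.new accepts (credentials, region, etc.).
    yaml_config = YAML.load(File.read('credentials.cfg'))
    primary_swf   = AWS::SimpleWorkflow.new(yaml_config[:Primary])
    secondary_swf = AWS::SimpleWorkflow.new(yaml_config[:Secondary])
    # The new spec below runs one ActivityWorker against the primary client and
    # another against secondary_swf.client to prove both configurations are honored.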
@@ -530,7 +536,7 @@ describe "RubyFlowDecider" do
  # Make sure we return something that's over 32k. Note this won't
  # necessarily work with all converters, as it's pretty trivially
  # compressible
- return "a" * 33000
+ return ":" + "a" * 33000
  end
  end
  workflow_execution = @my_workflow_client.start_execution
@@ -539,11 +545,61 @@ describe "RubyFlowDecider" do
  @worker.run_once
  history_events = workflow_execution.events.map(&:event_type)
  # Previously, it would time out, as the failure would include the original
- # large output that killed the completion call. Thus, we need to check that
- # we fail the ActivityTask correctly.
+ # large output that killed the completion and failure call. Thus, we need to
+ # check that we fail the ActivityTask.
  history_events.should include "ActivityTaskFailed"
+
+ workflow_execution.events.to_a.last.attributes.details.should_not =~ /Psych/
  history_events.last.should == "WorkflowExecutionFailed"
+ end
+
+ it "ensures that activities can be processed with different configurations" do
+ class TwoConfigActivity
+ extend Activities
+ activity :run_activity1 do
+ {
+ :default_task_heartbeat_timeout => "3600",
+ :default_task_list => "TwoConfigTaskList",
+ :default_task_schedule_to_start_timeout => 120,
+ :default_task_start_to_close_timeout => 120,
+ :version => "1",
+ }
+ end
+ def run_activity1
+ end
+ end

+ class TwoConfigWorkflow
+ extend Workflows
+ activity_client(:activity) { { :from_class => TwoConfigActivity }}
+ workflow :entry_point do
+ {
+ :version => 1,
+ :default_execution_start_to_close_timeout => 30,
+ :default_child_policy => "request_cancel",
+ :default_task_list => "TwoConfigTaskList"
+ }
+ end
+ def entry_point
+ activity.run_activity1
+ activity.run_activity1 { {:task_list => "other_config_task_list"} }
+ end
+ end
+ worker = WorkflowWorker.new(@swf.client, @domain, "TwoConfigTaskList", TwoConfigWorkflow)
+ activity_worker = ActivityWorker.new(@swf.client, @domain, "TwoConfigTaskList", TwoConfigActivity) {{ :use_forking => false }}
+ swf = AWS::SimpleWorkflow.new(access_key_id: "AWS_SWF_KEY", secret_access_key: "AWS_SWF_SECRET")
+ activity_worker_different_config = ActivityWorker.new($swf_secondary.client, @domain, "other_config_task_list", TwoConfigActivity) {{ :use_forking => false }}
+ my_workflow_client = workflow_client(@swf.client, @domain) {{:from_class => TwoConfigWorkflow}}
+
+ worker.register
+ activity_worker.register
+ workflow_execution = my_workflow_client.start_execution
+ worker.run_once
+ activity_worker.run_once
+ worker.run_once
+ activity_worker_different_config.run_once
+ worker.run_once
+ workflow_execution.events.map(&:event_type).last == "WorkflowExecutionCompleted"
  end

  it "ensures that not filling in details/reason for activity_task_failed is handled correctly" do
@@ -2288,7 +2344,7 @@ describe "RubyFlowDecider" do
  options.task_list = "timeout_test"
  end
  my_workflow_client = my_workflow_factory.get_client
- num_tests = 30
+ num_tests = 15
  workflow_executions = []
  1.upto(num_tests) { |i| workflow_executions << my_workflow_client.entry_point }
  forking_executor = ForkingExecutor.new(:max_workers => 3)
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: aws-flow
  version: !ruby/object:Gem::Version
- version: 1.0.6
+ version: 1.0.7
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2013-12-13 00:00:00.000000000 Z
+ date: 2013-12-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk