aws-flow 1.2.0 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/aws-flow.gemspec +4 -2
- data/bin/aws-flow-ruby +5 -0
- data/lib/aws/decider.rb +1 -1
- data/lib/aws/decider/version.rb +1 -1
- data/lib/aws/runner.rb +300 -0
- data/spec/aws/integration/runner_integration_spec.rb +181 -0
- data/spec/aws/unit/runner_unit_spec.rb +536 -0
- metadata +18 -10
- checksums.yaml +0 -15
data/aws-flow.gemspec
CHANGED
@@ -4,9 +4,11 @@ Gem::Specification.new do |s|
   s.name = 'aws-flow'
   s.version = AWS::Flow::version
   s.date = Time.now
-  s.summary = "AWS Flow"
+  s.summary = "AWS Flow Framework for Ruby"
   s.description = "Library to provide the AWS Flow Framework for Ruby"
-  s.authors = "Michael Steger"
+  s.authors = "Michael Steger, Paritosh Mohan, Jacques Thomas"
+  s.executables = ["aws-flow-ruby"]
+  s.homepage = "https://aws.amazon.com/swf/details/flow/"
   s.email = ''
   s.files = `git ls-files`.split("\n").reject {|file| file =~ /aws-flow-core/}
   s.require_paths << "lib/aws/"
data/bin/aws-flow-ruby
ADDED
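The five added lines of this executable are not expanded by the diff viewer. Since the gemspec now lists it under s.executables and lib/aws/runner.rb (below) exposes AWS::Flow::Runner.main, a plausible sketch of such a launcher looks like this (hypothetical reconstruction, not the released file):

#!/usr/bin/env ruby
# Hypothetical sketch of bin/aws-flow-ruby; the released five-line file is not shown in this diff.
require 'aws/runner'

# Parse -f/--file, load the JSON config, and start the workers.
AWS::Flow::Runner.main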
data/lib/aws/decider.rb
CHANGED
@@ -20,7 +20,7 @@ require 'aws-sdk'
 require 'securerandom'
 
 # Setting the user-agent as ruby-flow for all calls to the service
-AWS.config(:user_agent_prefix => "ruby-flow")
+AWS.config(:user_agent_prefix => "ruby-flow") unless AWS.config.user_agent_prefix
 
 require "aws/decider/utilities"
 require "aws/decider/worker"
data/lib/aws/decider/version.rb
CHANGED
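The one-line change in this file is not expanded by the diff viewer; given the 1.2.0 → 1.3.0 release and the gemspec's use of AWS::Flow::version, it is presumably the version string being bumped, along the lines of this hypothetical sketch (the released file's exact contents are not shown here):

# Hypothetical illustration only; not the released version.rb.
module AWS
  module Flow
    def self.version
      "1.3.0"  # previously "1.2.0"
    end
  end
end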
data/lib/aws/runner.rb
ADDED
@@ -0,0 +1,300 @@
+module AWS
+  module Flow
+    module Runner
+
+      # import the necessary gems to run Ruby Flow code
+      require 'aws/decider'
+      include AWS::Flow
+      require 'json'
+      require 'optparse'
+      require 'socket'
+
+
+      ##
+      ## Helper to start workflow and activity workers according to a predefined
+      ## JSON file format that describes where to find the required elements
+      ##
+
+      # Example of the format:
+      # {
+      #   "domains": [
+      #     {
+      #       "name": <name_of_the_domain>,
+      #       "retention_in_days": <days>
+      #     }
+      #     //, ... can add more
+      #   ],
+      #   "activity_workers": [
+      #
+      #     {
+      #       "domain": <name_of_the_domain>,
+      #       "task_list": <name_of_the_task_list>,
+      #       "activity_classes": [ <name_of_class_containing_the_activities_to_be_worked_on> ],
+      #       "number_of_workers": <number_of_activity_workers_to_spawn>,
+      #       "number_of_forks_per_worker": <number_of_forked_workers>
+      #     }
+      #     //, ... can add more
+      #   ],
+      #   "workflow_workers": [
+      #     {
+      #       "domain": <name_of_the_domain>,
+      #       "task_list": <name_of_the_task_list>,
+      #       "workflow_classes": [ <name_of_class_containing_the_workflows_to_be_worked_on> ],
+      #       "number_of_workers": <number_of_workflow_workers_to_spawn>
+      #     }
+      #     //, ... can add more
+      #   ],
+      #   // Configure which files are 'require'd in order to load the classes
+      #   "workflow_paths": [
+      #     "lib/workflow.rb"
+      #   ],
+      #   "activity_paths": [
+      #     "lib/activity.rb"
+      #   ],
+      #   // This is used by the opsworks recipe
+      #   "user_agent_prefix" : "ruby-flow-opsworks"
+      # }
+
+
+      # registers the domains if they are not already registered
+      def self.setup_domains(json_config)
+
+        swf = create_service_client(json_config)
+
+        json_config['domains'].each do |d|
+          begin
+            swf.client.describe_domain :name => d['name']
+          rescue
+            swf.client.register_domain( { :name => d['name'],
+                                          :workflow_execution_retention_period_in_days => d['retention_in_days'].to_s })
+          end
+        end
+      end
+
+      def self.set_process_name(name)
+        $0 = name
+      end
+
+      # searches the object space for all subclasses of clazz
+      def self.all_subclasses(clazz)
+        ObjectSpace.each_object(Class).select { |klass| klass.is_a? clazz }
+      end
+
+      # used to extract and validate the 'activity_classes'
+      # and 'workflow_classes' fields from the config, or autodiscover
+      # subclasses in the ObjectSpace
+      def self.get_classes(json_fragment, what)
+        classes = json_fragment[what[:config_key]]
+        if classes.nil? || classes.empty? then
+          # discover the classes
+          classes = all_subclasses( what[:clazz] )
+        else
+          # constantize the class names we just read from the config
+          classes.map! { |c| Object.const_get(c) }
+        end
+        if classes.nil? || classes.empty? then
+          raise ArgumentError.new "need at least one implementation class"
+        end
+        classes
+      end
+
+      # used to add implementations to workers; see get_classes
+      def self.add_implementations(worker, json_fragment, what)
+        classes = get_classes(json_fragment, what)
+        classes.each { |c| worker.add_implementation(c) }
+      end
+
+      def self.spawn_and_start_workers(json_fragment, process_name, worker)
+        workers = []
+        json_fragment['number_of_workers'].times do
+          workers << fork do
+            set_process_name(process_name)
+            worker.start()
+          end
+        end
+        workers
+      end
+
+      # used to support host-specific task lists
+      # when the string "|hostname|" is found in the task list
+      # it is replaced by the host name
+      def self.expand_task_list(value)
+        raise ArgumentError.new unless value
+        ret = value
+        ret.gsub!("|hostname|", Socket.gethostname)
+        ret
+      end
+
+      def self.is_empty_field?(json_fragment, field_name)
+        field = json_fragment[field_name]
+        field.nil? || field.empty?
+      end
+
+      # This is used to issue the necessary "require" commands to
+      # load the code needed to run a module
+      #
+      # config_path: the path where the config file is, to be able to
+      #     resolve relative references
+      # json_config: the content of the config
+      # what: what should be loaded. This is a hash expected to contain two keys:
+      #     - :default_file : the file to load unless a specific list is provided
+      #     - :config_key : the key of the config element which can contain a
+      #         specific list of files to load
+      def self.load_files(config_path, json_config, what)
+        if is_empty_field?(json_config, what[:config_key]) then
+          file = File.join(File.dirname(config_path), what[:default_file])
+          require file if File.exists? file
+        else
+          json_config[what[:config_key]].each { |file| require file if File.exists? file }
+        end
+      end
+
+      def self.start_activity_workers(swf, config_path, json_config)
+        workers = []
+        # load all classes for the activities
+        load_files(config_path, json_config, {:config_key => 'activity_paths',
+                                              :default_file => File.join('flow', 'activities.rb')})
+
+        # TODO: logger
+        # start the workers for each spec
+        json_config['activity_workers'].each do |w|
+          fork_count = w['number_of_forks_per_worker'] || 1
+          domain = AWS::SimpleWorkflow::Domain.new( w['domain'] )
+          task_list = expand_task_list(w['task_list'])
+
+          # create a worker
+          worker = ActivityWorker.new(swf.client, domain, task_list, *w['activities']) {{ :max_workers => fork_count }}
+          add_implementations(worker, w, {:config_key => 'activity_classes',
+                                          :clazz => AWS::Flow::Activities})
+
+          # start as many workers as desired in child processes
+          workers << spawn_and_start_workers(w, "activity-worker", worker)
+        end
+
+        return workers
+      end
+
+      def self.start_workflow_workers(swf, config_path, json_config)
+        workers = []
+        # load all the classes for the workflows
+        load_files(config_path, json_config, {:config_key => 'workflow_paths',
+                                              :default_file => File.join('flow', 'workflows.rb')})
+
+        # TODO: logger
+        # start the workers for each spec
+        json_config['workflow_workers'].each do |w|
+          domain = AWS::SimpleWorkflow::Domain.new( w['domain'] )
+          task_list = expand_task_list(w['task_list'])
+
+          # create a worker
+          worker = WorkflowWorker.new(swf.client, domain, task_list, *w['workflows'])
+          add_implementations(worker, w, {:config_key => 'workflow_classes',
+                                          :clazz => AWS::Flow::Workflows})
+
+          # start as many workers as desired in child processes
+          workers << spawn_and_start_workers(w, "workflow-worker", worker)
+        end
+
+        return workers
+      end
+
+      def self.create_service_client(json_config)
+        # set the UserAgent prefix for all clients
+        if json_config['user_agent_prefix'] then
+          AWS.config(:user_agent_prefix => json_config['user_agent_prefix'])
+        end
+
+        swf = AWS::SimpleWorkflow.new
+      end
+
+      #
+      # this will start all the workers and return an array of pids for the worker
+      # processes
+      #
+      def self.start_workers(config_path, json_config)
+
+        workers = []
+
+        swf = create_service_client(json_config)
+
+        workers << start_activity_workers(swf, config_path, json_config)
+        workers << start_workflow_workers(swf, config_path, json_config)
+
+        # needed to avoid returning nested arrays based on the calls above
+        workers.flatten!
+
+      end
+
+      # setup forwarding of signals to child processes, to facilitate and support
+      # orderly shutdown
+      def self.setup_signal_handling(workers)
+        Signal.trap("INT") { workers.each { |w| Process.kill("INT", w) } }
+      end
+
+      # TODO: use a logger
+      # this will wait until all the child workers have died
+      def self.wait_for_child_processes(workers)
+        until workers.empty?
+          puts "waiting on workers " + workers.to_s + " to complete"
+          dead_guys = Process.waitall
+          dead_guys.each { |pid, status| workers.delete(pid); puts pid.to_s + " exited" }
+        end
+      end
+
+      # this is used to extend the load path so that the 'require'
+      # of workflow and activity implementation files can succeed
+      # before adding the implementation classes to the workers
+      def self.add_dir_to_load_path(path)
+        raise ArgumentError.new("Invalid directory path: \"" + path.to_s + "\"") if not FileTest.directory? path
+        $LOAD_PATH.unshift path.to_s
+      end
+
+      #
+      # loads the configuration from a JSON file
+      #
+      def self.load_config_json(path)
+        raise ArgumentError.new("Invalid file path: \"" + path.to_s + "\"") if not File.file? path
+        config = JSON.parse(File.open(path) { |f| f.read })
+      end
+
+
+      def self.parse_command_line(argv = ARGV)
+        options = {}
+        optparse = OptionParser.new do |opts|
+          opts.on('-f', '--file JSON_CONFIG_FILE', "Mandatory JSON config file") do |f|
+            options[:file] = f
+          end
+        end
+
+        optparse.parse!(argv)
+
+        # file parameter is not optional
+        raise OptionParser::MissingArgument.new("file") if options[:file].nil?
+
+        return options
+      end
+
+      def self.main
+        options = parse_command_line
+        config_path = options[:file]
+        config = load_config_json( config_path )
+        add_dir_to_load_path( Pathname.new(config_path).dirname )
+        setup_domains(config)
+        workers = start_workers(config_path, config)
+        setup_signal_handling(workers)
+
+        # hang there until killed: this process is used to relay signals to children
+        # to support and facilitate an orderly shutdown
+        wait_for_child_processes(workers)
+
+      end
+
+    end
+  end
+end
+
+if __FILE__ == $0
+  AWS::Flow::Runner.main()
+end
+
+
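The comment block at the top of runner.rb documents the JSON layout the runner expects. For illustration, a minimal configuration in that format might look as follows; the domain, task list, and class names are borrowed from the Ping sample in the integration spec below and are placeholders, not required values:

{
  "domains": [
    { "name": "PingTest", "retention_in_days": 10 }
  ],
  "activity_workers": [
    {
      "domain": "PingTest",
      "task_list": "activity_tasklist",
      "activity_classes": [ "PingActivity" ],
      "number_of_workers": 1,
      "number_of_forks_per_worker": 1
    }
  ],
  "workflow_workers": [
    {
      "domain": "PingTest",
      "task_list": "workflow_tasklist",
      "workflow_classes": [ "PingWorkflow" ],
      "number_of_workers": 1
    }
  ],
  "activity_paths": [ "lib/activity.rb" ],
  "workflow_paths": [ "lib/workflow.rb" ]
}

Such a file would be passed to the new executable via the -f/--file switch defined in parse_command_line (for example, aws-flow-ruby -f worker.json), or its parsed content handed directly to AWS::Flow::Runner.start_workers, which is how the integration spec below drives it.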
data/spec/aws/integration/runner_integration_spec.rb
ADDED
@@ -0,0 +1,181 @@
+require 'runner'
+require 'bundler/setup'
+require 'aws/decider'
+require 'logger'
+require 'socket'
+
+describe "Runner" do
+
+  # Copied from the utilities for the samples and recipes
+  module SharedUtils
+
+    def setup_domain(domain_name)
+      swf = AWS::SimpleWorkflow.new
+      domain = swf.domains[domain_name]
+      unless domain.exists?
+        swf.domains.create(domain_name, 10)
+      end
+      domain
+    end
+
+    def build_workflow_worker(domain, klass, task_list)
+      AWS::Flow::WorkflowWorker.new(domain.client, domain, task_list, klass)
+    end
+
+    def build_generic_activity_worker(domain, task_list)
+      AWS::Flow::ActivityWorker.new(domain.client, domain, task_list)
+    end
+
+    def build_activity_worker(domain, klass, task_list)
+      AWS::Flow::ActivityWorker.new(domain.client, domain, task_list, klass)
+    end
+
+    def build_workflow_client(domain, options_hash)
+      AWS::Flow::workflow_client(domain.client, domain) { options_hash }
+    end
+  end
+
+  class PingUtils
+    include SharedUtils
+
+    WF_VERSION = "1.0"
+    ACTIVITY_VERSION = "1.0"
+    WF_TASKLIST = "workflow_tasklist"
+    ACTIVITY_TASKLIST = "activity_tasklist"
+    DOMAIN = "PingTest"
+
+    def initialize
+      @domain = setup_domain(DOMAIN)
+    end
+
+    def activity_worker
+      build_activity_worker(@domain, PingActivity, ACTIVITY_TASKLIST)
+    end
+
+    def workflow_worker
+      build_workflow_worker(@domain, PingWorkflow, WF_TASKLIST)
+    end
+
+    def workflow_client
+      build_workflow_client(@domain, from_class: "PingWorkflow")
+    end
+  end
+
+  # PingActivity class defines a set of activities for the Ping sample.
+  class PingActivity
+    extend AWS::Flow::Activities
+
+    # The activity method is used to define activities. It accepts a list of names
+    # of activities and a block specifying registration options for those
+    # activities
+    activity :ping do
+      {
+        version: PingUtils::ACTIVITY_VERSION,
+        default_task_list: PingUtils::ACTIVITY_TASKLIST,
+        default_task_schedule_to_start_timeout: 30,
+        default_task_start_to_close_timeout: 30
+      }
+    end
+
+    # This activity will say hello when invoked by the workflow
+    def ping()
+      puts "Pong from #{Socket.gethostbyname(Socket.gethostname).first}"
+      "Pong from #{Socket.gethostbyname(Socket.gethostname).first}"
+    end
+  end
+
+  # PingWorkflow class defines the workflows for the Ping sample
+  class PingWorkflow
+    extend AWS::Flow::Workflows
+
+    workflow :ping do
+      {
+        version: PingUtils::WF_VERSION,
+        task_list: PingUtils::WF_TASKLIST,
+        execution_start_to_close_timeout: 30,
+      }
+    end
+
+    # Create an activity client using the activity_client method to schedule
+    # activities
+    activity_client(:client) { { from_class: "PingActivity" } }
+
+    # This is the entry point for the workflow
+    def ping()
+      # Use the activity client 'client' to invoke the say_hello activity
+      pong=client.ping()
+      "Got #{pong}"
+    end
+  end
+
+  describe "Sanity Check" do
+
+    it "makes sure credentials and region are in the execution environment" do
+      # note: this could be refactored with a map, but errors are easier to figure out this way
+      begin
+        ENV['AWS_ACCESS_KEY_ID'].should_not be_nil
+        ENV['AWS_SECRET_ACCESS_KEY'].should_not be_nil
+        ENV['AWS_REGION'].should_not be_nil
+      rescue RSpec::Expectations::ExpectationNotMetError
+        # FIXME: there ought to be a better way to pass a useful message to the user
+        puts "\tPlease see the getting started to set up the environment"
+        puts "\thttp://docs.aws.amazon.com/amazonswf/latest/awsrbflowguide/installing.html#installing-credentials"
+        raise RSpec::Expectations::ExpectationNotMetError
+      end
+    end
+
+    it "makes sure the credentials and region in the environment can be used to talk to SWF" do
+      swf = AWS::SimpleWorkflow.new
+      domains = swf.client.list_domains "registration_status" => "REGISTERED"
+    end
+
+  end
+
+  describe "Hello World" do
+
+    it "runs" do
+
+      runner_config = JSON.parse('{
+        "workflow_paths": [],
+        "workflow_workers": [
+          {
+            "domain": ' + "\"#{PingUtils::DOMAIN}\"" + ',
+            "task_list": ' + "\"#{PingUtils::WF_TASKLIST}\"" + ',
+            "workflow_classes": [ ' + "\"PingWorkflow\"" + ' ],
+            "number_of_workers": 1
+          }
+        ],
+        "activity_paths": [],
+        "activity_workers": [
+          {
+            "domain": ' + "\"#{PingUtils::DOMAIN}\"" + ',
+            "task_list": ' + "\"#{PingUtils::ACTIVITY_TASKLIST}\"" + ',
+            "activity_classes": [ ' + "\"PingActivity\"" + ' ],
+            "number_of_forks_per_worker": 1,
+            "number_of_workers": 1
+          }
+        ]
+      }')
+
+      # mock the load_files method to avoid having to create default files
+      AWS::Flow::Runner.stub(:load_files)
+
+      workers = AWS::Flow::Runner.start_workers("", runner_config)
+
+      utils = PingUtils.new
+      wf_client = utils.workflow_client
+
+      workflow_execution = wf_client.ping()
+
+      sleep 3 until [
+        "WorkflowExecutionCompleted",
+        "WorkflowExecutionTimedOut",
+        "WorkflowExecutionFailed"
+      ].include? workflow_execution.events.to_a.last.event_type
+
+      # kill the workers
+      workers.each { |w| Process.kill("KILL", w) }
+    end
+  end
+
+end
data/spec/aws/unit/runner_unit_spec.rb
ADDED
@@ -0,0 +1,536 @@
+require 'runner'
+require 'tempfile'
+require 'socket'
+require 'fileutils'
+require_relative '../../spec_helper.rb'
+
+describe "Runner" do
+
+  describe "Command line" do
+    it "makes sure that the JSON file must be provided on the command line" do
+      expect { AWS::Flow::Runner.parse_command_line([]) }.to raise_error( OptionParser::MissingArgument )
+    end
+
+    it "makes sure that the JSON file must be provided on the command line (switch must be followed by argument)" do
+      expect { AWS::Flow::Runner.parse_command_line(["-f"]) }.to raise_error( OptionParser::MissingArgument )
+    end
+
+    it "makes sure that the JSON file must be provided on the command line (switch must be followed by argument which is valid file; valid case)" do
+      file = Tempfile.new('foo')
+      begin
+        expect { AWS::Flow::Runner.parse_command_line(["-f", file.path]) }.not_to raise_error
+      ensure
+        file.unlink
+      end
+    end
+
+  end
+
+  describe "JSON loading" do
+
+    it "makes sure that the JSON file exists" do
+      file = Tempfile.new('foo')
+      path = file.path
+      file.unlink
+      expect { AWS::Flow::Runner.load_config_json(path) }.to raise_error(ArgumentError)
+    end
+
+    it "makes sure that the JSON file has valid content" do
+      file = Tempfile.new('foo')
+      begin
+        File.write(file, "garbage{")
+        expect { AWS::Flow::Runner.load_config_json(file.path) }.to raise_error(JSON::ParserError)
+      ensure
+        file.unlink
+      end
+    end
+
+  end
+
+
+  describe "JSON validation" do
+
+    it "makes sure activity classes are provided (empty list)" do
+      document = '{
+        "activity_paths": [],
+        "activity_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "activity_classes": [],
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      js = JSON.parse(document)
+
+      # just in case so we don't start child processes
+      AWS::Flow::Runner.stub(:fork)
+
+      # make sure the error is thrown
+      expect {
+        AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, js)
+      }.to raise_error(ArgumentError)
+
+    end
+
+    it "makes sure activity classes are provided (no list)" do
+      document = '{
+        "activity_paths": [],
+        "activity_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      js = JSON.parse(document)
+
+      # just in case so we don't start child processes
+      allow(AWS::Flow::Runner).to receive(:fork).and_return(42)
+
+      # make sure the error is thrown
+      expect {
+        AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, js)
+      }.to raise_error(ArgumentError)
+
+    end
+
+    it "makes sure workflow classes are provided (empty list)" do
+      document = '{
+        "workflow_paths": [],
+        "workflow_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "workflow_classes": [],
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      js = JSON.parse(document)
+
+      # just in case so we don't start child processes
+      AWS::Flow::Runner.stub(:fork)
+
+      # make sure the error is thrown
+      expect {
+        AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, js)
+      }.to raise_error(ArgumentError)
+
+    end
+
+    it "makes sure workflow classes are provided (no list)" do
+      document = '{
+        "workflow_paths": [],
+        "workflow_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      js = JSON.parse(document)
+
+      # just in case so we don't start child processes
+      allow(AWS::Flow::Runner).to receive(:fork).and_return(42)
+
+      # make sure the error is thrown
+      expect {
+        AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, js)
+      }.to raise_error(ArgumentError)
+
+    end
+
+  end
+
+  describe "Starting workers" do
+
+    def workflow_js
+      document = '{
+        "workflow_paths": [],
+        "workflow_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "workflow_classes": [ "Object", "String" ],
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      JSON.parse(document)
+    end
+
+    def activity_js
+      document = '{
+        "activity_paths": [],
+        "activity_workers": [
+          {
+            "domain": "foo",
+            "task_list": "bar",
+            "activity_classes": [ "Object", "String" ],
+            "number_of_workers": 3
+          }
+        ]
+      }'
+      JSON.parse(document)
+    end
+
+    it "makes sure the number of workflow workers is correct" do
+      # mock out a few methods to focus on the fact that the workers were created
+      allow_any_instance_of(AWS::Flow::WorkflowWorker).to receive(:add_implementation).and_return(nil)
+      allow_any_instance_of(AWS::Flow::WorkflowWorker).to receive(:start).and_return(nil)
+      AWS::Flow::Runner.stub(:load_files)
+
+      # what we are testing:
+      expect(AWS::Flow::Runner).to receive(:fork).exactly(3).times
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, "", workflow_js)
+    end
+
+
+
+    it "makes sure the number of activity workers is correct" do
+      # mock out a few methods to focus on the fact that the workers were created
+      allow_any_instance_of(AWS::Flow::ActivityWorker).to receive(:add_implementation).and_return(nil)
+      allow_any_instance_of(AWS::Flow::ActivityWorker).to receive(:start).and_return(nil)
+      AWS::Flow::Runner.stub(:load_files)
+
+      # what we are testing:
+      expect(AWS::Flow::Runner).to receive(:fork).exactly(3).times
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, "",activity_js)
+    end
+
+    it "makes sure the workflow implementation classes are added" do
+      # mock out a few methods to focus on the implementations being added
+      allow_any_instance_of(AWS::Flow::WorkflowWorker).to receive(:start).and_return(nil)
+      AWS::Flow::Runner.stub(:fork)
+      AWS::Flow::Runner.stub(:load_files)
+
+      # stub that we can query later
+      implems = []
+      AWS::Flow::WorkflowWorker.any_instance.stub(:add_implementation) do |arg|
+        implems << arg
+      end
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, "",workflow_js)
+
+      # validate
+      expect(implems).to include(Object.const_get("Object"), Object.const_get("String"))
+    end
+
+    it "makes sure the activity implementation classes are added" do
+      # mock out a few methods to focus on the implementations being added
+      allow_any_instance_of(AWS::Flow::ActivityWorker).to receive(:start).and_return(nil)
+      AWS::Flow::Runner.stub(:fork)
+      AWS::Flow::Runner.stub(:load_files)
+
+      # stub that we can query later
+      implems = []
+      AWS::Flow::ActivityWorker.any_instance.stub(:add_implementation) do |arg|
+        implems << arg
+      end
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, "",activity_js)
+
+      # validate
+      expect(implems).to include(Object.const_get("Object"), Object.const_get("String"))
+    end
+
+    it "makes sure the workflow worker is started" do
+      # mock out a few methods to focus on the worker getting started
+      allow_any_instance_of(AWS::Flow::WorkflowWorker).to receive(:add_implementation).and_return(nil)
+      AWS::Flow::Runner.stub(:fork).and_yield
+      AWS::Flow::Runner.stub(:load_files)
+
+      # stub that we can query later
+      starts = 0
+      AWS::Flow::WorkflowWorker.any_instance.stub(:start) do |arg|
+        starts += 1
+      end
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, "",workflow_js)
+
+      # validate
+      expect(starts).to equal(3)
+    end
+
+    it "makes sure the activity worker is started" do
+      # mock out a few methods to focus on the worker getting started
+      allow_any_instance_of(AWS::Flow::ActivityWorker).to receive(:add_implementation).and_return(nil)
+      AWS::Flow::Runner.stub(:fork).and_yield
+      AWS::Flow::Runner.stub(:load_files)
+
+      # stub that we can query later
+      starts = 0
+      AWS::Flow::ActivityWorker.any_instance.stub(:start) do |arg|
+        starts += 1
+      end
+
+      # start the workers
+      workers = AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, "",activity_js)
+
+      # validate
+      expect(starts).to equal(3)
+    end
+
+  end
+
+
+
+  describe "Loading files" do
+
+    before(:each) do
+      # let's pretend the files exist, so that loading proceeds
+      allow(File).to receive(:exists?).and_return(true)
+      # stubs to avoid running code that should not be run/covered in these tests
+      AWS::Flow::Runner.stub(:add_implementations)
+      AWS::Flow::Runner.stub(:spawn_and_start_workers)
+    end
+
+    it "looks in the directory where the config is and loads the specified default" do
+      base = "/tmp/blahdir"
+      relative = File.join('flow', 'activities.rb')
+
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join(base, relative))
+
+      AWS::Flow::Runner.load_files( File.join(base, "blahconfig"), "",
+                                    {:config_key => "any_key_name",
+                                     :default_file => relative})
+    end
+
+    it "loads the default only if needed" do
+      base = "/tmp/blahdir"
+      relative = File.join('flow', 'activities.rb')
+
+      expect(AWS::Flow::Runner).to_not receive(:require).with(File.join(base, relative))
+      expect(AWS::Flow::Runner).to receive(:require).with("foo")
+      expect(AWS::Flow::Runner).to receive(:require).with("bar")
+
+      AWS::Flow::Runner.load_files( File.join(base, "blahconfig"),
+                                    JSON.parse('{ "activity_paths": [ "foo", "bar"] }'),
+                                    {:config_key => "activity_paths",
+                                     :default_file => relative})
+    end
+
+    it "loads the \"flow/activities.rb\" by default for activity worker" do
+      def activity_js
+        document = '{
+          "activity_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "activity_classes": [ "Object", "String" ],
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join(".", "flow", "activities.rb"))
+
+      AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, ".", activity_js)
+    end
+
+    it "loads the \"flow/workflows.rb\" by default for workflow worker" do
+      def workflow_js
+        document = '{
+          "workflow_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "workflow_classes": [ "Object", "String" ],
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join(".", "flow", "workflows.rb"))
+
+      AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, ".", workflow_js)
+    end
+
+    it "takes activity_paths as override to \"flow/activities.rb\"" do
+      def activity_js
+        document = '{
+          "activity_paths": [ "foo", "bar"],
+          "activity_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "activity_classes": [ "Object", "String" ],
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      expect(AWS::Flow::Runner).to_not receive(:require).with(File.join(".", "flow", "activities.rb"))
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join("foo"))
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join("bar"))
+
+      AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, ".", activity_js)
+    end
+
+    it "takes workflow_paths as override to \"flow/workflows.rb\"" do
+      def workflow_js
+        document = '{
+          "workflow_paths": [ "foo", "bar"],
+          "workflow_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "workflow_classes": [ "Object", "String" ],
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      expect(AWS::Flow::Runner).to_not receive(:require).with(File.join(".", "flow", "workflows.rb"))
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join("foo"))
+      expect(AWS::Flow::Runner).to receive(:require).with(File.join("bar"))
+
+      AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, ".", workflow_js)
+    end
+
+  end
+
+
+
+
+  describe "Implementation classes discovery" do
+
+    # because the object space is not reset between test runs, these
+    # classes are declared here for all the tests in this section to use
+    class MyActivity1
+      extend AWS::Flow::Activities
+    end
+    class MyActivity2
+      extend AWS::Flow::Activities
+    end
+
+    class MyWorkflow1
+      extend AWS::Flow::Workflows
+    end
+    class MyWorkflow2
+      extend AWS::Flow::Workflows
+    end
+
+    before(:each) do
+      # stubs to avoid running code that should not be run/covered in these tests
+      AWS::Flow::Runner.stub(:spawn_and_start_workers)
+    end
+
+    it "finds all the subclasses properly" do
+      module Clown
+      end
+      class Whiteface
+        extend Clown
+      end
+      class Auguste
+        extend Clown
+      end
+
+      sub = AWS::Flow::Runner.all_subclasses(Clown)
+      expect(sub).to include(Whiteface)
+      expect(sub).to include(Auguste)
+    end
+
+    it "finds all the subclasses of AWS::Flow::Activities properly" do
+      sub = AWS::Flow::Runner.all_subclasses(AWS::Flow::Activities)
+      expect(sub).to include(MyActivity1)
+      expect(sub).to include(MyActivity2)
+    end
+
+    it "finds all the subclasses of AWS::Flow::Workflows properly" do
+      sub = AWS::Flow::Runner.all_subclasses(AWS::Flow::Workflows)
+      expect(sub).to include(MyWorkflow1)
+      expect(sub).to include(MyWorkflow2)
+    end
+
+    it "finds the activity implementations when they are in the environment" do
+      def activity_js
+        document = '{
+          "activity_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      impls = []
+      AWS::Flow::ActivityWorker.any_instance.stub(:add_implementation) do |impl|
+        impls << impl
+      end
+
+      AWS::Flow::Runner.start_activity_workers(AWS::SimpleWorkflow.new, ".", activity_js)
+
+      expect(impls).to include(MyActivity2)
+      expect(impls).to include(MyActivity1)
+    end
+
+    it "finds the workflow implementations when they are in the environment" do
+      def workflow_js
+        document = '{
+          "workflow_workers": [
+            {
+              "domain": "foo",
+              "task_list": "bar",
+              "number_of_workers": 3
+            }
+          ]
+        }'
+        JSON.parse(document)
+      end
+
+      impls = []
+      AWS::Flow::WorkflowWorker.any_instance.stub(:add_implementation) do |impl|
+        impls << impl
+      end
+
+      AWS::Flow::Runner.start_workflow_workers(AWS::SimpleWorkflow.new, ".", workflow_js)
+
+      expect(impls).to include(MyWorkflow2)
+      expect(impls).to include(MyWorkflow1)
+    end
+
+  end
+
+  describe "Host-specific tasklists" do
+
+    it "expand to the local host name" do
+      # note how we test for value equality; not object equality
+      expect(AWS::Flow::Runner.expand_task_list("|hostname|")).to eq(Socket.gethostname)
+    end
+
+    it "expand to the local host name even in multiple places" do
+      # note how we test for value equality; not object equality
+      expect(AWS::Flow::Runner.expand_task_list("xxx|hostname|yy|hostname|zz")).to eq("xxx#{Socket.gethostname}yy#{Socket.gethostname}zz")
+    end
+
+    it "preserves the task list value if no expanded pattern found" do
+      # note how we test for value equality; not object equality
+      expect(AWS::Flow::Runner.expand_task_list("xxxzz")).to eq("xxxzz")
+    end
+
+  end
+
+end
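The "Host-specific tasklists" examples above exercise expand_task_list, which substitutes the local host name for the literal string |hostname|. In a runner configuration this lets a single config file pin each machine's workers to their own task list; an illustrative worker entry (field names from the documented format, values hypothetical):

{
  "domain": "PingTest",
  "task_list": "activity_tasklist-|hostname|",
  "activity_classes": [ "PingActivity" ],
  "number_of_workers": 1
}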
metadata
CHANGED
@@ -1,18 +1,20 @@
 --- !ruby/object:Gem::Specification
 name: aws-flow
 version: !ruby/object:Gem::Version
-  version: 1.2.0
+  version: 1.3.0
+  prerelease:
 platform: ruby
 authors:
-- Michael Steger
+- Michael Steger, Paritosh Mohan, Jacques Thomas
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-
+date: 2014-07-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: aws-sdk
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -23,6 +25,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -32,7 +35,8 @@ dependencies:
         version: 1.39.0
 description: Library to provide the AWS Flow Framework for Ruby
 email: ''
-executables: []
+executables:
+- aws-flow-ruby
 extensions: []
 extra_rdoc_files: []
 files:
@@ -41,6 +45,7 @@ files:
 - NOTICE.TXT
 - Rakefile
 - aws-flow.gemspec
+- bin/aws-flow-ruby
 - lib/aws/decider.rb
 - lib/aws/decider/activity.rb
 - lib/aws/decider/activity_definition.rb
@@ -77,7 +82,9 @@ files:
 - lib/aws/flow/implementation.rb
 - lib/aws/flow/simple_dfa.rb
 - lib/aws/flow/tasks.rb
+- lib/aws/runner.rb
 - spec/aws/integration/integration_spec.rb
+- spec/aws/integration/runner_integration_spec.rb
 - spec/aws/unit/async_backtrace_spec.rb
 - spec/aws/unit/async_scope_spec.rb
 - spec/aws/unit/begin_rescue_ensure_spec.rb
@@ -92,31 +99,32 @@ files:
 - spec/aws/unit/options_spec.rb
 - spec/aws/unit/preinclude_tests.rb
 - spec/aws/unit/rubyflow.rb
+- spec/aws/unit/runner_unit_spec.rb
 - spec/aws/unit/simple_dfa_spec.rb
 - spec/spec_helper.rb
-homepage:
+homepage: https://aws.amazon.com/swf/details/flow/
 licenses: []
-metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 - lib/aws/
 required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
   - - ! '>='
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version:
+rubygems_version: 1.8.23
 signing_key:
-specification_version:
-summary: AWS Flow
+specification_version: 3
+summary: AWS Flow Framework for Ruby
 test_files: []
-has_rdoc:
checksums.yaml
DELETED
@@ -1,15 +0,0 @@
----
-!binary "U0hBMQ==":
-  metadata.gz: !binary |-
-    ODBhMDgxOTcxNDAyNGUzMzZmMDllMDdjMWRiZmE3ZjZmM2NmOTA3MQ==
-  data.tar.gz: !binary |-
-    ZDhmNDA5OTBhNThkNDhmMGQ4MjM2ZDcyMGJhM2U1YTNlMGZiYzkyNg==
-SHA512:
-  metadata.gz: !binary |-
-    NTRlNDUxZDIyZjQyNGRhNzAyOTZhZmQ0NjkwNjI0NThmZGIwOTJmYWVlN2Y3
-    ODY2MGZiZjFmOWE4MGJjN2VhNzJkNzBkMjhiYmNmYmNlMTExNjg3NTlkZjRm
-    YzlmZGQ3MjE5MWEyM2I2YjEyYmZiN2NmOGY2YTllMDY0MzE4YTg=
-  data.tar.gz: !binary |-
-    YTczMTcwODZmMWZkNTgxODQ2NTZjY2FjNmE4ZGRiY2I5NzA0OWVhNTcxY2Ey
-    OThhNGI3ZDY2MDM2ZmM1ZmQ3M2QyZTcyN2JkZGYzNDlkYjM4NjVlMjA1NTMw
-    N2VhMjllYTRhYTE5M2E4M2FhNGZlMGY2ZjEzYjdjMzI4NjFkZmE=