aws-flow 2.3.1 → 2.4.0

Files changed (42)
  1. checksums.yaml +8 -8
  2. data/aws-flow.gemspec +3 -2
  3. data/bin/aws-flow-ruby +1 -1
  4. data/bin/aws-flow-utils +5 -0
  5. data/lib/aws/decider.rb +7 -0
  6. data/lib/aws/decider/async_retrying_executor.rb +1 -1
  7. data/lib/aws/decider/data_converter.rb +161 -0
  8. data/lib/aws/decider/decider.rb +27 -14
  9. data/lib/aws/decider/flow_defaults.rb +28 -0
  10. data/lib/aws/decider/implementation.rb +0 -1
  11. data/lib/aws/decider/options.rb +2 -2
  12. data/lib/aws/decider/starter.rb +207 -0
  13. data/lib/aws/decider/task_poller.rb +4 -4
  14. data/lib/aws/decider/utilities.rb +38 -0
  15. data/lib/aws/decider/version.rb +1 -1
  16. data/lib/aws/decider/worker.rb +8 -7
  17. data/lib/aws/decider/workflow_definition_factory.rb +1 -1
  18. data/lib/aws/runner.rb +146 -65
  19. data/lib/aws/templates.rb +4 -0
  20. data/lib/aws/templates/activity.rb +69 -0
  21. data/lib/aws/templates/base.rb +87 -0
  22. data/lib/aws/templates/default.rb +146 -0
  23. data/lib/aws/templates/starter.rb +256 -0
  24. data/lib/aws/utils.rb +270 -0
  25. data/spec/aws/decider/integration/activity_spec.rb +7 -1
  26. data/spec/aws/decider/integration/data_converter_spec.rb +39 -0
  27. data/spec/aws/decider/integration/integration_spec.rb +12 -5
  28. data/spec/aws/decider/integration/options_spec.rb +23 -9
  29. data/spec/aws/decider/integration/starter_spec.rb +209 -0
  30. data/spec/aws/decider/unit/data_converter_spec.rb +276 -0
  31. data/spec/aws/decider/unit/decider_spec.rb +1360 -1386
  32. data/spec/aws/decider/unit/options_spec.rb +21 -22
  33. data/spec/aws/decider/unit/retry_spec.rb +8 -0
  34. data/spec/aws/decider/unit/starter_spec.rb +159 -0
  35. data/spec/aws/runner/integration/runner_integration_spec.rb +2 -3
  36. data/spec/aws/runner/unit/runner_unit_spec.rb +128 -38
  37. data/spec/aws/templates/unit/activity_spec.rb +89 -0
  38. data/spec/aws/templates/unit/base_spec.rb +72 -0
  39. data/spec/aws/templates/unit/default_spec.rb +141 -0
  40. data/spec/aws/templates/unit/starter_spec.rb +271 -0
  41. data/spec/spec_helper.rb +9 -11
  42. metadata +41 -4
checksums.yaml CHANGED
@@ -1,15 +1,15 @@
  ---
  !binary "U0hBMQ==":
    metadata.gz: !binary |-
-     OWUxYzEyYWVmYzRiZmM3MGIyYzNjN2YwMGZmNmVlMDQ2Nzg4ZjY5NQ==
+     MDQyOWZlMGM0YjZiZDcwMmQyNjhjNGFhODY3NzE1ZjZhZDhiNmE4YQ==
    data.tar.gz: !binary |-
-     MzhhZTE1ODE3ZmQ4YTYxN2I3Zjc1ZjgxM2ExOTM2MWM5YTg0MTBmMQ==
+     ZTUxM2MwZTJlNzMzMmMxOWY5NTM0MGM2Y2UyMjA2MjE5NWQxMmJkOA==
  SHA512:
    metadata.gz: !binary |-
-     MWRlY2NmMGU2ZDYzMjA3ZmRiNDg1M2I1MDNmYTE2M2MwYzc4OTIxYmI4OWZi
-     Y2JiNDAwYzk5MmUyZGIyY2Y2Yjc0YjVkZjJhMjNjZWM5YjhmMzc4ZjExYTM3
-     MzBkYmU3N2EzZGJhNTU2YTc5ZTM3YTE4YzE0MmJjYjFmZDlmZjk=
+     MTJiNjFiODFlY2IyN2Q5ODc2YTViNjY4MTM0NGIwZTc0MzE1Y2FmYzM5NjUw
+     MDM5YzkyZTFkNDYzOWFmMDA4YjVmNmY5ODM3MGVkMzQwYjMyMmNiNTlmNmU0
+     NzYyYWNjNmUwYTJkNDg4OTU4ZDVkMjE5MWUxZjA0MDFkZmZiZTM=
    data.tar.gz: !binary |-
-     MjIxZjgxZjRmZTNmMjExYjBhMWJmMDA3NjQxMDdjZjFlMTMwYjJjZjI4Mjc5
-     YjllMzJlYjBkZjQ0MmMzOTc5NWI0NjllM2FkZDc4MmJhMWVjMTRhYjA1NjM1
-     NWI2NzRhOTZlMGRlMWE4MGMyZmNmYWI2OGVkNDBjNTYwNzdiY2Y=
+     ZWY5NmZjZTE2OTg3NjdiMzg5MjBmNjk3OGUyODY5ODQ5NGJlMjc1OGZkMjI5
+     YTA2NjM3MTA4MjFkMWNmZTVmODVjZTQ2ODZmZDFlMGM5Njk2MGNiMzAxMDc3
+     M2EwOGNmODg2OWQzZjAwM2EyNjhjYTAxYjQyMjJmYjI0OGI1YzE=
data/aws-flow.gemspec CHANGED
@@ -7,11 +7,12 @@ Gem::Specification.new do |s|
    s.summary = "AWS Flow Framework for Ruby"
    s.description = "Library to provide the AWS Flow Framework for Ruby"
    s.authors = "Michael Steger, Paritosh Mohan, Jacques Thomas"
-   s.executables = ["aws-flow-ruby"]
+   s.executables = ["aws-flow-ruby", "aws-flow-utils"]
    s.homepage = "https://aws.amazon.com/swf/details/flow/"
    s.email = ''
    s.files = `git ls-files`.split("\n").reject {|file| file =~ /aws-flow-core/}
    s.require_paths << "lib/aws/"
    s.required_ruby_version = ">= 1.9.1"
-   s.add_dependency "aws-sdk-v1", "~> 1", ">= 1.60.1"
+   s.add_dependency "aws-sdk-v1", "~> 1", ">= 1.60.2"
+   s.add_dependency "lru_redux", "~> 0", ">= 0.8.1"
  end
data/bin/aws-flow-ruby CHANGED
@@ -2,4 +2,4 @@

  require 'runner'

- AWS::Flow::Runner.main()
+ AWS::Flow::Runner.main
data/bin/aws-flow-utils ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env ruby
+
+ require 'utils'
+
+ AWS::Flow::Utils.main
data/lib/aws/decider.rb CHANGED
@@ -47,8 +47,15 @@ require "aws/decider/exceptions"
  require "aws/decider/task_poller"
  require "aws/decider/flow_defaults"
  require "aws/decider/implementation"
+ require "aws/decider/starter"
  require "aws/decider/version"

+ # Add the runner files
+ require 'aws/runner'
+
+ # Add the Workflow Template files
+ require 'aws/templates'
+
  # @api private
  def get_const(name)
    name = name.split('::').reverse
data/lib/aws/decider/async_retrying_executor.rb CHANGED
@@ -125,7 +125,7 @@ module AWS
      # otherwise.
      #
      def isRetryable(failure)
-       if failure.respond_to? :cause
+       if failure.respond_to?(:cause) && !failure.cause.nil?
          failure_class = failure.cause.class
        else
          failure_class = failure.class
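
The guard matters because every Ruby exception responds to `cause`, but `cause` is nil unless the exception was raised while rescuing another one. A standalone sketch of the fallback logic (illustrative, not code from the gem):

  # Pick the class used for the retry decision: prefer the wrapped cause,
  # fall back to the failure itself when there is no cause.
  failure = RuntimeError.new("boom")            # failure.cause is nil here
  failure_class =
    if failure.respond_to?(:cause) && !failure.cause.nil?
      failure.cause.class
    else
      failure.class
    end
  # failure_class => RuntimeError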
data/lib/aws/decider/data_converter.rb CHANGED
@@ -19,12 +19,23 @@ module AWS
    # properly, and they are very handy for debugging.
    class YAMLDataConverter

+     # Serializes a ruby object into a YAML string.
+     #
+     # @param object
+     #   The object that needs to be serialized into a string.
+     #
      def dump(object)
        if object.is_a? Exception
          return YAML.dump_stream(object, object.backtrace)
        end
        object.to_yaml
      end
+
+     # Deserializes a YAML string into a ruby object.
+     #
+     # @param source
+     #   The source YAML string that needs to be deserialized into a ruby object.
+     #
      def load(source)
        return nil if source.nil?
        output = YAML.load source
@@ -40,5 +51,155 @@ module AWS
      end
    end

+   # S3DataConverter uses YAMLDataConverter internally to serialize and
+   # deserialize ruby objects. Additionally it stores objects larger than
+   # 32k characters in AWS S3 and returns a serialized s3 link to be
+   # deserialized remotely. It caches objects locally to minimize calls to S3.
+   #
+   # AWS Flow Framework for Ruby doesn't delete files from S3 to prevent loss
+   # of data. It is recommended that users use Object Lifecycle Management in
+   # AWS S3 to auto delete files.
+   #
+   # More information about object expiration can be found at:
+   # http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectExpiration.html
+   class S3DataConverter
+
+     require 'lru_redux'
+
+     # S3Cache is a wrapper around the LruRedux cache.
+     class S3Cache
+       attr_reader :cache
+
+       MAX_SIZE = 1000
+
+       def initialize
+         @cache = LruRedux::ThreadSafeCache.new(MAX_SIZE)
+       end
+
+       # Cache lookup
+       def [](key)
+         @cache[key]
+       end
+
+       # Cache entry
+       def []=(key, value)
+         @cache[key] = value
+       end
+
+     end
+
+     attr_reader :converter, :bucket, :cache
+
+     class << self
+       attr_accessor :conv
+
+       # Ensures singleton
+       def converter
+         return self.conv if self.conv
+         name = ENV['AWS_SWF_BUCKET_NAME']
+         if name.nil?
+           raise "Need a valid S3 bucket name to initialize S3DataConverter."\
+             " Please set the AWS_SWF_BUCKET_NAME environment variable with the"\
+             " bucket name."
+         end
+         self.conv ||= self.new(name)
+         return self.conv
+       end
+
+     end
+
+     def initialize(bucket)
+       @bucket = bucket
+       @cache = S3Cache.new
+       s3 = AWS::S3.new
+       s3.buckets.create(bucket) unless s3.buckets[bucket].exists?
+       @converter = FlowConstants.default_data_converter
+     end
+
+     # Serializes a ruby object into a string. If the size of the converted
+     # string is greater than 32k characters, the string is uploaded to an
+     # AWS S3 file and a serialized hash containing the filename is returned
+     # instead. The filename is generated at random in the following format -
+     # rubyflow_data_<UUID>.
+     #
+     # The format of the returned serialized hash is - { s3_filename: <filename> }
+     #
+     # @param object
+     #   The object that needs to be serialized into a string. By default it
+     #   serializes the object into a YAML string.
+     #
+     def dump(object)
+       string = @converter.dump(object)
+       ret = string
+       if string.size > 32768
+         filename = put_to_s3(string)
+         ret = @converter.dump({ s3_filename: filename })
+       end
+       ret
+     end
+
+     # Deserializes a string into a ruby object. If the deserialized
+     # string is a ruby hash of the format { s3_filename: <filename> }, then
+     # it will first look for the file in a local cache. In case of a cache miss,
+     # it will try to download the file from AWS S3, deserialize the contents
+     # of the file and return the new object.
+     #
+     # @param source
+     #   The source that needs to be deserialized into a ruby object. By
+     #   default it expects the source to be a YAML string.
+     def load(source)
+       object = @converter.load(source)
+       ret = object
+       if object.is_a?(Hash) && object[:s3_filename]
+         ret = @converter.load(get_from_s3(object[:s3_filename]))
+       end
+       ret
+     end
+
+     # Helper method to write a string to an s3 file. A random filename is
+     # generated of the format - rubyflow_data_<UUID>
+     #
+     # @param string
+     #   The string to be uploaded to S3
+     #
+     # @api private
+     def put_to_s3(string)
+       filename = "rubyflow_data_#{SecureRandom.uuid}"
+       s3 = AWS::S3.new
+       s3.buckets[@bucket].objects.create(filename, string)
+       @cache[filename] = string
+       return filename
+     end
+
+     # Helper method to read an s3 file
+     # @param s3_filename
+     #   File name to be read
+     #
+     # @api private
+     def get_from_s3(s3_filename)
+       return @cache[s3_filename] if @cache[s3_filename]
+       s3 = AWS::S3.new
+       s3_object = s3.buckets[@bucket].objects[s3_filename]
+       begin
+         ret = s3_object.read
+         @cache[s3_filename] = ret
+       rescue AWS::S3::Errors::NoSuchKey => e
+         raise "Could not find key #{s3_filename} in bucket #{@bucket} on S3. #{e}"
+       end
+       return ret
+     end
+
+     # Helper method to delete an s3 file
+     # @param s3_filename
+     #   File name to be deleted
+     #
+     # @api private
+     def delete_from_s3(s3_filename)
+       s3 = AWS::S3.new
+       s3.buckets[@bucket].objects.delete(s3_filename)
+     end
+
+   end
+
    end
  end
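
For reference, a minimal usage sketch of the new converter (illustrative only; it assumes valid AWS credentials, requires a bucket name in AWS_SWF_BUCKET_NAME, and the bucket name shown here is hypothetical):

  require 'aws/decider'

  ENV['AWS_SWF_BUCKET_NAME'] = 'my-swf-payload-bucket'   # hypothetical bucket

  converter = AWS::Flow::S3DataConverter.converter
  payload = converter.dump('x' * 40_000)   # YAML exceeds 32k chars, so the body goes to S3
  # payload is now YAML for a hash like { s3_filename: "rubyflow_data_<UUID>" }

  converter.load(payload)                  # resolves the S3 pointer (local cache first)
                                           # and returns the original 40,000-character string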
data/lib/aws/decider/decider.rb CHANGED
@@ -243,16 +243,18 @@ module AWS
    # Sets or returns the {WorkflowOptions} for this decider.
    #
    module Workflows
-     attr_accessor :version
+     attr_accessor :version, :workflows
      extend Utilities::UpwardLookups
      @precursors ||= []
      def look_upwards(variable)
-       precursors = self.ancestors.dup
-       precursors.delete(self)
-       results = precursors.map { |x| x.send(variable) if x.methods.map(&:to_sym).include? variable }.compact.flatten.uniq
+       unless self.ancestors.nil?
+         precursors = self.ancestors.dup
+         precursors.delete(self)
+         results = precursors.map { |x| x.send(variable) if x.methods.map(&:to_sym).include? variable }.compact.flatten.uniq
+       end
      end
      property(:workflows, [])
-     @workflows = []
+     @workflows ||= []
      def self.extended(base)
        base.send :include, InstanceMethods
      end
@@ -264,11 +266,12 @@
        if input
          @entry_point = input
          workflow_type = WorkflowType.new(self.to_s + "." + input.to_s, nil, WorkflowRegistrationOptions.new(:execution_method => input))
-         self.workflows.each { |workflow| workflow.name = self.to_s + "." + input.to_s }
-         self.workflows.each do |workflow|
+         @workflows ||= []
+         @workflows.each { |workflow| workflow.name = self.to_s + "." + input.to_s }
+         @workflows.each do |workflow|
            workflow.options = WorkflowRegistrationOptions.new(:execution_method => input)
          end
-         self.workflows = self.workflows << workflow_type
+         @workflows << workflow_type
        end
        return @entry_point if @entry_point
        raise "You must set an entry point on the workflow definition"
@@ -278,8 +281,9 @@
      # @api private
      def version(arg = nil)
        if arg
-         self.workflows.each { |workflow| workflow.version = arg }
-         self.workflows = self.workflows << WorkflowType.new(nil, arg, WorkflowOptions.new)
+         @workflows ||= []
+         @workflows.each { |workflow| workflow.version = arg }
+         @workflows << WorkflowType.new(nil, arg, WorkflowOptions.new)
        end
        return @version
      end
@@ -325,9 +329,18 @@
        instance_variable_get(client_name)
      end

+     # Convenience method to set the child workflow client
+     def child_workflow_client(name, &block)
+       client_name = "@child_client_#{name}"
+       define_method(name) do
+         return instance_variable_get(client_name) if instance_variable_get(client_name)
+         client = AWS::Flow.send(:workflow_client, nil, nil, &block)
+       end
+       instance_variable_get(client_name)
+     end

      # @api private
-     def _options; self.workflows; end
+     def _options; @workflows; end

      # Defines a new workflow.
      #
@@ -343,7 +356,8 @@
        options.execution_method = workflow_name
        prefix_name = options.prefix_name || self.to_s
        workflow_type = WorkflowType.new(prefix_name.to_s + "." + workflow_name.to_s, options.version, options)
-       self.workflows = self.workflows << workflow_type
+       @workflows ||= []
+       @workflows << workflow_type
      end
    end

@@ -368,7 +382,7 @@
        data_converter = options[:data_converter]
        signal_name = options[:signal_name]
        signal_name ||= method_name.to_s
-       data_converter ||= FlowConstants.default_data_converter
+       data_converter ||= FlowConstants.data_converter
        @signals ||= {}
        @signals[signal_name] = MethodPair.new(method_name, data_converter)
        @signals
@@ -438,7 +452,6 @@
        client
      end

-
      # Creates a timer on the workflow that executes the supplied block after a specified delay.
      #
      # @param delay_seconds
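
A hedged sketch of how the new child_workflow_client convenience method could be used inside a workflow class (the class names, version, and the child's options are illustrative; the DSL shown follows the framework's usual extend AWS::Flow::Workflows pattern):

  class ParentWorkflow
    extend AWS::Flow::Workflows

    workflow :start do
      { version: "1.0" }
    end

    # Defines a :child method that returns a workflow client built from the
    # options block, for scheduling the child workflow from a decision.
    child_workflow_client :child do
      { prefix_name: "ChildWorkflow", execution_method: "run", version: "1.0" }
    end

    def start
      child.start_execution("hello from parent")
    end
  end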
data/lib/aws/decider/flow_defaults.rb CHANGED
@@ -113,6 +113,26 @@
      INFINITY = -1
      RETENTION_DEFAULT = 7
      NUM_OF_WORKERS_DEFAULT = 1
+
+     def self.defaults
+       {
+         domain: "FlowDefault",
+         prefix_name: "FlowDefaultWorkflowRuby",
+         execution_method: "start",
+         version: "1.0",
+         # execution timeout (1 hour)
+         execution_start_to_close_timeout: "3600",
+         data_converter: self.data_converter,
+         schedule_to_start_timeout: 60,
+         start_to_close_timeout: 60,
+         retry_policy: { maximum_attempts: 3 },
+         task_list: "flow_default_ruby",
+         result_activity_prefix: "FlowDefaultResultActivityRuby",
+         result_activity_version: "1.0",
+         result_activity_method: "run"
+       }
+     end
+
      @exponential_retry_maximum_attempts = Float::INFINITY
      @exponential_retry_maximum_retry_interval_seconds = -1
      @exponential_retry_retry_expiration_seconds = -1
@@ -159,6 +179,14 @@
        random.rand(max_value)
      end

+     # Selects the data converter to use. By default, YAMLDataConverter is
+     # used. S3DataConverter is used when AWS_SWF_BUCKET_NAME environment
+     # variable is set.
+     def self.data_converter
+       return self.default_data_converter unless ENV['AWS_SWF_BUCKET_NAME']
+       S3DataConverter.converter
+     end
+
      @default_data_converter = YAMLDataConverter.new
      @use_worker_task_list = "USE_WORKER_TASK_LIST"
    end
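
The converter selection is driven entirely by the environment, and the new defaults hash backs the FlowDefaultWorkflowRuby template workflow. A brief sketch (the S3 path needs valid AWS credentials, and the bucket name is hypothetical):

  require 'aws/decider'

  AWS::Flow::FlowConstants.defaults[:domain]        # => "FlowDefault"
  AWS::Flow::FlowConstants.defaults[:retry_policy]  # => { maximum_attempts: 3 }

  AWS::Flow::FlowConstants.data_converter.class     # => AWS::Flow::YAMLDataConverter
                                                    #    while AWS_SWF_BUCKET_NAME is unset
  ENV['AWS_SWF_BUCKET_NAME'] = 'my-swf-payload-bucket'
  AWS::Flow::FlowConstants.data_converter.class     # => AWS::Flow::S3DataConverter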
data/lib/aws/decider/implementation.rb CHANGED
@@ -41,7 +41,6 @@
      AWS::Flow.send(:workflow_client, service, domain, &block)
    end

-
    # Execute a block with retries within a workflow context.
    #
    # @param options
data/lib/aws/decider/options.rb CHANGED
@@ -340,7 +340,7 @@
    class WorkflowDefaults < Defaults

      # The default data converter. By default, this is {YAMLDataConverter}.
-     def data_converter; FlowConstants.default_data_converter; end
+     def data_converter; FlowConstants.data_converter; end

    end

@@ -554,7 +554,7 @@

    # Defaults for the {ActivityOptions} class.
    class ActivityDefaults < Defaults
-     def data_converter; FlowConstants.default_data_converter; end
+     def data_converter; FlowConstants.data_converter; end
    end

    # Default values for a registered activity type. These values are set by
data/lib/aws/decider/starter.rb ADDED
@@ -0,0 +1,207 @@
+ module AWS
+   module Flow
+
+     # Utility method used to start a workflow execution with the service.
+     #
+     # @param [String or Class (that extends AWS::Flow::Workflows)] workflow
+     #   Represents an AWS Flow Framework workflow class. If not provided,
+     #   details of the workflow must be passed via the opts Hash.
+     #
+     # @param [Hash] input
+     #   Input hash for the workflow execution
+     #
+     # @param [Hash] opts
+     #   Hash of options to configure the workflow execution
+     #
+     # @option opts [String] *Required* :domain
+     #
+     # @option opts [String] *Required* :version
+     #
+     # @option opts [String] *Optional* :prefix_name
+     #   Must be specified if workflow is not passed in as an argument
+     #
+     # @option opts [String] *Optional* :execution_method
+     #   Must be specified if workflow is not passed in as an argument
+     #
+     # @option opts [String] *Optional* :from_class
+     #
+     # @option opts [String] *Optional* :workflow_id
+     #
+     # @option opts [Integer] *Optional* :execution_start_to_close_timeout
+     #
+     # @option opts [Integer] *Optional* :task_start_to_close_timeout
+     #
+     # @option opts [Integer] *Optional* :task_priority
+     #
+     # @option opts [String] *Optional* :task_list
+     #
+     # @option opts [String] *Optional* :child_policy
+     #
+     # @option opts [Array] *Optional* :tag_list
+     #
+     # @option opts *Optional* :data_converter
+     #
+     # Usage -
+     #
+     # 1) Passing a fully qualified workflow <prefix_name>.<execution_method> name -
+     #
+     #    AWS::Flow::start_workflow("HelloWorkflow.say_hello", "world", {
+     #      domain: "FooDomain",
+     #      version: "1.0"
+     #      ...
+     #    })
+     #
+     # 2) Passing workflow class name with other details in the options hash -
+     #
+     #    AWS::Flow::start_workflow("HelloWorkflow", "world", {
+     #      domain: "FooDomain",
+     #      execution_method: "say_hello",
+     #      version: "1.0"
+     #      ...
+     #    })
+     #
+     # 3) Acquiring options using the :from_class option -
+     #
+     #    AWS::Flow::start_workflow(nil, "hello", {
+     #      domain: "FooDomain",
+     #      from_class: "HelloWorkflow"
+     #    })
+     #
+     #    # This will take all the required options from the HelloWorkflow class.
+     #    # If execution_method options is not passed in, it will use the first
+     #    # workflow method in the class.
+     #
+     # 4) All workflow options are present in the options hash. This is the case
+     #    when this method is called by AWS::Flow#start
+     #
+     #    AWS::Flow::start_workflow(nil, "hello", {
+     #      domain: "FooDomain",
+     #      prefix_name: "HelloWorkflow",
+     #      execution_method: "say_hello",
+     #      version: "1.0",
+     #      ...
+     #    })
+     def self.start_workflow(workflow = nil, input, opts)
+
+       raise ArgumentError, "Please provide an options hash" if opts.nil? || !opts.is_a?(Hash)
+
+       options = opts.dup
+
+       # Get the domain out of the options hash.
+       domain = options.delete(:domain)
+
+       raise ArgumentError, "You must provide a :domain in the options hash" if domain.nil?
+
+       if options[:from_class]
+         # Do nothing. Use options as they are. They will be taken care of in the
+         # workflow client
+       elsif workflow.nil?
+         # This block is usually executed when #start_workflow is called from
+         # #start. All options required to start the workflow must be present
+         # in the options hash.
+         prefix_name = options[:prefix_name] || options[:workflow_name]
+         # Check if required options are present
+         raise ArgumentError, "You must provide a :prefix_name in the options hash" unless prefix_name
+         raise ArgumentError, "You must provide an :execution_method in the options hash" unless options[:execution_method]
+         raise ArgumentError, "You must provide a :version in the options hash" unless options[:version]
+       else
+         # When a workflow class name is given along with some options
+
+         # If a fully qualified workflow name is given, split it into prefix_name
+         # and execution_method
+         prefix_name, execution_method = workflow.to_s.split(".")
+         # If a fully qualified name is not given, then look for it in the options
+         # hash
+         execution_method ||= options[:execution_method]
+
+         # Make sure all required options are present
+         raise ArgumentError, "You must provide an :execution_method in the options hash" unless execution_method
+         raise ArgumentError, "You must provide a :version in the options hash" unless options[:version]
+
+         # Set the :prefix_name and :execution_method options correctly
+         options.merge!(
+           prefix_name: prefix_name,
+           execution_method: execution_method,
+         )
+       end
+
+       swf = AWS::SimpleWorkflow.new
+       domain = swf.domains[domain]
+
+       # Get a workflow client for the domain
+       client = workflow_client(domain.client, domain) { options }
+
+       # Start the workflow execution
+       client.start_execution(input)
+     end
+
+     # Starts an Activity or a Workflow Template execution using the
+     # default workflow class FlowDefaultWorkflowRuby
+     #
+     # @param [String or AWS::Flow::Templates::TemplateBase] name_or_klass
+     #   The Activity or the Workflow Template that needs to be scheduled via
+     #   the default workflow. This argument can either be a string that
+     #   represents a fully qualified activity name - <ActivityClass>.<method_name>
+     #   or it can be an instance of AWS::Flow::Templates::TemplateBase
+     #
+     # @param [Hash] input
+     #   Input hash for the workflow execution
+     #
+     # @param [Hash] opts
+     #   Additional options to configure the workflow or activity execution.
+     #
+     # @option opts [true, false] :wait
+     #   *Optional* This boolean flag can be set to true if the result of the
+     #   task is required. Default value is false.
+     #
+     # @option opts [Integer] :wait_timeout
+     #   *Optional* This sets the timeout value for :wait. Default value is
+     #   nil.
+     #
+     # @option opts [Hash] :exponential_retry
+     #   A hash of {AWS::Flow::ExponentialRetryOptions}. Default value is -
+     #   { maximum_attempts: 3 }
+     #
+     # @option opts [String] *Optional* :domain
+     #   Default value is FlowDefault
+     #
+     # @option opts [Integer] *Optional* :execution_start_to_close_timeout
+     #   Default value is 3600 seconds (1 hour)
+     #
+     # @option opts [String] *Optional* :workflow_id
+     #
+     # @option opts [Integer] *Optional* :task_priority
+     #   Default value is 0
+     #
+     # @option opts [String] *Optional* :tag_list
+     #   By default, the name of the activity task gets added to the workflow's
+     #   tag_list
+     #
+     # @option opts *Optional* :data_converter
+     #   Default value is {AWS::Flow::YAMLDataConverter}. To use the
+     #   {AWS::Flow::S3DataConverter}, set the AWS_SWF_BUCKET_NAME environment
+     #   variable name with a valid AWS S3 bucket name.
+     #
+     # @option opts *Optional* A hash of {AWS::Flow::ActivityOptions}
+     #
+     # Usage -
+     #
+     #    AWS::Flow::start("<ActivityClassName>.<method_name>", <input_hash>,
+     #      <options_hash> )
+     #
+     # Example -
+     #
+     # 1) Start an activity execution -
+     #    AWS::Flow::start("HelloWorldActivity.say_hello", { name: "World" })
+     #
+     # 2) Start an activity execution with overridden options -
+     #    AWS::Flow::start("HelloWorldActivity.say_hello", { name: "World" }, {
+     #      exponential_retry: { maximum_attempts: 10 } }
+     #    )
+     #
+     def self.start(name_or_klass, input, options = {})
+       AWS::Flow::Templates.start(name_or_klass, input, options)
+     end
+
+   end
+ end
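
Taken together, a hedged end-to-end sketch of the two entry points above (the domain, workflow, and activity names are illustrative, and workers for them are assumed to be registered and running):

  require 'aws/decider'

  # Start a regular workflow execution by its fully qualified name.
  AWS::Flow::start_workflow("HelloWorkflow.say_hello", "world", {
    domain: "FooDomain",
    version: "1.0",
    execution_start_to_close_timeout: 3600
  })

  # Start a single activity through the default FlowDefaultWorkflowRuby
  # template workflow; per the option docs above, :wait makes the call wait
  # for the task's result and :exponential_retry overrides the retry policy.
  AWS::Flow::start("HelloWorldActivity.say_hello", { name: "World" }, {
    wait: true,
    exponential_retry: { maximum_attempts: 10 }
  })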