iron_worker 2.1.0
Sign up to get free protection for your applications and to get access to all the features.
- data/README.markdown +72 -0
- data/VERSION.yml +5 -0
- data/lib/generators/iron_worker/iron_worker_generator.rb +13 -0
- data/lib/iron_worker.rb +49 -0
- data/lib/iron_worker/api.rb +242 -0
- data/lib/iron_worker/base.rb +432 -0
- data/lib/iron_worker/config.rb +265 -0
- data/lib/iron_worker/rails2_init.rb +8 -0
- data/lib/iron_worker/railtie.rb +16 -0
- data/lib/iron_worker/server/overrides.rb +199 -0
- data/lib/iron_worker/server/runner.rb +80 -0
- data/lib/iron_worker/service.rb +553 -0
- data/lib/iron_worker/uber_client.rb +117 -0
- data/lib/iron_worker/used_in_worker.rb +11 -0
- data/lib/iron_worker/utils.rb +11 -0
- data/rails/init.rb +0 -0
- metadata +105 -0
@@ -0,0 +1,80 @@
|
|
1
|
+
# This is the file that gets executed on the server.
|
2
|
+
|
3
|
+
# Establishes an ActiveRecord connection when the worker payload carries a
# 'database' section; otherwise does nothing.
def init_database_connection(sw_config)
  return unless sw_config
  db_config = sw_config['database']
  return unless db_config
  # Loaded lazily so workers without a database never need ActiveRecord.
  require 'active_record'
  ActiveRecord::Base.establish_connection(db_config)
end
|
13
|
+
|
14
|
+
# Configures ActionMailer for SMTP delivery from the payload's 'mailer'
# section; a payload without one leaves mail delivery untouched.
def init_mailer(sw_config)
  mailer_config = sw_config && sw_config['mailer']
  return unless mailer_config
  # Loaded lazily so workers that never send mail skip the dependency.
  require 'action_mailer'
  ActionMailer::Base.raise_delivery_errors = true
  ActionMailer::Base.smtp_settings = mailer_config
  ActionMailer::Base.delivery_method = :smtp
end
|
25
|
+
|
26
|
+
# Resolves the worker class named in the payload (e.g. "Foo::BarWorker")
# to the actual Class object via constantize.
def get_class_to_run(class_name)
  constantize(class_name)
end
|
30
|
+
|
31
|
+
# Borrowed from ActiveSupport's Inflector (MIT licensed): resolves a
# camel-cased, '::'-separated name to the constant it denotes.
def constantize(camel_cased_word)
  parts = camel_cased_word.split('::')
  # A leading '::' (or empty input) yields an empty first segment; drop it.
  parts.shift if parts.empty? || parts.first.empty?

  parts.reduce(Object) do |mod, part|
    mod.const_defined?(part) ? mod.const_get(part) : mod.const_missing(part)
  end
end
|
43
|
+
|
44
|
+
# Instantiates the worker class and seeds it with the task context
# (task id, payload, sw_config and the writable user directory).
# Raises IronWorker::InvalidWorkerError when initialize expects arguments,
# because the server always constructs workers with zero args.
def init_runner(runner_class, job_data, user_dir, task_id)
  arity = runner_class.instance_method(:initialize).arity
  # 0 = explicit zero-arg initialize; -1 covers the default (undefined) one.
  unless arity == 0 || arity == -1
    raise IronWorker::InvalidWorkerError, "Worker initialize method must accept zero arguments."
  end
  runner = runner_class.new
  {
    :@task_id   => task_id,
    :@job_data  => job_data,
    :@sw_config => job_data['sw_config'],
    :@user_dir  => user_dir
  }.each { |ivar, value| runner.instance_variable_set(ivar, value) }
  runner.sw_set_data(job_data)
  runner
end
|
60
|
+
|
61
|
+
# Points the in-worker IronWorker client at the same account/project the
# task was queued from, copying connection settings out of sw_config.
def init_worker_service_for_runner(job_data)
  sw_config = job_data['sw_config']
  IronWorker.configure do |config|
    config.token      = sw_config['token']
    config.project_id = sw_config['project_id']
    # Optional transport overrides — applied only when present.
    %w(scheme host port).each do |key|
      config.send("#{key}=", sw_config[key]) if sw_config[key]
    end
    db_config = sw_config['database']
    config.database = db_config if db_config
    mailer_config = sw_config['mailer']
    config.mailer = mailer_config if mailer_config && config.respond_to?(:mailer)
    config.global_attributes = sw_config['global_attributes'] if sw_config['global_attributes']
  end
end
|
80
|
+
|
@@ -0,0 +1,553 @@
|
|
1
|
+
require 'base64'
require 'digest'  # Digest::MD5 is used by upload/build_merged_file but was never required
require 'json'
require 'logger'
require 'tmpdir'  # Dir.tmpdir is used by upload/build_merged_file but was never required

require 'rest_client'
require 'zip'
|
6
|
+
|
7
|
+
require_relative 'api'
|
8
|
+
|
9
|
+
module IronWorker
|
10
|
+
|
11
|
+
class Service < IronWorker::Api::Client
|
12
|
+
|
13
|
+
attr_accessor :config
|
14
|
+
|
15
|
+
# Builds the API client for the IronWorker service.
#
# token   - the account's auth token.
# options - :config (a pre-built IronWorker::Config) plus whatever the
#           Api::Client superclass accepts.
def initialize(token, options={})
  if options[:config]
    self.config = options[:config]
  else
    # NOTE(review): `unless self.config` would leave `c` nil if config were
    # already set, but at this point in initialize it never is.
    c = IronWorker::Config.new unless self.config
    c.token = token
    self.config = c
  end
  options[:version] = IronWorker.api_version
  options[:logger] = IronWorker.logger
  # Default public endpoint; overridden just below when the config names a host.
  super("worker-aws-us-east-1.iron.io", token, options)
  self.host = self.config.host if self.config && self.config.host
  # automerge the iron_worker gem and its dependencies into every upload
  self.config.merge_gem('rest-client')
  self.config.merge_gem('iron_worker')
  IronWorker.logger.info 'IronWorker initialized.'
  IronWorker.logger.debug ' host = ' + self.host.inspect
end
|
33
|
+
|
34
|
+
# Uploads a worker's code package unless an identical version (by MD5 of
# the source file) was already uploaded.
#
# filename   - path to the worker source file.
# class_name - worker class the file defines; used for naming and caching.
# Options:
# - :name => code-package name (defaults to class_name)
# - :callback_url
# - :merge => array of files to merge in with this file
def upload(filename, class_name, options={})
  name = options[:name] || class_name
  tmp = Dir.tmpdir()
  # Cache file remembers the MD5 of the last successful upload per
  # class+token, so unchanged code is not re-uploaded.
  md5file = "iron_worker_#{class_name.gsub("::", ".")}_#{token[0, 8]}.md5"
  existing_md5 = nil
  md5_f = File.join(tmp, md5file)
  # File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
  if File.exist?(md5_f)
    existing_md5 = IO.read(md5_f)
  end

  # Check for code changes.
  md5 = Digest::MD5.hexdigest(File.read(filename))
  new_code = false
  if self.config.force_upload || md5 != existing_md5
    IronWorker.logger.info "Uploading #{class_name}, code modified."
    File.open(md5_f, 'w') { |f| f.write(md5) }
    new_code = true
  else
    # puts "#{class_name}: same code, not uploading"
    return
  end

  begin
    zip_filename = build_merged_file(filename, options[:merge], options[:unmerge], options[:merged_gems], options[:merged_mailers], options[:merged_folders])

    if new_code
      upload_code(name, zip_filename, 'runner.rb', :runtime=>'ruby')
    end
  rescue Exception
    # The upload did not complete: drop the cached MD5 so the next call
    # retries, then re-raise with the original backtrace. (Exception, not
    # StandardError, so even interrupts cannot leave a stale cache behind.)
    File.delete(md5_f) if File.exist?(md5_f)
    raise
  end
end
|
75
|
+
|
76
|
+
# Fetches the list of gems preinstalled on the IronWorker servers.
def get_server_gems
  response = get("gems/list")
  # The API returns the gem list as a JSON-encoded string field.
  JSON.parse(response["gems"])
end
|
80
|
+
|
81
|
+
# Convenience accessor: delegates to the gem-wide IronWorker.logger.
def logger
  IronWorker.logger
end
|
84
|
+
|
85
|
+
# Locates the on-disk installation path of a gem that is to be merged.
#
# gem_info - hash with :name (possibly carrying a version suffix) and an
#            optional :version to pin an exact version.
# Returns the gem's full_gem_path, or nil when no matching gem is found.
def self.get_gem_path(gem_info)
  # gem_name =(gem_info[:require] || gem_info[:name].match(/^[a-zA-Z0-9\-_]+/)[0])
  # Keep only the leading run of name characters (strips e.g. a version tail).
  gem_name =(gem_info[:name].match(/^[a-zA-Z0-9\-_]+/)[0])
  #puts "Searching for #{gem_name}..."
  # Rubygems' lookup API changed across versions: prefer the modern
  # Gem::Specification route, fall back to the legacy GemPathSearcher.
  gems= Gem::Specification.respond_to?(:each) ? Gem::Specification.find_all_by_name(gem_name) : Gem::GemPathSearcher.new.find_all(gem_name)
  # NOTE(review): probes a private method name to detect yet another rubygems
  # vintage — fragile; Gem::GemPathSearcher no longer exists in current
  # rubygems, so this branch should be verified against supported versions.
  if (!Gem::GemPathSearcher.private_instance_methods.include?(:_deprecated_initialize)) && (!gems || gems.empty?)
    gems = Gem::GemPathSearcher.new.init_gemspecs.select { |gem| gem.name==gem_name }
  end
  IronWorker.logger.debug 'gems found=' + gems.inspect
  # Narrow to the exact requested version when one was pinned.
  gems = gems.select { |g| g.version.version==gem_info[:version] } if gem_info[:version]
  if !gems.empty?
    gem = gems.last
    gem.full_gem_path
  else
    return nil
  end
end
|
102
|
+
|
103
|
+
# Generates the server-side bootstrap script ('runner.rb') for a worker and
# zips it together with the worker source, merged files, merged gems,
# mailers and folders. Returns the path of the zip archive.
#
# filename       - the worker's source file (required last in the script).
# merged         - extra files to bundle (merged with config.merged).
# unmerge        - files to exclude again from the merge set.
# merged_gems    - gems to bundle (merged with config.merged_gems).
# merged_mailers - ActionMailer classes plus their template directories.
# merged_folders - {folder => [files]} map to bundle under hashed names.
def build_merged_file(filename, merged, unmerge, merged_gems, merged_mailers, merged_folders)

  # Start from the globally configured merge set, layer per-call additions
  # on top, then carve out anything explicitly unmerged.
  merge = IronWorker.config.merged.dup
  merge.merge!(merged) if merged
  if unmerge
    unmerge.each_pair do |x, y|
      deleted = merge.delete x
      IronWorker.logger.debug "Unmerging #{x}. Success? #{deleted}"
    end
  end
  merged = merge
  IronWorker.logger.debug 'merged=' + merged.inspect

  # Same dance for gems. NOTE(review): raises NoMethodError when the
  # merged_gems argument is nil — callers appear to always pass a hash.
  merged_gems = merged_gems.merge(IronWorker.config.merged_gems)
  IronWorker.logger.debug 'merged_gems=' + merged_gems.inspect
  IronWorker.config.unmerged_gems.each_pair do |k, v|
    IronWorker.logger.debug 'unmerging gem=' + k.inspect
    merged_gems.delete(k)
  end
  IronWorker.logger.debug 'merged_gems_after=' + merged_gems.inspect

  merged_mailers ||= {}
  merged_mailers = merged_mailers.merge(IronWorker.config.mailers) if IronWorker.config.mailers

  #tmp_file = File.join(Dir.tmpdir(), File.basename(filename))
  # The generated bootstrap always gets the fixed name 'runner.rb'.
  tmp_file = File.join(Dir.tmpdir(), 'runner.rb')
  File.open(tmp_file, "w") do |f|

    # Prologue of the generated script: parse the -d / -id / -payload
    # arguments the IronWorker server passes to every task.
    f.write("
# Find environment (-e)
dirname = ''
i = 0
task_data_file = nil
task_id = nil
#puts \"args for single file=\" + ARGV.inspect
ARGV.each do |arg|
  if arg == \"-d\"
    # the user's writable directory
    dirname = ARGV[i+1]
  end
  if arg == \"-id\"
    # task_id
    task_id = ARGV[i+1]
  end
  if arg == \"-payload\"
    # path to job data
    task_data_file = ARGV[i+1]
  end
  i+=1
end
require 'json'
")
    # require merged gems: point $LOAD_PATH at each bundled gem and emit
    # its require lines. (:require may be nil, a String, or an Array.)
    merged_gems.each_pair do |k, gem|
      IronWorker.logger.debug "Bundling gem #{gem[:name]}..."
      f.write "$LOAD_PATH << File.join(File.dirname(__FILE__), '/gems/#{gem[:name]}/lib')\n"
      IronWorker.logger.debug 'writing requires: ' + gem[:require].inspect
      if gem[:require].nil?
        gem[:require] = []
      elsif gem[:require].is_a?(String)
        gem[:require] = [gem[:require]]
      end
      IronWorker.logger.debug "gem[:require]: " + gem[:require].inspect
      gem[:require].each do |r|
        IronWorker.logger.debug 'adding require to file ' + r.to_s
        f.write "require '#{r}'\n"
      end
    end

    # Inline the contents of server/overrides.rb into the bootstrap.
    File.open(File.join(File.dirname(__FILE__), 'server', 'overrides.rb'), 'r') do |fr|
      while line = fr.gets
        f.write line
      end
    end

    # Now we must disable queuing while loading up classes. This is from the overrides.rb file
    f.write("
IronWorker.disable_queueing()
")


    # Inline server/runner.rb (init_* helpers defined above in this chunk).
    File.open(File.join(File.dirname(__FILE__), "server", 'runner.rb'), 'r') do |fr|
      while line = fr.gets
        f.write line
      end
    end

    # load job data, and fake out Rails.version/Rails.env for rails tasks
    f.write("
# Change to user directory
#puts 'dirname=' + dirname.inspect
Dir.chdir(dirname)
# Load in job data
job_data = JSON.load(File.open(task_data_file))
puts 'job_data=' + job_data.inspect
sw_config = job_data['sw_config']
IronWorker.task_data = job_data

if IronWorker.task_data['rails']
  module ::Rails
    def self.version
      IronWorker.task_data['rails']['version']
    end
    def self.env
      IronWorker.task_data['rails']['env']
    end
  end
end
")

    # Extra user-specified requires, mailer/database init, then requires
    # for every bundled .rb file and finally the worker source itself.
    if IronWorker.config.extra_requires
      IronWorker.config.extra_requires.each do |r|
        f.write "require '#{r}'\n"
      end
    end
    if merged_mailers && !merged_mailers.empty?
      # todo: isn't 'action_mailer already required in railtie?
      f.write "require 'action_mailer'\n"
      f.write "init_mailer(sw_config)\n"
      f.write "ActionMailer::Base.prepend_view_path('templates')\n"
    end
    f.write "init_database_connection(sw_config)\n"

    merged.each_pair do |k, v|
      if v[:extname] == ".rb"
        f.write "require_relative '#{File.basename(v[:path])}'\n"
      end
    end
    merged_mailers.each_pair do |k, mailer|
      f.write "require_relative '#{mailer[:name]}'\n"
    end
    #end
    #f.write File.open(filename, 'r') { |mo| mo.read }
    f.write("require_relative '#{File.basename(filename)}'\n")

    # Epilogue: resolve the worker class, build it, re-enable queueing and
    # run the task.
    f.write("
runner_class = get_class_to_run(job_data['class_name'])
IronWorker.running_class = runner_class
runner = init_runner(runner_class, job_data, dirname, task_id)
init_worker_service_for_runner(job_data)

# Now reenable after loading
IronWorker.enable_queueing()

# Let's run it!
runner_return_data = runner.run
")

  end
  #puts 'funner.rb=' + tmp_file
  merge['runner.rb'] = {:path=>tmp_file}
  #puts 'filename=' + filename
  merge[File.basename(filename)] = {:path=>filename}
  #puts "merge before uniq! " + merge.inspect
  # puts "merge after uniq! " + merge.inspect

  # Package everything into <tmp runner.rb>.zip (legacy rubyzip 0.9 API).
  fname2 = tmp_file + ".zip"
  # puts 'fname2=' + fname2
  # puts 'merged_file_array=' + merge.inspect
  #File.open(fname2, "w") do |f|
  File.delete(fname2) if File.exist?(fname2)
  Zip::ZipFile.open(fname2, 'w') do |f|
    # Bundle each gem's top-level files and lib tree under gems/<name>/.
    if merged_gems && merged_gems.size > 0
      merged_gems.each_pair do |k, gem|
        next unless gem[:merge]
        # puts 'gem=' + gem.inspect
        path = gem[:path]
        if path
          IronWorker.logger.debug "Collecting gem #{path}"
          paths_to_use = ["#{path}/*", "#{path}/lib/**/**"]
          if gem[:include_dirs]
            IronWorker.logger.debug "including extra dirs: " + gem[:include_dirs].inspect
            gem[:include_dirs].each do |dir|
              paths_to_use << "#{path}/#{dir}/**/**"
            end
          end
          IronWorker.logger.debug 'paths_to_use: ' + paths_to_use.inspect
          Dir.glob(paths_to_use).each do |file|
            # todo: could check if directory and it not lib, skip it
            IronWorker.logger.debug 'file for gem=' + file.inspect
            # puts 'gem2=' + gem.inspect
            zdest = "gems/#{gem[:name]}/#{file.sub(path+'/', '')}"
            # puts 'gem file=' + file.to_s
            IronWorker.logger.debug 'zip dest=' + zdest
            f.add(zdest, file)
          end
        else
          if gem[:auto_merged]
            # todo: should only continue if the gem was auto merged.
            IronWorker.logger.warn "Gem #{gem[:name]} #{gem[:version]} was not found, continuing anyways."
          else
            raise "Gem #{gem[:name]} #{gem[:version]} was not found. This will occour when gem_name.gemspec is not the same as the gems primary require."
          end

        end
      end
    end
    # Bundle merged folders under an MD5-hashed directory name to avoid
    # collisions; ':' and the first '/' in each file name become '_'.
    if merged_folders && merged_folders.size > 0
      merged_folders.each do |folder, files|
        IronWorker.logger.debug "Collecting folder #{folder}"
        if files and files.size>0
          files.each do |file|
            zdest = "#{Digest::MD5.hexdigest(folder)}/#{file.sub(':', '_').sub('/', '_')}"
            IronWorker.logger.debug 'put file to=' + zdest
            f.add(zdest, file)
          end
        end
      end
    end

    # Flat-add every merged file (including the generated runner.rb and the
    # worker source registered above) at the archive root.
    IronWorker.logger.debug "merge=" + merge.inspect
    merge.each_pair do |k, v|
      IronWorker.logger.debug "merging k=#{k.inspect} v=#{v.inspect} into #(unknown)"
      f.add(File.basename(v[:path]), v[:path])
    end
    # Mailer classes plus their view templates under templates/<name>/.
    if merged_mailers && merged_mailers.size > 0
      # puts " MERGED MAILERS" + merged_mailers.inspect
      merged_mailers.each_pair do |k, mailer|
        IronWorker.logger.debug "Collecting mailer #{mailer[:name]}"
        f.add(File.basename(mailer[:filename]), mailer[:filename])
        path = mailer[:path_to_templates]
        Dir["#{path}/**/**"].each do |file|
          zdest = "templates/#{mailer[:name]}/#{file.sub(path+'/', '')}"
          f.add(zdest, file)
        end
      end
    end
  end
  fname2
end
|
333
|
+
|
334
|
+
# This will package up files into a zip file ready for uploading.
# Zips the given files (archive paths mirror the input paths) into
# ./package.zip and returns the archive name.
def package_code(files)
  archive = "package.zip"
  File.delete(archive) if File.exist?(archive)
  Zip::ZipFile.open(archive, 'w') do |zip|
    files.each { |path| zip.add(path, path) }
  end
  archive
end
|
345
|
+
|
346
|
+
# Pushes a packaged code archive to the service.
#
# name         - code package name.
# package_file - path to the zip produced by build_merged_file/package_code.
# exec_file    - entry-point file inside the package.
# options:
# :runtime => 'ruby', 'python', 'node', 'java', 'go'
# :project_id => overrides the configured project.
def upload_code(name, package_file, exec_file, options={})
  IronWorker.logger.info 'file size to upload: ' + File.size(package_file).to_s
  # Build the request params in a separate hash. (Previously the caller's
  # options hash was reassigned here, so get_project_id saw the new hash
  # and a caller-supplied :project_id was silently ignored.)
  upload_params = {
    "name"=>name,
    "standalone"=>true,
    "runtime"=>options[:runtime] || "ruby",
    "file_name"=> exec_file # File.basename(filename)
  }
  IronWorker.logger.info "Uploading now..."
  ret = post_file("#{project_url_prefix(get_project_id(options))}codes", File.new(package_file), upload_params)
  IronWorker.logger.info "Done uploading."
  return ret
end
|
362
|
+
|
363
|
+
# Returns the URL prefix for a project's REST resources.
# A project_id of 0 (the default) means "use the configured project".
def project_url_prefix(project_id = 0)
  if project_id == 0
    # Fall back to the configured project. (Previously this branch hit an
    # unconditional `return false`, which made this assignment unreachable
    # and broke every caller that appends a path to the prefix.)
    project_id = config.project_id
  end
  "projects/#{project_id}/"
end
|
371
|
+
|
372
|
+
# Polls the task's status until it leaves the queued/running states or the
# retry budget (100 polls, ~2s apart) is exhausted; returns the last
# status hash seen.
def wait_until_complete(task_id, options={})
  tries = 0
  status = nil
  sleep 1
  while tries < 100
    # Count the attempt — previously `tries` was never incremented, so a
    # task stuck in "queued"/"running" made this loop spin forever.
    tries += 1
    status = status(task_id)
    puts "Waiting... status=" + status["status"]
    if status["status"] != "queued" && status["status"] != "running"
      break
    end
    sleep 2
  end
  status
end
|
386
|
+
|
387
|
+
# Stamps the outgoing request hash with the auth token and API version.
# todo: remove secret key?? Can use worker service from within a worker without it now
def add_sw_params(hash_to_send)
  hash_to_send["oauth"] = token
  hash_to_send["api_version"] = IronWorker.api_version
end
|
392
|
+
|
393
|
+
# Fails fast when the client lacks the configuration needed to reach the
# API (a config object with both token and project_id).
def check_config
  c = self.config
  return unless c.nil? || c.token.nil? || c.project_id.nil?
  raise "Invalid IronWorker configuration, token and project_id required."
end
|
398
|
+
|
399
|
+
# Alias-style wrapper around #queue, kept for API compatibility.
def enqueue(name, data={}, options={})
  queue name, data, options
end
|
402
|
+
|
403
|
+
# name: The name of previously uploaded worker code, eg: MySuperWorker
# data: Arbitrary hash of your own data that your task will need to run.
# Queues one task per entry in data (a single hash is wrapped in an
# array); every payload is tagged with the worker class name and the whole
# batch goes out in a single request.
def queue(name, data={}, options={})
  puts "Queuing #{name}..."
  check_config
  batch = data.is_a?(Array) ? data : [data]
  tasks = batch.map do |payload|
    # Make sure the server knows which class to instantiate.
    payload['class_name'] ||= name
    task = {}
    task["payload"] = payload.to_json
    task["code_name"] = name
    [:priority, :timeout].each do |opt|
      task[opt.to_s] = options[opt] if options[opt]
    end
    task
  end
  hash_to_send = {"options" => options, "tasks" => tasks}
  add_sw_params(hash_to_send)
  if defined?(RAILS_ENV)
    # todo: REMOVE THIS — legacy Rails 2 apps expose RAILS_ENV.
    hash_to_send["rails_env"] = RAILS_ENV
  end
  queue_raw(options[:name] || name, hash_to_send, options)
end
|
433
|
+
|
434
|
+
# Sends a pre-built task payload straight to the tasks endpoint.
def queue_raw(name, data={}, options={})
  body = data
  #hash_to_send["class_name"] = name unless hash_to_send["class_name"]
  body["name"] ||= name
  uri = project_url_prefix(get_project_id(options)) + "tasks"
  IronWorker.logger.debug 'queue_raw , uri = ' + uri
  post(uri, body)
end
|
444
|
+
|
445
|
+
#
# schedule: hash of scheduling options that can include:
# Required:
# - start_at: Time of first run - DateTime or Time object.
# Optional:
# - run_every: Time in seconds between runs. If omitted, task will only run once.
# - delay_type: Fixed Rate or Fixed Delay. Default is fixed_delay.
# - end_at: Scheduled task will stop running after this date (optional, if omitted, runs forever or until cancelled)
# - run_times: Task will run exactly :run_times. For instance if :run_times is 5, then the task will run 5 times.
#
def schedule(name, data, schedule)
  puts "Scheduling #{name}..."
  raise "Schedule must be a hash." if !schedule.is_a? Hash
  # The endpoint takes a batch, even though only one entry is ever sent.
  schedule["payload"] = data.to_json
  schedule["name"] ||= name
  schedule["code_name"] ||= name
  hash_to_send = {"schedules" => [schedule]}
  add_sw_params(hash_to_send)
  # puts ' about to send ' + hash_to_send.inspect
  post(project_url_prefix(get_project_id(data)) + "schedules", hash_to_send)
end
|
471
|
+
|
472
|
+
# Cancels a scheduled task by id.
#
# Raises when no id is given. (`blank?` is an ActiveSupport extension that
# raised NoMethodError outside Rails; replaced with plain Ruby so the gem
# works standalone.)
def cancel_schedule(scheduled_task_id, options={})
  if scheduled_task_id.nil? || scheduled_task_id.to_s.strip.empty?
    raise "Must include a schedule id."
  end
  hash_to_send = {}
  hash_to_send["schedule_id"] = scheduled_task_id
  uri = "#{project_url_prefix(get_project_id(options))}schedules/#{scheduled_task_id}"
  delete(uri, hash_to_send)
end
|
479
|
+
|
480
|
+
# Lists every project visible to this token.
def get_projects
  get("projects", {})
end
|
485
|
+
|
486
|
+
# Project to operate on: an explicit :project_id option wins over the
# configured default.
def get_project_id(options={})
  explicit = options[:project_id]
  explicit || config.project_id
end
|
489
|
+
|
490
|
+
# Fetches metadata for the current (or :project_id-selected) project.
def get_project(options={})
  # Interpolate rather than String#+ so a non-String project id cannot
  # raise TypeError (consistent with project_url_prefix).
  get("projects/#{get_project_id(options)}/", {})
end
|
499
|
+
|
500
|
+
# Lists the uploaded code packages for the project.
def get_codes(options={})
  # Interpolation keeps this safe for non-String project ids
  # (String#+ raised TypeError), matching project_url_prefix.
  uri = "projects/#{get_project_id(options)}/codes/"
  get(uri, {})
end
|
506
|
+
|
507
|
+
# Lists the scheduled tasks for the project.
def get_schedules(options={})
  # Interpolation keeps this safe for non-String project ids
  # (String#+ raised TypeError), matching project_url_prefix.
  uri = "projects/#{get_project_id(options)}/schedules/"
  get(uri, {})
end
|
513
|
+
|
514
|
+
# Lists the tasks (jobs) for the project.
def get_jobs(options={})
  # Interpolation keeps this safe for non-String project ids
  # (String#+ raised TypeError), matching project_url_prefix.
  uri = "projects/#{get_project_id(options)}/tasks/"
  get(uri, {})
end
|
520
|
+
|
521
|
+
# Backwards-compatible alias for #log.
def get_log(job_id, options={})
  log job_id, options
end
|
524
|
+
|
525
|
+
# Fetches the raw (unparsed) log output for a task.
def log(task_id, options={})
  get("#{project_url_prefix(get_project_id(options))}tasks/#{task_id}/log", {}, :parse=>false)
end
|
530
|
+
|
531
|
+
|
532
|
+
# Returns the current status hash for a task.
def status(task_id, options={})
  params = {"task_id"=>task_id}
  get("#{project_url_prefix(get_project_id(options))}tasks/#{task_id}", params)
end
|
537
|
+
|
538
|
+
# Returns the current status hash for a scheduled task.
def schedule_status(schedule_id, options={})
  params = {"schedule_id"=>schedule_id}
  get("#{project_url_prefix(get_project_id(options))}schedules/#{schedule_id}", params)
end
|
543
|
+
|
544
|
+
# data is a hash, should include 'percent' and 'msg'
# Reports task progress back to the service; the options hash itself is
# posted as the progress payload.
def set_progress(task_id, options={})
  #data={"data"=>data, "task_id"=>task_id}
  post("#{project_url_prefix(get_project_id(options))}tasks/#{task_id}/progress", options)
end
|
549
|
+
|
550
|
+
|
551
|
+
end
|
552
|
+
|
553
|
+
end
|