fanforce-worker 0.16.0 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +6 -0
- data/Rakefile +40 -8
- data/fanforce-worker.gemspec +1 -0
- data/lib/fanforce/worker.rb +3 -0
- data/lib/fanforce/worker/errors.rb +189 -0
- data/lib/fanforce/worker/runner.rb +111 -0
- data/lib/fanforce/worker/utils.rb +10 -0
- data/lib/fanforce/worker/version.rb +1 -1
- data/lib/fanforce/worker/worker.rb +23 -226
- data/test/controllers/enqueue_test.rb +27 -0
- data/test/controllers/error_test.rb +33 -0
- data/test/test_helper.rb +23 -0
- metadata +26 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d36e19ed57fc30867e1b0f89a4b79c364eb53585
+  data.tar.gz: 7ead02630173e5c0838edec00eda291796c54745
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 53aa7c5275e2a824637d3c4f5d65ae7312cd92d356145153c076c2242bda2dc1043e47e05a455e4649169a6c00bfd7fec598638c032b1b2cf9f9277867e29ff0
+  data.tar.gz: 67c2b488289bbf1e5eb400bd5721c2f33e3473b9ea87aaf1e846ae558fddc975c392613f373aafc16a9612f6c0dfe8e7f8240dc9d737b218ab0fa8bd1ba21b9c
data/Gemfile
CHANGED
data/Rakefile
CHANGED
@@ -1,16 +1,48 @@
-
-require '
+#!/usr/bin/env rake
+require 'rubygems'
+require 'bundler/setup'
 require 'fileutils'
-
+require 'logger'
+require 'rake/testtask'
+require 'active_support/all'
+require 'bundler/gem_tasks'
+
+#########################################################################################################
 
-#
-
+# Load ENV vars
+if FileTest.exist?('.powenv')
+  File.open('.powenv', 'rb') do |powenv|
+    contents = powenv.read
 
-
+    lines = contents.gsub('export ', '').split(/\n\r?/).reject{|line| line.blank?}
+    lines.each do |line|
+      keyValue = line.split('=', 2)
+      next unless keyValue.count == 2
+      ENV[keyValue.first] = keyValue.last.gsub("'",'').gsub('"','')
+    end
+  end
+end if !ENV['RACK_ENV'] or ENV['RACK_ENV'] == 'development'
 
 Rake::TestTask.new do |t|
-  t.libs.push
-  t.libs.push
+  t.libs.push 'lib'
+  t.libs.push 'test'
   t.pattern = 'test/**/*_test.rb'
   t.verbose = false
 end
+
+#########################################################################################################
+
+Rake::TaskManager.record_task_metadata = true
+
+task :default do
+  puts "\nAVAILABLE TASKS:"
+  Rake.application.options.show_tasks = :tasks
+  Rake.application.options.full_description = false
+  Rake.application.options.show_task_pattern = //
+  Rake.application.display_tasks_and_comments
+  puts "\n"
+end
+
+task :environment do
+
+end
data/fanforce-worker.gemspec
CHANGED
@@ -20,6 +20,7 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency 'iron_cache', '~> 1.4.2'
   gem.add_runtime_dependency 'uuidtools', '~> 2.1.4'
   gem.add_runtime_dependency 'activesupport', '3.2.13'
+  gem.add_runtime_dependency 'redis', '~> 3.0.7'
 
   gem.add_runtime_dependency 'fanforce-api', '~> 0.18'
 end
data/lib/fanforce/worker.rb
CHANGED
data/lib/fanforce/worker/errors.rb
ADDED
@@ -0,0 +1,189 @@
+require 'redis'
+require 'uuidtools'
+
+class Fanforce::Worker::Errors
+  include Fanforce::Worker::Utils
+
+  @@redis = nil
+
+  def self.redis
+    @@redis ||= Redis.new(url: Fanforce::Worker.redis_url)
+  end
+
+  def self.find(queue_id, error_id)
+    Fanforce::Worker::Error.new(queue_id, error_id)
+  end
+
+  def self.list(queue_id, from_num=0, to_num=100)
+    error_ids = redis.lrange("ERRORS:#{queue_id}", from_num, to_num)
+    Fanforce::Worker::ErrorList.new(queue_id, error_ids)
+  end
+
+  def self.list_summaries(queue_id, from_num=0, to_num=100)
+    error_ids = redis.lrange("ERRORS:#{queue_id}", from_num, to_num)
+    Fanforce::Worker::ErrorList.new(queue_id, error_ids).summaries
+  end
+
+  def self.delete_all(queue_id, error_ids)
+    Fanforce::Worker::ErrorList.new(queue_id, error_ids).delete
+  end
+
+  def self.retry_all(queue_id, error_ids)
+    Fanforce::Worker::ErrorList.new(queue_id, error_ids).retry
+  end
+
+  def self.add(queue_id, e, job_data, worker_env)
+    error_id = UUIDTools::UUID.random_create.to_s
+    error = {
+      error_id: error_id,
+      exception: e.class.name,
+      http_code: (e.code if e.respond_to?(:code)),
+      message: e.message,
+      backtrace: e.backtrace.to_json,
+      errored_at: Time.now,
+      queue_id: queue_id,
+      raw_json: job_data[:params].to_json,
+      env_vars: worker_env.to_json,
+      retries: job_data[:retries],
+      curl_command: (e.curl_command if e.respond_to?(:curl_command))
+    }
+    redis.rpush("ERRORS:#{queue_id}", error_id)
+    redis.hmset("ERRORS:#{queue_id}:#{error_id}", error.flatten)
+  rescue => e
+    log.fatal '-----------------------------------------------------'
+    log.fatal 'WORKER ERROR WHILE RECOVERING FROM JOB ERROR:'
+    log.fatal e.message
+    log.fatal e.backtrace
+    log.fatal '-----------------------------------------------------'
+    log.fatal 'JOB ERROR:'
+    error.each {|k,v| log.fatal "#{k}: #{v}" }
+  end
+
+end
+
+###################################################################################################################
+
+class Fanforce::Worker::Error
+
+  def initialize(queue_id, error_id)
+    @queue_id = queue_id
+    @error_id = error_id
+  end
+
+  def delete
+    self.class.delete(@queue_id, @error_id)
+    return nil
+  end
+
+  def details
+    v = self.class.get_details(@queue_id, @error_id)
+    self.class.format_details(v)
+  end
+
+  def summary
+    v = self.class.get_summary(@queue_id, @error_id)
+    self.class.format_summary(v)
+  end
+
+  def retry
+    v = Error.get_all(@queue_id, @error_id)
+    Error.retry(@queue_id, v)
+  end
+
+  ###################################################################################################################
+
+  SUMMARY_FIELDS = [:error_id, :exception, :message, :errored_at, :raw_json, :retries]
+  DETAIL_FIELDS = [:error_id, :exception, :http_code, :message, :backtrace, :errored_at, :raw_json, :env_vars, :retries, :curl_command]
+
+  def self.redis; Fanforce::Worker::Errors.redis end
+
+  def self.delete(queue_id, error_id)
+    redis.srem("ERRORS:#{queue_id}", error_id)
+    redis.del("ERRORS:#{queue_id}:#{error_id}")
+  end
+
+  def self.get_summary(queue_id, error_id)
+    redis.hmget("ERRORS:#{queue_id}:#{error_id}", *SUMMARY_FIELDS)
+  end
+
+  def self.format_summary(summary)
+    format(Hash[SUMMARY_FIELDS.zip(summary)])
+  end
+
+  def self.format(error)
+    error.each do |k,v|
+      error[k] = case k
+        when :backtrace then MultiJson.load(v) rescue []
+        when :env_vars then MultiJson.load(v) rescue {}
+        when :retries then v.to_i
+        else v
+      end
+    end
+  end
+
+  def self.get_details(queue_id, error_id)
+    redis.hmget("ERRORS:#{queue_id}:#{error_id}", *DETAIL_FIELDS)
+  end
+
+  def self.format_details(details)
+    format(Hash[DETAIL_FIELDS.zip(details)])
+  end
+
+  def self.get_all(queue_id, error_id)
+    redis.hgetall("ERRORS:#{queue_id}:#{error_id}")
+  end
+
+  def self.retry(queue_id, raw_error)
+    error = format(raw_error.symbolize_keys)
+    params = MultiJson.load(v, symbolize_keys: true) rescue {}
+    Fanforce::Worker.enqueue(queue_id, params, :retries => error[:retries] + 1)
+    delete(:queue_id, error[:_id])
+  end
+
+end
+
+###################################################################################################################
+
+class Fanforce::Worker::ErrorList
+  Error = Fanforce::Worker::Error
+
+  def initialize(queue_id, error_ids)
+    @queue_id = queue_id
+    @error_ids = error_ids
+  end
+
+  def redis; Fanforce::Worker::Errors.redis end
+
+  def summaries
+    redis_responses = []
+    redis.multi do
+      @error_ids.each do |error_id|
+        redis_responses << Error.get_summary(@queue_id, error_id)
+      end
+    end
+    redis_responses.map do |redis_response|
+      Error.format_summary(redis_response.value)
+    end
+  end
+
+  def retry
+    redis_responses = []
+    redis.multi do
+      @error_ids.each do |error_id|
+        redis_responses << Error.get_all(@queue_id, error_id)
+      end
+    end
+    redis_responses.map do |redis_response|
+      Error.retry(@queue_id, redis_response.value)
+    end
+  end
+
+  def delete
+    redis.multi do
+      @error_ids.each do |error_id|
+        Error.delete(@queue_id, error_id)
+      end
+    end
+    return nil
+  end
+end
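
A rough usage sketch of the Redis-backed error log introduced in errors.rb; none of this comes from the gem's own documentation. The queue name is a placeholder, and it assumes Fanforce::Worker.set_config (or the corresponding environment variables) has already supplied IronMQ credentials and a Redis URL:

    require 'fanforce/worker'
    require 'fanforce/worker/errors'

    queue_id = 'my-queue'  # placeholder queue name

    # One-line summary for up to the first 100 logged errors on this queue.
    Fanforce::Worker::Errors.list_summaries(queue_id).each do |summary|
      puts "#{summary[:errored_at]} #{summary[:exception]}: #{summary[:message]} (retries: #{summary[:retries]})"
    end

    # Full details (backtrace, env vars, curl command) for the oldest error, if any.
    error_id = Fanforce::Worker::Errors.redis.lrange("ERRORS:#{queue_id}", 0, 0).first
    puts Fanforce::Worker::Errors.find(queue_id, error_id).details.inspect if error_id

    # Bulk operations also exist: Errors.retry_all(queue_id, error_ids) and
    # Errors.delete_all(queue_id, error_ids).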
data/lib/fanforce/worker/runner.rb
ADDED
@@ -0,0 +1,111 @@
+require 'iron_mq'
+require 'fanforce/api'
+require 'timeout'
+
+class Fanforce::Worker::Runner
+  include Fanforce::Worker::Utils
+
+  MAX_EXECUTION_TIME = 3300
+  class Timeout < RuntimeError; end
+
+  def initialize(worker_data, min_execution_time=300, &code_block)
+    raise "min_execution_time was set to #{min_execution_time}, which is #{min_execution_time - MAX_EXECUTION_TIME} seconds too long" if min_execution_time > MAX_EXECUTION_TIME
+    log.debug 'LOADING WORKER ENV'
+
+    @worker_started_at = Time.now
+
+    @queue_id = worker_data['queue_id'] || (raise 'worker_data must contain queue_id')
+    @worker_env = worker_data['env_vars'] || {}
+
+    @min_execution_time = min_execution_time
+    @code_block = code_block
+
+    load_env
+    load_jobs
+  end
+
+  def load_jobs
+    log.debug 'PROCESSING JOBS...'
+    log.debug '------------------------------------------------------------------------------------'
+    job_num = 0
+    job_data = nil
+    while job_has_enough_time_to_run and (job = Fanforce::Worker.iron_mq.queue(@queue_id).get(timeout: 3600)) do
+      log.debug "- JOB #{job_num+=1}: #{job.body}"
+      timeout(worker_time_remaining, Timeout) do
+        job_data = nil
+        job_data = Fanforce.decode_json(job.body)
+        run_job(job, job_data, &@code_block)
+      end
+      delete_job
+      log.debug '------------------------------------------------------------------------------------'
+    end
+    delete_job
+    log.debug 'WINDING DOWN WORKER!'
+
+  rescue Exception => e
+    handle_job_loading_error(e, job, job_data)
+  end
+
+  def load_env
+    if File.exists?('.developmentenv.rb')
+      require '.developmentenv'
+    elsif File.exists?('.stagingenv.rb')
+      require '.stagingenv'
+    elsif File.exists?('.productionenv.rb')
+      require '.productionenv'
+    end
+  end
+
+  def run_job(job, job_data, &code_block)
+    @current_job = job
+    @current_params = job_data[:params]
+    @current_retries = job_data[:retries]
+
+    set_env_vars(@worker_env)
+    code_block.call(job_data[:params].clone, retries: job_data[:retries], queue_id: @queue_id)
+    delete_job(job)
+
+  rescue Exception => e
+    handle_job_error(e, job, job_data)
+  end
+
+  def handle_job_loading_error(e, job, job_data)
+    raise($!, "#{$!}: THERE IS NO JOB", $!.backtrace) if job.nil?
+
+    delete_job(job)
+    log.debug 'REMOVED JOB FROM QUEUE, BUT COULD NOT SAVE TO ERROR CACHE...'
+    raise($!, "#{$!}: #{job_data.to_json}", $!.backtrace)
+  end
+
+  def handle_job_error(e, job, job_data)
+    raise($!, "#{$!}: THERE IS NO JOB", $!.backtrace) if job.nil?
+
+    delete_job(job)
+    require_relative 'errors'
+    log.debug 'REMOVED JOB FROM QUEUE, AND SAVING TO ERROR CACHE...'
+    Fanforce::Worker::Errors.add(@queue_id, e, job_data, @worker_env)
+  end
+
+  def worker_time_remaining
+    time_since_load = Time.now - @worker_started_at
+    MAX_EXECUTION_TIME - time_since_load
+  end
+
+  def job_has_enough_time_to_run
+    time_since_load = Time.now - @worker_started_at
+    return false if time_since_load > MAX_EXECUTION_TIME
+    return false if worker_time_remaining < @min_execution_time
+    return true
+  end
+
+  def set_env_vars(vars)
+    vars.each {|k,v| ENV[k.to_s]=v }
+  end
+
+  def delete_job(job=nil)
+    return if job.nil? and @current_job.nil?
+    (job || @current_job).delete
+    @current_job = nil
+  end
+
+end
data/lib/fanforce/worker/worker.rb
CHANGED
@@ -1,251 +1,48 @@
-require 'timeout'
-
 class Fanforce::Worker
 
-
-
-
-  MAX_EXECUTION_TIME = 3300
+  @@iron = {}
+  @@redis = {}
+  @@log = Logger.new($stdout)
 
-  def
-
-  end
-
-  def iron_mq
-    require 'iron_mq'
-    @iron_mq ||= IronMQ::Client.new(:token => @opts[:token] || ENV['IRON_TOKEN'], :project_id => @opts[:project_id] || ENV['IRON_PROJECT_ID'])
+  def self.iron_token
+    @@iron[:token] || ENV['IRON_TOKEN']
   end
 
-  def
-
-    @iron_cache ||= IronCache::Client.new(:token => @opts[:token] || ENV['IRON_TOKEN'], :project_id => @opts[:project_id] || ENV['IRON_PROJECT_ID'])
+  def self.iron_project_id
+    @@iron[:project_id] || ENV['IRON_PROJECT_ID']
   end
 
-  def
-
-
-    iron_mq.queue(queue_id).post({params: params, retries: retries}.to_json, options)
-  end
-
-  def add_error(queue_id, error)
-    require 'uuidtools'
-    details_id = UUIDTools::UUID.random_create.to_s
-    iron_cache.cache("#{queue_id}-ERRORS").put(details_id, error.to_json)
-    iron_mq.queue("#{queue_id}-ERRORS").post({
-      details_id: details_id,
-      http_code: error[:http_code],
-      exception: truncate(error[:exception]),
-      message: truncate(error[:message].to_s),
-      params: truncate(error[:params].to_json),
-      errored_at: error[:errored_at],
-      retries: error[:retries],
-      env_vars: truncate(error[:env_vars].to_json),
-      curl_command: truncate(error[:curl_command].to_s)
-    }.to_json)
-  rescue => e
-    puts '-----------------------------------------------------'
-    puts 'WORKER ERROR WHILE RECOVERING FROM JOB ERROR:'
-    puts e.message
-    puts e.backtrace
-    puts '-----------------------------------------------------'
-    puts 'JOB ERROR:'
-    puts "details_id: #{details_id}"
-    puts "http_code: #{error[:http_code]}"
-    puts "exception: #{truncate(error[:exception])}"
-    puts "message: #{truncate(error[:message].to_s)}"
-    puts "params: #{truncate(error[:params].to_json)}"
-    puts "errored_at: #{error[:errored_at]}"
-    puts "retries: #{error[:retries]}"
-    puts "env_vars: #{truncate(error[:env_vars].to_json)}"
-    puts "curl_command: #{truncate(error[:curl_command].to_s)}"
-  end
-
-  def delete_error(queue_id, job_id, details_id)
-    iron_mq.queue("#{queue_id}-ERRORS").delete(job_id)
-    iron_cache.cache("#{queue_id}-ERRORS").delete(details_id)
+  def self.iron_mq
+    require 'iron_mq'
+    @@iron[:mq] ||= IronMQ::Client.new(:token => @@iron[:token], :project_id => @@iron[:project_id])
   end
 
-  def
-
-    MultiJson.load(cache.value, :symbolize_keys => true)
+  def self.redis_url
+    @@redis[:url] || ENV['REDIS_URL_ERRORLOG']
   end
 
-  def
-
-
-
-
-    cache.delete and iron_mq.queue("#{queue_id}-ERRORS").delete(job_id)
+  def self.set_config(obj)
+    @@iron[:token] = obj[:iron_token] if obj[:iron_token]
+    @@iron[:project_id] = obj[:iron_project_id] if obj[:iron_project_id]
+    @@redis[:url] = obj[:redis_url] if obj[:redis_url]
   end
 
-  def
-
-    l = length - truncate_string.chars.to_a.length
-    chars = text.chars.to_a
-    (chars.length > length ? chars[0...l].join('') + truncate_string : text).to_s
-  end
+  def self.log
+    @@log
   end
 
   ##########################################################################################
 
   def self.enqueue(queue_id, params, options={})
-
-  end
-
-  def self.add_error(queue_id, error)
-    self.new.add_error(queue_id, error)
-  end
-
-  ##########################################################################################
-
-  def self.current_queue_id=(queue_id)
-    @current_queue_id = queue_id
-  end
-
-  def self.current_queue_id
-    @current_queue_id
-  end
-
-  def self.current_worker_env=(env_vars)
-    @current_worker_env = env_vars
-  end
-
-  def self.current_worker_env
-    @current_worker_env
-  end
-
-  def self.current_params=(params)
-    @current_params = params
-  end
-
-  def self.current_params
-    @current_params
-  end
-
-  def self.current_retries=(retries)
-    @current_retries = retries
-  end
-
-  def self.current_retries
-    @current_retries
-  end
-
-  def self.current_job=(job)
-    @current_job = job
-  end
-
-  def self.current_job
-    @current_job
-  end
+    raise 'Params being sent to the queue must be a Hash' if !params.is_a?(Hash)
 
-
-
-    require '.developmentenv'
-  elsif File.exists?('.stagingenv.rb')
-    require '.stagingenv'
-  elsif File.exists?('.productionenv.rb')
-    require '.productionenv'
-  end
+    retries = (options[:retries].present?) ? options.delete(:retries) : 0
+    iron_mq.queue(queue_id).post({params: params, retries: retries}.to_json, options)
   end
 
   def self.run(worker_data, min_execution_time=300, &code_block)
-
-
-    puts 'LOADING WORKER ENV...'
-    load_env
-    require 'iron_mq'
-    require 'iron_cache'
-    require 'fanforce/api'
-    require 'active_support/all'
-
-    self.current_queue_id = worker_data['queue_id']
-    self.current_worker_env = worker_data['env_vars']
-    queue = IronMQ::Client.new.queue(current_queue_id)
-
-    puts 'PROCESSING JOBS...'
-    puts '------------------------------------------------------------------------------------'
-    job_num = 0
-    job_data = nil
-    while job_has_enough_time_to_run(min_execution_time) and (job = queue.get(timeout: 3600)) do
-      puts "- JOB #{job_num+=1}: #{job.body}"
-      timeout(worker_time_remaining, Fanforce::Worker::Timeout) do
-        job_data = nil
-        job_data = Fanforce.decode_json(job.body)
-        run_job(job, job_data, &code_block)
-      end
-      self.delete_job
-      puts '------------------------------------------------------------------------------------'
-    end
-    self.delete_job
-    puts 'WINDING DOWN WORKER!'
-  rescue Exception => e
-    handle_job_loading_error(e, job, job_data)
-  end
-
-  def self.worker_time_remaining
-    time_since_load = Time.now - LOADED_AT
-    MAX_EXECUTION_TIME - time_since_load
-  end
-
-  def self.job_has_enough_time_to_run(min_execution_time)
-    time_since_load = Time.now - LOADED_AT
-    return false if time_since_load > MAX_EXECUTION_TIME
-    return false if worker_time_remaining < min_execution_time
-    return true
-  end
-
-  def self.retry(options)
-    self.new.enqueue(current_queue_id, current_params, options.merge(retries: current_retries + 1))
-  end
-
-  def self.run_job(job, job_data, &code_block)
-    self.current_job = job
-    self.current_params = job_data[:params]
-    self.current_retries = job_data[:retries]
-
-    set_env_vars(current_worker_env)
-    code_block.call(job_data[:params].clone, retries: job_data[:retries], queue_id: current_queue_id)
-    self.delete_job(job)
-
-  rescue Exception => e
-    handle_job_error(e, job, job_data)
-  end
-
-  def self.handle_job_loading_error(e, job, job_data)
-    raise($!, "#{$!}: THERE IS NO JOB", $!.backtrace) if job.nil?
-
-    self.delete_job(job)
-    puts 'REMOVED JOB FROM QUEUE, BUT COULD NOT SAVE TO ERROR CACHE...'
-    raise($!, "#{$!}: #{job_data.to_json}", $!.backtrace)
-  end
-
-  def self.handle_job_error(e, job, job_data)
-    raise($!, "#{$!}: THERE IS NO JOB", $!.backtrace) if job.nil?
-
-    error = job_data.merge(
-      http_code: (e.code if e.respond_to?(:code)),
-      exception: e.class.name,
-      message: e.message,
-      backtrace: e.backtrace,
-      errored_at: Time.now,
-      env_vars: current_worker_env
-    )
-    error[:curl_command] = e.curl_command if e.respond_to?(:curl_command)
-
-    self.delete_job(job)
-    puts 'REMOVED JOB FROM QUEUE, AND SAVING TO ERROR CACHE...'
-    puts error.to_json
-    self.add_error current_queue_id, error
-  end
-
-  def self.delete_job(job=nil)
-    return if job.nil? and current_job.nil?
-    (job || current_job).delete
-    self.current_job = nil
-  end
-
-  def self.set_env_vars(vars)
-    vars.each {|k,v| ENV[k.to_s]=v }
+    require_relative 'runner'
+    Runner.new(worker_data, min_execution_time, &code_block)
   end
 
 end
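
With the old queue/cache plumbing moved into runner.rb and errors.rb, the trimmed worker.rb above exposes a small surface: set_config, enqueue, and run. A minimal end-to-end sketch under those assumptions (queue name and environment variable values are placeholders, not real credentials):

    require 'fanforce/worker'

    Fanforce::Worker.set_config(
      iron_token: ENV['IRON_TOKEN'],            # assumed to be set in the environment
      iron_project_id: ENV['IRON_PROJECT_ID'],
      redis_url: ENV['REDIS_URL_ERRORLOG']
    )

    # Producer side: post a job onto an IronMQ queue.
    Fanforce::Worker.enqueue('my-queue', {name: 'example'})

    # Worker side: drain the queue; each job's params are yielded to the block,
    # and any exception raised here is recorded via Fanforce::Worker::Errors.
    Fanforce::Worker.run({'queue_id' => 'my-queue'}) do |params, info|
      puts "processing #{params[:name]} (after #{info[:retries]} retries)"
    end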
data/test/controllers/enqueue_test.rb
ADDED
@@ -0,0 +1,27 @@
+require 'test_helper'
+
+describe Fanforce::Worker do
+
+  before do
+    clean_dbs
+  end
+
+  it 'should enqueue a job' do
+    Fanforce::Worker.enqueue('test', {name: 'caleb'})
+    assert Fanforce::Worker.iron_mq.queue('test').size == 1
+  end
+
+  it 'should run an enqueued job' do
+    Fanforce::Worker.enqueue('test', {name: 'caleb'})
+    processed_job = false
+
+    Fanforce::Worker.run({'queue_id' => 'test'}) do |params|
+      processed_job = true
+      assert params[:name] == 'caleb'
+    end
+
+    assert processed_job == true
+    assert Fanforce::Worker.iron_mq.queue('test').size == 0
+  end
+
+end
data/test/controllers/error_test.rb
ADDED
@@ -0,0 +1,33 @@
+require 'test_helper'
+require 'fanforce/worker/errors'
+
+describe Fanforce::Worker::Errors do
+
+  before do
+    clean_dbs
+  end
+
+  it 'should save error to redis' do
+    Fanforce::Worker.enqueue('test', {name: 'caleb'})
+    ran_job = false
+
+    Fanforce::Worker.run({'queue_id' => 'test'}) do |params|
+      ran_job = true
+      raise 'test'
+    end
+    assert ran_job == true
+    assert Fanforce::Worker::Errors.list_summaries('test').size == 1
+    assert Fanforce::Worker.iron_mq.queue('test').size == 0
+  end
+
+  it 'should correctly retry error' do
+    Fanforce::Worker.enqueue('test', {name: 'caleb'})
+
+    Fanforce::Worker.run({'queue_id' => 'test'}) do |params|
+      raise 'test'
+    end
+    assert Fanforce::Worker::Errors.list('test').retry
+    assert Fanforce::Worker.iron_mq.queue('test').size == 1
+  end
+
+end
data/test/test_helper.rb
ADDED
@@ -0,0 +1,23 @@
+require 'rubygems'
+require 'minitest/autorun'
+require 'minitest/spec'
+require 'rack/test'
+
+ENV['RACK_ENV'] = 'test'
+
+require 'fanforce/worker'
+require 'fanforce/worker/errors'
+
+# Clean databases before each test case
+def clean_dbs
+  Fanforce::Worker.set_config(
+    iron_token: 'TF0YNz8na2H_reJ7_EoRnQAeKHM',
+    iron_project_id: '53725dfe0db3ef0005000001',
+    redis_url: 'redis://localhost:6379/5'
+  )
+  Fanforce::Worker.iron_mq.queues.all.each do |queue|
+    queue.delete_queue
+  end
+  Fanforce::Worker::Errors.redis.flushdb
+  Fanforce::Worker.log.level = Logger::FATAL
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fanforce-worker
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.17.0
 platform: ruby
 authors:
 - Caleb Clark
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-05-
+date: 2014-05-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: iron_mq
@@ -66,6 +66,20 @@ dependencies:
     - - '='
       - !ruby/object:Gem::Version
         version: 3.2.13
+- !ruby/object:Gem::Dependency
+  name: redis
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: 3.0.7
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: 3.0.7
 - !ruby/object:Gem::Dependency
   name: fanforce-api
   requirement: !ruby/object:Gem::Requirement
@@ -93,8 +107,14 @@ files:
 - Rakefile
 - fanforce-worker.gemspec
 - lib/fanforce/worker.rb
+- lib/fanforce/worker/errors.rb
+- lib/fanforce/worker/runner.rb
+- lib/fanforce/worker/utils.rb
 - lib/fanforce/worker/version.rb
 - lib/fanforce/worker/worker.rb
+- test/controllers/enqueue_test.rb
+- test/controllers/error_test.rb
+- test/test_helper.rb
 homepage: http://github.com/fanforce/fanforce-worker
 licenses: []
 metadata: {}
@@ -118,4 +138,7 @@ rubygems_version: 2.0.3
 signing_key:
 specification_version: 4
 summary: Fanforce worker used by various Fanforce addons for background processing.
-test_files:
+test_files:
+- test/controllers/enqueue_test.rb
+- test/controllers/error_test.rb
+- test/test_helper.rb