qs 0.0.1 → 0.1.0

Files changed (65)
  1. data/.gitignore +1 -0
  2. data/Gemfile +6 -1
  3. data/LICENSE.txt +1 -1
  4. data/bench/config.qs +46 -0
  5. data/bench/queue.rb +8 -0
  6. data/bench/report.rb +114 -0
  7. data/bench/report.txt +11 -0
  8. data/bin/qs +7 -0
  9. data/lib/qs/cli.rb +124 -0
  10. data/lib/qs/client.rb +121 -0
  11. data/lib/qs/config_file.rb +79 -0
  12. data/lib/qs/daemon.rb +350 -0
  13. data/lib/qs/daemon_data.rb +46 -0
  14. data/lib/qs/error_handler.rb +58 -0
  15. data/lib/qs/job.rb +70 -0
  16. data/lib/qs/job_handler.rb +90 -0
  17. data/lib/qs/logger.rb +23 -0
  18. data/lib/qs/payload_handler.rb +136 -0
  19. data/lib/qs/pid_file.rb +42 -0
  20. data/lib/qs/process.rb +136 -0
  21. data/lib/qs/process_signal.rb +20 -0
  22. data/lib/qs/qs_runner.rb +49 -0
  23. data/lib/qs/queue.rb +69 -0
  24. data/lib/qs/redis_item.rb +33 -0
  25. data/lib/qs/route.rb +52 -0
  26. data/lib/qs/runner.rb +26 -0
  27. data/lib/qs/test_helpers.rb +17 -0
  28. data/lib/qs/test_runner.rb +43 -0
  29. data/lib/qs/version.rb +1 -1
  30. data/lib/qs.rb +92 -2
  31. data/qs.gemspec +7 -2
  32. data/test/helper.rb +8 -1
  33. data/test/support/app_daemon.rb +74 -0
  34. data/test/support/config.qs +7 -0
  35. data/test/support/config_files/empty.qs +0 -0
  36. data/test/support/config_files/invalid.qs +1 -0
  37. data/test/support/config_files/valid.qs +7 -0
  38. data/test/support/config_invalid_run.qs +3 -0
  39. data/test/support/config_no_run.qs +0 -0
  40. data/test/support/factory.rb +14 -0
  41. data/test/support/pid_file_spy.rb +19 -0
  42. data/test/support/runner_spy.rb +17 -0
  43. data/test/system/daemon_tests.rb +226 -0
  44. data/test/unit/cli_tests.rb +188 -0
  45. data/test/unit/client_tests.rb +269 -0
  46. data/test/unit/config_file_tests.rb +59 -0
  47. data/test/unit/daemon_data_tests.rb +96 -0
  48. data/test/unit/daemon_tests.rb +702 -0
  49. data/test/unit/error_handler_tests.rb +163 -0
  50. data/test/unit/job_handler_tests.rb +253 -0
  51. data/test/unit/job_tests.rb +132 -0
  52. data/test/unit/logger_tests.rb +38 -0
  53. data/test/unit/payload_handler_tests.rb +276 -0
  54. data/test/unit/pid_file_tests.rb +70 -0
  55. data/test/unit/process_signal_tests.rb +61 -0
  56. data/test/unit/process_tests.rb +371 -0
  57. data/test/unit/qs_runner_tests.rb +166 -0
  58. data/test/unit/qs_tests.rb +217 -0
  59. data/test/unit/queue_tests.rb +132 -0
  60. data/test/unit/redis_item_tests.rb +49 -0
  61. data/test/unit/route_tests.rb +81 -0
  62. data/test/unit/runner_tests.rb +63 -0
  63. data/test/unit/test_helper_tests.rb +61 -0
  64. data/test/unit/test_runner_tests.rb +128 -0
  65. metadata +180 -15
data/lib/qs/daemon.rb ADDED
@@ -0,0 +1,350 @@
+ require 'dat-worker-pool'
+ require 'ns-options'
+ require 'pathname'
+ require 'system_timer'
+ require 'thread'
+ require 'qs'
+ require 'qs/client'
+ require 'qs/daemon_data'
+ require 'qs/logger'
+ require 'qs/payload_handler'
+ require 'qs/redis_item'
+
+ module Qs
+
+   module Daemon
+
+     InvalidError = Class.new(ArgumentError)
+
+     def self.included(klass)
+       klass.class_eval do
+         extend ClassMethods
+         include InstanceMethods
+       end
+     end
+
+     module InstanceMethods
+
+       attr_reader :daemon_data, :logger
+       attr_reader :signals_redis_key, :queue_redis_keys
+
+       # * Set the size of the client to the max workers + 1. This ensures we
+       #   have 1 connection for fetching work from redis and at least 1
+       #   connection for each worker to requeue its job during a hard shutdown.
+       def initialize
+         self.class.configuration.validate!
+         Qs.init
+         @daemon_data = DaemonData.new(self.class.configuration.to_hash)
+         @logger = @daemon_data.logger
+
+         @client = QsClient.new(Qs.redis_config.merge({
+           :timeout => 1,
+           :size => self.daemon_data.max_workers + 1
+         }))
+         @queue_redis_keys = self.daemon_data.queue_redis_keys
+
+         @work_loop_thread = nil
+         @worker_pool = nil
+
+         @signals_redis_key = "signals:#{@daemon_data.name}-" \
+                              "#{Socket.gethostname}-#{::Process.pid}"
+
+         @worker_available_io = IOPipe.new
+         @signal = Signal.new(:stop)
+       rescue InvalidError => exception
+         exception.set_backtrace(caller)
+         raise exception
+       end
+
+       def name
+         @daemon_data.name
+       end
+
+       def pid_file
+         @daemon_data.pid_file
+       end
+
+       def running?
+         !!(@work_loop_thread && @work_loop_thread.alive?)
+       end
+
+       def start
+         @signal.set :start
+         @work_loop_thread ||= Thread.new{ work_loop }
+       end
+
+       def stop(wait = false)
+         return unless self.running?
+         @signal.set :stop
+         wakeup_work_loop_thread
+         wait_for_shutdown if wait
+       end
+
+       def halt(wait = false)
+         return unless self.running?
+         @signal.set :halt
+         wakeup_work_loop_thread
+         wait_for_shutdown if wait
+       end
+
+       private
+
+       def process(redis_item)
+         Qs::PayloadHandler.new(self.daemon_data, redis_item).run
+       end
+
+       def work_loop
+         self.logger.debug "Starting work loop..."
+         setup_redis_and_ios
+         @worker_pool = build_worker_pool
+         process_inputs while @signal.start?
+         self.logger.debug "Stopping work loop..."
+         shutdown_worker_pool
+       rescue StandardError => exception
+         self.logger.error "Exception occurred, stopping daemon!"
+         self.logger.error "#{exception.class}: #{exception.message}"
+         self.logger.error exception.backtrace.join("\n")
+       ensure
+         @worker_available_io.teardown
+         @work_loop_thread = nil
+         self.logger.debug "Stopped work loop"
+       end
+
+       def setup_redis_and_ios
+         # clear any signals that are already on the signals redis list
+         @client.clear(self.signals_redis_key)
+         @worker_available_io.setup
+       end
+
+       def build_worker_pool
+         wp = DatWorkerPool.new(
+           self.daemon_data.min_workers,
+           self.daemon_data.max_workers
+         ){ |redis_item| process(redis_item) }
+         wp.on_worker_error do |worker, exception, redis_item|
+           handle_worker_exception(redis_item)
+         end
+         wp.on_worker_sleep{ @worker_available_io.signal }
+         wp.start
+         wp
+       end
+
+       # * Shuffle the queue redis keys to avoid queue starvation. Redis pulls
+       #   jobs off queues in the order they are passed to the command; by
+       #   shuffling we ensure they are randomly ordered, so every queue gets
+       #   a chance.
+       # * Use 0 for the brpop timeout, which means block indefinitely.
+       def process_inputs
+         wait_for_available_worker
+         return unless @worker_pool.worker_available? && @signal.start?
+
+         args = [self.signals_redis_key, self.queue_redis_keys.shuffle, 0].flatten
+         redis_key, serialized_payload = @client.block_dequeue(*args)
+         if redis_key != @signals_redis_key
+           @worker_pool.add_work(RedisItem.new(redis_key, serialized_payload))
+         end
+       end
+
+       def wait_for_available_worker
+         if !@worker_pool.worker_available? && @signal.start?
+           @worker_available_io.wait
+         end
+       end
+
+       def shutdown_worker_pool
+         self.logger.debug "Shutting down worker pool"
+         timeout = @signal.stop? ? self.daemon_data.shutdown_timeout : 0
+         @worker_pool.shutdown(timeout)
+         @worker_pool.work_items.each do |ri|
+           @client.prepend(ri.queue_redis_key, ri.serialized_payload)
+         end
+       end
+
+       def wait_for_shutdown
+         @work_loop_thread.join if @work_loop_thread
+       end
+
+       def wakeup_work_loop_thread
+         @client.append(self.signals_redis_key, '.')
+         @worker_available_io.signal
+       end
+
+       # * This only catches errors that happen outside of running the payload
+       #   handler. The only known use-case for this is dat-worker-pool's
+       #   hard-shutdown errors.
+       # * If there isn't a redis item (this can happen when an idle worker is
+       #   being forced to exit) then we don't need to do anything.
+       # * If we never started processing the redis item, it's safe to requeue
+       #   it. Otherwise the error happened while processing (so the payload
+       #   handler caught it) or after it (which we don't care about).
+       def handle_worker_exception(redis_item)
+         return if redis_item.nil?
+         if !redis_item.started
+           @client.prepend(redis_item.queue_redis_key, redis_item.serialized_payload)
+         end
+       end
+
+     end
+
+     module ClassMethods
+
+       def configuration
+         @configuration ||= Configuration.new
+       end
+
+       def name(*args)
+         self.configuration.name(*args)
+       end
+
+       def pid_file(*args)
+         self.configuration.pid_file(*args)
+       end
+
+       def min_workers(*args)
+         self.configuration.min_workers(*args)
+       end
+
+       def max_workers(*args)
+         self.configuration.max_workers(*args)
+       end
+
+       def workers(*args)
+         self.min_workers(*args)
+         self.max_workers(*args)
+       end
+
+       def verbose_logging(*args)
+         self.configuration.verbose_logging(*args)
+       end
+
+       def logger(*args)
+         self.configuration.logger(*args)
+       end
+
+       def shutdown_timeout(*args)
+         self.configuration.shutdown_timeout(*args)
+       end
+
+       def init(&block)
+         self.configuration.init_procs << block
+       end
+
+       def error(&block)
+         self.configuration.error_procs << block
+       end
+
+       def queue(queue)
+         self.configuration.queues << queue
+       end
+
+     end
+
+     class Configuration
+       include NsOptions::Proxy
+
+       option :name, String, :required => true
+       option :pid_file, Pathname
+
+       option :min_workers, Integer, :default => 1
+       option :max_workers, Integer, :default => 4
+
+       option :verbose_logging, :default => true
+       option :logger, :default => proc{ Qs::NullLogger.new }
+
+       option :shutdown_timeout
+
+       attr_accessor :init_procs, :error_procs
+       attr_accessor :queues
+
+       def initialize(values = nil)
+         super(values)
+         @init_procs, @error_procs = [], []
+         @queues = []
+         @valid = nil
+       end
+
+       def routes
+         @queues.map(&:routes).flatten
+       end
+
+       def to_hash
+         super.merge({
+           :error_procs => self.error_procs,
+           :queue_redis_keys => self.queues.map(&:redis_key),
+           :routes => self.routes
+         })
+       end
+
+       def valid?
+         !!@valid
+       end
+
+       def validate!
+         return @valid if !@valid.nil?
+         self.init_procs.each(&:call)
+         if self.queues.empty? || !self.required_set?
+           raise InvalidError, "a name and queue must be configured"
+         end
+         self.routes.each(&:validate!)
+         @valid = true
+       end
+     end
+
+     class IOPipe
+       NULL = File.open('/dev/null', 'w')
+       SIGNAL = '.'.freeze
+
+       attr_reader :reader, :writer
+
+       def initialize
+         @reader = NULL
+         @writer = NULL
+       end
+
+       def wait
+         ::IO.select([@reader])
+         @reader.read_nonblock(SIGNAL.bytesize)
+       end
+
+       def signal
+         @writer.write_nonblock(SIGNAL)
+       end
+
+       def setup
+         @reader, @writer = ::IO.pipe
+       end
+
+       def teardown
+         @reader.close unless @reader === NULL
+         @writer.close unless @writer === NULL
+         @reader = NULL
+         @writer = NULL
+       end
+     end
+
+     class Signal
+       def initialize(value)
+         @value = value
+         @mutex = Mutex.new
+       end
+
+       def set(value)
+         @mutex.synchronize{ @value = value }
+       end
+
+       def start?
+         @mutex.synchronize{ @value == :start }
+       end
+
+       def stop?
+         @mutex.synchronize{ @value == :stop }
+       end
+
+       def halt?
+         @mutex.synchronize{ @value == :halt }
+       end
+     end
+
+   end
+
+ end
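The ClassMethods above form a small configuration DSL and the InstanceMethods drive the work loop. As a rough sketch of how the pieces fit together (hedged: the queue value below is a placeholder, since the queue API lives in data/lib/qs/queue.rb and isn't shown in this hunk):

  require 'qs'
  require 'qs/daemon'

  class MyDaemon
    include Qs::Daemon

    name 'my-daemon'      # required, or Configuration#validate! raises InvalidError
    pid_file '/var/run/qs.pid'
    workers 2             # sets both min_workers and max_workers
    shutdown_timeout 30   # seconds the worker pool gets on a graceful stop

    queue MAIN_QUEUE      # placeholder for a Qs::Queue instance (see qs/queue.rb)

    error do |exception, context|
      # collected into Configuration#error_procs and handed to the DaemonData
    end
  end

  daemon = MyDaemon.new   # validates the config and builds the QsClient
  daemon.start            # spawns the work loop thread
  daemon.stop(true)       # sets the :stop signal, wakes the loop, waits for it to join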
data/lib/qs/daemon_data.rb ADDED
@@ -0,0 +1,46 @@
+ module Qs
+
+   class DaemonData
+
+     # The daemon uses this to "compile" its configuration for speed. NsOptions
+     # is relatively slow every time an option is read. To avoid this, we read
+     # the options once here and memoize their values. This way, we don't pay
+     # the NsOptions overhead when reading them while handling a job.
+
+     attr_reader :name
+     attr_reader :pid_file
+     attr_reader :min_workers, :max_workers
+     attr_reader :logger, :verbose_logging
+     attr_reader :shutdown_timeout
+     attr_reader :error_procs
+     attr_reader :queue_redis_keys, :routes
+
+     def initialize(args = nil)
+       args ||= {}
+       @name = args[:name]
+       @pid_file = args[:pid_file]
+       @min_workers = args[:min_workers]
+       @max_workers = args[:max_workers]
+       @logger = args[:logger]
+       @verbose_logging = !!args[:verbose_logging]
+       @shutdown_timeout = args[:shutdown_timeout]
+       @error_procs = args[:error_procs] || []
+       @queue_redis_keys = args[:queue_redis_keys] || []
+       @routes = build_routes(args[:routes] || [])
+     end
+
+     def route_for(name)
+       @routes[name] || raise(NotFoundError, "no service named '#{name}'")
+     end
+
+     private
+
+     def build_routes(routes)
+       routes.inject({}){ |h, route| h.merge(route.name => route) }
+     end
+
+   end
+
+   NotFoundError = Class.new(RuntimeError)
+
+ end
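In practice the daemon builds one of these from Configuration#to_hash and then only touches the memoized readers. A minimal sketch (the hash keys mirror the initializer above; real routes are Qs::Route instances from data/lib/qs/route.rb):

  require 'qs/daemon_data'

  data = Qs::DaemonData.new({
    :name        => 'my-daemon',
    :max_workers => 4,
    :routes      => []    # Qs::Route instances, indexed by name via build_routes
  })

  data.max_workers          # => 4, a plain attr_reader with no NsOptions lookup
  data.route_for('missing') # raises Qs::NotFoundError, "no service named 'missing'"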
data/lib/qs/error_handler.rb ADDED
@@ -0,0 +1,58 @@
+ require 'qs/queue'
+
+ module Qs
+
+   class ErrorHandler
+
+     attr_reader :exception, :context, :error_procs
+
+     def initialize(exception, context_hash)
+       @exception = exception
+       @context = ErrorContext.new(context_hash)
+       @error_procs = context_hash[:daemon_data].error_procs.reverse
+     end
+
+     # The exception that we are handling can change in the case that a
+     # configured error proc raises an exception. If this occurs, the new
+     # exception will be passed to subsequent error procs. This is designed to
+     # avoid "hidden" errors; this way the daemon will log based on the last
+     # exception that occurred.
+     def run
+       @error_procs.each do |error_proc|
+         begin
+           error_proc.call(@exception, @context)
+         rescue StandardError => proc_exception
+           @exception = proc_exception
+         end
+       end
+     end
+
+   end
+
+   class ErrorContext
+     attr_reader :daemon_data
+     attr_reader :queue_name, :serialized_payload
+     attr_reader :job, :handler_class
+
+     def initialize(args)
+       @daemon_data = args[:daemon_data]
+       @queue_name = Queue::RedisKey.parse_name(args[:queue_redis_key].to_s)
+       @serialized_payload = args[:serialized_payload]
+       @job = args[:job]
+       @handler_class = args[:handler_class]
+     end
+
+     def ==(other)
+       if other.kind_of?(self.class)
+         self.daemon_data == other.daemon_data &&
+         self.queue_name == other.queue_name &&
+         self.serialized_payload == other.serialized_payload &&
+         self.job == other.job &&
+         self.handler_class == other.handler_class
+       else
+         super
+       end
+     end
+   end
+
+ end
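A sketch of the "exception can change" behavior described above. The daemon-data stand-in and the :queue_redis_key format are assumptions made only for this illustration; in the daemon these come from DaemonData and Qs::Queue:

  require 'ostruct'
  require 'qs/error_handler'

  original = ArgumentError.new("boom")
  raises   = proc{ |ex, ctx| raise RuntimeError, "error proc failed" }
  logs     = proc{ |ex, ctx| puts ex.class } # sees the RuntimeError, not the ArgumentError

  daemon_data = OpenStruct.new(:error_procs => [logs, raises]) # reversed in initialize
  handler = Qs::ErrorHandler.new(original, {
    :daemon_data     => daemon_data,
    :queue_redis_key => 'queues:main' # assumed key format
  })
  handler.run
  handler.exception # => the RuntimeError raised by the first proc that ran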
data/lib/qs/job.rb ADDED
@@ -0,0 +1,70 @@
+ module Qs
+
+   class Job
+
+     def self.parse(payload)
+       created_at = Time.at(payload['created_at'].to_i)
+       self.new(payload['name'], payload['params'], created_at)
+     end
+
+     attr_reader :name, :params, :created_at
+
+     def initialize(name, params, created_at = nil)
+       validate!(name, params)
+       @name = name
+       @params = params
+       @created_at = created_at || Time.now
+     end
+
+     def to_payload
+       { 'name' => self.name.to_s,
+         'params' => StringifyParams.new(self.params),
+         'created_at' => self.created_at.to_i
+       }
+     end
+
+     def inspect
+       reference = '0x0%x' % (self.object_id << 1)
+       "#<#{self.class}:#{reference} " \
+         "@name=#{self.name.inspect} " \
+         "@params=#{self.params.inspect} " \
+         "@created_at=#{self.created_at.inspect}>"
+     end
+
+     def ==(other)
+       if other.kind_of?(self.class)
+         self.to_payload == other.to_payload
+       else
+         super
+       end
+     end
+
+     private
+
+     def validate!(name, params)
+       problem = if name.to_s.empty?
+         "The job doesn't have a name."
+       elsif !params.kind_of?(::Hash)
+         "The job's params are not valid."
+       end
+       raise(BadJobError, problem) if problem
+     end
+
+     module StringifyParams
+       def self.new(object)
+         case(object)
+         when Hash
+           object.inject({}){ |h, (k, v)| h.merge(k.to_s => self.new(v)) }
+         when Array
+           object.map{ |item| self.new(item) }
+         else
+           object
+         end
+       end
+     end
+
+   end
+
+   BadJobError = Class.new(ArgumentError)
+
+ end
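The payload round trip is symmetric: to_payload stringifies the param keys via StringifyParams and stores created_at as epoch seconds, and parse rebuilds an equivalent job, so the two compare equal. A quick sketch:

  require 'qs/job'

  job = Qs::Job.new(:send_email, :user_id => 1, :flags => [:urgent])
  payload = job.to_payload
  # => { 'name'       => 'send_email',
  #      'params'     => { 'user_id' => 1, 'flags' => [:urgent] },
  #      'created_at' => <epoch seconds> }

  Qs::Job.parse(payload) == job # => true; Job#== compares payloads

  Qs::Job.new(nil, {})     # raises Qs::BadJobError, "The job doesn't have a name."
  Qs::Job.new('x', 'nope') # raises Qs::BadJobError, "The job's params are not valid."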
data/lib/qs/job_handler.rb ADDED
@@ -0,0 +1,90 @@
+ module Qs
+
+   module JobHandler
+
+     def self.included(klass)
+       klass.class_eval do
+         extend ClassMethods
+         include InstanceMethods
+       end
+     end
+
+     module InstanceMethods
+
+       def initialize(runner)
+         @qs_runner = runner
+       end
+
+       def init
+         run_callback 'before_init'
+         self.init!
+         run_callback 'after_init'
+       end
+
+       def init!
+       end
+
+       def run
+         run_callback 'before_run'
+         self.run!
+         run_callback 'after_run'
+       end
+
+       def run!
+         raise NotImplementedError
+       end
+
+       def inspect
+         reference = '0x0%x' % (self.object_id << 1)
+         "#<#{self.class}:#{reference} @job=#{job.inspect}>"
+       end
+
+       private
+
+       # Helpers
+
+       def job; @qs_runner.job; end
+       def params; @qs_runner.params; end
+       def logger; @qs_runner.logger; end
+
+       def run_callback(callback)
+         (self.class.send("#{callback}_callbacks") || []).each do |callback|
+           self.instance_eval(&callback)
+         end
+       end
+
+     end
+
+     module ClassMethods
+
+       def timeout(value = nil)
+         @timeout = value.to_f if value
+         @timeout
+       end
+
+       def before_callbacks; @before_callbacks ||= []; end
+       def after_callbacks; @after_callbacks ||= []; end
+       def before_init_callbacks; @before_init_callbacks ||= []; end
+       def after_init_callbacks; @after_init_callbacks ||= []; end
+       def before_run_callbacks; @before_run_callbacks ||= []; end
+       def after_run_callbacks; @after_run_callbacks ||= []; end
+
+       def before(&block); self.before_callbacks << block; end
+       def after(&block); self.after_callbacks << block; end
+       def before_init(&block); self.before_init_callbacks << block; end
+       def after_init(&block); self.after_init_callbacks << block; end
+       def before_run(&block); self.before_run_callbacks << block; end
+       def after_run(&block); self.after_run_callbacks << block; end
+
+       def prepend_before(&block); self.before_callbacks.unshift(block); end
+       def prepend_after(&block); self.after_callbacks.unshift(block); end
+       def prepend_before_init(&block); self.before_init_callbacks.unshift(block); end
+       def prepend_after_init(&block); self.after_init_callbacks.unshift(block); end
+       def prepend_before_run(&block); self.before_run_callbacks.unshift(block); end
+       def prepend_after_run(&block); self.after_run_callbacks.unshift(block); end
+
+     end
+
+   end
+
+ end
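A handler mixes this in, implements run!, and hooks extra behavior in with the callback macros; the runner handed to initialize supplies job, params, and logger (see data/lib/qs/runner.rb and data/lib/qs/qs_runner.rb). A hedged sketch:

  require 'qs/job_handler'

  class SendEmailHandler
    include Qs::JobHandler

    timeout 10 # read back via SendEmailHandler.timeout

    before_run{ logger.info "handling #{job.name}" }
    after_run{ logger.info "done" }

    def run!
      # do the actual work with params['user_id'], etc.
    end
  end

Note that run_callback only drives the *_init and *_run callbacks here; the plain before/after callbacks are presumably invoked by the runner.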
data/lib/qs/logger.rb ADDED
@@ -0,0 +1,23 @@
+ module Qs
+
+   class Logger
+     attr_reader :summary, :verbose
+
+     def initialize(logger, verbose = true)
+       loggers = [logger, Qs::NullLogger.new]
+       loggers.reverse! if !verbose
+       @verbose, @summary = loggers
+     end
+
+   end
+
+   class NullLogger
+     require 'logger'
+
+     ::Logger::Severity.constants.each do |name|
+       define_method(name.downcase){ |*args| } # no-op
+     end
+
+   end
+
+ end
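Qs::Logger simply decides which of two destinations gets real output: with verbose logging on, verbose is the logger you passed in and summary is a no-op NullLogger; with it off, the pair is swapped. For example:

  require 'logger'
  require 'qs/logger'

  app_logger = ::Logger.new($stdout)

  loud = Qs::Logger.new(app_logger, true)
  loud.verbose.info "written to stdout"
  loud.summary.info "swallowed by the NullLogger"

  quiet = Qs::Logger.new(app_logger, false)
  quiet.verbose.info "swallowed"
  quiet.summary.info "written to stdout"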