pika_que 0.1.6 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: a9831c848341b3207490dfa3050b377cea179be7
- data.tar.gz: 7ac6527b9df72fb0e2ea7741fb2e3cff8c36a684
+ metadata.gz: 3a1121f57097546120c68023942601cd3a404ea1
+ data.tar.gz: 5bedb7976f30278c0cd7d0ec7c9c6196f4f08bf6
  SHA512:
- metadata.gz: 82309e8a1fa9d93b76f5881f5693059720cc0b496b8a0c7ceacdd20594b6ff70b0575ea8bbf5a0117d0ebffe5243d5da734ca13ac5770ddbcd0b3b58792f6394
- data.tar.gz: e7ba16c2307b6c53a89db6d51d1320b6d40c26066871aa1a4c54c63aa4f59ca585c121b2ca69aa1b7a13e2de3b62e0ee98fd743c7e1a7c3a42e7de4c8d5a9236
+ metadata.gz: 66f7a312844dd5f73186f630c74d4210ee4e57c0302b6e5a616bbdd00e355410572a0fc72f7baa281d0ee2862f2ed263da2f3bce3d2d8c85566b8e9c3215ab80
+ data.tar.gz: 4ab9064e8e1c3d463d36c6e136ba816025e593d07a6074295a3d6878297d15c462dc1c4ddbafef8aa31e2444c20138aca208d6e4e7b227e5b33ecd042daeaa9b
data/README.md CHANGED
@@ -1,5 +1,7 @@
  # PikaQue

+ ![Build Status](https://travis-ci.org/dwkoogt/pika_que.svg?branch=master)
+
  A RabbitMQ background processing framework for Ruby with built-in support for Rails integration.

  PikaQue is inspired by Sneakers, Hutch, and Sidekiq. It is intended to implement more support for Rails a la Sidekiq.
@@ -6,12 +6,12 @@ require 'pika_que/publisher'

  require 'dev_worker'
  require 'demo_worker'
- require 'demo_reporter'
+ require 'demo_middleware'

  PikaQue.logger.level = ::Logger::DEBUG

  PikaQue.middleware do |chain|
- chain.add DemoReporter
+ chain.add DemoMiddleware
  end

  workers = [DemoWorker,DevWorker]
@@ -33,10 +33,11 @@ class LowPriorityWorker

  end

- PikaQue.config.add_processor(workers: [LowPriorityWorker], concurrency: 10)
- PikaQue.config.add_processor(workers: [HighPriorityWorker], concurrency: 10)
+ PikaQue.config[:processors] << { workers: [LowPriorityWorker], concurrency: 10 }
+ PikaQue.config[:processors] << { workers: [HighPriorityWorker], concurrency: 10 }

  runner = PikaQue::Runner.new
+ runner.setup_processors

  begin
  runner.run
@@ -16,10 +16,11 @@ end

  PikaQue.logger.level = ::Logger::DEBUG

- PikaQue.config.add_processor(PikaQue.config.delete(:delay_options))
- PikaQue.config.add_processor(workers: [DemoWorker])
+ PikaQue.config[:delay] = true
+ PikaQue.config[:processors] << { workers: [DemoWorker] }

  runner = PikaQue::Runner.new
+ runner.setup_processors

  begin
  runner.run
@@ -0,0 +1,43 @@
+ # > bundle exec ruby examples/demo_dlx_retry.rb
+ # Retry using x-dead-letter-exchange
+ # Constant backoff only
+ require 'pika_que'
+ require 'pika_que/worker'
+ require 'pika_que/handlers/dlx_retry_handler'
+
+ class DlxWorker
+ include PikaQue::Worker
+ from_queue "pika-que-dlx", :arguments => { :'x-dead-letter-exchange' => 'pika-que-retry-60' }
+ handle_with PikaQue::Handlers::DLXRetryHandler, retry_max_times: 3, retry_dlx: 'pika-que-retry-60'
+
+ def perform(msg)
+ logger.info msg["msg"]
+ raise "BOOM!"
+ ack!
+ end
+
+ end
+
+ PikaQue.logger.level = ::Logger::DEBUG
+
+ workers = [DlxWorker]
+
+ begin
+ pro = PikaQue::Processor.new(workers: workers)
+ pro.start
+ rescue => e
+ puts e
+ puts e.backtrace.join("\n")
+ end
+
+ sleep 3
+
+ DlxWorker.enqueue({ msg: "retry message" })
+
+ sleep 200
+
+ pro.stop
+
+ puts "bye"
+
+ exit 1
@@ -0,0 +1,19 @@
+ class DemoMiddleware
+
+ def initialize(opts = {})
+ STDOUT.sync = true
+ end
+
+ def call(worker, delivery_info, metadata, msg)
+ puts "entering middleware DemoMiddleware for msg: #{msg}"
+ begin
+ yield
+ rescue => e
+ puts "error caught in middleware DemoMiddleware for msg: #{msg}, error: #{e.message}"
+ raise e
+ ensure
+ puts "leaving middleware DemoMiddleware for msg: #{msg}"
+ end
+ end
+
+ end
@@ -1,4 +1,6 @@
  # > bundle exec ruby examples/demo_retry.rb
+ # Retry with routing to a headers exchange
+ # Select between constant backoff and exponential backoff modes
  require 'pika_que'
  require 'pika_que/worker'
  require 'pika_que/handlers/retry_handler'
@@ -1,5 +1,4 @@
- require 'pika_que'
- require 'pika_que/codecs/rails'
+ require 'pika_que/rails_worker'
  require 'thread'

  module ActiveJob
@@ -20,21 +19,16 @@ module ActiveJob
  class << self
  def enqueue(job) #:nodoc:
  @monitor.synchronize do
- JobWrapper.enqueue job.serialize, to_queue: job.queue_name
+ PikaQue::RailsWorker.enqueue job.serialize, to_queue: job.queue_name
  end
  end

  def enqueue_at(job, timestamp) #:nodoc:
  @monitor.synchronize do
- JobWrapper.enqueue_at job.serialize, timestamp, routing_key: job.queue_name
+ PikaQue::RailsWorker.enqueue_at job.serialize, timestamp, routing_key: job.queue_name
  end
  end
  end
-
- class JobWrapper #:nodoc:
- extend PikaQue::Worker::ClassMethods
- config codec: PikaQue::Codecs::RAILS
- end
  end

  class PikaQueRails5
@@ -44,20 +38,15 @@ module ActiveJob

  def enqueue(job) #:nodoc:
  @monitor.synchronize do
- JobWrapper.enqueue job.serialize, to_queue: job.queue_name
+ PikaQue::RailsWorker.enqueue job.serialize, to_queue: job.queue_name, priority: job.priority
  end
  end

  def enqueue_at(job, timestamp) #:nodoc:
  @monitor.synchronize do
- JobWrapper.enqueue_at job.serialize, timestamp, routing_key: job.queue_name
+ PikaQue::RailsWorker.enqueue_at job.serialize, timestamp, routing_key: job.queue_name
  end
  end
-
- class JobWrapper #:nodoc:
- extend PikaQue::Worker::ClassMethods
- config codec: PikaQue::Codecs::RAILS
- end
  end

  PikaQueAdapter = (::Rails::VERSION::MAJOR < 5) ? PikaQueRails4 : PikaQueRails5
@@ -42,14 +42,16 @@ module PikaQue
  end

  def self.reporters
- config[:reporters] << PikaQue::Reporters::LogReporter.new if config[:reporters].empty?
- config[:reporters]
+ @reporters ||= [PikaQue::Reporters::LogReporter.new]
+ yield @reporters if block_given?
+ @reporters
  end

  def self.reset!
  @config = nil
  @connection = nil
  @chain = nil
+ @reporters = nil
  end

  end
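
For context on the hunk above: `PikaQue.reporters` now memoizes the list and yields it when a block is given (the CLI's new `prepare_server`, later in this diff, relies on this). A minimal usage sketch, assuming a hypothetical `MyReporter` class; the reporter callback interface is not shown in this diff, only that reporters are instantiated with no arguments:

    require 'pika_que'

    class MyReporter
      # placeholder; implement whatever reporter callbacks your PikaQue version expects
    end

    # Defaults to [PikaQue::Reporters::LogReporter.new]; the block appends to that list.
    PikaQue.reporters do |reporters|
      reporters << MyReporter.new
    end
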
@@ -11,7 +11,7 @@ module PikaQue

  def parse(args = ARGV)
  opts = parse_options(args)
- config.merge!(opts)
+ init_config(opts)
  init_logger
  daemonize
  write_pid
@@ -20,10 +20,9 @@ module PikaQue
  def run

  load_app
+ prepare_server

- PikaQue.middleware
-
- runner = Runner.new
+ runner = Runner.new.tap{ |r| r.setup_processors }

  begin

@@ -43,6 +42,13 @@ module PikaQue
  PikaQue.config
  end

+ def init_config(opts)
+ if opts[:config]
+ config.load(File.expand_path(opts[:config]))
+ end
+ config.merge!(opts)
+ end
+
  def init_logger
  PikaQue::Logging.init_logger(config[:logfile]) if config[:logfile]
  PikaQue.logger.level = ::Logger::WARN if config[:quiet]
@@ -101,17 +107,16 @@ module PikaQue
  else
  require(File.expand_path(config[:require])) || raise(ArgumentError, 'require returned false')
  end
+ end

- if config[:delay]
- config.add_processor(config.delete(:delay_options))
- else
- config.delete(:delay_options)
+ def prepare_server
+ PikaQue.middleware do |chain|
+ config[:middlewares].each{ |m| chain.add PikaQue::Util.constantize(m) } if config[:middlewares]
  end

- if config[:workers]
- config.add_processor({ workers: config.delete(:workers) })
+ PikaQue.reporters do |rptrs|
+ config[:reporters].each{ |r| rptrs << PikaQue::Util.constantize(r).new }
  end
-
  end

  def parse_options(args)
@@ -148,6 +153,14 @@ module PikaQue
  opts[:workers] = arg.split(",")
  end

+ o.on '--no-delay', "turn off delay processor" do |arg|
+ opts[:delay] = arg
+ end
+
+ o.on '-C', '--config PATH', "path to config yml file" do |arg|
+ opts[:config] = arg
+ end
+
  o.on '-L', '--logfile PATH', "path to writable logfile" do |arg|
  opts[:logfile] = arg
  end
@@ -1,4 +1,5 @@
  require 'forwardable'
+ require 'yaml'

  require 'pika_que/codecs/json'
  require 'pika_que/codecs/noop'
@@ -81,6 +82,12 @@ module PikaQue
  @config[:vhost] = AMQ::Settings.parse_amqp_url(@config[:amqp]).fetch(:vhost, '/')
  end

+ def load(filename)
+ loaded = YAML.load_file(filename)
+ converted = JSON.parse(JSON.dump(loaded), symbolize_names: true)
+ merge! converted
+ end
+
  def merge!(other = {})
  @config = deep_merge(@config, other)
  end
@@ -97,16 +104,5 @@ module PikaQue
  first.merge(second, &merger)
  end

- def processor(opts = {})
- {
- :processor => PikaQue::Processor,
- :workers => []
- }.merge(opts)
- end
-
- def add_processor(opts = {})
- @config[:processors] << processor(opts)
- end
-
  end
  end
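
To illustrate the new `Configuration#load` above (and the `-C/--config PATH` CLI option it backs, shown earlier): a sketch under assumptions, using a throwaway config file whose `processors`/`workers`/`queue` layout mirrors the default built in `lib/pika_que/rails.rb`; the queue names and concurrency value are placeholders.

    require 'pika_que'
    require 'tempfile'

    # Write a hypothetical pika_que.yml; Configuration#load symbolizes the keys
    # (via the JSON round-trip) before deep-merging them into PikaQue.config.
    file = Tempfile.new(['pika_que', '.yml'])
    file.write(<<~YAML)
      processors:
        - concurrency: 2
          workers:
            - queue: default
            - queue: mailers
    YAML
    file.close

    PikaQue.config.load(file.path)
    PikaQue.config[:processors]
    # => [{ concurrency: 2, workers: [{ queue: "default" }, { queue: "mailers" }] }]
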
@@ -4,6 +4,7 @@ module PikaQue
  autoload :DefaultHandler, 'pika_que/handlers/default_handler'
  autoload :ErrorHandler, 'pika_que/handlers/error_handler'
  autoload :RetryHandler, 'pika_que/handlers/retry_handler'
+ autoload :DLXRetryHandler, 'pika_que/handlers/dlx_retry_handler'

  end
  end
@@ -0,0 +1,151 @@
+ module PikaQue
+ module Handlers
+ class DLXRetryHandler
+
+ # Create following exchanges with retry_prefix = pika-que and default backoff
+ # pika-que-retry-60
+ # pika-que-retry-requeue
+ # pika-que-error
+ # and following queue
+ # pika-que-retry-60 (with default backoff)
+ #
+ # retry_mode can be either :exp or :const
+
+ DEFAULT_RETRY_OPTS = {
+ :retry_prefix => 'pika-que',
+ :retry_max_times => 5,
+ :retry_backoff => 60,
+ :retry_backoff_multiplier => 1000,
+ }.freeze
+
+ def initialize(opts = {})
+ @opts = PikaQue.config.merge(DEFAULT_RETRY_OPTS).merge(opts)
+ @connection = opts[:connection] || PikaQue.connection
+ @channel = @connection.create_channel
+ @error_monitor = Monitor.new
+
+ @max_retries = @opts[:retry_max_times]
+ @backoff_multiplier = @opts[:retry_backoff_multiplier] # This is for example/dev/test
+
+ @retry_ex_name = @opts[:retry_dlx] || "#{@opts[:retry_prefix]}-retry-#{@opts[:retry_backoff]}"
+ @retry_name = "#{@opts[:retry_prefix]}-retry"
+ @requeue_name = "#{@opts[:retry_prefix]}-retry-requeue"
+ @error_name = "#{@opts[:retry_prefix]}-error"
+
+ @queue_name_lookup = {}
+
+ setup_exchanges
+ setup_queues
+ end
+
+ def bind_queue(queue, routing_key)
+ # bind the worker queue to requeue exchange
+ @queue_name_lookup[routing_key] = queue.name
+ queue.bind(@requeue_exchange, :routing_key => routing_key)
+ end
+
+ def handle(response_code, channel, delivery_info, metadata, msg, error = nil)
+ case response_code
+ when :ack
+ PikaQue.logger.debug "DLXRetryHandler acknowledge <#{msg}>"
+ channel.acknowledge(delivery_info.delivery_tag, false)
+ when :reject
+ PikaQue.logger.debug "DLXRetryHandler reject retry <#{msg}>"
+ handle_retry(channel, delivery_info, metadata, msg, :reject)
+ when :requeue
+ PikaQue.logger.debug "DLXRetryHandler requeue <#{msg}>"
+ channel.reject(delivery_info.delivery_tag, true)
+ else
+ PikaQue.logger.debug "DLXRetryHandler error retry <#{msg}>"
+ handle_retry(channel, delivery_info, metadata, msg, error)
+ end
+ end
+
+ def close
+ @channel.close unless @channel.closed?
+ end
+
+ private
+
+ def setup_exchanges
+ @retry_exchange, @error_exchange, @requeue_exchange = [@retry_ex_name, @error_name, @requeue_name].map do |name|
+ PikaQue.logger.debug "DLXRetryHandler creating exchange=#{name}"
+ @channel.exchange(name, :type => 'topic', :durable => exchange_durable?)
+ end
+ end
+
+ def setup_queues
+ bo = @opts[:retry_backoff]
+
+ PikaQue.logger.debug "DLXRetryHandler creating queue=#{@retry_name}-#{bo} x-dead-letter-exchange=#{@requeue_name}"
+ backoff_queue = @channel.queue("#{@retry_name}-#{bo}",
+ :durable => queue_durable?,
+ :arguments => {
+ :'x-dead-letter-exchange' => @requeue_name,
+ :'x-message-ttl' => bo * @backoff_multiplier
+ })
+ backoff_queue.bind(@retry_exchange, :routing_key => '#')
+
+ PikaQue.logger.debug "DLXRetryHandler creating queue=#{@error_name}"
+ @error_queue = @channel.queue(@error_name, :durable => queue_durable?)
+ @error_queue.bind(@error_exchange, :routing_key => '#')
+ end
+
+ def queue_durable?
+ @opts.fetch(:queue_options, {}).fetch(:durable, false)
+ end
+
+ def exchange_durable?
+ @opts.fetch(:exchange_options, {}).fetch(:durable, false)
+ end
+
+ def handle_retry(channel, delivery_info, metadata, msg, reason)
+ # +1 for the current attempt
+ num_attempts = failure_count(metadata[:headers], delivery_info) + 1
+ if num_attempts <= @max_retries
+ # Publish message to the x-dead-letter-exchange (ie. retry exchange)
+ PikaQue.logger.info "DLXRetryHandler msg=retrying, count=#{num_attempts}, headers=#{metadata[:headers] || {}}"
+
+ channel.reject(delivery_info.delivery_tag, false)
+ else
+ PikaQue.logger.info "DLXRetryHandler msg=failing, retried_count=#{num_attempts - 1}, headers=#{metadata[:headers]}, reason=#{reason}"
+
+ publish_error(delivery_info, msg)
+ channel.acknowledge(delivery_info.delivery_tag, false)
+ end
+ end
+
+ # Uses the x-death header to determine the number of failures this job has
+ # seen in the past. This does not count the current failure. So for
+ # instance, the first time the job fails, this will return 0, the second
+ # time, 1, etc.
+ # @param headers [Hash] Hash of headers that Rabbit delivers as part of
+ # the message
+ # @return [Integer] Count of number of failures.
+ def failure_count(headers, delivery_info)
+ if headers.nil? || headers['x-death'].nil?
+ 0
+ else
+ queue_name = headers['x-first-death-queue'] || @queue_name_lookup[delivery_info.routing_key]
+ x_death_array = headers['x-death'].select do |x_death|
+ x_death['queue'] == queue_name
+ end
+ if x_death_array.count > 0 && x_death_array.first['count']
+ # Newer versions of RabbitMQ return headers with a count key
+ x_death_array.inject(0) {|sum, x_death| sum + x_death['count']}
+ else
+ # Older versions return a separate x-death header for each failure
+ x_death_array.count
+ end
+ end
+ end
+
+ def publish_error(delivery_info, msg)
+ @error_monitor.synchronize do
+ @error_exchange.publish(msg, routing_key: delivery_info.routing_key)
+ end
+ end
+
+ end
+ end
+ end
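
A worked example of the x-death counting that `failure_count` above describes (the headers are illustrative of what RabbitMQ attaches after dead-lettering; on older brokers each failure adds a separate entry instead of a `count` key):

    headers = {
      'x-first-death-queue' => 'pika-que-dlx',
      'x-death' => [
        { 'queue' => 'pika-que-dlx',      'count' => 2 },  # failures on the worker queue
        { 'queue' => 'pika-que-retry-60', 'count' => 2 }   # expiries on the backoff queue
      ]
    }

    # Mirrors failure_count: only entries for the worker's own queue are summed.
    failures = headers['x-death']
                 .select { |d| d['queue'] == headers['x-first-death-queue'] }
                 .sum { |d| d['count'] }
    failures # => 2, so the next failure is attempt 3 against retry_max_times
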
@@ -3,19 +3,22 @@ module PikaQue
  class ErrorHandler

  DEFAULT_ERROR_OPTS = {
- :exchange => 'pika-que-error',
- :exchange_options => { :type => :topic },
- :queue => 'pika-que-error',
- :routing_key => '#'
+ :error_prefix => 'pika-que'
  }.freeze

  def initialize(opts = {})
  @opts = PikaQue.config.merge(DEFAULT_ERROR_OPTS).merge(opts)
  @connection = @opts[:connection] || PikaQue.connection
  @channel = @connection.create_channel
- @exchange = @channel.exchange(@opts[:exchange], type: exchange_type, durable: exchange_durable?)
- @queue = @channel.queue(@opts[:queue], durable: queue_durable?)
- @queue.bind(@exchange, routing_key: @opts[:routing_key])
+ error_ex_name = error_q_name = "#{@opts[:error_prefix]}-error"
+ if @opts[:queue]
+ # handle deprecated options
+ error_ex_name = @opts[:exchange]
+ error_q_name = @opts[:queue]
+ end
+ @exchange = @channel.exchange(error_ex_name, type: :topic, durable: exchange_durable?)
+ @queue = @channel.queue(error_q_name, durable: queue_durable?)
+ @queue.bind(@exchange, routing_key: '#')
  @monitor = Monitor.new
  end

@@ -36,7 +39,7 @@ module PikaQue
  else
  PikaQue.logger.debug "ErrorHandler publishing <#{msg}> to [#{@queue.name}]"
  publish(delivery_info, msg)
- channel.reject(delivery_info.delivery_tag, false)
+ channel.acknowledge(delivery_info.delivery_tag, false)
  end
  end

@@ -54,10 +57,6 @@ module PikaQue
  @opts.fetch(:exchange_options, {}).fetch(:durable, false)
  end

- def exchange_type
- @opts.fetch(:exchange_options, {}).fetch(:type, :topic)
- end
-
  def publish(delivery_info, msg)
  @monitor.synchronize do
  @exchange.publish(msg, routing_key: delivery_info.routing_key)
@@ -138,12 +138,12 @@ module PikaQue
  end

  publish_retry(delivery_info, msg, { backoff: backoff_ttl, count: num_attempts })
- channel.reject(delivery_info.delivery_tag, false)
+ channel.acknowledge(delivery_info.delivery_tag, false)
  else
  PikaQue.logger.info "RetryHandler msg=failing, retried_count=#{num_attempts - 1}, headers=#{metadata[:headers]}, reason=#{reason}"

  publish_error(delivery_info, msg)
- channel.reject(delivery_info.delivery_tag, false)
+ channel.acknowledge(delivery_info.delivery_tag, false)
  end
  end

@@ -1,5 +1,5 @@
- require 'pika_que/worker'
- require 'pika_que/codecs/rails'
+ require 'pika_que/rails_worker'
+ require 'pika_que/util'

  module PikaQue
  class Rails < ::Rails::Engine
@@ -20,10 +20,10 @@ module PikaQue
  config.after_initialize do
  config_file = ::Rails.root.join('config').join('pika_que.yml')
  if File.exist? config_file
- PIKA_QUE_CONFIG = YAML.load_file(config_file)
+ PIKA_QUE_CONFIG = YAML.load_file(config_file).deep_symbolize_keys
  else
  mailer_queue = (::Rails::VERSION::MAJOR < 5) ? ActionMailer::DeliveryJob.queue_name : ActionMailer::Base.deliver_later_queue_name
- PIKA_QUE_CONFIG = { "processors" => [{ "workers" => [{ "queue" => ActiveJob::Base.queue_name }, { "queue" => mailer_queue.to_s }] }] }
+ PIKA_QUE_CONFIG = { processors: [{ workers: [{ queue: ActiveJob::Base.queue_name }, { queue: mailer_queue.to_s }] }] }
  end

  workers_dir = ::Rails.root.join('app').join('workers')
@@ -33,28 +33,30 @@ module PikaQue
  worker_files = []
  end

- PIKA_QUE_CONFIG['processors'].each do |processor|
+ PIKA_QUE_CONFIG[:processors].each do |processor|
  workers = []
- processor['workers'].each do |worker|
- queue = worker['queue']
- worker_name = worker['worker'] || "#{queue.underscore.classify}Worker"
- Object.const_set(worker_name, Class.new do
- include PikaQue::Worker
- from_queue queue
- config codec: PikaQue::Codecs::RAILS
-
- def perform(msg)
- ActiveJob::Base.execute msg
- ack!
+ processor[:workers].each do |worker|
+ if worker.is_a? Hash
+ if worker[:worker]
+ worker_name = worker[:worker]
+ else
+ queue_name = worker[:queue_name] || worker[:queue]
+ queue_opts = worker[:queue_opts] || {}
+ worker_name = "#{queue_name.underscore.classify}Worker"
+ unless worker_files.detect{ |w| w =~ /#{worker_name.underscore}/ }
+ PikaQue::Util.register_worker_class(worker_name, PikaQue::RailsWorker, queue_name, queue_opts)
  end
  end
- ) unless worker_files.detect{ |w| w =~ /#{worker_name.underscore}/ }
+ else
+ worker_name = worker
+ end
  workers << worker_name
  end
- proc_args = processor.symbolize_keys
- proc_args[:workers] = workers
- PikaQue.logger.info "Adding rails processor: #{proc_args}"
- PikaQue.config.add_processor(proc_args)
+ processor[:workers] = workers
+ unless PikaQue.config[:workers] || PikaQue.config[:config]
+ PikaQue.logger.info "Adding rails processor: #{processor}"
+ PikaQue.config[:processors] << processor
+ end
  end
  end

@@ -0,0 +1,15 @@
+ require 'pika_que/worker'
+ require 'pika_que/codecs/rails'
+
+ module PikaQue
+ class RailsWorker
+ include PikaQue::Worker
+ config codec: PikaQue::Codecs::RAILS
+
+ def perform(msg)
+ ActiveJob::Base.execute msg
+ ack!
+ end
+
+ end
+ end
@@ -2,23 +2,51 @@ module PikaQue
  class Runner

  def run
- run_config = {}
-
  # TODO anything to add to run_config?
+ run_config = {}

- @processors = []
- PikaQue.config[:processors].each do |processor_hash|
+ @processes = []
+ processors.each do |processor_hash|
  _processor = PikaQue::Util.constantize(processor_hash[:processor]).new(processor_hash.merge(run_config))
  _processor.start
- @processors << _processor
+ @processes << _processor
  end
  end

  # halt? pause?
  def stop
- @processors.each(&:stop)
+ @processes.each(&:stop)
  PikaQue.connection.disconnect!
  end

+ def setup_processors
+ add_processor(config[:delay_options]) if config[:delay]
+ if config[:workers]
+ add_processor({ workers: config[:workers] })
+ else
+ config[:processors].each{ |p| add_processor(p) }
+ end
+ end
+
+ def processor(opts = {})
+ {
+ :processor => PikaQue::Processor,
+ :workers => []
+ }.merge(opts)
+ end
+
+ def add_processor(opts = {})
+ classified_workers = { :workers => PikaQue::Util.worker_classes(opts[:workers]) }
+ processors << processor(opts.merge(classified_workers))
+ end
+
+ def processors
+ @processors ||= []
+ end
+
+ def config
+ PikaQue.config
+ end
+
  end
  end
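
For reference on the reworked Runner above, a minimal sketch of driving it directly, assuming a placeholder worker and a reachable RabbitMQ: `setup_processors` builds the processor list from `PikaQue.config` (this is what the CLI now calls), while `add_processor` registers one programmatically with `:processor` defaulting to `PikaQue::Processor`.

    require 'pika_que'
    require 'pika_que/worker'

    class MyWorker
      include PikaQue::Worker
      from_queue 'my-queue'   # placeholder queue name

      def perform(msg)
        ack!
      end
    end

    runner = PikaQue::Runner.new

    # Option 1: configure, then let the runner build its processor list.
    PikaQue.config[:processors] << { workers: [MyWorker], concurrency: 2 }
    runner.setup_processors

    # Option 2 (instead of the above): register a processor directly.
    # runner.add_processor(workers: [MyWorker], concurrency: 2)

    runner.run
    sleep 5
    runner.stop
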
@@ -1,3 +1,5 @@
+ require "dry/inflector"
+
  module PikaQue
  module Util
  extend self
@@ -13,5 +15,35 @@ module PikaQue
  end
  end

+ def register_worker_class(worker_name, base_class, queue_name, queue_opts = {}, handler_class = nil, handler_opts = {}, local_config = {})
+ Object.const_set(worker_name, Class.new(base_class) do
+ from_queue queue_name, queue_opts
+ handle_with handler_class, handler_opts if handler_class
+ config local_config if local_config.any?
+ end
+ )
+ end
+
+ def worker_classes(workers = [])
+ return [] if workers.nil?
+
+ workers.map do |worker|
+ if worker.is_a? Hash
+ if worker[:worker]
+ worker[:worker]
+ else
+ queue_name = worker[:queue_name] || worker[:queue]
+ "#{inflector.classify(inflector.underscore(queue_name))}Worker"
+ end
+ else
+ worker
+ end
+ end
+ end
+
+ def inflector
+ @inflector ||= Dry::Inflector.new
+ end
+
  end
  end
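
To make the mapping in the new `Util.worker_classes` above concrete (a sketch; `SpecialWorker` and `CustomWorker` are placeholder names):

    require 'pika_que/util'

    PikaQue::Util.worker_classes([
      { queue: 'default' },        # name derived via dry-inflector => "DefaultWorker"
      { worker: 'SpecialWorker' }, # explicit :worker wins          => "SpecialWorker"
      'CustomWorker'               # non-Hash entries pass through  => "CustomWorker"
    ])
    # => ["DefaultWorker", "SpecialWorker", "CustomWorker"]
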
@@ -1,3 +1,3 @@
  module PikaQue
- VERSION = "0.1.6"
+ VERSION = "0.2.0"
  end
@@ -30,6 +30,7 @@ Gem::Specification.new do |spec|
  spec.add_dependency 'bunny', '~> 2.6'
  spec.add_dependency 'concurrent-ruby', '~> 1.0'
  spec.add_dependency 'json', '~> 1.8'
+ spec.add_dependency 'dry-inflector'

  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: pika_que
  version: !ruby/object:Gem::Version
- version: 0.1.6
+ version: 0.2.0
  platform: ruby
  authors:
  - Dong Wook Koo
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2018-03-26 00:00:00.000000000 Z
+ date: 2018-04-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bunny
@@ -52,6 +52,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '1.8'
+ - !ruby/object:Gem::Dependency
+ name: dry-inflector
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
@@ -129,9 +143,10 @@ files:
  - examples/demo.rb
  - examples/demo_conpriority.rb
  - examples/demo_delay.rb
+ - examples/demo_dlx_retry.rb
+ - examples/demo_middleware.rb
  - examples/demo_oneoff.rb
  - examples/demo_priority.rb
- - examples/demo_reporter.rb
  - examples/demo_retry.rb
  - examples/demo_worker.rb
  - examples/dev_worker.rb
@@ -152,6 +167,7 @@ files:
  - lib/pika_que/handlers.rb
  - lib/pika_que/handlers/default_handler.rb
  - lib/pika_que/handlers/delay_handler.rb
+ - lib/pika_que/handlers/dlx_retry_handler.rb
  - lib/pika_que/handlers/error_handler.rb
  - lib/pika_que/handlers/retry_handler.rb
  - lib/pika_que/launcher.rb
@@ -164,6 +180,7 @@ files:
  - lib/pika_que/processor.rb
  - lib/pika_que/publisher.rb
  - lib/pika_que/rails.rb
+ - lib/pika_que/rails_worker.rb
  - lib/pika_que/reporters.rb
  - lib/pika_que/reporters/log_reporter.rb
  - lib/pika_que/runner.rb
@@ -192,7 +209,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5.2
+ rubygems_version: 2.5.2.2
  signing_key:
  specification_version: 4
  summary: Ruby background processor for RabbitMQ.
@@ -1,19 +0,0 @@
- class DemoReporter
-
- def initialize(opts = {})
- STDOUT.sync = true
- end
-
- def call(worker, delivery_info, metadata, msg)
- puts "entering middleware DemoReporter for msg: #{msg}"
- begin
- yield
- rescue => e
- puts "error caught in middleware DemoReporter for msg: #{msg}, error: #{e.message}"
- raise e
- ensure
- puts "leaving middleware DemoReporter for msg: #{msg}"
- end
- end
-
- end