sidekiq 5.0.5 → 5.2.1


Files changed (49)
  1. checksums.yaml +4 -4
  2. data/.github/issue_template.md +3 -1
  3. data/.travis.yml +5 -4
  4. data/Changes.md +53 -1
  5. data/Ent-Changes.md +21 -0
  6. data/Gemfile +10 -28
  7. data/LICENSE +1 -1
  8. data/Pro-4.0-Upgrade.md +35 -0
  9. data/Pro-Changes.md +86 -0
  10. data/README.md +4 -2
  11. data/Rakefile +0 -4
  12. data/bin/sidekiqload +1 -1
  13. data/lib/sidekiq/api.rb +65 -29
  14. data/lib/sidekiq/cli.rb +54 -24
  15. data/lib/sidekiq/client.rb +32 -32
  16. data/lib/sidekiq/delay.rb +1 -0
  17. data/lib/sidekiq/exception_handler.rb +2 -4
  18. data/lib/sidekiq/fetch.rb +1 -1
  19. data/lib/sidekiq/job_logger.rb +2 -1
  20. data/lib/sidekiq/job_retry.rb +18 -5
  21. data/lib/sidekiq/launcher.rb +17 -11
  22. data/lib/sidekiq/logging.rb +9 -5
  23. data/lib/sidekiq/manager.rb +2 -3
  24. data/lib/sidekiq/middleware/server/active_record.rb +2 -1
  25. data/lib/sidekiq/processor.rb +33 -18
  26. data/lib/sidekiq/rails.rb +6 -0
  27. data/lib/sidekiq/redis_connection.rb +11 -3
  28. data/lib/sidekiq/scheduled.rb +35 -8
  29. data/lib/sidekiq/testing.rb +4 -4
  30. data/lib/sidekiq/util.rb +6 -2
  31. data/lib/sidekiq/version.rb +1 -1
  32. data/lib/sidekiq/web/action.rb +2 -2
  33. data/lib/sidekiq/web/application.rb +18 -2
  34. data/lib/sidekiq/web/helpers.rb +7 -5
  35. data/lib/sidekiq/web/router.rb +10 -10
  36. data/lib/sidekiq/web.rb +4 -4
  37. data/lib/sidekiq/worker.rb +7 -7
  38. data/lib/sidekiq.rb +20 -13
  39. data/sidekiq.gemspec +3 -8
  40. data/web/assets/javascripts/application.js +0 -0
  41. data/web/assets/stylesheets/application.css +0 -0
  42. data/web/assets/stylesheets/bootstrap.css +2 -2
  43. data/web/locales/en.yml +1 -0
  44. data/web/locales/es.yml +4 -3
  45. data/web/locales/ja.yml +5 -3
  46. data/web/views/_footer.erb +3 -0
  47. data/web/views/layout.erb +1 -1
  48. data/web/views/queues.erb +2 -0
  49. metadata +8 -89
data/lib/sidekiq/cli.rb CHANGED
@@ -1,4 +1,3 @@
- # encoding: utf-8
  # frozen_string_literal: true
  $stdout.sync = true
 
@@ -17,7 +16,7 @@ module Sidekiq
  include Singleton unless $TESTING
 
  PROCTITLES = [
-   proc { 'sidekiq'.freeze },
+   proc { 'sidekiq' },
    proc { Sidekiq::VERSION },
    proc { |me, data| data['tag'] },
    proc { |me, data| "[#{Processor::WORKER_STATE.size} of #{data['concurrency']} busy]" },
@@ -65,7 +64,7 @@ module Sidekiq
  sigs.each do |sig|
    begin
      trap sig do
-       self_write.puts(sig)
+       self_write.write("#{sig}\n")
      end
    rescue ArgumentError
      puts "Signal #{sig} not supported"
@@ -81,6 +80,12 @@ module Sidekiq
  ver = Sidekiq.redis_info['redis_version']
  raise "You are using Redis v#{ver}, Sidekiq requires Redis v2.8.0 or greater" if ver < '2.8'
 
+ # Since the user can pass us a connection pool explicitly in the initializer, we
+ # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
+ cursize = Sidekiq.redis_pool.size
+ needed = Sidekiq.options[:concurrency] + 2
+ raise "Your pool of #{cursize} Redis connections is too small, please increase the size to at least #{needed}" if cursize < needed
+
  # cache process identity
  Sidekiq.options[:identity] = identity
 
@@ -89,7 +94,7 @@ module Sidekiq
 
  # Before this point, the process is initializing with just the main thread.
  # Starting here the process will now have multiple threads running.
- fire_event(:startup)
+ fire_event(:startup, reverse: false, reraise: true)
 
  logger.debug { "Client Middleware: #{Sidekiq.client_middleware.map(&:klass).join(', ')}" }
  logger.debug { "Server Middleware: #{Sidekiq.server_middleware.map(&:klass).join(', ')}" }
@@ -135,37 +140,45 @@ module Sidekiq
    }
  end
 
- def handle_signal(sig)
-   Sidekiq.logger.debug "Got #{sig} signal"
-   case sig
-   when 'INT'
-     # Handle Ctrl-C in JRuby like MRI
-     # http://jira.codehaus.org/browse/JRUBY-4637
-     raise Interrupt
-   when 'TERM'
-     # Heroku sends TERM and then waits 10 seconds for process to exit.
-     raise Interrupt
-   when 'USR1'
+ SIGNAL_HANDLERS = {
+   # Ctrl-C in terminal
+   'INT' => ->(cli) { raise Interrupt },
+   # TERM is the signal that Sidekiq must exit.
+   # Heroku sends TERM and then waits 30 seconds for process to exit.
+   'TERM' => ->(cli) { raise Interrupt },
+   'USR1' => ->(cli) {
      Sidekiq.logger.info "Received USR1, no longer accepting new work"
-     launcher.quiet
-   when 'TSTP'
-     # USR1 is not available on JVM, allow TSTP as an alternate signal
+     cli.launcher.quiet
+   },
+   'TSTP' => ->(cli) {
      Sidekiq.logger.info "Received TSTP, no longer accepting new work"
-     launcher.quiet
-   when 'USR2'
+     cli.launcher.quiet
+   },
+   'USR2' => ->(cli) {
      if Sidekiq.options[:logfile]
        Sidekiq.logger.info "Received USR2, reopening log file"
        Sidekiq::Logging.reopen_logs
      end
-   when 'TTIN'
+   },
+   'TTIN' => ->(cli) {
      Thread.list.each do |thread|
-       Sidekiq.logger.warn "Thread TID-#{thread.object_id.to_s(36)} #{thread['sidekiq_label']}"
+       Sidekiq.logger.warn "Thread TID-#{(thread.object_id ^ ::Process.pid).to_s(36)} #{thread['sidekiq_label']}"
        if thread.backtrace
         Sidekiq.logger.warn thread.backtrace.join("\n")
       else
         Sidekiq.logger.warn "<no backtrace available>"
       end
     end
+   },
+ }
+
+ def handle_signal(sig)
+   Sidekiq.logger.debug "Got #{sig} signal"
+   handy = SIGNAL_HANDLERS[sig]
+   if handy
+     handy.call(self)
+   else
+     Sidekiq.logger.info { "No signal handler for #{sig}" }
    end
  end
 
@@ -214,6 +227,14 @@ module Sidekiq
    @environment = cli_env || ENV['RAILS_ENV'] || ENV['RACK_ENV'] || 'development'
  end
 
+ def symbolize_keys_deep!(hash)
+   hash.keys.each do |k|
+     symkey = k.respond_to?(:to_sym) ? k.to_sym : k
+     hash[symkey] = hash.delete k
+     symbolize_keys_deep! hash[symkey] if hash[symkey].kind_of? Hash
+   end
+ end
+
  alias_method :die, :exit
  alias_method :☠, :exit
 
@@ -282,7 +303,7 @@ module Sidekiq
  if !File.exist?(options[:require]) ||
     (File.directory?(options[:require]) && !File.exist?("#{options[:require]}/config/application.rb"))
    logger.info "=================================================================="
-   logger.info "  Please point sidekiq to a Rails 3/4 application or a Ruby file  "
+   logger.info "  Please point sidekiq to a Rails 4/5 application or a Ruby file  "
    logger.info "  to load your worker classes with -r [DIR|FILE]."
    logger.info "=================================================================="
    logger.info @parser
@@ -314,6 +335,8 @@ module Sidekiq
    opts[:tag] = arg
  end
 
+ # this index remains here for backwards compatibility but none of the Sidekiq
+ # family use this value anymore. it was used by Pro's original reliable_fetch.
  o.on '-i', '--index INT', "unique process index on this machine" do |arg|
    opts[:index] = Integer(arg.match(/\d+/)[0])
  end
@@ -386,7 +409,14 @@ module Sidekiq
  opts = {}
  if File.exist?(cfile)
    opts = YAML.load(ERB.new(IO.read(cfile)).result) || opts
-   opts = opts.merge(opts.delete(environment) || {})
+
+   if opts.respond_to? :deep_symbolize_keys!
+     opts.deep_symbolize_keys!
+   else
+     symbolize_keys_deep!(opts)
+   end
+
+   opts = opts.merge(opts.delete(environment.to_sym) || {})
    parse_queues(opts, opts.delete(:queues) || [])
  else
    # allow a non-existent config file so Sidekiq
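Not part of the diff: a short sketch of what the new Redis pool-size check in cli.rb means if you hand Sidekiq a ConnectionPool yourself. The pool size, timeout, and REDIS_URL fallback below are assumptions for illustration, not values from this release.

require 'sidekiq'
require 'connection_pool'
require 'redis'

Sidekiq.configure_server do |config|
  # With the default concurrency of 25 worker threads, Sidekiq 5.2 now refuses
  # to boot unless an explicitly supplied pool holds at least 25 + 2 = 27 connections.
  config.redis = ConnectionPool.new(size: 27, timeout: 5) do
    Redis.new(url: ENV.fetch('REDIS_URL', 'redis://localhost:6379/0'))
  end
end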
data/lib/sidekiq/client.rb CHANGED
@@ -68,18 +68,19 @@ module Sidekiq
    #
    def push(item)
      normed = normalize_item(item)
-     payload = process_single(item['class'.freeze], normed)
+     payload = process_single(item['class'], normed)
 
      if payload
        raw_push([payload])
-       payload['jid'.freeze]
+       payload['jid']
      end
    end
 
    ##
-   # Push a large number of jobs to Redis. In practice this method is only
-   # useful if you are pushing thousands of jobs or more. This method
-   # cuts out the redis network round trip latency.
+   # Push a large number of jobs to Redis. This method cuts out the redis
+   # network round trip latency. I wouldn't recommend pushing more than
+   # 1000 per call but YMMV based on network quality, size of job args, etc.
+   # A large number of jobs can cause a bit of Redis command processing latency.
    #
    # Takes the same arguments as #push except that args is expected to be
    # an Array of Arrays. All other keys are duplicated for each job. Each job
@@ -89,19 +90,19 @@ module Sidekiq
    # Returns an array of the of pushed jobs' jids. The number of jobs pushed can be less
    # than the number given if the middleware stopped processing for one or more jobs.
    def push_bulk(items)
-     arg = items['args'.freeze].first
+     arg = items['args'].first
      return [] unless arg # no jobs to push
      raise ArgumentError, "Bulk arguments must be an Array of Arrays: [[1], [2]]" if !arg.is_a?(Array)
 
      normed = normalize_item(items)
-     payloads = items['args'.freeze].map do |args|
-       copy = normed.merge('args'.freeze => args, 'jid'.freeze => SecureRandom.hex(12), 'enqueued_at'.freeze => Time.now.to_f)
-       result = process_single(items['class'.freeze], copy)
+     payloads = items['args'].map do |args|
+       copy = normed.merge('args' => args, 'jid' => SecureRandom.hex(12), 'enqueued_at' => Time.now.to_f)
+       result = process_single(items['class'], copy)
        result ? result : nil
      end.compact
 
      raw_push(payloads) if !payloads.empty?
-     payloads.collect { |payload| payload['jid'.freeze] }
+     payloads.collect { |payload| payload['jid'] }
    end
 
    # Allows sharding of jobs across any number of Redis instances. All jobs
@@ -119,11 +120,10 @@ module Sidekiq
    def self.via(pool)
      raise ArgumentError, "No pool given" if pool.nil?
      current_sidekiq_pool = Thread.current[:sidekiq_via_pool]
-     raise RuntimeError, "Sidekiq::Client.via is not re-entrant" if current_sidekiq_pool && current_sidekiq_pool != pool
      Thread.current[:sidekiq_via_pool] = pool
      yield
    ensure
-     Thread.current[:sidekiq_via_pool] = nil
+     Thread.current[:sidekiq_via_pool] = current_sidekiq_pool
    end
 
    class << self
@@ -145,14 +145,14 @@ module Sidekiq
    # Messages are enqueued to the 'default' queue.
    #
    def enqueue(klass, *args)
-     klass.client_push('class'.freeze => klass, 'args'.freeze => args)
+     klass.client_push('class' => klass, 'args' => args)
    end
 
    # Example usage:
    #   Sidekiq::Client.enqueue_to(:queue_name, MyWorker, 'foo', 1, :bat => 'bar')
    #
    def enqueue_to(queue, klass, *args)
-     klass.client_push('queue'.freeze => queue, 'class'.freeze => klass, 'args'.freeze => args)
+     klass.client_push('queue' => queue, 'class' => klass, 'args' => args)
    end
 
    # Example usage:
@@ -163,8 +163,8 @@ module Sidekiq
      now = Time.now.to_f
      ts = (int < 1_000_000_000 ? now + int : int)
 
-     item = { 'class'.freeze => klass, 'args'.freeze => args, 'at'.freeze => ts, 'queue'.freeze => queue }
-     item.delete('at'.freeze) if ts <= now
+     item = { 'class' => klass, 'args' => args, 'at' => ts, 'queue' => queue }
+     item.delete('at') if ts <= now
 
      klass.client_push(item)
    end
@@ -189,25 +189,25 @@ module Sidekiq
    end
 
    def atomic_push(conn, payloads)
-     if payloads.first['at'.freeze]
-       conn.zadd('schedule'.freeze, payloads.map do |hash|
-         at = hash.delete('at'.freeze).to_s
+     if payloads.first['at']
+       conn.zadd('schedule', payloads.map do |hash|
+         at = hash.delete('at').to_s
          [at, Sidekiq.dump_json(hash)]
        end)
      else
-       q = payloads.first['queue'.freeze]
+       q = payloads.first['queue']
        now = Time.now.to_f
        to_push = payloads.map do |entry|
-         entry['enqueued_at'.freeze] = now
+         entry['enqueued_at'] = now
          Sidekiq.dump_json(entry)
        end
-       conn.sadd('queues'.freeze, q)
+       conn.sadd('queues', q)
        conn.lpush("queue:#{q}", to_push)
      end
    end
 
    def process_single(worker_class, item)
-     queue = item['queue'.freeze]
+     queue = item['queue']
 
      middleware.invoke(worker_class, item, queue, @redis_pool) do
        item
@@ -215,25 +215,25 @@ module Sidekiq
    end
 
    def normalize_item(item)
-     raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class'.freeze) && item.has_key?('args'.freeze)
+     raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class') && item.has_key?('args')
      raise(ArgumentError, "Job args must be an Array") unless item['args'].is_a?(Array)
-     raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'.freeze].is_a?(Class) || item['class'.freeze].is_a?(String)
-     raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at'.freeze) && !item['at'].is_a?(Numeric)
+     raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'].is_a?(Class) || item['class'].is_a?(String)
+     raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at') && !item['at'].is_a?(Numeric)
      #raise(ArgumentError, "Arguments must be native JSON types, see https://github.com/mperham/sidekiq/wiki/Best-Practices") unless JSON.load(JSON.dump(item['args'])) == item['args']
 
-     normalized_hash(item['class'.freeze])
+     normalized_hash(item['class'])
        .each{ |key, value| item[key] = value if item[key].nil? }
 
-     item['class'.freeze] = item['class'.freeze].to_s
-     item['queue'.freeze] = item['queue'.freeze].to_s
-     item['jid'.freeze] ||= SecureRandom.hex(12)
-     item['created_at'.freeze] ||= Time.now.to_f
+     item['class'] = item['class'].to_s
+     item['queue'] = item['queue'].to_s
+     item['jid'] ||= SecureRandom.hex(12)
+     item['created_at'] ||= Time.now.to_f
      item
    end
 
    def normalized_hash(item_class)
      if item_class.is_a?(Class)
-       raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options'.freeze)
+       raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options')
        item_class.get_sidekiq_options
      else
        Sidekiq.default_worker_options
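Not part of the diff: a small usage sketch of push_bulk, whose documentation the hunk above rewrites. HardWorker is a hypothetical Sidekiq::Worker class.

# Enqueues one job per inner args array; per the updated comment, keep batches
# around 1,000 per call. Returns the jids of the jobs that survived client middleware.
jids = Sidekiq::Client.push_bulk(
  'class' => HardWorker,
  'args'  => [[1], [2], [3]]
)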
data/lib/sidekiq/delay.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  module Sidekiq
    module Extensions
 
data/lib/sidekiq/exception_handler.rb CHANGED
@@ -7,11 +7,10 @@ module Sidekiq
    class Logger
      def call(ex, ctxHash)
        Sidekiq.logger.warn(Sidekiq.dump_json(ctxHash)) if !ctxHash.empty?
-       Sidekiq.logger.warn "#{ex.class.name}: #{ex.message}"
-       Sidekiq.logger.warn ex.backtrace.join("\n") unless ex.backtrace.nil?
+       Sidekiq.logger.warn("#{ex.class.name}: #{ex.message}")
+       Sidekiq.logger.warn(ex.backtrace.join("\n")) unless ex.backtrace.nil?
      end
 
-     # Set up default handler which just logs the error
      Sidekiq.error_handlers << Sidekiq::ExceptionHandler::Logger.new
    end
 
@@ -26,6 +25,5 @@ module Sidekiq
  end
  end
  end
-
  end
  end
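Not part of the diff: the Logger above is just the default entry in Sidekiq's error handler list. A hedged sketch of adding your own handler; MyErrorTracker is a hypothetical reporting client.

Sidekiq.configure_server do |config|
  config.error_handlers << proc do |exception, context_hash|
    # context_hash typically carries the job payload and a :context string
    MyErrorTracker.notify(exception, extra: context_hash)
  end
end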
data/lib/sidekiq/fetch.rb CHANGED
@@ -13,7 +13,7 @@ module Sidekiq
    end
 
    def queue_name
-     queue.sub(/.*queue:/, ''.freeze)
+     queue.sub(/.*queue:/, '')
    end
 
    def requeue
data/lib/sidekiq/job_logger.rb CHANGED
@@ -1,9 +1,10 @@
+ # frozen_string_literal: true
  module Sidekiq
    class JobLogger
 
      def call(item, queue)
        start = Time.now
-       logger.info("start".freeze)
+       logger.info("start")
        yield
        logger.info("done: #{elapsed(start)} sec")
      rescue Exception
data/lib/sidekiq/job_retry.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  require 'sidekiq/scheduled'
  require 'sidekiq/api'
 
@@ -172,10 +173,18 @@ module Sidekiq
    def retries_exhausted(worker, msg, exception)
      logger.debug { "Retries exhausted for job" }
      begin
-       block = worker && worker.sidekiq_retries_exhausted_block || Sidekiq.default_retries_exhausted
+       block = worker && worker.sidekiq_retries_exhausted_block
        block.call(msg, exception) if block
      rescue => e
-       handle_exception(e, { context: "Error calling retries_exhausted for #{msg['class']}", job: msg })
+       handle_exception(e, { context: "Error calling retries_exhausted", job: msg })
+     end
+
+     Sidekiq.death_handlers.each do |handler|
+       begin
+         handler.call(msg, exception)
+       rescue => e
+         handle_exception(e, { context: "Error calling death handler", job: msg })
+       end
      end
 
      send_to_morgue(msg) unless msg['dead'] == false
@@ -184,7 +193,7 @@ module Sidekiq
    def send_to_morgue(msg)
      Sidekiq.logger.info { "Adding dead #{msg['class']} job #{msg['jid']}" }
      payload = Sidekiq.dump_json(msg)
-     DeadSet.new.kill(payload)
+     DeadSet.new.kill(payload, notify_failure: false)
    end
 
    def retry_attempts_from(msg_retry, default)
@@ -196,7 +205,11 @@ module Sidekiq
    end
 
    def delay_for(worker, count, exception)
-     worker && worker.sidekiq_retry_in_block && retry_in(worker, count, exception) || seconds_to_delay(count)
+     if worker && worker.sidekiq_retry_in_block
+       custom_retry_in = retry_in(worker, count, exception).to_i
+       return custom_retry_in if custom_retry_in > 0
+     end
+     seconds_to_delay(count)
    end
 
    # delayed_job uses the same basic formula
@@ -206,7 +219,7 @@
 
    def retry_in(worker, count, exception)
      begin
-       worker.sidekiq_retry_in_block.call(count, exception).to_i
+       worker.sidekiq_retry_in_block.call(count, exception)
      rescue Exception => e
        handle_exception(e, { context: "Failure scheduling retry using the defined `sidekiq_retry_in` in #{worker.class.name}, falling back to default" })
        nil
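Not part of the diff: a hedged sketch of the two hooks these job_retry.rb hunks touch — the global death handlers called once retries are exhausted, and the stricter sidekiq_retry_in contract (a nil or zero return now falls back to the default backoff). MyWorker is hypothetical.

# Called after a job exhausts its retries, just before it goes to the morgue.
Sidekiq.death_handlers << ->(job, exception) do
  Sidekiq.logger.warn("#{job['class']} #{job['jid']} died: #{exception.message}")
end

class MyWorker
  include Sidekiq::Worker

  # Custom backoff: 10s, 20s, 30s, ... Returning nil/0 falls back to
  # Sidekiq's default exponential schedule instead of retrying immediately.
  sidekiq_retry_in { |count, _exception| 10 * (count + 1) }

  def perform(*args); end
end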
data/lib/sidekiq/launcher.rb CHANGED
@@ -1,4 +1,3 @@
- # encoding: utf-8
  # frozen_string_literal: true
  require 'sidekiq/manager'
  require 'sidekiq/fetch'
@@ -14,6 +13,8 @@ module Sidekiq
 
  attr_accessor :manager, :poller, :fetcher
 
+ STATS_TTL = 5*365*24*60*60
+
  def initialize(options)
    @manager = Sidekiq::Manager.new(options)
    @poller = Sidekiq::Scheduled::Poller.new
@@ -73,19 +74,24 @@ module Sidekiq
    key = identity
    fails = procd = 0
    begin
-     Processor::FAILURE.update {|curr| fails = curr; 0 }
-     Processor::PROCESSED.update {|curr| procd = curr; 0 }
+     fails = Processor::FAILURE.reset
+     procd = Processor::PROCESSED.reset
+     curstate = Processor::WORKER_STATE.dup
 
-     workers_key = "#{key}:workers".freeze
-     nowdate = Time.now.utc.strftime("%Y-%m-%d".freeze)
+     workers_key = "#{key}:workers"
+     nowdate = Time.now.utc.strftime("%Y-%m-%d")
      Sidekiq.redis do |conn|
        conn.multi do
-         conn.incrby("stat:processed".freeze, procd)
+         conn.incrby("stat:processed", procd)
          conn.incrby("stat:processed:#{nowdate}", procd)
-         conn.incrby("stat:failed".freeze, fails)
+         conn.expire("stat:processed:#{nowdate}", STATS_TTL)
+
+         conn.incrby("stat:failed", fails)
          conn.incrby("stat:failed:#{nowdate}", fails)
+         conn.expire("stat:failed:#{nowdate}", STATS_TTL)
+
          conn.del(workers_key)
-         Processor::WORKER_STATE.each_pair do |tid, hash|
+         curstate.each_pair do |tid, hash|
            conn.hset(workers_key, tid, Sidekiq.dump_json(hash))
          end
          conn.expire(workers_key, 60)
@@ -97,7 +103,7 @@
      conn.multi do
        conn.sadd('processes', key)
        conn.exists(key)
-       conn.hmset(key, 'info', to_json, 'busy', Processor::WORKER_STATE.size, 'beat', Time.now.to_f, 'quiet', @done)
+       conn.hmset(key, 'info', to_json, 'busy', curstate.size, 'beat', Time.now.to_f, 'quiet', @done)
        conn.expire(key, 60)
        conn.rpop("#{key}-signals")
      end
@@ -113,8 +119,8 @@
      # ignore all redis/network issues
      logger.error("heartbeat: #{e.message}")
      # don't lose the counts if there was a network issue
-     Processor::PROCESSED.increment(procd)
-     Processor::FAILURE.increment(fails)
+     Processor::PROCESSED.incr(procd)
+     Processor::FAILURE.incr(fails)
    end
  end
 
data/lib/sidekiq/logging.rb CHANGED
@@ -11,7 +11,7 @@ module Sidekiq
 
    # Provide a call() method that returns the formatted message.
    def call(severity, time, program_name, message)
-     "#{time.utc.iso8601(3)} #{::Process.pid} TID-#{Thread.current.object_id.to_s(36)}#{context} #{severity}: #{message}\n"
+     "#{time.utc.iso8601(3)} #{::Process.pid} TID-#{Sidekiq::Logging.tid}#{context} #{severity}: #{message}\n"
    end
 
    def context
@@ -22,16 +22,20 @@
 
    class WithoutTimestamp < Pretty
      def call(severity, time, program_name, message)
-       "#{::Process.pid} TID-#{Thread.current.object_id.to_s(36)}#{context} #{severity}: #{message}\n"
+       "#{::Process.pid} TID-#{Sidekiq::Logging.tid}#{context} #{severity}: #{message}\n"
      end
    end
 
+   def self.tid
+     Thread.current['sidekiq_tid'] ||= (Thread.current.object_id ^ ::Process.pid).to_s(36)
+   end
+
    def self.job_hash_context(job_hash)
      # If we're using a wrapper class, like ActiveJob, use the "wrapped"
      # attribute to expose the underlying thing.
-     klass = job_hash['wrapped'.freeze] || job_hash["class".freeze]
-     bid = job_hash['bid'.freeze]
-     "#{klass} JID-#{job_hash['jid'.freeze]}#{" BID-#{bid}" if bid}"
+     klass = job_hash['wrapped'] || job_hash["class"]
+     bid = job_hash['bid']
+     "#{klass} JID-#{job_hash['jid']}#{" BID-#{bid}" if bid}"
    end
 
    def self.with_job_hash_context(job_hash, &block)
data/lib/sidekiq/manager.rb CHANGED
@@ -1,4 +1,3 @@
- # encoding: utf-8
  # frozen_string_literal: true
  require 'sidekiq/util'
  require 'sidekiq/processor'
@@ -54,7 +53,7 @@ module Sidekiq
 
    logger.info { "Terminating quiet workers" }
    @workers.each { |x| x.terminate }
-   fire_event(:quiet, true)
+   fire_event(:quiet, reverse: true)
  end
 
  # hack for quicker development / testing environment #2774
@@ -62,7 +61,7 @@
 
  def stop(deadline)
    quiet
-   fire_event(:shutdown, true)
+   fire_event(:shutdown, reverse: true)
 
    # some of the shutdown events can be async,
    # we don't have any way to know when they're done but
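Not part of the diff: fire_event drives the server lifecycle hooks, and with these hunks the :quiet and :shutdown callbacks run in reverse registration order. A sketch of registering such hooks with the existing API:

Sidekiq.configure_server do |config|
  config.on(:startup)  { Sidekiq.logger.info("booted") }
  config.on(:quiet)    { Sidekiq.logger.info("no longer accepting new work") }
  config.on(:shutdown) { Sidekiq.logger.info("shutting down") }
end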
data/lib/sidekiq/middleware/server/active_record.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  module Sidekiq
    module Middleware
      module Server
@@ -6,7 +7,7 @@ module Sidekiq
    def initialize
      # With Rails 5+ we must use the Reloader **always**.
      # The reloader handles code loading and db connection management.
-     if ::Rails::VERSION::MAJOR >= 5
+     if defined?(::Rails) && ::Rails::VERSION::MAJOR >= 5
        raise ArgumentError, "Rails 5 no longer needs or uses the ActiveRecord middleware."
      end
    end
data/lib/sidekiq/processor.rb CHANGED
@@ -4,8 +4,6 @@ require 'sidekiq/fetch'
  require 'sidekiq/job_logger'
  require 'sidekiq/job_retry'
  require 'thread'
- require 'concurrent/map'
- require 'concurrent/atomic/atomic_fixnum'
 
  module Sidekiq
    ##
@@ -111,9 +109,7 @@ module Sidekiq
    if !@down
      @down = Time.now
      logger.error("Error fetching job: #{ex}")
-     ex.backtrace.each do |bt|
-       logger.error(bt)
-     end
+     handle_exception(ex)
    end
    sleep(1)
    nil
@@ -134,9 +130,9 @@
    # the Reloader. It handles code loading, db connection management, etc.
    # Effectively this block denotes a "unit of work" to Rails.
    @reloader.call do
-     klass = constantize(job_hash['class'.freeze])
+     klass = constantize(job_hash['class'])
      worker = klass.new
-     worker.jid = job_hash['jid'.freeze]
+     worker.jid = job_hash['jid']
      @retrier.local(worker, pristine, queue) do
        yield worker
      end
@@ -159,7 +155,8 @@
    job_hash = Sidekiq.load_json(jobstr)
  rescue => ex
    handle_exception(ex, { :context => "Invalid JSON for job", :jobstr => jobstr })
-   DeadSet.new.kill(jobstr)
+   # we can't notify because the job isn't a valid hash payload.
+   DeadSet.new.kill(jobstr, notify_failure: false)
    ack = true
    raise
  end
@@ -167,7 +164,7 @@
    ack = true
    dispatch(job_hash, queue) do |worker|
      Sidekiq.server_middleware.invoke(worker, job_hash, queue) do
-       execute_job(worker, cloned(job_hash['args'.freeze]))
+       execute_job(worker, cloned(job_hash['args']))
      end
    end
  rescue Sidekiq::Shutdown
@@ -188,26 +185,42 @@
    worker.perform(*cloned_args)
  end
 
- def thread_identity
-   @str ||= Thread.current.object_id.to_s(36)
+ # Ruby doesn't provide atomic counters out of the box so we'll
+ # implement something simple ourselves.
+ # https://bugs.ruby-lang.org/issues/14706
+ class Counter
+   def initialize
+     @value = 0
+     @lock = Mutex.new
+   end
+
+   def incr(amount=1)
+     @lock.synchronize { @value = @value + amount }
+   end
+
+   def reset
+     @lock.synchronize { val = @value; @value = 0; val }
+   end
  end
 
- WORKER_STATE = Concurrent::Map.new
- PROCESSED = Concurrent::AtomicFixnum.new
- FAILURE = Concurrent::AtomicFixnum.new
+ PROCESSED = Counter.new
+ FAILURE = Counter.new
+ # This is mutable global state but because each thread is storing
+ # its own unique key/value, there's no thread-safety issue AFAIK.
+ WORKER_STATE = {}
 
  def stats(job_hash, queue)
-   tid = thread_identity
+   tid = Sidekiq::Logging.tid
    WORKER_STATE[tid] = {:queue => queue, :payload => job_hash, :run_at => Time.now.to_i }
 
    begin
      yield
    rescue Exception
-     FAILURE.increment
+     FAILURE.incr
      raise
    ensure
      WORKER_STATE.delete(tid)
-     PROCESSED.increment
+     PROCESSED.incr
    end
  end
 
@@ -223,7 +236,9 @@
    names.shift if names.empty? || names.first.empty?
 
    names.inject(Object) do |constant, name|
-     constant.const_defined?(name) ? constant.const_get(name) : constant.const_missing(name)
+     # the false flag limits search for name to under the constant namespace
+     # which mimics Rails' behaviour
+     constant.const_defined?(name, false) ? constant.const_get(name, false) : constant.const_missing(name)
    end
  end
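Not part of the diff: a quick illustration of what the `false` (inherit) flag changes in constantize. Without it, constant lookup on a module falls through to top-level constants, which is how an unrelated top-level class could be resolved by mistake.

module Admin; end

Admin.const_defined?("String")         # => true  (inherited lookup reaches ::String)
Admin.const_defined?("String", false)  # => false (only constants nested under Admin count)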