sidekiq 5.2.5 → 6.0.2

Potentially problematic release.

Files changed (79)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +82 -0
  3. data/.gitignore +0 -2
  4. data/.standard.yml +20 -0
  5. data/6.0-Upgrade.md +72 -0
  6. data/COMM-LICENSE +11 -9
  7. data/Changes.md +130 -0
  8. data/Ent-2.0-Upgrade.md +37 -0
  9. data/Ent-Changes.md +32 -1
  10. data/Gemfile +12 -17
  11. data/Gemfile.lock +196 -0
  12. data/Pro-5.0-Upgrade.md +25 -0
  13. data/Pro-Changes.md +26 -2
  14. data/README.md +19 -31
  15. data/Rakefile +5 -4
  16. data/bin/sidekiqload +33 -25
  17. data/bin/sidekiqmon +8 -0
  18. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  19. data/lib/generators/sidekiq/worker_generator.rb +20 -12
  20. data/lib/sidekiq.rb +61 -42
  21. data/lib/sidekiq/api.rb +220 -192
  22. data/lib/sidekiq/cli.rb +111 -174
  23. data/lib/sidekiq/client.rb +51 -46
  24. data/lib/sidekiq/delay.rb +5 -6
  25. data/lib/sidekiq/exception_handler.rb +10 -12
  26. data/lib/sidekiq/extensions/action_mailer.rb +10 -20
  27. data/lib/sidekiq/extensions/active_record.rb +9 -7
  28. data/lib/sidekiq/extensions/class_methods.rb +9 -7
  29. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  30. data/lib/sidekiq/fetch.rb +11 -12
  31. data/lib/sidekiq/job_logger.rb +45 -7
  32. data/lib/sidekiq/job_retry.rb +71 -60
  33. data/lib/sidekiq/launcher.rb +57 -51
  34. data/lib/sidekiq/logger.rb +165 -0
  35. data/lib/sidekiq/manager.rb +7 -9
  36. data/lib/sidekiq/middleware/chain.rb +14 -4
  37. data/lib/sidekiq/middleware/i18n.rb +5 -7
  38. data/lib/sidekiq/monitor.rb +133 -0
  39. data/lib/sidekiq/paginator.rb +18 -14
  40. data/lib/sidekiq/processor.rb +83 -75
  41. data/lib/sidekiq/rails.rb +23 -29
  42. data/lib/sidekiq/redis_connection.rb +31 -37
  43. data/lib/sidekiq/scheduled.rb +28 -29
  44. data/lib/sidekiq/testing.rb +34 -23
  45. data/lib/sidekiq/testing/inline.rb +2 -1
  46. data/lib/sidekiq/util.rb +17 -16
  47. data/lib/sidekiq/version.rb +2 -1
  48. data/lib/sidekiq/web.rb +41 -49
  49. data/lib/sidekiq/web/action.rb +14 -10
  50. data/lib/sidekiq/web/application.rb +64 -66
  51. data/lib/sidekiq/web/helpers.rb +89 -71
  52. data/lib/sidekiq/web/router.rb +17 -14
  53. data/lib/sidekiq/worker.rb +129 -97
  54. data/sidekiq.gemspec +16 -16
  55. data/web/assets/javascripts/dashboard.js +4 -23
  56. data/web/assets/stylesheets/application-dark.css +125 -0
  57. data/web/assets/stylesheets/application.css +9 -0
  58. data/web/assets/stylesheets/bootstrap.css +1 -1
  59. data/web/locales/de.yml +14 -2
  60. data/web/locales/ja.yml +2 -1
  61. data/web/views/_job_info.erb +2 -1
  62. data/web/views/busy.erb +4 -1
  63. data/web/views/dead.erb +2 -2
  64. data/web/views/layout.erb +1 -0
  65. data/web/views/morgue.erb +4 -1
  66. data/web/views/queue.erb +10 -1
  67. data/web/views/queues.erb +1 -1
  68. data/web/views/retries.erb +4 -1
  69. data/web/views/retry.erb +2 -2
  70. data/web/views/scheduled.erb +4 -1
  71. metadata +21 -32
  72. data/.travis.yml +0 -17
  73. data/Appraisals +0 -9
  74. data/bin/sidekiqctl +0 -237
  75. data/gemfiles/rails_4.gemfile +0 -31
  76. data/gemfiles/rails_5.gemfile +0 -31
  77. data/lib/sidekiq/core_ext.rb +0 -1
  78. data/lib/sidekiq/logging.rb +0 -122
  79. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
data/bin/sidekiqmon
@@ -0,0 +1,8 @@
+ #!/usr/bin/env ruby
+
+ require 'sidekiq/monitor'
+
+ section = "all"
+ section = ARGV[0] if ARGV.size == 1
+
+ Sidekiq::Monitor::Status.new.display(section)
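sidekiqmon is a new binary in this release (data/bin/sidekiqctl is removed) and is a thin wrapper around Sidekiq::Monitor::Status. A minimal sketch of driving the same status output from Ruby, assuming a reachable Redis; "all" is the default section per the script above:

require "sidekiq/monitor"

# Prints the same overview that running `sidekiqmon` with no arguments prints.
Sidekiq::Monitor::Status.new.display("all")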
data/lib/generators/sidekiq/templates/worker_test.rb.erb
@@ -1,6 +1,6 @@
  require 'test_helper'
  <% module_namespacing do -%>
- class <%= class_name %>WorkerTest < <% if defined? Minitest::Test %>Minitest::Test<% else %>MiniTest::Unit::TestCase<% end %>
+ class <%= class_name %>WorkerTest < Minitest::Test
  def test_example
  skip "add some examples to (or delete) #{__FILE__}"
  end
data/lib/generators/sidekiq/worker_generator.rb
@@ -1,21 +1,23 @@
- require 'rails/generators/named_base'
+ require "rails/generators/named_base"

  module Sidekiq
  module Generators # :nodoc:
  class WorkerGenerator < ::Rails::Generators::NamedBase # :nodoc:
- desc 'This generator creates a Sidekiq Worker in app/workers and a corresponding test'
+ desc "This generator creates a Sidekiq Worker in app/workers and a corresponding test"

- check_class_collision suffix: 'Worker'
+ check_class_collision suffix: "Worker"

  def self.default_generator_root
  File.dirname(__FILE__)
  end

  def create_worker_file
- template 'worker.rb.erb', File.join('app/workers', class_path, "#{file_name}_worker.rb")
+ template "worker.rb.erb", File.join("app/workers", class_path, "#{file_name}_worker.rb")
  end

  def create_test_file
+ return unless test_framework
+
  if defined?(RSpec)
  create_worker_spec
  else
@@ -27,23 +29,29 @@ module Sidekiq

  def create_worker_spec
  template_file = File.join(
- 'spec/workers',
- class_path,
- "#{file_name}_worker_spec.rb"
+ "spec/workers",
+ class_path,
+ "#{file_name}_worker_spec.rb"
  )
- template 'worker_spec.rb.erb', template_file
+ template "worker_spec.rb.erb", template_file
  end

  def create_worker_test
  template_file = File.join(
- 'test/workers',
- class_path,
- "#{file_name}_worker_test.rb"
+ "test/workers",
+ class_path,
+ "#{file_name}_worker_test.rb"
  )
- template 'worker_test.rb.erb', template_file
+ template "worker_test.rb.erb", template_file
  end

+ def file_name
+ @_file_name ||= super.sub(/_?worker\z/i, "")
+ end

+ def test_framework
+ ::Rails.application.config.generators.options[:rails][:test_framework]
+ end
  end
  end
  end
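Note for upgraders: the new file_name override strips a trailing "worker"/"_worker" from the generator argument, so `rails generate sidekiq:worker HardWorker` no longer produces a doubled hard_worker_worker.rb. A quick illustration of the substitution itself (plain Ruby, hypothetical names):

# The regex removes an optional underscore plus "worker" at the end, case-insensitively.
"hard_worker".sub(/_?worker\z/i, "")  # => "hard"
"audit".sub(/_?worker\z/i, "")        # => "audit" (unchanged)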
data/lib/sidekiq.rb
@@ -1,27 +1,27 @@
  # frozen_string_literal: true

- require 'sidekiq/version'
- fail "Sidekiq #{Sidekiq::VERSION} does not support Ruby versions below 2.2.2." if RUBY_PLATFORM != 'java' && Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.2.2')
+ require "sidekiq/version"
+ fail "Sidekiq #{Sidekiq::VERSION} does not support Ruby versions below 2.5.0." if RUBY_PLATFORM != "java" && Gem::Version.new(RUBY_VERSION) < Gem::Version.new("2.5.0")

- require 'sidekiq/logging'
- require 'sidekiq/client'
- require 'sidekiq/worker'
- require 'sidekiq/redis_connection'
- require 'sidekiq/delay'
+ require "sidekiq/logger"
+ require "sidekiq/client"
+ require "sidekiq/worker"
+ require "sidekiq/redis_connection"
+ require "sidekiq/delay"

- require 'json'
+ require "json"

  module Sidekiq
- NAME = 'Sidekiq'
- LICENSE = 'See LICENSE and the LGPL-3.0 for licensing details.'
+ NAME = "Sidekiq"
+ LICENSE = "See LICENSE and the LGPL-3.0 for licensing details."

  DEFAULTS = {
  queues: [],
  labels: [],
  concurrency: 10,
- require: '.',
+ require: ".",
  environment: nil,
- timeout: 8,
+ timeout: 25,
  poll_interval_average: nil,
  average_scheduled_poll_interval: 5,
  error_handlers: [],
@@ -38,8 +38,8 @@ module Sidekiq
  }

  DEFAULT_WORKER_OPTIONS = {
- 'retry' => true,
- 'queue' => 'default'
+ "retry" => true,
+ "queue" => "default",
  }

  FAKE_INFO = {
@@ -47,7 +47,7 @@
  "uptime_in_days" => "9999",
  "connected_clients" => "9999",
  "used_memory_human" => "9P",
- "used_memory_peak_human" => "9P"
+ "used_memory_peak_human" => "9P",
  }

  def self.❨╯°□°❩╯︵┻━┻
@@ -96,9 +96,13 @@ module Sidekiq
  begin
  yield conn
  rescue Redis::CommandError => ex
- #2550 Failover can cause the server to become a replica, need
+ # 2550 Failover can cause the server to become a replica, need
  # to disconnect and reopen the socket to get back to the primary.
- (conn.disconnect!; retryable = false; retry) if retryable && ex.message =~ /READONLY/
+ if retryable && ex.message =~ /READONLY/
+ conn.disconnect!
+ retryable = false
+ retry
+ end
  raise
  end
  end
@@ -106,19 +110,17 @@ module Sidekiq

  def self.redis_info
  redis do |conn|
- begin
- # admin commands can't go through redis-namespace starting
- # in redis-namespace 2.0
- if conn.respond_to?(:namespace)
- conn.redis.info
- else
- conn.info
- end
- rescue Redis::CommandError => ex
- #2850 return fake version when INFO command has (probably) been renamed
- raise unless ex.message =~ /unknown command/
- FAKE_INFO
+ # admin commands can't go through redis-namespace starting
+ # in redis-namespace 2.0
+ if conn.respond_to?(:namespace)
+ conn.redis.info
+ else
+ conn.info
  end
+ rescue Redis::CommandError => ex
+ # 2850 return fake version when INFO command has (probably) been renamed
+ raise unless /unknown command/.match?(ex.message)
+ FAKE_INFO
  end
  end

@@ -152,18 +154,13 @@ module Sidekiq

  def self.default_worker_options=(hash)
  # stringify
- @default_worker_options = default_worker_options.merge(Hash[hash.map{|k, v| [k.to_s, v]}])
+ @default_worker_options = default_worker_options.merge(Hash[hash.map { |k, v| [k.to_s, v] }])
  end
+
  def self.default_worker_options
  defined?(@default_worker_options) ? @default_worker_options : DEFAULT_WORKER_OPTIONS
  end

- def self.default_retries_exhausted=(prok)
- logger.info { "default_retries_exhausted is deprecated, please use `config.death_handlers << -> {|job, ex| }`" }
- return nil unless prok
- death_handlers << prok
- end
-
  ##
  # Death handlers are called when all retries for a job have been exhausted and
  # the job dies. It's the notification to your application
@@ -180,15 +177,37 @@ module Sidekiq
  def self.load_json(string)
  JSON.parse(string)
  end
+
  def self.dump_json(object)
  JSON.generate(object)
  end

+ def self.log_formatter
+ @log_formatter ||= if ENV["DYNO"]
+ Sidekiq::Logger::Formatters::WithoutTimestamp.new
+ else
+ Sidekiq::Logger::Formatters::Pretty.new
+ end
+ end
+
+ def self.log_formatter=(log_formatter)
+ @log_formatter = log_formatter
+ logger.formatter = log_formatter
+ end
+
  def self.logger
- Sidekiq::Logging.logger
+ @logger ||= Sidekiq::Logger.new(STDOUT, level: Logger::INFO)
  end
- def self.logger=(log)
- Sidekiq::Logging.logger = log
+
+ def self.logger=(logger)
+ if logger.nil?
+ self.logger.level = Logger::FATAL
+ return self.logger
+ end
+
+ logger.extend(Sidekiq::LoggingUtils)
+
+ @logger = logger
  end

  # How frequently Redis should be checked by a random Sidekiq process for
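Sidekiq 6 replaces Sidekiq::Logging with the new Sidekiq::Logger and the log_formatter accessors above. A minimal configuration sketch based only on what this hunk shows (WithoutTimestamp is the formatter Sidekiq itself selects when ENV["DYNO"] is set):

require "sidekiq"

# Hand Sidekiq a plain stdlib Logger; the writer extends it with Sidekiq::LoggingUtils.
Sidekiq.logger = Logger.new(STDOUT)
Sidekiq.logger.level = Logger::DEBUG

# Or keep the default logger and only swap the formatter, e.g. when the host
# already timestamps every log line.
Sidekiq.log_formatter = Sidekiq::Logger::Formatters::WithoutTimestamp.new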
@@ -197,7 +216,7 @@ module Sidekiq
  #
  # See sidekiq/scheduled.rb for an in-depth explanation of this value
  def self.average_scheduled_poll_interval=(interval)
- self.options[:average_scheduled_poll_interval] = interval
+ options[:average_scheduled_poll_interval] = interval
  end

  # Register a proc to handle any error which occurs within the Sidekiq process.
@@ -208,7 +227,7 @@ module Sidekiq
  #
  # The default error handler logs errors to Sidekiq.logger.
  def self.error_handlers
- self.options[:error_handlers]
+ options[:error_handlers]
  end

  # Register a block to run at a point in the Sidekiq lifecycle.
@@ -234,4 +253,4 @@ module Sidekiq
  class Shutdown < Interrupt; end
  end

- require 'sidekiq/rails' if defined?(::Rails::Engine)
+ require "sidekiq/rails" if defined?(::Rails::Engine)
data/lib/sidekiq/api.rb
@@ -1,24 +1,12 @@
  # frozen_string_literal: true
- require 'sidekiq'

- module Sidekiq
+ require "sidekiq"

- module RedisScanner
- def sscan(conn, key)
- cursor = '0'
- result = []
- loop do
- cursor, values = conn.sscan(key, cursor)
- result.push(*values)
- break if cursor == '0'
- end
- result
- end
- end
+ require "zlib"
+ require "base64"

+ module Sidekiq
  class Stats
- include RedisScanner
-
  def initialize
  fetch_stats!
  end
@@ -64,61 +52,65 @@ module Sidekiq
  end

  def fetch_stats!
- pipe1_res = Sidekiq.redis do |conn|
+ pipe1_res = Sidekiq.redis { |conn|
  conn.pipelined do
- conn.get('stat:processed')
- conn.get('stat:failed')
- conn.zcard('schedule')
- conn.zcard('retry')
- conn.zcard('dead')
- conn.scard('processes')
- conn.lrange('queue:default', -1, -1)
+ conn.get("stat:processed")
+ conn.get("stat:failed")
+ conn.zcard("schedule")
+ conn.zcard("retry")
+ conn.zcard("dead")
+ conn.scard("processes")
+ conn.lrange("queue:default", -1, -1)
  end
- end
+ }

- processes = Sidekiq.redis do |conn|
- sscan(conn, 'processes')
- end
+ processes = Sidekiq.redis { |conn|
+ conn.sscan_each("processes").to_a
+ }

- queues = Sidekiq.redis do |conn|
- sscan(conn, 'queues')
- end
+ queues = Sidekiq.redis { |conn|
+ conn.sscan_each("queues").to_a
+ }

- pipe2_res = Sidekiq.redis do |conn|
+ pipe2_res = Sidekiq.redis { |conn|
  conn.pipelined do
- processes.each {|key| conn.hget(key, 'busy') }
- queues.each {|queue| conn.llen("queue:#{queue}") }
+ processes.each { |key| conn.hget(key, "busy") }
+ queues.each { |queue| conn.llen("queue:#{queue}") }
  end
- end
+ }

  s = processes.size
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+ workers_size = pipe2_res[0...s].sum(&:to_i)
+ enqueued = pipe2_res[s..-1].sum(&:to_i)

  default_queue_latency = if (entry = pipe1_res[6].first)
- job = Sidekiq.load_json(entry) rescue {}
- now = Time.now.to_f
- thence = job['enqueued_at'] || now
- now - thence
- else
- 0
- end
+ job = begin
+ Sidekiq.load_json(entry)
+ rescue
+ {}
+ end
+ now = Time.now.to_f
+ thence = job["enqueued_at"] || now
+ now - thence
+ else
+ 0
+ end
  @stats = {
- processed: pipe1_res[0].to_i,
- failed: pipe1_res[1].to_i,
- scheduled_size: pipe1_res[2],
- retry_size: pipe1_res[3],
- dead_size: pipe1_res[4],
- processes_size: pipe1_res[5],
+ processed: pipe1_res[0].to_i,
+ failed: pipe1_res[1].to_i,
+ scheduled_size: pipe1_res[2],
+ retry_size: pipe1_res[3],
+ dead_size: pipe1_res[4],
+ processes_size: pipe1_res[5],

  default_queue_latency: default_queue_latency,
- workers_size: workers_size,
- enqueued: enqueued
+ workers_size: workers_size,
+ enqueued: enqueued,
  }
  end

  def reset(*stats)
- all = %w(failed processed)
+ all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)

  mset_args = []
@@ -138,24 +130,21 @@ module Sidekiq
  end

  class Queues
- include RedisScanner
-
  def lengths
  Sidekiq.redis do |conn|
- queues = sscan(conn, 'queues')
+ queues = conn.sscan_each("queues").to_a

- lengths = conn.pipelined do
+ lengths = conn.pipelined {
  queues.each do |queue|
  conn.llen("queue:#{queue}")
  end
- end
+ }

  i = 0
- array_of_arrays = queues.inject({}) do |memo, queue|
+ array_of_arrays = queues.each_with_object({}) { |queue, memo|
  memo[queue] = lengths[i]
  i += 1
- memo
- end.sort_by { |_, size| size }
+ }.sort_by { |_, size| size }

  Hash[array_of_arrays.reverse]
  end
@@ -222,18 +211,17 @@ module Sidekiq
  #
  class Queue
  include Enumerable
- extend RedisScanner

  ##
  # Return all known queues within Redis.
  #
  def self.all
- Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

- def initialize(name="default")
+ def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end
@@ -253,13 +241,13 @@ module Sidekiq
  #
  # @return Float
  def latency
- entry = Sidekiq.redis do |conn|
+ entry = Sidekiq.redis { |conn|
  conn.lrange(@rname, -1, -1)
- end.first
+ }.first
  return 0 unless entry
  job = Sidekiq.load_json(entry)
  now = Time.now.to_f
- thence = job['enqueued_at'] || now
+ thence = job["enqueued_at"] || now
  now - thence
  end

@@ -269,12 +257,12 @@ module Sidekiq
  page = 0
  page_size = 50

- while true do
+ loop do
  range_start = page * page_size - deleted_size
- range_end = range_start + page_size - 1
- entries = Sidekiq.redis do |conn|
+ range_end = range_start + page_size - 1
+ entries = Sidekiq.redis { |conn|
  conn.lrange @rname, range_start, range_end
- end
+ }
  break if entries.empty?
  page += 1
  entries.each do |entry|
@@ -315,11 +303,11 @@ module Sidekiq
  attr_reader :item
  attr_reader :value

- def initialize(item, queue_name=nil)
+ def initialize(item, queue_name = nil)
  @args = nil
  @value = item
  @item = item.is_a?(Hash) ? item : parse(item)
- @queue = queue_name || @item['queue']
+ @queue = queue_name || @item["queue"]
  end

  def parse(item)
@@ -334,7 +322,7 @@ module Sidekiq
  end

  def klass
- self['class']
+ self["class"]
  end

  def display_class
@@ -345,16 +333,16 @@ module Sidekiq
  "#{target}.#{method}"
  end
  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_class = @item['wrapped'] || args[0]
- if 'ActionMailer::DeliveryJob' == job_class
+ job_class = @item["wrapped"] || args[0]
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
- args[0]['arguments'][0..1].join('#')
+ args[0]["arguments"][0..1].join("#")
  else
- job_class
+ job_class
  end
  else
  klass
- end
+ end
  end

  def display_args
@@ -365,53 +353,68 @@ module Sidekiq
  arg
  end
  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self['wrapped'] ? args[0]["arguments"] : []
- if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
  # remove MailerClass, mailer_method and 'deliver_now'
  job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first["args"]
  else
  job_args
  end
  else
- if self['encrypt']
+ if self["encrypt"]
  # no point in showing 150+ bytes of random garbage
- args[-1] = '[encrypted data]'
+ args[-1] = "[encrypted data]"
  end
  args
- end
+ end
  end

  def args
- @args || @item['args']
+ @args || @item["args"]
  end

  def jid
- self['jid']
+ self["jid"]
  end

  def enqueued_at
- self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end

  def created_at
- Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
  end

- def queue
- @queue
+ def tags
+ self["tags"] || []
+ end
+
+ def error_backtrace
+ # Cache nil values
+ if defined?(@error_backtrace)
+ @error_backtrace
+ else
+ value = self["error_backtrace"]
+ @error_backtrace = value && uncompress_backtrace(value)
+ end
  end

+ attr_reader :queue
+
  def latency
  now = Time.now.to_f
- now - (@item['enqueued_at'] || @item['created_at'] || now)
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

  ##
  # Remove this job from the queue.
  def delete
- count = Sidekiq.redis do |conn|
+ count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
- end
+ }
  count != 0
  end

@@ -425,13 +428,28 @@ module Sidekiq
  private

  def safe_load(content, default)
- begin
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
- default
+ yield(*YAML.load(content))
+ rescue => ex
+ # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+ # memory yet so the YAML can't be loaded.
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+ default
+ end
+
+ def uncompress_backtrace(backtrace)
+ if backtrace.is_a?(Array)
+ # Handle old jobs with raw Array backtrace format
+ backtrace
+ else
+ decoded = Base64.decode64(backtrace)
+ uncompressed = Zlib::Inflate.inflate(decoded)
+ begin
+ Sidekiq.load_json(uncompressed)
+ rescue
+ # Handle old jobs with marshalled backtrace format
+ # TODO Remove in 7.x
+ Marshal.load(uncompressed)
+ end
  end
  end
  end
@@ -459,8 +477,9 @@ module Sidekiq
  end

  def reschedule(at)
- delete
- @parent.schedule(at, item)
+ Sidekiq.redis do |conn|
+ conn.zincrby(@parent.name, at - @score, Sidekiq.dump_json(@item))
+ end
  end

  def add_to_queue
@@ -473,7 +492,7 @@ module Sidekiq
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
- msg['retry_count'] -= 1 if msg['retry_count']
+ msg["retry_count"] -= 1 if msg["retry_count"]
  Sidekiq::Client.push(msg)
  end
  end
@@ -487,31 +506,31 @@ module Sidekiq
  end

  def error?
- !!item['error_class']
+ !!item["error_class"]
  end

  private

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi do
+ results = conn.multi {
  conn.zrangebyscore(parent.name, score, score)
  conn.zremrangebyscore(parent.name, score, score)
- end.first
+ }.first

  if results.size == 1
  yield results.first
  else
  # multiple jobs with the same score
  # find the one with the right JID and push it
- hash = results.group_by do |message|
+ hash = results.group_by { |message|
  if message.index(jid)
  msg = Sidekiq.load_json(message)
- msg['jid'] == jid
+ msg["jid"] == jid
  else
  false
  end
- end
+ }

  msg = hash.fetch(true, []).first
  yield msg if msg
@@ -525,7 +544,6 @@ module Sidekiq
  end
  end
  end
-
  end

  class SortedSet
@@ -542,6 +560,17 @@ module Sidekiq
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ def scan(match, count = 100)
+ return to_enum(:scan, match, count) unless block_given?
+
+ match = "*#{match}*" unless match.include?("*")
+ Sidekiq.redis do |conn|
+ conn.zscan_each(name, match: match, count: count) do |entry, score|
+ yield SortedEntry.new(self, score, entry)
+ end
+ end
+ end
+
  def clear
  Sidekiq.redis do |conn|
  conn.del(name)
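SortedSet#scan above is new in 6.0 and wraps ZSCAN, so the retry/scheduled/dead sets can be filtered by Redis instead of paging every entry through Ruby. A short usage sketch (the worker name is hypothetical); without a block it returns an Enumerator:

require "sidekiq/api"

# Delete retries whose payload mentions HardWorker; the match string is
# wrapped in "*...*" unless it already contains a glob.
Sidekiq::RetrySet.new.scan("HardWorker") do |entry|
  entry.delete if entry.klass == "HardWorker"
end

first_ten = Sidekiq::ScheduledSet.new.scan("HardWorker").first(10)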
@@ -551,7 +580,6 @@ module Sidekiq
  end

  class JobSet < SortedSet
-
  def schedule(timestamp, message)
  Sidekiq.redis do |conn|
  conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -564,44 +592,55 @@ module Sidekiq
  page = -1
  page_size = 50

- while true do
+ loop do
  range_start = page * page_size + offset_size
- range_end = range_start + page_size - 1
- elements = Sidekiq.redis do |conn|
+ range_end = range_start + page_size - 1
+ elements = Sidekiq.redis { |conn|
  conn.zrange name, range_start, range_end, with_scores: true
- end
+ }
  break if elements.empty?
  page -= 1
- elements.reverse.each do |element, score|
+ elements.reverse_each do |element, score|
  yield SortedEntry.new(self, score, element)
  end
  offset_size = initial_size - @_size
  end
  end

+ ##
+ # Fetch jobs that match a given time or Range. Job ID is an
+ # optional second argument.
  def fetch(score, jid = nil)
- elements = Sidekiq.redis do |conn|
- conn.zrangebyscore(name, score, score)
- end
-
- elements.inject([]) do |result, element|
- entry = SortedEntry.new(self, score, element)
- if jid
- result << entry if entry.jid == jid
+ begin_score, end_score =
+ if score.is_a?(Range)
+ [score.first, score.last]
  else
- result << entry
+ [score, score]
  end
- result
+
+ elements = Sidekiq.redis { |conn|
+ conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ }
+
+ elements.each_with_object([]) do |element, result|
+ data, job_score = element
+ entry = SortedEntry.new(self, job_score, data)
+ result << entry if jid.nil? || entry.jid == jid
  end
  end

  ##
  # Find the job with the given JID within this sorted set.
- #
- # This is a slow, inefficient operation. Do not use under
- # normal conditions. Sidekiq Pro contains a faster version.
+ # This is a slower O(n) operation. Do not use for app logic.
  def find_job(jid)
- self.detect { |j| j.jid == jid }
+ Sidekiq.redis do |conn|
+ conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = JSON.parse(entry)
+ matched = job["jid"] == jid
+ return SortedEntry.new(self, score, entry) if matched
+ end
+ end
+ nil
  end

  def delete_by_value(name, value)
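JobSet#fetch now accepts a Range of scores (epoch floats) as well as a single score, and find_job walks the set with ZSCAN instead of materializing every entry. A small sketch of the Range form (the jid string is hypothetical):

require "sidekiq/api"

ss = Sidekiq::ScheduledSet.new
window = Time.now.to_f..(Time.now.to_f + 3600)

# All jobs scheduled within the next hour...
upcoming = ss.fetch(window)

# ...or only the entry with a specific (hypothetical) jid inside that window.
job = ss.fetch(window, "2647c4fe13acc692326bd4c2").first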
@@ -616,13 +655,14 @@ module Sidekiq
  Sidekiq.redis do |conn|
  elements = conn.zrangebyscore(name, score, score)
  elements.each do |element|
- message = Sidekiq.load_json(element)
- if message["jid"] == jid
- ret = conn.zrem(name, element)
- @_size -= 1 if ret
- break ret
+ if element.index(jid)
+ message = Sidekiq.load_json(element)
+ if message["jid"] == jid
+ ret = conn.zrem(name, element)
+ @_size -= 1 if ret
+ break ret
+ end
  end
- false
  end
  end
  end
@@ -644,7 +684,7 @@ module Sidekiq
  # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
- super 'schedule'
+ super "schedule"
  end
  end

@@ -662,19 +702,15 @@ module Sidekiq
  # end.map(&:delete)
  class RetrySet < JobSet
  def initialize
- super 'retry'
+ super "retry"
  end

  def retry_all
- while size > 0
- each(&:retry)
- end
+ each(&:retry) while size > 0
  end

  def kill_all
- while size > 0
- each(&:kill)
- end
+ each(&:kill) while size > 0
  end
  end

@@ -683,15 +719,15 @@ module Sidekiq
  #
  class DeadSet < JobSet
  def initialize
- super 'dead'
+ super "dead"
  end

- def kill(message, opts={})
+ def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
  conn.multi do
  conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+ conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
  conn.zremrangebyrank(name, 0, - self.class.max_jobs)
  end
  end
@@ -708,9 +744,7 @@ module Sidekiq
  end

  def retry_all
- while size > 0
- each(&:retry)
- end
+ each(&:retry) while size > 0
  end

  def self.max_jobs
@@ -724,16 +758,15 @@ module Sidekiq

  ##
  # Enumerates the set of Sidekiq processes which are actively working
- # right now. Each process send a heartbeat to Redis every 5 seconds
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
  # Yields a Sidekiq::Process.
  #
  class ProcessSet
  include Enumerable
- include RedisScanner

- def initialize(clean_plz=true)
+ def initialize(clean_plz = true)
  cleanup if clean_plz
  end

@@ -742,12 +775,12 @@ module Sidekiq
  def cleanup
  count = 0
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes').sort
- heartbeats = conn.pipelined do
+ procs = conn.sscan_each("processes").to_a.sort
+ heartbeats = conn.pipelined {
  procs.each do |key|
- conn.hget(key, 'info')
+ conn.hget(key, "info")
  end
- end
+ }

  # the hash named key has an expiry of 60 seconds.
  # if it's not found, that means the process has not reported
@@ -756,36 +789,34 @@ module Sidekiq
  heartbeats.each_with_index do |beat, i|
  to_prune << procs[i] if beat.nil?
  end
- count = conn.srem('processes', to_prune) unless to_prune.empty?
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
  end
  count
  end

  def each
- procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
+ result = Sidekiq.redis { |conn|
+ procs = conn.sscan_each("processes").to_a.sort

- Sidekiq.redis do |conn|
  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- result = conn.pipelined do
+ conn.pipelined do
  procs.each do |key|
- conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+ conn.hmget(key, "info", "busy", "beat", "quiet")
  end
  end
+ }

- result.each do |info, busy, at_s, quiet|
- # If a process is stopped between when we query Redis for `procs` and
- # when we query for `result`, we will have an item in `result` that is
- # composed of `nil` values.
- next if info.nil?
+ result.each do |info, busy, at_s, quiet|
+ # If a process is stopped between when we query Redis for `procs` and
+ # when we query for `result`, we will have an item in `result` that is
+ # composed of `nil` values.
+ next if info.nil?

- hash = Sidekiq.load_json(info)
- yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
- end
+ hash = Sidekiq.load_json(info)
+ yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
  end
-
- nil
  end

  # This method is not guaranteed accurate since it does not prune the set
@@ -793,7 +824,7 @@ module Sidekiq
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
  def size
- Sidekiq.redis { |conn| conn.scard('processes') }
+ Sidekiq.redis { |conn| conn.scard("processes") }
  end

  # Returns the identity of the current cluster leader or "" if no leader.
@@ -801,9 +832,9 @@ module Sidekiq
  # or Sidekiq Pro.
  def leader
  @leader ||= begin
- x = Sidekiq.redis {|c| c.get("dear-leader") }
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
  # need a non-falsy value so we can memoize
- x = "" unless x
+ x ||= ""
  x
  end
  end
@@ -830,11 +861,11 @@ module Sidekiq
  end

  def tag
- self['tag']
+ self["tag"]
  end

  def labels
- Array(self['labels'])
+ Array(self["labels"])
  end

  def [](key)
@@ -842,23 +873,23 @@ module Sidekiq
  end

  def identity
- self['identity']
+ self["identity"]
  end

  def quiet!
- signal('TSTP')
+ signal("TSTP")
  end

  def stop!
- signal('TERM')
+ signal("TERM")
  end

  def dump_threads
- signal('TTIN')
+ signal("TTIN")
  end

  def stopping?
- self['quiet'] == 'true'
+ self["quiet"] == "true"
  end

  private
@@ -872,7 +903,6 @@ module Sidekiq
  end
  end
  end
-
  end

  ##
@@ -897,16 +927,15 @@ module Sidekiq
  #
  class Workers
  include Enumerable
- include RedisScanner

  def each
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes')
+ procs = conn.sscan_each("processes").to_a
  procs.sort.each do |key|
- valid, workers = conn.pipelined do
+ valid, workers = conn.pipelined {
  conn.exists(key)
  conn.hgetall("#{key}:workers")
- end
+ }
  next unless valid
  workers.each_pair do |tid, json|
  yield key, tid, Sidekiq.load_json(json)
@@ -923,18 +952,17 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes')
+ procs = conn.sscan_each("processes").to_a
  if procs.empty?
  0
  else
- conn.pipelined do
+ conn.pipelined {
  procs.each do |key|
- conn.hget(key, 'busy')
+ conn.hget(key, "busy")
  end
- end.map(&:to_i).inject(:+)
+ }.sum(&:to_i)
  end
  end
  end
  end
-
  end