sidekiq 6.3.1 → 7.0.0

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of sidekiq might be problematic. Click here for more details.

Files changed (110) hide show
  1. checksums.yaml +4 -4
  2. data/Changes.md +140 -4
  3. data/LICENSE.txt +9 -0
  4. data/README.md +19 -13
  5. data/bin/sidekiq +4 -9
  6. data/bin/sidekiqload +71 -76
  7. data/bin/sidekiqmon +1 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +267 -186
  13. data/lib/sidekiq/capsule.rb +110 -0
  14. data/lib/sidekiq/cli.rb +82 -78
  15. data/lib/sidekiq/client.rb +73 -80
  16. data/lib/sidekiq/{util.rb → component.rb} +13 -42
  17. data/lib/sidekiq/config.rb +271 -0
  18. data/lib/sidekiq/deploy.rb +62 -0
  19. data/lib/sidekiq/embedded.rb +61 -0
  20. data/lib/sidekiq/fetch.rb +22 -21
  21. data/lib/sidekiq/job.rb +375 -10
  22. data/lib/sidekiq/job_logger.rb +16 -28
  23. data/lib/sidekiq/job_retry.rb +79 -56
  24. data/lib/sidekiq/job_util.rb +71 -0
  25. data/lib/sidekiq/launcher.rb +76 -82
  26. data/lib/sidekiq/logger.rb +9 -44
  27. data/lib/sidekiq/manager.rb +40 -41
  28. data/lib/sidekiq/metrics/query.rb +153 -0
  29. data/lib/sidekiq/metrics/shared.rb +95 -0
  30. data/lib/sidekiq/metrics/tracking.rb +134 -0
  31. data/lib/sidekiq/middleware/chain.rb +84 -42
  32. data/lib/sidekiq/middleware/current_attributes.rb +19 -13
  33. data/lib/sidekiq/middleware/i18n.rb +6 -4
  34. data/lib/sidekiq/middleware/modules.rb +21 -0
  35. data/lib/sidekiq/monitor.rb +1 -1
  36. data/lib/sidekiq/paginator.rb +16 -8
  37. data/lib/sidekiq/processor.rb +56 -59
  38. data/lib/sidekiq/rails.rb +10 -9
  39. data/lib/sidekiq/redis_client_adapter.rb +118 -0
  40. data/lib/sidekiq/redis_connection.rb +13 -82
  41. data/lib/sidekiq/ring_buffer.rb +29 -0
  42. data/lib/sidekiq/scheduled.rb +75 -37
  43. data/lib/sidekiq/testing/inline.rb +4 -4
  44. data/lib/sidekiq/testing.rb +41 -68
  45. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  46. data/lib/sidekiq/version.rb +2 -1
  47. data/lib/sidekiq/web/action.rb +3 -3
  48. data/lib/sidekiq/web/application.rb +27 -8
  49. data/lib/sidekiq/web/csrf_protection.rb +3 -3
  50. data/lib/sidekiq/web/helpers.rb +22 -20
  51. data/lib/sidekiq/web.rb +6 -17
  52. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  53. data/lib/sidekiq.rb +85 -202
  54. data/sidekiq.gemspec +29 -5
  55. data/web/assets/javascripts/application.js +58 -26
  56. data/web/assets/javascripts/base-charts.js +106 -0
  57. data/web/assets/javascripts/chart.min.js +13 -0
  58. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  59. data/web/assets/javascripts/dashboard-charts.js +166 -0
  60. data/web/assets/javascripts/dashboard.js +3 -240
  61. data/web/assets/javascripts/metrics.js +236 -0
  62. data/web/assets/stylesheets/application-dark.css +13 -17
  63. data/web/assets/stylesheets/application-rtl.css +2 -91
  64. data/web/assets/stylesheets/application.css +67 -300
  65. data/web/locales/ar.yml +70 -70
  66. data/web/locales/cs.yml +62 -62
  67. data/web/locales/da.yml +52 -52
  68. data/web/locales/de.yml +65 -65
  69. data/web/locales/el.yml +43 -24
  70. data/web/locales/en.yml +82 -69
  71. data/web/locales/es.yml +68 -68
  72. data/web/locales/fa.yml +65 -65
  73. data/web/locales/fr.yml +67 -67
  74. data/web/locales/he.yml +65 -64
  75. data/web/locales/hi.yml +59 -59
  76. data/web/locales/it.yml +53 -53
  77. data/web/locales/ja.yml +71 -68
  78. data/web/locales/ko.yml +52 -52
  79. data/web/locales/lt.yml +66 -66
  80. data/web/locales/nb.yml +61 -61
  81. data/web/locales/nl.yml +52 -52
  82. data/web/locales/pl.yml +45 -45
  83. data/web/locales/pt-br.yml +63 -55
  84. data/web/locales/pt.yml +51 -51
  85. data/web/locales/ru.yml +67 -66
  86. data/web/locales/sv.yml +53 -53
  87. data/web/locales/ta.yml +60 -60
  88. data/web/locales/uk.yml +62 -61
  89. data/web/locales/ur.yml +64 -64
  90. data/web/locales/vi.yml +67 -67
  91. data/web/locales/zh-cn.yml +37 -11
  92. data/web/locales/zh-tw.yml +42 -8
  93. data/web/views/_footer.erb +5 -2
  94. data/web/views/_nav.erb +1 -1
  95. data/web/views/_summary.erb +1 -1
  96. data/web/views/busy.erb +9 -4
  97. data/web/views/dashboard.erb +36 -4
  98. data/web/views/metrics.erb +80 -0
  99. data/web/views/metrics_for_job.erb +69 -0
  100. data/web/views/queue.erb +5 -1
  101. metadata +75 -27
  102. data/LICENSE +0 -9
  103. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  104. data/lib/sidekiq/delay.rb +0 -41
  105. data/lib/sidekiq/exception_handler.rb +0 -27
  106. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  107. data/lib/sidekiq/extensions/active_record.rb +0 -43
  108. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  109. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  110. data/lib/sidekiq/worker.rb +0 -311
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,28 @@
3
3
  require "sidekiq"
4
4
 
5
5
  require "zlib"
6
+ require "set"
6
7
  require "base64"
7
8
 
9
+ require "sidekiq/metrics/query"
10
+
11
+ #
12
+ # Sidekiq's Data API provides a Ruby object model on top
13
+ # of Sidekiq's runtime data in Redis. This API should never
14
+ # be used within application code for business logic.
15
+ #
16
+ # The Sidekiq server process never uses this API: all data
17
+ # manipulation is done directly for performance reasons to
18
+ # ensure we are using Redis as efficiently as possible at
19
+ # every callsite.
20
+ #
21
+
8
22
  module Sidekiq
23
+ # Retrieve runtime statistics from Redis regarding
24
+ # this Sidekiq cluster.
25
+ #
26
+ # stat = Sidekiq::Stats.new
27
+ # stat.processed
9
28
  class Stats
10
29
  def initialize
11
30
  fetch_stats_fast!
@@ -48,20 +67,32 @@ module Sidekiq
48
67
  end
49
68
 
50
69
  def queues
51
- Sidekiq::Stats::Queues.new.lengths
70
+ Sidekiq.redis do |conn|
71
+ queues = conn.sscan("queues").to_a
72
+
73
+ lengths = conn.pipelined { |pipeline|
74
+ queues.each do |queue|
75
+ pipeline.llen("queue:#{queue}")
76
+ end
77
+ }
78
+
79
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
80
+ array_of_arrays.to_h
81
+ end
52
82
  end
53
83
 
54
84
  # O(1) redis calls
85
+ # @api private
55
86
  def fetch_stats_fast!
56
87
  pipe1_res = Sidekiq.redis { |conn|
57
- conn.pipelined do
58
- conn.get("stat:processed")
59
- conn.get("stat:failed")
60
- conn.zcard("schedule")
61
- conn.zcard("retry")
62
- conn.zcard("dead")
63
- conn.scard("processes")
64
- conn.lrange("queue:default", -1, -1)
88
+ conn.pipelined do |pipeline|
89
+ pipeline.get("stat:processed")
90
+ pipeline.get("stat:failed")
91
+ pipeline.zcard("schedule")
92
+ pipeline.zcard("retry")
93
+ pipeline.zcard("dead")
94
+ pipeline.scard("processes")
95
+ pipeline.lrange("queue:default", -1, -1)
65
96
  end
66
97
  }
67
98
 
@@ -91,36 +122,39 @@ module Sidekiq
91
122
  end
92
123
 
93
124
  # O(number of processes + number of queues) redis calls
125
+ # @api private
94
126
  def fetch_stats_slow!
95
127
  processes = Sidekiq.redis { |conn|
96
- conn.sscan_each("processes").to_a
128
+ conn.sscan("processes").to_a
97
129
  }
98
130
 
99
131
  queues = Sidekiq.redis { |conn|
100
- conn.sscan_each("queues").to_a
132
+ conn.sscan("queues").to_a
101
133
  }
102
134
 
103
135
  pipe2_res = Sidekiq.redis { |conn|
104
- conn.pipelined do
105
- processes.each { |key| conn.hget(key, "busy") }
106
- queues.each { |queue| conn.llen("queue:#{queue}") }
136
+ conn.pipelined do |pipeline|
137
+ processes.each { |key| pipeline.hget(key, "busy") }
138
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
107
139
  end
108
140
  }
109
141
 
110
142
  s = processes.size
111
143
  workers_size = pipe2_res[0...s].sum(&:to_i)
112
- enqueued = pipe2_res[s..-1].sum(&:to_i)
144
+ enqueued = pipe2_res[s..].sum(&:to_i)
113
145
 
114
146
  @stats[:workers_size] = workers_size
115
147
  @stats[:enqueued] = enqueued
116
148
  @stats
117
149
  end
118
150
 
151
+ # @api private
119
152
  def fetch_stats!
120
153
  fetch_stats_fast!
121
154
  fetch_stats_slow!
122
155
  end
123
156
 
157
+ # @api private
124
158
  def reset(*stats)
125
159
  all = %w[failed processed]
126
160
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +176,10 @@ module Sidekiq
142
176
  @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
143
177
  end
144
178
 
145
- class Queues
146
- def lengths
147
- Sidekiq.redis do |conn|
148
- queues = conn.sscan_each("queues").to_a
149
-
150
- lengths = conn.pipelined {
151
- queues.each do |queue|
152
- conn.llen("queue:#{queue}")
153
- end
154
- }
155
-
156
- array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
157
- array_of_arrays.to_h
158
- end
159
- end
160
- end
161
-
162
179
  class History
163
- def initialize(days_previous, start_date = nil)
180
+ def initialize(days_previous, start_date = nil, pool: nil)
181
+ # we only store five years of data in Redis
182
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
164
183
  @days_previous = days_previous
165
184
  @start_date = start_date || Time.now.utc.to_date
166
185
  end
@@ -183,15 +202,10 @@ module Sidekiq
183
202
 
184
203
  keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
185
204
 
186
- begin
187
- Sidekiq.redis do |conn|
188
- conn.mget(keys).each_with_index do |value, idx|
189
- stat_hash[dates[idx]] = value ? value.to_i : 0
190
- end
205
+ Sidekiq.redis do |conn|
206
+ conn.mget(keys).each_with_index do |value, idx|
207
+ stat_hash[dates[idx]] = value ? value.to_i : 0
191
208
  end
192
- rescue Redis::CommandError
193
- # mget will trigger a CROSSSLOT error when run against a Cluster
194
- # TODO Someone want to add Cluster support?
195
209
  end
196
210
 
197
211
  stat_hash
@@ -200,9 +214,10 @@ module Sidekiq
200
214
  end
201
215
 
202
216
  ##
203
- # Encapsulates a queue within Sidekiq.
217
+ # Represents a queue within Sidekiq.
204
218
  # Allows enumeration of all jobs within the queue
205
- # and deletion of jobs.
219
+ # and deletion of jobs. NB: this queue data is real-time
220
+ # and is changing within Redis moment by moment.
206
221
  #
207
222
  # queue = Sidekiq::Queue.new("mailer")
208
223
  # queue.each do |job|
@@ -210,29 +225,34 @@ module Sidekiq
210
225
  # job.args # => [1, 2, 3]
211
226
  # job.delete if job.jid == 'abcdef1234567890'
212
227
  # end
213
- #
214
228
  class Queue
215
229
  include Enumerable
216
230
 
217
231
  ##
218
- # Return all known queues within Redis.
232
+ # Fetch all known queues within Redis.
219
233
  #
234
+ # @return [Array<Sidekiq::Queue>]
220
235
  def self.all
221
- Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
236
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
222
237
  end
223
238
 
224
239
  attr_reader :name
225
240
 
241
+ # @param name [String] the name of the queue
226
242
  def initialize(name = "default")
227
243
  @name = name.to_s
228
244
  @rname = "queue:#{name}"
229
245
  end
230
246
 
247
+ # The current size of the queue within Redis.
248
+ # This value is real-time and can change between calls.
249
+ #
250
+ # @return [Integer] the size
231
251
  def size
232
252
  Sidekiq.redis { |con| con.llen(@rname) }
233
253
  end
234
254
 
235
- # Sidekiq Pro overrides this
255
+ # @return [Boolean] if the queue is currently paused
236
256
  def paused?
237
257
  false
238
258
  end
@@ -241,7 +261,7 @@ module Sidekiq
241
261
  # Calculates this queue's latency, the difference in seconds since the oldest
242
262
  # job in the queue was enqueued.
243
263
  #
244
- # @return Float
264
+ # @return [Float] in seconds
245
265
  def latency
246
266
  entry = Sidekiq.redis { |conn|
247
267
  conn.lrange(@rname, -1, -1)
@@ -277,34 +297,54 @@ module Sidekiq
277
297
  ##
278
298
  # Find the job with the given JID within this queue.
279
299
  #
280
- # This is a slow, inefficient operation. Do not use under
300
+ # This is a *slow, inefficient* operation. Do not use under
281
301
  # normal conditions.
302
+ #
303
+ # @param jid [String] the job_id to look for
304
+ # @return [Sidekiq::JobRecord]
305
+ # @return [nil] if not found
282
306
  def find_job(jid)
283
307
  detect { |j| j.jid == jid }
284
308
  end
285
309
 
310
+ # delete all jobs within this queue
311
+ # @return [Boolean] true
286
312
  def clear
287
313
  Sidekiq.redis do |conn|
288
- conn.multi do
289
- conn.unlink(@rname)
290
- conn.srem("queues", name)
314
+ conn.multi do |transaction|
315
+ transaction.unlink(@rname)
316
+ transaction.srem("queues", [name])
291
317
  end
292
318
  end
319
+ true
293
320
  end
294
321
  alias_method :💣, :clear
322
+
323
+ # :nodoc:
324
+ # @api private
325
+ def as_json(options = nil)
326
+ {name: name} # 5336
327
+ end
295
328
  end
296
329
 
297
330
  ##
298
- # Encapsulates a pending job within a Sidekiq queue or
299
- # sorted set.
331
+ # Represents a pending job within a Sidekiq queue.
300
332
  #
301
333
  # The job should be considered immutable but may be
302
334
  # removed from the queue via JobRecord#delete.
303
- #
304
335
  class JobRecord
336
+ # the parsed Hash of job data
337
+ # @!attribute [r] Item
305
338
  attr_reader :item
339
+ # the underlying String in Redis
340
+ # @!attribute [r] Value
306
341
  attr_reader :value
342
+ # the queue associated with this job
343
+ # @!attribute [r] Queue
344
+ attr_reader :queue
307
345
 
346
+ # :nodoc:
347
+ # @api private
308
348
  def initialize(item, queue_name = nil)
309
349
  @args = nil
310
350
  @value = item
@@ -312,6 +352,8 @@ module Sidekiq
312
352
  @queue = queue_name || @item["queue"]
313
353
  end
314
354
 
355
+ # :nodoc:
356
+ # @api private
315
357
  def parse(item)
316
358
  Sidekiq.load_json(item)
317
359
  rescue JSON::ParserError
@@ -323,6 +365,8 @@ module Sidekiq
323
365
  {}
324
366
  end
325
367
 
368
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
369
+ # this class will be the ActiveJob adapter class rather than a specific job.
326
370
  def klass
327
371
  self["class"]
328
372
  end
@@ -330,12 +374,7 @@ module Sidekiq
330
374
  def display_class
331
375
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
332
376
  @klass ||= self["display_class"] || begin
333
- case klass
334
- when /\ASidekiq::Extensions::Delayed/
335
- safe_load(args[0], klass) do |target, method, _|
336
- "#{target}.#{method}"
337
- end
338
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
377
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
339
378
  job_class = @item["wrapped"] || args[0]
340
379
  if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
341
380
  # MailerClass#mailer_method
@@ -351,28 +390,23 @@ module Sidekiq
351
390
 
352
391
  def display_args
353
392
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
354
- @display_args ||= case klass
355
- when /\ASidekiq::Extensions::Delayed/
356
- safe_load(args[0], args) do |_, _, arg|
357
- arg
358
- end
359
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
360
- job_args = self["wrapped"] ? args[0]["arguments"] : []
361
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
362
- # remove MailerClass, mailer_method and 'deliver_now'
363
- job_args.drop(3)
364
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
365
- # remove MailerClass, mailer_method and 'deliver_now'
366
- job_args.drop(3).first["args"]
367
- else
368
- job_args
369
- end
370
- else
371
- if self["encrypt"]
372
- # no point in showing 150+ bytes of random garbage
373
- args[-1] = "[encrypted data]"
374
- end
375
- args
393
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
394
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
395
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
396
+ # remove MailerClass, mailer_method and 'deliver_now'
397
+ job_args.drop(3)
398
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
399
+ # remove MailerClass, mailer_method and 'deliver_now'
400
+ job_args.drop(3).first["args"]
401
+ else
402
+ job_args
403
+ end
404
+ else
405
+ if self["encrypt"]
406
+ # no point in showing 150+ bytes of random garbage
407
+ args[-1] = "[encrypted data]"
408
+ end
409
+ args
376
410
  end
377
411
  end
378
412
 
@@ -406,15 +440,12 @@ module Sidekiq
406
440
  end
407
441
  end
408
442
 
409
- attr_reader :queue
410
-
411
443
  def latency
412
444
  now = Time.now.to_f
413
445
  now - (@item["enqueued_at"] || @item["created_at"] || now)
414
446
  end
415
447
 
416
- ##
417
- # Remove this job from the queue.
448
+ # Remove this job from the queue
418
449
  def delete
419
450
  count = Sidekiq.redis { |conn|
420
451
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -422,6 +453,7 @@ module Sidekiq
422
453
  count != 0
423
454
  end
424
455
 
456
+ # Access arbitrary attributes within the job hash
425
457
  def [](name)
426
458
  # nil will happen if the JSON fails to parse.
427
459
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -431,47 +463,35 @@ module Sidekiq
431
463
 
432
464
  private
433
465
 
434
- def safe_load(content, default)
435
- yield(*YAML.load(content))
436
- rescue => ex
437
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
438
- # memory yet so the YAML can't be loaded.
439
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
440
- default
441
- end
442
-
443
466
  def uncompress_backtrace(backtrace)
444
- if backtrace.is_a?(Array)
445
- # Handle old jobs with raw Array backtrace format
446
- backtrace
447
- else
448
- decoded = Base64.decode64(backtrace)
449
- uncompressed = Zlib::Inflate.inflate(decoded)
450
- begin
451
- Sidekiq.load_json(uncompressed)
452
- rescue
453
- # Handle old jobs with marshalled backtrace format
454
- # TODO Remove in 7.x
455
- Marshal.load(uncompressed)
456
- end
457
- end
467
+ decoded = Base64.decode64(backtrace)
468
+ uncompressed = Zlib::Inflate.inflate(decoded)
469
+ Sidekiq.load_json(uncompressed)
458
470
  end
459
471
  end
460
472
 
473
+ # Represents a job within a Redis sorted set where the score
474
+ # represents a timestamp associated with the job. This timestamp
475
+ # could be the scheduled time for it to run (e.g. scheduled set),
476
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
461
477
  class SortedEntry < JobRecord
462
478
  attr_reader :score
463
479
  attr_reader :parent
464
480
 
481
+ # :nodoc:
482
+ # @api private
465
483
  def initialize(parent, score, item)
466
484
  super(item)
467
- @score = score
485
+ @score = Float(score)
468
486
  @parent = parent
469
487
  end
470
488
 
489
+ # The timestamp associated with this entry
471
490
  def at
472
491
  Time.at(score).utc
473
492
  end
474
493
 
494
+ # remove this entry from the sorted set
475
495
  def delete
476
496
  if @value
477
497
  @parent.delete_by_value(@parent.name, @value)
@@ -480,12 +500,17 @@ module Sidekiq
480
500
  end
481
501
  end
482
502
 
503
+ # Change the scheduled time for this job.
504
+ #
505
+ # @param at [Time] the new timestamp for this job
483
506
  def reschedule(at)
484
507
  Sidekiq.redis do |conn|
485
508
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
486
509
  end
487
510
  end
488
511
 
512
+ # Enqueue this job from the scheduled or dead set so it will
513
+ # be executed at some point in the near future.
489
514
  def add_to_queue
490
515
  remove_job do |message|
491
516
  msg = Sidekiq.load_json(message)
@@ -493,6 +518,8 @@ module Sidekiq
493
518
  end
494
519
  end
495
520
 
521
+ # enqueue this job from the retry set so it will be executed
522
+ # at some point in the near future.
496
523
  def retry
497
524
  remove_job do |message|
498
525
  msg = Sidekiq.load_json(message)
@@ -501,8 +528,7 @@ module Sidekiq
501
528
  end
502
529
  end
503
530
 
504
- ##
505
- # Place job in the dead set
531
+ # Move this job from its current set into the Dead set.
506
532
  def kill
507
533
  remove_job do |message|
508
534
  DeadSet.new.kill(message)
@@ -517,9 +543,9 @@ module Sidekiq
517
543
 
518
544
  def remove_job
519
545
  Sidekiq.redis do |conn|
520
- results = conn.multi {
521
- conn.zrangebyscore(parent.name, score, score)
522
- conn.zremrangebyscore(parent.name, score, score)
546
+ results = conn.multi { |transaction|
547
+ transaction.zrangebyscore(parent.name, score, score)
548
+ transaction.zremrangebyscore(parent.name, score, score)
523
549
  }.first
524
550
 
525
551
  if results.size == 1
@@ -540,9 +566,9 @@ module Sidekiq
540
566
  yield msg if msg
541
567
 
542
568
  # push the rest back onto the sorted set
543
- conn.multi do
569
+ conn.multi do |transaction|
544
570
  nonmatched.each do |message|
545
- conn.zadd(parent.name, score.to_f.to_s, message)
571
+ transaction.zadd(parent.name, score.to_f.to_s, message)
546
572
  end
547
573
  end
548
574
  end
@@ -550,43 +576,69 @@ module Sidekiq
550
576
  end
551
577
  end
552
578
 
579
+ # Base class for all sorted sets within Sidekiq.
553
580
  class SortedSet
554
581
  include Enumerable
555
582
 
583
+ # Redis key of the set
584
+ # @!attribute [r] Name
556
585
  attr_reader :name
557
586
 
587
+ # :nodoc:
588
+ # @api private
558
589
  def initialize(name)
559
590
  @name = name
560
591
  @_size = size
561
592
  end
562
593
 
594
+ # real-time size of the set, will change
563
595
  def size
564
596
  Sidekiq.redis { |c| c.zcard(name) }
565
597
  end
566
598
 
599
+ # Scan through each element of the sorted set, yielding each to the supplied block.
600
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
601
+ #
602
+ # @param match [String] a snippet or regexp to filter matches.
603
+ # @param count [Integer] number of elements to retrieve at a time, default 100
604
+ # @yieldparam [Sidekiq::SortedEntry] each entry
567
605
  def scan(match, count = 100)
568
606
  return to_enum(:scan, match, count) unless block_given?
569
607
 
570
608
  match = "*#{match}*" unless match.include?("*")
571
609
  Sidekiq.redis do |conn|
572
- conn.zscan_each(name, match: match, count: count) do |entry, score|
610
+ conn.zscan(name, match: match, count: count) do |entry, score|
573
611
  yield SortedEntry.new(self, score, entry)
574
612
  end
575
613
  end
576
614
  end
577
615
 
616
+ # @return [Boolean] always true
578
617
  def clear
579
618
  Sidekiq.redis do |conn|
580
619
  conn.unlink(name)
581
620
  end
621
+ true
582
622
  end
583
623
  alias_method :💣, :clear
624
+
625
+ # :nodoc:
626
+ # @api private
627
+ def as_json(options = nil)
628
+ {name: name} # 5336
629
+ end
584
630
  end
585
631
 
632
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
633
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
634
+ # e.g. Batches.
586
635
  class JobSet < SortedSet
587
- def schedule(timestamp, message)
636
+ # Add a job with the associated timestamp to this set.
637
+ # @param timestamp [Time] the score for the job
638
+ # @param job [Hash] the job data
639
+ def schedule(timestamp, job)
588
640
  Sidekiq.redis do |conn|
589
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
641
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
590
642
  end
591
643
  end
592
644
 
@@ -600,7 +652,7 @@ module Sidekiq
600
652
  range_start = page * page_size + offset_size
601
653
  range_end = range_start + page_size - 1
602
654
  elements = Sidekiq.redis { |conn|
603
- conn.zrange name, range_start, range_end, with_scores: true
655
+ conn.zrange name, range_start, range_end, withscores: true
604
656
  }
605
657
  break if elements.empty?
606
658
  page -= 1
@@ -614,6 +666,10 @@ module Sidekiq
614
666
  ##
615
667
  # Fetch jobs that match a given time or Range. Job ID is an
616
668
  # optional second argument.
669
+ #
670
+ # @param score [Time,Range] a specific timestamp or range
671
+ # @param jid [String, optional] find a specific JID within the score
672
+ # @return [Array<SortedEntry>] any results found, can be empty
617
673
  def fetch(score, jid = nil)
618
674
  begin_score, end_score =
619
675
  if score.is_a?(Range)
@@ -623,7 +679,7 @@ module Sidekiq
623
679
  end
624
680
 
625
681
  elements = Sidekiq.redis { |conn|
626
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
682
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
627
683
  }
628
684
 
629
685
  elements.each_with_object([]) do |element, result|
@@ -635,10 +691,13 @@ module Sidekiq
635
691
 
636
692
  ##
637
693
  # Find the job with the given JID within this sorted set.
638
- # This is a slower O(n) operation. Do not use for app logic.
694
+ # *This is a slow O(n) operation*. Do not use for app logic.
695
+ #
696
+ # @param jid [String] the job identifier
697
+ # @return [SortedEntry] the record or nil
639
698
  def find_job(jid)
640
699
  Sidekiq.redis do |conn|
641
- conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
700
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
642
701
  job = JSON.parse(entry)
643
702
  matched = job["jid"] == jid
644
703
  return SortedEntry.new(self, score, entry) if matched
@@ -647,6 +706,8 @@ module Sidekiq
647
706
  nil
648
707
  end
649
708
 
709
+ # :nodoc:
710
+ # @api private
650
711
  def delete_by_value(name, value)
651
712
  Sidekiq.redis do |conn|
652
713
  ret = conn.zrem(name, value)
@@ -655,6 +716,8 @@ module Sidekiq
655
716
  end
656
717
  end
657
718
 
719
+ # :nodoc:
720
+ # @api private
658
721
  def delete_by_jid(score, jid)
659
722
  Sidekiq.redis do |conn|
660
723
  elements = conn.zrangebyscore(name, score, score)
@@ -675,17 +738,13 @@ module Sidekiq
675
738
  end
676
739
 
677
740
  ##
678
- # Allows enumeration of scheduled jobs within Sidekiq.
741
+ # The set of scheduled jobs within Sidekiq.
679
742
  # Based on this, you can search/filter for jobs. Here's an
680
- # example where I'm selecting all jobs of a certain type
681
- # and deleting them from the schedule queue.
743
+ # example where I'm selecting jobs based on some complex logic
744
+ # and deleting them from the scheduled set.
745
+ #
746
+ # See the API wiki page for usage notes and examples.
682
747
  #
683
- # r = Sidekiq::ScheduledSet.new
684
- # r.select do |scheduled|
685
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
686
- # scheduled.args[0] == 'User' &&
687
- # scheduled.args[1] == 'setup_new_subscriber'
688
- # end.map(&:delete)
689
748
  class ScheduledSet < JobSet
690
749
  def initialize
691
750
  super "schedule"
@@ -693,46 +752,48 @@ module Sidekiq
693
752
  end
694
753
 
695
754
  ##
696
- # Allows enumeration of retries within Sidekiq.
755
+ # The set of retries within Sidekiq.
697
756
  # Based on this, you can search/filter for jobs. Here's an
698
757
  # example where I'm selecting all jobs of a certain type
699
758
  # and deleting them from the retry queue.
700
759
  #
701
- # r = Sidekiq::RetrySet.new
702
- # r.select do |retri|
703
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
704
- # retri.args[0] == 'User' &&
705
- # retri.args[1] == 'setup_new_subscriber'
706
- # end.map(&:delete)
760
+ # See the API wiki page for usage notes and examples.
761
+ #
707
762
  class RetrySet < JobSet
708
763
  def initialize
709
764
  super "retry"
710
765
  end
711
766
 
767
+ # Enqueues all jobs pending within the retry set.
712
768
  def retry_all
713
769
  each(&:retry) while size > 0
714
770
  end
715
771
 
772
+ # Kills all jobs pending within the retry set.
716
773
  def kill_all
717
774
  each(&:kill) while size > 0
718
775
  end
719
776
  end
720
777
 
721
778
  ##
722
- # Allows enumeration of dead jobs within Sidekiq.
779
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
780
+ # their retries and are held in this set pending some sort of manual
781
+ # fix. They will be removed after 6 months (dead_timeout) if not.
723
782
  #
724
783
  class DeadSet < JobSet
725
784
  def initialize
726
785
  super "dead"
727
786
  end
728
787
 
788
+ # Add the given job to the Dead set.
789
+ # @param message [String] the job data as JSON
729
790
  def kill(message, opts = {})
730
791
  now = Time.now.to_f
731
792
  Sidekiq.redis do |conn|
732
- conn.multi do
733
- conn.zadd(name, now.to_s, message)
734
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
735
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
793
+ conn.multi do |transaction|
794
+ transaction.zadd(name, now.to_s, message)
795
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
796
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
736
797
  end
737
798
  end
738
799
 
@@ -740,24 +801,17 @@ module Sidekiq
740
801
  job = Sidekiq.load_json(message)
741
802
  r = RuntimeError.new("Job killed by API")
742
803
  r.set_backtrace(caller)
743
- Sidekiq.death_handlers.each do |handle|
804
+ Sidekiq.default_configuration.death_handlers.each do |handle|
744
805
  handle.call(job, r)
745
806
  end
746
807
  end
747
808
  true
748
809
  end
749
810
 
811
+ # Enqueue all dead jobs
750
812
  def retry_all
751
813
  each(&:retry) while size > 0
752
814
  end
753
-
754
- def self.max_jobs
755
- Sidekiq.options[:dead_max_jobs]
756
- end
757
-
758
- def self.timeout
759
- Sidekiq.options[:dead_timeout_in_seconds]
760
- end
761
815
  end
762
816
 
763
817
  ##
@@ -765,24 +819,31 @@ module Sidekiq
765
819
  # right now. Each process sends a heartbeat to Redis every 5 seconds
766
820
  # so this set should be relatively accurate, barring network partitions.
767
821
  #
768
- # Yields a Sidekiq::Process.
822
+ # @yieldparam [Sidekiq::Process]
769
823
  #
770
824
  class ProcessSet
771
825
  include Enumerable
772
826
 
827
+ # :nodoc:
828
+ # @api private
773
829
  def initialize(clean_plz = true)
774
830
  cleanup if clean_plz
775
831
  end
776
832
 
777
833
  # Cleans up dead processes recorded in Redis.
778
834
  # Returns the number of processes cleaned.
835
+ # :nodoc:
836
+ # @api private
779
837
  def cleanup
838
+ # don't run cleanup more than once per minute
839
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
840
+
780
841
  count = 0
781
842
  Sidekiq.redis do |conn|
782
- procs = conn.sscan_each("processes").to_a.sort
783
- heartbeats = conn.pipelined {
843
+ procs = conn.sscan("processes").to_a
844
+ heartbeats = conn.pipelined { |pipeline|
784
845
  procs.each do |key|
785
- conn.hget(key, "info")
846
+ pipeline.hget(key, "info")
786
847
  end
787
848
  }
788
849
 
@@ -799,14 +860,14 @@ module Sidekiq
799
860
 
800
861
  def each
801
862
  result = Sidekiq.redis { |conn|
802
- procs = conn.sscan_each("processes").to_a.sort
863
+ procs = conn.sscan("processes").to_a.sort
803
864
 
804
865
  # We're making a tradeoff here between consuming more memory instead of
805
866
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
806
867
  # you'll be happier this way
807
- conn.pipelined do
868
+ conn.pipelined do |pipeline|
808
869
  procs.each do |key|
809
- conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
870
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
810
871
  end
811
872
  end
812
873
  }
@@ -819,10 +880,10 @@ module Sidekiq
819
880
 
820
881
  hash = Sidekiq.load_json(info)
821
882
  yield Process.new(hash.merge("busy" => busy.to_i,
822
- "beat" => at_s.to_f,
823
- "quiet" => quiet,
824
- "rss" => rss.to_i,
825
- "rtt_us" => rtt.to_i))
883
+ "beat" => at_s.to_f,
884
+ "quiet" => quiet,
885
+ "rss" => rss.to_i,
886
+ "rtt_us" => rtt.to_i))
826
887
  end
827
888
  end
828
889
 
@@ -830,6 +891,7 @@ module Sidekiq
830
891
  # based on current heartbeat. #each does that and ensures the set only
831
892
  # contains Sidekiq processes which have sent a heartbeat within the last
832
893
  # 60 seconds.
894
+ # @return [Integer] current number of registered Sidekiq processes
833
895
  def size
834
896
  Sidekiq.redis { |conn| conn.scard("processes") }
835
897
  end
@@ -837,10 +899,12 @@ module Sidekiq
837
899
  # Total number of threads available to execute jobs.
838
900
  # For Sidekiq Enterprise customers this number (in production) must be
839
901
  # less than or equal to your licensed concurrency.
902
+ # @return [Integer] the sum of process concurrency
840
903
  def total_concurrency
841
904
  sum { |x| x["concurrency"].to_i }
842
905
  end
843
906
 
907
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
844
908
  def total_rss_in_kb
845
909
  sum { |x| x["rss"].to_i }
846
910
  end
@@ -849,6 +913,8 @@ module Sidekiq
849
913
  # Returns the identity of the current cluster leader or "" if no leader.
850
914
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
851
915
  # or Sidekiq Pro.
916
+ # @return [String] Identity of cluster leader
917
+ # @return [String] empty string if no leader
852
918
  def leader
853
919
  @leader ||= begin
854
920
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -875,6 +941,8 @@ module Sidekiq
875
941
  # 'identity' => <unique string identifying the process>,
876
942
  # }
877
943
  class Process
944
+ # :nodoc:
945
+ # @api private
878
946
  def initialize(hash)
879
947
  @attribs = hash
880
948
  end
@@ -884,7 +952,7 @@ module Sidekiq
884
952
  end
885
953
 
886
954
  def labels
887
- Array(self["labels"])
955
+ self["labels"].to_a
888
956
  end
889
957
 
890
958
  def [](key)
@@ -899,18 +967,31 @@ module Sidekiq
899
967
  self["queues"]
900
968
  end
901
969
 
970
+ # Signal this process to stop processing new jobs.
971
+ # It will continue to execute jobs it has already fetched.
972
+ # This method is *asynchronous* and it can take 5-10
973
+ # seconds for the process to quiet.
902
974
  def quiet!
903
975
  signal("TSTP")
904
976
  end
905
977
 
978
+ # Signal this process to shutdown.
979
+ # It will shutdown within its configured :timeout value, default 25 seconds.
980
+ # This method is *asynchronous* and it can take 5-10
981
+ # seconds for the process to start shutting down.
906
982
  def stop!
907
983
  signal("TERM")
908
984
  end
909
985
 
986
+ # Signal this process to log backtraces for all threads.
987
+ # Useful if you have a frozen or deadlocked process which is
988
+ # still sending a heartbeat.
989
+ # This method is *asynchronous* and it can take 5-10 seconds.
910
990
  def dump_threads
911
991
  signal("TTIN")
912
992
  end
913
993
 
994
+ # @return [Boolean] true if this process is quiet or shutting down
914
995
  def stopping?
915
996
  self["quiet"] == "true"
916
997
  end
@@ -920,9 +1001,9 @@ module Sidekiq
920
1001
  def signal(sig)
921
1002
  key = "#{identity}-signals"
922
1003
  Sidekiq.redis do |c|
923
- c.multi do
924
- c.lpush(key, sig)
925
- c.expire(key, 60)
1004
+ c.multi do |transaction|
1005
+ transaction.lpush(key, sig)
1006
+ transaction.expire(key, 60)
926
1007
  end
927
1008
  end
928
1009
  end
@@ -953,24 +1034,24 @@ module Sidekiq
953
1034
 
954
1035
  def each(&block)
955
1036
  results = []
1037
+ procs = nil
1038
+ all_works = nil
1039
+
956
1040
  Sidekiq.redis do |conn|
957
- procs = conn.sscan_each("processes").to_a
958
- procs.sort.each do |key|
959
- valid, workers = conn.pipelined {
960
- conn.exists?(key)
961
- conn.hgetall("#{key}:workers")
962
- }
963
- next unless valid
964
- workers.each_pair do |tid, json|
965
- hsh = Sidekiq.load_json(json)
966
- p = hsh["payload"]
967
- # avoid breaking API, this is a side effect of the JSON optimization in #4316
968
- hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
969
- results << [key, tid, hsh]
1041
+ procs = conn.sscan("processes").to_a.sort
1042
+ all_works = conn.pipelined do |pipeline|
1043
+ procs.each do |key|
1044
+ pipeline.hgetall("#{key}:work")
970
1045
  end
971
1046
  end
972
1047
  end
973
1048
 
1049
+ procs.zip(all_works).each do |key, workers|
1050
+ workers.each_pair do |tid, json|
1051
+ results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
1052
+ end
1053
+ end
1054
+
974
1055
  results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
975
1056
  end
976
1057
 
@@ -982,13 +1063,13 @@ module Sidekiq
982
1063
  # which can easily get out of sync with crashy processes.
983
1064
  def size
984
1065
  Sidekiq.redis do |conn|
985
- procs = conn.sscan_each("processes").to_a
1066
+ procs = conn.sscan("processes").to_a
986
1067
  if procs.empty?
987
1068
  0
988
1069
  else
989
- conn.pipelined {
1070
+ conn.pipelined { |pipeline|
990
1071
  procs.each do |key|
991
- conn.hget(key, "busy")
1072
+ pipeline.hget(key, "busy")
992
1073
  end
993
1074
  }.sum(&:to_i)
994
1075
  end