sidekiq 6.4.0 → 7.0.8


Note: this version of sidekiq has been flagged as a potentially problematic release.

Files changed (113)
  1. checksums.yaml +4 -4
  2. data/Changes.md +187 -12
  3. data/README.md +44 -31
  4. data/bin/sidekiq +4 -9
  5. data/bin/sidekiqload +189 -117
  6. data/bin/sidekiqmon +4 -1
  7. data/lib/sidekiq/api.rb +304 -186
  8. data/lib/sidekiq/capsule.rb +127 -0
  9. data/lib/sidekiq/cli.rb +83 -80
  10. data/lib/sidekiq/client.rb +71 -44
  11. data/lib/sidekiq/component.rb +68 -0
  12. data/lib/sidekiq/config.rb +270 -0
  13. data/lib/sidekiq/deploy.rb +62 -0
  14. data/lib/sidekiq/embedded.rb +61 -0
  15. data/lib/sidekiq/fetch.rb +23 -24
  16. data/lib/sidekiq/job.rb +375 -10
  17. data/lib/sidekiq/job_logger.rb +16 -28
  18. data/lib/sidekiq/job_retry.rb +76 -54
  19. data/lib/sidekiq/job_util.rb +59 -19
  20. data/lib/sidekiq/launcher.rb +103 -95
  21. data/lib/sidekiq/logger.rb +9 -44
  22. data/lib/sidekiq/manager.rb +33 -32
  23. data/lib/sidekiq/metrics/query.rb +153 -0
  24. data/lib/sidekiq/metrics/shared.rb +95 -0
  25. data/lib/sidekiq/metrics/tracking.rb +136 -0
  26. data/lib/sidekiq/middleware/chain.rb +96 -51
  27. data/lib/sidekiq/middleware/current_attributes.rb +16 -17
  28. data/lib/sidekiq/middleware/i18n.rb +6 -4
  29. data/lib/sidekiq/middleware/modules.rb +21 -0
  30. data/lib/sidekiq/monitor.rb +17 -4
  31. data/lib/sidekiq/paginator.rb +17 -9
  32. data/lib/sidekiq/processor.rb +60 -60
  33. data/lib/sidekiq/rails.rb +12 -10
  34. data/lib/sidekiq/redis_client_adapter.rb +115 -0
  35. data/lib/sidekiq/redis_connection.rb +13 -82
  36. data/lib/sidekiq/ring_buffer.rb +29 -0
  37. data/lib/sidekiq/scheduled.rb +65 -37
  38. data/lib/sidekiq/testing/inline.rb +4 -4
  39. data/lib/sidekiq/testing.rb +41 -68
  40. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  41. data/lib/sidekiq/version.rb +2 -1
  42. data/lib/sidekiq/web/action.rb +3 -3
  43. data/lib/sidekiq/web/application.rb +40 -9
  44. data/lib/sidekiq/web/csrf_protection.rb +3 -3
  45. data/lib/sidekiq/web/helpers.rb +35 -21
  46. data/lib/sidekiq/web.rb +10 -17
  47. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  48. data/lib/sidekiq.rb +84 -206
  49. data/sidekiq.gemspec +21 -10
  50. data/web/assets/javascripts/application.js +76 -26
  51. data/web/assets/javascripts/base-charts.js +106 -0
  52. data/web/assets/javascripts/chart.min.js +13 -0
  53. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  54. data/web/assets/javascripts/dashboard-charts.js +166 -0
  55. data/web/assets/javascripts/dashboard.js +3 -240
  56. data/web/assets/javascripts/metrics.js +264 -0
  57. data/web/assets/stylesheets/application-dark.css +4 -0
  58. data/web/assets/stylesheets/application-rtl.css +2 -91
  59. data/web/assets/stylesheets/application.css +66 -297
  60. data/web/locales/ar.yml +70 -70
  61. data/web/locales/cs.yml +62 -62
  62. data/web/locales/da.yml +60 -53
  63. data/web/locales/de.yml +65 -65
  64. data/web/locales/el.yml +43 -24
  65. data/web/locales/en.yml +82 -69
  66. data/web/locales/es.yml +68 -68
  67. data/web/locales/fa.yml +65 -65
  68. data/web/locales/fr.yml +67 -67
  69. data/web/locales/he.yml +65 -64
  70. data/web/locales/hi.yml +59 -59
  71. data/web/locales/it.yml +53 -53
  72. data/web/locales/ja.yml +73 -68
  73. data/web/locales/ko.yml +52 -52
  74. data/web/locales/lt.yml +66 -66
  75. data/web/locales/nb.yml +61 -61
  76. data/web/locales/nl.yml +52 -52
  77. data/web/locales/pl.yml +45 -45
  78. data/web/locales/pt-br.yml +63 -55
  79. data/web/locales/pt.yml +51 -51
  80. data/web/locales/ru.yml +67 -66
  81. data/web/locales/sv.yml +53 -53
  82. data/web/locales/ta.yml +60 -60
  83. data/web/locales/uk.yml +62 -61
  84. data/web/locales/ur.yml +64 -64
  85. data/web/locales/vi.yml +67 -67
  86. data/web/locales/zh-cn.yml +43 -16
  87. data/web/locales/zh-tw.yml +42 -8
  88. data/web/views/_footer.erb +5 -2
  89. data/web/views/_job_info.erb +18 -2
  90. data/web/views/_metrics_period_select.erb +12 -0
  91. data/web/views/_nav.erb +1 -1
  92. data/web/views/_paging.erb +2 -0
  93. data/web/views/_poll_link.erb +1 -1
  94. data/web/views/_summary.erb +1 -1
  95. data/web/views/busy.erb +42 -26
  96. data/web/views/dashboard.erb +36 -4
  97. data/web/views/metrics.erb +82 -0
  98. data/web/views/metrics_for_job.erb +71 -0
  99. data/web/views/morgue.erb +5 -9
  100. data/web/views/queue.erb +15 -15
  101. data/web/views/queues.erb +3 -1
  102. data/web/views/retries.erb +5 -9
  103. data/web/views/scheduled.erb +12 -13
  104. metadata +64 -28
  105. data/lib/sidekiq/delay.rb +0 -43
  106. data/lib/sidekiq/exception_handler.rb +0 -27
  107. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  108. data/lib/sidekiq/extensions/active_record.rb +0 -43
  109. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  110. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  111. data/lib/sidekiq/util.rb +0 -108
  112. data/lib/sidekiq/worker.rb +0 -364
  113. /data/{LICENSE → LICENSE.txt} +0 -0
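The headline story in this file list is structural: the old extension plumbing (data/lib/sidekiq/delay.rb, data/lib/sidekiq/extensions/*, data/lib/sidekiq/util.rb, data/lib/sidekiq/worker.rb) is gone, while the 7.x internals arrive (capsule.rb, config.rb, component.rb, embedded.rb, deploy.rb, the metrics/* files and redis_client_adapter.rb). As a minimal, illustrative sketch (not part of this diff; the class, queue and argument are made up): existing 6.x job classes generally keep working because Sidekiq::Worker survives only as a compatibility alias for Sidekiq::Job (data/lib/sidekiq/worker_compatibility_alias.rb), so a 7.x-style job looks like this:

  class HardJob
    include Sidekiq::Job               # 7.x name; Sidekiq::Worker remains as an alias
    sidekiq_options queue: "default", retry: 5

    def perform(user_id)
      # slow work goes here
    end
  end

  HardJob.perform_async(123)           # push onto the queue immediately
  HardJob.perform_in(300, 123)         # push onto the scheduled set

The removal of the Delayed extensions is also why the display_class/display_args changes in the api.rb diff below drop the Sidekiq::Extensions::Delayed branches.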
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,28 @@
  require "sidekiq"

  require "zlib"
+ require "set"
  require "base64"

+ require "sidekiq/metrics/query"
+
+ #
+ # Sidekiq's Data API provides a Ruby object model on top
+ # of Sidekiq's runtime data in Redis. This API should never
+ # be used within application code for business logic.
+ #
+ # The Sidekiq server process never uses this API: all data
+ # manipulation is done directly for performance reasons to
+ # ensure we are using Redis as efficiently as possible at
+ # every callsite.
+ #
+
  module Sidekiq
+ # Retrieve runtime statistics from Redis regarding
+ # this Sidekiq cluster.
+ #
+ # stat = Sidekiq::Stats.new
+ # stat.processed
  class Stats
  def initialize
  fetch_stats_fast!
@@ -48,20 +67,32 @@ module Sidekiq
  end

  def queues
- Sidekiq::Stats::Queues.new.lengths
+ Sidekiq.redis do |conn|
+ queues = conn.sscan("queues").to_a
+
+ lengths = conn.pipelined { |pipeline|
+ queues.each do |queue|
+ pipeline.llen("queue:#{queue}")
+ end
+ }
+
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ array_of_arrays.to_h
+ end
  end

  # O(1) redis calls
+ # @api private
  def fetch_stats_fast!
  pipe1_res = Sidekiq.redis { |conn|
- conn.pipelined do
- conn.get("stat:processed")
- conn.get("stat:failed")
- conn.zcard("schedule")
- conn.zcard("retry")
- conn.zcard("dead")
- conn.scard("processes")
- conn.lrange("queue:default", -1, -1)
+ conn.pipelined do |pipeline|
+ pipeline.get("stat:processed")
+ pipeline.get("stat:failed")
+ pipeline.zcard("schedule")
+ pipeline.zcard("retry")
+ pipeline.zcard("dead")
+ pipeline.scard("processes")
+ pipeline.lrange("queue:default", -1, -1)
  end
  }

@@ -91,36 +122,39 @@ module Sidekiq
  end

  # O(number of processes + number of queues) redis calls
+ # @api private
  def fetch_stats_slow!
  processes = Sidekiq.redis { |conn|
- conn.sscan_each("processes").to_a
+ conn.sscan("processes").to_a
  }

  queues = Sidekiq.redis { |conn|
- conn.sscan_each("queues").to_a
+ conn.sscan("queues").to_a
  }

  pipe2_res = Sidekiq.redis { |conn|
- conn.pipelined do
- processes.each { |key| conn.hget(key, "busy") }
- queues.each { |queue| conn.llen("queue:#{queue}") }
+ conn.pipelined do |pipeline|
+ processes.each { |key| pipeline.hget(key, "busy") }
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
  end
  }

  s = processes.size
  workers_size = pipe2_res[0...s].sum(&:to_i)
- enqueued = pipe2_res[s..-1].sum(&:to_i)
+ enqueued = pipe2_res[s..].sum(&:to_i)

  @stats[:workers_size] = workers_size
  @stats[:enqueued] = enqueued
  @stats
  end

+ # @api private
  def fetch_stats!
  fetch_stats_fast!
  fetch_stats_slow!
  end

+ # @api private
  def reset(*stats)
  all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +176,8 @@ module Sidekiq
  @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
  end

- class Queues
- def lengths
- Sidekiq.redis do |conn|
- queues = conn.sscan_each("queues").to_a
-
- lengths = conn.pipelined {
- queues.each do |queue|
- conn.llen("queue:#{queue}")
- end
- }
-
- array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
- array_of_arrays.to_h
- end
- end
- end
-
  class History
- def initialize(days_previous, start_date = nil)
+ def initialize(days_previous, start_date = nil, pool: nil)
  # we only store five years of data in Redis
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
  @days_previous = days_previous
@@ -185,15 +202,10 @@ module Sidekiq

  keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }

- begin
- Sidekiq.redis do |conn|
- conn.mget(keys).each_with_index do |value, idx|
- stat_hash[dates[idx]] = value ? value.to_i : 0
- end
+ Sidekiq.redis do |conn|
+ conn.mget(keys).each_with_index do |value, idx|
+ stat_hash[dates[idx]] = value ? value.to_i : 0
  end
- rescue Redis::CommandError
- # mget will trigger a CROSSSLOT error when run against a Cluster
- # TODO Someone want to add Cluster support?
  end

  stat_hash
@@ -202,9 +214,10 @@ module Sidekiq
  end

  ##
- # Encapsulates a queue within Sidekiq.
+ # Represents a queue within Sidekiq.
  # Allows enumeration of all jobs within the queue
- # and deletion of jobs.
+ # and deletion of jobs. NB: this queue data is real-time
+ # and is changing within Redis moment by moment.
  #
  # queue = Sidekiq::Queue.new("mailer")
  # queue.each do |job|
@@ -212,29 +225,34 @@ module Sidekiq
  # job.args # => [1, 2, 3]
  # job.delete if job.jid == 'abcdef1234567890'
  # end
- #
  class Queue
  include Enumerable

  ##
- # Return all known queues within Redis.
+ # Fetch all known queues within Redis.
  #
+ # @return [Array<Sidekiq::Queue>]
  def self.all
- Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

+ # @param name [String] the name of the queue
  def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end

+ # The current size of the queue within Redis.
+ # This value is real-time and can change between calls.
+ #
+ # @return [Integer] the size
  def size
  Sidekiq.redis { |con| con.llen(@rname) }
  end

- # Sidekiq Pro overrides this
+ # @return [Boolean] if the queue is currently paused
  def paused?
  false
  end
@@ -243,7 +261,7 @@ module Sidekiq
  # Calculates this queue's latency, the difference in seconds since the oldest
  # job in the queue was enqueued.
  #
- # @return Float
+ # @return [Float] in seconds
  def latency
  entry = Sidekiq.redis { |conn|
  conn.lrange(@rname, -1, -1)
@@ -279,34 +297,54 @@ module Sidekiq
  ##
  # Find the job with the given JID within this queue.
  #
- # This is a slow, inefficient operation. Do not use under
+ # This is a *slow, inefficient* operation. Do not use under
  # normal conditions.
+ #
+ # @param jid [String] the job_id to look for
+ # @return [Sidekiq::JobRecord]
+ # @return [nil] if not found
  def find_job(jid)
  detect { |j| j.jid == jid }
  end

+ # delete all jobs within this queue
+ # @return [Boolean] true
  def clear
  Sidekiq.redis do |conn|
- conn.multi do
- conn.unlink(@rname)
- conn.srem("queues", name)
+ conn.multi do |transaction|
+ transaction.unlink(@rname)
+ transaction.srem("queues", [name])
  end
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

  ##
- # Encapsulates a pending job within a Sidekiq queue or
- # sorted set.
+ # Represents a pending job within a Sidekiq queue.
  #
  # The job should be considered immutable but may be
  # removed from the queue via JobRecord#delete.
- #
  class JobRecord
+ # the parsed Hash of job data
+ # @!attribute [r] Item
  attr_reader :item
+ # the underlying String in Redis
+ # @!attribute [r] Value
  attr_reader :value
+ # the queue associated with this job
+ # @!attribute [r] Queue
+ attr_reader :queue

+ # :nodoc:
+ # @api private
  def initialize(item, queue_name = nil)
  @args = nil
  @value = item
@@ -314,6 +352,8 @@ module Sidekiq
  @queue = queue_name || @item["queue"]
  end

+ # :nodoc:
+ # @api private
  def parse(item)
  Sidekiq.load_json(item)
  rescue JSON::ParserError
@@ -325,6 +365,8 @@ module Sidekiq
  {}
  end

+ # This is the job class which Sidekiq will execute. If using ActiveJob,
+ # this class will be the ActiveJob adapter class rather than a specific job.
  def klass
  self["class"]
  end
@@ -332,12 +374,7 @@ module Sidekiq
  def display_class
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
  @klass ||= self["display_class"] || begin
- case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], klass) do |target, method, _|
- "#{target}.#{method}"
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
  job_class = @item["wrapped"] || args[0]
  if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
@@ -353,28 +390,23 @@ module Sidekiq

  def display_args
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
- @display_args ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], args) do |_, _, arg|
- arg
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self["wrapped"] ? args[0]["arguments"] : []
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3)
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3).first["args"]
- else
- job_args
- end
- else
- if self["encrypt"]
- # no point in showing 150+ bytes of random garbage
- args[-1] = "[encrypted data]"
- end
- args
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first["args"]
+ else
+ job_args
+ end
+ else
+ if self["encrypt"]
+ # no point in showing 150+ bytes of random garbage
+ args[-1] = "[encrypted data]"
+ end
+ args
  end
  end

@@ -386,6 +418,10 @@ module Sidekiq
  self["jid"]
  end

+ def bid
+ self["bid"]
+ end
+
  def enqueued_at
  self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end
@@ -408,15 +444,12 @@ module Sidekiq
  end
  end

- attr_reader :queue
-
  def latency
  now = Time.now.to_f
  now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

- ##
- # Remove this job from the queue.
+ # Remove this job from the queue
  def delete
  count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -424,6 +457,7 @@ module Sidekiq
  count != 0
  end

+ # Access arbitrary attributes within the job hash
  def [](name)
  # nil will happen if the JSON fails to parse.
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -433,47 +467,35 @@ module Sidekiq

  private

- def safe_load(content, default)
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
- default
- end
-
  def uncompress_backtrace(backtrace)
- if backtrace.is_a?(Array)
- # Handle old jobs with raw Array backtrace format
- backtrace
- else
- decoded = Base64.decode64(backtrace)
- uncompressed = Zlib::Inflate.inflate(decoded)
- begin
- Sidekiq.load_json(uncompressed)
- rescue
- # Handle old jobs with marshalled backtrace format
- # TODO Remove in 7.x
- Marshal.load(uncompressed)
- end
- end
+ decoded = Base64.decode64(backtrace)
+ uncompressed = Zlib::Inflate.inflate(decoded)
+ Sidekiq.load_json(uncompressed)
  end
  end

+ # Represents a job within a Redis sorted set where the score
+ # represents a timestamp associated with the job. This timestamp
+ # could be the scheduled time for it to run (e.g. scheduled set),
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
  class SortedEntry < JobRecord
  attr_reader :score
  attr_reader :parent

+ # :nodoc:
+ # @api private
  def initialize(parent, score, item)
  super(item)
- @score = score
+ @score = Float(score)
  @parent = parent
  end

+ # The timestamp associated with this entry
  def at
  Time.at(score).utc
  end

+ # remove this entry from the sorted set
  def delete
  if @value
  @parent.delete_by_value(@parent.name, @value)
@@ -482,12 +504,17 @@ module Sidekiq
  end
  end

+ # Change the scheduled time for this job.
+ #
+ # @param at [Time] the new timestamp for this job
  def reschedule(at)
  Sidekiq.redis do |conn|
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
  end
  end

+ # Enqueue this job from the scheduled or dead set so it will
+ # be executed at some point in the near future.
  def add_to_queue
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -495,6 +522,8 @@ module Sidekiq
  end
  end

+ # enqueue this job from the retry set so it will be executed
+ # at some point in the near future.
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -503,8 +532,7 @@ module Sidekiq
  end
  end

- ##
- # Place job in the dead set
+ # Move this job from its current set into the Dead set.
  def kill
  remove_job do |message|
  DeadSet.new.kill(message)
@@ -519,9 +547,9 @@ module Sidekiq

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi {
- conn.zrangebyscore(parent.name, score, score)
- conn.zremrangebyscore(parent.name, score, score)
+ results = conn.multi { |transaction|
+ transaction.zrangebyscore(parent.name, score, score)
+ transaction.zremrangebyscore(parent.name, score, score)
  }.first

  if results.size == 1
@@ -542,9 +570,9 @@ module Sidekiq
  yield msg if msg

  # push the rest back onto the sorted set
- conn.multi do
+ conn.multi do |transaction|
  nonmatched.each do |message|
- conn.zadd(parent.name, score.to_f.to_s, message)
+ transaction.zadd(parent.name, score.to_f.to_s, message)
  end
  end
  end
@@ -552,43 +580,69 @@ module Sidekiq
  end
  end

+ # Base class for all sorted sets within Sidekiq.
  class SortedSet
  include Enumerable

+ # Redis key of the set
+ # @!attribute [r] Name
  attr_reader :name

+ # :nodoc:
+ # @api private
  def initialize(name)
  @name = name
  @_size = size
  end

+ # real-time size of the set, will change
  def size
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ # Scan through each element of the sorted set, yielding each to the supplied block.
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+ #
+ # @param match [String] a snippet or regexp to filter matches.
+ # @param count [Integer] number of elements to retrieve at a time, default 100
+ # @yieldparam [Sidekiq::SortedEntry] each entry
  def scan(match, count = 100)
  return to_enum(:scan, match, count) unless block_given?

  match = "*#{match}*" unless match.include?("*")
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: match, count: count) do |entry, score|
+ conn.zscan(name, match: match, count: count) do |entry, score|
  yield SortedEntry.new(self, score, entry)
  end
  end
  end

+ # @return [Boolean] always true
  def clear
  Sidekiq.redis do |conn|
  conn.unlink(name)
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+ # e.g. Batches.
  class JobSet < SortedSet
- def schedule(timestamp, message)
+ # Add a job with the associated timestamp to this set.
+ # @param timestamp [Time] the score for the job
+ # @param job [Hash] the job data
+ def schedule(timestamp, job)
  Sidekiq.redis do |conn|
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
  end
  end

@@ -602,7 +656,7 @@ module Sidekiq
  range_start = page * page_size + offset_size
  range_end = range_start + page_size - 1
  elements = Sidekiq.redis { |conn|
- conn.zrange name, range_start, range_end, with_scores: true
+ conn.zrange name, range_start, range_end, withscores: true
  }
  break if elements.empty?
  page -= 1
@@ -616,6 +670,10 @@ module Sidekiq
  ##
  # Fetch jobs that match a given time or Range. Job ID is an
  # optional second argument.
+ #
+ # @param score [Time,Range] a specific timestamp or range
+ # @param jid [String, optional] find a specific JID within the score
+ # @return [Array<SortedEntry>] any results found, can be empty
  def fetch(score, jid = nil)
  begin_score, end_score =
  if score.is_a?(Range)
@@ -625,7 +683,7 @@ module Sidekiq
  end

  elements = Sidekiq.redis { |conn|
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
  }

  elements.each_with_object([]) do |element, result|
@@ -637,11 +695,14 @@ module Sidekiq

  ##
  # Find the job with the given JID within this sorted set.
- # This is a slower O(n) operation. Do not use for app logic.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [SortedEntry] the record or nil
  def find_job(jid)
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
- job = JSON.parse(entry)
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = Sidekiq.load_json(entry)
  matched = job["jid"] == jid
  return SortedEntry.new(self, score, entry) if matched
  end
@@ -649,6 +710,8 @@ module Sidekiq
  nil
  end

+ # :nodoc:
+ # @api private
  def delete_by_value(name, value)
  Sidekiq.redis do |conn|
  ret = conn.zrem(name, value)
@@ -657,6 +720,8 @@ module Sidekiq
  end
  end

+ # :nodoc:
+ # @api private
  def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
  elements = conn.zrangebyscore(name, score, score)
@@ -677,17 +742,13 @@ module Sidekiq
  end

  ##
- # Allows enumeration of scheduled jobs within Sidekiq.
+ # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the schedule queue.
+ # example where I'm selecting jobs based on some complex logic
+ # and deleting them from the scheduled set.
+ #
+ # See the API wiki page for usage notes and examples.
  #
- # r = Sidekiq::ScheduledSet.new
- # r.select do |scheduled|
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # scheduled.args[0] == 'User' &&
- # scheduled.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
  super "schedule"
@@ -695,46 +756,48 @@ module Sidekiq
  end

  ##
- # Allows enumeration of retries within Sidekiq.
+ # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
  #
- # r = Sidekiq::RetrySet.new
- # r.select do |retri|
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # retri.args[0] == 'User' &&
- # retri.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
+ # See the API wiki page for usage notes and examples.
+ #
  class RetrySet < JobSet
  def initialize
  super "retry"
  end

+ # Enqueues all jobs pending within the retry set.
  def retry_all
  each(&:retry) while size > 0
  end

+ # Kills all jobs pending within the retry set.
  def kill_all
  each(&:kill) while size > 0
  end
  end

  ##
- # Allows enumeration of dead jobs within Sidekiq.
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+ # their retries and are helding in this set pending some sort of manual
+ # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
  def initialize
  super "dead"
  end

+ # Add the given job to the Dead set.
+ # @param message [String] the job data as JSON
  def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
- conn.multi do
- conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+ conn.multi do |transaction|
+ transaction.zadd(name, now.to_s, message)
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
  end
  end

@@ -742,24 +805,17 @@ module Sidekiq
  job = Sidekiq.load_json(message)
  r = RuntimeError.new("Job killed by API")
  r.set_backtrace(caller)
- Sidekiq.death_handlers.each do |handle|
+ Sidekiq.default_configuration.death_handlers.each do |handle|
  handle.call(job, r)
  end
  end
  true
  end

+ # Enqueue all dead jobs
  def retry_all
  each(&:retry) while size > 0
  end
-
- def self.max_jobs
- Sidekiq.options[:dead_max_jobs]
- end
-
- def self.timeout
- Sidekiq.options[:dead_timeout_in_seconds]
- end
  end

  ##
@@ -767,24 +823,49 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
- # Yields a Sidekiq::Process.
+ # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
  include Enumerable

+ def self.[](identity)
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+ conn.multi { |transaction|
+ transaction.sismember("processes", identity)
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ }
+ }
+
+ return nil if exists == 0 || info.nil?
+
+ hash = Sidekiq.load_json(info)
+ Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => beat.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt_us.to_i))
+ end
+
+ # :nodoc:
+ # @api private
  def initialize(clean_plz = true)
  cleanup if clean_plz
  end

  # Cleans up dead processes recorded in Redis.
  # Returns the number of processes cleaned.
+ # :nodoc:
+ # @api private
  def cleanup
+ # dont run cleanup more than once per minute
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
  count = 0
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a.sort
- heartbeats = conn.pipelined {
+ procs = conn.sscan("processes").to_a
+ heartbeats = conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "info")
+ pipeline.hget(key, "info")
  end
  }

@@ -801,19 +882,19 @@ module Sidekiq

  def each
  result = Sidekiq.redis { |conn|
- procs = conn.sscan_each("processes").to_a.sort
+ procs = conn.sscan("processes").to_a.sort

  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- conn.pipelined do
+ conn.pipelined do |pipeline|
  procs.each do |key|
- conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
  end
  end
  }

- result.each do |info, busy, at_s, quiet, rss, rtt|
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
  # If a process is stopped between when we query Redis for `procs` and
  # when we query for `result`, we will have an item in `result` that is
  # composed of `nil` values.
@@ -821,10 +902,10 @@ module Sidekiq

  hash = Sidekiq.load_json(info)
  yield Process.new(hash.merge("busy" => busy.to_i,
- "beat" => at_s.to_f,
+ "beat" => beat.to_f,
  "quiet" => quiet,
  "rss" => rss.to_i,
- "rtt_us" => rtt.to_i))
+ "rtt_us" => rtt_us.to_i))
  end
  end

@@ -832,6 +913,7 @@ module Sidekiq
  # based on current heartbeat. #each does that and ensures the set only
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
+ # @return [Integer] current number of registered Sidekiq processes
  def size
  Sidekiq.redis { |conn| conn.scard("processes") }
  end
@@ -839,10 +921,12 @@ module Sidekiq
  # Total number of threads available to execute jobs.
  # For Sidekiq Enterprise customers this number (in production) must be
  # less than or equal to your licensed concurrency.
+ # @return [Integer] the sum of process concurrency
  def total_concurrency
  sum { |x| x["concurrency"].to_i }
  end

+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
  def total_rss_in_kb
  sum { |x| x["rss"].to_i }
  end
@@ -851,6 +935,8 @@ module Sidekiq
  # Returns the identity of the current cluster leader or "" if no leader.
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
  # or Sidekiq Pro.
+ # @return [String] Identity of cluster leader
+ # @return [String] empty string if no leader
  def leader
  @leader ||= begin
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -875,8 +961,11 @@ module Sidekiq
  # 'busy' => 10,
  # 'beat' => <last heartbeat>,
  # 'identity' => <unique string identifying the process>,
+ # 'embedded' => true,
  # }
  class Process
+ # :nodoc:
+ # @api private
  def initialize(hash)
  @attribs = hash
  end
@@ -886,7 +975,7 @@ module Sidekiq
  end

  def labels
- Array(self["labels"])
+ self["labels"].to_a
  end

  def [](key)
@@ -901,18 +990,47 @@ module Sidekiq
  self["queues"]
  end

+ def weights
+ self["weights"]
+ end
+
+ def version
+ self["version"]
+ end
+
+ def embedded?
+ self["embedded"]
+ end
+
+ # Signal this process to stop processing new jobs.
+ # It will continue to execute jobs it has already fetched.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to quiet.
  def quiet!
+ raise "Can't quiet an embedded process" if embedded?
+
  signal("TSTP")
  end

+ # Signal this process to shutdown.
+ # It will shutdown within its configured :timeout value, default 25 seconds.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to start shutting down.
  def stop!
+ raise "Can't stop an embedded process" if embedded?
+
  signal("TERM")
  end

+ # Signal this process to log backtraces for all threads.
+ # Useful if you have a frozen or deadlocked process which is
+ # still sending a heartbeat.
+ # This method is *asynchronous* and it can take 5-10 seconds.
  def dump_threads
  signal("TTIN")
  end

+ # @return [Boolean] true if this process is quiet or shutting down
  def stopping?
  self["quiet"] == "true"
  end
@@ -922,9 +1040,9 @@ module Sidekiq
  def signal(sig)
  key = "#{identity}-signals"
  Sidekiq.redis do |c|
- c.multi do
- c.lpush(key, sig)
- c.expire(key, 60)
+ c.multi do |transaction|
+ transaction.lpush(key, sig)
+ transaction.expire(key, 60)
  end
  end
  end
@@ -955,24 +1073,24 @@ module Sidekiq

  def each(&block)
  results = []
+ procs = nil
+ all_works = nil
+
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
- procs.sort.each do |key|
- valid, workers = conn.pipelined {
- conn.exists?(key)
- conn.hgetall("#{key}:workers")
- }
- next unless valid
- workers.each_pair do |tid, json|
- hsh = Sidekiq.load_json(json)
- p = hsh["payload"]
- # avoid breaking API, this is a side effect of the JSON optimization in #4316
- hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
- results << [key, tid, hsh]
+ procs = conn.sscan("processes").to_a.sort
+ all_works = conn.pipelined do |pipeline|
+ procs.each do |key|
+ pipeline.hgetall("#{key}:work")
  end
  end
  end

+ procs.zip(all_works).each do |key, workers|
+ workers.each_pair do |tid, json|
+ results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
+ end
+ end
+
  results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
  end

@@ -984,13 +1102,13 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
+ procs = conn.sscan("processes").to_a
  if procs.empty?
  0
  else
- conn.pipelined {
+ conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "busy")
+ pipeline.hget(key, "busy")
  end
  }.sum(&:to_i)
  end
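
For orientation, here is a minimal sketch (not part of the gem diff) of the reworked Data API shown above, assuming Sidekiq 7.x and a reachable Redis; the process identity string is a made-up placeholder:

  require "sidekiq/api"

  stats = Sidekiq::Stats.new
  stats.processed                       # lifetime processed counter
  stats.queues                          # {"default" => 12, ...}; the old Stats::Queues class is gone

  Sidekiq::Queue.all.each do |q|        # sscan replaces sscan_each throughout
    puts "#{q.name}: size=#{q.size} latency=#{q.latency}s"
  end

  Sidekiq::RetrySet.new.retry_all       # re-enqueue everything in the retry set
  Sidekiq::DeadSet.new.retry_all        # dead-set pruning limits now come from Sidekiq::Config::DEFAULTS

  # New in 7.x: look up a single process by identity instead of enumerating the set
  process = Sidekiq::ProcessSet["myhost:12345:abcdef123456"]
  process&.quiet!                       # raises if the process is embedded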