sidekiq 6.3.1 → 7.0.7

Potentially problematic release. This version of sidekiq might be problematic.

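This is a major-version upgrade (6.3.1 to 7.0.7), not a routine bump, so the application's Gemfile constraint usually has to be loosened as well. A minimal sketch of that change, with illustrative version constraints that are not taken from this diff:

    # Gemfile
    # before the upgrade
    # gem "sidekiq", "~> 6.3"

    # after the upgrade
    gem "sidekiq", "~> 7.0"

Then run `bundle update sidekiq` and review data/Changes.md (listed below) for the 7.x breaking changes before deploying.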
Files changed (118)
  1. checksums.yaml +4 -4
  2. data/Changes.md +205 -11
  3. data/LICENSE.txt +9 -0
  4. data/README.md +45 -32
  5. data/bin/sidekiq +4 -9
  6. data/bin/sidekiqload +189 -117
  7. data/bin/sidekiqmon +4 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +308 -188
  13. data/lib/sidekiq/capsule.rb +127 -0
  14. data/lib/sidekiq/cli.rb +85 -80
  15. data/lib/sidekiq/client.rb +74 -81
  16. data/lib/sidekiq/{util.rb → component.rb} +13 -40
  17. data/lib/sidekiq/config.rb +270 -0
  18. data/lib/sidekiq/deploy.rb +62 -0
  19. data/lib/sidekiq/embedded.rb +61 -0
  20. data/lib/sidekiq/fetch.rb +23 -24
  21. data/lib/sidekiq/job.rb +375 -10
  22. data/lib/sidekiq/job_logger.rb +16 -28
  23. data/lib/sidekiq/job_retry.rb +81 -57
  24. data/lib/sidekiq/job_util.rb +105 -0
  25. data/lib/sidekiq/launcher.rb +103 -95
  26. data/lib/sidekiq/logger.rb +9 -44
  27. data/lib/sidekiq/manager.rb +40 -41
  28. data/lib/sidekiq/metrics/query.rb +153 -0
  29. data/lib/sidekiq/metrics/shared.rb +95 -0
  30. data/lib/sidekiq/metrics/tracking.rb +136 -0
  31. data/lib/sidekiq/middleware/chain.rb +96 -51
  32. data/lib/sidekiq/middleware/current_attributes.rb +17 -13
  33. data/lib/sidekiq/middleware/i18n.rb +6 -4
  34. data/lib/sidekiq/middleware/modules.rb +21 -0
  35. data/lib/sidekiq/monitor.rb +17 -4
  36. data/lib/sidekiq/paginator.rb +17 -9
  37. data/lib/sidekiq/processor.rb +60 -60
  38. data/lib/sidekiq/rails.rb +12 -10
  39. data/lib/sidekiq/redis_client_adapter.rb +115 -0
  40. data/lib/sidekiq/redis_connection.rb +13 -82
  41. data/lib/sidekiq/ring_buffer.rb +29 -0
  42. data/lib/sidekiq/scheduled.rb +75 -37
  43. data/lib/sidekiq/testing/inline.rb +4 -4
  44. data/lib/sidekiq/testing.rb +41 -68
  45. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  46. data/lib/sidekiq/version.rb +2 -1
  47. data/lib/sidekiq/web/action.rb +3 -3
  48. data/lib/sidekiq/web/application.rb +45 -11
  49. data/lib/sidekiq/web/csrf_protection.rb +3 -3
  50. data/lib/sidekiq/web/helpers.rb +35 -21
  51. data/lib/sidekiq/web.rb +10 -17
  52. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  53. data/lib/sidekiq.rb +85 -202
  54. data/sidekiq.gemspec +20 -10
  55. data/web/assets/javascripts/application.js +76 -26
  56. data/web/assets/javascripts/base-charts.js +106 -0
  57. data/web/assets/javascripts/chart.min.js +13 -0
  58. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  59. data/web/assets/javascripts/dashboard-charts.js +166 -0
  60. data/web/assets/javascripts/dashboard.js +3 -240
  61. data/web/assets/javascripts/metrics.js +264 -0
  62. data/web/assets/stylesheets/application-dark.css +17 -17
  63. data/web/assets/stylesheets/application-rtl.css +2 -91
  64. data/web/assets/stylesheets/application.css +69 -302
  65. data/web/locales/ar.yml +70 -70
  66. data/web/locales/cs.yml +62 -62
  67. data/web/locales/da.yml +60 -53
  68. data/web/locales/de.yml +65 -65
  69. data/web/locales/el.yml +43 -24
  70. data/web/locales/en.yml +82 -69
  71. data/web/locales/es.yml +68 -68
  72. data/web/locales/fa.yml +65 -65
  73. data/web/locales/fr.yml +67 -67
  74. data/web/locales/he.yml +65 -64
  75. data/web/locales/hi.yml +59 -59
  76. data/web/locales/it.yml +53 -53
  77. data/web/locales/ja.yml +73 -68
  78. data/web/locales/ko.yml +52 -52
  79. data/web/locales/lt.yml +66 -66
  80. data/web/locales/nb.yml +61 -61
  81. data/web/locales/nl.yml +52 -52
  82. data/web/locales/pl.yml +45 -45
  83. data/web/locales/pt-br.yml +63 -55
  84. data/web/locales/pt.yml +51 -51
  85. data/web/locales/ru.yml +67 -66
  86. data/web/locales/sv.yml +53 -53
  87. data/web/locales/ta.yml +60 -60
  88. data/web/locales/uk.yml +62 -61
  89. data/web/locales/ur.yml +64 -64
  90. data/web/locales/vi.yml +67 -67
  91. data/web/locales/zh-cn.yml +43 -16
  92. data/web/locales/zh-tw.yml +42 -8
  93. data/web/views/_footer.erb +5 -2
  94. data/web/views/_job_info.erb +18 -2
  95. data/web/views/_metrics_period_select.erb +12 -0
  96. data/web/views/_nav.erb +1 -1
  97. data/web/views/_paging.erb +2 -0
  98. data/web/views/_poll_link.erb +1 -1
  99. data/web/views/_summary.erb +1 -1
  100. data/web/views/busy.erb +42 -26
  101. data/web/views/dashboard.erb +36 -4
  102. data/web/views/metrics.erb +82 -0
  103. data/web/views/metrics_for_job.erb +71 -0
  104. data/web/views/morgue.erb +5 -9
  105. data/web/views/queue.erb +15 -15
  106. data/web/views/queues.erb +3 -1
  107. data/web/views/retries.erb +5 -9
  108. data/web/views/scheduled.erb +12 -13
  109. metadata +68 -32
  110. data/LICENSE +0 -9
  111. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  112. data/lib/sidekiq/delay.rb +0 -41
  113. data/lib/sidekiq/exception_handler.rb +0 -27
  114. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  115. data/lib/sidekiq/extensions/active_record.rb +0 -43
  116. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  117. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  118. data/lib/sidekiq/worker.rb +0 -311
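One pattern worth noting before reading the api.rb diff below: Sidekiq 7 moves from the redis gem to a redis-client based adapter (see the new data/lib/sidekiq/redis_client_adapter.rb above), so pipelined and multi blocks now receive an explicit pipeline/transaction argument, and sscan_each/zscan_each become sscan/zscan. A minimal sketch of the calling-convention change, using an illustrative queue name:

    Sidekiq.redis do |conn|
      # Sidekiq 6.x style: commands were buffered on the outer connection
      # conn.pipelined do
      #   conn.llen("queue:default")
      #   conn.zcard("retry")
      # end

      # Sidekiq 7.x style: commands go through the yielded pipeline object
      conn.pipelined do |pipeline|
        pipeline.llen("queue:default")
        pipeline.zcard("retry")
      end
    end

The same pattern appears with conn.multi do |transaction| ... end throughout the diff.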
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,28 @@
  require "sidekiq"

  require "zlib"
+ require "set"
  require "base64"

+ require "sidekiq/metrics/query"
+
+ #
+ # Sidekiq's Data API provides a Ruby object model on top
+ # of Sidekiq's runtime data in Redis. This API should never
+ # be used within application code for business logic.
+ #
+ # The Sidekiq server process never uses this API: all data
+ # manipulation is done directly for performance reasons to
+ # ensure we are using Redis as efficiently as possible at
+ # every callsite.
+ #
+
  module Sidekiq
+ # Retrieve runtime statistics from Redis regarding
+ # this Sidekiq cluster.
+ #
+ # stat = Sidekiq::Stats.new
+ # stat.processed
  class Stats
  def initialize
  fetch_stats_fast!
@@ -48,20 +67,32 @@ module Sidekiq
  end

  def queues
- Sidekiq::Stats::Queues.new.lengths
+ Sidekiq.redis do |conn|
+ queues = conn.sscan("queues").to_a
+
+ lengths = conn.pipelined { |pipeline|
+ queues.each do |queue|
+ pipeline.llen("queue:#{queue}")
+ end
+ }
+
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ array_of_arrays.to_h
+ end
  end

  # O(1) redis calls
+ # @api private
  def fetch_stats_fast!
  pipe1_res = Sidekiq.redis { |conn|
- conn.pipelined do
- conn.get("stat:processed")
- conn.get("stat:failed")
- conn.zcard("schedule")
- conn.zcard("retry")
- conn.zcard("dead")
- conn.scard("processes")
- conn.lrange("queue:default", -1, -1)
+ conn.pipelined do |pipeline|
+ pipeline.get("stat:processed")
+ pipeline.get("stat:failed")
+ pipeline.zcard("schedule")
+ pipeline.zcard("retry")
+ pipeline.zcard("dead")
+ pipeline.scard("processes")
+ pipeline.lrange("queue:default", -1, -1)
  end
  }

@@ -91,36 +122,39 @@ module Sidekiq
  end

  # O(number of processes + number of queues) redis calls
+ # @api private
  def fetch_stats_slow!
  processes = Sidekiq.redis { |conn|
- conn.sscan_each("processes").to_a
+ conn.sscan("processes").to_a
  }

  queues = Sidekiq.redis { |conn|
- conn.sscan_each("queues").to_a
+ conn.sscan("queues").to_a
  }

  pipe2_res = Sidekiq.redis { |conn|
- conn.pipelined do
- processes.each { |key| conn.hget(key, "busy") }
- queues.each { |queue| conn.llen("queue:#{queue}") }
+ conn.pipelined do |pipeline|
+ processes.each { |key| pipeline.hget(key, "busy") }
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
  end
  }

  s = processes.size
  workers_size = pipe2_res[0...s].sum(&:to_i)
- enqueued = pipe2_res[s..-1].sum(&:to_i)
+ enqueued = pipe2_res[s..].sum(&:to_i)

  @stats[:workers_size] = workers_size
  @stats[:enqueued] = enqueued
  @stats
  end

+ # @api private
  def fetch_stats!
  fetch_stats_fast!
  fetch_stats_slow!
  end

+ # @api private
  def reset(*stats)
  all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +176,10 @@ module Sidekiq
  @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
  end

- class Queues
- def lengths
- Sidekiq.redis do |conn|
- queues = conn.sscan_each("queues").to_a
-
- lengths = conn.pipelined {
- queues.each do |queue|
- conn.llen("queue:#{queue}")
- end
- }
-
- array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
- array_of_arrays.to_h
- end
- end
- end
-
  class History
- def initialize(days_previous, start_date = nil)
+ def initialize(days_previous, start_date = nil, pool: nil)
+ # we only store five years of data in Redis
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
  @days_previous = days_previous
  @start_date = start_date || Time.now.utc.to_date
  end
@@ -183,15 +202,10 @@ module Sidekiq

  keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }

- begin
- Sidekiq.redis do |conn|
- conn.mget(keys).each_with_index do |value, idx|
- stat_hash[dates[idx]] = value ? value.to_i : 0
- end
+ Sidekiq.redis do |conn|
+ conn.mget(keys).each_with_index do |value, idx|
+ stat_hash[dates[idx]] = value ? value.to_i : 0
  end
- rescue Redis::CommandError
- # mget will trigger a CROSSSLOT error when run against a Cluster
- # TODO Someone want to add Cluster support?
  end

  stat_hash
@@ -200,9 +214,10 @@ module Sidekiq
  end

  ##
- # Encapsulates a queue within Sidekiq.
+ # Represents a queue within Sidekiq.
  # Allows enumeration of all jobs within the queue
- # and deletion of jobs.
+ # and deletion of jobs. NB: this queue data is real-time
+ # and is changing within Redis moment by moment.
  #
  # queue = Sidekiq::Queue.new("mailer")
  # queue.each do |job|
@@ -210,29 +225,34 @@ module Sidekiq
  # job.args # => [1, 2, 3]
  # job.delete if job.jid == 'abcdef1234567890'
  # end
- #
  class Queue
  include Enumerable

  ##
- # Return all known queues within Redis.
+ # Fetch all known queues within Redis.
  #
+ # @return [Array<Sidekiq::Queue>]
  def self.all
- Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

+ # @param name [String] the name of the queue
  def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end

+ # The current size of the queue within Redis.
+ # This value is real-time and can change between calls.
+ #
+ # @return [Integer] the size
  def size
  Sidekiq.redis { |con| con.llen(@rname) }
  end

- # Sidekiq Pro overrides this
+ # @return [Boolean] if the queue is currently paused
  def paused?
  false
  end
@@ -241,7 +261,7 @@ module Sidekiq
  # Calculates this queue's latency, the difference in seconds since the oldest
  # job in the queue was enqueued.
  #
- # @return Float
+ # @return [Float] in seconds
  def latency
  entry = Sidekiq.redis { |conn|
  conn.lrange(@rname, -1, -1)
@@ -277,34 +297,54 @@ module Sidekiq
  ##
  # Find the job with the given JID within this queue.
  #
- # This is a slow, inefficient operation. Do not use under
+ # This is a *slow, inefficient* operation. Do not use under
  # normal conditions.
+ #
+ # @param jid [String] the job_id to look for
+ # @return [Sidekiq::JobRecord]
+ # @return [nil] if not found
  def find_job(jid)
  detect { |j| j.jid == jid }
  end

+ # delete all jobs within this queue
+ # @return [Boolean] true
  def clear
  Sidekiq.redis do |conn|
- conn.multi do
- conn.unlink(@rname)
- conn.srem("queues", name)
+ conn.multi do |transaction|
+ transaction.unlink(@rname)
+ transaction.srem("queues", [name])
  end
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

  ##
- # Encapsulates a pending job within a Sidekiq queue or
- # sorted set.
+ # Represents a pending job within a Sidekiq queue.
  #
  # The job should be considered immutable but may be
  # removed from the queue via JobRecord#delete.
- #
  class JobRecord
+ # the parsed Hash of job data
+ # @!attribute [r] Item
  attr_reader :item
+ # the underlying String in Redis
+ # @!attribute [r] Value
  attr_reader :value
+ # the queue associated with this job
+ # @!attribute [r] Queue
+ attr_reader :queue

+ # :nodoc:
+ # @api private
  def initialize(item, queue_name = nil)
  @args = nil
  @value = item
@@ -312,6 +352,8 @@ module Sidekiq
  @queue = queue_name || @item["queue"]
  end

+ # :nodoc:
+ # @api private
  def parse(item)
  Sidekiq.load_json(item)
  rescue JSON::ParserError
@@ -323,6 +365,8 @@ module Sidekiq
  {}
  end

+ # This is the job class which Sidekiq will execute. If using ActiveJob,
+ # this class will be the ActiveJob adapter class rather than a specific job.
  def klass
  self["class"]
  end
@@ -330,12 +374,7 @@ module Sidekiq
  def display_class
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
  @klass ||= self["display_class"] || begin
- case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], klass) do |target, method, _|
- "#{target}.#{method}"
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
  job_class = @item["wrapped"] || args[0]
  if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
@@ -351,28 +390,23 @@ module Sidekiq

  def display_args
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
- @display_args ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], args) do |_, _, arg|
- arg
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self["wrapped"] ? args[0]["arguments"] : []
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3)
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3).first["args"]
- else
- job_args
- end
- else
- if self["encrypt"]
- # no point in showing 150+ bytes of random garbage
- args[-1] = "[encrypted data]"
- end
- args
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first["args"]
+ else
+ job_args
+ end
+ else
+ if self["encrypt"]
+ # no point in showing 150+ bytes of random garbage
+ args[-1] = "[encrypted data]"
+ end
+ args
  end
  end

@@ -384,6 +418,10 @@ module Sidekiq
  self["jid"]
  end

+ def bid
+ self["bid"]
+ end
+
  def enqueued_at
  self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end
@@ -406,15 +444,12 @@ module Sidekiq
  end
  end

- attr_reader :queue
-
  def latency
  now = Time.now.to_f
  now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

- ##
- # Remove this job from the queue.
+ # Remove this job from the queue
  def delete
  count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -422,6 +457,7 @@ module Sidekiq
  count != 0
  end

+ # Access arbitrary attributes within the job hash
  def [](name)
  # nil will happen if the JSON fails to parse.
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -431,47 +467,35 @@ module Sidekiq

  private

- def safe_load(content, default)
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
- default
- end
-
  def uncompress_backtrace(backtrace)
- if backtrace.is_a?(Array)
- # Handle old jobs with raw Array backtrace format
- backtrace
- else
- decoded = Base64.decode64(backtrace)
- uncompressed = Zlib::Inflate.inflate(decoded)
- begin
- Sidekiq.load_json(uncompressed)
- rescue
- # Handle old jobs with marshalled backtrace format
- # TODO Remove in 7.x
- Marshal.load(uncompressed)
- end
- end
+ decoded = Base64.decode64(backtrace)
+ uncompressed = Zlib::Inflate.inflate(decoded)
+ Sidekiq.load_json(uncompressed)
  end
  end

+ # Represents a job within a Redis sorted set where the score
+ # represents a timestamp associated with the job. This timestamp
+ # could be the scheduled time for it to run (e.g. scheduled set),
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
  class SortedEntry < JobRecord
  attr_reader :score
  attr_reader :parent

+ # :nodoc:
+ # @api private
  def initialize(parent, score, item)
  super(item)
- @score = score
+ @score = Float(score)
  @parent = parent
  end

+ # The timestamp associated with this entry
  def at
  Time.at(score).utc
  end

+ # remove this entry from the sorted set
  def delete
  if @value
  @parent.delete_by_value(@parent.name, @value)
@@ -480,12 +504,17 @@ module Sidekiq
  end
  end

+ # Change the scheduled time for this job.
+ #
+ # @param at [Time] the new timestamp for this job
  def reschedule(at)
  Sidekiq.redis do |conn|
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
  end
  end

+ # Enqueue this job from the scheduled or dead set so it will
+ # be executed at some point in the near future.
  def add_to_queue
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -493,6 +522,8 @@ module Sidekiq
  end
  end

+ # enqueue this job from the retry set so it will be executed
+ # at some point in the near future.
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -501,8 +532,7 @@ module Sidekiq
  end
  end

- ##
- # Place job in the dead set
+ # Move this job from its current set into the Dead set.
  def kill
  remove_job do |message|
  DeadSet.new.kill(message)
@@ -517,9 +547,9 @@ module Sidekiq

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi {
- conn.zrangebyscore(parent.name, score, score)
- conn.zremrangebyscore(parent.name, score, score)
+ results = conn.multi { |transaction|
+ transaction.zrangebyscore(parent.name, score, score)
+ transaction.zremrangebyscore(parent.name, score, score)
  }.first

  if results.size == 1
@@ -540,9 +570,9 @@ module Sidekiq
  yield msg if msg

  # push the rest back onto the sorted set
- conn.multi do
+ conn.multi do |transaction|
  nonmatched.each do |message|
- conn.zadd(parent.name, score.to_f.to_s, message)
+ transaction.zadd(parent.name, score.to_f.to_s, message)
  end
  end
  end
@@ -550,43 +580,69 @@ module Sidekiq
  end
  end

+ # Base class for all sorted sets within Sidekiq.
  class SortedSet
  include Enumerable

+ # Redis key of the set
+ # @!attribute [r] Name
  attr_reader :name

+ # :nodoc:
+ # @api private
  def initialize(name)
  @name = name
  @_size = size
  end

+ # real-time size of the set, will change
  def size
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ # Scan through each element of the sorted set, yielding each to the supplied block.
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+ #
+ # @param match [String] a snippet or regexp to filter matches.
+ # @param count [Integer] number of elements to retrieve at a time, default 100
+ # @yieldparam [Sidekiq::SortedEntry] each entry
  def scan(match, count = 100)
  return to_enum(:scan, match, count) unless block_given?

  match = "*#{match}*" unless match.include?("*")
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: match, count: count) do |entry, score|
+ conn.zscan(name, match: match, count: count) do |entry, score|
  yield SortedEntry.new(self, score, entry)
  end
  end
  end

+ # @return [Boolean] always true
  def clear
  Sidekiq.redis do |conn|
  conn.unlink(name)
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+ # e.g. Batches.
  class JobSet < SortedSet
- def schedule(timestamp, message)
+ # Add a job with the associated timestamp to this set.
+ # @param timestamp [Time] the score for the job
+ # @param job [Hash] the job data
+ def schedule(timestamp, job)
  Sidekiq.redis do |conn|
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
  end
  end

@@ -600,7 +656,7 @@ module Sidekiq
  range_start = page * page_size + offset_size
  range_end = range_start + page_size - 1
  elements = Sidekiq.redis { |conn|
- conn.zrange name, range_start, range_end, with_scores: true
+ conn.zrange name, range_start, range_end, withscores: true
  }
  break if elements.empty?
  page -= 1
@@ -614,6 +670,10 @@ module Sidekiq
  ##
  # Fetch jobs that match a given time or Range. Job ID is an
  # optional second argument.
+ #
+ # @param score [Time,Range] a specific timestamp or range
+ # @param jid [String, optional] find a specific JID within the score
+ # @return [Array<SortedEntry>] any results found, can be empty
  def fetch(score, jid = nil)
  begin_score, end_score =
  if score.is_a?(Range)
@@ -623,7 +683,7 @@ module Sidekiq
  end

  elements = Sidekiq.redis { |conn|
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
  }

  elements.each_with_object([]) do |element, result|
@@ -635,11 +695,14 @@ module Sidekiq

  ##
  # Find the job with the given JID within this sorted set.
- # This is a slower O(n) operation. Do not use for app logic.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [SortedEntry] the record or nil
  def find_job(jid)
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
- job = JSON.parse(entry)
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = Sidekiq.load_json(entry)
  matched = job["jid"] == jid
  return SortedEntry.new(self, score, entry) if matched
  end
@@ -647,6 +710,8 @@ module Sidekiq
  nil
  end

+ # :nodoc:
+ # @api private
  def delete_by_value(name, value)
  Sidekiq.redis do |conn|
  ret = conn.zrem(name, value)
@@ -655,6 +720,8 @@ module Sidekiq
  end
  end

+ # :nodoc:
+ # @api private
  def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
  elements = conn.zrangebyscore(name, score, score)
@@ -675,17 +742,13 @@ module Sidekiq
  end

  ##
- # Allows enumeration of scheduled jobs within Sidekiq.
+ # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the schedule queue.
+ # example where I'm selecting jobs based on some complex logic
+ # and deleting them from the scheduled set.
+ #
+ # See the API wiki page for usage notes and examples.
  #
- # r = Sidekiq::ScheduledSet.new
- # r.select do |scheduled|
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # scheduled.args[0] == 'User' &&
- # scheduled.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
  super "schedule"
@@ -693,46 +756,48 @@ module Sidekiq
  end

  ##
- # Allows enumeration of retries within Sidekiq.
+ # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
  #
- # r = Sidekiq::RetrySet.new
- # r.select do |retri|
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # retri.args[0] == 'User' &&
- # retri.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
+ # See the API wiki page for usage notes and examples.
+ #
  class RetrySet < JobSet
  def initialize
  super "retry"
  end

+ # Enqueues all jobs pending within the retry set.
  def retry_all
  each(&:retry) while size > 0
  end

+ # Kills all jobs pending within the retry set.
  def kill_all
  each(&:kill) while size > 0
  end
  end

  ##
- # Allows enumeration of dead jobs within Sidekiq.
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+ # their retries and are helding in this set pending some sort of manual
+ # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
  def initialize
  super "dead"
  end

+ # Add the given job to the Dead set.
+ # @param message [String] the job data as JSON
  def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
- conn.multi do
- conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+ conn.multi do |transaction|
+ transaction.zadd(name, now.to_s, message)
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
  end
  end

@@ -740,24 +805,17 @@ module Sidekiq
  job = Sidekiq.load_json(message)
  r = RuntimeError.new("Job killed by API")
  r.set_backtrace(caller)
- Sidekiq.death_handlers.each do |handle|
+ Sidekiq.default_configuration.death_handlers.each do |handle|
  handle.call(job, r)
  end
  end
  true
  end

+ # Enqueue all dead jobs
  def retry_all
  each(&:retry) while size > 0
  end
-
- def self.max_jobs
- Sidekiq.options[:dead_max_jobs]
- end
-
- def self.timeout
- Sidekiq.options[:dead_timeout_in_seconds]
- end
  end

  ##
@@ -765,24 +823,49 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
- # Yields a Sidekiq::Process.
+ # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
  include Enumerable

+ def self.[](identity)
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+ conn.multi { |transaction|
+ transaction.sismember("processes", identity)
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ }
+ }
+
+ return nil if exists == 0 || info.nil?
+
+ hash = Sidekiq.load_json(info)
+ Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => beat.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt_us.to_i))
+ end
+
+ # :nodoc:
+ # @api private
  def initialize(clean_plz = true)
  cleanup if clean_plz
  end

  # Cleans up dead processes recorded in Redis.
  # Returns the number of processes cleaned.
+ # :nodoc:
+ # @api private
  def cleanup
+ # dont run cleanup more than once per minute
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
  count = 0
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a.sort
- heartbeats = conn.pipelined {
+ procs = conn.sscan("processes").to_a
+ heartbeats = conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "info")
+ pipeline.hget(key, "info")
  end
  }

@@ -799,19 +882,19 @@ module Sidekiq

  def each
  result = Sidekiq.redis { |conn|
- procs = conn.sscan_each("processes").to_a.sort
+ procs = conn.sscan("processes").to_a.sort

  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- conn.pipelined do
+ conn.pipelined do |pipeline|
  procs.each do |key|
- conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
  end
  end
  }

- result.each do |info, busy, at_s, quiet, rss, rtt|
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
  # If a process is stopped between when we query Redis for `procs` and
  # when we query for `result`, we will have an item in `result` that is
  # composed of `nil` values.
@@ -819,10 +902,10 @@ module Sidekiq

  hash = Sidekiq.load_json(info)
  yield Process.new(hash.merge("busy" => busy.to_i,
- "beat" => at_s.to_f,
- "quiet" => quiet,
- "rss" => rss.to_i,
- "rtt_us" => rtt.to_i))
+ "beat" => beat.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt_us.to_i))
  end
  end

@@ -830,6 +913,7 @@ module Sidekiq
  # based on current heartbeat. #each does that and ensures the set only
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
+ # @return [Integer] current number of registered Sidekiq processes
  def size
  Sidekiq.redis { |conn| conn.scard("processes") }
  end
@@ -837,10 +921,12 @@ module Sidekiq
  # Total number of threads available to execute jobs.
  # For Sidekiq Enterprise customers this number (in production) must be
  # less than or equal to your licensed concurrency.
+ # @return [Integer] the sum of process concurrency
  def total_concurrency
  sum { |x| x["concurrency"].to_i }
  end

+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
  def total_rss_in_kb
  sum { |x| x["rss"].to_i }
  end
@@ -849,6 +935,8 @@ module Sidekiq
  # Returns the identity of the current cluster leader or "" if no leader.
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
  # or Sidekiq Pro.
+ # @return [String] Identity of cluster leader
+ # @return [String] empty string if no leader
  def leader
  @leader ||= begin
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -873,8 +961,11 @@ module Sidekiq
  # 'busy' => 10,
  # 'beat' => <last heartbeat>,
  # 'identity' => <unique string identifying the process>,
+ # 'embedded' => true,
  # }
  class Process
+ # :nodoc:
+ # @api private
  def initialize(hash)
  @attribs = hash
  end
@@ -884,7 +975,7 @@ module Sidekiq
  end

  def labels
- Array(self["labels"])
+ self["labels"].to_a
  end

  def [](key)
@@ -899,18 +990,47 @@ module Sidekiq
  self["queues"]
  end

+ def weights
+ self["weights"]
+ end
+
+ def version
+ self["version"]
+ end
+
+ def embedded?
+ self["embedded"]
+ end
+
+ # Signal this process to stop processing new jobs.
+ # It will continue to execute jobs it has already fetched.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to quiet.
  def quiet!
+ raise "Can't quiet an embedded process" if embedded?
+
  signal("TSTP")
  end

+ # Signal this process to shutdown.
+ # It will shutdown within its configured :timeout value, default 25 seconds.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to start shutting down.
  def stop!
+ raise "Can't stop an embedded process" if embedded?
+
  signal("TERM")
  end

+ # Signal this process to log backtraces for all threads.
+ # Useful if you have a frozen or deadlocked process which is
+ # still sending a heartbeat.
+ # This method is *asynchronous* and it can take 5-10 seconds.
  def dump_threads
  signal("TTIN")
  end

+ # @return [Boolean] true if this process is quiet or shutting down
  def stopping?
  self["quiet"] == "true"
  end
@@ -920,9 +1040,9 @@ module Sidekiq
  def signal(sig)
  key = "#{identity}-signals"
  Sidekiq.redis do |c|
- c.multi do
- c.lpush(key, sig)
- c.expire(key, 60)
+ c.multi do |transaction|
+ transaction.lpush(key, sig)
+ transaction.expire(key, 60)
  end
  end
  end
@@ -953,24 +1073,24 @@ module Sidekiq

  def each(&block)
  results = []
+ procs = nil
+ all_works = nil
+
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
- procs.sort.each do |key|
- valid, workers = conn.pipelined {
- conn.exists?(key)
- conn.hgetall("#{key}:workers")
- }
- next unless valid
- workers.each_pair do |tid, json|
- hsh = Sidekiq.load_json(json)
- p = hsh["payload"]
- # avoid breaking API, this is a side effect of the JSON optimization in #4316
- hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
- results << [key, tid, hsh]
+ procs = conn.sscan("processes").to_a.sort
+ all_works = conn.pipelined do |pipeline|
+ procs.each do |key|
+ pipeline.hgetall("#{key}:work")
  end
  end
  end

+ procs.zip(all_works).each do |key, workers|
+ workers.each_pair do |tid, json|
+ results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
+ end
+ end
+
  results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
  end

@@ -982,13 +1102,13 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
+ procs = conn.sscan("processes").to_a
  if procs.empty?
  0
  else
- conn.pipelined {
+ conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "busy")
+ pipeline.hget(key, "busy")
  end
  }.sum(&:to_i)
  end