sidekiq 6.5.1 → 7.3.6

Files changed (125)
  1. checksums.yaml +4 -4
  2. data/Changes.md +376 -12
  3. data/README.md +43 -35
  4. data/bin/multi_queue_bench +271 -0
  5. data/bin/sidekiq +3 -8
  6. data/bin/sidekiqload +213 -118
  7. data/bin/sidekiqmon +3 -0
  8. data/lib/active_job/queue_adapters/sidekiq_adapter.rb +88 -0
  9. data/lib/generators/sidekiq/job_generator.rb +2 -0
  10. data/lib/sidekiq/api.rb +378 -173
  11. data/lib/sidekiq/capsule.rb +132 -0
  12. data/lib/sidekiq/cli.rb +61 -63
  13. data/lib/sidekiq/client.rb +89 -40
  14. data/lib/sidekiq/component.rb +6 -2
  15. data/lib/sidekiq/config.rb +305 -0
  16. data/lib/sidekiq/deploy.rb +64 -0
  17. data/lib/sidekiq/embedded.rb +63 -0
  18. data/lib/sidekiq/fetch.rb +11 -14
  19. data/lib/sidekiq/iterable_job.rb +55 -0
  20. data/lib/sidekiq/job/interrupt_handler.rb +24 -0
  21. data/lib/sidekiq/job/iterable/active_record_enumerator.rb +53 -0
  22. data/lib/sidekiq/job/iterable/csv_enumerator.rb +47 -0
  23. data/lib/sidekiq/job/iterable/enumerators.rb +135 -0
  24. data/lib/sidekiq/job/iterable.rb +294 -0
  25. data/lib/sidekiq/job.rb +382 -10
  26. data/lib/sidekiq/job_logger.rb +8 -7
  27. data/lib/sidekiq/job_retry.rb +89 -46
  28. data/lib/sidekiq/job_util.rb +53 -15
  29. data/lib/sidekiq/launcher.rb +77 -69
  30. data/lib/sidekiq/logger.rb +2 -27
  31. data/lib/sidekiq/manager.rb +9 -11
  32. data/lib/sidekiq/metrics/query.rb +158 -0
  33. data/lib/sidekiq/metrics/shared.rb +106 -0
  34. data/lib/sidekiq/metrics/tracking.rb +148 -0
  35. data/lib/sidekiq/middleware/chain.rb +84 -48
  36. data/lib/sidekiq/middleware/current_attributes.rb +87 -20
  37. data/lib/sidekiq/middleware/modules.rb +2 -0
  38. data/lib/sidekiq/monitor.rb +19 -5
  39. data/lib/sidekiq/paginator.rb +11 -3
  40. data/lib/sidekiq/processor.rb +67 -56
  41. data/lib/sidekiq/rails.rb +22 -16
  42. data/lib/sidekiq/redis_client_adapter.rb +31 -71
  43. data/lib/sidekiq/redis_connection.rb +44 -117
  44. data/lib/sidekiq/ring_buffer.rb +2 -0
  45. data/lib/sidekiq/scheduled.rb +62 -35
  46. data/lib/sidekiq/systemd.rb +2 -0
  47. data/lib/sidekiq/testing.rb +37 -46
  48. data/lib/sidekiq/transaction_aware_client.rb +11 -5
  49. data/lib/sidekiq/version.rb +6 -1
  50. data/lib/sidekiq/web/action.rb +15 -5
  51. data/lib/sidekiq/web/application.rb +94 -24
  52. data/lib/sidekiq/web/csrf_protection.rb +10 -7
  53. data/lib/sidekiq/web/helpers.rb +118 -45
  54. data/lib/sidekiq/web/router.rb +5 -2
  55. data/lib/sidekiq/web.rb +67 -15
  56. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  57. data/lib/sidekiq.rb +78 -266
  58. data/sidekiq.gemspec +12 -10
  59. data/web/assets/javascripts/application.js +46 -1
  60. data/web/assets/javascripts/base-charts.js +106 -0
  61. data/web/assets/javascripts/chart.min.js +13 -0
  62. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  63. data/web/assets/javascripts/dashboard-charts.js +192 -0
  64. data/web/assets/javascripts/dashboard.js +11 -250
  65. data/web/assets/javascripts/metrics.js +298 -0
  66. data/web/assets/stylesheets/application-dark.css +4 -0
  67. data/web/assets/stylesheets/application-rtl.css +10 -89
  68. data/web/assets/stylesheets/application.css +98 -295
  69. data/web/locales/ar.yml +70 -70
  70. data/web/locales/cs.yml +62 -62
  71. data/web/locales/da.yml +60 -53
  72. data/web/locales/de.yml +65 -65
  73. data/web/locales/el.yml +43 -24
  74. data/web/locales/en.yml +83 -69
  75. data/web/locales/es.yml +68 -68
  76. data/web/locales/fa.yml +65 -65
  77. data/web/locales/fr.yml +80 -67
  78. data/web/locales/gd.yml +98 -0
  79. data/web/locales/he.yml +65 -64
  80. data/web/locales/hi.yml +59 -59
  81. data/web/locales/it.yml +85 -54
  82. data/web/locales/ja.yml +72 -68
  83. data/web/locales/ko.yml +52 -52
  84. data/web/locales/lt.yml +66 -66
  85. data/web/locales/nb.yml +61 -61
  86. data/web/locales/nl.yml +52 -52
  87. data/web/locales/pl.yml +45 -45
  88. data/web/locales/pt-br.yml +78 -69
  89. data/web/locales/pt.yml +51 -51
  90. data/web/locales/ru.yml +67 -66
  91. data/web/locales/sv.yml +53 -53
  92. data/web/locales/ta.yml +60 -60
  93. data/web/locales/tr.yml +100 -0
  94. data/web/locales/uk.yml +85 -61
  95. data/web/locales/ur.yml +64 -64
  96. data/web/locales/vi.yml +67 -67
  97. data/web/locales/zh-cn.yml +42 -16
  98. data/web/locales/zh-tw.yml +41 -8
  99. data/web/views/_footer.erb +17 -2
  100. data/web/views/_job_info.erb +18 -2
  101. data/web/views/_metrics_period_select.erb +12 -0
  102. data/web/views/_nav.erb +1 -1
  103. data/web/views/_paging.erb +2 -0
  104. data/web/views/_poll_link.erb +1 -1
  105. data/web/views/_summary.erb +7 -7
  106. data/web/views/busy.erb +49 -33
  107. data/web/views/dashboard.erb +28 -6
  108. data/web/views/filtering.erb +6 -0
  109. data/web/views/layout.erb +6 -6
  110. data/web/views/metrics.erb +90 -0
  111. data/web/views/metrics_for_job.erb +59 -0
  112. data/web/views/morgue.erb +5 -9
  113. data/web/views/queue.erb +15 -15
  114. data/web/views/queues.erb +9 -3
  115. data/web/views/retries.erb +5 -9
  116. data/web/views/scheduled.erb +12 -13
  117. metadata +61 -26
  118. data/lib/sidekiq/.DS_Store +0 -0
  119. data/lib/sidekiq/delay.rb +0 -43
  120. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  121. data/lib/sidekiq/extensions/active_record.rb +0 -43
  122. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  123. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  124. data/lib/sidekiq/worker.rb +0 -367
  125. /data/{LICENSE → LICENSE.txt} +0 -0
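
The largest single change is data/lib/sidekiq/api.rb, shown below. For orientation, here is a minimal sketch (not part of the diff) of how that Data API is typically used; the class and method names come from the diff itself, but exact behavior depends on the Sidekiq version in use:

    require "sidekiq/api"

    stats = Sidekiq::Stats.new
    stats.processed                  # lifetime processed-job counter
    stats.queues                     # queue name => size, largest first

    queue = Sidekiq::Queue.new("mailer")
    queue.each do |job|
      job.klass                      # job class name
      job.args                       # job arguments
      job.delete if job.jid == "abcdef1234567890"
    end

    Sidekiq::RetrySet.new.retry_all  # push every retry back onto its queue
    Sidekiq::DeadSet.new.size        # number of dead jobs awaiting manual action
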
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,27 @@
  require "sidekiq"

  require "zlib"
- require "base64"
+ require "set"
+
+ require "sidekiq/metrics/query"
+
+ #
+ # Sidekiq's Data API provides a Ruby object model on top
+ # of Sidekiq's runtime data in Redis. This API should never
+ # be used within application code for business logic.
+ #
+ # The Sidekiq server process never uses this API: all data
+ # manipulation is done directly for performance reasons to
+ # ensure we are using Redis as efficiently as possible at
+ # every callsite.
+ #

  module Sidekiq
+ # Retrieve runtime statistics from Redis regarding
+ # this Sidekiq cluster.
+ #
+ # stat = Sidekiq::Stats.new
+ # stat.processed
  class Stats
  def initialize
  fetch_stats_fast!
@@ -48,10 +66,22 @@ module Sidekiq
  end

  def queues
- Sidekiq::Stats::Queues.new.lengths
+ Sidekiq.redis do |conn|
+ queues = conn.sscan("queues").to_a
+
+ lengths = conn.pipelined { |pipeline|
+ queues.each do |queue|
+ pipeline.llen("queue:#{queue}")
+ end
+ }
+
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ array_of_arrays.to_h
+ end
  end

  # O(1) redis calls
+ # @api private
  def fetch_stats_fast!
  pipe1_res = Sidekiq.redis { |conn|
  conn.pipelined do |pipeline|
@@ -61,11 +91,11 @@ module Sidekiq
  pipeline.zcard("retry")
  pipeline.zcard("dead")
  pipeline.scard("processes")
- pipeline.lrange("queue:default", -1, -1)
+ pipeline.lindex("queue:default", -1)
  end
  }

- default_queue_latency = if (entry = pipe1_res[6].first)
+ default_queue_latency = if (entry = pipe1_res[6])
  job = begin
  Sidekiq.load_json(entry)
  rescue
@@ -91,13 +121,14 @@ module Sidekiq
  end

  # O(number of processes + number of queues) redis calls
+ # @api private
  def fetch_stats_slow!
  processes = Sidekiq.redis { |conn|
- conn.sscan_each("processes").to_a
+ conn.sscan("processes").to_a
  }

  queues = Sidekiq.redis { |conn|
- conn.sscan_each("queues").to_a
+ conn.sscan("queues").to_a
  }

  pipe2_res = Sidekiq.redis { |conn|
@@ -109,18 +140,20 @@ module Sidekiq

  s = processes.size
  workers_size = pipe2_res[0...s].sum(&:to_i)
- enqueued = pipe2_res[s..-1].sum(&:to_i)
+ enqueued = pipe2_res[s..].sum(&:to_i)

  @stats[:workers_size] = workers_size
  @stats[:enqueued] = enqueued
  @stats
  end

+ # @api private
  def fetch_stats!
  fetch_stats_fast!
  fetch_stats_slow!
  end

+ # @api private
  def reset(*stats)
  all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +175,8 @@ module Sidekiq
  @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
  end

- class Queues
- def lengths
- Sidekiq.redis do |conn|
- queues = conn.sscan_each("queues").to_a
-
- lengths = conn.pipelined { |pipeline|
- queues.each do |queue|
- pipeline.llen("queue:#{queue}")
- end
- }
-
- array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
- array_of_arrays.to_h
- end
- end
- end
-
  class History
- def initialize(days_previous, start_date = nil)
+ def initialize(days_previous, start_date = nil, pool: nil)
  # we only store five years of data in Redis
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
  @days_previous = days_previous
@@ -185,15 +201,10 @@ module Sidekiq

  keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }

- begin
- Sidekiq.redis do |conn|
- conn.mget(keys).each_with_index do |value, idx|
- stat_hash[dates[idx]] = value ? value.to_i : 0
- end
+ Sidekiq.redis do |conn|
+ conn.mget(keys).each_with_index do |value, idx|
+ stat_hash[dates[idx]] = value ? value.to_i : 0
  end
- rescue RedisConnection.adapter::CommandError
- # mget will trigger a CROSSSLOT error when run against a Cluster
- # TODO Someone want to add Cluster support?
  end

  stat_hash
@@ -202,9 +213,10 @@ module Sidekiq
  end

  ##
- # Encapsulates a queue within Sidekiq.
+ # Represents a queue within Sidekiq.
  # Allows enumeration of all jobs within the queue
- # and deletion of jobs.
+ # and deletion of jobs. NB: this queue data is real-time
+ # and is changing within Redis moment by moment.
  #
  # queue = Sidekiq::Queue.new("mailer")
  # queue.each do |job|
@@ -212,7 +224,6 @@ module Sidekiq
  # job.args # => [1, 2, 3]
  # job.delete if job.jid == 'abcdef1234567890'
  # end
- #
  class Queue
  include Enumerable

@@ -221,7 +232,7 @@ module Sidekiq
  #
  # @return [Array<Sidekiq::Queue>]
  def self.all
- Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name
@@ -252,8 +263,8 @@ module Sidekiq
  # @return [Float] in seconds
  def latency
  entry = Sidekiq.redis { |conn|
- conn.lrange(@rname, -1, -1)
- }.first
+ conn.lindex(@rname, -1)
+ }
  return 0 unless entry
  job = Sidekiq.load_json(entry)
  now = Time.now.to_f
@@ -296,41 +307,53 @@ module Sidekiq
  end

  # delete all jobs within this queue
+ # @return [Boolean] true
  def clear
  Sidekiq.redis do |conn|
  conn.multi do |transaction|
  transaction.unlink(@rname)
- transaction.srem("queues", name)
+ transaction.srem("queues", [name])
  end
  end
+ true
  end
  alias_method :💣, :clear

- def as_json(options = nil) # :nodoc:
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
  {name: name} # 5336
  end
  end

  ##
- # Encapsulates a pending job within a Sidekiq queue or
- # sorted set.
+ # Represents a pending job within a Sidekiq queue.
  #
  # The job should be considered immutable but may be
  # removed from the queue via JobRecord#delete.
- #
  class JobRecord
+ # the parsed Hash of job data
+ # @!attribute [r] Item
  attr_reader :item
+ # the underlying String in Redis
+ # @!attribute [r] Value
  attr_reader :value
+ # the queue associated with this job
+ # @!attribute [r] Queue
  attr_reader :queue

- def initialize(item, queue_name = nil) # :nodoc:
+ # :nodoc:
+ # @api private
+ def initialize(item, queue_name = nil)
  @args = nil
  @value = item
  @item = item.is_a?(Hash) ? item : parse(item)
  @queue = queue_name || @item["queue"]
  end

- def parse(item) # :nodoc:
+ # :nodoc:
+ # @api private
+ def parse(item)
  Sidekiq.load_json(item)
  rescue JSON::ParserError
  # If the job payload in Redis is invalid JSON, we'll load
@@ -341,6 +364,8 @@ module Sidekiq
  {}
  end

+ # This is the job class which Sidekiq will execute. If using ActiveJob,
+ # this class will be the ActiveJob adapter class rather than a specific job.
  def klass
  self["class"]
  end
@@ -348,12 +373,7 @@ module Sidekiq
  def display_class
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
  @klass ||= self["display_class"] || begin
- case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], klass) do |target, method, _|
- "#{target}.#{method}"
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper" || klass == "Sidekiq::ActiveJob::Wrapper"
  job_class = @item["wrapped"] || args[0]
  if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
@@ -369,23 +389,14 @@ module Sidekiq

  def display_args
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
- @display_args ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], args) do |_, _, arg, kwarg|
- if !kwarg || kwarg.empty?
- arg
- else
- [arg, kwarg]
- end
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self["wrapped"] ? args[0]["arguments"] : []
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper" || klass == "Sidekiq::ActiveJob::Wrapper"
+ job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
  if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
  # remove MailerClass, mailer_method and 'deliver_now'
  job_args.drop(3)
  elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
  # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3).first["args"]
+ job_args.drop(3).first.values_at("params", "args")
  else
  job_args
  end
@@ -406,6 +417,10 @@ module Sidekiq
  self["jid"]
  end

+ def bid
+ self["bid"]
+ end
+
  def enqueued_at
  self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end
@@ -451,50 +466,58 @@ module Sidekiq

  private

- def safe_load(content, default)
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- # TODO is this still necessary? Zeitwerk reloader should handle?
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.config[:environment] == "development"
- default
- end
+ ACTIVE_JOB_PREFIX = "_aj_"
+ GLOBALID_KEY = "_aj_globalid"

- def uncompress_backtrace(backtrace)
- if backtrace.is_a?(Array)
- # Handle old jobs with raw Array backtrace format
- backtrace
- else
- decoded = Base64.decode64(backtrace)
- uncompressed = Zlib::Inflate.inflate(decoded)
- begin
- Sidekiq.load_json(uncompressed)
- rescue
- # Handle old jobs with marshalled backtrace format
- # TODO Remove in 7.x
- Marshal.load(uncompressed)
+ def deserialize_argument(argument)
+ case argument
+ when Array
+ argument.map { |arg| deserialize_argument(arg) }
+ when Hash
+ if serialized_global_id?(argument)
+ argument[GLOBALID_KEY]
+ else
+ argument.transform_values { |v| deserialize_argument(v) }
+ .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
  end
+ else
+ argument
  end
  end
+
+ def serialized_global_id?(hash)
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
+ end
+
+ def uncompress_backtrace(backtrace)
+ strict_base64_decoded = backtrace.unpack1("m")
+ uncompressed = Zlib::Inflate.inflate(strict_base64_decoded)
+ Sidekiq.load_json(uncompressed)
+ end
  end

  # Represents a job within a Redis sorted set where the score
- # represents a timestamp for the job.
+ # represents a timestamp associated with the job. This timestamp
+ # could be the scheduled time for it to run (e.g. scheduled set),
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
  class SortedEntry < JobRecord
  attr_reader :score
  attr_reader :parent

- def initialize(parent, score, item) # :nodoc:
+ # :nodoc:
+ # @api private
+ def initialize(parent, score, item)
  super(item)
  @score = Float(score)
  @parent = parent
  end

+ # The timestamp associated with this entry
  def at
  Time.at(score).utc
  end

+ # remove this entry from the sorted set
  def delete
  if @value
  @parent.delete_by_value(@parent.name, @value)
@@ -505,7 +528,7 @@ module Sidekiq

  # Change the scheduled time for this job.
  #
- # @param [Time] the new timestamp when this job will be enqueued.
+ # @param at [Time] the new timestamp for this job
  def reschedule(at)
  Sidekiq.redis do |conn|
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
@@ -547,7 +570,7 @@ module Sidekiq
  def remove_job
  Sidekiq.redis do |conn|
  results = conn.multi { |transaction|
- transaction.zrangebyscore(parent.name, score, score)
+ transaction.zrange(parent.name, score, score, "BYSCORE")
  transaction.zremrangebyscore(parent.name, score, score)
  }.first

@@ -579,47 +602,104 @@ module Sidekiq
  end
  end

+ # Base class for all sorted sets within Sidekiq.
  class SortedSet
  include Enumerable

+ # Redis key of the set
+ # @!attribute [r] Name
  attr_reader :name

+ # :nodoc:
+ # @api private
  def initialize(name)
  @name = name
  @_size = size
  end

+ # real-time size of the set, will change
  def size
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ # Scan through each element of the sorted set, yielding each to the supplied block.
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+ #
+ # @param match [String] a snippet or regexp to filter matches.
+ # @param count [Integer] number of elements to retrieve at a time, default 100
+ # @yieldparam [Sidekiq::SortedEntry] each entry
  def scan(match, count = 100)
  return to_enum(:scan, match, count) unless block_given?

  match = "*#{match}*" unless match.include?("*")
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: match, count: count) do |entry, score|
+ conn.zscan(name, match: match, count: count) do |entry, score|
  yield SortedEntry.new(self, score, entry)
  end
  end
  end

+ # @return [Boolean] always true
  def clear
  Sidekiq.redis do |conn|
  conn.unlink(name)
  end
+ true
  end
  alias_method :💣, :clear

- def as_json(options = nil) # :nodoc:
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
  {name: name} # 5336
  end
  end

+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+ # e.g. Batches.
  class JobSet < SortedSet
- def schedule(timestamp, message)
+ # Add a job with the associated timestamp to this set.
+ # @param timestamp [Time] the score for the job
+ # @param job [Hash] the job data
+ def schedule(timestamp, job)
  Sidekiq.redis do |conn|
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
+ end
+ end
+
+ def pop_each
+ Sidekiq.redis do |c|
+ size.times do
+ data, score = c.zpopmin(name, 1)&.first
+ break unless data
+ yield data, score
+ end
+ end
+ end
+
+ def retry_all
+ c = Sidekiq::Client.new
+ pop_each do |msg, _|
+ job = Sidekiq.load_json(msg)
+ # Manual retries should not count against the retry limit.
+ job["retry_count"] -= 1 if job["retry_count"]
+ c.push(job)
+ end
+ end
+
+ # Move all jobs from this Set to the Dead Set.
+ # See DeadSet#kill
+ def kill_all(notify_failure: false, ex: nil)
+ ds = DeadSet.new
+ opts = {notify_failure: notify_failure, ex: ex, trim: false}
+
+ begin
+ pop_each do |msg, _|
+ ds.kill(msg, opts)
+ end
+ ensure
+ ds.trim
  end
  end

@@ -633,7 +713,7 @@ module Sidekiq
  range_start = page * page_size + offset_size
  range_end = range_start + page_size - 1
  elements = Sidekiq.redis { |conn|
- conn.zrange name, range_start, range_end, withscores: true
+ conn.zrange name, range_start, range_end, "withscores"
  }
  break if elements.empty?
  page -= 1
@@ -647,6 +727,10 @@ module Sidekiq
  ##
  # Fetch jobs that match a given time or Range. Job ID is an
  # optional second argument.
+ #
+ # @param score [Time,Range] a specific timestamp or range
+ # @param jid [String, optional] find a specific JID within the score
+ # @return [Array<SortedEntry>] any results found, can be empty
  def fetch(score, jid = nil)
  begin_score, end_score =
  if score.is_a?(Range)
@@ -656,7 +740,7 @@ module Sidekiq
  end

  elements = Sidekiq.redis { |conn|
- conn.zrangebyscore(name, begin_score, end_score, withscores: true)
+ conn.zrange(name, begin_score, end_score, "BYSCORE", "withscores")
  }

  elements.each_with_object([]) do |element, result|
@@ -668,11 +752,14 @@ module Sidekiq

  ##
  # Find the job with the given JID within this sorted set.
- # This is a slower O(n) operation. Do not use for app logic.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [SortedEntry] the record or nil
  def find_job(jid)
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
- job = JSON.parse(entry)
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = Sidekiq.load_json(entry)
  matched = job["jid"] == jid
  return SortedEntry.new(self, score, entry) if matched
  end
@@ -680,6 +767,8 @@ module Sidekiq
  nil
  end

+ # :nodoc:
+ # @api private
  def delete_by_value(name, value)
  Sidekiq.redis do |conn|
  ret = conn.zrem(name, value)
@@ -688,9 +777,11 @@ module Sidekiq
  end
  end

+ # :nodoc:
+ # @api private
  def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
- elements = conn.zrangebyscore(name, score, score)
+ elements = conn.zrange(name, score, score, "BYSCORE")
  elements.each do |element|
  if element.index(jid)
  message = Sidekiq.load_json(element)
@@ -708,89 +799,74 @@ module Sidekiq
  end

  ##
- # Allows enumeration of scheduled jobs within Sidekiq.
- # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the schedule queue.
+ # The set of scheduled jobs within Sidekiq.
+ # See the API wiki page for usage notes and examples.
  #
- # r = Sidekiq::ScheduledSet.new
- # r.select do |scheduled|
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # scheduled.args[0] == 'User' &&
- # scheduled.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
- super "schedule"
+ super("schedule")
  end
  end

  ##
- # Allows enumeration of retries within Sidekiq.
- # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the retry queue.
+ # The set of retries within Sidekiq.
+ # See the API wiki page for usage notes and examples.
  #
- # r = Sidekiq::RetrySet.new
- # r.select do |retri|
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # retri.args[0] == 'User' &&
- # retri.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
  class RetrySet < JobSet
  def initialize
- super "retry"
- end
-
- def retry_all
- each(&:retry) while size > 0
- end
-
- def kill_all
- each(&:kill) while size > 0
+ super("retry")
  end
  end

  ##
- # Allows enumeration of dead jobs within Sidekiq.
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+ # their retries and are helding in this set pending some sort of manual
+ # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
  def initialize
- super "dead"
+ super("dead")
  end

- def kill(message, opts = {})
+ # Trim dead jobs which are over our storage limits
+ def trim
+ hash = Sidekiq.default_configuration
  now = Time.now.to_f
  Sidekiq.redis do |conn|
  conn.multi do |transaction|
- transaction.zadd(name, now.to_s, message)
- transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
- transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
+ transaction.zremrangebyscore(name, "-inf", now - hash[:dead_timeout_in_seconds])
+ transaction.zremrangebyrank(name, 0, - hash[:dead_max_jobs])
  end
  end
+ end
+
+ # Add the given job to the Dead set.
+ # @param message [String] the job data as JSON
+ # @option opts [Boolean] :notify_failure (true) Whether death handlers should be called
+ # @option opts [Boolean] :trim (true) Whether Sidekiq should trim the structure to keep it within configuration
+ # @option opts [Exception] :ex (RuntimeError) An exception to pass to the death handlers
+ def kill(message, opts = {})
+ now = Time.now.to_f
+ Sidekiq.redis do |conn|
+ conn.zadd(name, now.to_s, message)
+ end
+
+ trim if opts[:trim] != false

  if opts[:notify_failure] != false
  job = Sidekiq.load_json(message)
- r = RuntimeError.new("Job killed by API")
- r.set_backtrace(caller)
- Sidekiq.death_handlers.each do |handle|
- handle.call(job, r)
+ if opts[:ex]
+ ex = opts[:ex]
+ else
+ ex = RuntimeError.new("Job killed by API")
+ ex.set_backtrace(caller)
+ end
+ Sidekiq.default_configuration.death_handlers.each do |handle|
+ handle.call(job, ex)
  end
  end
  true
  end
-
- def retry_all
- each(&:retry) while size > 0
- end
-
- def self.max_jobs
- Sidekiq[:dead_max_jobs]
- end
-
- def self.timeout
- Sidekiq[:dead_timeout_in_seconds]
- end
  end

  ##
@@ -798,21 +874,46 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
- # Yields a Sidekiq::Process.
+ # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
  include Enumerable

+ def self.[](identity)
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+ conn.multi { |transaction|
+ transaction.sismember("processes", identity)
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ }
+ }
+
+ return nil if exists == 0 || info.nil?
+
+ hash = Sidekiq.load_json(info)
+ Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => beat.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt_us.to_i))
+ end
+
+ # :nodoc:
+ # @api private
  def initialize(clean_plz = true)
  cleanup if clean_plz
  end

  # Cleans up dead processes recorded in Redis.
  # Returns the number of processes cleaned.
+ # :nodoc:
+ # @api private
  def cleanup
+ # dont run cleanup more than once per minute
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", "NX", "EX", "60") }
+
  count = 0
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a.sort
+ procs = conn.sscan("processes").to_a
  heartbeats = conn.pipelined { |pipeline|
  procs.each do |key|
  pipeline.hget(key, "info")
@@ -832,7 +933,7 @@ module Sidekiq

  def each
  result = Sidekiq.redis { |conn|
- procs = conn.sscan_each("processes").to_a.sort
+ procs = conn.sscan("processes").to_a.sort

  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
@@ -844,7 +945,7 @@ module Sidekiq
  end
  }

- result.each do |info, busy, at_s, quiet, rss, rtt|
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
  # If a process is stopped between when we query Redis for `procs` and
  # when we query for `result`, we will have an item in `result` that is
  # composed of `nil` values.
@@ -852,10 +953,10 @@ module Sidekiq

  hash = Sidekiq.load_json(info)
  yield Process.new(hash.merge("busy" => busy.to_i,
- "beat" => at_s.to_f,
+ "beat" => beat.to_f,
  "quiet" => quiet,
  "rss" => rss.to_i,
- "rtt_us" => rtt.to_i))
+ "rtt_us" => rtt_us.to_i))
  end
  end

@@ -863,6 +964,7 @@ module Sidekiq
  # based on current heartbeat. #each does that and ensures the set only
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
+ # @return [Integer] current number of registered Sidekiq processes
  def size
  Sidekiq.redis { |conn| conn.scard("processes") }
  end
@@ -870,10 +972,12 @@ module Sidekiq
  # Total number of threads available to execute jobs.
  # For Sidekiq Enterprise customers this number (in production) must be
  # less than or equal to your licensed concurrency.
+ # @return [Integer] the sum of process concurrency
  def total_concurrency
  sum { |x| x["concurrency"].to_i }
  end

+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
  def total_rss_in_kb
  sum { |x| x["rss"].to_i }
  end
@@ -882,6 +986,8 @@ module Sidekiq
  # Returns the identity of the current cluster leader or "" if no leader.
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
  # or Sidekiq Pro.
+ # @return [String] Identity of cluster leader
+ # @return [String] empty string if no leader
  def leader
  @leader ||= begin
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -906,8 +1012,11 @@ module Sidekiq
  # 'busy' => 10,
  # 'beat' => <last heartbeat>,
  # 'identity' => <unique string identifying the process>,
+ # 'embedded' => true,
  # }
  class Process
+ # :nodoc:
+ # @api private
  def initialize(hash)
  @attribs = hash
  end
@@ -917,7 +1026,7 @@ module Sidekiq
  end

  def labels
- Array(self["labels"])
+ self["labels"].to_a
  end

  def [](key)
@@ -932,18 +1041,47 @@ module Sidekiq
  self["queues"]
  end

+ def weights
+ self["weights"]
+ end
+
+ def version
+ self["version"]
+ end
+
+ def embedded?
+ self["embedded"]
+ end
+
+ # Signal this process to stop processing new jobs.
+ # It will continue to execute jobs it has already fetched.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to quiet.
  def quiet!
+ raise "Can't quiet an embedded process" if embedded?
+
  signal("TSTP")
  end

+ # Signal this process to shutdown.
+ # It will shutdown within its configured :timeout value, default 25 seconds.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to start shutting down.
  def stop!
+ raise "Can't stop an embedded process" if embedded?
+
  signal("TERM")
  end

+ # Signal this process to log backtraces for all threads.
+ # Useful if you have a frozen or deadlocked process which is
+ # still sending a heartbeat.
+ # This method is *asynchronous* and it can take 5-10 seconds.
  def dump_threads
  signal("TTIN")
  end

+ # @return [Boolean] true if this process is quiet or shutting down
  def stopping?
  self["quiet"] == "true"
  end
@@ -986,25 +1124,25 @@ module Sidekiq

  def each(&block)
  results = []
+ procs = nil
+ all_works = nil
+
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
- procs.sort.each do |key|
- valid, workers = conn.pipelined { |pipeline|
- pipeline.exists?(key)
+ procs = conn.sscan("processes").to_a.sort
+ all_works = conn.pipelined do |pipeline|
+ procs.each do |key|
  pipeline.hgetall("#{key}:work")
- }
- next unless valid
- workers.each_pair do |tid, json|
- hsh = Sidekiq.load_json(json)
- p = hsh["payload"]
- # avoid breaking API, this is a side effect of the JSON optimization in #4316
- hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
- results << [key, tid, hsh]
  end
  end
  end

- results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
+ procs.zip(all_works).each do |key, workers|
+ workers.each_pair do |tid, json|
+ results << [key, tid, Sidekiq::Work.new(key, tid, Sidekiq.load_json(json))] unless json.empty?
+ end
+ end
+
+ results.sort_by { |(_, _, hsh)| hsh.raw("run_at") }.each(&block)
  end

  # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -1015,7 +1153,7 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
+ procs = conn.sscan("processes").to_a
  if procs.empty?
  0
  else
@@ -1027,7 +1165,74 @@ module Sidekiq
  end
  end
  end
+
+ ##
+ # Find the work which represents a job with the given JID.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [Sidekiq::Work] the work or nil
+ def find_work_by_jid(jid)
+ each do |_process_id, _thread_id, work|
+ job = work.job
+ return work if job.jid == jid
+ end
+ nil
+ end
+ end
+
+ # Sidekiq::Work represents a job which is currently executing.
+ class Work
+ attr_reader :process_id
+ attr_reader :thread_id
+
+ def initialize(pid, tid, hsh)
+ @process_id = pid
+ @thread_id = tid
+ @hsh = hsh
+ @job = nil
+ end
+
+ def queue
+ @hsh["queue"]
+ end
+
+ def run_at
+ Time.at(@hsh["run_at"])
+ end
+
+ def job
+ @job ||= Sidekiq::JobRecord.new(@hsh["payload"])
+ end
+
+ def payload
+ @hsh["payload"]
+ end
+
+ # deprecated
+ def [](key)
+ kwargs = {uplevel: 1}
+ kwargs[:category] = :deprecated if RUBY_VERSION > "3.0" # TODO
+ warn("Direct access to `Sidekiq::Work` attributes is deprecated, please use `#payload`, `#queue`, `#run_at` or `#job` instead", **kwargs)
+
+ @hsh[key]
+ end
+
+ # :nodoc:
+ # @api private
+ def raw(name)
+ @hsh[name]
+ end
+
+ def method_missing(*all)
+ @hsh.send(*all)
+ end
+
+ def respond_to_missing?(name, *args)
+ @hsh.respond_to?(name)
+ end
  end
+
  # Since "worker" is a nebulous term, we've deprecated the use of this class name.
  # Is "worker" a process, a type of job, a thread? Undefined!
  # WorkSet better describes the data.
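
The final hunk above replaces the raw Hash yielded by the work set with a Sidekiq::Work object. A minimal sketch (not part of the diff) of the accessors it adds; hash-style access such as work["payload"] still works but emits a deprecation warning:

    works = Sidekiq::WorkSet.new
    works.each do |process_id, thread_id, work|
      work.queue    # queue the job was fetched from
      work.run_at   # Time the job started executing
      work.payload  # raw JSON payload string
      work.job      # Sidekiq::JobRecord parsed from the payload
    end

    works.find_work_by_jid("abcdef1234567890")  # => Sidekiq::Work or nil; slow O(n) scan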