sidekiq 6.4.1 → 7.2.1

Potentially problematic release.

Files changed (116)
  1. checksums.yaml +4 -4
  2. data/Changes.md +307 -12
  3. data/README.md +43 -35
  4. data/bin/multi_queue_bench +268 -0
  5. data/bin/sidekiq +3 -8
  6. data/bin/sidekiqload +206 -114
  7. data/bin/sidekiqmon +3 -0
  8. data/lib/sidekiq/api.rb +356 -167
  9. data/lib/sidekiq/capsule.rb +127 -0
  10. data/lib/sidekiq/cli.rb +85 -89
  11. data/lib/sidekiq/client.rb +87 -59
  12. data/lib/sidekiq/component.rb +68 -0
  13. data/lib/sidekiq/config.rb +287 -0
  14. data/lib/sidekiq/deploy.rb +62 -0
  15. data/lib/sidekiq/embedded.rb +61 -0
  16. data/lib/sidekiq/fetch.rb +21 -22
  17. data/lib/sidekiq/job.rb +371 -10
  18. data/lib/sidekiq/job_logger.rb +2 -2
  19. data/lib/sidekiq/job_retry.rb +97 -58
  20. data/lib/sidekiq/job_util.rb +62 -20
  21. data/lib/sidekiq/launcher.rb +91 -83
  22. data/lib/sidekiq/logger.rb +6 -45
  23. data/lib/sidekiq/manager.rb +33 -32
  24. data/lib/sidekiq/metrics/query.rb +156 -0
  25. data/lib/sidekiq/metrics/shared.rb +95 -0
  26. data/lib/sidekiq/metrics/tracking.rb +140 -0
  27. data/lib/sidekiq/middleware/chain.rb +96 -51
  28. data/lib/sidekiq/middleware/current_attributes.rb +58 -20
  29. data/lib/sidekiq/middleware/i18n.rb +6 -4
  30. data/lib/sidekiq/middleware/modules.rb +21 -0
  31. data/lib/sidekiq/monitor.rb +17 -4
  32. data/lib/sidekiq/paginator.rb +11 -3
  33. data/lib/sidekiq/processor.rb +81 -80
  34. data/lib/sidekiq/rails.rb +21 -14
  35. data/lib/sidekiq/redis_client_adapter.rb +111 -0
  36. data/lib/sidekiq/redis_connection.rb +16 -85
  37. data/lib/sidekiq/ring_buffer.rb +29 -0
  38. data/lib/sidekiq/scheduled.rb +66 -38
  39. data/lib/sidekiq/testing/inline.rb +4 -4
  40. data/lib/sidekiq/testing.rb +67 -75
  41. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  42. data/lib/sidekiq/version.rb +2 -1
  43. data/lib/sidekiq/web/action.rb +3 -3
  44. data/lib/sidekiq/web/application.rb +107 -10
  45. data/lib/sidekiq/web/csrf_protection.rb +8 -5
  46. data/lib/sidekiq/web/helpers.rb +65 -43
  47. data/lib/sidekiq/web.rb +19 -14
  48. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  49. data/lib/sidekiq.rb +84 -207
  50. data/sidekiq.gemspec +12 -10
  51. data/web/assets/javascripts/application.js +92 -26
  52. data/web/assets/javascripts/base-charts.js +106 -0
  53. data/web/assets/javascripts/chart.min.js +13 -0
  54. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  55. data/web/assets/javascripts/dashboard-charts.js +182 -0
  56. data/web/assets/javascripts/dashboard.js +10 -249
  57. data/web/assets/javascripts/metrics.js +298 -0
  58. data/web/assets/stylesheets/application-dark.css +4 -0
  59. data/web/assets/stylesheets/application-rtl.css +2 -91
  60. data/web/assets/stylesheets/application.css +75 -299
  61. data/web/locales/ar.yml +70 -70
  62. data/web/locales/cs.yml +62 -62
  63. data/web/locales/da.yml +60 -53
  64. data/web/locales/de.yml +65 -65
  65. data/web/locales/el.yml +43 -24
  66. data/web/locales/en.yml +84 -69
  67. data/web/locales/es.yml +68 -68
  68. data/web/locales/fa.yml +65 -65
  69. data/web/locales/fr.yml +81 -67
  70. data/web/locales/gd.yml +99 -0
  71. data/web/locales/he.yml +65 -64
  72. data/web/locales/hi.yml +59 -59
  73. data/web/locales/it.yml +53 -53
  74. data/web/locales/ja.yml +73 -68
  75. data/web/locales/ko.yml +52 -52
  76. data/web/locales/lt.yml +66 -66
  77. data/web/locales/nb.yml +61 -61
  78. data/web/locales/nl.yml +52 -52
  79. data/web/locales/pl.yml +45 -45
  80. data/web/locales/pt-br.yml +83 -55
  81. data/web/locales/pt.yml +51 -51
  82. data/web/locales/ru.yml +67 -66
  83. data/web/locales/sv.yml +53 -53
  84. data/web/locales/ta.yml +60 -60
  85. data/web/locales/uk.yml +62 -61
  86. data/web/locales/ur.yml +64 -64
  87. data/web/locales/vi.yml +67 -67
  88. data/web/locales/zh-cn.yml +43 -16
  89. data/web/locales/zh-tw.yml +42 -8
  90. data/web/views/_footer.erb +5 -2
  91. data/web/views/_job_info.erb +18 -2
  92. data/web/views/_metrics_period_select.erb +12 -0
  93. data/web/views/_nav.erb +1 -1
  94. data/web/views/_paging.erb +2 -0
  95. data/web/views/_poll_link.erb +1 -1
  96. data/web/views/_summary.erb +7 -7
  97. data/web/views/busy.erb +50 -34
  98. data/web/views/dashboard.erb +26 -4
  99. data/web/views/filtering.erb +7 -0
  100. data/web/views/metrics.erb +91 -0
  101. data/web/views/metrics_for_job.erb +59 -0
  102. data/web/views/morgue.erb +5 -9
  103. data/web/views/queue.erb +15 -15
  104. data/web/views/queues.erb +9 -3
  105. data/web/views/retries.erb +5 -9
  106. data/web/views/scheduled.erb +12 -13
  107. metadata +58 -27
  108. data/lib/sidekiq/delay.rb +0 -43
  109. data/lib/sidekiq/exception_handler.rb +0 -27
  110. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  111. data/lib/sidekiq/extensions/active_record.rb +0 -43
  112. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  113. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  114. data/lib/sidekiq/util.rb +0 -108
  115. data/lib/sidekiq/worker.rb +0 -362
  116. /data/{LICENSE → LICENSE.txt} +0 -0
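Worth noting in the list above: data/lib/sidekiq/worker.rb (-362 lines) and the old delay extensions are removed, while data/lib/sidekiq/job.rb (+371) and worker_compatibility_alias.rb (+13) take their place, reflecting the Worker → Job rename in Sidekiq 7 (Sidekiq::Worker remains as an alias). A minimal sketch of a job class written against 7.x; HardJob and its queue name are illustrative, not part of this diff:

# Hypothetical job class for sidekiq 7.x (HardJob and "critical" are made up).
class HardJob
  include Sidekiq::Job # Sidekiq::Worker still works via worker_compatibility_alias.rb

  sidekiq_options queue: "critical", retry: 5

  def perform(user_id)
    # do the slow work for user_id here
  end
end

HardJob.perform_async(1234)   # enqueue immediately
HardJob.perform_in(300, 1234) # enqueue roughly 5 minutes from now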
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,27 @@
 require "sidekiq"
 
 require "zlib"
-require "base64"
+require "set"
+
+require "sidekiq/metrics/query"
+
+#
+# Sidekiq's Data API provides a Ruby object model on top
+# of Sidekiq's runtime data in Redis. This API should never
+# be used within application code for business logic.
+#
+# The Sidekiq server process never uses this API: all data
+# manipulation is done directly for performance reasons to
+# ensure we are using Redis as efficiently as possible at
+# every callsite.
+#
 
 module Sidekiq
+  # Retrieve runtime statistics from Redis regarding
+  # this Sidekiq cluster.
+  #
+  #   stat = Sidekiq::Stats.new
+  #   stat.processed
   class Stats
     def initialize
       fetch_stats_fast!
@@ -48,10 +66,22 @@ module Sidekiq
     end
 
     def queues
-      Sidekiq::Stats::Queues.new.lengths
+      Sidekiq.redis do |conn|
+        queues = conn.sscan("queues").to_a
+
+        lengths = conn.pipelined { |pipeline|
+          queues.each do |queue|
+            pipeline.llen("queue:#{queue}")
+          end
+        }
+
+        array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+        array_of_arrays.to_h
+      end
     end
 
     # O(1) redis calls
+    # @api private
     def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do |pipeline|
@@ -61,11 +91,11 @@ module Sidekiq
           pipeline.zcard("retry")
           pipeline.zcard("dead")
           pipeline.scard("processes")
-          pipeline.lrange("queue:default", -1, -1)
+          pipeline.lindex("queue:default", -1)
         end
       }
 
-      default_queue_latency = if (entry = pipe1_res[6].first)
+      default_queue_latency = if (entry = pipe1_res[6])
         job = begin
           Sidekiq.load_json(entry)
         rescue
@@ -91,13 +121,14 @@ module Sidekiq
     end
 
     # O(number of processes + number of queues) redis calls
+    # @api private
     def fetch_stats_slow!
       processes = Sidekiq.redis { |conn|
-        conn.sscan_each("processes").to_a
+        conn.sscan("processes").to_a
       }
 
       queues = Sidekiq.redis { |conn|
-        conn.sscan_each("queues").to_a
+        conn.sscan("queues").to_a
       }
 
       pipe2_res = Sidekiq.redis { |conn|
@@ -109,18 +140,20 @@ module Sidekiq
 
       s = processes.size
       workers_size = pipe2_res[0...s].sum(&:to_i)
-      enqueued = pipe2_res[s..-1].sum(&:to_i)
+      enqueued = pipe2_res[s..].sum(&:to_i)
 
       @stats[:workers_size] = workers_size
       @stats[:enqueued] = enqueued
       @stats
     end
 
+    # @api private
     def fetch_stats!
       fetch_stats_fast!
       fetch_stats_slow!
     end
 
+    # @api private
     def reset(*stats)
       all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +175,8 @@ module Sidekiq
       @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end
 
-    class Queues
-      def lengths
-        Sidekiq.redis do |conn|
-          queues = conn.sscan_each("queues").to_a
-
-          lengths = conn.pipelined { |pipeline|
-            queues.each do |queue|
-              pipeline.llen("queue:#{queue}")
-            end
-          }
-
-          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
-          array_of_arrays.to_h
-        end
-      end
-    end
-
     class History
-      def initialize(days_previous, start_date = nil)
+      def initialize(days_previous, start_date = nil, pool: nil)
         # we only store five years of data in Redis
         raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
@@ -185,15 +201,10 @@ module Sidekiq
 
         keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
-        begin
-          Sidekiq.redis do |conn|
-            conn.mget(keys).each_with_index do |value, idx|
-              stat_hash[dates[idx]] = value ? value.to_i : 0
-            end
+        Sidekiq.redis do |conn|
+          conn.mget(keys).each_with_index do |value, idx|
+            stat_hash[dates[idx]] = value ? value.to_i : 0
           end
-        rescue Redis::CommandError
-          # mget will trigger a CROSSSLOT error when run against a Cluster
-          # TODO Someone want to add Cluster support?
         end
 
         stat_hash
@@ -202,9 +213,10 @@ module Sidekiq
     end
   end
   ##
-  # Encapsulates a queue within Sidekiq.
+  # Represents a queue within Sidekiq.
   # Allows enumeration of all jobs within the queue
-  # and deletion of jobs.
+  # and deletion of jobs. NB: this queue data is real-time
+  # and is changing within Redis moment by moment.
   #
   #   queue = Sidekiq::Queue.new("mailer")
   #   queue.each do |job|
@@ -212,29 +224,34 @@ module Sidekiq
   #     job.args # => [1, 2, 3]
   #     job.delete if job.jid == 'abcdef1234567890'
   #   end
-  #
   class Queue
     include Enumerable
 
     ##
-    # Return all known queues within Redis.
+    # Fetch all known queues within Redis.
     #
+    # @return [Array<Sidekiq::Queue>]
     def self.all
-      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
+    # @param name [String] the name of the queue
     def initialize(name = "default")
      @name = name.to_s
      @rname = "queue:#{name}"
     end
 
+    # The current size of the queue within Redis.
+    # This value is real-time and can change between calls.
+    #
+    # @return [Integer] the size
     def size
       Sidekiq.redis { |con| con.llen(@rname) }
     end
 
-    # Sidekiq Pro overrides this
+    # @return [Boolean] if the queue is currently paused
     def paused?
       false
     end
@@ -243,11 +260,11 @@ module Sidekiq
     # Calculates this queue's latency, the difference in seconds since the oldest
     # job in the queue was enqueued.
     #
-    # @return Float
+    # @return [Float] in seconds
     def latency
       entry = Sidekiq.redis { |conn|
-        conn.lrange(@rname, -1, -1)
-      }.first
+        conn.lindex(@rname, -1)
+      }
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
@@ -279,34 +296,54 @@ module Sidekiq
     ##
     # Find the job with the given JID within this queue.
     #
-    # This is a slow, inefficient operation. Do not use under
+    # This is a *slow, inefficient* operation. Do not use under
     # normal conditions.
+    #
+    # @param jid [String] the job_id to look for
+    # @return [Sidekiq::JobRecord]
+    # @return [nil] if not found
     def find_job(jid)
       detect { |j| j.jid == jid }
     end
 
+    # delete all jobs within this queue
+    # @return [Boolean] true
     def clear
       Sidekiq.redis do |conn|
         conn.multi do |transaction|
           transaction.unlink(@rname)
-          transaction.srem("queues", name)
+          transaction.srem("queues", [name])
         end
       end
+      true
     end
     alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
   end
 
   ##
-  # Encapsulates a pending job within a Sidekiq queue or
-  # sorted set.
+  # Represents a pending job within a Sidekiq queue.
   #
   # The job should be considered immutable but may be
   # removed from the queue via JobRecord#delete.
-  #
   class JobRecord
+    # the parsed Hash of job data
+    # @!attribute [r] Item
     attr_reader :item
+    # the underlying String in Redis
+    # @!attribute [r] Value
     attr_reader :value
+    # the queue associated with this job
+    # @!attribute [r] Queue
+    attr_reader :queue
 
+    # :nodoc:
+    # @api private
     def initialize(item, queue_name = nil)
       @args = nil
       @value = item
@@ -314,6 +351,8 @@ module Sidekiq
       @queue = queue_name || @item["queue"]
     end
 
+    # :nodoc:
+    # @api private
     def parse(item)
       Sidekiq.load_json(item)
     rescue JSON::ParserError
@@ -325,6 +364,8 @@ module Sidekiq
       {}
     end
 
+    # This is the job class which Sidekiq will execute. If using ActiveJob,
+    # this class will be the ActiveJob adapter class rather than a specific job.
     def klass
       self["class"]
     end
@@ -332,12 +373,7 @@ module Sidekiq
     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
       @klass ||= self["display_class"] || begin
-        case klass
-        when /\ASidekiq::Extensions::Delayed/
-          safe_load(args[0], klass) do |target, method, _|
-            "#{target}.#{method}"
-          end
-        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
           job_class = @item["wrapped"] || args[0]
           if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
             # MailerClass#mailer_method
@@ -353,32 +389,23 @@ module Sidekiq
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @display_args ||= case klass
-      when /\ASidekiq::Extensions::Delayed/
-        safe_load(args[0], args) do |_, _, arg, kwarg|
-          if !kwarg || kwarg.empty?
-            arg
-          else
-            [arg, kwarg]
-          end
-        end
-      when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self["wrapped"] ? args[0]["arguments"] : []
-        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
-          # remove MailerClass, mailer_method and 'deliver_now'
-          job_args.drop(3)
-        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
-          # remove MailerClass, mailer_method and 'deliver_now'
-          job_args.drop(3).first["args"]
-        else
-          job_args
-        end
-      else
-        if self["encrypt"]
-          # no point in showing 150+ bytes of random garbage
-          args[-1] = "[encrypted data]"
-        end
-        args
+      @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3)
+        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3).first.values_at("params", "args")
+        else
+          job_args
+        end
+      else
+        if self["encrypt"]
+          # no point in showing 150+ bytes of random garbage
+          args[-1] = "[encrypted data]"
+        end
+        args
       end
     end
 
@@ -390,6 +417,10 @@ module Sidekiq
       self["jid"]
     end
 
+    def bid
+      self["bid"]
+    end
+
     def enqueued_at
       self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
     end
@@ -412,15 +443,12 @@ module Sidekiq
       end
     end
 
-    attr_reader :queue
-
     def latency
       now = Time.now.to_f
       now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
-    ##
-    # Remove this job from the queue.
+    # Remove this job from the queue
     def delete
       count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
@@ -428,6 +456,7 @@ module Sidekiq
       count != 0
     end
 
+    # Access arbitrary attributes within the job hash
     def [](name)
       # nil will happen if the JSON fails to parse.
       # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -437,47 +466,58 @@ module Sidekiq
 
     private
 
-    def safe_load(content, default)
-      yield(*YAML.load(content))
-    rescue => ex
-      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-      # memory yet so the YAML can't be loaded.
-      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
-      default
-    end
+    ACTIVE_JOB_PREFIX = "_aj_"
+    GLOBALID_KEY = "_aj_globalid"
 
-    def uncompress_backtrace(backtrace)
-      if backtrace.is_a?(Array)
-        # Handle old jobs with raw Array backtrace format
-        backtrace
-      else
-        decoded = Base64.decode64(backtrace)
-        uncompressed = Zlib::Inflate.inflate(decoded)
-        begin
-          Sidekiq.load_json(uncompressed)
-        rescue
-          # Handle old jobs with marshalled backtrace format
-          # TODO Remove in 7.x
-          Marshal.load(uncompressed)
+    def deserialize_argument(argument)
+      case argument
+      when Array
+        argument.map { |arg| deserialize_argument(arg) }
+      when Hash
+        if serialized_global_id?(argument)
+          argument[GLOBALID_KEY]
+        else
+          argument.transform_values { |v| deserialize_argument(v) }
+            .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
         end
+      else
+        argument
       end
     end
+
+    def serialized_global_id?(hash)
+      hash.size == 1 && hash.include?(GLOBALID_KEY)
+    end
+
+    def uncompress_backtrace(backtrace)
+      strict_base64_decoded = backtrace.unpack1("m0")
+      uncompressed = Zlib::Inflate.inflate(strict_base64_decoded)
+      Sidekiq.load_json(uncompressed)
+    end
   end
 
+  # Represents a job within a Redis sorted set where the score
+  # represents a timestamp associated with the job. This timestamp
+  # could be the scheduled time for it to run (e.g. scheduled set),
+  # or the expiration date after which the entry should be deleted (e.g. dead set).
   class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent
 
+    # :nodoc:
+    # @api private
     def initialize(parent, score, item)
       super(item)
-      @score = score
+      @score = Float(score)
       @parent = parent
     end
 
+    # The timestamp associated with this entry
    def at
       Time.at(score).utc
     end
 
+    # remove this entry from the sorted set
     def delete
       if @value
         @parent.delete_by_value(@parent.name, @value)
@@ -486,12 +526,17 @@ module Sidekiq
       end
     end
 
+    # Change the scheduled time for this job.
+    #
+    # @param at [Time] the new timestamp for this job
     def reschedule(at)
       Sidekiq.redis do |conn|
         conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
       end
     end
 
+    # Enqueue this job from the scheduled or dead set so it will
+    # be executed at some point in the near future.
     def add_to_queue
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -499,6 +544,8 @@ module Sidekiq
       end
     end
 
+    # enqueue this job from the retry set so it will be executed
+    # at some point in the near future.
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -507,8 +554,7 @@ module Sidekiq
       end
     end
 
-    ##
-    # Place job in the dead set
+    # Move this job from its current set into the Dead set.
     def kill
       remove_job do |message|
         DeadSet.new.kill(message)
@@ -524,7 +570,7 @@ module Sidekiq
     def remove_job
       Sidekiq.redis do |conn|
         results = conn.multi { |transaction|
-          transaction.zrangebyscore(parent.name, score, score)
+          transaction.zrange(parent.name, score, score, "BYSCORE")
           transaction.zremrangebyscore(parent.name, score, score)
         }.first
 
@@ -556,43 +602,69 @@ module Sidekiq
     end
   end
 
+  # Base class for all sorted sets within Sidekiq.
   class SortedSet
     include Enumerable
 
+    # Redis key of the set
+    # @!attribute [r] Name
     attr_reader :name
 
+    # :nodoc:
+    # @api private
     def initialize(name)
       @name = name
       @_size = size
     end
 
+    # real-time size of the set, will change
    def size
       Sidekiq.redis { |c| c.zcard(name) }
     end
 
+    # Scan through each element of the sorted set, yielding each to the supplied block.
+    # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+    #
+    # @param match [String] a snippet or regexp to filter matches.
+    # @param count [Integer] number of elements to retrieve at a time, default 100
+    # @yieldparam [Sidekiq::SortedEntry] each entry
     def scan(match, count = 100)
       return to_enum(:scan, match, count) unless block_given?
 
       match = "*#{match}*" unless match.include?("*")
       Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: match, count: count) do |entry, score|
+        conn.zscan(name, match: match, count: count) do |entry, score|
           yield SortedEntry.new(self, score, entry)
         end
       end
     end
 
+    # @return [Boolean] always true
     def clear
       Sidekiq.redis do |conn|
         conn.unlink(name)
       end
+      true
     end
     alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
   end
 
+  # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+  # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+  # e.g. Batches.
   class JobSet < SortedSet
-    def schedule(timestamp, message)
+    # Add a job with the associated timestamp to this set.
+    # @param timestamp [Time] the score for the job
+    # @param job [Hash] the job data
+    def schedule(timestamp, job)
       Sidekiq.redis do |conn|
-        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
       end
     end
 
@@ -606,7 +678,7 @@ module Sidekiq
         range_start = page * page_size + offset_size
         range_end = range_start + page_size - 1
         elements = Sidekiq.redis { |conn|
-          conn.zrange name, range_start, range_end, with_scores: true
+          conn.zrange name, range_start, range_end, "withscores"
         }
         break if elements.empty?
         page -= 1
@@ -620,6 +692,10 @@ module Sidekiq
     ##
     # Fetch jobs that match a given time or Range. Job ID is an
     # optional second argument.
+    #
+    # @param score [Time,Range] a specific timestamp or range
+    # @param jid [String, optional] find a specific JID within the score
+    # @return [Array<SortedEntry>] any results found, can be empty
     def fetch(score, jid = nil)
       begin_score, end_score =
         if score.is_a?(Range)
@@ -629,7 +705,7 @@ module Sidekiq
         end
 
       elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+        conn.zrange(name, begin_score, end_score, "BYSCORE", "withscores")
       }
 
       elements.each_with_object([]) do |element, result|
@@ -641,11 +717,14 @@ module Sidekiq
 
     ##
     # Find the job with the given JID within this sorted set.
-    # This is a slower O(n) operation. Do not use for app logic.
+    # *This is a slow O(n) operation*. Do not use for app logic.
+    #
+    # @param jid [String] the job identifier
+    # @return [SortedEntry] the record or nil
     def find_job(jid)
       Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
-          job = JSON.parse(entry)
+        conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = Sidekiq.load_json(entry)
           matched = job["jid"] == jid
           return SortedEntry.new(self, score, entry) if matched
         end
@@ -653,6 +732,8 @@ module Sidekiq
       nil
     end
 
+    # :nodoc:
+    # @api private
     def delete_by_value(name, value)
       Sidekiq.redis do |conn|
         ret = conn.zrem(name, value)
@@ -661,9 +742,11 @@ module Sidekiq
       end
     end
 
+    # :nodoc:
+    # @api private
     def delete_by_jid(score, jid)
       Sidekiq.redis do |conn|
-        elements = conn.zrangebyscore(name, score, score)
+        elements = conn.zrange(name, score, score, "BYSCORE")
         elements.each do |element|
           if element.index(jid)
             message = Sidekiq.load_json(element)
@@ -681,17 +764,13 @@ module Sidekiq
   end
 
   ##
-  # Allows enumeration of scheduled jobs within Sidekiq.
+  # The set of scheduled jobs within Sidekiq.
   # Based on this, you can search/filter for jobs. Here's an
-  # example where I'm selecting all jobs of a certain type
-  # and deleting them from the schedule queue.
+  # example where I'm selecting jobs based on some complex logic
+  # and deleting them from the scheduled set.
+  #
+  # See the API wiki page for usage notes and examples.
   #
-  #   r = Sidekiq::ScheduledSet.new
-  #   r.select do |scheduled|
-  #     scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #     scheduled.args[0] == 'User' &&
-  #     scheduled.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
   class ScheduledSet < JobSet
     def initialize
       super "schedule"
@@ -699,46 +778,48 @@ module Sidekiq
   end
 
   ##
-  # Allows enumeration of retries within Sidekiq.
+  # The set of retries within Sidekiq.
   # Based on this, you can search/filter for jobs. Here's an
   # example where I'm selecting all jobs of a certain type
   # and deleting them from the retry queue.
   #
-  #   r = Sidekiq::RetrySet.new
-  #   r.select do |retri|
-  #     retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #     retri.args[0] == 'User' &&
-  #     retri.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
+  # See the API wiki page for usage notes and examples.
+  #
   class RetrySet < JobSet
     def initialize
       super "retry"
     end
 
+    # Enqueues all jobs pending within the retry set.
     def retry_all
       each(&:retry) while size > 0
     end
 
+    # Kills all jobs pending within the retry set.
    def kill_all
      each(&:kill) while size > 0
     end
   end
 
   ##
-  # Allows enumeration of dead jobs within Sidekiq.
+  # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+  # their retries and are helding in this set pending some sort of manual
+  # fix. They will be removed after 6 months (dead_timeout) if not.
   #
   class DeadSet < JobSet
     def initialize
       super "dead"
     end
 
+    # Add the given job to the Dead set.
+    # @param message [String] the job data as JSON
     def kill(message, opts = {})
       now = Time.now.to_f
       Sidekiq.redis do |conn|
         conn.multi do |transaction|
           transaction.zadd(name, now.to_s, message)
-          transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
-          transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
+          transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+          transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
         end
       end
 
@@ -746,24 +827,17 @@ module Sidekiq
         job = Sidekiq.load_json(message)
         r = RuntimeError.new("Job killed by API")
         r.set_backtrace(caller)
-        Sidekiq.death_handlers.each do |handle|
+        Sidekiq.default_configuration.death_handlers.each do |handle|
          handle.call(job, r)
        end
       end
       true
     end
 
+    # Enqueue all dead jobs
     def retry_all
       each(&:retry) while size > 0
     end
-
-    def self.max_jobs
-      Sidekiq.options[:dead_max_jobs]
-    end
-
-    def self.timeout
-      Sidekiq.options[:dead_timeout_in_seconds]
-    end
   end
 
   ##
@@ -771,21 +845,46 @@ module Sidekiq
   # right now. Each process sends a heartbeat to Redis every 5 seconds
   # so this set should be relatively accurate, barring network partitions.
   #
-  # Yields a Sidekiq::Process.
+  # @yieldparam [Sidekiq::Process]
   #
   class ProcessSet
     include Enumerable
 
+    def self.[](identity)
+      exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+        conn.multi { |transaction|
+          transaction.sismember("processes", identity)
+          transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+        }
+      }
+
+      return nil if exists == 0 || info.nil?
+
+      hash = Sidekiq.load_json(info)
+      Process.new(hash.merge("busy" => busy.to_i,
+        "beat" => beat.to_f,
+        "quiet" => quiet,
+        "rss" => rss.to_i,
+        "rtt_us" => rtt_us.to_i))
+    end
+
+    # :nodoc:
+    # @api private
     def initialize(clean_plz = true)
       cleanup if clean_plz
     end
 
     # Cleans up dead processes recorded in Redis.
     # Returns the number of processes cleaned.
+    # :nodoc:
+    # @api private
     def cleanup
+      # dont run cleanup more than once per minute
+      return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", "NX", "EX", "60") }
+
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a.sort
+        procs = conn.sscan("processes").to_a
        heartbeats = conn.pipelined { |pipeline|
          procs.each do |key|
            pipeline.hget(key, "info")
@@ -805,7 +904,7 @@ module Sidekiq
 
     def each
       result = Sidekiq.redis { |conn|
-        procs = conn.sscan_each("processes").to_a.sort
+        procs = conn.sscan("processes").to_a.sort
 
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
@@ -817,7 +916,7 @@ module Sidekiq
         end
       }
 
-      result.each do |info, busy, at_s, quiet, rss, rtt|
+      result.each do |info, busy, beat, quiet, rss, rtt_us|
         # If a process is stopped between when we query Redis for `procs` and
         # when we query for `result`, we will have an item in `result` that is
         # composed of `nil` values.
@@ -825,10 +924,10 @@ module Sidekiq
 
         hash = Sidekiq.load_json(info)
         yield Process.new(hash.merge("busy" => busy.to_i,
-          "beat" => at_s.to_f,
+          "beat" => beat.to_f,
           "quiet" => quiet,
           "rss" => rss.to_i,
-          "rtt_us" => rtt.to_i))
+          "rtt_us" => rtt_us.to_i))
       end
     end
 
@@ -836,6 +935,7 @@ module Sidekiq
     # based on current heartbeat. #each does that and ensures the set only
     # contains Sidekiq processes which have sent a heartbeat within the last
     # 60 seconds.
+    # @return [Integer] current number of registered Sidekiq processes
     def size
       Sidekiq.redis { |conn| conn.scard("processes") }
     end
@@ -843,10 +943,12 @@ module Sidekiq
     # Total number of threads available to execute jobs.
     # For Sidekiq Enterprise customers this number (in production) must be
     # less than or equal to your licensed concurrency.
+    # @return [Integer] the sum of process concurrency
     def total_concurrency
       sum { |x| x["concurrency"].to_i }
     end
 
+    # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
     def total_rss_in_kb
       sum { |x| x["rss"].to_i }
     end
@@ -855,6 +957,8 @@ module Sidekiq
     # Returns the identity of the current cluster leader or "" if no leader.
     # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
     # or Sidekiq Pro.
+    # @return [String] Identity of cluster leader
+    # @return [String] empty string if no leader
     def leader
       @leader ||= begin
         x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -879,8 +983,11 @@ module Sidekiq
   #   'busy' => 10,
   #   'beat' => <last heartbeat>,
  #   'identity' => <unique string identifying the process>,
+  #   'embedded' => true,
   # }
   class Process
+    # :nodoc:
+    # @api private
    def initialize(hash)
       @attribs = hash
     end
@@ -890,7 +997,7 @@ module Sidekiq
     end
 
     def labels
-      Array(self["labels"])
+      self["labels"].to_a
     end
 
     def [](key)
@@ -905,18 +1012,47 @@ module Sidekiq
       self["queues"]
     end
 
+    def weights
+      self["weights"]
+    end
+
+    def version
+      self["version"]
+    end
+
+    def embedded?
+      self["embedded"]
+    end
+
+    # Signal this process to stop processing new jobs.
+    # It will continue to execute jobs it has already fetched.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to quiet.
     def quiet!
+      raise "Can't quiet an embedded process" if embedded?
+
       signal("TSTP")
     end
 
+    # Signal this process to shutdown.
+    # It will shutdown within its configured :timeout value, default 25 seconds.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to start shutting down.
    def stop!
+      raise "Can't stop an embedded process" if embedded?
+
       signal("TERM")
     end
 
+    # Signal this process to log backtraces for all threads.
+    # Useful if you have a frozen or deadlocked process which is
+    # still sending a heartbeat.
+    # This method is *asynchronous* and it can take 5-10 seconds.
     def dump_threads
       signal("TTIN")
     end
 
+    # @return [Boolean] true if this process is quiet or shutting down
     def stopping?
       self["quiet"] == "true"
     end
@@ -959,25 +1095,25 @@ module Sidekiq
 
     def each(&block)
       results = []
+      procs = nil
+      all_works = nil
+
       Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
-        procs.sort.each do |key|
-          valid, workers = conn.pipelined { |pipeline|
-            pipeline.exists?(key)
-            pipeline.hgetall("#{key}:workers")
-          }
-          next unless valid
-          workers.each_pair do |tid, json|
-            hsh = Sidekiq.load_json(json)
-            p = hsh["payload"]
-            # avoid breaking API, this is a side effect of the JSON optimization in #4316
-            hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
-            results << [key, tid, hsh]
+        procs = conn.sscan("processes").to_a.sort
+        all_works = conn.pipelined do |pipeline|
+          procs.each do |key|
+            pipeline.hgetall("#{key}:work")
           end
         end
       end
 
-      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
+      procs.zip(all_works).each do |key, workers|
+        workers.each_pair do |tid, json|
+          results << [key, tid, Sidekiq::Work.new(key, tid, Sidekiq.load_json(json))] unless json.empty?
+        end
+      end
+
+      results.sort_by { |(_, _, hsh)| hsh.raw("run_at") }.each(&block)
     end
 
     # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -988,7 +1124,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
+        procs = conn.sscan("processes").to_a
         if procs.empty?
           0
         else
@@ -1001,6 +1137,59 @@ module Sidekiq
       end
     end
   end
+
+  # Sidekiq::Work represents a job which is currently executing.
+  class Work
+    attr_reader :process_id
+    attr_reader :thread_id
+
+    def initialize(pid, tid, hsh)
+      @process_id = pid
+      @thread_id = tid
+      @hsh = hsh
+      @job = nil
+    end
+
+    def queue
+      @hsh["queue"]
+    end
+
+    def run_at
+      Time.at(@hsh["run_at"])
+    end
+
+    def job
+      @job ||= Sidekiq::JobRecord.new(@hsh["payload"])
+    end
+
+    def payload
+      @hsh["payload"]
+    end
+
+    # deprecated
+    def [](key)
+      kwargs = {uplevel: 1}
+      kwargs[:category] = :deprecated if RUBY_VERSION > "3.0" # TODO
+      warn("Direct access to `Sidekiq::Work` attributes is deprecated, please use `#payload`, `#queue`, `#run_at` or `#job` instead", **kwargs)
+
+      @hsh[key]
+    end
+
+    # :nodoc:
+    # @api private
+    def raw(name)
+      @hsh[name]
+    end
+
+    def method_missing(*all)
+      @hsh.send(*all)
+    end
+
+    def respond_to_missing?(name)
+      @hsh.respond_to?(name)
+    end
+  end
+
   # Since "worker" is a nebulous term, we've deprecated the use of this class name.
   # Is "worker" a process, a type of job, a thread? Undefined!
   # WorkSet better describes the data.
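
The api.rb changes above flesh out the Data API described in the new header comment: Stats, Queue, the JobSet subclasses, ProcessSet, and the new Sidekiq::Work wrapper around in-flight jobs. A hedged usage sketch against 7.2.1, assuming a reachable Redis and at least one running Sidekiq process; the printed format and "SomeJob" are illustrative only:

require "sidekiq/api"

# Cluster-wide counters (O(1), per the fetch_stats_fast! comment in the diff).
stats = Sidekiq::Stats.new
puts "processed=#{stats.processed} failed=#{stats.failed} enqueued=#{stats.enqueued}"

# Per-queue size and latency; both are real-time values.
Sidekiq::Queue.all.each do |q|
  puts "#{q.name}: size=#{q.size} latency=#{q.latency.round(2)}s"
end

# Retry set entries are SortedEntry objects; filtering is O(n), keep it out of hot paths.
Sidekiq::RetrySet.new.each do |entry|
  entry.retry if entry.klass == "SomeJob" # SomeJob is a placeholder class name
end

# Registered processes and currently executing work, via the new Work accessors.
Sidekiq::ProcessSet.new.each do |process|
  puts "#{process["identity"]}: busy=#{process["busy"]} rss=#{process["rss"]}kb"
end

Sidekiq::WorkSet.new.each do |process_id, thread_id, work|
  puts "#{process_id}/#{thread_id} #{work.queue} since #{work.run_at} -> #{work.job.klass}"
end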