sidekiq 6.4.0 → 7.1.2

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (114)
  1. checksums.yaml +4 -4
  2. data/Changes.md +232 -12
  3. data/README.md +44 -31
  4. data/bin/sidekiq +4 -9
  5. data/bin/sidekiqload +207 -117
  6. data/bin/sidekiqmon +4 -1
  7. data/lib/sidekiq/api.rb +329 -188
  8. data/lib/sidekiq/capsule.rb +127 -0
  9. data/lib/sidekiq/cli.rb +85 -81
  10. data/lib/sidekiq/client.rb +98 -58
  11. data/lib/sidekiq/component.rb +68 -0
  12. data/lib/sidekiq/config.rb +278 -0
  13. data/lib/sidekiq/deploy.rb +62 -0
  14. data/lib/sidekiq/embedded.rb +61 -0
  15. data/lib/sidekiq/fetch.rb +23 -24
  16. data/lib/sidekiq/job.rb +371 -10
  17. data/lib/sidekiq/job_logger.rb +16 -28
  18. data/lib/sidekiq/job_retry.rb +80 -56
  19. data/lib/sidekiq/job_util.rb +60 -20
  20. data/lib/sidekiq/launcher.rb +103 -95
  21. data/lib/sidekiq/logger.rb +9 -44
  22. data/lib/sidekiq/manager.rb +33 -32
  23. data/lib/sidekiq/metrics/query.rb +153 -0
  24. data/lib/sidekiq/metrics/shared.rb +95 -0
  25. data/lib/sidekiq/metrics/tracking.rb +136 -0
  26. data/lib/sidekiq/middleware/chain.rb +96 -51
  27. data/lib/sidekiq/middleware/current_attributes.rb +58 -20
  28. data/lib/sidekiq/middleware/i18n.rb +6 -4
  29. data/lib/sidekiq/middleware/modules.rb +21 -0
  30. data/lib/sidekiq/monitor.rb +17 -4
  31. data/lib/sidekiq/paginator.rb +17 -9
  32. data/lib/sidekiq/processor.rb +60 -60
  33. data/lib/sidekiq/rails.rb +22 -10
  34. data/lib/sidekiq/redis_client_adapter.rb +96 -0
  35. data/lib/sidekiq/redis_connection.rb +13 -82
  36. data/lib/sidekiq/ring_buffer.rb +29 -0
  37. data/lib/sidekiq/scheduled.rb +66 -38
  38. data/lib/sidekiq/testing/inline.rb +4 -4
  39. data/lib/sidekiq/testing.rb +41 -68
  40. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  41. data/lib/sidekiq/version.rb +2 -1
  42. data/lib/sidekiq/web/action.rb +3 -3
  43. data/lib/sidekiq/web/application.rb +40 -9
  44. data/lib/sidekiq/web/csrf_protection.rb +3 -3
  45. data/lib/sidekiq/web/helpers.rb +35 -21
  46. data/lib/sidekiq/web.rb +10 -17
  47. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  48. data/lib/sidekiq.rb +84 -206
  49. data/sidekiq.gemspec +12 -10
  50. data/web/assets/javascripts/application.js +76 -26
  51. data/web/assets/javascripts/base-charts.js +106 -0
  52. data/web/assets/javascripts/chart.min.js +13 -0
  53. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  54. data/web/assets/javascripts/dashboard-charts.js +166 -0
  55. data/web/assets/javascripts/dashboard.js +3 -240
  56. data/web/assets/javascripts/metrics.js +264 -0
  57. data/web/assets/stylesheets/application-dark.css +4 -0
  58. data/web/assets/stylesheets/application-rtl.css +2 -91
  59. data/web/assets/stylesheets/application.css +66 -297
  60. data/web/locales/ar.yml +70 -70
  61. data/web/locales/cs.yml +62 -62
  62. data/web/locales/da.yml +60 -53
  63. data/web/locales/de.yml +65 -65
  64. data/web/locales/el.yml +43 -24
  65. data/web/locales/en.yml +82 -69
  66. data/web/locales/es.yml +68 -68
  67. data/web/locales/fa.yml +65 -65
  68. data/web/locales/fr.yml +81 -67
  69. data/web/locales/gd.yml +99 -0
  70. data/web/locales/he.yml +65 -64
  71. data/web/locales/hi.yml +59 -59
  72. data/web/locales/it.yml +53 -53
  73. data/web/locales/ja.yml +73 -68
  74. data/web/locales/ko.yml +52 -52
  75. data/web/locales/lt.yml +66 -66
  76. data/web/locales/nb.yml +61 -61
  77. data/web/locales/nl.yml +52 -52
  78. data/web/locales/pl.yml +45 -45
  79. data/web/locales/pt-br.yml +63 -55
  80. data/web/locales/pt.yml +51 -51
  81. data/web/locales/ru.yml +67 -66
  82. data/web/locales/sv.yml +53 -53
  83. data/web/locales/ta.yml +60 -60
  84. data/web/locales/uk.yml +62 -61
  85. data/web/locales/ur.yml +64 -64
  86. data/web/locales/vi.yml +67 -67
  87. data/web/locales/zh-cn.yml +43 -16
  88. data/web/locales/zh-tw.yml +42 -8
  89. data/web/views/_footer.erb +5 -2
  90. data/web/views/_job_info.erb +18 -2
  91. data/web/views/_metrics_period_select.erb +12 -0
  92. data/web/views/_nav.erb +1 -1
  93. data/web/views/_paging.erb +2 -0
  94. data/web/views/_poll_link.erb +1 -1
  95. data/web/views/_summary.erb +1 -1
  96. data/web/views/busy.erb +44 -28
  97. data/web/views/dashboard.erb +36 -4
  98. data/web/views/metrics.erb +82 -0
  99. data/web/views/metrics_for_job.erb +68 -0
  100. data/web/views/morgue.erb +5 -9
  101. data/web/views/queue.erb +15 -15
  102. data/web/views/queues.erb +3 -1
  103. data/web/views/retries.erb +5 -9
  104. data/web/views/scheduled.erb +12 -13
  105. metadata +56 -27
  106. data/lib/sidekiq/delay.rb +0 -43
  107. data/lib/sidekiq/exception_handler.rb +0 -27
  108. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  109. data/lib/sidekiq/extensions/active_record.rb +0 -43
  110. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  111. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  112. data/lib/sidekiq/util.rb +0 -108
  113. data/lib/sidekiq/worker.rb +0 -364
  114. /data/{LICENSE → LICENSE.txt} +0 -0
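The bulk of the api.rb diff below documents the Data API with YARD tags and replaces the old sscan_each/zscan_each and bare pipelined/multi calls with the block-argument form. A minimal usage sketch, assuming a running Redis and using only classes and methods that appear in the hunks below (the process identity string is a hypothetical placeholder):

    require "sidekiq/api"

    stats = Sidekiq::Stats.new
    stats.processed                 # lifetime processed counter
    stats.queues                    # queue name => size, sorted largest first

    q = Sidekiq::Queue.new("default")
    q.size                          # real-time length, changes between calls
    q.latency                       # seconds since the oldest job was enqueued

    # New in 7.x: look up a single process by identity instead of scanning the set
    process = Sidekiq::ProcessSet["myhost:1234:abcdef"]  # hypothetical identity
    process&.quiet!                 # sends TSTP; raises for embedded processes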
data/lib/sidekiq/api.rb CHANGED
@@ -3,9 +3,28 @@
  require "sidekiq"

  require "zlib"
+ require "set"
  require "base64"

+ require "sidekiq/metrics/query"
+
+ #
+ # Sidekiq's Data API provides a Ruby object model on top
+ # of Sidekiq's runtime data in Redis. This API should never
+ # be used within application code for business logic.
+ #
+ # The Sidekiq server process never uses this API: all data
+ # manipulation is done directly for performance reasons to
+ # ensure we are using Redis as efficiently as possible at
+ # every callsite.
+ #
+
  module Sidekiq
+ # Retrieve runtime statistics from Redis regarding
+ # this Sidekiq cluster.
+ #
+ # stat = Sidekiq::Stats.new
+ # stat.processed
  class Stats
  def initialize
  fetch_stats_fast!
@@ -48,24 +67,36 @@ module Sidekiq
  end

  def queues
- Sidekiq::Stats::Queues.new.lengths
+ Sidekiq.redis do |conn|
+ queues = conn.sscan("queues").to_a
+
+ lengths = conn.pipelined { |pipeline|
+ queues.each do |queue|
+ pipeline.llen("queue:#{queue}")
+ end
+ }
+
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ array_of_arrays.to_h
+ end
  end

  # O(1) redis calls
+ # @api private
  def fetch_stats_fast!
  pipe1_res = Sidekiq.redis { |conn|
- conn.pipelined do
- conn.get("stat:processed")
- conn.get("stat:failed")
- conn.zcard("schedule")
- conn.zcard("retry")
- conn.zcard("dead")
- conn.scard("processes")
- conn.lrange("queue:default", -1, -1)
+ conn.pipelined do |pipeline|
+ pipeline.get("stat:processed")
+ pipeline.get("stat:failed")
+ pipeline.zcard("schedule")
+ pipeline.zcard("retry")
+ pipeline.zcard("dead")
+ pipeline.scard("processes")
+ pipeline.lindex("queue:default", -1)
  end
  }

- default_queue_latency = if (entry = pipe1_res[6].first)
+ default_queue_latency = if (entry = pipe1_res[6])
  job = begin
  Sidekiq.load_json(entry)
  rescue
@@ -91,36 +122,39 @@ module Sidekiq
  end

  # O(number of processes + number of queues) redis calls
+ # @api private
  def fetch_stats_slow!
  processes = Sidekiq.redis { |conn|
- conn.sscan_each("processes").to_a
+ conn.sscan("processes").to_a
  }

  queues = Sidekiq.redis { |conn|
- conn.sscan_each("queues").to_a
+ conn.sscan("queues").to_a
  }

  pipe2_res = Sidekiq.redis { |conn|
- conn.pipelined do
- processes.each { |key| conn.hget(key, "busy") }
- queues.each { |queue| conn.llen("queue:#{queue}") }
+ conn.pipelined do |pipeline|
+ processes.each { |key| pipeline.hget(key, "busy") }
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
  end
  }

  s = processes.size
  workers_size = pipe2_res[0...s].sum(&:to_i)
- enqueued = pipe2_res[s..-1].sum(&:to_i)
+ enqueued = pipe2_res[s..].sum(&:to_i)

  @stats[:workers_size] = workers_size
  @stats[:enqueued] = enqueued
  @stats
  end

+ # @api private
  def fetch_stats!
  fetch_stats_fast!
  fetch_stats_slow!
  end

+ # @api private
  def reset(*stats)
  all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +176,8 @@ module Sidekiq
  @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
  end

- class Queues
- def lengths
- Sidekiq.redis do |conn|
- queues = conn.sscan_each("queues").to_a
-
- lengths = conn.pipelined {
- queues.each do |queue|
- conn.llen("queue:#{queue}")
- end
- }
-
- array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
- array_of_arrays.to_h
- end
- end
- end
-
  class History
- def initialize(days_previous, start_date = nil)
+ def initialize(days_previous, start_date = nil, pool: nil)
  # we only store five years of data in Redis
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
  @days_previous = days_previous
@@ -185,15 +202,10 @@ module Sidekiq

  keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }

- begin
- Sidekiq.redis do |conn|
- conn.mget(keys).each_with_index do |value, idx|
- stat_hash[dates[idx]] = value ? value.to_i : 0
- end
+ Sidekiq.redis do |conn|
+ conn.mget(keys).each_with_index do |value, idx|
+ stat_hash[dates[idx]] = value ? value.to_i : 0
  end
- rescue Redis::CommandError
- # mget will trigger a CROSSSLOT error when run against a Cluster
- # TODO Someone want to add Cluster support?
  end

  stat_hash
@@ -202,9 +214,10 @@ module Sidekiq
  end

  ##
- # Encapsulates a queue within Sidekiq.
+ # Represents a queue within Sidekiq.
  # Allows enumeration of all jobs within the queue
- # and deletion of jobs.
+ # and deletion of jobs. NB: this queue data is real-time
+ # and is changing within Redis moment by moment.
  #
  # queue = Sidekiq::Queue.new("mailer")
  # queue.each do |job|
@@ -212,29 +225,34 @@ module Sidekiq
  # job.args # => [1, 2, 3]
  # job.delete if job.jid == 'abcdef1234567890'
  # end
- #
  class Queue
  include Enumerable

  ##
- # Return all known queues within Redis.
+ # Fetch all known queues within Redis.
  #
+ # @return [Array<Sidekiq::Queue>]
  def self.all
- Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

+ # @param name [String] the name of the queue
  def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end

+ # The current size of the queue within Redis.
+ # This value is real-time and can change between calls.
+ #
+ # @return [Integer] the size
  def size
  Sidekiq.redis { |con| con.llen(@rname) }
  end

- # Sidekiq Pro overrides this
+ # @return [Boolean] if the queue is currently paused
  def paused?
  false
  end
@@ -243,11 +261,11 @@ module Sidekiq
  # Calculates this queue's latency, the difference in seconds since the oldest
  # job in the queue was enqueued.
  #
- # @return Float
+ # @return [Float] in seconds
  def latency
  entry = Sidekiq.redis { |conn|
- conn.lrange(@rname, -1, -1)
- }.first
+ conn.lindex(@rname, -1)
+ }
  return 0 unless entry
  job = Sidekiq.load_json(entry)
  now = Time.now.to_f
@@ -279,34 +297,54 @@ module Sidekiq
  ##
  # Find the job with the given JID within this queue.
  #
- # This is a slow, inefficient operation. Do not use under
+ # This is a *slow, inefficient* operation. Do not use under
  # normal conditions.
+ #
+ # @param jid [String] the job_id to look for
+ # @return [Sidekiq::JobRecord]
+ # @return [nil] if not found
  def find_job(jid)
  detect { |j| j.jid == jid }
  end

+ # delete all jobs within this queue
+ # @return [Boolean] true
  def clear
  Sidekiq.redis do |conn|
- conn.multi do
- conn.unlink(@rname)
- conn.srem("queues", name)
+ conn.multi do |transaction|
+ transaction.unlink(@rname)
+ transaction.srem("queues", [name])
  end
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

  ##
- # Encapsulates a pending job within a Sidekiq queue or
- # sorted set.
+ # Represents a pending job within a Sidekiq queue.
  #
  # The job should be considered immutable but may be
  # removed from the queue via JobRecord#delete.
- #
  class JobRecord
+ # the parsed Hash of job data
+ # @!attribute [r] Item
  attr_reader :item
+ # the underlying String in Redis
+ # @!attribute [r] Value
  attr_reader :value
+ # the queue associated with this job
+ # @!attribute [r] Queue
+ attr_reader :queue

+ # :nodoc:
+ # @api private
  def initialize(item, queue_name = nil)
  @args = nil
  @value = item
@@ -314,6 +352,8 @@ module Sidekiq
  @queue = queue_name || @item["queue"]
  end

+ # :nodoc:
+ # @api private
  def parse(item)
  Sidekiq.load_json(item)
  rescue JSON::ParserError
@@ -325,6 +365,8 @@ module Sidekiq
  {}
  end

+ # This is the job class which Sidekiq will execute. If using ActiveJob,
+ # this class will be the ActiveJob adapter class rather than a specific job.
  def klass
  self["class"]
  end
@@ -332,12 +374,7 @@ module Sidekiq
  def display_class
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
  @klass ||= self["display_class"] || begin
- case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], klass) do |target, method, _|
- "#{target}.#{method}"
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
  job_class = @item["wrapped"] || args[0]
  if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
@@ -353,28 +390,23 @@ module Sidekiq

  def display_args
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
- @display_args ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], args) do |_, _, arg|
- arg
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self["wrapped"] ? args[0]["arguments"] : []
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3)
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3).first["args"]
- else
- job_args
- end
- else
- if self["encrypt"]
- # no point in showing 150+ bytes of random garbage
- args[-1] = "[encrypted data]"
- end
- args
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first.values_at("params", "args")
+ else
+ job_args
+ end
+ else
+ if self["encrypt"]
+ # no point in showing 150+ bytes of random garbage
+ args[-1] = "[encrypted data]"
+ end
+ args
  end
  end

@@ -386,6 +418,10 @@ module Sidekiq
  self["jid"]
  end

+ def bid
+ self["bid"]
+ end
+
  def enqueued_at
  self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end
@@ -408,15 +444,12 @@ module Sidekiq
  end
  end

- attr_reader :queue
-
  def latency
  now = Time.now.to_f
  now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

- ##
- # Remove this job from the queue.
+ # Remove this job from the queue
  def delete
  count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -424,6 +457,7 @@ module Sidekiq
  count != 0
  end

+ # Access arbitrary attributes within the job hash
  def [](name)
  # nil will happen if the JSON fails to parse.
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -433,47 +467,58 @@ module Sidekiq

  private

- def safe_load(content, default)
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
- default
- end
+ ACTIVE_JOB_PREFIX = "_aj_"
+ GLOBALID_KEY = "_aj_globalid"

- def uncompress_backtrace(backtrace)
- if backtrace.is_a?(Array)
- # Handle old jobs with raw Array backtrace format
- backtrace
- else
- decoded = Base64.decode64(backtrace)
- uncompressed = Zlib::Inflate.inflate(decoded)
- begin
- Sidekiq.load_json(uncompressed)
- rescue
- # Handle old jobs with marshalled backtrace format
- # TODO Remove in 7.x
- Marshal.load(uncompressed)
+ def deserialize_argument(argument)
+ case argument
+ when Array
+ argument.map { |arg| deserialize_argument(arg) }
+ when Hash
+ if serialized_global_id?(argument)
+ argument[GLOBALID_KEY]
+ else
+ argument.transform_values { |v| deserialize_argument(v) }
+ .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
  end
+ else
+ argument
  end
  end
+
+ def serialized_global_id?(hash)
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
+ end
+
+ def uncompress_backtrace(backtrace)
+ decoded = Base64.decode64(backtrace)
+ uncompressed = Zlib::Inflate.inflate(decoded)
+ Sidekiq.load_json(uncompressed)
+ end
  end

+ # Represents a job within a Redis sorted set where the score
+ # represents a timestamp associated with the job. This timestamp
+ # could be the scheduled time for it to run (e.g. scheduled set),
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
  class SortedEntry < JobRecord
  attr_reader :score
  attr_reader :parent

+ # :nodoc:
+ # @api private
  def initialize(parent, score, item)
  super(item)
- @score = score
+ @score = Float(score)
  @parent = parent
  end

+ # The timestamp associated with this entry
  def at
  Time.at(score).utc
  end

+ # remove this entry from the sorted set
  def delete
  if @value
  @parent.delete_by_value(@parent.name, @value)
@@ -482,12 +527,17 @@ module Sidekiq
  end
  end

+ # Change the scheduled time for this job.
+ #
+ # @param at [Time] the new timestamp for this job
  def reschedule(at)
  Sidekiq.redis do |conn|
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
  end
  end

+ # Enqueue this job from the scheduled or dead set so it will
+ # be executed at some point in the near future.
  def add_to_queue
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -495,6 +545,8 @@ module Sidekiq
  end
  end

+ # enqueue this job from the retry set so it will be executed
+ # at some point in the near future.
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -503,8 +555,7 @@ module Sidekiq
  end
  end

- ##
- # Place job in the dead set
+ # Move this job from its current set into the Dead set.
  def kill
  remove_job do |message|
  DeadSet.new.kill(message)
@@ -519,9 +570,9 @@ module Sidekiq

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi {
- conn.zrangebyscore(parent.name, score, score)
- conn.zremrangebyscore(parent.name, score, score)
+ results = conn.multi { |transaction|
+ transaction.zrange(parent.name, score, score, "BYSCORE")
+ transaction.zremrangebyscore(parent.name, score, score)
  }.first

  if results.size == 1
@@ -542,9 +593,9 @@ module Sidekiq
  yield msg if msg

  # push the rest back onto the sorted set
- conn.multi do
+ conn.multi do |transaction|
  nonmatched.each do |message|
- conn.zadd(parent.name, score.to_f.to_s, message)
+ transaction.zadd(parent.name, score.to_f.to_s, message)
  end
  end
  end
@@ -552,43 +603,69 @@ module Sidekiq
  end
  end

+ # Base class for all sorted sets within Sidekiq.
  class SortedSet
  include Enumerable

+ # Redis key of the set
+ # @!attribute [r] Name
  attr_reader :name

+ # :nodoc:
+ # @api private
  def initialize(name)
  @name = name
  @_size = size
  end

+ # real-time size of the set, will change
  def size
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ # Scan through each element of the sorted set, yielding each to the supplied block.
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+ #
+ # @param match [String] a snippet or regexp to filter matches.
+ # @param count [Integer] number of elements to retrieve at a time, default 100
+ # @yieldparam [Sidekiq::SortedEntry] each entry
  def scan(match, count = 100)
  return to_enum(:scan, match, count) unless block_given?

  match = "*#{match}*" unless match.include?("*")
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: match, count: count) do |entry, score|
+ conn.zscan(name, match: match, count: count) do |entry, score|
  yield SortedEntry.new(self, score, entry)
  end
  end
  end

+ # @return [Boolean] always true
  def clear
  Sidekiq.redis do |conn|
  conn.unlink(name)
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+ # e.g. Batches.
  class JobSet < SortedSet
- def schedule(timestamp, message)
+ # Add a job with the associated timestamp to this set.
+ # @param timestamp [Time] the score for the job
+ # @param job [Hash] the job data
+ def schedule(timestamp, job)
  Sidekiq.redis do |conn|
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
  end
  end

@@ -602,7 +679,7 @@ module Sidekiq
  range_start = page * page_size + offset_size
  range_end = range_start + page_size - 1
  elements = Sidekiq.redis { |conn|
- conn.zrange name, range_start, range_end, with_scores: true
+ conn.zrange name, range_start, range_end, withscores: true
  }
  break if elements.empty?
  page -= 1
@@ -616,6 +693,10 @@ module Sidekiq
  ##
  # Fetch jobs that match a given time or Range. Job ID is an
  # optional second argument.
+ #
+ # @param score [Time,Range] a specific timestamp or range
+ # @param jid [String, optional] find a specific JID within the score
+ # @return [Array<SortedEntry>] any results found, can be empty
  def fetch(score, jid = nil)
  begin_score, end_score =
  if score.is_a?(Range)
@@ -625,7 +706,7 @@ module Sidekiq
  end

  elements = Sidekiq.redis { |conn|
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ conn.zrange(name, begin_score, end_score, "BYSCORE", withscores: true)
  }

  elements.each_with_object([]) do |element, result|
@@ -637,11 +718,14 @@ module Sidekiq

  ##
  # Find the job with the given JID within this sorted set.
- # This is a slower O(n) operation. Do not use for app logic.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [SortedEntry] the record or nil
  def find_job(jid)
  Sidekiq.redis do |conn|
- conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
- job = JSON.parse(entry)
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = Sidekiq.load_json(entry)
  matched = job["jid"] == jid
  return SortedEntry.new(self, score, entry) if matched
  end
@@ -649,6 +733,8 @@ module Sidekiq
  nil
  end

+ # :nodoc:
+ # @api private
  def delete_by_value(name, value)
  Sidekiq.redis do |conn|
  ret = conn.zrem(name, value)
@@ -657,9 +743,11 @@ module Sidekiq
  end
  end

+ # :nodoc:
+ # @api private
  def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
- elements = conn.zrangebyscore(name, score, score)
+ elements = conn.zrange(name, score, score, "BYSCORE")
  elements.each do |element|
  if element.index(jid)
  message = Sidekiq.load_json(element)
@@ -677,17 +765,13 @@ module Sidekiq
  end

  ##
- # Allows enumeration of scheduled jobs within Sidekiq.
+ # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the schedule queue.
+ # example where I'm selecting jobs based on some complex logic
+ # and deleting them from the scheduled set.
+ #
+ # See the API wiki page for usage notes and examples.
  #
- # r = Sidekiq::ScheduledSet.new
- # r.select do |scheduled|
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # scheduled.args[0] == 'User' &&
- # scheduled.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
  super "schedule"
@@ -695,46 +779,48 @@ module Sidekiq
  end

  ##
- # Allows enumeration of retries within Sidekiq.
+ # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
  #
- # r = Sidekiq::RetrySet.new
- # r.select do |retri|
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
- # retri.args[0] == 'User' &&
- # retri.args[1] == 'setup_new_subscriber'
- # end.map(&:delete)
+ # See the API wiki page for usage notes and examples.
+ #
  class RetrySet < JobSet
  def initialize
  super "retry"
  end

+ # Enqueues all jobs pending within the retry set.
  def retry_all
  each(&:retry) while size > 0
  end

+ # Kills all jobs pending within the retry set.
  def kill_all
  each(&:kill) while size > 0
  end
  end

  ##
- # Allows enumeration of dead jobs within Sidekiq.
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+ # their retries and are helding in this set pending some sort of manual
+ # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
  def initialize
  super "dead"
  end

+ # Add the given job to the Dead set.
+ # @param message [String] the job data as JSON
  def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
- conn.multi do
- conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+ conn.multi do |transaction|
+ transaction.zadd(name, now.to_s, message)
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
  end
  end

@@ -742,24 +828,17 @@ module Sidekiq
  job = Sidekiq.load_json(message)
  r = RuntimeError.new("Job killed by API")
  r.set_backtrace(caller)
- Sidekiq.death_handlers.each do |handle|
+ Sidekiq.default_configuration.death_handlers.each do |handle|
  handle.call(job, r)
  end
  end
  true
  end

+ # Enqueue all dead jobs
  def retry_all
  each(&:retry) while size > 0
  end
-
- def self.max_jobs
- Sidekiq.options[:dead_max_jobs]
- end
-
- def self.timeout
- Sidekiq.options[:dead_timeout_in_seconds]
- end
  end

  ##
@@ -767,24 +846,49 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
- # Yields a Sidekiq::Process.
+ # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
  include Enumerable

+ def self.[](identity)
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+ conn.multi { |transaction|
+ transaction.sismember("processes", identity)
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ }
+ }
+
+ return nil if exists == 0 || info.nil?
+
+ hash = Sidekiq.load_json(info)
+ Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => beat.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt_us.to_i))
+ end
+
+ # :nodoc:
+ # @api private
  def initialize(clean_plz = true)
  cleanup if clean_plz
  end

  # Cleans up dead processes recorded in Redis.
  # Returns the number of processes cleaned.
+ # :nodoc:
+ # @api private
  def cleanup
+ # dont run cleanup more than once per minute
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
  count = 0
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a.sort
- heartbeats = conn.pipelined {
+ procs = conn.sscan("processes").to_a
+ heartbeats = conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "info")
+ pipeline.hget(key, "info")
  end
  }

@@ -801,19 +905,19 @@ module Sidekiq

  def each
  result = Sidekiq.redis { |conn|
- procs = conn.sscan_each("processes").to_a.sort
+ procs = conn.sscan("processes").to_a.sort

  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- conn.pipelined do
+ conn.pipelined do |pipeline|
  procs.each do |key|
- conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
  end
  end
  }

- result.each do |info, busy, at_s, quiet, rss, rtt|
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
  # If a process is stopped between when we query Redis for `procs` and
  # when we query for `result`, we will have an item in `result` that is
  # composed of `nil` values.
@@ -821,10 +925,10 @@ module Sidekiq

  hash = Sidekiq.load_json(info)
  yield Process.new(hash.merge("busy" => busy.to_i,
- "beat" => at_s.to_f,
+ "beat" => beat.to_f,
  "quiet" => quiet,
  "rss" => rss.to_i,
- "rtt_us" => rtt.to_i))
+ "rtt_us" => rtt_us.to_i))
  end
  end

@@ -832,6 +936,7 @@ module Sidekiq
  # based on current heartbeat. #each does that and ensures the set only
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
+ # @return [Integer] current number of registered Sidekiq processes
  def size
  Sidekiq.redis { |conn| conn.scard("processes") }
  end
@@ -839,10 +944,12 @@ module Sidekiq
  # Total number of threads available to execute jobs.
  # For Sidekiq Enterprise customers this number (in production) must be
  # less than or equal to your licensed concurrency.
+ # @return [Integer] the sum of process concurrency
  def total_concurrency
  sum { |x| x["concurrency"].to_i }
  end

+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
  def total_rss_in_kb
  sum { |x| x["rss"].to_i }
  end
@@ -851,6 +958,8 @@ module Sidekiq
  # Returns the identity of the current cluster leader or "" if no leader.
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
  # or Sidekiq Pro.
+ # @return [String] Identity of cluster leader
+ # @return [String] empty string if no leader
  def leader
  @leader ||= begin
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -875,8 +984,11 @@ module Sidekiq
  # 'busy' => 10,
  # 'beat' => <last heartbeat>,
  # 'identity' => <unique string identifying the process>,
+ # 'embedded' => true,
  # }
  class Process
+ # :nodoc:
+ # @api private
  def initialize(hash)
  @attribs = hash
  end
@@ -886,7 +998,7 @@ module Sidekiq
  end

  def labels
- Array(self["labels"])
+ self["labels"].to_a
  end

  def [](key)
@@ -901,18 +1013,47 @@ module Sidekiq
  self["queues"]
  end

+ def weights
+ self["weights"]
+ end
+
+ def version
+ self["version"]
+ end
+
+ def embedded?
+ self["embedded"]
+ end
+
+ # Signal this process to stop processing new jobs.
+ # It will continue to execute jobs it has already fetched.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to quiet.
  def quiet!
+ raise "Can't quiet an embedded process" if embedded?
+
  signal("TSTP")
  end

+ # Signal this process to shutdown.
+ # It will shutdown within its configured :timeout value, default 25 seconds.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to start shutting down.
  def stop!
+ raise "Can't stop an embedded process" if embedded?
+
  signal("TERM")
  end

+ # Signal this process to log backtraces for all threads.
+ # Useful if you have a frozen or deadlocked process which is
+ # still sending a heartbeat.
+ # This method is *asynchronous* and it can take 5-10 seconds.
  def dump_threads
  signal("TTIN")
  end

+ # @return [Boolean] true if this process is quiet or shutting down
  def stopping?
  self["quiet"] == "true"
  end
@@ -922,9 +1063,9 @@ module Sidekiq
  def signal(sig)
  key = "#{identity}-signals"
  Sidekiq.redis do |c|
- c.multi do
- c.lpush(key, sig)
- c.expire(key, 60)
+ c.multi do |transaction|
+ transaction.lpush(key, sig)
+ transaction.expire(key, 60)
  end
  end
  end
@@ -955,24 +1096,24 @@ module Sidekiq

  def each(&block)
  results = []
+ procs = nil
+ all_works = nil
+
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
- procs.sort.each do |key|
- valid, workers = conn.pipelined {
- conn.exists?(key)
- conn.hgetall("#{key}:workers")
- }
- next unless valid
- workers.each_pair do |tid, json|
- hsh = Sidekiq.load_json(json)
- p = hsh["payload"]
- # avoid breaking API, this is a side effect of the JSON optimization in #4316
- hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
- results << [key, tid, hsh]
+ procs = conn.sscan("processes").to_a.sort
+ all_works = conn.pipelined do |pipeline|
+ procs.each do |key|
+ pipeline.hgetall("#{key}:work")
  end
  end
  end

+ procs.zip(all_works).each do |key, workers|
+ workers.each_pair do |tid, json|
+ results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
+ end
+ end
+
  results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
  end

@@ -984,13 +1125,13 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = conn.sscan_each("processes").to_a
+ procs = conn.sscan("processes").to_a
  if procs.empty?
  0
  else
- conn.pipelined {
+ conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "busy")
+ pipeline.hget(key, "busy")
  end
  }.sum(&:to_i)
  end