sidekiq 6.1.2 → 6.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (113)
  1. checksums.yaml +4 -4
  2. data/Changes.md +215 -2
  3. data/LICENSE +3 -3
  4. data/README.md +9 -4
  5. data/bin/sidekiq +3 -3
  6. data/bin/sidekiqload +70 -66
  7. data/bin/sidekiqmon +1 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +321 -145
  13. data/lib/sidekiq/cli.rb +73 -40
  14. data/lib/sidekiq/client.rb +48 -72
  15. data/lib/sidekiq/{util.rb → component.rb} +12 -14
  16. data/lib/sidekiq/delay.rb +3 -1
  17. data/lib/sidekiq/extensions/generic_proxy.rb +4 -2
  18. data/lib/sidekiq/fetch.rb +31 -20
  19. data/lib/sidekiq/job.rb +13 -0
  20. data/lib/sidekiq/job_logger.rb +16 -28
  21. data/lib/sidekiq/job_retry.rb +79 -59
  22. data/lib/sidekiq/job_util.rb +71 -0
  23. data/lib/sidekiq/launcher.rb +126 -65
  24. data/lib/sidekiq/logger.rb +11 -20
  25. data/lib/sidekiq/manager.rb +35 -34
  26. data/lib/sidekiq/metrics/deploy.rb +47 -0
  27. data/lib/sidekiq/metrics/query.rb +153 -0
  28. data/lib/sidekiq/metrics/shared.rb +94 -0
  29. data/lib/sidekiq/metrics/tracking.rb +134 -0
  30. data/lib/sidekiq/middleware/chain.rb +87 -41
  31. data/lib/sidekiq/middleware/current_attributes.rb +63 -0
  32. data/lib/sidekiq/middleware/i18n.rb +6 -4
  33. data/lib/sidekiq/middleware/modules.rb +21 -0
  34. data/lib/sidekiq/monitor.rb +1 -1
  35. data/lib/sidekiq/paginator.rb +8 -8
  36. data/lib/sidekiq/processor.rb +47 -41
  37. data/lib/sidekiq/rails.rb +22 -4
  38. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  39. data/lib/sidekiq/redis_connection.rb +84 -55
  40. data/lib/sidekiq/ring_buffer.rb +29 -0
  41. data/lib/sidekiq/scheduled.rb +55 -25
  42. data/lib/sidekiq/testing/inline.rb +4 -4
  43. data/lib/sidekiq/testing.rb +38 -39
  44. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  45. data/lib/sidekiq/version.rb +1 -1
  46. data/lib/sidekiq/web/action.rb +3 -3
  47. data/lib/sidekiq/web/application.rb +37 -13
  48. data/lib/sidekiq/web/csrf_protection.rb +30 -8
  49. data/lib/sidekiq/web/helpers.rb +60 -28
  50. data/lib/sidekiq/web/router.rb +4 -1
  51. data/lib/sidekiq/web.rb +38 -78
  52. data/lib/sidekiq/worker.rb +136 -13
  53. data/lib/sidekiq.rb +114 -31
  54. data/sidekiq.gemspec +12 -4
  55. data/web/assets/images/apple-touch-icon.png +0 -0
  56. data/web/assets/javascripts/application.js +113 -60
  57. data/web/assets/javascripts/chart.min.js +13 -0
  58. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  59. data/web/assets/javascripts/dashboard.js +50 -67
  60. data/web/assets/javascripts/graph.js +16 -0
  61. data/web/assets/javascripts/metrics.js +262 -0
  62. data/web/assets/stylesheets/application-dark.css +36 -36
  63. data/web/assets/stylesheets/application-rtl.css +0 -4
  64. data/web/assets/stylesheets/application.css +82 -237
  65. data/web/locales/ar.yml +8 -2
  66. data/web/locales/el.yml +43 -19
  67. data/web/locales/en.yml +11 -1
  68. data/web/locales/es.yml +18 -2
  69. data/web/locales/fr.yml +8 -1
  70. data/web/locales/ja.yml +3 -0
  71. data/web/locales/lt.yml +1 -1
  72. data/web/locales/pt-br.yml +27 -9
  73. data/web/views/_footer.erb +1 -1
  74. data/web/views/_job_info.erb +1 -1
  75. data/web/views/_nav.erb +1 -1
  76. data/web/views/_poll_link.erb +2 -5
  77. data/web/views/_summary.erb +7 -7
  78. data/web/views/busy.erb +50 -19
  79. data/web/views/dashboard.erb +23 -14
  80. data/web/views/dead.erb +1 -1
  81. data/web/views/layout.erb +2 -1
  82. data/web/views/metrics.erb +69 -0
  83. data/web/views/metrics_for_job.erb +87 -0
  84. data/web/views/morgue.erb +6 -6
  85. data/web/views/queue.erb +15 -11
  86. data/web/views/queues.erb +3 -3
  87. data/web/views/retries.erb +7 -7
  88. data/web/views/retry.erb +1 -1
  89. data/web/views/scheduled.erb +1 -1
  90. metadata +43 -36
  91. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -20
  92. data/.github/contributing.md +0 -32
  93. data/.github/workflows/ci.yml +0 -41
  94. data/.gitignore +0 -13
  95. data/.standard.yml +0 -20
  96. data/3.0-Upgrade.md +0 -70
  97. data/4.0-Upgrade.md +0 -53
  98. data/5.0-Upgrade.md +0 -56
  99. data/6.0-Upgrade.md +0 -72
  100. data/COMM-LICENSE +0 -97
  101. data/Ent-2.0-Upgrade.md +0 -37
  102. data/Ent-Changes.md +0 -281
  103. data/Gemfile +0 -24
  104. data/Gemfile.lock +0 -192
  105. data/Pro-2.0-Upgrade.md +0 -138
  106. data/Pro-3.0-Upgrade.md +0 -44
  107. data/Pro-4.0-Upgrade.md +0 -35
  108. data/Pro-5.0-Upgrade.md +0 -25
  109. data/Pro-Changes.md +0 -805
  110. data/Rakefile +0 -10
  111. data/code_of_conduct.md +0 -50
  112. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  113. data/lib/sidekiq/exception_handler.rb +0 -27
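Beyond the api.rb diff shown below, the listing above points at two upgrade-relevant renames: the worker generator and its templates become job generators, and a new data/lib/sidekiq/job.rb is added. A minimal sketch of what this likely means for application code, assuming Sidekiq::Job is exposed as an alias for Sidekiq::Worker (the class name HardJob and its argument are placeholders):

    # Hypothetical job class; assumes Sidekiq::Job (added in lib/sidekiq/job.rb)
    # can be included just like Sidekiq::Worker.
    class HardJob
      include Sidekiq::Job   # previously: include Sidekiq::Worker
      sidekiq_options queue: "default", retry: 3

      def perform(user_id)
        # do the work for user_id
      end
    end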
data/lib/sidekiq/api.rb CHANGED
@@ -3,12 +3,23 @@
  require "sidekiq"

  require "zlib"
+ require "set"
  require "base64"

+ if ENV["SIDEKIQ_METRICS_BETA"]
+ require "sidekiq/metrics/deploy"
+ require "sidekiq/metrics/query"
+ end
+
  module Sidekiq
+ # Retrieve runtime statistics from Redis regarding
+ # this Sidekiq cluster.
+ #
+ # stat = Sidekiq::Stats.new
+ # stat.processed
  class Stats
  def initialize
- fetch_stats!
+ fetch_stats_fast!
  end

  def processed
@@ -51,50 +62,34 @@ module Sidekiq
  Sidekiq::Stats::Queues.new.lengths
  end

- def fetch_stats!
+ # O(1) redis calls
+ # @api private
+ def fetch_stats_fast!
  pipe1_res = Sidekiq.redis { |conn|
- conn.pipelined do
- conn.get("stat:processed")
- conn.get("stat:failed")
- conn.zcard("schedule")
- conn.zcard("retry")
- conn.zcard("dead")
- conn.scard("processes")
- conn.lrange("queue:default", -1, -1)
+ conn.pipelined do |pipeline|
+ pipeline.get("stat:processed")
+ pipeline.get("stat:failed")
+ pipeline.zcard("schedule")
+ pipeline.zcard("retry")
+ pipeline.zcard("dead")
+ pipeline.scard("processes")
+ pipeline.lrange("queue:default", -1, -1)
  end
  }

- processes = Sidekiq.redis { |conn|
- conn.sscan_each("processes").to_a
- }
-
- queues = Sidekiq.redis { |conn|
- conn.sscan_each("queues").to_a
- }
-
- pipe2_res = Sidekiq.redis { |conn|
- conn.pipelined do
- processes.each { |key| conn.hget(key, "busy") }
- queues.each { |queue| conn.llen("queue:#{queue}") }
- end
- }
-
- s = processes.size
- workers_size = pipe2_res[0...s].sum(&:to_i)
- enqueued = pipe2_res[s..-1].sum(&:to_i)
-
  default_queue_latency = if (entry = pipe1_res[6].first)
  job = begin
- Sidekiq.load_json(entry)
- rescue
- {}
- end
+ Sidekiq.load_json(entry)
+ rescue
+ {}
+ end
  now = Time.now.to_f
  thence = job["enqueued_at"] || now
  now - thence
  else
  0
  end
+
  @stats = {
  processed: pipe1_res[0].to_i,
  failed: pipe1_res[1].to_i,
@@ -103,12 +98,44 @@ module Sidekiq
  dead_size: pipe1_res[4],
  processes_size: pipe1_res[5],

- default_queue_latency: default_queue_latency,
- workers_size: workers_size,
- enqueued: enqueued
+ default_queue_latency: default_queue_latency
+ }
+ end
+
+ # O(number of processes + number of queues) redis calls
+ # @api private
+ def fetch_stats_slow!
+ processes = Sidekiq.redis { |conn|
+ conn.sscan_each("processes").to_a
+ }
+
+ queues = Sidekiq.redis { |conn|
+ conn.sscan_each("queues").to_a
+ }
+
+ pipe2_res = Sidekiq.redis { |conn|
+ conn.pipelined do |pipeline|
+ processes.each { |key| pipeline.hget(key, "busy") }
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
+ end
  }
+
+ s = processes.size
+ workers_size = pipe2_res[0...s].sum(&:to_i)
+ enqueued = pipe2_res[s..-1].sum(&:to_i)
+
+ @stats[:workers_size] = workers_size
+ @stats[:enqueued] = enqueued
+ @stats
  end

+ # @api private
+ def fetch_stats!
+ fetch_stats_fast!
+ fetch_stats_slow!
+ end
+
+ # @api private
  def reset(*stats)
  all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -126,7 +153,8 @@ module Sidekiq
  private

  def stat(s)
- @stats[s]
+ fetch_stats_slow! if @stats[s].nil?
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
  end

  class Queues
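The Stats hunks above split the old fetch_stats! into an O(1) fetch_stats_fast! (run from the constructor) and a heavier fetch_stats_slow!, with stat(s) falling back to the slow path only when a value is missing. A rough usage sketch based on that code:

    require "sidekiq/api"

    stats = Sidekiq::Stats.new   # constructor now issues only the O(1) pipelined calls
    stats.processed              # answered from the fetch_stats_fast! payload
    stats.enqueued               # not in the fast payload, so stat() lazily runs fetch_stats_slow!
    stats.fetch_stats!           # still available; runs both the fast and slow fetches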
@@ -134,20 +162,22 @@ module Sidekiq
  Sidekiq.redis do |conn|
  queues = conn.sscan_each("queues").to_a

- lengths = conn.pipelined {
+ lengths = conn.pipelined { |pipeline|
  queues.each do |queue|
- conn.llen("queue:#{queue}")
+ pipeline.llen("queue:#{queue}")
  end
  }

  array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
- Hash[array_of_arrays]
+ array_of_arrays.to_h
  end
  end
  end

  class History
  def initialize(days_previous, start_date = nil)
+ # we only store five years of data in Redis
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
  @days_previous = days_previous
  @start_date = start_date || Time.now.utc.to_date
  end
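A recurring change in this file (including the Queues#lengths hunk just above) is passing commands to the pipelined/multi block argument instead of issuing them on the outer connection, a style redis-rb 4.6+ deprecates. A hedged before/after sketch:

    Sidekiq.redis do |conn|
      # old style (deprecated by redis-rb): commands sent via the outer conn
      # conn.pipelined { conn.llen("queue:default") }

      # new style used throughout this diff: commands go to the yielded pipeline
      conn.pipelined do |pipeline|
        pipeline.llen("queue:default")
      end
    end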
@@ -176,7 +206,7 @@ module Sidekiq
  stat_hash[dates[idx]] = value ? value.to_i : 0
  end
  end
- rescue Redis::CommandError
+ rescue RedisConnection.adapter::CommandError
  # mget will trigger a CROSSSLOT error when run against a Cluster
  # TODO Someone want to add Cluster support?
  end
@@ -187,9 +217,10 @@ module Sidekiq
  end

  ##
- # Encapsulates a queue within Sidekiq.
+ # Represents a queue within Sidekiq.
  # Allows enumeration of all jobs within the queue
- # and deletion of jobs.
+ # and deletion of jobs. NB: this queue data is real-time
+ # and is changing within Redis moment by moment.
  #
  # queue = Sidekiq::Queue.new("mailer")
  # queue.each do |job|
@@ -197,29 +228,34 @@ module Sidekiq
  # job.args # => [1, 2, 3]
  # job.delete if job.jid == 'abcdef1234567890'
  # end
- #
  class Queue
  include Enumerable

  ##
- # Return all known queues within Redis.
+ # Fetch all known queues within Redis.
  #
+ # @return [Array<Sidekiq::Queue>]
  def self.all
  Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

+ # @param name [String] the name of the queue
  def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end

+ # The current size of the queue within Redis.
+ # This value is real-time and can change between calls.
+ #
+ # @return [Integer] the size
  def size
  Sidekiq.redis { |con| con.llen(@rname) }
  end

- # Sidekiq Pro overrides this
+ # @return [Boolean] if the queue is currently paused
  def paused?
  false
  end
@@ -228,7 +264,7 @@ module Sidekiq
  # Calculates this queue's latency, the difference in seconds since the oldest
  # job in the queue was enqueued.
  #
- # @return Float
+ # @return [Float] in seconds
  def latency
  entry = Sidekiq.redis { |conn|
  conn.lrange(@rname, -1, -1)
@@ -255,7 +291,7 @@ module Sidekiq
  break if entries.empty?
  page += 1
  entries.each do |entry|
- yield Job.new(entry, @name)
+ yield JobRecord.new(entry, @name)
  end
  deleted_size = initial_size - size
  end
@@ -264,34 +300,54 @@ module Sidekiq
  ##
  # Find the job with the given JID within this queue.
  #
- # This is a slow, inefficient operation. Do not use under
- # normal conditions. Sidekiq Pro contains a faster version.
+ # This is a *slow, inefficient* operation. Do not use under
+ # normal conditions.
+ #
+ # @param jid [String] the job_id to look for
+ # @return [Sidekiq::JobRecord]
+ # @return [nil] if not found
  def find_job(jid)
  detect { |j| j.jid == jid }
  end

+ # delete all jobs within this queue
+ # @return [Boolean] true
  def clear
  Sidekiq.redis do |conn|
- conn.multi do
- conn.unlink(@rname)
- conn.srem("queues", name)
+ conn.multi do |transaction|
+ transaction.unlink(@rname)
+ transaction.srem("queues", [name])
  end
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

  ##
- # Encapsulates a pending job within a Sidekiq queue or
- # sorted set.
+ # Represents a pending job within a Sidekiq queue.
  #
  # The job should be considered immutable but may be
- # removed from the queue via Job#delete.
- #
- class Job
+ # removed from the queue via JobRecord#delete.
+ class JobRecord
+ # the parsed Hash of job data
+ # @!attribute [r] Item
  attr_reader :item
+ # the underlying String in Redis
+ # @!attribute [r] Value
  attr_reader :value
+ # the queue associated with this job
+ # @!attribute [r] Queue
+ attr_reader :queue

+ # :nodoc:
+ # @api private
  def initialize(item, queue_name = nil)
  @args = nil
  @value = item
@@ -299,6 +355,8 @@ module Sidekiq
  @queue = queue_name || @item["queue"]
  end

+ # :nodoc:
+ # @api private
  def parse(item)
  Sidekiq.load_json(item)
  rescue JSON::ParserError
@@ -310,54 +368,62 @@ module Sidekiq
  {}
  end

+ # This is the job class which Sidekiq will execute. If using ActiveJob,
+ # this class will be the ActiveJob adapter class rather than a specific job.
  def klass
  self["class"]
  end

  def display_class
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
- @klass ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], klass) do |target, method, _|
- "#{target}.#{method}"
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_class = @item["wrapped"] || args[0]
- if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
- # MailerClass#mailer_method
- args[0]["arguments"][0..1].join("#")
- else
- job_class
- end
- else
- klass
+ @klass ||= self["display_class"] || begin
+ case klass
+ when /\ASidekiq::Extensions::Delayed/
+ safe_load(args[0], klass) do |target, method, _|
+ "#{target}.#{method}"
+ end
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ job_class = @item["wrapped"] || args[0]
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
+ # MailerClass#mailer_method
+ args[0]["arguments"][0..1].join("#")
+ else
+ job_class
+ end
+ else
+ klass
+ end
  end
  end

  def display_args
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
  @display_args ||= case klass
- when /\ASidekiq::Extensions::Delayed/
- safe_load(args[0], args) do |_, _, arg|
- arg
- end
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self["wrapped"] ? args[0]["arguments"] : []
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3)
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
- # remove MailerClass, mailer_method and 'deliver_now'
- job_args.drop(3).first["args"]
- else
- job_args
- end
- else
- if self["encrypt"]
- # no point in showing 150+ bytes of random garbage
- args[-1] = "[encrypted data]"
- end
- args
+ when /\ASidekiq::Extensions::Delayed/
+ safe_load(args[0], args) do |_, _, arg, kwarg|
+ if !kwarg || kwarg.empty?
+ arg
+ else
+ [arg, kwarg]
+ end
+ end
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first["args"]
+ else
+ job_args
+ end
+ else
+ if self["encrypt"]
+ # no point in showing 150+ bytes of random garbage
+ args[-1] = "[encrypted data]"
+ end
+ args
  end
  end

@@ -391,15 +457,12 @@ module Sidekiq
  end
  end

- attr_reader :queue
-
  def latency
  now = Time.now.to_f
  now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

- ##
- # Remove this job from the queue.
+ # Remove this job from the queue
  def delete
  count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -407,6 +470,7 @@ module Sidekiq
  count != 0
  end

+ # Access arbitrary attributes within the job hash
  def [](name)
  # nil will happen if the JSON fails to parse.
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -421,6 +485,7 @@ module Sidekiq
  rescue => ex
  # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
  # memory yet so the YAML can't be loaded.
+ # TODO is this still necessary? Zeitwerk reloader should handle?
  Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
  default
  end
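The hunks above rename the Sidekiq::Job wrapper for queue entries to Sidekiq::JobRecord and document its accessors. A sketch of iterating a queue with the renamed class; the queue name and jid are placeholders:

    require "sidekiq/api"

    Sidekiq::Queue.new("mailer").each do |job|   # yields Sidekiq::JobRecord instances
      job.klass          # raw class, possibly the ActiveJob adapter wrapper
      job.display_class  # unwrapped, human-friendly name used by the Web UI
      job.item["jid"]    # parsed job hash; also reachable via job["jid"]
      job.delete if job.jid == "abcdef1234567890"
    end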
@@ -443,20 +508,28 @@ module Sidekiq
  end
  end

- class SortedEntry < Job
+ # Represents a job within a Redis sorted set where the score
+ # represents a timestamp associated with the job. This timestamp
+ # could be the scheduled time for it to run (e.g. scheduled set),
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
+ class SortedEntry < JobRecord
  attr_reader :score
  attr_reader :parent

+ # :nodoc:
+ # @api private
  def initialize(parent, score, item)
  super(item)
- @score = score
+ @score = Float(score)
  @parent = parent
  end

+ # The timestamp associated with this entry
  def at
  Time.at(score).utc
  end

+ # remove this entry from the sorted set
  def delete
  if @value
  @parent.delete_by_value(@parent.name, @value)
@@ -465,12 +538,17 @@ module Sidekiq
  end
  end

+ # Change the scheduled time for this job.
+ #
+ # @param at [Time] the new timestamp for this job
  def reschedule(at)
  Sidekiq.redis do |conn|
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
  end
  end

+ # Enqueue this job from the scheduled or dead set so it will
+ # be executed at some point in the near future.
  def add_to_queue
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -478,6 +556,8 @@ module Sidekiq
  end
  end

+ # enqueue this job from the retry set so it will be executed
+ # at some point in the near future.
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
@@ -486,8 +566,7 @@ module Sidekiq
  end
  end

- ##
- # Place job in the dead set
+ # Move this job from its current set into the Dead set.
  def kill
  remove_job do |message|
  DeadSet.new.kill(message)
@@ -502,9 +581,9 @@ module Sidekiq

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi {
- conn.zrangebyscore(parent.name, score, score)
- conn.zremrangebyscore(parent.name, score, score)
+ results = conn.multi { |transaction|
+ transaction.zrangebyscore(parent.name, score, score)
+ transaction.zremrangebyscore(parent.name, score, score)
  }.first

  if results.size == 1
@@ -525,9 +604,9 @@ module Sidekiq
  yield msg if msg

  # push the rest back onto the sorted set
- conn.multi do
+ conn.multi do |transaction|
  nonmatched.each do |message|
- conn.zadd(parent.name, score.to_f.to_s, message)
+ transaction.zadd(parent.name, score.to_f.to_s, message)
  end
  end
  end
@@ -535,20 +614,32 @@ module Sidekiq
  end
  end

+ # Base class for all sorted sets within Sidekiq.
  class SortedSet
  include Enumerable

+ # Redis key of the set
+ # @!attribute [r] Name
  attr_reader :name

+ # :nodoc:
+ # @api private
  def initialize(name)
  @name = name
  @_size = size
  end

+ # real-time size of the set, will change
  def size
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ # Scan through each element of the sorted set, yielding each to the supplied block.
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+ #
+ # @param match [String] a snippet or regexp to filter matches.
+ # @param count [Integer] number of elements to retrieve at a time, default 100
+ # @yieldparam [Sidekiq::SortedEntry] each entry
  def scan(match, count = 100)
  return to_enum(:scan, match, count) unless block_given?

@@ -560,18 +651,32 @@ module Sidekiq
  end
  end

+ # @return [Boolean] always true
  def clear
  Sidekiq.redis do |conn|
  conn.unlink(name)
  end
+ true
  end
  alias_method :💣, :clear
+
+ # :nodoc:
+ # @api private
+ def as_json(options = nil)
+ {name: name} # 5336
+ end
  end

+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+ # e.g. Batches.
  class JobSet < SortedSet
- def schedule(timestamp, message)
+ # Add a job with the associated timestamp to this set.
+ # @param timestamp [Time] the score for the job
+ # @param job [Hash] the job data
+ def schedule(timestamp, job)
  Sidekiq.redis do |conn|
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
  end
  end

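SortedSet#scan is now documented above (SCAN-based, taking a match snippet and a count) and yields Sidekiq::SortedEntry objects. A hedged example of filtering the retry set with it; "MyJob" is a placeholder class name:

    require "sidekiq/api"

    # scan narrows the server-side SCAN with a match pattern, then yields
    # SortedEntry objects whose serialized JSON matches it.
    Sidekiq::RetrySet.new.scan("MyJob") do |entry|
      entry.retry if entry.display_class == "MyJob"
    end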
@@ -585,7 +690,7 @@ module Sidekiq
  range_start = page * page_size + offset_size
  range_end = range_start + page_size - 1
  elements = Sidekiq.redis { |conn|
- conn.zrange name, range_start, range_end, with_scores: true
+ conn.zrange name, range_start, range_end, withscores: true
  }
  break if elements.empty?
  page -= 1
@@ -599,6 +704,10 @@ module Sidekiq
  ##
  # Fetch jobs that match a given time or Range. Job ID is an
  # optional second argument.
+ #
+ # @param score [Time,Range] a specific timestamp or range
+ # @param jid [String, optional] find a specific JID within the score
+ # @return [Array<SortedEntry>] any results found, can be empty
  def fetch(score, jid = nil)
  begin_score, end_score =
  if score.is_a?(Range)
@@ -608,7 +717,7 @@ module Sidekiq
  end

  elements = Sidekiq.redis { |conn|
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
  }

  elements.each_with_object([]) do |element, result|
@@ -620,7 +729,10 @@ module Sidekiq

  ##
  # Find the job with the given JID within this sorted set.
- # This is a slower O(n) operation. Do not use for app logic.
+ # *This is a slow O(n) operation*. Do not use for app logic.
+ #
+ # @param jid [String] the job identifier
+ # @return [SortedEntry] the record or nil
  def find_job(jid)
  Sidekiq.redis do |conn|
  conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
@@ -632,6 +744,8 @@ module Sidekiq
  nil
  end

+ # :nodoc:
+ # @api private
  def delete_by_value(name, value)
  Sidekiq.redis do |conn|
  ret = conn.zrem(name, value)
@@ -640,6 +754,8 @@ module Sidekiq
  end
  end

+ # :nodoc:
+ # @api private
  def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
  elements = conn.zrangebyscore(name, score, score)
@@ -660,10 +776,10 @@ module Sidekiq
  end

  ##
- # Allows enumeration of scheduled jobs within Sidekiq.
+ # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
- # example where I'm selecting all jobs of a certain type
- # and deleting them from the schedule queue.
+ # example where I'm selecting jobs based on some complex logic
+ # and deleting them from the scheduled set.
  #
  # r = Sidekiq::ScheduledSet.new
  # r.select do |scheduled|
@@ -678,7 +794,7 @@ module Sidekiq
  end

  ##
- # Allows enumeration of retries within Sidekiq.
+ # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
@@ -694,30 +810,36 @@ module Sidekiq
  super "retry"
  end

+ # Enqueues all jobs pending within the retry set.
  def retry_all
  each(&:retry) while size > 0
  end

+ # Kills all jobs pending within the retry set.
  def kill_all
  each(&:kill) while size > 0
  end
  end

  ##
- # Allows enumeration of dead jobs within Sidekiq.
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+ # their retries and are helding in this set pending some sort of manual
+ # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
  def initialize
  super "dead"
  end

+ # Add the given job to the Dead set.
+ # @param message [String] the job data as JSON
  def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
- conn.multi do
- conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+ conn.multi do |transaction|
+ transaction.zadd(name, now.to_s, message)
+ transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
+ transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
  end
  end

@@ -732,16 +854,21 @@ module Sidekiq
  true
  end

+ # Enqueue all dead jobs
  def retry_all
  each(&:retry) while size > 0
  end

+ # The maximum size of the Dead set. Older entries will be trimmed
+ # to stay within this limit. Default value is 10,000.
  def self.max_jobs
- Sidekiq.options[:dead_max_jobs]
+ Sidekiq[:dead_max_jobs]
  end

+ # The time limit for entries within the Dead set. Older entries will be thrown away.
+ # Default value is six months.
  def self.timeout
- Sidekiq.options[:dead_timeout_in_seconds]
+ Sidekiq[:dead_timeout_in_seconds]
  end
  end

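The DeadSet hunks read the trimming limits through Sidekiq[...] rather than Sidekiq.options[...]. A small sketch, assuming the bracket accessor reads the same global options hash:

    # Defaults per the comments above: 10,000 entries kept for roughly six months.
    Sidekiq[:dead_max_jobs]            # replaces Sidekiq.options[:dead_max_jobs]
    Sidekiq[:dead_timeout_in_seconds]  # replaces Sidekiq.options[:dead_timeout_in_seconds]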
@@ -750,24 +877,29 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
- # Yields a Sidekiq::Process.
+ # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
  include Enumerable

+ # :nodoc:
+ # @api private
  def initialize(clean_plz = true)
  cleanup if clean_plz
  end

  # Cleans up dead processes recorded in Redis.
  # Returns the number of processes cleaned.
+ # :nodoc:
+ # @api private
  def cleanup
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
  count = 0
  Sidekiq.redis do |conn|
  procs = conn.sscan_each("processes").to_a.sort
- heartbeats = conn.pipelined {
+ heartbeats = conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "info")
+ pipeline.hget(key, "info")
  end
  }

@@ -789,21 +921,25 @@ module Sidekiq
  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- conn.pipelined do
+ conn.pipelined do |pipeline|
  procs.each do |key|
- conn.hmget(key, "info", "busy", "beat", "quiet")
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
  end
  end
  }

- result.each do |info, busy, at_s, quiet|
+ result.each do |info, busy, at_s, quiet, rss, rtt|
  # If a process is stopped between when we query Redis for `procs` and
  # when we query for `result`, we will have an item in `result` that is
  # composed of `nil` values.
  next if info.nil?

  hash = Sidekiq.load_json(info)
- yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
+ yield Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => at_s.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt.to_i))
  end
  end

@@ -811,13 +947,30 @@ module Sidekiq
  # based on current heartbeat. #each does that and ensures the set only
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
+ # @return [Integer] current number of registered Sidekiq processes
  def size
  Sidekiq.redis { |conn| conn.scard("processes") }
  end

+ # Total number of threads available to execute jobs.
+ # For Sidekiq Enterprise customers this number (in production) must be
+ # less than or equal to your licensed concurrency.
+ # @return [Integer] the sum of process concurrency
+ def total_concurrency
+ sum { |x| x["concurrency"].to_i }
+ end
+
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
+ def total_rss_in_kb
+ sum { |x| x["rss"].to_i }
+ end
+ alias_method :total_rss, :total_rss_in_kb
+
  # Returns the identity of the current cluster leader or "" if no leader.
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
  # or Sidekiq Pro.
+ # @return [String] Identity of cluster leader
+ # @return [String] empty string if no leader
  def leader
  @leader ||= begin
  x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -844,6 +997,8 @@ module Sidekiq
  # 'identity' => <unique string identifying the process>,
  # }
  class Process
+ # :nodoc:
+ # @api private
  def initialize(hash)
  @attribs = hash
  end
@@ -864,18 +1019,35 @@ module Sidekiq
  self["identity"]
  end

+ def queues
+ self["queues"]
+ end
+
+ # Signal this process to stop processing new jobs.
+ # It will continue to execute jobs it has already fetched.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to quiet.
  def quiet!
  signal("TSTP")
  end

+ # Signal this process to shutdown.
+ # It will shutdown within its configured :timeout value, default 25 seconds.
+ # This method is *asynchronous* and it can take 5-10
+ # seconds for the process to start shutting down.
  def stop!
  signal("TERM")
  end

+ # Signal this process to log backtraces for all threads.
+ # Useful if you have a frozen or deadlocked process which is
+ # still sending a heartbeat.
+ # This method is *asynchronous* and it can take 5-10 seconds.
  def dump_threads
  signal("TTIN")
  end

+ # @return [Boolean] true if this process is quiet or shutting down
  def stopping?
  self["quiet"] == "true"
  end
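Sidekiq::Process gains a queues reader and documented, asynchronous signal helpers above. A hedged sketch of quieting a subset of processes; the "low" queue name is a placeholder:

    require "sidekiq/api"

    Sidekiq::ProcessSet.new.each do |process|
      # "rss" and "rtt_us" are now part of the heartbeat data merged in #each
      puts "#{process["identity"]} busy=#{process["busy"]} rss=#{process["rss"]}kb"

      process.quiet! if process.queues.include?("low")  # TSTP: stop fetching new jobs
      # process.stop!                                   # TERM: shut down within :timeout
    end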
@@ -885,17 +1057,17 @@ module Sidekiq
  def signal(sig)
  key = "#{identity}-signals"
  Sidekiq.redis do |c|
- c.multi do
- c.lpush(key, sig)
- c.expire(key, 60)
+ c.multi do |transaction|
+ transaction.lpush(key, sig)
+ transaction.expire(key, 60)
  end
  end
  end
  end

  ##
- # A worker is a thread that is currently processing a job.
- # Programmatic access to the current active worker set.
+ # The WorkSet stores the work being done by this Sidekiq cluster.
+ # It tracks the process and thread working on each job.
  #
  # WARNING WARNING WARNING
  #
@@ -903,17 +1075,17 @@ module Sidekiq
  # If you call #size => 5 and then expect #each to be
  # called 5 times, you're going to have a bad time.
  #
- # workers = Sidekiq::Workers.new
- # workers.size => 2
- # workers.each do |process_id, thread_id, work|
+ # works = Sidekiq::WorkSet.new
+ # works.size => 2
+ # works.each do |process_id, thread_id, work|
  # # process_id is a unique identifier per Sidekiq process
  # # thread_id is a unique identifier per thread
  # # work is a Hash which looks like:
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
  # # run_at is an epoch Integer.
  # end
  #
- class Workers
+ class WorkSet
  include Enumerable

  def each(&block)
@@ -921,9 +1093,9 @@ module Sidekiq
  Sidekiq.redis do |conn|
  procs = conn.sscan_each("processes").to_a
  procs.sort.each do |key|
- valid, workers = conn.pipelined {
- conn.exists?(key)
- conn.hgetall("#{key}:workers")
+ valid, workers = conn.pipelined { |pipeline|
+ pipeline.exists?(key)
+ pipeline.hgetall("#{key}:work")
  }
  next unless valid
  workers.each_pair do |tid, json|
@@ -951,13 +1123,17 @@ module Sidekiq
  if procs.empty?
  0
  else
- conn.pipelined {
+ conn.pipelined { |pipeline|
  procs.each do |key|
- conn.hget(key, "busy")
+ pipeline.hget(key, "busy")
  end
  }.sum(&:to_i)
  end
  end
  end
  end
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
+ # Is "worker" a process, a type of job, a thread? Undefined!
+ # WorkSet better describes the data.
+ Workers = WorkSet
  end
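Workers is renamed to WorkSet (with Workers kept as an alias) and ProcessSet gains cluster-wide totals. A short usage sketch of both, based on the hunks above:

    require "sidekiq/api"

    ps = Sidekiq::ProcessSet.new
    ps.total_concurrency   # sum of each process's "concurrency"
    ps.total_rss_in_kb     # sum of reported RSS; also aliased as total_rss

    Sidekiq::WorkSet.new.each do |process_id, thread_id, work|
      work["queue"]    # queue name
      work["payload"]  # the job data for the running job, per the comment above
    end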