sidekiq 6.2.1 → 6.5.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of sidekiq might be problematic. See the release details below for more information.

Files changed (79) hide show
  1. checksums.yaml +4 -4
  2. data/Changes.md +132 -1
  3. data/LICENSE +3 -3
  4. data/README.md +9 -4
  5. data/bin/sidekiq +3 -3
  6. data/bin/sidekiqload +70 -66
  7. data/bin/sidekiqmon +1 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/.DS_Store +0 -0
  13. data/lib/sidekiq/api.rb +192 -135
  14. data/lib/sidekiq/cli.rb +59 -40
  15. data/lib/sidekiq/client.rb +46 -66
  16. data/lib/sidekiq/{util.rb → component.rb} +11 -42
  17. data/lib/sidekiq/delay.rb +3 -1
  18. data/lib/sidekiq/extensions/generic_proxy.rb +4 -2
  19. data/lib/sidekiq/fetch.rb +23 -20
  20. data/lib/sidekiq/job.rb +13 -0
  21. data/lib/sidekiq/job_logger.rb +16 -28
  22. data/lib/sidekiq/job_retry.rb +37 -38
  23. data/lib/sidekiq/job_util.rb +71 -0
  24. data/lib/sidekiq/launcher.rb +67 -65
  25. data/lib/sidekiq/logger.rb +8 -18
  26. data/lib/sidekiq/manager.rb +35 -34
  27. data/lib/sidekiq/middleware/chain.rb +27 -16
  28. data/lib/sidekiq/middleware/current_attributes.rb +61 -0
  29. data/lib/sidekiq/middleware/i18n.rb +6 -4
  30. data/lib/sidekiq/middleware/modules.rb +21 -0
  31. data/lib/sidekiq/monitor.rb +1 -1
  32. data/lib/sidekiq/paginator.rb +8 -8
  33. data/lib/sidekiq/processor.rb +38 -38
  34. data/lib/sidekiq/rails.rb +22 -4
  35. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  36. data/lib/sidekiq/redis_connection.rb +85 -54
  37. data/lib/sidekiq/ring_buffer.rb +29 -0
  38. data/lib/sidekiq/scheduled.rb +60 -24
  39. data/lib/sidekiq/testing/inline.rb +4 -4
  40. data/lib/sidekiq/testing.rb +38 -39
  41. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  42. data/lib/sidekiq/version.rb +1 -1
  43. data/lib/sidekiq/web/action.rb +1 -1
  44. data/lib/sidekiq/web/application.rb +9 -6
  45. data/lib/sidekiq/web/csrf_protection.rb +2 -2
  46. data/lib/sidekiq/web/helpers.rb +14 -26
  47. data/lib/sidekiq/web.rb +6 -5
  48. data/lib/sidekiq/worker.rb +136 -13
  49. data/lib/sidekiq.rb +105 -30
  50. data/sidekiq.gemspec +1 -1
  51. data/web/assets/javascripts/application.js +113 -60
  52. data/web/assets/javascripts/dashboard.js +51 -51
  53. data/web/assets/stylesheets/application-dark.css +28 -45
  54. data/web/assets/stylesheets/application-rtl.css +0 -4
  55. data/web/assets/stylesheets/application.css +24 -237
  56. data/web/locales/ar.yml +8 -2
  57. data/web/locales/en.yml +4 -1
  58. data/web/locales/es.yml +18 -2
  59. data/web/locales/fr.yml +7 -0
  60. data/web/locales/ja.yml +3 -0
  61. data/web/locales/lt.yml +1 -1
  62. data/web/locales/pt-br.yml +27 -9
  63. data/web/views/_footer.erb +1 -1
  64. data/web/views/_job_info.erb +1 -1
  65. data/web/views/_poll_link.erb +2 -5
  66. data/web/views/_summary.erb +7 -7
  67. data/web/views/busy.erb +8 -8
  68. data/web/views/dashboard.erb +22 -14
  69. data/web/views/dead.erb +1 -1
  70. data/web/views/layout.erb +1 -1
  71. data/web/views/morgue.erb +6 -6
  72. data/web/views/queue.erb +10 -10
  73. data/web/views/queues.erb +3 -3
  74. data/web/views/retries.erb +7 -7
  75. data/web/views/retry.erb +1 -1
  76. data/web/views/scheduled.erb +1 -1
  77. metadata +17 -10
  78. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  79. data/lib/sidekiq/exception_handler.rb +0 -27
data/lib/sidekiq/api.rb CHANGED
@@ -8,7 +8,7 @@ require "base64"
8
8
  module Sidekiq
9
9
  class Stats
10
10
  def initialize
11
- fetch_stats!
11
+ fetch_stats_fast!
12
12
  end
13
13
 
14
14
  def processed
@@ -51,38 +51,20 @@ module Sidekiq
51
51
  Sidekiq::Stats::Queues.new.lengths
52
52
  end
53
53
 
54
- def fetch_stats!
54
+ # O(1) redis calls
55
+ def fetch_stats_fast!
55
56
  pipe1_res = Sidekiq.redis { |conn|
56
- conn.pipelined do
57
- conn.get("stat:processed")
58
- conn.get("stat:failed")
59
- conn.zcard("schedule")
60
- conn.zcard("retry")
61
- conn.zcard("dead")
62
- conn.scard("processes")
63
- conn.lrange("queue:default", -1, -1)
64
- end
65
- }
66
-
67
- processes = Sidekiq.redis { |conn|
68
- conn.sscan_each("processes").to_a
69
- }
70
-
71
- queues = Sidekiq.redis { |conn|
72
- conn.sscan_each("queues").to_a
73
- }
74
-
75
- pipe2_res = Sidekiq.redis { |conn|
76
- conn.pipelined do
77
- processes.each { |key| conn.hget(key, "busy") }
78
- queues.each { |queue| conn.llen("queue:#{queue}") }
57
+ conn.pipelined do |pipeline|
58
+ pipeline.get("stat:processed")
59
+ pipeline.get("stat:failed")
60
+ pipeline.zcard("schedule")
61
+ pipeline.zcard("retry")
62
+ pipeline.zcard("dead")
63
+ pipeline.scard("processes")
64
+ pipeline.lrange("queue:default", -1, -1)
79
65
  end
80
66
  }
81
67
 
82
- s = processes.size
83
- workers_size = pipe2_res[0...s].sum(&:to_i)
84
- enqueued = pipe2_res[s..-1].sum(&:to_i)
85
-
86
68
  default_queue_latency = if (entry = pipe1_res[6].first)
87
69
  job = begin
88
70
  Sidekiq.load_json(entry)
@@ -95,6 +77,7 @@ module Sidekiq
95
77
  else
96
78
  0
97
79
  end
80
+
98
81
  @stats = {
99
82
  processed: pipe1_res[0].to_i,
100
83
  failed: pipe1_res[1].to_i,
@@ -103,10 +86,39 @@ module Sidekiq
103
86
  dead_size: pipe1_res[4],
104
87
  processes_size: pipe1_res[5],
105
88
 
106
- default_queue_latency: default_queue_latency,
107
- workers_size: workers_size,
108
- enqueued: enqueued
89
+ default_queue_latency: default_queue_latency
90
+ }
91
+ end
92
+
93
+ # O(number of processes + number of queues) redis calls
94
+ def fetch_stats_slow!
95
+ processes = Sidekiq.redis { |conn|
96
+ conn.sscan_each("processes").to_a
97
+ }
98
+
99
+ queues = Sidekiq.redis { |conn|
100
+ conn.sscan_each("queues").to_a
101
+ }
102
+
103
+ pipe2_res = Sidekiq.redis { |conn|
104
+ conn.pipelined do |pipeline|
105
+ processes.each { |key| pipeline.hget(key, "busy") }
106
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
107
+ end
109
108
  }
109
+
110
+ s = processes.size
111
+ workers_size = pipe2_res[0...s].sum(&:to_i)
112
+ enqueued = pipe2_res[s..-1].sum(&:to_i)
113
+
114
+ @stats[:workers_size] = workers_size
115
+ @stats[:enqueued] = enqueued
116
+ @stats
117
+ end
118
+
119
+ def fetch_stats!
120
+ fetch_stats_fast!
121
+ fetch_stats_slow!
110
122
  end
111
123
 
112
124
  def reset(*stats)
@@ -126,7 +138,8 @@ module Sidekiq
126
138
  private
127
139
 
128
140
  def stat(s)
129
- @stats[s]
141
+ fetch_stats_slow! if @stats[s].nil?
142
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
130
143
  end
131
144
 
132
145
  class Queues
@@ -134,20 +147,22 @@ module Sidekiq
134
147
  Sidekiq.redis do |conn|
135
148
  queues = conn.sscan_each("queues").to_a
136
149
 
137
- lengths = conn.pipelined {
150
+ lengths = conn.pipelined { |pipeline|
138
151
  queues.each do |queue|
139
- conn.llen("queue:#{queue}")
152
+ pipeline.llen("queue:#{queue}")
140
153
  end
141
154
  }
142
155
 
143
156
  array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
144
- Hash[array_of_arrays]
157
+ array_of_arrays.to_h
145
158
  end
146
159
  end
147
160
  end
148
161
 
149
162
  class History
150
163
  def initialize(days_previous, start_date = nil)
164
+ # we only store five years of data in Redis
165
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
151
166
  @days_previous = days_previous
152
167
  @start_date = start_date || Time.now.utc.to_date
153
168
  end
@@ -176,7 +191,7 @@ module Sidekiq
176
191
  stat_hash[dates[idx]] = value ? value.to_i : 0
177
192
  end
178
193
  end
179
- rescue Redis::CommandError
194
+ rescue RedisConnection.adapter::CommandError
180
195
  # mget will trigger a CROSSSLOT error when run against a Cluster
181
196
  # TODO Someone want to add Cluster support?
182
197
  end
@@ -202,24 +217,30 @@ module Sidekiq
202
217
  include Enumerable
203
218
 
204
219
  ##
205
- # Return all known queues within Redis.
220
+ # Fetch all known queues within Redis.
206
221
  #
222
+ # @return [Array<Sidekiq::Queue>]
207
223
  def self.all
208
224
  Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
209
225
  end
210
226
 
211
227
  attr_reader :name
212
228
 
229
+ # @param name [String] the name of the queue
213
230
  def initialize(name = "default")
214
231
  @name = name.to_s
215
232
  @rname = "queue:#{name}"
216
233
  end
217
234
 
235
+ # The current size of the queue within Redis.
236
+ # This value is real-time and can change between calls.
237
+ #
238
+ # @return [Integer] the size
218
239
  def size
219
240
  Sidekiq.redis { |con| con.llen(@rname) }
220
241
  end
221
242
 
222
- # Sidekiq Pro overrides this
243
+ # @return [Boolean] if the queue is currently paused
223
244
  def paused?
224
245
  false
225
246
  end
@@ -228,7 +249,7 @@ module Sidekiq
228
249
  # Calculates this queue's latency, the difference in seconds since the oldest
229
250
  # job in the queue was enqueued.
230
251
  #
231
- # @return Float
252
+ # @return [Float] in seconds
232
253
  def latency
233
254
  entry = Sidekiq.redis { |conn|
234
255
  conn.lrange(@rname, -1, -1)
@@ -255,7 +276,7 @@ module Sidekiq
255
276
  break if entries.empty?
256
277
  page += 1
257
278
  entries.each do |entry|
258
- yield Job.new(entry, @name)
279
+ yield JobRecord.new(entry, @name)
259
280
  end
260
281
  deleted_size = initial_size - size
261
282
  end
@@ -264,21 +285,30 @@ module Sidekiq
264
285
  ##
265
286
  # Find the job with the given JID within this queue.
266
287
  #
267
- # This is a slow, inefficient operation. Do not use under
268
- # normal conditions. Sidekiq Pro contains a faster version.
288
+ # This is a *slow, inefficient* operation. Do not use under
289
+ # normal conditions.
290
+ #
291
+ # @param jid [String] the job_id to look for
292
+ # @return [Sidekiq::JobRecord]
293
+ # @return [nil] if not found
269
294
  def find_job(jid)
270
295
  detect { |j| j.jid == jid }
271
296
  end
272
297
 
298
+ # delete all jobs within this queue
273
299
  def clear
274
300
  Sidekiq.redis do |conn|
275
- conn.multi do
276
- conn.unlink(@rname)
277
- conn.srem("queues", name)
301
+ conn.multi do |transaction|
302
+ transaction.unlink(@rname)
303
+ transaction.srem("queues", name)
278
304
  end
279
305
  end
280
306
  end
281
307
  alias_method :💣, :clear
308
+
309
+ def as_json(options = nil) # :nodoc:
310
+ {name: name} # 5336
311
+ end
282
312
  end
283
313
 
284
314
  ##
@@ -286,20 +316,21 @@ module Sidekiq
286
316
  # sorted set.
287
317
  #
288
318
  # The job should be considered immutable but may be
289
- # removed from the queue via Job#delete.
319
+ # removed from the queue via JobRecord#delete.
290
320
  #
291
- class Job
321
+ class JobRecord
292
322
  attr_reader :item
293
323
  attr_reader :value
324
+ attr_reader :queue
294
325
 
295
- def initialize(item, queue_name = nil)
326
+ def initialize(item, queue_name = nil) # :nodoc:
296
327
  @args = nil
297
328
  @value = item
298
329
  @item = item.is_a?(Hash) ? item : parse(item)
299
330
  @queue = queue_name || @item["queue"]
300
331
  end
301
332
 
302
- def parse(item)
333
+ def parse(item) # :nodoc:
303
334
  Sidekiq.load_json(item)
304
335
  rescue JSON::ParserError
305
336
  # If the job payload in Redis is invalid JSON, we'll load
@@ -316,48 +347,54 @@ module Sidekiq
316
347
 
317
348
  def display_class
318
349
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
319
- @klass ||= case klass
320
- when /\ASidekiq::Extensions::Delayed/
321
- safe_load(args[0], klass) do |target, method, _|
322
- "#{target}.#{method}"
323
- end
324
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
325
- job_class = @item["wrapped"] || args[0]
326
- if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
327
- # MailerClass#mailer_method
328
- args[0]["arguments"][0..1].join("#")
329
- else
330
- job_class
331
- end
332
- else
333
- klass
350
+ @klass ||= self["display_class"] || begin
351
+ case klass
352
+ when /\ASidekiq::Extensions::Delayed/
353
+ safe_load(args[0], klass) do |target, method, _|
354
+ "#{target}.#{method}"
355
+ end
356
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
357
+ job_class = @item["wrapped"] || args[0]
358
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
359
+ # MailerClass#mailer_method
360
+ args[0]["arguments"][0..1].join("#")
361
+ else
362
+ job_class
363
+ end
364
+ else
365
+ klass
366
+ end
334
367
  end
335
368
  end
336
369
 
337
370
  def display_args
338
371
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
339
372
  @display_args ||= case klass
340
- when /\ASidekiq::Extensions::Delayed/
341
- safe_load(args[0], args) do |_, _, arg|
342
- arg
343
- end
344
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
345
- job_args = self["wrapped"] ? args[0]["arguments"] : []
346
- if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
347
- # remove MailerClass, mailer_method and 'deliver_now'
348
- job_args.drop(3)
349
- elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
350
- # remove MailerClass, mailer_method and 'deliver_now'
351
- job_args.drop(3).first["args"]
352
- else
353
- job_args
354
- end
355
- else
356
- if self["encrypt"]
357
- # no point in showing 150+ bytes of random garbage
358
- args[-1] = "[encrypted data]"
359
- end
360
- args
373
+ when /\ASidekiq::Extensions::Delayed/
374
+ safe_load(args[0], args) do |_, _, arg, kwarg|
375
+ if !kwarg || kwarg.empty?
376
+ arg
377
+ else
378
+ [arg, kwarg]
379
+ end
380
+ end
381
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
382
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
383
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
384
+ # remove MailerClass, mailer_method and 'deliver_now'
385
+ job_args.drop(3)
386
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
387
+ # remove MailerClass, mailer_method and 'deliver_now'
388
+ job_args.drop(3).first["args"]
389
+ else
390
+ job_args
391
+ end
392
+ else
393
+ if self["encrypt"]
394
+ # no point in showing 150+ bytes of random garbage
395
+ args[-1] = "[encrypted data]"
396
+ end
397
+ args
361
398
  end
362
399
  end
363
400
 
@@ -391,15 +428,12 @@ module Sidekiq
391
428
  end
392
429
  end
393
430
 
394
- attr_reader :queue
395
-
396
431
  def latency
397
432
  now = Time.now.to_f
398
433
  now - (@item["enqueued_at"] || @item["created_at"] || now)
399
434
  end
400
435
 
401
- ##
402
- # Remove this job from the queue.
436
+ # Remove this job from the queue
403
437
  def delete
404
438
  count = Sidekiq.redis { |conn|
405
439
  conn.lrem("queue:#{@queue}", 1, @value)
@@ -407,6 +441,7 @@ module Sidekiq
407
441
  count != 0
408
442
  end
409
443
 
444
+ # Access arbitrary attributes within the job hash
410
445
  def [](name)
411
446
  # nil will happen if the JSON fails to parse.
412
447
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -421,7 +456,8 @@ module Sidekiq
421
456
  rescue => ex
422
457
  # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
423
458
  # memory yet so the YAML can't be loaded.
424
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
459
+ # TODO is this still necessary? Zeitwerk reloader should handle?
460
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.config[:environment] == "development"
425
461
  default
426
462
  end
427
463
 
@@ -443,13 +479,15 @@ module Sidekiq
443
479
  end
444
480
  end
445
481
 
446
- class SortedEntry < Job
482
+ # Represents a job within a Redis sorted set where the score
483
+ # represents a timestamp for the job.
484
+ class SortedEntry < JobRecord
447
485
  attr_reader :score
448
486
  attr_reader :parent
449
487
 
450
- def initialize(parent, score, item)
488
+ def initialize(parent, score, item) # :nodoc:
451
489
  super(item)
452
- @score = score
490
+ @score = Float(score)
453
491
  @parent = parent
454
492
  end
455
493
 
@@ -465,12 +503,17 @@ module Sidekiq
465
503
  end
466
504
  end
467
505
 
506
+ # Change the scheduled time for this job.
507
+ #
508
+ # @param [Time] the new timestamp when this job will be enqueued.
468
509
  def reschedule(at)
469
510
  Sidekiq.redis do |conn|
470
511
  conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
471
512
  end
472
513
  end
473
514
 
515
+ # Enqueue this job from the scheduled or dead set so it will
516
+ # be executed at some point in the near future.
474
517
  def add_to_queue
475
518
  remove_job do |message|
476
519
  msg = Sidekiq.load_json(message)
@@ -478,6 +521,8 @@ module Sidekiq
478
521
  end
479
522
  end
480
523
 
524
+ # enqueue this job from the retry set so it will be executed
525
+ # at some point in the near future.
481
526
  def retry
482
527
  remove_job do |message|
483
528
  msg = Sidekiq.load_json(message)
@@ -486,8 +531,7 @@ module Sidekiq
486
531
  end
487
532
  end
488
533
 
489
- ##
490
- # Place job in the dead set
534
+ # Move this job from its current set into the Dead set.
491
535
  def kill
492
536
  remove_job do |message|
493
537
  DeadSet.new.kill(message)
@@ -502,9 +546,9 @@ module Sidekiq
502
546
 
503
547
  def remove_job
504
548
  Sidekiq.redis do |conn|
505
- results = conn.multi {
506
- conn.zrangebyscore(parent.name, score, score)
507
- conn.zremrangebyscore(parent.name, score, score)
549
+ results = conn.multi { |transaction|
550
+ transaction.zrangebyscore(parent.name, score, score)
551
+ transaction.zremrangebyscore(parent.name, score, score)
508
552
  }.first
509
553
 
510
554
  if results.size == 1
@@ -525,9 +569,9 @@ module Sidekiq
525
569
  yield msg if msg
526
570
 
527
571
  # push the rest back onto the sorted set
528
- conn.multi do
572
+ conn.multi do |transaction|
529
573
  nonmatched.each do |message|
530
- conn.zadd(parent.name, score.to_f.to_s, message)
574
+ transaction.zadd(parent.name, score.to_f.to_s, message)
531
575
  end
532
576
  end
533
577
  end
@@ -566,6 +610,10 @@ module Sidekiq
566
610
  end
567
611
  end
568
612
  alias_method :💣, :clear
613
+
614
+ def as_json(options = nil) # :nodoc:
615
+ {name: name} # 5336
616
+ end
569
617
  end
570
618
 
571
619
  class JobSet < SortedSet
@@ -585,7 +633,7 @@ module Sidekiq
585
633
  range_start = page * page_size + offset_size
586
634
  range_end = range_start + page_size - 1
587
635
  elements = Sidekiq.redis { |conn|
588
- conn.zrange name, range_start, range_end, with_scores: true
636
+ conn.zrange name, range_start, range_end, withscores: true
589
637
  }
590
638
  break if elements.empty?
591
639
  page -= 1
@@ -608,7 +656,7 @@ module Sidekiq
608
656
  end
609
657
 
610
658
  elements = Sidekiq.redis { |conn|
611
- conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
659
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
612
660
  }
613
661
 
614
662
  elements.each_with_object([]) do |element, result|
@@ -714,10 +762,10 @@ module Sidekiq
714
762
  def kill(message, opts = {})
715
763
  now = Time.now.to_f
716
764
  Sidekiq.redis do |conn|
717
- conn.multi do
718
- conn.zadd(name, now.to_s, message)
719
- conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
720
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
765
+ conn.multi do |transaction|
766
+ transaction.zadd(name, now.to_s, message)
767
+ transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
768
+ transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
721
769
  end
722
770
  end
723
771
 
@@ -737,11 +785,11 @@ module Sidekiq
737
785
  end
738
786
 
739
787
  def self.max_jobs
740
- Sidekiq.options[:dead_max_jobs]
788
+ Sidekiq[:dead_max_jobs]
741
789
  end
742
790
 
743
791
  def self.timeout
744
- Sidekiq.options[:dead_timeout_in_seconds]
792
+ Sidekiq[:dead_timeout_in_seconds]
745
793
  end
746
794
  end
747
795
 
@@ -765,9 +813,9 @@ module Sidekiq
765
813
  count = 0
766
814
  Sidekiq.redis do |conn|
767
815
  procs = conn.sscan_each("processes").to_a.sort
768
- heartbeats = conn.pipelined {
816
+ heartbeats = conn.pipelined { |pipeline|
769
817
  procs.each do |key|
770
- conn.hget(key, "info")
818
+ pipeline.hget(key, "info")
771
819
  end
772
820
  }
773
821
 
@@ -789,9 +837,9 @@ module Sidekiq
789
837
  # We're making a tradeoff here between consuming more memory instead of
790
838
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
791
839
  # you'll be happier this way
792
- conn.pipelined do
840
+ conn.pipelined do |pipeline|
793
841
  procs.each do |key|
794
- conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
842
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
795
843
  end
796
844
  end
797
845
  }
@@ -804,10 +852,10 @@ module Sidekiq
804
852
 
805
853
  hash = Sidekiq.load_json(info)
806
854
  yield Process.new(hash.merge("busy" => busy.to_i,
807
- "beat" => at_s.to_f,
808
- "quiet" => quiet,
809
- "rss" => rss.to_i,
810
- "rtt_us" => rtt.to_i))
855
+ "beat" => at_s.to_f,
856
+ "quiet" => quiet,
857
+ "rss" => rss.to_i,
858
+ "rtt_us" => rtt.to_i))
811
859
  end
812
860
  end
813
861
 
@@ -823,12 +871,13 @@ module Sidekiq
823
871
  # For Sidekiq Enterprise customers this number (in production) must be
824
872
  # less than or equal to your licensed concurrency.
825
873
  def total_concurrency
826
- sum { |x| x["concurrency"] }
874
+ sum { |x| x["concurrency"].to_i }
827
875
  end
828
876
 
829
- def total_rss
830
- sum { |x| x["rss"] || 0 }
877
+ def total_rss_in_kb
878
+ sum { |x| x["rss"].to_i }
831
879
  end
880
+ alias_method :total_rss, :total_rss_in_kb
832
881
 
833
882
  # Returns the identity of the current cluster leader or "" if no leader.
834
883
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
@@ -879,6 +928,10 @@ module Sidekiq
879
928
  self["identity"]
880
929
  end
881
930
 
931
+ def queues
932
+ self["queues"]
933
+ end
934
+
882
935
  def quiet!
883
936
  signal("TSTP")
884
937
  end
@@ -900,17 +953,17 @@ module Sidekiq
900
953
  def signal(sig)
901
954
  key = "#{identity}-signals"
902
955
  Sidekiq.redis do |c|
903
- c.multi do
904
- c.lpush(key, sig)
905
- c.expire(key, 60)
956
+ c.multi do |transaction|
957
+ transaction.lpush(key, sig)
958
+ transaction.expire(key, 60)
906
959
  end
907
960
  end
908
961
  end
909
962
  end
910
963
 
911
964
  ##
912
- # A worker is a thread that is currently processing a job.
913
- # Programmatic access to the current active worker set.
965
+ # The WorkSet stores the work being done by this Sidekiq cluster.
966
+ # It tracks the process and thread working on each job.
914
967
  #
915
968
  # WARNING WARNING WARNING
916
969
  #
@@ -918,17 +971,17 @@ module Sidekiq
918
971
  # If you call #size => 5 and then expect #each to be
919
972
  # called 5 times, you're going to have a bad time.
920
973
  #
921
- # workers = Sidekiq::Workers.new
922
- # workers.size => 2
923
- # workers.each do |process_id, thread_id, work|
974
+ # works = Sidekiq::WorkSet.new
975
+ # works.size => 2
976
+ # works.each do |process_id, thread_id, work|
924
977
  # # process_id is a unique identifier per Sidekiq process
925
978
  # # thread_id is a unique identifier per thread
926
979
  # # work is a Hash which looks like:
927
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
980
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
928
981
  # # run_at is an epoch Integer.
929
982
  # end
930
983
  #
931
- class Workers
984
+ class WorkSet
932
985
  include Enumerable
933
986
 
934
987
  def each(&block)
@@ -936,9 +989,9 @@ module Sidekiq
936
989
  Sidekiq.redis do |conn|
937
990
  procs = conn.sscan_each("processes").to_a
938
991
  procs.sort.each do |key|
939
- valid, workers = conn.pipelined {
940
- conn.exists?(key)
941
- conn.hgetall("#{key}:workers")
992
+ valid, workers = conn.pipelined { |pipeline|
993
+ pipeline.exists?(key)
994
+ pipeline.hgetall("#{key}:work")
942
995
  }
943
996
  next unless valid
944
997
  workers.each_pair do |tid, json|
@@ -966,13 +1019,17 @@ module Sidekiq
966
1019
  if procs.empty?
967
1020
  0
968
1021
  else
969
- conn.pipelined {
1022
+ conn.pipelined { |pipeline|
970
1023
  procs.each do |key|
971
- conn.hget(key, "busy")
1024
+ pipeline.hget(key, "busy")
972
1025
  end
973
1026
  }.sum(&:to_i)
974
1027
  end
975
1028
  end
976
1029
  end
977
1030
  end
1031
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
1032
+ # Is "worker" a process, a type of job, a thread? Undefined!
1033
+ # WorkSet better describes the data.
1034
+ Workers = WorkSet
978
1035
  end