sidekiq 6.0.7 → 6.5.1

Potentially problematic release. This version of sidekiq might be problematic.

Files changed (108)
  1. checksums.yaml +4 -4
  2. data/Changes.md +213 -2
  3. data/LICENSE +3 -3
  4. data/README.md +11 -10
  5. data/bin/sidekiq +8 -3
  6. data/bin/sidekiqload +70 -66
  7. data/bin/sidekiqmon +1 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/.DS_Store +0 -0
  13. data/lib/sidekiq/api.rb +211 -136
  14. data/lib/sidekiq/cli.rb +81 -46
  15. data/lib/sidekiq/client.rb +52 -71
  16. data/lib/sidekiq/{util.rb → component.rb} +11 -14
  17. data/lib/sidekiq/delay.rb +3 -1
  18. data/lib/sidekiq/extensions/action_mailer.rb +3 -2
  19. data/lib/sidekiq/extensions/active_record.rb +4 -3
  20. data/lib/sidekiq/extensions/class_methods.rb +5 -4
  21. data/lib/sidekiq/extensions/generic_proxy.rb +4 -2
  22. data/lib/sidekiq/fetch.rb +42 -31
  23. data/lib/sidekiq/job.rb +13 -0
  24. data/lib/sidekiq/job_logger.rb +16 -28
  25. data/lib/sidekiq/job_retry.rb +38 -38
  26. data/lib/sidekiq/job_util.rb +71 -0
  27. data/lib/sidekiq/launcher.rb +123 -63
  28. data/lib/sidekiq/logger.rb +11 -20
  29. data/lib/sidekiq/manager.rb +35 -34
  30. data/lib/sidekiq/middleware/chain.rb +28 -17
  31. data/lib/sidekiq/middleware/current_attributes.rb +61 -0
  32. data/lib/sidekiq/middleware/i18n.rb +6 -4
  33. data/lib/sidekiq/middleware/modules.rb +21 -0
  34. data/lib/sidekiq/monitor.rb +1 -1
  35. data/lib/sidekiq/paginator.rb +8 -8
  36. data/lib/sidekiq/processor.rb +41 -41
  37. data/lib/sidekiq/rails.rb +38 -22
  38. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  39. data/lib/sidekiq/redis_connection.rb +87 -53
  40. data/lib/sidekiq/ring_buffer.rb +29 -0
  41. data/lib/sidekiq/scheduled.rb +60 -24
  42. data/lib/sidekiq/sd_notify.rb +1 -1
  43. data/lib/sidekiq/testing/inline.rb +4 -4
  44. data/lib/sidekiq/testing.rb +39 -40
  45. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  46. data/lib/sidekiq/version.rb +1 -1
  47. data/lib/sidekiq/web/action.rb +2 -2
  48. data/lib/sidekiq/web/application.rb +21 -12
  49. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  50. data/lib/sidekiq/web/helpers.rb +40 -34
  51. data/lib/sidekiq/web/router.rb +5 -2
  52. data/lib/sidekiq/web.rb +36 -72
  53. data/lib/sidekiq/worker.rb +136 -16
  54. data/lib/sidekiq.rb +107 -30
  55. data/sidekiq.gemspec +11 -4
  56. data/web/assets/images/apple-touch-icon.png +0 -0
  57. data/web/assets/javascripts/application.js +113 -65
  58. data/web/assets/javascripts/dashboard.js +51 -51
  59. data/web/assets/stylesheets/application-dark.css +64 -43
  60. data/web/assets/stylesheets/application-rtl.css +0 -4
  61. data/web/assets/stylesheets/application.css +42 -239
  62. data/web/locales/ar.yml +8 -2
  63. data/web/locales/en.yml +4 -1
  64. data/web/locales/es.yml +18 -2
  65. data/web/locales/fr.yml +8 -1
  66. data/web/locales/ja.yml +3 -0
  67. data/web/locales/lt.yml +1 -1
  68. data/web/locales/pl.yml +4 -4
  69. data/web/locales/pt-br.yml +27 -9
  70. data/web/locales/ru.yml +4 -0
  71. data/web/views/_footer.erb +1 -1
  72. data/web/views/_job_info.erb +1 -1
  73. data/web/views/_poll_link.erb +2 -5
  74. data/web/views/_summary.erb +7 -7
  75. data/web/views/busy.erb +51 -20
  76. data/web/views/dashboard.erb +22 -14
  77. data/web/views/dead.erb +1 -1
  78. data/web/views/layout.erb +2 -1
  79. data/web/views/morgue.erb +6 -6
  80. data/web/views/queue.erb +11 -11
  81. data/web/views/queues.erb +4 -4
  82. data/web/views/retries.erb +7 -7
  83. data/web/views/retry.erb +1 -1
  84. data/web/views/scheduled.erb +1 -1
  85. metadata +30 -51
  86. data/.circleci/config.yml +0 -60
  87. data/.github/contributing.md +0 -32
  88. data/.github/issue_template.md +0 -11
  89. data/.gitignore +0 -13
  90. data/.standard.yml +0 -20
  91. data/3.0-Upgrade.md +0 -70
  92. data/4.0-Upgrade.md +0 -53
  93. data/5.0-Upgrade.md +0 -56
  94. data/6.0-Upgrade.md +0 -72
  95. data/COMM-LICENSE +0 -97
  96. data/Ent-2.0-Upgrade.md +0 -37
  97. data/Ent-Changes.md +0 -256
  98. data/Gemfile +0 -24
  99. data/Gemfile.lock +0 -208
  100. data/Pro-2.0-Upgrade.md +0 -138
  101. data/Pro-3.0-Upgrade.md +0 -44
  102. data/Pro-4.0-Upgrade.md +0 -35
  103. data/Pro-5.0-Upgrade.md +0 -25
  104. data/Pro-Changes.md +0 -782
  105. data/Rakefile +0 -10
  106. data/code_of_conduct.md +0 -50
  107. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  108. data/lib/sidekiq/exception_handler.rb +0 -27
data/lib/sidekiq/api.rb CHANGED
@@ -8,7 +8,7 @@ require "base64"
 module Sidekiq
   class Stats
     def initialize
-      fetch_stats!
+      fetch_stats_fast!
     end

     def processed
@@ -51,50 +51,33 @@ module Sidekiq
       Sidekiq::Stats::Queues.new.lengths
     end

-    def fetch_stats!
+    # O(1) redis calls
+    def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          conn.get("stat:processed")
-          conn.get("stat:failed")
-          conn.zcard("schedule")
-          conn.zcard("retry")
-          conn.zcard("dead")
-          conn.scard("processes")
-          conn.lrange("queue:default", -1, -1)
-        end
-      }
-
-      processes = Sidekiq.redis { |conn|
-        conn.sscan_each("processes").to_a
-      }
-
-      queues = Sidekiq.redis { |conn|
-        conn.sscan_each("queues").to_a
-      }
-
-      pipe2_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          processes.each { |key| conn.hget(key, "busy") }
-          queues.each { |queue| conn.llen("queue:#{queue}") }
+        conn.pipelined do |pipeline|
+          pipeline.get("stat:processed")
+          pipeline.get("stat:failed")
+          pipeline.zcard("schedule")
+          pipeline.zcard("retry")
+          pipeline.zcard("dead")
+          pipeline.scard("processes")
+          pipeline.lrange("queue:default", -1, -1)
         end
       }

-      s = processes.size
-      workers_size = pipe2_res[0...s].sum(&:to_i)
-      enqueued = pipe2_res[s..-1].sum(&:to_i)
-
       default_queue_latency = if (entry = pipe1_res[6].first)
         job = begin
-                Sidekiq.load_json(entry)
-              rescue
-                {}
-              end
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
         now = Time.now.to_f
         thence = job["enqueued_at"] || now
         now - thence
       else
         0
       end
+
       @stats = {
         processed: pipe1_res[0].to_i,
         failed: pipe1_res[1].to_i,
@@ -103,10 +86,39 @@ module Sidekiq
         dead_size: pipe1_res[4],
         processes_size: pipe1_res[5],

-        default_queue_latency: default_queue_latency,
-        workers_size: workers_size,
-        enqueued: enqueued
+        default_queue_latency: default_queue_latency
+      }
+    end
+
+    # O(number of processes + number of queues) redis calls
+    def fetch_stats_slow!
+      processes = Sidekiq.redis { |conn|
+        conn.sscan_each("processes").to_a
+      }
+
+      queues = Sidekiq.redis { |conn|
+        conn.sscan_each("queues").to_a
       }
+
+      pipe2_res = Sidekiq.redis { |conn|
+        conn.pipelined do |pipeline|
+          processes.each { |key| pipeline.hget(key, "busy") }
+          queues.each { |queue| pipeline.llen("queue:#{queue}") }
+        end
+      }
+
+      s = processes.size
+      workers_size = pipe2_res[0...s].sum(&:to_i)
+      enqueued = pipe2_res[s..-1].sum(&:to_i)
+
+      @stats[:workers_size] = workers_size
+      @stats[:enqueued] = enqueued
+      @stats
+    end
+
+    def fetch_stats!
+      fetch_stats_fast!
+      fetch_stats_slow!
     end

     def reset(*stats)
@@ -126,7 +138,8 @@ module Sidekiq
     private

     def stat(s)
-      @stats[s]
+      fetch_stats_slow! if @stats[s].nil?
+      @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end

     class Queues
@@ -134,20 +147,22 @@ module Sidekiq
         Sidekiq.redis do |conn|
           queues = conn.sscan_each("queues").to_a

-          lengths = conn.pipelined {
+          lengths = conn.pipelined { |pipeline|
             queues.each do |queue|
-              conn.llen("queue:#{queue}")
+              pipeline.llen("queue:#{queue}")
             end
           }

           array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
-          Hash[array_of_arrays]
+          array_of_arrays.to_h
         end
       end
     end

     class History
       def initialize(days_previous, start_date = nil)
+        # we only store five years of data in Redis
+        raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
         @start_date = start_date || Time.now.utc.to_date
       end
@@ -176,7 +191,7 @@ module Sidekiq
               stat_hash[dates[idx]] = value ? value.to_i : 0
             end
           end
-        rescue Redis::CommandError
+        rescue RedisConnection.adapter::CommandError
           # mget will trigger a CROSSSLOT error when run against a Cluster
           # TODO Someone want to add Cluster support?
         end
@@ -202,24 +217,30 @@ module Sidekiq
     include Enumerable

     ##
-    # Return all known queues within Redis.
+    # Fetch all known queues within Redis.
     #
+    # @return [Array<Sidekiq::Queue>]
     def self.all
       Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end

     attr_reader :name

+    # @param name [String] the name of the queue
     def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end

+    # The current size of the queue within Redis.
+    # This value is real-time and can change between calls.
+    #
+    # @return [Integer] the size
     def size
       Sidekiq.redis { |con| con.llen(@rname) }
     end

-    # Sidekiq Pro overrides this
+    # @return [Boolean] if the queue is currently paused
     def paused?
       false
     end
@@ -228,7 +249,7 @@ module Sidekiq
     # Calculates this queue's latency, the difference in seconds since the oldest
     # job in the queue was enqueued.
     #
-    # @return Float
+    # @return [Float] in seconds
     def latency
       entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
@@ -255,7 +276,7 @@ module Sidekiq
         break if entries.empty?
         page += 1
         entries.each do |entry|
-          yield Job.new(entry, @name)
+          yield JobRecord.new(entry, @name)
         end
         deleted_size = initial_size - size
       end
@@ -264,21 +285,30 @@ module Sidekiq
     ##
     # Find the job with the given JID within this queue.
     #
-    # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # This is a *slow, inefficient* operation. Do not use under
+    # normal conditions.
+    #
+    # @param jid [String] the job_id to look for
+    # @return [Sidekiq::JobRecord]
+    # @return [nil] if not found
     def find_job(jid)
       detect { |j| j.jid == jid }
     end

+    # delete all jobs within this queue
     def clear
       Sidekiq.redis do |conn|
-        conn.multi do
-          conn.unlink(@rname)
-          conn.srem("queues", name)
+        conn.multi do |transaction|
+          transaction.unlink(@rname)
+          transaction.srem("queues", name)
         end
       end
     end
     alias_method :💣, :clear
+
+    def as_json(options = nil) # :nodoc:
+      {name: name} # 5336
+    end
   end

   ##
@@ -286,20 +316,21 @@ module Sidekiq
   # sorted set.
   #
   # The job should be considered immutable but may be
-  # removed from the queue via Job#delete.
+  # removed from the queue via JobRecord#delete.
   #
-  class Job
+  class JobRecord
     attr_reader :item
     attr_reader :value
+    attr_reader :queue

-    def initialize(item, queue_name = nil)
+    def initialize(item, queue_name = nil) # :nodoc:
      @args = nil
       @value = item
       @item = item.is_a?(Hash) ? item : parse(item)
       @queue = queue_name || @item["queue"]
     end

-    def parse(item)
+    def parse(item) # :nodoc:
       Sidekiq.load_json(item)
     rescue JSON::ParserError
       # If the job payload in Redis is invalid JSON, we'll load
@@ -316,48 +347,54 @@ module Sidekiq

     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @klass ||= case klass
-                 when /\ASidekiq::Extensions::Delayed/
-                   safe_load(args[0], klass) do |target, method, _|
-                     "#{target}.#{method}"
-                   end
-                 when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-                   job_class = @item["wrapped"] || args[0]
-                   if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
-                     # MailerClass#mailer_method
-                     args[0]["arguments"][0..1].join("#")
-                   else
-                     job_class
-                   end
-                 else
-                   klass
+      @klass ||= self["display_class"] || begin
+        case klass
+        when /\ASidekiq::Extensions::Delayed/
+          safe_load(args[0], klass) do |target, method, _|
+            "#{target}.#{method}"
+          end
+        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+          job_class = @item["wrapped"] || args[0]
+          if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
+            # MailerClass#mailer_method
+            args[0]["arguments"][0..1].join("#")
+          else
+            job_class
+          end
+        else
+          klass
+        end
       end
     end

     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
       @display_args ||= case klass
-                        when /\ASidekiq::Extensions::Delayed/
-                          safe_load(args[0], args) do |_, _, arg|
-                            arg
-                          end
-                        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-                          job_args = self["wrapped"] ? args[0]["arguments"] : []
-                          if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
-                            # remove MailerClass, mailer_method and 'deliver_now'
-                            job_args.drop(3)
-                          elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
-                            # remove MailerClass, mailer_method and 'deliver_now'
-                            job_args.drop(3).first["args"]
-                          else
-                            job_args
-                          end
-                        else
-                          if self["encrypt"]
-                            # no point in showing 150+ bytes of random garbage
-                            args[-1] = "[encrypted data]"
-                          end
-                          args
+      when /\ASidekiq::Extensions::Delayed/
+        safe_load(args[0], args) do |_, _, arg, kwarg|
+          if !kwarg || kwarg.empty?
+            arg
+          else
+            [arg, kwarg]
+          end
+        end
+      when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3)
+        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3).first["args"]
+        else
+          job_args
+        end
+      else
+        if self["encrypt"]
+          # no point in showing 150+ bytes of random garbage
+          args[-1] = "[encrypted data]"
+        end
+        args
       end
     end

@@ -391,15 +428,12 @@ module Sidekiq
       end
     end

-    attr_reader :queue
-
     def latency
       now = Time.now.to_f
       now - (@item["enqueued_at"] || @item["created_at"] || now)
     end

-    ##
-    # Remove this job from the queue.
+    # Remove this job from the queue
     def delete
       count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
@@ -407,6 +441,7 @@ module Sidekiq
       count != 0
     end

+    # Access arbitrary attributes within the job hash
     def [](name)
       # nil will happen if the JSON fails to parse.
       # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -421,7 +456,8 @@ module Sidekiq
     rescue => ex
       # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
       # memory yet so the YAML can't be loaded.
-      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+      # TODO is this still necessary? Zeitwerk reloader should handle?
+      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.config[:environment] == "development"
       default
     end

@@ -443,13 +479,15 @@ module Sidekiq
     end
   end

-  class SortedEntry < Job
+  # Represents a job within a Redis sorted set where the score
+  # represents a timestamp for the job.
+  class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent

-    def initialize(parent, score, item)
+    def initialize(parent, score, item) # :nodoc:
       super(item)
-      @score = score
+      @score = Float(score)
       @parent = parent
     end

@@ -465,12 +503,17 @@ module Sidekiq
       end
     end

+    # Change the scheduled time for this job.
+    #
+    # @param [Time] the new timestamp when this job will be enqueued.
     def reschedule(at)
       Sidekiq.redis do |conn|
         conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
       end
     end

+    # Enqueue this job from the scheduled or dead set so it will
+    # be executed at some point in the near future.
     def add_to_queue
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -478,6 +521,8 @@ module Sidekiq
       end
     end

+    # enqueue this job from the retry set so it will be executed
+    # at some point in the near future.
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -486,8 +531,7 @@ module Sidekiq
       end
     end

-    ##
-    # Place job in the dead set
+    # Move this job from its current set into the Dead set.
     def kill
       remove_job do |message|
         DeadSet.new.kill(message)
@@ -502,9 +546,9 @@ module Sidekiq

     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi {
-          conn.zrangebyscore(parent.name, score, score)
-          conn.zremrangebyscore(parent.name, score, score)
+        results = conn.multi { |transaction|
+          transaction.zrangebyscore(parent.name, score, score)
+          transaction.zremrangebyscore(parent.name, score, score)
         }.first

         if results.size == 1
@@ -525,9 +569,9 @@ module Sidekiq
           yield msg if msg

           # push the rest back onto the sorted set
-          conn.multi do
+          conn.multi do |transaction|
             nonmatched.each do |message|
-              conn.zadd(parent.name, score.to_f.to_s, message)
+              transaction.zadd(parent.name, score.to_f.to_s, message)
             end
           end
         end
@@ -566,6 +610,10 @@ module Sidekiq
       end
     end
     alias_method :💣, :clear
+
+    def as_json(options = nil) # :nodoc:
+      {name: name} # 5336
+    end
   end

   class JobSet < SortedSet
@@ -585,7 +633,7 @@ module Sidekiq
         range_start = page * page_size + offset_size
         range_end = range_start + page_size - 1
         elements = Sidekiq.redis { |conn|
-          conn.zrange name, range_start, range_end, with_scores: true
+          conn.zrange name, range_start, range_end, withscores: true
         }
         break if elements.empty?
         page -= 1
@@ -608,7 +656,7 @@ module Sidekiq
       end

       elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+        conn.zrangebyscore(name, begin_score, end_score, withscores: true)
       }

       elements.each_with_object([]) do |element, result|
@@ -714,10 +762,10 @@ module Sidekiq
     def kill(message, opts = {})
       now = Time.now.to_f
       Sidekiq.redis do |conn|
-        conn.multi do
-          conn.zadd(name, now.to_s, message)
-          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
-          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+        conn.multi do |transaction|
+          transaction.zadd(name, now.to_s, message)
+          transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
+          transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
         end
       end

@@ -737,11 +785,11 @@ module Sidekiq
     end

     def self.max_jobs
-      Sidekiq.options[:dead_max_jobs]
+      Sidekiq[:dead_max_jobs]
     end

     def self.timeout
-      Sidekiq.options[:dead_timeout_in_seconds]
+      Sidekiq[:dead_timeout_in_seconds]
     end
   end

@@ -765,9 +813,9 @@ module Sidekiq
       count = 0
       Sidekiq.redis do |conn|
         procs = conn.sscan_each("processes").to_a.sort
-        heartbeats = conn.pipelined {
+        heartbeats = conn.pipelined { |pipeline|
           procs.each do |key|
-            conn.hget(key, "info")
+            pipeline.hget(key, "info")
           end
         }

@@ -789,21 +837,25 @@ module Sidekiq
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-        conn.pipelined do
+        conn.pipelined do |pipeline|
           procs.each do |key|
-            conn.hmget(key, "info", "busy", "beat", "quiet")
+            pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
           end
         end
       }

-      result.each do |info, busy, at_s, quiet|
+      result.each do |info, busy, at_s, quiet, rss, rtt|
         # If a process is stopped between when we query Redis for `procs` and
         # when we query for `result`, we will have an item in `result` that is
         # composed of `nil` values.
         next if info.nil?

         hash = Sidekiq.load_json(info)
-        yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
+        yield Process.new(hash.merge("busy" => busy.to_i,
+          "beat" => at_s.to_f,
+          "quiet" => quiet,
+          "rss" => rss.to_i,
+          "rtt_us" => rtt.to_i))
       end
     end

@@ -815,6 +867,18 @@ module Sidekiq
       Sidekiq.redis { |conn| conn.scard("processes") }
     end

+    # Total number of threads available to execute jobs.
+    # For Sidekiq Enterprise customers this number (in production) must be
+    # less than or equal to your licensed concurrency.
+    def total_concurrency
+      sum { |x| x["concurrency"].to_i }
+    end
+
+    def total_rss_in_kb
+      sum { |x| x["rss"].to_i }
+    end
+    alias_method :total_rss, :total_rss_in_kb
+
     # Returns the identity of the current cluster leader or "" if no leader.
     # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
     # or Sidekiq Pro.
@@ -864,6 +928,10 @@ module Sidekiq
       self["identity"]
     end

+    def queues
+      self["queues"]
+    end
+
     def quiet!
       signal("TSTP")
     end
@@ -885,17 +953,17 @@ module Sidekiq
     def signal(sig)
       key = "#{identity}-signals"
       Sidekiq.redis do |c|
-        c.multi do
-          c.lpush(key, sig)
-          c.expire(key, 60)
+        c.multi do |transaction|
+          transaction.lpush(key, sig)
+          transaction.expire(key, 60)
         end
       end
     end
   end

   ##
-  # A worker is a thread that is currently processing a job.
-  # Programmatic access to the current active worker set.
+  # The WorkSet stores the work being done by this Sidekiq cluster.
+  # It tracks the process and thread working on each job.
   #
   # WARNING WARNING WARNING
   #
@@ -903,26 +971,27 @@ module Sidekiq
   # If you call #size => 5 and then expect #each to be
   # called 5 times, you're going to have a bad time.
   #
-  #   workers = Sidekiq::Workers.new
-  #   workers.size => 2
-  #   workers.each do |process_id, thread_id, work|
+  #   works = Sidekiq::WorkSet.new
+  #   works.size => 2
+  #   works.each do |process_id, thread_id, work|
   #     # process_id is a unique identifier per Sidekiq process
   #     # thread_id is a unique identifier per thread
   #     # work is a Hash which looks like:
-  #     # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
+  #     # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
   #     # run_at is an epoch Integer.
  #   end
   #
-  class Workers
+  class WorkSet
     include Enumerable

-    def each
+    def each(&block)
+      results = []
       Sidekiq.redis do |conn|
         procs = conn.sscan_each("processes").to_a
         procs.sort.each do |key|
-          valid, workers = conn.pipelined {
-            conn.exists(key)
-            conn.hgetall("#{key}:workers")
+          valid, workers = conn.pipelined { |pipeline|
+            pipeline.exists?(key)
+            pipeline.hgetall("#{key}:work")
           }
           next unless valid
           workers.each_pair do |tid, json|
@@ -930,10 +999,12 @@ module Sidekiq
             p = hsh["payload"]
             # avoid breaking API, this is a side effect of the JSON optimization in #4316
             hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
-            yield key, tid, hsh
+            results << [key, tid, hsh]
           end
         end
       end
+
+      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
     end

     # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -948,13 +1019,17 @@ module Sidekiq
         if procs.empty?
           0
         else
-          conn.pipelined {
+          conn.pipelined { |pipeline|
             procs.each do |key|
-              conn.hget(key, "busy")
+              pipeline.hget(key, "busy")
             end
           }.sum(&:to_i)
         end
       end
     end
   end
+  # Since "worker" is a nebulous term, we've deprecated the use of this class name.
+  # Is "worker" a process, a type of job, a thread? Undefined!
+  # WorkSet better describes the data.
+  Workers = WorkSet
 end
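
Taken together, the api.rb changes above rename Sidekiq::Job to Sidekiq::JobRecord and Sidekiq::Workers to Sidekiq::WorkSet, split Stats#fetch_stats! into an O(1) fetch_stats_fast! plus a lazier fetch_stats_slow!, and add total_concurrency/total_rss to ProcessSet. A minimal console sketch of the resulting API, assuming Sidekiq 6.5.1 with a reachable Redis; the "default" queue name is only an example:

require "sidekiq/api"

# Stats.new now issues only the O(1) fetch_stats_fast! calls;
# per-queue/per-process numbers are filled in lazily by fetch_stats_slow!.
stats = Sidekiq::Stats.new
puts stats.processed   # available from the fast path
puts stats.enqueued    # first access triggers fetch_stats_slow!

q = Sidekiq::Queue.new("default")
puts "#{q.size} jobs, #{q.latency.round(2)}s latency"

# Workers is kept as an alias for WorkSet; #each now yields entries sorted by run_at.
Sidekiq::WorkSet.new.each do |process_id, thread_id, work|
  puts "#{process_id} #{thread_id} #{work["payload"]["class"]}"  # payload is the parsed job hash
end

ps = Sidekiq::ProcessSet.new
puts "#{ps.total_concurrency} worker threads, #{ps.total_rss} KB RSS across the cluster"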