sidekiq 5.0.0 → 6.0.0

Note: this version of sidekiq has been flagged as a potentially problematic release.

Files changed (79)
  1. checksums.yaml +5 -5
  2. data/.circleci/config.yml +61 -0
  3. data/.github/issue_template.md +3 -1
  4. data/.gitignore +1 -1
  5. data/.standard.yml +20 -0
  6. data/6.0-Upgrade.md +70 -0
  7. data/COMM-LICENSE +12 -10
  8. data/Changes.md +169 -1
  9. data/Ent-2.0-Upgrade.md +37 -0
  10. data/Ent-Changes.md +76 -0
  11. data/Gemfile +16 -21
  12. data/Gemfile.lock +196 -0
  13. data/LICENSE +1 -1
  14. data/Pro-4.0-Upgrade.md +35 -0
  15. data/Pro-5.0-Upgrade.md +25 -0
  16. data/Pro-Changes.md +137 -1
  17. data/README.md +18 -30
  18. data/Rakefile +6 -8
  19. data/bin/sidekiqload +28 -24
  20. data/bin/sidekiqmon +9 -0
  21. data/lib/generators/sidekiq/templates/worker_spec.rb.erb +1 -1
  22. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  23. data/lib/generators/sidekiq/worker_generator.rb +12 -14
  24. data/lib/sidekiq.rb +69 -49
  25. data/lib/sidekiq/api.rb +216 -160
  26. data/lib/sidekiq/cli.rb +174 -207
  27. data/lib/sidekiq/client.rb +55 -51
  28. data/lib/sidekiq/delay.rb +24 -4
  29. data/lib/sidekiq/exception_handler.rb +12 -16
  30. data/lib/sidekiq/extensions/action_mailer.rb +10 -20
  31. data/lib/sidekiq/extensions/active_record.rb +9 -7
  32. data/lib/sidekiq/extensions/class_methods.rb +9 -7
  33. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  34. data/lib/sidekiq/fetch.rb +5 -6
  35. data/lib/sidekiq/job_logger.rb +42 -14
  36. data/lib/sidekiq/job_retry.rb +71 -57
  37. data/lib/sidekiq/launcher.rb +74 -60
  38. data/lib/sidekiq/logger.rb +69 -0
  39. data/lib/sidekiq/manager.rb +12 -15
  40. data/lib/sidekiq/middleware/chain.rb +3 -2
  41. data/lib/sidekiq/middleware/i18n.rb +5 -7
  42. data/lib/sidekiq/monitor.rb +148 -0
  43. data/lib/sidekiq/paginator.rb +11 -12
  44. data/lib/sidekiq/processor.rb +126 -82
  45. data/lib/sidekiq/rails.rb +24 -32
  46. data/lib/sidekiq/redis_connection.rb +46 -14
  47. data/lib/sidekiq/scheduled.rb +50 -25
  48. data/lib/sidekiq/testing.rb +35 -27
  49. data/lib/sidekiq/testing/inline.rb +2 -1
  50. data/lib/sidekiq/util.rb +20 -14
  51. data/lib/sidekiq/version.rb +2 -1
  52. data/lib/sidekiq/web.rb +45 -53
  53. data/lib/sidekiq/web/action.rb +14 -10
  54. data/lib/sidekiq/web/application.rb +83 -58
  55. data/lib/sidekiq/web/helpers.rb +105 -67
  56. data/lib/sidekiq/web/router.rb +18 -15
  57. data/lib/sidekiq/worker.rb +144 -41
  58. data/sidekiq.gemspec +16 -27
  59. data/web/assets/javascripts/application.js +0 -0
  60. data/web/assets/javascripts/dashboard.js +21 -23
  61. data/web/assets/stylesheets/application.css +35 -2
  62. data/web/assets/stylesheets/bootstrap.css +2 -2
  63. data/web/locales/ar.yml +1 -0
  64. data/web/locales/en.yml +2 -0
  65. data/web/locales/es.yml +4 -3
  66. data/web/locales/ja.yml +7 -4
  67. data/web/views/_footer.erb +4 -1
  68. data/web/views/_nav.erb +3 -17
  69. data/web/views/busy.erb +5 -1
  70. data/web/views/layout.erb +1 -1
  71. data/web/views/queue.erb +1 -0
  72. data/web/views/queues.erb +2 -0
  73. data/web/views/retries.erb +4 -0
  74. metadata +25 -171
  75. data/.travis.yml +0 -18
  76. data/bin/sidekiqctl +0 -99
  77. data/lib/sidekiq/core_ext.rb +0 -119
  78. data/lib/sidekiq/logging.rb +0 -106
  79. data/lib/sidekiq/middleware/server/active_record.rb +0 -22
data/lib/sidekiq/api.rb

@@ -1,9 +1,24 @@
-# encoding: utf-8
 # frozen_string_literal: true
-require 'sidekiq'
+
+require "sidekiq"
 
 module Sidekiq
+  module RedisScanner
+    def sscan(conn, key)
+      cursor = "0"
+      result = []
+      loop do
+        cursor, values = conn.sscan(key, cursor)
+        result.push(*values)
+        break if cursor == "0"
+      end
+      result
+    end
+  end
+
   class Stats
+    include RedisScanner
+
     def initialize
       fetch_stats!
     end
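
The RedisScanner#sscan helper added above swaps unbounded SMEMBERS reads for cursor-based SSCAN iteration, so very large sets no longer block Redis. A minimal standalone sketch of the same cursor loop, assuming a reachable Redis and the standard redis-rb client:

    require "sidekiq"

    # Walk the "queues" set in batches instead of one blocking SMEMBERS call.
    queue_names = Sidekiq.redis do |conn|
      names = []
      cursor = "0"
      loop do
        cursor, batch = conn.sscan("queues", cursor)
        names.concat(batch)
        break if cursor == "0"
      end
      names
    end
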
@@ -49,55 +64,65 @@ module Sidekiq
     end
 
     def fetch_stats!
-      pipe1_res = Sidekiq.redis do |conn|
+      pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          conn.get('stat:processed'.freeze)
-          conn.get('stat:failed'.freeze)
-          conn.zcard('schedule'.freeze)
-          conn.zcard('retry'.freeze)
-          conn.zcard('dead'.freeze)
-          conn.scard('processes'.freeze)
-          conn.lrange('queue:default'.freeze, -1, -1)
-          conn.smembers('processes'.freeze)
-          conn.smembers('queues'.freeze)
+          conn.get("stat:processed")
+          conn.get("stat:failed")
+          conn.zcard("schedule")
+          conn.zcard("retry")
+          conn.zcard("dead")
+          conn.scard("processes")
+          conn.lrange("queue:default", -1, -1)
         end
-      end
+      }
+
+      processes = Sidekiq.redis { |conn|
+        sscan(conn, "processes")
+      }
+
+      queues = Sidekiq.redis { |conn|
+        sscan(conn, "queues")
+      }
 
-      pipe2_res = Sidekiq.redis do |conn|
+      pipe2_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          pipe1_res[7].each {|key| conn.hget(key, 'busy'.freeze) }
-          pipe1_res[8].each {|queue| conn.llen("queue:#{queue}") }
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
         end
-      end
+      }
 
-      s = pipe1_res[7].size
+      s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued     = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-        job = Sidekiq.load_json(entry) rescue {}
-        now = Time.now.to_f
-        thence = job['enqueued_at'.freeze] || now
-        now - thence
-      else
-        0
-      end
+        job = begin
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
+        now = Time.now.to_f
+        thence = job["enqueued_at"] || now
+        now - thence
+      else
+        0
+      end
       @stats = {
-        processed:             pipe1_res[0].to_i,
-        failed:                pipe1_res[1].to_i,
-        scheduled_size:        pipe1_res[2],
-        retry_size:            pipe1_res[3],
-        dead_size:             pipe1_res[4],
-        processes_size:        pipe1_res[5],
+        processed: pipe1_res[0].to_i,
+        failed: pipe1_res[1].to_i,
+        scheduled_size: pipe1_res[2],
+        retry_size: pipe1_res[3],
+        dead_size: pipe1_res[4],
+        processes_size: pipe1_res[5],
 
         default_queue_latency: default_queue_latency,
-        workers_size:          workers_size,
-        enqueued:              enqueued
+        workers_size: workers_size,
+        enqueued: enqueued,
       }
     end
 
     def reset(*stats)
-      all = %w(failed processed)
+      all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
 
       mset_args = []
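
fetch_stats! above backs the reader methods on Sidekiq::Stats. A brief sketch of how that API is usually consumed, assuming a running Redis with some Sidekiq data in it (the hash in the last comment is illustrative):

    require "sidekiq/api"

    stats = Sidekiq::Stats.new        # runs fetch_stats! once
    stats.processed                   # lifetime processed counter
    stats.failed                      # lifetime failed counter
    stats.enqueued                    # jobs waiting across all known queues
    stats.default_queue_latency       # seconds the oldest "default" job has waited
    stats.queues                      # {"default" => 12, ...} via Stats::Queues#lengths
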
@@ -117,22 +142,23 @@ module Sidekiq
     end
 
     class Queues
+      include RedisScanner
+
       def lengths
         Sidekiq.redis do |conn|
-          queues = conn.smembers('queues'.freeze)
+          queues = sscan(conn, "queues")
 
-          lengths = conn.pipelined do
+          lengths = conn.pipelined {
             queues.each do |queue|
               conn.llen("queue:#{queue}")
             end
-          end
+          }
 
           i = 0
-          array_of_arrays = queues.inject({}) do |memo, queue|
+          array_of_arrays = queues.each_with_object({}) { |queue, memo|
             memo[queue] = lengths[i]
             i += 1
-            memo
-          end.sort_by { |_, size| size }
+          }.sort_by { |_, size| size }
 
           Hash[array_of_arrays.reverse]
         end
@@ -146,11 +172,11 @@ module Sidekiq
       end
 
       def processed
-        date_stat_hash("processed")
+        @processed ||= date_stat_hash("processed")
      end
 
       def failed
-        date_stat_hash("failed")
+        @failed ||= date_stat_hash("failed")
       end
 
       private
@@ -163,16 +189,21 @@ module Sidekiq
 
         while i < @days_previous
           date = @start_date - i
-          datestr = date.strftime("%Y-%m-%d".freeze)
+          datestr = date.strftime("%Y-%m-%d")
           keys << "stat:#{stat}:#{datestr}"
           dates << datestr
           i += 1
         end
 
-        Sidekiq.redis do |conn|
-          conn.mget(keys).each_with_index do |value, idx|
-            stat_hash[dates[idx]] = value ? value.to_i : 0
+        begin
+          Sidekiq.redis do |conn|
+            conn.mget(keys).each_with_index do |value, idx|
+              stat_hash[dates[idx]] = value ? value.to_i : 0
+            end
           end
+        rescue Redis::CommandError
+          # mget will trigger a CROSSSLOT error when run against a Cluster
+          # TODO Someone want to add Cluster support?
         end
 
         stat_hash
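
Stats::History reads one per-day counter key per day via MGET (hence the CROSSSLOT caveat above), and processed/failed are now memoized per instance. A short usage sketch; the dates in the comment are illustrative:

    require "sidekiq/api"

    history = Sidekiq::Stats::History.new(7)   # last 7 days, ending today
    history.processed   # => {"2019-04-01" => 120, "2019-03-31" => 98, ...}
    history.failed      # repeat calls on the same instance reuse the cached hash
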
@@ -194,18 +225,19 @@ module Sidekiq
   #
   class Queue
     include Enumerable
+    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| c.smembers('queues'.freeze) }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| sscan(c, "queues") }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
-    def initialize(name="default")
-      @name = name
+    def initialize(name = "default")
+      @name = name.to_s
       @rname = "queue:#{name}"
     end
 
@@ -224,13 +256,13 @@ module Sidekiq
     #
     # @return Float
     def latency
-      entry = Sidekiq.redis do |conn|
+      entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
-      end.first
+      }.first
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
-      thence = job['enqueued_at'] || now
+      thence = job["enqueued_at"] || now
       now - thence
     end
 
@@ -240,12 +272,12 @@ module Sidekiq
       page = 0
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size - deleted_size
-        range_end   = range_start + page_size - 1
-        entries = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        entries = Sidekiq.redis { |conn|
           conn.lrange @rname, range_start, range_end
-        end
+        }
         break if entries.empty?
         page += 1
         entries.each do |entry|
@@ -268,7 +300,7 @@ module Sidekiq
       Sidekiq.redis do |conn|
         conn.multi do
           conn.del(@rname)
-          conn.srem("queues".freeze, name)
+          conn.srem("queues", name)
         end
       end
     end
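
For reference, the Queue API these hunks touch is typically driven like this (the queue name is whatever your workers use; "default" is simply the default):

    require "sidekiq/api"

    queue = Sidekiq::Queue.new("default")
    queue.size      # number of jobs currently waiting in the list
    queue.latency   # age in seconds of the oldest waiting job (see #latency above)
    queue.clear     # deletes the list and removes the name from the "queues" set
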
@@ -286,11 +318,11 @@ module Sidekiq
     attr_reader :item
     attr_reader :value
 
-    def initialize(item, queue_name=nil)
+    def initialize(item, queue_name = nil)
       @args = nil
       @value = item
       @item = item.is_a?(Hash) ? item : parse(item)
-      @queue = queue_name || @item['queue']
+      @queue = queue_name || @item["queue"]
     end
 
     def parse(item)
@@ -305,7 +337,7 @@ module Sidekiq
     end
 
     def klass
-      self['class']
+      self["class"]
     end
 
     def display_class
@@ -316,73 +348,71 @@ module Sidekiq
           "#{target}.#{method}"
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_class = @item['wrapped'] || args[0]
-        if 'ActionMailer::DeliveryJob' == job_class
+        job_class = @item["wrapped"] || args[0]
+        if job_class == "ActionMailer::DeliveryJob"
           # MailerClass#mailer_method
-          args[0]['arguments'][0..1].join('#')
+          args[0]["arguments"][0..1].join("#")
         else
-          job_class
+          job_class
         end
       else
         klass
-      end
+      end
     end
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @args ||= case klass
+      @display_args ||= case klass
       when /\ASidekiq::Extensions::Delayed/
         safe_load(args[0], args) do |_, _, arg|
           arg
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self['wrapped'] ? args[0]["arguments"] : []
-        if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
         else
           job_args
         end
       else
-        if self['encrypt'.freeze]
+        if self["encrypt"]
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'.freeze
+          args[-1] = "[encrypted data]"
         end
         args
-      end
+      end
     end
 
     def args
-      @args || @item['args']
+      @args || @item["args"]
     end
 
     def jid
-      self['jid']
+      self["jid"]
     end
 
     def enqueued_at
-      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+      self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
     end
 
     def created_at
-      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+      Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
-    def queue
-      @queue
-    end
+    attr_reader :queue
 
     def latency
       now = Time.now.to_f
-      now - (@item['enqueued_at'] || @item['created_at'] || now)
+      now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
     ##
     # Remove this job from the queue.
     def delete
-      count = Sidekiq.redis do |conn|
+      count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
-      end
+      }
      count != 0
     end
 
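One notable fix in this hunk: display_args now memoizes into @display_args instead of clobbering @args, so args keeps returning the raw payload. A short sketch of inspecting queued jobs through this class; ObsoleteWorker is a made-up class name:

    require "sidekiq/api"

    Sidekiq::Queue.new("default").each do |job|
      job.klass         # the "class" field of the payload
      job.display_args  # unwrapped arguments, as shown in the Web UI
      job.enqueued_at   # Time in UTC, or nil if the field is missing
      job.delete if job.klass == "ObsoleteWorker"
    end
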
@@ -396,14 +426,12 @@ module Sidekiq
     private
 
     def safe_load(content, default)
-      begin
-        yield(*YAML.load(content))
-      rescue => ex
-        # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-        # memory yet so the YAML can't be loaded.
-        Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
-        default
-      end
+      yield(*YAML.load(content))
+    rescue => ex
+      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+      # memory yet so the YAML can't be loaded.
+      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+      default
     end
   end
 
@@ -444,7 +472,7 @@ module Sidekiq
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
-        msg['retry_count'] -= 1 if msg['retry_count']
+        msg["retry_count"] -= 1 if msg["retry_count"]
         Sidekiq::Client.push(msg)
       end
     end
@@ -453,43 +481,36 @@ module Sidekiq
     # Place job in the dead set
     def kill
       remove_job do |message|
-        now = Time.now.to_f
-        Sidekiq.redis do |conn|
-          conn.multi do
-            conn.zadd('dead', now, message)
-            conn.zremrangebyscore('dead', '-inf', now - DeadSet.timeout)
-            conn.zremrangebyrank('dead', 0, - DeadSet.max_jobs)
-          end
-        end
+        DeadSet.new.kill(message)
       end
     end
 
     def error?
-      !!item['error_class']
+      !!item["error_class"]
     end
 
     private
 
     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi do
+        results = conn.multi {
           conn.zrangebyscore(parent.name, score, score)
           conn.zremrangebyscore(parent.name, score, score)
-        end.first
+        }.first
 
         if results.size == 1
           yield results.first
         else
           # multiple jobs with the same score
           # find the one with the right JID and push it
-          hash = results.group_by do |message|
+          hash = results.group_by { |message|
            if message.index(jid)
              msg = Sidekiq.load_json(message)
-              msg['jid'] == jid
+              msg["jid"] == jid
            else
              false
            end
-          end
+          }
 
          msg = hash.fetch(true, []).first
          yield msg if msg
@@ -503,7 +524,6 @@ module Sidekiq
         end
       end
     end
-
   end
 
   class SortedSet
@@ -529,7 +549,6 @@ module Sidekiq
     end
 
   class JobSet < SortedSet
-
     def schedule(timestamp, message)
       Sidekiq.redis do |conn|
         conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -542,15 +561,15 @@ module Sidekiq
       page = -1
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size + offset_size
-        range_end   = range_start + page_size - 1
-        elements = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        elements = Sidekiq.redis { |conn|
           conn.zrange name, range_start, range_end, with_scores: true
-        end
+        }
         break if elements.empty?
         page -= 1
-        elements.each do |element, score|
+        elements.reverse_each do |element, score|
           yield SortedEntry.new(self, score, element)
         end
         offset_size = initial_size - @_size
@@ -558,18 +577,17 @@ module Sidekiq
     end
 
     def fetch(score, jid = nil)
-      elements = Sidekiq.redis do |conn|
+      elements = Sidekiq.redis { |conn|
         conn.zrangebyscore(name, score, score)
-      end
+      }
 
-      elements.inject([]) do |result, element|
+      elements.each_with_object([]) do |element, result|
         entry = SortedEntry.new(self, score, element)
         if jid
           result << entry if entry.jid == jid
         else
           result << entry
         end
-        result
       end
     end
 
@@ -579,7 +597,7 @@ module Sidekiq
     # This is a slow, inefficient operation. Do not use under
     # normal conditions. Sidekiq Pro contains a faster version.
     def find_job(jid)
-      self.detect { |j| j.jid == jid }
+      detect { |j| j.jid == jid }
     end
 
     def delete_by_value(name, value)
@@ -600,7 +618,6 @@ module Sidekiq
             @_size -= 1 if ret
             break ret
           end
-          false
         end
       end
     end
@@ -622,7 +639,7 @@ module Sidekiq
   #   end.map(&:delete)
   class ScheduledSet < JobSet
     def initialize
-      super 'schedule'
+      super "schedule"
     end
   end
 
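ScheduledSet (like the other JobSet subclasses) is enumerable, so the scan-and-delete pattern from the doc comment above works unchanged; HardWorker here is a placeholder class name:

    require "sidekiq/api"

    ss = Sidekiq::ScheduledSet.new
    ss.size
    ss.select { |job| job.klass == "HardWorker" }.map(&:delete)
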
@@ -640,13 +657,15 @@ module Sidekiq
   #   end.map(&:delete)
   class RetrySet < JobSet
     def initialize
-      super 'retry'
+      super "retry"
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
+    end
+
+    def kill_all
+      each(&:kill) while size > 0
     end
   end
 
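kill_all is new in this release alongside the long-standing retry_all; both drain the set in place. A quick sketch:

    require "sidekiq/api"

    rs = Sidekiq::RetrySet.new
    rs.retry_all   # push every pending retry back onto its queue now
    rs.kill_all    # or move every pending retry straight to the dead set
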
@@ -655,13 +674,32 @@ module Sidekiq
   #
   class DeadSet < JobSet
     def initialize
-      super 'dead'
+      super "dead"
     end
 
-    def retry_all
-      while size > 0
-        each(&:retry)
+    def kill(message, opts = {})
+      now = Time.now.to_f
+      Sidekiq.redis do |conn|
+        conn.multi do
+          conn.zadd(name, now.to_s, message)
+          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
+          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+        end
+      end
+
+      if opts[:notify_failure] != false
+        job = Sidekiq.load_json(message)
+        r = RuntimeError.new("Job killed by API")
+        r.set_backtrace(caller)
+        Sidekiq.death_handlers.each do |handle|
+          handle.call(job, r)
+        end
       end
+      true
+    end
+
+    def retry_all
+      each(&:retry) while size > 0
     end
 
     def self.max_jobs
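
DeadSet#kill now also fires the configured death handlers unless notify_failure: false is passed. A sketch of wiring a handler and burying a hand-built payload; the worker name and jid are made up:

    require "sidekiq/api"

    # Called with the job hash and the exception for every job that dies.
    Sidekiq.death_handlers << ->(job, ex) {
      Sidekiq.logger.warn("#{job["class"]} #{job["jid"]} died: #{ex.message}")
    }

    message = Sidekiq.dump_json("class" => "HardWorker", "args" => [], "jid" => "deadbeefcafe")
    Sidekiq::DeadSet.new.kill(message)                         # fires the handlers
    Sidekiq::DeadSet.new.kill(message, notify_failure: false)  # buries silently
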
@@ -675,29 +713,30 @@ module Sidekiq
 
   ##
   # Enumerates the set of Sidekiq processes which are actively working
-  # right now. Each process send a heartbeat to Redis every 5 seconds
+  # right now. Each process sends a heartbeat to Redis every 5 seconds
   # so this set should be relatively accurate, barring network partitions.
   #
   # Yields a Sidekiq::Process.
   #
   class ProcessSet
     include Enumerable
+    include RedisScanner
 
-    def initialize(clean_plz=true)
-      self.class.cleanup if clean_plz
+    def initialize(clean_plz = true)
+      cleanup if clean_plz
     end
 
     # Cleans up dead processes recorded in Redis.
     # Returns the number of processes cleaned.
-    def self.cleanup
+    def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes').sort
-        heartbeats = conn.pipelined do
+        procs = sscan(conn, "processes").sort
+        heartbeats = conn.pipelined {
           procs.each do |key|
-            conn.hget(key, 'info')
+            conn.hget(key, "info")
           end
-        end
+        }
 
         # the hash named key has an expiry of 60 seconds.
         # if it's not found, that means the process has not reported
@@ -706,27 +745,32 @@ module Sidekiq
         heartbeats.each_with_index do |beat, i|
           to_prune << procs[i] if beat.nil?
         end
-        count = conn.srem('processes', to_prune) unless to_prune.empty?
+        count = conn.srem("processes", to_prune) unless to_prune.empty?
       end
       count
     end
 
     def each
-      procs = Sidekiq.redis { |conn| conn.smembers('processes') }.sort
+      procs = Sidekiq.redis { |conn| sscan(conn, "processes") }.sort
 
       Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-        result = conn.pipelined do
+        result = conn.pipelined {
           procs.each do |key|
-            conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+            conn.hmget(key, "info", "busy", "beat", "quiet")
           end
-        end
+        }
 
         result.each do |info, busy, at_s, quiet|
+          # If a process is stopped between when we query Redis for `procs` and
+          # when we query for `result`, we will have an item in `result` that is
+          # composed of `nil` values.
+          next if info.nil?
+
           hash = Sidekiq.load_json(info)
-          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
+          yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
         end
       end
 
@@ -738,7 +782,19 @@ module Sidekiq
     # contains Sidekiq processes which have sent a heartbeat within the last
     # 60 seconds.
     def size
-      Sidekiq.redis { |conn| conn.scard('processes') }
+      Sidekiq.redis { |conn| conn.scard("processes") }
+    end
+
+    # Returns the identity of the current cluster leader or "" if no leader.
+    # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
+    # or Sidekiq Pro.
+    def leader
+      @leader ||= begin
+        x = Sidekiq.redis { |c| c.get("dear-leader") }
+        # need a non-falsy value so we can memoize
+        x ||= ""
+        x
+      end
     end
   end
 
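ProcessSet is the live-process registry those heartbeats feed. Typical reads look like this (leader is only meaningful under Sidekiq Enterprise, per the comment above):

    require "sidekiq/api"

    ps = Sidekiq::ProcessSet.new   # prunes stale heartbeats first (clean_plz)
    ps.size                        # processes that reported within the last 60s
    ps.leader                      # Enterprise leader identity, "" otherwise
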
@@ -763,31 +819,35 @@ module Sidekiq
     end
 
     def tag
-      self['tag']
+      self["tag"]
     end
 
     def labels
-      Array(self['labels'])
+      Array(self["labels"])
     end
 
     def [](key)
       @attribs[key]
     end
 
+    def identity
+      self["identity"]
+    end
+
     def quiet!
-      signal('TSTP')
+      signal("TSTP")
     end
 
     def stop!
-      signal('TERM')
+      signal("TERM")
     end
 
     def dump_threads
-      signal('TTIN')
+      signal("TTIN")
     end
 
     def stopping?
-      self['quiet'] == 'true'
+      self["quiet"] == "true"
     end
 
     private
@@ -801,10 +861,6 @@ module Sidekiq
         end
       end
     end
-
-    def identity
-      self['identity']
-    end
   end
 
   ##
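
identity moved up next to the other readers; together with quiet!/stop! it lets you address individual processes from the API (delivery happens via the process's heartbeat). A sketch:

    require "sidekiq/api"

    Sidekiq::ProcessSet.new.each do |process|
      process.identity   # e.g. "hostname:pid:nonce", unique per process
      process.quiet!     # TSTP: stop picking up new work
      # process.stop!    # TERM: shut the process down
    end
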
@@ -829,15 +885,16 @@ module Sidekiq
   #
   class Workers
     include Enumerable
+    include RedisScanner
 
     def each
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, "processes")
         procs.sort.each do |key|
-          valid, workers = conn.pipelined do
+          valid, workers = conn.pipelined {
             conn.exists(key)
             conn.hgetall("#{key}:workers")
-          end
+          }
           next unless valid
           workers.each_pair do |tid, json|
             yield key, tid, Sidekiq.load_json(json)
@@ -854,18 +911,17 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, "processes")
         if procs.empty?
           0
         else
-          conn.pipelined do
+          conn.pipelined {
             procs.each do |key|
-              conn.hget(key, 'busy')
+              conn.hget(key, "busy")
             end
-          end.map(&:to_i).inject(:+)
+          }.map(&:to_i).inject(:+)
         end
       end
     end
   end
-
 end
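
Workers reports in-flight jobs rather than processes; each entry yielded by #each carries the queue, the start time and the job payload. A closing sketch:

    require "sidekiq/api"

    workers = Sidekiq::Workers.new
    workers.size   # number of jobs executing right now, cluster-wide
    workers.each do |process_id, thread_id, work|
      work["queue"]    # queue the job came from
      work["run_at"]   # epoch seconds when the job started
      work["payload"]  # the job itself
    end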