sidekiq 5.2.7 → 6.0.3

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (74)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +21 -0
  3. data/.gitignore +0 -2
  4. data/.standard.yml +20 -0
  5. data/6.0-Upgrade.md +72 -0
  6. data/Changes.md +121 -0
  7. data/Ent-2.0-Upgrade.md +37 -0
  8. data/Ent-Changes.md +18 -0
  9. data/Gemfile +12 -11
  10. data/Gemfile.lock +196 -0
  11. data/Pro-5.0-Upgrade.md +25 -0
  12. data/Pro-Changes.md +18 -1
  13. data/README.md +18 -30
  14. data/Rakefile +5 -4
  15. data/bin/sidekiqload +32 -24
  16. data/bin/sidekiqmon +8 -0
  17. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  18. data/lib/generators/sidekiq/worker_generator.rb +20 -12
  19. data/lib/sidekiq.rb +61 -42
  20. data/lib/sidekiq/api.rb +230 -214
  21. data/lib/sidekiq/cli.rb +111 -174
  22. data/lib/sidekiq/client.rb +55 -46
  23. data/lib/sidekiq/delay.rb +5 -6
  24. data/lib/sidekiq/exception_handler.rb +10 -12
  25. data/lib/sidekiq/extensions/action_mailer.rb +10 -20
  26. data/lib/sidekiq/extensions/active_record.rb +9 -7
  27. data/lib/sidekiq/extensions/class_methods.rb +9 -7
  28. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  29. data/lib/sidekiq/fetch.rb +11 -12
  30. data/lib/sidekiq/job_logger.rb +45 -7
  31. data/lib/sidekiq/job_retry.rb +60 -60
  32. data/lib/sidekiq/launcher.rb +57 -51
  33. data/lib/sidekiq/logger.rb +165 -0
  34. data/lib/sidekiq/manager.rb +7 -9
  35. data/lib/sidekiq/middleware/chain.rb +14 -4
  36. data/lib/sidekiq/middleware/i18n.rb +5 -7
  37. data/lib/sidekiq/monitor.rb +133 -0
  38. data/lib/sidekiq/paginator.rb +18 -14
  39. data/lib/sidekiq/processor.rb +67 -66
  40. data/lib/sidekiq/rails.rb +23 -29
  41. data/lib/sidekiq/redis_connection.rb +31 -37
  42. data/lib/sidekiq/scheduled.rb +28 -29
  43. data/lib/sidekiq/testing.rb +34 -23
  44. data/lib/sidekiq/testing/inline.rb +2 -1
  45. data/lib/sidekiq/util.rb +17 -16
  46. data/lib/sidekiq/version.rb +2 -1
  47. data/lib/sidekiq/web.rb +41 -49
  48. data/lib/sidekiq/web/action.rb +14 -10
  49. data/lib/sidekiq/web/application.rb +64 -66
  50. data/lib/sidekiq/web/helpers.rb +83 -72
  51. data/lib/sidekiq/web/router.rb +17 -14
  52. data/lib/sidekiq/worker.rb +124 -97
  53. data/sidekiq.gemspec +16 -16
  54. data/web/assets/javascripts/dashboard.js +4 -23
  55. data/web/assets/stylesheets/application-dark.css +125 -0
  56. data/web/assets/stylesheets/application.css +9 -0
  57. data/web/locales/de.yml +14 -2
  58. data/web/locales/ja.yml +2 -1
  59. data/web/views/_job_info.erb +2 -1
  60. data/web/views/busy.erb +4 -1
  61. data/web/views/dead.erb +2 -2
  62. data/web/views/layout.erb +1 -0
  63. data/web/views/morgue.erb +4 -1
  64. data/web/views/queue.erb +10 -1
  65. data/web/views/retries.erb +4 -1
  66. data/web/views/retry.erb +2 -2
  67. data/web/views/scheduled.erb +4 -1
  68. metadata +20 -29
  69. data/.travis.yml +0 -11
  70. data/bin/sidekiqctl +0 -20
  71. data/lib/sidekiq/core_ext.rb +0 -1
  72. data/lib/sidekiq/ctl.rb +0 -221
  73. data/lib/sidekiq/logging.rb +0 -122
  74. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
data/lib/sidekiq/api.rb
@@ -1,24 +1,12 @@
 # frozen_string_literal: true
-require 'sidekiq'
 
-module Sidekiq
+require "sidekiq"
 
-  module RedisScanner
-    def sscan(conn, key)
-      cursor = '0'
-      result = []
-      loop do
-        cursor, values = conn.sscan(key, cursor)
-        result.push(*values)
-        break if cursor == '0'
-      end
-      result
-    end
-  end
+require "zlib"
+require "base64"
 
+module Sidekiq
   class Stats
-    include RedisScanner
-
     def initialize
       fetch_stats!
     end
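
The hunk above drops the hand-rolled RedisScanner cursor loop in favor of redis-rb's built-in SCAN enumerator. A rough equivalence sketch, not part of the diff; it assumes a reachable Redis through the redis-rb client Sidekiq already uses:

require "sidekiq"

# Old style: drive the SSCAN cursor by hand, as the removed module did.
old_style = Sidekiq.redis do |conn|
  cursor = "0"
  result = []
  loop do
    cursor, values = conn.sscan("queues", cursor)
    result.push(*values)
    break if cursor == "0"
  end
  result
end

# New style: redis-rb drives the cursor and collects every member.
new_style = Sidekiq.redis { |conn| conn.sscan_each("queues").to_a }

puts old_style.sort == new_style.sort   # => true
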
@@ -64,61 +52,65 @@ module Sidekiq
     end
 
     def fetch_stats!
-      pipe1_res = Sidekiq.redis do |conn|
+      pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          conn.get('stat:processed')
-          conn.get('stat:failed')
-          conn.zcard('schedule')
-          conn.zcard('retry')
-          conn.zcard('dead')
-          conn.scard('processes')
-          conn.lrange('queue:default', -1, -1)
+          conn.get("stat:processed")
+          conn.get("stat:failed")
+          conn.zcard("schedule")
+          conn.zcard("retry")
+          conn.zcard("dead")
+          conn.scard("processes")
+          conn.lrange("queue:default", -1, -1)
         end
-      end
+      }
 
-      processes = Sidekiq.redis do |conn|
-        sscan(conn, 'processes')
-      end
+      processes = Sidekiq.redis { |conn|
+        conn.sscan_each("processes").to_a
+      }
 
-      queues = Sidekiq.redis do |conn|
-        sscan(conn, 'queues')
-      end
+      queues = Sidekiq.redis { |conn|
+        conn.sscan_each("queues").to_a
+      }
 
-      pipe2_res = Sidekiq.redis do |conn|
+      pipe2_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          processes.each {|key| conn.hget(key, 'busy') }
-          queues.each {|queue| conn.llen("queue:#{queue}") }
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
         end
-      end
+      }
 
       s = processes.size
-      workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+      workers_size = pipe2_res[0...s].sum(&:to_i)
+      enqueued = pipe2_res[s..-1].sum(&:to_i)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-        job = Sidekiq.load_json(entry) rescue {}
-        now = Time.now.to_f
-        thence = job['enqueued_at'] || now
-        now - thence
-      else
-        0
-      end
+        job = begin
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
+        now = Time.now.to_f
+        thence = job["enqueued_at"] || now
+        now - thence
+      else
+        0
+      end
       @stats = {
-        processed: pipe1_res[0].to_i,
-        failed: pipe1_res[1].to_i,
-        scheduled_size: pipe1_res[2],
-        retry_size: pipe1_res[3],
-        dead_size: pipe1_res[4],
-        processes_size: pipe1_res[5],
+        processed: pipe1_res[0].to_i,
+        failed: pipe1_res[1].to_i,
+        scheduled_size: pipe1_res[2],
+        retry_size: pipe1_res[3],
+        dead_size: pipe1_res[4],
+        processes_size: pipe1_res[5],
 
         default_queue_latency: default_queue_latency,
-        workers_size: workers_size,
-        enqueued: enqueued
+        workers_size: workers_size,
+        enqueued: enqueued,
       }
     end
 
     def reset(*stats)
-      all = %w(failed processed)
+      all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
 
       mset_args = []
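
For orientation, not part of the diff: the pipelined values assembled in fetch_stats! back the public readers on Sidekiq::Stats, so the rewrite does not change the API. A short usage sketch:

require "sidekiq/api"

stats = Sidekiq::Stats.new         # runs fetch_stats! shown above
puts stats.processed               # pipe1_res[0].to_i
puts stats.failed                  # pipe1_res[1].to_i
puts stats.enqueued                # summed queue lengths from pipe2_res
puts stats.default_queue_latency   # seconds, taken from the tail of queue:default
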
@@ -138,26 +130,18 @@ module Sidekiq
     end
 
     class Queues
-      include RedisScanner
-
       def lengths
         Sidekiq.redis do |conn|
-          queues = sscan(conn, 'queues')
+          queues = conn.sscan_each("queues").to_a
 
-          lengths = conn.pipelined do
+          lengths = conn.pipelined {
             queues.each do |queue|
               conn.llen("queue:#{queue}")
             end
-          end
-
-          i = 0
-          array_of_arrays = queues.inject({}) do |memo, queue|
-            memo[queue] = lengths[i]
-            i += 1
-            memo
-          end.sort_by { |_, size| size }
+          }
 
-          Hash[array_of_arrays.reverse]
+          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+          Hash[array_of_arrays]
         end
       end
     end
@@ -179,18 +163,12 @@ module Sidekiq
       private
 
       def date_stat_hash(stat)
-        i = 0
         stat_hash = {}
-        keys = []
-        dates = []
-
-        while i < @days_previous
-          date = @start_date - i
-          datestr = date.strftime("%Y-%m-%d")
-          keys << "stat:#{stat}:#{datestr}"
-          dates << datestr
-          i += 1
-        end
+        dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
+          date.strftime("%Y-%m-%d")
+        }
+
+        keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
         begin
           Sidekiq.redis do |conn|
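
For orientation, not part of the diff: date_stat_hash is the private helper behind Sidekiq::Stats::History, whose readers are unchanged. A short usage sketch:

require "sidekiq/api"

history = Sidekiq::Stats::History.new(7)   # the last seven days
history.processed.each { |date, count| puts "#{date}: #{count} processed" }
history.failed.each { |date, count| puts "#{date}: #{count} failed" }
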
@@ -222,18 +200,17 @@ module Sidekiq
   #
   class Queue
     include Enumerable
-    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
-    def initialize(name="default")
+    def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end
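
For orientation, not part of the diff: Queue.all now discovers queue names with SSCAN instead of the removed helper, but the public Queue API reads the same. A short usage sketch:

require "sidekiq/api"

Sidekiq::Queue.all.each do |queue|
  puts "#{queue.name}: #{queue.size} jobs, #{queue.latency.round(2)}s latency"
end

Sidekiq::Queue.new.clear   # name defaults to "default"; clear drops every job in the queue
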
@@ -253,13 +230,13 @@ module Sidekiq
     #
     # @return Float
     def latency
-      entry = Sidekiq.redis do |conn|
+      entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
-      end.first
+      }.first
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
-      thence = job['enqueued_at'] || now
+      thence = job["enqueued_at"] || now
       now - thence
     end
 
@@ -269,12 +246,12 @@ module Sidekiq
       page = 0
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size - deleted_size
-        range_end = range_start + page_size - 1
-        entries = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        entries = Sidekiq.redis { |conn|
           conn.lrange @rname, range_start, range_end
-        end
+        }
         break if entries.empty?
         page += 1
         entries.each do |entry|
@@ -315,11 +292,11 @@ module Sidekiq
     attr_reader :item
     attr_reader :value
 
-    def initialize(item, queue_name=nil)
+    def initialize(item, queue_name = nil)
       @args = nil
       @value = item
       @item = item.is_a?(Hash) ? item : parse(item)
-      @queue = queue_name || @item['queue']
+      @queue = queue_name || @item["queue"]
     end
 
     def parse(item)
@@ -334,7 +311,7 @@ module Sidekiq
     end
 
     def klass
-      self['class']
+      self["class"]
     end
 
     def display_class
@@ -345,16 +322,16 @@ module Sidekiq
           "#{target}.#{method}"
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_class = @item['wrapped'] || args[0]
-        if 'ActionMailer::DeliveryJob' == job_class
+        job_class = @item["wrapped"] || args[0]
+        if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
           # MailerClass#mailer_method
-          args[0]['arguments'][0..1].join('#')
+          args[0]["arguments"][0..1].join("#")
         else
-          job_class
+          job_class
         end
       else
         klass
-      end
+      end
     end
 
     def display_args
@@ -365,53 +342,68 @@ module Sidekiq
           arg
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self['wrapped'] ? args[0]["arguments"] : []
-        if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
+        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3).first["args"]
         else
           job_args
         end
       else
-        if self['encrypt']
+        if self["encrypt"]
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'
+          args[-1] = "[encrypted data]"
         end
         args
-      end
+      end
     end
 
     def args
-      @args || @item['args']
+      @args || @item["args"]
    end
 
     def jid
-      self['jid']
+      self["jid"]
     end
 
     def enqueued_at
-      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+      self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
     end
 
     def created_at
-      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+      Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
-    def queue
-      @queue
+    def tags
+      self["tags"] || []
     end
 
+    def error_backtrace
+      # Cache nil values
+      if defined?(@error_backtrace)
+        @error_backtrace
+      else
+        value = self["error_backtrace"]
+        @error_backtrace = value && uncompress_backtrace(value)
+      end
+    end
+
+    attr_reader :queue
+
     def latency
       now = Time.now.to_f
-      now - (@item['enqueued_at'] || @item['created_at'] || now)
+      now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
     ##
     # Remove this job from the queue.
     def delete
-      count = Sidekiq.redis do |conn|
+      count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
-      end
+      }
       count != 0
     end
 
@@ -425,13 +417,28 @@ module Sidekiq
     private
 
     def safe_load(content, default)
-      begin
-        yield(*YAML.load(content))
-      rescue => ex
-        # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-        # memory yet so the YAML can't be loaded.
-        Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
-        default
+      yield(*YAML.load(content))
+    rescue => ex
+      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+      # memory yet so the YAML can't be loaded.
+      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+      default
+    end
+
+    def uncompress_backtrace(backtrace)
+      if backtrace.is_a?(Array)
+        # Handle old jobs with raw Array backtrace format
+        backtrace
+      else
+        decoded = Base64.decode64(backtrace)
+        uncompressed = Zlib::Inflate.inflate(decoded)
+        begin
+          Sidekiq.load_json(uncompressed)
+        rescue
+          # Handle old jobs with marshalled backtrace format
+          # TODO Remove in 7.x
+          Marshal.load(uncompressed)
+        end
       end
     end
   end
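
The decode path in uncompress_backtrace above implies that error_backtrace is stored as Base64-encoded, deflated JSON, with Array and Marshal fallbacks for jobs written by older versions. A stdlib-only sketch of that round trip, not part of the diff:

require "json"
require "zlib"
require "base64"

backtrace = ["app/workers/hard_worker.rb:12:in `perform'"]

# Encode the way the decode path expects, then reverse it.
stored  = Base64.encode64(Zlib::Deflate.deflate(JSON.generate(backtrace)))
decoded = JSON.parse(Zlib::Inflate.inflate(Base64.decode64(stored)))

puts decoded == backtrace   # => true
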
@@ -459,8 +466,9 @@ module Sidekiq
     end
 
     def reschedule(at)
-      delete
-      @parent.schedule(at, item)
+      Sidekiq.redis do |conn|
+        conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
+      end
     end
 
     def add_to_queue
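
SortedEntry#reschedule above now moves a job by adjusting its score in place with ZINCRBY instead of deleting and re-adding it. A usage sketch, not part of the diff; "SomeNightlyWorker" is a hypothetical job class:

require "sidekiq/api"

Sidekiq::ScheduledSet.new.each do |entry|
  # Push a specific scheduled job out by an hour.
  entry.reschedule(Time.now + 3600) if entry.klass == "SomeNightlyWorker"
end
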
@@ -473,7 +481,7 @@ module Sidekiq
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
-        msg['retry_count'] -= 1 if msg['retry_count']
+        msg["retry_count"] -= 1 if msg["retry_count"]
         Sidekiq::Client.push(msg)
       end
     end
@@ -487,45 +495,44 @@ module Sidekiq
     end
 
     def error?
-      !!item['error_class']
+      !!item["error_class"]
     end
 
     private
 
     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi do
+        results = conn.multi {
           conn.zrangebyscore(parent.name, score, score)
           conn.zremrangebyscore(parent.name, score, score)
-        end.first
+        }.first
 
         if results.size == 1
           yield results.first
         else
           # multiple jobs with the same score
           # find the one with the right JID and push it
-          hash = results.group_by do |message|
+          matched, nonmatched = results.partition { |message|
            if message.index(jid)
              msg = Sidekiq.load_json(message)
-              msg['jid'] == jid
+              msg["jid"] == jid
            else
              false
            end
-          end
+          }
 
-          msg = hash.fetch(true, []).first
+          msg = matched.first
          yield msg if msg
 
          # push the rest back onto the sorted set
          conn.multi do
-            hash.fetch(false, []).each do |message|
+            nonmatched.each do |message|
              conn.zadd(parent.name, score.to_f.to_s, message)
            end
          end
        end
      end
    end
-
  end
 
  class SortedSet
@@ -542,6 +549,17 @@ module Sidekiq
      Sidekiq.redis { |c| c.zcard(name) }
    end
 
+    def scan(match, count = 100)
+      return to_enum(:scan, match, count) unless block_given?
+
+      match = "*#{match}*" unless match.include?("*")
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: match, count: count) do |entry, score|
+          yield SortedEntry.new(self, score, entry)
+        end
+      end
+    end
+
    def clear
      Sidekiq.redis do |conn|
        conn.del(name)
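
The new SortedSet#scan above wraps ZSCAN and yields SortedEntry objects whose serialized payload matches the pattern. A usage sketch, not part of the diff; the worker name is hypothetical:

require "sidekiq/api"

Sidekiq::RetrySet.new.scan("WelcomeEmailWorker") do |entry|
  puts entry.jid
end

# Without a block it returns an Enumerator.
puts Sidekiq::DeadSet.new.scan("*timeout*").count
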
@@ -551,7 +569,6 @@ module Sidekiq
   end
 
   class JobSet < SortedSet
-
     def schedule(timestamp, message)
       Sidekiq.redis do |conn|
         conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -564,44 +581,55 @@ module Sidekiq
       page = -1
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size + offset_size
-        range_end = range_start + page_size - 1
-        elements = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        elements = Sidekiq.redis { |conn|
           conn.zrange name, range_start, range_end, with_scores: true
-        end
+        }
         break if elements.empty?
         page -= 1
-        elements.reverse.each do |element, score|
+        elements.reverse_each do |element, score|
           yield SortedEntry.new(self, score, element)
         end
         offset_size = initial_size - @_size
       end
     end
 
+    ##
+    # Fetch jobs that match a given time or Range. Job ID is an
+    # optional second argument.
     def fetch(score, jid = nil)
-      elements = Sidekiq.redis do |conn|
-        conn.zrangebyscore(name, score, score)
-      end
-
-      elements.inject([]) do |result, element|
-        entry = SortedEntry.new(self, score, element)
-        if jid
-          result << entry if entry.jid == jid
+      begin_score, end_score =
+        if score.is_a?(Range)
+          [score.first, score.last]
         else
-          result << entry
+          [score, score]
         end
-        result
+
+      elements = Sidekiq.redis { |conn|
+        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+      }
+
+      elements.each_with_object([]) do |element, result|
+        data, job_score = element
+        entry = SortedEntry.new(self, job_score, data)
+        result << entry if jid.nil? || entry.jid == jid
       end
    end
 
    ##
    # Find the job with the given JID within this sorted set.
-    #
-    # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # This is a slower O(n) operation. Do not use for app logic.
    def find_job(jid)
-      self.detect { |j| j.jid == jid }
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = JSON.parse(entry)
+          matched = job["jid"] == jid
+          return SortedEntry.new(self, score, entry) if matched
+        end
+      end
+      nil
    end
 
    def delete_by_value(name, value)
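
fetch above now accepts a Range of scores (epoch floats) as well as a single score, and find_job scans by JID instead of walking every entry with Enumerable#detect. A usage sketch, not part of the diff; the JID is made up:

require "sidekiq/api"

scheduled = Sidekiq::ScheduledSet.new

# Everything due within the next hour.
scheduled.fetch(Time.now.to_f..(Time.now + 3600).to_f).each do |entry|
  puts "#{entry.jid} runs at #{entry.at}"
end

job = scheduled.find_job("2c9a4f5563f1e8c8a5d9f3ab")
job&.delete
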
@@ -616,13 +644,14 @@ module Sidekiq
       Sidekiq.redis do |conn|
         elements = conn.zrangebyscore(name, score, score)
         elements.each do |element|
-          message = Sidekiq.load_json(element)
-          if message["jid"] == jid
-            ret = conn.zrem(name, element)
-            @_size -= 1 if ret
-            break ret
+          if element.index(jid)
+            message = Sidekiq.load_json(element)
+            if message["jid"] == jid
+              ret = conn.zrem(name, element)
+              @_size -= 1 if ret
+              break ret
+            end
           end
-          false
         end
       end
     end
@@ -644,7 +673,7 @@ module Sidekiq
   # end.map(&:delete)
   class ScheduledSet < JobSet
     def initialize
-      super 'schedule'
+      super "schedule"
     end
   end
 
@@ -662,19 +691,15 @@ module Sidekiq
   # end.map(&:delete)
   class RetrySet < JobSet
     def initialize
-      super 'retry'
+      super "retry"
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
     end
 
     def kill_all
-      while size > 0
-        each(&:kill)
-      end
+      each(&:kill) while size > 0
     end
   end
 
@@ -683,15 +708,15 @@ module Sidekiq
   #
   class DeadSet < JobSet
     def initialize
-      super 'dead'
+      super "dead"
     end
 
-    def kill(message, opts={})
+    def kill(message, opts = {})
       now = Time.now.to_f
       Sidekiq.redis do |conn|
         conn.multi do
           conn.zadd(name, now.to_s, message)
-          conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
           conn.zremrangebyrank(name, 0, - self.class.max_jobs)
         end
       end
@@ -708,9 +733,7 @@ module Sidekiq
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
     end
 
     def self.max_jobs
@@ -724,16 +747,15 @@ module Sidekiq
 
   ##
   # Enumerates the set of Sidekiq processes which are actively working
-  # right now. Each process send a heartbeat to Redis every 5 seconds
+  # right now. Each process sends a heartbeat to Redis every 5 seconds
   # so this set should be relatively accurate, barring network partitions.
   #
   # Yields a Sidekiq::Process.
   #
   class ProcessSet
     include Enumerable
-    include RedisScanner
 
-    def initialize(clean_plz=true)
+    def initialize(clean_plz = true)
       cleanup if clean_plz
     end
 
@@ -742,50 +764,47 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes').sort
-        heartbeats = conn.pipelined do
+        procs = conn.sscan_each("processes").to_a.sort
+        heartbeats = conn.pipelined {
           procs.each do |key|
-            conn.hget(key, 'info')
+            conn.hget(key, "info")
          end
-        end
+        }
 
        # the hash named key has an expiry of 60 seconds.
        # if it's not found, that means the process has not reported
        # in to Redis and probably died.
-        to_prune = []
-        heartbeats.each_with_index do |beat, i|
-          to_prune << procs[i] if beat.nil?
-        end
-        count = conn.srem('processes', to_prune) unless to_prune.empty?
+        to_prune = procs.select.with_index { |proc, i|
+          heartbeats[i].nil?
+        }
+        count = conn.srem("processes", to_prune) unless to_prune.empty?
      end
      count
    end
 
    def each
-      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
+      result = Sidekiq.redis { |conn|
+        procs = conn.sscan_each("processes").to_a.sort
 
-      Sidekiq.redis do |conn|
        # We're making a tradeoff here between consuming more memory instead of
        # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
        # you'll be happier this way
-        result = conn.pipelined do
+        conn.pipelined do
          procs.each do |key|
-            conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+            conn.hmget(key, "info", "busy", "beat", "quiet")
          end
        end
+      }
 
-        result.each do |info, busy, at_s, quiet|
-          # If a process is stopped between when we query Redis for `procs` and
-          # when we query for `result`, we will have an item in `result` that is
-          # composed of `nil` values.
-          next if info.nil?
+      result.each do |info, busy, at_s, quiet|
+        # If a process is stopped between when we query Redis for `procs` and
+        # when we query for `result`, we will have an item in `result` that is
+        # composed of `nil` values.
+        next if info.nil?
 
-          hash = Sidekiq.load_json(info)
-          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
-        end
+        hash = Sidekiq.load_json(info)
+        yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
      end
-
-      nil
    end
 
    # This method is not guaranteed accurate since it does not prune the set
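
For orientation, not part of the diff: each yielded Sidekiq::Process still exposes the heartbeat fields merged in above ("busy", "beat", "quiet"). A short usage sketch:

require "sidekiq/api"

Sidekiq::ProcessSet.new.each do |process|
  puts "#{process["identity"]}: busy=#{process["busy"]} stopping=#{process.stopping?}"
end
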
@@ -793,7 +812,7 @@ module Sidekiq
    # contains Sidekiq processes which have sent a heartbeat within the last
    # 60 seconds.
    def size
-      Sidekiq.redis { |conn| conn.scard('processes') }
+      Sidekiq.redis { |conn| conn.scard("processes") }
    end
 
    # Returns the identity of the current cluster leader or "" if no leader.
@@ -801,9 +820,9 @@ module Sidekiq
    # or Sidekiq Pro.
    def leader
      @leader ||= begin
-        x = Sidekiq.redis {|c| c.get("dear-leader") }
+        x = Sidekiq.redis { |c| c.get("dear-leader") }
        # need a non-falsy value so we can memoize
-        x = "" unless x
+        x ||= ""
        x
      end
    end
@@ -830,11 +849,11 @@ module Sidekiq
    end
 
    def tag
-      self['tag']
+      self["tag"]
    end
 
    def labels
-      Array(self['labels'])
+      Array(self["labels"])
    end
 
    def [](key)
@@ -842,23 +861,23 @@ module Sidekiq
    end
 
    def identity
-      self['identity']
+      self["identity"]
    end
 
    def quiet!
-      signal('TSTP')
+      signal("TSTP")
    end
 
    def stop!
-      signal('TERM')
+      signal("TERM")
    end
 
    def dump_threads
-      signal('TTIN')
+      signal("TTIN")
    end
 
    def stopping?
-      self['quiet'] == 'true'
+      self["quiet"] == "true"
    end
 
    private
@@ -872,7 +891,6 @@ module Sidekiq
        end
      end
    end
-
  end
 
  ##
@@ -897,16 +915,15 @@ module Sidekiq
  #
  class Workers
    include Enumerable
-    include RedisScanner
 
    def each
      Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = conn.sscan_each("processes").to_a
        procs.sort.each do |key|
-          valid, workers = conn.pipelined do
+          valid, workers = conn.pipelined {
            conn.exists(key)
            conn.hgetall("#{key}:workers")
-          end
+          }
          next unless valid
          workers.each_pair do |tid, json|
            yield key, tid, Sidekiq.load_json(json)
@@ -923,18 +940,17 @@ module Sidekiq
    # which can easily get out of sync with crashy processes.
    def size
      Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = conn.sscan_each("processes").to_a
        if procs.empty?
          0
        else
-          conn.pipelined do
+          conn.pipelined {
            procs.each do |key|
-              conn.hget(key, 'busy')
+              conn.hget(key, "busy")
            end
-          end.map(&:to_i).inject(:+)
+          }.sum(&:to_i)
        end
      end
    end
  end
-
 end
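
For orientation, not part of the diff: Workers#each yields the process key, the thread id, and the parsed work payload for every in-progress job. A short usage sketch:

require "sidekiq/api"

Sidekiq::Workers.new.each do |process_id, thread_id, work|
  puts "#{process_id} #{thread_id}: #{work["queue"]} job started at #{work["run_at"]}"
end

puts "busy threads: #{Sidekiq::Workers.new.size}"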