sidekiq 5.2.7 → 6.0.0

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (61)
  1. checksums.yaml +4 -4
  2. data/.gitignore +0 -2
  3. data/.standard.yml +20 -0
  4. data/6.0-Upgrade.md +70 -0
  5. data/Changes.md +34 -0
  6. data/Ent-2.0-Upgrade.md +37 -0
  7. data/Ent-Changes.md +12 -0
  8. data/Gemfile +12 -11
  9. data/Gemfile.lock +196 -0
  10. data/Pro-5.0-Upgrade.md +25 -0
  11. data/Pro-Changes.md +12 -3
  12. data/README.md +16 -30
  13. data/Rakefile +5 -4
  14. data/bin/sidekiqload +26 -22
  15. data/bin/sidekiqmon +9 -0
  16. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  17. data/lib/generators/sidekiq/worker_generator.rb +12 -14
  18. data/lib/sidekiq.rb +53 -42
  19. data/lib/sidekiq/api.rb +138 -151
  20. data/lib/sidekiq/cli.rb +97 -162
  21. data/lib/sidekiq/client.rb +45 -46
  22. data/lib/sidekiq/delay.rb +5 -6
  23. data/lib/sidekiq/exception_handler.rb +10 -12
  24. data/lib/sidekiq/extensions/action_mailer.rb +10 -20
  25. data/lib/sidekiq/extensions/active_record.rb +9 -7
  26. data/lib/sidekiq/extensions/class_methods.rb +9 -7
  27. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  28. data/lib/sidekiq/fetch.rb +5 -6
  29. data/lib/sidekiq/job_logger.rb +37 -7
  30. data/lib/sidekiq/job_retry.rb +45 -58
  31. data/lib/sidekiq/launcher.rb +59 -51
  32. data/lib/sidekiq/logger.rb +69 -0
  33. data/lib/sidekiq/manager.rb +7 -9
  34. data/lib/sidekiq/middleware/chain.rb +3 -2
  35. data/lib/sidekiq/middleware/i18n.rb +5 -7
  36. data/lib/sidekiq/monitor.rb +148 -0
  37. data/lib/sidekiq/paginator.rb +11 -12
  38. data/lib/sidekiq/processor.rb +52 -49
  39. data/lib/sidekiq/rails.rb +23 -29
  40. data/lib/sidekiq/redis_connection.rb +31 -37
  41. data/lib/sidekiq/scheduled.rb +17 -19
  42. data/lib/sidekiq/testing.rb +22 -23
  43. data/lib/sidekiq/testing/inline.rb +2 -1
  44. data/lib/sidekiq/util.rb +17 -14
  45. data/lib/sidekiq/version.rb +2 -1
  46. data/lib/sidekiq/web.rb +41 -49
  47. data/lib/sidekiq/web/action.rb +14 -10
  48. data/lib/sidekiq/web/application.rb +60 -57
  49. data/lib/sidekiq/web/helpers.rb +66 -67
  50. data/lib/sidekiq/web/router.rb +17 -14
  51. data/lib/sidekiq/worker.rb +124 -97
  52. data/sidekiq.gemspec +16 -16
  53. data/web/assets/javascripts/dashboard.js +2 -21
  54. data/web/locales/ja.yml +2 -1
  55. metadata +19 -29
  56. data/.travis.yml +0 -11
  57. data/bin/sidekiqctl +0 -20
  58. data/lib/sidekiq/core_ext.rb +0 -1
  59. data/lib/sidekiq/ctl.rb +0 -221
  60. data/lib/sidekiq/logging.rb +0 -122
  61. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
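The diff below covers data/lib/sidekiq/api.rb. Most of the churn is mechanical formatting from the newly added Standard config (data/.standard.yml): single-quoted string literals become double-quoted, and `do ... end` blocks whose return value is used are rewritten as `{ ... }` blocks. A minimal before/after sketch of that pattern (illustrative only; `conn.smembers` stands in for the scan helper used in the real file, and a reachable Redis is assumed):

```ruby
require "sidekiq"

# Sidekiq 5.2.7 style: single quotes, do/end even when the block's value is used
processes = Sidekiq.redis do |conn|
  conn.smembers('processes')
end

# Sidekiq 6.0.0 style (Standard): double quotes, { } when the block's value is used
processes = Sidekiq.redis { |conn|
  conn.smembers("processes")
}
```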
```diff
@@ -1,16 +1,16 @@
 # frozen_string_literal: true
-require 'sidekiq'
 
-module Sidekiq
+require "sidekiq"
 
+module Sidekiq
   module RedisScanner
     def sscan(conn, key)
-      cursor = '0'
+      cursor = "0"
       result = []
       loop do
         cursor, values = conn.sscan(key, cursor)
         result.push(*values)
-        break if cursor == '0'
+        break if cursor == "0"
       end
       result
     end
@@ -64,61 +64,65 @@ module Sidekiq
     end
 
     def fetch_stats!
-      pipe1_res = Sidekiq.redis do |conn|
+      pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          conn.get('stat:processed')
-          conn.get('stat:failed')
-          conn.zcard('schedule')
-          conn.zcard('retry')
-          conn.zcard('dead')
-          conn.scard('processes')
-          conn.lrange('queue:default', -1, -1)
+          conn.get("stat:processed")
+          conn.get("stat:failed")
+          conn.zcard("schedule")
+          conn.zcard("retry")
+          conn.zcard("dead")
+          conn.scard("processes")
+          conn.lrange("queue:default", -1, -1)
         end
-      end
+      }
 
-      processes = Sidekiq.redis do |conn|
-        sscan(conn, 'processes')
-      end
+      processes = Sidekiq.redis { |conn|
+        sscan(conn, "processes")
+      }
 
-      queues = Sidekiq.redis do |conn|
-        sscan(conn, 'queues')
-      end
+      queues = Sidekiq.redis { |conn|
+        sscan(conn, "queues")
+      }
 
-      pipe2_res = Sidekiq.redis do |conn|
+      pipe2_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          processes.each {|key| conn.hget(key, 'busy') }
-          queues.each {|queue| conn.llen("queue:#{queue}") }
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
         end
-      end
+      }
 
       s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued     = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-        job = Sidekiq.load_json(entry) rescue {}
-        now = Time.now.to_f
-        thence = job['enqueued_at'] || now
-        now - thence
-      else
-        0
-      end
+        job = begin
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
+        now = Time.now.to_f
+        thence = job["enqueued_at"] || now
+        now - thence
+      else
+        0
+      end
       @stats = {
-        processed:         pipe1_res[0].to_i,
-        failed:            pipe1_res[1].to_i,
-        scheduled_size:    pipe1_res[2],
-        retry_size:        pipe1_res[3],
-        dead_size:         pipe1_res[4],
-        processes_size:    pipe1_res[5],
+        processed: pipe1_res[0].to_i,
+        failed: pipe1_res[1].to_i,
+        scheduled_size: pipe1_res[2],
+        retry_size: pipe1_res[3],
+        dead_size: pipe1_res[4],
+        processes_size: pipe1_res[5],
 
         default_queue_latency: default_queue_latency,
-        workers_size:      workers_size,
-        enqueued:          enqueued
+        workers_size: workers_size,
+        enqueued: enqueued,
       }
     end
 
     def reset(*stats)
-      all = %w(failed processed)
+      all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
 
       mset_args = []
```
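The hunk above rewrites Sidekiq::Stats#fetch_stats!, which pipelines all of the dashboard counters out of Redis in two round trips. A minimal usage sketch of the public Stats API those counters feed (reader methods mirror the keys of the @stats hash; the printed values are illustrative):

```ruby
require "sidekiq"
require "sidekiq/api"

stats = Sidekiq::Stats.new        # runs fetch_stats! on construction
puts stats.processed              # "stat:processed" counter
puts stats.failed                 # "stat:failed" counter
puts stats.enqueued               # jobs waiting across all queues
puts stats.default_queue_latency  # seconds the oldest "default" job has waited
puts stats.queues                 # e.g. {"default" => 12, "mailers" => 3}
```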
```diff
@@ -142,20 +146,19 @@ module Sidekiq
 
       def lengths
         Sidekiq.redis do |conn|
-          queues = sscan(conn, 'queues')
+          queues = sscan(conn, "queues")
 
-          lengths = conn.pipelined do
+          lengths = conn.pipelined {
             queues.each do |queue|
               conn.llen("queue:#{queue}")
             end
-          end
+          }
 
           i = 0
-          array_of_arrays = queues.inject({}) do |memo, queue|
+          array_of_arrays = queues.each_with_object({}) { |queue, memo|
             memo[queue] = lengths[i]
             i += 1
-            memo
-          end.sort_by { |_, size| size }
+          }.sort_by { |_, size| size }
 
           Hash[array_of_arrays.reverse]
         end
@@ -228,12 +231,12 @@ module Sidekiq
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| sscan(c, "queues") }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
-    def initialize(name="default")
+    def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end
@@ -253,13 +256,13 @@ module Sidekiq
     #
     # @return Float
     def latency
-      entry = Sidekiq.redis do |conn|
+      entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
-      end.first
+      }.first
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
-      thence = job['enqueued_at'] || now
+      thence = job["enqueued_at"] || now
       now - thence
     end
 
@@ -269,12 +272,12 @@ module Sidekiq
       page = 0
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size - deleted_size
-        range_end   = range_start + page_size - 1
-        entries = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        entries = Sidekiq.redis { |conn|
           conn.lrange @rname, range_start, range_end
-        end
+        }
         break if entries.empty?
         page += 1
         entries.each do |entry|
```
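These hunks touch Sidekiq::Queue: the Queue.all scan, the constructor default, latency, and the 50-entries-per-page loop used when iterating a queue. A short sketch of that API in use (the queue and worker names are assumptions):

```ruby
require "sidekiq/api"

Sidekiq::Queue.all.each { |q| puts "#{q.name}: #{q.size}" }

queue = Sidekiq::Queue.new("default")
puts queue.latency  # seconds since the oldest job in the queue was enqueued

# Iteration pages through the Redis list in blocks of 50, as in the loop above.
queue.each do |job|
  job.delete if job.klass == "ObsoleteWorker"  # hypothetical worker class
end
```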
```diff
@@ -315,11 +318,11 @@ module Sidekiq
     attr_reader :item
     attr_reader :value
 
-    def initialize(item, queue_name=nil)
+    def initialize(item, queue_name = nil)
       @args = nil
       @value = item
       @item = item.is_a?(Hash) ? item : parse(item)
-      @queue = queue_name || @item['queue']
+      @queue = queue_name || @item["queue"]
     end
 
     def parse(item)
@@ -334,7 +337,7 @@ module Sidekiq
     end
 
     def klass
-      self['class']
+      self["class"]
     end
 
     def display_class
@@ -345,16 +348,16 @@ module Sidekiq
           "#{target}.#{method}"
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_class = @item['wrapped'] || args[0]
-        if 'ActionMailer::DeliveryJob' == job_class
+        job_class = @item["wrapped"] || args[0]
+        if job_class == "ActionMailer::DeliveryJob"
           # MailerClass#mailer_method
-          args[0]['arguments'][0..1].join('#')
+          args[0]["arguments"][0..1].join("#")
         else
-          job_class
+          job_class
         end
       else
         klass
-        end
+      end
     end
 
     def display_args
@@ -365,53 +368,51 @@ module Sidekiq
           arg
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self['wrapped'] ? args[0]["arguments"] : []
-        if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
         else
           job_args
         end
       else
-        if self['encrypt']
+        if self["encrypt"]
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'
+          args[-1] = "[encrypted data]"
         end
         args
-        end
+      end
     end
 
     def args
-      @args || @item['args']
+      @args || @item["args"]
     end
 
     def jid
-      self['jid']
+      self["jid"]
     end
 
     def enqueued_at
-      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+      self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
    end
 
     def created_at
-      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+      Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
-    def queue
-      @queue
-    end
+    attr_reader :queue
 
     def latency
       now = Time.now.to_f
-      now - (@item['enqueued_at'] || @item['created_at'] || now)
+      now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
     ##
     # Remove this job from the queue.
     def delete
-      count = Sidekiq.redis do |conn|
+      count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
-      end
+      }
       count != 0
     end
 
@@ -425,14 +426,12 @@ module Sidekiq
     private
 
     def safe_load(content, default)
-      begin
-        yield(*YAML.load(content))
-      rescue => ex
-        # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-        # memory yet so the YAML can't be loaded.
-        Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
-        default
-      end
+      yield(*YAML.load(content))
+    rescue => ex
+      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+      # memory yet so the YAML can't be loaded.
+      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+      default
     end
   end
 
@@ -473,7 +472,7 @@ module Sidekiq
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
-        msg['retry_count'] -= 1 if msg['retry_count']
+        msg["retry_count"] -= 1 if msg["retry_count"]
         Sidekiq::Client.push(msg)
       end
     end
```
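SortedEntry#retry (above) decrements the job's retry_count and pushes it straight back through Sidekiq::Client. A sketch of retrying a single entry from the retry set by JID (the JID is a placeholder; find_job is the slow linear scan defined further down in this file):

```ruby
require "sidekiq/api"

retries = Sidekiq::RetrySet.new
entry = retries.find_job("0123456789abcdef01234567")  # hypothetical JID
if entry
  entry.retry    # re-enqueue immediately instead of waiting for the next attempt
  # entry.delete # or discard it instead
end
```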
```diff
@@ -487,31 +486,31 @@ module Sidekiq
     end
 
     def error?
-      !!item['error_class']
+      !!item["error_class"]
     end
 
     private
 
     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi do
+        results = conn.multi {
           conn.zrangebyscore(parent.name, score, score)
           conn.zremrangebyscore(parent.name, score, score)
-        end.first
+        }.first
 
         if results.size == 1
           yield results.first
         else
           # multiple jobs with the same score
           # find the one with the right JID and push it
-          hash = results.group_by do |message|
+          hash = results.group_by { |message|
             if message.index(jid)
               msg = Sidekiq.load_json(message)
-              msg['jid'] == jid
+              msg["jid"] == jid
             else
               false
             end
-          end
+          }
 
           msg = hash.fetch(true, []).first
           yield msg if msg
@@ -525,7 +524,6 @@ module Sidekiq
         end
       end
     end
-
   end
 
   class SortedSet
@@ -551,7 +549,6 @@ module Sidekiq
   end
 
   class JobSet < SortedSet
-
     def schedule(timestamp, message)
       Sidekiq.redis do |conn|
         conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -564,15 +561,15 @@ module Sidekiq
       page = -1
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size + offset_size
-        range_end   = range_start + page_size - 1
-        elements = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        elements = Sidekiq.redis { |conn|
           conn.zrange name, range_start, range_end, with_scores: true
-        end
+        }
         break if elements.empty?
         page -= 1
-        elements.reverse.each do |element, score|
+        elements.reverse_each do |element, score|
           yield SortedEntry.new(self, score, element)
         end
         offset_size = initial_size - @_size
@@ -580,18 +577,17 @@ module Sidekiq
     end
 
     def fetch(score, jid = nil)
-      elements = Sidekiq.redis do |conn|
+      elements = Sidekiq.redis { |conn|
        conn.zrangebyscore(name, score, score)
-      end
+      }
 
-      elements.inject([]) do |result, element|
+      elements.each_with_object([]) do |element, result|
        entry = SortedEntry.new(self, score, element)
        if jid
          result << entry if entry.jid == jid
        else
          result << entry
        end
-        result
      end
    end
 
@@ -601,7 +597,7 @@ module Sidekiq
     # This is a slow, inefficient operation. Do not use under
     # normal conditions. Sidekiq Pro contains a faster version.
     def find_job(jid)
-      self.detect { |j| j.jid == jid }
+      detect { |j| j.jid == jid }
     end
 
     def delete_by_value(name, value)
@@ -622,7 +618,6 @@ module Sidekiq
             @_size -= 1 if ret
             break ret
           end
-          false
         end
       end
     end
@@ -644,7 +639,7 @@ module Sidekiq
   #   end.map(&:delete)
   class ScheduledSet < JobSet
     def initialize
-      super 'schedule'
+      super "schedule"
     end
   end
 
@@ -662,19 +657,15 @@ module Sidekiq
   #   end.map(&:delete)
   class RetrySet < JobSet
     def initialize
-      super 'retry'
+      super "retry"
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
     end
 
     def kill_all
-      while size > 0
-        each(&:kill)
-      end
+      each(&:kill) while size > 0
     end
   end
 
@@ -683,15 +674,15 @@ module Sidekiq
   #
   class DeadSet < JobSet
     def initialize
-      super 'dead'
+      super "dead"
     end
 
-    def kill(message, opts={})
+    def kill(message, opts = {})
      now = Time.now.to_f
      Sidekiq.redis do |conn|
        conn.multi do
          conn.zadd(name, now.to_s, message)
-          conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
        end
      end
@@ -708,9 +699,7 @@ module Sidekiq
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
     end
 
     def self.max_jobs
```
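The RetrySet and DeadSet hunks above collapse the `while size > 0` loops into one-line `while` modifiers and double-quote the trimming arguments used by kill. Typical bulk operations against these sets, sketched:

```ruby
require "sidekiq/api"

Sidekiq::RetrySet.new.retry_all  # push every pending retry back onto its queue
Sidekiq::RetrySet.new.kill_all   # or move every pending retry to the dead set

dead = Sidekiq::DeadSet.new
puts dead.size
dead.retry_all                   # resurrect everything in the morgue
```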
```diff
@@ -724,7 +713,7 @@ module Sidekiq
 
   ##
   # Enumerates the set of Sidekiq processes which are actively working
-  # right now. Each process send a heartbeat to Redis every 5 seconds
+  # right now. Each process sends a heartbeat to Redis every 5 seconds
   # so this set should be relatively accurate, barring network partitions.
   #
   # Yields a Sidekiq::Process.
@@ -733,7 +722,7 @@ module Sidekiq
     include Enumerable
     include RedisScanner
 
-    def initialize(clean_plz=true)
+    def initialize(clean_plz = true)
       cleanup if clean_plz
     end
 
@@ -742,12 +731,12 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes').sort
-        heartbeats = conn.pipelined do
+        procs = sscan(conn, "processes").sort
+        heartbeats = conn.pipelined {
           procs.each do |key|
-            conn.hget(key, 'info')
+            conn.hget(key, "info")
           end
-        end
+        }
 
         # the hash named key has an expiry of 60 seconds.
         # if it's not found, that means the process has not reported
@@ -756,23 +745,23 @@ module Sidekiq
         heartbeats.each_with_index do |beat, i|
           to_prune << procs[i] if beat.nil?
         end
-        count = conn.srem('processes', to_prune) unless to_prune.empty?
+        count = conn.srem("processes", to_prune) unless to_prune.empty?
       end
       count
     end
 
     def each
-      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
+      procs = Sidekiq.redis { |conn| sscan(conn, "processes") }.sort
 
       Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-        result = conn.pipelined do
+        result = conn.pipelined {
           procs.each do |key|
-            conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+            conn.hmget(key, "info", "busy", "beat", "quiet")
          end
-        end
+        }
 
         result.each do |info, busy, at_s, quiet|
           # If a process is stopped between when we query Redis for `procs` and
@@ -781,7 +770,7 @@ module Sidekiq
           next if info.nil?
 
           hash = Sidekiq.load_json(info)
-          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
+          yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
         end
       end
 
@@ -793,7 +782,7 @@ module Sidekiq
     # contains Sidekiq processes which have sent a heartbeat within the last
     # 60 seconds.
     def size
-      Sidekiq.redis { |conn| conn.scard('processes') }
+      Sidekiq.redis { |conn| conn.scard("processes") }
     end
 
     # Returns the identity of the current cluster leader or "" if no leader.
@@ -801,9 +790,9 @@ module Sidekiq
     # or Sidekiq Pro.
     def leader
       @leader ||= begin
-        x = Sidekiq.redis {|c| c.get("dear-leader") }
+        x = Sidekiq.redis { |c| c.get("dear-leader") }
         # need a non-falsy value so we can memoize
-        x = "" unless x
+        x ||= ""
         x
       end
     end
@@ -830,11 +819,11 @@ module Sidekiq
     end
 
     def tag
-      self['tag']
+      self["tag"]
     end
 
     def labels
-      Array(self['labels'])
+      Array(self["labels"])
     end
 
     def [](key)
@@ -842,23 +831,23 @@ module Sidekiq
     end
 
     def identity
-      self['identity']
+      self["identity"]
     end
 
     def quiet!
-      signal('TSTP')
+      signal("TSTP")
     end
 
     def stop!
-      signal('TERM')
+      signal("TERM")
     end
 
     def dump_threads
-      signal('TTIN')
+      signal("TTIN")
     end
 
     def stopping?
-      self['quiet'] == 'true'
+      self["quiet"] == "true"
     end
 
     private
```
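ProcessSet enumerates live Sidekiq processes from their heartbeat hashes, and Process wraps a single heartbeat with signalling helpers. A sketch of quieting a cluster through this API (identity and busy come from the heartbeat hash merged in `each` above):

```ruby
require "sidekiq/api"

processes = Sidekiq::ProcessSet.new  # prunes stale heartbeats first (clean_plz)
puts processes.size                  # processes with a heartbeat in the last 60s

processes.each do |process|
  puts "#{process["identity"]} busy=#{process["busy"]}"
  process.quiet!                     # TSTP: stop fetching new jobs
  # process.stop!                    # TERM: shut down after the configured timeout
end
```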
```diff
@@ -872,7 +861,6 @@ module Sidekiq
         end
       end
     end
-
   end
 
   ##
@@ -901,12 +889,12 @@ module Sidekiq
 
     def each
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = sscan(conn, "processes")
         procs.sort.each do |key|
-          valid, workers = conn.pipelined do
+          valid, workers = conn.pipelined {
             conn.exists(key)
             conn.hgetall("#{key}:workers")
-          end
+          }
           next unless valid
           workers.each_pair do |tid, json|
             yield key, tid, Sidekiq.load_json(json)
@@ -923,18 +911,17 @@ module Sidekiq
    # which can easily get out of sync with crashy processes.
    def size
      Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = sscan(conn, "processes")
        if procs.empty?
          0
        else
-          conn.pipelined do
+          conn.pipelined {
            procs.each do |key|
-              conn.hget(key, 'busy')
+              conn.hget(key, "busy")
            end
-          end.map(&:to_i).inject(:+)
+          }.map(&:to_i).inject(:+)
        end
      end
    end
  end
-
 end
```
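Workers reports the jobs being executed right now, keyed by process and thread; as the comment above notes, it can lag behind crashed processes. A minimal sketch:

```ruby
require "sidekiq/api"

workers = Sidekiq::Workers.new
puts workers.size  # busy count summed across all processes

workers.each do |process_id, thread_id, work|
  # `work` is the decoded JSON stored in the "#{process_id}:workers" hash
  puts "#{process_id} #{thread_id} #{work["queue"]} #{work["run_at"]}"
end
```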