sidekiq 5.2.1 → 6.0.0

This diff shows the contents of publicly available package versions that have been released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (70)
  1. checksums.yaml +5 -5
  2. data/.circleci/config.yml +61 -0
  3. data/.gitignore +1 -1
  4. data/.standard.yml +20 -0
  5. data/6.0-Upgrade.md +70 -0
  6. data/COMM-LICENSE +11 -9
  7. data/Changes.md +79 -0
  8. data/Ent-2.0-Upgrade.md +37 -0
  9. data/Ent-Changes.md +30 -1
  10. data/Gemfile +19 -9
  11. data/Gemfile.lock +196 -0
  12. data/Pro-5.0-Upgrade.md +25 -0
  13. data/Pro-Changes.md +29 -0
  14. data/README.md +17 -31
  15. data/Rakefile +6 -4
  16. data/bin/sidekiqload +27 -23
  17. data/bin/sidekiqmon +9 -0
  18. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  19. data/lib/generators/sidekiq/worker_generator.rb +12 -14
  20. data/lib/sidekiq.rb +56 -43
  21. data/lib/sidekiq/api.rb +141 -148
  22. data/lib/sidekiq/cli.rb +142 -207
  23. data/lib/sidekiq/client.rb +45 -46
  24. data/lib/sidekiq/delay.rb +5 -6
  25. data/lib/sidekiq/exception_handler.rb +10 -12
  26. data/lib/sidekiq/extensions/action_mailer.rb +10 -20
  27. data/lib/sidekiq/extensions/active_record.rb +9 -7
  28. data/lib/sidekiq/extensions/class_methods.rb +9 -7
  29. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  30. data/lib/sidekiq/fetch.rb +5 -6
  31. data/lib/sidekiq/job_logger.rb +39 -9
  32. data/lib/sidekiq/job_retry.rb +62 -54
  33. data/lib/sidekiq/launcher.rb +60 -52
  34. data/lib/sidekiq/logger.rb +69 -0
  35. data/lib/sidekiq/manager.rb +10 -12
  36. data/lib/sidekiq/middleware/chain.rb +3 -2
  37. data/lib/sidekiq/middleware/i18n.rb +5 -7
  38. data/lib/sidekiq/monitor.rb +148 -0
  39. data/lib/sidekiq/paginator.rb +11 -12
  40. data/lib/sidekiq/processor.rb +98 -62
  41. data/lib/sidekiq/rails.rb +24 -29
  42. data/lib/sidekiq/redis_connection.rb +34 -21
  43. data/lib/sidekiq/scheduled.rb +17 -19
  44. data/lib/sidekiq/testing.rb +22 -23
  45. data/lib/sidekiq/testing/inline.rb +2 -1
  46. data/lib/sidekiq/util.rb +17 -14
  47. data/lib/sidekiq/version.rb +2 -1
  48. data/lib/sidekiq/web.rb +41 -49
  49. data/lib/sidekiq/web/action.rb +14 -10
  50. data/lib/sidekiq/web/application.rb +67 -58
  51. data/lib/sidekiq/web/helpers.rb +72 -66
  52. data/lib/sidekiq/web/router.rb +17 -14
  53. data/lib/sidekiq/worker.rb +134 -91
  54. data/sidekiq.gemspec +16 -18
  55. data/web/assets/javascripts/dashboard.js +14 -23
  56. data/web/assets/stylesheets/application.css +35 -2
  57. data/web/assets/stylesheets/bootstrap.css +1 -1
  58. data/web/locales/ar.yml +1 -0
  59. data/web/locales/en.yml +1 -0
  60. data/web/locales/ja.yml +2 -1
  61. data/web/views/_nav.erb +3 -17
  62. data/web/views/queue.erb +1 -0
  63. data/web/views/queues.erb +1 -1
  64. data/web/views/retries.erb +4 -0
  65. metadata +31 -26
  66. data/.travis.yml +0 -14
  67. data/bin/sidekiqctl +0 -99
  68. data/lib/sidekiq/core_ext.rb +0 -1
  69. data/lib/sidekiq/logging.rb +0 -122
  70. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
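
The hunks below come from data/lib/sidekiq/api.rb (recognizable from the classes they touch: RedisScanner, Stats, Queue, RetrySet, DeadSet, ProcessSet, Workers). Most of the churn is mechanical formatting from Sidekiq 6.0's adoption of the standard gem (note the new data/.standard.yml above), plus a few behavioral additions such as RetrySet#kill_all. An illustrative before/after of the recurring style rules, written for this page rather than taken from the gem:

# 5.2.1 style
cursor = '0'
all = %w(failed processed)
doubled = [1, 2, 3].map do |n|
  n * 2
end

# 6.0.0 style under the standard formatter: double quotes, %w[],
# and brace blocks when the block's return value is used
cursor = "0"
all = %w[failed processed]
doubled = [1, 2, 3].map { |n|
  n * 2
}
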
@@ -1,16 +1,16 @@
 # frozen_string_literal: true
-require 'sidekiq'
 
-module Sidekiq
+require "sidekiq"
 
+module Sidekiq
   module RedisScanner
     def sscan(conn, key)
-      cursor = '0'
+      cursor = "0"
       result = []
       loop do
         cursor, values = conn.sscan(key, cursor)
         result.push(*values)
-        break if cursor == '0'
+        break if cursor == "0"
       end
       result
     end
@@ -64,61 +65,65 @@ module Sidekiq
     end
 
     def fetch_stats!
-      pipe1_res = Sidekiq.redis do |conn|
+      pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          conn.get('stat:processed')
-          conn.get('stat:failed')
-          conn.zcard('schedule')
-          conn.zcard('retry')
-          conn.zcard('dead')
-          conn.scard('processes')
-          conn.lrange('queue:default', -1, -1)
+          conn.get("stat:processed")
+          conn.get("stat:failed")
+          conn.zcard("schedule")
+          conn.zcard("retry")
+          conn.zcard("dead")
+          conn.scard("processes")
+          conn.lrange("queue:default", -1, -1)
         end
-      end
+      }
 
-      processes = Sidekiq.redis do |conn|
-        sscan(conn, 'processes')
-      end
+      processes = Sidekiq.redis { |conn|
+        sscan(conn, "processes")
+      }
 
-      queues = Sidekiq.redis do |conn|
-        sscan(conn, 'queues')
-      end
+      queues = Sidekiq.redis { |conn|
+        sscan(conn, "queues")
+      }
 
-      pipe2_res = Sidekiq.redis do |conn|
+      pipe2_res = Sidekiq.redis { |conn|
         conn.pipelined do
-          processes.each {|key| conn.hget(key, 'busy') }
-          queues.each {|queue| conn.llen("queue:#{queue}") }
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
         end
-      end
+      }
 
       s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-        job = Sidekiq.load_json(entry) rescue {}
-        now = Time.now.to_f
-        thence = job['enqueued_at'] || now
-        now - thence
-      else
-        0
-      end
+        job = begin
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
+        now = Time.now.to_f
+        thence = job["enqueued_at"] || now
+        now - thence
+      else
+        0
+      end
       @stats = {
-        processed: pipe1_res[0].to_i,
-        failed: pipe1_res[1].to_i,
-        scheduled_size: pipe1_res[2],
-        retry_size: pipe1_res[3],
-        dead_size: pipe1_res[4],
-        processes_size: pipe1_res[5],
+        processed: pipe1_res[0].to_i,
+        failed: pipe1_res[1].to_i,
+        scheduled_size: pipe1_res[2],
+        retry_size: pipe1_res[3],
+        dead_size: pipe1_res[4],
+        processes_size: pipe1_res[5],
 
         default_queue_latency: default_queue_latency,
-        workers_size: workers_size,
-        enqueued: enqueued
+        workers_size: workers_size,
+        enqueued: enqueued,
       }
     end
 
     def reset(*stats)
-      all = %w(failed processed)
+      all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
 
       mset_args = []
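
For orientation, the Sidekiq::Stats object reworked in the hunk above is public API. A minimal usage sketch, assuming the sidekiq gem is installed and Redis is reachable (for example via REDIS_URL):

require "sidekiq"
require "sidekiq/api"

stats = Sidekiq::Stats.new
puts "processed: #{stats.processed}"
puts "failed:    #{stats.failed}"
puts "enqueued:  #{stats.enqueued}"
puts "default queue latency: #{stats.default_queue_latency}s"
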
@@ -142,20 +146,19 @@ module Sidekiq
 
       def lengths
         Sidekiq.redis do |conn|
-          queues = sscan(conn, 'queues')
+          queues = sscan(conn, "queues")
 
-          lengths = conn.pipelined do
+          lengths = conn.pipelined {
             queues.each do |queue|
               conn.llen("queue:#{queue}")
             end
-          end
+          }
 
           i = 0
-          array_of_arrays = queues.inject({}) do |memo, queue|
+          array_of_arrays = queues.each_with_object({}) { |queue, memo|
             memo[queue] = lengths[i]
             i += 1
-            memo
-          end.sort_by { |_, size| size }
+          }.sort_by { |_, size| size }
 
           Hash[array_of_arrays.reverse]
         end
@@ -228,12 +231,12 @@ module Sidekiq
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| sscan(c, "queues") }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
-    def initialize(name="default")
+    def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end
@@ -253,13 +256,13 @@ module Sidekiq
     #
     # @return Float
     def latency
-      entry = Sidekiq.redis do |conn|
+      entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
-      end.first
+      }.first
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
-      thence = job['enqueued_at'] || now
+      thence = job["enqueued_at"] || now
       now - thence
     end
 
@@ -269,12 +272,12 @@ module Sidekiq
       page = 0
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size - deleted_size
-        range_end = range_start + page_size - 1
-        entries = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        entries = Sidekiq.redis { |conn|
           conn.lrange @rname, range_start, range_end
-        end
+        }
         break if entries.empty?
         page += 1
         entries.each do |entry|
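
The Queue methods changed above (Queue.all, #latency, #each) are also public API. A short usage sketch; the worker name HardWorker is hypothetical and the snippet assumes a reachable Redis:

require "sidekiq"
require "sidekiq/api"

Sidekiq::Queue.all.each do |q|
  puts "#{q.name}: size=#{q.size} latency=#{q.latency}s"
end

# Iterating a queue yields job entries (see the Job hunks below);
# each exposes klass, args, jid and delete.
Sidekiq::Queue.new("default").each do |job|
  job.delete if job.klass == "HardWorker"
end
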
@@ -315,11 +318,11 @@ module Sidekiq
     attr_reader :item
     attr_reader :value
 
-    def initialize(item, queue_name=nil)
+    def initialize(item, queue_name = nil)
       @args = nil
       @value = item
       @item = item.is_a?(Hash) ? item : parse(item)
-      @queue = queue_name || @item['queue']
+      @queue = queue_name || @item["queue"]
     end
 
     def parse(item)
@@ -334,7 +337,7 @@ module Sidekiq
     end
 
     def klass
-      self['class']
+      self["class"]
     end
 
     def display_class
@@ -345,16 +348,16 @@ module Sidekiq
           "#{target}.#{method}"
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_class = @item['wrapped'] || args[0]
-        if 'ActionMailer::DeliveryJob' == job_class
+        job_class = @item["wrapped"] || args[0]
+        if job_class == "ActionMailer::DeliveryJob"
           # MailerClass#mailer_method
-          args[0]['arguments'][0..1].join('#')
+          args[0]["arguments"][0..1].join("#")
         else
-          job_class
+          job_class
         end
       else
         klass
-      end
+      end
     end
 
     def display_args
@@ -365,53 +368,51 @@ module Sidekiq
           arg
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self['wrapped'] ? args[0]["arguments"] : []
-        if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
         else
           job_args
         end
       else
-        if self['encrypt']
+        if self["encrypt"]
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'
+          args[-1] = "[encrypted data]"
         end
         args
-      end
+      end
     end
 
     def args
-      @args || @item['args']
+      @args || @item["args"]
     end
 
     def jid
-      self['jid']
+      self["jid"]
     end
 
     def enqueued_at
-      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+      self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
     end
 
     def created_at
-      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+      Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
-    def queue
-      @queue
-    end
+    attr_reader :queue
 
     def latency
       now = Time.now.to_f
-      now - (@item['enqueued_at'] || @item['created_at'] || now)
+      now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
     ##
     # Remove this job from the queue.
     def delete
-      count = Sidekiq.redis do |conn|
+      count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
-      end
+      }
       count != 0
     end
 
@@ -425,14 +426,12 @@ module Sidekiq
     private
 
     def safe_load(content, default)
-      begin
-        yield(*YAML.load(content))
-      rescue => ex
-        # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-        # memory yet so the YAML can't be loaded.
-        Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
-        default
-      end
+      yield(*YAML.load(content))
+    rescue => ex
+      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+      # memory yet so the YAML can't be loaded.
+      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+      default
     end
   end
 
@@ -473,7 +472,7 @@ module Sidekiq
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
-        msg['retry_count'] -= 1 if msg['retry_count']
+        msg["retry_count"] -= 1 if msg["retry_count"]
         Sidekiq::Client.push(msg)
       end
     end
@@ -487,31 +486,31 @@ module Sidekiq
     end
 
     def error?
-      !!item['error_class']
+      !!item["error_class"]
     end
 
     private
 
     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi do
+        results = conn.multi {
           conn.zrangebyscore(parent.name, score, score)
           conn.zremrangebyscore(parent.name, score, score)
-        end.first
+        }.first
 
         if results.size == 1
           yield results.first
         else
           # multiple jobs with the same score
          # find the one with the right JID and push it
-          hash = results.group_by do |message|
+          hash = results.group_by { |message|
            if message.index(jid)
              msg = Sidekiq.load_json(message)
-              msg['jid'] == jid
+              msg["jid"] == jid
            else
              false
            end
-          end
+          }
 
          msg = hash.fetch(true, []).first
          yield msg if msg
@@ -525,7 +524,6 @@ module Sidekiq
         end
       end
     end
-
   end
 
   class SortedSet
@@ -551,7 +549,6 @@ module Sidekiq
   end
 
   class JobSet < SortedSet
-
     def schedule(timestamp, message)
       Sidekiq.redis do |conn|
         conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -564,15 +561,15 @@ module Sidekiq
       page = -1
       page_size = 50
 
-      while true do
+      loop do
         range_start = page * page_size + offset_size
-        range_end = range_start + page_size - 1
-        elements = Sidekiq.redis do |conn|
+        range_end = range_start + page_size - 1
+        elements = Sidekiq.redis { |conn|
           conn.zrange name, range_start, range_end, with_scores: true
-        end
+        }
         break if elements.empty?
         page -= 1
-        elements.reverse.each do |element, score|
+        elements.reverse_each do |element, score|
           yield SortedEntry.new(self, score, element)
         end
         offset_size = initial_size - @_size
@@ -580,18 +577,17 @@ module Sidekiq
     end
 
     def fetch(score, jid = nil)
-      elements = Sidekiq.redis do |conn|
+      elements = Sidekiq.redis { |conn|
         conn.zrangebyscore(name, score, score)
-      end
+      }
 
-      elements.inject([]) do |result, element|
+      elements.each_with_object([]) do |element, result|
         entry = SortedEntry.new(self, score, element)
         if jid
           result << entry if entry.jid == jid
         else
          result << entry
        end
-        result
       end
     end
 
@@ -601,7 +597,7 @@ module Sidekiq
     # This is a slow, inefficient operation. Do not use under
     # normal conditions. Sidekiq Pro contains a faster version.
     def find_job(jid)
-      self.detect { |j| j.jid == jid }
+      detect { |j| j.jid == jid }
     end
 
     def delete_by_value(name, value)
@@ -622,7 +618,6 @@ module Sidekiq
             @_size -= 1 if ret
             break ret
           end
-          false
         end
       end
     end
@@ -644,7 +639,7 @@ module Sidekiq
   # end.map(&:delete)
   class ScheduledSet < JobSet
     def initialize
-      super 'schedule'
+      super "schedule"
     end
   end
 
@@ -662,13 +657,15 @@ module Sidekiq
   # end.map(&:delete)
   class RetrySet < JobSet
     def initialize
-      super 'retry'
+      super "retry"
    end
 
    def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
+    end
+
+    def kill_all
+      each(&:kill) while size > 0
    end
  end
 
@@ -677,15 +674,15 @@ module Sidekiq
   #
   class DeadSet < JobSet
     def initialize
-      super 'dead'
+      super "dead"
     end
 
-    def kill(message, opts={})
+    def kill(message, opts = {})
       now = Time.now.to_f
       Sidekiq.redis do |conn|
         conn.multi do
           conn.zadd(name, now.to_s, message)
-          conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
           conn.zremrangebyrank(name, 0, - self.class.max_jobs)
         end
       end
@@ -702,9 +699,7 @@ module Sidekiq
     end
 
     def retry_all
-      while size > 0
-        each(&:retry)
-      end
+      each(&:retry) while size > 0
     end
 
     def self.max_jobs
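
The DeadSet methods above follow the same pattern; a short sketch, assuming a reachable Redis:

require "sidekiq"
require "sidekiq/api"

dead = Sidekiq::DeadSet.new
puts "dead jobs: #{dead.size}"
dead.retry_all  # push every dead job back onto its queue
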
@@ -718,7 +713,7 @@ module Sidekiq
 
   ##
   # Enumerates the set of Sidekiq processes which are actively working
-  # right now. Each process send a heartbeat to Redis every 5 seconds
+  # right now. Each process sends a heartbeat to Redis every 5 seconds
   # so this set should be relatively accurate, barring network partitions.
   #
   # Yields a Sidekiq::Process.
@@ -727,7 +722,7 @@ module Sidekiq
     include Enumerable
     include RedisScanner
 
-    def initialize(clean_plz=true)
+    def initialize(clean_plz = true)
       cleanup if clean_plz
     end
 
@@ -736,12 +731,12 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes').sort
-        heartbeats = conn.pipelined do
+        procs = sscan(conn, "processes").sort
+        heartbeats = conn.pipelined {
           procs.each do |key|
-            conn.hget(key, 'info')
+            conn.hget(key, "info")
           end
-        end
+        }
 
         # the hash named key has an expiry of 60 seconds.
         # if it's not found, that means the process has not reported
@@ -750,23 +745,23 @@ module Sidekiq
         heartbeats.each_with_index do |beat, i|
           to_prune << procs[i] if beat.nil?
         end
-        count = conn.srem('processes', to_prune) unless to_prune.empty?
+        count = conn.srem("processes", to_prune) unless to_prune.empty?
       end
       count
     end
 
     def each
-      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
+      procs = Sidekiq.redis { |conn| sscan(conn, "processes") }.sort
 
       Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-        result = conn.pipelined do
+        result = conn.pipelined {
           procs.each do |key|
-            conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+            conn.hmget(key, "info", "busy", "beat", "quiet")
           end
-        end
+        }
 
         result.each do |info, busy, at_s, quiet|
           # If a process is stopped between when we query Redis for `procs` and
@@ -775,7 +770,7 @@ module Sidekiq
           next if info.nil?
 
           hash = Sidekiq.load_json(info)
-          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
+          yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
         end
       end
 
@@ -787,7 +782,7 @@ module Sidekiq
     # contains Sidekiq processes which have sent a heartbeat within the last
     # 60 seconds.
     def size
-      Sidekiq.redis { |conn| conn.scard('processes') }
+      Sidekiq.redis { |conn| conn.scard("processes") }
     end
 
     # Returns the identity of the current cluster leader or "" if no leader.
@@ -795,9 +790,9 @@ module Sidekiq
     # or Sidekiq Pro.
     def leader
       @leader ||= begin
-        x = Sidekiq.redis {|c| c.get("dear-leader") }
+        x = Sidekiq.redis { |c| c.get("dear-leader") }
         # need a non-falsy value so we can memoize
-        x = "" unless x
+        x ||= ""
         x
       end
     end
@@ -824,11 +819,11 @@ module Sidekiq
     end
 
     def tag
-      self['tag']
+      self["tag"]
     end
 
     def labels
-      Array(self['labels'])
+      Array(self["labels"])
     end
 
     def [](key)
@@ -836,23 +831,23 @@ module Sidekiq
     end
 
     def identity
-      self['identity']
+      self["identity"]
     end
 
     def quiet!
-      signal('TSTP')
+      signal("TSTP")
     end
 
     def stop!
-      signal('TERM')
+      signal("TERM")
     end
 
     def dump_threads
-      signal('TTIN')
+      signal("TTIN")
     end
 
     def stopping?
-      self['quiet'] == 'true'
+      self["quiet"] == "true"
     end
 
     private
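
Process#quiet! and #stop! (changed above) push TSTP/TERM signals through Redis to the corresponding process; they are normally reached by iterating the ProcessSet. A sketch, assuming a reachable Redis:

require "sidekiq"
require "sidekiq/api"

# Ask every running Sidekiq process to stop picking up new work,
# for example ahead of a deploy; stop! would send TERM instead.
Sidekiq::ProcessSet.new.each do |process|
  puts "#{process.identity} busy=#{process["busy"]}"
  process.quiet!
end
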
@@ -866,7 +861,6 @@ module Sidekiq
         end
       end
     end
-
   end
 
   ##
@@ -895,12 +889,12 @@ module Sidekiq
 
     def each
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = sscan(conn, "processes")
         procs.sort.each do |key|
-          valid, workers = conn.pipelined do
+          valid, workers = conn.pipelined {
             conn.exists(key)
             conn.hgetall("#{key}:workers")
-          end
+          }
           next unless valid
           workers.each_pair do |tid, json|
             yield key, tid, Sidekiq.load_json(json)
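
Workers#each (above) yields one triple per in-progress job: the process identity, the thread id, and the work hash. A sketch, assuming a reachable Redis:

require "sidekiq"
require "sidekiq/api"

Sidekiq::Workers.new.each do |process_id, thread_id, work|
  puts "#{process_id} #{thread_id} is running a job from #{work["queue"]}"
end
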
@@ -917,18 +911,17 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = sscan(conn, 'processes')
+        procs = sscan(conn, "processes")
         if procs.empty?
           0
         else
-          conn.pipelined do
+          conn.pipelined {
             procs.each do |key|
-              conn.hget(key, 'busy')
+              conn.hget(key, "busy")
             end
-          end.map(&:to_i).inject(:+)
+          }.map(&:to_i).inject(:+)
         end
       end
     end
   end
-
 end