sidekiq 6.0.0 → 6.4.0

Potentially problematic release.

Files changed (102)
  1. checksums.yaml +4 -4
  2. data/Changes.md +290 -2
  3. data/LICENSE +3 -3
  4. data/README.md +7 -9
  5. data/bin/sidekiq +26 -2
  6. data/bin/sidekiqload +8 -4
  7. data/bin/sidekiqmon +4 -5
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +222 -145
  13. data/lib/sidekiq/cli.rb +67 -28
  14. data/lib/sidekiq/client.rb +17 -34
  15. data/lib/sidekiq/delay.rb +2 -0
  16. data/lib/sidekiq/extensions/action_mailer.rb +5 -4
  17. data/lib/sidekiq/extensions/active_record.rb +6 -5
  18. data/lib/sidekiq/extensions/class_methods.rb +7 -6
  19. data/lib/sidekiq/extensions/generic_proxy.rb +5 -3
  20. data/lib/sidekiq/fetch.rb +36 -27
  21. data/lib/sidekiq/job.rb +13 -0
  22. data/lib/sidekiq/job_logger.rb +13 -5
  23. data/lib/sidekiq/job_retry.rb +33 -21
  24. data/lib/sidekiq/job_util.rb +65 -0
  25. data/lib/sidekiq/launcher.rb +110 -28
  26. data/lib/sidekiq/logger.rb +109 -12
  27. data/lib/sidekiq/manager.rb +10 -12
  28. data/lib/sidekiq/middleware/chain.rb +17 -6
  29. data/lib/sidekiq/middleware/current_attributes.rb +57 -0
  30. data/lib/sidekiq/monitor.rb +3 -18
  31. data/lib/sidekiq/paginator.rb +7 -2
  32. data/lib/sidekiq/processor.rb +22 -24
  33. data/lib/sidekiq/rails.rb +27 -18
  34. data/lib/sidekiq/redis_connection.rb +19 -13
  35. data/lib/sidekiq/scheduled.rb +48 -12
  36. data/lib/sidekiq/sd_notify.rb +149 -0
  37. data/lib/sidekiq/systemd.rb +24 -0
  38. data/lib/sidekiq/testing.rb +14 -4
  39. data/lib/sidekiq/util.rb +40 -1
  40. data/lib/sidekiq/version.rb +1 -1
  41. data/lib/sidekiq/web/action.rb +2 -2
  42. data/lib/sidekiq/web/application.rb +41 -31
  43. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  44. data/lib/sidekiq/web/helpers.rb +51 -33
  45. data/lib/sidekiq/web/router.rb +6 -5
  46. data/lib/sidekiq/web.rb +37 -73
  47. data/lib/sidekiq/worker.rb +133 -16
  48. data/lib/sidekiq.rb +29 -8
  49. data/sidekiq.gemspec +13 -6
  50. data/web/assets/images/apple-touch-icon.png +0 -0
  51. data/web/assets/javascripts/application.js +83 -64
  52. data/web/assets/javascripts/dashboard.js +53 -53
  53. data/web/assets/stylesheets/application-dark.css +143 -0
  54. data/web/assets/stylesheets/application-rtl.css +0 -4
  55. data/web/assets/stylesheets/application.css +43 -232
  56. data/web/locales/ar.yml +8 -2
  57. data/web/locales/de.yml +14 -2
  58. data/web/locales/en.yml +6 -1
  59. data/web/locales/es.yml +18 -2
  60. data/web/locales/fr.yml +10 -3
  61. data/web/locales/ja.yml +5 -0
  62. data/web/locales/lt.yml +83 -0
  63. data/web/locales/pl.yml +4 -4
  64. data/web/locales/ru.yml +4 -0
  65. data/web/locales/vi.yml +83 -0
  66. data/web/views/_footer.erb +1 -1
  67. data/web/views/_job_info.erb +3 -2
  68. data/web/views/_poll_link.erb +2 -5
  69. data/web/views/_summary.erb +7 -7
  70. data/web/views/busy.erb +54 -20
  71. data/web/views/dashboard.erb +22 -14
  72. data/web/views/dead.erb +3 -3
  73. data/web/views/layout.erb +3 -1
  74. data/web/views/morgue.erb +9 -6
  75. data/web/views/queue.erb +19 -10
  76. data/web/views/queues.erb +10 -2
  77. data/web/views/retries.erb +11 -8
  78. data/web/views/retry.erb +3 -3
  79. data/web/views/scheduled.erb +5 -2
  80. metadata +34 -54
  81. data/.circleci/config.yml +0 -61
  82. data/.github/contributing.md +0 -32
  83. data/.github/issue_template.md +0 -11
  84. data/.gitignore +0 -13
  85. data/.standard.yml +0 -20
  86. data/3.0-Upgrade.md +0 -70
  87. data/4.0-Upgrade.md +0 -53
  88. data/5.0-Upgrade.md +0 -56
  89. data/6.0-Upgrade.md +0 -70
  90. data/COMM-LICENSE +0 -97
  91. data/Ent-2.0-Upgrade.md +0 -37
  92. data/Ent-Changes.md +0 -250
  93. data/Gemfile +0 -24
  94. data/Gemfile.lock +0 -196
  95. data/Pro-2.0-Upgrade.md +0 -138
  96. data/Pro-3.0-Upgrade.md +0 -44
  97. data/Pro-4.0-Upgrade.md +0 -35
  98. data/Pro-5.0-Upgrade.md +0 -25
  99. data/Pro-Changes.md +0 -768
  100. data/Rakefile +0 -10
  101. data/code_of_conduct.md +0 -50
  102. data/lib/generators/sidekiq/worker_generator.rb +0 -47
data/lib/sidekiq/api.rb CHANGED
@@ -2,25 +2,13 @@
 
 require "sidekiq"
 
-module Sidekiq
-  module RedisScanner
-    def sscan(conn, key)
-      cursor = "0"
-      result = []
-      loop do
-        cursor, values = conn.sscan(key, cursor)
-        result.push(*values)
-        break if cursor == "0"
-      end
-      result
-    end
-  end
+require "zlib"
+require "base64"
 
+module Sidekiq
   class Stats
-    include RedisScanner
-
     def initialize
-      fetch_stats!
+      fetch_stats_fast!
     end
 
     def processed
@@ -63,7 +51,8 @@ module Sidekiq
       Sidekiq::Stats::Queues.new.lengths
     end
 
-    def fetch_stats!
+    # O(1) redis calls
+    def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
           conn.get("stat:processed")
@@ -76,37 +65,19 @@ module Sidekiq
         end
       }
 
-      processes = Sidekiq.redis { |conn|
-        sscan(conn, "processes")
-      }
-
-      queues = Sidekiq.redis { |conn|
-        sscan(conn, "queues")
-      }
-
-      pipe2_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          processes.each { |key| conn.hget(key, "busy") }
-          queues.each { |queue| conn.llen("queue:#{queue}") }
-        end
-      }
-
-      s = processes.size
-      workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
-
       default_queue_latency = if (entry = pipe1_res[6].first)
         job = begin
-                Sidekiq.load_json(entry)
-              rescue
-                {}
-              end
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
         now = Time.now.to_f
         thence = job["enqueued_at"] || now
         now - thence
       else
         0
       end
+
       @stats = {
         processed: pipe1_res[0].to_i,
         failed: pipe1_res[1].to_i,
@@ -115,10 +86,39 @@ module Sidekiq
         dead_size: pipe1_res[4],
         processes_size: pipe1_res[5],
 
-        default_queue_latency: default_queue_latency,
-        workers_size: workers_size,
-        enqueued: enqueued,
+        default_queue_latency: default_queue_latency
+      }
+    end
+
+    # O(number of processes + number of queues) redis calls
+    def fetch_stats_slow!
+      processes = Sidekiq.redis { |conn|
+        conn.sscan_each("processes").to_a
+      }
+
+      queues = Sidekiq.redis { |conn|
+        conn.sscan_each("queues").to_a
+      }
+
+      pipe2_res = Sidekiq.redis { |conn|
+        conn.pipelined do
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
+        end
       }
+
+      s = processes.size
+      workers_size = pipe2_res[0...s].sum(&:to_i)
+      enqueued = pipe2_res[s..-1].sum(&:to_i)
+
+      @stats[:workers_size] = workers_size
+      @stats[:enqueued] = enqueued
+      @stats
+    end
+
+    def fetch_stats!
+      fetch_stats_fast!
+      fetch_stats_slow!
     end
 
     def reset(*stats)
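The hunk above splits the old fetch_stats! into a cheap fetch_stats_fast! (called from the constructor, O(1) Redis calls) and fetch_stats_slow!, which scans the processes and queues sets only when one of those numbers is actually requested via the stat helper shown in the next hunk. A minimal usage sketch, assuming a reachable Redis; the behaviour notes are in the comments:

  require "sidekiq"

  stats = Sidekiq::Stats.new  # constructor now runs only the O(1) pipeline
  stats.processed             # answered from the fast cache
  stats.enqueued              # not in the fast cache, so #stat falls back to fetch_stats_slow!
  stats.workers_size          # already cached by that same slow pass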
@@ -138,15 +138,14 @@ module Sidekiq
     private
 
     def stat(s)
-      @stats[s]
+      fetch_stats_slow! if @stats[s].nil?
+      @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end
 
     class Queues
-      include RedisScanner
-
       def lengths
         Sidekiq.redis do |conn|
-          queues = sscan(conn, "queues")
+          queues = conn.sscan_each("queues").to_a
 
           lengths = conn.pipelined {
             queues.each do |queue|
@@ -154,19 +153,16 @@ module Sidekiq
             end
           }
 
-          i = 0
-          array_of_arrays = queues.each_with_object({}) { |queue, memo|
-            memo[queue] = lengths[i]
-            i += 1
-          }.sort_by { |_, size| size }
-
-          Hash[array_of_arrays.reverse]
+          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+          array_of_arrays.to_h
         end
       end
     end
 
     class History
       def initialize(days_previous, start_date = nil)
+        # we only store five years of data in Redis
+        raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
         @start_date = start_date || Time.now.utc.to_date
       end
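For reference, the reworked Queues#lengths (queues zipped with their lengths, largest first) and the new History argument check behave like this; a sketch, assuming some queues and daily stat keys already exist in Redis, with illustrative return values:

  require "sidekiq"

  Sidekiq::Stats::Queues.new.lengths
  # => {"default" => 120, "mailers" => 3}   (largest queue first)

  Sidekiq::Stats::History.new(7).processed
  # => {"2021-12-01" => 450, "2021-11-30" => 512, ...}  (one entry per day)

  Sidekiq::Stats::History.new(10_000)
  # => raises ArgumentError, only five years of daily stats are kept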
@@ -182,18 +178,12 @@ module Sidekiq
       private
 
       def date_stat_hash(stat)
-        i = 0
         stat_hash = {}
-        keys = []
-        dates = []
-
-        while i < @days_previous
-          date = @start_date - i
-          datestr = date.strftime("%Y-%m-%d")
-          keys << "stat:#{stat}:#{datestr}"
-          dates << datestr
-          i += 1
-        end
+        dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
+          date.strftime("%Y-%m-%d")
+        }
+
+        keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
         begin
           Sidekiq.redis do |conn|
@@ -225,13 +215,12 @@ module Sidekiq
   #
   class Queue
     include Enumerable
-    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| sscan(c, "queues") }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
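Queue.all now walks the queues set with Redis' SSCAN (sscan_each) instead of the removed RedisScanner helper; callers see no difference. A quick sketch:

  require "sidekiq"

  Sidekiq::Queue.all.each do |q|
    # name, size and latency are long-standing Sidekiq::Queue methods
    puts "#{q.name}: #{q.size} job(s), latency #{q.latency.round(1)}s"
  end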
@@ -281,7 +270,7 @@ module Sidekiq
         break if entries.empty?
         page += 1
         entries.each do |entry|
-          yield Job.new(entry, @name)
+          yield JobRecord.new(entry, @name)
         end
         deleted_size = initial_size - size
       end
@@ -291,7 +280,7 @@ module Sidekiq
     # Find the job with the given JID within this queue.
     #
     # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # normal conditions.
     def find_job(jid)
       detect { |j| j.jid == jid }
     end
@@ -299,7 +288,7 @@ module Sidekiq
     def clear
       Sidekiq.redis do |conn|
         conn.multi do
-          conn.del(@rname)
+          conn.unlink(@rname)
           conn.srem("queues", name)
         end
       end
@@ -312,9 +301,9 @@ module Sidekiq
   # sorted set.
   #
   # The job should be considered immutable but may be
-  # removed from the queue via Job#delete.
+  # removed from the queue via JobRecord#delete.
   #
-  class Job
+  class JobRecord
     attr_reader :item
     attr_reader :value
 
@@ -342,21 +331,23 @@ module Sidekiq
 
     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @klass ||= case klass
-        when /\ASidekiq::Extensions::Delayed/
-          safe_load(args[0], klass) do |target, method, _|
-            "#{target}.#{method}"
-          end
-        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-          job_class = @item["wrapped"] || args[0]
-          if job_class == "ActionMailer::DeliveryJob"
-            # MailerClass#mailer_method
-            args[0]["arguments"][0..1].join("#")
-          else
-            job_class
-          end
-        else
-          klass
+      @klass ||= self["display_class"] || begin
+        case klass
+        when /\ASidekiq::Extensions::Delayed/
+          safe_load(args[0], klass) do |target, method, _|
+            "#{target}.#{method}"
+          end
+        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+          job_class = @item["wrapped"] || args[0]
+          if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
+            # MailerClass#mailer_method
+            args[0]["arguments"][0..1].join("#")
+          else
+            job_class
+          end
+        else
+          klass
+        end
       end
     end
 
@@ -372,6 +363,9 @@ module Sidekiq
           if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
             # remove MailerClass, mailer_method and 'deliver_now'
            job_args.drop(3)
+          elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+            # remove MailerClass, mailer_method and 'deliver_now'
+            job_args.drop(3).first["args"]
           else
            job_args
          end
@@ -400,6 +394,20 @@ module Sidekiq
       Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
+    def tags
+      self["tags"] || []
+    end
+
+    def error_backtrace
+      # Cache nil values
+      if defined?(@error_backtrace)
+        @error_backtrace
+      else
+        value = self["error_backtrace"]
+        @error_backtrace = value && uncompress_backtrace(value)
+      end
+    end
+
     attr_reader :queue
 
     def latency
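Queue and sorted-set iteration now yields JobRecord instances (the class previously named Sidekiq::Job; that name is reused elsewhere in this release, see data/lib/sidekiq/job.rb in the file list, as an alias for Sidekiq::Worker), and error backtraces written by newer Sidekiq versions are stored Base64/Zlib-compressed and expanded on demand by #error_backtrace (the decompression helper follows in the next hunk). A hedged sketch of reading the retry set:

  require "sidekiq"

  Sidekiq::RetrySet.new.each do |job|
    # job is a Sidekiq::SortedEntry, a subclass of JobRecord
    puts job.display_class
    puts job.tags.inspect               # [] unless the job was pushed with tags
    puts job.error_backtrace&.first(5)  # decompressed on demand, nil if none was recorded
  end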
@@ -433,9 +441,26 @@ module Sidekiq
       Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
       default
     end
+
+    def uncompress_backtrace(backtrace)
+      if backtrace.is_a?(Array)
+        # Handle old jobs with raw Array backtrace format
+        backtrace
+      else
+        decoded = Base64.decode64(backtrace)
+        uncompressed = Zlib::Inflate.inflate(decoded)
+        begin
+          Sidekiq.load_json(uncompressed)
+        rescue
+          # Handle old jobs with marshalled backtrace format
+          # TODO Remove in 7.x
+          Marshal.load(uncompressed)
+        end
+      end
+    end
   end
 
-  class SortedEntry < Job
+  class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent
 
@@ -458,8 +483,9 @@ module Sidekiq
     end
 
     def reschedule(at)
-      delete
-      @parent.schedule(at, item)
+      Sidekiq.redis do |conn|
+        conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
+      end
     end
 
     def add_to_queue
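SortedEntry#reschedule now adjusts the existing member's score with a single ZINCRBY instead of deleting and re-adding the payload, so the operation is one atomic Redis command. Calling code is unchanged; a sketch:

  require "sidekiq"

  Sidekiq::ScheduledSet.new.each do |entry|
    # push every scheduled job back by one hour; #at is the currently scheduled time
    entry.reschedule(entry.at + 3600)
  end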
@@ -503,7 +529,7 @@ module Sidekiq
         else
           # multiple jobs with the same score
           # find the one with the right JID and push it
-          hash = results.group_by { |message|
+          matched, nonmatched = results.partition { |message|
             if message.index(jid)
               msg = Sidekiq.load_json(message)
               msg["jid"] == jid
@@ -512,12 +538,12 @@ module Sidekiq
             end
           }
 
-          msg = hash.fetch(true, []).first
+          msg = matched.first
           yield msg if msg
 
           # push the rest back onto the sorted set
           conn.multi do
-            hash.fetch(false, []).each do |message|
+            nonmatched.each do |message|
              conn.zadd(parent.name, score.to_f.to_s, message)
            end
          end
@@ -540,9 +566,20 @@ module Sidekiq
       Sidekiq.redis { |c| c.zcard(name) }
     end
 
+    def scan(match, count = 100)
+      return to_enum(:scan, match, count) unless block_given?
+
+      match = "*#{match}*" unless match.include?("*")
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: match, count: count) do |entry, score|
+          yield SortedEntry.new(self, score, entry)
+        end
+      end
+    end
+
     def clear
       Sidekiq.redis do |conn|
-        conn.del(name)
+        conn.unlink(name)
       end
     end
     alias_method :💣, :clear
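The new SortedSet#scan wraps ZSCAN, so large retry/scheduled/dead sets can be filtered without materializing every entry; the pattern is surrounded with * unless it already contains a wildcard. A sketch (ExampleWorker is a placeholder class name):

  require "sidekiq"

  retries = Sidekiq::RetrySet.new

  # enumerate entries whose raw JSON matches *ExampleWorker*
  retries.scan("ExampleWorker") do |entry|
    entry.retry if entry.display_class == "ExampleWorker"
  end

  # without a block, #scan returns an Enumerator
  retries.scan("ExampleWorker").first(10)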
@@ -576,28 +613,40 @@ module Sidekiq
       end
     end
 
+    ##
+    # Fetch jobs that match a given time or Range. Job ID is an
+    # optional second argument.
     def fetch(score, jid = nil)
+      begin_score, end_score =
+        if score.is_a?(Range)
+          [score.first, score.last]
+        else
+          [score, score]
+        end
+
       elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, score, score)
+        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
       }
 
       elements.each_with_object([]) do |element, result|
-        entry = SortedEntry.new(self, score, element)
-        if jid
-          result << entry if entry.jid == jid
-        else
-          result << entry
-        end
+        data, job_score = element
+        entry = SortedEntry.new(self, job_score, data)
+        result << entry if jid.nil? || entry.jid == jid
       end
     end
 
     ##
     # Find the job with the given JID within this sorted set.
-    #
-    # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # This is a slower O(n) operation. Do not use for app logic.
     def find_job(jid)
-      detect { |j| j.jid == jid }
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = JSON.parse(entry)
+          matched = job["jid"] == jid
+          return SortedEntry.new(self, score, entry) if matched
+        end
+      end
+      nil
     end
 
     def delete_by_value(name, value)
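#fetch now accepts either a single score or a Range, and #find_job walks the set with ZSCAN rather than detect over a full enumeration. A sketch using plain epoch floats (the JID is a placeholder):

  require "sidekiq"

  ss = Sidekiq::ScheduledSet.new

  # everything scheduled to run within the next hour
  now = Time.now.to_f
  ss.fetch(now..(now + 3600)).each do |entry|
    puts "#{entry.display_class} at #{entry.at}"
  end

  entry = ss.find_job("2c9f4a08b2d7e3f1a6b5c4d3")  # placeholder JID
  entry&.delete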
@@ -612,11 +661,13 @@ module Sidekiq
       Sidekiq.redis do |conn|
         elements = conn.zrangebyscore(name, score, score)
         elements.each do |element|
-          message = Sidekiq.load_json(element)
-          if message["jid"] == jid
-            ret = conn.zrem(name, element)
-            @_size -= 1 if ret
-            break ret
+          if element.index(jid)
+            message = Sidekiq.load_json(element)
+            if message["jid"] == jid
+              ret = conn.zrem(name, element)
+              @_size -= 1 if ret
+              break ret
+            end
           end
         end
       end
@@ -720,7 +771,6 @@ module Sidekiq
   #
   class ProcessSet
     include Enumerable
-    include RedisScanner
 
     def initialize(clean_plz = true)
       cleanup if clean_plz
@@ -731,7 +781,7 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = sscan(conn, "processes").sort
+        procs = conn.sscan_each("processes").to_a.sort
         heartbeats = conn.pipelined {
           procs.each do |key|
             conn.hget(key, "info")
@@ -741,40 +791,41 @@ module Sidekiq
         # the hash named key has an expiry of 60 seconds.
         # if it's not found, that means the process has not reported
         # in to Redis and probably died.
-        to_prune = []
-        heartbeats.each_with_index do |beat, i|
-          to_prune << procs[i] if beat.nil?
-        end
+        to_prune = procs.select.with_index { |proc, i|
+          heartbeats[i].nil?
+        }
         count = conn.srem("processes", to_prune) unless to_prune.empty?
       end
       count
     end
 
     def each
-      procs = Sidekiq.redis { |conn| sscan(conn, "processes") }.sort
+      result = Sidekiq.redis { |conn|
+        procs = conn.sscan_each("processes").to_a.sort
 
-      Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-        result = conn.pipelined {
+        conn.pipelined do
          procs.each do |key|
-            conn.hmget(key, "info", "busy", "beat", "quiet")
+            conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
          end
-        }
+        end
+      }
 
-        result.each do |info, busy, at_s, quiet|
-          # If a process is stopped between when we query Redis for `procs` and
-          # when we query for `result`, we will have an item in `result` that is
-          # composed of `nil` values.
-          next if info.nil?
+      result.each do |info, busy, at_s, quiet, rss, rtt|
+        # If a process is stopped between when we query Redis for `procs` and
+        # when we query for `result`, we will have an item in `result` that is
+        # composed of `nil` values.
+        next if info.nil?
 
-          hash = Sidekiq.load_json(info)
-          yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
-        end
+        hash = Sidekiq.load_json(info)
+        yield Process.new(hash.merge("busy" => busy.to_i,
+          "beat" => at_s.to_f,
+          "quiet" => quiet,
+          "rss" => rss.to_i,
+          "rtt_us" => rtt.to_i))
       end
-
-      nil
     end
 
     # This method is not guaranteed accurate since it does not prune the set
@@ -785,6 +836,18 @@ module Sidekiq
       Sidekiq.redis { |conn| conn.scard("processes") }
     end
 
+    # Total number of threads available to execute jobs.
+    # For Sidekiq Enterprise customers this number (in production) must be
+    # less than or equal to your licensed concurrency.
+    def total_concurrency
+      sum { |x| x["concurrency"].to_i }
+    end
+
+    def total_rss_in_kb
+      sum { |x| x["rss"].to_i }
+    end
+    alias_method :total_rss, :total_rss_in_kb
+
     # Returns the identity of the current cluster leader or "" if no leader.
     # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
     # or Sidekiq Pro.
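Process heartbeats now include rss and rtt_us alongside busy/beat/quiet, and the new aggregate helpers sum them across the cluster. A sketch:

  require "sidekiq"

  ps = Sidekiq::ProcessSet.new  # prunes dead heartbeats by default (clean_plz)
  puts "processes: #{ps.size}"
  puts "threads:   #{ps.total_concurrency}"
  puts "RSS:       #{ps.total_rss_in_kb} KB"

  ps.each do |process|
    puts "#{process["identity"]} busy=#{process["busy"]} rtt=#{process["rtt_us"]}us"
  end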
@@ -834,6 +897,10 @@ module Sidekiq
       self["identity"]
     end
 
+    def queues
+      self["queues"]
+    end
+
     def quiet!
       signal("TSTP")
     end
@@ -864,8 +931,8 @@ module Sidekiq
   end
 
   ##
-  # A worker is a thread that is currently processing a job.
-  # Programmatic access to the current active worker set.
+  # The WorkSet stores the work being done by this Sidekiq cluster.
+  # It tracks the process and thread working on each job.
   #
   # WARNING WARNING WARNING
   #
@@ -873,34 +940,40 @@ module Sidekiq
   # If you call #size => 5 and then expect #each to be
   # called 5 times, you're going to have a bad time.
   #
-  #   workers = Sidekiq::Workers.new
-  #   workers.size => 2
-  #   workers.each do |process_id, thread_id, work|
+  #   works = Sidekiq::WorkSet.new
+  #   works.size => 2
+  #   works.each do |process_id, thread_id, work|
   #     # process_id is a unique identifier per Sidekiq process
   #     # thread_id is a unique identifier per thread
   #     # work is a Hash which looks like:
-  #     # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
+  #     # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
  #     # run_at is an epoch Integer.
  #   end
  #
-  class Workers
+  class WorkSet
     include Enumerable
-    include RedisScanner
 
-    def each
+    def each(&block)
+      results = []
       Sidekiq.redis do |conn|
-        procs = sscan(conn, "processes")
+        procs = conn.sscan_each("processes").to_a
         procs.sort.each do |key|
           valid, workers = conn.pipelined {
-            conn.exists(key)
+            conn.exists?(key)
             conn.hgetall("#{key}:workers")
           }
           next unless valid
           workers.each_pair do |tid, json|
-            yield key, tid, Sidekiq.load_json(json)
+            hsh = Sidekiq.load_json(json)
+            p = hsh["payload"]
+            # avoid breaking API, this is a side effect of the JSON optimization in #4316
+            hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
+            results << [key, tid, hsh]
           end
         end
       end
+
+      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
     end
 
     # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -911,7 +984,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = sscan(conn, "processes")
+        procs = conn.sscan_each("processes").to_a
         if procs.empty?
           0
         else
@@ -919,9 +992,13 @@ module Sidekiq
           procs.each do |key|
             conn.hget(key, "busy")
           end
-        }.map(&:to_i).inject(:+)
+        }.sum(&:to_i)
         end
       end
     end
   end
+  # Since "worker" is a nebulous term, we've deprecated the use of this class name.
+  # Is "worker" a process, a type of job, a thread? Undefined!
+  # WorkSet better describes the data.
+  Workers = WorkSet
 end
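Workers remains as an alias, so existing code keeps running, while new code can use the clearer WorkSet name; entries are now yielded sorted by run_at and the payload is always a Hash. A sketch mirroring the class comment above:

  require "sidekiq"

  work_set = Sidekiq::WorkSet.new   # Sidekiq::Workers.new still works via the alias
  puts "#{work_set.size} job(s) in flight"

  work_set.each do |process_id, thread_id, work|
    payload = work["payload"]       # a Hash; see the #4316 note above
    puts "#{process_id}/#{thread_id}: #{payload["class"]} since #{Time.at(work["run_at"])}"
  end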