sidekiq 5.2.9 → 6.2.1

Potentially problematic release. This version of sidekiq might be problematic.

Files changed (92)
  1. checksums.yaml +4 -4
  2. data/Changes.md +232 -0
  3. data/README.md +18 -34
  4. data/bin/sidekiq +26 -2
  5. data/bin/sidekiqload +32 -24
  6. data/bin/sidekiqmon +8 -0
  7. data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
  8. data/lib/generators/sidekiq/worker_generator.rb +21 -13
  9. data/lib/sidekiq/api.rb +257 -219
  10. data/lib/sidekiq/cli.rb +144 -180
  11. data/lib/sidekiq/client.rb +64 -48
  12. data/lib/sidekiq/delay.rb +5 -6
  13. data/lib/sidekiq/exception_handler.rb +10 -12
  14. data/lib/sidekiq/extensions/action_mailer.rb +13 -22
  15. data/lib/sidekiq/extensions/active_record.rb +13 -10
  16. data/lib/sidekiq/extensions/class_methods.rb +14 -11
  17. data/lib/sidekiq/extensions/generic_proxy.rb +4 -4
  18. data/lib/sidekiq/fetch.rb +38 -31
  19. data/lib/sidekiq/job_logger.rb +45 -7
  20. data/lib/sidekiq/job_retry.rb +62 -61
  21. data/lib/sidekiq/launcher.rb +142 -52
  22. data/lib/sidekiq/logger.rb +166 -0
  23. data/lib/sidekiq/manager.rb +11 -13
  24. data/lib/sidekiq/middleware/chain.rb +15 -5
  25. data/lib/sidekiq/middleware/i18n.rb +5 -7
  26. data/lib/sidekiq/monitor.rb +133 -0
  27. data/lib/sidekiq/paginator.rb +18 -14
  28. data/lib/sidekiq/processor.rb +71 -70
  29. data/lib/sidekiq/rails.rb +29 -37
  30. data/lib/sidekiq/redis_connection.rb +50 -48
  31. data/lib/sidekiq/scheduled.rb +28 -29
  32. data/lib/sidekiq/sd_notify.rb +149 -0
  33. data/lib/sidekiq/systemd.rb +24 -0
  34. data/lib/sidekiq/testing/inline.rb +2 -1
  35. data/lib/sidekiq/testing.rb +35 -24
  36. data/lib/sidekiq/util.rb +45 -16
  37. data/lib/sidekiq/version.rb +2 -1
  38. data/lib/sidekiq/web/action.rb +15 -11
  39. data/lib/sidekiq/web/application.rb +84 -74
  40. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  41. data/lib/sidekiq/web/helpers.rb +108 -79
  42. data/lib/sidekiq/web/router.rb +23 -19
  43. data/lib/sidekiq/web.rb +60 -105
  44. data/lib/sidekiq/worker.rb +126 -102
  45. data/lib/sidekiq.rb +69 -44
  46. data/sidekiq.gemspec +23 -16
  47. data/web/assets/images/apple-touch-icon.png +0 -0
  48. data/web/assets/javascripts/application.js +25 -27
  49. data/web/assets/javascripts/dashboard.js +4 -23
  50. data/web/assets/stylesheets/application-dark.css +160 -0
  51. data/web/assets/stylesheets/application.css +33 -8
  52. data/web/locales/de.yml +14 -2
  53. data/web/locales/en.yml +2 -0
  54. data/web/locales/fr.yml +3 -3
  55. data/web/locales/ja.yml +4 -1
  56. data/web/locales/lt.yml +83 -0
  57. data/web/locales/pl.yml +4 -4
  58. data/web/locales/ru.yml +4 -0
  59. data/web/locales/vi.yml +83 -0
  60. data/web/views/_job_info.erb +2 -1
  61. data/web/views/busy.erb +51 -17
  62. data/web/views/dead.erb +2 -2
  63. data/web/views/layout.erb +2 -0
  64. data/web/views/morgue.erb +5 -2
  65. data/web/views/queue.erb +11 -2
  66. data/web/views/queues.erb +9 -1
  67. data/web/views/retries.erb +5 -2
  68. data/web/views/retry.erb +2 -2
  69. data/web/views/scheduled.erb +5 -2
  70. metadata +27 -60
  71. data/.circleci/config.yml +0 -61
  72. data/.github/contributing.md +0 -32
  73. data/.github/issue_template.md +0 -11
  74. data/.gitignore +0 -15
  75. data/.travis.yml +0 -11
  76. data/3.0-Upgrade.md +0 -70
  77. data/4.0-Upgrade.md +0 -53
  78. data/5.0-Upgrade.md +0 -56
  79. data/COMM-LICENSE +0 -97
  80. data/Ent-Changes.md +0 -238
  81. data/Gemfile +0 -23
  82. data/Pro-2.0-Upgrade.md +0 -138
  83. data/Pro-3.0-Upgrade.md +0 -44
  84. data/Pro-4.0-Upgrade.md +0 -35
  85. data/Pro-Changes.md +0 -759
  86. data/Rakefile +0 -9
  87. data/bin/sidekiqctl +0 -20
  88. data/code_of_conduct.md +0 -50
  89. data/lib/sidekiq/core_ext.rb +0 -1
  90. data/lib/sidekiq/ctl.rb +0 -221
  91. data/lib/sidekiq/logging.rb +0 -122
  92. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
data/lib/sidekiq/api.rb CHANGED
@@ -1,24 +1,12 @@
  # frozen_string_literal: true
- require 'sidekiq'

- module Sidekiq
+ require "sidekiq"

- module RedisScanner
- def sscan(conn, key)
- cursor = '0'
- result = []
- loop do
- cursor, values = conn.sscan(key, cursor)
- result.push(*values)
- break if cursor == '0'
- end
- result
- end
- end
+ require "zlib"
+ require "base64"

+ module Sidekiq
  class Stats
- include RedisScanner
-
  def initialize
  fetch_stats!
  end
@@ -64,61 +52,65 @@ module Sidekiq
  end

  def fetch_stats!
- pipe1_res = Sidekiq.redis do |conn|
+ pipe1_res = Sidekiq.redis { |conn|
  conn.pipelined do
- conn.get('stat:processed')
- conn.get('stat:failed')
- conn.zcard('schedule')
- conn.zcard('retry')
- conn.zcard('dead')
- conn.scard('processes')
- conn.lrange('queue:default', -1, -1)
+ conn.get("stat:processed")
+ conn.get("stat:failed")
+ conn.zcard("schedule")
+ conn.zcard("retry")
+ conn.zcard("dead")
+ conn.scard("processes")
+ conn.lrange("queue:default", -1, -1)
  end
- end
+ }

- processes = Sidekiq.redis do |conn|
- sscan(conn, 'processes')
- end
+ processes = Sidekiq.redis { |conn|
+ conn.sscan_each("processes").to_a
+ }

- queues = Sidekiq.redis do |conn|
- sscan(conn, 'queues')
- end
+ queues = Sidekiq.redis { |conn|
+ conn.sscan_each("queues").to_a
+ }

- pipe2_res = Sidekiq.redis do |conn|
+ pipe2_res = Sidekiq.redis { |conn|
  conn.pipelined do
- processes.each {|key| conn.hget(key, 'busy') }
- queues.each {|queue| conn.llen("queue:#{queue}") }
+ processes.each { |key| conn.hget(key, "busy") }
+ queues.each { |queue| conn.llen("queue:#{queue}") }
  end
- end
+ }

  s = processes.size
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
+ workers_size = pipe2_res[0...s].sum(&:to_i)
+ enqueued = pipe2_res[s..-1].sum(&:to_i)

  default_queue_latency = if (entry = pipe1_res[6].first)
- job = Sidekiq.load_json(entry) rescue {}
- now = Time.now.to_f
- thence = job['enqueued_at'] || now
- now - thence
- else
- 0
- end
+ job = begin
+ Sidekiq.load_json(entry)
+ rescue
+ {}
+ end
+ now = Time.now.to_f
+ thence = job["enqueued_at"] || now
+ now - thence
+ else
+ 0
+ end
  @stats = {
- processed: pipe1_res[0].to_i,
- failed: pipe1_res[1].to_i,
- scheduled_size: pipe1_res[2],
- retry_size: pipe1_res[3],
- dead_size: pipe1_res[4],
- processes_size: pipe1_res[5],
+ processed: pipe1_res[0].to_i,
+ failed: pipe1_res[1].to_i,
+ scheduled_size: pipe1_res[2],
+ retry_size: pipe1_res[3],
+ dead_size: pipe1_res[4],
+ processes_size: pipe1_res[5],

  default_queue_latency: default_queue_latency,
- workers_size: workers_size,
- enqueued: enqueued
+ workers_size: workers_size,
+ enqueued: enqueued
  }
  end

  def reset(*stats)
- all = %w(failed processed)
+ all = %w[failed processed]
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)

  mset_args = []
@@ -138,26 +130,18 @@ module Sidekiq
  end

  class Queues
- include RedisScanner
-
  def lengths
  Sidekiq.redis do |conn|
- queues = sscan(conn, 'queues')
+ queues = conn.sscan_each("queues").to_a

- lengths = conn.pipelined do
+ lengths = conn.pipelined {
  queues.each do |queue|
  conn.llen("queue:#{queue}")
  end
- end
+ }

- i = 0
- array_of_arrays = queues.inject({}) do |memo, queue|
- memo[queue] = lengths[i]
- i += 1
- memo
- end.sort_by { |_, size| size }
-
- Hash[array_of_arrays.reverse]
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ Hash[array_of_arrays]
  end
  end
  end
@@ -179,18 +163,12 @@ module Sidekiq
  private

  def date_stat_hash(stat)
- i = 0
  stat_hash = {}
- keys = []
- dates = []
-
- while i < @days_previous
- date = @start_date - i
- datestr = date.strftime("%Y-%m-%d")
- keys << "stat:#{stat}:#{datestr}"
- dates << datestr
- i += 1
- end
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
+ date.strftime("%Y-%m-%d")
+ }
+
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }

  begin
  Sidekiq.redis do |conn|
@@ -222,18 +200,17 @@ module Sidekiq
  #
  class Queue
  include Enumerable
- extend RedisScanner

  ##
  # Return all known queues within Redis.
  #
  def self.all
- Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
+ Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
  end

  attr_reader :name

- def initialize(name="default")
+ def initialize(name = "default")
  @name = name.to_s
  @rname = "queue:#{name}"
  end
@@ -253,13 +230,13 @@ module Sidekiq
  #
  # @return Float
  def latency
- entry = Sidekiq.redis do |conn|
+ entry = Sidekiq.redis { |conn|
  conn.lrange(@rname, -1, -1)
- end.first
+ }.first
  return 0 unless entry
  job = Sidekiq.load_json(entry)
  now = Time.now.to_f
- thence = job['enqueued_at'] || now
+ thence = job["enqueued_at"] || now
  now - thence
  end

@@ -269,12 +246,12 @@ module Sidekiq
  page = 0
  page_size = 50

- while true do
+ loop do
  range_start = page * page_size - deleted_size
- range_end = range_start + page_size - 1
- entries = Sidekiq.redis do |conn|
+ range_end = range_start + page_size - 1
+ entries = Sidekiq.redis { |conn|
  conn.lrange @rname, range_start, range_end
- end
+ }
  break if entries.empty?
  page += 1
  entries.each do |entry|
@@ -296,7 +273,7 @@ module Sidekiq
  def clear
  Sidekiq.redis do |conn|
  conn.multi do
- conn.del(@rname)
+ conn.unlink(@rname)
  conn.srem("queues", name)
  end
  end
@@ -315,11 +292,11 @@ module Sidekiq
  attr_reader :item
  attr_reader :value

- def initialize(item, queue_name=nil)
+ def initialize(item, queue_name = nil)
  @args = nil
  @value = item
  @item = item.is_a?(Hash) ? item : parse(item)
- @queue = queue_name || @item['queue']
+ @queue = queue_name || @item["queue"]
  end

  def parse(item)
@@ -334,7 +311,7 @@ module Sidekiq
  end

  def klass
- self['class']
+ self["class"]
  end

  def display_class
@@ -345,16 +322,16 @@ module Sidekiq
  "#{target}.#{method}"
  end
  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_class = @item['wrapped'] || args[0]
- if 'ActionMailer::DeliveryJob' == job_class
+ job_class = @item["wrapped"] || args[0]
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
  # MailerClass#mailer_method
- args[0]['arguments'][0..1].join('#')
+ args[0]["arguments"][0..1].join("#")
  else
- job_class
+ job_class
  end
  else
  klass
- end
+ end
  end

  def display_args
@@ -365,53 +342,68 @@ module Sidekiq
  arg
  end
  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
- job_args = self['wrapped'] ? args[0]["arguments"] : []
- if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
  # remove MailerClass, mailer_method and 'deliver_now'
  job_args.drop(3)
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+ # remove MailerClass, mailer_method and 'deliver_now'
+ job_args.drop(3).first["args"]
  else
  job_args
  end
  else
- if self['encrypt']
+ if self["encrypt"]
  # no point in showing 150+ bytes of random garbage
- args[-1] = '[encrypted data]'
+ args[-1] = "[encrypted data]"
  end
  args
- end
+ end
  end

  def args
- @args || @item['args']
+ @args || @item["args"]
  end

  def jid
- self['jid']
+ self["jid"]
  end

  def enqueued_at
- self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
  end

  def created_at
- Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
  end

- def queue
- @queue
+ def tags
+ self["tags"] || []
  end

+ def error_backtrace
+ # Cache nil values
+ if defined?(@error_backtrace)
+ @error_backtrace
+ else
+ value = self["error_backtrace"]
+ @error_backtrace = value && uncompress_backtrace(value)
+ end
+ end
+
+ attr_reader :queue
+
  def latency
  now = Time.now.to_f
- now - (@item['enqueued_at'] || @item['created_at'] || now)
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
  end

  ##
  # Remove this job from the queue.
  def delete
- count = Sidekiq.redis do |conn|
+ count = Sidekiq.redis { |conn|
  conn.lrem("queue:#{@queue}", 1, @value)
- end
+ }
  count != 0
  end

@@ -425,13 +417,28 @@ module Sidekiq
  private

  def safe_load(content, default)
- begin
- yield(*YAML.load(content))
- rescue => ex
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
- # memory yet so the YAML can't be loaded.
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
- default
+ yield(*YAML.load(content))
+ rescue => ex
+ # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
+ # memory yet so the YAML can't be loaded.
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
+ default
+ end
+
+ def uncompress_backtrace(backtrace)
+ if backtrace.is_a?(Array)
+ # Handle old jobs with raw Array backtrace format
+ backtrace
+ else
+ decoded = Base64.decode64(backtrace)
+ uncompressed = Zlib::Inflate.inflate(decoded)
+ begin
+ Sidekiq.load_json(uncompressed)
+ rescue
+ # Handle old jobs with marshalled backtrace format
+ # TODO Remove in 7.x
+ Marshal.load(uncompressed)
+ end
  end
  end
  end
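The new uncompress_backtrace helper above implies that 6.x stores a job's error_backtrace as a Base64-encoded, Zlib-deflated JSON string instead of a raw array. A minimal round-trip sketch, assuming the encoder mirrors this decode path (the actual compression side lives elsewhere in Sidekiq and is not part of this hunk):

require "zlib"
require "base64"
require "json"

# Hypothetical illustration: compress a backtrace the way the decoder expects.
backtrace = ["app/workers/hard_worker.rb:12:in `perform'"]
compressed = Base64.encode64(Zlib::Deflate.deflate(JSON.generate(backtrace)))

# Decode path matching SortedEntry#uncompress_backtrace above.
decoded = Zlib::Inflate.inflate(Base64.decode64(compressed))
JSON.parse(decoded) # => ["app/workers/hard_worker.rb:12:in `perform'"]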
@@ -459,8 +466,9 @@ module Sidekiq
  end

  def reschedule(at)
- delete
- @parent.schedule(at, item)
+ Sidekiq.redis do |conn|
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
+ end
  end

  def add_to_queue
@@ -473,7 +481,7 @@ module Sidekiq
  def retry
  remove_job do |message|
  msg = Sidekiq.load_json(message)
- msg['retry_count'] -= 1 if msg['retry_count']
+ msg["retry_count"] -= 1 if msg["retry_count"]
  Sidekiq::Client.push(msg)
  end
  end
@@ -487,45 +495,44 @@ module Sidekiq
  end

  def error?
- !!item['error_class']
+ !!item["error_class"]
  end

  private

  def remove_job
  Sidekiq.redis do |conn|
- results = conn.multi do
+ results = conn.multi {
  conn.zrangebyscore(parent.name, score, score)
  conn.zremrangebyscore(parent.name, score, score)
- end.first
+ }.first

  if results.size == 1
  yield results.first
  else
  # multiple jobs with the same score
  # find the one with the right JID and push it
- hash = results.group_by do |message|
+ matched, nonmatched = results.partition { |message|
  if message.index(jid)
  msg = Sidekiq.load_json(message)
- msg['jid'] == jid
+ msg["jid"] == jid
  else
  false
  end
- end
+ }

- msg = hash.fetch(true, []).first
+ msg = matched.first
  yield msg if msg

  # push the rest back onto the sorted set
  conn.multi do
- hash.fetch(false, []).each do |message|
+ nonmatched.each do |message|
  conn.zadd(parent.name, score.to_f.to_s, message)
  end
  end
  end
  end
  end
-
  end

  class SortedSet
@@ -542,16 +549,26 @@ module Sidekiq
  Sidekiq.redis { |c| c.zcard(name) }
  end

+ def scan(match, count = 100)
+ return to_enum(:scan, match, count) unless block_given?
+
+ match = "*#{match}*" unless match.include?("*")
+ Sidekiq.redis do |conn|
+ conn.zscan_each(name, match: match, count: count) do |entry, score|
+ yield SortedEntry.new(self, score, entry)
+ end
+ end
+ end
+
  def clear
  Sidekiq.redis do |conn|
- conn.del(name)
+ conn.unlink(name)
  end
  end
  alias_method :💣, :clear
  end

  class JobSet < SortedSet
-
  def schedule(timestamp, message)
  Sidekiq.redis do |conn|
  conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
@@ -564,44 +581,55 @@ module Sidekiq
  page = -1
  page_size = 50

- while true do
+ loop do
  range_start = page * page_size + offset_size
- range_end = range_start + page_size - 1
- elements = Sidekiq.redis do |conn|
+ range_end = range_start + page_size - 1
+ elements = Sidekiq.redis { |conn|
  conn.zrange name, range_start, range_end, with_scores: true
- end
+ }
  break if elements.empty?
  page -= 1
- elements.reverse.each do |element, score|
+ elements.reverse_each do |element, score|
  yield SortedEntry.new(self, score, element)
  end
  offset_size = initial_size - @_size
  end
  end

+ ##
+ # Fetch jobs that match a given time or Range. Job ID is an
+ # optional second argument.
  def fetch(score, jid = nil)
- elements = Sidekiq.redis do |conn|
- conn.zrangebyscore(name, score, score)
- end
-
- elements.inject([]) do |result, element|
- entry = SortedEntry.new(self, score, element)
- if jid
- result << entry if entry.jid == jid
+ begin_score, end_score =
+ if score.is_a?(Range)
+ [score.first, score.last]
  else
- result << entry
+ [score, score]
  end
- result
+
+ elements = Sidekiq.redis { |conn|
+ conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+ }
+
+ elements.each_with_object([]) do |element, result|
+ data, job_score = element
+ entry = SortedEntry.new(self, job_score, data)
+ result << entry if jid.nil? || entry.jid == jid
  end
  end

  ##
  # Find the job with the given JID within this sorted set.
- #
- # This is a slow, inefficient operation. Do not use under
- # normal conditions. Sidekiq Pro contains a faster version.
+ # This is a slower O(n) operation. Do not use for app logic.
  def find_job(jid)
- self.detect { |j| j.jid == jid }
+ Sidekiq.redis do |conn|
+ conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+ job = JSON.parse(entry)
+ matched = job["jid"] == jid
+ return SortedEntry.new(self, score, entry) if matched
+ end
+ end
+ nil
  end

  def delete_by_value(name, value)
@@ -616,13 +644,14 @@ module Sidekiq
  Sidekiq.redis do |conn|
  elements = conn.zrangebyscore(name, score, score)
  elements.each do |element|
- message = Sidekiq.load_json(element)
- if message["jid"] == jid
- ret = conn.zrem(name, element)
- @_size -= 1 if ret
- break ret
+ if element.index(jid)
+ message = Sidekiq.load_json(element)
+ if message["jid"] == jid
+ ret = conn.zrem(name, element)
+ @_size -= 1 if ret
+ break ret
+ end
  end
- false
  end
  end
  end
@@ -644,7 +673,7 @@ module Sidekiq
  # end.map(&:delete)
  class ScheduledSet < JobSet
  def initialize
- super 'schedule'
+ super "schedule"
  end
  end

@@ -662,19 +691,15 @@ module Sidekiq
  # end.map(&:delete)
  class RetrySet < JobSet
  def initialize
- super 'retry'
+ super "retry"
  end

  def retry_all
- while size > 0
- each(&:retry)
- end
+ each(&:retry) while size > 0
  end

  def kill_all
- while size > 0
- each(&:kill)
- end
+ each(&:kill) while size > 0
  end
  end

@@ -683,15 +708,15 @@ module Sidekiq
  #
  class DeadSet < JobSet
  def initialize
- super 'dead'
+ super "dead"
  end

- def kill(message, opts={})
+ def kill(message, opts = {})
  now = Time.now.to_f
  Sidekiq.redis do |conn|
  conn.multi do
  conn.zadd(name, now.to_s, message)
- conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+ conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
  conn.zremrangebyrank(name, 0, - self.class.max_jobs)
  end
  end
@@ -708,9 +733,7 @@ module Sidekiq
  end

  def retry_all
- while size > 0
- each(&:retry)
- end
+ each(&:retry) while size > 0
  end

  def self.max_jobs
@@ -724,16 +747,15 @@ module Sidekiq

  ##
  # Enumerates the set of Sidekiq processes which are actively working
- # right now. Each process send a heartbeat to Redis every 5 seconds
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
  # Yields a Sidekiq::Process.
  #
  class ProcessSet
  include Enumerable
- include RedisScanner

- def initialize(clean_plz=true)
+ def initialize(clean_plz = true)
  cleanup if clean_plz
  end

@@ -742,50 +764,51 @@ module Sidekiq
  def cleanup
  count = 0
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes').sort
- heartbeats = conn.pipelined do
+ procs = conn.sscan_each("processes").to_a.sort
+ heartbeats = conn.pipelined {
  procs.each do |key|
- conn.hget(key, 'info')
+ conn.hget(key, "info")
  end
- end
+ }

  # the hash named key has an expiry of 60 seconds.
  # if it's not found, that means the process has not reported
  # in to Redis and probably died.
- to_prune = []
- heartbeats.each_with_index do |beat, i|
- to_prune << procs[i] if beat.nil?
- end
- count = conn.srem('processes', to_prune) unless to_prune.empty?
+ to_prune = procs.select.with_index { |proc, i|
+ heartbeats[i].nil?
+ }
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
  end
  count
  end

  def each
- procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
+ result = Sidekiq.redis { |conn|
+ procs = conn.sscan_each("processes").to_a.sort

- Sidekiq.redis do |conn|
  # We're making a tradeoff here between consuming more memory instead of
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
  # you'll be happier this way
- result = conn.pipelined do
+ conn.pipelined do
  procs.each do |key|
- conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
+ conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
  end
  end
+ }

- result.each do |info, busy, at_s, quiet|
- # If a process is stopped between when we query Redis for `procs` and
- # when we query for `result`, we will have an item in `result` that is
- # composed of `nil` values.
- next if info.nil?
+ result.each do |info, busy, at_s, quiet, rss, rtt|
+ # If a process is stopped between when we query Redis for `procs` and
+ # when we query for `result`, we will have an item in `result` that is
+ # composed of `nil` values.
+ next if info.nil?

- hash = Sidekiq.load_json(info)
- yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
- end
+ hash = Sidekiq.load_json(info)
+ yield Process.new(hash.merge("busy" => busy.to_i,
+ "beat" => at_s.to_f,
+ "quiet" => quiet,
+ "rss" => rss.to_i,
+ "rtt_us" => rtt.to_i))
  end
-
- nil
  end

  # This method is not guaranteed accurate since it does not prune the set
@@ -793,7 +816,18 @@ module Sidekiq
  # contains Sidekiq processes which have sent a heartbeat within the last
  # 60 seconds.
  def size
- Sidekiq.redis { |conn| conn.scard('processes') }
+ Sidekiq.redis { |conn| conn.scard("processes") }
+ end
+
+ # Total number of threads available to execute jobs.
+ # For Sidekiq Enterprise customers this number (in production) must be
+ # less than or equal to your licensed concurrency.
+ def total_concurrency
+ sum { |x| x["concurrency"] }
+ end
+
+ def total_rss
+ sum { |x| x["rss"] || 0 }
  end

  # Returns the identity of the current cluster leader or "" if no leader.
@@ -801,9 +835,9 @@ module Sidekiq
  # or Sidekiq Pro.
  def leader
  @leader ||= begin
- x = Sidekiq.redis {|c| c.get("dear-leader") }
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
  # need a non-falsy value so we can memoize
- x = "" unless x
+ x ||= ""
  x
  end
  end
@@ -830,11 +864,11 @@ module Sidekiq
  end

  def tag
- self['tag']
+ self["tag"]
  end

  def labels
- Array(self['labels'])
+ Array(self["labels"])
  end

  def [](key)
@@ -842,23 +876,23 @@ module Sidekiq
  end

  def identity
- self['identity']
+ self["identity"]
  end

  def quiet!
- signal('TSTP')
+ signal("TSTP")
  end

  def stop!
- signal('TERM')
+ signal("TERM")
  end

  def dump_threads
- signal('TTIN')
+ signal("TTIN")
  end

  def stopping?
- self['quiet'] == 'true'
+ self["quiet"] == "true"
  end

  private
@@ -872,7 +906,6 @@ module Sidekiq
  end
  end
  end
-
  end

  ##
@@ -897,22 +930,28 @@ module Sidekiq
  #
  class Workers
  include Enumerable
- include RedisScanner

- def each
+ def each(&block)
+ results = []
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes')
+ procs = conn.sscan_each("processes").to_a
  procs.sort.each do |key|
- valid, workers = conn.pipelined do
- conn.exists(key)
+ valid, workers = conn.pipelined {
+ conn.exists?(key)
  conn.hgetall("#{key}:workers")
- end
+ }
  next unless valid
  workers.each_pair do |tid, json|
- yield key, tid, Sidekiq.load_json(json)
+ hsh = Sidekiq.load_json(json)
+ p = hsh["payload"]
+ # avoid breaking API, this is a side effect of the JSON optimization in #4316
+ hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
+ results << [key, tid, hsh]
  end
  end
  end
+
+ results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
  end

  # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -923,18 +962,17 @@ module Sidekiq
  # which can easily get out of sync with crashy processes.
  def size
  Sidekiq.redis do |conn|
- procs = sscan(conn, 'processes')
+ procs = conn.sscan_each("processes").to_a
  if procs.empty?
  0
  else
- conn.pipelined do
+ conn.pipelined {
  procs.each do |key|
- conn.hget(key, 'busy')
+ conn.hget(key, "busy")
  end
- end.map(&:to_i).inject(:+)
+ }.sum(&:to_i)
  end
  end
  end
  end
-
  end
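Beyond the quote-style churn, this file adds several public API conveniences: SortedSet#scan, JobSet#fetch accepting a Range, Job#tags, Job#error_backtrace, and ProcessSet#total_concurrency / #total_rss. A rough usage sketch against the 6.2.1 API as shown in the diff above (the worker-name pattern and time window are made up for illustration):

require "sidekiq/api"

# Scan the retry set for entries whose serialized payload matches a pattern.
Sidekiq::RetrySet.new.scan("HardWorker") do |entry|
  puts entry.jid
end

# Fetch scheduled jobs due within the next hour (Range support is new in 6.x).
now = Time.now.to_f
Sidekiq::ScheduledSet.new.fetch(now..(now + 3600)).each do |entry|
  puts entry.klass
end

# Cluster-wide thread capacity and memory, summed from each process heartbeat.
ps = Sidekiq::ProcessSet.new
puts ps.total_concurrency
puts ps.total_rss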