sidekiq 6.3.1 → 6.5.9

Files changed (76)
  1. checksums.yaml +4 -4
  2. data/Changes.md +134 -0
  3. data/LICENSE +3 -3
  4. data/README.md +7 -2
  5. data/bin/sidekiq +3 -3
  6. data/bin/sidekiqload +70 -66
  7. data/bin/sidekiqmon +1 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +261 -104
  13. data/lib/sidekiq/cli.rb +62 -38
  14. data/lib/sidekiq/client.rb +47 -67
  15. data/lib/sidekiq/{util.rb → component.rb} +12 -42
  16. data/lib/sidekiq/delay.rb +3 -1
  17. data/lib/sidekiq/extensions/generic_proxy.rb +1 -1
  18. data/lib/sidekiq/fetch.rb +20 -18
  19. data/lib/sidekiq/job_logger.rb +15 -27
  20. data/lib/sidekiq/job_retry.rb +78 -55
  21. data/lib/sidekiq/job_util.rb +71 -0
  22. data/lib/sidekiq/launcher.rb +58 -54
  23. data/lib/sidekiq/logger.rb +8 -18
  24. data/lib/sidekiq/manager.rb +35 -34
  25. data/lib/sidekiq/metrics/deploy.rb +47 -0
  26. data/lib/sidekiq/metrics/query.rb +153 -0
  27. data/lib/sidekiq/metrics/shared.rb +94 -0
  28. data/lib/sidekiq/metrics/tracking.rb +134 -0
  29. data/lib/sidekiq/middleware/chain.rb +82 -38
  30. data/lib/sidekiq/middleware/current_attributes.rb +19 -8
  31. data/lib/sidekiq/middleware/i18n.rb +6 -4
  32. data/lib/sidekiq/middleware/modules.rb +21 -0
  33. data/lib/sidekiq/monitor.rb +2 -2
  34. data/lib/sidekiq/paginator.rb +17 -9
  35. data/lib/sidekiq/processor.rb +47 -41
  36. data/lib/sidekiq/rails.rb +15 -8
  37. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  38. data/lib/sidekiq/redis_connection.rb +80 -49
  39. data/lib/sidekiq/ring_buffer.rb +29 -0
  40. data/lib/sidekiq/scheduled.rb +66 -27
  41. data/lib/sidekiq/testing/inline.rb +4 -4
  42. data/lib/sidekiq/testing.rb +37 -36
  43. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  44. data/lib/sidekiq/version.rb +1 -1
  45. data/lib/sidekiq/web/action.rb +3 -3
  46. data/lib/sidekiq/web/application.rb +26 -7
  47. data/lib/sidekiq/web/csrf_protection.rb +2 -2
  48. data/lib/sidekiq/web/helpers.rb +21 -8
  49. data/lib/sidekiq/web.rb +8 -4
  50. data/lib/sidekiq/worker.rb +78 -19
  51. data/lib/sidekiq.rb +111 -30
  52. data/sidekiq.gemspec +2 -2
  53. data/web/assets/javascripts/application.js +58 -26
  54. data/web/assets/javascripts/chart.min.js +13 -0
  55. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  56. data/web/assets/javascripts/dashboard.js +0 -17
  57. data/web/assets/javascripts/graph.js +16 -0
  58. data/web/assets/javascripts/metrics.js +262 -0
  59. data/web/assets/stylesheets/application-dark.css +13 -17
  60. data/web/assets/stylesheets/application.css +48 -6
  61. data/web/locales/el.yml +43 -19
  62. data/web/locales/en.yml +7 -0
  63. data/web/locales/ja.yml +7 -0
  64. data/web/locales/pt-br.yml +27 -9
  65. data/web/locales/zh-cn.yml +36 -11
  66. data/web/locales/zh-tw.yml +32 -7
  67. data/web/views/_nav.erb +1 -1
  68. data/web/views/_summary.erb +1 -1
  69. data/web/views/busy.erb +9 -4
  70. data/web/views/dashboard.erb +1 -0
  71. data/web/views/metrics.erb +69 -0
  72. data/web/views/metrics_for_job.erb +87 -0
  73. data/web/views/queue.erb +5 -1
  74. metadata +39 -13
  75. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  76. data/lib/sidekiq/exception_handler.rb +0 -27
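
A note on the generator rename above (job_generator.rb replacing worker_generator.rb, and the worker.rb.erb/worker_spec.rb.erb/worker_test.rb.erb templates becoming job.rb.erb and friends): the class the renamed generator emits is not shown in this changeset, but a hypothetical result would look like the sketch below, assuming the template still emits a plain job class that includes the Sidekiq::Job alias introduced in 6.3.

    # Hypothetical output of the renamed generator (not taken from this diff).
    class HardJob
      include Sidekiq::Job

      def perform(*args)
        # do work here
      end
    end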
data/lib/sidekiq/api.rb CHANGED
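
Most of the mechanical churn in this file is Redis pipelining and transactions switching from implicit command calls on the outer connection to the explicit block argument (conn.pipelined do |pipeline| ... and conn.multi do |transaction| ...). A minimal sketch of the newer style, assuming redis-rb 4.6+ semantics, where commands are queued on the yielded object and results come back as an array in command order:

    Sidekiq.redis do |conn|
      processed, failed = conn.pipelined do |pipeline|
        # commands are queued on the yielded pipeline, not on conn
        pipeline.get("stat:processed")
        pipeline.get("stat:failed")
      end
    end

The same pattern appears for conn.multi throughout the hunks below.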
@@ -3,9 +3,31 @@
 require "sidekiq"
 
 require "zlib"
+require "set"
 require "base64"
 
+if ENV["SIDEKIQ_METRICS_BETA"]
+  require "sidekiq/metrics/deploy"
+  require "sidekiq/metrics/query"
+end
+
+#
+# Sidekiq's Data API provides a Ruby object model on top
+# of Sidekiq's runtime data in Redis. This API should never
+# be used within application code for business logic.
+#
+# The Sidekiq server process never uses this API: all data
+# manipulation is done directly for performance reasons to
+# ensure we are using Redis as efficiently as possible at
+# every callsite.
+#
+
 module Sidekiq
+  # Retrieve runtime statistics from Redis regarding
+  # this Sidekiq cluster.
+  #
+  #   stat = Sidekiq::Stats.new
+  #   stat.processed
   class Stats
     def initialize
       fetch_stats_fast!
@@ -52,16 +74,17 @@ module Sidekiq
     end
 
     # O(1) redis calls
+    # @api private
     def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          conn.get("stat:processed")
-          conn.get("stat:failed")
-          conn.zcard("schedule")
-          conn.zcard("retry")
-          conn.zcard("dead")
-          conn.scard("processes")
-          conn.lrange("queue:default", -1, -1)
+        conn.pipelined do |pipeline|
+          pipeline.get("stat:processed")
+          pipeline.get("stat:failed")
+          pipeline.zcard("schedule")
+          pipeline.zcard("retry")
+          pipeline.zcard("dead")
+          pipeline.scard("processes")
+          pipeline.lrange("queue:default", -1, -1)
         end
       }
 
@@ -91,6 +114,7 @@ module Sidekiq
     end
 
     # O(number of processes + number of queues) redis calls
+    # @api private
     def fetch_stats_slow!
       processes = Sidekiq.redis { |conn|
         conn.sscan_each("processes").to_a
@@ -101,9 +125,9 @@ module Sidekiq
       }
 
       pipe2_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          processes.each { |key| conn.hget(key, "busy") }
-          queues.each { |queue| conn.llen("queue:#{queue}") }
+        conn.pipelined do |pipeline|
+          processes.each { |key| pipeline.hget(key, "busy") }
+          queues.each { |queue| pipeline.llen("queue:#{queue}") }
        end
       }
 
@@ -116,11 +140,13 @@ module Sidekiq
       @stats
     end
 
+    # @api private
     def fetch_stats!
       fetch_stats_fast!
       fetch_stats_slow!
     end
 
+    # @api private
     def reset(*stats)
       all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -147,9 +173,9 @@ module Sidekiq
        Sidekiq.redis do |conn|
          queues = conn.sscan_each("queues").to_a
 
-          lengths = conn.pipelined {
+          lengths = conn.pipelined { |pipeline|
            queues.each do |queue|
-              conn.llen("queue:#{queue}")
+              pipeline.llen("queue:#{queue}")
            end
          }
 
@@ -161,6 +187,8 @@
 
    class History
      def initialize(days_previous, start_date = nil)
+        # we only store five years of data in Redis
+        raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
        @days_previous = days_previous
        @start_date = start_date || Time.now.utc.to_date
      end
@@ -189,7 +217,7 @@ module Sidekiq
              stat_hash[dates[idx]] = value ? value.to_i : 0
            end
          end
-        rescue Redis::CommandError
+        rescue RedisConnection.adapter::CommandError
          # mget will trigger a CROSSSLOT error when run against a Cluster
          # TODO Someone want to add Cluster support?
        end
@@ -200,9 +228,10 @@ module Sidekiq
   end
 
   ##
-  # Encapsulates a queue within Sidekiq.
+  # Represents a queue within Sidekiq.
   # Allows enumeration of all jobs within the queue
-  # and deletion of jobs.
+  # and deletion of jobs. NB: this queue data is real-time
+  # and is changing within Redis moment by moment.
   #
   #   queue = Sidekiq::Queue.new("mailer")
   #   queue.each do |job|
@@ -210,29 +239,34 @@ module Sidekiq
   #     job.args # => [1, 2, 3]
   #     job.delete if job.jid == 'abcdef1234567890'
   #   end
-  #
   class Queue
     include Enumerable
 
     ##
-    # Return all known queues within Redis.
+    # Fetch all known queues within Redis.
     #
+    # @return [Array<Sidekiq::Queue>]
     def self.all
       Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
+    # @param name [String] the name of the queue
     def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end
 
+    # The current size of the queue within Redis.
+    # This value is real-time and can change between calls.
+    #
+    # @return [Integer] the size
     def size
       Sidekiq.redis { |con| con.llen(@rname) }
     end
 
-    # Sidekiq Pro overrides this
+    # @return [Boolean] if the queue is currently paused
     def paused?
       false
     end
@@ -241,7 +275,7 @@ module Sidekiq
     # Calculates this queue's latency, the difference in seconds since the oldest
     # job in the queue was enqueued.
     #
-    # @return Float
+    # @return [Float] in seconds
     def latency
       entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
@@ -277,34 +311,54 @@ module Sidekiq
     ##
     # Find the job with the given JID within this queue.
     #
-    # This is a slow, inefficient operation. Do not use under
+    # This is a *slow, inefficient* operation. Do not use under
     # normal conditions.
+    #
+    # @param jid [String] the job_id to look for
+    # @return [Sidekiq::JobRecord]
+    # @return [nil] if not found
     def find_job(jid)
       detect { |j| j.jid == jid }
     end
 
+    # delete all jobs within this queue
+    # @return [Boolean] true
     def clear
       Sidekiq.redis do |conn|
-        conn.multi do
-          conn.unlink(@rname)
-          conn.srem("queues", name)
+        conn.multi do |transaction|
+          transaction.unlink(@rname)
+          transaction.srem("queues", [name])
        end
      end
+      true
    end
    alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
   end
 
   ##
-  # Encapsulates a pending job within a Sidekiq queue or
-  # sorted set.
+  # Represents a pending job within a Sidekiq queue.
   #
   # The job should be considered immutable but may be
   # removed from the queue via JobRecord#delete.
-  #
   class JobRecord
+    # the parsed Hash of job data
+    # @!attribute [r] Item
     attr_reader :item
+    # the underlying String in Redis
+    # @!attribute [r] Value
     attr_reader :value
+    # the queue associated with this job
+    # @!attribute [r] Queue
+    attr_reader :queue
 
+    # :nodoc:
+    # @api private
     def initialize(item, queue_name = nil)
       @args = nil
       @value = item
@@ -312,6 +366,8 @@ module Sidekiq
       @queue = queue_name || @item["queue"]
     end
 
+    # :nodoc:
+    # @api private
     def parse(item)
       Sidekiq.load_json(item)
     rescue JSON::ParserError
@@ -323,6 +379,8 @@ module Sidekiq
       {}
     end
 
+    # This is the job class which Sidekiq will execute. If using ActiveJob,
+    # this class will be the ActiveJob adapter class rather than a specific job.
     def klass
       self["class"]
     end
@@ -352,27 +410,31 @@ module Sidekiq
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
       @display_args ||= case klass
-      when /\ASidekiq::Extensions::Delayed/
-        safe_load(args[0], args) do |_, _, arg|
-          arg
-        end
-      when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self["wrapped"] ? args[0]["arguments"] : []
-        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
-          # remove MailerClass, mailer_method and 'deliver_now'
-          job_args.drop(3)
-        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
-          # remove MailerClass, mailer_method and 'deliver_now'
-          job_args.drop(3).first["args"]
-        else
-          job_args
-        end
-      else
-        if self["encrypt"]
-          # no point in showing 150+ bytes of random garbage
-          args[-1] = "[encrypted data]"
-        end
-        args
+        when /\ASidekiq::Extensions::Delayed/
+          safe_load(args[0], args) do |_, _, arg, kwarg|
+            if !kwarg || kwarg.empty?
+              arg
+            else
+              [arg, kwarg]
+            end
+          end
+        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+          job_args = self["wrapped"] ? args[0]["arguments"] : []
+          if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+            # remove MailerClass, mailer_method and 'deliver_now'
+            job_args.drop(3)
+          elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+            # remove MailerClass, mailer_method and 'deliver_now'
+            job_args.drop(3).first["args"]
+          else
+            job_args
+          end
+        else
+          if self["encrypt"]
+            # no point in showing 150+ bytes of random garbage
+            args[-1] = "[encrypted data]"
+          end
+          args
       end
     end
 
@@ -406,15 +468,12 @@ module Sidekiq
       end
     end
 
-    attr_reader :queue
-
     def latency
       now = Time.now.to_f
       now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
-    ##
-    # Remove this job from the queue.
+    # Remove this job from the queue
     def delete
       count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
@@ -422,6 +481,7 @@ module Sidekiq
       count != 0
     end
 
+    # Access arbitrary attributes within the job hash
    def [](name)
      # nil will happen if the JSON fails to parse.
      # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -436,6 +496,7 @@ module Sidekiq
    rescue => ex
      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
      # memory yet so the YAML can't be loaded.
+      # TODO is this still necessary? Zeitwerk reloader should handle?
      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
      default
    end
@@ -458,20 +519,28 @@ module Sidekiq
     end
   end
 
+  # Represents a job within a Redis sorted set where the score
+  # represents a timestamp associated with the job. This timestamp
+  # could be the scheduled time for it to run (e.g. scheduled set),
+  # or the expiration date after which the entry should be deleted (e.g. dead set).
   class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent
 
+    # :nodoc:
+    # @api private
     def initialize(parent, score, item)
       super(item)
-      @score = score
+      @score = Float(score)
       @parent = parent
     end
 
+    # The timestamp associated with this entry
     def at
       Time.at(score).utc
     end
 
+    # remove this entry from the sorted set
     def delete
       if @value
         @parent.delete_by_value(@parent.name, @value)
@@ -480,12 +549,17 @@ module Sidekiq
       end
     end
 
+    # Change the scheduled time for this job.
+    #
+    # @param at [Time] the new timestamp for this job
     def reschedule(at)
       Sidekiq.redis do |conn|
         conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
       end
     end
 
+    # Enqueue this job from the scheduled or dead set so it will
+    # be executed at some point in the near future.
     def add_to_queue
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -493,6 +567,8 @@ module Sidekiq
       end
     end
 
+    # enqueue this job from the retry set so it will be executed
+    # at some point in the near future.
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -501,8 +577,7 @@ module Sidekiq
       end
     end
 
-    ##
-    # Place job in the dead set
+    # Move this job from its current set into the Dead set.
     def kill
       remove_job do |message|
         DeadSet.new.kill(message)
@@ -517,9 +592,9 @@ module Sidekiq
 
     def remove_job
       Sidekiq.redis do |conn|
-        results = conn.multi {
-          conn.zrangebyscore(parent.name, score, score)
-          conn.zremrangebyscore(parent.name, score, score)
+        results = conn.multi { |transaction|
+          transaction.zrangebyscore(parent.name, score, score)
+          transaction.zremrangebyscore(parent.name, score, score)
         }.first
 
         if results.size == 1
@@ -540,9 +615,9 @@ module Sidekiq
           yield msg if msg
 
           # push the rest back onto the sorted set
-          conn.multi do
+          conn.multi do |transaction|
            nonmatched.each do |message|
-              conn.zadd(parent.name, score.to_f.to_s, message)
+              transaction.zadd(parent.name, score.to_f.to_s, message)
            end
          end
        end
@@ -550,20 +625,32 @@
    end
  end
 
+  # Base class for all sorted sets within Sidekiq.
  class SortedSet
    include Enumerable
 
+    # Redis key of the set
+    # @!attribute [r] Name
    attr_reader :name
 
+    # :nodoc:
+    # @api private
    def initialize(name)
      @name = name
      @_size = size
    end
 
+    # real-time size of the set, will change
    def size
      Sidekiq.redis { |c| c.zcard(name) }
    end
 
+    # Scan through each element of the sorted set, yielding each to the supplied block.
+    # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+    #
+    # @param match [String] a snippet or regexp to filter matches.
+    # @param count [Integer] number of elements to retrieve at a time, default 100
+    # @yieldparam [Sidekiq::SortedEntry] each entry
    def scan(match, count = 100)
      return to_enum(:scan, match, count) unless block_given?
 
@@ -575,18 +662,32 @@ module Sidekiq
      end
    end
 
+    # @return [Boolean] always true
    def clear
      Sidekiq.redis do |conn|
        conn.unlink(name)
      end
+      true
    end
    alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
  end
 
+  # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+  # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+  # e.g. Batches.
  class JobSet < SortedSet
-    def schedule(timestamp, message)
+    # Add a job with the associated timestamp to this set.
+    # @param timestamp [Time] the score for the job
+    # @param job [Hash] the job data
+    def schedule(timestamp, job)
      Sidekiq.redis do |conn|
-        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
+        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
      end
    end
 
@@ -600,7 +701,7 @@ module Sidekiq
        range_start = page * page_size + offset_size
        range_end = range_start + page_size - 1
        elements = Sidekiq.redis { |conn|
-          conn.zrange name, range_start, range_end, with_scores: true
+          conn.zrange name, range_start, range_end, withscores: true
        }
        break if elements.empty?
        page -= 1
@@ -614,6 +715,10 @@
    ##
    # Fetch jobs that match a given time or Range. Job ID is an
    # optional second argument.
+    #
+    # @param score [Time,Range] a specific timestamp or range
+    # @param jid [String, optional] find a specific JID within the score
+    # @return [Array<SortedEntry>] any results found, can be empty
    def fetch(score, jid = nil)
      begin_score, end_score =
        if score.is_a?(Range)
@@ -623,7 +728,7 @@ module Sidekiq
        end
 
      elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+        conn.zrangebyscore(name, begin_score, end_score, withscores: true)
      }
 
      elements.each_with_object([]) do |element, result|
@@ -635,7 +740,10 @@ module Sidekiq
 
    ##
    # Find the job with the given JID within this sorted set.
-    # This is a slower O(n) operation. Do not use for app logic.
+    # *This is a slow O(n) operation*. Do not use for app logic.
+    #
+    # @param jid [String] the job identifier
+    # @return [SortedEntry] the record or nil
    def find_job(jid)
      Sidekiq.redis do |conn|
        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
@@ -647,6 +755,8 @@ module Sidekiq
      nil
    end
 
+    # :nodoc:
+    # @api private
    def delete_by_value(name, value)
      Sidekiq.redis do |conn|
        ret = conn.zrem(name, value)
@@ -655,6 +765,8 @@ module Sidekiq
      end
    end
 
+    # :nodoc:
+    # @api private
    def delete_by_jid(score, jid)
      Sidekiq.redis do |conn|
        elements = conn.zrangebyscore(name, score, score)
@@ -675,10 +787,10 @@ module Sidekiq
  end
 
  ##
-  # Allows enumeration of scheduled jobs within Sidekiq.
+  # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
-  # example where I'm selecting all jobs of a certain type
-  # and deleting them from the schedule queue.
+  # example where I'm selecting jobs based on some complex logic
+  # and deleting them from the scheduled set.
  #
  #   r = Sidekiq::ScheduledSet.new
  #   r.select do |scheduled|
@@ -693,7 +805,7 @@ module Sidekiq
  end
 
  ##
-  # Allows enumeration of retries within Sidekiq.
+  # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
@@ -709,30 +821,36 @@ module Sidekiq
      super "retry"
    end
 
+    # Enqueues all jobs pending within the retry set.
    def retry_all
      each(&:retry) while size > 0
    end
 
+    # Kills all jobs pending within the retry set.
    def kill_all
      each(&:kill) while size > 0
    end
  end
 
  ##
-  # Allows enumeration of dead jobs within Sidekiq.
+  # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+  # their retries and are helding in this set pending some sort of manual
+  # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
    def initialize
      super "dead"
    end
 
+    # Add the given job to the Dead set.
+    # @param message [String] the job data as JSON
    def kill(message, opts = {})
      now = Time.now.to_f
      Sidekiq.redis do |conn|
-        conn.multi do
-          conn.zadd(name, now.to_s, message)
-          conn.zremrangebyscore(name, "-inf", now - self.class.timeout)
-          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+        conn.multi do |transaction|
+          transaction.zadd(name, now.to_s, message)
+          transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
+          transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
        end
      end
 
@@ -747,16 +865,21 @@ module Sidekiq
      true
    end
 
+    # Enqueue all dead jobs
    def retry_all
      each(&:retry) while size > 0
    end
 
+    # The maximum size of the Dead set. Older entries will be trimmed
+    # to stay within this limit. Default value is 10,000.
    def self.max_jobs
-      Sidekiq.options[:dead_max_jobs]
+      Sidekiq[:dead_max_jobs]
    end
 
+    # The time limit for entries within the Dead set. Older entries will be thrown away.
+    # Default value is six months.
    def self.timeout
-      Sidekiq.options[:dead_timeout_in_seconds]
+      Sidekiq[:dead_timeout_in_seconds]
    end
  end
 
@@ -765,24 +888,31 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
-  # Yields a Sidekiq::Process.
+  # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
    include Enumerable
 
+    # :nodoc:
+    # @api private
    def initialize(clean_plz = true)
      cleanup if clean_plz
    end
 
    # Cleans up dead processes recorded in Redis.
    # Returns the number of processes cleaned.
+    # :nodoc:
+    # @api private
    def cleanup
+      # dont run cleanup more than once per minute
+      return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a.sort
-        heartbeats = conn.pipelined {
+        procs = conn.sscan_each("processes").to_a
+        heartbeats = conn.pipelined { |pipeline|
          procs.each do |key|
-            conn.hget(key, "info")
+            pipeline.hget(key, "info")
          end
        }
 
@@ -804,9 +934,9 @@ module Sidekiq
        # We're making a tradeoff here between consuming more memory instead of
        # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
        # you'll be happier this way
-        conn.pipelined do
+        conn.pipelined do |pipeline|
          procs.each do |key|
-            conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+            pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
          end
        end
      }
@@ -819,10 +949,10 @@ module Sidekiq
 
        hash = Sidekiq.load_json(info)
        yield Process.new(hash.merge("busy" => busy.to_i,
-                                     "beat" => at_s.to_f,
-                                     "quiet" => quiet,
-                                     "rss" => rss.to_i,
-                                     "rtt_us" => rtt.to_i))
+          "beat" => at_s.to_f,
+          "quiet" => quiet,
+          "rss" => rss.to_i,
+          "rtt_us" => rtt.to_i))
      end
    end
 
@@ -830,6 +960,7 @@ module Sidekiq
    # based on current heartbeat. #each does that and ensures the set only
    # contains Sidekiq processes which have sent a heartbeat within the last
    # 60 seconds.
+    # @return [Integer] current number of registered Sidekiq processes
    def size
      Sidekiq.redis { |conn| conn.scard("processes") }
    end
@@ -837,10 +968,12 @@ module Sidekiq
    # Total number of threads available to execute jobs.
    # For Sidekiq Enterprise customers this number (in production) must be
    # less than or equal to your licensed concurrency.
+    # @return [Integer] the sum of process concurrency
    def total_concurrency
      sum { |x| x["concurrency"].to_i }
    end
 
+    # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
    def total_rss_in_kb
      sum { |x| x["rss"].to_i }
    end
@@ -849,6 +982,8 @@ module Sidekiq
    # Returns the identity of the current cluster leader or "" if no leader.
    # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
    # or Sidekiq Pro.
+    # @return [String] Identity of cluster leader
+    # @return [String] empty string if no leader
    def leader
      @leader ||= begin
        x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -875,6 +1010,8 @@ module Sidekiq
  #   'identity' => <unique string identifying the process>,
  # }
  class Process
+    # :nodoc:
+    # @api private
    def initialize(hash)
      @attribs = hash
    end
@@ -899,18 +1036,31 @@ module Sidekiq
      self["queues"]
    end
 
+    # Signal this process to stop processing new jobs.
+    # It will continue to execute jobs it has already fetched.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to quiet.
    def quiet!
      signal("TSTP")
    end
 
+    # Signal this process to shutdown.
+    # It will shutdown within its configured :timeout value, default 25 seconds.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to start shutting down.
    def stop!
      signal("TERM")
    end
 
+    # Signal this process to log backtraces for all threads.
+    # Useful if you have a frozen or deadlocked process which is
+    # still sending a heartbeat.
+    # This method is *asynchronous* and it can take 5-10 seconds.
    def dump_threads
      signal("TTIN")
    end
 
+    # @return [Boolean] true if this process is quiet or shutting down
    def stopping?
      self["quiet"] == "true"
    end
@@ -920,9 +1070,9 @@ module Sidekiq
    def signal(sig)
      key = "#{identity}-signals"
      Sidekiq.redis do |c|
-        c.multi do
-          c.lpush(key, sig)
-          c.expire(key, 60)
+        c.multi do |transaction|
+          transaction.lpush(key, sig)
+          transaction.expire(key, 60)
        end
      end
    end
@@ -953,24 +1103,31 @@ module Sidekiq
 
    def each(&block)
      results = []
+      procs = nil
+      all_works = nil
+
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
-        procs.sort.each do |key|
-          valid, workers = conn.pipelined {
-            conn.exists?(key)
-            conn.hgetall("#{key}:workers")
-          }
-          next unless valid
-          workers.each_pair do |tid, json|
-            hsh = Sidekiq.load_json(json)
-            p = hsh["payload"]
-            # avoid breaking API, this is a side effect of the JSON optimization in #4316
-            hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
-            results << [key, tid, hsh]
+        procs = conn.sscan_each("processes").to_a.sort
+
+        all_works = conn.pipelined do |pipeline|
+          procs.each do |key|
+            pipeline.hgetall("#{key}:work")
          end
        end
      end
 
+      procs.zip(all_works).each do |key, workers|
+        workers.each_pair do |tid, json|
+          next if json.empty?
+
+          hsh = Sidekiq.load_json(json)
+          p = hsh["payload"]
+          # avoid breaking API, this is a side effect of the JSON optimization in #4316
+          hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
+          results << [key, tid, hsh]
+        end
+      end
+
      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
    end
 
@@ -986,9 +1143,9 @@ module Sidekiq
        if procs.empty?
          0
        else
-          conn.pipelined {
+          conn.pipelined { |pipeline|
            procs.each do |key|
-              conn.hget(key, "busy")
+              pipeline.hget(key, "busy")
            end
          }.sum(&:to_i)
        end
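
A closing note on the DeadSet hunks above: max_jobs and timeout now read Sidekiq[:dead_max_jobs] and Sidekiq[:dead_timeout_in_seconds] instead of Sidekiq.options[...]. A sketch of that hash-style access, assuming the Sidekiq.[] / Sidekiq.[]= accessors shipped in the 6.5 series and the defaults documented in the diff (10,000 jobs, roughly six months):

    # e.g. in config/initializers/sidekiq.rb
    Sidekiq[:dead_max_jobs] = 50_000                        # keep more dead jobs than the 10,000 default
    Sidekiq[:dead_timeout_in_seconds] = 90 * 24 * 60 * 60   # prune after ~90 days instead of ~6 months

    Sidekiq::DeadSet.max_jobs   # => 50_000
    Sidekiq::DeadSet.timeout    # => 7_776_000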