sidekiq 6.5.12 → 7.2.4

Files changed (109)
  1. checksums.yaml +4 -4
  2. data/Changes.md +224 -20
  3. data/README.md +43 -35
  4. data/bin/multi_queue_bench +271 -0
  5. data/bin/sidekiq +3 -8
  6. data/bin/sidekiqload +204 -118
  7. data/bin/sidekiqmon +3 -0
  8. data/lib/sidekiq/api.rb +187 -135
  9. data/lib/sidekiq/capsule.rb +127 -0
  10. data/lib/sidekiq/cli.rb +59 -75
  11. data/lib/sidekiq/client.rb +66 -37
  12. data/lib/sidekiq/component.rb +4 -1
  13. data/lib/sidekiq/config.rb +287 -0
  14. data/lib/sidekiq/deploy.rb +62 -0
  15. data/lib/sidekiq/embedded.rb +61 -0
  16. data/lib/sidekiq/fetch.rb +11 -14
  17. data/lib/sidekiq/job.rb +371 -10
  18. data/lib/sidekiq/job_logger.rb +2 -2
  19. data/lib/sidekiq/job_retry.rb +36 -18
  20. data/lib/sidekiq/job_util.rb +51 -15
  21. data/lib/sidekiq/launcher.rb +71 -65
  22. data/lib/sidekiq/logger.rb +2 -27
  23. data/lib/sidekiq/manager.rb +9 -11
  24. data/lib/sidekiq/metrics/query.rb +7 -4
  25. data/lib/sidekiq/metrics/shared.rb +8 -7
  26. data/lib/sidekiq/metrics/tracking.rb +27 -21
  27. data/lib/sidekiq/middleware/chain.rb +19 -18
  28. data/lib/sidekiq/middleware/current_attributes.rb +52 -20
  29. data/lib/sidekiq/monitor.rb +16 -3
  30. data/lib/sidekiq/paginator.rb +2 -2
  31. data/lib/sidekiq/processor.rb +46 -51
  32. data/lib/sidekiq/rails.rb +15 -10
  33. data/lib/sidekiq/redis_client_adapter.rb +23 -66
  34. data/lib/sidekiq/redis_connection.rb +15 -117
  35. data/lib/sidekiq/scheduled.rb +22 -23
  36. data/lib/sidekiq/testing.rb +32 -41
  37. data/lib/sidekiq/transaction_aware_client.rb +11 -5
  38. data/lib/sidekiq/version.rb +2 -1
  39. data/lib/sidekiq/web/action.rb +8 -3
  40. data/lib/sidekiq/web/application.rb +108 -15
  41. data/lib/sidekiq/web/csrf_protection.rb +10 -7
  42. data/lib/sidekiq/web/helpers.rb +52 -38
  43. data/lib/sidekiq/web.rb +17 -16
  44. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  45. data/lib/sidekiq.rb +76 -274
  46. data/sidekiq.gemspec +12 -10
  47. data/web/assets/javascripts/application.js +39 -0
  48. data/web/assets/javascripts/base-charts.js +106 -0
  49. data/web/assets/javascripts/dashboard-charts.js +182 -0
  50. data/web/assets/javascripts/dashboard.js +10 -232
  51. data/web/assets/javascripts/metrics.js +151 -115
  52. data/web/assets/stylesheets/application-dark.css +4 -0
  53. data/web/assets/stylesheets/application-rtl.css +10 -89
  54. data/web/assets/stylesheets/application.css +45 -298
  55. data/web/locales/ar.yml +70 -70
  56. data/web/locales/cs.yml +62 -62
  57. data/web/locales/da.yml +60 -53
  58. data/web/locales/de.yml +65 -65
  59. data/web/locales/el.yml +2 -7
  60. data/web/locales/en.yml +78 -70
  61. data/web/locales/es.yml +68 -68
  62. data/web/locales/fa.yml +65 -65
  63. data/web/locales/fr.yml +81 -67
  64. data/web/locales/gd.yml +99 -0
  65. data/web/locales/he.yml +65 -64
  66. data/web/locales/hi.yml +59 -59
  67. data/web/locales/it.yml +53 -53
  68. data/web/locales/ja.yml +67 -69
  69. data/web/locales/ko.yml +52 -52
  70. data/web/locales/lt.yml +66 -66
  71. data/web/locales/nb.yml +61 -61
  72. data/web/locales/nl.yml +52 -52
  73. data/web/locales/pl.yml +45 -45
  74. data/web/locales/pt-br.yml +79 -69
  75. data/web/locales/pt.yml +51 -51
  76. data/web/locales/ru.yml +67 -66
  77. data/web/locales/sv.yml +53 -53
  78. data/web/locales/ta.yml +60 -60
  79. data/web/locales/uk.yml +62 -61
  80. data/web/locales/ur.yml +64 -64
  81. data/web/locales/vi.yml +67 -67
  82. data/web/locales/zh-cn.yml +20 -18
  83. data/web/locales/zh-tw.yml +10 -1
  84. data/web/views/_footer.erb +17 -2
  85. data/web/views/_job_info.erb +18 -2
  86. data/web/views/_metrics_period_select.erb +12 -0
  87. data/web/views/_paging.erb +2 -0
  88. data/web/views/_poll_link.erb +1 -1
  89. data/web/views/_summary.erb +7 -7
  90. data/web/views/busy.erb +46 -35
  91. data/web/views/dashboard.erb +26 -5
  92. data/web/views/filtering.erb +7 -0
  93. data/web/views/metrics.erb +46 -24
  94. data/web/views/metrics_for_job.erb +41 -69
  95. data/web/views/morgue.erb +5 -9
  96. data/web/views/queue.erb +10 -14
  97. data/web/views/queues.erb +9 -3
  98. data/web/views/retries.erb +5 -9
  99. data/web/views/scheduled.erb +12 -13
  100. metadata +44 -38
  101. data/lib/sidekiq/delay.rb +0 -43
  102. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  103. data/lib/sidekiq/extensions/active_record.rb +0 -43
  104. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  105. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  106. data/lib/sidekiq/metrics/deploy.rb +0 -47
  107. data/lib/sidekiq/worker.rb +0 -370
  108. data/web/assets/javascripts/graph.js +0 -16
  109. /data/{LICENSE → LICENSE.txt} +0 -0
data/lib/sidekiq/api.rb CHANGED
@@ -4,12 +4,8 @@ require "sidekiq"
 
 require "zlib"
 require "set"
-require "base64"
 
-if ENV["SIDEKIQ_METRICS_BETA"]
-  require "sidekiq/metrics/deploy"
-  require "sidekiq/metrics/query"
-end
+require "sidekiq/metrics/query"
 
 #
 # Sidekiq's Data API provides a Ruby object model on top
@@ -70,7 +66,18 @@ module Sidekiq
     end
 
     def queues
-      Sidekiq::Stats::Queues.new.lengths
+      Sidekiq.redis do |conn|
+        queues = conn.sscan("queues").to_a
+
+        lengths = conn.pipelined { |pipeline|
+          queues.each do |queue|
+            pipeline.llen("queue:#{queue}")
+          end
+        }
+
+        array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+        array_of_arrays.to_h
+      end
     end
 
     # O(1) redis calls
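
Sidekiq::Stats#queues now performs the queue scan and pipelined LLEN calls inline instead of delegating to the removed Sidekiq::Stats::Queues helper. A minimal usage sketch; the queue names and counts are illustrative:

    stats = Sidekiq::Stats.new
    stats.queues                 # => {"default" => 12, "mailers" => 3}
    stats.default_queue_latency  # => 0.0
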
@@ -84,11 +91,11 @@ module Sidekiq
           pipeline.zcard("retry")
           pipeline.zcard("dead")
           pipeline.scard("processes")
-          pipeline.lrange("queue:default", -1, -1)
+          pipeline.lindex("queue:default", -1)
         end
       }
 
-      default_queue_latency = if (entry = pipe1_res[6].first)
+      default_queue_latency = if (entry = pipe1_res[6])
        job = begin
          Sidekiq.load_json(entry)
        rescue
@@ -117,11 +124,11 @@ module Sidekiq
     # @api private
     def fetch_stats_slow!
       processes = Sidekiq.redis { |conn|
-        conn.sscan_each("processes").to_a
+        conn.sscan("processes").to_a
       }
 
       queues = Sidekiq.redis { |conn|
-        conn.sscan_each("queues").to_a
+        conn.sscan("queues").to_a
       }
 
       pipe2_res = Sidekiq.redis { |conn|
@@ -133,7 +140,7 @@ module Sidekiq
 
       s = processes.size
       workers_size = pipe2_res[0...s].sum(&:to_i)
-      enqueued = pipe2_res[s..-1].sum(&:to_i)
+      enqueued = pipe2_res[s..].sum(&:to_i)
 
       @stats[:workers_size] = workers_size
       @stats[:enqueued] = enqueued
@@ -168,25 +175,8 @@ module Sidekiq
       @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end
 
-    class Queues
-      def lengths
-        Sidekiq.redis do |conn|
-          queues = conn.sscan_each("queues").to_a
-
-          lengths = conn.pipelined { |pipeline|
-            queues.each do |queue|
-              pipeline.llen("queue:#{queue}")
-            end
-          }
-
-          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
-          array_of_arrays.to_h
-        end
-      end
-    end
-
     class History
-      def initialize(days_previous, start_date = nil)
+      def initialize(days_previous, start_date = nil, pool: nil)
         # we only store five years of data in Redis
         raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
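
Sidekiq::Stats::Queues (and its #lengths method, the only API it exposed) is removed; the same hash now comes from Sidekiq::Stats#queues. A hedged migration sketch with illustrative values:

    # 6.x
    Sidekiq::Stats::Queues.new.lengths  # => {"default" => 12}
    # 7.x
    Sidekiq::Stats.new.queues           # => {"default" => 12}
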
@@ -211,15 +201,10 @@ module Sidekiq
 
         keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
-        begin
-          Sidekiq.redis do |conn|
-            conn.mget(keys).each_with_index do |value, idx|
-              stat_hash[dates[idx]] = value ? value.to_i : 0
-            end
+        Sidekiq.redis do |conn|
+          conn.mget(keys).each_with_index do |value, idx|
+            stat_hash[dates[idx]] = value ? value.to_i : 0
           end
-        rescue RedisConnection.adapter::CommandError
-          # mget will trigger a CROSSSLOT error when run against a Cluster
-          # TODO Someone want to add Cluster support?
         end
 
         stat_hash
@@ -247,7 +232,7 @@ module Sidekiq
     #
     # @return [Array<Sidekiq::Queue>]
     def self.all
-      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
@@ -278,8 +263,8 @@ module Sidekiq
     # @return [Float] in seconds
     def latency
       entry = Sidekiq.redis { |conn|
-        conn.lrange(@rname, -1, -1)
-      }.first
+        conn.lindex(@rname, -1)
+      }
       return 0 unless entry
       job = Sidekiq.load_json(entry)
       now = Time.now.to_f
@@ -388,12 +373,7 @@ module Sidekiq
     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
       @klass ||= self["display_class"] || begin
-        case klass
-        when /\ASidekiq::Extensions::Delayed/
-          safe_load(args[0], klass) do |target, method, _|
-            "#{target}.#{method}"
-          end
-        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
          job_class = @item["wrapped"] || args[0]
          if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
            # MailerClass#mailer_method
@@ -409,23 +389,14 @@ module Sidekiq
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @display_args ||= case klass
-      when /\ASidekiq::Extensions::Delayed/
-        safe_load(args[0], args) do |_, _, arg, kwarg|
-          if !kwarg || kwarg.empty?
-            arg
-          else
-            [arg, kwarg]
-          end
-        end
-      when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-        job_args = self["wrapped"] ? args[0]["arguments"] : []
+      @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
         if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
         elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
-          job_args.drop(3).first["args"]
+          job_args.drop(3).first.values_at("params", "args")
         else
           job_args
         end
@@ -446,6 +417,10 @@ module Sidekiq
       self["jid"]
     end
 
+    def bid
+      self["bid"]
+    end
+
     def enqueued_at
       self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
     end
@@ -491,32 +466,34 @@ module Sidekiq
 
     private
 
-    def safe_load(content, default)
-      yield(*YAML.load(content))
-    rescue => ex
-      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-      # memory yet so the YAML can't be loaded.
-      # TODO is this still necessary? Zeitwerk reloader should handle?
-      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
-      default
-    end
+    ACTIVE_JOB_PREFIX = "_aj_"
+    GLOBALID_KEY = "_aj_globalid"
 
-    def uncompress_backtrace(backtrace)
-      if backtrace.is_a?(Array)
-        # Handle old jobs with raw Array backtrace format
-        backtrace
-      else
-        decoded = Base64.decode64(backtrace)
-        uncompressed = Zlib::Inflate.inflate(decoded)
-        begin
-          Sidekiq.load_json(uncompressed)
-        rescue
-          # Handle old jobs with marshalled backtrace format
-          # TODO Remove in 7.x
-          Marshal.load(uncompressed)
+    def deserialize_argument(argument)
+      case argument
+      when Array
+        argument.map { |arg| deserialize_argument(arg) }
+      when Hash
+        if serialized_global_id?(argument)
+          argument[GLOBALID_KEY]
+        else
+          argument.transform_values { |v| deserialize_argument(v) }
+            .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
         end
+      else
+        argument
       end
     end
+
+    def serialized_global_id?(hash)
+      hash.size == 1 && hash.include?(GLOBALID_KEY)
+    end
+
+    def uncompress_backtrace(backtrace)
+      strict_base64_decoded = backtrace.unpack1("m")
+      uncompressed = Zlib::Inflate.inflate(strict_base64_decoded)
+      Sidekiq.load_json(uncompressed)
+    end
   end
 
   # Represents a job within a Redis sorted set where the score
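
JobRecord#display_args now runs the arguments of a wrapped ActiveJob through deserialize_argument, stripping the _aj_* serialization keys and unwrapping GlobalID references before display. A sketch of the effect; the payload and GlobalID URI are illustrative:

    # raw ActiveJob arguments inside the JobWrapper payload:
    #   [{"_aj_globalid" => "gid://app/User/1"}, {"note" => "hi", "_aj_symbol_keys" => ["note"]}]
    job = Sidekiq::Queue.new("default").first
    job.display_args  # => ["gid://app/User/1", {"note" => "hi"}]
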
@@ -593,7 +570,7 @@ module Sidekiq
     def remove_job
       Sidekiq.redis do |conn|
         results = conn.multi { |transaction|
-          transaction.zrangebyscore(parent.name, score, score)
+          transaction.zrange(parent.name, score, score, "BYSCORE")
           transaction.zremrangebyscore(parent.name, score, score)
         }.first
 
@@ -656,7 +633,7 @@ module Sidekiq
 
      match = "*#{match}*" unless match.include?("*")
      Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: match, count: count) do |entry, score|
+        conn.zscan(name, match: match, count: count) do |entry, score|
          yield SortedEntry.new(self, score, entry)
        end
      end
@@ -701,7 +678,7 @@ module Sidekiq
        range_start = page * page_size + offset_size
        range_end = range_start + page_size - 1
        elements = Sidekiq.redis { |conn|
-          conn.zrange name, range_start, range_end, withscores: true
+          conn.zrange name, range_start, range_end, "withscores"
        }
        break if elements.empty?
        page -= 1
@@ -728,7 +705,7 @@ module Sidekiq
      end
 
      elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, begin_score, end_score, withscores: true)
+        conn.zrange(name, begin_score, end_score, "BYSCORE", "withscores")
      }
 
      elements.each_with_object([]) do |element, result|
@@ -746,8 +723,8 @@ module Sidekiq
    # @return [SortedEntry] the record or nil
    def find_job(jid)
      Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
-          job = JSON.parse(entry)
+        conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = Sidekiq.load_json(entry)
          matched = job["jid"] == jid
          return SortedEntry.new(self, score, entry) if matched
        end
@@ -769,7 +746,7 @@ module Sidekiq
    # @api private
    def delete_by_jid(score, jid)
      Sidekiq.redis do |conn|
-        elements = conn.zrangebyscore(name, score, score)
+        elements = conn.zrange(name, score, score, "BYSCORE")
        elements.each do |element|
          if element.index(jid)
            message = Sidekiq.load_json(element)
@@ -792,15 +769,11 @@ module Sidekiq
  # example where I'm selecting jobs based on some complex logic
  # and deleting them from the scheduled set.
  #
-  #   r = Sidekiq::ScheduledSet.new
-  #   r.select do |scheduled|
-  #     scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #       scheduled.args[0] == 'User' &&
-  #       scheduled.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
+  # See the API wiki page for usage notes and examples.
+  #
  class ScheduledSet < JobSet
    def initialize
-      super "schedule"
+      super("schedule")
    end
  end
 
@@ -810,15 +783,11 @@ module Sidekiq
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
  #
-  #   r = Sidekiq::RetrySet.new
-  #   r.select do |retri|
-  #     retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #       retri.args[0] == 'User' &&
-  #       retri.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
+  # See the API wiki page for usage notes and examples.
+  #
  class RetrySet < JobSet
    def initialize
-      super "retry"
+      super("retry")
    end
 
    # Enqueues all jobs pending within the retry set.
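
The inline ScheduledSet/RetrySet doc examples, which referenced the removed Sidekiq::Extensions::DelayedClass, are replaced with a pointer to the API wiki. A hedged modern equivalent of that select-and-delete pattern (job class and argument are illustrative):

    rs = Sidekiq::RetrySet.new
    rs.select { |entry| entry.klass == "HardJob" && entry.args[0] == "bob" }.map(&:delete)
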
@@ -839,7 +808,7 @@ module Sidekiq
  #
  class DeadSet < JobSet
    def initialize
-      super "dead"
+      super("dead")
    end
 
    # Add the given job to the Dead set.
@@ -849,8 +818,8 @@ module Sidekiq
      Sidekiq.redis do |conn|
        conn.multi do |transaction|
          transaction.zadd(name, now.to_s, message)
-          transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
-          transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
+          transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+          transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
        end
      end
 
@@ -858,7 +827,7 @@ module Sidekiq
      job = Sidekiq.load_json(message)
      r = RuntimeError.new("Job killed by API")
      r.set_backtrace(caller)
-      Sidekiq.death_handlers.each do |handle|
+      Sidekiq.default_configuration.death_handlers.each do |handle|
        handle.call(job, r)
      end
    end
@@ -869,18 +838,6 @@ module Sidekiq
    def retry_all
      each(&:retry) while size > 0
    end
-
-    # The maximum size of the Dead set. Older entries will be trimmed
-    # to stay within this limit. Default value is 10,000.
-    def self.max_jobs
-      Sidekiq[:dead_max_jobs]
-    end
-
-    # The time limit for entries within the Dead set. Older entries will be thrown away.
-    # Default value is six months.
-    def self.timeout
-      Sidekiq[:dead_timeout_in_seconds]
-    end
  end
 
  ##
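
DeadSet.max_jobs and DeadSet.timeout are removed; trimming when a job is added to the Dead set now reads Sidekiq::Config::DEFAULTS[:dead_max_jobs] and [:dead_timeout_in_seconds] directly. A sketch of overriding those limits in 7.x, assuming the config object still accepts hash-style writes as in 6.x:

    Sidekiq.configure_server do |config|
      config[:dead_max_jobs] = 50_000                       # default 10,000
      config[:dead_timeout_in_seconds] = 90 * 24 * 60 * 60  # default is six months
    end
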
@@ -893,6 +850,24 @@ module Sidekiq
  class ProcessSet
    include Enumerable
 
+    def self.[](identity)
+      exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
+        conn.multi { |transaction|
+          transaction.sismember("processes", identity)
+          transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
+        }
+      }
+
+      return nil if exists == 0 || info.nil?
+
+      hash = Sidekiq.load_json(info)
+      Process.new(hash.merge("busy" => busy.to_i,
+        "beat" => beat.to_f,
+        "quiet" => quiet,
+        "rss" => rss.to_i,
+        "rtt_us" => rtt_us.to_i))
+    end
+
    # :nodoc:
    # @api private
    def initialize(clean_plz = true)
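
ProcessSet.[] is new in 7.x: it looks up a single process entry by identity with one MULTI instead of enumerating the whole set. A usage sketch; the identity string is illustrative:

    process = Sidekiq::ProcessSet["myhost:12345:abcdef"]
    process&.queues     # => ["default", "critical"]
    process&.embedded?  # => false
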
@@ -905,11 +880,11 @@ module Sidekiq
    # @api private
    def cleanup
      # dont run cleanup more than once per minute
-      return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+      return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", "NX", "EX", "60") }
 
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
+        procs = conn.sscan("processes").to_a
        heartbeats = conn.pipelined { |pipeline|
          procs.each do |key|
            pipeline.hget(key, "info")
@@ -929,7 +904,7 @@ module Sidekiq
 
    def each
      result = Sidekiq.redis { |conn|
-        procs = conn.sscan_each("processes").to_a.sort
+        procs = conn.sscan("processes").to_a.sort
 
        # We're making a tradeoff here between consuming more memory instead of
        # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
@@ -941,7 +916,7 @@ module Sidekiq
        end
      }
 
-      result.each do |info, busy, at_s, quiet, rss, rtt|
+      result.each do |info, busy, beat, quiet, rss, rtt_us|
        # If a process is stopped between when we query Redis for `procs` and
        # when we query for `result`, we will have an item in `result` that is
        # composed of `nil` values.
@@ -949,10 +924,10 @@ module Sidekiq
 
        hash = Sidekiq.load_json(info)
        yield Process.new(hash.merge("busy" => busy.to_i,
-          "beat" => at_s.to_f,
+          "beat" => beat.to_f,
          "quiet" => quiet,
          "rss" => rss.to_i,
-          "rtt_us" => rtt.to_i))
+          "rtt_us" => rtt_us.to_i))
      end
    end
 
@@ -1008,6 +983,7 @@ module Sidekiq
  #   'busy' => 10,
  #   'beat' => <last heartbeat>,
  #   'identity' => <unique string identifying the process>,
+  #   'embedded' => true,
  # }
  class Process
    # :nodoc:
@@ -1021,7 +997,7 @@ module Sidekiq
    end
 
    def labels
-      Array(self["labels"])
+      self["labels"].to_a
    end
 
    def [](key)
@@ -1036,11 +1012,25 @@ module Sidekiq
      self["queues"]
    end
 
+    def weights
+      self["weights"]
+    end
+
+    def version
+      self["version"]
+    end
+
+    def embedded?
+      self["embedded"]
+    end
+
    # Signal this process to stop processing new jobs.
    # It will continue to execute jobs it has already fetched.
    # This method is *asynchronous* and it can take 5-10
    # seconds for the process to quiet.
    def quiet!
+      raise "Can't quiet an embedded process" if embedded?
+
      signal("TSTP")
    end
 
@@ -1049,6 +1039,8 @@ module Sidekiq
    # This method is *asynchronous* and it can take 5-10
    # seconds for the process to start shutting down.
    def stop!
+      raise "Can't stop an embedded process" if embedded?
+
      signal("TERM")
    end
 
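
Process#quiet! and #stop! now refuse to signal embedded processes, since an embedded Sidekiq has no standalone process to receive TSTP/TERM. A defensive sketch when signalling everything in the process set:

    Sidekiq::ProcessSet.new.each do |process|
      process.quiet! unless process.embedded?
    end
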
@@ -1107,8 +1099,7 @@ module Sidekiq
      all_works = nil
 
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a.sort
-
+        procs = conn.sscan("processes").to_a.sort
        all_works = conn.pipelined do |pipeline|
          procs.each do |key|
            pipeline.hgetall("#{key}:work")
@@ -1118,17 +1109,11 @@ module Sidekiq
 
      procs.zip(all_works).each do |key, workers|
        workers.each_pair do |tid, json|
-          next if json.empty?
-
-          hsh = Sidekiq.load_json(json)
-          p = hsh["payload"]
-          # avoid breaking API, this is a side effect of the JSON optimization in #4316
-          hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
-          results << [key, tid, hsh]
+          results << [key, tid, Sidekiq::Work.new(key, tid, Sidekiq.load_json(json))] unless json.empty?
        end
      end
 
-      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
+      results.sort_by { |(_, _, hsh)| hsh.raw("run_at") }.each(&block)
    end
 
    # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -1139,7 +1124,7 @@ module Sidekiq
    # which can easily get out of sync with crashy processes.
    def size
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
+        procs = conn.sscan("processes").to_a
        if procs.empty?
          0
        else
@@ -1151,7 +1136,74 @@ module Sidekiq
        end
      end
    end
+
+    ##
+    # Find the work which represents a job with the given JID.
+    # *This is a slow O(n) operation*.  Do not use for app logic.
+    #
+    # @param jid [String] the job identifier
+    # @return [Sidekiq::Work] the work or nil
+    def find_work_by_jid(jid)
+      each do |_process_id, _thread_id, work|
+        job = work.job
+        return work if job.jid == jid
+      end
+      nil
+    end
  end
+
+  # Sidekiq::Work represents a job which is currently executing.
+  class Work
+    attr_reader :process_id
+    attr_reader :thread_id
+
+    def initialize(pid, tid, hsh)
+      @process_id = pid
+      @thread_id = tid
+      @hsh = hsh
+      @job = nil
+    end
+
+    def queue
+      @hsh["queue"]
+    end
+
+    def run_at
+      Time.at(@hsh["run_at"])
+    end
+
+    def job
+      @job ||= Sidekiq::JobRecord.new(@hsh["payload"])
+    end
+
+    def payload
+      @hsh["payload"]
+    end
+
+    # deprecated
+    def [](key)
+      kwargs = {uplevel: 1}
+      kwargs[:category] = :deprecated if RUBY_VERSION > "3.0" # TODO
+      warn("Direct access to `Sidekiq::Work` attributes is deprecated, please use `#payload`, `#queue`, `#run_at` or `#job` instead", **kwargs)
+
+      @hsh[key]
+    end
+
+    # :nodoc:
+    # @api private
+    def raw(name)
+      @hsh[name]
+    end
+
+    def method_missing(*all)
+      @hsh.send(*all)
+    end
+
+    def respond_to_missing?(name)
+      @hsh.respond_to?(name)
+    end
+  end
+
  # Since "worker" is a nebulous term, we've deprecated the use of this class name.
  # Is "worker" a process, a type of job, a thread? Undefined!
  # WorkSet better describes the data.
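
WorkSet#each now yields Sidekiq::Work objects instead of raw hashes; the old hash-style access still works through Work#[] but emits a deprecation warning. A sketch of the new accessors:

    Sidekiq::WorkSet.new.each do |process_id, thread_id, work|
      work.queue    # queue name the job was fetched from
      work.run_at   # Time the job started executing
      work.job.jid  # Sidekiq::JobRecord built from the payload
      # work["payload"] still works but warns; prefer work.payload
    end
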