sidekiq 5.2.10 → 6.5.6

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of sidekiq might be problematic. Click here for more details.

Files changed (124) hide show
  1. checksums.yaml +4 -4
  2. data/Changes.md +391 -1
  3. data/LICENSE +3 -3
  4. data/README.md +24 -35
  5. data/bin/sidekiq +27 -3
  6. data/bin/sidekiqload +79 -67
  7. data/bin/sidekiqmon +8 -0
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +504 -307
  13. data/lib/sidekiq/cli.rb +190 -206
  14. data/lib/sidekiq/client.rb +77 -81
  15. data/lib/sidekiq/component.rb +65 -0
  16. data/lib/sidekiq/delay.rb +8 -7
  17. data/lib/sidekiq/extensions/action_mailer.rb +13 -22
  18. data/lib/sidekiq/extensions/active_record.rb +13 -10
  19. data/lib/sidekiq/extensions/class_methods.rb +14 -11
  20. data/lib/sidekiq/extensions/generic_proxy.rb +7 -5
  21. data/lib/sidekiq/fetch.rb +50 -40
  22. data/lib/sidekiq/job.rb +13 -0
  23. data/lib/sidekiq/job_logger.rb +33 -7
  24. data/lib/sidekiq/job_retry.rb +126 -106
  25. data/lib/sidekiq/job_util.rb +71 -0
  26. data/lib/sidekiq/launcher.rb +177 -83
  27. data/lib/sidekiq/logger.rb +156 -0
  28. data/lib/sidekiq/manager.rb +40 -41
  29. data/lib/sidekiq/metrics/deploy.rb +47 -0
  30. data/lib/sidekiq/metrics/query.rb +153 -0
  31. data/lib/sidekiq/metrics/shared.rb +94 -0
  32. data/lib/sidekiq/metrics/tracking.rb +134 -0
  33. data/lib/sidekiq/middleware/chain.rb +102 -46
  34. data/lib/sidekiq/middleware/current_attributes.rb +63 -0
  35. data/lib/sidekiq/middleware/i18n.rb +7 -7
  36. data/lib/sidekiq/middleware/modules.rb +21 -0
  37. data/lib/sidekiq/monitor.rb +133 -0
  38. data/lib/sidekiq/paginator.rb +20 -16
  39. data/lib/sidekiq/processor.rb +104 -97
  40. data/lib/sidekiq/rails.rb +47 -37
  41. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  42. data/lib/sidekiq/redis_connection.rb +108 -77
  43. data/lib/sidekiq/ring_buffer.rb +29 -0
  44. data/lib/sidekiq/scheduled.rb +64 -35
  45. data/lib/sidekiq/sd_notify.rb +149 -0
  46. data/lib/sidekiq/systemd.rb +24 -0
  47. data/lib/sidekiq/testing/inline.rb +6 -5
  48. data/lib/sidekiq/testing.rb +68 -58
  49. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  50. data/lib/sidekiq/version.rb +2 -1
  51. data/lib/sidekiq/web/action.rb +15 -11
  52. data/lib/sidekiq/web/application.rb +100 -77
  53. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  54. data/lib/sidekiq/web/helpers.rb +134 -94
  55. data/lib/sidekiq/web/router.rb +23 -19
  56. data/lib/sidekiq/web.rb +65 -105
  57. data/lib/sidekiq/worker.rb +253 -106
  58. data/lib/sidekiq.rb +170 -62
  59. data/sidekiq.gemspec +23 -16
  60. data/web/assets/images/apple-touch-icon.png +0 -0
  61. data/web/assets/javascripts/application.js +112 -61
  62. data/web/assets/javascripts/chart.min.js +13 -0
  63. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  64. data/web/assets/javascripts/dashboard.js +53 -89
  65. data/web/assets/javascripts/graph.js +16 -0
  66. data/web/assets/javascripts/metrics.js +262 -0
  67. data/web/assets/stylesheets/application-dark.css +143 -0
  68. data/web/assets/stylesheets/application-rtl.css +0 -4
  69. data/web/assets/stylesheets/application.css +88 -233
  70. data/web/locales/ar.yml +8 -2
  71. data/web/locales/de.yml +14 -2
  72. data/web/locales/el.yml +43 -19
  73. data/web/locales/en.yml +13 -1
  74. data/web/locales/es.yml +18 -2
  75. data/web/locales/fr.yml +10 -3
  76. data/web/locales/ja.yml +7 -1
  77. data/web/locales/lt.yml +83 -0
  78. data/web/locales/pl.yml +4 -4
  79. data/web/locales/pt-br.yml +27 -9
  80. data/web/locales/ru.yml +4 -0
  81. data/web/locales/vi.yml +83 -0
  82. data/web/views/_footer.erb +1 -1
  83. data/web/views/_job_info.erb +3 -2
  84. data/web/views/_nav.erb +1 -1
  85. data/web/views/_poll_link.erb +2 -5
  86. data/web/views/_summary.erb +7 -7
  87. data/web/views/busy.erb +56 -22
  88. data/web/views/dashboard.erb +23 -14
  89. data/web/views/dead.erb +3 -3
  90. data/web/views/layout.erb +3 -1
  91. data/web/views/metrics.erb +69 -0
  92. data/web/views/metrics_for_job.erb +87 -0
  93. data/web/views/morgue.erb +9 -6
  94. data/web/views/queue.erb +23 -10
  95. data/web/views/queues.erb +10 -2
  96. data/web/views/retries.erb +11 -8
  97. data/web/views/retry.erb +3 -3
  98. data/web/views/scheduled.erb +5 -2
  99. metadata +53 -64
  100. data/.circleci/config.yml +0 -61
  101. data/.github/contributing.md +0 -32
  102. data/.github/issue_template.md +0 -11
  103. data/.gitignore +0 -15
  104. data/.travis.yml +0 -11
  105. data/3.0-Upgrade.md +0 -70
  106. data/4.0-Upgrade.md +0 -53
  107. data/5.0-Upgrade.md +0 -56
  108. data/COMM-LICENSE +0 -97
  109. data/Ent-Changes.md +0 -238
  110. data/Gemfile +0 -19
  111. data/Pro-2.0-Upgrade.md +0 -138
  112. data/Pro-3.0-Upgrade.md +0 -44
  113. data/Pro-4.0-Upgrade.md +0 -35
  114. data/Pro-Changes.md +0 -759
  115. data/Rakefile +0 -9
  116. data/bin/sidekiqctl +0 -20
  117. data/code_of_conduct.md +0 -50
  118. data/lib/generators/sidekiq/worker_generator.rb +0 -49
  119. data/lib/sidekiq/core_ext.rb +0 -1
  120. data/lib/sidekiq/ctl.rb +0 -221
  121. data/lib/sidekiq/exception_handler.rb +0 -29
  122. data/lib/sidekiq/logging.rb +0 -122
  123. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
  124. data/lib/sidekiq/util.rb +0 -66
data/lib/sidekiq/api.rb CHANGED
@@ -1,26 +1,25 @@
1
1
  # frozen_string_literal: true
2
- require 'sidekiq'
3
2
 
4
- module Sidekiq
3
+ require "sidekiq"
5
4
 
6
- module RedisScanner
7
- def sscan(conn, key)
8
- cursor = '0'
9
- result = []
10
- loop do
11
- cursor, values = conn.sscan(key, cursor)
12
- result.push(*values)
13
- break if cursor == '0'
14
- end
15
- result
16
- end
17
- end
5
+ require "zlib"
6
+ require "set"
7
+ require "base64"
18
8
 
19
- class Stats
20
- include RedisScanner
9
+ if ENV["SIDEKIQ_METRICS_BETA"]
10
+ require "sidekiq/metrics/deploy"
11
+ require "sidekiq/metrics/query"
12
+ end
21
13
 
14
+ module Sidekiq
15
+ # Retrieve runtime statistics from Redis regarding
16
+ # this Sidekiq cluster.
17
+ #
18
+ # stat = Sidekiq::Stats.new
19
+ # stat.processed
20
+ class Stats
22
21
  def initialize
23
- fetch_stats!
22
+ fetch_stats_fast!
24
23
  end
25
24
 
26
25
  def processed
@@ -63,62 +62,82 @@ module Sidekiq
63
62
  Sidekiq::Stats::Queues.new.lengths
64
63
  end
65
64
 
66
- def fetch_stats!
67
- pipe1_res = Sidekiq.redis do |conn|
68
- conn.pipelined do
69
- conn.get('stat:processed')
70
- conn.get('stat:failed')
71
- conn.zcard('schedule')
72
- conn.zcard('retry')
73
- conn.zcard('dead')
74
- conn.scard('processes')
75
- conn.lrange('queue:default', -1, -1)
65
+ # O(1) redis calls
66
+ # @api private
67
+ def fetch_stats_fast!
68
+ pipe1_res = Sidekiq.redis { |conn|
69
+ conn.pipelined do |pipeline|
70
+ pipeline.get("stat:processed")
71
+ pipeline.get("stat:failed")
72
+ pipeline.zcard("schedule")
73
+ pipeline.zcard("retry")
74
+ pipeline.zcard("dead")
75
+ pipeline.scard("processes")
76
+ pipeline.lrange("queue:default", -1, -1)
76
77
  end
77
- end
78
+ }
78
79
 
79
- processes = Sidekiq.redis do |conn|
80
- sscan(conn, 'processes')
80
+ default_queue_latency = if (entry = pipe1_res[6].first)
81
+ job = begin
82
+ Sidekiq.load_json(entry)
83
+ rescue
84
+ {}
85
+ end
86
+ now = Time.now.to_f
87
+ thence = job["enqueued_at"] || now
88
+ now - thence
89
+ else
90
+ 0
81
91
  end
82
92
 
83
- queues = Sidekiq.redis do |conn|
84
- sscan(conn, 'queues')
85
- end
93
+ @stats = {
94
+ processed: pipe1_res[0].to_i,
95
+ failed: pipe1_res[1].to_i,
96
+ scheduled_size: pipe1_res[2],
97
+ retry_size: pipe1_res[3],
98
+ dead_size: pipe1_res[4],
99
+ processes_size: pipe1_res[5],
100
+
101
+ default_queue_latency: default_queue_latency
102
+ }
103
+ end
104
+
105
+ # O(number of processes + number of queues) redis calls
106
+ # @api private
107
+ def fetch_stats_slow!
108
+ processes = Sidekiq.redis { |conn|
109
+ conn.sscan_each("processes").to_a
110
+ }
111
+
112
+ queues = Sidekiq.redis { |conn|
113
+ conn.sscan_each("queues").to_a
114
+ }
86
115
 
87
- pipe2_res = Sidekiq.redis do |conn|
88
- conn.pipelined do
89
- processes.each {|key| conn.hget(key, 'busy') }
90
- queues.each {|queue| conn.llen("queue:#{queue}") }
116
+ pipe2_res = Sidekiq.redis { |conn|
117
+ conn.pipelined do |pipeline|
118
+ processes.each { |key| pipeline.hget(key, "busy") }
119
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
91
120
  end
92
- end
121
+ }
93
122
 
94
123
  s = processes.size
95
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
96
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
124
+ workers_size = pipe2_res[0...s].sum(&:to_i)
125
+ enqueued = pipe2_res[s..-1].sum(&:to_i)
97
126
 
98
- default_queue_latency = if (entry = pipe1_res[6].first)
99
- job = Sidekiq.load_json(entry) rescue {}
100
- now = Time.now.to_f
101
- thence = job['enqueued_at'] || now
102
- now - thence
103
- else
104
- 0
105
- end
106
- @stats = {
107
- processed: pipe1_res[0].to_i,
108
- failed: pipe1_res[1].to_i,
109
- scheduled_size: pipe1_res[2],
110
- retry_size: pipe1_res[3],
111
- dead_size: pipe1_res[4],
112
- processes_size: pipe1_res[5],
113
-
114
- default_queue_latency: default_queue_latency,
115
- workers_size: workers_size,
116
- enqueued: enqueued
117
- }
127
+ @stats[:workers_size] = workers_size
128
+ @stats[:enqueued] = enqueued
129
+ @stats
130
+ end
131
+
132
+ # @api private
133
+ def fetch_stats!
134
+ fetch_stats_fast!
135
+ fetch_stats_slow!
118
136
  end
119
137
 
138
+ # @api private
120
139
  def reset(*stats)
121
- all = %w(failed processed)
140
+ all = %w[failed processed]
122
141
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
123
142
 
124
143
  mset_args = []
@@ -134,37 +153,30 @@ module Sidekiq
134
153
  private
135
154
 
136
155
  def stat(s)
137
- @stats[s]
156
+ fetch_stats_slow! if @stats[s].nil?
157
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
138
158
  end
139
159
 
140
160
  class Queues
141
- include RedisScanner
142
-
143
161
  def lengths
144
162
  Sidekiq.redis do |conn|
145
- queues = sscan(conn, 'queues')
163
+ queues = conn.sscan_each("queues").to_a
146
164
 
147
- lengths = conn.pipelined do
165
+ lengths = conn.pipelined { |pipeline|
148
166
  queues.each do |queue|
149
- conn.llen("queue:#{queue}")
167
+ pipeline.llen("queue:#{queue}")
150
168
  end
151
- end
152
-
153
- i = 0
154
- array_of_arrays = queues.inject({}) do |memo, queue|
155
- memo[queue] = lengths[i]
156
- i += 1
157
- memo
158
- end.sort_by { |_, size| size }
169
+ }
159
170
 
160
- Hash[array_of_arrays.reverse]
171
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
172
+ array_of_arrays.to_h
161
173
  end
162
174
  end
163
175
  end
164
176
 
165
177
  class History
166
178
  def initialize(days_previous, start_date = nil)
167
- #we only store five years of data in Redis
179
+ # we only store five years of data in Redis
168
180
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
169
181
  @days_previous = days_previous
170
182
  @start_date = start_date || Time.now.utc.to_date
@@ -181,18 +193,12 @@ module Sidekiq
181
193
  private
182
194
 
183
195
  def date_stat_hash(stat)
184
- i = 0
185
196
  stat_hash = {}
186
- keys = []
187
- dates = []
188
-
189
- while i < @days_previous
190
- date = @start_date - i
191
- datestr = date.strftime("%Y-%m-%d")
192
- keys << "stat:#{stat}:#{datestr}"
193
- dates << datestr
194
- i += 1
195
- end
197
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
198
+ date.strftime("%Y-%m-%d")
199
+ }
200
+
201
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
196
202
 
197
203
  begin
198
204
  Sidekiq.redis do |conn|
@@ -200,7 +206,7 @@ module Sidekiq
200
206
  stat_hash[dates[idx]] = value ? value.to_i : 0
201
207
  end
202
208
  end
203
- rescue Redis::CommandError
209
+ rescue RedisConnection.adapter::CommandError
204
210
  # mget will trigger a CROSSSLOT error when run against a Cluster
205
211
  # TODO Someone want to add Cluster support?
206
212
  end
@@ -211,9 +217,10 @@ module Sidekiq
211
217
  end
212
218
 
213
219
  ##
214
- # Encapsulates a queue within Sidekiq.
220
+ # Represents a queue within Sidekiq.
215
221
  # Allows enumeration of all jobs within the queue
216
- # and deletion of jobs.
222
+ # and deletion of jobs. NB: this queue data is real-time
223
+ # and is changing within Redis moment by moment.
217
224
  #
218
225
  # queue = Sidekiq::Queue.new("mailer")
219
226
  # queue.each do |job|
@@ -221,30 +228,34 @@ module Sidekiq
221
228
  # job.args # => [1, 2, 3]
222
229
  # job.delete if job.jid == 'abcdef1234567890'
223
230
  # end
224
- #
225
231
  class Queue
226
232
  include Enumerable
227
- extend RedisScanner
228
233
 
229
234
  ##
230
- # Return all known queues within Redis.
235
+ # Fetch all known queues within Redis.
231
236
  #
237
+ # @return [Array<Sidekiq::Queue>]
232
238
  def self.all
233
- Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
239
+ Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
234
240
  end
235
241
 
236
242
  attr_reader :name
237
243
 
238
- def initialize(name="default")
244
+ # @param name [String] the name of the queue
245
+ def initialize(name = "default")
239
246
  @name = name.to_s
240
247
  @rname = "queue:#{name}"
241
248
  end
242
249
 
250
+ # The current size of the queue within Redis.
251
+ # This value is real-time and can change between calls.
252
+ #
253
+ # @return [Integer] the size
243
254
  def size
244
255
  Sidekiq.redis { |con| con.llen(@rname) }
245
256
  end
246
257
 
247
- # Sidekiq Pro overrides this
258
+ # @return [Boolean] if the queue is currently paused
248
259
  def paused?
249
260
  false
250
261
  end
@@ -253,15 +264,15 @@ module Sidekiq
253
264
  # Calculates this queue's latency, the difference in seconds since the oldest
254
265
  # job in the queue was enqueued.
255
266
  #
256
- # @return Float
267
+ # @return [Float] in seconds
257
268
  def latency
258
- entry = Sidekiq.redis do |conn|
269
+ entry = Sidekiq.redis { |conn|
259
270
  conn.lrange(@rname, -1, -1)
260
- end.first
271
+ }.first
261
272
  return 0 unless entry
262
273
  job = Sidekiq.load_json(entry)
263
274
  now = Time.now.to_f
264
- thence = job['enqueued_at'] || now
275
+ thence = job["enqueued_at"] || now
265
276
  now - thence
266
277
  end
267
278
 
@@ -271,16 +282,16 @@ module Sidekiq
271
282
  page = 0
272
283
  page_size = 50
273
284
 
274
- while true do
285
+ loop do
275
286
  range_start = page * page_size - deleted_size
276
- range_end = range_start + page_size - 1
277
- entries = Sidekiq.redis do |conn|
287
+ range_end = range_start + page_size - 1
288
+ entries = Sidekiq.redis { |conn|
278
289
  conn.lrange @rname, range_start, range_end
279
- end
290
+ }
280
291
  break if entries.empty?
281
292
  page += 1
282
293
  entries.each do |entry|
283
- yield Job.new(entry, @name)
294
+ yield JobRecord.new(entry, @name)
284
295
  end
285
296
  deleted_size = initial_size - size
286
297
  end
@@ -289,41 +300,63 @@ module Sidekiq
289
300
  ##
290
301
  # Find the job with the given JID within this queue.
291
302
  #
292
- # This is a slow, inefficient operation. Do not use under
293
- # normal conditions. Sidekiq Pro contains a faster version.
303
+ # This is a *slow, inefficient* operation. Do not use under
304
+ # normal conditions.
305
+ #
306
+ # @param jid [String] the job_id to look for
307
+ # @return [Sidekiq::JobRecord]
308
+ # @return [nil] if not found
294
309
  def find_job(jid)
295
310
  detect { |j| j.jid == jid }
296
311
  end
297
312
 
313
+ # delete all jobs within this queue
314
+ # @return [Boolean] true
298
315
  def clear
299
316
  Sidekiq.redis do |conn|
300
- conn.multi do
301
- conn.del(@rname)
302
- conn.srem("queues", name)
317
+ conn.multi do |transaction|
318
+ transaction.unlink(@rname)
319
+ transaction.srem("queues", [name])
303
320
  end
304
321
  end
322
+ true
305
323
  end
306
324
  alias_method :💣, :clear
325
+
326
+ # :nodoc:
327
+ # @api private
328
+ def as_json(options = nil)
329
+ {name: name} # 5336
330
+ end
307
331
  end
308
332
 
309
333
  ##
310
- # Encapsulates a pending job within a Sidekiq queue or
311
- # sorted set.
334
+ # Represents a pending job within a Sidekiq queue.
312
335
  #
313
336
  # The job should be considered immutable but may be
314
- # removed from the queue via Job#delete.
315
- #
316
- class Job
337
+ # removed from the queue via JobRecord#delete.
338
+ class JobRecord
339
+ # the parsed Hash of job data
340
+ # @!attribute [r] Item
317
341
  attr_reader :item
342
+ # the underlying String in Redis
343
+ # @!attribute [r] Value
318
344
  attr_reader :value
345
+ # the queue associated with this job
346
+ # @!attribute [r] Queue
347
+ attr_reader :queue
319
348
 
320
- def initialize(item, queue_name=nil)
349
+ # :nodoc:
350
+ # @api private
351
+ def initialize(item, queue_name = nil)
321
352
  @args = nil
322
353
  @value = item
323
354
  @item = item.is_a?(Hash) ? item : parse(item)
324
- @queue = queue_name || @item['queue']
355
+ @queue = queue_name || @item["queue"]
325
356
  end
326
357
 
358
+ # :nodoc:
359
+ # @api private
327
360
  def parse(item)
328
361
  Sidekiq.load_json(item)
329
362
  rescue JSON::ParserError
@@ -335,88 +368,109 @@ module Sidekiq
335
368
  {}
336
369
  end
337
370
 
371
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
372
+ # this class will be the ActiveJob adapter class rather than a specific job.
338
373
  def klass
339
- self['class']
374
+ self["class"]
340
375
  end
341
376
 
342
377
  def display_class
343
378
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
344
- @klass ||= case klass
345
- when /\ASidekiq::Extensions::Delayed/
346
- safe_load(args[0], klass) do |target, method, _|
347
- "#{target}.#{method}"
348
- end
349
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
350
- job_class = @item['wrapped'] || args[0]
351
- if 'ActionMailer::DeliveryJob' == job_class
352
- # MailerClass#mailer_method
353
- args[0]['arguments'][0..1].join('#')
354
- else
355
- job_class
356
- end
357
- else
358
- klass
359
- end
379
+ @klass ||= self["display_class"] || begin
380
+ case klass
381
+ when /\ASidekiq::Extensions::Delayed/
382
+ safe_load(args[0], klass) do |target, method, _|
383
+ "#{target}.#{method}"
384
+ end
385
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
386
+ job_class = @item["wrapped"] || args[0]
387
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
388
+ # MailerClass#mailer_method
389
+ args[0]["arguments"][0..1].join("#")
390
+ else
391
+ job_class
392
+ end
393
+ else
394
+ klass
395
+ end
396
+ end
360
397
  end
361
398
 
362
399
  def display_args
363
400
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
364
401
  @display_args ||= case klass
365
- when /\ASidekiq::Extensions::Delayed/
366
- safe_load(args[0], args) do |_, _, arg|
367
- arg
368
- end
369
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
370
- job_args = self['wrapped'] ? args[0]["arguments"] : []
371
- if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
372
- # remove MailerClass, mailer_method and 'deliver_now'
373
- job_args.drop(3)
374
- else
375
- job_args
376
- end
377
- else
378
- if self['encrypt']
379
- # no point in showing 150+ bytes of random garbage
380
- args[-1] = '[encrypted data]'
381
- end
382
- args
383
- end
402
+ when /\ASidekiq::Extensions::Delayed/
403
+ safe_load(args[0], args) do |_, _, arg, kwarg|
404
+ if !kwarg || kwarg.empty?
405
+ arg
406
+ else
407
+ [arg, kwarg]
408
+ end
409
+ end
410
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
411
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
412
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
413
+ # remove MailerClass, mailer_method and 'deliver_now'
414
+ job_args.drop(3)
415
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
416
+ # remove MailerClass, mailer_method and 'deliver_now'
417
+ job_args.drop(3).first["args"]
418
+ else
419
+ job_args
420
+ end
421
+ else
422
+ if self["encrypt"]
423
+ # no point in showing 150+ bytes of random garbage
424
+ args[-1] = "[encrypted data]"
425
+ end
426
+ args
427
+ end
384
428
  end
385
429
 
386
430
  def args
387
- @args || @item['args']
431
+ @args || @item["args"]
388
432
  end
389
433
 
390
434
  def jid
391
- self['jid']
435
+ self["jid"]
392
436
  end
393
437
 
394
438
  def enqueued_at
395
- self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
439
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
396
440
  end
397
441
 
398
442
  def created_at
399
- Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
443
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
400
444
  end
401
445
 
402
- def queue
403
- @queue
446
+ def tags
447
+ self["tags"] || []
448
+ end
449
+
450
+ def error_backtrace
451
+ # Cache nil values
452
+ if defined?(@error_backtrace)
453
+ @error_backtrace
454
+ else
455
+ value = self["error_backtrace"]
456
+ @error_backtrace = value && uncompress_backtrace(value)
457
+ end
404
458
  end
405
459
 
406
460
  def latency
407
461
  now = Time.now.to_f
408
- now - (@item['enqueued_at'] || @item['created_at'] || now)
462
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
409
463
  end
410
464
 
411
- ##
412
- # Remove this job from the queue.
465
+ # Remove this job from the queue
413
466
  def delete
414
- count = Sidekiq.redis do |conn|
467
+ count = Sidekiq.redis { |conn|
415
468
  conn.lrem("queue:#{@queue}", 1, @value)
416
- end
469
+ }
417
470
  count != 0
418
471
  end
419
472
 
473
+ # Access arbitrary attributes within the job hash
420
474
  def [](name)
421
475
  # nil will happen if the JSON fails to parse.
422
476
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -427,31 +481,55 @@ module Sidekiq
427
481
  private
428
482
 
429
483
  def safe_load(content, default)
430
- begin
431
- yield(*YAML.load(content))
432
- rescue => ex
433
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
434
- # memory yet so the YAML can't be loaded.
435
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
436
- default
484
+ yield(*YAML.load(content))
485
+ rescue => ex
486
+ # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
487
+ # memory yet so the YAML can't be loaded.
488
+ # TODO is this still necessary? Zeitwerk reloader should handle?
489
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
490
+ default
491
+ end
492
+
493
+ def uncompress_backtrace(backtrace)
494
+ if backtrace.is_a?(Array)
495
+ # Handle old jobs with raw Array backtrace format
496
+ backtrace
497
+ else
498
+ decoded = Base64.decode64(backtrace)
499
+ uncompressed = Zlib::Inflate.inflate(decoded)
500
+ begin
501
+ Sidekiq.load_json(uncompressed)
502
+ rescue
503
+ # Handle old jobs with marshalled backtrace format
504
+ # TODO Remove in 7.x
505
+ Marshal.load(uncompressed)
506
+ end
437
507
  end
438
508
  end
439
509
  end
440
510
 
441
- class SortedEntry < Job
511
+ # Represents a job within a Redis sorted set where the score
512
+ # represents a timestamp associated with the job. This timestamp
513
+ # could be the scheduled time for it to run (e.g. scheduled set),
514
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
515
+ class SortedEntry < JobRecord
442
516
  attr_reader :score
443
517
  attr_reader :parent
444
518
 
519
+ # :nodoc:
520
+ # @api private
445
521
  def initialize(parent, score, item)
446
522
  super(item)
447
- @score = score
523
+ @score = Float(score)
448
524
  @parent = parent
449
525
  end
450
526
 
527
+ # The timestamp associated with this entry
451
528
  def at
452
529
  Time.at(score).utc
453
530
  end
454
531
 
532
+ # remove this entry from the sorted set
455
533
  def delete
456
534
  if @value
457
535
  @parent.delete_by_value(@parent.name, @value)
@@ -460,11 +538,17 @@ module Sidekiq
460
538
  end
461
539
  end
462
540
 
541
+ # Change the scheduled time for this job.
542
+ #
543
+ # @param at [Time] the new timestamp for this job
463
544
  def reschedule(at)
464
- delete
465
- @parent.schedule(at, item)
545
+ Sidekiq.redis do |conn|
546
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
547
+ end
466
548
  end
467
549
 
550
+ # Enqueue this job from the scheduled or dead set so it will
551
+ # be executed at some point in the near future.
468
552
  def add_to_queue
469
553
  remove_job do |message|
470
554
  msg = Sidekiq.load_json(message)
@@ -472,16 +556,17 @@ module Sidekiq
472
556
  end
473
557
  end
474
558
 
559
+ # enqueue this job from the retry set so it will be executed
560
+ # at some point in the near future.
475
561
  def retry
476
562
  remove_job do |message|
477
563
  msg = Sidekiq.load_json(message)
478
- msg['retry_count'] -= 1 if msg['retry_count']
564
+ msg["retry_count"] -= 1 if msg["retry_count"]
479
565
  Sidekiq::Client.push(msg)
480
566
  end
481
567
  end
482
568
 
483
- ##
484
- # Place job in the dead set
569
+ # Move this job from its current set into the Dead set.
485
570
  def kill
486
571
  remove_job do |message|
487
572
  DeadSet.new.kill(message)
@@ -489,74 +574,109 @@ module Sidekiq
489
574
  end
490
575
 
491
576
  def error?
492
- !!item['error_class']
577
+ !!item["error_class"]
493
578
  end
494
579
 
495
580
  private
496
581
 
497
582
  def remove_job
498
583
  Sidekiq.redis do |conn|
499
- results = conn.multi do
500
- conn.zrangebyscore(parent.name, score, score)
501
- conn.zremrangebyscore(parent.name, score, score)
502
- end.first
584
+ results = conn.multi { |transaction|
585
+ transaction.zrangebyscore(parent.name, score, score)
586
+ transaction.zremrangebyscore(parent.name, score, score)
587
+ }.first
503
588
 
504
589
  if results.size == 1
505
590
  yield results.first
506
591
  else
507
592
  # multiple jobs with the same score
508
593
  # find the one with the right JID and push it
509
- hash = results.group_by do |message|
594
+ matched, nonmatched = results.partition { |message|
510
595
  if message.index(jid)
511
596
  msg = Sidekiq.load_json(message)
512
- msg['jid'] == jid
597
+ msg["jid"] == jid
513
598
  else
514
599
  false
515
600
  end
516
- end
601
+ }
517
602
 
518
- msg = hash.fetch(true, []).first
603
+ msg = matched.first
519
604
  yield msg if msg
520
605
 
521
606
  # push the rest back onto the sorted set
522
- conn.multi do
523
- hash.fetch(false, []).each do |message|
524
- conn.zadd(parent.name, score.to_f.to_s, message)
607
+ conn.multi do |transaction|
608
+ nonmatched.each do |message|
609
+ transaction.zadd(parent.name, score.to_f.to_s, message)
525
610
  end
526
611
  end
527
612
  end
528
613
  end
529
614
  end
530
-
531
615
  end
532
616
 
617
+ # Base class for all sorted sets within Sidekiq.
533
618
  class SortedSet
534
619
  include Enumerable
535
620
 
621
+ # Redis key of the set
622
+ # @!attribute [r] Name
536
623
  attr_reader :name
537
624
 
625
+ # :nodoc:
626
+ # @api private
538
627
  def initialize(name)
539
628
  @name = name
540
629
  @_size = size
541
630
  end
542
631
 
632
+ # real-time size of the set, will change
543
633
  def size
544
634
  Sidekiq.redis { |c| c.zcard(name) }
545
635
  end
546
636
 
637
+ # Scan through each element of the sorted set, yielding each to the supplied block.
638
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
639
+ #
640
+ # @param match [String] a snippet or regexp to filter matches.
641
+ # @param count [Integer] number of elements to retrieve at a time, default 100
642
+ # @yieldparam [Sidekiq::SortedEntry] each entry
643
+ def scan(match, count = 100)
644
+ return to_enum(:scan, match, count) unless block_given?
645
+
646
+ match = "*#{match}*" unless match.include?("*")
647
+ Sidekiq.redis do |conn|
648
+ conn.zscan_each(name, match: match, count: count) do |entry, score|
649
+ yield SortedEntry.new(self, score, entry)
650
+ end
651
+ end
652
+ end
653
+
654
+ # @return [Boolean] always true
547
655
  def clear
548
656
  Sidekiq.redis do |conn|
549
- conn.del(name)
657
+ conn.unlink(name)
550
658
  end
659
+ true
551
660
  end
552
661
  alias_method :💣, :clear
662
+
663
+ # :nodoc:
664
+ # @api private
665
+ def as_json(options = nil)
666
+ {name: name} # 5336
667
+ end
553
668
  end
554
669
 
670
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
671
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
672
+ # e.g. Batches.
555
673
  class JobSet < SortedSet
556
-
557
- def schedule(timestamp, message)
674
+ # Add a job with the associated timestamp to this set.
675
+ # @param timestamp [Time] the score for the job
676
+ # @param job [Hash] the job data
677
+ def schedule(timestamp, job)
558
678
  Sidekiq.redis do |conn|
559
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
679
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
560
680
  end
561
681
  end
562
682
 
@@ -566,46 +686,66 @@ module Sidekiq
566
686
  page = -1
567
687
  page_size = 50
568
688
 
569
- while true do
689
+ loop do
570
690
  range_start = page * page_size + offset_size
571
- range_end = range_start + page_size - 1
572
- elements = Sidekiq.redis do |conn|
573
- conn.zrange name, range_start, range_end, with_scores: true
574
- end
691
+ range_end = range_start + page_size - 1
692
+ elements = Sidekiq.redis { |conn|
693
+ conn.zrange name, range_start, range_end, withscores: true
694
+ }
575
695
  break if elements.empty?
576
696
  page -= 1
577
- elements.reverse.each do |element, score|
697
+ elements.reverse_each do |element, score|
578
698
  yield SortedEntry.new(self, score, element)
579
699
  end
580
700
  offset_size = initial_size - @_size
581
701
  end
582
702
  end
583
703
 
704
+ ##
705
+ # Fetch jobs that match a given time or Range. Job ID is an
706
+ # optional second argument.
707
+ #
708
+ # @param score [Time,Range] a specific timestamp or range
709
+ # @param jid [String, optional] find a specific JID within the score
710
+ # @return [Array<SortedEntry>] any results found, can be empty
584
711
  def fetch(score, jid = nil)
585
- elements = Sidekiq.redis do |conn|
586
- conn.zrangebyscore(name, score, score)
587
- end
588
-
589
- elements.inject([]) do |result, element|
590
- entry = SortedEntry.new(self, score, element)
591
- if jid
592
- result << entry if entry.jid == jid
712
+ begin_score, end_score =
713
+ if score.is_a?(Range)
714
+ [score.first, score.last]
593
715
  else
594
- result << entry
716
+ [score, score]
595
717
  end
596
- result
718
+
719
+ elements = Sidekiq.redis { |conn|
720
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
721
+ }
722
+
723
+ elements.each_with_object([]) do |element, result|
724
+ data, job_score = element
725
+ entry = SortedEntry.new(self, job_score, data)
726
+ result << entry if jid.nil? || entry.jid == jid
597
727
  end
598
728
  end
599
729
 
600
730
  ##
601
731
  # Find the job with the given JID within this sorted set.
732
+ # *This is a slow O(n) operation*. Do not use for app logic.
602
733
  #
603
- # This is a slow, inefficient operation. Do not use under
604
- # normal conditions. Sidekiq Pro contains a faster version.
734
+ # @param jid [String] the job identifier
735
+ # @return [SortedEntry] the record or nil
605
736
  def find_job(jid)
606
- self.detect { |j| j.jid == jid }
737
+ Sidekiq.redis do |conn|
738
+ conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
739
+ job = JSON.parse(entry)
740
+ matched = job["jid"] == jid
741
+ return SortedEntry.new(self, score, entry) if matched
742
+ end
743
+ end
744
+ nil
607
745
  end
608
746
 
747
+ # :nodoc:
748
+ # @api private
609
749
  def delete_by_value(name, value)
610
750
  Sidekiq.redis do |conn|
611
751
  ret = conn.zrem(name, value)
@@ -614,17 +754,20 @@ module Sidekiq
614
754
  end
615
755
  end
616
756
 
757
+ # :nodoc:
758
+ # @api private
617
759
  def delete_by_jid(score, jid)
618
760
  Sidekiq.redis do |conn|
619
761
  elements = conn.zrangebyscore(name, score, score)
620
762
  elements.each do |element|
621
- message = Sidekiq.load_json(element)
622
- if message["jid"] == jid
623
- ret = conn.zrem(name, element)
624
- @_size -= 1 if ret
625
- break ret
763
+ if element.index(jid)
764
+ message = Sidekiq.load_json(element)
765
+ if message["jid"] == jid
766
+ ret = conn.zrem(name, element)
767
+ @_size -= 1 if ret
768
+ break ret
769
+ end
626
770
  end
627
- false
628
771
  end
629
772
  end
630
773
  end
@@ -633,10 +776,10 @@ module Sidekiq
633
776
  end
634
777
 
635
778
  ##
636
- # Allows enumeration of scheduled jobs within Sidekiq.
779
+ # The set of scheduled jobs within Sidekiq.
637
780
  # Based on this, you can search/filter for jobs. Here's an
638
- # example where I'm selecting all jobs of a certain type
639
- # and deleting them from the schedule queue.
781
+ # example where I'm selecting jobs based on some complex logic
782
+ # and deleting them from the scheduled set.
640
783
  #
641
784
  # r = Sidekiq::ScheduledSet.new
642
785
  # r.select do |scheduled|
@@ -646,12 +789,12 @@ module Sidekiq
646
789
  # end.map(&:delete)
647
790
  class ScheduledSet < JobSet
648
791
  def initialize
649
- super 'schedule'
792
+ super "schedule"
650
793
  end
651
794
  end
652
795
 
653
796
  ##
654
- # Allows enumeration of retries within Sidekiq.
797
+ # The set of retries within Sidekiq.
655
798
  # Based on this, you can search/filter for jobs. Here's an
656
799
  # example where I'm selecting all jobs of a certain type
657
800
  # and deleting them from the retry queue.
@@ -664,37 +807,39 @@ module Sidekiq
664
807
  # end.map(&:delete)
665
808
  class RetrySet < JobSet
666
809
  def initialize
667
- super 'retry'
810
+ super "retry"
668
811
  end
669
812
 
813
+ # Enqueues all jobs pending within the retry set.
670
814
  def retry_all
671
- while size > 0
672
- each(&:retry)
673
- end
815
+ each(&:retry) while size > 0
674
816
  end
675
817
 
818
+ # Kills all jobs pending within the retry set.
676
819
  def kill_all
677
- while size > 0
678
- each(&:kill)
679
- end
820
+ each(&:kill) while size > 0
680
821
  end
681
822
  end
682
823
 
683
824
  ##
684
- # Allows enumeration of dead jobs within Sidekiq.
825
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
826
+ # their retries and are held in this set pending some sort of manual
827
+ # fix. They will be removed after 6 months (dead_timeout) if not.
685
828
  #
686
829
  class DeadSet < JobSet
687
830
  def initialize
688
- super 'dead'
831
+ super "dead"
689
832
  end
690
833
 
691
- def kill(message, opts={})
834
+ # Add the given job to the Dead set.
835
+ # @param message [String] the job data as JSON
836
+ def kill(message, opts = {})
692
837
  now = Time.now.to_f
693
838
  Sidekiq.redis do |conn|
694
- conn.multi do
695
- conn.zadd(name, now.to_s, message)
696
- conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
697
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
839
+ conn.multi do |transaction|
840
+ transaction.zadd(name, now.to_s, message)
841
+ transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
842
+ transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
698
843
  end
699
844
  end
700
845
 
@@ -709,103 +854,128 @@ module Sidekiq
709
854
  true
710
855
  end
711
856
 
857
+ # Enqueue all dead jobs
712
858
  def retry_all
713
- while size > 0
714
- each(&:retry)
715
- end
859
+ each(&:retry) while size > 0
716
860
  end
717
861
 
862
+ # The maximum size of the Dead set. Older entries will be trimmed
863
+ # to stay within this limit. Default value is 10,000.
718
864
  def self.max_jobs
719
- Sidekiq.options[:dead_max_jobs]
865
+ Sidekiq[:dead_max_jobs]
720
866
  end
721
867
 
868
+ # The time limit for entries within the Dead set. Older entries will be thrown away.
869
+ # Default value is six months.
722
870
  def self.timeout
723
- Sidekiq.options[:dead_timeout_in_seconds]
871
+ Sidekiq[:dead_timeout_in_seconds]
724
872
  end
725
873
  end
726
874
 
727
875
  ##
728
876
  # Enumerates the set of Sidekiq processes which are actively working
729
- # right now. Each process send a heartbeat to Redis every 5 seconds
877
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
730
878
  # so this set should be relatively accurate, barring network partitions.
731
879
  #
732
- # Yields a Sidekiq::Process.
880
+ # @yieldparam [Sidekiq::Process]
733
881
  #
734
882
  class ProcessSet
735
883
  include Enumerable
736
- include RedisScanner
737
884
 
738
- def initialize(clean_plz=true)
885
+ # :nodoc:
886
+ # @api private
887
+ def initialize(clean_plz = true)
739
888
  cleanup if clean_plz
740
889
  end
741
890
 
742
891
  # Cleans up dead processes recorded in Redis.
743
892
  # Returns the number of processes cleaned.
893
+ # :nodoc:
894
+ # @api private
744
895
  def cleanup
896
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
745
897
  count = 0
746
898
  Sidekiq.redis do |conn|
747
- procs = sscan(conn, 'processes').sort
748
- heartbeats = conn.pipelined do
899
+ procs = conn.sscan_each("processes").to_a.sort
900
+ heartbeats = conn.pipelined { |pipeline|
749
901
  procs.each do |key|
750
- conn.hget(key, 'info')
902
+ pipeline.hget(key, "info")
751
903
  end
752
- end
904
+ }
753
905
 
754
906
  # the hash named key has an expiry of 60 seconds.
755
907
  # if it's not found, that means the process has not reported
756
908
  # in to Redis and probably died.
757
- to_prune = []
758
- heartbeats.each_with_index do |beat, i|
759
- to_prune << procs[i] if beat.nil?
760
- end
761
- count = conn.srem('processes', to_prune) unless to_prune.empty?
909
+ to_prune = procs.select.with_index { |proc, i|
910
+ heartbeats[i].nil?
911
+ }
912
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
762
913
  end
763
914
  count
764
915
  end
765
916
 
766
917
  def each
767
- procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
918
+ result = Sidekiq.redis { |conn|
919
+ procs = conn.sscan_each("processes").to_a.sort
768
920
 
769
- Sidekiq.redis do |conn|
770
921
  # We're making a tradeoff here between consuming more memory instead of
771
922
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
772
923
  # you'll be happier this way
773
- result = conn.pipelined do
924
+ conn.pipelined do |pipeline|
774
925
  procs.each do |key|
775
- conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
926
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
776
927
  end
777
928
  end
929
+ }
778
930
 
779
- result.each do |info, busy, at_s, quiet|
780
- # If a process is stopped between when we query Redis for `procs` and
781
- # when we query for `result`, we will have an item in `result` that is
782
- # composed of `nil` values.
783
- next if info.nil?
784
-
785
- hash = Sidekiq.load_json(info)
786
- yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
787
- end
931
+ result.each do |info, busy, at_s, quiet, rss, rtt|
932
+ # If a process is stopped between when we query Redis for `procs` and
933
+ # when we query for `result`, we will have an item in `result` that is
934
+ # composed of `nil` values.
935
+ next if info.nil?
936
+
937
+ hash = Sidekiq.load_json(info)
938
+ yield Process.new(hash.merge("busy" => busy.to_i,
939
+ "beat" => at_s.to_f,
940
+ "quiet" => quiet,
941
+ "rss" => rss.to_i,
942
+ "rtt_us" => rtt.to_i))
788
943
  end
789
-
790
- nil
791
944
  end
792
945
 
793
946
  # This method is not guaranteed accurate since it does not prune the set
794
947
  # based on current heartbeat. #each does that and ensures the set only
795
948
  # contains Sidekiq processes which have sent a heartbeat within the last
796
949
  # 60 seconds.
950
+ # @return [Integer] current number of registered Sidekiq processes
797
951
  def size
798
- Sidekiq.redis { |conn| conn.scard('processes') }
952
+ Sidekiq.redis { |conn| conn.scard("processes") }
953
+ end
954
+
955
+ # Total number of threads available to execute jobs.
956
+ # For Sidekiq Enterprise customers this number (in production) must be
957
+ # less than or equal to your licensed concurrency.
958
+ # @return [Integer] the sum of process concurrency
959
+ def total_concurrency
960
+ sum { |x| x["concurrency"].to_i }
961
+ end
962
+
963
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
964
+ def total_rss_in_kb
965
+ sum { |x| x["rss"].to_i }
799
966
  end
967
+ alias_method :total_rss, :total_rss_in_kb
800
968
 
801
969
  # Returns the identity of the current cluster leader or "" if no leader.
802
970
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
803
971
  # or Sidekiq Pro.
972
+ # @return [String] Identity of cluster leader
973
+ # @return [String] empty string if no leader
804
974
  def leader
805
975
  @leader ||= begin
806
- x = Sidekiq.redis {|c| c.get("dear-leader") }
976
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
807
977
  # need a non-falsy value so we can memoize
808
- x = "" unless x
978
+ x ||= ""
809
979
  x
810
980
  end
811
981
  end
@@ -827,16 +997,18 @@ module Sidekiq
827
997
  # 'identity' => <unique string identifying the process>,
828
998
  # }
829
999
  class Process
1000
+ # :nodoc:
1001
+ # @api private
830
1002
  def initialize(hash)
831
1003
  @attribs = hash
832
1004
  end
833
1005
 
834
1006
  def tag
835
- self['tag']
1007
+ self["tag"]
836
1008
  end
837
1009
 
838
1010
  def labels
839
- Array(self['labels'])
1011
+ Array(self["labels"])
840
1012
  end
841
1013
 
842
1014
  def [](key)
@@ -844,23 +1016,40 @@ module Sidekiq
844
1016
  end
845
1017
 
846
1018
  def identity
847
- self['identity']
1019
+ self["identity"]
848
1020
  end
849
1021
 
1022
+ def queues
1023
+ self["queues"]
1024
+ end
1025
+
1026
+ # Signal this process to stop processing new jobs.
1027
+ # It will continue to execute jobs it has already fetched.
1028
+ # This method is *asynchronous* and it can take 5-10
1029
+ # seconds for the process to quiet.
850
1030
  def quiet!
851
- signal('TSTP')
1031
+ signal("TSTP")
852
1032
  end
853
1033
 
1034
+ # Signal this process to shutdown.
1035
+ # It will shutdown within its configured :timeout value, default 25 seconds.
1036
+ # This method is *asynchronous* and it can take 5-10
1037
+ # seconds for the process to start shutting down.
854
1038
  def stop!
855
- signal('TERM')
1039
+ signal("TERM")
856
1040
  end
857
1041
 
1042
+ # Signal this process to log backtraces for all threads.
1043
+ # Useful if you have a frozen or deadlocked process which is
1044
+ # still sending a heartbeat.
1045
+ # This method is *asynchronous* and it can take 5-10 seconds.
858
1046
  def dump_threads
859
- signal('TTIN')
1047
+ signal("TTIN")
860
1048
  end
861
1049
 
1050
+ # @return [Boolean] true if this process is quiet or shutting down
862
1051
  def stopping?
863
- self['quiet'] == 'true'
1052
+ self["quiet"] == "true"
864
1053
  end
865
1054
 
866
1055
  private
@@ -868,18 +1057,17 @@ module Sidekiq
868
1057
  def signal(sig)
869
1058
  key = "#{identity}-signals"
870
1059
  Sidekiq.redis do |c|
871
- c.multi do
872
- c.lpush(key, sig)
873
- c.expire(key, 60)
1060
+ c.multi do |transaction|
1061
+ transaction.lpush(key, sig)
1062
+ transaction.expire(key, 60)
874
1063
  end
875
1064
  end
876
1065
  end
877
-
878
1066
  end
879
1067
 
880
1068
  ##
881
- # A worker is a thread that is currently processing a job.
882
- # Programmatic access to the current active worker set.
1069
+ # The WorkSet stores the work being done by this Sidekiq cluster.
1070
+ # It tracks the process and thread working on each job.
883
1071
  #
884
1072
  # WARNING WARNING WARNING
885
1073
  #
@@ -887,34 +1075,40 @@ module Sidekiq
887
1075
  # If you call #size => 5 and then expect #each to be
888
1076
  # called 5 times, you're going to have a bad time.
889
1077
  #
890
- # workers = Sidekiq::Workers.new
891
- # workers.size => 2
892
- # workers.each do |process_id, thread_id, work|
1078
+ # works = Sidekiq::WorkSet.new
1079
+ # works.size => 2
1080
+ # works.each do |process_id, thread_id, work|
893
1081
  # # process_id is a unique identifier per Sidekiq process
894
1082
  # # thread_id is a unique identifier per thread
895
1083
  # # work is a Hash which looks like:
896
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
1084
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
897
1085
  # # run_at is an epoch Integer.
898
1086
  # end
899
1087
  #
900
- class Workers
1088
+ class WorkSet
901
1089
  include Enumerable
902
- include RedisScanner
903
1090
 
904
- def each
1091
+ def each(&block)
1092
+ results = []
905
1093
  Sidekiq.redis do |conn|
906
- procs = sscan(conn, 'processes')
1094
+ procs = conn.sscan_each("processes").to_a
907
1095
  procs.sort.each do |key|
908
- valid, workers = conn.pipelined do
909
- conn.exists?(key)
910
- conn.hgetall("#{key}:workers")
911
- end
1096
+ valid, workers = conn.pipelined { |pipeline|
1097
+ pipeline.exists?(key)
1098
+ pipeline.hgetall("#{key}:work")
1099
+ }
912
1100
  next unless valid
913
1101
  workers.each_pair do |tid, json|
914
- yield key, tid, Sidekiq.load_json(json)
1102
+ hsh = Sidekiq.load_json(json)
1103
+ p = hsh["payload"]
1104
+ # avoid breaking API, this is a side effect of the JSON optimization in #4316
1105
+ hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
1106
+ results << [key, tid, hsh]
915
1107
  end
916
1108
  end
917
1109
  end
1110
+
1111
+ results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
918
1112
  end
919
1113
 
920
1114
  # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -925,18 +1119,21 @@ module Sidekiq
925
1119
  # which can easily get out of sync with crashy processes.
926
1120
  def size
927
1121
  Sidekiq.redis do |conn|
928
- procs = sscan(conn, 'processes')
1122
+ procs = conn.sscan_each("processes").to_a
929
1123
  if procs.empty?
930
1124
  0
931
1125
  else
932
- conn.pipelined do
1126
+ conn.pipelined { |pipeline|
933
1127
  procs.each do |key|
934
- conn.hget(key, 'busy')
1128
+ pipeline.hget(key, "busy")
935
1129
  end
936
- end.map(&:to_i).inject(:+)
1130
+ }.sum(&:to_i)
937
1131
  end
938
1132
  end
939
1133
  end
940
1134
  end
941
-
1135
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
1136
+ # Is "worker" a process, a type of job, a thread? Undefined!
1137
+ # WorkSet better describes the data.
1138
+ Workers = WorkSet
942
1139
  end