sidekiq 5.2.10 → 6.5.12

Sign up to get free protection for your applications and to get access to all the features.
Files changed (126) hide show
  1. checksums.yaml +4 -4
  2. data/Changes.md +422 -1
  3. data/LICENSE +3 -3
  4. data/README.md +24 -35
  5. data/bin/sidekiq +27 -3
  6. data/bin/sidekiqload +79 -67
  7. data/bin/sidekiqmon +8 -0
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +527 -310
  13. data/lib/sidekiq/cli.rb +204 -208
  14. data/lib/sidekiq/client.rb +78 -82
  15. data/lib/sidekiq/component.rb +65 -0
  16. data/lib/sidekiq/delay.rb +8 -7
  17. data/lib/sidekiq/extensions/action_mailer.rb +13 -22
  18. data/lib/sidekiq/extensions/active_record.rb +13 -10
  19. data/lib/sidekiq/extensions/class_methods.rb +14 -11
  20. data/lib/sidekiq/extensions/generic_proxy.rb +7 -5
  21. data/lib/sidekiq/fetch.rb +50 -40
  22. data/lib/sidekiq/job.rb +13 -0
  23. data/lib/sidekiq/job_logger.rb +33 -7
  24. data/lib/sidekiq/job_retry.rb +126 -106
  25. data/lib/sidekiq/job_util.rb +71 -0
  26. data/lib/sidekiq/launcher.rb +177 -83
  27. data/lib/sidekiq/logger.rb +156 -0
  28. data/lib/sidekiq/manager.rb +40 -41
  29. data/lib/sidekiq/metrics/deploy.rb +47 -0
  30. data/lib/sidekiq/metrics/query.rb +153 -0
  31. data/lib/sidekiq/metrics/shared.rb +94 -0
  32. data/lib/sidekiq/metrics/tracking.rb +134 -0
  33. data/lib/sidekiq/middleware/chain.rb +102 -46
  34. data/lib/sidekiq/middleware/current_attributes.rb +63 -0
  35. data/lib/sidekiq/middleware/i18n.rb +7 -7
  36. data/lib/sidekiq/middleware/modules.rb +21 -0
  37. data/lib/sidekiq/monitor.rb +133 -0
  38. data/lib/sidekiq/paginator.rb +28 -16
  39. data/lib/sidekiq/processor.rb +104 -97
  40. data/lib/sidekiq/rails.rb +46 -37
  41. data/lib/sidekiq/redis_client_adapter.rb +154 -0
  42. data/lib/sidekiq/redis_connection.rb +108 -77
  43. data/lib/sidekiq/ring_buffer.rb +29 -0
  44. data/lib/sidekiq/scheduled.rb +105 -42
  45. data/lib/sidekiq/sd_notify.rb +149 -0
  46. data/lib/sidekiq/systemd.rb +24 -0
  47. data/lib/sidekiq/testing/inline.rb +6 -5
  48. data/lib/sidekiq/testing.rb +68 -58
  49. data/lib/sidekiq/transaction_aware_client.rb +45 -0
  50. data/lib/sidekiq/version.rb +2 -1
  51. data/lib/sidekiq/web/action.rb +15 -11
  52. data/lib/sidekiq/web/application.rb +103 -77
  53. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  54. data/lib/sidekiq/web/helpers.rb +125 -95
  55. data/lib/sidekiq/web/router.rb +23 -19
  56. data/lib/sidekiq/web.rb +65 -105
  57. data/lib/sidekiq/worker.rb +259 -109
  58. data/lib/sidekiq.rb +170 -62
  59. data/sidekiq.gemspec +23 -16
  60. data/web/assets/images/apple-touch-icon.png +0 -0
  61. data/web/assets/javascripts/application.js +113 -61
  62. data/web/assets/javascripts/chart.min.js +13 -0
  63. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  64. data/web/assets/javascripts/dashboard.js +53 -89
  65. data/web/assets/javascripts/graph.js +16 -0
  66. data/web/assets/javascripts/metrics.js +262 -0
  67. data/web/assets/stylesheets/application-dark.css +143 -0
  68. data/web/assets/stylesheets/application-rtl.css +0 -4
  69. data/web/assets/stylesheets/application.css +88 -233
  70. data/web/locales/ar.yml +8 -2
  71. data/web/locales/de.yml +14 -2
  72. data/web/locales/el.yml +43 -19
  73. data/web/locales/en.yml +13 -1
  74. data/web/locales/es.yml +18 -2
  75. data/web/locales/fr.yml +10 -3
  76. data/web/locales/ja.yml +14 -1
  77. data/web/locales/lt.yml +83 -0
  78. data/web/locales/pl.yml +4 -4
  79. data/web/locales/pt-br.yml +27 -9
  80. data/web/locales/ru.yml +4 -0
  81. data/web/locales/vi.yml +83 -0
  82. data/web/locales/zh-cn.yml +36 -11
  83. data/web/locales/zh-tw.yml +32 -7
  84. data/web/views/_footer.erb +1 -1
  85. data/web/views/_job_info.erb +3 -2
  86. data/web/views/_nav.erb +1 -1
  87. data/web/views/_poll_link.erb +2 -5
  88. data/web/views/_summary.erb +7 -7
  89. data/web/views/busy.erb +61 -22
  90. data/web/views/dashboard.erb +23 -14
  91. data/web/views/dead.erb +3 -3
  92. data/web/views/layout.erb +3 -1
  93. data/web/views/metrics.erb +69 -0
  94. data/web/views/metrics_for_job.erb +87 -0
  95. data/web/views/morgue.erb +9 -6
  96. data/web/views/queue.erb +23 -10
  97. data/web/views/queues.erb +10 -2
  98. data/web/views/retries.erb +11 -8
  99. data/web/views/retry.erb +3 -3
  100. data/web/views/scheduled.erb +5 -2
  101. metadata +58 -63
  102. data/.circleci/config.yml +0 -61
  103. data/.github/contributing.md +0 -32
  104. data/.github/issue_template.md +0 -11
  105. data/.gitignore +0 -15
  106. data/.travis.yml +0 -11
  107. data/3.0-Upgrade.md +0 -70
  108. data/4.0-Upgrade.md +0 -53
  109. data/5.0-Upgrade.md +0 -56
  110. data/COMM-LICENSE +0 -97
  111. data/Ent-Changes.md +0 -238
  112. data/Gemfile +0 -19
  113. data/Pro-2.0-Upgrade.md +0 -138
  114. data/Pro-3.0-Upgrade.md +0 -44
  115. data/Pro-4.0-Upgrade.md +0 -35
  116. data/Pro-Changes.md +0 -759
  117. data/Rakefile +0 -9
  118. data/bin/sidekiqctl +0 -20
  119. data/code_of_conduct.md +0 -50
  120. data/lib/generators/sidekiq/worker_generator.rb +0 -49
  121. data/lib/sidekiq/core_ext.rb +0 -1
  122. data/lib/sidekiq/ctl.rb +0 -221
  123. data/lib/sidekiq/exception_handler.rb +0 -29
  124. data/lib/sidekiq/logging.rb +0 -122
  125. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
  126. data/lib/sidekiq/util.rb +0 -66
data/lib/sidekiq/api.rb CHANGED
@@ -1,26 +1,36 @@
1
1
  # frozen_string_literal: true
2
- require 'sidekiq'
3
2
 
4
- module Sidekiq
3
+ require "sidekiq"
5
4
 
6
- module RedisScanner
7
- def sscan(conn, key)
8
- cursor = '0'
9
- result = []
10
- loop do
11
- cursor, values = conn.sscan(key, cursor)
12
- result.push(*values)
13
- break if cursor == '0'
14
- end
15
- result
16
- end
17
- end
5
+ require "zlib"
6
+ require "set"
7
+ require "base64"
18
8
 
19
- class Stats
20
- include RedisScanner
9
+ if ENV["SIDEKIQ_METRICS_BETA"]
10
+ require "sidekiq/metrics/deploy"
11
+ require "sidekiq/metrics/query"
12
+ end
21
13
 
14
+ #
15
+ # Sidekiq's Data API provides a Ruby object model on top
16
+ # of Sidekiq's runtime data in Redis. This API should never
17
+ # be used within application code for business logic.
18
+ #
19
+ # The Sidekiq server process never uses this API: all data
20
+ # manipulation is done directly for performance reasons to
21
+ # ensure we are using Redis as efficiently as possible at
22
+ # every callsite.
23
+ #
24
+
25
+ module Sidekiq
26
+ # Retrieve runtime statistics from Redis regarding
27
+ # this Sidekiq cluster.
28
+ #
29
+ # stat = Sidekiq::Stats.new
30
+ # stat.processed
31
+ class Stats
22
32
  def initialize
23
- fetch_stats!
33
+ fetch_stats_fast!
24
34
  end
25
35
 
26
36
  def processed
@@ -63,62 +73,82 @@ module Sidekiq
63
73
  Sidekiq::Stats::Queues.new.lengths
64
74
  end
65
75
 
66
- def fetch_stats!
67
- pipe1_res = Sidekiq.redis do |conn|
68
- conn.pipelined do
69
- conn.get('stat:processed')
70
- conn.get('stat:failed')
71
- conn.zcard('schedule')
72
- conn.zcard('retry')
73
- conn.zcard('dead')
74
- conn.scard('processes')
75
- conn.lrange('queue:default', -1, -1)
76
+ # O(1) redis calls
77
+ # @api private
78
+ def fetch_stats_fast!
79
+ pipe1_res = Sidekiq.redis { |conn|
80
+ conn.pipelined do |pipeline|
81
+ pipeline.get("stat:processed")
82
+ pipeline.get("stat:failed")
83
+ pipeline.zcard("schedule")
84
+ pipeline.zcard("retry")
85
+ pipeline.zcard("dead")
86
+ pipeline.scard("processes")
87
+ pipeline.lrange("queue:default", -1, -1)
76
88
  end
77
- end
89
+ }
78
90
 
79
- processes = Sidekiq.redis do |conn|
80
- sscan(conn, 'processes')
91
+ default_queue_latency = if (entry = pipe1_res[6].first)
92
+ job = begin
93
+ Sidekiq.load_json(entry)
94
+ rescue
95
+ {}
96
+ end
97
+ now = Time.now.to_f
98
+ thence = job["enqueued_at"] || now
99
+ now - thence
100
+ else
101
+ 0
81
102
  end
82
103
 
83
- queues = Sidekiq.redis do |conn|
84
- sscan(conn, 'queues')
85
- end
104
+ @stats = {
105
+ processed: pipe1_res[0].to_i,
106
+ failed: pipe1_res[1].to_i,
107
+ scheduled_size: pipe1_res[2],
108
+ retry_size: pipe1_res[3],
109
+ dead_size: pipe1_res[4],
110
+ processes_size: pipe1_res[5],
111
+
112
+ default_queue_latency: default_queue_latency
113
+ }
114
+ end
115
+
116
+ # O(number of processes + number of queues) redis calls
117
+ # @api private
118
+ def fetch_stats_slow!
119
+ processes = Sidekiq.redis { |conn|
120
+ conn.sscan_each("processes").to_a
121
+ }
122
+
123
+ queues = Sidekiq.redis { |conn|
124
+ conn.sscan_each("queues").to_a
125
+ }
86
126
 
87
- pipe2_res = Sidekiq.redis do |conn|
88
- conn.pipelined do
89
- processes.each {|key| conn.hget(key, 'busy') }
90
- queues.each {|queue| conn.llen("queue:#{queue}") }
127
+ pipe2_res = Sidekiq.redis { |conn|
128
+ conn.pipelined do |pipeline|
129
+ processes.each { |key| pipeline.hget(key, "busy") }
130
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
91
131
  end
92
- end
132
+ }
93
133
 
94
134
  s = processes.size
95
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
96
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
135
+ workers_size = pipe2_res[0...s].sum(&:to_i)
136
+ enqueued = pipe2_res[s..-1].sum(&:to_i)
97
137
 
98
- default_queue_latency = if (entry = pipe1_res[6].first)
99
- job = Sidekiq.load_json(entry) rescue {}
100
- now = Time.now.to_f
101
- thence = job['enqueued_at'] || now
102
- now - thence
103
- else
104
- 0
105
- end
106
- @stats = {
107
- processed: pipe1_res[0].to_i,
108
- failed: pipe1_res[1].to_i,
109
- scheduled_size: pipe1_res[2],
110
- retry_size: pipe1_res[3],
111
- dead_size: pipe1_res[4],
112
- processes_size: pipe1_res[5],
113
-
114
- default_queue_latency: default_queue_latency,
115
- workers_size: workers_size,
116
- enqueued: enqueued
117
- }
138
+ @stats[:workers_size] = workers_size
139
+ @stats[:enqueued] = enqueued
140
+ @stats
141
+ end
142
+
143
+ # @api private
144
+ def fetch_stats!
145
+ fetch_stats_fast!
146
+ fetch_stats_slow!
118
147
  end
119
148
 
149
+ # @api private
120
150
  def reset(*stats)
121
- all = %w(failed processed)
151
+ all = %w[failed processed]
122
152
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
123
153
 
124
154
  mset_args = []
@@ -134,37 +164,30 @@ module Sidekiq
134
164
  private
135
165
 
136
166
  def stat(s)
137
- @stats[s]
167
+ fetch_stats_slow! if @stats[s].nil?
168
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
138
169
  end
139
170
 
140
171
  class Queues
141
- include RedisScanner
142
-
143
172
  def lengths
144
173
  Sidekiq.redis do |conn|
145
- queues = sscan(conn, 'queues')
174
+ queues = conn.sscan_each("queues").to_a
146
175
 
147
- lengths = conn.pipelined do
176
+ lengths = conn.pipelined { |pipeline|
148
177
  queues.each do |queue|
149
- conn.llen("queue:#{queue}")
178
+ pipeline.llen("queue:#{queue}")
150
179
  end
151
- end
152
-
153
- i = 0
154
- array_of_arrays = queues.inject({}) do |memo, queue|
155
- memo[queue] = lengths[i]
156
- i += 1
157
- memo
158
- end.sort_by { |_, size| size }
180
+ }
159
181
 
160
- Hash[array_of_arrays.reverse]
182
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
183
+ array_of_arrays.to_h
161
184
  end
162
185
  end
163
186
  end
164
187
 
165
188
  class History
166
189
  def initialize(days_previous, start_date = nil)
167
- #we only store five years of data in Redis
190
+ # we only store five years of data in Redis
168
191
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
169
192
  @days_previous = days_previous
170
193
  @start_date = start_date || Time.now.utc.to_date
@@ -181,18 +204,12 @@ module Sidekiq
181
204
  private
182
205
 
183
206
  def date_stat_hash(stat)
184
- i = 0
185
207
  stat_hash = {}
186
- keys = []
187
- dates = []
188
-
189
- while i < @days_previous
190
- date = @start_date - i
191
- datestr = date.strftime("%Y-%m-%d")
192
- keys << "stat:#{stat}:#{datestr}"
193
- dates << datestr
194
- i += 1
195
- end
208
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
209
+ date.strftime("%Y-%m-%d")
210
+ }
211
+
212
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
196
213
 
197
214
  begin
198
215
  Sidekiq.redis do |conn|
@@ -200,7 +217,7 @@ module Sidekiq
200
217
  stat_hash[dates[idx]] = value ? value.to_i : 0
201
218
  end
202
219
  end
203
- rescue Redis::CommandError
220
+ rescue RedisConnection.adapter::CommandError
204
221
  # mget will trigger a CROSSSLOT error when run against a Cluster
205
222
  # TODO Someone want to add Cluster support?
206
223
  end
@@ -211,9 +228,10 @@ module Sidekiq
211
228
  end
212
229
 
213
230
  ##
214
- # Encapsulates a queue within Sidekiq.
231
+ # Represents a queue within Sidekiq.
215
232
  # Allows enumeration of all jobs within the queue
216
- # and deletion of jobs.
233
+ # and deletion of jobs. NB: this queue data is real-time
234
+ # and is changing within Redis moment by moment.
217
235
  #
218
236
  # queue = Sidekiq::Queue.new("mailer")
219
237
  # queue.each do |job|
@@ -221,30 +239,34 @@ module Sidekiq
221
239
  # job.args # => [1, 2, 3]
222
240
  # job.delete if job.jid == 'abcdef1234567890'
223
241
  # end
224
- #
225
242
  class Queue
226
243
  include Enumerable
227
- extend RedisScanner
228
244
 
229
245
  ##
230
- # Return all known queues within Redis.
246
+ # Fetch all known queues within Redis.
231
247
  #
248
+ # @return [Array<Sidekiq::Queue>]
232
249
  def self.all
233
- Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
250
+ Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
234
251
  end
235
252
 
236
253
  attr_reader :name
237
254
 
238
- def initialize(name="default")
255
+ # @param name [String] the name of the queue
256
+ def initialize(name = "default")
239
257
  @name = name.to_s
240
258
  @rname = "queue:#{name}"
241
259
  end
242
260
 
261
+ # The current size of the queue within Redis.
262
+ # This value is real-time and can change between calls.
263
+ #
264
+ # @return [Integer] the size
243
265
  def size
244
266
  Sidekiq.redis { |con| con.llen(@rname) }
245
267
  end
246
268
 
247
- # Sidekiq Pro overrides this
269
+ # @return [Boolean] if the queue is currently paused
248
270
  def paused?
249
271
  false
250
272
  end
@@ -253,15 +275,15 @@ module Sidekiq
253
275
  # Calculates this queue's latency, the difference in seconds since the oldest
254
276
  # job in the queue was enqueued.
255
277
  #
256
- # @return Float
278
+ # @return [Float] in seconds
257
279
  def latency
258
- entry = Sidekiq.redis do |conn|
280
+ entry = Sidekiq.redis { |conn|
259
281
  conn.lrange(@rname, -1, -1)
260
- end.first
282
+ }.first
261
283
  return 0 unless entry
262
284
  job = Sidekiq.load_json(entry)
263
285
  now = Time.now.to_f
264
- thence = job['enqueued_at'] || now
286
+ thence = job["enqueued_at"] || now
265
287
  now - thence
266
288
  end
267
289
 
@@ -271,16 +293,16 @@ module Sidekiq
271
293
  page = 0
272
294
  page_size = 50
273
295
 
274
- while true do
296
+ loop do
275
297
  range_start = page * page_size - deleted_size
276
- range_end = range_start + page_size - 1
277
- entries = Sidekiq.redis do |conn|
298
+ range_end = range_start + page_size - 1
299
+ entries = Sidekiq.redis { |conn|
278
300
  conn.lrange @rname, range_start, range_end
279
- end
301
+ }
280
302
  break if entries.empty?
281
303
  page += 1
282
304
  entries.each do |entry|
283
- yield Job.new(entry, @name)
305
+ yield JobRecord.new(entry, @name)
284
306
  end
285
307
  deleted_size = initial_size - size
286
308
  end
@@ -289,41 +311,63 @@ module Sidekiq
289
311
  ##
290
312
  # Find the job with the given JID within this queue.
291
313
  #
292
- # This is a slow, inefficient operation. Do not use under
293
- # normal conditions. Sidekiq Pro contains a faster version.
314
+ # This is a *slow, inefficient* operation. Do not use under
315
+ # normal conditions.
316
+ #
317
+ # @param jid [String] the job_id to look for
318
+ # @return [Sidekiq::JobRecord]
319
+ # @return [nil] if not found
294
320
  def find_job(jid)
295
321
  detect { |j| j.jid == jid }
296
322
  end
297
323
 
324
+ # delete all jobs within this queue
325
+ # @return [Boolean] true
298
326
  def clear
299
327
  Sidekiq.redis do |conn|
300
- conn.multi do
301
- conn.del(@rname)
302
- conn.srem("queues", name)
328
+ conn.multi do |transaction|
329
+ transaction.unlink(@rname)
330
+ transaction.srem("queues", [name])
303
331
  end
304
332
  end
333
+ true
305
334
  end
306
335
  alias_method :💣, :clear
336
+
337
+ # :nodoc:
338
+ # @api private
339
+ def as_json(options = nil)
340
+ {name: name} # 5336
341
+ end
307
342
  end
308
343
 
309
344
  ##
310
- # Encapsulates a pending job within a Sidekiq queue or
311
- # sorted set.
345
+ # Represents a pending job within a Sidekiq queue.
312
346
  #
313
347
  # The job should be considered immutable but may be
314
- # removed from the queue via Job#delete.
315
- #
316
- class Job
348
+ # removed from the queue via JobRecord#delete.
349
+ class JobRecord
350
+ # the parsed Hash of job data
351
+ # @!attribute [r] Item
317
352
  attr_reader :item
353
+ # the underlying String in Redis
354
+ # @!attribute [r] Value
318
355
  attr_reader :value
356
+ # the queue associated with this job
357
+ # @!attribute [r] Queue
358
+ attr_reader :queue
319
359
 
320
- def initialize(item, queue_name=nil)
360
+ # :nodoc:
361
+ # @api private
362
+ def initialize(item, queue_name = nil)
321
363
  @args = nil
322
364
  @value = item
323
365
  @item = item.is_a?(Hash) ? item : parse(item)
324
- @queue = queue_name || @item['queue']
366
+ @queue = queue_name || @item["queue"]
325
367
  end
326
368
 
369
+ # :nodoc:
370
+ # @api private
327
371
  def parse(item)
328
372
  Sidekiq.load_json(item)
329
373
  rescue JSON::ParserError
@@ -335,88 +379,109 @@ module Sidekiq
335
379
  {}
336
380
  end
337
381
 
382
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
383
+ # this class will be the ActiveJob adapter class rather than a specific job.
338
384
  def klass
339
- self['class']
385
+ self["class"]
340
386
  end
341
387
 
342
388
  def display_class
343
389
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
344
- @klass ||= case klass
345
- when /\ASidekiq::Extensions::Delayed/
346
- safe_load(args[0], klass) do |target, method, _|
347
- "#{target}.#{method}"
348
- end
349
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
350
- job_class = @item['wrapped'] || args[0]
351
- if 'ActionMailer::DeliveryJob' == job_class
352
- # MailerClass#mailer_method
353
- args[0]['arguments'][0..1].join('#')
354
- else
355
- job_class
356
- end
357
- else
358
- klass
359
- end
390
+ @klass ||= self["display_class"] || begin
391
+ case klass
392
+ when /\ASidekiq::Extensions::Delayed/
393
+ safe_load(args[0], klass) do |target, method, _|
394
+ "#{target}.#{method}"
395
+ end
396
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
397
+ job_class = @item["wrapped"] || args[0]
398
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
399
+ # MailerClass#mailer_method
400
+ args[0]["arguments"][0..1].join("#")
401
+ else
402
+ job_class
403
+ end
404
+ else
405
+ klass
406
+ end
407
+ end
360
408
  end
361
409
 
362
410
  def display_args
363
411
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
364
412
  @display_args ||= case klass
365
- when /\ASidekiq::Extensions::Delayed/
366
- safe_load(args[0], args) do |_, _, arg|
367
- arg
368
- end
369
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
370
- job_args = self['wrapped'] ? args[0]["arguments"] : []
371
- if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
372
- # remove MailerClass, mailer_method and 'deliver_now'
373
- job_args.drop(3)
374
- else
375
- job_args
376
- end
377
- else
378
- if self['encrypt']
379
- # no point in showing 150+ bytes of random garbage
380
- args[-1] = '[encrypted data]'
381
- end
382
- args
383
- end
413
+ when /\ASidekiq::Extensions::Delayed/
414
+ safe_load(args[0], args) do |_, _, arg, kwarg|
415
+ if !kwarg || kwarg.empty?
416
+ arg
417
+ else
418
+ [arg, kwarg]
419
+ end
420
+ end
421
+ when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
422
+ job_args = self["wrapped"] ? args[0]["arguments"] : []
423
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
424
+ # remove MailerClass, mailer_method and 'deliver_now'
425
+ job_args.drop(3)
426
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
427
+ # remove MailerClass, mailer_method and 'deliver_now'
428
+ job_args.drop(3).first["args"]
429
+ else
430
+ job_args
431
+ end
432
+ else
433
+ if self["encrypt"]
434
+ # no point in showing 150+ bytes of random garbage
435
+ args[-1] = "[encrypted data]"
436
+ end
437
+ args
438
+ end
384
439
  end
385
440
 
386
441
  def args
387
- @args || @item['args']
442
+ @args || @item["args"]
388
443
  end
389
444
 
390
445
  def jid
391
- self['jid']
446
+ self["jid"]
392
447
  end
393
448
 
394
449
  def enqueued_at
395
- self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
450
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
396
451
  end
397
452
 
398
453
  def created_at
399
- Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
454
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
455
+ end
456
+
457
+ def tags
458
+ self["tags"] || []
400
459
  end
401
460
 
402
- def queue
403
- @queue
461
+ def error_backtrace
462
+ # Cache nil values
463
+ if defined?(@error_backtrace)
464
+ @error_backtrace
465
+ else
466
+ value = self["error_backtrace"]
467
+ @error_backtrace = value && uncompress_backtrace(value)
468
+ end
404
469
  end
405
470
 
406
471
  def latency
407
472
  now = Time.now.to_f
408
- now - (@item['enqueued_at'] || @item['created_at'] || now)
473
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
409
474
  end
410
475
 
411
- ##
412
- # Remove this job from the queue.
476
+ # Remove this job from the queue
413
477
  def delete
414
- count = Sidekiq.redis do |conn|
478
+ count = Sidekiq.redis { |conn|
415
479
  conn.lrem("queue:#{@queue}", 1, @value)
416
- end
480
+ }
417
481
  count != 0
418
482
  end
419
483
 
484
+ # Access arbitrary attributes within the job hash
420
485
  def [](name)
421
486
  # nil will happen if the JSON fails to parse.
422
487
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -427,31 +492,55 @@ module Sidekiq
427
492
  private
428
493
 
429
494
  def safe_load(content, default)
430
- begin
431
- yield(*YAML.load(content))
432
- rescue => ex
433
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
434
- # memory yet so the YAML can't be loaded.
435
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
436
- default
495
+ yield(*YAML.load(content))
496
+ rescue => ex
497
+ # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
498
+ # memory yet so the YAML can't be loaded.
499
+ # TODO is this still necessary? Zeitwerk reloader should handle?
500
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
501
+ default
502
+ end
503
+
504
+ def uncompress_backtrace(backtrace)
505
+ if backtrace.is_a?(Array)
506
+ # Handle old jobs with raw Array backtrace format
507
+ backtrace
508
+ else
509
+ decoded = Base64.decode64(backtrace)
510
+ uncompressed = Zlib::Inflate.inflate(decoded)
511
+ begin
512
+ Sidekiq.load_json(uncompressed)
513
+ rescue
514
+ # Handle old jobs with marshalled backtrace format
515
+ # TODO Remove in 7.x
516
+ Marshal.load(uncompressed)
517
+ end
437
518
  end
438
519
  end
439
520
  end
440
521
 
441
- class SortedEntry < Job
522
+ # Represents a job within a Redis sorted set where the score
523
+ # represents a timestamp associated with the job. This timestamp
524
+ # could be the scheduled time for it to run (e.g. scheduled set),
525
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
526
+ class SortedEntry < JobRecord
442
527
  attr_reader :score
443
528
  attr_reader :parent
444
529
 
530
+ # :nodoc:
531
+ # @api private
445
532
  def initialize(parent, score, item)
446
533
  super(item)
447
- @score = score
534
+ @score = Float(score)
448
535
  @parent = parent
449
536
  end
450
537
 
538
+ # The timestamp associated with this entry
451
539
  def at
452
540
  Time.at(score).utc
453
541
  end
454
542
 
543
+ # remove this entry from the sorted set
455
544
  def delete
456
545
  if @value
457
546
  @parent.delete_by_value(@parent.name, @value)
@@ -460,11 +549,17 @@ module Sidekiq
460
549
  end
461
550
  end
462
551
 
552
+ # Change the scheduled time for this job.
553
+ #
554
+ # @param at [Time] the new timestamp for this job
463
555
  def reschedule(at)
464
- delete
465
- @parent.schedule(at, item)
556
+ Sidekiq.redis do |conn|
557
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
558
+ end
466
559
  end
467
560
 
561
+ # Enqueue this job from the scheduled or dead set so it will
562
+ # be executed at some point in the near future.
468
563
  def add_to_queue
469
564
  remove_job do |message|
470
565
  msg = Sidekiq.load_json(message)
@@ -472,16 +567,17 @@ module Sidekiq
472
567
  end
473
568
  end
474
569
 
570
+ # enqueue this job from the retry set so it will be executed
571
+ # at some point in the near future.
475
572
  def retry
476
573
  remove_job do |message|
477
574
  msg = Sidekiq.load_json(message)
478
- msg['retry_count'] -= 1 if msg['retry_count']
575
+ msg["retry_count"] -= 1 if msg["retry_count"]
479
576
  Sidekiq::Client.push(msg)
480
577
  end
481
578
  end
482
579
 
483
- ##
484
- # Place job in the dead set
580
+ # Move this job from its current set into the Dead set.
485
581
  def kill
486
582
  remove_job do |message|
487
583
  DeadSet.new.kill(message)
@@ -489,74 +585,109 @@ module Sidekiq
489
585
  end
490
586
 
491
587
  def error?
492
- !!item['error_class']
588
+ !!item["error_class"]
493
589
  end
494
590
 
495
591
  private
496
592
 
497
593
  def remove_job
498
594
  Sidekiq.redis do |conn|
499
- results = conn.multi do
500
- conn.zrangebyscore(parent.name, score, score)
501
- conn.zremrangebyscore(parent.name, score, score)
502
- end.first
595
+ results = conn.multi { |transaction|
596
+ transaction.zrangebyscore(parent.name, score, score)
597
+ transaction.zremrangebyscore(parent.name, score, score)
598
+ }.first
503
599
 
504
600
  if results.size == 1
505
601
  yield results.first
506
602
  else
507
603
  # multiple jobs with the same score
508
604
  # find the one with the right JID and push it
509
- hash = results.group_by do |message|
605
+ matched, nonmatched = results.partition { |message|
510
606
  if message.index(jid)
511
607
  msg = Sidekiq.load_json(message)
512
- msg['jid'] == jid
608
+ msg["jid"] == jid
513
609
  else
514
610
  false
515
611
  end
516
- end
612
+ }
517
613
 
518
- msg = hash.fetch(true, []).first
614
+ msg = matched.first
519
615
  yield msg if msg
520
616
 
521
617
  # push the rest back onto the sorted set
522
- conn.multi do
523
- hash.fetch(false, []).each do |message|
524
- conn.zadd(parent.name, score.to_f.to_s, message)
618
+ conn.multi do |transaction|
619
+ nonmatched.each do |message|
620
+ transaction.zadd(parent.name, score.to_f.to_s, message)
525
621
  end
526
622
  end
527
623
  end
528
624
  end
529
625
  end
530
-
531
626
  end
532
627
 
628
+ # Base class for all sorted sets within Sidekiq.
533
629
  class SortedSet
534
630
  include Enumerable
535
631
 
632
+ # Redis key of the set
633
+ # @!attribute [r] Name
536
634
  attr_reader :name
537
635
 
636
+ # :nodoc:
637
+ # @api private
538
638
  def initialize(name)
539
639
  @name = name
540
640
  @_size = size
541
641
  end
542
642
 
643
+ # real-time size of the set, will change
543
644
  def size
544
645
  Sidekiq.redis { |c| c.zcard(name) }
545
646
  end
546
647
 
648
+ # Scan through each element of the sorted set, yielding each to the supplied block.
649
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
650
+ #
651
+ # @param match [String] a snippet or regexp to filter matches.
652
+ # @param count [Integer] number of elements to retrieve at a time, default 100
653
+ # @yieldparam [Sidekiq::SortedEntry] each entry
654
+ def scan(match, count = 100)
655
+ return to_enum(:scan, match, count) unless block_given?
656
+
657
+ match = "*#{match}*" unless match.include?("*")
658
+ Sidekiq.redis do |conn|
659
+ conn.zscan_each(name, match: match, count: count) do |entry, score|
660
+ yield SortedEntry.new(self, score, entry)
661
+ end
662
+ end
663
+ end
664
+
665
+ # @return [Boolean] always true
547
666
  def clear
548
667
  Sidekiq.redis do |conn|
549
- conn.del(name)
668
+ conn.unlink(name)
550
669
  end
670
+ true
551
671
  end
552
672
  alias_method :💣, :clear
673
+
674
+ # :nodoc:
675
+ # @api private
676
+ def as_json(options = nil)
677
+ {name: name} # 5336
678
+ end
553
679
  end
554
680
 
681
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
682
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
683
+ # e.g. Batches.
555
684
  class JobSet < SortedSet
556
-
557
- def schedule(timestamp, message)
685
+ # Add a job with the associated timestamp to this set.
686
+ # @param timestamp [Time] the score for the job
687
+ # @param job [Hash] the job data
688
+ def schedule(timestamp, job)
558
689
  Sidekiq.redis do |conn|
559
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
690
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
560
691
  end
561
692
  end
562
693
 
@@ -566,46 +697,66 @@ module Sidekiq
566
697
  page = -1
567
698
  page_size = 50
568
699
 
569
- while true do
700
+ loop do
570
701
  range_start = page * page_size + offset_size
571
- range_end = range_start + page_size - 1
572
- elements = Sidekiq.redis do |conn|
573
- conn.zrange name, range_start, range_end, with_scores: true
574
- end
702
+ range_end = range_start + page_size - 1
703
+ elements = Sidekiq.redis { |conn|
704
+ conn.zrange name, range_start, range_end, withscores: true
705
+ }
575
706
  break if elements.empty?
576
707
  page -= 1
577
- elements.reverse.each do |element, score|
708
+ elements.reverse_each do |element, score|
578
709
  yield SortedEntry.new(self, score, element)
579
710
  end
580
711
  offset_size = initial_size - @_size
581
712
  end
582
713
  end
583
714
 
715
+ ##
716
+ # Fetch jobs that match a given time or Range. Job ID is an
717
+ # optional second argument.
718
+ #
719
+ # @param score [Time,Range] a specific timestamp or range
720
+ # @param jid [String, optional] find a specific JID within the score
721
+ # @return [Array<SortedEntry>] any results found, can be empty
584
722
  def fetch(score, jid = nil)
585
- elements = Sidekiq.redis do |conn|
586
- conn.zrangebyscore(name, score, score)
587
- end
588
-
589
- elements.inject([]) do |result, element|
590
- entry = SortedEntry.new(self, score, element)
591
- if jid
592
- result << entry if entry.jid == jid
723
+ begin_score, end_score =
724
+ if score.is_a?(Range)
725
+ [score.first, score.last]
593
726
  else
594
- result << entry
727
+ [score, score]
595
728
  end
596
- result
729
+
730
+ elements = Sidekiq.redis { |conn|
731
+ conn.zrangebyscore(name, begin_score, end_score, withscores: true)
732
+ }
733
+
734
+ elements.each_with_object([]) do |element, result|
735
+ data, job_score = element
736
+ entry = SortedEntry.new(self, job_score, data)
737
+ result << entry if jid.nil? || entry.jid == jid
597
738
  end
598
739
  end
599
740
 
600
741
  ##
601
742
  # Find the job with the given JID within this sorted set.
743
+ # *This is a slow O(n) operation*. Do not use for app logic.
602
744
  #
603
- # This is a slow, inefficient operation. Do not use under
604
- # normal conditions. Sidekiq Pro contains a faster version.
745
+ # @param jid [String] the job identifier
746
+ # @return [SortedEntry] the record or nil
605
747
  def find_job(jid)
606
- self.detect { |j| j.jid == jid }
748
+ Sidekiq.redis do |conn|
749
+ conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
750
+ job = JSON.parse(entry)
751
+ matched = job["jid"] == jid
752
+ return SortedEntry.new(self, score, entry) if matched
753
+ end
754
+ end
755
+ nil
607
756
  end
608
757
 
758
+ # :nodoc:
759
+ # @api private
609
760
  def delete_by_value(name, value)
610
761
  Sidekiq.redis do |conn|
611
762
  ret = conn.zrem(name, value)
@@ -614,17 +765,20 @@ module Sidekiq
614
765
  end
615
766
  end
616
767
 
768
+ # :nodoc:
769
+ # @api private
617
770
  def delete_by_jid(score, jid)
618
771
  Sidekiq.redis do |conn|
619
772
  elements = conn.zrangebyscore(name, score, score)
620
773
  elements.each do |element|
621
- message = Sidekiq.load_json(element)
622
- if message["jid"] == jid
623
- ret = conn.zrem(name, element)
624
- @_size -= 1 if ret
625
- break ret
774
+ if element.index(jid)
775
+ message = Sidekiq.load_json(element)
776
+ if message["jid"] == jid
777
+ ret = conn.zrem(name, element)
778
+ @_size -= 1 if ret
779
+ break ret
780
+ end
626
781
  end
627
- false
628
782
  end
629
783
  end
630
784
  end
@@ -633,10 +787,10 @@ module Sidekiq
633
787
  end
634
788
 
635
789
  ##
636
- # Allows enumeration of scheduled jobs within Sidekiq.
790
+ # The set of scheduled jobs within Sidekiq.
637
791
  # Based on this, you can search/filter for jobs. Here's an
638
- # example where I'm selecting all jobs of a certain type
639
- # and deleting them from the schedule queue.
792
+ # example where I'm selecting jobs based on some complex logic
793
+ # and deleting them from the scheduled set.
640
794
  #
641
795
  # r = Sidekiq::ScheduledSet.new
642
796
  # r.select do |scheduled|
@@ -646,12 +800,12 @@ module Sidekiq
646
800
  # end.map(&:delete)
647
801
  class ScheduledSet < JobSet
648
802
  def initialize
649
- super 'schedule'
803
+ super "schedule"
650
804
  end
651
805
  end
652
806
 
653
807
  ##
654
- # Allows enumeration of retries within Sidekiq.
808
+ # The set of retries within Sidekiq.
655
809
  # Based on this, you can search/filter for jobs. Here's an
656
810
  # example where I'm selecting all jobs of a certain type
657
811
  # and deleting them from the retry queue.
@@ -664,37 +818,39 @@ module Sidekiq
664
818
  # end.map(&:delete)
665
819
  class RetrySet < JobSet
666
820
  def initialize
667
- super 'retry'
821
+ super "retry"
668
822
  end
669
823
 
824
+ # Enqueues all jobs pending within the retry set.
670
825
  def retry_all
671
- while size > 0
672
- each(&:retry)
673
- end
826
+ each(&:retry) while size > 0
674
827
  end
675
828
 
829
+ # Kills all jobs pending within the retry set.
676
830
  def kill_all
677
- while size > 0
678
- each(&:kill)
679
- end
831
+ each(&:kill) while size > 0
680
832
  end
681
833
  end
682
834
 
683
835
  ##
684
- # Allows enumeration of dead jobs within Sidekiq.
836
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
837
+ # their retries and are held in this set pending some sort of manual
838
+ # fix. They will be removed after 6 months (dead_timeout) if not.
685
839
  #
686
840
  class DeadSet < JobSet
687
841
  def initialize
688
- super 'dead'
842
+ super "dead"
689
843
  end
690
844
 
691
- def kill(message, opts={})
845
+ # Add the given job to the Dead set.
846
+ # @param message [String] the job data as JSON
847
+ def kill(message, opts = {})
692
848
  now = Time.now.to_f
693
849
  Sidekiq.redis do |conn|
694
- conn.multi do
695
- conn.zadd(name, now.to_s, message)
696
- conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
697
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
850
+ conn.multi do |transaction|
851
+ transaction.zadd(name, now.to_s, message)
852
+ transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
853
+ transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
698
854
  end
699
855
  end
700
856
 
@@ -709,103 +865,130 @@ module Sidekiq
709
865
  true
710
866
  end
711
867
 
868
+ # Enqueue all dead jobs
712
869
  def retry_all
713
- while size > 0
714
- each(&:retry)
715
- end
870
+ each(&:retry) while size > 0
716
871
  end
717
872
 
873
+ # The maximum size of the Dead set. Older entries will be trimmed
874
+ # to stay within this limit. Default value is 10,000.
718
875
  def self.max_jobs
719
- Sidekiq.options[:dead_max_jobs]
876
+ Sidekiq[:dead_max_jobs]
720
877
  end
721
878
 
879
+ # The time limit for entries within the Dead set. Older entries will be thrown away.
880
+ # Default value is six months.
722
881
  def self.timeout
723
- Sidekiq.options[:dead_timeout_in_seconds]
882
+ Sidekiq[:dead_timeout_in_seconds]
724
883
  end
725
884
  end
726
885
 
727
886
  ##
728
887
  # Enumerates the set of Sidekiq processes which are actively working
729
- # right now. Each process send a heartbeat to Redis every 5 seconds
888
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
730
889
  # so this set should be relatively accurate, barring network partitions.
731
890
  #
732
- # Yields a Sidekiq::Process.
891
+ # @yieldparam [Sidekiq::Process]
733
892
  #
734
893
  class ProcessSet
735
894
  include Enumerable
736
- include RedisScanner
737
895
 
738
- def initialize(clean_plz=true)
896
+ # :nodoc:
897
+ # @api private
898
+ def initialize(clean_plz = true)
739
899
  cleanup if clean_plz
740
900
  end
741
901
 
742
902
  # Cleans up dead processes recorded in Redis.
743
903
  # Returns the number of processes cleaned.
904
+ # :nodoc:
905
+ # @api private
744
906
  def cleanup
907
+ # don't run cleanup more than once per minute
908
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
909
+
745
910
  count = 0
746
911
  Sidekiq.redis do |conn|
747
- procs = sscan(conn, 'processes').sort
748
- heartbeats = conn.pipelined do
912
+ procs = conn.sscan_each("processes").to_a
913
+ heartbeats = conn.pipelined { |pipeline|
749
914
  procs.each do |key|
750
- conn.hget(key, 'info')
915
+ pipeline.hget(key, "info")
751
916
  end
752
- end
917
+ }
753
918
 
754
919
  # the hash named key has an expiry of 60 seconds.
755
920
  # if it's not found, that means the process has not reported
756
921
  # in to Redis and probably died.
757
- to_prune = []
758
- heartbeats.each_with_index do |beat, i|
759
- to_prune << procs[i] if beat.nil?
760
- end
761
- count = conn.srem('processes', to_prune) unless to_prune.empty?
922
+ to_prune = procs.select.with_index { |proc, i|
923
+ heartbeats[i].nil?
924
+ }
925
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
762
926
  end
763
927
  count
764
928
  end
765
929
 
766
930
  def each
767
- procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
931
+ result = Sidekiq.redis { |conn|
932
+ procs = conn.sscan_each("processes").to_a.sort
768
933
 
769
- Sidekiq.redis do |conn|
770
934
  # We're making a tradeoff here between consuming more memory instead of
771
935
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
772
936
  # you'll be happier this way
773
- result = conn.pipelined do
937
+ conn.pipelined do |pipeline|
774
938
  procs.each do |key|
775
- conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
939
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
776
940
  end
777
941
  end
942
+ }
778
943
 
779
- result.each do |info, busy, at_s, quiet|
780
- # If a process is stopped between when we query Redis for `procs` and
781
- # when we query for `result`, we will have an item in `result` that is
782
- # composed of `nil` values.
783
- next if info.nil?
784
-
785
- hash = Sidekiq.load_json(info)
786
- yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
787
- end
944
+ result.each do |info, busy, at_s, quiet, rss, rtt|
945
+ # If a process is stopped between when we query Redis for `procs` and
946
+ # when we query for `result`, we will have an item in `result` that is
947
+ # composed of `nil` values.
948
+ next if info.nil?
949
+
950
+ hash = Sidekiq.load_json(info)
951
+ yield Process.new(hash.merge("busy" => busy.to_i,
952
+ "beat" => at_s.to_f,
953
+ "quiet" => quiet,
954
+ "rss" => rss.to_i,
955
+ "rtt_us" => rtt.to_i))
788
956
  end
789
-
790
- nil
791
957
  end
792
958
 
793
959
  # This method is not guaranteed accurate since it does not prune the set
794
960
  # based on current heartbeat. #each does that and ensures the set only
795
961
  # contains Sidekiq processes which have sent a heartbeat within the last
796
962
  # 60 seconds.
963
+ # @return [Integer] current number of registered Sidekiq processes
797
964
  def size
798
- Sidekiq.redis { |conn| conn.scard('processes') }
965
+ Sidekiq.redis { |conn| conn.scard("processes") }
966
+ end
967
+
968
+ # Total number of threads available to execute jobs.
969
+ # For Sidekiq Enterprise customers this number (in production) must be
970
+ # less than or equal to your licensed concurrency.
971
+ # @return [Integer] the sum of process concurrency
972
+ def total_concurrency
973
+ sum { |x| x["concurrency"].to_i }
799
974
  end
800
975
 
976
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
977
+ def total_rss_in_kb
978
+ sum { |x| x["rss"].to_i }
979
+ end
980
+ alias_method :total_rss, :total_rss_in_kb
981
+
801
982
  # Returns the identity of the current cluster leader or "" if no leader.
802
983
  # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
803
984
  # or Sidekiq Pro.
985
+ # @return [String] Identity of cluster leader
986
+ # @return [String] empty string if no leader
804
987
  def leader
805
988
  @leader ||= begin
806
- x = Sidekiq.redis {|c| c.get("dear-leader") }
989
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
807
990
  # need a non-falsy value so we can memoize
808
- x = "" unless x
991
+ x ||= ""
809
992
  x
810
993
  end
811
994
  end
@@ -827,16 +1010,18 @@ module Sidekiq
827
1010
  # 'identity' => <unique string identifying the process>,
828
1011
  # }
829
1012
  class Process
1013
+ # :nodoc:
1014
+ # @api private
830
1015
  def initialize(hash)
831
1016
  @attribs = hash
832
1017
  end
833
1018
 
834
1019
  def tag
835
- self['tag']
1020
+ self["tag"]
836
1021
  end
837
1022
 
838
1023
  def labels
839
- Array(self['labels'])
1024
+ Array(self["labels"])
840
1025
  end
841
1026
 
842
1027
  def [](key)
@@ -844,23 +1029,40 @@ module Sidekiq
844
1029
  end
845
1030
 
846
1031
  def identity
847
- self['identity']
1032
+ self["identity"]
1033
+ end
1034
+
1035
+ def queues
1036
+ self["queues"]
848
1037
  end
849
1038
 
1039
+ # Signal this process to stop processing new jobs.
1040
+ # It will continue to execute jobs it has already fetched.
1041
+ # This method is *asynchronous* and it can take 5-10
1042
+ # seconds for the process to quiet.
850
1043
  def quiet!
851
- signal('TSTP')
1044
+ signal("TSTP")
852
1045
  end
853
1046
 
1047
+ # Signal this process to shutdown.
1048
+ # It will shutdown within its configured :timeout value, default 25 seconds.
1049
+ # This method is *asynchronous* and it can take 5-10
1050
+ # seconds for the process to start shutting down.
854
1051
  def stop!
855
- signal('TERM')
1052
+ signal("TERM")
856
1053
  end
857
1054
 
1055
+ # Signal this process to log backtraces for all threads.
1056
+ # Useful if you have a frozen or deadlocked process which is
1057
+ # still sending a heartbeat.
1058
+ # This method is *asynchronous* and it can take 5-10 seconds.
858
1059
  def dump_threads
859
- signal('TTIN')
1060
+ signal("TTIN")
860
1061
  end
861
1062
 
1063
+ # @return [Boolean] true if this process is quiet or shutting down
862
1064
  def stopping?
863
- self['quiet'] == 'true'
1065
+ self["quiet"] == "true"
864
1066
  end
865
1067
 
866
1068
  private
@@ -868,18 +1070,17 @@ module Sidekiq
868
1070
  def signal(sig)
869
1071
  key = "#{identity}-signals"
870
1072
  Sidekiq.redis do |c|
871
- c.multi do
872
- c.lpush(key, sig)
873
- c.expire(key, 60)
1073
+ c.multi do |transaction|
1074
+ transaction.lpush(key, sig)
1075
+ transaction.expire(key, 60)
874
1076
  end
875
1077
  end
876
1078
  end
877
-
878
1079
  end
879
1080
 
880
1081
  ##
881
- # A worker is a thread that is currently processing a job.
882
- # Programmatic access to the current active worker set.
1082
+ # The WorkSet stores the work being done by this Sidekiq cluster.
1083
+ # It tracks the process and thread working on each job.
883
1084
  #
884
1085
  # WARNING WARNING WARNING
885
1086
  #
@@ -887,34 +1088,47 @@ module Sidekiq
887
1088
  # If you call #size => 5 and then expect #each to be
888
1089
  # called 5 times, you're going to have a bad time.
889
1090
  #
890
- # workers = Sidekiq::Workers.new
891
- # workers.size => 2
892
- # workers.each do |process_id, thread_id, work|
1091
+ # works = Sidekiq::WorkSet.new
1092
+ # works.size => 2
1093
+ # works.each do |process_id, thread_id, work|
893
1094
  # # process_id is a unique identifier per Sidekiq process
894
1095
  # # thread_id is a unique identifier per thread
895
1096
  # # work is a Hash which looks like:
896
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
1097
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
897
1098
  # # run_at is an epoch Integer.
898
1099
  # end
899
1100
  #
900
- class Workers
1101
+ class WorkSet
901
1102
  include Enumerable
902
- include RedisScanner
903
1103
 
904
- def each
1104
+ def each(&block)
1105
+ results = []
1106
+ procs = nil
1107
+ all_works = nil
1108
+
905
1109
  Sidekiq.redis do |conn|
906
- procs = sscan(conn, 'processes')
907
- procs.sort.each do |key|
908
- valid, workers = conn.pipelined do
909
- conn.exists?(key)
910
- conn.hgetall("#{key}:workers")
911
- end
912
- next unless valid
913
- workers.each_pair do |tid, json|
914
- yield key, tid, Sidekiq.load_json(json)
1110
+ procs = conn.sscan_each("processes").to_a.sort
1111
+
1112
+ all_works = conn.pipelined do |pipeline|
1113
+ procs.each do |key|
1114
+ pipeline.hgetall("#{key}:work")
915
1115
  end
916
1116
  end
917
1117
  end
1118
+
1119
+ procs.zip(all_works).each do |key, workers|
1120
+ workers.each_pair do |tid, json|
1121
+ next if json.empty?
1122
+
1123
+ hsh = Sidekiq.load_json(json)
1124
+ p = hsh["payload"]
1125
+ # avoid breaking API, this is a side effect of the JSON optimization in #4316
1126
+ hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
1127
+ results << [key, tid, hsh]
1128
+ end
1129
+ end
1130
+
1131
+ results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
918
1132
  end
919
1133
 
920
1134
  # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -925,18 +1139,21 @@ module Sidekiq
925
1139
  # which can easily get out of sync with crashy processes.
926
1140
  def size
927
1141
  Sidekiq.redis do |conn|
928
- procs = sscan(conn, 'processes')
1142
+ procs = conn.sscan_each("processes").to_a
929
1143
  if procs.empty?
930
1144
  0
931
1145
  else
932
- conn.pipelined do
1146
+ conn.pipelined { |pipeline|
933
1147
  procs.each do |key|
934
- conn.hget(key, 'busy')
1148
+ pipeline.hget(key, "busy")
935
1149
  end
936
- end.map(&:to_i).inject(:+)
1150
+ }.sum(&:to_i)
937
1151
  end
938
1152
  end
939
1153
  end
940
1154
  end
941
-
1155
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
1156
+ # Is "worker" a process, a type of job, a thread? Undefined!
1157
+ # WorkSet better describes the data.
1158
+ Workers = WorkSet
942
1159
  end