sidekiq 5.2.10 → 7.2.0

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of sidekiq might be problematic. Click here for more details.

Files changed (150) hide show
  1. checksums.yaml +4 -4
  2. data/Changes.md +600 -8
  3. data/LICENSE.txt +9 -0
  4. data/README.md +47 -50
  5. data/bin/sidekiq +22 -3
  6. data/bin/sidekiqload +213 -115
  7. data/bin/sidekiqmon +11 -0
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +557 -354
  13. data/lib/sidekiq/capsule.rb +127 -0
  14. data/lib/sidekiq/cli.rb +204 -226
  15. data/lib/sidekiq/client.rb +127 -102
  16. data/lib/sidekiq/component.rb +68 -0
  17. data/lib/sidekiq/config.rb +287 -0
  18. data/lib/sidekiq/deploy.rb +62 -0
  19. data/lib/sidekiq/embedded.rb +61 -0
  20. data/lib/sidekiq/fetch.rb +49 -42
  21. data/lib/sidekiq/job.rb +374 -0
  22. data/lib/sidekiq/job_logger.rb +33 -7
  23. data/lib/sidekiq/job_retry.rb +147 -108
  24. data/lib/sidekiq/job_util.rb +107 -0
  25. data/lib/sidekiq/launcher.rb +203 -105
  26. data/lib/sidekiq/logger.rb +131 -0
  27. data/lib/sidekiq/manager.rb +43 -46
  28. data/lib/sidekiq/metrics/query.rb +155 -0
  29. data/lib/sidekiq/metrics/shared.rb +95 -0
  30. data/lib/sidekiq/metrics/tracking.rb +136 -0
  31. data/lib/sidekiq/middleware/chain.rb +113 -56
  32. data/lib/sidekiq/middleware/current_attributes.rb +95 -0
  33. data/lib/sidekiq/middleware/i18n.rb +7 -7
  34. data/lib/sidekiq/middleware/modules.rb +21 -0
  35. data/lib/sidekiq/monitor.rb +146 -0
  36. data/lib/sidekiq/paginator.rb +28 -16
  37. data/lib/sidekiq/processor.rb +122 -120
  38. data/lib/sidekiq/rails.rb +48 -38
  39. data/lib/sidekiq/redis_client_adapter.rb +111 -0
  40. data/lib/sidekiq/redis_connection.rb +39 -107
  41. data/lib/sidekiq/ring_buffer.rb +29 -0
  42. data/lib/sidekiq/scheduled.rb +111 -49
  43. data/lib/sidekiq/sd_notify.rb +149 -0
  44. data/lib/sidekiq/systemd.rb +24 -0
  45. data/lib/sidekiq/testing/inline.rb +6 -5
  46. data/lib/sidekiq/testing.rb +90 -89
  47. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  48. data/lib/sidekiq/version.rb +3 -1
  49. data/lib/sidekiq/web/action.rb +15 -11
  50. data/lib/sidekiq/web/application.rb +186 -79
  51. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  52. data/lib/sidekiq/web/helpers.rb +154 -115
  53. data/lib/sidekiq/web/router.rb +23 -19
  54. data/lib/sidekiq/web.rb +68 -107
  55. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  56. data/lib/sidekiq.rb +92 -182
  57. data/sidekiq.gemspec +25 -16
  58. data/web/assets/images/apple-touch-icon.png +0 -0
  59. data/web/assets/javascripts/application.js +146 -61
  60. data/web/assets/javascripts/base-charts.js +106 -0
  61. data/web/assets/javascripts/chart.min.js +13 -0
  62. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  63. data/web/assets/javascripts/dashboard-charts.js +182 -0
  64. data/web/assets/javascripts/dashboard.js +35 -293
  65. data/web/assets/javascripts/metrics.js +298 -0
  66. data/web/assets/stylesheets/application-dark.css +147 -0
  67. data/web/assets/stylesheets/application-rtl.css +2 -95
  68. data/web/assets/stylesheets/application.css +111 -522
  69. data/web/locales/ar.yml +71 -65
  70. data/web/locales/cs.yml +62 -62
  71. data/web/locales/da.yml +60 -53
  72. data/web/locales/de.yml +65 -53
  73. data/web/locales/el.yml +43 -24
  74. data/web/locales/en.yml +86 -66
  75. data/web/locales/es.yml +70 -54
  76. data/web/locales/fa.yml +65 -65
  77. data/web/locales/fr.yml +83 -62
  78. data/web/locales/gd.yml +99 -0
  79. data/web/locales/he.yml +65 -64
  80. data/web/locales/hi.yml +59 -59
  81. data/web/locales/it.yml +53 -53
  82. data/web/locales/ja.yml +75 -64
  83. data/web/locales/ko.yml +52 -52
  84. data/web/locales/lt.yml +83 -0
  85. data/web/locales/nb.yml +61 -61
  86. data/web/locales/nl.yml +52 -52
  87. data/web/locales/pl.yml +45 -45
  88. data/web/locales/pt-br.yml +83 -55
  89. data/web/locales/pt.yml +51 -51
  90. data/web/locales/ru.yml +68 -63
  91. data/web/locales/sv.yml +53 -53
  92. data/web/locales/ta.yml +60 -60
  93. data/web/locales/uk.yml +62 -61
  94. data/web/locales/ur.yml +64 -64
  95. data/web/locales/vi.yml +83 -0
  96. data/web/locales/zh-cn.yml +43 -16
  97. data/web/locales/zh-tw.yml +42 -8
  98. data/web/views/_footer.erb +6 -3
  99. data/web/views/_job_info.erb +21 -4
  100. data/web/views/_metrics_period_select.erb +12 -0
  101. data/web/views/_nav.erb +1 -1
  102. data/web/views/_paging.erb +2 -0
  103. data/web/views/_poll_link.erb +3 -6
  104. data/web/views/_summary.erb +7 -7
  105. data/web/views/busy.erb +77 -27
  106. data/web/views/dashboard.erb +48 -18
  107. data/web/views/dead.erb +3 -3
  108. data/web/views/filtering.erb +7 -0
  109. data/web/views/layout.erb +3 -1
  110. data/web/views/metrics.erb +91 -0
  111. data/web/views/metrics_for_job.erb +59 -0
  112. data/web/views/morgue.erb +14 -15
  113. data/web/views/queue.erb +33 -24
  114. data/web/views/queues.erb +19 -5
  115. data/web/views/retries.erb +16 -17
  116. data/web/views/retry.erb +3 -3
  117. data/web/views/scheduled.erb +17 -15
  118. metadata +71 -71
  119. data/.circleci/config.yml +0 -61
  120. data/.github/contributing.md +0 -32
  121. data/.github/issue_template.md +0 -11
  122. data/.gitignore +0 -15
  123. data/.travis.yml +0 -11
  124. data/3.0-Upgrade.md +0 -70
  125. data/4.0-Upgrade.md +0 -53
  126. data/5.0-Upgrade.md +0 -56
  127. data/COMM-LICENSE +0 -97
  128. data/Ent-Changes.md +0 -238
  129. data/Gemfile +0 -19
  130. data/LICENSE +0 -9
  131. data/Pro-2.0-Upgrade.md +0 -138
  132. data/Pro-3.0-Upgrade.md +0 -44
  133. data/Pro-4.0-Upgrade.md +0 -35
  134. data/Pro-Changes.md +0 -759
  135. data/Rakefile +0 -9
  136. data/bin/sidekiqctl +0 -20
  137. data/code_of_conduct.md +0 -50
  138. data/lib/generators/sidekiq/worker_generator.rb +0 -49
  139. data/lib/sidekiq/core_ext.rb +0 -1
  140. data/lib/sidekiq/ctl.rb +0 -221
  141. data/lib/sidekiq/delay.rb +0 -42
  142. data/lib/sidekiq/exception_handler.rb +0 -29
  143. data/lib/sidekiq/extensions/action_mailer.rb +0 -57
  144. data/lib/sidekiq/extensions/active_record.rb +0 -40
  145. data/lib/sidekiq/extensions/class_methods.rb +0 -40
  146. data/lib/sidekiq/extensions/generic_proxy.rb +0 -31
  147. data/lib/sidekiq/logging.rb +0 -122
  148. data/lib/sidekiq/middleware/server/active_record.rb +0 -23
  149. data/lib/sidekiq/util.rb +0 -66
  150. data/lib/sidekiq/worker.rb +0 -220
data/lib/sidekiq/api.rb CHANGED
@@ -1,26 +1,33 @@
1
1
  # frozen_string_literal: true
2
- require 'sidekiq'
3
2
 
4
- module Sidekiq
3
+ require "sidekiq"
5
4
 
6
- module RedisScanner
7
- def sscan(conn, key)
8
- cursor = '0'
9
- result = []
10
- loop do
11
- cursor, values = conn.sscan(key, cursor)
12
- result.push(*values)
13
- break if cursor == '0'
14
- end
15
- result
16
- end
17
- end
5
+ require "zlib"
6
+ require "set"
7
+ require "base64"
18
8
 
19
- class Stats
20
- include RedisScanner
9
+ require "sidekiq/metrics/query"
21
10
 
11
+ #
12
+ # Sidekiq's Data API provides a Ruby object model on top
13
+ # of Sidekiq's runtime data in Redis. This API should never
14
+ # be used within application code for business logic.
15
+ #
16
+ # The Sidekiq server process never uses this API: all data
17
+ # manipulation is done directly for performance reasons to
18
+ # ensure we are using Redis as efficiently as possible at
19
+ # every callsite.
20
+ #
21
+
22
+ module Sidekiq
23
+ # Retrieve runtime statistics from Redis regarding
24
+ # this Sidekiq cluster.
25
+ #
26
+ # stat = Sidekiq::Stats.new
27
+ # stat.processed
28
+ class Stats
22
29
  def initialize
23
- fetch_stats!
30
+ fetch_stats_fast!
24
31
  end
25
32
 
26
33
  def processed
@@ -60,65 +67,96 @@ module Sidekiq
60
67
  end
61
68
 
62
69
  def queues
63
- Sidekiq::Stats::Queues.new.lengths
64
- end
70
+ Sidekiq.redis do |conn|
71
+ queues = conn.sscan("queues").to_a
65
72
 
66
- def fetch_stats!
67
- pipe1_res = Sidekiq.redis do |conn|
68
- conn.pipelined do
69
- conn.get('stat:processed')
70
- conn.get('stat:failed')
71
- conn.zcard('schedule')
72
- conn.zcard('retry')
73
- conn.zcard('dead')
74
- conn.scard('processes')
75
- conn.lrange('queue:default', -1, -1)
76
- end
77
- end
73
+ lengths = conn.pipelined { |pipeline|
74
+ queues.each do |queue|
75
+ pipeline.llen("queue:#{queue}")
76
+ end
77
+ }
78
78
 
79
- processes = Sidekiq.redis do |conn|
80
- sscan(conn, 'processes')
79
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
80
+ array_of_arrays.to_h
81
81
  end
82
+ end
82
83
 
83
- queues = Sidekiq.redis do |conn|
84
- sscan(conn, 'queues')
85
- end
84
+ # O(1) redis calls
85
+ # @api private
86
+ def fetch_stats_fast!
87
+ pipe1_res = Sidekiq.redis { |conn|
88
+ conn.pipelined do |pipeline|
89
+ pipeline.get("stat:processed")
90
+ pipeline.get("stat:failed")
91
+ pipeline.zcard("schedule")
92
+ pipeline.zcard("retry")
93
+ pipeline.zcard("dead")
94
+ pipeline.scard("processes")
95
+ pipeline.lindex("queue:default", -1)
96
+ end
97
+ }
86
98
 
87
- pipe2_res = Sidekiq.redis do |conn|
88
- conn.pipelined do
89
- processes.each {|key| conn.hget(key, 'busy') }
90
- queues.each {|queue| conn.llen("queue:#{queue}") }
99
+ default_queue_latency = if (entry = pipe1_res[6])
100
+ job = begin
101
+ Sidekiq.load_json(entry)
102
+ rescue
103
+ {}
91
104
  end
105
+ now = Time.now.to_f
106
+ thence = job["enqueued_at"] || now
107
+ now - thence
108
+ else
109
+ 0
92
110
  end
93
111
 
94
- s = processes.size
95
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
96
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
97
-
98
- default_queue_latency = if (entry = pipe1_res[6].first)
99
- job = Sidekiq.load_json(entry) rescue {}
100
- now = Time.now.to_f
101
- thence = job['enqueued_at'] || now
102
- now - thence
103
- else
104
- 0
105
- end
106
112
  @stats = {
107
- processed: pipe1_res[0].to_i,
108
- failed: pipe1_res[1].to_i,
109
- scheduled_size: pipe1_res[2],
110
- retry_size: pipe1_res[3],
111
- dead_size: pipe1_res[4],
112
- processes_size: pipe1_res[5],
113
-
114
- default_queue_latency: default_queue_latency,
115
- workers_size: workers_size,
116
- enqueued: enqueued
113
+ processed: pipe1_res[0].to_i,
114
+ failed: pipe1_res[1].to_i,
115
+ scheduled_size: pipe1_res[2],
116
+ retry_size: pipe1_res[3],
117
+ dead_size: pipe1_res[4],
118
+ processes_size: pipe1_res[5],
119
+
120
+ default_queue_latency: default_queue_latency
117
121
  }
118
122
  end
119
123
 
124
+ # O(number of processes + number of queues) redis calls
125
+ # @api private
126
+ def fetch_stats_slow!
127
+ processes = Sidekiq.redis { |conn|
128
+ conn.sscan("processes").to_a
129
+ }
130
+
131
+ queues = Sidekiq.redis { |conn|
132
+ conn.sscan("queues").to_a
133
+ }
134
+
135
+ pipe2_res = Sidekiq.redis { |conn|
136
+ conn.pipelined do |pipeline|
137
+ processes.each { |key| pipeline.hget(key, "busy") }
138
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
139
+ end
140
+ }
141
+
142
+ s = processes.size
143
+ workers_size = pipe2_res[0...s].sum(&:to_i)
144
+ enqueued = pipe2_res[s..].sum(&:to_i)
145
+
146
+ @stats[:workers_size] = workers_size
147
+ @stats[:enqueued] = enqueued
148
+ @stats
149
+ end
150
+
151
+ # @api private
152
+ def fetch_stats!
153
+ fetch_stats_fast!
154
+ fetch_stats_slow!
155
+ end
156
+
157
+ # @api private
120
158
  def reset(*stats)
121
- all = %w(failed processed)
159
+ all = %w[failed processed]
122
160
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
123
161
 
124
162
  mset_args = []
@@ -134,37 +172,13 @@ module Sidekiq
134
172
  private
135
173
 
136
174
  def stat(s)
137
- @stats[s]
138
- end
139
-
140
- class Queues
141
- include RedisScanner
142
-
143
- def lengths
144
- Sidekiq.redis do |conn|
145
- queues = sscan(conn, 'queues')
146
-
147
- lengths = conn.pipelined do
148
- queues.each do |queue|
149
- conn.llen("queue:#{queue}")
150
- end
151
- end
152
-
153
- i = 0
154
- array_of_arrays = queues.inject({}) do |memo, queue|
155
- memo[queue] = lengths[i]
156
- i += 1
157
- memo
158
- end.sort_by { |_, size| size }
159
-
160
- Hash[array_of_arrays.reverse]
161
- end
162
- end
175
+ fetch_stats_slow! if @stats[s].nil?
176
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
163
177
  end
164
178
 
165
179
  class History
166
- def initialize(days_previous, start_date = nil)
167
- #we only store five years of data in Redis
180
+ def initialize(days_previous, start_date = nil, pool: nil)
181
+ # we only store five years of data in Redis
168
182
  raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
169
183
  @days_previous = days_previous
170
184
  @start_date = start_date || Time.now.utc.to_date
@@ -181,28 +195,17 @@ module Sidekiq
181
195
  private
182
196
 
183
197
  def date_stat_hash(stat)
184
- i = 0
185
198
  stat_hash = {}
186
- keys = []
187
- dates = []
188
-
189
- while i < @days_previous
190
- date = @start_date - i
191
- datestr = date.strftime("%Y-%m-%d")
192
- keys << "stat:#{stat}:#{datestr}"
193
- dates << datestr
194
- i += 1
195
- end
199
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
200
+ date.strftime("%Y-%m-%d")
201
+ }
196
202
 
197
- begin
198
- Sidekiq.redis do |conn|
199
- conn.mget(keys).each_with_index do |value, idx|
200
- stat_hash[dates[idx]] = value ? value.to_i : 0
201
- end
203
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
204
+
205
+ Sidekiq.redis do |conn|
206
+ conn.mget(keys).each_with_index do |value, idx|
207
+ stat_hash[dates[idx]] = value ? value.to_i : 0
202
208
  end
203
- rescue Redis::CommandError
204
- # mget will trigger a CROSSSLOT error when run against a Cluster
205
- # TODO Someone want to add Cluster support?
206
209
  end
207
210
 
208
211
  stat_hash
@@ -211,9 +214,10 @@ module Sidekiq
211
214
  end
212
215
 
213
216
  ##
214
- # Encapsulates a queue within Sidekiq.
217
+ # Represents a queue within Sidekiq.
215
218
  # Allows enumeration of all jobs within the queue
216
- # and deletion of jobs.
219
+ # and deletion of jobs. NB: this queue data is real-time
220
+ # and is changing within Redis moment by moment.
217
221
  #
218
222
  # queue = Sidekiq::Queue.new("mailer")
219
223
  # queue.each do |job|
@@ -221,30 +225,34 @@ module Sidekiq
221
225
  # job.args # => [1, 2, 3]
222
226
  # job.delete if job.jid == 'abcdef1234567890'
223
227
  # end
224
- #
225
228
  class Queue
226
229
  include Enumerable
227
- extend RedisScanner
228
230
 
229
231
  ##
230
- # Return all known queues within Redis.
232
+ # Fetch all known queues within Redis.
231
233
  #
234
+ # @return [Array<Sidekiq::Queue>]
232
235
  def self.all
233
- Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
236
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
234
237
  end
235
238
 
236
239
  attr_reader :name
237
240
 
238
- def initialize(name="default")
241
+ # @param name [String] the name of the queue
242
+ def initialize(name = "default")
239
243
  @name = name.to_s
240
244
  @rname = "queue:#{name}"
241
245
  end
242
246
 
247
+ # The current size of the queue within Redis.
248
+ # This value is real-time and can change between calls.
249
+ #
250
+ # @return [Integer] the size
243
251
  def size
244
252
  Sidekiq.redis { |con| con.llen(@rname) }
245
253
  end
246
254
 
247
- # Sidekiq Pro overrides this
255
+ # @return [Boolean] if the queue is currently paused
248
256
  def paused?
249
257
  false
250
258
  end
@@ -253,15 +261,15 @@ module Sidekiq
253
261
  # Calculates this queue's latency, the difference in seconds since the oldest
254
262
  # job in the queue was enqueued.
255
263
  #
256
- # @return Float
264
+ # @return [Float] in seconds
257
265
  def latency
258
- entry = Sidekiq.redis do |conn|
259
- conn.lrange(@rname, -1, -1)
260
- end.first
266
+ entry = Sidekiq.redis { |conn|
267
+ conn.lindex(@rname, -1)
268
+ }
261
269
  return 0 unless entry
262
270
  job = Sidekiq.load_json(entry)
263
271
  now = Time.now.to_f
264
- thence = job['enqueued_at'] || now
272
+ thence = job["enqueued_at"] || now
265
273
  now - thence
266
274
  end
267
275
 
@@ -271,16 +279,16 @@ module Sidekiq
271
279
  page = 0
272
280
  page_size = 50
273
281
 
274
- while true do
282
+ loop do
275
283
  range_start = page * page_size - deleted_size
276
- range_end = range_start + page_size - 1
277
- entries = Sidekiq.redis do |conn|
284
+ range_end = range_start + page_size - 1
285
+ entries = Sidekiq.redis { |conn|
278
286
  conn.lrange @rname, range_start, range_end
279
- end
287
+ }
280
288
  break if entries.empty?
281
289
  page += 1
282
290
  entries.each do |entry|
283
- yield Job.new(entry, @name)
291
+ yield JobRecord.new(entry, @name)
284
292
  end
285
293
  deleted_size = initial_size - size
286
294
  end
@@ -289,41 +297,63 @@ module Sidekiq
289
297
  ##
290
298
  # Find the job with the given JID within this queue.
291
299
  #
292
- # This is a slow, inefficient operation. Do not use under
293
- # normal conditions. Sidekiq Pro contains a faster version.
300
+ # This is a *slow, inefficient* operation. Do not use under
301
+ # normal conditions.
302
+ #
303
+ # @param jid [String] the job_id to look for
304
+ # @return [Sidekiq::JobRecord]
305
+ # @return [nil] if not found
294
306
  def find_job(jid)
295
307
  detect { |j| j.jid == jid }
296
308
  end
297
309
 
310
+ # delete all jobs within this queue
311
+ # @return [Boolean] true
298
312
  def clear
299
313
  Sidekiq.redis do |conn|
300
- conn.multi do
301
- conn.del(@rname)
302
- conn.srem("queues", name)
314
+ conn.multi do |transaction|
315
+ transaction.unlink(@rname)
316
+ transaction.srem("queues", [name])
303
317
  end
304
318
  end
319
+ true
305
320
  end
306
321
  alias_method :💣, :clear
322
+
323
+ # :nodoc:
324
+ # @api private
325
+ def as_json(options = nil)
326
+ {name: name} # 5336
327
+ end
307
328
  end
308
329
 
309
330
  ##
310
- # Encapsulates a pending job within a Sidekiq queue or
311
- # sorted set.
331
+ # Represents a pending job within a Sidekiq queue.
312
332
  #
313
333
  # The job should be considered immutable but may be
314
- # removed from the queue via Job#delete.
315
- #
316
- class Job
334
+ # removed from the queue via JobRecord#delete.
335
+ class JobRecord
336
+ # the parsed Hash of job data
337
+ # @!attribute [r] Item
317
338
  attr_reader :item
339
+ # the underlying String in Redis
340
+ # @!attribute [r] Value
318
341
  attr_reader :value
342
+ # the queue associated with this job
343
+ # @!attribute [r] Queue
344
+ attr_reader :queue
319
345
 
320
- def initialize(item, queue_name=nil)
346
+ # :nodoc:
347
+ # @api private
348
+ def initialize(item, queue_name = nil)
321
349
  @args = nil
322
350
  @value = item
323
351
  @item = item.is_a?(Hash) ? item : parse(item)
324
- @queue = queue_name || @item['queue']
352
+ @queue = queue_name || @item["queue"]
325
353
  end
326
354
 
355
+ # :nodoc:
356
+ # @api private
327
357
  def parse(item)
328
358
  Sidekiq.load_json(item)
329
359
  rescue JSON::ParserError
@@ -335,88 +365,99 @@ module Sidekiq
335
365
  {}
336
366
  end
337
367
 
368
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
369
+ # this class will be the ActiveJob adapter class rather than a specific job.
338
370
  def klass
339
- self['class']
371
+ self["class"]
340
372
  end
341
373
 
342
374
  def display_class
343
375
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
344
- @klass ||= case klass
345
- when /\ASidekiq::Extensions::Delayed/
346
- safe_load(args[0], klass) do |target, method, _|
347
- "#{target}.#{method}"
348
- end
349
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
350
- job_class = @item['wrapped'] || args[0]
351
- if 'ActionMailer::DeliveryJob' == job_class
352
- # MailerClass#mailer_method
353
- args[0]['arguments'][0..1].join('#')
354
- else
355
- job_class
356
- end
357
- else
358
- klass
359
- end
376
+ @klass ||= self["display_class"] || begin
377
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
378
+ job_class = @item["wrapped"] || args[0]
379
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
380
+ # MailerClass#mailer_method
381
+ args[0]["arguments"][0..1].join("#")
382
+ else
383
+ job_class
384
+ end
385
+ else
386
+ klass
387
+ end
388
+ end
360
389
  end
361
390
 
362
391
  def display_args
363
392
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
364
- @display_args ||= case klass
365
- when /\ASidekiq::Extensions::Delayed/
366
- safe_load(args[0], args) do |_, _, arg|
367
- arg
368
- end
369
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
370
- job_args = self['wrapped'] ? args[0]["arguments"] : []
371
- if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
372
- # remove MailerClass, mailer_method and 'deliver_now'
373
- job_args.drop(3)
374
- else
375
- job_args
376
- end
377
- else
378
- if self['encrypt']
379
- # no point in showing 150+ bytes of random garbage
380
- args[-1] = '[encrypted data]'
381
- end
382
- args
383
- end
393
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
394
+ job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
395
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
396
+ # remove MailerClass, mailer_method and 'deliver_now'
397
+ job_args.drop(3)
398
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
399
+ # remove MailerClass, mailer_method and 'deliver_now'
400
+ job_args.drop(3).first.values_at("params", "args")
401
+ else
402
+ job_args
403
+ end
404
+ else
405
+ if self["encrypt"]
406
+ # no point in showing 150+ bytes of random garbage
407
+ args[-1] = "[encrypted data]"
408
+ end
409
+ args
410
+ end
384
411
  end
385
412
 
386
413
  def args
387
- @args || @item['args']
414
+ @args || @item["args"]
388
415
  end
389
416
 
390
417
  def jid
391
- self['jid']
418
+ self["jid"]
419
+ end
420
+
421
+ def bid
422
+ self["bid"]
392
423
  end
393
424
 
394
425
  def enqueued_at
395
- self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
426
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
396
427
  end
397
428
 
398
429
  def created_at
399
- Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
430
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
400
431
  end
401
432
 
402
- def queue
403
- @queue
433
+ def tags
434
+ self["tags"] || []
435
+ end
436
+
437
+ def error_backtrace
438
+ # Cache nil values
439
+ if defined?(@error_backtrace)
440
+ @error_backtrace
441
+ else
442
+ value = self["error_backtrace"]
443
+ @error_backtrace = value && uncompress_backtrace(value)
444
+ end
404
445
  end
405
446
 
406
447
  def latency
407
448
  now = Time.now.to_f
408
- now - (@item['enqueued_at'] || @item['created_at'] || now)
449
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
409
450
  end
410
451
 
411
- ##
412
- # Remove this job from the queue.
452
+ # Remove this job from the queue
413
453
  def delete
414
- count = Sidekiq.redis do |conn|
454
+ count = Sidekiq.redis { |conn|
415
455
  conn.lrem("queue:#{@queue}", 1, @value)
416
- end
456
+ }
417
457
  count != 0
418
458
  end
419
459
 
460
+ # Access arbitrary attributes within the job hash
420
461
  def [](name)
421
462
  # nil will happen if the JSON fails to parse.
422
463
  # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -426,32 +467,58 @@ module Sidekiq
426
467
 
427
468
  private
428
469
 
429
- def safe_load(content, default)
430
- begin
431
- yield(*YAML.load(content))
432
- rescue => ex
433
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
434
- # memory yet so the YAML can't be loaded.
435
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
436
- default
470
+ ACTIVE_JOB_PREFIX = "_aj_"
471
+ GLOBALID_KEY = "_aj_globalid"
472
+
473
+ def deserialize_argument(argument)
474
+ case argument
475
+ when Array
476
+ argument.map { |arg| deserialize_argument(arg) }
477
+ when Hash
478
+ if serialized_global_id?(argument)
479
+ argument[GLOBALID_KEY]
480
+ else
481
+ argument.transform_values { |v| deserialize_argument(v) }
482
+ .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
483
+ end
484
+ else
485
+ argument
437
486
  end
438
487
  end
488
+
489
+ def serialized_global_id?(hash)
490
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
491
+ end
492
+
493
+ def uncompress_backtrace(backtrace)
494
+ decoded = Base64.decode64(backtrace)
495
+ uncompressed = Zlib::Inflate.inflate(decoded)
496
+ Sidekiq.load_json(uncompressed)
497
+ end
439
498
  end
440
499
 
441
- class SortedEntry < Job
500
+ # Represents a job within a Redis sorted set where the score
501
+ # represents a timestamp associated with the job. This timestamp
502
+ # could be the scheduled time for it to run (e.g. scheduled set),
503
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
504
+ class SortedEntry < JobRecord
442
505
  attr_reader :score
443
506
  attr_reader :parent
444
507
 
508
+ # :nodoc:
509
+ # @api private
445
510
  def initialize(parent, score, item)
446
511
  super(item)
447
- @score = score
512
+ @score = Float(score)
448
513
  @parent = parent
449
514
  end
450
515
 
516
+ # The timestamp associated with this entry
451
517
  def at
452
518
  Time.at(score).utc
453
519
  end
454
520
 
521
+ # remove this entry from the sorted set
455
522
  def delete
456
523
  if @value
457
524
  @parent.delete_by_value(@parent.name, @value)
@@ -460,11 +527,17 @@ module Sidekiq
460
527
  end
461
528
  end
462
529
 
530
+ # Change the scheduled time for this job.
531
+ #
532
+ # @param at [Time] the new timestamp for this job
463
533
  def reschedule(at)
464
- delete
465
- @parent.schedule(at, item)
534
+ Sidekiq.redis do |conn|
535
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
536
+ end
466
537
  end
467
538
 
539
+ # Enqueue this job from the scheduled or dead set so it will
540
+ # be executed at some point in the near future.
468
541
  def add_to_queue
469
542
  remove_job do |message|
470
543
  msg = Sidekiq.load_json(message)
@@ -472,16 +545,17 @@ module Sidekiq
472
545
  end
473
546
  end
474
547
 
548
+ # enqueue this job from the retry set so it will be executed
549
+ # at some point in the near future.
475
550
  def retry
476
551
  remove_job do |message|
477
552
  msg = Sidekiq.load_json(message)
478
- msg['retry_count'] -= 1 if msg['retry_count']
553
+ msg["retry_count"] -= 1 if msg["retry_count"]
479
554
  Sidekiq::Client.push(msg)
480
555
  end
481
556
  end
482
557
 
483
- ##
484
- # Place job in the dead set
558
+ # Move this job from its current set into the Dead set.
485
559
  def kill
486
560
  remove_job do |message|
487
561
  DeadSet.new.kill(message)
@@ -489,74 +563,109 @@ module Sidekiq
489
563
  end
490
564
 
491
565
  def error?
492
- !!item['error_class']
566
+ !!item["error_class"]
493
567
  end
494
568
 
495
569
  private
496
570
 
497
571
  def remove_job
498
572
  Sidekiq.redis do |conn|
499
- results = conn.multi do
500
- conn.zrangebyscore(parent.name, score, score)
501
- conn.zremrangebyscore(parent.name, score, score)
502
- end.first
573
+ results = conn.multi { |transaction|
574
+ transaction.zrange(parent.name, score, score, "BYSCORE")
575
+ transaction.zremrangebyscore(parent.name, score, score)
576
+ }.first
503
577
 
504
578
  if results.size == 1
505
579
  yield results.first
506
580
  else
507
581
  # multiple jobs with the same score
508
582
  # find the one with the right JID and push it
509
- hash = results.group_by do |message|
583
+ matched, nonmatched = results.partition { |message|
510
584
  if message.index(jid)
511
585
  msg = Sidekiq.load_json(message)
512
- msg['jid'] == jid
586
+ msg["jid"] == jid
513
587
  else
514
588
  false
515
589
  end
516
- end
590
+ }
517
591
 
518
- msg = hash.fetch(true, []).first
592
+ msg = matched.first
519
593
  yield msg if msg
520
594
 
521
595
  # push the rest back onto the sorted set
522
- conn.multi do
523
- hash.fetch(false, []).each do |message|
524
- conn.zadd(parent.name, score.to_f.to_s, message)
596
+ conn.multi do |transaction|
597
+ nonmatched.each do |message|
598
+ transaction.zadd(parent.name, score.to_f.to_s, message)
525
599
  end
526
600
  end
527
601
  end
528
602
  end
529
603
  end
530
-
531
604
  end
532
605
 
606
+ # Base class for all sorted sets within Sidekiq.
533
607
  class SortedSet
534
608
  include Enumerable
535
609
 
610
+ # Redis key of the set
611
+ # @!attribute [r] Name
536
612
  attr_reader :name
537
613
 
614
+ # :nodoc:
615
+ # @api private
538
616
  def initialize(name)
539
617
  @name = name
540
618
  @_size = size
541
619
  end
542
620
 
621
+ # real-time size of the set, will change
543
622
  def size
544
623
  Sidekiq.redis { |c| c.zcard(name) }
545
624
  end
546
625
 
626
+ # Scan through each element of the sorted set, yielding each to the supplied block.
627
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
628
+ #
629
+ # @param match [String] a snippet or regexp to filter matches.
630
+ # @param count [Integer] number of elements to retrieve at a time, default 100
631
+ # @yieldparam [Sidekiq::SortedEntry] each entry
632
+ def scan(match, count = 100)
633
+ return to_enum(:scan, match, count) unless block_given?
634
+
635
+ match = "*#{match}*" unless match.include?("*")
636
+ Sidekiq.redis do |conn|
637
+ conn.zscan(name, match: match, count: count) do |entry, score|
638
+ yield SortedEntry.new(self, score, entry)
639
+ end
640
+ end
641
+ end
642
+
643
+ # @return [Boolean] always true
547
644
  def clear
548
645
  Sidekiq.redis do |conn|
549
- conn.del(name)
646
+ conn.unlink(name)
550
647
  end
648
+ true
551
649
  end
552
650
  alias_method :💣, :clear
651
+
652
+ # :nodoc:
653
+ # @api private
654
+ def as_json(options = nil)
655
+ {name: name} # 5336
656
+ end
553
657
  end
554
658
 
659
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
660
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
661
+ # e.g. Batches.
555
662
  class JobSet < SortedSet
556
-
557
- def schedule(timestamp, message)
663
+ # Add a job with the associated timestamp to this set.
664
+ # @param timestamp [Time] the score for the job
665
+ # @param job [Hash] the job data
666
+ def schedule(timestamp, job)
558
667
  Sidekiq.redis do |conn|
559
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
668
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
560
669
  end
561
670
  end
562
671
 
@@ -566,46 +675,66 @@ module Sidekiq
566
675
  page = -1
567
676
  page_size = 50
568
677
 
569
- while true do
678
+ loop do
570
679
  range_start = page * page_size + offset_size
571
- range_end = range_start + page_size - 1
572
- elements = Sidekiq.redis do |conn|
573
- conn.zrange name, range_start, range_end, with_scores: true
574
- end
680
+ range_end = range_start + page_size - 1
681
+ elements = Sidekiq.redis { |conn|
682
+ conn.zrange name, range_start, range_end, "withscores"
683
+ }
575
684
  break if elements.empty?
576
685
  page -= 1
577
- elements.reverse.each do |element, score|
686
+ elements.reverse_each do |element, score|
578
687
  yield SortedEntry.new(self, score, element)
579
688
  end
580
689
  offset_size = initial_size - @_size
581
690
  end
582
691
  end
583
692
 
##
# Fetch jobs that match a given time or Range. Job ID is an
# optional second argument.
#
# @param score [Time,Range] a specific timestamp or range
# @param jid [String, optional] find a specific JID within the score
# @return [Array<SortedEntry>] any results found, can be empty
def fetch(score, jid = nil)
  # A bare timestamp is treated as the degenerate range [score, score].
  lo, hi = score.is_a?(Range) ? [score.first, score.last] : [score, score]

  rows = Sidekiq.redis { |conn|
    conn.zrange(name, lo, hi, "BYSCORE", "withscores")
  }

  rows.each_with_object([]) do |(payload, job_score), found|
    entry = SortedEntry.new(self, job_score, payload)
    found << entry if jid.nil? || entry.jid == jid
  end
end
599
718
 
##
# Find the job with the given JID within this sorted set.
# *This is a slow O(n) operation*. Do not use for app logic.
#
# @param jid [String] the job identifier
# @return [SortedEntry] the record or nil
def find_job(jid)
  Sidekiq.redis do |conn|
    # MATCH narrows the scan server-side; verify the jid client-side to
    # rule out substring false positives.
    conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
      job = Sidekiq.load_json(entry)
      return SortedEntry.new(self, score, entry) if job["jid"] == jid
    end
  end
  nil
end
608
735
 
736
+ # :nodoc:
737
+ # @api private
609
738
  def delete_by_value(name, value)
610
739
  Sidekiq.redis do |conn|
611
740
  ret = conn.zrem(name, value)
@@ -614,17 +743,20 @@ module Sidekiq
614
743
  end
615
744
  end
616
745
 
# :nodoc:
# @api private
def delete_by_jid(score, jid)
  Sidekiq.redis do |conn|
    candidates = conn.zrange(name, score, score, "BYSCORE")
    candidates.each do |payload|
      # Cheap substring check first; only parse JSON when it might match.
      next unless payload.index(jid)

      job = Sidekiq.load_json(payload)
      if job["jid"] == jid
        removed = conn.zrem(name, payload)
        @_size -= 1 if removed
        break removed
      end
    end
  end
end
@@ -633,68 +765,62 @@ module Sidekiq
633
765
  end
634
766
 
##
# The set of scheduled jobs within Sidekiq.
# Based on this, you can search/filter for jobs. Here's an
# example where I'm selecting jobs based on some complex logic
# and deleting them from the scheduled set.
#
# See the API wiki page for usage notes and examples.
#
class ScheduledSet < JobSet
  def initialize
    super("schedule")
  end
end
652
780
 
##
# The set of retries within Sidekiq.
# Based on this, you can search/filter for jobs. Here's an
# example where I'm selecting all jobs of a certain type
# and deleting them from the retry queue.
#
# See the API wiki page for usage notes and examples.
#
class RetrySet < JobSet
  def initialize
    super("retry")
  end

  # Enqueues all jobs pending within the retry set.
  def retry_all
    # Sweep until the set drains; each retried job leaves the set.
    each(&:retry) until size == 0
  end

  # Kills all jobs pending within the retry set.
  def kill_all
    each(&:kill) until size == 0
  end
end
682
804
 
683
805
  ##
684
- # Allows enumeration of dead jobs within Sidekiq.
806
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
807
+ # their retries and are held in this set pending some sort of manual
808
+ # fix. They will be removed after 6 months (dead_timeout) if not.
685
809
  #
686
810
  class DeadSet < JobSet
687
811
  def initialize
688
- super 'dead'
812
+ super "dead"
689
813
  end
690
814
 
691
- def kill(message, opts={})
815
+ # Add the given job to the Dead set.
816
+ # @param message [String] the job data as JSON
817
+ def kill(message, opts = {})
692
818
  now = Time.now.to_f
693
819
  Sidekiq.redis do |conn|
694
- conn.multi do
695
- conn.zadd(name, now.to_s, message)
696
- conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
697
- conn.zremrangebyrank(name, 0, - self.class.max_jobs)
820
+ conn.multi do |transaction|
821
+ transaction.zadd(name, now.to_s, message)
822
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
823
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
698
824
  end
699
825
  end
700
826
 
@@ -702,110 +828,143 @@ module Sidekiq
702
828
  job = Sidekiq.load_json(message)
703
829
  r = RuntimeError.new("Job killed by API")
704
830
  r.set_backtrace(caller)
705
- Sidekiq.death_handlers.each do |handle|
831
+ Sidekiq.default_configuration.death_handlers.each do |handle|
706
832
  handle.call(job, r)
707
833
  end
708
834
  end
709
835
  true
710
836
  end
711
837
 
# Enqueue all dead jobs
def retry_all
  # Sweep until the set is empty; each retried job leaves the set.
  each(&:retry) until size == 0
end
725
842
  end
726
843
 
727
844
  ##
728
845
  # Enumerates the set of Sidekiq processes which are actively working
729
- # right now. Each process send a heartbeat to Redis every 5 seconds
846
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
730
847
  # so this set should be relatively accurate, barring network partitions.
731
848
  #
732
- # Yields a Sidekiq::Process.
849
+ # @yieldparam [Sidekiq::Process]
733
850
  #
734
851
  class ProcessSet
735
852
  include Enumerable
736
- include RedisScanner
737
853
 
# Fetch a single registered process by its identity string.
# @return [Sidekiq::Process, nil] nil when the identity is unknown
def self.[](identity)
  exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
    conn.multi { |transaction|
      transaction.sismember("processes", identity)
      transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
    }
  }

  return nil if exists == 0 || info.nil?

  attrs = Sidekiq.load_json(info)
  Process.new(attrs.merge("busy" => busy.to_i,
    "beat" => beat.to_f,
    "quiet" => quiet,
    "rss" => rss.to_i,
    "rtt_us" => rtt_us.to_i))
end
871
+
# :nodoc:
# @api private
# @param clean_plz [Boolean] prune dead process records on instantiation
def initialize(clean_plz = true)
  cleanup if clean_plz
end
741
877
 
# Cleans up dead processes recorded in Redis.
# Returns the number of processes cleaned.
# :nodoc:
# @api private
def cleanup
  # don't run cleanup more than once per minute
  return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", "NX", "EX", "60") }

  count = 0
  Sidekiq.redis do |conn|
    procs = conn.sscan("processes").to_a
    heartbeats = conn.pipelined { |pipeline|
      procs.each do |key|
        pipeline.hget(key, "info")
      end
    }

    # the hash named key has an expiry of 60 seconds.
    # if it's not found, that means the process has not reported
    # in to Redis and probably died.
    # NOTE: block variable renamed from `proc` to avoid shadowing Kernel#proc.
    to_prune = procs.select.with_index { |pkey, i|
      heartbeats[i].nil?
    }
    count = conn.srem("processes", to_prune) unless to_prune.empty?
  end
  count
end
765
905
 
def each
  rows = Sidekiq.redis { |conn|
    procs = conn.sscan("processes").to_a.sort

    # We're making a tradeoff here between consuming more memory instead of
    # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
    # you'll be happier this way
    conn.pipelined do |pipeline|
      procs.each do |key|
        pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
      end
    end
  }

  rows.each do |info, busy, beat, quiet, rss, rtt_us|
    # If a process is stopped between when we query Redis for `procs` and
    # when we query for `result`, we will have an item in `result` that is
    # composed of `nil` values.
    next if info.nil?

    yield Process.new(Sidekiq.load_json(info).merge(
      "busy" => busy.to_i,
      "beat" => beat.to_f,
      "quiet" => quiet,
      "rss" => rss.to_i,
      "rtt_us" => rtt_us.to_i
    ))
  end
end
792
934
 
# This method is not guaranteed accurate since it does not prune the set
# based on current heartbeat. #each does that and ensures the set only
# contains Sidekiq processes which have sent a heartbeat within the last
# 60 seconds.
# @return [Integer] current number of registered Sidekiq processes
def size
  Sidekiq.redis { |c| c.scard("processes") }
end
800
943
 
# Total number of threads available to execute jobs.
# For Sidekiq Enterprise customers this number (in production) must be
# less than or equal to your licensed concurrency.
# @return [Integer] the sum of process concurrency
def total_concurrency
  sum { |process| process["concurrency"].to_i }
end
951
+
# @return [Integer] total amount of RSS memory consumed by Sidekiq processes
def total_rss_in_kb
  sum { |process| process["rss"].to_i }
end
alias_method :total_rss, :total_rss_in_kb
957
+
# Returns the identity of the current cluster leader or "" if no leader.
# This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
# or Sidekiq Pro.
# @return [String] Identity of cluster leader
# @return [String] empty string if no leader
def leader
  # fall back to "" (a non-falsy value) so the result can be memoized
  @leader ||= Sidekiq.redis { |c| c.get("dear-leader") } || ""
end
@@ -825,18 +984,21 @@ module Sidekiq
825
984
  # 'busy' => 10,
826
985
  # 'beat' => <last heartbeat>,
827
986
  # 'identity' => <unique string identifying the process>,
987
+ # 'embedded' => true,
828
988
  # }
829
989
  class Process
990
# :nodoc:
# @api private
# @param hash [Hash] heartbeat attributes for this process
def initialize(hash)
  @attribs = hash
end
833
995
 
# @return [String, nil] the "tag" attribute from the heartbeat hash
def tag
  self["tag"]
end
837
999
 
# @return [Array] the "labels" attribute, [] when absent
def labels
  self["labels"].to_a
end
841
1003
 
842
1004
  def [](key)
@@ -844,23 +1006,56 @@ module Sidekiq
844
1006
  end
845
1007
 
# @return [String] unique identity string for this process
def identity
  self["identity"]
end
849
1011
 
# @return the "queues" attribute from the heartbeat hash
def queues
  self["queues"]
end

# @return the "weights" attribute from the heartbeat hash
def weights
  self["weights"]
end

# @return [String] the "version" attribute from the heartbeat hash
def version
  self["version"]
end

# @return [Boolean] the "embedded" attribute from the heartbeat hash
def embedded?
  self["embedded"]
end
1027
+
# Signal this process to stop processing new jobs.
# It will continue to execute jobs it has already fetched.
# This method is *asynchronous* and it can take 5-10
# seconds for the process to quiet.
def quiet!
  raise "Can't quiet an embedded process" if embedded?

  signal("TSTP")
end
853
1037
 
# Signal this process to shutdown.
# It will shutdown within its configured :timeout value, default 25 seconds.
# This method is *asynchronous* and it can take 5-10
# seconds for the process to start shutting down.
def stop!
  raise "Can't stop an embedded process" if embedded?

  signal("TERM")
end
857
1047
 
# Signal this process to log backtraces for all threads.
# Useful if you have a frozen or deadlocked process which is
# still sending a heartbeat.
# This method is *asynchronous* and it can take 5-10 seconds.
def dump_threads
  signal("TTIN")
end
861
1055
 
# @return [Boolean] true if this process is quiet or shutting down
def stopping?
  self["quiet"] == "true"
end
865
1060
 
866
1061
  private
@@ -868,18 +1063,17 @@ module Sidekiq
# Push a signal name onto this process's Redis signal list; the entry
# expires after 60 seconds if not consumed.
def signal(sig)
  key = "#{identity}-signals"
  Sidekiq.redis do |c|
    c.multi do |transaction|
      transaction.lpush(key, sig)
      transaction.expire(key, 60)
    end
  end
end
877
-
878
1072
  end
879
1073
 
880
1074
  ##
881
- # A worker is a thread that is currently processing a job.
882
- # Programmatic access to the current active worker set.
1075
+ # The WorkSet stores the work being done by this Sidekiq cluster.
1076
+ # It tracks the process and thread working on each job.
883
1077
  #
884
1078
  # WARNING WARNING WARNING
885
1079
  #
@@ -887,34 +1081,40 @@ module Sidekiq
887
1081
  # If you call #size => 5 and then expect #each to be
888
1082
  # called 5 times, you're going to have a bad time.
889
1083
  #
890
- # workers = Sidekiq::Workers.new
891
- # workers.size => 2
892
- # workers.each do |process_id, thread_id, work|
1084
+ # works = Sidekiq::WorkSet.new
1085
+ # works.size => 2
1086
+ # works.each do |process_id, thread_id, work|
893
1087
  # # process_id is a unique identifier per Sidekiq process
894
1088
  # # thread_id is a unique identifier per thread
895
1089
  # # work is a Hash which looks like:
896
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
1090
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
897
1091
  # # run_at is an epoch Integer.
898
1092
  # end
899
1093
  #
900
- class Workers
1094
+ class WorkSet
901
1095
  include Enumerable
902
- include RedisScanner
903
1096
 
def each(&block)
  process_keys = nil
  work_hashes = nil

  Sidekiq.redis do |conn|
    process_keys = conn.sscan("processes").to_a.sort
    work_hashes = conn.pipelined do |pipeline|
      process_keys.each { |key| pipeline.hgetall("#{key}:work") }
    end
  end

  entries = process_keys.zip(work_hashes).flat_map { |key, works|
    works.filter_map { |tid, json| [key, tid, Sidekiq.load_json(json)] unless json.empty? }
  }

  # Present units of work in the order they started.
  entries.sort_by { |(_, _, payload)| payload["run_at"] }.each(&block)
end
919
1119
 
# Note that #size is only as accurate as Sidekiq's heartbeat,
# which can easily get out of sync with crashy processes.
def size
  Sidekiq.redis do |conn|
    procs = conn.sscan("processes").to_a
    return 0 if procs.empty?

    conn.pipelined { |pipeline|
      procs.each { |key| pipeline.hget(key, "busy") }
    }.sum(&:to_i)
  end
end
940
1140
  end
941
-
# Since "worker" is a nebulous term, we've deprecated the use of this class name.
# Is "worker" a process, a type of job, a thread? Undefined!
# WorkSet better describes the data.
Workers = WorkSet
942
1145
  end