sidekiq 4.2.10 → 7.3.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159) hide show
  1. checksums.yaml +5 -5
  2. data/Changes.md +932 -7
  3. data/LICENSE.txt +9 -0
  4. data/README.md +49 -50
  5. data/bin/multi_queue_bench +271 -0
  6. data/bin/sidekiq +22 -3
  7. data/bin/sidekiqload +218 -116
  8. data/bin/sidekiqmon +11 -0
  9. data/lib/active_job/queue_adapters/sidekiq_adapter.rb +75 -0
  10. data/lib/generators/sidekiq/job_generator.rb +59 -0
  11. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  12. data/lib/generators/sidekiq/templates/job_spec.rb.erb +6 -0
  13. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  14. data/lib/sidekiq/api.rb +710 -322
  15. data/lib/sidekiq/capsule.rb +132 -0
  16. data/lib/sidekiq/cli.rb +268 -248
  17. data/lib/sidekiq/client.rb +153 -101
  18. data/lib/sidekiq/component.rb +90 -0
  19. data/lib/sidekiq/config.rb +311 -0
  20. data/lib/sidekiq/deploy.rb +64 -0
  21. data/lib/sidekiq/embedded.rb +63 -0
  22. data/lib/sidekiq/fetch.rb +50 -42
  23. data/lib/sidekiq/iterable_job.rb +55 -0
  24. data/lib/sidekiq/job/interrupt_handler.rb +24 -0
  25. data/lib/sidekiq/job/iterable/active_record_enumerator.rb +53 -0
  26. data/lib/sidekiq/job/iterable/csv_enumerator.rb +47 -0
  27. data/lib/sidekiq/job/iterable/enumerators.rb +135 -0
  28. data/lib/sidekiq/job/iterable.rb +294 -0
  29. data/lib/sidekiq/job.rb +385 -0
  30. data/lib/sidekiq/job_logger.rb +52 -0
  31. data/lib/sidekiq/job_retry.rb +305 -0
  32. data/lib/sidekiq/job_util.rb +109 -0
  33. data/lib/sidekiq/launcher.rb +208 -108
  34. data/lib/sidekiq/logger.rb +131 -0
  35. data/lib/sidekiq/manager.rb +43 -47
  36. data/lib/sidekiq/metrics/query.rb +158 -0
  37. data/lib/sidekiq/metrics/shared.rb +106 -0
  38. data/lib/sidekiq/metrics/tracking.rb +148 -0
  39. data/lib/sidekiq/middleware/chain.rb +113 -56
  40. data/lib/sidekiq/middleware/current_attributes.rb +128 -0
  41. data/lib/sidekiq/middleware/i18n.rb +9 -7
  42. data/lib/sidekiq/middleware/modules.rb +23 -0
  43. data/lib/sidekiq/monitor.rb +147 -0
  44. data/lib/sidekiq/paginator.rb +33 -15
  45. data/lib/sidekiq/processor.rb +188 -98
  46. data/lib/sidekiq/rails.rb +53 -92
  47. data/lib/sidekiq/redis_client_adapter.rb +114 -0
  48. data/lib/sidekiq/redis_connection.rb +86 -77
  49. data/lib/sidekiq/ring_buffer.rb +32 -0
  50. data/lib/sidekiq/scheduled.rb +140 -51
  51. data/lib/sidekiq/sd_notify.rb +149 -0
  52. data/lib/sidekiq/systemd.rb +26 -0
  53. data/lib/sidekiq/testing/inline.rb +6 -5
  54. data/lib/sidekiq/testing.rb +95 -85
  55. data/lib/sidekiq/transaction_aware_client.rb +59 -0
  56. data/lib/sidekiq/version.rb +7 -1
  57. data/lib/sidekiq/web/action.rb +40 -18
  58. data/lib/sidekiq/web/application.rb +189 -89
  59. data/lib/sidekiq/web/csrf_protection.rb +183 -0
  60. data/lib/sidekiq/web/helpers.rb +239 -101
  61. data/lib/sidekiq/web/router.rb +28 -21
  62. data/lib/sidekiq/web.rb +123 -110
  63. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  64. data/lib/sidekiq.rb +97 -185
  65. data/sidekiq.gemspec +26 -27
  66. data/web/assets/images/apple-touch-icon.png +0 -0
  67. data/web/assets/javascripts/application.js +157 -61
  68. data/web/assets/javascripts/base-charts.js +106 -0
  69. data/web/assets/javascripts/chart.min.js +13 -0
  70. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  71. data/web/assets/javascripts/dashboard-charts.js +194 -0
  72. data/web/assets/javascripts/dashboard.js +43 -280
  73. data/web/assets/javascripts/metrics.js +298 -0
  74. data/web/assets/stylesheets/application-dark.css +147 -0
  75. data/web/assets/stylesheets/application-rtl.css +163 -0
  76. data/web/assets/stylesheets/application.css +176 -196
  77. data/web/assets/stylesheets/bootstrap-rtl.min.css +9 -0
  78. data/web/assets/stylesheets/bootstrap.css +2 -2
  79. data/web/locales/ar.yml +87 -0
  80. data/web/locales/cs.yml +62 -62
  81. data/web/locales/da.yml +60 -53
  82. data/web/locales/de.yml +65 -53
  83. data/web/locales/el.yml +43 -24
  84. data/web/locales/en.yml +88 -64
  85. data/web/locales/es.yml +70 -53
  86. data/web/locales/fa.yml +65 -64
  87. data/web/locales/fr.yml +82 -62
  88. data/web/locales/gd.yml +98 -0
  89. data/web/locales/he.yml +80 -0
  90. data/web/locales/hi.yml +59 -59
  91. data/web/locales/it.yml +85 -54
  92. data/web/locales/ja.yml +74 -62
  93. data/web/locales/ko.yml +52 -52
  94. data/web/locales/lt.yml +83 -0
  95. data/web/locales/nb.yml +61 -61
  96. data/web/locales/nl.yml +52 -52
  97. data/web/locales/pl.yml +45 -45
  98. data/web/locales/pt-br.yml +82 -55
  99. data/web/locales/pt.yml +51 -51
  100. data/web/locales/ru.yml +68 -63
  101. data/web/locales/sv.yml +53 -53
  102. data/web/locales/ta.yml +60 -60
  103. data/web/locales/tr.yml +100 -0
  104. data/web/locales/uk.yml +85 -61
  105. data/web/locales/ur.yml +80 -0
  106. data/web/locales/vi.yml +83 -0
  107. data/web/locales/zh-cn.yml +42 -16
  108. data/web/locales/zh-tw.yml +41 -8
  109. data/web/views/_footer.erb +20 -3
  110. data/web/views/_job_info.erb +21 -4
  111. data/web/views/_metrics_period_select.erb +12 -0
  112. data/web/views/_nav.erb +5 -19
  113. data/web/views/_paging.erb +3 -1
  114. data/web/views/_poll_link.erb +3 -6
  115. data/web/views/_summary.erb +7 -7
  116. data/web/views/busy.erb +85 -31
  117. data/web/views/dashboard.erb +53 -20
  118. data/web/views/dead.erb +3 -3
  119. data/web/views/filtering.erb +6 -0
  120. data/web/views/layout.erb +17 -6
  121. data/web/views/metrics.erb +90 -0
  122. data/web/views/metrics_for_job.erb +59 -0
  123. data/web/views/morgue.erb +15 -16
  124. data/web/views/queue.erb +35 -25
  125. data/web/views/queues.erb +20 -4
  126. data/web/views/retries.erb +19 -16
  127. data/web/views/retry.erb +3 -3
  128. data/web/views/scheduled.erb +19 -17
  129. metadata +103 -194
  130. data/.github/contributing.md +0 -32
  131. data/.github/issue_template.md +0 -9
  132. data/.gitignore +0 -12
  133. data/.travis.yml +0 -18
  134. data/3.0-Upgrade.md +0 -70
  135. data/4.0-Upgrade.md +0 -53
  136. data/COMM-LICENSE +0 -95
  137. data/Ent-Changes.md +0 -173
  138. data/Gemfile +0 -29
  139. data/LICENSE +0 -9
  140. data/Pro-2.0-Upgrade.md +0 -138
  141. data/Pro-3.0-Upgrade.md +0 -44
  142. data/Pro-Changes.md +0 -628
  143. data/Rakefile +0 -12
  144. data/bin/sidekiqctl +0 -99
  145. data/code_of_conduct.md +0 -50
  146. data/lib/generators/sidekiq/templates/worker_spec.rb.erb +0 -6
  147. data/lib/generators/sidekiq/worker_generator.rb +0 -49
  148. data/lib/sidekiq/core_ext.rb +0 -119
  149. data/lib/sidekiq/exception_handler.rb +0 -31
  150. data/lib/sidekiq/extensions/action_mailer.rb +0 -57
  151. data/lib/sidekiq/extensions/active_record.rb +0 -40
  152. data/lib/sidekiq/extensions/class_methods.rb +0 -40
  153. data/lib/sidekiq/extensions/generic_proxy.rb +0 -25
  154. data/lib/sidekiq/logging.rb +0 -106
  155. data/lib/sidekiq/middleware/server/active_record.rb +0 -13
  156. data/lib/sidekiq/middleware/server/logging.rb +0 -31
  157. data/lib/sidekiq/middleware/server/retry_jobs.rb +0 -205
  158. data/lib/sidekiq/util.rb +0 -63
  159. data/lib/sidekiq/worker.rb +0 -121
data/lib/sidekiq/api.rb CHANGED
@@ -1,11 +1,32 @@
1
- # encoding: utf-8
2
1
  # frozen_string_literal: true
3
- require 'sidekiq'
2
+
3
+ require "sidekiq"
4
+
5
+ require "zlib"
6
+ require "set"
7
+
8
+ require "sidekiq/metrics/query"
9
+
10
+ #
11
+ # Sidekiq's Data API provides a Ruby object model on top
12
+ # of Sidekiq's runtime data in Redis. This API should never
13
+ # be used within application code for business logic.
14
+ #
15
+ # The Sidekiq server process never uses this API: all data
16
+ # manipulation is done directly for performance reasons to
17
+ # ensure we are using Redis as efficiently as possible at
18
+ # every callsite.
19
+ #
4
20
 
5
21
  module Sidekiq
22
+ # Retrieve runtime statistics from Redis regarding
23
+ # this Sidekiq cluster.
24
+ #
25
+ # stat = Sidekiq::Stats.new
26
+ # stat.processed
6
27
  class Stats
7
28
  def initialize
8
- fetch_stats!
29
+ fetch_stats_fast!
9
30
  end
10
31
 
11
32
  def processed
@@ -45,59 +66,96 @@ module Sidekiq
45
66
  end
46
67
 
47
68
  def queues
48
- Sidekiq::Stats::Queues.new.lengths
69
+ Sidekiq.redis do |conn|
70
+ queues = conn.sscan("queues").to_a
71
+
72
+ lengths = conn.pipelined { |pipeline|
73
+ queues.each do |queue|
74
+ pipeline.llen("queue:#{queue}")
75
+ end
76
+ }
77
+
78
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
79
+ array_of_arrays.to_h
80
+ end
49
81
  end
50
82
 
51
- def fetch_stats!
52
- pipe1_res = Sidekiq.redis do |conn|
53
- conn.pipelined do
54
- conn.get('stat:processed'.freeze)
55
- conn.get('stat:failed'.freeze)
56
- conn.zcard('schedule'.freeze)
57
- conn.zcard('retry'.freeze)
58
- conn.zcard('dead'.freeze)
59
- conn.scard('processes'.freeze)
60
- conn.lrange('queue:default'.freeze, -1, -1)
61
- conn.smembers('processes'.freeze)
62
- conn.smembers('queues'.freeze)
83
+ # O(1) redis calls
84
+ # @api private
85
+ def fetch_stats_fast!
86
+ pipe1_res = Sidekiq.redis { |conn|
87
+ conn.pipelined do |pipeline|
88
+ pipeline.get("stat:processed")
89
+ pipeline.get("stat:failed")
90
+ pipeline.zcard("schedule")
91
+ pipeline.zcard("retry")
92
+ pipeline.zcard("dead")
93
+ pipeline.scard("processes")
94
+ pipeline.lindex("queue:default", -1)
63
95
  end
64
- end
96
+ }
65
97
 
66
- pipe2_res = Sidekiq.redis do |conn|
67
- conn.pipelined do
68
- pipe1_res[7].each {|key| conn.hget(key, 'busy'.freeze) }
69
- pipe1_res[8].each {|queue| conn.llen("queue:#{queue}") }
98
+ default_queue_latency = if (entry = pipe1_res[6])
99
+ job = begin
100
+ Sidekiq.load_json(entry)
101
+ rescue
102
+ {}
70
103
  end
104
+ now = Time.now.to_f
105
+ thence = job["enqueued_at"] || now
106
+ now - thence
107
+ else
108
+ 0
71
109
  end
72
110
 
73
- s = pipe1_res[7].size
74
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
75
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
76
-
77
- default_queue_latency = if (entry = pipe1_res[6].first)
78
- job = Sidekiq.load_json(entry)
79
- now = Time.now.to_f
80
- thence = job['enqueued_at'.freeze] || now
81
- now - thence
82
- else
83
- 0
84
- end
85
111
  @stats = {
86
- processed: pipe1_res[0].to_i,
87
- failed: pipe1_res[1].to_i,
88
- scheduled_size: pipe1_res[2],
89
- retry_size: pipe1_res[3],
90
- dead_size: pipe1_res[4],
91
- processes_size: pipe1_res[5],
92
-
93
- default_queue_latency: default_queue_latency,
94
- workers_size: workers_size,
95
- enqueued: enqueued
112
+ processed: pipe1_res[0].to_i,
113
+ failed: pipe1_res[1].to_i,
114
+ scheduled_size: pipe1_res[2],
115
+ retry_size: pipe1_res[3],
116
+ dead_size: pipe1_res[4],
117
+ processes_size: pipe1_res[5],
118
+
119
+ default_queue_latency: default_queue_latency
120
+ }
121
+ end
122
+
123
+ # O(number of processes + number of queues) redis calls
124
+ # @api private
125
+ def fetch_stats_slow!
126
+ processes = Sidekiq.redis { |conn|
127
+ conn.sscan("processes").to_a
128
+ }
129
+
130
+ queues = Sidekiq.redis { |conn|
131
+ conn.sscan("queues").to_a
96
132
  }
133
+
134
+ pipe2_res = Sidekiq.redis { |conn|
135
+ conn.pipelined do |pipeline|
136
+ processes.each { |key| pipeline.hget(key, "busy") }
137
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
138
+ end
139
+ }
140
+
141
+ s = processes.size
142
+ workers_size = pipe2_res[0...s].sum(&:to_i)
143
+ enqueued = pipe2_res[s..].sum(&:to_i)
144
+
145
+ @stats[:workers_size] = workers_size
146
+ @stats[:enqueued] = enqueued
147
+ @stats
148
+ end
149
+
150
+ # @api private
151
+ def fetch_stats!
152
+ fetch_stats_fast!
153
+ fetch_stats_slow!
97
154
  end
98
155
 
156
+ # @api private
99
157
  def reset(*stats)
100
- all = %w(failed processed)
158
+ all = %w[failed processed]
101
159
  stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
102
160
 
103
161
  mset_args = []
@@ -113,61 +171,35 @@ module Sidekiq
113
171
  private
114
172
 
115
173
  def stat(s)
116
- @stats[s]
117
- end
118
-
119
- class Queues
120
- def lengths
121
- Sidekiq.redis do |conn|
122
- queues = conn.smembers('queues'.freeze)
123
-
124
- lengths = conn.pipelined do
125
- queues.each do |queue|
126
- conn.llen("queue:#{queue}")
127
- end
128
- end
129
-
130
- i = 0
131
- array_of_arrays = queues.inject({}) do |memo, queue|
132
- memo[queue] = lengths[i]
133
- i += 1
134
- memo
135
- end.sort_by { |_, size| size }
136
-
137
- Hash[array_of_arrays.reverse]
138
- end
139
- end
174
+ fetch_stats_slow! if @stats[s].nil?
175
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
140
176
  end
141
177
 
142
178
  class History
143
- def initialize(days_previous, start_date = nil)
179
+ def initialize(days_previous, start_date = nil, pool: nil)
180
+ # we only store five years of data in Redis
181
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
144
182
  @days_previous = days_previous
145
183
  @start_date = start_date || Time.now.utc.to_date
146
184
  end
147
185
 
148
186
  def processed
149
- date_stat_hash("processed")
187
+ @processed ||= date_stat_hash("processed")
150
188
  end
151
189
 
152
190
  def failed
153
- date_stat_hash("failed")
191
+ @failed ||= date_stat_hash("failed")
154
192
  end
155
193
 
156
194
  private
157
195
 
158
196
  def date_stat_hash(stat)
159
- i = 0
160
197
  stat_hash = {}
161
- keys = []
162
- dates = []
163
-
164
- while i < @days_previous
165
- date = @start_date - i
166
- datestr = date.strftime("%Y-%m-%d".freeze)
167
- keys << "stat:#{stat}:#{datestr}"
168
- dates << datestr
169
- i += 1
170
- end
198
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
199
+ date.strftime("%Y-%m-%d")
200
+ }
201
+
202
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
171
203
 
172
204
  Sidekiq.redis do |conn|
173
205
  conn.mget(keys).each_with_index do |value, idx|
@@ -181,9 +213,10 @@ module Sidekiq
181
213
  end
182
214
 
183
215
  ##
184
- # Encapsulates a queue within Sidekiq.
216
+ # Represents a queue within Sidekiq.
185
217
  # Allows enumeration of all jobs within the queue
186
- # and deletion of jobs.
218
+ # and deletion of jobs. NB: this queue data is real-time
219
+ # and is changing within Redis moment by moment.
187
220
  #
188
221
  # queue = Sidekiq::Queue.new("mailer")
189
222
  # queue.each do |job|
@@ -191,29 +224,34 @@ module Sidekiq
191
224
  # job.args # => [1, 2, 3]
192
225
  # job.delete if job.jid == 'abcdef1234567890'
193
226
  # end
194
- #
195
227
  class Queue
196
228
  include Enumerable
197
229
 
198
230
  ##
199
- # Return all known queues within Redis.
231
+ # Fetch all known queues within Redis.
200
232
  #
233
+ # @return [Array<Sidekiq::Queue>]
201
234
  def self.all
202
- Sidekiq.redis { |c| c.smembers('queues'.freeze) }.sort.map { |q| Sidekiq::Queue.new(q) }
235
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
203
236
  end
204
237
 
205
238
  attr_reader :name
206
239
 
207
- def initialize(name="default")
208
- @name = name
240
+ # @param name [String] the name of the queue
241
+ def initialize(name = "default")
242
+ @name = name.to_s
209
243
  @rname = "queue:#{name}"
210
244
  end
211
245
 
246
+ # The current size of the queue within Redis.
247
+ # This value is real-time and can change between calls.
248
+ #
249
+ # @return [Integer] the size
212
250
  def size
213
251
  Sidekiq.redis { |con| con.llen(@rname) }
214
252
  end
215
253
 
216
- # Sidekiq Pro overrides this
254
+ # @return [Boolean] if the queue is currently paused
217
255
  def paused?
218
256
  false
219
257
  end
@@ -222,15 +260,15 @@ module Sidekiq
222
260
  # Calculates this queue's latency, the difference in seconds since the oldest
223
261
  # job in the queue was enqueued.
224
262
  #
225
- # @return Float
263
+ # @return [Float] in seconds
226
264
  def latency
227
- entry = Sidekiq.redis do |conn|
228
- conn.lrange(@rname, -1, -1)
229
- end.first
265
+ entry = Sidekiq.redis { |conn|
266
+ conn.lindex(@rname, -1)
267
+ }
230
268
  return 0 unless entry
231
269
  job = Sidekiq.load_json(entry)
232
270
  now = Time.now.to_f
233
- thence = job['enqueued_at'] || now
271
+ thence = job["enqueued_at"] || now
234
272
  now - thence
235
273
  end
236
274
 
@@ -240,16 +278,16 @@ module Sidekiq
240
278
  page = 0
241
279
  page_size = 50
242
280
 
243
- while true do
281
+ loop do
244
282
  range_start = page * page_size - deleted_size
245
- range_end = range_start + page_size - 1
246
- entries = Sidekiq.redis do |conn|
283
+ range_end = range_start + page_size - 1
284
+ entries = Sidekiq.redis { |conn|
247
285
  conn.lrange @rname, range_start, range_end
248
- end
286
+ }
249
287
  break if entries.empty?
250
288
  page += 1
251
289
  entries.each do |entry|
252
- yield Job.new(entry, @name)
290
+ yield JobRecord.new(entry, @name)
253
291
  end
254
292
  deleted_size = initial_size - size
255
293
  end
@@ -258,150 +296,228 @@ module Sidekiq
258
296
  ##
259
297
  # Find the job with the given JID within this queue.
260
298
  #
261
- # This is a slow, inefficient operation. Do not use under
262
- # normal conditions. Sidekiq Pro contains a faster version.
299
+ # This is a *slow, inefficient* operation. Do not use under
300
+ # normal conditions.
301
+ #
302
+ # @param jid [String] the job_id to look for
303
+ # @return [Sidekiq::JobRecord]
304
+ # @return [nil] if not found
263
305
  def find_job(jid)
264
306
  detect { |j| j.jid == jid }
265
307
  end
266
308
 
309
+ # delete all jobs within this queue
310
+ # @return [Boolean] true
267
311
  def clear
268
312
  Sidekiq.redis do |conn|
269
- conn.multi do
270
- conn.del(@rname)
271
- conn.srem("queues".freeze, name)
313
+ conn.multi do |transaction|
314
+ transaction.unlink(@rname)
315
+ transaction.srem("queues", [name])
272
316
  end
273
317
  end
318
+ true
274
319
  end
275
320
  alias_method :💣, :clear
321
+
322
+ # :nodoc:
323
+ # @api private
324
+ def as_json(options = nil)
325
+ {name: name} # 5336
326
+ end
276
327
  end
277
328
 
278
329
  ##
279
- # Encapsulates a pending job within a Sidekiq queue or
280
- # sorted set.
330
+ # Represents a pending job within a Sidekiq queue.
281
331
  #
282
332
  # The job should be considered immutable but may be
283
- # removed from the queue via Job#delete.
284
- #
285
- class Job
333
+ # removed from the queue via JobRecord#delete.
334
+ class JobRecord
335
+ # the parsed Hash of job data
336
+ # @!attribute [r] Item
286
337
  attr_reader :item
338
+ # the underlying String in Redis
339
+ # @!attribute [r] Value
287
340
  attr_reader :value
288
-
289
- def initialize(item, queue_name=nil)
341
+ # the queue associated with this job
342
+ # @!attribute [r] Queue
343
+ attr_reader :queue
344
+
345
+ # :nodoc:
346
+ # @api private
347
+ def initialize(item, queue_name = nil)
348
+ @args = nil
290
349
  @value = item
291
- @item = item.is_a?(Hash) ? item : Sidekiq.load_json(item)
292
- @queue = queue_name || @item['queue']
350
+ @item = item.is_a?(Hash) ? item : parse(item)
351
+ @queue = queue_name || @item["queue"]
293
352
  end
294
353
 
354
+ # :nodoc:
355
+ # @api private
356
+ def parse(item)
357
+ Sidekiq.load_json(item)
358
+ rescue JSON::ParserError
359
+ # If the job payload in Redis is invalid JSON, we'll load
360
+ # the item as an empty hash and store the invalid JSON as
361
+ # the job 'args' for display in the Web UI.
362
+ @invalid = true
363
+ @args = [item]
364
+ {}
365
+ end
366
+
367
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
368
+ # this class will be the ActiveJob adapter class rather than a specific job.
295
369
  def klass
296
- @item['class']
370
+ self["class"]
297
371
  end
298
372
 
299
373
  def display_class
300
374
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
301
- @klass ||= case klass
302
- when /\ASidekiq::Extensions::Delayed/
303
- safe_load(args[0], klass) do |target, method, _|
304
- "#{target}.#{method}"
305
- end
306
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
307
- job_class = @item['wrapped'] || args[0]
308
- if 'ActionMailer::DeliveryJob' == job_class
309
- # MailerClass#mailer_method
310
- args[0]['arguments'][0..1].join('#')
311
- else
312
- job_class
313
- end
314
- else
315
- klass
316
- end
375
+ @klass ||= self["display_class"] || begin
376
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper" || klass == "Sidekiq::ActiveJob::Wrapper"
377
+ job_class = @item["wrapped"] || args[0]
378
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
379
+ # MailerClass#mailer_method
380
+ args[0]["arguments"][0..1].join("#")
381
+ else
382
+ job_class
383
+ end
384
+ else
385
+ klass
386
+ end
387
+ end
317
388
  end
318
389
 
319
390
  def display_args
320
391
  # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
321
- @args ||= case klass
322
- when /\ASidekiq::Extensions::Delayed/
323
- safe_load(args[0], args) do |_, _, arg|
324
- arg
325
- end
326
- when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
327
- job_args = @item['wrapped'] ? args[0]["arguments"] : []
328
- if 'ActionMailer::DeliveryJob' == (@item['wrapped'] || args[0])
329
- # remove MailerClass, mailer_method and 'deliver_now'
330
- job_args.drop(3)
331
- else
332
- job_args
333
- end
334
- else
335
- args
336
- end
392
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper" || klass == "Sidekiq::ActiveJob::Wrapper"
393
+ job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
394
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
395
+ # remove MailerClass, mailer_method and 'deliver_now'
396
+ job_args.drop(3)
397
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
398
+ # remove MailerClass, mailer_method and 'deliver_now'
399
+ job_args.drop(3).first.values_at("params", "args")
400
+ else
401
+ job_args
402
+ end
403
+ else
404
+ if self["encrypt"]
405
+ # no point in showing 150+ bytes of random garbage
406
+ args[-1] = "[encrypted data]"
407
+ end
408
+ args
409
+ end
337
410
  end
338
411
 
339
412
  def args
340
- @item['args']
413
+ @args || @item["args"]
341
414
  end
342
415
 
343
416
  def jid
344
- @item['jid']
417
+ self["jid"]
418
+ end
419
+
420
+ def bid
421
+ self["bid"]
345
422
  end
346
423
 
347
424
  def enqueued_at
348
- @item['enqueued_at'] ? Time.at(@item['enqueued_at']).utc : nil
425
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
349
426
  end
350
427
 
351
428
  def created_at
352
- Time.at(@item['created_at'] || @item['enqueued_at'] || 0).utc
429
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
353
430
  end
354
431
 
355
- def queue
356
- @queue
432
+ def tags
433
+ self["tags"] || []
434
+ end
435
+
436
+ def error_backtrace
437
+ # Cache nil values
438
+ if defined?(@error_backtrace)
439
+ @error_backtrace
440
+ else
441
+ value = self["error_backtrace"]
442
+ @error_backtrace = value && uncompress_backtrace(value)
443
+ end
357
444
  end
358
445
 
359
446
  def latency
360
447
  now = Time.now.to_f
361
- now - (@item['enqueued_at'] || @item['created_at'] || now)
448
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
362
449
  end
363
450
 
364
- ##
365
- # Remove this job from the queue.
451
+ # Remove this job from the queue
366
452
  def delete
367
- count = Sidekiq.redis do |conn|
453
+ count = Sidekiq.redis { |conn|
368
454
  conn.lrem("queue:#{@queue}", 1, @value)
369
- end
455
+ }
370
456
  count != 0
371
457
  end
372
458
 
459
+ # Access arbitrary attributes within the job hash
373
460
  def [](name)
374
- @item[name]
461
+ # nil will happen if the JSON fails to parse.
462
+ # We don't guarantee Sidekiq will work with bad job JSON but we should
463
+ # make a best effort to minimize the damage.
464
+ @item ? @item[name] : nil
375
465
  end
376
466
 
377
467
  private
378
468
 
379
- def safe_load(content, default)
380
- begin
381
- yield(*YAML.load(content))
382
- rescue => ex
383
- # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
384
- # memory yet so the YAML can't be loaded.
385
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == 'development'
386
- default
469
+ ACTIVE_JOB_PREFIX = "_aj_"
470
+ GLOBALID_KEY = "_aj_globalid"
471
+
472
+ def deserialize_argument(argument)
473
+ case argument
474
+ when Array
475
+ argument.map { |arg| deserialize_argument(arg) }
476
+ when Hash
477
+ if serialized_global_id?(argument)
478
+ argument[GLOBALID_KEY]
479
+ else
480
+ argument.transform_values { |v| deserialize_argument(v) }
481
+ .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
482
+ end
483
+ else
484
+ argument
387
485
  end
388
486
  end
487
+
488
+ def serialized_global_id?(hash)
489
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
490
+ end
491
+
492
+ def uncompress_backtrace(backtrace)
493
+ strict_base64_decoded = backtrace.unpack1("m")
494
+ uncompressed = Zlib::Inflate.inflate(strict_base64_decoded)
495
+ Sidekiq.load_json(uncompressed)
496
+ end
389
497
  end
390
498
 
391
- class SortedEntry < Job
499
+ # Represents a job within a Redis sorted set where the score
500
+ # represents a timestamp associated with the job. This timestamp
501
+ # could be the scheduled time for it to run (e.g. scheduled set),
502
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
503
+ class SortedEntry < JobRecord
392
504
  attr_reader :score
393
505
  attr_reader :parent
394
506
 
507
+ # :nodoc:
508
+ # @api private
395
509
  def initialize(parent, score, item)
396
510
  super(item)
397
- @score = score
511
+ @score = Float(score)
398
512
  @parent = parent
399
513
  end
400
514
 
515
+ # The timestamp associated with this entry
401
516
  def at
402
517
  Time.at(score).utc
403
518
  end
404
519
 
520
+ # remove this entry from the sorted set
405
521
  def delete
406
522
  if @value
407
523
  @parent.delete_by_value(@parent.name, @value)
@@ -410,11 +526,17 @@ module Sidekiq
410
526
  end
411
527
  end
412
528
 
529
+ # Change the scheduled time for this job.
530
+ #
531
+ # @param at [Time] the new timestamp for this job
413
532
  def reschedule(at)
414
- delete
415
- @parent.schedule(at, item)
533
+ Sidekiq.redis do |conn|
534
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
535
+ end
416
536
  end
417
537
 
538
+ # Enqueue this job from the scheduled or dead set so it will
539
+ # be executed at some point in the near future.
418
540
  def add_to_queue
419
541
  remove_job do |message|
420
542
  msg = Sidekiq.load_json(message)
@@ -422,98 +544,162 @@ module Sidekiq
422
544
  end
423
545
  end
424
546
 
547
+ # enqueue this job from the retry set so it will be executed
548
+ # at some point in the near future.
425
549
  def retry
426
550
  remove_job do |message|
427
551
  msg = Sidekiq.load_json(message)
428
- msg['retry_count'] -= 1 if msg['retry_count']
552
+ msg["retry_count"] -= 1 if msg["retry_count"]
429
553
  Sidekiq::Client.push(msg)
430
554
  end
431
555
  end
432
556
 
433
- ##
434
- # Place job in the dead set
557
+ # Move this job from its current set into the Dead set.
435
558
  def kill
436
559
  remove_job do |message|
437
- now = Time.now.to_f
438
- Sidekiq.redis do |conn|
439
- conn.multi do
440
- conn.zadd('dead', now, message)
441
- conn.zremrangebyscore('dead', '-inf', now - DeadSet.timeout)
442
- conn.zremrangebyrank('dead', 0, - DeadSet.max_jobs)
443
- end
444
- end
560
+ DeadSet.new.kill(message)
445
561
  end
446
562
  end
447
563
 
448
564
  def error?
449
- !!item['error_class']
565
+ !!item["error_class"]
450
566
  end
451
567
 
452
568
  private
453
569
 
454
570
  def remove_job
455
571
  Sidekiq.redis do |conn|
456
- results = conn.multi do
457
- conn.zrangebyscore(parent.name, score, score)
458
- conn.zremrangebyscore(parent.name, score, score)
459
- end.first
572
+ results = conn.multi { |transaction|
573
+ transaction.zrange(parent.name, score, score, "BYSCORE")
574
+ transaction.zremrangebyscore(parent.name, score, score)
575
+ }.first
460
576
 
461
577
  if results.size == 1
462
578
  yield results.first
463
579
  else
464
580
  # multiple jobs with the same score
465
581
  # find the one with the right JID and push it
466
- hash = results.group_by do |message|
582
+ matched, nonmatched = results.partition { |message|
467
583
  if message.index(jid)
468
584
  msg = Sidekiq.load_json(message)
469
- msg['jid'] == jid
585
+ msg["jid"] == jid
470
586
  else
471
587
  false
472
588
  end
473
- end
589
+ }
474
590
 
475
- msg = hash.fetch(true, []).first
591
+ msg = matched.first
476
592
  yield msg if msg
477
593
 
478
594
  # push the rest back onto the sorted set
479
- conn.multi do
480
- hash.fetch(false, []).each do |message|
481
- conn.zadd(parent.name, score.to_f.to_s, message)
595
+ conn.multi do |transaction|
596
+ nonmatched.each do |message|
597
+ transaction.zadd(parent.name, score.to_f.to_s, message)
482
598
  end
483
599
  end
484
600
  end
485
601
  end
486
602
  end
487
-
488
603
  end
489
604
 
605
+ # Base class for all sorted sets within Sidekiq.
490
606
  class SortedSet
491
607
  include Enumerable
492
608
 
609
+ # Redis key of the set
610
+ # @!attribute [r] Name
493
611
  attr_reader :name
494
612
 
613
+ # :nodoc:
614
+ # @api private
495
615
  def initialize(name)
496
616
  @name = name
497
617
  @_size = size
498
618
  end
499
619
 
620
+ # real-time size of the set, will change
500
621
  def size
501
622
  Sidekiq.redis { |c| c.zcard(name) }
502
623
  end
503
624
 
625
+ # Scan through each element of the sorted set, yielding each to the supplied block.
626
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
627
+ #
628
+ # @param match [String] a snippet or regexp to filter matches.
629
+ # @param count [Integer] number of elements to retrieve at a time, default 100
630
+ # @yieldparam [Sidekiq::SortedEntry] each entry
631
+ def scan(match, count = 100)
632
+ return to_enum(:scan, match, count) unless block_given?
633
+
634
+ match = "*#{match}*" unless match.include?("*")
635
+ Sidekiq.redis do |conn|
636
+ conn.zscan(name, match: match, count: count) do |entry, score|
637
+ yield SortedEntry.new(self, score, entry)
638
+ end
639
+ end
640
+ end
641
+
642
+ # @return [Boolean] always true
504
643
  def clear
505
644
  Sidekiq.redis do |conn|
506
- conn.del(name)
645
+ conn.unlink(name)
507
646
  end
647
+ true
508
648
  end
509
649
  alias_method :💣, :clear
650
+
651
+ # :nodoc:
652
+ # @api private
653
+ def as_json(options = nil)
654
+ {name: name} # 5336
655
+ end
510
656
  end
511
657
 
658
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
659
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
660
+ # e.g. Batches.
512
661
  class JobSet < SortedSet
513
-
514
- def schedule(timestamp, message)
662
+ # Add a job with the associated timestamp to this set.
663
+ # @param timestamp [Time] the score for the job
664
+ # @param job [Hash] the job data
665
+ def schedule(timestamp, job)
515
666
  Sidekiq.redis do |conn|
516
- conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(message))
667
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
668
+ end
669
+ end
670
+
671
+ def pop_each
672
+ Sidekiq.redis do |c|
673
+ size.times do
674
+ data, score = c.zpopmin(name, 1)&.first
675
+ break unless data
676
+ yield data, score
677
+ end
678
+ end
679
+ end
680
+
681
+ def retry_all
682
+ c = Sidekiq::Client.new
683
+ pop_each do |msg, _|
684
+ job = Sidekiq.load_json(msg)
685
+ # Manual retries should not count against the retry limit.
686
+ job["retry_count"] -= 1 if job["retry_count"]
687
+ c.push(job)
688
+ end
689
+ end
690
+
691
+ # Move all jobs from this Set to the Dead Set.
692
+ # See DeadSet#kill
693
+ def kill_all(notify_failure: false, ex: nil)
694
+ ds = DeadSet.new
695
+ opts = {notify_failure: notify_failure, ex: ex, trim: false}
696
+
697
+ begin
698
+ pop_each do |msg, _|
699
+ ds.kill(msg, opts)
700
+ end
701
+ ensure
702
+ ds.trim
517
703
  end
518
704
  end
519
705
 
@@ -523,46 +709,66 @@ module Sidekiq
523
709
  page = -1
524
710
  page_size = 50
525
711
 
526
- while true do
712
+ loop do
527
713
  range_start = page * page_size + offset_size
528
- range_end = range_start + page_size - 1
529
- elements = Sidekiq.redis do |conn|
530
- conn.zrange name, range_start, range_end, with_scores: true
531
- end
714
+ range_end = range_start + page_size - 1
715
+ elements = Sidekiq.redis { |conn|
716
+ conn.zrange name, range_start, range_end, "withscores"
717
+ }
532
718
  break if elements.empty?
533
719
  page -= 1
534
- elements.each do |element, score|
720
+ elements.reverse_each do |element, score|
535
721
  yield SortedEntry.new(self, score, element)
536
722
  end
537
723
  offset_size = initial_size - @_size
538
724
  end
539
725
  end
540
726
 
727
+ ##
728
+ # Fetch jobs that match a given time or Range. Job ID is an
729
+ # optional second argument.
730
+ #
731
+ # @param score [Time,Range] a specific timestamp or range
732
+ # @param jid [String, optional] find a specific JID within the score
733
+ # @return [Array<SortedEntry>] any results found, can be empty
541
734
  def fetch(score, jid = nil)
542
- elements = Sidekiq.redis do |conn|
543
- conn.zrangebyscore(name, score, score)
544
- end
545
-
546
- elements.inject([]) do |result, element|
547
- entry = SortedEntry.new(self, score, element)
548
- if jid
549
- result << entry if entry.jid == jid
735
+ begin_score, end_score =
736
+ if score.is_a?(Range)
737
+ [score.first, score.last]
550
738
  else
551
- result << entry
739
+ [score, score]
552
740
  end
553
- result
741
+
742
+ elements = Sidekiq.redis { |conn|
743
+ conn.zrange(name, begin_score, end_score, "BYSCORE", "withscores")
744
+ }
745
+
746
+ elements.each_with_object([]) do |element, result|
747
+ data, job_score = element
748
+ entry = SortedEntry.new(self, job_score, data)
749
+ result << entry if jid.nil? || entry.jid == jid
554
750
  end
555
751
  end
556
752
 
557
753
  ##
558
754
  # Find the job with the given JID within this sorted set.
755
+ # *This is a slow O(n) operation*. Do not use for app logic.
559
756
  #
560
- # This is a slow, inefficient operation. Do not use under
561
- # normal conditions. Sidekiq Pro contains a faster version.
757
+ # @param jid [String] the job identifier
758
+ # @return [SortedEntry] the record or nil
562
759
  def find_job(jid)
563
- self.detect { |j| j.jid == jid }
760
+ Sidekiq.redis do |conn|
761
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
762
+ job = Sidekiq.load_json(entry)
763
+ matched = job["jid"] == jid
764
+ return SortedEntry.new(self, score, entry) if matched
765
+ end
766
+ end
767
+ nil
564
768
  end
565
769
 
770
+ # :nodoc:
771
+ # @api private
566
772
  def delete_by_value(name, value)
567
773
  Sidekiq.redis do |conn|
568
774
  ret = conn.zrem(name, value)
@@ -571,17 +777,20 @@ module Sidekiq
571
777
  end
572
778
  end
573
779
 
780
+ # :nodoc:
781
+ # @api private
574
782
  def delete_by_jid(score, jid)
575
783
  Sidekiq.redis do |conn|
576
- elements = conn.zrangebyscore(name, score, score)
784
+ elements = conn.zrange(name, score, score, "BYSCORE")
577
785
  elements.each do |element|
578
- message = Sidekiq.load_json(element)
579
- if message["jid"] == jid
580
- ret = conn.zrem(name, element)
581
- @_size -= 1 if ret
582
- break ret
786
+ if element.index(jid)
787
+ message = Sidekiq.load_json(element)
788
+ if message["jid"] == jid
789
+ ret = conn.zrem(name, element)
790
+ @_size -= 1 if ret
791
+ break ret
792
+ end
583
793
  end
584
- false
585
794
  end
586
795
  end
587
796
  end
@@ -590,136 +799,202 @@ module Sidekiq
590
799
  end
591
800
 
592
801
  ##
593
- # Allows enumeration of scheduled jobs within Sidekiq.
594
- # Based on this, you can search/filter for jobs. Here's an
595
- # example where I'm selecting all jobs of a certain type
596
- # and deleting them from the schedule queue.
802
+ # The set of scheduled jobs within Sidekiq.
803
+ # See the API wiki page for usage notes and examples.
597
804
  #
598
- # r = Sidekiq::ScheduledSet.new
599
- # r.select do |scheduled|
600
- # scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
601
- # scheduled.args[0] == 'User' &&
602
- # scheduled.args[1] == 'setup_new_subscriber'
603
- # end.map(&:delete)
604
805
  class ScheduledSet < JobSet
605
806
  def initialize
606
- super 'schedule'
807
+ super("schedule")
607
808
  end
608
809
  end
609
810
 
610
811
  ##
611
- # Allows enumeration of retries within Sidekiq.
612
- # Based on this, you can search/filter for jobs. Here's an
613
- # example where I'm selecting all jobs of a certain type
614
- # and deleting them from the retry queue.
812
+ # The set of retries within Sidekiq.
813
+ # See the API wiki page for usage notes and examples.
615
814
  #
616
- # r = Sidekiq::RetrySet.new
617
- # r.select do |retri|
618
- # retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
619
- # retri.args[0] == 'User' &&
620
- # retri.args[1] == 'setup_new_subscriber'
621
- # end.map(&:delete)
622
815
  class RetrySet < JobSet
623
816
  def initialize
624
- super 'retry'
625
- end
626
-
627
- def retry_all
628
- while size > 0
629
- each(&:retry)
630
- end
817
+ super("retry")
631
818
  end
632
819
  end
633
820
 
634
821
  ##
635
- # Allows enumeration of dead jobs within Sidekiq.
822
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
823
+ # their retries and are held in this set pending some sort of manual
824
+ # fix. They will be removed after 6 months (dead_timeout) if not.
636
825
  #
637
826
  class DeadSet < JobSet
638
827
  def initialize
639
- super 'dead'
828
+ super("dead")
640
829
  end
641
830
 
642
- def retry_all
643
- while size > 0
644
- each(&:retry)
831
+ # Trim dead jobs which are over our storage limits
832
+ def trim
833
+ hash = Sidekiq.default_configuration
834
+ now = Time.now.to_f
835
+ Sidekiq.redis do |conn|
836
+ conn.multi do |transaction|
837
+ transaction.zremrangebyscore(name, "-inf", now - hash[:dead_timeout_in_seconds])
838
+ transaction.zremrangebyrank(name, 0, - hash[:dead_max_jobs])
839
+ end
645
840
  end
646
841
  end
647
842
 
648
- def self.max_jobs
649
- Sidekiq.options[:dead_max_jobs]
650
- end
843
+ # Add the given job to the Dead set.
844
+ # @param message [String] the job data as JSON
845
+ # @option opts [Boolean] :notify_failure (true) Whether death handlers should be called
846
+ # @option opts [Boolean] :trim (true) Whether Sidekiq should trim the structure to keep it within configuration
847
+ # @option opts [Exception] :ex (RuntimeError) An exception to pass to the death handlers
848
+ def kill(message, opts = {})
849
+ now = Time.now.to_f
850
+ Sidekiq.redis do |conn|
851
+ conn.zadd(name, now.to_s, message)
852
+ end
651
853
 
652
- def self.timeout
653
- Sidekiq.options[:dead_timeout_in_seconds]
854
+ trim if opts[:trim] != false
855
+
856
+ if opts[:notify_failure] != false
857
+ job = Sidekiq.load_json(message)
858
+ if opts[:ex]
859
+ ex = opts[:ex]
860
+ else
861
+ ex = RuntimeError.new("Job killed by API")
862
+ ex.set_backtrace(caller)
863
+ end
864
+ Sidekiq.default_configuration.death_handlers.each do |handle|
865
+ handle.call(job, ex)
866
+ end
867
+ end
868
+ true
654
869
  end
655
870
  end
656
871
 
657
872
  ##
658
873
  # Enumerates the set of Sidekiq processes which are actively working
659
- # right now. Each process send a heartbeat to Redis every 5 seconds
874
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
660
875
  # so this set should be relatively accurate, barring network partitions.
661
876
  #
662
- # Yields a Sidekiq::Process.
877
+ # @yieldparam [Sidekiq::Process]
663
878
  #
664
879
  class ProcessSet
665
880
  include Enumerable
666
881
 
667
- def initialize(clean_plz=true)
668
- self.class.cleanup if clean_plz
882
+ def self.[](identity)
883
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
884
+ conn.multi { |transaction|
885
+ transaction.sismember("processes", identity)
886
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
887
+ }
888
+ }
889
+
890
+ return nil if exists == 0 || info.nil?
891
+
892
+ hash = Sidekiq.load_json(info)
893
+ Process.new(hash.merge("busy" => busy.to_i,
894
+ "beat" => beat.to_f,
895
+ "quiet" => quiet,
896
+ "rss" => rss.to_i,
897
+ "rtt_us" => rtt_us.to_i))
898
+ end
899
+
900
+ # :nodoc:
901
+ # @api private
902
+ def initialize(clean_plz = true)
903
+ cleanup if clean_plz
669
904
  end
670
905
 
671
906
  # Cleans up dead processes recorded in Redis.
672
907
  # Returns the number of processes cleaned.
673
- def self.cleanup
908
+ # :nodoc:
909
+ # @api private
910
+ def cleanup
911
+ # don't run cleanup more than once per minute
912
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", "NX", "EX", "60") }
913
+
674
914
  count = 0
675
915
  Sidekiq.redis do |conn|
676
- procs = conn.smembers('processes').sort
677
- heartbeats = conn.pipelined do
916
+ procs = conn.sscan("processes").to_a
917
+ heartbeats = conn.pipelined { |pipeline|
678
918
  procs.each do |key|
679
- conn.hget(key, 'info')
919
+ pipeline.hget(key, "info")
680
920
  end
681
- end
921
+ }
682
922
 
683
923
  # the hash named key has an expiry of 60 seconds.
684
924
  # if it's not found, that means the process has not reported
685
925
  # in to Redis and probably died.
686
- to_prune = []
687
- heartbeats.each_with_index do |beat, i|
688
- to_prune << procs[i] if beat.nil?
689
- end
690
- count = conn.srem('processes', to_prune) unless to_prune.empty?
926
+ to_prune = procs.select.with_index { |proc, i|
927
+ heartbeats[i].nil?
928
+ }
929
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
691
930
  end
692
931
  count
693
932
  end
694
933
 
695
934
  def each
696
- procs = Sidekiq.redis { |conn| conn.smembers('processes') }.sort
935
+ result = Sidekiq.redis { |conn|
936
+ procs = conn.sscan("processes").to_a.sort
697
937
 
698
- Sidekiq.redis do |conn|
699
938
  # We're making a tradeoff here between consuming more memory instead of
700
939
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
701
940
  # you'll be happier this way
702
- result = conn.pipelined do
941
+ conn.pipelined do |pipeline|
703
942
  procs.each do |key|
704
- conn.hmget(key, 'info', 'busy', 'beat', 'quiet')
943
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
705
944
  end
706
945
  end
946
+ }
707
947
 
708
- result.each do |info, busy, at_s, quiet|
709
- hash = Sidekiq.load_json(info)
710
- yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
711
- end
948
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
949
+ # If a process is stopped between when we query Redis for `procs` and
950
+ # when we query for `result`, we will have an item in `result` that is
951
+ # composed of `nil` values.
952
+ next if info.nil?
953
+
954
+ hash = Sidekiq.load_json(info)
955
+ yield Process.new(hash.merge("busy" => busy.to_i,
956
+ "beat" => beat.to_f,
957
+ "quiet" => quiet,
958
+ "rss" => rss.to_i,
959
+ "rtt_us" => rtt_us.to_i))
712
960
  end
713
-
714
- nil
715
961
  end
716
962
 
717
963
  # This method is not guaranteed accurate since it does not prune the set
718
964
  # based on current heartbeat. #each does that and ensures the set only
719
965
  # contains Sidekiq processes which have sent a heartbeat within the last
720
966
  # 60 seconds.
967
+ # @return [Integer] current number of registered Sidekiq processes
721
968
  def size
722
- Sidekiq.redis { |conn| conn.scard('processes') }
969
+ Sidekiq.redis { |conn| conn.scard("processes") }
970
+ end
971
+
972
+ # Total number of threads available to execute jobs.
973
+ # For Sidekiq Enterprise customers this number (in production) must be
974
+ # less than or equal to your licensed concurrency.
975
+ # @return [Integer] the sum of process concurrency
976
+ def total_concurrency
977
+ sum { |x| x["concurrency"].to_i }
978
+ end
979
+
980
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
981
+ def total_rss_in_kb
982
+ sum { |x| x["rss"].to_i }
983
+ end
984
+ alias_method :total_rss, :total_rss_in_kb
985
+
986
+ # Returns the identity of the current cluster leader or "" if no leader.
987
+ # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
988
+ # or Sidekiq Pro.
989
+ # @return [String] Identity of cluster leader
990
+ # @return [String] empty string if no leader
991
+ def leader
992
+ @leader ||= begin
993
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
994
+ # need a non-falsy value so we can memoize
995
+ x ||= ""
996
+ x
997
+ end
723
998
  end
724
999
  end
725
1000
 
@@ -737,38 +1012,78 @@ module Sidekiq
737
1012
  # 'busy' => 10,
738
1013
  # 'beat' => <last heartbeat>,
739
1014
  # 'identity' => <unique string identifying the process>,
1015
+ # 'embedded' => true,
740
1016
  # }
741
1017
  class Process
1018
+ # :nodoc:
1019
+ # @api private
742
1020
  def initialize(hash)
743
1021
  @attribs = hash
744
1022
  end
745
1023
 
746
1024
  def tag
747
- self['tag']
1025
+ self["tag"]
748
1026
  end
749
1027
 
750
1028
  def labels
751
- Array(self['labels'])
1029
+ self["labels"].to_a
752
1030
  end
753
1031
 
754
1032
  def [](key)
755
1033
  @attribs[key]
756
1034
  end
757
1035
 
1036
+ def identity
1037
+ self["identity"]
1038
+ end
1039
+
1040
+ def queues
1041
+ self["queues"]
1042
+ end
1043
+
1044
+ def weights
1045
+ self["weights"]
1046
+ end
1047
+
1048
+ def version
1049
+ self["version"]
1050
+ end
1051
+
1052
+ def embedded?
1053
+ self["embedded"]
1054
+ end
1055
+
1056
+ # Signal this process to stop processing new jobs.
1057
+ # It will continue to execute jobs it has already fetched.
1058
+ # This method is *asynchronous* and it can take 5-10
1059
+ # seconds for the process to quiet.
758
1060
  def quiet!
759
- signal('USR1')
1061
+ raise "Can't quiet an embedded process" if embedded?
1062
+
1063
+ signal("TSTP")
760
1064
  end
761
1065
 
1066
+ # Signal this process to shutdown.
1067
+ # It will shut down within its configured :timeout value, default 25 seconds.
1068
+ # This method is *asynchronous* and it can take 5-10
1069
+ # seconds for the process to start shutting down.
762
1070
  def stop!
763
- signal('TERM')
1071
+ raise "Can't stop an embedded process" if embedded?
1072
+
1073
+ signal("TERM")
764
1074
  end
765
1075
 
1076
+ # Signal this process to log backtraces for all threads.
1077
+ # Useful if you have a frozen or deadlocked process which is
1078
+ # still sending a heartbeat.
1079
+ # This method is *asynchronous* and it can take 5-10 seconds.
766
1080
  def dump_threads
767
- signal('TTIN')
1081
+ signal("TTIN")
768
1082
  end
769
1083
 
1084
+ # @return [Boolean] true if this process is quiet or shutting down
770
1085
  def stopping?
771
- self['quiet'] == 'true'
1086
+ self["quiet"] == "true"
772
1087
  end
773
1088
 
774
1089
  private
@@ -776,21 +1091,17 @@ module Sidekiq
776
1091
  def signal(sig)
777
1092
  key = "#{identity}-signals"
778
1093
  Sidekiq.redis do |c|
779
- c.multi do
780
- c.lpush(key, sig)
781
- c.expire(key, 60)
1094
+ c.multi do |transaction|
1095
+ transaction.lpush(key, sig)
1096
+ transaction.expire(key, 60)
782
1097
  end
783
1098
  end
784
1099
  end
785
-
786
- def identity
787
- self['identity']
788
- end
789
1100
  end
790
1101
 
791
1102
  ##
792
- # A worker is a thread that is currently processing a job.
793
- # Programmatic access to the current active worker set.
1103
+ # The WorkSet stores the work being done by this Sidekiq cluster.
1104
+ # It tracks the process and thread working on each job.
794
1105
  #
795
1106
  # WARNING WARNING WARNING
796
1107
  #
@@ -798,33 +1109,40 @@ module Sidekiq
798
1109
  # If you call #size => 5 and then expect #each to be
799
1110
  # called 5 times, you're going to have a bad time.
800
1111
  #
801
- # workers = Sidekiq::Workers.new
802
- # workers.size => 2
803
- # workers.each do |process_id, thread_id, work|
1112
+ # works = Sidekiq::WorkSet.new
1113
+ # works.size => 2
1114
+ # works.each do |process_id, thread_id, work|
804
1115
  # # process_id is a unique identifier per Sidekiq process
805
1116
  # # thread_id is a unique identifier per thread
806
1117
  # # work is a Hash which looks like:
807
- # # { 'queue' => name, 'run_at' => timestamp, 'payload' => msg }
1118
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
808
1119
  # # run_at is an epoch Integer.
809
1120
  # end
810
1121
  #
811
- class Workers
1122
+ class WorkSet
812
1123
  include Enumerable
813
1124
 
814
- def each
1125
+ def each(&block)
1126
+ results = []
1127
+ procs = nil
1128
+ all_works = nil
1129
+
815
1130
  Sidekiq.redis do |conn|
816
- procs = conn.smembers('processes')
817
- procs.sort.each do |key|
818
- valid, workers = conn.pipelined do
819
- conn.exists(key)
820
- conn.hgetall("#{key}:workers")
821
- end
822
- next unless valid
823
- workers.each_pair do |tid, json|
824
- yield key, tid, Sidekiq.load_json(json)
1131
+ procs = conn.sscan("processes").to_a.sort
1132
+ all_works = conn.pipelined do |pipeline|
1133
+ procs.each do |key|
1134
+ pipeline.hgetall("#{key}:work")
825
1135
  end
826
1136
  end
827
1137
  end
1138
+
1139
+ procs.zip(all_works).each do |key, workers|
1140
+ workers.each_pair do |tid, json|
1141
+ results << [key, tid, Sidekiq::Work.new(key, tid, Sidekiq.load_json(json))] unless json.empty?
1142
+ end
1143
+ end
1144
+
1145
+ results.sort_by { |(_, _, hsh)| hsh.raw("run_at") }.each(&block)
828
1146
  end
829
1147
 
830
1148
  # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -835,18 +1153,88 @@ module Sidekiq
835
1153
  # which can easily get out of sync with crashy processes.
836
1154
  def size
837
1155
  Sidekiq.redis do |conn|
838
- procs = conn.smembers('processes')
1156
+ procs = conn.sscan("processes").to_a
839
1157
  if procs.empty?
840
1158
  0
841
1159
  else
842
- conn.pipelined do
1160
+ conn.pipelined { |pipeline|
843
1161
  procs.each do |key|
844
- conn.hget(key, 'busy')
1162
+ pipeline.hget(key, "busy")
845
1163
  end
846
- end.map(&:to_i).inject(:+)
1164
+ }.sum(&:to_i)
847
1165
  end
848
1166
  end
849
1167
  end
1168
+
1169
+ ##
1170
+ # Find the work which represents a job with the given JID.
1171
+ # *This is a slow O(n) operation*. Do not use for app logic.
1172
+ #
1173
+ # @param jid [String] the job identifier
1174
+ # @return [Sidekiq::Work] the work or nil
1175
+ def find_work_by_jid(jid)
1176
+ each do |_process_id, _thread_id, work|
1177
+ job = work.job
1178
+ return work if job.jid == jid
1179
+ end
1180
+ nil
1181
+ end
1182
+ end
1183
+
1184
+ # Sidekiq::Work represents a job which is currently executing.
1185
+ class Work
1186
+ attr_reader :process_id
1187
+ attr_reader :thread_id
1188
+
1189
+ def initialize(pid, tid, hsh)
1190
+ @process_id = pid
1191
+ @thread_id = tid
1192
+ @hsh = hsh
1193
+ @job = nil
1194
+ end
1195
+
1196
+ def queue
1197
+ @hsh["queue"]
1198
+ end
1199
+
1200
+ def run_at
1201
+ Time.at(@hsh["run_at"])
1202
+ end
1203
+
1204
+ def job
1205
+ @job ||= Sidekiq::JobRecord.new(@hsh["payload"])
1206
+ end
1207
+
1208
+ def payload
1209
+ @hsh["payload"]
1210
+ end
1211
+
1212
+ # deprecated
1213
+ def [](key)
1214
+ kwargs = {uplevel: 1}
1215
+ kwargs[:category] = :deprecated if RUBY_VERSION > "3.0" # TODO
1216
+ warn("Direct access to `Sidekiq::Work` attributes is deprecated, please use `#payload`, `#queue`, `#run_at` or `#job` instead", **kwargs)
1217
+
1218
+ @hsh[key]
1219
+ end
1220
+
1221
+ # :nodoc:
1222
+ # @api private
1223
+ def raw(name)
1224
+ @hsh[name]
1225
+ end
1226
+
1227
+ def method_missing(*all)
1228
+ @hsh.send(*all)
1229
+ end
1230
+
1231
+ def respond_to_missing?(name, *args)
1232
+ @hsh.respond_to?(name)
1233
+ end
850
1234
  end
851
1235
 
1236
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
1237
+ # Is "worker" a process, a type of job, a thread? Undefined!
1238
+ # WorkSet better describes the data.
1239
+ Workers = WorkSet
852
1240
  end