sidekiq 6.4.1 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sidekiq might be problematic.
- checksums.yaml +4 -4
- data/Changes.md +107 -5
- data/README.md +14 -13
- data/bin/sidekiq +3 -8
- data/bin/sidekiqload +26 -29
- data/lib/sidekiq/api.rb +232 -157
- data/lib/sidekiq/capsule.rb +110 -0
- data/lib/sidekiq/cli.rb +80 -86
- data/lib/sidekiq/client.rb +54 -42
- data/lib/sidekiq/component.rb +66 -0
- data/lib/sidekiq/config.rb +271 -0
- data/lib/sidekiq/deploy.rb +62 -0
- data/lib/sidekiq/embedded.rb +61 -0
- data/lib/sidekiq/fetch.rb +20 -19
- data/lib/sidekiq/job.rb +375 -10
- data/lib/sidekiq/job_logger.rb +1 -1
- data/lib/sidekiq/job_retry.rb +74 -53
- data/lib/sidekiq/job_util.rb +17 -11
- data/lib/sidekiq/launcher.rb +63 -69
- data/lib/sidekiq/logger.rb +6 -45
- data/lib/sidekiq/manager.rb +33 -32
- data/lib/sidekiq/metrics/query.rb +153 -0
- data/lib/sidekiq/metrics/shared.rb +95 -0
- data/lib/sidekiq/metrics/tracking.rb +134 -0
- data/lib/sidekiq/middleware/chain.rb +84 -42
- data/lib/sidekiq/middleware/current_attributes.rb +18 -17
- data/lib/sidekiq/middleware/i18n.rb +6 -4
- data/lib/sidekiq/middleware/modules.rb +21 -0
- data/lib/sidekiq/monitor.rb +1 -1
- data/lib/sidekiq/paginator.rb +10 -2
- data/lib/sidekiq/processor.rb +56 -59
- data/lib/sidekiq/rails.rb +10 -9
- data/lib/sidekiq/redis_client_adapter.rb +118 -0
- data/lib/sidekiq/redis_connection.rb +13 -82
- data/lib/sidekiq/ring_buffer.rb +29 -0
- data/lib/sidekiq/scheduled.rb +65 -37
- data/lib/sidekiq/testing/inline.rb +4 -4
- data/lib/sidekiq/testing.rb +41 -68
- data/lib/sidekiq/transaction_aware_client.rb +44 -0
- data/lib/sidekiq/version.rb +2 -1
- data/lib/sidekiq/web/action.rb +3 -3
- data/lib/sidekiq/web/application.rb +22 -6
- data/lib/sidekiq/web/csrf_protection.rb +3 -3
- data/lib/sidekiq/web/helpers.rb +21 -19
- data/lib/sidekiq/web.rb +3 -14
- data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
- data/lib/sidekiq.rb +84 -207
- data/sidekiq.gemspec +29 -5
- data/web/assets/javascripts/application.js +58 -26
- data/web/assets/javascripts/base-charts.js +106 -0
- data/web/assets/javascripts/chart.min.js +13 -0
- data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
- data/web/assets/javascripts/dashboard-charts.js +166 -0
- data/web/assets/javascripts/dashboard.js +3 -240
- data/web/assets/javascripts/metrics.js +236 -0
- data/web/assets/stylesheets/application-rtl.css +2 -91
- data/web/assets/stylesheets/application.css +64 -297
- data/web/locales/ar.yml +70 -70
- data/web/locales/cs.yml +62 -62
- data/web/locales/da.yml +52 -52
- data/web/locales/de.yml +65 -65
- data/web/locales/el.yml +43 -24
- data/web/locales/en.yml +82 -69
- data/web/locales/es.yml +68 -68
- data/web/locales/fa.yml +65 -65
- data/web/locales/fr.yml +67 -67
- data/web/locales/he.yml +65 -64
- data/web/locales/hi.yml +59 -59
- data/web/locales/it.yml +53 -53
- data/web/locales/ja.yml +71 -68
- data/web/locales/ko.yml +52 -52
- data/web/locales/lt.yml +66 -66
- data/web/locales/nb.yml +61 -61
- data/web/locales/nl.yml +52 -52
- data/web/locales/pl.yml +45 -45
- data/web/locales/pt-br.yml +63 -55
- data/web/locales/pt.yml +51 -51
- data/web/locales/ru.yml +67 -66
- data/web/locales/sv.yml +53 -53
- data/web/locales/ta.yml +60 -60
- data/web/locales/uk.yml +62 -61
- data/web/locales/ur.yml +64 -64
- data/web/locales/vi.yml +67 -67
- data/web/locales/zh-cn.yml +37 -11
- data/web/locales/zh-tw.yml +42 -8
- data/web/views/_footer.erb +5 -2
- data/web/views/_nav.erb +1 -1
- data/web/views/_summary.erb +1 -1
- data/web/views/busy.erb +9 -4
- data/web/views/dashboard.erb +36 -4
- data/web/views/metrics.erb +80 -0
- data/web/views/metrics_for_job.erb +69 -0
- data/web/views/queue.erb +5 -1
- metadata +69 -22
- data/lib/sidekiq/delay.rb +0 -43
- data/lib/sidekiq/exception_handler.rb +0 -27
- data/lib/sidekiq/extensions/action_mailer.rb +0 -48
- data/lib/sidekiq/extensions/active_record.rb +0 -43
- data/lib/sidekiq/extensions/class_methods.rb +0 -43
- data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
- data/lib/sidekiq/util.rb +0 -108
- data/lib/sidekiq/worker.rb +0 -362
- /data/{LICENSE → LICENSE.txt} +0 -0
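Beyond api.rb, the file list above shows the 6.x Delayed extensions (delay.rb, extensions/*) and worker.rb removed, with job.rb and worker_compatibility_alias.rb added in their place. A minimal sketch of the 7.x spelling, assuming the long-standing Sidekiq::Job alias for Sidekiq::Worker (the job class and arguments below are made up for illustration):

```ruby
class HardJob
  # 7.x prefers the Sidekiq::Job name; Sidekiq::Worker survives as a compatibility alias.
  include Sidekiq::Job
  sidekiq_options queue: "default", retry: 5

  def perform(user_id)
    # do the actual work for user_id here
  end
end

HardJob.perform_async(1234)
```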
data/lib/sidekiq/api.rb
CHANGED
@@ -3,9 +3,28 @@
 require "sidekiq"
 
 require "zlib"
+require "set"
 require "base64"
 
+require "sidekiq/metrics/query"
+
+#
+# Sidekiq's Data API provides a Ruby object model on top
+# of Sidekiq's runtime data in Redis. This API should never
+# be used within application code for business logic.
+#
+# The Sidekiq server process never uses this API: all data
+# manipulation is done directly for performance reasons to
+# ensure we are using Redis as efficiently as possible at
+# every callsite.
+#
+
 module Sidekiq
+  # Retrieve runtime statistics from Redis regarding
+  # this Sidekiq cluster.
+  #
+  #   stat = Sidekiq::Stats.new
+  #   stat.processed
   class Stats
     def initialize
       fetch_stats_fast!
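The new header comment positions this file as a read-only data API. A minimal usage sketch of the Stats reader documented above, assuming a reachable Redis; the returned numbers are illustrative:

```ruby
require "sidekiq/api"

stat = Sidekiq::Stats.new
stat.processed  # => 1_000_000 (lifetime processed counter)
stat.failed     # => 32 (lifetime failed counter)
stat.enqueued   # => 18 (sum of all queue sizes)
stat.queues     # => {"default" => 12, "mailer" => 6}, largest queue first
```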
@@ -48,10 +67,22 @@ module Sidekiq
     end
 
     def queues
-      Sidekiq::Stats::Queues.new.lengths
+      Sidekiq.redis do |conn|
+        queues = conn.sscan("queues").to_a
+
+        lengths = conn.pipelined { |pipeline|
+          queues.each do |queue|
+            pipeline.llen("queue:#{queue}")
+          end
+        }
+
+        array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+        array_of_arrays.to_h
+      end
     end
 
     # O(1) redis calls
+    # @api private
     def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do |pipeline|
@@ -91,13 +122,14 @@ module Sidekiq
     end
 
     # O(number of processes + number of queues) redis calls
+    # @api private
     def fetch_stats_slow!
       processes = Sidekiq.redis { |conn|
-        conn.sscan_each("processes").to_a
+        conn.sscan("processes").to_a
       }
 
       queues = Sidekiq.redis { |conn|
-        conn.sscan_each("queues").to_a
+        conn.sscan("queues").to_a
       }
 
       pipe2_res = Sidekiq.redis { |conn|
@@ -109,18 +141,20 @@ module Sidekiq
 
       s = processes.size
       workers_size = pipe2_res[0...s].sum(&:to_i)
-      enqueued = pipe2_res[s..-1].sum(&:to_i)
+      enqueued = pipe2_res[s..].sum(&:to_i)
 
       @stats[:workers_size] = workers_size
       @stats[:enqueued] = enqueued
       @stats
     end
 
+    # @api private
     def fetch_stats!
       fetch_stats_fast!
       fetch_stats_slow!
     end
 
+    # @api private
     def reset(*stats)
       all = %w[failed processed]
       stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
@@ -142,25 +176,8 @@ module Sidekiq
       @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end
 
-    class Queues
-      def lengths
-        Sidekiq.redis do |conn|
-          queues = conn.sscan_each("queues").to_a
-
-          lengths = conn.pipelined { |pipeline|
-            queues.each do |queue|
-              pipeline.llen("queue:#{queue}")
-            end
-          }
-
-          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
-          array_of_arrays.to_h
-        end
-      end
-    end
-
     class History
-      def initialize(days_previous, start_date = nil)
+      def initialize(days_previous, start_date = nil, pool: nil)
         # we only store five years of data in Redis
         raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
@@ -185,15 +202,10 @@ module Sidekiq
 
         keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
-        begin
-          Sidekiq.redis do |conn|
-            conn.mget(keys).each_with_index do |value, idx|
-              stat_hash[dates[idx]] = value ? value.to_i : 0
-            end
+        Sidekiq.redis do |conn|
+          conn.mget(keys).each_with_index do |value, idx|
+            stat_hash[dates[idx]] = value ? value.to_i : 0
           end
-        rescue Redis::CommandError
-          # mget will trigger a CROSSSLOT error when run against a Cluster
-          # TODO Someone want to add Cluster support?
         end
 
         stat_hash
@@ -202,9 +214,10 @@ module Sidekiq
   end
 
   ##
-  #
+  # Represents a queue within Sidekiq.
   # Allows enumeration of all jobs within the queue
-  # and deletion of jobs.
+  # and deletion of jobs. NB: this queue data is real-time
+  # and is changing within Redis moment by moment.
   #
   #   queue = Sidekiq::Queue.new("mailer")
   #   queue.each do |job|
@@ -212,29 +225,34 @@ module Sidekiq
   #     job.args # => [1, 2, 3]
   #     job.delete if job.jid == 'abcdef1234567890'
   #   end
-  #
   class Queue
     include Enumerable
 
     ##
-    #
+    # Fetch all known queues within Redis.
     #
+    # @return [Array<Sidekiq::Queue>]
     def self.all
-      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
+    # @param name [String] the name of the queue
     def initialize(name = "default")
       @name = name.to_s
       @rname = "queue:#{name}"
     end
 
+    # The current size of the queue within Redis.
+    # This value is real-time and can change between calls.
+    #
+    # @return [Integer] the size
    def size
       Sidekiq.redis { |con| con.llen(@rname) }
     end
 
-    #
+    # @return [Boolean] if the queue is currently paused
     def paused?
       false
     end
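The expanded Queue comments stress that the data is real-time and enumerable. A short sketch of the calls documented in the hunks above, assuming a "mailer" queue exists (values are illustrative):

```ruby
require "sidekiq/api"

Sidekiq::Queue.all            # => [#<Sidekiq::Queue "default">, #<Sidekiq::Queue "mailer">]

queue = Sidekiq::Queue.new("mailer")
queue.size                    # => 42, changes moment by moment
queue.latency                 # => 3.2, seconds since the oldest job was enqueued
queue.each do |job|
  job.klass                   # the ActiveJob adapter class when using ActiveJob
  job.delete if job.jid == "abcdef1234567890"
end
```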
@@ -243,7 +261,7 @@ module Sidekiq
     # Calculates this queue's latency, the difference in seconds since the oldest
     # job in the queue was enqueued.
     #
-    # @return Float
+    # @return [Float] in seconds
     def latency
       entry = Sidekiq.redis { |conn|
         conn.lrange(@rname, -1, -1)
@@ -279,34 +297,54 @@ module Sidekiq
     ##
     # Find the job with the given JID within this queue.
     #
-    # This is a slow, inefficient operation. Do not use under
+    # This is a *slow, inefficient* operation. Do not use under
     # normal conditions.
+    #
+    # @param jid [String] the job_id to look for
+    # @return [Sidekiq::JobRecord]
+    # @return [nil] if not found
     def find_job(jid)
       detect { |j| j.jid == jid }
     end
 
+    # delete all jobs within this queue
+    # @return [Boolean] true
     def clear
       Sidekiq.redis do |conn|
         conn.multi do |transaction|
           transaction.unlink(@rname)
-          transaction.srem("queues", name)
+          transaction.srem("queues", [name])
         end
       end
+      true
     end
     alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
   end
 
   ##
-  #
-  # sorted set.
+  # Represents a pending job within a Sidekiq queue.
   #
   # The job should be considered immutable but may be
   # removed from the queue via JobRecord#delete.
-  #
   class JobRecord
+    # the parsed Hash of job data
+    # @!attribute [r] Item
     attr_reader :item
+    # the underlying String in Redis
+    # @!attribute [r] Value
     attr_reader :value
+    # the queue associated with this job
+    # @!attribute [r] Queue
+    attr_reader :queue
 
+    # :nodoc:
+    # @api private
     def initialize(item, queue_name = nil)
       @args = nil
       @value = item
@@ -314,6 +352,8 @@ module Sidekiq
       @queue = queue_name || @item["queue"]
     end
 
+    # :nodoc:
+    # @api private
     def parse(item)
       Sidekiq.load_json(item)
     rescue JSON::ParserError
@@ -325,6 +365,8 @@ module Sidekiq
       {}
     end
 
+    # This is the job class which Sidekiq will execute. If using ActiveJob,
+    # this class will be the ActiveJob adapter class rather than a specific job.
     def klass
       self["class"]
     end
@@ -332,12 +374,7 @@ module Sidekiq
     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
       @klass ||= self["display_class"] || begin
-        case klass
-        when /\ASidekiq::Extensions::Delayed/
-          safe_load(args[0], klass) do |target, method, _|
-            "#{target}.#{method}"
-          end
-        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
           job_class = @item["wrapped"] || args[0]
           if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
             # MailerClass#mailer_method
@@ -353,32 +390,23 @@ module Sidekiq
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @display_args ||=
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          else
-            job_args
-          end
-        else
-          if self["encrypt"]
-            # no point in showing 150+ bytes of random garbage
-            args[-1] = "[encrypted data]"
-          end
-          args
+      @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+        job_args = self["wrapped"] ? args[0]["arguments"] : []
+        if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3)
+        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3).first["args"]
+        else
+          job_args
+        end
+      else
+        if self["encrypt"]
+          # no point in showing 150+ bytes of random garbage
+          args[-1] = "[encrypted data]"
+        end
+        args
       end
     end
 
@@ -412,15 +440,12 @@ module Sidekiq
       end
     end
 
-    attr_reader :queue
-
     def latency
       now = Time.now.to_f
       now - (@item["enqueued_at"] || @item["created_at"] || now)
     end
 
-
-    # Remove this job from the queue.
+    # Remove this job from the queue
     def delete
       count = Sidekiq.redis { |conn|
         conn.lrem("queue:#{@queue}", 1, @value)
@@ -428,6 +453,7 @@ module Sidekiq
       count != 0
     end
 
+    # Access arbitrary attributes within the job hash
     def [](name)
       # nil will happen if the JSON fails to parse.
       # We don't guarantee Sidekiq will work with bad job JSON but we should
@@ -437,47 +463,35 @@ module Sidekiq
 
     private
 
-    def safe_load(content, default)
-      yield(*YAML.load(content))
-    rescue => ex
-      # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
-      # memory yet so the YAML can't be loaded.
-      Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
-      default
-    end
-
     def uncompress_backtrace(backtrace)
-
-
-
-      else
-        decoded = Base64.decode64(backtrace)
-        uncompressed = Zlib::Inflate.inflate(decoded)
-        begin
-          Sidekiq.load_json(uncompressed)
-        rescue
-          # Handle old jobs with marshalled backtrace format
-          # TODO Remove in 7.x
-          Marshal.load(uncompressed)
-        end
-      end
+      decoded = Base64.decode64(backtrace)
+      uncompressed = Zlib::Inflate.inflate(decoded)
+      Sidekiq.load_json(uncompressed)
     end
   end
 
+  # Represents a job within a Redis sorted set where the score
+  # represents a timestamp associated with the job. This timestamp
+  # could be the scheduled time for it to run (e.g. scheduled set),
+  # or the expiration date after which the entry should be deleted (e.g. dead set).
   class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent
 
+    # :nodoc:
+    # @api private
     def initialize(parent, score, item)
       super(item)
-      @score = score
+      @score = Float(score)
       @parent = parent
     end
 
+    # The timestamp associated with this entry
     def at
       Time.at(score).utc
     end
 
+    # remove this entry from the sorted set
     def delete
       if @value
         @parent.delete_by_value(@parent.name, @value)
@@ -486,12 +500,17 @@ module Sidekiq
       end
     end
 
+    # Change the scheduled time for this job.
+    #
+    # @param at [Time] the new timestamp for this job
     def reschedule(at)
       Sidekiq.redis do |conn|
         conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
       end
     end
 
+    # Enqueue this job from the scheduled or dead set so it will
+    # be executed at some point in the near future.
     def add_to_queue
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -499,6 +518,8 @@ module Sidekiq
       end
     end
 
+    # enqueue this job from the retry set so it will be executed
+    # at some point in the near future.
     def retry
       remove_job do |message|
         msg = Sidekiq.load_json(message)
@@ -507,8 +528,7 @@ module Sidekiq
       end
     end
 
-
-    # Place job in the dead set
+    # Move this job from its current set into the Dead set.
     def kill
       remove_job do |message|
         DeadSet.new.kill(message)
@@ -556,43 +576,69 @@ module Sidekiq
     end
   end
 
+  # Base class for all sorted sets within Sidekiq.
   class SortedSet
     include Enumerable
 
+    # Redis key of the set
+    # @!attribute [r] Name
     attr_reader :name
 
+    # :nodoc:
+    # @api private
     def initialize(name)
       @name = name
       @_size = size
     end
 
+    # real-time size of the set, will change
    def size
       Sidekiq.redis { |c| c.zcard(name) }
     end
 
+    # Scan through each element of the sorted set, yielding each to the supplied block.
+    # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
+    #
+    # @param match [String] a snippet or regexp to filter matches.
+    # @param count [Integer] number of elements to retrieve at a time, default 100
+    # @yieldparam [Sidekiq::SortedEntry] each entry
     def scan(match, count = 100)
       return to_enum(:scan, match, count) unless block_given?
 
       match = "*#{match}*" unless match.include?("*")
       Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: match, count: count) do |entry, score|
+        conn.zscan(name, match: match, count: count) do |entry, score|
          yield SortedEntry.new(self, score, entry)
        end
      end
    end
 
+    # @return [Boolean] always true
    def clear
      Sidekiq.redis do |conn|
        conn.unlink(name)
      end
+      true
    end
    alias_method :💣, :clear
+
+    # :nodoc:
+    # @api private
+    def as_json(options = nil)
+      {name: name} # 5336
+    end
  end
 
+  # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
+  # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
+  # e.g. Batches.
  class JobSet < SortedSet
-
+    # Add a job with the associated timestamp to this set.
+    # @param timestamp [Time] the score for the job
+    # @param job [Hash] the job data
+    def schedule(timestamp, job)
      Sidekiq.redis do |conn|
-        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(
+        conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
      end
    end
 
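SortedSet#scan is now documented with its match/count parameters. A hedged sketch of scanning the retry set (the job class name is made up; scan wraps the match in wildcards unless you pass your own):

```ruby
require "sidekiq/api"

# Yield only entries whose serialized JSON contains "WelcomeEmailJob",
# fetching 100 elements per SCAN round trip.
Sidekiq::RetrySet.new.scan("WelcomeEmailJob", 100) do |entry|
  entry.at     # the timestamp associated with the entry
  entry.retry  # push it back onto its queue right away
end

# Without a block, scan returns an Enumerator.
stuck = Sidekiq::DeadSet.new.scan("*WelcomeEmailJob*").to_a
```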
@@ -606,7 +652,7 @@ module Sidekiq
        range_start = page * page_size + offset_size
        range_end = range_start + page_size - 1
        elements = Sidekiq.redis { |conn|
-          conn.zrange name, range_start, range_end, with_scores: true
+          conn.zrange name, range_start, range_end, withscores: true
        }
        break if elements.empty?
        page -= 1
@@ -620,6 +666,10 @@ module Sidekiq
    ##
    # Fetch jobs that match a given time or Range. Job ID is an
    # optional second argument.
+    #
+    # @param score [Time,Range] a specific timestamp or range
+    # @param jid [String, optional] find a specific JID within the score
+    # @return [Array<SortedEntry>] any results found, can be empty
    def fetch(score, jid = nil)
      begin_score, end_score =
        if score.is_a?(Range)
@@ -629,7 +679,7 @@ module Sidekiq
        end
 
      elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
+        conn.zrangebyscore(name, begin_score, end_score, withscores: true)
      }
 
      elements.each_with_object([]) do |element, result|
@@ -641,10 +691,13 @@ module Sidekiq
 
    ##
    # Find the job with the given JID within this sorted set.
-    # This is a
+    # *This is a slow O(n) operation*. Do not use for app logic.
+    #
+    # @param jid [String] the job identifier
+    # @return [SortedEntry] the record or nil
    def find_job(jid)
      Sidekiq.redis do |conn|
-        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+        conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
          job = JSON.parse(entry)
          matched = job["jid"] == jid
          return SortedEntry.new(self, score, entry) if matched
@@ -653,6 +706,8 @@ module Sidekiq
      nil
    end
 
+    # :nodoc:
+    # @api private
    def delete_by_value(name, value)
      Sidekiq.redis do |conn|
        ret = conn.zrem(name, value)
@@ -661,6 +716,8 @@ module Sidekiq
      end
    end
 
+    # :nodoc:
+    # @api private
    def delete_by_jid(score, jid)
      Sidekiq.redis do |conn|
        elements = conn.zrangebyscore(name, score, score)
@@ -681,17 +738,13 @@ module Sidekiq
  end
 
  ##
-  #
+  # The set of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
-  # example where I'm selecting
-  # and deleting them from the
+  # example where I'm selecting jobs based on some complex logic
+  # and deleting them from the scheduled set.
+  #
+  # See the API wiki page for usage notes and examples.
  #
-  #   r = Sidekiq::ScheduledSet.new
-  #   r.select do |scheduled|
-  #     scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #     scheduled.args[0] == 'User' &&
-  #     scheduled.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
  class ScheduledSet < JobSet
    def initialize
      super "schedule"
@@ -699,46 +752,48 @@ module Sidekiq
  end
 
  ##
-  #
+  # The set of retries within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
  # and deleting them from the retry queue.
  #
-  #
-  #
-  #     retri.klass == 'Sidekiq::Extensions::DelayedClass' &&
-  #     retri.args[0] == 'User' &&
-  #     retri.args[1] == 'setup_new_subscriber'
-  #   end.map(&:delete)
+  # See the API wiki page for usage notes and examples.
+  #
  class RetrySet < JobSet
    def initialize
      super "retry"
    end
 
+    # Enqueues all jobs pending within the retry set.
    def retry_all
      each(&:retry) while size > 0
    end
 
+    # Kills all jobs pending within the retry set.
    def kill_all
      each(&:kill) while size > 0
    end
  end
 
  ##
-  #
+  # The set of dead jobs within Sidekiq. Dead jobs have failed all of
+  # their retries and are helding in this set pending some sort of manual
+  # fix. They will be removed after 6 months (dead_timeout) if not.
  #
  class DeadSet < JobSet
    def initialize
      super "dead"
    end
 
+    # Add the given job to the Dead set.
+    # @param message [String] the job data as JSON
    def kill(message, opts = {})
      now = Time.now.to_f
      Sidekiq.redis do |conn|
        conn.multi do |transaction|
          transaction.zadd(name, now.to_s, message)
-          transaction.zremrangebyscore(name, "-inf", now - self.class.timeout)
-          transaction.zremrangebyrank(name, 0, - self.class.max_jobs)
+          transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
+          transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
        end
      end
 
@@ -746,24 +801,17 @@ module Sidekiq
        job = Sidekiq.load_json(message)
        r = RuntimeError.new("Job killed by API")
        r.set_backtrace(caller)
-        Sidekiq.death_handlers.each do |handle|
+        Sidekiq.default_configuration.death_handlers.each do |handle|
          handle.call(job, r)
        end
      end
      true
    end
 
+    # Enqueue all dead jobs
    def retry_all
      each(&:retry) while size > 0
    end
-
-    def self.max_jobs
-      Sidekiq.options[:dead_max_jobs]
-    end
-
-    def self.timeout
-      Sidekiq.options[:dead_timeout_in_seconds]
-    end
  end
 
  ##
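Two 7.0 shifts are visible in these hunks: DeadSet.max_jobs and DeadSet.timeout are gone in favor of Sidekiq::Config::DEFAULTS, and the death handlers now hang off Sidekiq.default_configuration. A hedged sketch of how calling code might adapt (the handler body is illustrative):

```ruby
# Registering a death handler still goes through configure_server in 7.x.
Sidekiq.configure_server do |config|
  config.death_handlers << ->(job, ex) do
    # Runs when a job exhausts its retries and is moved to the dead set.
    Sidekiq.logger.warn("#{job["class"]} #{job["jid"]} died: #{ex.message}")
  end
end

# Reading the handlers and the dead-set limits under 7.x:
Sidekiq.default_configuration.death_handlers        # was Sidekiq.death_handlers
Sidekiq::Config::DEFAULTS[:dead_max_jobs]            # was Sidekiq::DeadSet.max_jobs
Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds]  # was Sidekiq::DeadSet.timeout
```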
@@ -771,21 +819,28 @@ module Sidekiq
  # right now. Each process sends a heartbeat to Redis every 5 seconds
  # so this set should be relatively accurate, barring network partitions.
  #
-  #
+  # @yieldparam [Sidekiq::Process]
  #
  class ProcessSet
    include Enumerable
 
+    # :nodoc:
+    # @api private
    def initialize(clean_plz = true)
      cleanup if clean_plz
    end
 
    # Cleans up dead processes recorded in Redis.
    # Returns the number of processes cleaned.
+    # :nodoc:
+    # @api private
    def cleanup
+      # dont run cleanup more than once per minute
+      return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
+        procs = conn.sscan("processes").to_a
        heartbeats = conn.pipelined { |pipeline|
          procs.each do |key|
            pipeline.hget(key, "info")
@@ -805,7 +860,7 @@ module Sidekiq
 
    def each
      result = Sidekiq.redis { |conn|
-        procs = conn.sscan_each("processes").to_a.sort
+        procs = conn.sscan("processes").to_a.sort
 
        # We're making a tradeoff here between consuming more memory instead of
        # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
@@ -836,6 +891,7 @@ module Sidekiq
    # based on current heartbeat. #each does that and ensures the set only
    # contains Sidekiq processes which have sent a heartbeat within the last
    # 60 seconds.
+    # @return [Integer] current number of registered Sidekiq processes
    def size
      Sidekiq.redis { |conn| conn.scard("processes") }
    end
@@ -843,10 +899,12 @@ module Sidekiq
    # Total number of threads available to execute jobs.
    # For Sidekiq Enterprise customers this number (in production) must be
    # less than or equal to your licensed concurrency.
+    # @return [Integer] the sum of process concurrency
    def total_concurrency
      sum { |x| x["concurrency"].to_i }
    end
 
+    # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
    def total_rss_in_kb
      sum { |x| x["rss"].to_i }
    end
@@ -855,6 +913,8 @@ module Sidekiq
    # Returns the identity of the current cluster leader or "" if no leader.
    # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
    # or Sidekiq Pro.
+    # @return [String] Identity of cluster leader
+    # @return [String] empty string if no leader
    def leader
      @leader ||= begin
        x = Sidekiq.redis { |c| c.get("dear-leader") }
@@ -881,6 +941,8 @@ module Sidekiq
  #   'identity' => <unique string identifying the process>,
  # }
  class Process
+    # :nodoc:
+    # @api private
    def initialize(hash)
      @attribs = hash
    end
@@ -890,7 +952,7 @@ module Sidekiq
    end
 
    def labels
-
+      self["labels"].to_a
    end
 
    def [](key)
@@ -905,18 +967,31 @@ module Sidekiq
      self["queues"]
    end
 
+    # Signal this process to stop processing new jobs.
+    # It will continue to execute jobs it has already fetched.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to quiet.
    def quiet!
      signal("TSTP")
    end
 
+    # Signal this process to shutdown.
+    # It will shutdown within its configured :timeout value, default 25 seconds.
+    # This method is *asynchronous* and it can take 5-10
+    # seconds for the process to start shutting down.
    def stop!
      signal("TERM")
    end
 
+    # Signal this process to log backtraces for all threads.
+    # Useful if you have a frozen or deadlocked process which is
+    # still sending a heartbeat.
+    # This method is *asynchronous* and it can take 5-10 seconds.
    def dump_threads
      signal("TTIN")
    end
 
+    # @return [Boolean] true if this process is quiet or shutting down
    def stopping?
      self["quiet"] == "true"
    end
@@ -959,24 +1034,24 @@ module Sidekiq
 
    def each(&block)
      results = []
+      procs = nil
+      all_works = nil
+
      Sidekiq.redis do |conn|
-        procs = conn.
-
-
-          pipeline.
-          pipeline.hgetall("#{key}:workers")
-        }
-        next unless valid
-        workers.each_pair do |tid, json|
-          hsh = Sidekiq.load_json(json)
-          p = hsh["payload"]
-          # avoid breaking API, this is a side effect of the JSON optimization in #4316
-          hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
-          results << [key, tid, hsh]
+        procs = conn.sscan("processes").to_a.sort
+        all_works = conn.pipelined do |pipeline|
+          procs.each do |key|
+            pipeline.hgetall("#{key}:work")
          end
        end
      end
 
+      procs.zip(all_works).each do |key, workers|
+        workers.each_pair do |tid, json|
+          results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
+        end
+      end
+
      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
    end
 
@@ -988,7 +1063,7 @@ module Sidekiq
    # which can easily get out of sync with crashy processes.
    def size
      Sidekiq.redis do |conn|
-        procs = conn.sscan_each("processes").to_a
+        procs = conn.sscan("processes").to_a
        if procs.empty?
          0
        else