sidekiq 6.0.0 → 6.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sidekiq might be problematic.
- checksums.yaml +4 -4
- data/Changes.md +258 -2
- data/LICENSE +1 -1
- data/README.md +6 -8
- data/bin/sidekiq +26 -2
- data/bin/sidekiqload +8 -4
- data/bin/sidekiqmon +4 -5
- data/lib/generators/sidekiq/worker_generator.rb +11 -1
- data/lib/sidekiq/api.rb +220 -145
- data/lib/sidekiq/cli.rb +64 -27
- data/lib/sidekiq/client.rb +31 -14
- data/lib/sidekiq/extensions/action_mailer.rb +3 -2
- data/lib/sidekiq/extensions/active_record.rb +4 -3
- data/lib/sidekiq/extensions/class_methods.rb +5 -4
- data/lib/sidekiq/extensions/generic_proxy.rb +3 -1
- data/lib/sidekiq/fetch.rb +36 -27
- data/lib/sidekiq/job.rb +13 -0
- data/lib/sidekiq/job_logger.rb +13 -5
- data/lib/sidekiq/job_retry.rb +27 -17
- data/lib/sidekiq/launcher.rb +110 -28
- data/lib/sidekiq/logger.rb +109 -12
- data/lib/sidekiq/manager.rb +4 -4
- data/lib/sidekiq/middleware/chain.rb +17 -6
- data/lib/sidekiq/middleware/current_attributes.rb +48 -0
- data/lib/sidekiq/monitor.rb +3 -18
- data/lib/sidekiq/paginator.rb +7 -2
- data/lib/sidekiq/processor.rb +22 -24
- data/lib/sidekiq/rails.rb +27 -18
- data/lib/sidekiq/redis_connection.rb +19 -13
- data/lib/sidekiq/scheduled.rb +37 -11
- data/lib/sidekiq/sd_notify.rb +149 -0
- data/lib/sidekiq/systemd.rb +24 -0
- data/lib/sidekiq/testing.rb +14 -4
- data/lib/sidekiq/util.rb +28 -2
- data/lib/sidekiq/version.rb +1 -1
- data/lib/sidekiq/web/action.rb +2 -2
- data/lib/sidekiq/web/application.rb +37 -30
- data/lib/sidekiq/web/csrf_protection.rb +180 -0
- data/lib/sidekiq/web/helpers.rb +51 -33
- data/lib/sidekiq/web/router.rb +6 -5
- data/lib/sidekiq/web.rb +37 -73
- data/lib/sidekiq/worker.rb +78 -14
- data/lib/sidekiq.rb +24 -8
- data/sidekiq.gemspec +13 -6
- data/web/assets/images/apple-touch-icon.png +0 -0
- data/web/assets/javascripts/application.js +83 -64
- data/web/assets/javascripts/dashboard.js +53 -53
- data/web/assets/stylesheets/application-dark.css +147 -0
- data/web/assets/stylesheets/application-rtl.css +0 -4
- data/web/assets/stylesheets/application.css +43 -230
- data/web/locales/ar.yml +8 -2
- data/web/locales/de.yml +14 -2
- data/web/locales/en.yml +6 -1
- data/web/locales/es.yml +18 -2
- data/web/locales/fr.yml +10 -3
- data/web/locales/ja.yml +5 -0
- data/web/locales/lt.yml +83 -0
- data/web/locales/pl.yml +4 -4
- data/web/locales/ru.yml +4 -0
- data/web/locales/vi.yml +83 -0
- data/web/views/_footer.erb +1 -1
- data/web/views/_job_info.erb +3 -2
- data/web/views/_poll_link.erb +2 -5
- data/web/views/_summary.erb +7 -7
- data/web/views/busy.erb +54 -20
- data/web/views/dashboard.erb +22 -14
- data/web/views/dead.erb +3 -3
- data/web/views/layout.erb +3 -1
- data/web/views/morgue.erb +9 -6
- data/web/views/queue.erb +19 -10
- data/web/views/queues.erb +10 -2
- data/web/views/retries.erb +11 -8
- data/web/views/retry.erb +3 -3
- data/web/views/scheduled.erb +5 -2
- metadata +29 -50
- data/.circleci/config.yml +0 -61
- data/.github/contributing.md +0 -32
- data/.github/issue_template.md +0 -11
- data/.gitignore +0 -13
- data/.standard.yml +0 -20
- data/3.0-Upgrade.md +0 -70
- data/4.0-Upgrade.md +0 -53
- data/5.0-Upgrade.md +0 -56
- data/6.0-Upgrade.md +0 -70
- data/COMM-LICENSE +0 -97
- data/Ent-2.0-Upgrade.md +0 -37
- data/Ent-Changes.md +0 -250
- data/Gemfile +0 -24
- data/Gemfile.lock +0 -196
- data/Pro-2.0-Upgrade.md +0 -138
- data/Pro-3.0-Upgrade.md +0 -44
- data/Pro-4.0-Upgrade.md +0 -35
- data/Pro-5.0-Upgrade.md +0 -25
- data/Pro-Changes.md +0 -768
- data/Rakefile +0 -10
- data/code_of_conduct.md +0 -50
data/lib/sidekiq/api.rb
CHANGED
@@ -2,25 +2,13 @@
 
 require "sidekiq"
 
-
-
-    def sscan(conn, key)
-      cursor = "0"
-      result = []
-      loop do
-        cursor, values = conn.sscan(key, cursor)
-        result.push(*values)
-        break if cursor == "0"
-      end
-      result
-    end
-  end
+require "zlib"
+require "base64"
 
+module Sidekiq
   class Stats
-    include RedisScanner
-
     def initialize
-
+      fetch_stats_fast!
     end
 
     def processed
@@ -63,7 +51,8 @@ module Sidekiq
       Sidekiq::Stats::Queues.new.lengths
     end
 
-
+    # O(1) redis calls
+    def fetch_stats_fast!
       pipe1_res = Sidekiq.redis { |conn|
         conn.pipelined do
           conn.get("stat:processed")
@@ -76,37 +65,19 @@ module Sidekiq
         end
       }
 
-      processes = Sidekiq.redis { |conn|
-        sscan(conn, "processes")
-      }
-
-      queues = Sidekiq.redis { |conn|
-        sscan(conn, "queues")
-      }
-
-      pipe2_res = Sidekiq.redis { |conn|
-        conn.pipelined do
-          processes.each { |key| conn.hget(key, "busy") }
-          queues.each { |queue| conn.llen("queue:#{queue}") }
-        end
-      }
-
-      s = processes.size
-      workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
-      enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
-
       default_queue_latency = if (entry = pipe1_res[6].first)
         job = begin
-
-
-
-
+          Sidekiq.load_json(entry)
+        rescue
+          {}
+        end
         now = Time.now.to_f
         thence = job["enqueued_at"] || now
         now - thence
       else
         0
       end
+
       @stats = {
         processed: pipe1_res[0].to_i,
         failed: pipe1_res[1].to_i,
@@ -115,10 +86,39 @@ module Sidekiq
         dead_size: pipe1_res[4],
         processes_size: pipe1_res[5],
 
-        default_queue_latency: default_queue_latency
-
-
+        default_queue_latency: default_queue_latency
+      }
+    end
+
+    # O(number of processes + number of queues) redis calls
+    def fetch_stats_slow!
+      processes = Sidekiq.redis { |conn|
+        conn.sscan_each("processes").to_a
+      }
+
+      queues = Sidekiq.redis { |conn|
+        conn.sscan_each("queues").to_a
+      }
+
+      pipe2_res = Sidekiq.redis { |conn|
+        conn.pipelined do
+          processes.each { |key| conn.hget(key, "busy") }
+          queues.each { |queue| conn.llen("queue:#{queue}") }
+        end
       }
+
+      s = processes.size
+      workers_size = pipe2_res[0...s].sum(&:to_i)
+      enqueued = pipe2_res[s..-1].sum(&:to_i)
+
+      @stats[:workers_size] = workers_size
+      @stats[:enqueued] = enqueued
+      @stats
+    end
+
+    def fetch_stats!
+      fetch_stats_fast!
+      fetch_stats_slow!
     end
 
     def reset(*stats)
@@ -138,15 +138,14 @@ module Sidekiq
     private
 
     def stat(s)
-      @stats[s]
+      fetch_stats_slow! if @stats[s].nil?
+      @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
     end
 
     class Queues
-      include RedisScanner
-
      def lengths
        Sidekiq.redis do |conn|
-          queues =
+          queues = conn.sscan_each("queues").to_a
 
          lengths = conn.pipelined {
            queues.each do |queue|
@@ -154,13 +153,8 @@ module Sidekiq
            end
          }
 
-
-          array_of_arrays
-            memo[queue] = lengths[i]
-            i += 1
-          }.sort_by { |_, size| size }
-
-          Hash[array_of_arrays.reverse]
+          array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+          array_of_arrays.to_h
        end
      end
    end
@@ -182,18 +176,12 @@ module Sidekiq
     private
 
     def date_stat_hash(stat)
-      i = 0
       stat_hash = {}
-
-
-
-
-
-        datestr = date.strftime("%Y-%m-%d")
-        keys << "stat:#{stat}:#{datestr}"
-        dates << datestr
-        i += 1
-      end
+      dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
+        date.strftime("%Y-%m-%d")
+      }
+
+      keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
       begin
         Sidekiq.redis do |conn|
@@ -225,13 +213,12 @@ module Sidekiq
   #
   class Queue
     include Enumerable
-    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c|
+      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
@@ -281,7 +268,7 @@ module Sidekiq
         break if entries.empty?
         page += 1
         entries.each do |entry|
-          yield
+          yield JobRecord.new(entry, @name)
         end
         deleted_size = initial_size - size
       end
@@ -291,7 +278,7 @@ module Sidekiq
     # Find the job with the given JID within this queue.
     #
     # This is a slow, inefficient operation. Do not use under
-    # normal conditions.
+    # normal conditions.
     def find_job(jid)
       detect { |j| j.jid == jid }
     end
@@ -299,7 +286,7 @@ module Sidekiq
     def clear
       Sidekiq.redis do |conn|
         conn.multi do
-          conn.
+          conn.unlink(@rname)
           conn.srem("queues", name)
         end
       end
@@ -312,9 +299,9 @@ module Sidekiq
   # sorted set.
   #
   # The job should be considered immutable but may be
-  # removed from the queue via
+  # removed from the queue via JobRecord#delete.
   #
-  class
+  class JobRecord
     attr_reader :item
     attr_reader :value
 
@@ -342,21 +329,23 @@ module Sidekiq
 
     def display_class
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @klass ||=
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      @klass ||= self["display_class"] || begin
+        case klass
+        when /\ASidekiq::Extensions::Delayed/
+          safe_load(args[0], klass) do |target, method, _|
+            "#{target}.#{method}"
+          end
+        when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+          job_class = @item["wrapped"] || args[0]
+          if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
+            # MailerClass#mailer_method
+            args[0]["arguments"][0..1].join("#")
+          else
+            job_class
+          end
+        else
+          klass
+        end
       end
     end
 
@@ -372,6 +361,9 @@ module Sidekiq
           if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
            # remove MailerClass, mailer_method and 'deliver_now'
            job_args.drop(3)
+          elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+            # remove MailerClass, mailer_method and 'deliver_now'
+            job_args.drop(3).first["args"]
          else
            job_args
          end
@@ -400,6 +392,20 @@ module Sidekiq
       Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
+    def tags
+      self["tags"] || []
+    end
+
+    def error_backtrace
+      # Cache nil values
+      if defined?(@error_backtrace)
+        @error_backtrace
+      else
+        value = self["error_backtrace"]
+        @error_backtrace = value && uncompress_backtrace(value)
+      end
+    end
+
     attr_reader :queue
 
     def latency
@@ -433,9 +439,26 @@ module Sidekiq
       Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
       default
     end
+
+    def uncompress_backtrace(backtrace)
+      if backtrace.is_a?(Array)
+        # Handle old jobs with raw Array backtrace format
+        backtrace
+      else
+        decoded = Base64.decode64(backtrace)
+        uncompressed = Zlib::Inflate.inflate(decoded)
+        begin
+          Sidekiq.load_json(uncompressed)
+        rescue
+          # Handle old jobs with marshalled backtrace format
+          # TODO Remove in 7.x
+          Marshal.load(uncompressed)
+        end
+      end
+    end
   end
 
-  class SortedEntry <
+  class SortedEntry < JobRecord
     attr_reader :score
     attr_reader :parent
 
@@ -458,8 +481,9 @@ module Sidekiq
     end
 
     def reschedule(at)
-
-
+      Sidekiq.redis do |conn|
+        conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
+      end
     end
 
     def add_to_queue
@@ -503,7 +527,7 @@ module Sidekiq
         else
           # multiple jobs with the same score
           # find the one with the right JID and push it
-
+          matched, nonmatched = results.partition { |message|
            if message.index(jid)
              msg = Sidekiq.load_json(message)
              msg["jid"] == jid
@@ -512,12 +536,12 @@ module Sidekiq
            end
          }
 
-          msg =
+          msg = matched.first
          yield msg if msg
 
          # push the rest back onto the sorted set
          conn.multi do
-
+            nonmatched.each do |message|
            conn.zadd(parent.name, score.to_f.to_s, message)
            end
          end
@@ -540,9 +564,20 @@ module Sidekiq
       Sidekiq.redis { |c| c.zcard(name) }
     end
 
+    def scan(match, count = 100)
+      return to_enum(:scan, match, count) unless block_given?
+
+      match = "*#{match}*" unless match.include?("*")
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: match, count: count) do |entry, score|
+          yield SortedEntry.new(self, score, entry)
+        end
+      end
+    end
+
     def clear
       Sidekiq.redis do |conn|
-        conn.
+        conn.unlink(name)
       end
     end
     alias_method :💣, :clear
@@ -576,28 +611,40 @@ module Sidekiq
       end
     end
 
+    ##
+    # Fetch jobs that match a given time or Range. Job ID is an
+    # optional second argument.
     def fetch(score, jid = nil)
+      begin_score, end_score =
+        if score.is_a?(Range)
+          [score.first, score.last]
+        else
+          [score, score]
+        end
+
       elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name,
+        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
       }
 
       elements.each_with_object([]) do |element, result|
-
-
-
-        else
-          result << entry
-        end
+        data, job_score = element
+        entry = SortedEntry.new(self, job_score, data)
+        result << entry if jid.nil? || entry.jid == jid
       end
     end
 
     ##
     # Find the job with the given JID within this sorted set.
-    #
-    # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # This is a slower O(n) operation. Do not use for app logic.
     def find_job(jid)
-
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = JSON.parse(entry)
+          matched = job["jid"] == jid
+          return SortedEntry.new(self, score, entry) if matched
+        end
+      end
+      nil
     end
 
     def delete_by_value(name, value)
@@ -612,11 +659,13 @@ module Sidekiq
       Sidekiq.redis do |conn|
        elements = conn.zrangebyscore(name, score, score)
        elements.each do |element|
-
-
-
-
-
+          if element.index(jid)
+            message = Sidekiq.load_json(element)
+            if message["jid"] == jid
+              ret = conn.zrem(name, element)
+              @_size -= 1 if ret
+              break ret
+            end
          end
        end
      end
@@ -720,7 +769,6 @@ module Sidekiq
   #
   class ProcessSet
     include Enumerable
-    include RedisScanner
 
     def initialize(clean_plz = true)
       cleanup if clean_plz
@@ -731,7 +779,7 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a.sort
         heartbeats = conn.pipelined {
           procs.each do |key|
             conn.hget(key, "info")
@@ -741,40 +789,41 @@ module Sidekiq
         # the hash named key has an expiry of 60 seconds.
         # if it's not found, that means the process has not reported
         # in to Redis and probably died.
-        to_prune =
-
-
-        end
+        to_prune = procs.select.with_index { |proc, i|
+          heartbeats[i].nil?
+        }
        count = conn.srem("processes", to_prune) unless to_prune.empty?
      end
      count
    end
 
    def each
-
+      result = Sidekiq.redis { |conn|
+        procs = conn.sscan_each("processes").to_a.sort
 
-      Sidekiq.redis do |conn|
        # We're making a tradeoff here between consuming more memory instead of
        # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
        # you'll be happier this way
-
+        conn.pipelined do
          procs.each do |key|
-            conn.hmget(key, "info", "busy", "beat", "quiet")
+            conn.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
          end
-
+        end
+      }
 
-
-
-
-
-
+      result.each do |info, busy, at_s, quiet, rss, rtt|
+        # If a process is stopped between when we query Redis for `procs` and
+        # when we query for `result`, we will have an item in `result` that is
+        # composed of `nil` values.
+        next if info.nil?
 
-
-
-
+        hsh = Sidekiq.load_json(info)
+        yield Process.new(hash.merge("busy" => busy.to_i,
+          "beat" => at_s.to_f,
+          "quiet" => quiet,
+          "rss" => rss.to_i,
+          "rtt_us" => rtt.to_i))
      end
-
-      nil
    end
 
    # This method is not guaranteed accurate since it does not prune the set
@@ -785,6 +834,18 @@ module Sidekiq
       Sidekiq.redis { |conn| conn.scard("processes") }
     end
 
+    # Total number of threads available to execute jobs.
+    # For Sidekiq Enterprise customers this number (in production) must be
+    # less than or equal to your licensed concurrency.
+    def total_concurrency
+      sum { |x| x["concurrency"].to_i }
+    end
+
+    def total_rss_in_kb
+      sum { |x| x["rss"].to_i }
+    end
+    alias_method :total_rss, :total_rss_in_kb
+
     # Returns the identity of the current cluster leader or "" if no leader.
     # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
     # or Sidekiq Pro.
@@ -834,6 +895,10 @@ module Sidekiq
       self["identity"]
     end
 
+    def queues
+      self["queues"]
+    end
+
     def quiet!
       signal("TSTP")
     end
@@ -864,8 +929,8 @@ module Sidekiq
   end
 
   ##
-  #
-  #
+  # The WorkSet stores the work being done by this Sidekiq cluster.
+  # It tracks the process and thread working on each job.
   #
   # WARNING WARNING WARNING
   #
@@ -873,34 +938,40 @@ module Sidekiq
   # If you call #size => 5 and then expect #each to be
   # called 5 times, you're going to have a bad time.
   #
-  #
-  #
-  #
+  #    works = Sidekiq::WorkSet.new
+  #    works.size => 2
+  #    works.each do |process_id, thread_id, work|
   #      # process_id is a unique identifier per Sidekiq process
   #      # thread_id is a unique identifier per thread
   #      # work is a Hash which looks like:
-  #      # { 'queue' => name, 'run_at' => timestamp, 'payload' =>
+  #      # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
   #      # run_at is an epoch Integer.
   #    end
   #
-  class
+  class WorkSet
     include Enumerable
-    include RedisScanner
 
-    def each
+    def each(&block)
+      results = []
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a
        procs.sort.each do |key|
          valid, workers = conn.pipelined {
-            conn.exists(key)
+            conn.exists?(key)
            conn.hgetall("#{key}:workers")
          }
          next unless valid
          workers.each_pair do |tid, json|
-
+            hsh = Sidekiq.load_json(json)
+            p = hsh["payload"]
+            # avoid breaking API, this is a side effect of the JSON optimization in #4316
+            hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
+            results << [key, tid, hsh]
          end
        end
      end
+
+      results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
    end
 
    # Note that #size is only as accurate as Sidekiq's heartbeat,
@@ -911,7 +982,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a
        if procs.empty?
          0
        else
@@ -919,9 +990,13 @@ module Sidekiq
             procs.each do |key|
              conn.hget(key, "busy")
            end
-          }.
+          }.sum(&:to_i)
        end
      end
    end
  end
+  # Since "worker" is a nebulous term, we've deprecated the use of this class name.
+  # Is "worker" a process, a type of job, a thread? Undefined!
+  # WorkSet better describes the data.
+  Workers = WorkSet
 end
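For orientation, a minimal console sketch of the public API surface added to api.rb in this diff (Stats fast/slow fetching, SortedSet#scan, ProcessSet#total_concurrency / #total_rss_in_kb); the "HardJob" class name is illustrative only, and a reachable Redis with sidekiq >= 6.3 loaded is assumed:

require "sidekiq/api"

# Stats.new now only issues the O(1) calls (fetch_stats_fast!); the
# per-queue/per-process numbers are filled in lazily via fetch_stats_slow!
# the first time a stat such as #enqueued is read.
stats = Sidekiq::Stats.new
puts stats.processed
puts stats.enqueued

# SortedSet#scan pushes the matching down to Redis via ZSCAN;
# "HardJob" is a hypothetical job class name.
Sidekiq::RetrySet.new.scan("HardJob") do |entry|
  entry.retry if entry.klass == "HardJob"
end

# ProcessSet#total_concurrency and #total_rss_in_kb sum the
# "concurrency" and "rss" fields reported by each process heartbeat.
ps = Sidekiq::ProcessSet.new
puts ps.total_concurrency
puts ps.total_rss_in_kb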