sidekiq 4.2.10 → 5.2.7
This diff shows the contents of publicly released package versions as published to the supported public registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in those registries.
Potentially problematic release.
- checksums.yaml +5 -5
- data/.circleci/config.yml +61 -0
- data/.github/issue_template.md +3 -1
- data/.gitignore +3 -0
- data/.travis.yml +6 -13
- data/5.0-Upgrade.md +56 -0
- data/COMM-LICENSE +12 -10
- data/Changes.md +158 -1
- data/Ent-Changes.md +67 -2
- data/Gemfile +14 -20
- data/LICENSE +1 -1
- data/Pro-4.0-Upgrade.md +35 -0
- data/Pro-Changes.md +133 -2
- data/README.md +8 -6
- data/Rakefile +2 -5
- data/bin/sidekiqctl +13 -92
- data/bin/sidekiqload +5 -10
- data/lib/generators/sidekiq/templates/worker_spec.rb.erb +1 -1
- data/lib/sidekiq.rb +27 -27
- data/lib/sidekiq/api.rb +145 -57
- data/lib/sidekiq/cli.rb +120 -81
- data/lib/sidekiq/client.rb +25 -18
- data/lib/sidekiq/core_ext.rb +1 -119
- data/lib/sidekiq/ctl.rb +221 -0
- data/lib/sidekiq/delay.rb +42 -0
- data/lib/sidekiq/exception_handler.rb +2 -4
- data/lib/sidekiq/extensions/generic_proxy.rb +7 -1
- data/lib/sidekiq/fetch.rb +1 -1
- data/lib/sidekiq/job_logger.rb +25 -0
- data/lib/sidekiq/job_retry.rb +262 -0
- data/lib/sidekiq/launcher.rb +19 -19
- data/lib/sidekiq/logging.rb +18 -2
- data/lib/sidekiq/manager.rb +5 -6
- data/lib/sidekiq/middleware/server/active_record.rb +10 -0
- data/lib/sidekiq/processor.rb +126 -48
- data/lib/sidekiq/rails.rb +8 -73
- data/lib/sidekiq/redis_connection.rb +43 -5
- data/lib/sidekiq/scheduled.rb +35 -8
- data/lib/sidekiq/testing.rb +16 -7
- data/lib/sidekiq/util.rb +5 -2
- data/lib/sidekiq/version.rb +1 -1
- data/lib/sidekiq/web.rb +4 -4
- data/lib/sidekiq/web/action.rb +2 -6
- data/lib/sidekiq/web/application.rb +33 -16
- data/lib/sidekiq/web/helpers.rb +69 -22
- data/lib/sidekiq/web/router.rb +10 -10
- data/lib/sidekiq/worker.rb +118 -19
- data/sidekiq.gemspec +6 -17
- data/web/assets/javascripts/application.js +0 -0
- data/web/assets/javascripts/dashboard.js +32 -17
- data/web/assets/stylesheets/application-rtl.css +246 -0
- data/web/assets/stylesheets/application.css +371 -6
- data/web/assets/stylesheets/bootstrap-rtl.min.css +9 -0
- data/web/assets/stylesheets/bootstrap.css +2 -2
- data/web/locales/ar.yml +81 -0
- data/web/locales/en.yml +2 -0
- data/web/locales/es.yml +4 -3
- data/web/locales/fa.yml +1 -0
- data/web/locales/he.yml +79 -0
- data/web/locales/ja.yml +5 -3
- data/web/locales/ur.yml +80 -0
- data/web/views/_footer.erb +5 -2
- data/web/views/_nav.erb +4 -18
- data/web/views/_paging.erb +1 -1
- data/web/views/busy.erb +9 -5
- data/web/views/dashboard.erb +1 -1
- data/web/views/layout.erb +11 -2
- data/web/views/morgue.erb +4 -4
- data/web/views/queue.erb +8 -7
- data/web/views/queues.erb +2 -0
- data/web/views/retries.erb +9 -5
- data/web/views/scheduled.erb +2 -2
- metadata +31 -160
- data/lib/sidekiq/middleware/server/logging.rb +0 -31
- data/lib/sidekiq/middleware/server/retry_jobs.rb +0 -205
data/lib/sidekiq.rb
CHANGED
@@ -1,12 +1,13 @@
-# encoding: utf-8
 # frozen_string_literal: true
+
 require 'sidekiq/version'
-fail "Sidekiq #{Sidekiq::VERSION} does not support Ruby versions below 2.
+fail "Sidekiq #{Sidekiq::VERSION} does not support Ruby versions below 2.2.2." if RUBY_PLATFORM != 'java' && Gem::Version.new(RUBY_VERSION) < Gem::Version.new('2.2.2')
 
 require 'sidekiq/logging'
 require 'sidekiq/client'
 require 'sidekiq/worker'
 require 'sidekiq/redis_connection'
+require 'sidekiq/delay'
 
 require 'json'
 
@@ -17,13 +18,14 @@ module Sidekiq
   DEFAULTS = {
     queues: [],
     labels: [],
-    concurrency:
+    concurrency: 10,
     require: '.',
     environment: nil,
     timeout: 8,
     poll_interval_average: nil,
-    average_scheduled_poll_interval:
+    average_scheduled_poll_interval: 5,
     error_handlers: [],
+    death_handlers: [],
     lifecycle_events: {
       startup: [],
       quiet: [],
@@ -33,7 +35,6 @@ module Sidekiq
     dead_max_jobs: 10_000,
     dead_timeout_in_seconds: 180 * 24 * 60 * 60, # 6 months
     reloader: proc { |&block| block.call },
-    executor: proc { |&block| block.call },
   }
 
   DEFAULT_WORKER_OPTIONS = {
@@ -47,7 +48,7 @@ module Sidekiq
     "connected_clients" => "9999",
     "used_memory_human" => "9P",
     "used_memory_peak_human" => "9P"
-  }
+  }
 
   def self.❨╯°□°❩╯︵┻━┻
     puts "Calm down, yo."
@@ -56,6 +57,7 @@ module Sidekiq
   def self.options
     @options ||= DEFAULTS.dup
   end
+
   def self.options=(opts)
     @options = opts
   end
@@ -94,8 +96,8 @@ module Sidekiq
       begin
         yield conn
       rescue Redis::CommandError => ex
-        #2550 Failover can cause the server to become a
-        # to disconnect and reopen the socket to get back to the
+        #2550 Failover can cause the server to become a replica, need
+        # to disconnect and reopen the socket to get back to the primary.
         (conn.disconnect!; retryable = false; retry) if retryable && ex.message =~ /READONLY/
         raise
       end
@@ -145,32 +147,34 @@ module Sidekiq
   end
 
   def self.default_server_middleware
-
-    require 'sidekiq/middleware/server/logging'
-
-    Middleware::Chain.new do |m|
-      m.add Middleware::Server::RetryJobs
-      m.add Middleware::Server::Logging
-    end
+    Middleware::Chain.new
   end
 
   def self.default_worker_options=(hash)
-
+    # stringify
+    @default_worker_options = default_worker_options.merge(Hash[hash.map{|k, v| [k.to_s, v]}])
   end
   def self.default_worker_options
     defined?(@default_worker_options) ? @default_worker_options : DEFAULT_WORKER_OPTIONS
   end
 
+  def self.default_retries_exhausted=(prok)
+    logger.info { "default_retries_exhausted is deprecated, please use `config.death_handlers << -> {|job, ex| }`" }
+    return nil unless prok
+    death_handlers << prok
+  end
+
+  ##
+  # Death handlers are called when all retries for a job have been exhausted and
+  # the job dies. It's the notification to your application
+  # that this job will not succeed without manual intervention.
+  #
   # Sidekiq.configure_server do |config|
-  #   config.
+  #   config.death_handlers << ->(job, ex) do
   #   end
   # end
-  def self.
-
-  end
-  @default_retries_exhausted = ->(job, ex) { }
-  def self.default_retries_exhausted
-    @default_retries_exhausted
+  def self.death_handlers
+    options[:death_handlers]
   end
 
   def self.load_json(string)
@@ -228,10 +232,6 @@ module Sidekiq
   # otherwise Ruby's Thread#kill will commit. See #377.
   # DO NOT RESCUE THIS ERROR IN YOUR WORKERS
   class Shutdown < Interrupt; end
-
 end
 
-require 'sidekiq/extensions/class_methods'
-require 'sidekiq/extensions/action_mailer'
-require 'sidekiq/extensions/active_record'
 require 'sidekiq/rails' if defined?(::Rails::Engine)
data/lib/sidekiq/api.rb
CHANGED
@@ -1,9 +1,24 @@
-# encoding: utf-8
 # frozen_string_literal: true
 require 'sidekiq'
 
 module Sidekiq
+
+  module RedisScanner
+    def sscan(conn, key)
+      cursor = '0'
+      result = []
+      loop do
+        cursor, values = conn.sscan(key, cursor)
+        result.push(*values)
+        break if cursor == '0'
+      end
+      result
+    end
+  end
+
   class Stats
+    include RedisScanner
+
     def initialize
       fetch_stats!
     end
@@ -51,33 +66,39 @@ module Sidekiq
     def fetch_stats!
       pipe1_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          conn.get('stat:processed'
-          conn.get('stat:failed'
-          conn.zcard('schedule'
-          conn.zcard('retry'
-          conn.zcard('dead'
-          conn.scard('processes'
-          conn.lrange('queue:default'
-          conn.smembers('processes'.freeze)
-          conn.smembers('queues'.freeze)
+          conn.get('stat:processed')
+          conn.get('stat:failed')
+          conn.zcard('schedule')
+          conn.zcard('retry')
+          conn.zcard('dead')
+          conn.scard('processes')
+          conn.lrange('queue:default', -1, -1)
         end
       end
 
+      processes = Sidekiq.redis do |conn|
+        sscan(conn, 'processes')
+      end
+
+      queues = Sidekiq.redis do |conn|
+        sscan(conn, 'queues')
+      end
+
       pipe2_res = Sidekiq.redis do |conn|
         conn.pipelined do
-
-
+          processes.each {|key| conn.hget(key, 'busy') }
+          queues.each {|queue| conn.llen("queue:#{queue}") }
         end
       end
 
-      s =
+      s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
       enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-        job = Sidekiq.load_json(entry)
+        job = Sidekiq.load_json(entry) rescue {}
         now = Time.now.to_f
-        thence = job['enqueued_at'
+        thence = job['enqueued_at'] || now
         now - thence
       else
         0
@@ -117,9 +138,11 @@ module Sidekiq
     end
 
     class Queues
+      include RedisScanner
+
       def lengths
         Sidekiq.redis do |conn|
-          queues = conn
+          queues = sscan(conn, 'queues')
 
           lengths = conn.pipelined do
             queues.each do |queue|
@@ -146,11 +169,11 @@ module Sidekiq
       end
 
       def processed
-        date_stat_hash("processed")
+        @processed ||= date_stat_hash("processed")
       end
 
       def failed
-        date_stat_hash("failed")
+        @failed ||= date_stat_hash("failed")
       end
 
       private
@@ -163,16 +186,21 @@ module Sidekiq
 
       while i < @days_previous
         date = @start_date - i
-        datestr = date.strftime("%Y-%m-%d"
+        datestr = date.strftime("%Y-%m-%d")
         keys << "stat:#{stat}:#{datestr}"
         dates << datestr
         i += 1
       end
 
-
-
-
+      begin
+        Sidekiq.redis do |conn|
+          conn.mget(keys).each_with_index do |value, idx|
+            stat_hash[dates[idx]] = value ? value.to_i : 0
+          end
         end
+      rescue Redis::CommandError
+        # mget will trigger a CROSSSLOT error when run against a Cluster
+        # TODO Someone want to add Cluster support?
       end
 
       stat_hash
@@ -194,18 +222,19 @@ module Sidekiq
   #
   class Queue
     include Enumerable
+    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| c
+      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
     def initialize(name="default")
-      @name = name
+      @name = name.to_s
       @rname = "queue:#{name}"
     end
 
@@ -268,7 +297,7 @@ module Sidekiq
       Sidekiq.redis do |conn|
         conn.multi do
           conn.del(@rname)
-          conn.srem("queues"
+          conn.srem("queues", name)
         end
       end
     end
@@ -287,13 +316,25 @@ module Sidekiq
     attr_reader :value
 
     def initialize(item, queue_name=nil)
+      @args = nil
       @value = item
-      @item = item.is_a?(Hash) ? item :
+      @item = item.is_a?(Hash) ? item : parse(item)
       @queue = queue_name || @item['queue']
     end
 
+    def parse(item)
+      Sidekiq.load_json(item)
+    rescue JSON::ParserError
+      # If the job payload in Redis is invalid JSON, we'll load
+      # the item as an empty hash and store the invalid JSON as
+      # the job 'args' for display in the Web UI.
+      @invalid = true
+      @args = [item]
+      {}
+    end
+
     def klass
-
+      self['class']
     end
 
     def display_class
@@ -318,38 +359,42 @@ module Sidekiq
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @
+      @display_args ||= case klass
                         when /\ASidekiq::Extensions::Delayed/
                           safe_load(args[0], args) do |_, _, arg|
                             arg
                           end
                         when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-                          job_args =
-                          if 'ActionMailer::DeliveryJob' == (
-
-
+                          job_args = self['wrapped'] ? args[0]["arguments"] : []
+                          if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+                            # remove MailerClass, mailer_method and 'deliver_now'
+                            job_args.drop(3)
                           else
-
+                            job_args
                           end
                         else
+                          if self['encrypt']
+                            # no point in showing 150+ bytes of random garbage
+                            args[-1] = '[encrypted data]'
+                          end
                           args
                         end
     end
 
     def args
-      @item['args']
+      @args || @item['args']
     end
 
     def jid
-
+      self['jid']
    end
 
     def enqueued_at
-
+      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
     end
 
     def created_at
-      Time.at(
+      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
     end
 
     def queue
@@ -371,7 +416,10 @@ module Sidekiq
     end
 
     def [](name)
-
+      # nil will happen if the JSON fails to parse.
+      # We don't guarantee Sidekiq will work with bad job JSON but we should
+      # make a best effort to minimize the damage.
+      @item ? @item[name] : nil
     end
 
     private
@@ -434,14 +482,7 @@ module Sidekiq
     # Place job in the dead set
     def kill
       remove_job do |message|
-
-        Sidekiq.redis do |conn|
-          conn.multi do
-            conn.zadd('dead', now, message)
-            conn.zremrangebyscore('dead', '-inf', now - DeadSet.timeout)
-            conn.zremrangebyrank('dead', 0, - DeadSet.max_jobs)
-          end
-        end
+        DeadSet.new.kill(message)
       end
     end
 
@@ -531,7 +572,7 @@ module Sidekiq
         end
         break if elements.empty?
         page -= 1
-        elements.each do |element, score|
+        elements.reverse.each do |element, score|
          yield SortedEntry.new(self, score, element)
        end
        offset_size = initial_size - @_size
@@ -629,6 +670,12 @@ module Sidekiq
        each(&:retry)
      end
    end
+
+    def kill_all
+      while size > 0
+        each(&:kill)
+      end
+    end
  end
 
  ##
@@ -639,6 +686,27 @@ module Sidekiq
      super 'dead'
    end
 
+    def kill(message, opts={})
+      now = Time.now.to_f
+      Sidekiq.redis do |conn|
+        conn.multi do
+          conn.zadd(name, now.to_s, message)
+          conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+        end
+      end
+
+      if opts[:notify_failure] != false
+        job = Sidekiq.load_json(message)
+        r = RuntimeError.new("Job killed by API")
+        r.set_backtrace(caller)
+        Sidekiq.death_handlers.each do |handle|
+          handle.call(job, r)
+        end
+      end
+      true
+    end
+
    def retry_all
      while size > 0
        each(&:retry)
@@ -663,17 +731,18 @@ module Sidekiq
  #
  class ProcessSet
    include Enumerable
+    include RedisScanner
 
    def initialize(clean_plz=true)
-
+      cleanup if clean_plz
    end
 
    # Cleans up dead processes recorded in Redis.
    # Returns the number of processes cleaned.
-    def
+    def cleanup
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes').sort
        heartbeats = conn.pipelined do
          procs.each do |key|
            conn.hget(key, 'info')
@@ -693,7 +762,7 @@ module Sidekiq
    end
 
    def each
-      procs = Sidekiq.redis { |conn| conn
+      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
 
      Sidekiq.redis do |conn|
        # We're making a tradeoff here between consuming more memory instead of
@@ -706,6 +775,11 @@ module Sidekiq
        end
 
        result.each do |info, busy, at_s, quiet|
+          # If a process is stopped between when we query Redis for `procs` and
+          # when we query for `result`, we will have an item in `result` that is
+          # composed of `nil` values.
+          next if info.nil?
+
          hash = Sidekiq.load_json(info)
          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
        end
@@ -721,6 +795,18 @@ module Sidekiq
    def size
      Sidekiq.redis { |conn| conn.scard('processes') }
    end
+
+    # Returns the identity of the current cluster leader or "" if no leader.
+    # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
+    # or Sidekiq Pro.
+    def leader
+      @leader ||= begin
+        x = Sidekiq.redis {|c| c.get("dear-leader") }
+        # need a non-falsy value so we can memoize
+        x = "" unless x
+        x
+      end
+    end
  end
 
  #
@@ -755,8 +841,12 @@ module Sidekiq
      @attribs[key]
    end
 
+    def identity
+      self['identity']
+    end
+
    def quiet!
-      signal('
+      signal('TSTP')
    end
 
    def stop!
@@ -783,9 +873,6 @@ module Sidekiq
        end
      end
 
-    def identity
-      self['identity']
-    end
   end
 
   ##
@@ -810,10 +897,11 @@ module Sidekiq
   #
   class Workers
     include Enumerable
+    include RedisScanner
 
     def each
       Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes')
         procs.sort.each do |key|
           valid, workers = conn.pipelined do
             conn.exists(key)
@@ -835,7 +923,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes')
         if procs.empty?
           0
         else