sidekiq 4.2.4 → 5.2.10
Potentially problematic release: this version of sidekiq might be problematic.
- checksums.yaml +5 -5
- data/.circleci/config.yml +61 -0
- data/.github/issue_template.md +8 -1
- data/.gitignore +3 -0
- data/.travis.yml +5 -6
- data/5.0-Upgrade.md +56 -0
- data/COMM-LICENSE +12 -10
- data/Changes.md +220 -0
- data/Ent-Changes.md +94 -2
- data/Gemfile +12 -22
- data/LICENSE +1 -1
- data/Pro-4.0-Upgrade.md +35 -0
- data/Pro-Changes.md +176 -2
- data/README.md +10 -7
- data/Rakefile +3 -3
- data/bin/sidekiqctl +13 -92
- data/bin/sidekiqload +16 -34
- data/lib/generators/sidekiq/templates/worker_spec.rb.erb +1 -1
- data/lib/generators/sidekiq/templates/worker_test.rb.erb +1 -1
- data/lib/sidekiq/api.rb +166 -68
- data/lib/sidekiq/cli.rb +122 -77
- data/lib/sidekiq/client.rb +25 -18
- data/lib/sidekiq/core_ext.rb +1 -106
- data/lib/sidekiq/ctl.rb +221 -0
- data/lib/sidekiq/delay.rb +42 -0
- data/lib/sidekiq/exception_handler.rb +2 -4
- data/lib/sidekiq/extensions/generic_proxy.rb +7 -1
- data/lib/sidekiq/fetch.rb +1 -1
- data/lib/sidekiq/job_logger.rb +25 -0
- data/lib/sidekiq/job_retry.rb +262 -0
- data/lib/sidekiq/launcher.rb +49 -40
- data/lib/sidekiq/logging.rb +18 -2
- data/lib/sidekiq/manager.rb +6 -7
- data/lib/sidekiq/middleware/server/active_record.rb +10 -0
- data/lib/sidekiq/processor.rb +127 -37
- data/lib/sidekiq/rails.rb +16 -51
- data/lib/sidekiq/redis_connection.rb +50 -5
- data/lib/sidekiq/scheduled.rb +35 -8
- data/lib/sidekiq/testing.rb +24 -7
- data/lib/sidekiq/util.rb +6 -2
- data/lib/sidekiq/version.rb +1 -1
- data/lib/sidekiq/web/action.rb +3 -7
- data/lib/sidekiq/web/application.rb +38 -22
- data/lib/sidekiq/web/helpers.rb +78 -27
- data/lib/sidekiq/web/router.rb +14 -10
- data/lib/sidekiq/web.rb +4 -4
- data/lib/sidekiq/worker.rb +118 -19
- data/lib/sidekiq.rb +27 -26
- data/sidekiq.gemspec +8 -13
- data/web/assets/javascripts/application.js +0 -0
- data/web/assets/javascripts/dashboard.js +33 -18
- data/web/assets/stylesheets/application-rtl.css +246 -0
- data/web/assets/stylesheets/application.css +371 -6
- data/web/assets/stylesheets/bootstrap-rtl.min.css +9 -0
- data/web/assets/stylesheets/bootstrap.css +2 -2
- data/web/locales/ar.yml +81 -0
- data/web/locales/en.yml +2 -0
- data/web/locales/es.yml +4 -3
- data/web/locales/fa.yml +80 -0
- data/web/locales/he.yml +79 -0
- data/web/locales/ja.yml +5 -3
- data/web/locales/ur.yml +80 -0
- data/web/views/_footer.erb +5 -2
- data/web/views/_job_info.erb +1 -1
- data/web/views/_nav.erb +4 -18
- data/web/views/_paging.erb +1 -1
- data/web/views/busy.erb +9 -5
- data/web/views/dashboard.erb +3 -3
- data/web/views/layout.erb +11 -2
- data/web/views/morgue.erb +14 -10
- data/web/views/queue.erb +11 -10
- data/web/views/queues.erb +4 -2
- data/web/views/retries.erb +17 -11
- data/web/views/retry.erb +1 -1
- data/web/views/scheduled.erb +2 -2
- metadata +32 -151
- data/lib/sidekiq/middleware/server/logging.rb +0 -40
- data/lib/sidekiq/middleware/server/retry_jobs.rb +0 -205
- data/test/config.yml +0 -9
- data/test/env_based_config.yml +0 -11
- data/test/fake_env.rb +0 -1
- data/test/fixtures/en.yml +0 -2
- data/test/helper.rb +0 -75
- data/test/test_actors.rb +0 -138
- data/test/test_api.rb +0 -528
- data/test/test_cli.rb +0 -418
- data/test/test_client.rb +0 -266
- data/test/test_exception_handler.rb +0 -56
- data/test/test_extensions.rb +0 -127
- data/test/test_fetch.rb +0 -50
- data/test/test_launcher.rb +0 -95
- data/test/test_logging.rb +0 -35
- data/test/test_manager.rb +0 -50
- data/test/test_middleware.rb +0 -158
- data/test/test_processor.rb +0 -235
- data/test/test_rails.rb +0 -22
- data/test/test_redis_connection.rb +0 -132
- data/test/test_retry.rb +0 -326
- data/test/test_retry_exhausted.rb +0 -149
- data/test/test_scheduled.rb +0 -115
- data/test/test_scheduling.rb +0 -58
- data/test/test_sidekiq.rb +0 -107
- data/test/test_testing.rb +0 -143
- data/test/test_testing_fake.rb +0 -357
- data/test/test_testing_inline.rb +0 -94
- data/test/test_util.rb +0 -13
- data/test/test_web.rb +0 -726
- data/test/test_web_helpers.rb +0 -54
data/lib/sidekiq/api.rb
CHANGED
@@ -1,9 +1,24 @@
 # frozen_string_literal: true
-# encoding: utf-8
 require 'sidekiq'
 
 module Sidekiq
+
+  module RedisScanner
+    def sscan(conn, key)
+      cursor = '0'
+      result = []
+      loop do
+        cursor, values = conn.sscan(key, cursor)
+        result.push(*values)
+        break if cursor == '0'
+      end
+      result
+    end
+  end
+
   class Stats
+    include RedisScanner
+
     def initialize
       fetch_stats!
     end
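The `RedisScanner#sscan` helper added above replaces one-shot `SMEMBERS` calls with cursor-based `SSCAN` iteration, which keeps each Redis call small even when the set is large. As a minimal sketch of the same pattern outside Sidekiq, assuming a plain redis-rb connection and an example set key named `'queues'`:

```ruby
require 'redis'

# Minimal sketch of the cursor-based SSCAN loop used by RedisScanner above,
# run against a plain redis-rb connection. Any set key works; 'queues' here
# simply mirrors the set Sidekiq maintains.
def sscan_all(conn, key)
  cursor = '0'
  members = []
  loop do
    cursor, values = conn.sscan(key, cursor)
    members.push(*values)
    break if cursor == '0' # the SCAN family signals completion with cursor "0"
  end
  members
end

redis = Redis.new
puts sscan_all(redis, 'queues').inspect
```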
@@ -51,31 +66,40 @@ module Sidekiq
     def fetch_stats!
       pipe1_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          conn.get('stat:processed'
-          conn.get('stat:failed'
-          conn.zcard('schedule'
-          conn.zcard('retry'
-          conn.zcard('dead'
-          conn.scard('processes'
-          conn.lrange('queue:default'
-          conn.smembers('processes'.freeze)
-          conn.smembers('queues'.freeze)
+          conn.get('stat:processed')
+          conn.get('stat:failed')
+          conn.zcard('schedule')
+          conn.zcard('retry')
+          conn.zcard('dead')
+          conn.scard('processes')
+          conn.lrange('queue:default', -1, -1)
         end
       end
 
+      processes = Sidekiq.redis do |conn|
+        sscan(conn, 'processes')
+      end
+
+      queues = Sidekiq.redis do |conn|
+        sscan(conn, 'queues')
+      end
+
       pipe2_res = Sidekiq.redis do |conn|
         conn.pipelined do
-
-
+          processes.each {|key| conn.hget(key, 'busy') }
+          queues.each {|queue| conn.llen("queue:#{queue}") }
         end
       end
 
-      s =
+      s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
       enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
-
+        job = Sidekiq.load_json(entry) rescue {}
+        now = Time.now.to_f
+        thence = job['enqueued_at'] || now
+        now - thence
       else
         0
       end
@@ -114,9 +138,11 @@ module Sidekiq
     end
 
     class Queues
+      include RedisScanner
+
       def lengths
         Sidekiq.redis do |conn|
-          queues = conn
+          queues = sscan(conn, 'queues')
 
           lengths = conn.pipelined do
             queues.each do |queue|
@@ -138,16 +164,18 @@ module Sidekiq
 
     class History
       def initialize(days_previous, start_date = nil)
+        #we only store five years of data in Redis
+        raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
         @days_previous = days_previous
         @start_date = start_date || Time.now.utc.to_date
       end
 
       def processed
-        date_stat_hash("processed")
+        @processed ||= date_stat_hash("processed")
       end
 
       def failed
-        date_stat_hash("failed")
+        @failed ||= date_stat_hash("failed")
      end
 
       private
@@ -160,16 +188,21 @@ module Sidekiq
 
         while i < @days_previous
           date = @start_date - i
-          datestr = date.strftime("%Y-%m-%d"
+          datestr = date.strftime("%Y-%m-%d")
           keys << "stat:#{stat}:#{datestr}"
           dates << datestr
           i += 1
         end
 
-
-
-
+        begin
+          Sidekiq.redis do |conn|
+            conn.mget(keys).each_with_index do |value, idx|
+              stat_hash[dates[idx]] = value ? value.to_i : 0
+            end
           end
+        rescue Redis::CommandError
+          # mget will trigger a CROSSSLOT error when run against a Cluster
+          # TODO Someone want to add Cluster support?
         end
 
         stat_hash
@@ -191,18 +224,19 @@ module Sidekiq
   #
   class Queue
     include Enumerable
+    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| c
+      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
     def initialize(name="default")
-      @name = name
+      @name = name.to_s
       @rname = "queue:#{name}"
     end
 
@@ -225,7 +259,10 @@ module Sidekiq
         conn.lrange(@rname, -1, -1)
       end.first
       return 0 unless entry
-
+      job = Sidekiq.load_json(entry)
+      now = Time.now.to_f
+      thence = job['enqueued_at'] || now
+      now - thence
     end
 
     def each
@@ -262,7 +299,7 @@ module Sidekiq
       Sidekiq.redis do |conn|
         conn.multi do
           conn.del(@rname)
-          conn.srem("queues"
+          conn.srem("queues", name)
         end
       end
     end
@@ -281,13 +318,25 @@ module Sidekiq
     attr_reader :value
 
     def initialize(item, queue_name=nil)
+      @args = nil
       @value = item
-      @item = item.is_a?(Hash) ? item :
+      @item = item.is_a?(Hash) ? item : parse(item)
       @queue = queue_name || @item['queue']
     end
 
+    def parse(item)
+      Sidekiq.load_json(item)
+    rescue JSON::ParserError
+      # If the job payload in Redis is invalid JSON, we'll load
+      # the item as an empty hash and store the invalid JSON as
+      # the job 'args' for display in the Web UI.
+      @invalid = true
+      @args = [item]
+      {}
+    end
+
     def klass
-
+      self['class']
     end
 
     def display_class
@@ -312,38 +361,42 @@ module Sidekiq
 
     def display_args
       # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
-      @
+      @display_args ||= case klass
                  when /\ASidekiq::Extensions::Delayed/
                    safe_load(args[0], args) do |_, _, arg|
                      arg
                    end
                  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
-                   job_args =
-                   if 'ActionMailer::DeliveryJob' == (
-
-
+                   job_args = self['wrapped'] ? args[0]["arguments"] : []
+                   if 'ActionMailer::DeliveryJob' == (self['wrapped'] || args[0])
+                     # remove MailerClass, mailer_method and 'deliver_now'
+                     job_args.drop(3)
                    else
-
+                     job_args
                    end
                  else
+                   if self['encrypt']
+                     # no point in showing 150+ bytes of random garbage
+                     args[-1] = '[encrypted data]'
+                   end
                    args
                  end
     end
 
     def args
-      @item['args']
+      @args || @item['args']
     end
 
     def jid
-
+      self['jid']
     end
 
     def enqueued_at
-
+      self['enqueued_at'] ? Time.at(self['enqueued_at']).utc : nil
     end
 
     def created_at
-      Time.at(
+      Time.at(self['created_at'] || self['enqueued_at'] || 0).utc
     end
 
     def queue
@@ -351,7 +404,8 @@ module Sidekiq
     end
 
     def latency
-      Time.now.to_f
+      now = Time.now.to_f
+      now - (@item['enqueued_at'] || @item['created_at'] || now)
     end
 
     ##
@@ -364,7 +418,10 @@ module Sidekiq
     end
 
     def [](name)
-
+      # nil will happen if the JSON fails to parse.
+      # We don't guarantee Sidekiq will work with bad job JSON but we should
+      # make a best effort to minimize the damage.
+      @item ? @item[name] : nil
     end
 
     private
@@ -416,10 +473,9 @@ module Sidekiq
     end
 
     def retry
-      raise "Retry not available on jobs which have not failed" unless item["failed_at"]
       remove_job do |message|
         msg = Sidekiq.load_json(message)
-        msg['retry_count'] -= 1
+        msg['retry_count'] -= 1 if msg['retry_count']
         Sidekiq::Client.push(msg)
       end
     end
@@ -427,20 +483,15 @@ module Sidekiq
     ##
     # Place job in the dead set
     def kill
-      raise 'Kill not available on jobs which have not failed' unless item['failed_at']
       remove_job do |message|
-
-        now = Time.now.to_f
-        Sidekiq.redis do |conn|
-          conn.multi do
-            conn.zadd('dead', now, message)
-            conn.zremrangebyscore('dead', '-inf', now - DeadSet.timeout)
-            conn.zremrangebyrank('dead', 0, - DeadSet.max_jobs)
-          end
-        end
+        DeadSet.new.kill(message)
       end
     end
 
+    def error?
+      !!item['error_class']
+    end
+
     private
 
     def remove_job
@@ -523,7 +574,7 @@ module Sidekiq
         end
         break if elements.empty?
         page -= 1
-        elements.each do |element, score|
+        elements.reverse.each do |element, score|
           yield SortedEntry.new(self, score, element)
         end
         offset_size = initial_size - @_size
@@ -585,13 +636,13 @@ module Sidekiq
  # Allows enumeration of scheduled jobs within Sidekiq.
  # Based on this, you can search/filter for jobs. Here's an
  # example where I'm selecting all jobs of a certain type
-  # and deleting them from the
+  # and deleting them from the schedule queue.
  #
  #   r = Sidekiq::ScheduledSet.new
-  #   r.select do |
-  #
-  #
-  #
+  #   r.select do |scheduled|
+  #     scheduled.klass == 'Sidekiq::Extensions::DelayedClass' &&
+  #     scheduled.args[0] == 'User' &&
+  #     scheduled.args[1] == 'setup_new_subscriber'
  #   end.map(&:delete)
  class ScheduledSet < JobSet
    def initialize
@@ -621,6 +672,12 @@ module Sidekiq
        each(&:retry)
      end
    end
+
+    def kill_all
+      while size > 0
+        each(&:kill)
+      end
+    end
  end
 
  ##
@@ -631,6 +688,27 @@ module Sidekiq
      super 'dead'
    end
 
+    def kill(message, opts={})
+      now = Time.now.to_f
+      Sidekiq.redis do |conn|
+        conn.multi do
+          conn.zadd(name, now.to_s, message)
+          conn.zremrangebyscore(name, '-inf', now - self.class.timeout)
+          conn.zremrangebyrank(name, 0, - self.class.max_jobs)
+        end
+      end
+
+      if opts[:notify_failure] != false
+        job = Sidekiq.load_json(message)
+        r = RuntimeError.new("Job killed by API")
+        r.set_backtrace(caller)
+        Sidekiq.death_handlers.each do |handle|
+          handle.call(job, r)
+        end
+      end
+      true
+    end
+
    def retry_all
      while size > 0
        each(&:retry)
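Together with the `kill_all` added to `RetrySet` a few hunks earlier, the new `DeadSet#kill` gives the API one path for moving a job payload into the dead set and firing any configured `Sidekiq.death_handlers`. A rough usage sketch; the `'MyWorker'` class name is illustrative only:

```ruby
require 'sidekiq/api'

# Illustrative only: move every pending retry for a hypothetical MyWorker
# class to the dead set. SortedEntry#kill (see the earlier hunk) delegates to
# DeadSet#kill, which also notifies Sidekiq.death_handlers unless
# notify_failure: false is passed.
Sidekiq::RetrySet.new.each do |entry|
  entry.kill if entry.klass == 'MyWorker'
end

# Or empty the whole retry set into the dead set in one call:
Sidekiq::RetrySet.new.kill_all
```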
@@ -655,17 +733,18 @@ module Sidekiq
  #
  class ProcessSet
    include Enumerable
+    include RedisScanner
 
    def initialize(clean_plz=true)
-
+      cleanup if clean_plz
    end
 
    # Cleans up dead processes recorded in Redis.
    # Returns the number of processes cleaned.
-    def
+    def cleanup
      count = 0
      Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes').sort
        heartbeats = conn.pipelined do
          procs.each do |key|
            conn.hget(key, 'info')
@@ -685,7 +764,7 @@ module Sidekiq
    end
 
    def each
-      procs = Sidekiq.redis { |conn| conn
+      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
 
      Sidekiq.redis do |conn|
        # We're making a tradeoff here between consuming more memory instead of
@@ -698,6 +777,11 @@ module Sidekiq
        end
 
        result.each do |info, busy, at_s, quiet|
+          # If a process is stopped between when we query Redis for `procs` and
+          # when we query for `result`, we will have an item in `result` that is
+          # composed of `nil` values.
+          next if info.nil?
+
          hash = Sidekiq.load_json(info)
          yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f, 'quiet' => quiet))
        end
@@ -713,6 +797,18 @@ module Sidekiq
    def size
      Sidekiq.redis { |conn| conn.scard('processes') }
    end
+
+    # Returns the identity of the current cluster leader or "" if no leader.
+    # This is a Sidekiq Enterprise feature, will always return "" in Sidekiq
+    # or Sidekiq Pro.
+    def leader
+      @leader ||= begin
+        x = Sidekiq.redis {|c| c.get("dear-leader") }
+        # need a non-falsy value so we can memoize
+        x = "" unless x
+        x
+      end
+    end
  end
 
  #
@@ -747,8 +843,12 @@ module Sidekiq
      @attribs[key]
    end
 
+    def identity
+      self['identity']
+    end
+
    def quiet!
-      signal('
+      signal('TSTP')
    end
 
    def stop!
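`Process#quiet!` now sends `TSTP` (replacing the `USR1` quiet signal used before Sidekiq 5), and `identity` is exposed directly alongside the other `Process` accessors. A small sketch of how a deploy script might use this, assuming it wants to quiet every live worker before shutdown:

```ruby
require 'sidekiq/api'

# Illustrative sketch: ask every live Sidekiq process to stop picking up new
# jobs before a deploy. quiet! sends TSTP on Sidekiq 5.x; 'busy' comes from
# the heartbeat data merged in by ProcessSet#each.
Sidekiq::ProcessSet.new.each do |process|
  puts "quieting #{process.identity} (#{process['busy']} jobs busy)"
  process.quiet!
end
```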
@@ -775,9 +875,6 @@ module Sidekiq
      end
    end
 
-    def identity
-      self['identity']
-    end
  end
 
  ##
@@ -802,13 +899,14 @@ module Sidekiq
  #
  class Workers
    include Enumerable
+    include RedisScanner
 
    def each
      Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes')
        procs.sort.each do |key|
          valid, workers = conn.pipelined do
-            conn.exists(key)
+            conn.exists?(key)
            conn.hgetall("#{key}:workers")
          end
          next unless valid
@@ -827,7 +925,7 @@ module Sidekiq
    # which can easily get out of sync with crashy processes.
    def size
      Sidekiq.redis do |conn|
-        procs = conn
+        procs = sscan(conn, 'processes')
        if procs.empty?
          0
        else