sidekiq 6.0.0 → 6.0.2
Potentially problematic release.
- checksums.yaml +4 -4
- data/.circleci/config.yml +21 -0
- data/6.0-Upgrade.md +3 -1
- data/Changes.md +82 -1
- data/Ent-Changes.md +6 -0
- data/Gemfile.lock +3 -3
- data/Pro-Changes.md +9 -1
- data/README.md +3 -1
- data/bin/sidekiqload +8 -4
- data/bin/sidekiqmon +4 -5
- data/lib/generators/sidekiq/worker_generator.rb +10 -0
- data/lib/sidekiq/api.rb +104 -63
- data/lib/sidekiq/cli.rb +18 -16
- data/lib/sidekiq/client.rb +8 -2
- data/lib/sidekiq/fetch.rb +7 -7
- data/lib/sidekiq/job_logger.rb +11 -3
- data/lib/sidekiq/job_retry.rb +21 -8
- data/lib/sidekiq/launcher.rb +1 -3
- data/lib/sidekiq/logger.rb +107 -11
- data/lib/sidekiq/middleware/chain.rb +11 -2
- data/lib/sidekiq/monitor.rb +1 -16
- data/lib/sidekiq/paginator.rb +7 -2
- data/lib/sidekiq/processor.rb +17 -19
- data/lib/sidekiq/scheduled.rb +13 -12
- data/lib/sidekiq/testing.rb +12 -0
- data/lib/sidekiq/util.rb +0 -2
- data/lib/sidekiq/version.rb +1 -1
- data/lib/sidekiq/web/application.rb +8 -13
- data/lib/sidekiq/web/helpers.rb +22 -10
- data/lib/sidekiq/worker.rb +4 -4
- data/lib/sidekiq.rb +8 -0
- data/sidekiq.gemspec +1 -1
- data/web/assets/javascripts/dashboard.js +2 -2
- data/web/assets/stylesheets/application-dark.css +125 -0
- data/web/assets/stylesheets/application.css +9 -0
- data/web/locales/de.yml +14 -2
- data/web/views/_job_info.erb +2 -1
- data/web/views/busy.erb +4 -1
- data/web/views/dead.erb +2 -2
- data/web/views/layout.erb +1 -0
- data/web/views/morgue.erb +4 -1
- data/web/views/queue.erb +10 -1
- data/web/views/retries.erb +4 -1
- data/web/views/retry.erb +2 -2
- data/web/views/scheduled.erb +4 -1
- metadata +5 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9dd138f78183ff31972192acbcee3ea7aad67403dfdd478500f3bbbfebf14698
+  data.tar.gz: ea4ab3b7c40bf358a80df9043013a63ee7c0562322ce5f212f938cecdc93c62f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c34d01bffdf5af462b98afa03e4b71356b8afffd873533eb953305314e1c0fd0ea4a4c0eea53c7169b61aca51420a50d5eda9127ad815c46c4053954677be24d
+  data.tar.gz: fdfe2b1704bc7d3a071756ca983c9b81bf4534ceb5f532bec59ec87fe4c3094c95120a7b1471163327f55e132c9773a3fd05d9a6211c6c1a1342c58872e8d980
data/.circleci/config.yml
CHANGED
@@ -32,6 +32,9 @@ jobs:
       - <<: *save
       - <<: *unit
   "ruby-2.6":
+    environment:
+      COVERAGE: true
+      CC_TEST_REPORTER_ID: 003c3033501d70a2653bd887ff9a8b2884a263e6a4e27f2ba68748e15530918d
     docker:
       - image: circleci/ruby:2.6
       - image: circleci/redis:4.0
@@ -40,7 +43,25 @@ jobs:
       - <<: *restore
       - <<: *bundle
       - <<: *save
+
+      - run:
+          name: Setup Code Climate test-reporter
+          command: |
+            # download test reporter as a static binary
+            curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
+            chmod +x ./cc-test-reporter
+
+      - run:
+          name: Code Climate before-build
+          command: |
+            ./cc-test-reporter before-build
+
       - <<: *unit
+
+      - run:
+          name: Report code coverage to Code Climate
+          command: |
+            ./cc-test-reporter after-build -t simplecov --exit-code $?
   "jruby":
     docker:
       - image: circleci/jruby:latest
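The CI changes above report coverage to Code Climate with `-t simplecov` and set `COVERAGE: true` for the ruby-2.6 job, which implies the test suite starts SimpleCov only when that variable is present. A minimal sketch of such a gate, assuming a conventional `test_helper.rb` (not taken from the repo):

```ruby
# Sketch: start SimpleCov only when COVERAGE is set, matching the
# COVERAGE: true environment added to the ruby-2.6 CI job above.
if ENV["COVERAGE"]
  require "simplecov"
  SimpleCov.start do
    add_filter "/test/"
  end
end
```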
data/6.0-Upgrade.md
CHANGED
@@ -10,6 +10,7 @@ This release has major breaking changes. Read and test carefully in production.
 - ActiveJobs can now use `sidekiq_options` directly to configure Sidekiq
   features/internals like the retry subsystem. Prefer the native
   Sidekiq::Worker APIs as some Sidekiq features (e.g. unique jobs) do not work well with AJ.
+  (requires Rails 6.0.1)
 ```ruby
 class MyJob < ActiveJob::Base
   queue_as :myqueue
@@ -31,9 +32,10 @@ you can override it by configuring the log formatter explicitly. See
 ```ruby
 Sidekiq.configure_server do |config|
   config.log_formatter = AcmeCorp::PlainLogFormatter.new
-  # config.log_formatter = Sidekiq::Logger::
+  # config.log_formatter = Sidekiq::Logger::Formatters::JSON.new
 end
 ```
+Please see the [Logging](https://github.com/mperham/sidekiq/wiki/Logging) wiki page for the latest documentation and notes.
 - **Remove the daemonization, logfile and pidfile command line arguments and `sidekiqctl` binary**.
   I've [noted for years](https://www.mikeperham.com/2014/09/22/dont-daemonize-your-daemons/)
   how modern services should be managed with a proper init system.
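For the formatter hook shown above: `AcmeCorp::PlainLogFormatter` is just the placeholder name from the upgrade notes. A minimal sketch of such a class, assuming it follows Ruby's standard `Logger::Formatter` interface (`call(severity, time, progname, message)`), which is the interface Sidekiq's bundled formatters use:

```ruby
require "logger"
require "time"

# Sketch only: the class name comes from the upgrade notes above, the body
# is an assumed implementation of the standard Logger::Formatter signature.
module AcmeCorp
  class PlainLogFormatter < ::Logger::Formatter
    def call(severity, time, _progname, message)
      "#{time.utc.iso8601(3)} #{severity}: #{message}\n"
    end
  end
end

Sidekiq.configure_server do |config|
  config.log_formatter = AcmeCorp::PlainLogFormatter.new
end
```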
data/Changes.md
CHANGED
@@ -2,12 +2,86 @@
 
 [Sidekiq Changes](https://github.com/mperham/sidekiq/blob/master/Changes.md) | [Sidekiq Pro Changes](https://github.com/mperham/sidekiq/blob/master/Pro-Changes.md) | [Sidekiq Enterprise Changes](https://github.com/mperham/sidekiq/blob/master/Ent-Changes.md)
 
+6.0.2
+---------
+
+- Fix Sidekiq Enterprise's rolling restart functionality, broken by refactoring in 6.0.0. [#4334]
+- More internal refactoring and performance tuning [fatkodima]
+
+6.0.1
+---------
+
+- **Performance tuning**, Sidekiq should be 10-15% faster now [#4303, 4299,
+  4269, fatkodima]
+- **Dark Mode support in Web UI** (further design polish welcome!) [#4227, mperham,
+  fatkodima, silent-e]
+- **Job-specific log levels**, allowing you to turn on debugging for
+  problematic workers. [fatkodima, #4287]
+```ruby
+MyWorker.set(log_level: :debug).perform_async(...)
+```
+- **Ad-hoc job tags**. You can tag your jobs with, e.g, subdomain, tenant, country,
+  locale, application, version, user/client, "alpha/beta/pro/ent", types of jobs,
+  teams/people responsible for jobs, additional metadata, etc.
+  Tags are shown on different pages with job listings. Sidekiq Pro users
+  can filter based on them [fatkodima, #4280]
+```ruby
+class MyWorker
+  include Sidekiq::Worker
+  sidekiq_options tags: ['bank-ops', 'alpha']
+  ...
+end
+```
+- Fetch scheduled jobs in batches before pushing into specific queues.
+  This will decrease enqueueing time of scheduled jobs by a third. [fatkodima, #4273]
+```
+ScheduledSet with 10,000 jobs
+Before: 56.6 seconds
+After:  39.2 seconds
+```
+- Compress error backtraces before pushing into Redis, if you are
+  storing error backtraces, this will halve the size of your RetrySet
+  in Redis [fatkodima, #4272]
+```
+RetrySet with 100,000 jobs
+Before: 261 MB
+After:  129 MB
+```
+- Support display of ActiveJob 6.0 payloads in the Web UI [#4263]
+- Add `SortedSet#scan` for pattern based scanning. For large sets this API will be **MUCH** faster
+  than standard iteration using each. [fatkodima, #4262]
+```ruby
+Sidekiq::DeadSet.new.scan("UnreliableApi") do |job|
+  job.retry
+end
+```
+- Dramatically speed up SortedSet#find\_job(jid) by using Redis's ZSCAN
+  support, approx 10x faster. [fatkodima, #4259]
+```
+zscan   0.179366   0.047727   0.227093 (  1.161376)
+enum    8.522311   0.419826   8.942137 (  9.785079)
+```
+- Respect rails' generators `test_framework` option and gracefully handle extra `worker` suffix on generator [fatkodima, #4256]
+- Add ability to sort 'Enqueued' page on Web UI by position in the queue [fatkodima, #4248]
+- Support `Client.push_bulk` with different delays [fatkodima, #4243]
+```ruby
+Sidekiq::Client.push_bulk("class" => FooJob, "args" => [[1], [2]], "at" => [1.minute.from_now.to_f, 5.minutes.from_now.to_f])
+```
+- Easier way to test enqueuing specific ActionMailer and ActiveRecord delayed jobs. Instead of manually
+  parsing embedded class, you can now test by fetching jobs for specific classes. [fatkodima, #4292]
+```ruby
+assert_equal 1, Sidekiq::Extensions::DelayedMailer.jobs_for(FooMailer).size
+```
+- Add `sidekiqmon` to gemspec executables [#4242]
+- Gracefully handle `Sidekiq.logger = nil` [#4240]
+- Inject Sidekiq::LogContext module if user-supplied logger does not include it [#4239]
+
 6.0
 ---------
 
 This release has major breaking changes. Read and test carefully in production.
-- ActiveJobs can now use `sidekiq_options` directly to configure Sidekiq
+- With Rails 6.0.1+, ActiveJobs can now use `sidekiq_options` directly to configure Sidekiq
   features/internals like the retry subsystem. [#4213, pirj]
 ```ruby
 class MyJob < ActiveJob::Base
@@ -17,6 +91,13 @@ class MyJob < ActiveJob::Base
   end
 end
 ```
+- Logging has been redesigned to allow for pluggable log formatters:
+```ruby
+Sidekiq.configure_server do |config|
+  config.log_formatter = Sidekiq::Logger::Formatters::JSON.new
+end
+```
+See the [Logging wiki page](https://github.com/mperham/sidekiq/wiki/Logging) for more details.
 - **BREAKING CHANGE** Validate proper usage of the `REDIS_PROVIDER`
   variable. This variable is meant to hold the name of the environment
   variable which contains your Redis URL, so that you can switch Redis
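One usage note on the `SortedSet#scan` entry above: per the `to_enum(:scan, ...)` line in the api.rb diff further down, calling `scan` without a block should return an Enumerator, so it composes with the usual Enumerable methods. A small sketch under that assumption:

```ruby
require "sidekiq/api"

# Block form, as shown in the changelog entry above.
Sidekiq::RetrySet.new.scan("UnreliableApi") do |job|
  job.retry
end

# Without a block, scan returns an Enumerator (see the to_enum call in the
# api.rb diff below), so it can be chained like any other Enumerable.
dead_jids = Sidekiq::DeadSet.new.scan("UnreliableApi").map(&:jid)
```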
data/Ent-Changes.md
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    sidekiq (6.0.
+    sidekiq (6.0.2)
       connection_pool (>= 2.2.2)
       rack (>= 2.0.0)
       rack-protection (>= 2.0.0)
@@ -97,7 +97,7 @@ GEM
     nokogiri (1.10.4)
       mini_portile2 (~> 2.4.0)
     parallel (1.17.0)
-    parser (2.6.
+    parser (2.6.4.1)
       ast (~> 2.4.0)
     pry (0.12.2)
       coderay (~> 1.1.0)
@@ -164,7 +164,7 @@ GEM
       activesupport (>= 4.0)
       sprockets (>= 3.0.0)
     sqlite3 (1.4.1)
-    standard (0.1.
+    standard (0.1.4)
       rubocop (~> 0.72.0)
       rubocop-performance (~> 1.4.0)
     thor (0.20.3)
data/Pro-Changes.md
CHANGED
@@ -4,12 +4,20 @@
 
 Please see [http://sidekiq.org/](http://sidekiq.org/) for more details and how to buy.
 
+5.0.1
+---------
+
+- Rejigger batch failures UI to add direct links to retries and scheduled jobs [#4209]
+- Delete batch data with `UNLINK` [#4155]
+- Fix bug where a scheduled job can lose its scheduled time when using reliable push [#4267]
+- Sidekiq::JobSet#scan and #find_job APIs have been promoted to Sidekiq OSS. [#4259]
+
 5.0.0
 ---------
 
 - There is no significant migration from Sidekiq Pro 4.0 to 5.0
   but make sure you read the [update notes for Sidekiq
-  6.0](/mperham/sidekiq/blob/master/6.0-Upgrade.md).
+  6.0](https://github.com/mperham/sidekiq/blob/master/6.0-Upgrade.md).
 - Removed various deprecated APIs and associated warnings.
 - **BREAKING CHANGE** Remove the `Sidekiq::Batch::Status#dead_jobs` API in favor of
   `Sidekiq::Batch::Status#dead_jids`. [#4217]
data/README.md
CHANGED
@@ -3,6 +3,7 @@ Sidekiq
 
 [![Gem Version](https://badge.fury.io/rb/sidekiq.svg)](https://rubygems.org/gems/sidekiq)
 [![Code Climate](https://codeclimate.com/github/mperham/sidekiq.svg)](https://codeclimate.com/github/mperham/sidekiq)
+[![Test Coverage](https://codeclimate.com/github/mperham/sidekiq/badges/coverage.svg)](https://codeclimate.com/github/mperham/sidekiq/coverage)
 [![Build Status](https://circleci.com/gh/mperham/sidekiq/tree/master.svg?style=svg)](https://circleci.com/gh/mperham/sidekiq/tree/master)
 [![Gitter Chat](https://badges.gitter.im/mperham/sidekiq.svg)](https://gitter.im/mperham/sidekiq)
 
@@ -18,7 +19,8 @@ Performance
 
 Version | Latency | Garbage created for 10k jobs | Time to process 100k jobs | Throughput | Ruby
 -----------------|------|---------|---------|------------------------|-----
-Sidekiq 6.0.
+Sidekiq 6.0.2    | 3 ms | 156 MB  | 14.0 sec | **7100 jobs/sec** | MRI 2.6.3
+Sidekiq 6.0.0    | 3 ms | 156 MB  | 19 sec   | 5200 jobs/sec | MRI 2.6.3
 Sidekiq 4.0.0    | 10 ms | 151 MB  | 22 sec   | 4500 jobs/sec |
 Sidekiq 3.5.1    | 22 ms | 1257 MB | 125 sec  | 800 jobs/sec |
 Resque 1.25.2    | -     | -       | 420 sec  | 240 jobs/sec |
data/bin/sidekiqload
CHANGED
@@ -5,7 +5,8 @@
 $TESTING = false
 
 #require 'ruby-prof'
-
+require 'bundler/setup'
+Bundler.require(:default, :load_test)
 
 require_relative '../lib/sidekiq/cli'
 require_relative '../lib/sidekiq/launcher'
@@ -102,17 +103,20 @@ iter.times do
 end
 Sidekiq.logger.error "Created #{count*iter} jobs"
 
+start = Time.now
+
 Monitoring = Thread.new do
   watchdog("monitor thread") do
     while true
-      sleep 0.
+      sleep 0.2
       qsize = Sidekiq.redis do |conn|
        conn.llen "queue:default"
      end
      total = qsize
-      Sidekiq.logger.error("RSS: #{Process.rss} Pending: #{total}")
+      #Sidekiq.logger.error("RSS: #{Process.rss} Pending: #{total}")
      if total == 0
-        Sidekiq.logger.error("Done,
+        Sidekiq.logger.error("Done, #{iter * count} jobs in #{Time.now - start} sec")
+        Sidekiq.logger.error("Now here's the latency for three jobs")
 
        LoadWorker.perform_async(1, Time.now.to_f)
        LoadWorker.perform_async(2, Time.now.to_f)
data/bin/sidekiqmon
CHANGED
data/lib/generators/sidekiq/worker_generator.rb
CHANGED
@@ -16,6 +16,8 @@ module Sidekiq
       end
 
       def create_test_file
+        return unless test_framework
+
         if defined?(RSpec)
           create_worker_spec
         else
@@ -42,6 +44,14 @@ module Sidekiq
         )
         template "worker_test.rb.erb", template_file
       end
+
+      def file_name
+        @_file_name ||= super.sub(/_?worker\z/i, "")
+      end
+
+      def test_framework
+        ::Rails.application.config.generators.options[:rails][:test_framework]
+      end
     end
   end
 end
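A quick note on the `file_name` override above: the regexp strips an optional `worker`/`_worker` suffix, so either spelling resolves to the same base name for the generated files. An illustration of just the substitution:

```ruby
# The same regexp used in file_name above; both inputs yield "payment".
"payment_worker".sub(/_?worker\z/i, "") # => "payment"
"payment".sub(/_?worker\z/i, "")        # => "payment"
```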
data/lib/sidekiq/api.rb
CHANGED
@@ -2,23 +2,11 @@
 
 require "sidekiq"
 
-module Sidekiq
-  module RedisScanner
-    def sscan(conn, key)
-      cursor = "0"
-      result = []
-      loop do
-        cursor, values = conn.sscan(key, cursor)
-        result.push(*values)
-        break if cursor == "0"
-      end
-      result
-    end
-  end
+require "zlib"
+require "base64"
 
+module Sidekiq
   class Stats
-    include RedisScanner
-
     def initialize
       fetch_stats!
     end
@@ -77,11 +65,11 @@ module Sidekiq
       }
 
       processes = Sidekiq.redis { |conn|
-        sscan(conn, "processes")
+        conn.sscan_each("processes").to_a
       }
 
       queues = Sidekiq.redis { |conn|
-        sscan(conn, "queues")
+        conn.sscan_each("queues").to_a
      }
 
       pipe2_res = Sidekiq.redis { |conn|
@@ -92,8 +80,8 @@ module Sidekiq
       }
 
       s = processes.size
-      workers_size = pipe2_res[0...s].
-      enqueued = pipe2_res[s..-1].
+      workers_size = pipe2_res[0...s].sum(&:to_i)
+      enqueued = pipe2_res[s..-1].sum(&:to_i)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
         job = begin
@@ -142,11 +130,9 @@ module Sidekiq
     end
 
     class Queues
-      include RedisScanner
-
       def lengths
         Sidekiq.redis do |conn|
-          queues =
+          queues = conn.sscan_each("queues").to_a
 
           lengths = conn.pipelined {
             queues.each do |queue|
@@ -225,13 +211,12 @@ module Sidekiq
   #
   class Queue
     include Enumerable
-    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c|
+      Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
@@ -349,7 +334,7 @@ module Sidekiq
         end
       when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
         job_class = @item["wrapped"] || args[0]
-        if job_class == "ActionMailer::DeliveryJob"
+        if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
           # MailerClass#mailer_method
           args[0]["arguments"][0..1].join("#")
         else
@@ -372,6 +357,9 @@ module Sidekiq
         if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
           # remove MailerClass, mailer_method and 'deliver_now'
           job_args.drop(3)
+        elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
+          # remove MailerClass, mailer_method and 'deliver_now'
+          job_args.drop(3).first["args"]
         else
           job_args
         end
@@ -400,6 +388,20 @@ module Sidekiq
       Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
     end
 
+    def tags
+      self["tags"] || []
+    end
+
+    def error_backtrace
+      # Cache nil values
+      if defined?(@error_backtrace)
+        @error_backtrace
+      else
+        value = self["error_backtrace"]
+        @error_backtrace = value && uncompress_backtrace(value)
+      end
+    end
+
     attr_reader :queue
 
     def latency
@@ -433,6 +435,23 @@ module Sidekiq
       Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
       default
     end
+
+    def uncompress_backtrace(backtrace)
+      if backtrace.is_a?(Array)
+        # Handle old jobs with raw Array backtrace format
+        backtrace
+      else
+        decoded = Base64.decode64(backtrace)
+        uncompressed = Zlib::Inflate.inflate(decoded)
+        begin
+          Sidekiq.load_json(uncompressed)
+        rescue
+          # Handle old jobs with marshalled backtrace format
+          # TODO Remove in 7.x
+          Marshal.load(uncompressed)
+        end
+      end
+    end
   end
 
   class SortedEntry < Job
@@ -458,8 +477,9 @@ module Sidekiq
     end
 
     def reschedule(at)
-
-
+      Sidekiq.redis do |conn|
+        conn.zincrby(@parent.name, at - @score, Sidekiq.dump_json(@item))
+      end
     end
 
     def add_to_queue
@@ -540,6 +560,17 @@ module Sidekiq
       Sidekiq.redis { |c| c.zcard(name) }
     end
 
+    def scan(match, count = 100)
+      return to_enum(:scan, match, count) unless block_given?
+
+      match = "*#{match}*" unless match.include?("*")
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: match, count: count) do |entry, score|
+          yield SortedEntry.new(self, score, entry)
+        end
+      end
+    end
+
     def clear
       Sidekiq.redis do |conn|
         conn.del(name)
@@ -576,28 +607,40 @@ module Sidekiq
       end
     end
 
+    ##
+    # Fetch jobs that match a given time or Range. Job ID is an
+    # optional second argument.
     def fetch(score, jid = nil)
+      begin_score, end_score =
+        if score.is_a?(Range)
+          [score.first, score.last]
+        else
+          [score, score]
+        end
+
       elements = Sidekiq.redis { |conn|
-        conn.zrangebyscore(name,
+        conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
       }
 
       elements.each_with_object([]) do |element, result|
-
-
-
-        else
-          result << entry
-        end
+        data, job_score = element
+        entry = SortedEntry.new(self, job_score, data)
+        result << entry if jid.nil? || entry.jid == jid
       end
     end
 
     ##
     # Find the job with the given JID within this sorted set.
-    #
-    # This is a slow, inefficient operation. Do not use under
-    # normal conditions. Sidekiq Pro contains a faster version.
+    # This is a slower O(n) operation. Do not use for app logic.
     def find_job(jid)
-
+      Sidekiq.redis do |conn|
+        conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
+          job = JSON.parse(entry)
+          matched = job["jid"] == jid
+          return SortedEntry.new(self, score, entry) if matched
+        end
+      end
+      nil
     end
 
     def delete_by_value(name, value)
@@ -612,11 +655,13 @@ module Sidekiq
       Sidekiq.redis do |conn|
         elements = conn.zrangebyscore(name, score, score)
         elements.each do |element|
-
-
-
-
-
+          if element.index(jid)
+            message = Sidekiq.load_json(element)
+            if message["jid"] == jid
+              ret = conn.zrem(name, element)
+              @_size -= 1 if ret
+              break ret
+            end
          end
        end
      end
@@ -720,7 +765,6 @@ module Sidekiq
   #
   class ProcessSet
     include Enumerable
-    include RedisScanner
 
     def initialize(clean_plz = true)
       cleanup if clean_plz
@@ -731,7 +775,7 @@ module Sidekiq
     def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a.sort
         heartbeats = conn.pipelined {
           procs.each do |key|
             conn.hget(key, "info")
@@ -751,30 +795,28 @@ module Sidekiq
     end
 
     def each
-
+      result = Sidekiq.redis { |conn|
+        procs = conn.sscan_each("processes").to_a.sort
 
-      Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
         # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
         # you'll be happier this way
-
+        conn.pipelined do
          procs.each do |key|
            conn.hmget(key, "info", "busy", "beat", "quiet")
          end
-
+        end
+      }
 
-
-
-
-
-
+      result.each do |info, busy, at_s, quiet|
+        # If a process is stopped between when we query Redis for `procs` and
+        # when we query for `result`, we will have an item in `result` that is
+        # composed of `nil` values.
+        next if info.nil?
 
-
-
-        end
+        hash = Sidekiq.load_json(info)
+        yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
       end
-
-      nil
     end
 
     # This method is not guaranteed accurate since it does not prune the set
@@ -885,11 +927,10 @@ module Sidekiq
   #
   class Workers
     include Enumerable
-    include RedisScanner
 
     def each
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a
         procs.sort.each do |key|
           valid, workers = conn.pipelined {
             conn.exists(key)
@@ -911,7 +952,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs =
+        procs = conn.sscan_each("processes").to_a
        if procs.empty?
          0
        else
@@ -919,7 +960,7 @@ module Sidekiq
          procs.each do |key|
            conn.hget(key, "busy")
          end
-          }.
+          }.sum(&:to_i)
        end
      end
    end