sidekiq-grouping 1.1.0 → 1.3.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- checksums.yaml +4 -4
- data/.github/workflows/lint.yml +37 -0
- data/.github/workflows/test.yml +49 -0
- data/.gitignore +1 -0
- data/.rubocop.yml +31 -3
- data/Appraisals +12 -0
- data/README.md +3 -1
- data/bin/console +8 -0
- data/gemfiles/sidekiq_6.0.gemfile +7 -0
- data/gemfiles/sidekiq_6.5.gemfile +7 -0
- data/gemfiles/sidekiq_7.0.gemfile +7 -0
- data/lefthook.yml +20 -0
- data/lib/sidekiq/grouping/batch.rb +29 -19
- data/lib/sidekiq/grouping/config.rb +34 -22
- data/lib/sidekiq/grouping/flusher.rb +48 -36
- data/lib/sidekiq/grouping/flusher_observer.rb +16 -10
- data/lib/sidekiq/grouping/middleware.rb +27 -14
- data/lib/sidekiq/grouping/redis.rb +53 -30
- data/lib/sidekiq/grouping/redis_dispatcher.rb +29 -0
- data/lib/sidekiq/grouping/version.rb +3 -1
- data/lib/sidekiq/grouping/web.rb +10 -5
- data/lib/sidekiq/grouping.rb +36 -30
- data/sidekiq-grouping.gemspec +14 -7
- data/spec/modules/batch_spec.rb +110 -80
- data/spec/modules/redis_spec.rb +23 -26
- data/spec/spec_helper.rb +16 -6
- data/spec/support/test_workers.rb +7 -10
- metadata +68 -21
- data/.travis.yml +0 -18

@@ -1,82 +1,105 @@
+# frozen_string_literal: true
+
+require_relative "./redis_dispatcher"
+
 module Sidekiq
   module Grouping
     class Redis
+      include RedisDispatcher

       PLUCK_SCRIPT = <<-SCRIPT
-        local pluck_values = redis.call('
-
-
-          redis.call('srem', KEYS[2], v)
+        local pluck_values = redis.call('lpop', KEYS[1], ARGV[1]) or {}
+        if #pluck_values > 0 then
+          redis.call('srem', KEYS[2], unpack(pluck_values))
         end
         return pluck_values
       SCRIPT

-      def push_msg(name, msg, remember_unique
+      def push_msg(name, msg, remember_unique: false)
         redis do |conn|
           conn.multi do |pipeline|
-            pipeline.sadd
-            pipeline
-            pipeline
+            sadd = pipeline.respond_to?(:sadd?) ? :sadd? : :sadd
+            redis_connection_call(pipeline, sadd, ns("batches"), name)
+            redis_connection_call(pipeline, :rpush, ns(name), msg)
+
+            if remember_unique
+              redis_connection_call(
+                pipeline, sadd, unique_messages_key(name), msg
+              )
+            end
           end
         end
       end

       def enqueued?(name, msg)
-
-
-
+        member = redis_call(:sismember, unique_messages_key(name), msg)
+        return member if member.is_a?(TrueClass) || member.is_a?(FalseClass)
+
+        member != 0
       end

       def batch_size(name)
-
+        redis_call(:llen, ns(name))
       end

       def batches
-
+        redis_call(:smembers, ns("batches"))
       end

       def pluck(name, limit)
-
-
-
+        if new_redis_client?
+          redis_call(
+            :eval,
+            PLUCK_SCRIPT,
+            2,
+            ns(name),
+            unique_messages_key(name),
+            limit
+          )
+        else
+          keys = [ns(name), unique_messages_key(name)]
+          args = [limit]
+          redis_call(:eval, PLUCK_SCRIPT, keys, args)
+        end
       end

       def get_last_execution_time(name)
-
+        redis_call(:get, ns("last_execution_time:#{name}"))
       end

       def set_last_execution_time(name, time)
-
+        redis_call(
+          :set, ns("last_execution_time:#{name}"), time.to_json
+        )
       end

       def lock(name)
-
-
-
-
+        redis_call(
+          :set,
+          ns("lock:#{name}"),
+          "true",
+          nx: true,
+          ex: Sidekiq::Grouping::Config.lock_ttl
+        )
       end

       def delete(name)
         redis do |conn|
-          conn
-          conn
-          conn
+          redis_connection_call(conn, :del, ns("last_execution_time:#{name}"))
+          redis_connection_call(conn, :del, ns(name))
+          redis_connection_call(conn, :srem, ns("batches"), name)
         end
       end

       private

-      def unique_messages_key
+      def unique_messages_key(name)
         ns("#{name}:unique_messages")
       end

       def ns(key = nil)
         "batching:#{key}"
       end
-
-      def redis(&block)
-        Sidekiq.redis(&block)
-      end
     end
   end
 end
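
The rewrite routes every command through the new RedisDispatcher helper and replaces the old multi-step plucking script with a single LPOP that takes a count argument (available in Redis 6.2 and newer). A minimal sketch of driving the reworked class directly, not taken from the gem's docs: the batch name and messages are invented, and a Redis instance reachable through Sidekiq's default configuration is assumed.

    # Sketch only: exercising Sidekiq::Grouping::Redis outside of a worker.
    require "json"
    require "sidekiq"
    require "sidekiq/grouping"

    store = Sidekiq::Grouping::Redis.new

    # remember_unique is now a keyword argument (it was positional in 1.1.0).
    store.push_msg("my_batch", ["alpha"].to_json, remember_unique: true)
    store.push_msg("my_batch", ["beta"].to_json, remember_unique: true)

    store.batch_size("my_batch")                   # => 2
    store.enqueued?("my_batch", ["alpha"].to_json) # => true on both redis and redis-client

    # Atomically pops up to 100 raw JSON messages and clears their uniqueness markers.
    store.pluck("my_batch", 100)                   # => ["[\"alpha\"]", "[\"beta\"]"]
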

data/lib/sidekiq/grouping/redis_dispatcher.rb
ADDED
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Sidekiq
+  module Grouping
+    module RedisDispatcher
+      def redis_call(command, *args, **kwargs)
+        redis do |connection|
+          redis_connection_call(connection, command, *args, **kwargs)
+        end
+      end
+
+      def redis_connection_call(connection, command, *args, **kwargs)
+        if new_redis_client? # redis-client
+          connection.call(command.to_s.upcase, *args, **kwargs)
+        else # redis
+          connection.public_send(command, *args, **kwargs)
+        end
+      end
+
+      def new_redis_client?
+        Sidekiq::VERSION[0].to_i >= 7
+      end
+
+      def redis(&block)
+        Sidekiq.redis(&block)
+      end
+    end
+  end
+end
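
RedisDispatcher is the version shim that lets the rest of the gem stay agnostic about which client Sidekiq hands it: on Sidekiq 7+ (redis-client) every command goes through connection.call("LLEN", ...), while on older Sidekiq (redis gem) it becomes a plain method call such as connection.llen(...). A small sketch of reusing the module from application code; the BatchInspector class is invented for illustration and assumes Sidekiq's connection pool is configured.

    # Sketch only: a hypothetical helper that peeks at a batch queue through
    # Sidekiq's connection pool, working on both the redis and redis-client gems.
    require "sidekiq"
    require "sidekiq/grouping/redis_dispatcher"

    class BatchInspector
      include Sidekiq::Grouping::RedisDispatcher

      def pending_count(batch_name)
        # Dispatches to connection.call("LLEN", key) or connection.llen(key)
        # depending on Sidekiq::VERSION, exactly like the gem's own Redis class.
        redis_call(:llen, "batching:#{batch_name}")
      end
    end

    puts BatchInspector.new.pending_count("my_batch")
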

data/lib/sidekiq/grouping/web.rb
CHANGED
@@ -1,24 +1,29 @@
-
+# frozen_string_literal: true
+
+require "sidekiq/web"

 module Sidekiq
   module Grouping
     module Web
-      VIEWS = File.expand_path(
+      VIEWS = File.expand_path("views", File.dirname(__FILE__))

       def self.registered(app)
         app.get "/grouping" do
           @batches = Sidekiq::Grouping::Batch.all
-          erb File.read(File.join(VIEWS,
+          erb File.read(File.join(VIEWS, "index.erb")),
+              locals: { view_path: VIEWS }
         end

         app.post "/grouping/:name/delete" do
-          worker_class, queue =
+          worker_class, queue =
+            Sidekiq::Grouping::Batch.extract_worker_klass_and_queue(
+              params["name"]
+            )
           batch = Sidekiq::Grouping::Batch.new(worker_class, queue)
           batch.delete
           redirect "#{root_path}grouping"
         end
       end
-
     end
   end
 end
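
self.registered(app) is the standard hook Sidekiq::Web invokes when an extension is registered, so the /grouping page and its delete action plug into the existing dashboard. A sketch of wiring it up in an application follows; the initializer path and the explicit registration call are assumptions, and the gem's README remains the authoritative setup guide.

    # config/initializers/sidekiq_web.rb (sketch; assumes a Rack/Rails app that
    # already mounts Sidekiq::Web somewhere in its routes)
    require "sidekiq/web"
    require "sidekiq/grouping/web"

    # Register the extension defined above so GET /grouping and
    # POST /grouping/:name/delete appear under the Sidekiq web UI.
    Sidekiq::Web.register(Sidekiq::Grouping::Web)
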

data/lib/sidekiq/grouping.rb
CHANGED
@@ -1,40 +1,46 @@
+# frozen_string_literal: true
+
 require "active_support"
 require "active_support/core_ext/string"
 require "active_support/configurable"
 require "active_support/core_ext/numeric/time"
+require "sidekiq"
 require "sidekiq/grouping/version"
 require "concurrent"

-module Sidekiq
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+module Sidekiq
+  module Grouping
+    autoload :Config, "sidekiq/grouping/config"
+    autoload :Redis, "sidekiq/grouping/redis"
+    autoload :Batch, "sidekiq/grouping/batch"
+    autoload :Middleware, "sidekiq/grouping/middleware"
+    autoload :Flusher, "sidekiq/grouping/flusher"
+    autoload :FlusherObserver, "sidekiq/grouping/flusher_observer"
+
+    class << self
+      attr_writer :logger
+
+      def logger
+        @logger ||= Sidekiq.logger
+      end
+
+      def force_flush_for_test!
+        Sidekiq::Grouping::Flusher.new.force_flush_for_test!
+      end
+
+      def start!
+        interval = Sidekiq::Grouping::Config.poll_interval
+        @observer = Sidekiq::Grouping::FlusherObserver.new
+        @task = Concurrent::TimerTask.new(
+          execution_interval: interval
+        ) { Sidekiq::Grouping::Flusher.new.flush }
+        @task.add_observer(@observer)
+        logger.info(
+          "[Sidekiq::Grouping] Started polling batches every " \
+          "#{interval} seconds"
+        )
+        @task.execute
+      end
     end
   end
 end
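
The new top-level module keeps per-class autoloads and exposes a small class-level API: a swappable logger and start!, which spins up a Concurrent::TimerTask that flushes ready batches every Config.poll_interval seconds. A sketch of the knobs this hunk introduces; normally start! runs inside the Sidekiq server process rather than a standalone script.

    # Sketch only: using the class-level accessors added in this version.
    require "logger"
    require "sidekiq"
    require "sidekiq/grouping"

    # Route the gem's own log lines to a dedicated logger instead of Sidekiq.logger.
    Sidekiq::Grouping.logger = Logger.new($stdout)

    # Start the background poller; it logs
    # "[Sidekiq::Grouping] Started polling batches every N seconds"
    # and then flushes any batch that is ready on each tick.
    Sidekiq::Grouping.start!
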

data/sidekiq-grouping.gemspec
CHANGED
@@ -1,4 +1,6 @@
-
+# frozen_string_literal: true
+
+lib = File.expand_path("lib", __dir__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 require "sidekiq/grouping/version"

@@ -7,26 +9,31 @@ Gem::Specification.new do |spec|
   spec.version = Sidekiq::Grouping::VERSION
   spec.authors = ["Victor Sokolov"]
   spec.email = ["gzigzigzeo@gmail.com"]
-  spec.summary =
+  spec.summary = <<~SUMMARY
     Allows identical sidekiq jobs to be processed with a single background call
-
+  SUMMARY
   spec.homepage = "http://github.com/gzigzigzeo/sidekiq-grouping"
   spec.license = "MIT"

   spec.files = `git ls-files -z`.split("\x0")
   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
   spec.require_paths = ["lib"]

+  spec.required_ruby_version = ">= 2.7.0"
+
+  spec.add_development_dependency "appraisal"
   spec.add_development_dependency "bundler", "> 1.5"
+  spec.add_development_dependency "pry"
   spec.add_development_dependency "rake"
   spec.add_development_dependency "rspec"
-  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "rspec-sidekiq"
+  spec.add_development_dependency "rubocop"
+  spec.add_development_dependency "rubocop-rspec"
+  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "timecop"
-  spec.add_development_dependency "appraisal"

   spec.add_dependency "activesupport"
-  spec.add_dependency "sidekiq", ">= 3.4.2"
   spec.add_dependency "concurrent-ruby"
+  spec.add_dependency "sidekiq", ">= 3.4.2"
+  spec.metadata["rubygems_mfa_required"] = "true"
 end

data/spec/modules/batch_spec.rb
CHANGED
@@ -1,70 +1,72 @@
-
+# frozen_string_literal: true
+
+require "spec_helper"

 describe Sidekiq::Grouping::Batch do
-  subject {
+  subject(:batch_service) { described_class }

-  context
-    it
-
-
+  context "when adding" do
+    it "must enqueue unbatched worker" do
+      RegularWorker.perform_async("bar")
+      expect(RegularWorker).to have_enqueued_sidekiq_job("bar")
     end

-    it
-      BatchedSizeWorker.perform_async(
-      expect_batch(BatchedSizeWorker,
+    it "must not enqueue batched worker based on batch size setting" do
+      BatchedSizeWorker.perform_async("bar")
+      expect_batch(BatchedSizeWorker, "batched_size")
     end

-    it
-      BatchedIntervalWorker.perform_async(
-      expect_batch(BatchedIntervalWorker,
+    it "must not enqueue batched worker based on interval setting" do
+      BatchedIntervalWorker.perform_async("bar")
+      expect_batch(BatchedIntervalWorker, "batched_interval")
     end

-    it
-      BatchedBothWorker.perform_async(
-      expect_batch(BatchedBothWorker,
+    it "must not enqueue batched worker based on both settings" do
+      BatchedBothWorker.perform_async("bar")
+      expect_batch(BatchedBothWorker, "batched_both")
     end
   end

-  context
-    it
-      batch =
+  context "when checking if should flush" do
+    it "must flush if limit exceeds for limit worker", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")

-      expect(batch
-      BatchedSizeWorker.perform_async(
-      expect(batch
-      4.times { BatchedSizeWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedSizeWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedSizeWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
     end

-    it
-      batch =
+    it "must flush if limit exceeds for both worker", :aggregate_failures do
+      batch = batch_service.new(BatchedBothWorker.name, "batched_both")

-      expect(batch
-      BatchedBothWorker.perform_async(
-      expect(batch
-      4.times { BatchedBothWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedBothWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedBothWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
     end

-    it
-      batch =
+    it "must flush if limit okay but time came", :aggregate_failures do
+      batch = batch_service.new(BatchedIntervalWorker.name, "batched_interval")

-      expect(batch
-      BatchedIntervalWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedIntervalWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
       expect(batch.size).to eq(1)

       Timecop.travel(2.hours.since)

-      expect(batch
+      expect(batch).to be_could_flush
     end
   end

-  context
-    it
-      batch =
+  context "when flushing" do
+    it "must put worker to queue on flush", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")

-      expect(batch
+      expect(batch).not_to be_could_flush
       10.times { |n| BatchedSizeWorker.perform_async("bar#{n}") }
       batch.flush
       expect(BatchedSizeWorker).to(
@@ -74,65 +76,93 @@ describe Sidekiq::Grouping::Batch do
     end
   end

-  context
-    context
-      it
-        batch =
-
+  context "with similar args" do
+    context "when option batch_unique = true" do
+      it "enqueues once" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
         expect(batch.size).to eq(1)
       end

-      it
-        batch =
-
-
-
-
-
+      it "enqueues once each unique set of args" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        6.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 3) }
+        7.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
         expect(batch.size).to eq(3)
       end

-
-
-
-
-
-
-
-
-
-
-      it 'allows to enqueue again after flush' do
-        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-        2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-        2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
-        batch.flush
-        BatchedUniqueArgsWorker.perform_async('bar', 1)
-        BatchedUniqueArgsWorker.perform_async('baz', 1)
-        expect(batch.size).to eq(2)
-      end
+      it "flushes the workers" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        expect(batch.size).to eq(0)
       end

+      it "allows to enqueue again after flush" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        BatchedUniqueArgsWorker.perform_async("bar", 1)
+        BatchedUniqueArgsWorker.perform_async("baz", 1)
+        expect(batch.size).to eq(2)
+      end
     end

-    context
-      it
-        batch =
-        3.times { BatchedSizeWorker.perform_async(
+    context "when batch_unique is not specified" do
+      it "enqueues all" do
+        batch = batch_service.new(BatchedSizeWorker.name, "batched_size")
+        3.times { BatchedSizeWorker.perform_async("bar", 1) }
         expect(batch.size).to eq(3)
       end
     end
   end

+  context "when inline mode" do
+    it "must pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(BatchedSizeWorker)
+          .to receive(:perform).with([[1]])
+
+        BatchedSizeWorker.perform_async(1)
+      end
+    end
+
+    it "must not pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(RegularWorker).to receive(:perform).with(1)
+
+        RegularWorker.perform_async(1)
+      end
+    end
+  end
+
   private
-
-
-
-
+
+  def expect_batch(klass, queue) # rubocop:disable Metrics/AbcSize
+    expect(klass).not_to have_enqueued_sidekiq_job("bar")
+    batch = batch_service.new(klass.name, queue)
+    stats = batch_service.all
     expect(batch.size).to eq(1)
     expect(stats.size).to eq(1)
     expect(stats.first.worker_class).to eq(klass.name)
     expect(stats.first.queue).to eq(queue)
-    expect(batch.pluck).to eq [[
+    expect(batch.pluck).to eq [["bar"]]
   end
 end
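
The specs exercise workers defined in spec/support/test_workers.rb, and the behaviour they pin down is easiest to read from a worker's point of view. A sketch of a batched worker follows; the option names (batch_flush_size, batch_flush_interval, batch_unique) follow the gem's README, while the class, queue and numbers are invented.

    # Sketch only: a worker whose jobs are grouped the way the specs above describe.
    require "sidekiq"

    class BulkIndexWorker
      include Sidekiq::Worker

      sidekiq_options(
        queue: :bulk_index,
        batch_flush_size: 30,     # flush once 30 jobs have accumulated...
        batch_flush_interval: 60, # ...or once 60 seconds have passed
        batch_unique: true        # skip argument sets already waiting in the batch
      )

      # As the "inline mode" examples show, a batched worker receives an array of
      # argument arrays rather than a single argument list.
      def perform(grouped_args)
        grouped_args.each do |args|
          Sidekiq.logger.info("indexing #{args.inspect}")
        end
      end
    end
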

data/spec/modules/redis_spec.rb
CHANGED
@@ -1,47 +1,44 @@
-
+# frozen_string_literal: true
+
+require "spec_helper"

 describe Sidekiq::Grouping::Redis do
-
+  include Sidekiq::Grouping::RedisDispatcher
+
+  subject(:redis_service) { described_class.new }

   let(:queue_name) { "my_queue" }
   let(:key) { "batching:#{queue_name}" }
   let(:unique_key) { "batching:#{queue_name}:unique_messages" }

   describe "#push_msg" do
-    it "adds message to queue" do
-
-      expect(
-      expect(
-      expect(
+    it "adds message to queue", :aggregate_failures do
+      redis_service.push_msg(queue_name, "My message")
+      expect(redis_call(:llen, key)).to eq 1
+      expect(redis_call(:lrange, key, 0, 1)).to eq ["My message"]
+      expect(redis_call(:smembers, unique_key)).to eq []
     end

     it "remembers unique message if specified" do
-
-      expect(
+      redis_service.push_msg(queue_name, "My message", remember_unique: true)
+      expect(redis_call(:smembers, unique_key)).to eq ["My message"]
     end
   end

   describe "#pluck" do
     it "removes messages from queue" do
-
-
-
-      expect(
+      redis_service.push_msg(queue_name, "Message 1")
+      redis_service.push_msg(queue_name, "Message 2")
+      redis_service.pluck(queue_name, 2)
+      expect(redis_call(:llen, key)).to eq 0
     end

-    it "forgets unique messages" do
-
-
-      expect(
-
-      expect(
+    it "forgets unique messages", :aggregate_failures do
+      redis_service.push_msg(queue_name, "Message 1", remember_unique: true)
+      redis_service.push_msg(queue_name, "Message 2", remember_unique: true)
+      expect(redis_call(:scard, unique_key)).to eq 2
+      redis_service.pluck(queue_name, 2)
+      expect(redis_call(:smembers, unique_key)).to eq []
     end
   end
-
-  private
-
-  def redis(&block)
-    Sidekiq.redis(&block)
-  end
-
 end