sidekiq-grouping 1.1.0 → 1.2.0
- checksums.yaml +4 -4
- data/.github/workflows/lint.yml +37 -0
- data/.github/workflows/test.yml +49 -0
- data/.rubocop.yml +31 -3
- data/Appraisals +6 -2
- data/README.md +3 -1
- data/gemfiles/sidekiq_6.0.gemfile +7 -0
- data/gemfiles/sidekiq_6.5.gemfile +7 -0
- data/lefthook.yml +15 -0
- data/lib/sidekiq/grouping/batch.rb +29 -19
- data/lib/sidekiq/grouping/config.rb +32 -22
- data/lib/sidekiq/grouping/flusher.rb +48 -36
- data/lib/sidekiq/grouping/flusher_observer.rb +16 -10
- data/lib/sidekiq/grouping/middleware.rb +27 -14
- data/lib/sidekiq/grouping/redis.rb +20 -11
- data/lib/sidekiq/grouping/version.rb +3 -1
- data/lib/sidekiq/grouping/web.rb +10 -5
- data/lib/sidekiq/grouping.rb +35 -30
- data/sidekiq-grouping.gemspec +13 -7
- data/spec/modules/batch_spec.rb +110 -80
- data/spec/modules/redis_spec.rb +17 -16
- data/spec/spec_helper.rb +5 -3
- data/spec/support/test_workers.rb +7 -10
- metadata +55 -20
- data/.travis.yml +0 -18
data/lib/sidekiq/grouping/web.rb
CHANGED
@@ -1,24 +1,29 @@
-
+# frozen_string_literal: true
+
+require "sidekiq/web"

 module Sidekiq
   module Grouping
     module Web
-      VIEWS = File.expand_path(
+      VIEWS = File.expand_path("views", File.dirname(__FILE__))

       def self.registered(app)
         app.get "/grouping" do
           @batches = Sidekiq::Grouping::Batch.all
-          erb File.read(File.join(VIEWS,
+          erb File.read(File.join(VIEWS, "index.erb")),
+              locals: { view_path: VIEWS }
         end

         app.post "/grouping/:name/delete" do
-          worker_class, queue =
+          worker_class, queue =
+            Sidekiq::Grouping::Batch.extract_worker_klass_and_queue(
+              params["name"]
+            )
           batch = Sidekiq::Grouping::Batch.new(worker_class, queue)
           batch.delete
           redirect "#{root_path}grouping"
         end
       end
-
     end
   end
 end
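
The `self.registered(app)` hook above follows the Sinatra-style extension pattern that Sidekiq's web UI consumes. As a point of reference, wiring the grouping tab into a Rack app typically looks like the sketch below (illustrative only; whether the gem registers itself automatically is not shown in this diff):

require "sidekiq/web"
require "sidekiq/grouping/web"

# Attach the extension so GET /grouping and POST /grouping/:name/delete are served.
Sidekiq::Web.register(Sidekiq::Grouping::Web)

run Sidekiq::Web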
data/lib/sidekiq/grouping.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "active_support"
 require "active_support/core_ext/string"
 require "active_support/configurable"
@@ -5,36 +7,39 @@ require "active_support/core_ext/numeric/time"
 require "sidekiq/grouping/version"
 require "concurrent"

-module Sidekiq
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+module Sidekiq
+  module Grouping
+    autoload :Config, "sidekiq/grouping/config"
+    autoload :Redis, "sidekiq/grouping/redis"
+    autoload :Batch, "sidekiq/grouping/batch"
+    autoload :Middleware, "sidekiq/grouping/middleware"
+    autoload :Flusher, "sidekiq/grouping/flusher"
+    autoload :FlusherObserver, "sidekiq/grouping/flusher_observer"
+
+    class << self
+      attr_writer :logger
+
+      def logger
+        @logger ||= Sidekiq.logger
+      end
+
+      def force_flush_for_test!
+        Sidekiq::Grouping::Flusher.new.force_flush_for_test!
+      end
+
+      def start!
+        interval = Sidekiq::Grouping::Config.poll_interval
+        @observer = Sidekiq::Grouping::FlusherObserver.new
+        @task = Concurrent::TimerTask.new(
+          execution_interval: interval
+        ) { Sidekiq::Grouping::Flusher.new.flush }
+        @task.add_observer(@observer)
+        logger.info(
+          "[Sidekiq::Grouping] Started polling batches every " \
+          "#{interval} seconds"
+        )
+        @task.execute
+      end
     end
   end
 end
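
The new `start!` drives batch flushing with a `concurrent-ruby` timer. A minimal standalone sketch of the same polling pattern (the 5-second interval and the log line are placeholders; the real code reads `Sidekiq::Grouping::Config.poll_interval` and calls `Sidekiq::Grouping::Flusher#flush`):

require "concurrent"

interval = 5 # placeholder for Sidekiq::Grouping::Config.poll_interval
task = Concurrent::TimerTask.new(execution_interval: interval) do
  # In the gem this block is Sidekiq::Grouping::Flusher.new.flush
  puts "checking batches for flush"
end
task.execute # runs the block every `interval` seconds on a background thread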
data/sidekiq-grouping.gemspec
CHANGED
@@ -1,4 +1,6 @@
-
+# frozen_string_literal: true
+
+lib = File.expand_path("lib", __dir__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 require "sidekiq/grouping/version"

@@ -7,26 +9,30 @@ Gem::Specification.new do |spec|
   spec.version = Sidekiq::Grouping::VERSION
   spec.authors = ["Victor Sokolov"]
   spec.email = ["gzigzigzeo@gmail.com"]
-  spec.summary =
+  spec.summary = <<~SUMMARY
     Allows identical sidekiq jobs to be processed with a single background call
-
+  SUMMARY
   spec.homepage = "http://github.com/gzigzigzeo/sidekiq-grouping"
   spec.license = "MIT"

   spec.files = `git ls-files -z`.split("\x0")
   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
   spec.require_paths = ["lib"]

+  spec.required_ruby_version = ">= 2.7.0"
+
+  spec.add_development_dependency "appraisal"
   spec.add_development_dependency "bundler", "> 1.5"
   spec.add_development_dependency "rake"
   spec.add_development_dependency "rspec"
-  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "rspec-sidekiq"
+  spec.add_development_dependency "rubocop"
+  spec.add_development_dependency "rubocop-rspec"
+  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "timecop"
-  spec.add_development_dependency "appraisal"

   spec.add_dependency "activesupport"
-  spec.add_dependency "sidekiq", ">= 3.4.2"
   spec.add_dependency "concurrent-ruby"
+  spec.add_dependency "sidekiq", ">= 3.4.2", "< 7"
+  spec.metadata["rubygems_mfa_required"] = "true"
 end
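
An application upgrading to 1.2.0 inherits the tightened constraints above (Ruby >= 2.7, Sidekiq below 7). A Gemfile that stays inside them might look like this (the pessimistic pin on sidekiq-grouping is illustrative):

# Gemfile
gem "sidekiq", ">= 3.4.2", "< 7" # mirrors the gemspec's new upper bound
gem "sidekiq-grouping", "~> 1.2"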
data/spec/modules/batch_spec.rb
CHANGED
@@ -1,70 +1,72 @@
-
+# frozen_string_literal: true
+
+require "spec_helper"

 describe Sidekiq::Grouping::Batch do
-  subject {
+  subject(:batch_service) { described_class }

-  context
-    it
-
-
+  context "when adding" do
+    it "must enqueue unbatched worker" do
+      RegularWorker.perform_async("bar")
+      expect(RegularWorker).to have_enqueued_sidekiq_job("bar")
    end

-    it
-      BatchedSizeWorker.perform_async(
-      expect_batch(BatchedSizeWorker,
+    it "must not enqueue batched worker based on batch size setting" do
+      BatchedSizeWorker.perform_async("bar")
+      expect_batch(BatchedSizeWorker, "batched_size")
    end

-    it
-      BatchedIntervalWorker.perform_async(
-      expect_batch(BatchedIntervalWorker,
+    it "must not enqueue batched worker based on interval setting" do
+      BatchedIntervalWorker.perform_async("bar")
+      expect_batch(BatchedIntervalWorker, "batched_interval")
    end

-    it
-      BatchedBothWorker.perform_async(
-      expect_batch(BatchedBothWorker,
+    it "must not enqueue batched worker based on both settings" do
+      BatchedBothWorker.perform_async("bar")
+      expect_batch(BatchedBothWorker, "batched_both")
    end
  end

-  context
-    it
-      batch =
+  context "when checking if should flush" do
+    it "must flush if limit exceeds for limit worker", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")

-      expect(batch
-      BatchedSizeWorker.perform_async(
-      expect(batch
-      4.times { BatchedSizeWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedSizeWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedSizeWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
    end

-    it
-      batch =
+    it "must flush if limit exceeds for both worker", :aggregate_failures do
+      batch = batch_service.new(BatchedBothWorker.name, "batched_both")

-      expect(batch
-      BatchedBothWorker.perform_async(
-      expect(batch
-      4.times { BatchedBothWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedBothWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedBothWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
    end

-    it
-      batch =
+    it "must flush if limit okay but time came", :aggregate_failures do
+      batch = batch_service.new(BatchedIntervalWorker.name, "batched_interval")

-      expect(batch
-      BatchedIntervalWorker.perform_async(
-      expect(batch
+      expect(batch).not_to be_could_flush
+      BatchedIntervalWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
      expect(batch.size).to eq(1)

      Timecop.travel(2.hours.since)

-      expect(batch
+      expect(batch).to be_could_flush
    end
  end

-  context
-    it
-      batch =
+  context "when flushing" do
+    it "must put worker to queue on flush", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")

-      expect(batch
+      expect(batch).not_to be_could_flush
      10.times { |n| BatchedSizeWorker.perform_async("bar#{n}") }
      batch.flush
      expect(BatchedSizeWorker).to(
@@ -74,65 +76,93 @@ describe Sidekiq::Grouping::Batch do
    end
  end

-  context
-    context
-      it
-        batch =
-
+  context "with similar args" do
+    context "when option batch_unique = true" do
+      it "enqueues once" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
        expect(batch.size).to eq(1)
      end

-      it
-        batch =
-
-
-
-
-
+      it "enqueues once each unique set of args" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        6.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 3) }
+        7.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
        expect(batch.size).to eq(3)
      end

-
-
-
-
-
-
-
-
-
-
-      it 'allows to enqueue again after flush' do
-        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-        2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-        2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
-        batch.flush
-        BatchedUniqueArgsWorker.perform_async('bar', 1)
-        BatchedUniqueArgsWorker.perform_async('baz', 1)
-        expect(batch.size).to eq(2)
-      end
+      it "flushes the workers" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        expect(batch.size).to eq(0)
      end

+      it "allows to enqueue again after flush" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        BatchedUniqueArgsWorker.perform_async("bar", 1)
+        BatchedUniqueArgsWorker.perform_async("baz", 1)
+        expect(batch.size).to eq(2)
+      end
    end

-    context
-      it
-        batch =
-        3.times { BatchedSizeWorker.perform_async(
+    context "when batch_unique is not specified" do
+      it "enqueues all" do
+        batch = batch_service.new(BatchedSizeWorker.name, "batched_size")
+        3.times { BatchedSizeWorker.perform_async("bar", 1) }
        expect(batch.size).to eq(3)
      end
    end
  end

+  context "when inline mode" do
+    it "must pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(BatchedSizeWorker)
+          .to receive(:perform).with([[1]])
+
+        BatchedSizeWorker.perform_async(1)
+      end
+    end
+
+    it "must not pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(RegularWorker).to receive(:perform).with(1)
+
+        RegularWorker.perform_async(1)
+      end
+    end
+  end
+
  private
-
-
-
-
+
+  def expect_batch(klass, queue) # rubocop:disable Metrics/AbcSize
+    expect(klass).not_to have_enqueued_sidekiq_job("bar")
+    batch = batch_service.new(klass.name, queue)
+    stats = batch_service.all
    expect(batch.size).to eq(1)
    expect(stats.size).to eq(1)
    expect(stats.first.worker_class).to eq(klass.name)
    expect(stats.first.queue).to eq(queue)
-    expect(batch.pluck).to eq [[
+    expect(batch.pluck).to eq [["bar"]]
  end
end
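
The new inline-mode examples pin down the batching contract: a flushed grouped worker receives an array of argument arrays (`[[1]]` for a single grouped job), while a regular worker still receives its arguments directly. A hedged sketch of a consumer written against that contract (`NotificationBatchWorker` and `deliver` are hypothetical; the sidekiq_options mirror the ones used in spec/support/test_workers.rb):

class NotificationBatchWorker
  include Sidekiq::Worker
  sidekiq_options queue: :notifications, batch_flush_size: 3

  # After a flush, perform receives the grouped jobs' arguments as an
  # array of argument arrays, e.g. [["bob"], ["alice"], ["carol"]].
  def perform(grouped_args)
    grouped_args.each { |args| deliver(*args) }
  end

  private

  def deliver(recipient)
    # per-job logic would go here
  end
end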
data/spec/modules/redis_spec.rb
CHANGED
@@ -1,39 +1,41 @@
-
+# frozen_string_literal: true
+
+require "spec_helper"

 describe Sidekiq::Grouping::Redis do
-  subject {
+  subject(:redis_service) { described_class.new }

   let(:queue_name) { "my_queue" }
   let(:key) { "batching:#{queue_name}" }
   let(:unique_key) { "batching:#{queue_name}:unique_messages" }

   describe "#push_msg" do
-    it "adds message to queue" do
-
+    it "adds message to queue", :aggregate_failures do
+      redis_service.push_msg(queue_name, "My message")
      expect(redis { |c| c.llen key }).to eq 1
-      expect(redis { |c| c.lrange key, 0, 1 }).to eq [
-      expect(redis { |c| c.smembers unique_key}).to eq []
+      expect(redis { |c| c.lrange key, 0, 1 }).to eq ["My message"]
+      expect(redis { |c| c.smembers unique_key }).to eq []
    end

    it "remembers unique message if specified" do
-
-      expect(redis { |c| c.smembers unique_key}).to eq [
+      redis_service.push_msg(queue_name, "My message", remember_unique: true)
+      expect(redis { |c| c.smembers unique_key }).to eq ["My message"]
    end
  end

  describe "#pluck" do
    it "removes messages from queue" do
-
-
-
+      redis_service.push_msg(queue_name, "Message 1")
+      redis_service.push_msg(queue_name, "Message 2")
+      redis_service.pluck(queue_name, 2)
      expect(redis { |c| c.llen key }).to eq 0
    end

-    it "forgets unique messages" do
-
-
+    it "forgets unique messages", :aggregate_failures do
+      redis_service.push_msg(queue_name, "Message 1", remember_unique: true)
+      redis_service.push_msg(queue_name, "Message 2", remember_unique: true)
      expect(redis { |c| c.scard unique_key }).to eq 2
-
+      redis_service.pluck(queue_name, 2)
      expect(redis { |c| c.smembers unique_key }).to eq []
    end
  end
@@ -43,5 +45,4 @@ describe Sidekiq::Grouping::Redis do
  def redis(&block)
    Sidekiq.redis(&block)
  end
-
end
CHANGED
@@ -1,3 +1,5 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
1
3
|
$LOAD_PATH << "." unless $LOAD_PATH.include?(".")
|
2
4
|
|
3
5
|
require "rubygems"
|
@@ -15,7 +17,7 @@ end
|
|
15
17
|
require "sidekiq/grouping"
|
16
18
|
|
17
19
|
Sidekiq::Grouping.logger = nil
|
18
|
-
Sidekiq.redis = {
|
20
|
+
Sidekiq.redis = { db: ENV.fetch("db", 1) }
|
19
21
|
Sidekiq.logger = nil
|
20
22
|
|
21
23
|
RSpec::Sidekiq.configure do |config|
|
@@ -28,14 +30,14 @@ RSpec.configure do |config|
|
|
28
30
|
config.run_all_when_everything_filtered = true
|
29
31
|
config.filter_run :focus
|
30
32
|
|
31
|
-
config.before
|
33
|
+
config.before do
|
32
34
|
Sidekiq.redis do |conn|
|
33
35
|
keys = conn.keys "*batching*"
|
34
36
|
keys.each { |key| conn.del key }
|
35
37
|
end
|
36
38
|
end
|
37
39
|
|
38
|
-
config.after
|
40
|
+
config.after do
|
39
41
|
Timecop.return
|
40
42
|
end
|
41
43
|
end
|
data/spec/support/test_workers.rb
CHANGED
@@ -1,8 +1,9 @@
+# frozen_string_literal: true
+
 class RegularWorker
   include Sidekiq::Worker

-  def perform(foo)
-  end
+  def perform(foo); end
 end

 class BatchedSizeWorker
@@ -10,8 +11,7 @@ class BatchedSizeWorker

   sidekiq_options queue: :batched_size, batch_flush_size: 3, batch_size: 2

-  def perform(foo)
-  end
+  def perform(foo); end
 end

 class BatchedIntervalWorker
@@ -19,8 +19,7 @@ class BatchedIntervalWorker

   sidekiq_options queue: :batched_interval, batch_flush_interval: 3600

-  def perform(foo)
-  end
+  def perform(foo); end
 end

 class BatchedBothWorker
@@ -30,8 +29,7 @@ class BatchedBothWorker
     queue: :batched_both, batch_flush_interval: 3600, batch_flush_size: 3
   )

-  def perform(foo)
-  end
+  def perform(foo); end
 end

 class BatchedUniqueArgsWorker
@@ -41,6 +39,5 @@ class BatchedUniqueArgsWorker
     queue: :batched_unique_args, batch_flush_size: 3, batch_unique: true
   )

-  def perform(foo)
-  end
+  def perform(foo); end
 end