sidekiq-grouping 1.0.10 → 1.3.0

@@ -1,82 +1,105 @@
+# frozen_string_literal: true
+
+require_relative "./redis_dispatcher"
+
 module Sidekiq
   module Grouping
     class Redis
+      include RedisDispatcher
 
       PLUCK_SCRIPT = <<-SCRIPT
-        local pluck_values = redis.call('lrange', KEYS[1], 0, ARGV[1] - 1)
-        redis.call('ltrim', KEYS[1], ARGV[1], -1)
-        for k, v in pairs(pluck_values) do
-          redis.call('srem', KEYS[2], v)
+        local pluck_values = redis.call('lpop', KEYS[1], ARGV[1]) or {}
+        if #pluck_values > 0 then
+          redis.call('srem', KEYS[2], unpack(pluck_values))
         end
         return pluck_values
       SCRIPT
 
-      def push_msg(name, msg, remember_unique = false)
+      def push_msg(name, msg, remember_unique: false)
         redis do |conn|
-          conn.multi do
-            conn.sadd(ns('batches'), name)
-            conn.rpush(ns(name), msg)
-            conn.sadd(unique_messages_key(name), msg) if remember_unique
+          conn.multi do |pipeline|
+            sadd = pipeline.respond_to?(:sadd?) ? :sadd? : :sadd
+            redis_connection_call(pipeline, sadd, ns("batches"), name)
+            redis_connection_call(pipeline, :rpush, ns(name), msg)
+
+            if remember_unique
+              redis_connection_call(
+                pipeline, sadd, unique_messages_key(name), msg
+              )
+            end
           end
         end
       end
 
       def enqueued?(name, msg)
-        redis do |conn|
-          conn.sismember(unique_messages_key(name), msg)
-        end
+        member = redis_call(:sismember, unique_messages_key(name), msg)
+        return member if member.is_a?(TrueClass) || member.is_a?(FalseClass)
+
+        member != 0
       end
 
       def batch_size(name)
-        redis { |conn| conn.llen(ns(name)) }
+        redis_call(:llen, ns(name))
       end
 
       def batches
-        redis { |conn| conn.smembers(ns('batches')) }
+        redis_call(:smembers, ns("batches"))
       end
 
       def pluck(name, limit)
-        keys = [ns(name), unique_messages_key(name)]
-        args = [limit]
-        redis { |conn| conn.eval PLUCK_SCRIPT, keys, args }
+        if new_redis_client?
+          redis_call(
+            :eval,
+            PLUCK_SCRIPT,
+            2,
+            ns(name),
+            unique_messages_key(name),
+            limit
+          )
+        else
+          keys = [ns(name), unique_messages_key(name)]
+          args = [limit]
+          redis_call(:eval, PLUCK_SCRIPT, keys, args)
+        end
       end
 
       def get_last_execution_time(name)
-        redis { |conn| conn.get(ns("last_execution_time:#{name}")) }
+        redis_call(:get, ns("last_execution_time:#{name}"))
       end
 
       def set_last_execution_time(name, time)
-        redis { |conn| conn.set(ns("last_execution_time:#{name}"), time.to_json) }
+        redis_call(
+          :set, ns("last_execution_time:#{name}"), time.to_json
+        )
       end
 
       def lock(name)
-        redis do |conn|
-          id = ns("lock:#{name}")
-          conn.set(id, true, nx: true, ex: Sidekiq::Grouping::Config.lock_ttl)
-        end
+        redis_call(
+          :set,
+          ns("lock:#{name}"),
+          "true",
+          nx: true,
+          ex: Sidekiq::Grouping::Config.lock_ttl
+        )
      end
 
       def delete(name)
         redis do |conn|
-          conn.del(ns("last_execution_time:#{name}"))
-          conn.del(ns(name))
-          conn.srem(ns('batches'), name)
+          redis_connection_call(conn, :del, ns("last_execution_time:#{name}"))
+          redis_connection_call(conn, :del, ns(name))
+          redis_connection_call(conn, :srem, ns("batches"), name)
         end
       end
 
       private
 
-      def unique_messages_key name
+      def unique_messages_key(name)
         ns("#{name}:unique_messages")
       end
 
       def ns(key = nil)
         "batching:#{key}"
       end
-
-      def redis(&block)
-        Sidekiq.redis(&block)
-      end
    end
  end
 end
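A few notes on the rewritten store. The new Lua script relies on the `LPOP key count` form, which requires Redis 6.2 or newer; inside Lua an `lpop` on an empty list returns `false` rather than an empty table, hence the `or {}` guard. `lock` now writes the string `"true"` because redis-client, unlike the older redis gem, raises a `TypeError` for argument types it cannot serialize, such as booleans. The `pipeline.respond_to?(:sadd?)` probe selects the boolean-returning `sadd?` that newer redis gem versions provide in place of the deprecated boolean `sadd`. A usage sketch of the reworked class (queue name and payloads are invented for illustration):

```ruby
store = Sidekiq::Grouping::Redis.new

store.push_msg("my_queue", '["bar"]', remember_unique: true)
store.enqueued?("my_queue", '["bar"]') # => true under both redis and redis-client
store.batch_size("my_queue")           # => 1
store.pluck("my_queue", 100)           # => ['["bar"]'], also clears the unique set
```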
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Sidekiq
+  module Grouping
+    module RedisDispatcher
+      def redis_call(command, *args, **kwargs)
+        redis do |connection|
+          redis_connection_call(connection, command, *args, **kwargs)
+        end
+      end
+
+      def redis_connection_call(connection, command, *args, **kwargs)
+        if new_redis_client? # redis-client
+          connection.call(command.to_s.upcase, *args, **kwargs)
+        else # redis
+          connection.public_send(command, *args, **kwargs)
+        end
+      end
+
+      def new_redis_client?
+        Sidekiq::VERSION[0].to_i >= 7
+      end
+
+      def redis(&block)
+        Sidekiq.redis(&block)
+      end
+    end
+  end
+end
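The new `RedisDispatcher` module is the compatibility shim the `Redis` class above builds on: Sidekiq 7 swapped the `redis` gem for `redis-client`, whose generic entry point is `call` with an uppercase command name, while older Sidekiq versions expose one method per command. A sketch of what a dispatched call resolves to (`AnyStore` is a made-up including class):

```ruby
class AnyStore
  include Sidekiq::Grouping::RedisDispatcher
end

AnyStore.new.redis_call(:llen, "batching:my_queue")
# Sidekiq >= 7 -> connection.call("LLEN", "batching:my_queue")  (redis-client)
# Sidekiq <  7 -> connection.llen("batching:my_queue")          (redis gem)
```

One caveat worth noting: `Sidekiq::VERSION[0]` reads only the first character of the version string, so the check would misclassify a hypothetical Sidekiq 10.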
@@ -1,5 +1,7 @@
+# frozen_string_literal: true
+
 module Sidekiq
   module Grouping
-    VERSION = "1.0.10"
+    VERSION = "1.3.0"
   end
 end
@@ -1,24 +1,29 @@
-require 'sidekiq/web'
+# frozen_string_literal: true
+
+require "sidekiq/web"
 
 module Sidekiq
   module Grouping
     module Web
-      VIEWS = File.expand_path('views', File.dirname(__FILE__))
+      VIEWS = File.expand_path("views", File.dirname(__FILE__))
 
       def self.registered(app)
         app.get "/grouping" do
           @batches = Sidekiq::Grouping::Batch.all
-          erb File.read(File.join(VIEWS, 'index.erb')), locals: {view_path: VIEWS}
+          erb File.read(File.join(VIEWS, "index.erb")),
+              locals: { view_path: VIEWS }
         end
 
         app.post "/grouping/:name/delete" do
-          worker_class, queue = Sidekiq::Grouping::Batch.extract_worker_klass_and_queue(params['name'])
+          worker_class, queue =
+            Sidekiq::Grouping::Batch.extract_worker_klass_and_queue(
+              params["name"]
+            )
           batch = Sidekiq::Grouping::Batch.new(worker_class, queue)
           batch.delete
           redirect "#{root_path}grouping"
         end
       end
-
     end
   end
 end
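`registered` is the Sinatra-style extension hook, so the module plugs into the Sidekiq UI the usual way. A wiring sketch, assuming the standard `Sidekiq::Web.register` entry point (the registration call itself is not part of this diff):

```ruby
require "sidekiq/web"
require "sidekiq/grouping/web"

# Invokes Sidekiq::Grouping::Web.registered(app), adding the
# GET /grouping and POST /grouping/:name/delete routes shown above.
Sidekiq::Web.register(Sidekiq::Grouping::Web)
```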
@@ -1,39 +1,46 @@
+# frozen_string_literal: true
+
+require "active_support"
 require "active_support/core_ext/string"
 require "active_support/configurable"
 require "active_support/core_ext/numeric/time"
+require "sidekiq"
 require "sidekiq/grouping/version"
 require "concurrent"
 
-module Sidekiq::Grouping
-  autoload :Config, "sidekiq/grouping/config"
-  autoload :Redis, "sidekiq/grouping/redis"
-  autoload :Batch, "sidekiq/grouping/batch"
-  autoload :Middleware, "sidekiq/grouping/middleware"
-  autoload :Flusher, "sidekiq/grouping/flusher"
-  autoload :FlusherObserver, "sidekiq/grouping/flusher_observer"
-
-  class << self
-    attr_writer :logger
-
-    def logger
-      @logger ||= Sidekiq.logger
-    end
-
-    def force_flush_for_test!
-      Sidekiq::Grouping::Flusher.new.force_flush_for_test!
-    end
-
-    def start!
-      interval = Sidekiq::Grouping::Config.poll_interval
-      @observer = Sidekiq::Grouping::FlusherObserver.new
-      @task = Concurrent::TimerTask.new(
-        execution_interval: interval
-      ) { Sidekiq::Grouping::Flusher.new.flush }
-      @task.add_observer(@observer)
-      logger.info(
-        "[Sidekiq::Grouping] Started polling batches every #{interval} seconds"
-      )
-      @task.execute
+module Sidekiq
+  module Grouping
+    autoload :Config, "sidekiq/grouping/config"
+    autoload :Redis, "sidekiq/grouping/redis"
+    autoload :Batch, "sidekiq/grouping/batch"
+    autoload :Middleware, "sidekiq/grouping/middleware"
+    autoload :Flusher, "sidekiq/grouping/flusher"
+    autoload :FlusherObserver, "sidekiq/grouping/flusher_observer"
+
+    class << self
+      attr_writer :logger
+
+      def logger
+        @logger ||= Sidekiq.logger
+      end
+
+      def force_flush_for_test!
+        Sidekiq::Grouping::Flusher.new.force_flush_for_test!
+      end
+
+      def start!
+        interval = Sidekiq::Grouping::Config.poll_interval
+        @observer = Sidekiq::Grouping::FlusherObserver.new
+        @task = Concurrent::TimerTask.new(
+          execution_interval: interval
+        ) { Sidekiq::Grouping::Flusher.new.flush }
+        @task.add_observer(@observer)
+        logger.info(
+          "[Sidekiq::Grouping] Started polling batches every " \
+          "#{interval} seconds"
+        )
+        @task.execute
+      end
     end
   end
 end
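For context, `start!` drives flushing with a `Concurrent::TimerTask` rather than a hand-rolled thread loop; the observer is notified after every tick, which is how failures get surfaced without killing the timer. A standalone sketch of the same mechanism (interval and block body are illustrative, not the gem's values):

```ruby
require "concurrent"

# Observer following concurrent-ruby's update(time, result, error) contract.
observer = Class.new do
  def update(_time, _result, error)
    warn "tick failed: #{error.message}" if error
  end
end.new

task = Concurrent::TimerTask.new(execution_interval: 5) { puts "flush tick" }
task.add_observer(observer)
task.execute # runs the block every 5 seconds on a background thread
```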
@@ -1,4 +1,6 @@
-lib = File.expand_path("../lib", __FILE__)
+# frozen_string_literal: true
+
+lib = File.expand_path("lib", __dir__)
 $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 require "sidekiq/grouping/version"
 
@@ -7,26 +9,31 @@ Gem::Specification.new do |spec|
   spec.version = Sidekiq::Grouping::VERSION
   spec.authors = ["Victor Sokolov"]
   spec.email = ["gzigzigzeo@gmail.com"]
-  spec.summary = %q(
+  spec.summary = <<~SUMMARY
     Allows identical sidekiq jobs to be processed with a single background call
-  )
+  SUMMARY
   spec.homepage = "http://github.com/gzigzigzeo/sidekiq-grouping"
   spec.license = "MIT"
 
   spec.files = `git ls-files -z`.split("\x0")
   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
   spec.require_paths = ["lib"]
 
-  spec.add_development_dependency "bundler", "~> 1.5"
+  spec.required_ruby_version = ">= 2.7.0"
+
+  spec.add_development_dependency "appraisal"
+  spec.add_development_dependency "bundler", "> 1.5"
+  spec.add_development_dependency "pry"
   spec.add_development_dependency "rake"
   spec.add_development_dependency "rspec"
-  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "rspec-sidekiq"
+  spec.add_development_dependency "rubocop"
+  spec.add_development_dependency "rubocop-rspec"
+  spec.add_development_dependency "simplecov"
   spec.add_development_dependency "timecop"
-  spec.add_development_dependency "appraisal"
 
   spec.add_dependency "activesupport"
-  spec.add_dependency "sidekiq", ">= 3.4.2"
   spec.add_dependency "concurrent-ruby"
+  spec.add_dependency "sidekiq", ">= 3.4.2"
+  spec.metadata["rubygems_mfa_required"] = "true"
 end
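The summary's switch from `%q(...)` to a squiggly heredoc is more than style: `%q` kept the surrounding newlines and indentation inside the string, while `<<~` strips the common leading whitespace. A quick standalone comparison:

```ruby
with_percent_q = %q(
  Allows identical sidekiq jobs to be processed with a single background call
)
with_heredoc = <<~SUMMARY
  Allows identical sidekiq jobs to be processed with a single background call
SUMMARY

with_percent_q[0] # => "\n" (leading newline and indentation preserved)
with_heredoc[0]   # => "A"  (indentation stripped, trailing newline kept)
```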
@@ -1,70 +1,72 @@
-require 'spec_helper'
+# frozen_string_literal: true
+
+require "spec_helper"
 
 describe Sidekiq::Grouping::Batch do
-  subject { Sidekiq::Grouping::Batch }
+  subject(:batch_service) { described_class }
 
-  context 'adding' do
-    it 'must enqueue unbatched worker' do
-      RegularWorker.perform_async('bar')
-      expect(RegularWorker).to have_enqueued_sidekiq_job("bar")
+  context "when adding" do
+    it "must enqueue unbatched worker" do
+      RegularWorker.perform_async("bar")
+      expect(RegularWorker).to have_enqueued_sidekiq_job("bar")
     end
 
-    it 'must not enqueue batched worker' do
-      BatchedSizeWorker.perform_async('bar')
-      expect_batch(BatchedSizeWorker, 'batched_size')
+    it "must not enqueue batched worker based on batch size setting" do
+      BatchedSizeWorker.perform_async("bar")
+      expect_batch(BatchedSizeWorker, "batched_size")
     end
 
-    it 'must not enqueue batched worker' do
-      BatchedIntervalWorker.perform_async('bar')
-      expect_batch(BatchedIntervalWorker, 'batched_interval')
+    it "must not enqueue batched worker based on interval setting" do
+      BatchedIntervalWorker.perform_async("bar")
+      expect_batch(BatchedIntervalWorker, "batched_interval")
     end
 
-    it 'must not enqueue batched worker' do
-      BatchedBothWorker.perform_async('bar')
-      expect_batch(BatchedBothWorker, 'batched_both')
+    it "must not enqueue batched worker based on both settings" do
+      BatchedBothWorker.perform_async("bar")
+      expect_batch(BatchedBothWorker, "batched_both")
    end
  end
 
-  context 'checking if should flush' do
-    it 'must flush if limit exceeds for limit worker' do
-      batch = subject.new(BatchedSizeWorker.name, 'batched_size')
+  context "when checking if should flush" do
+    it "must flush if limit exceeds for limit worker", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")
 
-      expect(batch.could_flush?).to be_falsy
-      BatchedSizeWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_falsy
-      4.times { BatchedSizeWorker.perform_async('bar') }
-      expect(batch.could_flush?).to be_truthy
+      expect(batch).not_to be_could_flush
+      BatchedSizeWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedSizeWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
     end
 
-    it 'must flush if limit exceeds for both worker' do
-      batch = subject.new(BatchedBothWorker.name, 'batched_both')
+    it "must flush if limit exceeds for both worker", :aggregate_failures do
+      batch = batch_service.new(BatchedBothWorker.name, "batched_both")
 
-      expect(batch.could_flush?).to be_falsy
-      BatchedBothWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_falsy
-      4.times { BatchedBothWorker.perform_async('bar') }
-      expect(batch.could_flush?).to be_truthy
+      expect(batch).not_to be_could_flush
+      BatchedBothWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
+      4.times { BatchedBothWorker.perform_async("bar") }
+      expect(batch).to be_could_flush
    end
 
-    it 'must flush if limit okay but time came' do
-      batch = subject.new(BatchedIntervalWorker.name, 'batched_interval')
+    it "must flush if limit okay but time came", :aggregate_failures do
+      batch = batch_service.new(BatchedIntervalWorker.name, "batched_interval")
 
-      expect(batch.could_flush?).to be_falsy
-      BatchedIntervalWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_falsy
+      expect(batch).not_to be_could_flush
+      BatchedIntervalWorker.perform_async("bar")
+      expect(batch).not_to be_could_flush
       expect(batch.size).to eq(1)
 
       Timecop.travel(2.hours.since)
 
-      expect(batch.could_flush?).to be_truthy
+      expect(batch).to be_could_flush
    end
  end
 
-  context 'flushing' do
-    it 'must put wokrer to queue on flush' do
-      batch = subject.new(BatchedSizeWorker.name, 'batched_size')
+  context "when flushing" do
+    it "must put worker to queue on flush", :aggregate_failures do
+      batch = batch_service.new(BatchedSizeWorker.name, "batched_size")
 
-      expect(batch.could_flush?).to be_falsy
+      expect(batch).not_to be_could_flush
       10.times { |n| BatchedSizeWorker.perform_async("bar#{n}") }
       batch.flush
       expect(BatchedSizeWorker).to(
@@ -74,65 +76,93 @@ describe Sidekiq::Grouping::Batch do
     end
   end
 
-  context 'with similar args' do
-    context 'option batch_unique = true' do
-      it 'enqueues once' do
-        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+  context "with similar args" do
+    context "when option batch_unique = true" do
+      it "enqueues once" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
         expect(batch.size).to eq(1)
       end
 
-      it 'enqueues once each unique set of args' do
-        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-        6.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
-        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-        2.times { BatchedUniqueArgsWorker.perform_async('baz', 3) }
-        7.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+      it "enqueues once each unique set of args" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        6.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        3.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 3) }
+        7.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
        expect(batch.size).to eq(3)
      end
 
-      context 'flushing' do
-
-        it 'works' do
-          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
-          batch.flush
-          expect(batch.size).to eq(0)
-        end
-
-        it 'allows to enqueue again after flush' do
-          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
-          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
-          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
-          batch.flush
-          BatchedUniqueArgsWorker.perform_async('bar', 1)
-          BatchedUniqueArgsWorker.perform_async('baz', 1)
-          expect(batch.size).to eq(2)
-        end
+      it "flushes the workers" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        expect(batch.size).to eq(0)
      end
 
+      it "allows to enqueue again after flush" do
+        batch = batch_service.new(
+          BatchedUniqueArgsWorker.name,
+          "batched_unique_args"
+        )
+        2.times { BatchedUniqueArgsWorker.perform_async("bar", 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async("baz", 1) }
+        batch.flush
+        BatchedUniqueArgsWorker.perform_async("bar", 1)
+        BatchedUniqueArgsWorker.perform_async("baz", 1)
+        expect(batch.size).to eq(2)
+      end
    end
 
-    context 'batch_unique is not specified' do
-      it 'enqueues all' do
-        batch = subject.new(BatchedSizeWorker.name, 'batched_size')
-        3.times { BatchedSizeWorker.perform_async('bar', 1) }
+    context "when batch_unique is not specified" do
+      it "enqueues all" do
+        batch = batch_service.new(BatchedSizeWorker.name, "batched_size")
+        3.times { BatchedSizeWorker.perform_async("bar", 1) }
        expect(batch.size).to eq(3)
      end
    end
  end
 
+  context "when inline mode" do
+    it "must pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(BatchedSizeWorker)
+          .to receive(:perform).with([[1]])
+
+        BatchedSizeWorker.perform_async(1)
+      end
+    end
+
+    it "must not pass args to worker as array" do
+      Sidekiq::Testing.inline! do
+        expect_any_instance_of(RegularWorker).to receive(:perform).with(1)
+
+        RegularWorker.perform_async(1)
+      end
+    end
+  end
+
   private
-  def expect_batch(klass, queue)
-    expect(klass).to_not have_enqueued_sidekiq_job("bar")
-    batch = subject.new(klass.name, queue)
-    stats = subject.all
+
+  def expect_batch(klass, queue) # rubocop:disable Metrics/AbcSize
+    expect(klass).not_to have_enqueued_sidekiq_job("bar")
+    batch = batch_service.new(klass.name, queue)
+    stats = batch_service.all
     expect(batch.size).to eq(1)
     expect(stats.size).to eq(1)
     expect(stats.first.worker_class).to eq(klass.name)
     expect(stats.first.queue).to eq(queue)
-    expect(batch.pluck).to eq [['bar']]
+    expect(batch.pluck).to eq [["bar"]]
  end
 end
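The new inline-mode specs pin down the argument contract a grouped worker must code against: under batching, `perform` receives a single array of argument arrays (one entry per collected job), while a regular worker still gets its arguments as-is. In application terms (worker names come from the spec suite):

```ruby
Sidekiq::Testing.inline! do
  BatchedSizeWorker.perform_async(1) # its perform is called with [[1]]
  RegularWorker.perform_async(1)     # its perform is called with 1
end
```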
@@ -1,47 +1,44 @@
-require 'spec_helper'
+# frozen_string_literal: true
+
+require "spec_helper"
 
 describe Sidekiq::Grouping::Redis do
-  subject { Sidekiq::Grouping::Redis.new }
+  include Sidekiq::Grouping::RedisDispatcher
+
+  subject(:redis_service) { described_class.new }
 
   let(:queue_name) { "my_queue" }
   let(:key) { "batching:#{queue_name}" }
   let(:unique_key) { "batching:#{queue_name}:unique_messages" }
 
   describe "#push_msg" do
-    it "adds message to queue" do
-      subject.push_msg(queue_name, 'My message')
-      expect(redis { |c| c.llen key }).to eq 1
-      expect(redis { |c| c.lrange key, 0, 1 }).to eq ['My message']
-      expect(redis { |c| c.smembers unique_key}).to eq []
+    it "adds message to queue", :aggregate_failures do
+      redis_service.push_msg(queue_name, "My message")
+      expect(redis_call(:llen, key)).to eq 1
+      expect(redis_call(:lrange, key, 0, 1)).to eq ["My message"]
+      expect(redis_call(:smembers, unique_key)).to eq []
    end
 
     it "remembers unique message if specified" do
-      subject.push_msg(queue_name, 'My message', true)
-      expect(redis { |c| c.smembers unique_key}).to eq ['My message']
+      redis_service.push_msg(queue_name, "My message", remember_unique: true)
+      expect(redis_call(:smembers, unique_key)).to eq ["My message"]
    end
  end
 
   describe "#pluck" do
     it "removes messages from queue" do
-      subject.push_msg(queue_name, "Message 1")
-      subject.push_msg(queue_name, "Message 2")
-      subject.pluck(queue_name, 2)
-      expect(redis { |c| c.llen key }).to eq 0
+      redis_service.push_msg(queue_name, "Message 1")
+      redis_service.push_msg(queue_name, "Message 2")
+      redis_service.pluck(queue_name, 2)
+      expect(redis_call(:llen, key)).to eq 0
    end
 
-    it "forgets unique messages" do
-      subject.push_msg(queue_name, "Message 1", true)
-      subject.push_msg(queue_name, "Message 2", true)
-      expect(redis { |c| c.scard unique_key }).to eq 2
-      subject.pluck(queue_name, 2)
-      expect(redis { |c| c.smembers unique_key }).to eq []
+    it "forgets unique messages", :aggregate_failures do
+      redis_service.push_msg(queue_name, "Message 1", remember_unique: true)
+      redis_service.push_msg(queue_name, "Message 2", remember_unique: true)
+      expect(redis_call(:scard, unique_key)).to eq 2
+      redis_service.pluck(queue_name, 2)
+      expect(redis_call(:smembers, unique_key)).to eq []
    end
  end
-
-  private
-
-  def redis(&block)
-    Sidekiq.redis(&block)
-  end
-
 end