sidekiq-batching 0.0.3 → 0.0.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: b41e2102a596f97cf7dbc3e6a078c12d41085a66
-  data.tar.gz: 4d5563a28bf4b753ff984ee58172bbfce43938f6
+  metadata.gz: e6d0c4e5564c036527dcc48e5336efc2193bcbab
+  data.tar.gz: 228c68f5f5005a261c5cf1c90398575628a8a482
 SHA512:
-  metadata.gz: 2ea999d0b2ecb210c0e8bfe62fbce383156da92f9ec117c5a1b7382f56ee87f71b63938db59f64c9b6c4c6a869d55816320747ae286a01129ac5323de89107af
-  data.tar.gz: 363c5fa098b66647c6ef60da7277d589f3767bb8afc1bfef394ce0a7574a85fd68eebba8998b9f5240b1d437306eb25fe9419bc5961ae639001f9e8b3cdb882e
+  metadata.gz: e1ebf0255ad1c14f6fd7b6242b934a0ef6a92ed064256991da94fb988596baf265b9f9d4b32f197326ee2d2f0781c80efca049c3fc757fda55d3d1c5c8ee535d
+  data.tar.gz: 6208a1a7020cfc52799051efda54a92daed302bdda620925c6668aba03d225301fab07724cf7f89b8044d1e4dbc93d61f53408d70b337105a1edf85ba79261f7
data/README.md CHANGED
@@ -6,6 +6,8 @@ Useful for:
 * Grouping asynchronous API index calls into bulks for bulk updating/indexing.
 * Periodical batch updating of recently changing database counters.
 
+Sponsored by [Evil Martians](http://evilmartians.com)
+
 ## Usage
 
 Create a worker:
@@ -50,6 +52,10 @@ This jobs will be grouped into a single job which will be performed with the sin
 
 This will happen for every 30 jobs in a row or every 60 seconds.
 
+## Web UI
+
+![Web UI](web.png)
+
 Add this line to your `config/routes.rb` to activate web UI:
 
 ```ruby
@@ -2,7 +2,6 @@ module Sidekiq
   module Batching
     class Actor
       include Sidekiq::Batching::Logging
-      include Celluloid
 
       def initialize
         link_to_sidekiq_manager
@@ -44,4 +43,4 @@ module Sidekiq
       end
     end
   end
-end
+end
@@ -12,7 +12,13 @@ module Sidekiq
       attr_reader :name, :worker_class, :queue
 
       def add(msg)
-        @redis.push_msg(@name, msg.to_json)
+        msg = msg.to_json
+        @redis.push_msg(@name, msg, enqueue_similar_once?) if should_add? msg
+      end
+
+      def should_add? msg
+        return true unless enqueue_similar_once?
+        !@redis.enqueued?(@name, msg)
       end
 
       def size
@@ -94,6 +100,10 @@ module Sidekiq
         end
       end
 
+      def enqueue_similar_once?
+        worker_class_options['batch_unique'] == true
+      end
+
       def set_current_time_as_last
         @redis.set_last_execution_time(@name, Time.now)
       end
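
The batch reads the new `batch_unique` flag from the worker's `sidekiq_options` via `worker_class_options`. A minimal usage sketch of a worker opting in, assuming `sidekiq` and this gem are loaded; the worker and queue names below are invented, while the options mirror the `BatchedUniqueArgsWorker` added to the test workers further down:

```ruby
require 'sidekiq'

# Hypothetical worker illustrating the new option; `batch_unique: true`
# deduplicates identical argument sets within a pending batch.
class BulkCounterWorker
  include Sidekiq::Worker

  sidekiq_options queue: :bulk_counters, batch_size: 30, batch_unique: true

  # On flush the worker receives the accumulated argument sets,
  # e.g. [["post:1"], ["post:2"]] (see the have_enqueued_job expectations in the specs).
  def perform(grouped_args)
  end
end
```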
@@ -1,10 +1,29 @@
 module Sidekiq
   module Batching
     class Redis
-      def push_msg(name, msg)
+
+      PLUCK_SCRIPT = <<-SCRIPT
+        local pluck_values = redis.call('lrange', KEYS[1], 0, ARGV[1] - 1)
+        redis.call('ltrim', KEYS[1], ARGV[1], -1)
+        for k, v in pairs(pluck_values) do
+          redis.call('srem', KEYS[2], v)
+        end
+        return pluck_values
+      SCRIPT
+
+      def push_msg(name, msg, remember_unique = false)
+        redis do |conn|
+          conn.multi do
+            conn.sadd(ns('batches'), name)
+            conn.rpush(ns(name), msg)
+            conn.sadd(unique_messages_key(name), msg) if remember_unique
+          end
+        end
+      end
+
+      def enqueued?(name, msg)
         redis do |conn|
-          conn.sadd(ns('batches'), name)
-          conn.rpush(ns(name), msg)
+          conn.sismember(unique_messages_key(name), msg)
         end
       end
 
@@ -17,14 +36,9 @@ module Sidekiq
       end
 
       def pluck(name, limit)
-        redis do |conn|
-          result = conn.pipelined do
-            conn.lrange(ns(name), 0, limit - 1)
-            conn.ltrim(ns(name), limit, -1)
-          end
-
-          result.first
-        end
+        keys = [ns(name), unique_messages_key(name)]
+        args = [limit]
+        redis { |conn| conn.eval PLUCK_SCRIPT, keys, args }
       end
 
       def get_last_execution_time(name)
@@ -55,6 +69,11 @@ module Sidekiq
       end
 
       private
+
+      def unique_messages_key name
+        ns("#{name}:unique_messages")
+      end
+
       def ns(key = nil)
         "batching:#{key}"
       end
@@ -64,4 +83,4 @@ module Sidekiq
       end
     end
   end
-end
+end
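
The pipelined `lrange`/`ltrim` pair in `pluck` is replaced by a single Lua `EVAL`, so popping a chunk off the list and clearing those entries from the uniqueness set happens atomically. A rough usage sketch of the reworked API, following the calls exercised in the new `redis_spec.rb` below; the queue name and payloads are invented, and Sidekiq is assumed to be configured with a Redis connection:

```ruby
store = Sidekiq::Batching::Redis.new

# The third argument asks push_msg to also record the message in the
# per-queue uniqueness set (batching:my_queue:unique_messages).
store.push_msg('my_queue', { id: 1 }.to_json, true)
store.push_msg('my_queue', { id: 2 }.to_json, true)

# Atomically takes up to 30 messages and removes them from the uniqueness
# set, so identical payloads can be enqueued again after a flush.
messages = store.pluck('my_queue', 30)
```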
@@ -1,5 +1,5 @@
 module Sidekiq
   module Batching
-    VERSION = "0.0.3"
+    VERSION = '0.0.4'
   end
 end
@@ -29,34 +29,34 @@ describe Sidekiq::Batching::Batch do
     it 'must flush if limit exceeds for limit worker' do
       batch = subject.new(BatchedSizeWorker.name, 'batched_size')
 
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       BatchedSizeWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       4.times { BatchedSizeWorker.perform_async('bar') }
-      expect(batch.could_flush?).to be_true
+      expect(batch.could_flush?).to be_truthy
     end
 
     it 'must flush if limit exceeds for both worker' do
       batch = subject.new(BatchedBothWorker.name, 'batched_both')
 
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       BatchedBothWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       4.times { BatchedBothWorker.perform_async('bar') }
-      expect(batch.could_flush?).to be_true
+      expect(batch.could_flush?).to be_truthy
     end
 
     it 'must flush if limit okay but time came' do
       batch = subject.new(BatchedIntervalWorker.name, 'batched_interval')
 
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       BatchedIntervalWorker.perform_async('bar')
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       expect(batch.size).to eq(1)
 
       Timecop.travel(2.hours.since)
 
-      expect(batch.could_flush?).to be_true
+      expect(batch.could_flush?).to be_truthy
     end
   end
 
@@ -64,7 +64,7 @@ describe Sidekiq::Batching::Batch do
     it 'must put wokrer to queue on flush' do
       batch = subject.new(BatchedSizeWorker.name, 'batched_size')
 
-      expect(batch.could_flush?).to be_false
+      expect(batch.could_flush?).to be_falsy
       10.times { BatchedSizeWorker.perform_async('bar') }
       batch.flush
       expect(BatchedSizeWorker).to have_enqueued_job([["bar"], ["bar"], ["bar"]])
@@ -72,6 +72,56 @@ describe Sidekiq::Batching::Batch do
     end
   end
 
+  context 'with similar args' do
+    context 'option batch_unique = true' do
+      it 'enqueues once' do
+        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
+        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+        expect(batch.size).to eq(1)
+      end
+
+      it 'enqueues once each unique set of args' do
+        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
+        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+        6.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
+        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+        2.times { BatchedUniqueArgsWorker.perform_async('baz', 3) }
+        7.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+        expect(batch.size).to eq(3)
+      end
+
+      context 'flushing' do
+
+        it 'works' do
+          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
+          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
+          batch.flush
+          expect(batch.size).to eq(0)
+        end
+
+        it 'allows to enqueue again after flush' do
+          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
+          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
+          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
+          batch.flush
+          BatchedUniqueArgsWorker.perform_async('bar', 1)
+          BatchedUniqueArgsWorker.perform_async('baz', 1)
+          expect(batch.size).to eq(2)
+        end
+      end
+
+    end
+
+    context 'batch_unique is not specified' do
+      it 'enqueues all' do
+        batch = subject.new(BatchedSizeWorker.name, 'batched_size')
+        3.times { BatchedSizeWorker.perform_async('bar', 1) }
+        expect(batch.size).to eq(3)
+      end
+    end
+  end
+
   private
   def expect_batch(klass, queue)
     expect(klass).to_not have_enqueued_job('bar')
@@ -83,4 +133,4 @@ describe Sidekiq::Batching::Batch do
     expect(stats.first.queue).to eq(queue)
     expect(batch.pluck).to eq [['bar']]
   end
-end
+end
@@ -0,0 +1,47 @@
+require 'spec_helper'
+
+describe Sidekiq::Batching::Redis do
+  subject { Sidekiq::Batching::Redis.new }
+
+  let(:queue_name) { "my_queue" }
+  let(:key) { "batching:#{queue_name}" }
+  let(:unique_key) { "batching:#{queue_name}:unique_messages" }
+
+  describe "#push_msg" do
+    it "adds message to queue" do
+      subject.push_msg(queue_name, 'My message')
+      expect(redis { |c| c.llen key }).to eq 1
+      expect(redis { |c| c.lrange key, 0, 1 }).to eq ['My message']
+      expect(redis { |c| c.smembers unique_key}).to eq []
+    end
+
+    it "remembers unique message if specified" do
+      subject.push_msg(queue_name, 'My message', true)
+      expect(redis { |c| c.smembers unique_key}).to eq ['My message']
+    end
+  end
+
+  describe "#pluck" do
+    it "removes messages from queue" do
+      subject.push_msg(queue_name, "Message 1")
+      subject.push_msg(queue_name, "Message 2")
+      subject.pluck(queue_name, 2)
+      expect(redis { |c| c.llen key }).to eq 0
+    end
+
+    it "forgets unique messages" do
+      subject.push_msg(queue_name, "Message 1", true)
+      subject.push_msg(queue_name, "Message 2", true)
+      expect(redis { |c| c.scard unique_key }).to eq 2
+      subject.pluck(queue_name, 2)
+      expect(redis { |c| c.smembers unique_key }).to eq []
+    end
+  end
+
+  private
+
+  def redis(&block)
+    Sidekiq.redis(&block)
+  end
+
+end
@@ -41,4 +41,4 @@ RSpec.configure do |config|
   end
 end
 
-$: << File.join(File.dirname(__FILE__), '..', 'lib')
+$: << File.join(File.dirname(__FILE__), '..', 'lib')
@@ -30,4 +30,13 @@ class BatchedBothWorker
 
   def perform(foo)
   end
-end
+end
+
+class BatchedUniqueArgsWorker
+  include Sidekiq::Worker
+
+  sidekiq_options queue: :batched_unique_args, batch_size: 3, batch_unique: true
+
+  def perform(foo)
+  end
+end
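
Together with the spec changes above, the new test worker shows the intended behaviour of `batch_unique`. A short sketch based on the `batch_spec.rb` examples (Sidekiq and the gem's test setup are assumed to be loaded):

```ruby
# Repeated calls with identical arguments collapse into one pending entry,
# while distinct argument sets are kept (per the 'with similar args' specs).
3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
2.times { BatchedUniqueArgsWorker.perform_async('baz', 3) }

batch = Sidekiq::Batching::Batch.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
batch.size # => 2
```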
data/web.png ADDED
Binary file
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sidekiq-batching
 version: !ruby/object:Gem::Version
-  version: 0.0.3
+  version: 0.0.4
 platform: ruby
 authors:
 - Victor Sokolov
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-04-14 00:00:00.000000000 Z
+date: 2015-01-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -147,8 +147,10 @@ files:
 - lib/sidekiq/batching/web.rb
 - sidekiq-batching.gemspec
 - spec/modules/batch_spec.rb
+- spec/modules/redis_spec.rb
 - spec/spec_helper.rb
 - spec/support/test_workers.rb
+- web.png
 homepage: http://github.com/gzigzigzeo/sidekiq-batching
 licenses:
 - MIT
@@ -169,11 +171,12 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.2.2
+rubygems_version: 2.4.3
 signing_key:
 specification_version: 4
 summary: Allows identical sidekiq jobs to be processed with a single background call
 test_files:
 - spec/modules/batch_spec.rb
+- spec/modules/redis_spec.rb
 - spec/spec_helper.rb
 - spec/support/test_workers.rb