sidekiq-grouping 0.0.6

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: ae9e71d0d79bd9af61ed8c49fca7122219849a37
4
+ data.tar.gz: c5469b43d3780b36fbe343eca132c86d3d200c34
5
+ SHA512:
6
+ metadata.gz: db9e641eb35867c42e2b17eb073b921c42ad992b4846452aff33afb927ea76df2811f7262f89a578c45dbd5d48a0f0e8a0a30be5ea531a8169f1cd9ea7a2ecb8
7
+ data.tar.gz: 1fde67a0b91497362100c2608582aaa52e36478a960ec33d9adc99f6552dbe4d2966b713ffa8ef87a3c77391f968894c34c9694801a717d6e68b28e03f148ba7
@@ -0,0 +1,17 @@
1
+ *.gem
2
+ *.rbc
3
+ .bundle
4
+ .config
5
+ .yardoc
6
+ Gemfile.lock
7
+ InstalledFiles
8
+ _yardoc
9
+ coverage
10
+ doc/
11
+ lib/bundler/man
12
+ pkg
13
+ rdoc
14
+ spec/reports
15
+ test/tmp
16
+ test/version_tmp
17
+ tmp
@@ -0,0 +1,8 @@
1
+ language: ruby
2
+ rvm:
3
+ - 2.2
4
+ cache: bundler
5
+ sudo: false
6
+
7
+ services:
8
+ - redis-server
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source 'https://rubygems.org'
2
+
3
+ # Specify your gem's dependencies in sidekiq-grouping.gemspec
4
+ gemspec
@@ -0,0 +1,22 @@
1
+ Copyright (c) 2014 Victor Sokolov
2
+
3
+ MIT License
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining
6
+ a copy of this software and associated documentation files (the
7
+ "Software"), to deal in the Software without restriction, including
8
+ without limitation the rights to use, copy, modify, merge, publish,
9
+ distribute, sublicense, and/or sell copies of the Software, and to
10
+ permit persons to whom the Software is furnished to do so, subject to
11
+ the following conditions:
12
+
13
+ The above copyright notice and this permission notice shall be
14
+ included in all copies or substantial portions of the Software.
15
+
16
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,100 @@
1
+ # Sidekiq::Grouping
2
+
3
+ Lets you batch similar tasks to run them all as a single task.
4
+
5
+ Allows identical sidekiq jobs to be processed with a single background call.
6
+
7
+ Useful for:
8
+ * Grouping asynchronous API index calls into bulks for bulk updating/indexing.
9
+ * Periodic batch updates of frequently changing database counters.
10
+
11
+ Sponsored by [Evil Martians](http://evilmartians.com)
12
+
13
+ ## Usage
14
+
15
+ Create a worker:
16
+
17
+ ```ruby
18
+ class ElasticBulkIndexWorker
19
+ include Sidekiq::Worker
20
+
21
+ sidekiq_options(
22
+ queue: :batched_by_size,
23
+ batch_size: 30, # Jobs will be combined to groups of 30 items
24
+ batch_flush_interval: 60, # Combined jobs will be executed at least every 60 seconds
25
+ retry: 5
26
+ )
27
+
28
+ def perform(group)
29
+ client = Elasticsearch::Client.new
30
+ client.bulk(body: group.flatten)
31
+ end
32
+ end
33
+ ```
34
+
35
+ Perform some jobs:
36
+
37
+ ```ruby
38
+ ElasticBulkIndexWorker.perform_async({ delete: { _index: 'test', _id: 5, _type: 'user' } })
39
+ ElasticBulkIndexWorker.perform_async({ delete: { _index: 'test', _id: 6, _type: 'user' } })
40
+ ElasticBulkIndexWorker.perform_async({ delete: { _index: 'test', _id: 7, _type: 'user' } })
41
+ ...
42
+ ```
43
+
44
+ These jobs will be grouped into a single job, which will be performed with a single argument containing:
45
+
46
+ ```ruby
47
+ [
48
+ [{ delete: { _index: 'test', _id: 5, _type: 'user' } }],
49
+ [{ delete: { _index: 'test', _id: 6, _type: 'user' } }],
50
+ [{ delete: { _index: 'test', _id: 7, _type: 'user' } }]
51
+ ...
52
+ ]
53
+ ```
54
+
55
+ This will happen for every 30 jobs in a row or every 60 seconds.
56
+
57
+ ## Web UI
58
+
59
+ ![Web UI](web.png)
60
+
61
+ Add this line to your `config/routes.rb` to activate web UI:
62
+
63
+ ```ruby
64
+ require "sidekiq/grouping/web"
65
+ ```
66
+
67
+ ## Configuration
68
+
69
+ ```ruby
70
+ Sidekiq::Grouping::Config.poll_interval = 5 # Amount of time between polling batches
71
+ Sidekiq::Grouping::Config.max_batch_size = 5000 # Maximum batch size allowed
72
+ Sidekiq::Grouping::Config.lock_ttl = 1 # Timeout of lock set when batched job enqueues
73
+ ```
74
+
75
+ ## TODO
76
+
77
+ 1. Add support for the redis_pool option.
78
+ 2. Make it work together with sidekiq-unique-jobs.
79
+
80
+ ## Installation
81
+
82
+ Add this line to your application's Gemfile:
83
+
84
+ gem 'sidekiq-grouping'
85
+
86
+ And then execute:
87
+
88
+ $ bundle
89
+
90
+ Or install it yourself as:
91
+
92
+ $ gem install sidekiq-grouping
93
+
94
+ ## Contributing
95
+
96
+ 1. Fork it ( http://github.com/gzigzigzeo/sidekiq-grouping/fork )
97
+ 2. Create your feature branch (`git checkout -b my-new-feature`)
98
+ 3. Commit your changes (`git commit -am 'Add some feature'`)
99
+ 4. Push to the branch (`git push origin my-new-feature`)
100
+ 5. Create new Pull Request
@@ -0,0 +1,4 @@
1
+ require "bundler/gem_tasks"
2
+ require 'rspec/core/rake_task'
3
+ RSpec::Core::RakeTask.new(:spec)
4
+ task default: :spec
@@ -0,0 +1,40 @@
1
require 'active_support/core_ext/string'
require 'active_support/configurable'
require 'active_support/core_ext/numeric/time'

require 'sidekiq/grouping/config'
require 'sidekiq/grouping/redis'
require 'sidekiq/grouping/batch'
require 'sidekiq/grouping/middleware'
require 'sidekiq/grouping/logging'
require 'sidekiq/grouping/actor'
require 'sidekiq/grouping/supervisor'
require 'sidekiq/grouping/version'

module Sidekiq
  module Grouping
    class << self
      # Allows host applications to inject their own logger.
      attr_writer :logger

      # Logger used by the extension; falls back to Sidekiq's logger.
      def logger
        @logger ||= Sidekiq.logger
      end
    end
  end
end

# Register the batching middleware on the client side so that
# perform_async calls get intercepted and buffered into batches.
Sidekiq.configure_client do |config|
  config.client_middleware do |chain|
    chain.add Sidekiq::Grouping::Middleware
  end
end

# The server process also acts as a client when jobs enqueue other
# jobs, so the same client middleware is registered there too.
Sidekiq.configure_server do |config|
  config.client_middleware do |chain|
    chain.add Sidekiq::Grouping::Middleware
  end
end

# The polling/flushing supervisor only makes sense inside the
# Sidekiq server process.
if Sidekiq.server?
  Sidekiq::Grouping::Supervisor.run!
end
@@ -0,0 +1,47 @@
1
+ module Sidekiq
2
+ module Grouping
3
+ class Actor
4
+ include Sidekiq::Grouping::Logging
5
+ include ::Celluloid
6
+
7
+ def initialize
8
+ link_to_sidekiq_manager
9
+ end
10
+
11
+ private
12
+ def start_polling
13
+ interval = Sidekiq::Grouping::Config.poll_interval
14
+ info "Start polling of queue batches every #{interval} seconds"
15
+ every(interval) { flush_batches }
16
+ end
17
+
18
+ def flush_batches
19
+ batches = []
20
+
21
+ Sidekiq::Grouping::Batch.all.map do |batch|
22
+ if batch.could_flush?
23
+ batches << batch
24
+ end
25
+ end
26
+
27
+ flush(batches)
28
+ end
29
+
30
+ def link_to_sidekiq_manager
31
+ Sidekiq::CLI.instance.launcher.manager.link(current_actor)
32
+ start_polling
33
+ rescue NoMethodError
34
+ debug "Can't link #{self.class.name}. Sidekiq::Manager not running. Retrying in 5 seconds ..."
35
+ after(5) { link_to_sidekiq_manager }
36
+ end
37
+
38
+ def flush(batches)
39
+ if batches.any?
40
+ names = batches.map { |batch| "#{batch.worker_class} in #{batch.queue}" }
41
+ info "Trying to flush batched queues: #{names.join(',')}"
42
+ batches.each { |batch| batch.flush }
43
+ end
44
+ end
45
+ end
46
+ end
47
+ end
@@ -0,0 +1,128 @@
1
module Sidekiq
  module Grouping
    # One accumulating batch of jobs for a given worker class and queue.
    # Messages are buffered in Redis (as JSON strings) and flushed later
    # as a single combined job.
    class Batch

      # worker_class - worker class name (String, e.g. "MyWorker")
      # queue        - queue name
      # redis_pool   - accepted for interface compatibility but currently
      #                unused (see the README TODO about redis_pool support)
      def initialize(worker_class, queue, redis_pool = nil)
        @worker_class = worker_class
        @queue = queue
        @name = "#{worker_class.underscore}:#{queue}"
        @redis = Sidekiq::Grouping::Redis.new
      end

      attr_reader :name, :worker_class, :queue

      # Appends a job's arguments to the batch, serialized as JSON.
      # When batch_unique is enabled, identical pending messages are
      # silently dropped.
      def add(msg)
        msg = msg.to_json
        @redis.push_msg(@name, msg, enqueue_similar_once?) if should_add? msg
      end

      # True unless uniqueness is on and this exact message is already queued.
      def should_add? msg
        return true unless enqueue_similar_once?
        !@redis.enqueued?(@name, msg)
      end

      # Number of messages currently buffered in Redis.
      def size
        @redis.batch_size(@name)
      end

      # How many messages a single flush may pluck: the worker's
      # batch_size option, or the global maximum as a fallback.
      def chunk_size
        worker_class_options['batch_size'] ||
          Sidekiq::Grouping::Config.max_batch_size
      end

      # Atomically removes up to chunk_size messages and returns them
      # deserialized. Returns nil when another process holds the flush lock.
      def pluck
        if @redis.lock(@name)
          @redis.pluck(@name, chunk_size).map { |value| JSON.parse(value) }
        end
      end

      # Enqueues one combined job carrying all plucked messages.
      # The leading `true` in args marks the job so the client middleware
      # passes it through instead of re-batching it.
      def flush
        chunk = pluck
        if chunk
          set_current_time_as_last
          Sidekiq::Client.push(
            'class' => @worker_class,
            'queue' => @queue,
            'args' => [true, chunk]
          )
        end
      end

      def worker_class_constant
        @worker_class.constantize
      end

      # sidekiq_options of the worker class; {} when the class is unknown.
      def worker_class_options
        worker_class_constant.get_sidekiq_options
      rescue NameError
        {}
      end

      def could_flush?
        could_flush_on_overflow? || could_flush_on_time?
      end

      # Wall-clock time of the last flush, or nil if never flushed.
      def last_execution_time
        last_time = @redis.get_last_execution_time(@name)
        Time.parse(last_time) if last_time
      end

      # Projected next flush time; nil when no interval is configured or
      # no flush has happened yet.
      def next_execution_time
        if interval = worker_class_options['batch_flush_interval']
          last_time = last_execution_time
          last_time + interval.seconds if last_time
        end
      end

      def delete
        @redis.delete(@name)
      end

      private
      def could_flush_on_overflow?
        worker_class_options['batch_size'] &&
          size >= worker_class_options['batch_size']
      end

      # NOTE: on the first check (no last execution time recorded) this
      # method records "now" as a side effect and reports false, so the
      # flush interval starts counting from the first poll.
      def could_flush_on_time?
        return false if size.zero?

        last_time = last_execution_time
        next_time = next_execution_time

        if last_time.blank?
          set_current_time_as_last
          false
        else
          if next_time
            next_time < Time.now
          end
        end
      end

      def enqueue_similar_once?
        worker_class_options['batch_unique'] == true
      end

      def set_current_time_as_last
        @redis.set_last_execution_time(@name, Time.now)
      end

      class << self
        # Enumerates every batch currently recorded in Redis.
        def all
          redis = Sidekiq::Grouping::Redis.new

          redis.batches.map do |name|
            new(*extract_worker_klass_and_queue(name))
          end
        end

        # Splits a batch name ("worker_class:queue") back into
        # [camelized class name, queue].
        def extract_worker_klass_and_queue(name)
          klass, queue = name.split(':')
          [klass.classify, queue]
        end
      end

    end
  end
end
@@ -0,0 +1,19 @@
1
module Sidekiq
  module Grouping
    # Global runtime settings for the grouping extension.
    module Config
      include ActiveSupport::Configurable

      # Seconds between checks for flushable batches.
      config_accessor :poll_interval

      # Hard cap on the number of jobs combined into a single flush.
      config_accessor :max_batch_size

      # TTL (seconds) of the lock taken while a batch is flushed.
      config_accessor :lock_ttl

      self.config.poll_interval = 3
      self.config.max_batch_size = 500
      self.config.lock_ttl = 1
    end
  end
end
@@ -0,0 +1,13 @@
1
module Sidekiq
  module Grouping
    # Thin logging mixin: provides fatal/error/warn/info/debug helpers
    # that prefix every message with "[Sidekiq::Grouping]" and forward
    # to Sidekiq::Grouping.logger.
    module Logging
      %i(fatal error warn info debug).each do |severity|
        define_method(severity) do |msg|
          Sidekiq::Grouping.logger.public_send(severity, "[Sidekiq::Grouping] #{msg}")
        end
      end
    end
  end
end
@@ -0,0 +1,34 @@
1
module Sidekiq
  module Grouping
    # Sidekiq client middleware. Intercepts pushes for workers configured
    # with batch_size / batch_flush_interval and buffers their arguments
    # into a batch instead of enqueuing. Combined jobs emitted by
    # Batch#flush carry a leading `true` argument and pass through
    # (after the marker is stripped).
    class Middleware
      # worker_class - worker class or its name (String)
      # msg          - the job payload hash ('args' etc.)
      # queue        - destination queue name
      # redis_pool   - forwarded to Batch (currently unused there)
      #
      # Returns nil when the job was absorbed into a batch (which stops
      # the middleware chain), otherwise yields to continue the push.
      def call(worker_class, msg, queue, redis_pool = nil)
        worker_class = worker_class.classify.constantize if worker_class.is_a?(String)
        options = worker_class.get_sidekiq_options

        # A worker is "batched" when either grouping option is present.
        batched =
          options.key?('batch_size') ||
          options.key?('batch_flush_interval')

        # Combined jobs from Batch#flush are marked with a leading `true`.
        # (is_a?(Array) already excludes nil, and Array#first never raises,
        # so the original's extra nil-check and try(:first) were redundant.)
        passthrough = msg['args'].is_a?(Array) && msg['args'].first == true

        if batched && !passthrough
          add_to_batch(worker_class, queue, msg, redis_pool)
        else
          # Strip the passthrough marker before the job is enqueued.
          msg['args'].shift if batched && passthrough
          yield
        end
      end

      private

      # Buffers the job's arguments; returning nil halts the chain so the
      # job is not enqueued individually.
      def add_to_batch(worker_class, queue, msg, redis_pool = nil)
        Sidekiq::Grouping::Batch.new(worker_class.name, queue, redis_pool).add(msg['args'])
        nil
      end
    end
  end
end
@@ -0,0 +1,86 @@
1
module Sidekiq
  module Grouping
    # All Redis persistence for batches. Keys live under the "batching:"
    # namespace:
    #   batching:batches                  - set of known batch names
    #   batching:<name>                   - list of pending JSON messages
    #   batching:<name>:unique_messages   - set used for batch_unique
    #   batching:last_execution_time:<n>  - last flush timestamp
    #   batching:lock:<name>              - short-lived flush lock
    class Redis

      # Atomically pops up to ARGV[1] messages from the batch list
      # (KEYS[1]) and removes each from the uniqueness set (KEYS[2]),
      # so a concurrent push cannot interleave between pop and cleanup.
      PLUCK_SCRIPT = <<-SCRIPT
        local pluck_values = redis.call('lrange', KEYS[1], 0, ARGV[1] - 1)
        redis.call('ltrim', KEYS[1], ARGV[1], -1)
        for k, v in pairs(pluck_values) do
          redis.call('srem', KEYS[2], v)
        end
        return pluck_values
      SCRIPT

      # Registers the batch name and appends the message in one MULTI,
      # optionally remembering it in the uniqueness set.
      def push_msg(name, msg, remember_unique = false)
        redis do |conn|
          conn.multi do
            conn.sadd(ns('batches'), name)
            conn.rpush(ns(name), msg)
            conn.sadd(unique_messages_key(name), msg) if remember_unique
          end
        end
      end

      # True if this exact message is already pending (batch_unique mode).
      def enqueued?(name, msg)
        redis do |conn|
          conn.sismember(unique_messages_key(name), msg)
        end
      end

      def batch_size(name)
        redis { |conn| conn.llen(ns(name)) }
      end

      # Names of all batches that have ever been pushed to.
      def batches
        redis { |conn| conn.smembers(ns('batches')) }
      end

      # Atomically removes and returns up to `limit` raw JSON messages.
      def pluck(name, limit)
        keys = [ns(name), unique_messages_key(name)]
        args = [limit]
        redis { |conn| conn.eval PLUCK_SCRIPT, keys, args }
      end

      def get_last_execution_time(name)
        redis { |conn| conn.get(ns("last_execution_time:#{name}")) }
      end

      # Stores the time as JSON; Time.parse on the reader side tolerates
      # the surrounding quotes.
      def set_last_execution_time(name, time)
        redis { |conn| conn.set(ns("last_execution_time:#{name}"), time.to_json) }
      end

      # Best-effort, non-blocking lock: returns truthy only for the caller
      # that set the key; the TTL guards against crashed flushers.
      def lock(name)
        redis do |conn|
          id = ns("lock:#{name}")
          conn.setnx(id, true).tap do |obtained|
            if obtained
              conn.expire(id, Sidekiq::Grouping::Config.lock_ttl)
            end
          end
        end
      end

      # Removes every trace of the batch (messages, timestamp, registry).
      def delete(name)
        redis do |conn|
          conn.del(ns("last_execution_time:#{name}"))
          conn.del(ns(name))
          conn.srem(ns('batches'), name)
        end
      end

      private

      def unique_messages_key name
        ns("#{name}:unique_messages")
      end

      # Prefixes a key with the extension's namespace.
      def ns(key = nil)
        "batching:#{key}"
      end

      def redis(&block)
        Sidekiq.redis(&block)
      end
    end
  end
end
@@ -0,0 +1,14 @@
1
module Sidekiq
  module Grouping
    # Boots the batch-polling actor under Celluloid supervision so it is
    # restarted automatically if it crashes.
    module Supervisor
      extend Sidekiq::Grouping::Logging

      # Launches and supervises the polling actor.
      def self.run!
        info 'Sidekiq::Grouping starts supervision'
        Sidekiq::Grouping::Actor.supervise_as(:sidekiq_grouping)
      end
    end
  end
end
@@ -0,0 +1,5 @@
1
module Sidekiq
  module Grouping
    # Current release of the sidekiq-grouping gem.
    VERSION = '0.0.6'
  end
end
@@ -0,0 +1,41 @@
1
<header class="row">
  <div class="col-sm-5">
    <h3>Grouped jobs</h3>
  </div>
</header>

<div class="container">
  <div class="row">
    <div class="col-sm-12">
      <%# @batches is assigned by the GET /grouping route. The original
          condition was hard-coded `true`, leaving the empty state dead. %>
      <% if @batches.any? %>
        <table class="table table-striped table-bordered table-white" style="width: 100%; margin: 0; table-layout:fixed;">
          <thead>
            <th style="width: 50%">Worker</th>
            <th style="width: 30%">Queue</th>
            <th style="width: 10%">Count</th>
            <th style="width: 30%">Last execution time</th>
            <th style="width: 30%">Next enqueue</th>
            <th style="width: 10%">Actions</th>
          </thead>
          <% @batches.each do |batch| %>
            <tr>
              <td><%= batch.worker_class %></td>
              <td><%= batch.queue %></td>
              <td><%= batch.size %></td>
              <td><%= batch.last_execution_time || "&ndash;" %></td>
              <td><%= batch.next_execution_time || "&ndash;" %></td>
              <td>
                <form action="<%= "#{root_path}grouping/#{batch.name}/delete" %>" method="post">
                  <input class="btn btn-danger btn-xs" type="submit" name="delete" value="Delete" data-confirm="Are you sure you want to delete this batch?" />
                </form>
              </td>
            </tr>
          <% end %>
        </table>
      <% else %>
        <%# Message corrected from "recurring jobs" (sidetiq copy-paste). %>
        <div class="alert alert-success">No grouped jobs found.</div>
      <% end %>
    </div>
  </div>
</div>
@@ -0,0 +1,28 @@
1
require 'sidekiq/web'

module Sidekiq
  module Grouping
    # Sinatra extension adding a "Grouping" tab to the Sidekiq web UI.
    # NOTE: the original declared this under `Sidetiq::Grouping::Web` —
    # a copy-paste of the sidetiq gem's namespace; corrected to Sidekiq.
    module Web
      VIEWS = File.expand_path('views', File.dirname(__FILE__))

      def self.registered(app)
        # Lists all currently accumulating batches.
        app.get "/grouping" do
          @batches = Sidekiq::Grouping::Batch.all
          erb File.read(File.join(VIEWS, 'index.erb')), locals: { view_path: VIEWS }
        end

        # Drops a batch together with all of its pending messages.
        app.post "/grouping/:name/delete" do
          worker_class, queue = Sidekiq::Grouping::Batch.extract_worker_klass_and_queue(params['name'])
          Sidekiq::Grouping::Batch.new(worker_class, queue).delete
          # root_path ends with "/" (the index view builds URLs as
          # "#{root_path}grouping/..."), so no extra slash here.
          redirect "#{root_path}grouping"
        end
      end
    end
  end
end

Sidekiq::Web.register(Sidekiq::Grouping::Web)
Sidekiq::Web.tabs["Grouping"] = "grouping"
@@ -0,0 +1,30 @@
1
# coding: utf-8
# Make lib/ requirable so the version constant can be loaded below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sidekiq/grouping/version'

Gem::Specification.new do |spec|
  spec.name          = "sidekiq-grouping"
  spec.version       = Sidekiq::Grouping::VERSION
  spec.authors       = ["Victor Sokolov"]
  spec.email         = ["gzigzigzeo@gmail.com"]
  spec.summary       = %q{Allows identical sidekiq jobs to be processed with a single background call}
  spec.homepage      = "http://github.com/gzigzigzeo/sidekiq-grouping"
  spec.license       = "MIT"

  # Package everything tracked by git; executables from bin/, tests
  # from the usual spec/test directories.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "simplecov"
  spec.add_development_dependency "rspec-sidekiq"
  spec.add_development_dependency "activesupport"
  spec.add_development_dependency "timecop"

  # Runtime dependencies: Sidekiq itself and Celluloid for the
  # polling actor.
  spec.add_dependency "sidekiq"
  spec.add_dependency "celluloid"
end
@@ -0,0 +1,136 @@
1
require 'spec_helper'

describe Sidekiq::Grouping::Batch do
  subject { Sidekiq::Grouping::Batch }

  context 'adding' do
    it 'must enqueue unbatched worker' do
      RegularWorker.perform_async('bar')
      expect(RegularWorker).to have_enqueued_job('bar')
    end

    # The next three examples had identical descriptions in the original,
    # which made failure output ambiguous; each is now named after the
    # batching option it exercises.
    it 'must not enqueue worker batched by size' do
      BatchedSizeWorker.perform_async('bar')
      expect_batch(BatchedSizeWorker, 'batched_size')
    end

    it 'must not enqueue worker batched by interval' do
      BatchedIntervalWorker.perform_async('bar')
      expect_batch(BatchedIntervalWorker, 'batched_interval')
    end

    it 'must not enqueue worker batched by both size and interval' do
      BatchedBothWorker.perform_async('bar')
      expect_batch(BatchedBothWorker, 'batched_both')
    end
  end

  context 'checking if should flush' do
    it 'must flush if limit exceeds for limit worker' do
      batch = subject.new(BatchedSizeWorker.name, 'batched_size')

      expect(batch.could_flush?).to be_falsy
      BatchedSizeWorker.perform_async('bar')
      expect(batch.could_flush?).to be_falsy
      4.times { BatchedSizeWorker.perform_async('bar') }
      expect(batch.could_flush?).to be_truthy
    end

    it 'must flush if limit exceeds for both worker' do
      batch = subject.new(BatchedBothWorker.name, 'batched_both')

      expect(batch.could_flush?).to be_falsy
      BatchedBothWorker.perform_async('bar')
      expect(batch.could_flush?).to be_falsy
      4.times { BatchedBothWorker.perform_async('bar') }
      expect(batch.could_flush?).to be_truthy
    end

    it 'must flush if limit okay but time came' do
      batch = subject.new(BatchedIntervalWorker.name, 'batched_interval')

      expect(batch.could_flush?).to be_falsy
      BatchedIntervalWorker.perform_async('bar')
      expect(batch.could_flush?).to be_falsy
      expect(batch.size).to eq(1)

      Timecop.travel(2.hours.since)

      expect(batch.could_flush?).to be_truthy
    end
  end

  context 'flushing' do
    # Fixed "wokrer" typo in the description.
    it 'must put worker to queue on flush' do
      batch = subject.new(BatchedSizeWorker.name, 'batched_size')

      expect(batch.could_flush?).to be_falsy
      10.times { BatchedSizeWorker.perform_async('bar') }
      batch.flush
      expect(BatchedSizeWorker).to have_enqueued_job([["bar"], ["bar"], ["bar"]])
      expect(batch.size).to eq(7)
    end
  end

  context 'with similar args' do
    context 'option batch_unique = true' do
      it 'enqueues once' do
        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
        expect(batch.size).to eq(1)
      end

      it 'enqueues once each unique set of args' do
        batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
        6.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
        3.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
        2.times { BatchedUniqueArgsWorker.perform_async('baz', 3) }
        7.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
        expect(batch.size).to eq(3)
      end

      context 'flushing' do

        it 'works' do
          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
          batch.flush
          expect(batch.size).to eq(0)
        end

        it 'allows to enqueue again after flush' do
          batch = subject.new(BatchedUniqueArgsWorker.name, 'batched_unique_args')
          2.times { BatchedUniqueArgsWorker.perform_async('bar', 1) }
          2.times { BatchedUniqueArgsWorker.perform_async('baz', 1) }
          batch.flush
          BatchedUniqueArgsWorker.perform_async('bar', 1)
          BatchedUniqueArgsWorker.perform_async('baz', 1)
          expect(batch.size).to eq(2)
        end
      end

    end

    context 'batch_unique is not specified' do
      it 'enqueues all' do
        batch = subject.new(BatchedSizeWorker.name, 'batched_size')
        3.times { BatchedSizeWorker.perform_async('bar', 1) }
        expect(batch.size).to eq(3)
      end
    end
  end

  private
  # Asserts that a perform_async call was absorbed into a batch instead
  # of being enqueued individually.
  def expect_batch(klass, queue)
    expect(klass).to_not have_enqueued_job('bar')
    batch = subject.new(klass.name, queue)
    stats = subject.all
    expect(batch.size).to eq(1)
    expect(stats.size).to eq(1)
    expect(stats.first.worker_class).to eq(klass.name)
    expect(stats.first.queue).to eq(queue)
    expect(batch.pluck).to eq [['bar']]
  end
end
@@ -0,0 +1,47 @@
1
require 'spec_helper'

describe Sidekiq::Grouping::Redis do
  subject { Sidekiq::Grouping::Redis.new }

  let(:queue_name) { "my_queue" }
  # Keys mirror the "batching:" namespace built by Redis#ns.
  let(:key) { "batching:#{queue_name}" }
  let(:unique_key) { "batching:#{queue_name}:unique_messages" }

  describe "#push_msg" do
    it "adds message to queue" do
      subject.push_msg(queue_name, 'My message')
      expect(redis { |c| c.llen key }).to eq 1
      expect(redis { |c| c.lrange key, 0, 1 }).to eq ['My message']
      # Uniqueness set stays empty unless remember_unique is passed.
      expect(redis { |c| c.smembers unique_key}).to eq []
    end

    it "remembers unique message if specified" do
      subject.push_msg(queue_name, 'My message', true)
      expect(redis { |c| c.smembers unique_key}).to eq ['My message']
    end
  end

  describe "#pluck" do
    it "removes messages from queue" do
      subject.push_msg(queue_name, "Message 1")
      subject.push_msg(queue_name, "Message 2")
      subject.pluck(queue_name, 2)
      expect(redis { |c| c.llen key }).to eq 0
    end

    # The pluck Lua script must also clear the uniqueness set so the
    # same args can be enqueued again after a flush.
    it "forgets unique messages" do
      subject.push_msg(queue_name, "Message 1", true)
      subject.push_msg(queue_name, "Message 2", true)
      expect(redis { |c| c.scard unique_key }).to eq 2
      subject.pluck(queue_name, 2)
      expect(redis { |c| c.smembers unique_key }).to eq []
    end
  end

  private

  # Shortcut to Sidekiq's Redis connection for raw assertions.
  def redis(&block)
    Sidekiq.redis(&block)
  end

end
@@ -0,0 +1,43 @@
1
$LOAD_PATH << "." unless $LOAD_PATH.include?(".")

require 'rubygems'
require 'bundler/setup'
require 'timecop'
require 'simplecov'
require 'celluloid/autostart'
require 'sidekiq'
require 'rspec-sidekiq'
require 'support/test_workers'

# Coverage must start before the library under test is required.
SimpleCov.start do
  add_filter 'spec'
end

require 'sidekiq/grouping'

# Silence all logging during the test run.
Sidekiq::Grouping.logger = nil
Sidekiq.redis = { namespace: ENV['namespace'] }
Sidekiq.logger = nil

RSpec::Sidekiq.configure do |config|
  config.clear_all_enqueued_jobs = true
end

RSpec.configure do |config|
  config.order = :random
  config.run_all_when_everything_filtered = true
  config.filter_run :focus

  # Wipe every batching key in Redis so examples start from a clean slate.
  config.before :each do
    Sidekiq.redis do |conn|
      keys = conn.keys '*batching*'
      keys.each { |key| conn.del key }
    end
  end

  # Undo any Timecop time travel performed by an example.
  config.after :each do
    Timecop.return
  end
end

$: << File.join(File.dirname(__FILE__), '..', 'lib')
@@ -0,0 +1,42 @@
1
# Worker fixtures used by the specs; perform bodies are intentionally empty.

# No batching options — must be enqueued normally.
class RegularWorker
  include Sidekiq::Worker

  def perform(foo)
  end
end

# Flushes once 3 jobs have accumulated.
class BatchedSizeWorker
  include Sidekiq::Worker

  sidekiq_options queue: :batched_size, batch_size: 3

  def perform(foo)
  end
end

# Flushes on a one-hour interval regardless of size.
class BatchedIntervalWorker
  include Sidekiq::Worker

  sidekiq_options queue: :batched_interval, batch_flush_interval: 3600

  def perform(foo)
  end
end

# Flushes on whichever comes first: 3 jobs or one hour.
class BatchedBothWorker
  include Sidekiq::Worker

  sidekiq_options queue: :batched_both, batch_flush_interval: 3600, batch_size: 3

  def perform(foo)
  end
end

# Deduplicates pending jobs with identical arguments (batch_unique).
class BatchedUniqueArgsWorker
  include Sidekiq::Worker

  sidekiq_options queue: :batched_unique_args, batch_size: 3, batch_unique: true

  def perform(foo)
  end
end
data/web.png ADDED
Binary file
metadata ADDED
@@ -0,0 +1,197 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: sidekiq-grouping
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.6
5
+ platform: ruby
6
+ authors:
7
+ - Victor Sokolov
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2015-04-24 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: bundler
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '1.5'
20
+ type: :development
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '1.5'
27
+ - !ruby/object:Gem::Dependency
28
+ name: rake
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ">="
32
+ - !ruby/object:Gem::Version
33
+ version: '0'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ">="
39
+ - !ruby/object:Gem::Version
40
+ version: '0'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rspec
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ">="
46
+ - !ruby/object:Gem::Version
47
+ version: '0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - ">="
53
+ - !ruby/object:Gem::Version
54
+ version: '0'
55
+ - !ruby/object:Gem::Dependency
56
+ name: simplecov
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - ">="
60
+ - !ruby/object:Gem::Version
61
+ version: '0'
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - ">="
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ - !ruby/object:Gem::Dependency
70
+ name: rspec-sidekiq
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - ">="
74
+ - !ruby/object:Gem::Version
75
+ version: '0'
76
+ type: :development
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - ">="
81
+ - !ruby/object:Gem::Version
82
+ version: '0'
83
+ - !ruby/object:Gem::Dependency
84
+ name: activesupport
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - ">="
88
+ - !ruby/object:Gem::Version
89
+ version: '0'
90
+ type: :development
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - ">="
95
+ - !ruby/object:Gem::Version
96
+ version: '0'
97
+ - !ruby/object:Gem::Dependency
98
+ name: timecop
99
+ requirement: !ruby/object:Gem::Requirement
100
+ requirements:
101
+ - - ">="
102
+ - !ruby/object:Gem::Version
103
+ version: '0'
104
+ type: :development
105
+ prerelease: false
106
+ version_requirements: !ruby/object:Gem::Requirement
107
+ requirements:
108
+ - - ">="
109
+ - !ruby/object:Gem::Version
110
+ version: '0'
111
+ - !ruby/object:Gem::Dependency
112
+ name: sidekiq
113
+ requirement: !ruby/object:Gem::Requirement
114
+ requirements:
115
+ - - ">="
116
+ - !ruby/object:Gem::Version
117
+ version: '0'
118
+ type: :runtime
119
+ prerelease: false
120
+ version_requirements: !ruby/object:Gem::Requirement
121
+ requirements:
122
+ - - ">="
123
+ - !ruby/object:Gem::Version
124
+ version: '0'
125
+ - !ruby/object:Gem::Dependency
126
+ name: celluloid
127
+ requirement: !ruby/object:Gem::Requirement
128
+ requirements:
129
+ - - ">="
130
+ - !ruby/object:Gem::Version
131
+ version: '0'
132
+ type: :runtime
133
+ prerelease: false
134
+ version_requirements: !ruby/object:Gem::Requirement
135
+ requirements:
136
+ - - ">="
137
+ - !ruby/object:Gem::Version
138
+ version: '0'
139
+ description:
140
+ email:
141
+ - gzigzigzeo@gmail.com
142
+ executables: []
143
+ extensions: []
144
+ extra_rdoc_files: []
145
+ files:
146
+ - ".gitignore"
147
+ - ".travis.yml"
148
+ - Gemfile
149
+ - LICENSE.txt
150
+ - README.md
151
+ - Rakefile
152
+ - lib/sidekiq/grouping.rb
153
+ - lib/sidekiq/grouping/actor.rb
154
+ - lib/sidekiq/grouping/batch.rb
155
+ - lib/sidekiq/grouping/config.rb
156
+ - lib/sidekiq/grouping/logging.rb
157
+ - lib/sidekiq/grouping/middleware.rb
158
+ - lib/sidekiq/grouping/redis.rb
159
+ - lib/sidekiq/grouping/supervisor.rb
160
+ - lib/sidekiq/grouping/version.rb
161
+ - lib/sidekiq/grouping/views/index.erb
162
+ - lib/sidekiq/grouping/web.rb
163
+ - sidekiq-grouping.gemspec
164
+ - spec/modules/batch_spec.rb
165
+ - spec/modules/redis_spec.rb
166
+ - spec/spec_helper.rb
167
+ - spec/support/test_workers.rb
168
+ - web.png
169
+ homepage: http://github.com/gzigzigzeo/sidekiq-grouping
170
+ licenses:
171
+ - MIT
172
+ metadata: {}
173
+ post_install_message:
174
+ rdoc_options: []
175
+ require_paths:
176
+ - lib
177
+ required_ruby_version: !ruby/object:Gem::Requirement
178
+ requirements:
179
+ - - ">="
180
+ - !ruby/object:Gem::Version
181
+ version: '0'
182
+ required_rubygems_version: !ruby/object:Gem::Requirement
183
+ requirements:
184
+ - - ">="
185
+ - !ruby/object:Gem::Version
186
+ version: '0'
187
+ requirements: []
188
+ rubyforge_project:
189
+ rubygems_version: 2.4.3
190
+ signing_key:
191
+ specification_version: 4
192
+ summary: Allows identical sidekiq jobs to be processed with a single background call
193
+ test_files:
194
+ - spec/modules/batch_spec.rb
195
+ - spec/modules/redis_spec.rb
196
+ - spec/spec_helper.rb
197
+ - spec/support/test_workers.rb