autoscaler 0.7.0 → 0.8.0
- data/CHANGELOG.md +6 -0
- data/README.md +8 -0
- data/lib/autoscaler/counter_cache_memory.rb +35 -0
- data/lib/autoscaler/counter_cache_redis.rb +44 -0
- data/lib/autoscaler/heroku_scaler.rb +17 -23
- data/lib/autoscaler/version.rb +1 -1
- data/spec/autoscaler/counter_cache_memory_spec.rb +21 -0
- data/spec/autoscaler/counter_cache_redis_spec.rb +40 -0
- data/spec/autoscaler/heroku_scaler_spec.rb +12 -6
- metadata +8 -2
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,11 @@
 # Changelog
 
+## 0.8.0
+
+- Extracted caching of Heroku worker counts and added experimental Redis cache
+      scaler.counter_cache = Autoscaler::CounterCacheRedis.new(Sidekiq.method(:redis))
+- Now rescues Heroku::API::Errors in addition to Excon::Errors
+
 ## 0.7.0
 
 - Added Autoscaler::LinearScalingStrategy
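The changelog entry above boils down to swapping the scaler's default in-memory counter cache for the Redis-backed one. A minimal sketch, assuming the no-argument `HerokuScaler` constructor that reads `HEROKU_APP`/`HEROKU_API_KEY` from the environment; only `counter_cache=` and `CounterCacheRedis.new` are introduced by this release:

    require 'sidekiq'
    require 'autoscaler/heroku_scaler'
    require 'autoscaler/counter_cache_redis'

    # Build the scaler, then share the worker count via Redis so web and
    # worker dynos see the same cached value (default expiry: 5 minutes).
    heroku = Autoscaler::HerokuScaler.new
    heroku.counter_cache = Autoscaler::CounterCacheRedis.new(Sidekiq.method(:redis))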
data/README.md
CHANGED
@@ -45,12 +45,20 @@ Install the middleware in your `Sidekiq.configure_` blocks
 
 ## Experimental
 
+### Strategies
+
 You can pass a scaling strategy object instead of the timeout to the server middleware. The object (or lambda) should respond to `#call(system, idle_time)` and return the desired number of workers. See `lib/autoscaler/binary_scaling_strategy.rb` for an example.
 
+### Initial Workers
+
 `Client#set_initial_workers` to start workers on main process startup; typically:
 
     Autoscaler::Sidekiq::Client.add_to_chain(chain, 'default' => heroku).set_initial_workers
 
+### Worker caching
+
+    scaler.counter_cache = Autoscaler::CounterCacheRedis.new(Sidekiq.method(:redis))
+
 ## Tests
 
 The project is setup to run RSpec with Guard. It expects a redis instance on a custom port, which is started by the Guardfile.
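As a sketch of the strategy hook described under "Strategies" above (not part of the gem): any callable responding to `#call(system, idle_time)` will do, so a plain lambda works. The `system.queued` call and the exact middleware arguments are assumptions; check `lib/autoscaler/binary_scaling_strategy.rb` and the install section of the README for the real interface.

    # Hypothetical strategy: run one worker whenever anything is queued,
    # otherwise report zero so the scaler can wind workers down.
    strategy = lambda do |system, idle_time|
      system.queued > 0 ? 1 : 0
    end

    Sidekiq.configure_server do |config|
      config.server_middleware do |chain|
        # Pass the strategy where the timeout would normally go.
        chain.add(Autoscaler::Sidekiq::Server, heroku, strategy)
      end
    end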
data/lib/autoscaler/counter_cache_memory.rb
ADDED
@@ -0,0 +1,35 @@
+module Autoscaler
+  # Implements a cache for the number of heroku workers currently up
+  # Values are stored for short periods in the object
+  class CounterCacheMemory
+    # @param [Numeric] timeout number of seconds to allow before expiration
+    def initialize(timeout = 5)
+      @timeout = timeout
+      @counter = 0
+      @valid_until = Time.now - 1
+    end
+
+    # @param [Numeric] value new counter value
+    def counter=(value)
+      @valid_until = Time.now + @timeout
+      @counter = value
+    end
+
+    # Raised when no block is provided to #counter
+    class Expired < ArgumentError; end
+
+    # Current value. Uses the Hash#fetch api - pass a block to use in place of expired values or it will raise an exception.
+    def counter
+      return @counter if valid?
+      return yield if block_given?
+      raise Expired
+    end
+
+    private
+    attr_reader :timeout
+
+    def valid?
+      Time.now < @valid_until
+    end
+  end
+end
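A short usage sketch of the fetch-style API the new class exposes (the values are illustrative):

    require 'autoscaler/counter_cache_memory'

    cache = Autoscaler::CounterCacheMemory.new(5) # entries expire after 5 seconds
    cache.counter = 2                             # store a fresh worker count
    cache.counter                                 # => 2 while still valid
    # After expiry, a block supplies the fallback (and may refresh the cache);
    # with no block, Expired is raised, mirroring Hash#fetch. Here the value is
    # still fresh, so the block is never called.
    cache.counter { cache.counter = 0 }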
data/lib/autoscaler/counter_cache_redis.rb
ADDED
@@ -0,0 +1,44 @@
+module Autoscaler
+  # Implements a cache for the number of heroku workers currently up
+  # This permits some web/worker communication, which makes longer timeouts practical.
+  class CounterCacheRedis
+    # @param [Proc, ConnectionPool, Redis client] redis redis interface
+    #   Proc: e.g. Sidekiq.method(:redis)
+    #   ConnectionPool: e.g. what you pass to Sidekiq.redis=
+    #   Redis client: e.g. Redis.connect
+    # @param [Numeric] timeout number of seconds to allow before expiration
+    def initialize(redis, timeout = 5 * 60)
+      @redis = redis
+      @timeout = timeout
+    end
+
+    # @param [Numeric] value new counter value
+    def counter=(value)
+      redis {|c| c.setex('autoscaler:workers', @timeout, value)}
+    end
+
+    # Raised when no block is provided to #counter
+    class Expired < ArgumentError; end
+
+    # Current value. Uses the Hash#fetch api - pass a block to use in place of expired values or it will raise an exception.
+    def counter
+      value = redis {|c| c.get('autoscaler:workers')}
+      return value.to_i if value
+      return yield if block_given?
+      raise Expired
+    end
+
+    private
+    attr_reader :timeout
+
+    def redis(&block)
+      if @redis.respond_to?(:call)
+        @redis.call(&block)
+      elsif @redis.respond_to?(:with)
+        @redis.with(&block)
+      else
+        block.call(@redis)
+      end
+    end
+  end
+end
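The constructor's first argument is duck-typed, as the private `#redis` helper above shows. A sketch of the three accepted forms; the connection URLs and pool size are placeholders, and the optional second argument overrides the default five-minute expiry:

    require 'sidekiq'
    require 'redis'
    require 'connection_pool'
    require 'autoscaler/counter_cache_redis'

    # 1. A Proc that yields a connection, e.g. Sidekiq's own pool:
    Autoscaler::CounterCacheRedis.new(Sidekiq.method(:redis))

    # 2. Anything responding to #with, such as a ConnectionPool:
    pool = ConnectionPool.new(:size => 2) { Redis.new(:url => 'redis://localhost:6379') }
    Autoscaler::CounterCacheRedis.new(pool, 10 * 60)

    # 3. A plain Redis client:
    Autoscaler::CounterCacheRedis.new(Redis.connect(:url => 'redis://localhost:6379'))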
data/lib/autoscaler/heroku_scaler.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'heroku-api'
+require 'autoscaler/counter_cache_memory'
 
 module Autoscaler
   # Wraps the Heroku API to provide just the interface that we need for scaling.
@@ -13,8 +14,7 @@ module Autoscaler
       @client = Heroku::API.new(:api_key => key)
       @type = type
       @app = app
-      @workers =
-      @known = Time.now - 1
+      @workers = CounterCacheMemory.new
     end
 
     attr_reader :app
@@ -23,25 +23,22 @@ module Autoscaler
     # Read the current worker count (value may be cached)
     # @return [Numeric] number of workers
     def workers
-
-        @workers
-      else
-        know heroku_get_workers
-      end
+      @workers.counter {@workers.counter = heroku_get_workers}
     end
 
     # Set the number of workers (noop if workers the same)
     # @param [Numeric] n number of workers
     def workers=(n)
-
+      unknown = false
+      current = @workers.counter{unknown = true; 1}
+      if n != current || unknown
        p "Scaling #{type} to #{n}"
        heroku_set_workers(n)
-
+        @workers.counter = n
      end
    end
 
-    # Callable object which responds to exceptions during api calls
-    #
+    # Callable object which responds to exceptions during api calls #
     # @example
     # heroku.exception_handler = lambda {|exception| MyApp.logger.error(exception)}
     # heroku.exception_handler = lambda {|exception| raise}
@@ -52,28 +49,25 @@ module Autoscaler
    # }
    attr_writer :exception_handler
 
-
-
-
-
-      @known = Time.now + 5
-      @workers = n
+    # Object which supports #counter and #counter=
+    # Defaults to CounterCacheMemory
+    def counter_cache=(cache)
+      @workers = cache
    end
 
-
-
-    end
+    private
+    attr_reader :client
 
    def heroku_get_workers
      client.get_ps(app).body.count {|ps| ps['process'].match /#{type}\.\d?/ }
-    rescue Excon::Errors::Error => e
+    rescue Excon::Errors::Error, Heroku::API::Errors::Error => e
      exception_handler.call(e)
-
+      0
    end
 
    def heroku_set_workers(n)
      client.post_ps_scale(app, type, n)
-    rescue Excon::Errors::Error => e
+    rescue Excon::Errors::Error, Heroku::API::Errors::Error => e
      exception_handler.call(e)
    end
 
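Tying the pieces above together: both API helpers now rescue `Heroku::API::Errors::Error` as well as `Excon::Errors::Error`, hand the exception to `exception_handler`, and `#workers` falls back to 0 when the handler does not re-raise. A sketch; `MyApp.logger` is the same placeholder used in the inline `@example`, and the no-argument constructor is assumed to read its credentials from the environment:

    scaler = Autoscaler::HerokuScaler.new
    scaler.exception_handler = lambda {|exception| MyApp.logger.error(exception)}

    scaler.workers      # => 0 if the Heroku API call raises and the handler swallows it
    scaler.workers = 2  # scaling failures likewise go through the handler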
data/lib/autoscaler/version.rb
CHANGED
data/spec/autoscaler/counter_cache_memory_spec.rb
ADDED
@@ -0,0 +1,21 @@
+require 'spec_helper'
+require 'autoscaler/counter_cache_memory'
+
+describe Autoscaler::CounterCacheMemory do
+  let(:cut) {Autoscaler::CounterCacheMemory}
+
+  it {expect{cut.new.counter}.to raise_error(cut::Expired)}
+  it {cut.new.counter{1}.should == 1}
+
+  it 'set and store' do
+    cache = cut.new
+    cache.counter = 1
+    cache.counter.should == 1
+  end
+
+  it 'times out' do
+    cache = cut.new(0)
+    cache.counter = 1
+    expect{cache.counter.should}.to raise_error(cut::Expired)
+  end
+end
data/spec/autoscaler/counter_cache_redis_spec.rb
ADDED
@@ -0,0 +1,40 @@
+require 'spec_helper'
+require 'autoscaler/counter_cache_redis'
+
+describe Autoscaler::CounterCacheRedis do
+  before do
+    @redis = Sidekiq.redis = REDIS
+    Sidekiq.redis {|c| c.flushdb }
+  end
+
+  let(:cut) {Autoscaler::CounterCacheRedis}
+  subject {cut.new(Sidekiq.method(:redis))}
+
+  it {expect{subject.counter}.to raise_error(cut::Expired)}
+  it {subject.counter{1}.should == 1}
+
+  it 'set and store' do
+    subject.counter = 2
+    subject.counter.should == 2
+  end
+
+  it 'times out' do
+    cache = cut.new(Sidekiq.method(:redis), 1) # timeout 0 invalid
+    cache.counter = 3
+    sleep(2)
+    expect{cache.counter}.to raise_error(cut::Expired)
+  end
+
+  it 'passed a connection pool' do
+    cache = cut.new(@redis)
+    cache.counter = 4
+    cache.counter.should == 4
+  end
+
+  it 'passed a plain connection' do
+    connection = Redis.connect(:url => 'http://localhost:9736', :namespace => 'autoscaler')
+    cache = cut.new connection
+    cache.counter = 5
+    cache.counter.should == 5
+  end
+end
data/spec/autoscaler/heroku_scaler_spec.rb
CHANGED
@@ -1,5 +1,6 @@
 require 'spec_helper'
 require 'autoscaler/heroku_scaler'
+require 'heroku/api/errors'
 
 describe Autoscaler::HerokuScaler, :online => true do
   let(:cut) {Autoscaler::HerokuScaler}
@@ -18,17 +19,17 @@ describe Autoscaler::HerokuScaler, :online => true do
     its(:workers) {should == 1}
   end
 
-
+  shared_examples 'exception handler' do |exception_class|
     before do
-
-      raise
-
+      client.should_receive(:client){
+        raise exception_class.new(Exception.new('oops'))
+      }
     end
 
     describe "default handler" do
       it {expect{client.workers}.to_not raise_error}
       it {client.workers.should == 0}
-      it {expect{client.workers =
+      it {expect{client.workers = 2}.to_not raise_error}
     end
 
     describe "custom handler" do
@@ -40,4 +41,9 @@ describe Autoscaler::HerokuScaler, :online => true do
     it {client.workers; @caught.should be_true}
   end
 end
-
+
+  describe 'exception handling', :focus => true do
+    it_behaves_like 'exception handler', Excon::Errors::SocketError
+    it_behaves_like 'exception handler', Heroku::API::Errors::Error
+  end
+end
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: autoscaler
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 0.8.0
|
5
5
|
prerelease:
|
6
6
|
platform: ruby
|
7
7
|
authors:
|
@@ -10,7 +10,7 @@ authors:
|
|
10
10
|
autorequire:
|
11
11
|
bindir: bin
|
12
12
|
cert_chain: []
|
13
|
-
date:
|
13
|
+
date: 2014-03-07 00:00:00.000000000 Z
|
14
14
|
dependencies:
|
15
15
|
- !ruby/object:Gem::Dependency
|
16
16
|
name: sidekiq
|
@@ -119,6 +119,8 @@ files:
|
|
119
119
|
- CHANGELOG.md
|
120
120
|
- README.md
|
121
121
|
- lib/autoscaler/binary_scaling_strategy.rb
|
122
|
+
- lib/autoscaler/counter_cache_memory.rb
|
123
|
+
- lib/autoscaler/counter_cache_redis.rb
|
122
124
|
- lib/autoscaler/delayed_shutdown.rb
|
123
125
|
- lib/autoscaler/heroku_scaler.rb
|
124
126
|
- lib/autoscaler/linear_scaling_strategy.rb
|
@@ -138,6 +140,8 @@ files:
|
|
138
140
|
- examples/simple.rb
|
139
141
|
- Guardfile
|
140
142
|
- spec/autoscaler/binary_scaling_strategy_spec.rb
|
143
|
+
- spec/autoscaler/counter_cache_memory_spec.rb
|
144
|
+
- spec/autoscaler/counter_cache_redis_spec.rb
|
141
145
|
- spec/autoscaler/delayed_shutdown_spec.rb
|
142
146
|
- spec/autoscaler/heroku_scaler_spec.rb
|
143
147
|
- spec/autoscaler/linear_scaling_strategy_spec.rb
|
@@ -177,6 +181,8 @@ summary: Start/stop Sidekiq workers on Heroku
|
|
177
181
|
test_files:
|
178
182
|
- Guardfile
|
179
183
|
- spec/autoscaler/binary_scaling_strategy_spec.rb
|
184
|
+
- spec/autoscaler/counter_cache_memory_spec.rb
|
185
|
+
- spec/autoscaler/counter_cache_redis_spec.rb
|
180
186
|
- spec/autoscaler/delayed_shutdown_spec.rb
|
181
187
|
- spec/autoscaler/heroku_scaler_spec.rb
|
182
188
|
- spec/autoscaler/linear_scaling_strategy_spec.rb
|