pulse-meter 0.3.1 → 0.3.2

data/lib/pulse-meter.rb CHANGED
@@ -8,6 +8,9 @@ require "pulse-meter/observer"
  require "pulse-meter/sensor"
  require "pulse-meter/sensor/configuration"

+ require "pulse-meter/command_aggregator/async"
+ require "pulse-meter/command_aggregator/sync"
+
  module PulseMeter
  @@redis = nil

@@ -18,4 +21,16 @@ module PulseMeter
  def self.redis=(redis)
  @@redis = redis
  end
+
+ def self.command_aggregator
+ @@command_aggregator ||= PulseMeter::CommandAggregator::Async.instance
+ end
+
+ def self.command_aggregator=(command_aggregator)
+ @@command_aggregator = case command_aggregator
+ when :sync; PulseMeter::CommandAggregator::Sync.instance
+ when :async; PulseMeter::CommandAggregator::Async.instance
+ else raise ArgumentError
+ end
+ end
  end
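The change above routes all sensor writes through a pluggable command aggregator: the asynchronous one (a queue drained by a background thread) is the default, and the synchronous one can be selected explicitly. A minimal usage sketch, assuming a plain redis-rb client:

    require 'pulse-meter'
    require 'redis'

    PulseMeter.redis = Redis.new

    PulseMeter.command_aggregator            # => CommandAggregator::Async singleton (default)
    PulseMeter.command_aggregator = :sync    # commands go straight to Redis
    PulseMeter.command_aggregator = :async   # commands are queued and flushed by a consumer thread
    # PulseMeter.command_aggregator = :other # would raise ArgumentError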
data/lib/pulse-meter/command_aggregator/async.rb ADDED
@@ -0,0 +1,82 @@
+ require 'singleton'
+ require 'thread'
+
+ module PulseMeter
+   module CommandAggregator
+     class Async
+       include Singleton
+
+       MAX_QUEUE_LENGTH = 10_000
+
+       attr_reader :max_queue_length
+
+       def initialize
+         @max_queue_length = MAX_QUEUE_LENGTH
+         @queue = Queue.new
+         @buffer = []
+         @in_multi = false
+         @consumer_thread = run_consumer
+       end
+
+       def multi
+         @in_multi = true
+         yield
+       ensure
+         @in_multi = false
+         send_buffer_to_queue
+       end
+
+       def method_missing(*args)
+         @buffer << args
+         send_buffer_to_queue unless @in_multi
+       end
+
+       def wait_for_pending_events(max_seconds = 1)
+         left_to_wait = max_seconds.to_f
+         sleep_step = 0.01
+         while has_pending_events? && left_to_wait > 0
+           left_to_wait -= sleep_step
+           sleep(sleep_step)
+         end
+       end
+
+       private
+
+       def has_pending_events?
+         !@queue.empty?
+       end
+
+       def send_buffer_to_queue
+         if @queue.size < @max_queue_length
+           @queue << @buffer
+         end
+         @buffer = []
+       end
+
+       def redis
+         PulseMeter.redis
+       end
+
+       def consume_commands
+         # redis and @queue are threadsafe
+         while commands = @queue.pop
+           begin
+             redis.multi do
+               commands.each do |command|
+                 redis.send(*command)
+               end
+             end
+           rescue StandardError => e
+             STDERR.puts "error in consumer_thread: #{e}, #{e.backtrace.join("\n")}"
+           end
+         end
+       end
+
+       def run_consumer
+         Thread.new do
+           consume_commands
+         end
+       end
+     end
+   end
+ end
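The async aggregator captures every unknown method call via method_missing as [command, *args], buffers it, and pushes the buffer onto a queue; a consumer thread pops buffers and replays them against Redis inside a MULTI block, silently dropping writes once the queue already holds max_queue_length entries. A rough sketch of how it is driven (key names are illustrative):

    PulseMeter.command_aggregator = :async
    ca = PulseMeter.command_aggregator

    ca.set("some_key", "value")      # returns immediately; the consumer thread performs the write
    ca.incrby("some_counter", 5)

    ca.multi do                      # both commands reach Redis as a single queued batch
      ca.hincrby("some_hash", "count", 1)
      ca.hincrby("some_hash", "sum", 42)
    end

    ca.wait_for_pending_events       # blocks (up to 1 second by default) until the queue drains
    PulseMeter.redis.get("some_key") # => "value"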
data/lib/pulse-meter/command_aggregator/sync.rb ADDED
@@ -0,0 +1,18 @@
+ require 'singleton'
+
+ module PulseMeter
+   module CommandAggregator
+     class Sync
+       include Singleton
+
+       def redis
+         PulseMeter.redis
+       end
+
+       def method_missing(*args, &block)
+         redis.send(*args, &block)
+       end
+     end
+   end
+ end
+
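The sync variant keeps no queue at all: method_missing forwards every call, block included, straight to PulseMeter.redis, so multi behaves like an ordinary Redis MULTI. A minimal sketch:

    PulseMeter.command_aggregator = :sync
    ca = PulseMeter.command_aggregator

    ca.set("some_key", "value")      # executed against Redis immediately
    PulseMeter.redis.get("some_key") # => "value", no waiting required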
@@ -31,6 +31,10 @@ module PulseMeter
  PulseMeter.redis
  end

+ def command_aggregator
+ PulseMeter.command_aggregator
+ end
+
  # Saves annotation to Redis
  # @param description [String] Sensor annotation
  def annotate(description)
@@ -54,7 +58,7 @@ module PulseMeter
  def event(value)
  process_event(value)
  true
- rescue StandardError
+ rescue StandardError => e
  false
  end

@@ -31,7 +31,7 @@ module PulseMeter
  # Processes event by incrementing counter by given value
  # @param value [Fixnum] increment
  def process_event(value)
- redis.incrby(value_key, value.to_i)
+ command_aggregator.incrby(value_key, value.to_i)
  end

  end
@@ -26,8 +26,8 @@ module PulseMeter
  # and values are increments for their keys
  def process_event(data)
  data.each_pair do |k, v|
- redis.hincrby(value_key, k, v.to_i)
- redis.hincrby(value_key, :total, v.to_i)
+ command_aggregator.hincrby(value_key, k, v.to_i)
+ command_aggregator.hincrby(value_key, :total, v.to_i)
  end
  end

@@ -16,7 +16,7 @@ module PulseMeter
  # Sets indicator values
  # @param value [Hash] new indicator values
  def process_event(events)
- events.each_pair {|name, value| redis.hset(value_key, name, value.to_f)}
+ events.each_pair {|name, value| command_aggregator.hset(value_key, name, value.to_f)}
  end

  end
@@ -27,7 +27,7 @@ module PulseMeter
  # Sets indicator value
  # @param value [Float] new indicator value
  def process_event(value)
- redis.set(value_key, value.to_f)
+ command_aggregator.set(value_key, value.to_f)
  end

  end
@@ -57,8 +57,11 @@ module PulseMeter
  interval_id = get_interval_id(time)
  key = raw_data_key(interval_id)
  aggregate_event(key, value)
- redis.expire(key, raw_data_ttl)
+ command_aggregator.expire(key, raw_data_ttl)
  end
+ true
+ rescue StandardError => e
+ false
  end

  # Reduces data in given interval.
@@ -247,10 +250,10 @@ module PulseMeter
  # Processes event
  # @param value event value
  def process_event(value = nil)
- multi do
+ command_aggregator.multi do
  current_key = current_raw_data_key
  aggregate_event(current_key, value)
- redis.expire(current_key, raw_data_ttl)
+ command_aggregator.expire(current_key, raw_data_ttl)
  end
  end
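Two behavioural notes on the timeline changes above: each event now issues its Redis commands inside command_aggregator.multi, so with the async aggregator they are queued and replayed as one batch, and the event-recording method now swallows exceptions and reports success as a boolean, matching Sensor::Base#event. A sketch of what calling code can rely on (sensor stands for any configured timeline sensor instance):

    if sensor.event_at(Time.now, 42)
      # the value was handed to the command aggregator
    else
      # process_event raised; the error was swallowed and false returned
    end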
@@ -5,8 +5,8 @@ module PulseMeter
  class Average < Timeline

  def aggregate_event(key, value)
- redis.hincrby(key, :count, 1)
- redis.hincrby(key, :sum, value)
+ command_aggregator.hincrby(key, :count, 1)
+ command_aggregator.hincrby(key, :sum, value)
  end

  def summarize(key)
@@ -4,7 +4,7 @@ module PulseMeter
  # Counts events per interval
  class Counter < Timeline
  def aggregate_event(key, value)
- redis.incrby(key, value.to_i)
+ command_aggregator.incrby(key, value.to_i)
  end

  def summarize(key)
@@ -8,8 +8,8 @@ module PulseMeter
  class HashedCounter < Timeline
  def aggregate_event(key, data)
  data.each_pair do |k, v|
- redis.hincrby(key, k, v)
- redis.hincrby(key, :total, v)
+ command_aggregator.hincrby(key, k, v)
+ command_aggregator.hincrby(key, :total, v)
  end
  end

@@ -8,7 +8,7 @@ module PulseMeter
  class HashedIndicator < Timeline
  def aggregate_event(key, data)
  data.each_pair do |k, v|
- redis.hset(key, k, v) if v.respond_to?(:to_f)
+ command_aggregator.hset(key, k, v) if v.respond_to?(:to_f)
  end
  end

@@ -4,7 +4,7 @@ module PulseMeter
  # Saves last registered flag float value for each interval
  class Indicator < Timeline
  def aggregate_event(key, value)
- redis.set(key, value.to_f)
+ command_aggregator.set(key, value.to_f)
  end

  def summarize(key)
@@ -5,8 +5,8 @@ module PulseMeter
  class Max < Timeline

  def aggregate_event(key, value)
- redis.zadd(key, value, "#{value}::#{uniqid}")
- redis.zremrangebyrank(key, 0, -2)
+ command_aggregator.zadd(key, value, "#{value}::#{uniqid}")
+ command_aggregator.zremrangebyrank(key, 0, -2)
  end

  def summarize(key)
@@ -5,8 +5,8 @@ module PulseMeter
  class Min < Timeline

  def aggregate_event(key, value)
- redis.zadd(key, value, "#{value}::#{uniqid}")
- redis.zremrangebyrank(key, 1, -1)
+ command_aggregator.zadd(key, value, "#{value}::#{uniqid}")
+ command_aggregator.zremrangebyrank(key, 1, -1)
  end

  def summarize(key)
@@ -12,7 +12,7 @@ module PulseMeter
  end

  def aggregate_event(key, value)
- redis.zadd(key, value, "#{value}::#{uniqid}")
+ command_aggregator.zadd(key, value, "#{value}::#{uniqid}")
  end

  def summarize(key)
@@ -11,7 +11,7 @@ module PulseMeter
  end

  def aggregate_event(key, value)
- redis.zadd(key, value, "#{value}::#{uniqid}")
+ command_aggregator.zadd(key, value, "#{value}::#{uniqid}")
  end

  def summarize(key)
@@ -4,7 +4,7 @@ module PulseMeter
  # Counts unique events per interval
  class UniqCounter < Timeline
  def aggregate_event(key, value)
- redis.sadd(key, value)
+ command_aggregator.sadd(key, value)
  end

  def summarize(key)
@@ -16,7 +16,7 @@ module PulseMeter
  # Processes event
  # @param name [String] value to be counted
  def process_event(name)
- redis.sadd(value_key, name)
+ command_aggregator.sadd(value_key, name)
  end

  end
@@ -1,3 +1,3 @@
  module PulseMeter
- VERSION = "0.3.1"
+ VERSION = "0.3.2"
  end
data/pulse-meter.gemspec CHANGED
@@ -28,6 +28,7 @@ Gem::Specification.new do |gem|
  gem.add_runtime_dependency('terminal-table')
  gem.add_runtime_dependency('thor')

+ gem.add_development_dependency('aquarium')
  gem.add_development_dependency('coffee-script')
  gem.add_development_dependency('foreman')
  gem.add_development_dependency('hashie')
data/spec/pulse_meter/command_aggregator/async_spec.rb ADDED
@@ -0,0 +1,53 @@
+ require 'spec_helper'
+
+ describe PulseMeter::CommandAggregator::Async do
+   let(:ca){PulseMeter.command_aggregator}
+   let(:redis){PulseMeter.redis}
+
+   describe "#multi" do
+     it "should accumulate redis commands and execute them in bulk" do
+       ca.multi do
+         ca.set("xxxx", "zzzz")
+         ca.set("yyyy", "zzzz")
+         sleep 0.1
+         redis.get("xxxx").should be_nil
+         redis.get("yyyy").should be_nil
+       end
+       ca.wait_for_pending_events
+       redis.get("xxxx").should == "zzzz"
+       redis.get("yyyy").should == "zzzz"
+     end
+   end
+
+   describe "any other redis instance method" do
+     it "should be delegated to redis" do
+       ca.set("xxxx", "zzzz")
+       ca.wait_for_pending_events
+       redis.get("xxxx").should == "zzzz"
+     end
+
+     it "should be aggregated if queue is not overflowed" do
+       redis.set("x", 0)
+       ca.max_queue_length.times{ ca.incr("x") }
+       ca.wait_for_pending_events
+       redis.get("x").to_i.should == ca.max_queue_length
+     end
+
+     it "should not be aggregated if queue is overflowed" do
+       redis.set("x", 0)
+       (ca.max_queue_length * 2).times{ ca.incr("x") }
+       ca.wait_for_pending_events
+       redis.get("x").to_i.should < 2 * ca.max_queue_length
+     end
+   end
+
+   describe "#wait_for_pending_events" do
+     it "should pause execution until the aggregator thread sends all commands to redis" do
+       ca.set("xxxx", "zzzz")
+       redis.get("xxxx").should be_nil
+       ca.wait_for_pending_events
+       redis.get("xxxx").should == "zzzz"
+     end
+   end
+
+ end
data/spec/pulse_meter/command_aggregator/sync_spec.rb ADDED
@@ -0,0 +1,25 @@
+ require 'spec_helper'
+
+ describe PulseMeter::CommandAggregator::Sync do
+   let(:ca){described_class.instance}
+   let(:redis){PulseMeter.redis}
+
+   describe "#multi" do
+     it "should accumulate redis commands and execute them in bulk" do
+       ca.multi do
+         ca.set("xxxx", "zzzz").should == "QUEUED"
+         ca.set("yyyy", "zzzz").should == "QUEUED"
+       end
+       redis.get("xxxx").should == "zzzz"
+       redis.get("yyyy").should == "zzzz"
+     end
+   end
+
+   describe "any other redis instance method" do
+     it "should be delegated to redis" do
+       ca.set("xxxx", "zzzz")
+       redis.get("xxxx").should == "zzzz"
+     end
+   end
+ end
+
@@ -13,4 +13,41 @@ describe PulseMeter do
  PulseMeter.redis.should == 'redis'
  end
  end
+ describe "::command_aggregator=" do
+ context "when :async passed" do
+ it "should set async command_aggregator to be used" do
+ PulseMeter.command_aggregator = :async
+ PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Async)
+ end
+ end
+ context "when :sync passed" do
+ it "should set sync command_aggregator to be used" do
+ PulseMeter.command_aggregator = :sync
+ PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Sync)
+ end
+ end
+ context "otherwise" do
+ it "should raise ArgumentError" do
+ expect{ PulseMeter.command_aggregator = :xxx }.to raise_exception(ArgumentError)
+ end
+ end
+ end
+
+ describe "::command_aggregator" do
+ it "should return current command_aggregator" do
+ PulseMeter.command_aggregator = :async
+ PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Async)
+ PulseMeter.command_aggregator = :sync
+ PulseMeter.command_aggregator.should be_kind_of(PulseMeter::CommandAggregator::Sync)
+ end
+
+ it "should always return the same command_aggregator for each type" do
+ PulseMeter.command_aggregator = :async
+ ca1 = PulseMeter.command_aggregator
+ PulseMeter.command_aggregator = :sync
+ PulseMeter.command_aggregator = :async
+ ca2 = PulseMeter.command_aggregator
+ ca1.should == ca2
+ end
+ end
  end
@@ -59,7 +59,7 @@ shared_examples_for "timeline sensor" do |extra_init_values, default_event|
  describe "#event" do
  it "should write events to redis" do
  expect{
- sensor.event(sample_event)
+ sensor.event(sample_event)
  }.to change{ redis.keys('*').count }.by(1)
  end

@@ -100,7 +100,7 @@ shared_examples_for "timeline sensor" do |extra_init_values, default_event|
  it "should write data so that it totally expires after :raw_data_ttl" do
  key_count = redis.keys('*').count
  sensor.event_at(now, sample_event)
- Timecop.freeze(Time.now + raw_data_ttl + 1) do
+ Timecop.freeze(now + raw_data_ttl + 1) do
  redis.keys('*').count.should == key_count
  end
  end
data/spec/spec_helper.rb CHANGED
@@ -15,6 +15,16 @@ require 'rack/test'
  Dir['spec/support/**/*.rb'].each{|f| require File.join(ROOT, f) }
  Dir['spec/shared_examples/**/*.rb'].each{|f| require File.join(ROOT,f)}

+
+ require 'aquarium'
+ include Aquarium::Aspects
+
+ Aspect.new :after, :calls_to => [:event, :event_at], :for_types => [PulseMeter::Sensor::Base, PulseMeter::Sensor::Timeline] do |jp, obj, *args|
+ PulseMeter.command_aggregator.wait_for_pending_events
+ end
+
+ PulseMeter.command_aggregator.max_queue_length = 20
+
  RSpec.configure do |config|
  config.before(:each) do
  PulseMeter.redis = MockRedis.new
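The aspect above exists because, with the async aggregator, a spec that fires an event and immediately reads Redis would race the consumer thread; the advice waits for the queue to drain after every #event / #event_at call, and max_queue_length is lowered to 20, presumably so the overflow specs do not have to push tens of thousands of commands. A sketch of the pattern the aspect automates (sensor stands for any sensor built in a spec):

    sensor.event(10)
    PulseMeter.command_aggregator.wait_for_pending_events  # what the aspect runs after #event
    redis.keys('*').count                                  # now safe to assert on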
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: pulse-meter
  version: !ruby/object:Gem::Version
- version: 0.3.1
+ version: 0.3.2
  prerelease:
  platform: ruby
  authors:
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-11-03 00:00:00.000000000 Z
+ date: 2012-11-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gon-sinatra
@@ -140,6 +140,22 @@ dependencies:
  - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ name: aquarium
+ requirement: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: coffee-script
  requirement: !ruby/object:Gem::Requirement
@@ -397,6 +413,8 @@ files:
  - examples/server_config.yml
  - lib/cmd.rb
  - lib/pulse-meter.rb
+ - lib/pulse-meter/command_aggregator/async.rb
+ - lib/pulse-meter/command_aggregator/sync.rb
  - lib/pulse-meter/extensions/enumerable.rb
  - lib/pulse-meter/mixins/cmd.rb
  - lib/pulse-meter/mixins/dumper.rb
@@ -515,6 +533,8 @@ files:
  - lib/pulse-meter/visualize/widgets/timeline.rb
  - lib/pulse-meter/visualizer.rb
  - pulse-meter.gemspec
+ - spec/pulse_meter/command_aggregator/async_spec.rb
+ - spec/pulse_meter/command_aggregator/sync_spec.rb
  - spec/pulse_meter/extensions/enumerable_spec.rb
  - spec/pulse_meter/mixins/cmd_spec.rb
  - spec/pulse_meter/mixins/dumper_spec.rb
@@ -585,6 +605,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
+ segments:
+ - 0
+ hash: 284340155524977771
  requirements: []
  rubyforge_project:
  rubygems_version: 1.8.23
@@ -593,6 +616,8 @@ specification_version: 3
  summary: Lightweight Redis-based metrics aggregator and processor with CLI and simple
  and customizable WEB interfaces
  test_files:
+ - spec/pulse_meter/command_aggregator/async_spec.rb
+ - spec/pulse_meter/command_aggregator/sync_spec.rb
  - spec/pulse_meter/extensions/enumerable_spec.rb
  - spec/pulse_meter/mixins/cmd_spec.rb
  - spec/pulse_meter/mixins/dumper_spec.rb