lookout-statsd 0.9.0 → 1.0.0

checksums.yaml CHANGED
@@ -1,15 +1,7 @@
1
1
  ---
2
- !binary "U0hBMQ==":
3
- metadata.gz: !binary |-
4
- YjhiMDk4OTZmNWJkNjgzZmVhMGU5YzgxYzA4ZjZkYTExM2ZiZWRlMw==
5
- data.tar.gz: !binary |-
6
- NGFmMzljMDhhZTE2MzkzYzZhYTJjZTBhYmFlYjE4MjNjY2I2ZTc2ZQ==
2
+ SHA1:
3
+ metadata.gz: c46d6d96b1086ab6165a4254009cfdd9e3dc7dfa
4
+ data.tar.gz: a3d8a60d8abbc04a730e281edf58015d638eb587
7
5
  SHA512:
8
- metadata.gz: !binary |-
9
- YWQwNmUzMTIwZjkwNGFkNzRjNDQ2YjEyNTA5ZGVhOGJkOTZkYTBjMmJhZjkz
10
- YWY1MzUzMzVlZGI1OWY4OTBkNTIxODhhNmQ4MDMyZjU3OGYwNTdhZjRjMTZl
11
- N2RjZjQxN2RiYzdhN2EwYTEzMDgxZDU0NTM1YzliMjNlYjc3YTc=
12
- data.tar.gz: !binary |-
13
- YWI2ODM0NzUxODhmNjZkYzAxNWU5NGNjOTRlNmQ2ZmY0MDc5NDUyOGFlNjUw
14
- MTE2ZDgwMTgwYThiNzgwYWNmY2RkMzE3OWQ0MTVlZWI1ZWQyYzYwMjMzNDEw
15
- ZThiMjRhZDVkZmUwOGJmZTczNDRhY2UxNDIxOWM2OTJiMmRhNDY=
6
+ metadata.gz: aad201320598de48a5002b876d0738981784e7acb82216b37e30b1f796aa5886f535fa6935b0de71ba89d7ca3dae7014f125752e8ea90f73fc23ff1cea551e51
7
+ data.tar.gz: 5050801e669c08aa9a12d2d6ac5da237eef48996f5af5edcb1b2799060dc40812a1f1f44d26cac613437cb16f81eddc790826af6186dc0ab4b5df4ba81be1f8d
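The checksum file switched from YAML `!binary` (base64-wrapped) values to plain hex digests, which is why every line shows as changed. Decoding one of the old values makes the format change visible; this is a quick standalone check, not part of the gem:

```ruby
require 'base64'

# The old checksums.yaml wrapped plain strings in YAML !binary tags (base64).
# Decoding the old key tag recovers the literal text the new format uses directly:
Base64.decode64("U0hBMQ==")   # => "SHA1"

# The digest values decode the same way into hex strings. The digests themselves
# also changed because the 1.0.0 gem contents differ from 0.9.0; the new file
# simply stores them as bare hex instead of base64-wrapped binary.
```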
data/.gitignore CHANGED
@@ -5,3 +5,5 @@
5
5
  Gemfile.lock
6
6
  tmp
7
7
  tmp/*
8
+ .bundle
9
+ bundle
data/Gemfile CHANGED
@@ -3,11 +3,7 @@ source "https://rubygems.org"
3
3
  gem "rake"
4
4
 
5
5
  group :test do
6
- if RUBY_VERSION > "1.9"
7
- gem "ruby-debug19", :require => 'ruby-debug'
8
- else
9
- gem "ruby-debug"
10
- end
6
+ gem "ruby-debug19", :platform => :mri_19
11
7
 
12
8
  gem "rspec"
13
9
  gem "cucumber"
data/README.md CHANGED
@@ -1,36 +1,11 @@
1
1
  # StatsD
2
2
 
3
- A network daemon for aggregating statistics (counters and timers), rolling them up, then sending them to [graphite][graphite].
4
-
3
+ A very simple client to format and send metrics to a StatsD server.
5
4
 
6
5
  ### Installation
7
6
 
8
7
  gem install statsd
9
8
 
10
- ### Configuration
11
-
12
- Create config.yml to your liking.
13
-
14
- Example config.yml
15
- ---
16
- bind: 127.0.0.1
17
- port: 8125
18
-
19
- # Flush interval should be your finest retention in seconds
20
- flush_interval: 10
21
-
22
- # Graphite
23
- graphite_host: localhost
24
- graphite_port: 2003
25
-
26
-
27
-
28
- ### Server
29
- Run the server:
30
-
31
- Flush to Graphite (default):
32
- statsd -c config.yml
33
-
34
9
  ### Client
35
10
  In your client code:
36
11
 
@@ -47,48 +22,12 @@ In your client code:
47
22
  STATSD.timing('some_job_time', 20) # reporting job that took 20ms
48
23
  STATSD.timing('some_job_time', 20, 0.05) # reporting job that took 20ms with sampling (5% sampling)
49
24
 
50
- Concepts
51
- --------
52
-
53
- * *buckets*
54
- Each stat is in it's own "bucket". They are not predefined anywhere. Buckets can be named anything that will translate to Graphite (periods make folders, etc)
55
-
56
- * *values*
57
- Each stat will have a value. How it is interpreted depends on modifiers
58
-
59
- * *flush*
60
- After the flush interval timeout (default 10 seconds), stats are munged and sent over to Graphite.
61
-
62
- Counting
63
- --------
64
-
65
- gorets:1|c
66
-
67
- This is a simple counter. Add 1 to the "gorets" bucket. It stays in memory until the flush interval.
68
-
69
-
70
- Timing
71
- ------
72
-
73
- glork:320|ms
74
-
75
- The glork took 320ms to complete this time. StatsD figures out 90th percentile, average (mean), lower and upper bounds for the flush interval.
76
-
77
- Sampling
78
- --------
79
-
80
- gorets:1|c|@0.1
81
-
82
- Tells StatsD that this counter is being sent sampled ever 1/10th of the time.
83
-
84
-
85
25
  Guts
86
26
  ----
87
27
 
88
28
  * [UDP][udp]
89
29
  Client libraries use UDP to send information to the StatsD daemon.
90
30
 
91
- * [EventMachine][eventmachine]
92
31
  * [Graphite][graphite]
93
32
 
94
33
 
@@ -123,4 +62,3 @@ StatsD was inspired (heavily) by the project (of the same name) at Flickr. Here'
123
62
  [etsy]: http://www.etsy.com
124
63
  [blog post]: http://codeascraft.etsy.com/2011/02/15/measure-anything-measure-everything/
125
64
  [udp]: http://enwp.org/udp
126
- [eventmachine]: http://rubyeventmachine.com/
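For orientation, the client calls that survive in the README (`STATSD.timing(...)` above) and the `Statsd.create_instance` singleton exercised in the spec changes below sketch out roughly how the trimmed-down gem is used. The wiring below is a hypothetical illustration: the return value of `create_instance`, any arguments it takes, and the assignment to a `STATSD` constant are assumptions, not something this diff shows.

```ruby
# Hypothetical usage sketch -- only Statsd.create_instance (from the spec) and
# STATSD.timing (from the README) appear in this diff; the constant assignment
# and the assumption that create_instance returns the client are illustrative.
require 'statsd'

STATSD = Statsd.create_instance           # assumed to build and return the singleton client

STATSD.timing('some_job_time', 20)        # report a job that took 20ms
STATSD.timing('some_job_time', 20, 0.05)  # same report, sampled at 5%
```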
data/lib/statsd.rb CHANGED
@@ -1,10 +1,5 @@
1
1
  require 'socket'
2
2
  require 'resolv'
3
- require 'statsd/daemon'
4
- require 'statsd/graphite'
5
- require 'statsd/aggregator'
6
- require 'statsd/forwarder'
7
- require 'statsd/test'
8
3
 
9
4
  module Statsd
10
5
  # initialize singleton instance in an initializer
data/spec/statsd_spec.rb CHANGED
@@ -2,6 +2,13 @@ require 'spec_helper'
2
2
 
3
3
  describe Statsd do
4
4
  describe '#create_instance' do
5
+ before(:each) do
6
+ # Make sure prior test hasn't already invoked create_instance
7
+ if Statsd.class_variable_defined?(:@@instance)
8
+ Statsd.send(:remove_class_variable, :@@instance)
9
+ end
10
+ end
11
+
5
12
  after(:each) do
6
13
  Statsd.send(:remove_class_variable, :@@instance)
7
14
  end
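The new `before(:each)` hook leans on plain Ruby class-variable introspection to clear a singleton that an earlier example may have created. A self-contained sketch of the same reset pattern, using a made-up `Counter` class rather than anything from this gem:

```ruby
# Standalone illustration of the reset pattern; Counter is invented for the example.
class Counter
  def self.instance
    unless class_variable_defined?(:@@instance)
      class_variable_set(:@@instance, new)
    end
    class_variable_get(:@@instance)
  end
end

Counter.instance                                    # lazily creates the singleton
Counter.class_variable_defined?(:@@instance)        # => true
Counter.send(:remove_class_variable, :@@instance)   # drop state leaked between examples
Counter.class_variable_defined?(:@@instance)        # => false
```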
data/statsd.gemspec CHANGED
@@ -2,23 +2,19 @@
2
2
 
3
3
  Gem::Specification.new do |s|
4
4
  s.name = "lookout-statsd"
5
- s.version = "0.9.0"
5
+ s.version = "1.0.0"
6
6
  s.platform = Gem::Platform::RUBY
7
7
 
8
8
  s.authors = ['R. Tyler Croy', 'Andrew Coldham', 'Ben VandenBos']
9
9
  s.email = ['rtyler.croy@mylookout.com']
10
10
  s.homepage = "https://github.com/lookout/statsd"
11
11
 
12
- s.summary = "Ruby version of statsd."
13
- s.description = "A network daemon for aggregating statistics (counters and timers), rolling them up, then sending them to graphite."
12
+ s.summary = "Ruby statsd client."
13
+ s.description = "A simple ruby statsd client."
14
14
 
15
15
  s.required_rubygems_version = ">= 1.3.6"
16
16
 
17
- s.add_dependency "eventmachine", ">= 0.12.10", "< 0.15.2"
18
- s.add_dependency "erubis", ">= 2.6.6"
19
-
20
17
  s.files = `git ls-files`.split("\n")
21
18
  s.executables = `git ls-files`.split("\n").map{|f| f =~ /^bin\/(.*)/ ? $1 : nil}.compact
22
19
  s.require_path = 'lib'
23
20
  end
24
-
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: lookout-statsd
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.9.0
4
+ version: 1.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - R. Tyler Croy
@@ -10,73 +10,26 @@ authors:
10
10
  autorequire:
11
11
  bindir: bin
12
12
  cert_chain: []
13
- date: 2013-12-04 00:00:00.000000000 Z
14
- dependencies:
15
- - !ruby/object:Gem::Dependency
16
- name: eventmachine
17
- requirement: !ruby/object:Gem::Requirement
18
- requirements:
19
- - - ! '>='
20
- - !ruby/object:Gem::Version
21
- version: 0.12.10
22
- - - <
23
- - !ruby/object:Gem::Version
24
- version: 0.15.2
25
- type: :runtime
26
- prerelease: false
27
- version_requirements: !ruby/object:Gem::Requirement
28
- requirements:
29
- - - ! '>='
30
- - !ruby/object:Gem::Version
31
- version: 0.12.10
32
- - - <
33
- - !ruby/object:Gem::Version
34
- version: 0.15.2
35
- - !ruby/object:Gem::Dependency
36
- name: erubis
37
- requirement: !ruby/object:Gem::Requirement
38
- requirements:
39
- - - ! '>='
40
- - !ruby/object:Gem::Version
41
- version: 2.6.6
42
- type: :runtime
43
- prerelease: false
44
- version_requirements: !ruby/object:Gem::Requirement
45
- requirements:
46
- - - ! '>='
47
- - !ruby/object:Gem::Version
48
- version: 2.6.6
49
- description: A network daemon for aggregating statistics (counters and timers), rolling
50
- them up, then sending them to graphite.
13
+ date: 2015-07-09 00:00:00.000000000 Z
14
+ dependencies: []
15
+ description: A simple ruby statsd client.
51
16
  email:
52
17
  - rtyler.croy@mylookout.com
53
- executables:
54
- - statsd
18
+ executables: []
55
19
  extensions: []
56
20
  extra_rdoc_files: []
57
21
  files:
58
- - .gitignore
22
+ - ".gitignore"
59
23
  - Gemfile
60
24
  - Guardfile
61
25
  - README.md
62
26
  - Rakefile
63
- - bin/statsd
64
- - config.yml
65
27
  - lib/statsd.rb
66
- - lib/statsd/aggregator.rb
67
- - lib/statsd/daemon.rb
68
- - lib/statsd/echos.rb
69
- - lib/statsd/forwarder.rb
70
- - lib/statsd/graphite.rb
71
28
  - lib/statsd/test.rb
72
29
  - netcat-example.sh
73
30
  - spec/spec_helper.rb
74
- - spec/statsd/aggregator_spec.rb
75
- - spec/statsd/daemon_spec.rb
76
- - spec/statsd/forwarder_spec.rb
77
31
  - spec/statsd/rails/action_timer_filter_spec.rb
78
32
  - spec/statsd_spec.rb
79
- - stats.rb
80
33
  - statsd.gemspec
81
34
  homepage: https://github.com/lookout/statsd
82
35
  licenses: []
@@ -87,18 +40,18 @@ require_paths:
87
40
  - lib
88
41
  required_ruby_version: !ruby/object:Gem::Requirement
89
42
  requirements:
90
- - - ! '>='
43
+ - - ">="
91
44
  - !ruby/object:Gem::Version
92
45
  version: '0'
93
46
  required_rubygems_version: !ruby/object:Gem::Requirement
94
47
  requirements:
95
- - - ! '>='
48
+ - - ">="
96
49
  - !ruby/object:Gem::Version
97
50
  version: 1.3.6
98
51
  requirements: []
99
52
  rubyforge_project:
100
- rubygems_version: 2.1.11
53
+ rubygems_version: 2.4.5
101
54
  signing_key:
102
55
  specification_version: 4
103
- summary: Ruby version of statsd.
56
+ summary: Ruby statsd client.
104
57
  test_files: []
data/bin/statsd DELETED
@@ -1,46 +0,0 @@
1
- #!/usr/bin/env ruby
2
-
3
- $LOAD_PATH.unshift File.expand_path(File.dirname(__FILE__) + '/../lib')
4
- require 'yaml'
5
- require 'optparse'
6
- require 'rubygems'
7
- require 'pry'
8
-
9
- begin
10
- ORIGINAL_ARGV = ARGV.dup
11
- options = {}
12
-
13
- parser = OptionParser.new do |opts|
14
- opts.banner = "Usage: statsd [options]"
15
-
16
- opts.separator ""
17
- opts.separator "Options:"
18
-
19
- opts.on("-cCONFIG", "--config-file CONFIG", "Configuration file") do |x|
20
- options[:config_file] = x
21
- end
22
-
23
- opts.on("-h", "--help", "Show this message") do
24
- puts opts
25
- exit
26
- end
27
- end
28
-
29
- parser.parse!
30
-
31
- # dispatch
32
- if !options[:config_file]
33
- puts parser.help
34
- else
35
- require 'statsd'
36
- Statsd::Daemon.new.run(options)
37
- end
38
- rescue Exception => e
39
- if e.instance_of?(SystemExit)
40
- raise
41
- else
42
- puts 'Uncaught exception'
43
- puts e.message
44
- puts e.backtrace.join("\n")
45
- end
46
- end
data/config.yml DELETED
@@ -1,25 +0,0 @@
1
- ---
2
- # Accept incoming Statsd UDP messages.
3
- bind: 127.0.0.1
4
- port: 8125
5
-
6
- # Flush interval should be your finest retention in seconds
7
- flush_interval: 5
8
-
9
- # Graphite
10
- graphite_host: localhost
11
- graphite_port: 2003
12
-
13
- # Forwarding sends copies of incoming UDP statsd messages to other
14
- # destinations.
15
- # This allows for bundling of many senders into one UDP flow/stream, or
16
- # directs stats to redundant carbon caches.
17
- #
18
- forwarding: true
19
- forwarding_socket_lifetime: 10
20
- # Example destinations:
21
- forwarding_destinations:
22
- - hostname: localhost
23
- port: 9000
24
- - hostname: 127.0.0.1
25
- port: 9001
data/lib/statsd/aggregator.rb DELETED
@@ -1,53 +0,0 @@
1
- module Statsd
2
- module Aggregator
3
- Version = '0.5.5'
4
-
5
- FLUSH_INTERVAL = 10
6
- COUNTERS = {}
7
- TIMERS = {}
8
- GAUGES = {}
9
-
10
- def post_init
11
- puts "statsd server started!"
12
- end
13
-
14
- def self.get_and_clear_stats!
15
- counters = COUNTERS.dup
16
- timers = TIMERS.dup
17
- gauges = GAUGES.dup
18
- COUNTERS.clear
19
- TIMERS.clear
20
- GAUGES.clear
21
- [counters,timers,gauges]
22
- end
23
-
24
- def self.receive_data(msg)
25
- msg.split("\n").each do |row|
26
- bits = row.split(':')
27
- key = bits.shift.gsub(/\s+/, '_').gsub(/\//, '-').gsub(/[^a-zA-Z_\-0-9\.]/, '')
28
- bits.each do |record|
29
- sample_rate = 1
30
- fields = record.split("|")
31
- if fields.nil? || fields.count < 2
32
- next
33
- end
34
- if (fields[1].strip == "ms")
35
- TIMERS[key] ||= []
36
- TIMERS[key].push(fields[0].to_i)
37
- elsif (fields[1].strip == "c")
38
- if (fields[2] && fields[2].match(/^@([\d\.]+)/))
39
- sample_rate = fields[2].match(/^@([\d\.]+)/)[1]
40
- end
41
- COUNTERS[key] ||= 0
42
- COUNTERS[key] += (fields[0].to_i || 1) * (1.0 / sample_rate.to_f)
43
- elsif (fields[1].strip == "g")
44
- GAUGES[key] ||= (fields[0].to_i || 0)
45
- else
46
- puts "Invalid statistic #{fields.inspect} received; ignoring"
47
- end
48
- end
49
- end
50
- end
51
-
52
- end
53
- end
data/lib/statsd/daemon.rb DELETED
@@ -1,78 +0,0 @@
1
- require 'eventmachine'
2
- require 'yaml'
3
- require 'erb'
4
-
5
- module Statsd
6
- class MessageDispatchDaemon < EventMachine::Connection
7
- # Methods to be called when a statsd message comes in.
8
- @@receivers = []
9
- # Register a Module implementing an EventMachine::Connection -like
10
- # interface.
11
- #
12
- # receive_data methods on all registered modules will get called, but for
13
- # any other EM::Connection methods, the last registered module/method will
14
- # take precedence.
15
- def self.register_receiver(mod)
16
- begin
17
- method = mod.method('receive_data')
18
- @@receivers << method unless @@receivers.include?(method)
19
- rescue NameError
20
- raise ArgumentError.new("The passed module #{mod} doesn't implement a receive_data method.")
21
- end
22
- include mod
23
- end
24
- def self.receivers=(list)
25
- raise ArgumentError unless list.is_a?(Array)
26
- @@receivers = list
27
- end
28
- def self.receivers
29
- @@receivers
30
- end
31
- def receive_data(msg)
32
- @@receivers.each do |method|
33
- method.call(msg)
34
- end
35
- end
36
- end
37
- class Daemon
38
- def run(options)
39
- config = if options[:config] and options[:config].is_a?(Hash)
40
- options[:config]
41
- elsif options[:config_file] and options[:config_file].is_a?(String)
42
- YAML::load(ERB.new(IO.read(options[:config_file])).result)
43
- end
44
-
45
- EventMachine::run do
46
- ## statsd->graphite aggregation
47
- if config['graphite_host']
48
- MessageDispatchDaemon.register_receiver(Statsd::Aggregator)
49
- EventMachine::add_periodic_timer(config['flush_interval']) do
50
- counters,timers = Statsd::Aggregator.get_and_clear_stats!
51
- EventMachine.connect config['graphite_host'], config['graphite_port'], Statsd::Graphite do |conn|
52
- conn.counters = counters
53
- conn.timers = timers
54
- conn.flush_interval = config['flush_interval']
55
- conn.flush_stats
56
- end
57
- end
58
- ##
59
-
60
- ## statsd->statsd data relay
61
- if config['forwarding']
62
- Statsd::Forwarder.set_destinations(config['forwarding_destinations'])
63
- MessageDispatchDaemon.register_receiver(Statsd::Forwarder)
64
-
65
- Statsd::Forwarder.build_fresh_sockets
66
- EventMachine::add_periodic_timer(config['forwarding_socket_lifetime']) do
67
- Statsd::Forwarder.build_fresh_sockets
68
- end
69
- end
70
- ##
71
-
72
- puts "Going to listen on #{config['bind']}:#{config['port']}"
73
- EventMachine::open_datagram_socket(config['bind'], config['port'], MessageDispatchDaemon)
74
- end
75
- end
76
- end
77
- end
78
- end
data/lib/statsd/echos.rb DELETED
@@ -1,21 +0,0 @@
1
- #!/usr/bin/env ruby
2
- #
3
-
4
- require 'rubygems'
5
- require 'eventmachine'
6
-
7
- module EchoServer
8
- def post_init
9
- puts "-- someone connected to the server!"
10
- end
11
-
12
- def receive_data data
13
- puts data
14
- send_data ">>> you sent: #{data}"
15
- end
16
- end
17
-
18
- EventMachine::run {
19
- EventMachine::start_server "127.0.0.1", 2003, EchoServer
20
- puts 'running dummy graphite echo server on 2003'
21
- }
data/lib/statsd/forwarder.rb DELETED
@@ -1,52 +0,0 @@
1
- require 'socket'
2
-
3
- module Statsd
4
- module Forwarder
5
- @@sockets = {}
6
- @@destinations = []
7
-
8
- def self.sockets; @@sockets; end
9
- def self.sockets=(hash)
10
- raise ArgumentError unless hash.is_a?(Hash)
11
- @@sockets = hash
12
- end
13
- def self.destinations; @@destinations; end
14
- def self.destinations=(list)
15
- raise ArgumentError unless list.is_a?(Array)
16
- @@destinations = list
17
- end
18
-
19
- def self.receive_data(msg)
20
- # Broadcast the incoming message to all the forwarding destinations.
21
- @@sockets.each do |destination, socket|
22
- begin
23
- socket.send(msg, 0)
24
- rescue SocketError, Errno::ECONNREFUSED => e
25
- puts "ERROR: Couldn't send message to #{destination}. Stopping this output.(#{e.inspect})"
26
- @@sockets.delete(destination)
27
- end
28
- end
29
- end
30
- def self.build_fresh_sockets
31
- # Reset destinations to those destinations for which we could
32
- # actually get a socket going.
33
- @@sockets.clear
34
- @@destinations = @@destinations.select do |destination|
35
- begin
36
- s = UDPSocket.new(Socket::AF_INET)
37
- s.connect destination['hostname'], destination['port']
38
- @@sockets[destination] = s
39
- true
40
- rescue SocketError => e
41
- puts "ERROR: Couldn't create a socket to #{destination['hostname']}/#{destination['port']}. Pruning destination from Forwarder. (#{e.inspect})"
42
- false
43
- end
44
- end
45
- end
46
- def self.set_destinations(destinations)
47
- raise ArgumentError unless destinations.is_a?(Array)
48
- raise ArgumentError unless destinations.map { |d| d.keys }.flatten.uniq.sort == ['hostname', 'port']
49
- @@destinations = destinations
50
- end
51
- end
52
- end
data/lib/statsd/graphite.rb DELETED
@@ -1,70 +0,0 @@
1
- require 'benchmark'
2
- require 'eventmachine'
3
-
4
-
5
- module Statsd
6
- class Graphite < EM::Connection
7
- attr_accessor :counters, :timers, :flush_interval
8
-
9
- def flush_stats
10
- puts "#{Time.now} Flushing #{counters.count} counters and #{timers.count} timers to Graphite."
11
-
12
- stat_string = ''
13
-
14
- ts = Time.now.to_i
15
- num_stats = 0
16
-
17
- # store counters
18
- counters.each_pair do |key,value|
19
- message = "#{key} #{value} #{ts}\n"
20
- stat_string += message
21
- counters[key] = 0
22
-
23
- num_stats += 1
24
- end
25
-
26
- # store timers
27
- timers.each_pair do |key, values|
28
- if (values.length > 0)
29
- pct_threshold = 90
30
- values.sort!
31
- count = values.count
32
- min = values.first
33
- max = values.last
34
-
35
- mean = min
36
- max_at_threshold = max
37
-
38
- if (count > 1)
39
- # average all the timing data
40
- sum = values.inject( 0 ) { |s,x| s+x }
41
- mean = sum / values.count
42
-
43
- # strip off the top 100-threshold
44
- threshold_index = (((100 - pct_threshold) / 100.0) * count).round
45
- values = values[0..-threshold_index]
46
- max_at_threshold = values.last
47
- end
48
-
49
- message = ""
50
- message += "#{key}.mean #{mean} #{ts}\n"
51
- message += "#{key}.upper #{max} #{ts}\n"
52
- message += "#{key}.upper_#{pct_threshold} #{max_at_threshold} #{ts}\n"
53
- message += "#{key}.lower #{min} #{ts}\n"
54
- message += "#{key}.count #{count} #{ts}\n"
55
- stat_string += message
56
-
57
- timers[key] = []
58
-
59
- num_stats += 1
60
- end
61
- end
62
-
63
- stat_string += "statsd.numStats #{num_stats} #{ts}\n"
64
-
65
- # send to graphite
66
- send_data stat_string
67
- close_connection_after_writing
68
- end
69
- end
70
- end
data/spec/statsd/aggregator_spec.rb DELETED
@@ -1,15 +0,0 @@
1
- require 'spec_helper'
2
-
3
- describe Statsd::Aggregator do
4
- #include Statsd::Aggregator
5
-
6
- describe :receive_data do
7
- it 'should not vomit on bad data' do
8
- bad_data = "dev.rwygand.app.flexd.exception.no action responded to index. actions: authenticate, authentication_request, authorization, bubble_stacktrace?, decode_credentials, encode_credentials, not_found, and user_name_and_password:1|c"
9
-
10
- expect {
11
- Statsd::Aggregator.receive_data(bad_data)
12
- }.not_to raise_error
13
- end
14
- end
15
- end
data/spec/statsd/daemon_spec.rb DELETED
@@ -1,47 +0,0 @@
1
- require 'spec_helper'
2
-
3
- describe Statsd::Daemon do
4
- describe :new do
5
- before(:each) do
6
- EventMachine.should_receive(:run) { |&block| block.call }
7
- EventMachine.should_receive(:open_datagram_socket).and_return true
8
- EventMachine.should_receive(:add_periodic_timer).at_least(:once) { |delay, &block| block.call }
9
- EventMachine.should_receive(:connect).and_return true
10
- Statsd::MessageDispatchDaemon.receivers = []
11
- end
12
-
13
- it 'Should extend MessageDispatchDaemon with an Aggregator if "carbon_cache" is configured' do
14
- config = {
15
- "bind"=>"127.0.0.1",
16
- "port"=>8125,
17
- "flush_interval"=>5,
18
- "graphite_host"=>"localhost",
19
- "graphite_port"=>2003,
20
- "forwarding"=>false,
21
- }
22
-
23
- Statsd::Daemon.new.run(:config => config)
24
- Statsd::MessageDispatchDaemon.receivers.should eq([Statsd::Aggregator.method(:receive_data)])
25
- end
26
-
27
- it 'Should extend MessageDispatchDaemon with an Aggregator and Forwarder if "carbon_cache" is configured and forwarding is enabled' do
28
- config = {
29
- "bind"=>"127.0.0.1",
30
- "port"=>8125,
31
- "flush_interval"=>5,
32
- "graphite_host"=>"localhost",
33
- "graphite_port"=>2003,
34
- "forwarding"=>true,
35
- "forwarding_destinations"=>
36
- [
37
- {"port"=>9000, "hostname"=>"localhost"},
38
- {"port"=>9001, "hostname"=>"127.0.0.1"}
39
- ]
40
- }
41
-
42
- Statsd::Daemon.new.run(:config => config)
43
- Statsd::MessageDispatchDaemon.receivers.should eq([Statsd::Aggregator.method(:receive_data), Statsd::Forwarder.method(:receive_data)])
44
- end
45
- end
46
- end
47
-
data/spec/statsd/forwarder_spec.rb DELETED
@@ -1,51 +0,0 @@
1
- require 'spec_helper'
2
- require 'timeout'
3
-
4
- describe Statsd::Forwarder do
5
- let(:destinations) do
6
- [ {'hostname'=>'localhost', 'port'=>9000},
7
- {'hostname'=>'127.0.0.1', 'port'=>9001} ]
8
- end
9
- before(:each) do
10
- Statsd::Forwarder.sockets = {}
11
- expect { Statsd::Forwarder.set_destinations(destinations) }.not_to raise_error
12
- end
13
- it 'Should accept a list of destinations to forward to.' do
14
- Statsd::Forwarder.destinations.should eq(destinations)
15
- end
16
- it 'Should create sockets to the destinations with #build_fresh_sockets' do
17
- Statsd::Forwarder.sockets.should eq({})
18
- Statsd::Forwarder.build_fresh_sockets
19
- Statsd::Forwarder.sockets.should be_a_kind_of(Hash)
20
- Statsd::Forwarder.sockets.keys.length.should eq(destinations.length)
21
- Statsd::Forwarder.sockets.values.each { |socket| socket.should be_a_kind_of(UDPSocket) }
22
- end
23
- describe 'Replicating incoming messages' do
24
- let(:socket_one) do
25
- u = UDPSocket.new
26
- u.bind('127.0.0.1', 0)
27
- #let(:socket_one_port) { u.local_address.ip_port }
28
- u
29
- end
30
- let(:socket_two) do
31
- u = UDPSocket.new
32
- u.bind('127.0.0.1', 0)
33
- u
34
- end
35
- let(:test_stat) { "app.thing.speed:10|ms\n" }
36
- it 'Registers two local receivers, Gets an incoming message, both receivers get it' do
37
- Statsd::Forwarder.set_destinations([{'hostname' => '127.0.0.1', 'port' => socket_one.addr[1] },
38
- {'hostname' => '127.0.0.1', 'port' => socket_two.addr[1] }])
39
- Statsd::Forwarder.build_fresh_sockets
40
- Statsd::Forwarder.receive_data(test_stat)
41
-
42
- Timeout.timeout(3) do
43
- msg, _, _ = socket_one.recv(4_096)
44
- msg.should eq(test_stat)
45
-
46
- msg, _, _ = socket_two.recv(4_096)
47
- msg.should eq(test_stat)
48
- end
49
- end
50
- end
51
- end
data/stats.rb DELETED
@@ -1,24 +0,0 @@
1
- require 'eventmachine'
2
- require 'statsd'
3
-
4
- require 'yaml'
5
- require 'erb'
6
-
7
- ROOT = File.expand_path(File.dirname(__FILE__))
8
- APP_CONFIG = YAML::load(ERB.new(IO.read(File.join(ROOT,'config.yml'))).result)
9
-
10
- # Start the server
11
- EventMachine::run do
12
- EventMachine::open_datagram_socket('127.0.0.1', 8125, Statsd::Aggregator)
13
- EventMachine::add_periodic_timer(APP_CONFIG['flush_interval']) do
14
- counters,timers = Statsd::Aggregator.get_and_clear_stats!
15
-
16
- # Graphite
17
- EventMachine.connect APP_CONFIG['graphite_host'], APP_CONFIG['graphite_port'], Statsd::Graphite do |conn|
18
- conn.counters = counters
19
- conn.timers = timers
20
- conn.flush_interval = 10
21
- conn.flush_stats
22
- end
23
- end
24
- end