pd_metrics 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/.gitignore ADDED
@@ -0,0 +1,17 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in pd_metrics.gemspec
+ gemspec
data/README.md ADDED
@@ -0,0 +1,44 @@
+ # PdMetrics
+
+ Library to send metrics to Logstash, which then delivers them to PagerDuty's
+ metric systems. This is pretty much only useful if you're a PagerDuty employee.
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+     gem 'pd_metrics'
+
+ And then execute:
+
+     $ bundle
+
+ Or install it yourself as:
+
+     $ gem install pd_metrics
+
+ ## Usage
+
+     # Captures timing metrics for a block of Ruby code.
+     PdMetrics.time('api', 'receive_email', account: 'Netflix') do
+       # process the email
+     end
+
+     # Captures an increase/decrease in a counter.
+     PdMetrics.incr('emails', 'bytes_received', email_bytes.size, account: 'Netflix')
+
+     # Captures the current value for a metric.
+     PdMetrics.gauge('ruby', 'live_objects', ObjectSpace.live_objects)
+
+     # Captures statistical metrics for a set of values within a given timeframe.
+     # This is very similar to the time method, but it's generalized for use with
+     # arbitrary values.
+     PdMetrics.histogram('api', 'payload_size', payload.size, account: 'Netflix')
+
+ ## Contributing
+
+ 1. Fork it
+ 2. Create your feature branch (`git checkout -b my-new-feature`)
+ 3. Commit your changes (`git commit -am 'Add some feature'`)
+ 4. Push to the branch (`git push origin my-new-feature`)
+ 5. Create a new Pull Request
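
The README does not show how delivery is configured. Going by the class attributes defined in lib/pd_metrics.rb below (settings defaults to {host: '127.0.0.1', port: 5959} and logger defaults to a null logger), a minimal sketch of overriding both before use might look like the following; the initializer path and the Logstash hostname are illustrative assumptions, not part of the package.

    # Hypothetical app initializer (e.g. config/initializers/pd_metrics.rb); not shipped with the gem.
    require 'pd_metrics'

    # Redirect UDP delivery away from the default 127.0.0.1:5959 Logstash listener.
    PdMetrics.settings = { host: 'logstash.internal.example', port: 5959 }

    # Replace the default null logger so debug/warn output from the library is visible.
    PdMetrics.logger = Logger.new($stdout)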
data/Rakefile ADDED
@@ -0,0 +1,10 @@
+ require "bundler/gem_tasks"
+ require "rake/testtask"
+
+ Rake::TestTask.new do |t|
+   t.libs << "test"
+   t.test_files = FileList['test/*_test.rb']
+   t.verbose = true
+ end
+
+ task :default => :test
data/lib/pd_metrics.rb ADDED
@@ -0,0 +1,236 @@
+ require 'socket'
+ require 'logger'
+
+ require 'active_support/core_ext/class/attribute_accessors'
+
+ class PdMetrics
+   cattr_accessor :settings
+   self.settings ||= {host: '127.0.0.1', port: 5959}
+
+   cattr_accessor :logger
+   self.logger ||= Logger.new('/dev/null')
+
+   # Logs an event to the metric backend. In general, you can log any key value pairs.
+   #
+   #   PdMetrics.send_event('api', account: 'Netflix', wait_delta: 0.01, run_delta: 0.1)
+   #
+   # This will result in the following line being logged in SumoLogic. No data will be sent to DataDog.
+   #
+   #   api #account=Netflix|#run_delta=0.1|#wait_delta=0.01|
+   #
+   # In order to support aggregated graphs in DataDog, you'll need to mark the
+   # type of any numerical metrics you want aggregated.
+   #
+   #   PdMetrics.send_event('api', wait_delta: (0.01).histogram, run_delta: (0.1).histogram)
+   #
+   # This extra bit of detail is needed to let DataDog know how to aggregate multiple events in a single timeslice.
+   #
+   #   counter - adds together multiple data points. Use this for things like visits, errors, etc.
+   #   gauge - takes the last value. Use this for things like free memory, connections to database, etc.
+   #   histogram - derives count, avg, median, max, min, 95th percentile from a single value. Use this for things like latency, bytes written, etc.
+   #
+   # Note that when DataDog metrics are supplied, any non-metric data is passed
+   # to DataDog as tags. Depending on how many tags you have, this can be
+   # counterproductive in DataDog. To have additional data logged only to
+   # SumoLogic, pass it in the additional_data parameter.
+   #
+   #   PdMetrics.send_event('api', {wait_delta: (0.01).histogram, run_delta: (0.1).histogram}, account: 'Netflix')
+   #
+   def self.send_event(namespace, metrics_and_tags = {}, additional_data = {})
+     logger.debug { "send_event #{namespace} #{metrics_and_tags.inspect} #{additional_data.inspect}" }
+     metrics_and_tags ||= {}
+     additional_data ||= {}
+
+     send_datadog_format(namespace, metrics_and_tags)
+     send_sumologic_format(namespace, metrics_and_tags, additional_data)
+   end
+
+   # Captures timing metrics for a block of Ruby code.
+   #
+   #   PdMetrics.time('api', 'receive_email', account: 'Netflix') do
+   #     # process the email
+   #   end
+   #
+   # Assuming the request took 2 seconds to process, the following log message will be written in SumoLogic.
+   #
+   #   api #account=Netflix|#receive_email=2.0|#failed=false|
+   #
+   # Additionally, the following histogram metrics will be captured in DataDog
+   #
+   #   api.receive_email.count
+   #   api.receive_email.avg
+   #   api.receive_email.median
+   #   api.receive_email.max
+   #   api.receive_email.95percentile
+   #
+   # In addition to capturing latency of the request, the success or failure of
+   # the block of code is captured as well. It is considered failed if an
+   # exception is thrown.
+   #
+   def self.time(namespace, key, tags = {}, additional_data = {})
+     failed = false
+     start = Time.now
+     yield
+   rescue
+     failed = true
+     raise
+   ensure
+     timing_data = tags || {}
+     timing_data[key] = (Time.now - start).histogram
+     timing_data['failed'] = failed
+     send_event(namespace, timing_data)
+   end
+
+   # Captures an increase/decrease in a counter.
+   #
+   # You can use this to capture metrics that should be added together when viewed on a graph.
+   #
+   #   PdMetrics.incr('logins', 'success')
+   #   PdMetrics.incr('emails', 'bytes_received', email_bytes.size, account: 'Netflix')
+   #
+   # That will produce the following lines in SumoLogic.
+   #
+   #   logins #success=1
+   #   emails #account=Netflix|#bytes_received=1234|
+   #
+   # Additionally, the following metrics will be defined in DataDog
+   #
+   #   logins.success
+   #   emails.bytes_received
+   #
+   def self.incr(namespace, key, increment_by = 1, tags = {}, additional_data = {})
+     incr_data = tags || {}
+     incr_data[key] = increment_by.counter
+     send_event(namespace, incr_data, additional_data)
+   end
+
+   # Captures the current value for a metric.
+   #
+   # Unlike a counter, this value cannot be combined with itself in a meaningful
+   # way, so only the last reported value within a certain sampling interval
+   # (normally every 10 seconds) is recorded in DataDog.
+   #
+   # You can use this method to capture metrics that change over time, like the
+   # amount of memory used. Usually, this sampling occurs at a regular frequency
+   # via a timer.
+   #
+   #   PdMetrics.gauge('ruby', 'live_objects', ObjectSpace.live_objects)
+   #
+   # The following line will be printed in SumoLogic for each call to gauge.
+   #
+   #   ruby #live_objects=30873|
+   #
+   # Additionally, the following metric will be available in DataDog.
+   #
+   #   ruby.live_objects
+   #
+   def self.gauge(namespace, key, value, tags = {}, additional_data = {})
+     gauge_data = tags || {}
+     gauge_data[key] = value.gauge
+     send_event(namespace, gauge_data, additional_data)
+   end
+
+   # Captures statistical metrics for a set of values within a given timeframe.
+   # This is very similar to the time method, but it's generalized for use with
+   # arbitrary values.
+   #
+   # An example usage would be calculating the size of JSON payloads received by
+   # an API. You could use a counter, but that wouldn't tease out what the
+   # average and median payload sizes are.
+   #
+   #   PdMetrics.histogram('api', 'payload_size', payload.size, account: 'Netflix')
+   #
+   # The following lines will be printed in SumoLogic for every payload.
+   #
+   #   api #account=Netflix|#payload_size=1234
+   #   api #account=Netflix|#payload_size=0
+   #
+   # Additionally, DataDog will have the following metrics available. Note that
+   # these metrics are captured every 10 seconds, so they likely represent
+   # multiple requests within that time window.
+   #
+   #   api.payload_size.count
+   #   api.payload_size.avg
+   #   api.payload_size.median
+   #   api.payload_size.max
+   #   api.payload_size.95percentile
+   #
+   def self.histogram(namespace, key, value, tags = {}, additional_data = {})
+     histogram_data = tags || {}
+     histogram_data[key] = value.histogram
+     send_event(namespace, histogram_data, additional_data)
+   end
+
+   class NumericMetric
+     attr_reader :value
+
+     def initialize(value)
+       @value = value
+     end
+
+     def to_s
+       @value.to_s
+     end
+   end
+
+   class Gauge < NumericMetric
+     def statsd_format
+       "#{value}|g"
+     end
+   end
+
+   class Counter < NumericMetric
+     def statsd_format
+       "#{value}|c"
+     end
+   end
+
+   class Histogram < NumericMetric
+     def statsd_format
+       "#{value}|h"
+     end
+   end
+
+   private
+
+   def self.send_datadog_format(namespace, metrics_and_tags)
+     metrics, tag_pairs = metrics_and_tags.partition {|key, value| value.is_a?(NumericMetric) }
+     tags = tag_pairs.map {|d| "#{d[0]}:#{d[1]}" }.join(',')
+     metrics.each do |name, value|
+       msg = "#{namespace}.#{name}:#{value.statsd_format}"
+       msg += "|##{tags}" unless tags.empty?
+       msg += "\n"
+       write_to_socket(msg)
+     end
+   end
+
+   def self.send_sumologic_format(namespace, metrics_and_tags, additional_data)
+     sumologic_data = metrics_and_tags.merge(additional_data).sort {|a,b| a[0].to_s <=> b[0].to_s }
+     msg = "#{namespace} #{sumologic_data.map {|e| "##{e[0]}=#{e[1]}|" }.join}\n"
+     write_to_socket(msg)
+   end
+
+   def self.write_to_socket(msg)
+     @delivery_socket ||= Socket.new(Socket::PF_INET, Socket::SOCK_DGRAM)
+     @destination_addr ||= Socket.pack_sockaddr_in(settings[:port], settings[:host])
+     @delivery_socket.send(msg, 0, @destination_addr)
+   rescue => e
+     logger.warn { "Ignoring PdMetrics exception: #{e.class} #{e.message}" }
+   end
+
+   module NumericExtensions
+     def gauge
+       PdMetrics::Gauge.new(self)
+     end
+
+     def counter
+       PdMetrics::Counter.new(self)
+     end
+
+     def histogram
+       PdMetrics::Histogram.new(self)
+     end
+   end
+
+   Numeric.send(:include, NumericExtensions)
+ end
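
For orientation, send_event above emits two UDP messages per call: a statsd-style line for DataDog (non-metric pairs become tags) and a sorted, pipe-delimited key=value line for SumoLogic. The sketch below is illustrative only; it stubs write_to_socket to capture the outgoing strings (the same technique test/pd_metrics_test.rb uses further down) instead of sending UDP, and the captured lines match the expectations in that test file.

    require 'pd_metrics'

    # Capture outgoing messages rather than writing to the UDP socket.
    captured = []
    PdMetrics.define_singleton_method(:write_to_socket) { |msg| captured << msg }

    PdMetrics.send_event('api', call: 1.counter, account: 'foo', wait_delta: (0.01).histogram)

    captured.each { |line| print line }
    # DataDog (statsd-style) lines:
    #   api.call:1|c|#account:foo
    #   api.wait_delta:0.01|h|#account:foo
    # SumoLogic line (keys sorted, pipe-terminated):
    #   api #account=foo|#call=1|#wait_delta=0.01|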
data/lib/pd_metrics/version.rb ADDED
@@ -0,0 +1,3 @@
+ class PdMetrics
+   VERSION = "1.0.0"
+ end
data/pd_metrics.gemspec ADDED
@@ -0,0 +1,25 @@
+ # -*- encoding: utf-8 -*-
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'pd_metrics/version'
+
+ Gem::Specification.new do |gem|
+   gem.name          = "pd_metrics"
+   gem.version       = PdMetrics::VERSION
+   gem.authors       = ["Doug Barth"]
+   gem.email         = ["doug@pagerduty.com"]
+   gem.description   = %q{Library to send metrics to Logstash, which then delivers them to PagerDuty's metric systems}
+   gem.summary       = %q{PagerDuty's metrics integration library}
+   gem.homepage      = ""
+
+   gem.files         = `git ls-files`.split($/)
+   gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
+   gem.require_paths = ["lib"]
+
+   gem.add_dependency 'activesupport', '~> 3.2'
+
+   gem.add_development_dependency 'timecop'
+   gem.add_development_dependency 'mocha'
+   gem.add_development_dependency 'minitest'
+ end
data/test/pd_metrics_test.rb ADDED
@@ -0,0 +1,102 @@
+ require 'test_helper'
+ require 'pd_metrics'
+
+ class PdMetrics
+   cattr_accessor :writes
+   self.writes = []
+
+   @original_write_to_socket = method(:write_to_socket)
+   def self.write_to_socket(msg)
+     writes << msg
+     @original_write_to_socket.call(msg)
+   end
+ end
+
+ class PdMetricsTest < ActiveSupport::TestCase
+   def setup
+     PdMetrics.writes = []
+   end
+
+   def teardown
+     Timecop.return
+   end
+
+   test "writes events in the SumoLogic format" do
+     PdMetrics.send_event('api', call: 1.counter, account: 'foo', wait_delta: (0.01).histogram)
+     assert_written_to_socket "api #account=foo|#call=1|#wait_delta=0.01|\n"
+   end
+
+   test "writes events in DataDog format" do
+     PdMetrics.send_event('api', call: 1.counter, account: 'foo', wait_delta: (0.01).histogram)
+     assert_written_to_socket "api.call:1|c|#account:foo\n"
+     assert_written_to_socket "api.wait_delta:0.01|h|#account:foo\n"
+   end
+
+   test "handles events with no DataDog compatible data" do
+     PdMetrics.send_event('api', account: 'foo', class: 'Blah')
+     assert_written_to_socket "api #account=foo|#class=Blah|\n"
+   end
+
+   test "handles multiple non-DataDog event data pairs in DataDog messages" do
+     PdMetrics.send_event('api', call: 1.counter, account: 'foo', class: 'Blah')
+     assert_written_to_socket "api.call:1|c|#account:foo,class:Blah\n"
+   end
+
+   test "ignores exceptions when writing to socket" do
+     Socket.any_instance.expects(:send).at_least_once.raises(RuntimeError, 'KABOOM')
+     PdMetrics.send_event('api', call: 1.counter, account: 'foo', class: 'Blah')
+   end
+
+   test "sends additional_data only to SumoLogic" do
+     PdMetrics.send_event('api', {call: 1.counter, account: 'foo'}, txn_id: 'abc123')
+     assert_written_to_socket "api.call:1|c|#account:foo\n"
+     assert_written_to_socket "api #account=foo|#call=1|#txn_id=abc123|\n"
+   end
+
+   test "time captures latency of a block" do
+     Timecop.freeze do
+       PdMetrics.time('api', 'create_event', account: 'foo') do
+         Timecop.freeze(2)
+       end
+     end
+     assert_written_to_socket "api #account=foo|#create_event=2.0|#failed=false|\n"
+     assert_written_to_socket "api.create_event:2.0|h|#account:foo,failed:false\n"
+   end
+
+   class ExpectedError < RuntimeError; end
+
+   test "time captures failures if exception is thrown" do
+     assert_raise(ExpectedError) do
+       Timecop.freeze do
+         PdMetrics.time('api', 'create_event', account: 'foo') do
+           Timecop.freeze(2)
+           raise ExpectedError
+         end
+       end
+     end
+     assert_written_to_socket "api #account=foo|#create_event=2.0|#failed=true|\n"
+     assert_written_to_socket "api.create_event:2.0|h|#account:foo,failed:true\n"
+   end
+
+   test "incr is a convenience method for counters" do
+     PdMetrics.incr('api', 'requests')
+     assert_written_to_socket "api #requests=1|\n"
+     assert_written_to_socket "api.requests:1|c\n"
+   end
+
+   test "gauge is a convenience method for gauges" do
+     PdMetrics.gauge('ruby', 'live_objects', 30873)
+     assert_written_to_socket "ruby #live_objects=30873|\n"
+     assert_written_to_socket "ruby.live_objects:30873|g\n"
+   end
+
+   test "histogram is a convenience method for statistical metrics for a set of data" do
+     PdMetrics.histogram('api', 'payload_size', 1234, account: "Netflix")
+     assert_written_to_socket "api #account=Netflix|#payload_size=1234|\n"
+     assert_written_to_socket "api.payload_size:1234|h|#account:Netflix\n"
+   end
+
+   def assert_written_to_socket(msg)
+     assert_include PdMetrics.writes, msg
+   end
+ end
data/test/test_helper.rb ADDED
@@ -0,0 +1,11 @@
+ require 'rubygems'
+ require 'bundler/setup'
+
+ require 'minitest/autorun'
+
+ require 'mocha/setup'
+ Mocha::Deprecation.mode = :disabled
+
+ require 'active_support/test_case'
+
+ require 'timecop'
metadata ADDED
@@ -0,0 +1,122 @@
+ --- !ruby/object:Gem::Specification
+ name: pd_metrics
+ version: !ruby/object:Gem::Version
+   version: 1.0.0
+   prerelease:
+ platform: ruby
+ authors:
+ - Doug Barth
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-01-11 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: activesupport
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '3.2'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '3.2'
+ - !ruby/object:Gem::Dependency
+   name: timecop
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: mocha
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: minitest
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: Library to send metrics to Logstash, which then delivers them to PagerDuty's
+   metric systems
+ email:
+ - doug@pagerduty.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - README.md
+ - Rakefile
+ - lib/pd_metrics.rb
+ - lib/pd_metrics/version.rb
+ - pd_metrics.gemspec
+ - test/pd_metrics_test.rb
+ - test/test_helper.rb
+ homepage: ''
+ licenses: []
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.24
+ signing_key:
+ specification_version: 3
+ summary: PagerDuty's metrics integration library
+ test_files:
+ - test/pd_metrics_test.rb
+ - test/test_helper.rb
+ has_rdoc: