logstash-output-graphite 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     ZTM5YjJhNGYyNTBkZjZiZmNmZTAyNWZiMWRmNzRjODRlYjZhNmE4OA==
+   data.tar.gz: !binary |-
+     NTI0YjBlMDY4NmUyNDY3ODEyNDc1ZjM2NzhmMDVmNmRkMzhiZDliYQ==
+ SHA512:
+   metadata.gz: !binary |-
+     ODU3OGUzNDllOGNiMjc5NGRjMDQyNDYyNzlkYTFmOGExNDE2YTkwN2YxZDQ5
+     M2MyYjBhMzQ5OThjZTkyMzllNTJiZGEzOTI4ODAzMzZkMjJmNzBlMmJkYzc1
+     NDBhODI5ZGNmNTAyZGJmYTkwZThhYWU3Y2M2YTVlNTQ1N2YyMjQ=
+   data.tar.gz: !binary |-
+     NmUzZWFkZTg5N2M4NDRmNjMwNjBlNmM2OWYzODRmM2Y5N2FlODYxM2RmNGU3
+     M2IzOTliYTY5YmIwOGRkMGU5OWViMDUzMTlkMTYyMGU4NGI0NTNkZTI2NTRl
+     ZDJlMTVmOGJhNzQ4MzE1YTc2Mzc3OWE5ZWQyYmMwOGUwZDdhNjM=
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ Gemfile.lock
+ .bundle
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'http://rubygems.org'
+ gem 'rake'
+ gem 'gem_publisher'
+ gem 'archive-tar-minitar'
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ @files=[]
+
+ task :default do
+   system("rake -T")
+ end
+
data/lib/logstash/outputs/graphite.rb ADDED
@@ -0,0 +1,146 @@
+ # encoding: utf-8
+ require "logstash/outputs/base"
+ require "logstash/namespace"
+ require "socket"
+
+ # This output allows you to pull metrics from your logs and ship them to
+ # Graphite. Graphite is an open source tool for storing and graphing metrics.
+ #
+ # An example use case: Some applications emit aggregated stats in the logs
+ # every 10 seconds. Using the grok filter and this output, it is possible to
+ # capture the metric values from the logs and emit them to Graphite.
+ class LogStash::Outputs::Graphite < LogStash::Outputs::Base
+   config_name "graphite"
+   milestone 2
+
+   EXCLUDE_ALWAYS = [ "@timestamp", "@version" ]
+
+   DEFAULT_METRICS_FORMAT = "*"
+   METRIC_PLACEHOLDER = "*"
+
+   # The hostname or IP address of the Graphite server.
+   config :host, :validate => :string, :default => "localhost"
+
+   # The port to connect to on the Graphite server.
+   config :port, :validate => :number, :default => 2003
+
+   # Interval between reconnect attempts to Carbon.
+   config :reconnect_interval, :validate => :number, :default => 2
+
+   # Should metrics be resent on failure?
+   config :resend_on_failure, :validate => :boolean, :default => false
+
+   # The metric(s) to use. This supports dynamic strings like %{host}
+   # for metric names and also for values. This is a hash field with key
+   # being the metric name, value being the metric value. Example:
+   #
+   #     [ "%{host}/uptime", "%{uptime_1m}" ]
+   #
+   # The value will be coerced to a floating point value. Values which cannot be
+   # coerced will be set to zero (0). You may use either `metrics` or `fields_are_metrics`,
+   # but not both.
+   config :metrics, :validate => :hash, :default => {}
+
+   # An array indicating that these event fields should be treated as metrics
+   # and will be sent verbatim to Graphite. You may use either `fields_are_metrics`
+   # or `metrics`, but not both.
+   config :fields_are_metrics, :validate => :boolean, :default => false
+
+   # Include only regex matched metric names.
+   config :include_metrics, :validate => :array, :default => [ ".*" ]
+
+   # Exclude regex matched metric names, by default exclude unresolved %{field} strings.
+   config :exclude_metrics, :validate => :array, :default => [ "%\{[^}]+\}" ]
+
+   # Enable debug output.
+   config :debug, :validate => :boolean, :default => false, :deprecated => "This setting was never used by this plugin. It will be removed soon."
+
+   # Defines the format of the metric string. The placeholder '*' will be
+   # replaced with the name of the actual metric.
+   #
+   #     metrics_format => "foo.bar.*.sum"
+   #
+   # NOTE: If no metrics_format is defined, the name of the metric will be used as fallback.
+   config :metrics_format, :validate => :string, :default => DEFAULT_METRICS_FORMAT
+
+   def register
+     @include_metrics.collect!{|regexp| Regexp.new(regexp)}
+     @exclude_metrics.collect!{|regexp| Regexp.new(regexp)}
+
+     if @metrics_format && !@metrics_format.include?(METRIC_PLACEHOLDER)
+       @logger.warn("metrics_format does not include placeholder #{METRIC_PLACEHOLDER} .. falling back to default format: #{DEFAULT_METRICS_FORMAT.inspect}")
+
+       @metrics_format = DEFAULT_METRICS_FORMAT
+     end
+
+     connect
+   end # def register
+
+   def connect
+     # TODO(sissel): Test error cases. Catch exceptions. Find fortune and glory. Retire to yak farm.
+     begin
+       @socket = TCPSocket.new(@host, @port)
+     rescue Errno::ECONNREFUSED => e
+       @logger.warn("Connection refused to graphite server, sleeping...",
+                    :host => @host, :port => @port)
+       sleep(@reconnect_interval)
+       retry
+     end
+   end # def connect
+
+   def construct_metric_name(metric)
+     if @metrics_format
+       return @metrics_format.gsub(METRIC_PLACEHOLDER, metric)
+     end
+
+     metric
+   end
+
+   public
+   def receive(event)
+     return unless output?(event)
+
+     # Graphite message format: metric value timestamp\n
+
+     messages = []
+     timestamp = event.sprintf("%{+%s}")
+
+     if @fields_are_metrics
+       @logger.debug("got metrics event", :metrics => event.to_hash)
+       event.to_hash.each do |metric,value|
+         next if EXCLUDE_ALWAYS.include?(metric)
+         next unless @include_metrics.empty? || @include_metrics.any? { |regexp| metric.match(regexp) }
+         next if @exclude_metrics.any? {|regexp| metric.match(regexp)}
+         messages << "#{construct_metric_name(metric)} #{event.sprintf(value.to_s).to_f} #{timestamp}"
+       end
+     else
+       @metrics.each do |metric, value|
+         @logger.debug("processing", :metric => metric, :value => value)
+         metric = event.sprintf(metric)
+         next unless @include_metrics.any? {|regexp| metric.match(regexp)}
+         next if @exclude_metrics.any? {|regexp| metric.match(regexp)}
+         messages << "#{construct_metric_name(event.sprintf(metric))} #{event.sprintf(value).to_f} #{timestamp}"
+       end
+     end
+
+     if messages.empty?
+       @logger.debug("Message is empty, not sending anything to Graphite", :messages => messages, :host => @host, :port => @port)
+     else
+       message = messages.join("\n")
+       @logger.debug("Sending carbon messages", :messages => messages, :host => @host, :port => @port)
+
+       # Catch exceptions like ECONNRESET and friends, reconnect on failure.
+       # TODO(sissel): Test error cases. Catch exceptions. Find fortune and glory.
+       begin
+         @socket.puts(message)
+       rescue Errno::EPIPE, Errno::ECONNRESET => e
+         @logger.warn("Connection to graphite server died",
+                      :exception => e, :host => @host, :port => @port)
+         sleep(@reconnect_interval)
+         connect
+         retry if @resend_on_failure
+       end
+     end
+
+   end # def receive
+ end # class LogStash::Outputs::Graphite
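
To make the `metrics` documentation above concrete, here is a minimal pipeline sketch that mirrors the first spec further down (the kv filter and the field names foo/bar come from that spec; the Graphite hostname and the timestamp in the sample output are illustrative assumptions, not part of this gem):

    # Parse "foo=fancy bar=42" style lines into fields, then ship them to Carbon.
    filter {
      kv { }
    }
    output {
      graphite {
        host    => "graphite.example.com"   # assumed Carbon host; the spec uses localhost
        port    => 2003
        metrics => [ "hurray.%{foo}", "%{bar}" ]
      }
    }

For an event carrying foo=fancy and bar=42, receive() emits one line per resolved metric in the plaintext "metric value timestamp" format noted in the code, e.g. "hurray.fancy 42.0 1414972800" (timestamp illustrative).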
data/logstash-output-graphite.gemspec ADDED
@@ -0,0 +1,26 @@
+ Gem::Specification.new do |s|
+
+   s.name          = 'logstash-output-graphite'
+   s.version       = '0.1.0'
+   s.licenses      = ['Apache License (2.0)']
+   s.summary       = "This output allows you to pull metrics from your logs and ship them to Graphite"
+   s.description   = "This output allows you to pull metrics from your logs and ship them to Graphite"
+   s.authors       = ["Elasticsearch"]
+   s.email         = 'richard.pijnenburg@elasticsearch.com'
+   s.homepage      = "http://logstash.net/"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "group" => "output" }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
+
+ end
+
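
The "logstash_plugin" => "true" metadata entry above is the flag that marks this gem as a Logstash plugin. As a hedged sketch of how such a flag can be discovered through the standard RubyGems API once the gem is installed (an illustration only, not the actual Logstash plugin manager code):

    require "rubygems"

    # Walk every installed gemspec and keep the ones flagged as Logstash plugins.
    plugins = Gem::Specification.select do |spec|
      spec.metadata["logstash_plugin"] == "true"
    end

    plugins.each do |spec|
      puts "#{spec.metadata['group']}: #{spec.name} #{spec.version}"
    end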
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+ require "gem_publisher"
+
+ desc "Publish gem to RubyGems.org"
+ task :publish_gem do |t|
+   gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
+   gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+   puts "Published #{gem}" if gem
+ end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+ require "net/http"
+ require "uri"
+ require "digest/sha1"
+
+ def vendor(*args)
+   return File.join("vendor", *args)
+ end
+
+ directory "vendor/" => ["vendor"] do |task, args|
+   mkdir task.name
+ end
+
+ def fetch(url, sha1, output)
+
+   puts "Downloading #{url}"
+   actual_sha1 = download(url, output)
+
+   if actual_sha1 != sha1
+     fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+   end
+ end # def fetch
+
+ def file_fetch(url, sha1)
+   filename = File.basename( URI(url).path )
+   output = "vendor/#{filename}"
+   task output => [ "vendor/" ] do
+     begin
+       actual_sha1 = file_sha1(output)
+       if actual_sha1 != sha1
+         fetch(url, sha1, output)
+       end
+     rescue Errno::ENOENT
+       fetch(url, sha1, output)
+     end
+   end.invoke
+
+   return output
+ end
+
+ def file_sha1(path)
+   digest = Digest::SHA1.new
+   fd = File.new(path, "r")
+   while true
+     begin
+       digest << fd.sysread(16384)
+     rescue EOFError
+       break
+     end
+   end
+   return digest.hexdigest
+ ensure
+   fd.close if fd
+ end
+
+ def download(url, output)
+   uri = URI(url)
+   digest = Digest::SHA1.new
+   tmp = "#{output}.tmp"
+   Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+     request = Net::HTTP::Get.new(uri.path)
+     http.request(request) do |response|
+       fail "HTTP fetch failed for #{url}. #{response}" unless ["200", "301"].include?(response.code)
+       size = (response["content-length"].to_i || -1).to_f
+       count = 0
+       File.open(tmp, "w") do |fd|
+         response.read_body do |chunk|
+           fd.write(chunk)
+           digest << chunk
+           if size > 0 && $stdout.tty?
+             count += chunk.bytesize
+             $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
+           end
+         end
+       end
+       $stdout.write("\r \r") if $stdout.tty?
+     end
+   end
+
+   File.rename(tmp, output)
+
+   return digest.hexdigest
+ rescue SocketError => e
+   puts "Failure while downloading #{url}: #{e}"
+   raise
+ ensure
+   File.unlink(tmp) if File.exist?(tmp)
+ end # def download
+
+ def untar(tarball, &block)
+   require "archive/tar/minitar"
+   tgz = Zlib::GzipReader.new(File.open(tarball))
+   # Pull out typesdb
+   tar = Archive::Tar::Minitar::Input.open(tgz)
+   tar.each do |entry|
+     path = block.call(entry)
+     next if path.nil?
+     parent = File.dirname(path)
+
+     mkdir_p parent unless File.directory?(parent)
+
+     # Skip this file if the output file is the same size
+     if entry.directory?
+       mkdir path unless File.directory?(path)
+     else
+       entry_mode = entry.instance_eval { @mode } & 0777
+       if File.exists?(path)
+         stat = File.stat(path)
+         # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+         # expose headers in the entry.
+         entry_size = entry.instance_eval { @size }
+         # If file sizes are same, skip writing.
+         next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+       end
+       puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+       File.open(path, "w") do |fd|
+         # eof? check lets us skip empty files. Necessary because the API provided by
+         # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+         # IO object. Something about empty files in this EntryStream causes
+         # IO.copy_stream to throw "can't convert nil into String" on JRuby
+         # TODO(sissel): File a bug about this.
+         while !entry.eof?
+           chunk = entry.read(16384)
+           fd.write(chunk)
+         end
+         #IO.copy_stream(entry, fd)
+       end
+       File.chmod(entry_mode, path)
+     end
+   end
+   tar.close
+   File.unlink(tarball) if File.file?(tarball)
+ end # def untar
+
+ def ungz(file)
+
+   outpath = file.gsub('.gz', '')
+   tgz = Zlib::GzipReader.new(File.open(file))
+   begin
+     File.open(outpath, "w") do |out|
+       IO::copy_stream(tgz, out)
+     end
+     File.unlink(file)
+   rescue
+     File.unlink(outpath) if File.file?(outpath)
+     raise
+   end
+   tgz.close
+ end
+
+ desc "Process any vendor files required for this plugin"
+ task "vendor" do |task, args|
+
+   @files.each do |file|
+     download = file_fetch(file['url'], file['sha1'])
+     if download =~ /.tar.gz/
+       prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+       untar(download) do |entry|
+         if !file['files'].nil?
+           next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+           out = entry.full_name.split("/").last
+         end
+         File.join('vendor', out)
+       end
+     elsif download =~ /.gz/
+       ungz(download)
+     end
+   end
+
+ end
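
The "vendor" task above drives everything from @files, which the top-level Rakefile initializes to an empty array, so this plugin currently vendors nothing. For a plugin that did need vendored artifacts, @files would hold hashes with the keys the task reads ('url', 'sha1', and optionally 'files'); the sketch below uses purely hypothetical values:

    # Hypothetical example only; this gem's Rakefile leaves @files empty.
    @files = [
      {
        'url'   => 'https://example.org/dist/sample-types-1.0.tar.gz',  # placeholder URL
        'sha1'  => '0000000000000000000000000000000000000000',          # expected SHA1 of the download
        'files' => ['/src/types.db']                                    # tarball entries to extract into vendor/
      }
    ]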
data/spec/outputs/graphite_spec.rb ADDED
@@ -0,0 +1,236 @@
+ require "spec_helper"
+ require "logstash/outputs/graphite"
+ require "mocha/api"
+
+ describe LogStash::Outputs::Graphite, :socket => true do
+
+
+   describe "defaults should include all metrics" do
+     port = 4939
+     config <<-CONFIG
+       input {
+         generator {
+           message => "foo=fancy bar=42"
+           count => 1
+           type => "generator"
+         }
+       }
+
+       filter {
+         kv { }
+       }
+
+       output {
+         graphite {
+           host => "localhost"
+           port => #{port}
+           metrics => [ "hurray.%{foo}", "%{bar}" ]
+         }
+       }
+     CONFIG
+
+     let(:queue) { Queue.new }
+     before :each do
+       server = TCPServer.new("127.0.0.1", port)
+       Thread.new do
+         client = server.accept
+         p client
+         while true
+           p :read
+           line = client.readline
+           p :done
+           queue << line
+           p line
+         end
+       end
+     end
+
+     agent do
+       lines = queue.pop
+
+       insist { lines.size } == 1
+       insist { lines }.any? { |l| l =~ /^hurray.fancy 42.0 \d{10,}\n$/ }
+     end
+   end
+
+   describe "fields_are_metrics => true" do
+     describe "metrics_format => ..." do
+       describe "match one key" do
+         config <<-CONFIG
+           input {
+             generator {
+               message => "foo=123"
+               count => 1
+               type => "generator"
+             }
+           }
+
+           filter {
+             kv { }
+           }
+
+           output {
+             graphite {
+               host => "localhost"
+               port => 2003
+               fields_are_metrics => true
+               include_metrics => ["foo"]
+               metrics_format => "foo.bar.sys.data.*"
+               debug => true
+             }
+           }
+         CONFIG
+
+         agent do
+           @mock.rewind
+           lines = @mock.readlines
+           insist { lines.size } == 1
+           insist { lines[0] } =~ /^foo.bar.sys.data.foo 123.0 \d{10,}\n$/
+         end
+       end
+
+       describe "match all keys" do
+         config <<-CONFIG
+           input {
+             generator {
+               message => "foo=123 bar=42"
+               count => 1
+               type => "generator"
+             }
+           }
+
+           filter {
+             kv { }
+           }
+
+           output {
+             graphite {
+               host => "localhost"
+               port => 2003
+               fields_are_metrics => true
+               include_metrics => [".*"]
+               metrics_format => "foo.bar.sys.data.*"
+               debug => true
+             }
+           }
+         CONFIG
+
+         agent do
+           @mock.rewind
+           lines = @mock.readlines.delete_if { |l| l =~ /\.sequence \d+/ }
+
+           insist { lines.size } == 2
+           insist { lines }.any? { |l| l =~ /^foo.bar.sys.data.foo 123.0 \d{10,}\n$/ }
+           insist { lines }.any? { |l| l =~ /^foo.bar.sys.data.bar 42.0 \d{10,}\n$/ }
+         end
+       end
+
+       describe "no match" do
+         config <<-CONFIG
+           input {
+             generator {
+               message => "foo=123 bar=42"
+               count => 1
+               type => "generator"
+             }
+           }
+
+           filter {
+             kv { }
+           }
+
+           output {
+             graphite {
+               host => "localhost"
+               port => 2003
+               fields_are_metrics => true
+               include_metrics => ["notmatchinganything"]
+               metrics_format => "foo.bar.sys.data.*"
+               debug => true
+             }
+           }
+         CONFIG
+
+         agent do
+           @mock.rewind
+           lines = @mock.readlines
+           insist { lines.size } == 0
+         end
+       end
+
+       describe "match one key with invalid metric_format" do
+         config <<-CONFIG
+           input {
+             generator {
+               message => "foo=123"
+               count => 1
+               type => "generator"
+             }
+           }
+
+           filter {
+             kv { }
+           }
+
+           output {
+             graphite {
+               host => "localhost"
+               port => 2003
+               fields_are_metrics => true
+               include_metrics => ["foo"]
+               metrics_format => "invalidformat"
+               debug => true
+             }
+           }
+         CONFIG
+
+         agent do
+           @mock.rewind
+           lines = @mock.readlines
+           insist { lines.size } == 1
+           insist { lines[0] } =~ /^foo 123.0 \d{10,}\n$/
+         end
+       end
+     end
+   end
+
+   describe "fields are metrics = false" do
+     describe "metrics_format not set" do
+       describe "match one key with metrics list" do
+         config <<-CONFIG
+           input {
+             generator {
+               message => "foo=123"
+               count => 1
+               type => "generator"
+             }
+           }
+
+           filter {
+             kv { }
+           }
+
+           output {
+             graphite {
+               host => "localhost"
+               port => 2003
+               fields_are_metrics => false
+               include_metrics => ["foo"]
+               metrics => [ "custom.foo", "%{foo}" ]
+               debug => true
+             }
+           }
+         CONFIG
+
+         agent do
+           @mock.rewind
+           lines = @mock.readlines
+
+           insist { lines.size } == 1
+           insist { lines[0] } =~ /^custom.foo 123.0 \d{10,}\n$/
+         end
+       end
+
+     end
+   end
+ end
metadata ADDED
@@ -0,0 +1,75 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-output-graphite
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Elasticsearch
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-11-03 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: logstash
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+ description: This output allows you to pull metrics from your logs and ship them to
+   Graphite
+ email: richard.pijnenburg@elasticsearch.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - Rakefile
+ - lib/logstash/outputs/graphite.rb
+ - logstash-output-graphite.gemspec
+ - rakelib/publish.rake
+ - rakelib/vendor.rake
+ - spec/outputs/graphite_spec.rb
+ homepage: http://logstash.net/
+ licenses:
+ - Apache License (2.0)
+ metadata:
+   logstash_plugin: 'true'
+   group: output
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.1
+ signing_key:
+ specification_version: 4
+ summary: This output allows you to pull metrics from your logs and ship them to Graphite
+ test_files:
+ - spec/outputs/graphite_spec.rb