logstash-filter-kv 0.1.0

checksums.yaml.gz ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     YTZmNmRlZDE1NWJmNmViNjYwZDE0NjY5ZmY2Mjk0MWEzNjBhODZkYQ==
+   data.tar.gz: !binary |-
+     MGY0OTJiMWNlNDQ5ZjJlZmM5YzczODI5ZTEwZjMwZjBiNGY5MjZkNA==
+ SHA512:
+   metadata.gz: !binary |-
+     NzgxZTMxNzIzNzk1OTc4NGFhYTQ1MTVmYzJjZmE4MzMyMGFmMGVhYjk2M2Rm
+     MGUyMjM4NGM1MmQ5MWJhNGJjOTQxYmI5MzU0M2JlYjI0MmYxNzMwZTg3Mjdh
+     ZWMxNmI0ZjlhMDE0YTY2YTUwM2IyYzY0NzIxYWU5OWY2ZTA3ZmI=
+   data.tar.gz: !binary |-
+     MDE3MzIzOGQ1ZGJmNmJjNzNjZjgzZjZjYzg0MWRmNTNhMjU0NzZjMTJjY2M4
+     MTZiZDRlNTUwYzQ4ODllZGNiZjUzNDRiYWVjODlhNTI5Mzg2ZjNmZjVlZThh
+     Y2RiMjNjMjYyMmVlNjRmODE3NDk4NTQyZDI4NWFiNGQxZGY5NWQ=
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ Gemfile.lock
+ .bundle
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,3 @@
+ source 'http://rubygems.org'
+ gem 'rake'
+ gem 'gem_publisher'
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ @files=[]
+
+ task :default do
+   system("rake -T")
+ end
+
data/lib/logstash/filters/kv.rb ADDED
@@ -0,0 +1,237 @@
+ # encoding: utf-8
+ require "logstash/filters/base"
+ require "logstash/namespace"
+
+ # This filter helps automatically parse messages (or specific event fields)
+ # which are of the 'foo=bar' variety.
+ #
+ # For example, if you have a log message which contains 'ip=1.2.3.4
+ # error=REFUSED', you can parse those automatically by configuring:
+ #
+ #     filter {
+ #       kv { }
+ #     }
+ #
+ # The above will result in a message of "ip=1.2.3.4 error=REFUSED" having
+ # the fields:
+ #
+ # * ip: 1.2.3.4
+ # * error: REFUSED
+ #
+ # This is great for postfix, iptables, and other types of logs that
+ # tend towards 'key=value' syntax.
+ #
+ # You can configure any arbitrary strings to split your data on,
+ # in case your data is not structured using '=' signs and whitespace.
+ # For example, this filter can also be used to parse query parameters like
+ # 'foo=bar&baz=fizz' by setting the `field_split` parameter to "&".
+ class LogStash::Filters::KV < LogStash::Filters::Base
+   config_name "kv"
+   milestone 2
+
+   # A string of characters to trim from the value. This is useful if your
+   # values are wrapped in brackets or are terminated with commas (like postfix
+   # logs).
+   #
+   # These characters form a regex character class and thus you must escape special regex
+   # characters like '[' or ']' using '\'.
+   #
+   # For example, to strip '<', '>', '[', ']' and ',' characters from values:
+   #
+   #     filter {
+   #       kv {
+   #         trim => "<>\[\],"
+   #       }
+   #     }
+   config :trim, :validate => :string
+
+   # A string of characters to trim from the key. This is useful if your
+   # keys are wrapped in brackets or start with a space.
+   #
+   # These characters form a regex character class and thus you must escape special regex
+   # characters like '[' or ']' using '\'.
+   #
+   # For example, to strip '<', '>', '[', ']' and ',' characters from keys:
+   #
+   #     filter {
+   #       kv {
+   #         trimkey => "<>\[\],"
+   #       }
+   #     }
+   config :trimkey, :validate => :string
+
+   # A string of characters to use as delimiters for parsing out key-value pairs.
+   #
+   # These characters form a regex character class and thus you must escape special regex
+   # characters like '[' or ']' using '\'.
+   #
+   # #### Example with URL Query Strings
+   #
+   # For example, to split out the args from a url query string such as
+   # '?pin=12345~0&d=123&e=foo@bar.com&oq=bobo&ss=12345':
+   #
+   #     filter {
+   #       kv {
+   #         field_split => "&?"
+   #       }
+   #     }
+   #
+   # The above splits on both "&" and "?" characters, giving you the following
+   # fields:
+   #
+   # * pin: 12345~0
+   # * d: 123
+   # * e: foo@bar.com
+   # * oq: bobo
+   # * ss: 12345
+   config :field_split, :validate => :string, :default => ' '
+
+
+   # A string of characters to use as delimiters for identifying key-value relations.
+   #
+   # These characters form a regex character class and thus you must escape special regex
+   # characters like '[' or ']' using '\'.
+   #
+   # For example, to identify key-values such as
+   # 'key1:value1 key2:value2':
+   #
+   #     filter { kv { value_split => ":" } }
+   config :value_split, :validate => :string, :default => '='
+
+   # A string to prepend to all of the extracted keys.
+   #
+   # For example, to prepend arg_ to all keys:
+   #
+   #     filter { kv { prefix => "arg_" } }
+   config :prefix, :validate => :string, :default => ''
+
+   # The field to perform 'key=value' searching on.
+   #
+   # For example, to process the `not_the_message` field:
+   #
+   #     filter { kv { source => "not_the_message" } }
+   config :source, :validate => :string, :default => "message"
+
+   # The name of the container to put all of the key-value pairs into.
+   #
+   # If this setting is omitted, fields will be written to the root of the
+   # event, as individual fields.
+   #
+   # For example, to place all keys into the event field kv:
+   #
+   #     filter { kv { target => "kv" } }
+   config :target, :validate => :string
+
+   # An array specifying the parsed keys which should be added to the event.
+   # By default all keys will be added.
+   #
+   # For example, consider a source like "Hey, from=<abc>, to=def foo=bar".
+   # To include "from" and "to", but exclude the "foo" key, you could use this configuration:
+   #     filter {
+   #       kv {
+   #         include_keys => [ "from", "to" ]
+   #       }
+   #     }
+   config :include_keys, :validate => :array, :default => []
+
+   # An array specifying the parsed keys which should not be added to the event.
+   # By default no keys will be excluded.
+   #
+   # For example, consider a source like "Hey, from=<abc>, to=def foo=bar".
+   # To exclude "from" and "to", but retain the "foo" key, you could use this configuration:
+   #     filter {
+   #       kv {
+   #         exclude_keys => [ "from", "to" ]
+   #       }
+   #     }
+   config :exclude_keys, :validate => :array, :default => []
+
+   # A hash specifying the default keys and their values which should be added to the event
+   # in case these keys do not exist in the source field being parsed.
+   #
+   #     filter {
+   #       kv {
+   #         default_keys => [ "from", "logstash@example.com",
+   #                           "to", "default@dev.null" ]
+   #       }
+   #     }
+   config :default_keys, :validate => :hash, :default => {}
+
+   def register
+     @trim_re = Regexp.new("[#{@trim}]") if !@trim.nil?
+     @trimkey_re = Regexp.new("[#{@trimkey}]") if !@trimkey.nil?
+     @scan_re = Regexp.new("((?:\\\\ |[^"+@field_split+@value_split+"])+)["+@value_split+"](?:\"([^\"]+)\"|'([^']+)'|((?:\\\\ |[^"+@field_split+"])+))")
+   end # def register
+
+   def filter(event)
+     return unless filter?(event)
+
+     kv = Hash.new
+
+     value = event[@source]
+
+     case value
+     when nil; # Nothing to do
+     when String; kv = parse(value, event, kv)
+     when Array; value.each { |v| kv = parse(v, event, kv) }
+     else
+       @logger.warn("kv filter has no support for this type of data",
+                    :type => value.class, :value => value)
+     end # case value
+
+     # Add default key-values for missing keys
+     kv = @default_keys.merge(kv)
+
+     # If we have any keys, create/append the hash
+     if kv.length > 0
+       if @target.nil?
+         # Default is to write to the root of the event.
+         dest = event.to_hash
+       else
+         if !event[@target].is_a?(Hash)
+           @logger.debug("Overwriting existing target field", :target => @target)
+           dest = event[@target] = {}
+         else
+           dest = event[@target]
+         end
+       end
+
+       dest.merge!(kv)
+       filter_matched(event)
+     end
+   end # def filter
+
+   private
+   def parse(text, event, kv_keys)
+     # Nothing to do if the text contains no value splitter at all.
+     if text !~ /[#{@value_split}]/
+       return kv_keys
+     end
+
+     # Interpret dynamic keys for @include_keys and @exclude_keys
+     include_keys = @include_keys.map{|key| event.sprintf(key)}
+     exclude_keys = @exclude_keys.map{|key| event.sprintf(key)}
+
+     text.scan(@scan_re) do |key, v1, v2, v3|
+       value = v1 || v2 || v3
+       key = @trimkey.nil? ? key : key.gsub(@trimkey_re, "")
+
+       # Bail out as per the values of include_keys and exclude_keys
+       next if not include_keys.empty? and not include_keys.include?(key)
+       next if exclude_keys.include?(key)
+
+       key = event.sprintf(@prefix) + key
+
+       value = @trim.nil? ? value : value.gsub(@trim_re, "")
+       if kv_keys.has_key?(key)
+         if kv_keys[key].is_a? Array
+           kv_keys[key].push(value)
+         else
+           kv_keys[key] = [kv_keys[key], value]
+         end
+       else
+         kv_keys[key] = value
+       end
+     end
+     return kv_keys
+   end
+ end # class LogStash::Filters::KV
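The `@scan_re` regular expression built in `register` above does the heavy lifting: the first capture group takes the key, and the three alternatives after the value splitter capture a double-quoted, a single-quoted, or a bare value. A minimal standalone sketch of that behaviour, assuming the default `field_split` (' ') and `value_split` ('='); the sample string and variable names below are illustrative only:

    # Same pattern register() builds with the default splitters.
    scan_re = /((?:\\ |[^ =])+)[=](?:"([^"]+)"|'([^']+)'|((?:\\ |[^ ])+))/

    "ip=1.2.3.4 msg=\"login failed\" user='bob'".scan(scan_re) do |key, dq, sq, plain|
      value = dq || sq || plain   # same precedence the filter uses
      puts "#{key} => #{value}"
    end
    # => ip => 1.2.3.4
    #    msg => login failed
    #    user => bob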
data/logstash-filter-kv.gemspec ADDED
@@ -0,0 +1,26 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-filter-kv'
+   s.version = '0.1.0'
+   s.licenses = ['Apache License (2.0)']
+   s.summary = "This filter helps automatically parse messages (or specific event fields) which are of the 'foo=bar' variety."
+   s.description = "This filter helps automatically parse messages (or specific event fields) which are of the 'foo=bar' variety."
+   s.authors = ["Elasticsearch"]
+   s.email = 'richard.pijnenburg@elasticsearch.com'
+   s.homepage = "http://logstash.net/"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = `git ls-files`.split($\)
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "group" => "filter" }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
+
+ end
+
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+ require "gem_publisher"
+
+ desc "Publish gem to RubyGems.org"
+ task :publish_gem do |t|
+   gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
+   gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+   puts "Published #{gem}" if gem
+ end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+ require "net/http"
+ require "uri"
+ require "digest/sha1"
+
+ def vendor(*args)
+   return File.join("vendor", *args)
+ end
+
+ directory "vendor/" => ["vendor"] do |task, args|
+   mkdir task.name
+ end
+
+ def fetch(url, sha1, output)
+
+   puts "Downloading #{url}"
+   actual_sha1 = download(url, output)
+
+   if actual_sha1 != sha1
+     fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+   end
+ end # def fetch
+
+ def file_fetch(url, sha1)
+   filename = File.basename( URI(url).path )
+   output = "vendor/#{filename}"
+   task output => [ "vendor/" ] do
+     begin
+       actual_sha1 = file_sha1(output)
+       if actual_sha1 != sha1
+         fetch(url, sha1, output)
+       end
+     rescue Errno::ENOENT
+       fetch(url, sha1, output)
+     end
+   end.invoke
+
+   return output
+ end
+
+ def file_sha1(path)
+   digest = Digest::SHA1.new
+   fd = File.new(path, "r")
+   while true
+     begin
+       digest << fd.sysread(16384)
+     rescue EOFError
+       break
+     end
+   end
+   return digest.hexdigest
+ ensure
+   fd.close if fd
+ end
+
+ def download(url, output)
+   uri = URI(url)
+   digest = Digest::SHA1.new
+   tmp = "#{output}.tmp"
+   Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+     request = Net::HTTP::Get.new(uri.path)
+     http.request(request) do |response|
+ fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
63
+       size = (response["content-length"].to_i || -1).to_f
+       count = 0
+       File.open(tmp, "w") do |fd|
+         response.read_body do |chunk|
+           fd.write(chunk)
+           digest << chunk
+           if size > 0 && $stdout.tty?
+             count += chunk.bytesize
+             $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
+           end
+         end
+       end
+       $stdout.write("\r \r") if $stdout.tty?
+     end
+   end
+
+   File.rename(tmp, output)
+
+   return digest.hexdigest
+ rescue SocketError => e
+   puts "Failure while downloading #{url}: #{e}"
+   raise
+ ensure
+   File.unlink(tmp) if File.exist?(tmp)
+ end # def download
+
+ def untar(tarball, &block)
+   require "archive/tar/minitar"
+   tgz = Zlib::GzipReader.new(File.open(tarball))
+   # Pull out typesdb
+   tar = Archive::Tar::Minitar::Input.open(tgz)
+   tar.each do |entry|
+     path = block.call(entry)
+     next if path.nil?
+     parent = File.dirname(path)
+
+     mkdir_p parent unless File.directory?(parent)
+
+     # Skip this file if the output file is the same size
+     if entry.directory?
+       mkdir path unless File.directory?(path)
+     else
+       entry_mode = entry.instance_eval { @mode } & 0777
+       if File.exists?(path)
+         stat = File.stat(path)
+         # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+         # expose headers in the entry.
+         entry_size = entry.instance_eval { @size }
+         # If file sizes are same, skip writing.
+         next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+       end
+       puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+       File.open(path, "w") do |fd|
+         # eof? check lets us skip empty files. Necessary because the API provided by
+         # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+         # IO object. Something about empty files in this EntryStream causes
+         # IO.copy_stream to throw "can't convert nil into String" on JRuby
+         # TODO(sissel): File a bug about this.
+         while !entry.eof?
+           chunk = entry.read(16384)
+           fd.write(chunk)
+         end
+         #IO.copy_stream(entry, fd)
+       end
+       File.chmod(entry_mode, path)
+     end
+   end
+   tar.close
+   File.unlink(tarball) if File.file?(tarball)
+ end # def untar
+
+ def ungz(file)
+
+   outpath = file.gsub('.gz', '')
+   tgz = Zlib::GzipReader.new(File.open(file))
+   begin
+     File.open(outpath, "w") do |out|
+       IO::copy_stream(tgz, out)
+     end
+     File.unlink(file)
+   rescue
+     File.unlink(outpath) if File.file?(outpath)
+     raise
+   end
+   tgz.close
+ end
+
+ desc "Process any vendor files required for this plugin"
+ task "vendor" do |task, args|
+
+   @files.each do |file|
+     download = file_fetch(file['url'], file['sha1'])
+     if download =~ /.tar.gz/
+       prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+       untar(download) do |entry|
+         if !file['files'].nil?
+           next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+           out = entry.full_name.split("/").last
+         end
+         File.join('vendor', out)
+       end
+     elsif download =~ /.gz/
+       ungz(download)
+     end
+   end
+
+ end
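The `vendor` task above drives everything from the `@files` list that the top-level Rakefile initialises to an empty array; each entry is read through `file['url']`, `file['sha1']` and an optional `file['files']` whitelist. A hypothetical entry (the URL, checksum and path below are placeholders, not something this gem actually vendors) would look like:

    @files = [
      {
        'url'   => 'https://example.com/some-archive-1.0.tar.gz',  # placeholder URL
        'sha1'  => '0123456789abcdef0123456789abcdef01234567',     # placeholder checksum
        'files' => ['/some-file.txt'],  # optional: only extract these archive entries
      },
    ]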
data/spec/filters/kv_spec.rb ADDED
@@ -0,0 +1,436 @@
+ require "spec_helper"
+ require "logstash/filters/kv"
+
+ describe LogStash::Filters::KV do
+
+   describe "defaults" do
+     # The logstash config goes here.
+     # At this time, only filters are supported.
+     config <<-CONFIG
+       filter {
+         kv { }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["hello"] } == "world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+
+   end
+
+   describe "LOGSTASH-624: allow escaped space in key or value " do
+     config <<-CONFIG
+       filter {
+         kv { value_split => ':' }
+       }
+     CONFIG
+
+     sample 'IKE:=Quick\ Mode\ completion IKE\ IDs:=subnet:\ x.x.x.x\ (mask=\ 255.255.255.254)\ and\ host:\ y.y.y.y' do
+       insist { subject["IKE"] } == '=Quick\ Mode\ completion'
+       insist { subject['IKE\ IDs'] } == '=subnet:\ x.x.x.x\ (mask=\ 255.255.255.254)\ and\ host:\ y.y.y.y'
+     end
+   end
+
+   describe "test value_split" do
+     config <<-CONFIG
+       filter {
+         kv { value_split => ':' }
+       }
+     CONFIG
+
+     sample "hello:=world foo:bar baz=:fizz doublequoted:\"hello world\" singlequoted:'hello world'" do
+       insist { subject["hello"] } == "=world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz="] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+
+   end
+
+   describe "test field_split" do
+     config <<-CONFIG
+       filter {
+         kv { field_split => '?&' }
+       }
+     CONFIG
+
+     sample "?hello=world&foo=bar&baz=fizz&doublequoted=\"hello world\"&singlequoted='hello world'&ignoreme&foo12=bar12" do
+       insist { subject["hello"] } == "world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+       insist { subject["foo12"] } == "bar12"
+     end
+
+   end
+
+   describe "delimited fields should override space default (reported by LOGSTASH-733)" do
+     config <<-CONFIG
+       filter {
+         kv { field_split => "|" }
+       }
+     CONFIG
+
+     sample "field1=test|field2=another test|field3=test3" do
+       insist { subject["field1"] } == "test"
+       insist { subject["field2"] } == "another test"
+       insist { subject["field3"] } == "test3"
+     end
+   end
+
+   describe "test prefix" do
+     config <<-CONFIG
+       filter {
+         kv { prefix => '__' }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["__hello"] } == "world"
+       insist { subject["__foo"] } == "bar"
+       insist { subject["__baz"] } == "fizz"
+       insist { subject["__doublequoted"] } == "hello world"
+       insist { subject["__singlequoted"] } == "hello world"
+     end
+
+   end
+
+   describe "speed test", :performance => true do
+     count = 10000 + rand(3000)
+     config <<-CONFIG
+       input {
+         generator {
+           count => #{count}
+           type => foo
+           message => "hello=world bar='baz fizzle'"
+         }
+       }
+
+       filter {
+         kv { }
+       }
+
+       output {
+         null { }
+       }
+     CONFIG
+
+     start = Time.now
+     agent do
+       duration = (Time.now - start)
+       puts "filters/kv rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+     end
+   end
+
+   describe "add_tag" do
+     context "should activate when successful" do
+       config <<-CONFIG
+         filter {
+           kv { add_tag => "hello" }
+         }
+       CONFIG
+
+       sample "hello=world" do
+         insist { subject["hello"] } == "world"
+         insist { subject["tags"] }.include?("hello")
+       end
+     end
+     context "should not activate when failing" do
+       config <<-CONFIG
+         filter {
+           kv { add_tag => "hello" }
+         }
+       CONFIG
+
+       sample "this is not key value" do
+         insist { subject["tags"] }.nil?
+       end
+     end
+   end
+
+   describe "add_field" do
+     context "should activate when successful" do
+       config <<-CONFIG
+         filter {
+           kv { add_field => [ "whoa", "fancypants" ] }
+         }
+       CONFIG
+
+       sample "hello=world" do
+         insist { subject["hello"] } == "world"
+         insist { subject["whoa"] } == "fancypants"
+       end
+     end
+
+     context "should not activate when failing" do
+       config <<-CONFIG
+         filter {
+           kv { add_tag => "hello" }
+         }
+       CONFIG
+
+       sample "this is not key value" do
+         reject { subject["whoa"] } == "fancypants"
+       end
+     end
+   end
+
+   #New tests
+   describe "test target" do
+     config <<-CONFIG
+       filter {
+         kv { target => 'kv' }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["kv"]["hello"] } == "world"
+       insist { subject["kv"]["foo"] } == "bar"
+       insist { subject["kv"]["baz"] } == "fizz"
+       insist { subject["kv"]["doublequoted"] } == "hello world"
+       insist { subject["kv"]["singlequoted"] } == "hello world"
+       insist { subject["kv"].count } == 5
+     end
+
+   end
+
+   describe "test empty target" do
+     config <<-CONFIG
+       filter {
+         kv { target => 'kv' }
+       }
+     CONFIG
+
+     sample "hello:world:foo:bar:baz:fizz" do
+       insist { subject["kv"] } == nil
+     end
+   end
+
+
+   describe "test data from specific sub source" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "data"
+         }
+       }
+     CONFIG
+     sample("data" => "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'") do
+       insist { subject["hello"] } == "world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+   end
+
+   describe "test data from specific top source" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "@data"
+         }
+       }
+     CONFIG
+     sample({"@data" => "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'"}) do
+       insist { subject["hello"] } == "world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+   end
+
+
+   describe "test data from specific sub source and target" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "data"
+           target => "kv"
+         }
+       }
+     CONFIG
+     sample("data" => "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'") do
+       insist { subject["kv"]["hello"] } == "world"
+       insist { subject["kv"]["foo"] } == "bar"
+       insist { subject["kv"]["baz"] } == "fizz"
+       insist { subject["kv"]["doublequoted"] } == "hello world"
+       insist { subject["kv"]["singlequoted"] } == "hello world"
+       insist { subject["kv"].count } == 5
+     end
+   end
+
+   describe "test data from nil sub source, should not issue a warning" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "non-exisiting-field"
+           target => "kv"
+         }
+       }
+     CONFIG
+     sample "" do
+       insist { subject["non-exisiting-field"] } == nil
+       insist { subject["kv"] } == nil
+     end
+   end
+
+   describe "test include_keys" do
+     config <<-CONFIG
+       filter {
+         kv {
+           include_keys => [ "foo", "singlequoted" ]
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["foo"] } == "bar"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+   end
+
+   describe "test exclude_keys" do
+     config <<-CONFIG
+       filter {
+         kv {
+           exclude_keys => [ "foo", "singlequoted" ]
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["hello"] } == "world"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+     end
+   end
+
+   describe "test include_keys with prefix" do
+     config <<-CONFIG
+       filter {
+         kv {
+           include_keys => [ "foo", "singlequoted" ]
+           prefix => "__"
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["__foo"] } == "bar"
+       insist { subject["__singlequoted"] } == "hello world"
+     end
+   end
+
+   describe "test exclude_keys with prefix" do
+     config <<-CONFIG
+       filter {
+         kv {
+           exclude_keys => [ "foo", "singlequoted" ]
+           prefix => "__"
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["__hello"] } == "world"
+       insist { subject["__baz"] } == "fizz"
+       insist { subject["__doublequoted"] } == "hello world"
+     end
+   end
+
+   describe "test include_keys with dynamic key" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "data"
+           include_keys => [ "%{key}"]
+         }
+       }
+     CONFIG
+
+     sample({"data" => "foo=bar baz=fizz", "key" => "foo"}) do
+       insist { subject["foo"] } == "bar"
+       insist { subject["baz"] } == nil
+     end
+   end
+
+   describe "test exclude_keys with dynamic key" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "data"
+           exclude_keys => [ "%{key}"]
+         }
+       }
+     CONFIG
+
+     sample({"data" => "foo=bar baz=fizz", "key" => "foo"}) do
+       insist { subject["foo"] } == nil
+       insist { subject["baz"] } == "fizz"
+     end
+   end
+
+   describe "test include_keys and exclude_keys" do
+     config <<-CONFIG
+       filter {
+         kv {
+           # This should exclude everything as a result of both settings.
+           include_keys => [ "foo", "singlequoted" ]
+           exclude_keys => [ "foo", "singlequoted" ]
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       %w(hello foo baz doublequoted singlequoted).each do |field|
+         reject { subject }.include?(field)
+       end
+     end
+   end
+
+   describe "test default_keys" do
+     config <<-CONFIG
+       filter {
+         kv {
+           default_keys => [ "foo", "xxx",
+                             "goo", "yyy" ]
+         }
+       }
+     CONFIG
+
+     sample "hello=world foo=bar baz=fizz doublequoted=\"hello world\" singlequoted='hello world'" do
+       insist { subject["hello"] } == "world"
+       insist { subject["foo"] } == "bar"
+       insist { subject["goo"] } == "yyy"
+       insist { subject["baz"] } == "fizz"
+       insist { subject["doublequoted"] } == "hello world"
+       insist { subject["singlequoted"] } == "hello world"
+     end
+   end
+
+   describe "overwriting a string field (often the source)" do
+     config <<-CONFIG
+       filter {
+         kv {
+           source => "happy"
+           target => "happy"
+         }
+       }
+     CONFIG
+
+     sample("happy" => "foo=bar baz=fizz") do
+       insist { subject["[happy][foo]"] } == "bar"
+       insist { subject["[happy][baz]"] } == "fizz"
+     end
+
+   end
+
+ end
metadata ADDED
@@ -0,0 +1,76 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-filter-kv
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Elasticsearch
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-11-02 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: logstash
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+ description: This filter helps automatically parse messages (or specific event fields)
+   which are of the 'foo=bar' variety.
+ email: richard.pijnenburg@elasticsearch.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - Rakefile
+ - lib/logstash/filters/kv.rb
+ - logstash-filter-kv.gemspec
+ - rakelib/publish.rake
+ - rakelib/vendor.rake
+ - spec/filters/kv_spec.rb
+ homepage: http://logstash.net/
+ licenses:
+ - Apache License (2.0)
+ metadata:
+   logstash_plugin: 'true'
+   group: filter
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.1
+ signing_key:
+ specification_version: 4
+ summary: This filter helps automatically parse messages (or specific event fields)
+   which are of the 'foo=bar' variety.
+ test_files:
+ - spec/filters/kv_spec.rb