logstash-filter-kv 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,15 +1,7 @@
  ---
- !binary "U0hBMQ==":
- metadata.gz: !binary |-
- YTZmNmRlZDE1NWJmNmViNjYwZDE0NjY5ZmY2Mjk0MWEzNjBhODZkYQ==
- data.tar.gz: !binary |-
- MGY0OTJiMWNlNDQ5ZjJlZmM5YzczODI5ZTEwZjMwZjBiNGY5MjZkNA==
+ SHA1:
+ metadata.gz: 6ca5fa85eabb6bf32ae3a03dfb9c26950faef085
+ data.tar.gz: 96cfc42fa04f19849cf791b527045785f79ec665
  SHA512:
- metadata.gz: !binary |-
- NzgxZTMxNzIzNzk1OTc4NGFhYTQ1MTVmYzJjZmE4MzMyMGFmMGVhYjk2M2Rm
- MGUyMjM4NGM1MmQ5MWJhNGJjOTQxYmI5MzU0M2JlYjI0MmYxNzMwZTg3Mjdh
- ZWMxNmI0ZjlhMDE0YTY2YTUwM2IyYzY0NzIxYWU5OWY2ZTA3ZmI=
- data.tar.gz: !binary |-
- MDE3MzIzOGQ1ZGJmNmJjNzNjZjgzZjZjYzg0MWRmNTNhMjU0NzZjMTJjY2M4
- MTZiZDRlNTUwYzQ4ODllZGNiZjUzNDRiYWVjODlhNTI5Mzg2ZjNmZjVlZThh
- Y2RiMjNjMjYyMmVlNjRmODE3NDk4NTQyZDI4NWFiNGQxZGY5NWQ=
+ metadata.gz: 708d8e0d40e512a82e700ba0ca2e5df6fa1e464fa315c09d6c2b0cd9db6f41c2ecc5b5a58cfb97ddcec7bec2f92a312503527546e054e60a4b166e0376e6f20e
+ data.tar.gz: e3bbe7e9065e17f1f83b38a928cff43492148f4b7b769c3853340a9182b89ea014d832d3780cb5da72379bca79c72d39706652e4bf629131dae8583759871f05
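Note: in 0.1.1 the checksums are recorded as plain hex digests instead of the base64-encoded !binary YAML values used in 0.1.0. A minimal verification sketch in Ruby (file names are illustrative and assume the metadata.gz and data.tar.gz extracted from the downloaded .gem; this block is not part of the diff):

    require "digest"

    # Compare locally computed digests against the hex values recorded in checksums.yaml
    puts Digest::SHA1.file("metadata.gz").hexdigest     # should print 6ca5fa85eabb...
    puts Digest::SHA512.file("data.tar.gz").hexdigest   # should print e3bbe7e9065e...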
data/Gemfile CHANGED
@@ -1,3 +1,3 @@
- source 'http://rubygems.org'
- gem 'rake'
- gem 'gem_publisher'
+ source 'https://rubygems.org'
+ gemspec
+ gem "logstash", :github => "elasticsearch/logstash", :branch => "1.5"
data/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/Rakefile CHANGED
@@ -4,3 +4,4 @@ task :default do
  system("rake -T")
  end

+ require "logstash/devutils/rake"
data/lib/logstash/filters/kv.rb CHANGED
@@ -3,28 +3,28 @@ require "logstash/filters/base"
  require "logstash/namespace"

  # This filter helps automatically parse messages (or specific event fields)
- # which are of the 'foo=bar' variety.
- #
- # For example, if you have a log message which contains 'ip=1.2.3.4
- # error=REFUSED', you can parse those automatically by configuring:
+ # which are of the `foo=bar` variety.
  #
+ # For example, if you have a log message which contains `ip=1.2.3.4
+ # error=REFUSED`, you can parse those automatically by configuring:
+ # [source,ruby]
  # filter {
  # kv { }
  # }
  #
- # The above will result in a message of "ip=1.2.3.4 error=REFUSED" having
+ # The above will result in a message of `ip=1.2.3.4 error=REFUSED` having
  # the fields:
  #
- # * ip: 1.2.3.4
- # * error: REFUSED
+ # * `ip: 1.2.3.4`
+ # * `error: REFUSED`
  #
  # This is great for postfix, iptables, and other types of logs that
- # tend towards 'key=value' syntax.
+ # tend towards `key=value` syntax.
  #
  # You can configure any arbitrary strings to split your data on,
- # in case your data is not structured using '=' signs and whitespace.
+ # in case your data is not structured using `=` signs and whitespace.
  # For example, this filter can also be used to parse query parameters like
- # 'foo=bar&baz=fizz' by setting the `field_split` parameter to "&".
+ # `foo=bar&baz=fizz` by setting the `field_split` parameter to `&`.
  class LogStash::Filters::KV < LogStash::Filters::Base
  config_name "kv"
  milestone 2
@@ -34,10 +34,10 @@ class LogStash::Filters::KV < LogStash::Filters::Base
  # logs).
  #
  # These characters form a regex character class and thus you must escape special regex
- # characters like '[' or ']' using '\'.
- #
- # For example, to strip '<', '>', '[', ']' and ',' characters from values:
+ # characters like `[` or `]` using `\`.
  #
+ # For example, to strip `<`, `>`, `[`, `]` and `,` characters from values:
+ # [source,ruby]
  # filter {
  # kv {
  # trim => "<>\[\],"
@@ -49,10 +49,10 @@ class LogStash::Filters::KV < LogStash::Filters::Base
  # keys are wrapped in brackets or start with space.
  #
  # These characters form a regex character class and thus you must escape special regex
- # characters like '[' or ']' using '\'.
- #
- # For example, to strip '<' '>' '[' ']' and ',' characters from keys:
+ # characters like `[` or `]` using `\`.
  #
+ # For example, to strip `<` `>` `[` `]` and `,` characters from keys:
+ # [source,ruby]
  # filter {
  # kv {
  # trimkey => "<>\[\],"
@@ -63,52 +63,52 @@ class LogStash::Filters::KV < LogStash::Filters::Base
  # A string of characters to use as delimiters for parsing out key-value pairs.
  #
  # These characters form a regex character class and thus you must escape special regex
- # characters like '[' or ']' using '\'.
+ # characters like `[` or `]` using `\`.
  #
  # #### Example with URL Query Strings
  #
  # For example, to split out the args from a url query string such as
- # '?pin=12345~0&d=123&e=foo@bar.com&oq=bobo&ss=12345':
- #
+ # `?pin=12345~0&d=123&e=foo@bar.com&oq=bobo&ss=12345`:
+ # [source,ruby]
  # filter {
  # kv {
  # field_split => "&?"
  # }
  # }
  #
- # The above splits on both "&" and "?" characters, giving you the following
+ # The above splits on both `&` and `?` characters, giving you the following
  # fields:
  #
- # * pin: 12345~0
- # * d: 123
- # * e: foo@bar.com
- # * oq: bobo
- # * ss: 12345
+ # * `pin: 12345~0`
+ # * `d: 123`
+ # * `e: foo@bar.com`
+ # * `oq: bobo`
+ # * `ss: 12345`
  config :field_split, :validate => :string, :default => ' '


  # A string of characters to use as delimiters for identifying key-value relations.
  #
  # These characters form a regex character class and thus you must escape special regex
- # characters like '[' or ']' using '\'.
+ # characters like `[` or `]` using `\`.
  #
  # For example, to identify key-values such as
- # 'key1:value1 key2:value2':
- #
+ # `key1:value1 key2:value2`:
+ # [source,ruby]
  # filter { kv { value_split => ":" } }
  config :value_split, :validate => :string, :default => '='

  # A string to prepend to all of the extracted keys.
  #
  # For example, to prepend arg_ to all keys:
- #
+ # [source,ruby]
  # filter { kv { prefix => "arg_" } }
  config :prefix, :validate => :string, :default => ''

- # The field to perform 'key=value' searching on
+ # The field to perform `key=value` searching on
  #
  # For example, to process the `not_the_message` field:
- #
+ # [source,ruby]
  # filter { kv { source => "not_the_message" } }
  config :source, :validate => :string, :default => "message"

@@ -118,15 +118,16 @@ class LogStash::Filters::KV < LogStash::Filters::Base
  # event, as individual fields.
  #
  # For example, to place all keys into the event field kv:
- #
+ # [source,ruby]
  # filter { kv { target => "kv" } }
  config :target, :validate => :string

  # An array specifying the parsed keys which should be added to the event.
  # By default all keys will be added.
  #
- # For example, consider a source like "Hey, from=<abc>, to=def foo=bar".
- # To include "from" and "to", but exclude the "foo" key, you could use this configuration:
+ # For example, consider a source like `Hey, from=<abc>, to=def foo=bar`.
+ # To include `from` and `to`, but exclude the `foo` key, you could use this configuration:
+ # [source,ruby]
  # filter {
  # kv {
  # include_keys => [ "from", "to" ]
@@ -137,8 +138,9 @@ class LogStash::Filters::KV < LogStash::Filters::Base
  # An array specifying the parsed keys which should not be added to the event.
  # By default no keys will be excluded.
  #
- # For example, consider a source like "Hey, from=<abc>, to=def foo=bar".
- # To exclude "from" and "to", but retain the "foo" key, you could use this configuration:
+ # For example, consider a source like `Hey, from=<abc>, to=def foo=bar`.
+ # To exclude `from` and `to`, but retain the `foo` key, you could use this configuration:
+ # [source,ruby]
  # filter {
  # kv {
  # exclude_keys => [ "from", "to" ]
@@ -148,7 +150,7 @@ class LogStash::Filters::KV < LogStash::Filters::Base

  # A hash specifying the default keys and their values which should be added to the event
  # in case these keys do not exist in the source field being parsed.
- #
+ # [source,ruby]
  # filter {
  # kv {
  # default_keys => [ "from", "logstash@example.com",
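Note: the documentation hunks above are formatting-only changes, but together they cover most of the filter's options. A combined configuration sketch assembled from the examples shown in those comments (values are illustrative and not part of the diff):

    filter {
      kv {
        source       => "not_the_message"   # field to scan instead of the default "message"
        field_split  => "&?"                # split pairs on & and ?
        value_split  => "="                 # split key from value on =
        prefix       => "arg_"              # prepend arg_ to every extracted key
        target       => "kv"                # place all extracted keys under the kv field
        include_keys => [ "from", "to" ]    # only keep these parsed keys
      }
    }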
data/logstash-filter-kv.gemspec CHANGED
@@ -1,13 +1,13 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-filter-kv'
- s.version = '0.1.0'
+ s.version = '0.1.1'
  s.licenses = ['Apache License (2.0)']
  s.summary = "This filter helps automatically parse messages (or specific event fields) which are of the 'foo=bar' variety."
- s.description = "This filter helps automatically parse messages (or specific event fields) which are of the 'foo=bar' variety."
+ s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Elasticsearch"]
- s.email = 'richard.pijnenburg@elasticsearch.com'
- s.homepage = "http://logstash.net/"
+ s.email = 'info@elasticsearch.com'
+ s.homepage = "http://www.elasticsearch.org/guide/en/logstash/current/index.html"
  s.require_paths = ["lib"]

  # Files
@@ -17,10 +17,11 @@ Gem::Specification.new do |s|
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
- s.metadata = { "logstash_plugin" => "true", "group" => "filter" }
+ s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'

+ s.add_development_dependency 'logstash-devutils'
  end
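Note: the new description doubles as an install hint: the gem is meant to be installed into a Logstash installation rather than run on its own. For this particular gem the command given in the description would presumably read:

    $LS_HOME/bin/plugin install logstash-filter-kv

The added logstash-devutils development dependency supplies the shared rake tasks and RSpec helper that the Rakefile and spec changes elsewhere in this diff now require.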
data/spec/filters/kv_spec.rb CHANGED
@@ -1,4 +1,4 @@
- require "spec_helper"
+ require "logstash/devutils/rspec/spec_helper"
  require "logstash/filters/kv"

  describe LogStash::Filters::KV do
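Note: with the spec_helper now coming from logstash-devutils, the specs would typically be run via Bundler, e.g. `bundle install` followed by `bundle exec rspec spec/filters/kv_spec.rb`. A minimal spec sketch in the style that helper supported at the time (the config/sample/insist DSL is assumed, not shown in this diff):

    require "logstash/devutils/rspec/spec_helper"
    require "logstash/filters/kv"

    describe LogStash::Filters::KV do
      # assumed helper DSL from logstash-devutils' spec_helper
      config <<-CONFIG
        filter { kv { } }
      CONFIG

      sample "ip=1.2.3.4 error=REFUSED" do
        insist { subject["ip"] } == "1.2.3.4"
        insist { subject["error"] } == "REFUSED"
      end
    end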
metadata CHANGED
@@ -1,76 +1,87 @@
  --- !ruby/object:Gem::Specification
  name: logstash-filter-kv
  version: !ruby/object:Gem::Version
- version: 0.1.0
+ version: 0.1.1
  platform: ruby
  authors:
  - Elasticsearch
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-11-02 00:00:00.000000000 Z
+ date: 2014-11-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: logstash
- requirement: !ruby/object:Gem::Requirement
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: 1.4.0
  - - <
  - !ruby/object:Gem::Version
  version: 2.0.0
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: 1.4.0
  - - <
  - !ruby/object:Gem::Version
  version: 2.0.0
- description: This filter helps automatically parse messages (or specific event fields)
- which are of the 'foo=bar' variety.
- email: richard.pijnenburg@elasticsearch.com
+ prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
+ name: logstash-devutils
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ prerelease: false
+ type: :development
+ description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
+ email: info@elasticsearch.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
  - .gitignore
  - Gemfile
+ - LICENSE
  - Rakefile
  - lib/logstash/filters/kv.rb
  - logstash-filter-kv.gemspec
- - rakelib/publish.rake
- - rakelib/vendor.rake
  - spec/filters/kv_spec.rb
- homepage: http://logstash.net/
+ homepage: http://www.elasticsearch.org/guide/en/logstash/current/index.html
  licenses:
  - Apache License (2.0)
  metadata:
  logstash_plugin: 'true'
- group: filter
- post_install_message:
+ logstash_group: filter
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - '>='
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.4.1
- signing_key:
+ rubyforge_project:
+ rubygems_version: 2.4.4
+ signing_key:
  specification_version: 4
- summary: This filter helps automatically parse messages (or specific event fields)
- which are of the 'foo=bar' variety.
+ summary: This filter helps automatically parse messages (or specific event fields) which are of the 'foo=bar' variety.
  test_files:
  - spec/filters/kv_spec.rb
data/rakelib/publish.rake DELETED
@@ -1,9 +0,0 @@
- require "gem_publisher"
-
- desc "Publish gem to RubyGems.org"
- task :publish_gem do |t|
- gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
- gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
- puts "Published #{gem}" if gem
- end
-
data/rakelib/vendor.rake DELETED
@@ -1,169 +0,0 @@
- require "net/http"
- require "uri"
- require "digest/sha1"
-
- def vendor(*args)
- return File.join("vendor", *args)
- end
-
- directory "vendor/" => ["vendor"] do |task, args|
- mkdir task.name
- end
-
- def fetch(url, sha1, output)
-
- puts "Downloading #{url}"
- actual_sha1 = download(url, output)
-
- if actual_sha1 != sha1
- fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
- end
- end # def fetch
-
- def file_fetch(url, sha1)
- filename = File.basename( URI(url).path )
- output = "vendor/#{filename}"
- task output => [ "vendor/" ] do
- begin
- actual_sha1 = file_sha1(output)
- if actual_sha1 != sha1
- fetch(url, sha1, output)
- end
- rescue Errno::ENOENT
- fetch(url, sha1, output)
- end
- end.invoke
-
- return output
- end
-
- def file_sha1(path)
- digest = Digest::SHA1.new
- fd = File.new(path, "r")
- while true
- begin
- digest << fd.sysread(16384)
- rescue EOFError
- break
- end
- end
- return digest.hexdigest
- ensure
- fd.close if fd
- end
-
- def download(url, output)
- uri = URI(url)
- digest = Digest::SHA1.new
- tmp = "#{output}.tmp"
- Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
- request = Net::HTTP::Get.new(uri.path)
- http.request(request) do |response|
- fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
- size = (response["content-length"].to_i || -1).to_f
- count = 0
- File.open(tmp, "w") do |fd|
- response.read_body do |chunk|
- fd.write(chunk)
- digest << chunk
- if size > 0 && $stdout.tty?
- count += chunk.bytesize
- $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
- end
- end
- end
- $stdout.write("\r \r") if $stdout.tty?
- end
- end
-
- File.rename(tmp, output)
-
- return digest.hexdigest
- rescue SocketError => e
- puts "Failure while downloading #{url}: #{e}"
- raise
- ensure
- File.unlink(tmp) if File.exist?(tmp)
- end # def download
-
- def untar(tarball, &block)
- require "archive/tar/minitar"
- tgz = Zlib::GzipReader.new(File.open(tarball))
- # Pull out typesdb
- tar = Archive::Tar::Minitar::Input.open(tgz)
- tar.each do |entry|
- path = block.call(entry)
- next if path.nil?
- parent = File.dirname(path)
-
- mkdir_p parent unless File.directory?(parent)
-
- # Skip this file if the output file is the same size
- if entry.directory?
- mkdir path unless File.directory?(path)
- else
- entry_mode = entry.instance_eval { @mode } & 0777
- if File.exists?(path)
- stat = File.stat(path)
- # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
- # expose headers in the entry.
- entry_size = entry.instance_eval { @size }
- # If file sizes are same, skip writing.
- next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
- end
- puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
- File.open(path, "w") do |fd|
- # eof? check lets us skip empty files. Necessary because the API provided by
- # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
- # IO object. Something about empty files in this EntryStream causes
- # IO.copy_stream to throw "can't convert nil into String" on JRuby
- # TODO(sissel): File a bug about this.
- while !entry.eof?
- chunk = entry.read(16384)
- fd.write(chunk)
- end
- #IO.copy_stream(entry, fd)
- end
- File.chmod(entry_mode, path)
- end
- end
- tar.close
- File.unlink(tarball) if File.file?(tarball)
- end # def untar
-
- def ungz(file)
-
- outpath = file.gsub('.gz', '')
- tgz = Zlib::GzipReader.new(File.open(file))
- begin
- File.open(outpath, "w") do |out|
- IO::copy_stream(tgz, out)
- end
- File.unlink(file)
- rescue
- File.unlink(outpath) if File.file?(outpath)
- raise
- end
- tgz.close
- end
-
- desc "Process any vendor files required for this plugin"
- task "vendor" do |task, args|
-
- @files.each do |file|
- download = file_fetch(file['url'], file['sha1'])
- if download =~ /.tar.gz/
- prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
- untar(download) do |entry|
- if !file['files'].nil?
- next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
- out = entry.full_name.split("/").last
- end
- File.join('vendor', out)
- end
- elsif download =~ /.gz/
- ungz(download)
- end
- end
-
- end