logstash-filter-elasticsearch 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,15 +1,7 @@
1
1
  ---
2
- !binary "U0hBMQ==":
3
- metadata.gz: !binary |-
4
- YWFiNDI0NzAxYzY1MGI0NWQxZDU2M2E2MjI3NjgyMDY0ZGQwZDk3Zg==
5
- data.tar.gz: !binary |-
6
- MGViMTAzMWFhOWQxNTFjYTU1NmIyNmNiYjg3OTI1MmZhYTkxOTM3Ng==
2
+ SHA1:
3
+ metadata.gz: 6664ba05aa755a5dea1988f48742fb24d932a85f
4
+ data.tar.gz: 98734a64d6560a7c4876ec5300cf8f71d385b343
7
5
  SHA512:
8
- metadata.gz: !binary |-
9
- ZmZmODQxNTk5MzM3ZjE2ZTgxZjExMTI3NDQwODRmODlkZGFjZmYxMTFkNGQy
10
- NzMwYzU5YmQ2ZWIzMTFkM2VjMzg5MDA0OGQyM2UwZDIwOGUwYTNjZDI4ZTZl
11
- MThiNTI3MTg3Yzc3ZDYyZTc0MTRlNGViODgyZTkwYjY0NGQyM2E=
12
- data.tar.gz: !binary |-
13
- NzEzMjMxZWI5MjdhZDBiNTVhZGI1OGI0MTcyNjRkNzBiOGQyMTc5MWVjMzZj
14
- ZTI1NTc0OGRiNDg2YTExMjBlZjg5MTY2ZjEyNjA0YTA2ZTBkY2I5N2ZkZmNj
15
- MzdlNGNlMDNjMzA5MTVhODE1OGI5ODc3N2E4ODMzNjllYmI3ZjQ=
6
+ metadata.gz: 2b8306ad27c719ca7f0ba467ccad95bf73efe22f48624e4debfc47b6fc17cb46755f267be5ac6dfe05e61bdeddd11663ed4421a7b6a3247dcab519035790dd9f
7
+ data.tar.gz: 2b23544f61c59b9f952e979d656c067704367be33e29b628c909c7dbb906f8615f75da7b2cc824c0e71562456c093f6fc541055bd74be233371db0c5aab8cc2a
data/Gemfile CHANGED
@@ -1,3 +1,3 @@
1
- source 'http://rubygems.org'
2
- gem 'rake'
3
- gem 'gem_publisher'
1
+ source 'https://rubygems.org'
2
+ gemspec
3
+ gem "logstash", :github => "elasticsearch/logstash", :branch => "1.5"
data/Rakefile CHANGED
@@ -4,3 +4,4 @@ task :default do
4
4
  system("rake -T")
5
5
  end
6
6
 
7
+ require "logstash/devutils/rake"
@@ -1,16 +1,17 @@
1
1
  require "logstash/filters/base"
2
2
  require "logstash/namespace"
3
3
  require "logstash/util/fieldreference"
4
+ require "base64"
4
5
 
5
6
 
6
7
  # Search elasticsearch for a previous log event and copy some fields from it
7
8
  # into the current event. Below is a complete example of how this filter might
8
9
  # be used. Whenever logstash receives an "end" event, it uses this elasticsearch
9
10
  # filter to find the matching "start" event based on some operation identifier.
10
- # Then it copies the @timestamp field from the "start" event into a new field on
11
+ # Then it copies the `@timestamp` field from the "start" event into a new field on
11
12
  # the "end" event. Finally, using a combination of the "date" filter and the
12
13
  # "ruby" filter, we calculate the time duration in hours between the two events.
13
- #
14
+ # [source,ruby]
14
15
  # if [type] == "end" {
15
16
  # elasticsearch {
16
17
  # hosts => ["es-server"]
@@ -38,18 +39,48 @@ class LogStash::Filters::Elasticsearch < LogStash::Filters::Base
38
39
  # Elasticsearch query string
39
40
  config :query, :validate => :string
40
41
 
41
- # Comma-delimited list of <field>:<direction> pairs that define the sort order
42
+ # Comma-delimited list of `<field>:<direction>` pairs that define the sort order
42
43
  config :sort, :validate => :string, :default => "@timestamp:desc"
43
44
 
44
45
  # Hash of fields to copy from old event (found via elasticsearch) into new event
45
46
  config :fields, :validate => :hash, :default => {}
46
47
 
48
+ # Basic Auth - username
49
+ config :user, :validate => :string
50
+
51
+ # Basic Auth - password
52
+ config :password, :validate => :password
53
+
54
+ # SSL
55
+ config :ssl, :validate => :boolean, :default => false
56
+
57
+ # SSL Certificate Authority file
58
+ config :ca_file, :validate => :path
59
+
60
+
47
61
  public
48
62
  def register
49
63
  require "elasticsearch"
50
64
 
51
- @logger.info("New ElasticSearch filter", :hosts => @hosts)
52
- @client = Elasticsearch::Client.new hosts: @hosts
65
+ transport_options = {}
66
+
67
+ if @user && @password
68
+ token = Base64.strict_encode64("#{@user}:#{@password.value}")
69
+ transport_options[:headers] = { Authorization: "Basic #{token}" }
70
+ end
71
+
72
+ hosts = if @ssl then
73
+ @hosts.map {|h| { host: h, scheme: 'https' } }
74
+ else
75
+ @hosts
76
+ end
77
+
78
+ if @ssl && @ca_file
79
+ transport_options[:ssl] = { ca_file: @ca_file }
80
+ end
81
+
82
+ @logger.info("New ElasticSearch filter", :hosts => hosts)
83
+ @client = Elasticsearch::Client.new hosts: hosts, transport_options: transport_options
53
84
  end # def register
54
85
 
55
86
  public
@@ -1,13 +1,13 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-filter-elasticsearch'
4
- s.version = '0.1.0'
4
+ s.version = '0.1.1'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = "Search elasticsearch for a previous log event and copy some fields from it into the current event"
7
- s.description = "Search elasticsearch for a previous log event and copy some fields from it into the current event."
7
+ s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
8
8
  s.authors = ["Elasticsearch"]
9
- s.email = 'richard.pijnenburg@elasticsearch.com'
10
- s.homepage = "http://logstash.net/"
9
+ s.email = 'info@elasticsearch.com'
10
+ s.homepage = "http://www.elasticsearch.org/guide/en/logstash/current/index.html"
11
11
  s.require_paths = ["lib"]
12
12
 
13
13
  # Files
@@ -17,11 +17,12 @@ Gem::Specification.new do |s|
17
17
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
18
18
 
19
19
  # Special flag to let us know this is actually a logstash plugin
20
- s.metadata = { "logstash_plugin" => "true", "group" => "filter" }
20
+ s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
21
21
 
22
22
  # Gem dependencies
23
23
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
24
24
  s.add_runtime_dependency 'elasticsearch'
25
25
 
26
+ s.add_development_dependency 'logstash-devutils'
26
27
  end
27
28
 
@@ -1 +1 @@
1
- require 'spec_helper'
1
+ require "logstash/devutils/rspec/spec_helper"
metadata CHANGED
@@ -1,52 +1,65 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-filter-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.1.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elasticsearch
8
- autorequire:
8
+ autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2014-11-05 00:00:00.000000000 Z
11
+ date: 2014-11-19 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: logstash
15
- requirement: !ruby/object:Gem::Requirement
15
+ version_requirements: !ruby/object:Gem::Requirement
16
16
  requirements:
17
- - - ! '>='
17
+ - - '>='
18
18
  - !ruby/object:Gem::Version
19
19
  version: 1.4.0
20
20
  - - <
21
21
  - !ruby/object:Gem::Version
22
22
  version: 2.0.0
23
- type: :runtime
24
- prerelease: false
25
- version_requirements: !ruby/object:Gem::Requirement
23
+ requirement: !ruby/object:Gem::Requirement
26
24
  requirements:
27
- - - ! '>='
25
+ - - '>='
28
26
  - !ruby/object:Gem::Version
29
27
  version: 1.4.0
30
28
  - - <
31
29
  - !ruby/object:Gem::Version
32
30
  version: 2.0.0
31
+ prerelease: false
32
+ type: :runtime
33
33
  - !ruby/object:Gem::Dependency
34
34
  name: elasticsearch
35
+ version_requirements: !ruby/object:Gem::Requirement
36
+ requirements:
37
+ - - '>='
38
+ - !ruby/object:Gem::Version
39
+ version: '0'
35
40
  requirement: !ruby/object:Gem::Requirement
36
41
  requirements:
37
- - - ! '>='
42
+ - - '>='
38
43
  - !ruby/object:Gem::Version
39
44
  version: '0'
40
- type: :runtime
41
45
  prerelease: false
46
+ type: :runtime
47
+ - !ruby/object:Gem::Dependency
48
+ name: logstash-devutils
42
49
  version_requirements: !ruby/object:Gem::Requirement
43
50
  requirements:
44
- - - ! '>='
51
+ - - '>='
45
52
  - !ruby/object:Gem::Version
46
53
  version: '0'
47
- description: Search elasticsearch for a previous log event and copy some fields from
48
- it into the current event.
49
- email: richard.pijnenburg@elasticsearch.com
54
+ requirement: !ruby/object:Gem::Requirement
55
+ requirements:
56
+ - - '>='
57
+ - !ruby/object:Gem::Version
58
+ version: '0'
59
+ prerelease: false
60
+ type: :development
61
+ description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
62
+ email: info@elasticsearch.com
50
63
  executables: []
51
64
  extensions: []
52
65
  extra_rdoc_files: []
@@ -57,35 +70,32 @@ files:
57
70
  - Rakefile
58
71
  - lib/logstash/filters/elasticsearch.rb
59
72
  - logstash-filter-elasticsearch.gemspec
60
- - rakelib/publish.rake
61
- - rakelib/vendor.rake
62
73
  - spec/filters/elasticsearch_spec.rb
63
- homepage: http://logstash.net/
74
+ homepage: http://www.elasticsearch.org/guide/en/logstash/current/index.html
64
75
  licenses:
65
76
  - Apache License (2.0)
66
77
  metadata:
67
78
  logstash_plugin: 'true'
68
- group: filter
69
- post_install_message:
79
+ logstash_group: filter
80
+ post_install_message:
70
81
  rdoc_options: []
71
82
  require_paths:
72
83
  - lib
73
84
  required_ruby_version: !ruby/object:Gem::Requirement
74
85
  requirements:
75
- - - ! '>='
86
+ - - '>='
76
87
  - !ruby/object:Gem::Version
77
88
  version: '0'
78
89
  required_rubygems_version: !ruby/object:Gem::Requirement
79
90
  requirements:
80
- - - ! '>='
91
+ - - '>='
81
92
  - !ruby/object:Gem::Version
82
93
  version: '0'
83
94
  requirements: []
84
- rubyforge_project:
85
- rubygems_version: 2.4.1
86
- signing_key:
95
+ rubyforge_project:
96
+ rubygems_version: 2.4.4
97
+ signing_key:
87
98
  specification_version: 4
88
- summary: Search elasticsearch for a previous log event and copy some fields from it
89
- into the current event
99
+ summary: Search elasticsearch for a previous log event and copy some fields from it into the current event
90
100
  test_files:
91
101
  - spec/filters/elasticsearch_spec.rb
data/rakelib/publish.rake DELETED
@@ -1,9 +0,0 @@
1
- require "gem_publisher"
2
-
3
- desc "Publish gem to RubyGems.org"
4
- task :publish_gem do |t|
5
- gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
6
- gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
7
- puts "Published #{gem}" if gem
8
- end
9
-
data/rakelib/vendor.rake DELETED
@@ -1,169 +0,0 @@
1
- require "net/http"
2
- require "uri"
3
- require "digest/sha1"
4
-
5
- def vendor(*args)
6
- return File.join("vendor", *args)
7
- end
8
-
9
- directory "vendor/" => ["vendor"] do |task, args|
10
- mkdir task.name
11
- end
12
-
13
- def fetch(url, sha1, output)
14
-
15
- puts "Downloading #{url}"
16
- actual_sha1 = download(url, output)
17
-
18
- if actual_sha1 != sha1
19
- fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
20
- end
21
- end # def fetch
22
-
23
- def file_fetch(url, sha1)
24
- filename = File.basename( URI(url).path )
25
- output = "vendor/#{filename}"
26
- task output => [ "vendor/" ] do
27
- begin
28
- actual_sha1 = file_sha1(output)
29
- if actual_sha1 != sha1
30
- fetch(url, sha1, output)
31
- end
32
- rescue Errno::ENOENT
33
- fetch(url, sha1, output)
34
- end
35
- end.invoke
36
-
37
- return output
38
- end
39
-
40
- def file_sha1(path)
41
- digest = Digest::SHA1.new
42
- fd = File.new(path, "r")
43
- while true
44
- begin
45
- digest << fd.sysread(16384)
46
- rescue EOFError
47
- break
48
- end
49
- end
50
- return digest.hexdigest
51
- ensure
52
- fd.close if fd
53
- end
54
-
55
- def download(url, output)
56
- uri = URI(url)
57
- digest = Digest::SHA1.new
58
- tmp = "#{output}.tmp"
59
- Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
60
- request = Net::HTTP::Get.new(uri.path)
61
- http.request(request) do |response|
62
- fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
63
- size = (response["content-length"].to_i || -1).to_f
64
- count = 0
65
- File.open(tmp, "w") do |fd|
66
- response.read_body do |chunk|
67
- fd.write(chunk)
68
- digest << chunk
69
- if size > 0 && $stdout.tty?
70
- count += chunk.bytesize
71
- $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
72
- end
73
- end
74
- end
75
- $stdout.write("\r \r") if $stdout.tty?
76
- end
77
- end
78
-
79
- File.rename(tmp, output)
80
-
81
- return digest.hexdigest
82
- rescue SocketError => e
83
- puts "Failure while downloading #{url}: #{e}"
84
- raise
85
- ensure
86
- File.unlink(tmp) if File.exist?(tmp)
87
- end # def download
88
-
89
- def untar(tarball, &block)
90
- require "archive/tar/minitar"
91
- tgz = Zlib::GzipReader.new(File.open(tarball))
92
- # Pull out typesdb
93
- tar = Archive::Tar::Minitar::Input.open(tgz)
94
- tar.each do |entry|
95
- path = block.call(entry)
96
- next if path.nil?
97
- parent = File.dirname(path)
98
-
99
- mkdir_p parent unless File.directory?(parent)
100
-
101
- # Skip this file if the output file is the same size
102
- if entry.directory?
103
- mkdir path unless File.directory?(path)
104
- else
105
- entry_mode = entry.instance_eval { @mode } & 0777
106
- if File.exists?(path)
107
- stat = File.stat(path)
108
- # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
109
- # expose headers in the entry.
110
- entry_size = entry.instance_eval { @size }
111
- # If file sizes are same, skip writing.
112
- next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
113
- end
114
- puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
115
- File.open(path, "w") do |fd|
116
- # eof? check lets us skip empty files. Necessary because the API provided by
117
- # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
118
- # IO object. Something about empty files in this EntryStream causes
119
- # IO.copy_stream to throw "can't convert nil into String" on JRuby
120
- # TODO(sissel): File a bug about this.
121
- while !entry.eof?
122
- chunk = entry.read(16384)
123
- fd.write(chunk)
124
- end
125
- #IO.copy_stream(entry, fd)
126
- end
127
- File.chmod(entry_mode, path)
128
- end
129
- end
130
- tar.close
131
- File.unlink(tarball) if File.file?(tarball)
132
- end # def untar
133
-
134
- def ungz(file)
135
-
136
- outpath = file.gsub('.gz', '')
137
- tgz = Zlib::GzipReader.new(File.open(file))
138
- begin
139
- File.open(outpath, "w") do |out|
140
- IO::copy_stream(tgz, out)
141
- end
142
- File.unlink(file)
143
- rescue
144
- File.unlink(outpath) if File.file?(outpath)
145
- raise
146
- end
147
- tgz.close
148
- end
149
-
150
- desc "Process any vendor files required for this plugin"
151
- task "vendor" do |task, args|
152
-
153
- @files.each do |file|
154
- download = file_fetch(file['url'], file['sha1'])
155
- if download =~ /.tar.gz/
156
- prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
157
- untar(download) do |entry|
158
- if !file['files'].nil?
159
- next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
160
- out = entry.full_name.split("/").last
161
- end
162
- File.join('vendor', out)
163
- end
164
- elsif download =~ /.gz/
165
- ungz(download)
166
- end
167
- end
168
-
169
- end