logstash-output-elasticsearch 2.0.0.pre.beta-java → 2.1.0-java

This diff shows the changes between two publicly released versions of this package, as published to its public registry. It is provided for informational purposes only.
@@ -6,15 +6,19 @@ require "elasticsearch/transport/transport/http/manticore"
 
 module LogStash::Outputs::Elasticsearch
   class HttpClient
-    attr_reader :client, :options, :client_options
-    DEFAULT_OPTIONS = {
-      :port => 9200
-    }
+    attr_reader :client, :options, :client_options, :sniffer_thread
+    # This is here in case we use DEFAULT_OPTIONS in the future
+    # DEFAULT_OPTIONS = {
+    #   :setting => value
+    # }
 
     def initialize(options={})
-      @logger = Cabin::Channel.get
-      @options = DEFAULT_OPTIONS.merge(options)
+      @logger = options[:logger]
+      # Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
+      # @options = DEFAULT_OPTIONS.merge(options)
+      @options = options
       @client = build_client(@options)
+      start_sniffing!
     end
 
     def template_install(name, template, force=false)
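
Note: two consequences of this first hunk. The client no longer obtains its own Cabin channel; callers must now pass a `:logger` in the options hash (the rewritten specs below pass `Cabin::Channel.get`). And with `DEFAULT_OPTIONS` gone, `:port => 9200` is no longer merged in; the 9200 fallback moves into the URI construction in `build_client`, shown in the next hunk.
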
@@ -49,26 +53,78 @@ module LogStash::Outputs::Elasticsearch
         end
       end.flatten
 
-      bulk_response = @client.bulk(:body => bulk_body)
+      @client.bulk(:body => bulk_body)
+    end
+
+    def start_sniffing!
+      if options[:sniffing]
+        @sniffer_thread = Thread.new do
+          loop do
+            sniff!
+            sleep (options[:sniffing_delay].to_f || 30)
+          end
+        end
+      end
+    end
 
-      self.class.normalize_bulk_response(bulk_response)
+    def stop_sniffing!
+      @sniffer_thread.kill() if @sniffer_thread
+    end
+
+    def sniff!
+      client.transport.reload_connections! if options[:sniffing]
+      hosts_by_name = client.transport.hosts.map {|h| h["name"]}.sort
+      @logger.debug({"count" => hosts_by_name.count, "hosts" => hosts_by_name})
+    rescue StandardError => e
+      @logger.error("Error while sniffing connection",
+                    :message => e.message,
+                    :class => e.class.name,
+                    :backtrace => e.backtrace)
     end
 
     private
 
+    # Builds a client and returns an Elasticsearch::Client
+    #
+    # The `options` is a hash where the following symbol keys have meaning:
+    #
+    # * `:hosts` - array of String. Set a list of hosts to use for communication.
+    # * `:port` - number. set the port to use to communicate with Elasticsearch
+    # * `:user` - String. The user to use for authentication.
+    # * `:password` - String. The password to use for authentication.
+    # * `:timeout` - Float. A duration value, in seconds, after which a socket
+    #    operation or request will be aborted if not yet successfull
+    # * `:client_settings` - a hash; see below for keys.
+    #
+    # The `client_settings` key is a has that can contain other settings:
+    #
+    # * `:ssl` - Boolean. Enable or disable SSL/TLS.
+    # * `:proxy` - String. Choose a HTTP HTTProxy to use.
+    # * `:path` - String. The leading path for prefixing Elasticsearch
+    #   requests. This is sometimes used if you are proxying Elasticsearch access
+    #   through a special http path, such as using mod_rewrite.
     def build_client(options)
-      uris = options[:hosts].map do |host|
-        "http://#{host}:#{options[:port]}#{options[:client_settings][:path]}"
+      hosts = options[:hosts] || ["127.0.0.1"]
+      client_settings = options[:client_settings] || {}
+      timeout = options[:timeout] || 0
+
+      uris = hosts.map do |host|
+        proto = client_settings[:ssl] ? "https" : "http"
+        if host =~ /:\d+\z/
+          "#{proto}://#{host}#{client_settings[:path]}"
+        else
+          # Use default port of 9200 if none provided with host.
+          "#{proto}://#{host}:9200#{client_settings[:path]}"
+        end
       end
 
       @client_options = {
         :hosts => uris,
-        :ssl => options[:client_settings][:ssl],
-        :reload_connections => options[:client_settings][:reload_connections],
-        :transport_options => { # manticore settings so we
-          :socket_timeout => 0, # do not timeout socket reads
-          :request_timeout => 0, # and requests
-          :proxy => options[:client_settings][:proxy]
+        :ssl => client_settings[:ssl],
+        :transport_options => {
+          :socket_timeout => timeout,
+          :request_timeout => timeout,
+          :proxy => client_settings[:proxy]
         },
         :transport_class => ::Elasticsearch::Transport::Transport::HTTP::Manticore
       }
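
Note: hosts may now carry their own port, and the scheme follows `client_settings[:ssl]`. A minimal standalone sketch of the new URI logic (the hostnames are illustrative, not from the plugin):

    # Rough re-statement of the uris block above, for illustration only.
    def to_uri(host, ssl: false, path: nil)
      proto = ssl ? "https" : "http"
      if host =~ /:\d+\z/
        "#{proto}://#{host}#{path}"      # host already names a port
      else
        "#{proto}://#{host}:9200#{path}" # fall back to 9200
      end
    end

    to_uri("127.0.0.1")                  # => "http://127.0.0.1:9200"
    to_uri("es1.example.com:9201")       # => "http://es1.example.com:9201"
    to_uri("es1.example.com", ssl: true) # => "https://es1.example.com:9200"

One caveat in the new sniffer: `options[:sniffing_delay].to_f || 30` never falls back to 30, because `to_f` always returns a Float (`nil.to_f` is `0.0`, which is truthy in Ruby), so an unset `:sniffing_delay` yields `sleep 0` rather than a 30-second pause.
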
@@ -78,27 +134,9 @@ module LogStash::Outputs::Elasticsearch
         @client_options[:headers] = { "Authorization" => "Basic #{token}" }
       end
 
-      c = Elasticsearch::Client.new(client_options)
-      c.transport.reload_connections! if options[:client_settings][:reload_connections]
-      c
-    end
+      @logger.debug? && @logger.debug("Elasticsearch HTTP client options", client_options)
 
-    def self.normalize_bulk_response(bulk_response)
-      if bulk_response["errors"]
-        # The structure of the response from the REST Bulk API is follows:
-        # {"took"=>74, "errors"=>true, "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
-        #                                                    "_type"=>"logs",
-        #                                                    "_id"=>"AUxTS2C55Jrgi-hC6rQF",
-        #                                                    "_version"=>1,
-        #                                                    "status"=>400,
-        #                                                    "error"=>"MapperParsingException[failed to parse]..."}}]}
-        # where each `item` is a hash of {OPTYPE => Hash[]}. calling first, will retrieve
-        # this hash as a single array with two elements, where the value is the second element (i.first[1])
-        # then the status of that item is retrieved.
-        {"errors" => true, "statuses" => bulk_response["items"].map { |i| i.first[1]['status'] }}
-      else
-        {"errors" => false}
-      end
+      Elasticsearch::Client.new(client_options)
     end
 
     def template_exists?(name)
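
Note: with `normalize_bulk_response` deleted, `bulk` now returns the raw REST Bulk API response rather than the condensed `{"errors" => ..., "statuses" => [...]}` hash. Per the comment removed above, callers now see the full shape (field values illustrative):

    {
      "took"   => 74,
      "errors" => true,
      "items"  => [
        {"create" => {"_index" => "logstash-2014.11.17", "_type" => "logs",
                      "_id" => "AUxTS2C55Jrgi-hC6rQF", "_version" => 1,
                      "status" => 400,
                      "error"  => "MapperParsingException[failed to parse]..."}}
      ]
    }
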
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-elasticsearch'
-  s.version = '2.0.0-beta'
+  s.version = '2.1.0'
   s.licenses = ['apache-2.0']
   s.summary = "Logstash Output to Elasticsearch"
   s.description = "Output events to elasticsearch"
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
   s.require_paths = ["lib"]
 
   # Files
-  s.files = `git ls-files`.split($\)
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
 
   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
@@ -21,13 +21,13 @@ Gem::Specification.new do |s|
 
   # Gem dependencies
   s.add_runtime_dependency 'concurrent-ruby'
-  s.add_runtime_dependency 'elasticsearch', ['>= 1.0.10', '~> 1.0']
+  s.add_runtime_dependency 'elasticsearch', ['>= 1.0.13', '~> 1.0']
   s.add_runtime_dependency 'stud', ['>= 0.0.17', '~> 0.0']
   s.add_runtime_dependency 'cabin', ['~> 0.6']
-  s.add_runtime_dependency "logstash-core", '>= 1.4.0', '< 2.0.0'
+  s.add_runtime_dependency "logstash-core", ">= 2.0.0.snapshot", "< 3.0.0"
 
   s.add_development_dependency 'ftw', '~> 0.0.42'
-  s.add_development_dependency 'logstash-input-generator'
+  s.add_development_dependency 'logstash-codec-plain'
 
   if RUBY_PLATFORM == 'java'
     s.platform = RUBY_PLATFORM
@@ -36,4 +36,5 @@ Gem::Specification.new do |s|
 
   s.add_development_dependency 'logstash-devutils'
   s.add_development_dependency 'longshoreman'
+  s.add_development_dependency 'flores'
 end
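
Taken together, the gemspec changes retarget the plugin at Logstash 2.x (`logstash-core >= 2.0.0.snapshot, < 3.0.0`), raise the minimum elasticsearch-ruby client to 1.0.13, enumerate packaged files with `Dir[]` so building the gem no longer requires a git checkout, and adjust the test dependencies: `logstash-input-generator` is dropped in favor of `logstash-codec-plain` (the rewritten specs below no longer run generator pipelines), and `flores` is added.
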
@@ -4,6 +4,7 @@ require "logstash/plugin"
 require "logstash/json"
 require "stud/try"
 require "longshoreman"
+require "logstash/outputs/elasticsearch"
 
 CONTAINER_NAME = "logstash-output-elasticsearch-#{rand(999).to_s}"
 CONTAINER_IMAGE = "elasticsearch"
@@ -6,13 +6,16 @@ shared_examples "an indexer" do
   let(:event_count) { 10000 + rand(500) }
   let(:flush_size) { rand(200) + 1 }
   let(:config) { "not implemented" }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-  it "ships events" do
-    insist { config } != "not implemented"
-
-    pipeline = LogStash::Pipeline.new(config)
-    pipeline.run
+  before do
+    subject.register
+    event_count.times do
+      subject.receive(LogStash::Event.new("message" => "Hello World!", "type" => type))
+    end
+  end
 
+  it "ships events" do
     index_url = "http://#{get_host}:#{get_port}/#{index}"
 
     ftw = FTW::Agent.new
@@ -42,23 +45,12 @@ end
 describe "an indexer with custom index_type", :integration => true do
   it_behaves_like "an indexer" do
     let(:config) {
-      <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-            type => "#{type}"
-          }
-        }
-        output {
-          elasticsearch {
-            hosts => "#{get_host()}"
-            port => "#{get_port}"
-            index => "#{index}"
-            flush_size => #{flush_size}
-          }
+      {
+        "hosts" => get_host,
+        "port" => get_port,
+        "index" => index,
+        "flush_size" => flush_size
       }
-      CONFIG
     }
   end
 end
@@ -67,22 +59,12 @@ describe "an indexer with no type value set (default to logs)", :integration =>
   it_behaves_like "an indexer" do
     let(:type) { "logs" }
     let(:config) {
-      <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-          }
-        }
-        output {
-          elasticsearch {
-            hosts => "#{get_host()}"
-            port => "#{get_port}"
-            index => "#{index}"
-            flush_size => #{flush_size}
-          }
+      {
+        "hosts" => get_host,
+        "port" => get_port,
+        "index" => index,
+        "flush_size" => flush_size
       }
-      CONFIG
     }
   end
 end
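
The integration specs above no longer assemble a generator-input pipeline from a heredoc config string; they instantiate the output with a plain settings hash and push events at it directly. Condensed, the new pattern the shared example uses is:

    subject = LogStash::Outputs::ElasticSearch.new(
      "hosts"      => get_host,
      "port"       => get_port,
      "index"      => index,
      "flush_size" => flush_size
    )
    subject.register
    event_count.times do
      subject.receive(LogStash::Event.new("message" => "Hello World!", "type" => type))
    end
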
@@ -11,8 +11,18 @@ describe "failures in bulk class expected behavior", :integration => true do
   let(:max_retries) { 3 }
 
   def mock_actions_with_response(*resp)
-    LogStash::Outputs::Elasticsearch::HttpClient
-      .any_instance.stub(:bulk).and_return(*resp)
+    expanded_responses = resp.map do |resp|
+      items = resp["statuses"] && resp["statuses"].map do |status|
+        {"create" => {"status" => status, "error" => "Error for #{status}"}}
+      end
+
+      {
+        "errors" => resp["errors"],
+        "items" => items
+      }
+    end
+
+    allow_any_instance_of(LogStash::Outputs::Elasticsearch::HttpClient).to receive(:bulk).and_return(*expanded_responses)
   end
 
   subject! do
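
Because `bulk` now returns raw responses (see the http client hunks above), the helper expands the old normalized fixtures into the raw shape. For example, `mock_actions_with_response({"errors" => true, "statuses" => [400]})` now stubs `bulk` to return:

    {
      "errors" => true,
      "items"  => [{"create" => {"status" => 400, "error" => "Error for 400"}}]
    }
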
@@ -61,7 +71,7 @@ describe "failures in bulk class expected behavior", :integration => true do
     end
     subject.register
     subject.receive(event1)
-    subject.teardown
+    subject.close
   end
 
   it "should retry actions with response status of 503" do
@@ -109,7 +119,7 @@ describe "failures in bulk class expected behavior", :integration => true do
     subject.register
     subject.receive(invalid_event)
     expect(subject).not_to receive(:retry_push)
-    subject.teardown
+    subject.close
 
     @es.indices.refresh
     sleep(5)
@@ -123,7 +133,7 @@ describe "failures in bulk class expected behavior", :integration => true do
     subject.register
     subject.receive(event1)
     expect(subject).not_to receive(:retry_push)
-    subject.teardown
+    subject.close
     @es.indices.refresh
     sleep(5)
     Stud::try(10.times) do
@@ -136,7 +146,7 @@ describe "failures in bulk class expected behavior", :integration => true do
     subject.register
     subject.receive(invalid_event)
     subject.receive(event1)
-    subject.teardown
+    subject.close
 
     @es.indices.refresh
     sleep(5)
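
The repeated `subject.teardown` → `subject.close` edits in this spec track the Logstash 2.0 plugin API, which renamed the plugin shutdown hook from `teardown` to `close`.
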
@@ -7,14 +7,17 @@ shared_examples "a routing indexer" do
   let(:flush_size) { rand(200) + 1 }
   let(:routing) { "not_implemented" }
   let(:config) { "not_implemented" }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-  it "ships events" do
-    insist { routing } != "not_implemented"
-    insist { config } != "not_implemented"
+  before do
+    subject.register
+    event_count.times do
+      subject.receive(LogStash::Event.new("message" => "Hello World!", "type" => type))
+    end
+  end
 
-    pipeline = LogStash::Pipeline.new(config)
-    pipeline.run
 
+  it "ships events" do
     index_url = "http://#{get_host()}:#{get_port()}/#{index}"
 
     ftw = FTW::Agent.new
@@ -36,24 +39,13 @@ describe "(http protocol) index events with static routing", :integration => tru
   it_behaves_like 'a routing indexer' do
     let(:routing) { "test" }
     let(:config) {
-      <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-            type => "#{type}"
-          }
-        }
-        output {
-          elasticsearch {
-            hosts => "#{get_host()}"
-            port => "#{get_port()}"
-            index => "#{index}"
-            flush_size => #{flush_size}
-            routing => "#{routing}"
-          }
+      {
+        "hosts" => get_host,
+        "port" => get_port,
+        "index" => index,
+        "flush_size" => flush_size,
+        "routing" => routing
       }
-      CONFIG
     }
   end
 end
@@ -62,24 +54,13 @@ describe "(http_protocol) index events with fieldref in routing value", :integra
   it_behaves_like 'a routing indexer' do
     let(:routing) { "test" }
     let(:config) {
-      <<-CONFIG
-        input {
-          generator {
-            message => "#{routing}"
-            count => #{event_count}
-            type => "#{type}"
-          }
-        }
-        output {
-          elasticsearch {
-            hosts => "#{get_host()}"
-            port => "#{get_port()}"
-            index => "#{index}"
-            flush_size => #{flush_size}
-            routing => "%{message}"
-          }
+      {
+        "hosts" => get_host,
+        "port" => get_port,
+        "index" => index,
+        "flush_size" => flush_size,
+        "routing" => "%{message}"
       }
-      CONFIG
     }
   end
 end
@@ -3,24 +3,39 @@ require "logstash/outputs/elasticsearch/http_client"
 require "java"
 
 describe LogStash::Outputs::Elasticsearch::HttpClient do
-  context "successful" do
-    it "should map correctly" do
-      bulk_response = {"took"=>74, "errors"=>false, "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
-                                                                          "_type"=>"logs", "_id"=>"AUxTS2C55Jrgi-hC6rQF",
-                                                                          "_version"=>1, "status"=>201}}]}
-      actual = LogStash::Outputs::Elasticsearch::HttpClient.normalize_bulk_response(bulk_response)
-      insist { actual } == {"errors"=> false}
+  describe "sniffing" do
+    let(:base_options) { {:hosts => ["127.0.0.1"], :logger => Cabin::Channel.get }}
+    let(:client) { LogStash::Outputs::Elasticsearch::HttpClient.new(base_options.merge(client_opts)) }
+    let(:transport) { client.client.transport }
+
+    before do
+      allow(transport).to receive(:reload_connections!)
     end
-  end
 
-  context "contains failures" do
-    it "should map correctly" do
-      bulk_response = {"took"=>71, "errors"=>true,
-                       "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
-                                             "_type"=>"logs", "_id"=>"AUxTQ_OI5Jrgi-hC6rQB", "status"=>400,
-                                             "error"=>"MapperParsingException[failed to parse]..."}}]}
-      actual = LogStash::Outputs::Elasticsearch::HttpClient.normalize_bulk_response(bulk_response)
-      insist { actual } == {"errors"=> true, "statuses"=> [400]}
+    context "with sniffing enabled" do
+      let(:client_opts) { {:sniffing => true, :sniffing_delay => 1 } }
+
+      after do
+        client.stop_sniffing!
+      end
+
+      it "should start the sniffer" do
+        expect(client.sniffer_thread).to be_a(Thread)
+      end
+
+      it "should periodically sniff the client" do
+        sleep 2
+        expect(transport).to have_received(:reload_connections!)
+      end
     end
+
+    context "with sniffing disabled" do
+      let(:client_opts) { {:sniffing => false} }
+
+      it "should not start the sniffer" do
+        expect(client.sniffer_thread).to be_nil
+      end
+    end
+
   end
 end
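
Finally, the rewritten client spec doubles as usage documentation for the new sniffer. A minimal sketch, using only options exercised above:

    require "logstash/outputs/elasticsearch/http_client"
    require "cabin"

    client = LogStash::Outputs::Elasticsearch::HttpClient.new(
      :hosts          => ["127.0.0.1"],
      :logger         => Cabin::Channel.get,
      :sniffing       => true,
      :sniffing_delay => 5  # seconds between reload_connections! calls
    )
    # ... issue bulk requests ...
    client.stop_sniffing!   # kill the background sniffer thread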