logstash-output-elasticsearch 2.1.2-java → 2.1.4-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,7 +4,7 @@ require "base64"
 require "elasticsearch"
 require "elasticsearch/transport/transport/http/manticore"
 
-module LogStash::Outputs::Elasticsearch
+module LogStash; module Outputs; class ElasticSearch;
   class HttpClient
     attr_reader :client, :options, :client_options, :sniffer_thread
     # This is here in case we use DEFAULT_OPTIONS in the future
@@ -30,6 +30,7 @@ module LogStash::Outputs::Elasticsearch
     end
 
     def bulk(actions)
+      return if actions.empty?
      bulk_body = actions.collect do |action, args, source|
        if action == 'update'
          if args[:_id]
@@ -150,4 +151,4 @@ module LogStash::Outputs::Elasticsearch
       @client.indices.put_template(:name => name, :body => template)
     end
   end
-end
+end end end
@@ -0,0 +1,92 @@
+module LogStash; module Outputs; class ElasticSearch;
+  module HttpClientBuilder
+    def self.build(logger, hosts, params)
+      client_settings = {}
+
+      common_options = {
+        :client_settings => client_settings,
+        :sniffing => params["sniffing"],
+        :sniffing_delay => params["sniffing_delay"]
+      }
+
+      common_options[:timeout] = params["timeout"] if params["timeout"]
+      client_settings[:path] = "/#{params["path"]}/".gsub(/\/+/, "/") # Normalize slashes
+      logger.debug? && logger.debug("Normalizing http path", :path => params["path"], :normalized => client_settings[:path])
+
+      client_settings.merge! setup_ssl(logger, params)
+      client_settings.merge! setup_proxy(logger, params)
+      common_options.merge! setup_basic_auth(logger, params)
+
+      # Update API setup
+      update_options = {
+        :upsert => params["upsert"],
+        :doc_as_upsert => params["doc_as_upsert"]
+      }
+      common_options.merge! update_options if params["action"] == 'update'
+
+      LogStash::Outputs::ElasticSearch::HttpClient.new(
+        common_options.merge(:hosts => hosts, :logger => logger)
+      )
+    end
+
+    def self.setup_proxy(logger, params)
+      proxy = params["proxy"]
+      return {} unless proxy
+
+      # Symbolize keys
+      proxy = if proxy.is_a?(Hash)
+                Hash[proxy.map {|k,v| [k.to_sym, v]}]
+              elsif proxy.is_a?(String)
+                proxy
+              else
+                raise LogStash::ConfigurationError, "Expected 'proxy' to be a string or hash, not '#{proxy}''!"
+              end
+
+      return {:proxy => proxy}
+    end
+
+    def self.setup_ssl(logger, params)
+      return {} unless params["ssl"]
+
+      cacert, truststore, truststore_password, keystore, keystore_password =
+        params.values_at('cacert', 'truststore', 'truststore_password', 'keystore', 'keystore_password')
+
+      if cacert && truststore
+        raise(LogStash::ConfigurationError, "Use either \"cacert\" or \"truststore\" when configuring the CA certificate") if truststore
+      end
+
+      ssl_options = {}
+
+      if cacert
+        ssl_options[:ca_file] = cacert
+      elsif truststore
+        ssl_options[:truststore_password] = truststore_password.value if truststore_password
+      end
+
+      ssl_options[:truststore] = truststore if truststore
+      if keystore
+        ssl_options[:keystore] = keystore
+        ssl_options[:keystore_password] = keystore_password.value if keystore_password
+      end
+      if !params["ssl_certificate_verification"]
+        logger.warn [
+          "** WARNING ** Detected UNSAFE options in elasticsearch output configuration!",
+          "** WARNING ** You have enabled encryption but DISABLED certificate verification.",
+          "** WARNING ** To make sure your data is secure change :ssl_certificate_verification to true"
+        ].join("\n")
+        ssl_options[:verify] = false
+      end
+      { ssl: ssl_options }
+    end
+
+    def self.setup_basic_auth(logger, params)
+      user, password = params["user"], params["password"]
+      return {} unless user && password
+
+      {
+        :user => user,
+        :password => password.value
+      }
+    end
+  end
+end; end; end
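
Note: the new HttpClientBuilder.build helper takes the plugin's logger, the resolved hosts list, and the plugin's config hash, and returns a configured HttpClient. A minimal sketch of a call site in Ruby, assuming a config hash shaped like the plugin's options (everything here other than HttpClientBuilder.build and HttpClient#bulk is illustrative, not part of this diff):

    # Hypothetical call site -- hosts, params, and logger are assumed inputs.
    hosts  = ["127.0.0.1:9200"]
    params = {
      "sniffing"       => false,
      "sniffing_delay" => 5,
      "path"           => "",
      "ssl"            => false
    }
    client = LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(logger, hosts, params)
    client.bulk(actions)  # actions are [action, args, source] tuples; returns early if empty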
@@ -0,0 +1,35 @@
+module LogStash; module Outputs; class ElasticSearch
+  class TemplateManager
+    # To be mixed into the elasticsearch plugin base
+    def self.install_template(plugin)
+      return unless plugin.manage_template
+      plugin.logger.info("Using mapping template from", :path => plugin.template)
+      template = get_template(plugin.template)
+      plugin.logger.info("Attempting to install template", :manage_template => template)
+      install(plugin.client, plugin.template_name, template, plugin.template_overwrite)
+    rescue => e
+      plugin.logger.error("Failed to install template.", :message => e.message, :class => e.class.name)
+    end
+
+    private
+
+    def self.get_template(path)
+      template_path = path || default_template_path
+      read_template_file(template_path)
+    end
+
+    def self.install(client, template_name, template, template_overwrite)
+      client.template_install(template_name, template, template_overwrite)
+    end
+
+    def self.default_template_path
+      ::File.expand_path('elasticsearch-template.json', ::File.dirname(__FILE__))
+    end
+
+    def self.read_template_file(template_path)
+      raise ArgumentError, "Template file '#{@template_path}' could not be found!" unless ::File.exists?(template_path)
+      template_data = ::IO.read(template_path)
+      LogStash::Json.load(template_data)
+    end
+  end
+end end end
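
Note: TemplateManager.install_template is called with the plugin instance itself and reads manage_template, template, template_name, template_overwrite, and client off of it. A rough sketch of the expected call in Ruby, assuming an already-registered output instance (the surrounding setup is illustrative, not part of this diff):

    # Hypothetical usage -- only TemplateManager.install_template is introduced by this diff.
    output = LogStash::Outputs::ElasticSearch.new(
      "hosts"           => "127.0.0.1:9200",
      "manage_template" => true
    )
    output.register  # assumed to set up output.client before template installation
    LogStash::Outputs::ElasticSearch::TemplateManager.install_template(output)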
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-elasticsearch'
-  s.version = '2.1.2'
+  s.version = '2.1.4'
   s.licenses = ['apache-2.0']
   s.summary = "Logstash Output to Elasticsearch"
   s.description = "Output events to elasticsearch"
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency 'elasticsearch', ['>= 1.0.13', '~> 1.0']
   s.add_runtime_dependency 'stud', ['>= 0.0.17', '~> 0.0']
   s.add_runtime_dependency 'cabin', ['~> 0.6']
-  s.add_runtime_dependency "logstash-core", ">= 2.0.0.beta2", "< 3.0.0"
+  s.add_runtime_dependency "logstash-core", ">= 2.0.0", "< 3.0.0"
 
   s.add_development_dependency 'ftw', '~> 0.0.42'
   s.add_development_dependency 'logstash-codec-plain'
@@ -13,8 +13,9 @@ CONTAINER_TAG = "1.6"
 DOCKER_INTEGRATION = ENV["DOCKER_INTEGRATION"]
 
 module ESHelper
-  def get_host
-    DOCKER_INTEGRATION ? Longshoreman.new.get_host_ip : "127.0.0.1"
+  def get_host_port
+    addr = DOCKER_INTEGRATION ? Longshoreman.new.get_host_ip : "127.0.0.1"
+    "#{addr}:#{get_port}"
   end
 
   def get_port
@@ -26,7 +27,7 @@ module ESHelper
   end
 
   def get_client
-    Elasticsearch::Client.new(:host => "#{get_host}:#{get_port}")
+    Elasticsearch::Client.new(:hosts => [get_host_port])
   end
 end
 
@@ -34,7 +35,6 @@ end
 RSpec.configure do |config|
   config.include ESHelper
 
-
   if DOCKER_INTEGRATION
     # this :all hook gets run before every describe block that is tagged with :integration => true.
     config.before(:all, :integration => true) do
@@ -9,8 +9,7 @@ describe "client create actions", :integration => true do
       "manage_template" => true,
       "index" => "logstash-create",
       "template_overwrite" => true,
-      "hosts" => get_host(),
-      "port" => get_port(),
+      "hosts" => get_host_port(),
       "action" => action
     }
     settings['document_id'] = id unless id.nil?
@@ -31,7 +30,7 @@ describe "client create actions", :integration => true do
     subject = get_es_output("create", "id123")
     subject.register
     subject.receive(LogStash::Event.new("message" => "sample message here"))
-    subject.buffer_flush(:final => true)
+    subject.flush
     @es.indices.refresh
     # Wait or fail until everything's indexed.
     Stud::try(3.times) do
@@ -44,7 +43,7 @@ describe "client create actions", :integration => true do
     subject = get_es_output("create")
     subject.register
     subject.receive(LogStash::Event.new("message" => "sample message here"))
-    subject.buffer_flush(:final => true)
+    subject.flush
     @es.indices.refresh
     # Wait or fail until everything's indexed.
     Stud::try(3.times) do
@@ -16,7 +16,7 @@ shared_examples "an indexer" do
   end
 
   it "ships events" do
-    index_url = "http://#{get_host}:#{get_port}/#{index}"
+    index_url = "http://#{get_host_port}/#{index}"
 
     ftw = FTW::Agent.new
     ftw.post!("#{index_url}/_refresh")
@@ -46,8 +46,7 @@ describe "an indexer with custom index_type", :integration => true do
   it_behaves_like "an indexer" do
     let(:config) {
       {
-        "hosts" => get_host,
-        "port" => get_port,
+        "hosts" => get_host_port,
         "index" => index,
         "flush_size" => flush_size
       }
@@ -60,8 +59,7 @@ describe "an indexer with no type value set (default to logs)", :integration =>
   let(:type) { "logs" }
   let(:config) {
     {
-      "hosts" => get_host,
-      "port" => get_port,
+      "hosts" => get_host_port,
       "index" => index,
       "flush_size" => flush_size
     }
@@ -22,7 +22,7 @@ describe "failures in bulk class expected behavior", :integration => true do
       }
     end
 
-    allow_any_instance_of(LogStash::Outputs::Elasticsearch::HttpClient).to receive(:bulk).and_return(*expanded_responses)
+    allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient).to receive(:bulk).and_return(*expanded_responses)
   end
 
   subject! do
@@ -30,8 +30,7 @@ describe "failures in bulk class expected behavior", :integration => true do
       "manage_template" => true,
      "index" => "logstash-2014.11.17",
      "template_overwrite" => true,
-      "hosts" => get_host(),
-      "port" => get_port(),
+      "hosts" => get_host_port(),
       "retry_max_items" => 10,
       "retry_max_interval" => 1,
       "max_retries" => max_retries
@@ -50,28 +49,34 @@ describe "failures in bulk class expected behavior", :integration => true do
     @es.indices.refresh
   end
 
+  after :each do
+    subject.close
+  end
+
   it "should return no errors if all bulk actions are successful" do
     mock_actions_with_response({"errors" => false})
     expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
     subject.register
     subject.receive(event1)
     subject.receive(event2)
-    subject.buffer_flush(:final => true)
+    subject.flush
     sleep(2)
   end
 
-  it "should raise exception and be retried by stud::buffer" do
+  it "retry exceptions within the submit body" do
     call_count = 0
-    expect(subject).to receive(:submit).with([action1]).exactly(3).times do
+    subject.register
+
+    expect(subject.client).to receive(:bulk).with(anything).exactly(3).times do
       if (call_count += 1) <= 2
         raise "error first two times"
       else
         {"errors" => false}
       end
     end
-    subject.register
+
     subject.receive(event1)
-    subject.close
+    subject.flush
   end
 
   it "should retry actions with response status of 503" do
@@ -87,17 +92,19 @@ describe "failures in bulk class expected behavior", :integration => true do
     subject.receive(event1)
     subject.receive(event1)
     subject.receive(event2)
-    subject.buffer_flush(:final => true)
+    subject.flush
     sleep(3)
   end
 
   it "should retry actions with response status of 429" do
+    subject.register
+
     mock_actions_with_response({"errors" => true, "statuses" => [429]},
                                {"errors" => false})
     expect(subject).to receive(:submit).with([action1]).twice.and_call_original
-    subject.register
+
     subject.receive(event1)
-    subject.buffer_flush(:final => true)
+    subject.flush
     sleep(3)
   end
 
@@ -108,17 +115,17 @@ describe "failures in bulk class expected behavior", :integration => true do
                                {"errors" => true, "statuses" => [429]},
                                {"errors" => true, "statuses" => [429]},
                                {"errors" => true, "statuses" => [429]})
-    expect(subject).to receive(:submit).with([action1]).exactly(max_retries).times.and_call_original
+    expect(subject).to receive(:submit).with([action1]).exactly(max_retries+1).times.and_call_original
     subject.register
     subject.receive(event1)
-    subject.buffer_flush(:final => true)
-    sleep(3)
+    subject.flush
+    sleep(5)
   end
 
   it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do
     subject.register
     subject.receive(invalid_event)
-    expect(subject).not_to receive(:retry_push)
+    expect(subject).to receive(:submit).once.and_call_original
     subject.close
 
     @es.indices.refresh
@@ -132,7 +139,7 @@ describe "failures in bulk class expected behavior", :integration => true do
   it "successful requests should not be appended to retry queue" do
     subject.register
     subject.receive(event1)
-    expect(subject).not_to receive(:retry_push)
+    expect(subject).to receive(:submit).once.and_call_original
     subject.close
     @es.indices.refresh
     sleep(5)
@@ -18,7 +18,7 @@ shared_examples "a routing indexer" do
 
 
   it "ships events" do
-    index_url = "http://#{get_host()}:#{get_port()}/#{index}"
+    index_url = "http://#{get_host_port()}/#{index}"
 
     ftw = FTW::Agent.new
     ftw.post!("#{index_url}/_refresh")
@@ -40,8 +40,7 @@ describe "(http protocol) index events with static routing", :integration => tru
   let(:routing) { "test" }
   let(:config) {
     {
-      "hosts" => get_host,
-      "port" => get_port,
+      "hosts" => get_host_port,
       "index" => index,
       "flush_size" => flush_size,
       "routing" => routing
@@ -55,8 +54,7 @@ describe "(http_protocol) index events with fieldref in routing value", :integra
   let(:routing) { "test" }
   let(:config) {
     {
-      "hosts" => get_host,
-      "port" => get_port,
+      "hosts" => get_host_port,
       "index" => index,
       "flush_size" => flush_size,
       "routing" => "%{message}"
@@ -25,7 +25,7 @@ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure =>
   it "sends events to ES" do
     expect {
       subject.receive(LogStash::Event.new("message" => "sample message here"))
-      subject.buffer_flush(:final => true)
+      subject.flush
     }.to_not raise_error
   end
 end
@@ -49,7 +49,7 @@ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
   it "sends events to ES" do
     expect {
       subject.receive(LogStash::Event.new("message" => "sample message here"))
-      subject.buffer_flush(:final => true)
+      subject.flush
     }.to_not raise_error
   end
 end
@@ -79,7 +79,7 @@ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure =>
   it "sends events to ES" do
     expect {
       subject.receive(LogStash::Event.new("message" => "sample message here"))
-      subject.buffer_flush(:final => true)
+      subject.flush
     }.to_not raise_error
   end
 end
@@ -102,7 +102,7 @@ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
   it "sends events to ES" do
     expect {
       subject.receive(LogStash::Event.new("message" => "sample message here"))
-      subject.buffer_flush(:final => true)
+      subject.flush
     }.to_not raise_error
   end
 end
@@ -6,8 +6,7 @@ describe "index template expected behavior", :integration => true do
     settings = {
       "manage_template" => true,
       "template_overwrite" => true,
-      "hosts" => "#{get_host()}",
-      "port" => "#{get_port()}"
+      "hosts" => "#{get_host_port()}"
     }
     next LogStash::Outputs::ElasticSearch.new(settings)
   end
@@ -32,7 +31,7 @@ describe "index template expected behavior", :integration => true do
     subject.receive(LogStash::Event.new("country" => "us"))
     subject.receive(LogStash::Event.new("country" => "at"))
     subject.receive(LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }))
-    subject.buffer_flush(:final => true)
+    subject.flush
     @es.indices.refresh
 
     # Wait or fail until everything's indexed.