logstash-output-elasticsearch 7.4.0-java → 7.4.1-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA256:
- metadata.gz: 868ec97797a9b0bf6a6d57b805ce4097c937ab6c30bb0b2f805cd809e0c6c685
- data.tar.gz: b91cdaf715b3cfae6836aeeb780e7a7a52fd6ee9fdfd4b182fc1596e238b88e8
+ SHA1:
+ metadata.gz: a5f3f952de8b5aa9f1e43b98f6a8cf7b8b0eca6f
+ data.tar.gz: bd133b48b12f09f9193ad8fae93bde676bbe77b1
  SHA512:
- metadata.gz: bdc11c661ef02325e7b984145244acd4f8df9842e4009928a8a4a9d6d9d2bae288a6aed8047fc15ea0cbf3a096c6211fca1c50aa1dbf191a97d6d96f18bd0f44
- data.tar.gz: 066b2f9fa955dc737495ef330a0f75473ede71c7fd3bc5b265f60767dbc0087bd02d247d00dc435b0542a698d90421edcd5ddf9ecd275f6e61351b5e0dbe1b78
+ metadata.gz: 51737449d12145f393943545566650d037a6558ca5a4eb1e8d64249f416213c4c046f3fc8455ccd22d9727345e8c51fb5f97b050c875b58ec72cac3542589637
+ data.tar.gz: f567270ae2fc471f624276823f3e70425b30a084bdf92ec348e2ede49a562b80d8fc2a71022f83296458917908c5069b5c7f7fe71deffe21ff822225f65be09e
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+ ## 7.4.1
+ - Properly detect if DLQ is supported and enabled
+
  ## 7.4.0
  - Retry all non-200 responses of the bulk API indefinitely
  - Improve documentation on retry codes
@@ -18,7 +18,7 @@ module LogStash; module Outputs; class ElasticSearch;
  def register
  @stopping = Concurrent::AtomicBoolean.new(false)
  # To support BWC, we check if DLQ exists in core (< 5.4). If it doesn't, we use nil to resort to previous behavior.
- @dlq_writer = supports_dlq? ? execution_context.dlq_writer : nil
+ @dlq_writer = dlq_enabled? ? execution_context.dlq_writer : nil

  setup_hosts # properly sets @hosts
  build_client
@@ -279,8 +279,11 @@ module LogStash; module Outputs; class ElasticSearch;
  end
  end

- def supports_dlq?
- respond_to?(:execution_context) && execution_context.respond_to?(:dlq_writer)
+ def dlq_enabled?
+ # TODO there should be a better way to query if DLQ is enabled
+ # See more in: https://github.com/elastic/logstash/issues/8064
+ respond_to?(:execution_context) && execution_context.respond_to?(:dlq_writer) &&
+ !execution_context.dlq_writer.inner_writer.is_a?(::LogStash::Util::DummyDeadLetterQueueWriter)
  end
  end
  end; end; end
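
Note on the hunk above: 7.4.0's supports_dlq? only checked that the execution context responded to dlq_writer, which is also true when the dead letter queue is disabled and Logstash hands the plugin a no-op DummyDeadLetterQueueWriter; 7.4.1's dlq_enabled? additionally rejects that dummy writer. A minimal sketch of how the writer registered above would then be used; the write(event, reason) call and the fallback warning are assumptions for illustration, not code from this diff:

    # Sketch only: @dlq_writer is nil when dlq_enabled? is false, so guard every write.
    if @dlq_writer
      @dlq_writer.write(event, "rejected by Elasticsearch")  # assumed write(event, reason) API
    else
      @logger.warn("Cannot dead-letter event, DLQ is not enabled")  # hypothetical fallback
    end
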
@@ -82,9 +82,8 @@ module LogStash; module Outputs; class ElasticSearch;
  template_put(name, template)
  end

- def get_version
- url, response = @pool.get("")
- LogStash::Json.load(response.body)["version"]
+ def connected_es_versions
+ @pool.connected_es_versions
  end

  def bulk(actions)
@@ -135,7 +134,7 @@ module LogStash; module Outputs; class ElasticSearch;
  def bulk_send(body_stream)
  params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
  # Discard the URL
- _, response = @pool.post(@bulk_path, params, body_stream.string)
+ response = @pool.post(@bulk_path, params, body_stream.string)
  if !body_stream.closed?
  body_stream.truncate(0)
  body_stream.seek(0)
@@ -151,12 +150,12 @@ module LogStash; module Outputs; class ElasticSearch;
  end

  def get(path)
- url, response = @pool.get(path, nil)
+ response = @pool.get(path, nil)
  LogStash::Json.load(response.body)
  end

  def post(path, params = {}, body_string)
- url, response = @pool.post(path, params, body_string)
+ response = @pool.post(path, params, body_string)
  LogStash::Json.load(response.body)
  end

@@ -327,15 +326,14 @@ module LogStash; module Outputs; class ElasticSearch;
  end

  def template_exists?(name)
- url, response = @pool.head("/_template/#{name}")
+ response = @pool.head("/_template/#{name}")
  response.code >= 200 && response.code <= 299
  end

  def template_put(name, template)
  path = "_template/#{name}"
  logger.info("Installing elasticsearch template to #{path}")
- url, response = @pool.put(path, nil, LogStash::Json.dump(template))
- response
+ @pool.put(path, nil, LogStash::Json.dump(template))
  end

  # Build a bulk item for an elasticsearch update action
@@ -107,6 +107,12 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  @state_mutex.synchronize { @url_info }
  end

+ def connected_es_versions
+ @state_mutex.synchronize do
+ @url_info.values.select {|v| v[:state] == :alive }.map {|v| v[:version] }
+ end
+ end
+
  def urls
  url_info.keys
  end
@@ -154,7 +160,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  ES2_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
  # Sniffs and returns the results. Does not update internal URLs!
  def check_sniff
- _, resp = perform_request(:get, @sniffing_path)
+ _, url_meta, resp = perform_request(:get, @sniffing_path)
  parsed = LogStash::Json.load(resp.body)

  nodes = parsed['nodes']
@@ -162,12 +168,10 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  @logger.warn("Sniff returned no nodes! Will not update hosts.")
  return nil
  else
- case major_version(nodes)
+ case major_version(url_meta[:version])
  when 5, 6
  sniff_5x_and_above(nodes)
- when 2
- sniff_2x_1x(nodes)
- when 1
+ when 2, 1
  sniff_2x_1x(nodes)
  else
  @logger.warn("Could not determine version for nodes in ES cluster!")
@@ -176,8 +180,8 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  end
  end

- def major_version(nodes)
- k,v = nodes.first; v['version'].split('.').first.to_i
+ def major_version(version_string)
+ version_string.split('.').first.to_i
  end

  def sniff_5x_and_above(nodes)
@@ -237,7 +241,12 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  response = perform_request_to_url(url, :head, @healthcheck_path)
  # If no exception was raised it must have succeeded!
  logger.warn("Restored connection to ES instance", :url => url.sanitized.to_s)
- @state_mutex.synchronize { meta[:state] = :alive }
+ # We reconnected to this node, check its ES version
+ es_version = get_es_version(url)
+ @state_mutex.synchronize do
+ meta[:version] = es_version
+ meta[:state] = :alive
+ end
  rescue HostUnreachableError, BadResponseCodeError => e
  logger.warn("Attempted to resurrect connection to dead ES instance, but got an error.", url: url.sanitized.to_s, error_type: e.class, error: e.message)
  end
@@ -253,15 +262,16 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  end

  def perform_request(method, path, params={}, body=nil)
- with_connection do |url|
+ with_connection do |url, url_meta|
  resp = perform_request_to_url(url, method, path, params, body)
- [url, resp]
+ [url, url_meta, resp]
  end
  end

  [:get, :put, :post, :delete, :patch, :head].each do |method|
  define_method(method) do |path, params={}, body=nil|
- perform_request(method, path, params, body)
+ _, _, response = perform_request(method, path, params, body)
+ response
  end
  end

@@ -323,6 +333,10 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  @state_mutex.synchronize { @url_info.size }
  end

+ def es_versions
+ @state_mutex.synchronize { @url_info.size }
+ end
+
  def add_url(url)
  @url_info[url] ||= empty_url_meta
  end
@@ -344,7 +358,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  # Custom error class used here so that users may retry attempts if they receive this error
  # should they choose to
  raise NoConnectionAvailableError, "No Available connections" unless url
- yield url
+ yield url, url_meta
  rescue HostUnreachableError => e
  # Mark the connection as dead here since this is likely not transient
  mark_dead(url, e)
@@ -415,5 +429,10 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  end
  end
  end
+
+ def get_es_version(url)
+ request = perform_request_to_url(url, :get, ROOT_URI_PATH)
+ LogStash::Json.load(request.body)["version"]["number"]
+ end
  end
  end; end; end; end;
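
For context on get_es_version above: it reads the node's root endpoint and keeps version.number per connection. A standalone sketch of that extraction, using plain JSON and an assumed, trimmed sample body (the real response carries more fields, and the plugin itself uses LogStash::Json.load):

    require "json"

    # Illustrative root-endpoint body from an ES node; only "version.number" is kept by the pool.
    body = '{"name":"node-1","version":{"number":"5.6.3"},"tagline":"You Know, for Search"}'
    JSON.parse(body)["version"]["number"]  # => "5.6.3"
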
@@ -12,12 +12,10 @@ module LogStash; module Outputs; class ElasticSearch
  end

  private
- def self.get_es_version(client)
- client.get_version
- end
-
  def self.get_es_major_version(client)
- get_es_version(client)["number"][0]
+ # get the elasticsearch version of each node in the pool and
+ # pick the biggest major version
+ client.connected_es_versions.uniq.map {|version| version.split(".").first.to_i}.max
  end

  def self.get_template(path, es_major_version)
@@ -30,7 +28,7 @@ module LogStash; module Outputs; class ElasticSearch
  end

  def self.default_template_path(es_major_version)
- template_version = es_major_version == "1" ? "2" : es_major_version
+ template_version = es_major_version == 1 ? 2 : es_major_version
  default_template_name = "elasticsearch-template-es#{template_version}x.json"
  ::File.expand_path(default_template_name, ::File.dirname(__FILE__))
  end
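
The rewritten get_es_major_version above derives the template version from whatever nodes are currently connected and takes the highest major version seen, so default_template_path can now compare integers. A quick illustration of that selection step with hypothetical per-node version strings:

    # Hypothetical versions reported by the connection pool for three nodes.
    versions = ["5.6.3", "5.6.3", "6.0.0"]
    versions.uniq.map { |v| v.split(".").first.to_i }.max  # => 6
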
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-elasticsearch'
- s.version = '7.4.0'
+ s.version = '7.4.1'
  s.licenses = ['apache-2.0']
  s.summary = "Logstash Output to Elasticsearch"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -10,6 +10,12 @@ module ESHelper
  def get_client
  Elasticsearch::Client.new(:hosts => [get_host_port])
  end
+
+ def self.es_version_satisfies?(requirement)
+ es_version = RSpec.configuration.filter[:es_version] || ENV['ES_VERSION']
+ es_release_version = Gem::Version.new(es_version).release
+ Gem::Requirement.new(requirement).satisfied_by?(es_release_version)
+ end
  end

  RSpec.configure do |config|
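
The new ESHelper.es_version_satisfies? helper replaces the per-version RSpec tags used in the specs below with a Gem::Requirement check against ES_VERSION (or the :es_version RSpec filter). A short sketch of how that check resolves, with hypothetical version strings:

    # Hypothetical inputs showing the Gem::Requirement / Gem::Version check used by the helper.
    Gem::Requirement.new(">= 5").satisfied_by?(Gem::Version.new("5.6.3").release)  # => true
    Gem::Requirement.new(">= 2").satisfied_by?(Gem::Version.new("1.7.6").release)  # => false
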
@@ -9,58 +9,60 @@ RSpec::Matchers.define :a_valid_gzip_encoded_string do
  }
  end

- describe "indexing with http_compression turned on", :integration => true, :version_greater_than_equal_to_5x => true do
- let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
- let(:index) { 10.times.collect { rand(10).to_s }.join("") }
- let(:type) { 10.times.collect { rand(10).to_s }.join("") }
- let(:event_count) { 10000 + rand(500) }
- let(:events) { event_count.times.map { event }.to_a }
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "http_compression" => true
+ if ESHelper.es_version_satisfies?(">= 5")
+ describe "indexing with http_compression turned on", :integration => true do
+ let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+ let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+ let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+ let(:event_count) { 10000 + rand(500) }
+ let(:events) { event_count.times.map { event }.to_a }
+ let(:config) {
+ {
+ "hosts" => get_host_port,
+ "index" => index,
+ "http_compression" => true
+ }
  }
- }
- subject { LogStash::Outputs::ElasticSearch.new(config) }
+ subject { LogStash::Outputs::ElasticSearch.new(config) }

- let(:es_url) { "http://#{get_host_port}" }
- let(:index_url) {"#{es_url}/#{index}"}
- let(:http_client_options) { {} }
- let(:http_client) do
- Manticore::Client.new(http_client_options)
- end
+ let(:es_url) { "http://#{get_host_port}" }
+ let(:index_url) {"#{es_url}/#{index}"}
+ let(:http_client_options) { {} }
+ let(:http_client) do
+ Manticore::Client.new(http_client_options)
+ end

- before do
- subject.register
- end
-
- shared_examples "an indexer" do
- it "ships events" do
- subject.multi_receive(events)
+ before do
+ subject.register
+ end

- http_client.post("#{es_url}/_refresh").call
+ shared_examples "an indexer" do
+ it "ships events" do
+ subject.multi_receive(events)

- response = http_client.get("#{index_url}/_count?q=*")
- result = LogStash::Json.load(response.body)
- cur_count = result["count"]
- expect(cur_count).to eq(event_count)
+ http_client.post("#{es_url}/_refresh").call

- response = http_client.get("#{index_url}/_search?q=*&size=1000")
- result = LogStash::Json.load(response.body)
- result["hits"]["hits"].each do |doc|
- expect(doc["_type"]).to eq(type)
- expect(doc["_index"]).to eq(index)
+ response = http_client.get("#{index_url}/_count?q=*")
+ result = LogStash::Json.load(response.body)
+ cur_count = result["count"]
+ expect(cur_count).to eq(event_count)
+
+ response = http_client.get("#{index_url}/_search?q=*&size=1000")
+ result = LogStash::Json.load(response.body)
+ result["hits"]["hits"].each do |doc|
+ expect(doc["_type"]).to eq(type)
+ expect(doc["_index"]).to eq(index)
+ end
  end
  end
- end

- it "sets the correct content-encoding header and body is compressed" do
- expect(subject.client.pool.adapter.client).to receive(:send).
- with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
- and_call_original
- subject.multi_receive(events)
- end
+ it "sets the correct content-encoding header and body is compressed" do
+ expect(subject.client.pool.adapter.client).to receive(:send).
+ with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
+ and_call_original
+ subject.multi_receive(events)
+ end

- it_behaves_like("an indexer")
+ it_behaves_like("an indexer")
+ end
  end
@@ -2,62 +2,64 @@ require_relative "../../../spec/es_spec_helper"
  require "logstash/outputs/elasticsearch"


- describe "Versioned delete", :integration => true, :version_greater_than_equal_to_2x => true do
- require "logstash/outputs/elasticsearch"
-
- let(:es) { get_client }
-
- before :each do
- # Delete all templates first.
- # Clean ES of data before we start.
- es.indices.delete_template(:name => "*")
- # This can fail if there are no indexes, ignore failure.
- es.indices.delete(:index => "*") rescue nil
- es.indices.refresh
- end
+ if ESHelper.es_version_satisfies?(">= 2")
+ describe "Versioned delete", :integration => true do
+ require "logstash/outputs/elasticsearch"

- context "when delete only" do
- subject { LogStash::Outputs::ElasticSearch.new(settings) }
+ let(:es) { get_client }

- before do
- subject.register
+ before :each do
+ # Delete all templates first.
+ # Clean ES of data before we start.
+ es.indices.delete_template(:name => "*")
+ # This can fail if there are no indexes, ignore failure.
+ es.indices.delete(:index => "*") rescue nil
+ es.indices.refresh
  end

- let(:settings) do
- {
- "manage_template" => true,
- "index" => "logstash-delete",
- "template_overwrite" => true,
- "hosts" => get_host_port(),
- "document_id" => "%{my_id}",
- "version" => "%{my_version}",
- "version_type" => "external",
- "action" => "%{my_action}"
- }
- end
+ context "when delete only" do
+ subject { LogStash::Outputs::ElasticSearch.new(settings) }

- it "should ignore non-monotonic external version updates" do
- id = "ev2"
- subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
- r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
- expect(r['_version']).to eq(99)
- expect(r['_source']['message']).to eq('foo')
-
- subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)])
- r2 = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
- expect(r2['_version']).to eq(99)
- expect(r2['_source']['message']).to eq('foo')
- end
+ before do
+ subject.register
+ end
+
+ let(:settings) do
+ {
+ "manage_template" => true,
+ "index" => "logstash-delete",
+ "template_overwrite" => true,
+ "hosts" => get_host_port(),
+ "document_id" => "%{my_id}",
+ "version" => "%{my_version}",
+ "version_type" => "external",
+ "action" => "%{my_action}"
+ }
+ end
+
+ it "should ignore non-monotonic external version updates" do
+ id = "ev2"
+ subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
+ r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+ expect(r['_version']).to eq(99)
+ expect(r['_source']['message']).to eq('foo')
+
+ subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)])
+ r2 = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+ expect(r2['_version']).to eq(99)
+ expect(r2['_source']['message']).to eq('foo')
+ end

- it "should commit monotonic external version updates" do
- id = "ev3"
- subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
- r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
- expect(r['_version']).to eq(99)
- expect(r['_source']['message']).to eq('foo')
+ it "should commit monotonic external version updates" do
+ id = "ev3"
+ subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
+ r = es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true)
+ expect(r['_version']).to eq(99)
+ expect(r['_source']['message']).to eq('foo')

- subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)])
- expect { es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true) }.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
+ subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)])
+ expect { es.get(:index => 'logstash-delete', :type => 'logs', :id => id, :refresh => true) }.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
+ end
  end
  end
  end