logstash-output-elasticsearch 7.0.0-java → 7.1.0-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/lib/logstash/outputs/elasticsearch.rb +14 -0
- data/lib/logstash/outputs/elasticsearch/common.rb +1 -2
- data/lib/logstash/outputs/elasticsearch/http_client.rb +26 -13
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +5 -1
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +2 -1
- data/logstash-output-elasticsearch.gemspec +2 -1
- data/spec/integration/outputs/compressed_indexing_spec.rb +66 -0
- metadata +18 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fd81205081e6b05250ed7f01c930149b6947b7d8
+  data.tar.gz: 66d17e9dae1b92b2053ce814bc3e0e17285afcf0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f6263b223df958da29405a47756848072f1fa83cfb50e7e334246e95362e43765e1cce246ab84fc29513b0f78b65f950f41f42205e266d7602b207dab3a10478
+  data.tar.gz: 34956e72d80592e44341f24210d894c9f9ffc2ef007367bd5bfee009b1cb3b17b2b826d3ce168c6088c01896409d3a6d733114b7a359faa0dfd6cc6059ddb5f6
data/CHANGELOG.md
CHANGED
data/lib/logstash/outputs/elasticsearch.rb
CHANGED
@@ -62,6 +62,17 @@ require "forwardable"
 #
 # Keep in mind that a connection with keepalive enabled will
 # not reevaluate its DNS value while the keepalive is in effect.
+#
+# ==== HTTP Compression
+#
+# This plugin supports request and response compression. Response compression is enabled by default and
+# for Elasticsearch versions 5.0 and later, the user doesn't have to set any configs in Elasticsearch for
+# it to send back compressed response. For versions before 5.0, `http.compression` must be set to `true` in
+# Elasticsearch[https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-http.html#modules-http] to take advantage of response compression when using this plugin
+#
+# For requests compression, regardless of the Elasticsearch version, users have to enable `http_compression`
+# setting in their Logstash config file.
+#
 class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
   declare_threadsafe!
 
@@ -202,6 +213,9 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
   # See https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/impl/conn/PoolingHttpClientConnectionManager.html#setValidateAfterInactivity(int)[these docs for more info]
   config :validate_after_inactivity, :validate => :number, :default => 10000
 
+  # Enable gzip compression on requests. Note that response compression is on by default for Elasticsearch v5.0 and beyond
+  config :http_compression, :validate => :boolean, :default => false
+
   def build_client
     @client ||= ::LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(@logger, @hosts, params)
   end
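For orientation, a minimal usage sketch of the new option, mirroring the integration spec added further down (plain config hash, then register and multi_receive). The host and index values are placeholders, not taken from this diff, and register assumes a reachable Elasticsearch node:

    require "logstash/outputs/elasticsearch"

    # Placeholder host/index values; "http_compression" is the new option (defaults to false).
    output = LogStash::Outputs::ElasticSearch.new(
      "hosts"            => ["localhost:9200"],
      "index"            => "compressed-logs",
      "http_compression" => true
    )
    output.register
    output.multi_receive([LogStash::Event.new("message" => "Hello World!")])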
data/lib/logstash/outputs/elasticsearch/common.rb
CHANGED
@@ -82,8 +82,7 @@ module LogStash; module Outputs; class ElasticSearch;
         begin
           submit_actions = submit(submit_actions)
           if submit_actions && submit_actions.size > 0
-            @logger.
-            submit_actions.each {|action| @logger.error("Action", action) }
+            @logger.info("Retrying individual bulk actions that failed or were rejected by the previous bulk request.", :count => submit_actions.size)
           end
         rescue => e
           @logger.error("Encountered an unexpected error submitting a bulk request! Will retry.",
data/lib/logstash/outputs/elasticsearch/http_client.rb
CHANGED
@@ -4,6 +4,8 @@ require "base64"
 require 'logstash/outputs/elasticsearch/http_client/pool'
 require 'logstash/outputs/elasticsearch/http_client/manticore_adapter'
 require 'cgi'
+require 'zlib'
+require 'stringio'
 
 module LogStash; module Outputs; class ElasticSearch;
   # This is a constant instead of a config option because
@@ -101,24 +103,25 @@ module LogStash; module Outputs; class ElasticSearch;
         end
       end
 
-
+      body_stream = StringIO.new
+      if http_compression
+        body_stream.set_encoding "BINARY"
+        stream_writer = Zlib::GzipWriter.new(body_stream, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)
+      else
+        stream_writer = body_stream
+      end
       bulk_responses = []
       bulk_actions.each do |action|
         as_json = action.is_a?(Array) ?
                     action.map {|line| LogStash::Json.dump(line)}.join("\n") :
                     LogStash::Json.dump(action)
        as_json << "\n"
-
-
-          bulk_responses << bulk_send(bulk_body)
-          bulk_body = as_json
-        else
-          bulk_body << as_json
-        end
+        bulk_responses << bulk_send(body_stream) if (body_stream.size + as_json.bytesize) > TARGET_BULK_BYTES
+        stream_writer.write(as_json)
       end
-
-      bulk_responses << bulk_send(
-
+      stream_writer.close if http_compression
+      bulk_responses << bulk_send(body_stream) if body_stream.size > 0
+      body_stream.close if !http_compression
       join_bulk_responses(bulk_responses)
     end
 
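The gzip path above streams the bulk body through a Zlib::GzipWriter into an in-memory StringIO. A standalone sketch of that pattern, using only the Ruby standard library and a made-up bulk line (not the plugin's exact code):

    require "zlib"
    require "stringio"

    body_stream = StringIO.new
    body_stream.set_encoding "BINARY"
    stream_writer = Zlib::GzipWriter.new(body_stream, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)

    # Hypothetical bulk payload; the plugin writes one JSON action per line.
    stream_writer.write(%Q({"index":{}}\n{"message":"Hello World!"}\n))
    stream_writer.close   # flushes the gzip trailer and closes the underlying StringIO

    compressed = body_stream.string   # bytes to POST with Content-Encoding: gzip
    puts Zlib::GzipReader.new(StringIO.new(compressed)).read   # round-trip check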
@@ -129,8 +132,14 @@ module LogStash; module Outputs; class ElasticSearch;
       }
     end
 
-    def bulk_send(
-
+    def bulk_send(body_stream)
+      params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
+      # Discard the URL
+      _, response = @pool.post(@bulk_path, params, body_stream.string)
+      if !body_stream.closed?
+        body_stream.truncate(0)
+        body_stream.seek(0)
+      end
       LogStash::Json.load(response.body)
     end
 
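After each send the same StringIO is emptied and rewound so the next batch reuses it. A tiny sketch of that reuse pattern with placeholder payloads:

    require "stringio"

    buffer = StringIO.new
    buffer.write("first batch\n")
    buffer.string        # => "first batch\n"  (what gets posted)
    buffer.truncate(0)   # drop the bytes that were already sent
    buffer.seek(0)       # rewind so the next write starts at offset 0
    buffer.write("second batch\n")
    buffer.string        # => "second batch\n"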
@@ -214,6 +223,10 @@ module LogStash; module Outputs; class ElasticSearch;
       client_settings.fetch(:ssl, {})
     end
 
+    def http_compression
+      client_settings.fetch(:http_compression, {})
+    end
+
     def build_adapter(options)
       timeout = options[:timeout] || 0
 
data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
CHANGED
@@ -47,7 +47,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
     # @see Transport::Base#perform_request
     #
     def perform_request(url, method, path, params={}, body=nil)
-      params
+      # Perform 2-level deep merge on the params, so if the passed params and client params will both have hashes stored on a key they
+      # will be merged as well, instead of choosing just one of the values
+      params = (params || {}).merge(@client_params) { |key, oldval, newval|
+        (oldval.is_a?(Hash) && newval.is_a?(Hash)) ? oldval.merge(newval) : newval
+      }
       params[:body] = body if body
 
       if url.user
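The block passed to Hash#merge above is what makes the merge two levels deep. A small sketch with hypothetical header hashes (the same headers the spec below asserts on) shows the difference from a plain merge:

    client_params = {:headers => {"Content-Encoding" => "gzip"}}
    passed_params = {:headers => {"Content-Type" => "application/json"}, :body => "{}"}

    merged = passed_params.merge(client_params) { |key, oldval, newval|
      (oldval.is_a?(Hash) && newval.is_a?(Hash)) ? oldval.merge(newval) : newval
    }
    # merged[:headers] => {"Content-Type"=>"application/json", "Content-Encoding"=>"gzip"}
    # A plain passed_params.merge(client_params) would have kept only the client's headers hash.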
data/lib/logstash/outputs/elasticsearch/http_client_builder.rb
CHANGED
@@ -6,7 +6,8 @@ module LogStash; module Outputs; class ElasticSearch;
     client_settings = {
       :pool_max => params["pool_max"],
       :pool_max_per_route => params["pool_max_per_route"],
-      :check_connection_timeout => params["validate_after_inactivity"]
+      :check_connection_timeout => params["validate_after_inactivity"],
+      :http_compression => params["http_compression"]
     }
 
     client_settings[:proxy] = params["proxy"] if params["proxy"]
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '7.0.0'
+  s.version = '7.1.0'
   s.licenses = ['apache-2.0']
   s.summary = "Logstash Output to Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -25,6 +25,7 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'addressable', "~> 2.3.0" # used by FTW. V 2.5.0 is ruby 2.0 only.
   s.add_development_dependency 'logstash-codec-plain'
   s.add_development_dependency 'json' # used by spec/unit/outputs/elasticsearch/http_client/pool_spec.rb
+  s.add_development_dependency 'gzip' # used by spec/integration/outputs/index_spec.rb
 
   if RUBY_PLATFORM == 'java'
     s.platform = RUBY_PLATFORM
data/spec/integration/outputs/compressed_indexing_spec.rb
ADDED
@@ -0,0 +1,66 @@
+require_relative "../../../spec/es_spec_helper"
+require "logstash/outputs/elasticsearch"
+require "stringio"
+require "gzip"
+
+RSpec::Matchers.define :a_valid_gzip_encoded_string do
+  match { |data|
+    expect { Zlib::GzipReader.new(StringIO.new(data)).read }.not_to raise_error
+  }
+end
+
+describe "indexing with http_compression turned on", :integration => true, :version_greater_than_equal_to_5x => true do
+  let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:event_count) { 10000 + rand(500) }
+  let(:events) { event_count.times.map { event }.to_a }
+  let(:config) {
+    {
+      "hosts" => get_host_port,
+      "index" => index,
+      "http_compression" => true
+    }
+  }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) {"#{es_url}/#{index}"}
+  let(:http_client_options) { {} }
+  let(:http_client) do
+    Manticore::Client.new(http_client_options)
+  end
+
+  before do
+    subject.register
+  end
+
+  shared_examples "an indexer" do
+    it "ships events" do
+      subject.multi_receive(events)
+
+      http_client.post("#{es_url}/_refresh").call
+
+      response = http_client.get("#{index_url}/_count?q=*")
+      result = LogStash::Json.load(response.body)
+      cur_count = result["count"]
+      expect(cur_count).to eq(event_count)
+
+      response = http_client.get("#{index_url}/_search?q=*&size=1000")
+      result = LogStash::Json.load(response.body)
+      result["hits"]["hits"].each do |doc|
+        expect(doc["_type"]).to eq(type)
+        expect(doc["_index"]).to eq(index)
+      end
+    end
+  end
+
+  it "sets the correct content-encoding header and body is compressed" do
+    expect(subject.client.pool.adapter.client).to receive(:send).
+      with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
+      and_call_original
+    subject.multi_receive(events)
+  end
+
+  it_behaves_like("an indexer")
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 7.0.0
+  version: 7.1.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-02-
+date: 2017-02-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -120,6 +120,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: gzip
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -211,6 +225,7 @@ files:
 - spec/fixtures/scripts/scripted_update.groovy
 - spec/fixtures/scripts/scripted_update_nested.groovy
 - spec/fixtures/scripts/scripted_upsert.groovy
+- spec/integration/outputs/compressed_indexing_spec.rb
 - spec/integration/outputs/create_spec.rb
 - spec/integration/outputs/delete_spec.rb
 - spec/integration/outputs/index_spec.rb
@@ -265,6 +280,7 @@ test_files:
 - spec/fixtures/scripts/scripted_update.groovy
 - spec/fixtures/scripts/scripted_update_nested.groovy
 - spec/fixtures/scripts/scripted_upsert.groovy
+- spec/integration/outputs/compressed_indexing_spec.rb
 - spec/integration/outputs/create_spec.rb
 - spec/integration/outputs/delete_spec.rb
 - spec/integration/outputs/index_spec.rb