logstash-output-elasticsearch 11.16.0-java → 11.17.0-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/docs/index.asciidoc +17 -4
- data/lib/logstash/outputs/elasticsearch/http_client.rb +10 -8
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +1 -1
- data/lib/logstash/outputs/elasticsearch.rb +16 -0
- data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +9 -1
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/integration/outputs/compressed_indexing_spec.rb +49 -48
- data/spec/integration/outputs/index_spec.rb +8 -4
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +25 -5
- data/spec/unit/outputs/elasticsearch_spec.rb +33 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 37124c3a166313a2fb9f3831273def114770178158439a44ebfa1bc1d32f9d0f
+  data.tar.gz: a09fd3ce2c54908fedc14dc2d780bb3ec48d7091d05e2e8fcb5789e4ec1e30b9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: caac996badd1bbdeb231fad3f40f96a50386baf78ee356587d5fc5d2b4a095f1073bee417a99aab081c13cb1a18802785abed618dc85db504f56145c620c46b6
+  data.tar.gz: 697a89b810998154a44338e8e73f02351b72afa00fc35e1fc115a22ce5ecfeddd27d29bd8fdf4fb779a3b17fbaa2f4f361a2c30b68c0b5ce0cd49a6edeba1a1d
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,6 @@
+## 11.17.0
+  - Added support for setting the HTTP compression level. Deprecated `http_compression` in favour of `compression_level` and enabled compression level 1 by default [#1148](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1148)
+
 ## 11.16.0
   - Added support for Serverless Elasticsearch [#1145](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1145)
 
data/docs/index.asciidoc
CHANGED
@@ -277,9 +277,9 @@ not reevaluate its DNS value while the keepalive is in effect.
 ==== HTTP Compression
 
 This plugin always reads compressed responses from {es}.
-
+By default, it sends compressed bulk requests to {es}.
 
-If you are concerned about bandwidth, you can
+If you are concerned about bandwidth, you can set a higher <<plugins-{type}s-{plugin}-compression_level>> to trade CPU capacity for a reduction in network IO.
 
 ==== Authentication
 
@@ -310,6 +310,7 @@ This plugin supports the following configuration options plus the
 | <<plugins-{type}s-{plugin}-ca_trusted_fingerprint>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-cloud_auth>> |<<password,password>>|No
 | <<plugins-{type}s-{plugin}-cloud_id>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-compression_level>> |<<number,number>>, one of `[0 ~ 9]`|No
 | <<plugins-{type}s-{plugin}-custom_headers>> |<<hash,hash>>|No
 | <<plugins-{type}s-{plugin}-data_stream>> |<<string,string>>, one of `["true", "false", "auto"]`|No
 | <<plugins-{type}s-{plugin}-data_stream_auto_routing>> |<<boolean,boolean>>|No
@@ -459,6 +460,17 @@ Cloud ID, from the Elastic Cloud web console. If set `hosts` should not be used.
 For more details, check out the
 {logstash-ref}/connecting-to-cloud.html[Logstash-to-Cloud documentation].
 
+[id="plugins-{type}s-{plugin}-compression_level"]
+===== `compression_level`
+
+* Value can be any of: `0`, `1`, `2`, `3`, `4`, `5`, `6`, `7`, `8`, `9`
+* Default value is `1`
+
+The gzip compression level. Setting this value to `0` disables compression.
+The compression level must be in the range of `1` (best speed) to `9` (best compression).
+
+Increasing the compression level will reduce the network usage but will increase the CPU usage.
+
 [id="plugins-{type}s-{plugin}-data_stream"]
 ===== `data_stream`
 
@@ -618,7 +630,7 @@ NOTE: Deprecated, refer to <<plugins-{type}s-{plugin}-silence_errors_in_log>>.
 Pass a set of key value pairs as the headers sent in each request to
 an elasticsearch node. The headers will be used for any kind of request
 (_bulk request, template installation, health checks and sniffing).
-These custom headers will be overidden by settings like `
+These custom headers will be overridden by settings like `compression_level`.
 
 [id="plugins-{type}s-{plugin}-healthcheck_path"]
 ===== `healthcheck_path`
@@ -659,11 +671,12 @@ Any special characters present in the URLs here MUST be URL escaped! This means
 
 [id="plugins-{type}s-{plugin}-http_compression"]
 ===== `http_compression`
+deprecated[11.17.0, Replaced by <<plugins-{type}s-{plugin}-compression_level>>]
 
 * Value type is <<boolean,boolean>>
 * Default value is `false`
 
-
+Setting `true` enables gzip compression level 1 on requests.
 
 This setting allows you to reduce this plugin's outbound network traffic by
 compressing each bulk _request_ to {es}.
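Taken together, the documentation changes above replace a boolean knob with a tunable level. A pipeline snippet along the following lines shows the intended usage (illustrative only: the host and index values are placeholders, not part of this changeset):

output {
  elasticsearch {
    hosts             => ["https://es.example.com:9200"]
    index             => "logs-%{+YYYY.MM.dd}"
    # 0 disables gzip; 1 (the new default) favours speed; 9 favours smaller payloads
    compression_level => 9
  }
}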
data/lib/logstash/outputs/elasticsearch/http_client.rb
CHANGED
@@ -118,7 +118,7 @@ module LogStash; module Outputs; class ElasticSearch;
      end

      body_stream = StringIO.new
-      if http_compression
+      if compression_level?
        body_stream.set_encoding "BINARY"
        stream_writer = gzip_writer(body_stream)
      else
@@ -141,14 +141,14 @@ module LogStash; module Outputs; class ElasticSearch;
            :batch_offset => (index + 1 - batch_actions.size))
          bulk_responses << bulk_send(body_stream, batch_actions)
          body_stream.truncate(0) && body_stream.seek(0)
-          stream_writer = gzip_writer(body_stream) if http_compression
+          stream_writer = gzip_writer(body_stream) if compression_level?
          batch_actions.clear
        end
        stream_writer.write(as_json)
        batch_actions << action
      end

-      stream_writer.close if http_compression
+      stream_writer.close if compression_level?

      logger.debug("Sending final bulk request for batch.",
        :action_count => batch_actions.size,
@@ -157,7 +157,7 @@ module LogStash; module Outputs; class ElasticSearch;
        :batch_offset => (actions.size - batch_actions.size))
      bulk_responses << bulk_send(body_stream, batch_actions) if body_stream.size > 0

-      body_stream.close
+      body_stream.close unless compression_level?
      join_bulk_responses(bulk_responses)
    end

@@ -165,7 +165,7 @@ module LogStash; module Outputs; class ElasticSearch;
      fail(ArgumentError, "Cannot create gzip writer on IO with unread bytes") unless io.eof?
      fail(ArgumentError, "Cannot create gzip writer on non-empty IO") unless io.pos == 0

-      Zlib::GzipWriter.new(io,
+      Zlib::GzipWriter.new(io, client_settings.fetch(:compression_level), Zlib::DEFAULT_STRATEGY)
    end

    def join_bulk_responses(bulk_responses)
@@ -176,7 +176,7 @@ module LogStash; module Outputs; class ElasticSearch;
    end

    def bulk_send(body_stream, batch_actions)
-      params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
+      params = compression_level? ? {:headers => {"Content-Encoding" => "gzip"}} : {}
      response = @pool.post(@bulk_path, params, body_stream.string)

      @bulk_response_metrics.increment(response.code.to_s)
@@ -298,8 +298,10 @@ module LogStash; module Outputs; class ElasticSearch;
      @_ssl_options ||= client_settings.fetch(:ssl, {})
    end

-    def http_compression
-      client_settings.fetch(:http_compression, false)
+    # return true if compression_level is [1..9]
+    # return false if it is 0
+    def compression_level?
+      client_settings.fetch(:compression_level) > 0
    end

    def build_adapter(options)
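The gzip_writer change above is where the configured level takes effect: the bulk body is streamed through Zlib::GzipWriter at client_settings[:compression_level]. A standalone sketch of the same call (not plugin code; the sample payload is invented) illustrates the size/CPU trade-off:

require 'zlib'
require 'stringio'

payload = %({"index":{}}\n{"message":"hello world"}\n) * 1_000

[1, 9].each do |level|
  io = StringIO.new
  io.set_encoding "BINARY"
  gz = Zlib::GzipWriter.new(io, level, Zlib::DEFAULT_STRATEGY)
  gz.write(payload)
  gz.close                      # flushes the gzip trailer into io
  puts "level #{level}: #{io.string.bytesize} bytes"
end

Level 9 yields the smallest body but spends the most CPU per batch; level 1 is the cheapest non-zero setting and is the new default.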
data/lib/logstash/outputs/elasticsearch/http_client_builder.rb
CHANGED
@@ -8,7 +8,7 @@ module LogStash; module Outputs; class ElasticSearch;
        :pool_max => params["pool_max"],
        :pool_max_per_route => params["pool_max_per_route"],
        :check_connection_timeout => params["validate_after_inactivity"],
-        :http_compression => params["http_compression"],
+        :compression_level => params["compression_level"],
        :headers => params["custom_headers"] || {}
      }

data/lib/logstash/outputs/elasticsearch.rb
CHANGED
@@ -276,6 +276,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
    super
    setup_ecs_compatibility_related_defaults
    setup_ssl_params!
+    setup_compression_level!
  end

  def register
@@ -368,6 +369,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
        params['proxy'] = proxy # do not do resolving again
      end
    end
+
    super(params)
  end

@@ -669,6 +671,20 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
    params['ssl_verification_mode'] = @ssl_verification_mode unless @ssl_verification_mode.nil?
  end

+  def setup_compression_level!
+    @compression_level = normalize_config(:compression_level) do |normalize|
+      normalize.with_deprecated_mapping(:http_compression) do |http_compression|
+        if http_compression == true
+          DEFAULT_ZIP_LEVEL
+        else
+          0
+        end
+      end
+    end
+
+    params['compression_level'] = @compression_level unless @compression_level.nil?
+  end
+
  # To be overidden by the -java version
  VALID_HTTP_ACTIONS = ["index", "delete", "create", "update"]
  def valid_actions
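setup_compression_level! above reconciles the deprecated boolean with the new numeric option. A self-contained sketch of the resulting mapping (illustrative only: effective_compression_level is a hypothetical helper, and it assumes only one of the two settings is supplied; the plugin's normalize_config helper is what actually arbitrates conflicts):

# Hypothetical helper mirroring the mapping in setup_compression_level!:
# the deprecated boolean collapses to DEFAULT_ZIP_LEVEL (1) or 0.
def effective_compression_level(params)
  return params['compression_level'] if params.key?('compression_level')
  return 1 if params['http_compression'] == true
  return 0 if params['http_compression'] == false
  1 # neither setting supplied: the new default is level 1
end

effective_compression_level('http_compression' => true)   # => 1
effective_compression_level('http_compression' => false)  # => 0
effective_compression_level('compression_level' => 9)     # => 9
effective_compression_level({})                           # => 1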
data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb
CHANGED
@@ -7,6 +7,7 @@ module LogStash; module PluginMixins; module ElasticSearch
    # This module defines common options that can be reused by alternate elasticsearch output plugins such as the elasticsearch_data_streams output.

    DEFAULT_HOST = ::LogStash::Util::SafeURI.new("//127.0.0.1")
+    DEFAULT_ZIP_LEVEL = 1

    CONFIG_PARAMS = {
      # Username to authenticate to a secure Elasticsearch cluster
@@ -186,7 +187,14 @@ module LogStash; module PluginMixins; module ElasticSearch
      :validate_after_inactivity => { :validate => :number, :default => 10000 },

      # Enable gzip compression on requests. Note that response compression is on by default for Elasticsearch v5.0 and beyond
-      :http_compression => { :validate => :boolean, :default => false },
+      # Set `true` to enable compression with level 1
+      # Set `false` to disable compression with level 0
+      :http_compression => { :validate => :boolean, :default => true, :deprecated => "Set 'compression_level' instead." },
+
+      # Number `1` ~ `9` are the gzip compression level
+      # Set `0` to disable compression
+      # Set `1` (best speed) to `9` (best compression) to use compression
+      :compression_level => { :validate => [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], :default => DEFAULT_ZIP_LEVEL },

      # Custom Headers to send on each request to elasticsearch nodes
      :custom_headers => { :validate => :hash, :default => {} },
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '11.16.0'
+  s.version = '11.17.0'
   s.licenses = ['apache-2.0']
   s.summary = "Stores logs in Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/integration/outputs/compressed_indexing_spec.rb
CHANGED
@@ -8,63 +8,64 @@ RSpec::Matchers.define :a_valid_gzip_encoded_string do
  }
end

-
-
-
-
-
-
-
-  {
-
-
-
+[ {"http_compression" => true}, {"compression_level" => 1} ].each do |compression_config|
+  describe "indexing with http_compression turned on", :integration => true do
+    let(:event) { LogStash::Event.new("message" => "Hello World!", "type" => type) }
+    let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+    let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+    let(:event_count) { 10000 + rand(500) }
+    let(:events) { event_count.times.map { event }.to_a }
+    let(:config) {
+      {
+        "hosts" => get_host_port,
+        "index" => index
+      }
    }
-
-  subject { LogStash::Outputs::ElasticSearch.new(config) }
+    subject { LogStash::Outputs::ElasticSearch.new(config.merge(compression_config)) }

-
-
-
-
-
-
+    let(:es_url) { "http://#{get_host_port}" }
+    let(:index_url) {"#{es_url}/#{index}"}
+    let(:http_client_options) { {} }
+    let(:http_client) do
+      Manticore::Client.new(http_client_options)
+    end

-
-
-
-
+    before do
+      subject.register
+      subject.multi_receive([])
+    end

-
-
-
+    shared_examples "an indexer" do
+      it "ships events" do
+        subject.multi_receive(events)

-
+        http_client.post("#{es_url}/_refresh").call

-
-
-
-
+        response = http_client.get("#{index_url}/_count?q=*")
+        result = LogStash::Json.load(response.body)
+        cur_count = result["count"]
+        expect(cur_count).to eq(event_count)

-
-
-
-
-
-
-
+        response = http_client.get("#{index_url}/_search?q=*&size=1000")
+        result = LogStash::Json.load(response.body)
+        result["hits"]["hits"].each do |doc|
+          if ESHelper.es_version_satisfies?("< 8")
+            expect(doc["_type"]).to eq(type)
+          else
+            expect(doc).not_to include("_type")
+          end
+          expect(doc["_index"]).to eq(index)
        end
-        expect(doc["_index"]).to eq(index)
      end
    end
-end

-
-
-
-
-
-
+    it "sets the correct content-encoding header and body is compressed" do
+      expect(subject.client.pool.adapter.client).to receive(:send).
+        with(anything, anything, {:headers=>{"Content-Encoding"=>"gzip", "Content-Type"=>"application/json"}, :body => a_valid_gzip_encoded_string}).
+        and_call_original
+      subject.multi_receive(events)
+    end

-
-end
+    it_behaves_like("an indexer")
+  end
+end
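The spec above asserts both the Content-Encoding header and that the body satisfies a_valid_gzip_encoded_string, a matcher defined at the top of the file (its definition is outside this diff). A minimal standalone version of that check, under the assumption that "valid gzip" means the stream carries the 0x1f 0x8b magic bytes and inflates cleanly, might look like:

require 'zlib'
require 'stringio'

# Hypothetical check, not the spec's matcher: magic bytes plus a clean inflate.
def valid_gzip?(bytes)
  return false unless bytes.byteslice(0, 2) == "\x1f\x8b".b
  Zlib::GzipReader.new(StringIO.new(bytes)).read
  true
rescue Zlib::GzipFile::Error
  false
end

valid_gzip?(Zlib.gzip("hello"))   # => true
valid_gzip?("plainly not gzip")   # => false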
data/spec/integration/outputs/index_spec.rb
CHANGED
@@ -262,7 +262,8 @@ describe "indexing" do
  let(:config) {
    {
      "hosts" => get_host_port,
-      "index" => index
+      "index" => index,
+      "http_compression" => false
    }
  }
  it_behaves_like("an indexer")
@@ -273,7 +274,8 @@ describe "indexing" do
  let(:config) {
    {
      "hosts" => get_host_port,
-      "index" => index
+      "index" => index,
+      "http_compression" => false
    }
  }
  it_behaves_like("an indexer")
@@ -291,7 +293,8 @@ describe "indexing" do
      "password" => password,
      "ssl_enabled" => true,
      "ssl_certificate_authorities" => cacert,
-      "index" => index
+      "index" => index,
+      "http_compression" => false
    }
  end

@@ -351,7 +354,8 @@ describe "indexing" do
      "hosts" => ["https://#{CGI.escape(user)}:#{CGI.escape(password)}@elasticsearch:9200"],
      "ssl_enabled" => true,
      "ssl_certificate_authorities" => "spec/fixtures/test_certs/test.crt",
-      "index" => index
+      "index" => index,
+      "http_compression" => false
    }
  end

data/spec/unit/outputs/elasticsearch/http_client_spec.rb
CHANGED
@@ -183,6 +183,25 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
    end
  end

+  describe "compression_level?" do
+    subject { described_class.new(base_options) }
+    let(:base_options) { super().merge(:client_settings => {:compression_level => compression_level}) }
+
+    context "with client_settings `compression_level => 1`" do
+      let(:compression_level) { 1 }
+      it "gives true" do
+        expect(subject.compression_level?).to be_truthy
+      end
+    end
+
+    context "with client_settings `compression_level => 0`" do
+      let(:compression_level) { 0 }
+      it "gives false" do
+        expect(subject.compression_level?).to be_falsey
+      end
+    end
+  end
+
  describe "#bulk" do
    subject(:http_client) { described_class.new(base_options) }

@@ -192,13 +211,14 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
      ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message}],
    ]}

-    [
-    context "with `
+    [0, 9].each do |compression_level|
+      context "with `compression_level => #{compression_level}`" do

-      let(:base_options) { super().merge(:client_settings => {:
+        let(:base_options) { super().merge(:client_settings => {:compression_level => compression_level}) }
+        let(:compression_level_enabled) { compression_level > 0 }

        before(:each) do
-          if
+          if compression_level_enabled
            expect(http_client).to receive(:gzip_writer).at_least(:once).and_call_original
          else
            expect(http_client).to_not receive(:gzip_writer)
@@ -212,7 +232,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
      it "should be handled properly" do
        allow(subject).to receive(:join_bulk_responses)
        expect(subject).to receive(:bulk_send).once do |data|
-          if !
+          if !compression_level_enabled
            expect(data.size).to be > target_bulk_bytes
          else
            expect(Zlib::gunzip(data.string).size).to be > target_bulk_bytes
data/spec/unit/outputs/elasticsearch_spec.rb
CHANGED
@@ -474,7 +474,7 @@ describe LogStash::Outputs::ElasticSearch do

  context "unexpected bulk response" do
    let(:options) do
-      { "hosts" => "127.0.0.1:9999", "index" => "%{foo}", "manage_template" => false }
+      { "hosts" => "127.0.0.1:9999", "index" => "%{foo}", "manage_template" => false, "http_compression" => false }
    end

    let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }
@@ -624,6 +624,7 @@ describe LogStash::Outputs::ElasticSearch do
  end

  context '413 errors' do
+    let(:options) { super().merge("http_compression" => "false") }
    let(:payload_size) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES + 1024 }
    let(:event) { ::LogStash::Event.new("message" => ("a" * payload_size ) ) }

@@ -1557,6 +1558,37 @@ describe LogStash::Outputs::ElasticSearch do
    end
  end

+  describe "http compression" do
+    describe "initialize setting" do
+      context "with `http_compression` => true" do
+        let(:options) { super().merge('http_compression' => true) }
+        it "set compression level to 1" do
+          subject.register
+          expect(subject.instance_variable_get(:@compression_level)).to eq(1)
+        end
+      end
+
+      context "with `http_compression` => false" do
+        let(:options) { super().merge('http_compression' => false) }
+        it "set compression level to 0" do
+          subject.register
+          expect(subject.instance_variable_get(:@compression_level)).to eq(0)
+        end
+      end
+
+      [0, 9].each do |config|
+        context "with `compression_level` => #{config}" do
+          let(:options) { super().merge('compression_level' => config) }
+          it "keeps the setting" do
+            subject.register
+            expect(subject.instance_variable_get(:@compression_level)).to eq(config)
+          end
+        end
+      end
+    end
+
+  end
+
  @private

  def stub_manticore_client!(manticore_double = nil)
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 11.16.0
+  version: 11.17.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-09-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement