logstash-output-elasticsearch 11.22.12-java → 11.22.13-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +17 -5
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/es_spec_helper.rb +34 -22
- data/spec/fixtures/test_certs/GENERATED_AT +1 -1
- data/spec/fixtures/test_certs/ca.crt +30 -27
- data/spec/fixtures/test_certs/ca.der.sha256 +1 -1
- data/spec/fixtures/test_certs/renew.sh +2 -3
- data/spec/fixtures/test_certs/test.crt +29 -28
- data/spec/fixtures/test_certs/test.der.sha256 +1 -1
- data/spec/fixtures/test_certs/test.p12 +0 -0
- data/spec/integration/outputs/compressed_indexing_spec.rb +3 -1
- data/spec/integration/outputs/delete_spec.rb +4 -4
- data/spec/integration/outputs/ilm_spec.rb +18 -12
- data/spec/integration/outputs/index_spec.rb +12 -2
- data/spec/integration/outputs/index_version_spec.rb +7 -7
- data/spec/integration/outputs/painless_update_spec.rb +11 -10
- data/spec/integration/outputs/unsupported_actions_spec.rb +15 -10
- data/spec/integration/outputs/update_spec.rb +11 -9
- data/spec/spec_helper.rb +8 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +77 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +1 -1
- data/spec/unit/outputs/error_whitelist_spec.rb +0 -1
- metadata +2 -6
- data/spec/support/elasticsearch/api/actions/get_alias.rb +0 -18
- data/spec/support/elasticsearch/api/actions/put_alias.rb +0 -24
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: db023d3f7c10e52c9a7fdd9e2480ccd0f988c977357d86b3980d0698a21f5b10
+data.tar.gz: f7d2ca8aa8dbb5acf848d5c20cca9911a6530fc5296d9765cc26df96df7554c0
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: cd23a9523896c06623ec91a33ec820363af6df32149bf0d445ab1218e5b344668398b09cbcdc5b88b39efa4771d79f9de52f80d968105d991ce369cbafd634bc
+data.tar.gz: e63f6a22b9eb7f308c164ba9e4d63f6685305f82d9e1ef9d4e86d7c6fedb1d19d9de4fd6949b82d534f5745c33d75ec35ae6b036fcc3b8efc2f41977a6948e07
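Note: the SHA256/SHA512 values above are the digests RubyGems embeds for the gem's metadata.gz and data.tar.gz archives. As a minimal sketch of how a downloaded copy of 11.22.13 could be checked against them (the local .gem path is an illustrative assumption, not part of this release), RubyGems' own package verification can be used:

```ruby
# Minimal sketch: Gem::Package#verify recomputes the archive digests and
# compares them with the checksums stored inside the gem file.
require 'rubygems/package'

gem_path = 'logstash-output-elasticsearch-11.22.13-java.gem' # assumed local path

begin
  Gem::Package.new(gem_path).verify
  puts "#{gem_path}: package checksums verified"
rescue Gem::Package::FormatError => e
  warn "verification failed: #{e.message}"
end
```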
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,9 @@
+## 11.22.13
+- Add headers reporting uncompressed size and doc count for bulk requests [#1217](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1217)
+
 ## 11.22.12
 - Properly handle http code 413 (Payload Too Large) [#1199](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1199)
+
 ## 11.22.11
 - Remove irrelevant log warning about elastic stack version [#1202](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1202)

data/lib/logstash/outputs/elasticsearch/http_client.rb
CHANGED
@@ -21,7 +21,8 @@ module LogStash; module Outputs; class ElasticSearch;
 # We wound up agreeing that a number greater than 10 MiB and less than 100MiB
 # made sense. We picked one on the lowish side to not use too much heap.
 TARGET_BULK_BYTES = 20 * 1024 * 1024 # 20MiB
-
+EVENT_COUNT_HEADER = "X-Elastic-Event-Count".freeze
+UNCOMPRESSED_LENGTH_HEADER = "X-Elastic-Uncompressed-Request-Length".freeze

 class HttpClient
 attr_reader :client, :options, :logger, :pool, :action_count, :recv_count
@@ -143,7 +144,11 @@ module LogStash; module Outputs; class ElasticSearch;
 :payload_size => stream_writer.pos,
 :content_length => body_stream.size,
 :batch_offset => (index + 1 - batch_actions.size))
-
+headers = {
+  EVENT_COUNT_HEADER => batch_actions.size.to_s,
+  UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
+}
+bulk_responses << bulk_send(body_stream, batch_actions, headers)
 body_stream.truncate(0) && body_stream.seek(0)
 stream_writer = gzip_writer(body_stream) if compression_level?
 batch_actions.clear
@@ -159,7 +164,14 @@ module LogStash; module Outputs; class ElasticSearch;
 :payload_size => stream_writer.pos,
 :content_length => body_stream.size,
 :batch_offset => (actions.size - batch_actions.size))
-
+
+if body_stream.size > 0
+  headers = {
+    EVENT_COUNT_HEADER => batch_actions.size.to_s,
+    UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
+  }
+  bulk_responses << bulk_send(body_stream, batch_actions, headers)
+end

 body_stream.close unless compression_level?
 join_bulk_responses(bulk_responses)
@@ -179,8 +191,8 @@ module LogStash; module Outputs; class ElasticSearch;
 }
 end

-def bulk_send(body_stream, batch_actions)
-params = compression_level? ? {:headers =>
+def bulk_send(body_stream, batch_actions, headers = {})
+params = compression_level? ? {:headers => headers.merge("Content-Encoding" => "gzip") } : { :headers => headers }

 begin
 response = @pool.post(@bulk_path, params, body_stream.string)
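Taken together, the http_client.rb hunks above make every bulk request report how many events it carries and how large the newline-delimited payload was before any gzip compression. A standalone sketch of that header construction follows (illustrative names, not the plugin's actual code path; compression is assumed to be enabled here):

```ruby
# Sketch of the header logic introduced above: given the size of the
# serialized-but-uncompressed bulk body and the actions in the batch,
# build the headers that accompany the POST to the _bulk endpoint.
EVENT_COUNT_HEADER = "X-Elastic-Event-Count".freeze
UNCOMPRESSED_LENGTH_HEADER = "X-Elastic-Uncompressed-Request-Length".freeze

def bulk_headers(uncompressed_bytes, batch_actions, compress: true)
  headers = {
    EVENT_COUNT_HEADER => batch_actions.size.to_s,
    UNCOMPRESSED_LENGTH_HEADER => uncompressed_bytes.to_s
  }
  compress ? headers.merge("Content-Encoding" => "gzip") : headers
end

# Example: a batch of three actions whose NDJSON body was 1_234 bytes pre-gzip
bulk_headers(1_234, Array.new(3))
# => {"X-Elastic-Event-Count"=>"3",
#     "X-Elastic-Uncompressed-Request-Length"=>"1234",
#     "Content-Encoding"=>"gzip"}
```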
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
 s.name = 'logstash-output-elasticsearch'
-s.version = '11.22.12'
+s.version = '11.22.13'
 s.licenses = ['apache-2.0']
 s.summary = "Stores logs in Elasticsearch"
 s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/es_spec_helper.rb
CHANGED
@@ -1,15 +1,18 @@
 require_relative './spec_helper'

 require 'elasticsearch'
-require_relative "support/elasticsearch/api/actions/delete_ilm_policy"
-require_relative "support/elasticsearch/api/actions/get_alias"
-require_relative "support/elasticsearch/api/actions/put_alias"
-require_relative "support/elasticsearch/api/actions/get_ilm_policy"
-require_relative "support/elasticsearch/api/actions/put_ilm_policy"

 require 'json'
 require 'cabin'

+# remove this condition and support package once plugin starts consuming elasticsearch-ruby v8 client
+# in elasticsearch-ruby v7, ILM APIs were in a separate xpack gem, now directly available
+unless elastic_ruby_v8_client_available?
+  require_relative "support/elasticsearch/api/actions/delete_ilm_policy"
+  require_relative "support/elasticsearch/api/actions/get_ilm_policy"
+  require_relative "support/elasticsearch/api/actions/put_ilm_policy"
+end
+
 module ESHelper
 def get_host_port
 if ENV["INTEGRATION"] == "true"
@@ -20,8 +23,12 @@ module ESHelper
 end

 def get_client
-
-
+  if elastic_ruby_v8_client_available?
+    Elasticsearch::Client.new(:hosts => [get_host_port])
+  else
+    Elasticsearch::Client.new(:hosts => [get_host_port]).tap do |client|
+      allow(client).to receive(:verify_elasticsearch).and_return(true) # bypass client side version checking
+    end
 end
 end

@@ -128,31 +135,36 @@ module ESHelper
 end

 def get_policy(client, policy_name)
-
+  if elastic_ruby_v8_client_available?
+    client.index_lifecycle_management.get_lifecycle(policy: policy_name)
+  else
+    client.get_ilm_policy(name: policy_name)
+  end
 end

 def put_policy(client, policy_name, policy)
-
-
-
-
-
-"aliases" => {
-index => {
-"is_write_index"=> true
-}
-}
-}
-client.put_alias({name: the_alias, body: body})
+  if elastic_ruby_v8_client_available?
+    client.index_lifecycle_management.put_lifecycle({:policy => policy_name, :body=> policy})
+  else
+    client.put_ilm_policy({:name => policy_name, :body=> policy})
+  end
 end

 def clean_ilm(client)
-
+  if elastic_ruby_v8_client_available?
+    client.index_lifecycle_management.get_lifecycle.each_key { |key| client.index_lifecycle_management.delete_lifecycle(policy: key) if key =~ /logstash-policy/ }
+  else
+    client.get_ilm_policy.each_key { |key| client.delete_ilm_policy(name: key) if key =~ /logstash-policy/ }
+  end
 end

 def supports_ilm?(client)
 begin
-
+  if elastic_ruby_v8_client_available?
+    client.index_lifecycle_management.get_lifecycle
+  else
+    client.get_ilm_policy
+  end
 true
 rescue
 false
data/spec/fixtures/test_certs/GENERATED_AT
CHANGED
@@ -1 +1 @@
-
+2025-07-22T11:15:03+01:00
data/spec/fixtures/test_certs/ca.crt
CHANGED
@@ -1,29 +1,32 @@
 -----BEGIN CERTIFICATE-----
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+MIIFdTCCA12gAwIBAgIUDITbsLT9hKser0ZzBZsxqgaZdWswDQYJKoZIhvcNAQEL
+BQAwSjELMAkGA1UEBhMCUFQxCzAJBgNVBAgMAk5BMQ8wDQYDVQQHDAZMaXNib24x
+DjAMBgNVBAoMBU15TGFiMQ0wCwYDVQQDDARyb290MB4XDTI1MDcyMjEwMTUwM1oX
+DTM1MDcyMDEwMTUwM1owSjELMAkGA1UEBhMCUFQxCzAJBgNVBAgMAk5BMQ8wDQYD
+VQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ0wCwYDVQQDDARyb290MIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAy1MyoBa4fXtv1eo9rkvcc2qCdn3n
+z6C9w63tD+w4S9wNAmCTNn4bLCHl6vkkXaKiZg4eIPkmdxivhiZFAq5h8PoHVYjk
+W5C2EP86UDX9Eeq1tjbsnfdJo7rqujyBqXu+MetcpCR59VhHB187oOqpuFXoviwy
+LLwXNDnlMymgzflxa6+gAzG9JCoZnilhgqd81IaHMe+yx81LXG78vBvtWO7iM+Gn
+7jcGQbASKYjmSbuM0LWfCOIe3EOxj+z3cApr+8uS1cpQrmcDeOMk6EBtFNWds4CD
+EW3Rhtf3zFb9pSqxyAFRWz0n8zJNEzBUontWya2HU90lCSxQBK7MRKVI+XT10yNN
+D4xfMkO3Qm9fxIgk+ZsGeDvwxJoJSGk5mKKLWXaYF89Z6PHbQj9IwJQ2bNvCbbD0
+kPnQm/aJfiXiB7gfUIgrX+itbWl1j3E8vS4piboAOwMLQHywaA0wFd4HpouiNjX8
+hPCaZ+l+T1z/JY98Luy0eAJhEC/kx8YCya/T/JrlWtZAYEmQWJjLWmifqfFSMJuC
+fREAGiNGgZ2GFjdAOKmDCZKAVnGvTrhEWvVu38yXcwmY9/nfmZLA92T+P/PDghlO
++WbApbcXIwOHBgv/NfsnRzozMw8sR2LvdCPOavcQ0iuKvg7zrA3PzVhF9iSD1lfz
+Iokr0sEBStgKMncCAwEAAaNTMFEwHQYDVR0OBBYEFKFadJx46upif1BrhYZ0iu8o
+2z8rMB8GA1UdIwQYMBaAFKFadJx46upif1BrhYZ0iu8o2z8rMA8GA1UdEwEB/wQF
+MAMBAf8wDQYJKoZIhvcNAQELBQADggIBAJi4FwYJz/RotoUpfrLZFf69RoI01Fje
+8ITt8SR1Dx/1GTPEuqVVfx0EYtOoH6Gg3FwgSQ9GHRDIa1vkHY5S+FUSOW3pCoZE
+/kaLu9bmFxn+GntghvQEor+LzODuZKLXupaGcu1tA4fzyuI4jglVD2sGZtLk//CT
+Hd4tOWXo5k1Fj0jMnJq+2Htr8yBeSAO5ZNsvtAjOUU6pfDEwL9bgRzlKKFQQMUYo
+6x1FvRDRXWjpzB/H+OSqOaoNLEB9FfEl8I7nn6uTenr5WxjPAOpwjZl9ObB/95xM
+p91abKbLQLev5I8npM9G3C/n01l3IzRs7DNHqGJTZO7frGhicD7/jNa+tkSioeJ2
+fIMqgDOvQE+gMxs19zw1tsI3+kqX7+ptTkU4Lan5V5ZKGfU8xtcVIlyRk5/yDUI5
+1dfQVubs6z07s6De2qa92LFz9l8sT6QuVer+c/wPPhBdMwbzcHyUJIBjFaBpxH86
+F7Mr5Zr/+qcbHglAHow1lBqdZzimqGd1koqFRat/pFUFh0iqktMmpl+ZUCjyoQEX
+93j8aMU2UQjYM8NJDE2aRculo9OEoqERYFM2m3nHvrtE7iZgddryLNH7ZmC1EquX
+MhZJ26GuZ2U4b9dAX858WTv0q1EF5S8KObMlxMU7IDk+cWlSD+puWliwfUKoTR/4
+JErSfjCSaRqh
 -----END CERTIFICATE-----
data/spec/fixtures/test_certs/ca.der.sha256
CHANGED
@@ -1 +1 @@
-
+d403930d5296f1515aadd3f730757e7719188b63a276687a3475128b746e4340
data/spec/fixtures/test_certs/renew.sh
CHANGED
@@ -3,8 +3,7 @@
 set -e
 cd "$(dirname "$0")"

-openssl x509 -
-openssl x509 -req -days 365 -in ca.csr -set_serial 0x01 -signkey ca.key -out ca.crt && rm ca.csr
+openssl req -x509 -new -nodes -key ca.key -subj "/C=PT/ST=NA/L=Lisbon/O=MyLab/CN=root" -sha256 -days 3650 -out ca.crt
 openssl x509 -in ca.crt -outform der | sha256sum | awk '{print $1}' > ca.der.sha256

 openssl x509 -x509toreq -in test.crt -signkey test.key -out test.csr
@@ -13,4 +12,4 @@ openssl x509 -in test.crt -outform der | sha256sum | awk '{print $1}' > test.der
 openssl pkcs12 -export -inkey test.key -in test.crt -passout "pass:1234567890" -out test.p12

 # output ISO8601 timestamp to file
-date -Iseconds > GENERATED_AT
+date -Iseconds > GENERATED_AT
data/spec/fixtures/test_certs/test.crt
CHANGED
@@ -1,30 +1,31 @@
 -----BEGIN CERTIFICATE-----
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+MIIFWjCCA0KgAwIBAgIBATANBgkqhkiG9w0BAQsFADBKMQswCQYDVQQGEwJQVDEL
+MAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwFTXlMYWIxDTAL
+BgNVBAMMBHJvb3QwHhcNMjUwNzIyMTAxNTAzWhcNMjYwNzIyMTAxNTAzWjBTMQsw
+CQYDVQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UE
+CgwFTXlMYWIxFjAUBgNVBAMMDWVsYXN0aWNzZWFyY2gwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDGIT9szzhN5HvZ2nivnCDzVfdYbbqBhgEbPppWPyFc
+V0r2rtmWfeK5EEdsTS/Ey4owTceOplPpAp4svF+a/i1/bHhqnQYYU4f7Qic4fDAs
+zLdiSIo0o1csNvIogm/P+uvSzE6eZRZUSmo49dY5SKSJt6Pjh6lM2MHEjsPKIKdA
+N57wEN90q4IZv6AHE9rphqxcmF1k+j5xmhCUS1EJ+y7hyZ0S7Hghdgp/0cxSu/7Y
+lVYyJpkIlQd3RPXzEf6VSYjtr9Ajp1rhvv2611q0CB5NALg/KR3OiMPYmTg5HAKO
+dweNam76nG3VxTeV3y+LW/pZAbi4qAl+4/c0eOGsL7o/YSn7qhThU1AWS9kY1WxT
+CrKRh58rUGRfmvpnOR99xvR4jz942RNiY61pTmsvo+iJspTII3GZhwIGlHtxE9Rn
+50lWQcDuDDHfObWhzb4rS55BERIwDUqD1LgCRd0ikRxPSvI1AM4cl35b4DTaDLcn
+M6EOfy+QTYsgNoftU1PI1onDQ7ZdfgrTrIBFQQRwOqfyB4bB2zWVj62LSDvZoYYi
+cNUecqyE1542WNKzmyE8Mrf3uknN2J6EH7EhmiyRBtGg3NEQCwIYM4/kWPNPOtkS
+jsn3cNbMNUZiSnQn/nTs4T8g6b2rrwsay/FGUE83AbPqqcTlp2RUVnjbC8KA5+iV
+1wIDAQABo0IwQDAdBgNVHQ4EFgQUb789MhsOk89lMWwSwBss1TLXDFAwHwYDVR0j
+BBgwFoAUoVp0nHjq6mJ/UGuFhnSK7yjbPyswDQYJKoZIhvcNAQELBQADggIBAI+G
+NKZ3s3m+/R4mH3M84gGWPE1joC2bLavYYLZjKnZv18o6fHX0IW/8v5hd5Df3SP5u
+vhjC88bewiKVHldqkC6ju9rbZxQynhFZGXbN9zLvFMZGkfRH5vB2Y13ZWBdWhq5L
+cRxpRk6WlwaSy0Ed4F12u9ERmhMOtSZhqAnNJBeVraOHeGlcMZXJdZkeyxkdcZ4y
+YJcrI8Da0dMxILgIuc9ZCynAItRAjMw1/3wjlx0Cyxif10ct+EFiP6Zv/gzoo05v
+tNeqOCrxAqAcwrS1u4q/KAKySiEIyxyU1nEI/g53nALwoQhFsRVqVXNAoy7xu37y
+o+lvs98rkq/NkkbBvRBPdcF/BYNtesRxKja/QAEvslyZfyICL9oqsuPPEB2nHtXa
+mWntT2NaXyr1FWCxHaXfZQOxSwco3vTk7HLuNug2wxIc/hewkLlk5NCRkAYfTlan
+gLhZ3vBej4oA8cdpODMb8SrYhqKTeX8E+ulHVS0paY0kszAGK2x2kHqRGNXUlfoB
+Ax0etGudHhgtTCAmUgJDyQNLkvBKHYQJ2V/Wv/xej7wXKkACNKlRORl8zcnbVErd
+GM/ibfqNIPIo8dP2EDycSV6vIICqkxpCZZObNjfgKa0UN03qYi7xREhhEehXgU8H
+IO9w2pG7ReiO2E+bLIs0Zh1+2IwlM1EM/eqbq+Gi
 -----END CERTIFICATE-----
data/spec/fixtures/test_certs/test.der.sha256
CHANGED
@@ -1 +1 @@
-
+386ae6ef809d20ddfcc7ca68f480e82007c031b365c86cc58922cf1bd7238f89
data/spec/fixtures/test_certs/test.p12
CHANGED
Binary file
data/spec/integration/outputs/compressed_indexing_spec.rb
CHANGED
@@ -36,7 +36,9 @@ end
 {
 "Content-Encoding" => "gzip",
 "Content-Type" => "application/json",
-'x-elastic-product-origin' => 'logstash-output-elasticsearch'
+'x-elastic-product-origin' => 'logstash-output-elasticsearch',
+'X-Elastic-Event-Count' => anything,
+'X-Elastic-Uncompressed-Request-Length' => anything,
 }
 }

@@ -39,12 +39,12 @@ describe "Versioned delete", :integration => true do
|
|
39
39
|
it "should ignore non-monotonic external version updates" do
|
40
40
|
id = "ev2"
|
41
41
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
|
42
|
-
r = es.get(:index => 'logstash-delete', :
|
42
|
+
r = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
|
43
43
|
expect(r['_version']).to eq(99)
|
44
44
|
expect(r['_source']['message']).to eq('foo')
|
45
45
|
|
46
46
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)])
|
47
|
-
r2 = es.get(:index => 'logstash-delete', :
|
47
|
+
r2 = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
|
48
48
|
expect(r2['_version']).to eq(99)
|
49
49
|
expect(r2['_source']['message']).to eq('foo')
|
50
50
|
end
|
@@ -52,12 +52,12 @@ describe "Versioned delete", :integration => true do
|
|
52
52
|
it "should commit monotonic external version updates" do
|
53
53
|
id = "ev3"
|
54
54
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
|
55
|
-
r = es.get(:index => 'logstash-delete', :
|
55
|
+
r = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
|
56
56
|
expect(r['_version']).to eq(99)
|
57
57
|
expect(r['_source']['message']).to eq('foo')
|
58
58
|
|
59
59
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)])
|
60
|
-
expect { es.get(:index => 'logstash-delete', :
|
60
|
+
expect { es.get(:index => 'logstash-delete', :id => id, :refresh => true) }.to raise_error(get_expected_error_class)
|
61
61
|
end
|
62
62
|
end
|
63
63
|
end
|
data/spec/integration/outputs/ilm_spec.rb
CHANGED
@@ -102,7 +102,7 @@ shared_examples_for 'an ILM disabled Logstash' do
 it 'should not install the default policy' do
 subject.register
 sleep(1)
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 end

 it 'should not write the ILM settings into the template' do
@@ -282,12 +282,12 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 subject.register
 sleep(1)
 expect(@es.indices.exists_alias(name: "logstash")).to be_truthy
-expect(@es.get_alias(name: "logstash")).to include("logstash-000001")
+expect(@es.indices.get_alias(name: "logstash")).to include("logstash-000001")
 end
 end

 it 'should install it if it is not present' do
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 subject.register
 sleep(1)
 expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.not_to raise_error
@@ -298,7 +298,7 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 subject.register
 sleep(1)
 expect(@es.indices.exists_alias(name: "logstash")).to be_truthy
-expect(@es.get_alias(name: "logstash")).to include("logstash-#{todays_date}-000001")
+expect(@es.indices.get_alias(name: "logstash")).to include("logstash-#{todays_date}-000001")
 end

 it 'should ingest into a single index' do
@@ -340,14 +340,14 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 let (:policy) { small_max_doc_policy }

 before do
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 put_policy(@es,ilm_policy_name, policy)
 end

 it 'should not install the default policy if it is not used' do
 subject.register
 sleep(1)
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 end
 end

@@ -357,14 +357,14 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 let (:policy) { max_age_policy("1d") }

 before do
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 put_policy(@es,ilm_policy_name, policy)
 end

 it 'should not install the default policy if it is not used' do
 subject.register
 sleep(1)
-expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(
+expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
 end
 end

@@ -374,7 +374,7 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 subject.register
 sleep(1)
 expect(@es.indices.exists_alias(name: expected_index)).to be_truthy
-expect(@es.get_alias(name: expected_index)).to include("#{expected_index}-#{todays_date}-000001")
+expect(@es.indices.get_alias(name: expected_index)).to include("#{expected_index}-#{todays_date}-000001")
 end

 it 'should write the ILM settings into the template' do
@@ -443,17 +443,18 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 subject.register
 sleep(1)
 expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_truthy
-expect(@es.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
+expect(@es.indices.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
 end

 context 'when the custom rollover alias already exists' do
 it 'should ignore the already exists error' do
 expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_falsey
-
+@es.indices.create(index: "#{ilm_rollover_alias}-#{todays_date}-000001")
+@es.indices.put_alias(name: ilm_rollover_alias, index: "#{ilm_rollover_alias}-#{todays_date}-000001")
 expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_truthy
 subject.register
 sleep(1)
-expect(@es.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
+expect(@es.indices.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
 end

 end
@@ -532,3 +533,8 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
 end

 end
+
+def get_expected_error_class
+  return Elastic::Transport::Transport::Errors::NotFound if elastic_ruby_v8_client_available?
+  Elasticsearch::Transport::Transport::Errors::NotFound
+end
@@ -215,12 +215,22 @@ describe "indexing" do
|
|
215
215
|
|
216
216
|
it "sets the correct content-type header" do
|
217
217
|
expected_manticore_opts = {
|
218
|
-
:headers => {
|
218
|
+
:headers => {
|
219
|
+
"Content-Type" => "application/json",
|
220
|
+
'x-elastic-product-origin' => 'logstash-output-elasticsearch',
|
221
|
+
'X-Elastic-Event-Count' => anything,
|
222
|
+
'X-Elastic-Uncompressed-Request-Length' => anything
|
223
|
+
},
|
219
224
|
:body => anything
|
220
225
|
}
|
221
226
|
if secure
|
222
227
|
expected_manticore_opts = {
|
223
|
-
:headers => {
|
228
|
+
:headers => {
|
229
|
+
"Content-Type" => "application/json",
|
230
|
+
'x-elastic-product-origin' => 'logstash-output-elasticsearch',
|
231
|
+
'X-Elastic-Event-Count' => anything,
|
232
|
+
'X-Elastic-Uncompressed-Request-Length' => anything
|
233
|
+
},
|
224
234
|
:body => anything,
|
225
235
|
:auth => {
|
226
236
|
:user => user,
|
@@ -36,11 +36,11 @@ describe "Versioned indexing", :integration => true do
|
|
36
36
|
|
37
37
|
it "should default to ES version" do
|
38
38
|
subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foo")])
|
39
|
-
r = es.get(:index => 'logstash-index', :
|
39
|
+
r = es.get(:index => 'logstash-index', :id => '123', :refresh => true)
|
40
40
|
expect(r["_version"]).to eq(1)
|
41
41
|
expect(r["_source"]["message"]).to eq('foo')
|
42
42
|
subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foobar")])
|
43
|
-
r2 = es.get(:index => 'logstash-index', :
|
43
|
+
r2 = es.get(:index => 'logstash-index', :id => '123', :refresh => true)
|
44
44
|
expect(r2["_version"]).to eq(2)
|
45
45
|
expect(r2["_source"]["message"]).to eq('foobar')
|
46
46
|
end
|
@@ -63,7 +63,7 @@ describe "Versioned indexing", :integration => true do
|
|
63
63
|
it "should respect the external version" do
|
64
64
|
id = "ev1"
|
65
65
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
|
66
|
-
r = es.get(:index => 'logstash-index', :
|
66
|
+
r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
|
67
67
|
expect(r["_version"]).to eq(99)
|
68
68
|
expect(r["_source"]["message"]).to eq('foo')
|
69
69
|
end
|
@@ -71,12 +71,12 @@ describe "Versioned indexing", :integration => true do
|
|
71
71
|
it "should ignore non-monotonic external version updates" do
|
72
72
|
id = "ev2"
|
73
73
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
|
74
|
-
r = es.get(:index => 'logstash-index', :
|
74
|
+
r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
|
75
75
|
expect(r["_version"]).to eq(99)
|
76
76
|
expect(r["_source"]["message"]).to eq('foo')
|
77
77
|
|
78
78
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "98", "message" => "foo")])
|
79
|
-
r2 = es.get(:index => 'logstash-index', :
|
79
|
+
r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true)
|
80
80
|
expect(r2["_version"]).to eq(99)
|
81
81
|
expect(r2["_source"]["message"]).to eq('foo')
|
82
82
|
end
|
@@ -84,12 +84,12 @@ describe "Versioned indexing", :integration => true do
|
|
84
84
|
it "should commit monotonic external version updates" do
|
85
85
|
id = "ev3"
|
86
86
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
|
87
|
-
r = es.get(:index => 'logstash-index', :
|
87
|
+
r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
|
88
88
|
expect(r["_version"]).to eq(99)
|
89
89
|
expect(r["_source"]["message"]).to eq('foo')
|
90
90
|
|
91
91
|
subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "100", "message" => "foo")])
|
92
|
-
r2 = es.get(:index => 'logstash-index', :
|
92
|
+
r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true)
|
93
93
|
expect(r2["_version"]).to eq(100)
|
94
94
|
expect(r2["_source"]["message"]).to eq('foo')
|
95
95
|
end
|
@@ -22,11 +22,12 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
22
22
|
# This can fail if there are no indexes, ignore failure.
|
23
23
|
@es.indices.delete(:index => "*") rescue nil
|
24
24
|
@es.index(
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
25
|
+
{
|
26
|
+
:index => 'logstash-update',
|
27
|
+
:id => '123',
|
28
|
+
:body => { :message => 'Test', :counter => 1 },
|
29
|
+
:refresh => true
|
30
|
+
})
|
30
31
|
@es.indices.refresh
|
31
32
|
end
|
32
33
|
|
@@ -46,7 +47,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
46
47
|
subject = get_es_output(plugin_parameters)
|
47
48
|
subject.register
|
48
49
|
subject.multi_receive([LogStash::Event.new("count" => 4 )])
|
49
|
-
r = @es.get(:index => 'logstash-update', :
|
50
|
+
r = @es.get(:index => 'logstash-update', :id => "123", :refresh => true)
|
50
51
|
expect(r["_source"]["counter"]).to eq(5)
|
51
52
|
end
|
52
53
|
end
|
@@ -57,7 +58,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
57
58
|
subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
|
58
59
|
subject.register
|
59
60
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
60
|
-
r = @es.get(:index => 'logstash-update', :
|
61
|
+
r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
|
61
62
|
expect(r["_source"]["message"]).to eq('upsert message')
|
62
63
|
end
|
63
64
|
|
@@ -65,7 +66,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
65
66
|
subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
|
66
67
|
subject.register
|
67
68
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
68
|
-
r = @es.get(:index => 'logstash-update', :
|
69
|
+
r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
|
69
70
|
expect(r["_source"]["message"]).to eq('sample message here')
|
70
71
|
end
|
71
72
|
|
@@ -82,7 +83,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
82
83
|
subject.register
|
83
84
|
|
84
85
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
85
|
-
r = @es.get(:index => 'logstash-update', :
|
86
|
+
r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
|
86
87
|
expect(r["_source"]["message"]).to eq('upsert message')
|
87
88
|
end
|
88
89
|
|
@@ -91,7 +92,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
|
|
91
92
|
subject.register
|
92
93
|
subject.multi_receive([LogStash::Event.new("counter" => 1)])
|
93
94
|
@es.indices.refresh
|
94
|
-
r = @es.get(:index => 'logstash-update', :
|
95
|
+
r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
|
95
96
|
expect(r["_source"]["counter"]).to eq(1)
|
96
97
|
end
|
97
98
|
end
|
@@ -27,16 +27,21 @@ describe "Unsupported actions testing...", :integration => true do
|
|
27
27
|
@es.indices.delete(:index => "*") rescue nil
|
28
28
|
# index single doc for update purpose
|
29
29
|
@es.index(
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
30
|
+
{
|
31
|
+
:index => INDEX,
|
32
|
+
:id => '2',
|
33
|
+
:body => { :message => 'Test to doc indexing', :counter => 1 },
|
34
|
+
:refresh => true
|
35
|
+
}
|
34
36
|
)
|
37
|
+
|
35
38
|
@es.index(
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
39
|
+
{
|
40
|
+
:index => INDEX,
|
41
|
+
:id => '3',
|
42
|
+
:body => { :message => 'Test to doc deletion', :counter => 2 },
|
43
|
+
:refresh => true
|
44
|
+
}
|
40
45
|
)
|
41
46
|
@es.indices.refresh
|
42
47
|
end
|
@@ -63,12 +68,12 @@ describe "Unsupported actions testing...", :integration => true do
|
|
63
68
|
rejected_events = events.select { |event| !index_or_update.call(event) }
|
64
69
|
|
65
70
|
indexed_events.each do |event|
|
66
|
-
response = @es.get(:index => INDEX, :
|
71
|
+
response = @es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true)
|
67
72
|
expect(response['_source']['message']).to eq(event.get("message"))
|
68
73
|
end
|
69
74
|
|
70
75
|
rejected_events.each do |event|
|
71
|
-
expect {@es.get(:index => INDEX, :
|
76
|
+
expect {@es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true)}.to raise_error(get_expected_error_class)
|
72
77
|
end
|
73
78
|
end
|
74
79
|
end
|
@@ -22,10 +22,12 @@ describe "Update actions without scripts", :integration => true do
|
|
22
22
|
# This can fail if there are no indexes, ignore failure.
|
23
23
|
@es.indices.delete(:index => "*") rescue nil
|
24
24
|
@es.index(
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
25
|
+
{
|
26
|
+
:index => 'logstash-update',
|
27
|
+
:id => '123',
|
28
|
+
:body => { :message => 'Test', :counter => 1 },
|
29
|
+
:refresh => true
|
30
|
+
}
|
29
31
|
)
|
30
32
|
@es.indices.refresh
|
31
33
|
end
|
@@ -40,14 +42,14 @@ describe "Update actions without scripts", :integration => true do
|
|
40
42
|
subject = get_es_output({ 'document_id' => "456" } )
|
41
43
|
subject.register
|
42
44
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
43
|
-
expect {@es.get(:index => 'logstash-update', :
|
45
|
+
expect {@es.get(:index => 'logstash-update', :id => '456', :refresh => true)}.to raise_error(get_expected_error_class)
|
44
46
|
end
|
45
47
|
|
46
48
|
it "should update existing document" do
|
47
49
|
subject = get_es_output({ 'document_id' => "123" })
|
48
50
|
subject.register
|
49
51
|
subject.multi_receive([LogStash::Event.new("message" => "updated message here")])
|
50
|
-
r = @es.get(:index => 'logstash-update', :
|
52
|
+
r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true)
|
51
53
|
expect(r["_source"]["message"]).to eq('updated message here')
|
52
54
|
end
|
53
55
|
|
@@ -57,7 +59,7 @@ describe "Update actions without scripts", :integration => true do
|
|
57
59
|
subject = get_es_output({ 'document_id' => "123" })
|
58
60
|
subject.register
|
59
61
|
subject.multi_receive([LogStash::Event.new("data" => "updated message here", "message" => "foo")])
|
60
|
-
r = @es.get(:index => 'logstash-update', :
|
62
|
+
r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true)
|
61
63
|
expect(r["_source"]["data"]).to eq('updated message here')
|
62
64
|
expect(r["_source"]["message"]).to eq('foo')
|
63
65
|
end
|
@@ -94,7 +96,7 @@ describe "Update actions without scripts", :integration => true do
|
|
94
96
|
subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
|
95
97
|
subject.register
|
96
98
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
97
|
-
r = @es.get(:index => 'logstash-update', :
|
99
|
+
r = @es.get(:index => 'logstash-update', :id => '456', :refresh => true)
|
98
100
|
expect(r["_source"]["message"]).to eq('upsert message')
|
99
101
|
end
|
100
102
|
|
@@ -102,7 +104,7 @@ describe "Update actions without scripts", :integration => true do
|
|
102
104
|
subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
|
103
105
|
subject.register
|
104
106
|
subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
|
105
|
-
r = @es.get(:index => 'logstash-update', :
|
107
|
+
r = @es.get(:index => 'logstash-update', :id => '456', :refresh => true)
|
106
108
|
expect(r["_source"]["message"]).to eq('sample message here')
|
107
109
|
end
|
108
110
|
|
data/spec/spec_helper.rb
CHANGED
@@ -8,3 +8,11 @@ end
 RSpec.configure do |config|
 config.include LogStash::Outputs::ElasticSearch::SpecHelper
 end
+
+# remove once plugin starts consuming elasticsearch-ruby v8 client
+def elastic_ruby_v8_client_available?
+  Elasticsearch::Transport
+  false
+rescue NameError # NameError: uninitialized constant Elasticsearch::Transport if Elastic Ruby client is not available
+  true
+end
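The helper added above probes for the legacy `Elasticsearch::Transport` namespace and treats the resulting `NameError` as proof that the v8 Ruby client is in use. An equivalent probe (a sketch, not part of the plugin) can rely on `defined?` and skip the exception entirely:

```ruby
# Sketch of an equivalent check: the Elasticsearch::Transport namespace only
# ships with the elasticsearch-ruby v7 client, so its absence implies v8.
def elastic_ruby_v8_client_available?
  !defined?(Elasticsearch::Transport)
end
```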
data/spec/unit/outputs/elasticsearch/http_client_spec.rb
CHANGED
@@ -270,6 +270,83 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do

 end
 end
+context "the 'user-agent' header" do
+  let(:pool) { double("pool") }
+  let(:compression_level) { 6 }
+  let(:base_options) { super().merge( :client_settings => {:compression_level => compression_level}) }
+  let(:actions) { [
+    ["index", {:_id=>nil, :_index=>"logstash"}, {"message_1"=> message_1}],
+    ["index", {:_id=>nil, :_index=>"logstash"}, {"message_2"=> message_2}],
+    ["index", {:_id=>nil, :_index=>"logstash"}, {"message_3"=> message_3}],
+  ]}
+  let(:message_1) { "hello" }
+  let(:message_2_size) { 1_000 }
+  let(:message_2) { SecureRandom.alphanumeric(message_2_size / 2 ) * 2 }
+  let(:message_3_size) { 1_000 }
+  let(:message_3) { "m" * message_3_size }
+  let(:messages_size) { message_1.size + message_2.size + message_3.size }
+  let(:action_overhead) { 42 + 16 + 2 } # header plus doc key size plus new line overhead per action
+
+  let(:response) do
+    response = double("response")
+    allow(response).to receive(:code).and_return(response)
+    allow(response).to receive(:body).and_return({"errors" => false}.to_json)
+    response
+  end
+
+  before(:each) do
+    subject.instance_variable_set("@pool", pool)
+  end
+
+  it "carries bulk request's uncompressed size" do
+    expect(pool).to receive(:post) do |path, params, body|
+      headers = params.fetch(:headers, {})
+      expect(headers["X-Elastic-Event-Count"]).to eq("3")
+      expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (messages_size + (action_overhead * 3)).to_s
+    end.and_return(response)
+
+    subject.send(:bulk, actions)
+  end
+  context "without compression" do
+    let(:compression_level) { 0 }
+    it "carries bulk request's uncompressed size" do
+      expect(pool).to receive(:post) do |path, params, body|
+        headers = params.fetch(:headers, {})
+        expect(headers["X-Elastic-Event-Count"]).to eq("3")
+        expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (messages_size + (action_overhead * 3)).to_s
+      end.and_return(response)
+      subject.send(:bulk, actions)
+    end
+  end
+
+  context "with compressed messages over 20MB" do
+    let(:message_2_size) { 21_000_000 }
+    it "carries bulk request's uncompressed size" do
+      # only the first, tiny, message is sent first
+      expect(pool).to receive(:post) do |path, params, body|
+        headers = params.fetch(:headers, {})
+        expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_1.size + action_overhead).to_s
+        expect(headers["X-Elastic-Event-Count"]).to eq("1")
+      end.and_return(response)
+
+      # huge message_2 is sent afterwards alone
+      expect(pool).to receive(:post) do |path, params, body|
+        headers = params.fetch(:headers, {})
+        expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_2.size + action_overhead).to_s
+        expect(headers["X-Elastic-Event-Count"]).to eq("1")
+      end.and_return(response)
+
+      # finally medium message_3 is sent alone as well
+      expect(pool).to receive(:post) do |path, params, body|
+        headers = params.fetch(:headers, {})
+        expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_3.size + action_overhead).to_s
+        expect(headers["X-Elastic-Event-Count"]).to eq("1")
+      end.and_return(response)
+
+      subject.send(:bulk, actions)
+    end
+  end
+end
 end

 describe "sniffing" do
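In the new specs above, `action_overhead` (42 + 16 + 2 = 60 bytes) stands in for the serialized action metadata line, the document key, and the trailing newlines of each event, so the expected `X-Elastic-Uncompressed-Request-Length` is just the message sizes plus that fixed per-action overhead. A worked example under the spec's own assumptions:

```ruby
# Worked example of the expectation used above (the 60-byte per-action
# overhead is the spec's own approximation, not a measured constant).
action_overhead = 42 + 16 + 2        # => 60
message_sizes   = [5, 1_000, 1_000]  # "hello" plus two 1_000-byte messages

message_sizes.sum + action_overhead * message_sizes.size
# => 2185, the value expected in the "X-Elastic-Uncompressed-Request-Length" header
```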
data/spec/unit/outputs/elasticsearch_spec.rb
CHANGED
@@ -777,7 +777,7 @@ describe LogStash::Outputs::ElasticSearch do
 end

 before(:each) do
-allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
+allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array), instance_of(Hash)) do |stream, actions, headers|
 expect( stream.string ).to include '"foo":"bar1"'
 expect( stream.string ).to include '"foo":"bar2"'
 end.and_return(bulk_response, {"errors"=>false}) # let's make it go away (second call) to not retry indefinitely
data/spec/unit/outputs/error_whitelist_spec.rb
CHANGED
@@ -4,7 +4,6 @@ require_relative "../../../spec/es_spec_helper"
 describe "whitelisting error types in expected behavior" do
 let(:template) { '{"template" : "not important, will be updated by :index"}' }
 let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z") }
-let(:action1) { ["index", {:_id=>1, :routing=>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1] }
 let(:settings) { {"manage_template" => true, "index" => "logstash-2014.11.17", "template_overwrite" => true, "hosts" => get_host_port() } }

 subject { LogStash::Outputs::ElasticSearch.new(settings) }
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-version: 11.22.12
+version: 11.22.13
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-01
+date: 2025-08-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 requirement: !ruby/object:Gem::Requirement
@@ -326,9 +326,7 @@ files:
 - spec/integration/outputs/update_spec.rb
 - spec/spec_helper.rb
 - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb
-- spec/support/elasticsearch/api/actions/get_alias.rb
 - spec/support/elasticsearch/api/actions/get_ilm_policy.rb
-- spec/support/elasticsearch/api/actions/put_alias.rb
 - spec/support/elasticsearch/api/actions/put_ilm_policy.rb
 - spec/unit/http_client_builder_spec.rb
 - spec/unit/outputs/elasticsearch/data_stream_support_spec.rb
@@ -415,9 +413,7 @@ test_files:
 - spec/integration/outputs/update_spec.rb
 - spec/spec_helper.rb
 - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb
-- spec/support/elasticsearch/api/actions/get_alias.rb
 - spec/support/elasticsearch/api/actions/get_ilm_policy.rb
-- spec/support/elasticsearch/api/actions/put_alias.rb
 - spec/support/elasticsearch/api/actions/put_ilm_policy.rb
 - spec/unit/http_client_builder_spec.rb
 - spec/unit/outputs/elasticsearch/data_stream_support_spec.rb
data/spec/support/elasticsearch/api/actions/get_alias.rb
REMOVED
@@ -1,18 +0,0 @@
-# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-# or more contributor license agreements. Licensed under the Elastic License;
-# you may not use this file except in compliance with the Elastic License.
-
-module Elasticsearch
-  module API
-    module Actions
-
-      # Retrieve the list of index lifecycle management policies
-      def get_alias(arguments={})
-        method = HTTP_GET
-        path = Utils.__pathify '_alias', Utils.__escape(arguments[:name])
-        params = {}
-        perform_request(method, path, params, nil).body
-      end
-    end
-  end
-end
data/spec/support/elasticsearch/api/actions/put_alias.rb
REMOVED
@@ -1,24 +0,0 @@
-# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-# or more contributor license agreements. Licensed under the Elastic License;
-# you may not use this file except in compliance with the Elastic License.
-
-module Elasticsearch
-  module API
-    module Actions
-
-      # @option arguments [String] :name The name of the alias (*Required*)
-      # @option arguments [Hash] :The alias definition(*Required*)
-
-      def put_alias(arguments={})
-        raise ArgumentError, "Required argument 'name' missing" unless arguments[:name]
-        raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
-        method = HTTP_PUT
-        path = Utils.__pathify Utils.__escape(arguments[:name])
-
-        params = Utils.__validate_and_extract_params arguments
-        body = arguments[:body]
-        perform_request(method, path, params, body.to_json).body
-      end
-    end
-  end
-end