logstash-output-elasticsearch 9.0.2-java → 9.0.3-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/LICENSE +1 -1
- data/docs/index.asciidoc +3 -5
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json +45 -0
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +4 -1
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/fixtures/_nodes/2x_1x.json +27 -0
- data/spec/fixtures/_nodes/5x_and_above.json +81 -0
- data/spec/integration/outputs/sniffer_spec.rb +78 -25
- metadata +7 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ae7148b3b08c44548f172bc06c88ff48b94809ef756aeef21d4597efb2d50716
+  data.tar.gz: a6c62363e0d1f953cfe9ee9ef8ceed192f7d54f0e3421f2c403f1c110a11a1ed
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 420c2f66334764c1693936ca462cb45e8b196dec449ae7521373ba44c5521e6f82b76f9d3ac02a950dc597149b671701f16e2226f0070a8196c8946a2bdc9f92
+  data.tar.gz: 0f30be7f309a6734f6f0e050b8ff725db3ce71a7ce0c617997f35b0ec013410c7ca56991c59978ec147da8d87cb598d214272e7325ce84145257341fdfd4568c
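checksums.yaml records the SHA256 and SHA512 digests of the gem's internal metadata.gz and data.tar.gz archives. A minimal verification sketch, assuming the released 9.0.3 .gem has been unpacked (a .gem file is a plain tar archive) so that both files sit in the current directory:

```ruby
require "digest"

# Compare the computed digests against the "+" values recorded in checksums.yaml.
%w[metadata.gz data.tar.gz].each do |name|
  puts "#{name} SHA256: #{Digest::SHA256.file(name).hexdigest}"
  puts "#{name} SHA512: #{Digest::SHA512.file(name).hexdigest}"
end
```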
data/CHANGELOG.md
CHANGED
data/LICENSE
CHANGED
data/docs/index.asciidoc
CHANGED
@@ -519,10 +519,8 @@ if enabled, script is in charge of creating non-existent document (scripted update)
 * Default value is `false`
 
 This setting asks Elasticsearch for the list of all cluster nodes and adds them to the hosts list.
-
-
-`http.enabled` to false in their elasticsearch.yml. You can either use the `sniffing` option or
-manually enter multiple Elasticsearch hosts using the `hosts` parameter.
+For Elasticsearch 1.x and 2.x any nodes with `http.enabled` (on by default) will be added to the hosts list, including master-only nodes!
+For Elasticsearch 5.x and 6.x any nodes with `http.enabled` (on by default) will be added to the hosts list, excluding master-only nodes.
 
 [id="plugins-{type}s-{plugin}-sniffing_delay"]
 ===== `sniffing_delay`
@@ -635,7 +633,7 @@ Set the truststore password
 * Value type is <<string,string>>
 * Default value is `""`
 
-Set upsert content for update mode.
+Set upsert content for update mode.
 Create a new document with this parameter as json string if `document_id` doesn't exists
 
 [id="plugins-{type}s-{plugin}-user"]
data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json
ADDED
@@ -0,0 +1,45 @@
+{
+  "template" : "logstash-*",
+  "version" : 60001,
+  "settings" : {
+    "index.refresh_interval" : "5s"
+  },
+  "mappings" : {
+    "_doc" : {
+      "dynamic_templates" : [ {
+        "message_field" : {
+          "path_match" : "message",
+          "match_mapping_type" : "string",
+          "mapping" : {
+            "type" : "text",
+            "norms" : false
+          }
+        }
+      }, {
+        "string_fields" : {
+          "match" : "*",
+          "match_mapping_type" : "string",
+          "mapping" : {
+            "type" : "text", "norms" : false,
+            "fields" : {
+              "keyword" : { "type": "keyword", "ignore_above": 256 }
+            }
+          }
+        }
+      } ],
+      "properties" : {
+        "@timestamp": { "type": "date"},
+        "@version": { "type": "keyword"},
+        "geoip" : {
+          "dynamic": true,
+          "properties" : {
+            "ip": { "type": "ip" },
+            "location" : { "type" : "geo_point" },
+            "latitude" : { "type" : "half_float" },
+            "longitude" : { "type" : "half_float" }
+          }
+        }
+      }
+    }
+  }
+}
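The new default template for Elasticsearch 7.x uses the single `_doc` mapping type. A quick sanity-check sketch (the relative path below assumes it is run from the unpacked gem root):

```ruby
require "json"

template = JSON.parse(File.read("lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json"))

puts template["version"]                 # => 60001
puts template["mappings"].keys.inspect   # => ["_doc"]
puts template["mappings"]["_doc"]["dynamic_templates"].map { |t| t.keys.first }.inspect
# => ["message_field", "string_fields"]
```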
data/lib/logstash/outputs/elasticsearch/http_client/pool.rb
CHANGED
@@ -186,6 +186,9 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
 
     def sniff_5x_and_above(nodes)
       nodes.map do |id,info|
+        # Skip master-only nodes
+        next if info["roles"] && info["roles"] == ["master"]
+
         if info["http"]
           uri = LogStash::Util::SafeURI.new(info["http"]["publish_address"])
         end
@@ -247,7 +250,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
         meta[:version] = es_version
         major = major_version(es_version)
         if !@maximum_seen_major_version
-          @logger.info("ES Output version determined", :es_version =>
+          @logger.info("ES Output version determined", :es_version => major)
           set_new_major_version(major)
         elsif major > @maximum_seen_major_version
           @logger.warn("Detected a node with a higher major version than previously observed. This could be the result of an elasticsearch cluster upgrade.", :previous_major => @maximum_seen_major_version, :new_major => major, :node_url => url)
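The guard added to `sniff_5x_and_above` is the behavioral core of this release: nodes whose only role is `master` are skipped before their HTTP publish address is collected, so sniffing no longer adds dedicated master nodes to the hosts list. A standalone sketch of that filter (the node hash below is invented for illustration; the real method wraps each address in `LogStash::Util::SafeURI`):

```ruby
# Hypothetical node data shaped like the _nodes/http API response.
nodes = {
  "master-only"     => { "roles" => ["master"],         "http" => { "publish_address" => "localhost:9200" } },
  "data-only"       => { "roles" => ["data"],           "http" => { "publish_address" => "localhost:9201" } },
  "data-and-master" => { "roles" => ["data", "master"], "http" => { "publish_address" => "localhost:9202" } },
  "no-http"         => { "roles" => ["data"] }
}

addresses = nodes.map do |_id, info|
  # Skip master-only nodes, exactly as the new guard does.
  next if info["roles"] && info["roles"] == ["master"]
  info["http"] && info["http"]["publish_address"]
end.compact

p addresses  # => ["localhost:9201", "localhost:9202"]
```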
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '9.0.2'
+  s.version = '9.0.3'
   s.licenses = ['apache-2.0']
   s.summary = "Stores logs in Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/fixtures/_nodes/2x_1x.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "cluster_name" : "dev",
+  "nodes" : {
+    "Ur_68iBvTlm7Xr1HgSsh6w" : {
+      "name" : "dev-es-master01",
+      "transport_address" : "http://localhost:9200",
+      "host" : "127.0.0.1",
+      "ip" : "127.0.0.1",
+      "version" : "2.4.6",
+      "build" : "5376dca"
+    },
+    "sari4do3RG-mgh2CIZeHwA" : {
+      "name" : "dev-es-data01",
+      "transport_address" : "http://localhost:9201",
+      "host" : "127.0.0.1",
+      "ip" : "127.0.0.1",
+      "version" : "2.4.6",
+      "build" : "5376dca",
+      "http_address" : "127.0.0.1:9201",
+      "http" : {
+        "bound_address" : [ "[::1]:9201", "127.0.0.1:9201" ],
+        "publish_address" : "127.0.0.1:9201",
+        "max_content_length_in_bytes" : 104857600
+      }
+    }
+  }
+}
data/spec/fixtures/_nodes/5x_and_above.json
ADDED
@@ -0,0 +1,81 @@
+{
+  "_nodes" : {
+    "total" : 3,
+    "successful" : 3,
+    "failed" : 0
+  },
+  "cluster_name" : "dev",
+  "nodes" : {
+    "Ur_68iBvTlm7Xr1HgSsh6w" : {
+      "name" : "dev-es-master01",
+      "transport_address" : "http://localhost:9200",
+      "host" : "localhost",
+      "ip" : "127.0.0.1",
+      "version" : "5.5.1",
+      "build_hash" : "19c13d0",
+      "roles" : [
+        "master"
+      ],
+      "http" : {
+        "bound_address" : [
+          "[::]:9200"
+        ],
+        "publish_address" : "http://localhost:9200",
+        "max_content_length_in_bytes" : 104857600
+      }
+    },
+    "sari4do3RG-mgh2CIZeHwA" : {
+      "name" : "dev-es-data01",
+      "transport_address" : "http://localhost:9201",
+      "host" : "localhost",
+      "ip" : "127.0.0.1",
+      "version" : "5.5.1",
+      "build_hash" : "19c13d0",
+      "roles" : [
+        "data"
+      ],
+      "http" : {
+        "bound_address" : [
+          "[::]:9200"
+        ],
+        "publish_address" : "http://localhost:9201",
+        "max_content_length_in_bytes" : 104857600
+      }
+    },
+    "Rjy1WL66RHm4fyzXA8PCGQ" : {
+      "name" : "dev-es-datamaster01",
+      "transport_address" : "http://localhost:9202",
+      "host" : "localhost",
+      "ip" : "127.0.0.1",
+      "version" : "5.5.1",
+      "build_hash" : "19c13d0",
+      "roles" : [
+        "data",
+        "master"
+      ],
+      "http" : {
+        "bound_address" : [
+          "[::]:9200"
+        ],
+        "publish_address" : "http://localhost:9202",
+        "max_content_length_in_bytes" : 104857600
+      }
+    },
+    "OguP_obcT_S9JYNB8SKKgQ" : {
+      "name" : "dev-es-coordinator01",
+      "transport_address" : "http://localhost:9203",
+      "host" : "localhost",
+      "ip" : "127.0.0.1",
+      "version" : "5.5.1",
+      "build_hash" : "19c13d0",
+      "roles" : [ ],
+      "http" : {
+        "bound_address" : [
+          "[::]:9200"
+        ],
+        "publish_address" : "http://localhost:9203",
+        "max_content_length_in_bytes" : 104857600
+      }
+    }
+  }
+}
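This fixture exercises the master-only filter: of the four nodes, only dev-es-master01 should be dropped. A throwaway sketch (not part of the test suite; assumes it runs from the unpacked gem root so the fixture path resolves) showing the addresses a sniff over this fixture is expected to yield:

```ruby
require "json"

nodes = JSON.parse(File.read("spec/fixtures/_nodes/5x_and_above.json"))["nodes"]

addresses = nodes.map do |_id, info|
  next if info["roles"] && info["roles"] == ["master"]  # drops dev-es-master01
  info.dig("http", "publish_address")
end.compact

p addresses
# => ["http://localhost:9201", "http://localhost:9202", "http://localhost:9203"]
```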
data/spec/integration/outputs/sniffer_spec.rb
CHANGED
@@ -10,38 +10,91 @@ describe "pool sniffer", :integration => true do
   let(:options) { {:resurrect_delay => 2, :url_normalizer => proc {|u| u}} } # Shorten the delay a bit to speed up tests
 
   subject { LogStash::Outputs::ElasticSearch::HttpClient::Pool.new(logger, adapter, initial_urls, options) }
-
-  before do
-    subject.start
-  end
-
-  shared_examples("sniff parsing") do |check_exact|
-    it "should execute a sniff without error" do
-      expect { subject.check_sniff }.not_to raise_error
-    end
 
-
-
-
-
-
-
-
+  describe("Simple sniff parsing") do
+    before(:each) { subject.start }
+
+    context "with single node" do
+      it "should execute a sniff without error" do
+        expect { subject.check_sniff }.not_to raise_error
+      end
+
+      it "should return single sniff URL" do
+        uris = subject.check_sniff
+
         expect(uris.size).to eq(1)
       end
+
+      it "should return the correct sniff URL" do
+        if ESHelper.es_version_satisfies?(">= 2")
+          # We do a more thorough check on these versions because we can more reliably guess the ip
+          uris = subject.check_sniff
+
+          expect(uris).to include(::LogStash::Util::SafeURI.new("//#{get_host_port}"))
+        else
+          # ES 1.x returned the public hostname by default. This is hard to approximate
+          # so for ES1.x we don't check the *exact* hostname
+          skip
+        end
+      end
     end
   end
-
-
-
+
+  if ESHelper.es_version_satisfies?("<= 2")
+    describe("Complex sniff parsing ES 2x/1x") do
+      before(:each) do
+        response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/2x_1x.json"))
+        allow(subject).to receive(:perform_request).and_return([nil, { version: "2.0" }, response_double])
+        subject.start
+      end
+
+      context "with multiple nodes but single http-enabled data node" do
+        it "should execute a sniff without error" do
+          expect { subject.check_sniff }.not_to raise_error
+        end
+
+        it "should return one sniff URL" do
+          uris = subject.check_sniff
+
+          expect(uris.size).to eq(1)
+        end
+
+        it "should return the correct sniff URL" do
+          if ESHelper.es_version_satisfies?(">= 2")
+            # We do a more thorough check on these versions because we can more reliably guess the ip
+            uris = subject.check_sniff
+
+            expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"))
+          else
+            # ES 1.x returned the public hostname by default. This is hard to approximate
+            # so for ES1.x we don't check the *exact* hostname
+            skip
+          end
+        end
+      end
+    end
   end
-
-  # We do a more thorough check on these versions because we can more reliably guess the ip
 
-  if ESHelper.es_version_satisfies?(">=
-  describe("Complex sniff parsing ES 6x/5x
-
+  if ESHelper.es_version_satisfies?(">= 5")
+    describe("Complex sniff parsing ES 6x/5x") do
+      before(:each) do
+        response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_and_above.json"))
+        allow(subject).to receive(:perform_request).and_return([nil, { version: "5.0" }, response_double])
+        subject.start
+      end
+
+      context "with mixed master-only, data-only, and data + master nodes" do
+        it "should execute a sniff without error" do
+          expect { subject.check_sniff }.not_to raise_error
+        end
+
+        it "should return the correct sniff URLs" do
+          # ie. without the master-only node
+          uris = subject.check_sniff
+
+          expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"), ::LogStash::Util::SafeURI.new("http://localhost:9202"), ::LogStash::Util::SafeURI.new("http://localhost:9203"))
+        end
+      end
     end
   end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 9.0.2
+  version: 9.0.3
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2018-02-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -217,6 +217,7 @@ files:
 - lib/logstash/outputs/elasticsearch/elasticsearch-template-es2x.json
 - lib/logstash/outputs/elasticsearch/elasticsearch-template-es5x.json
 - lib/logstash/outputs/elasticsearch/elasticsearch-template-es6x.json
+- lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json
 - lib/logstash/outputs/elasticsearch/http_client.rb
 - lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
 - lib/logstash/outputs/elasticsearch/http_client/pool.rb
@@ -224,6 +225,8 @@ files:
 - lib/logstash/outputs/elasticsearch/template_manager.rb
 - logstash-output-elasticsearch.gemspec
 - spec/es_spec_helper.rb
+- spec/fixtures/_nodes/2x_1x.json
+- spec/fixtures/_nodes/5x_and_above.json
 - spec/fixtures/htpasswd
 - spec/fixtures/nginx_reverse_proxy.conf
 - spec/fixtures/scripts/groovy/scripted_update.groovy
@@ -288,6 +291,8 @@ specification_version: 4
 summary: Stores logs in Elasticsearch
 test_files:
 - spec/es_spec_helper.rb
+- spec/fixtures/_nodes/2x_1x.json
+- spec/fixtures/_nodes/5x_and_above.json
 - spec/fixtures/htpasswd
 - spec/fixtures/nginx_reverse_proxy.conf
 - spec/fixtures/scripts/groovy/scripted_update.groovy