logstash-output-elasticsearch 9.3.1-java → 9.3.2-java

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d94e36b467260ee9b470252edef5d67e3d1915a1f1e5d3c7edac1dec879de06d
- data.tar.gz: 65a256a1b84f2f1ae8de8915423cb3c7e32a756428d2f1110fcaa36e9175181e
+ metadata.gz: 4ae86a048dbbbc543991c0f0940002f81c13ef3619a3ab0dcfbd73030b4992be
+ data.tar.gz: 8ccb17687bc962888d4a0f2eb94d62a29f27ec7460078deb98f4a789ef9ac13f
  SHA512:
- metadata.gz: 19f7d8f5ac61a23caf78d0fa919e7a99b35b491a505620f00f3699a5112a38d64c0599f4db543fd9a90e51286393db1ce8e980d5bec0e49ca3d14f7218657def
- data.tar.gz: ab59d63c93734f63f33d3750bdbd873976f637c57155787c2ebb0aef2459573ba07d74bc85deff9242dfb2bfb144080e56e721117f55fa626beb30b75ddfe126
+ metadata.gz: 89be3b3bf2a12d130ae7d6c8da1f9961d827b12814901c180a9caa966ca15bb7b8ae2a1df0b79906edd48edaca7888ed9dd8f6c132a5b52170641a777fcd0e54
+ data.tar.gz: 890f65b47189d95c7087095e7f1a0cfad288499428de0002cd260f40a67788b43a9f4b7c6a643d9428ee25cd5e326fe336191bda6e674f5eea264b21f413c97f
@@ -1,3 +1,6 @@
+ ## 9.3.2
+ - Fixed sniffing support for 7.x [#827](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/827)
+
  ## 9.3.1
  - Fixed issue with escaping index names which was causing writing aliases for ILM to fail [#831](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/831)

@@ -1,6 +1,6 @@
  :plugin: elasticsearch
  :type: output
- :default_codec: plain
+ :no_codec:

  ///////////////////////////////////////////
  START - GENERATED VARIABLES, DO NOT EDIT!
@@ -98,6 +98,9 @@ happens, the problem is logged as a warning, and the event is dropped. See

  beta[]

+ [NOTE]
+ The Index Lifecycle Management feature requires plugin version `9.3.1` or higher.
+
  [NOTE]
  This feature requires an Elasticsearch instance of 6.6.0 or higher with at least a Basic license

@@ -814,4 +817,4 @@ See also https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-in
  [id="plugins-{type}s-{plugin}-common-options"]
  include::{include_path}/{type}.asciidoc[]

- :default_codec!:
+ :no_codec!:
@@ -1,44 +1,42 @@
  {
- "template" : "logstash-*",
+ "index_patterns" : "logstash-*",
  "version" : 60001,
  "settings" : {
  "index.refresh_interval" : "5s",
  "number_of_shards": 1
  },
  "mappings" : {
- "_doc" : {
- "dynamic_templates" : [ {
- "message_field" : {
- "path_match" : "message",
- "match_mapping_type" : "string",
- "mapping" : {
- "type" : "text",
- "norms" : false
- }
+ "dynamic_templates" : [ {
+ "message_field" : {
+ "path_match" : "message",
+ "match_mapping_type" : "string",
+ "mapping" : {
+ "type" : "text",
+ "norms" : false
  }
- }, {
- "string_fields" : {
- "match" : "*",
- "match_mapping_type" : "string",
- "mapping" : {
- "type" : "text", "norms" : false,
- "fields" : {
- "keyword" : { "type": "keyword", "ignore_above": 256 }
- }
+ }
+ }, {
+ "string_fields" : {
+ "match" : "*",
+ "match_mapping_type" : "string",
+ "mapping" : {
+ "type" : "text", "norms" : false,
+ "fields" : {
+ "keyword" : { "type": "keyword", "ignore_above": 256 }
  }
  }
- } ],
- "properties" : {
- "@timestamp": { "type": "date"},
- "@version": { "type": "keyword"},
- "geoip" : {
- "dynamic": true,
- "properties" : {
- "ip": { "type": "ip" },
- "location" : { "type" : "geo_point" },
- "latitude" : { "type" : "half_float" },
- "longitude" : { "type" : "half_float" }
- }
+ }
+ } ],
+ "properties" : {
+ "@timestamp": { "type": "date"},
+ "@version": { "type": "keyword"},
+ "geoip" : {
+ "dynamic": true,
+ "properties" : {
+ "ip": { "type": "ip" },
+ "location" : { "type" : "geo_point" },
+ "latitude" : { "type" : "half_float" },
+ "longitude" : { "type" : "half_float" }
  }
  }
  }
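For context on the template change above: Elasticsearch 6.0 replaced the "template" key with "index_patterns", and 7.0 removes mapping types, which lines up with dropping the "_doc" wrapper and flattening the "mappings" block here. The sketch below is illustrative only, not the plugin's own template installer; the file path and cluster URL are assumptions for a local test setup. It PUTs a template shaped like the one above to Elasticsearch's legacy _template endpoint.

    # Illustrative sketch only (not the plugin's template management code):
    # push a template like the JSON above to the legacy _template endpoint.
    require "json"
    require "net/http"
    require "uri"

    template = JSON.parse(File.read("elasticsearch-template-es6x.json"))  # hypothetical local copy
    uri = URI("http://localhost:9200/_template/logstash")                 # assumed local cluster

    request = Net::HTTP::Put.new(uri, "Content-Type" => "application/json")
    request.body = JSON.generate(template)
    response = Net::HTTP.start(uri.host, uri.port) { |http| http.request(request) }
    puts "#{response.code} #{response.body}"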
@@ -158,7 +158,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  end

  ES1_SNIFF_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
- ES2_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
+ ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
  # Sniffs and returns the results. Does not update internal URLs!
  def check_sniff
  _, url_meta, resp = perform_request(:get, @sniffing_path)
@@ -189,13 +189,19 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  nodes.map do |id,info|
  # Skip master-only nodes
  next if info["roles"] && info["roles"] == ["master"]
-
- if info["http"]
- uri = LogStash::Util::SafeURI.new(info["http"]["publish_address"])
- end
+ address_str_to_uri(info["http"]["publish_address"]) if info["http"]
  end.compact
  end
-
+
+ def address_str_to_uri(addr_str)
+ matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
+ if matches
+ host = matches[1].empty? ? matches[2] : matches[1]
+ ::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
+ end
+ end
+
+
  def sniff_2x_1x(nodes)
  nodes.map do |id,info|
  # TODO Make sure this works with shield. Does that listed
@@ -210,13 +216,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  # with master + data + client enabled, so we allow that
  attributes = info['attributes']
  next if attributes && attributes['data'] == 'false'
-
- matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_SNIFF_RE_URL)
- if matches
- host = matches[1].empty? ? matches[2] : matches[1]
- port = matches[3]
- ::LogStash::Util::SafeURI.new("#{host}:#{port}")
- end
+ address_str_to_uri(addr_str)
  end.compact
  end

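The refactor above pulls publish_address parsing into the new address_str_to_uri helper, which is what the 7.x sniffing fix hinges on: ES 1.x publishes addresses as inet[host/ip:port], ES 2.x-6.x as plain ip:port, and ES 7.x as host/ip:port with the hostname ahead of the IP (the formats exercised by the unit spec later in this diff). Below is a minimal standalone sketch of that parsing, reusing the two regexes from the hunk; host_and_port is a hypothetical stand-in and nothing from the plugin is loaded.

    # Mirrors the parsing done by address_str_to_uri above, without the plugin.
    ES1_SNIFF_RE_URL           = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
    ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/

    def host_and_port(addr_str)
      matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
      return unless matches
      host = matches[1].empty? ? matches[2] : matches[1]  # prefer the hostname when present
      "#{host}:#{matches[3]}"
    end

    host_and_port("inet[dev-master/127.0.0.1:9200]")   # ES 1.x  => "dev-master:9200"
    host_and_port("127.0.0.1:9201")                    # ES 2-6  => "127.0.0.1:9201"
    host_and_port("dev-masterdata/127.0.0.1:9201")     # ES 7.x  => "dev-masterdata:9201"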
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-elasticsearch'
- s.version = '9.3.1'
+ s.version = '9.3.2'
  s.licenses = ['apache-2.0']
  s.summary = "Stores logs in Elasticsearch"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -20,7 +20,7 @@
  "bound_address" : [
  "[::]:9200"
  ],
- "publish_address" : "http://localhost:9200",
+ "publish_address" : "127.0.0.1:9200",
  "max_content_length_in_bytes" : 104857600
  }
  },
@@ -38,7 +38,7 @@
  "bound_address" : [
  "[::]:9200"
  ],
- "publish_address" : "http://localhost:9201",
+ "publish_address" : "127.0.0.1:9201",
  "max_content_length_in_bytes" : 104857600
  }
  },
@@ -57,7 +57,7 @@
  "bound_address" : [
  "[::]:9200"
  ],
- "publish_address" : "http://localhost:9202",
+ "publish_address" : "127.0.0.1:9202",
  "max_content_length_in_bytes" : 104857600
  }
  },
@@ -73,7 +73,7 @@
  "bound_address" : [
  "[::]:9200"
  ],
- "publish_address" : "http://localhost:9203",
+ "publish_address" : "127.0.0.1:9203",
  "max_content_length_in_bytes" : 104857600
  }
  }
@@ -0,0 +1,92 @@
+ {
+ "_nodes" : {
+ "total" : 3,
+ "successful" : 3,
+ "failed" : 0
+ },
+ "cluster_name" : "elasticsearch",
+ "nodes" : {
+ "kVPTh7ZvSgWmTRMy-4YExQ" : {
+ "name" : "kVPTh7Z",
+ "transport_address" : "127.0.0.1:9300",
+ "host" : "dev-master",
+ "ip" : "127.0.0.1",
+ "version" : "7.0.0",
+ "build_flavor" : "default",
+ "build_type" : "tar",
+ "build_hash" : "b0e7036",
+ "roles" : [
+ "master"
+ ],
+ "attributes" : {
+ "ml.machine_memory" : "17179869184",
+ "xpack.installed" : "true",
+ "ml.max_open_jobs" : "20",
+ "ml.enabled" : "true"
+ },
+ "http" : {
+ "bound_address" : [
+ "127.0.0.1:9200",
+ "[::1]:9200"
+ ],
+ "publish_address" : "dev-master/127.0.0.1:9200",
+ "max_content_length_in_bytes" : 104857600
+ }
+ },
+ "J47OFlfpSHGFwRJSF2hbcg" : {
+ "name" : "J47OFlf",
+ "transport_address" : "127.0.0.1:9301",
+ "host" : "dev-masterdata",
+ "ip" : "127.0.0.1",
+ "version" : "7.0.0",
+ "build_flavor" : "default",
+ "build_type" : "tar",
+ "build_hash" : "b0e7036",
+ "roles" : [
+ "master",
+ "data"
+ ],
+ "attributes" : {
+ "ml.machine_memory" : "17179869184",
+ "ml.max_open_jobs" : "20",
+ "xpack.installed" : "true",
+ "ml.enabled" : "true"
+ },
+ "http" : {
+ "bound_address" : [
+ "127.0.0.1:9201",
+ "[::1]:9201"
+ ],
+ "publish_address" : "dev-masterdata/127.0.0.1:9201",
+ "max_content_length_in_bytes" : 104857600
+ }
+ },
+ "pDYE99f0QmutVb8gvsf-yw" : {
+ "name" : "pDYE99f",
+ "transport_address" : "127.0.0.1:9302",
+ "host" : "dev-data",
+ "ip" : "127.0.0.1",
+ "version" : "7.0.0",
+ "build_flavor" : "default",
+ "build_type" : "tar",
+ "build_hash" : "b0e7036",
+ "roles" : [
+ "data"
+ ],
+ "attributes" : {
+ "ml.machine_memory" : "17179869184",
+ "ml.max_open_jobs" : "20",
+ "xpack.installed" : "true",
+ "ml.enabled" : "true"
+ },
+ "http" : {
+ "bound_address" : [
+ "127.0.0.1:9202",
+ "[::1]:9202"
+ ],
+ "publish_address" : "dev-data/127.0.0.1:9202",
+ "max_content_length_in_bytes" : 104857600
+ }
+ }
+ }
+ }
@@ -32,14 +32,14 @@ describe "pool sniffer", :integration => true do
  end

  it "should return the correct sniff URL" do
- if ESHelper.es_version_satisfies?(">= 2")
+ if ESHelper.es_version_satisfies?(">= 2", "<7")
  # We do a more thorough check on these versions because we can more reliably guess the ip
  uris = subject.check_sniff

  expect(uris).to include(::LogStash::Util::SafeURI.new("//#{get_host_port}"))
  else
- # ES 1.x returned the public hostname by default. This is hard to approximate
- # so for ES1.x we don't check the *exact* hostname
+ # ES 1.x (and ES 7.x) returned the public hostname by default. This is hard to approximate
+ # so for ES1.x and 7.x we don't check the *exact* hostname
  skip
  end
  end
@@ -81,10 +81,33 @@ describe "pool sniffer", :integration => true do
  end
  end

+
+ if ESHelper.es_version_satisfies?(">= 7")
+ describe("Complex sniff parsing ES 7x") do
+ before(:each) do
+ response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/7x.json"))
+ allow(subject).to receive(:perform_request).and_return([nil, { version: "7.0" }, response_double])
+ subject.start
+ end
+
+ context "with mixed master-only, data-only, and data + master nodes" do
+ it "should execute a sniff without error" do
+ expect { subject.check_sniff }.not_to raise_error
+ end
+
+ it "should return the correct sniff URLs" do
+ # ie. with the master-only node, and with the node name correctly set.
+ uris = subject.check_sniff
+
+ expect(uris).to include(::LogStash::Util::SafeURI.new("//dev-masterdata:9201"), ::LogStash::Util::SafeURI.new("//dev-data:9202"))
+ end
+ end
+ end
+ end
  if ESHelper.es_version_satisfies?(">= 5")
  describe("Complex sniff parsing ES 6x/5x") do
  before(:each) do
- response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_and_above.json"))
+ response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_6x.json"))
  allow(subject).to receive(:perform_request).and_return([nil, { version: "5.0" }, response_double])
  subject.start
  end
@@ -98,7 +121,7 @@ describe "pool sniffer", :integration => true do
  # ie. without the master-only node
  uris = subject.check_sniff

- expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"), ::LogStash::Util::SafeURI.new("http://localhost:9202"), ::LogStash::Util::SafeURI.new("http://localhost:9203"))
+ expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
  end
  end
  end
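The new ES 7.x example group above runs check_sniff against the 7x fixture and expects the dedicated master node to be dropped while node hostnames (dev-masterdata, dev-data) are used rather than raw IPs. A small standalone sketch of that filtering step, simplified from the sniffing hunk earlier in this diff and assuming it is run from the gem root so the spec's fixture path resolves:

    # Illustrative only: "skip master-only nodes, keep publish_address",
    # applied directly to the new fixture without loading the plugin.
    require "json"

    nodes = JSON.parse(File.read("spec/fixtures/_nodes/7x.json"))["nodes"]

    addresses = nodes.map do |_id, info|
      next if info["roles"] == ["master"]    # dedicated master is skipped
      info["http"]["publish_address"]        # e.g. "dev-masterdata/127.0.0.1:9201"
    end.compact

    p addresses  # => ["dev-masterdata/127.0.0.1:9201", "dev-data/127.0.0.1:9202"]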
@@ -76,6 +76,48 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  end
  end

+ describe 'resolving the address from Elasticsearch node info' do
+ let(:host) { "node.elastic.co"}
+ let(:ip_address) { "192.168.1.0"}
+ let(:port) { 9200 }
+
+ context 'in Elasticsearch 1.x format' do
+ context 'with host and ip address' do
+ let(:publish_address) { "inet[#{host}/#{ip_address}:#{port}]"}
+ it 'should correctly extract the host' do
+ expect(subject.address_str_to_uri(publish_address)).to eq (LogStash::Util::SafeURI.new("#{host}:#{port}"))
+ end
+ end
+ context 'with ip address' do
+ let(:publish_address) { "inet[/#{ip_address}:#{port}]"}
+ it 'should correctly extract the ip address' do
+ expect(subject.address_str_to_uri(publish_address)).to eq (LogStash::Util::SafeURI.new("#{ip_address}:#{port}"))
+ end
+ end
+ end
+
+ context 'in Elasticsearch 2.x-6.x format' do
+ let(:publish_address) { "#{ip_address}:#{port}"}
+ it 'should correctly extract the ip address' do
+ expect(subject.address_str_to_uri(publish_address)).to eq (LogStash::Util::SafeURI.new("//#{ip_address}:#{port}"))
+ end
+ end
+
+ context 'in Elasticsearch 7.x'
+ context 'with host and ip address' do
+ let(:publish_address) { "#{host}/#{ip_address}:#{port}"}
+ it 'should correctly extract the host' do
+ expect(subject.address_str_to_uri(publish_address)).to eq (LogStash::Util::SafeURI.new("#{host}:#{port}"))
+ end
+ end
+ context 'with ip address' do
+ let(:publish_address) { "#{ip_address}:#{port}"}
+ it 'should correctly extract the ip address' do
+ expect(subject.address_str_to_uri(publish_address)).to eq (LogStash::Util::SafeURI.new("#{ip_address}:#{port}"))
+ end
+ end
+ end
+
  describe "the sniffer" do
  before(:each) { subject.start }
  it "should not start the sniffer by default" do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-elasticsearch
  version: !ruby/object:Gem::Version
- version: 9.3.1
+ version: 9.3.2
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-01-29 00:00:00.000000000 Z
+ date: 2019-02-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -172,7 +172,8 @@ files:
  - logstash-output-elasticsearch.gemspec
  - spec/es_spec_helper.rb
  - spec/fixtures/_nodes/2x_1x.json
- - spec/fixtures/_nodes/5x_and_above.json
+ - spec/fixtures/_nodes/5x_6x.json
+ - spec/fixtures/_nodes/7x.json
  - spec/fixtures/htpasswd
  - spec/fixtures/nginx_reverse_proxy.conf
  - spec/fixtures/scripts/groovy/scripted_update.groovy
@@ -248,7 +249,8 @@ summary: Stores logs in Elasticsearch
  test_files:
  - spec/es_spec_helper.rb
  - spec/fixtures/_nodes/2x_1x.json
- - spec/fixtures/_nodes/5x_and_above.json
+ - spec/fixtures/_nodes/5x_6x.json
+ - spec/fixtures/_nodes/7x.json
  - spec/fixtures/htpasswd
  - spec/fixtures/nginx_reverse_proxy.conf
  - spec/fixtures/scripts/groovy/scripted_update.groovy