logstash-output-elasticsearch 11.0.2-java → 11.1.0-java

Files changed (43)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +12 -0
  3. data/docs/index.asciidoc +11 -11
  4. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +2 -28
  5. data/lib/logstash/outputs/elasticsearch/http_client.rb +19 -0
  6. data/lib/logstash/outputs/elasticsearch/ilm.rb +2 -33
  7. data/lib/logstash/outputs/elasticsearch/template_manager.rb +1 -1
  8. data/lib/logstash/outputs/elasticsearch.rb +3 -14
  9. data/lib/logstash/plugin_mixins/elasticsearch/common.rb +5 -1
  10. data/logstash-output-elasticsearch.gemspec +3 -2
  11. data/spec/es_spec_helper.rb +14 -7
  12. data/spec/fixtures/_nodes/{5x_6x.json → 6x.json} +5 -5
  13. data/spec/integration/outputs/compressed_indexing_spec.rb +47 -46
  14. data/spec/integration/outputs/delete_spec.rb +49 -51
  15. data/spec/integration/outputs/ilm_spec.rb +230 -246
  16. data/spec/integration/outputs/index_spec.rb +5 -2
  17. data/spec/integration/outputs/index_version_spec.rb +78 -82
  18. data/spec/integration/outputs/ingest_pipeline_spec.rb +58 -60
  19. data/spec/integration/outputs/no_es_on_startup_spec.rb +14 -0
  20. data/spec/integration/outputs/painless_update_spec.rb +74 -164
  21. data/spec/integration/outputs/parent_spec.rb +67 -75
  22. data/spec/integration/outputs/retry_spec.rb +2 -2
  23. data/spec/integration/outputs/sniffer_spec.rb +15 -53
  24. data/spec/integration/outputs/templates_spec.rb +79 -81
  25. data/spec/integration/outputs/update_spec.rb +99 -101
  26. data/spec/spec_helper.rb +1 -5
  27. data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +0 -14
  28. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +30 -37
  29. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +69 -0
  30. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +9 -9
  31. data/spec/unit/outputs/elasticsearch_spec.rb +2 -8
  32. data/spec/unit/outputs/error_whitelist_spec.rb +1 -0
  33. metadata +21 -24
  34. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json +0 -95
  35. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json +0 -46
  36. data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-6x.json +0 -2950
  37. data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-7x.json +0 -2948
  38. data/spec/fixtures/_nodes/2x_1x.json +0 -27
  39. data/spec/fixtures/scripts/groovy/scripted_update.groovy +0 -2
  40. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +0 -2
  41. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +0 -2
  42. data/spec/integration/outputs/groovy_update_spec.rb +0 -150
  43. data/spec/integration/outputs/templates_5x_spec.rb +0 -98
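A recurring theme in the spec diffs below is that version guards such as `ESHelper.es_version_satisfies?(">= 6", "< 7")` collapse to `es_version_satisfies?("< 7")`, and the Elasticsearch 1.x/2.x/5.x branches and fixtures disappear entirely, since this release only targets ES 6.x and newer. As a rough illustration of what such a guard does — a hedged sketch, not the plugin's actual `ESHelper` implementation in `spec/es_spec_helper.rb`, and the `ES_VERSION` environment variable is an assumption — a requirement-based check can be written like this:

```ruby
# Illustrative only: a minimal version gate in the spirit of
# ESHelper.es_version_satisfies?. Assumes the target Elasticsearch version
# is exposed via ES_VERSION; the real helper in spec/es_spec_helper.rb may
# obtain the version differently.
require "rubygems"

module VersionGateSketch
  def self.es_version_satisfies?(*requirements)
    es_version = ENV["ES_VERSION"] # e.g. "7.10.1" (assumed source of truth)
    return false if es_version.nil?

    # Gem::Requirement accepts one or more constraint strings ("< 7", ">= 6.8").
    Gem::Requirement.new(*requirements).satisfied_by?(Gem::Version.new(es_version))
  end
end

# Example: legacy expectations are skipped on ES 7+
# VersionGateSketch.es_version_satisfies?("< 7") #=> false when ES_VERSION=7.10.1
```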

data/spec/integration/outputs/parent_spec.rb
@@ -1,102 +1,94 @@
 require_relative "../../../spec/es_spec_helper"
 require "logstash/outputs/elasticsearch"
 
-if ESHelper.es_version_satisfies?(">= 5.6")
-  context "when using elasticsearch 5.6 and above", :integration => true do
+describe "join type field", :integration => true do
 
-    shared_examples "a join field based parent indexer" do
-      let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  shared_examples "a join field based parent indexer" do
+    let(:index) { 10.times.collect { rand(10).to_s }.join("") }
 
-      let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+    let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
 
-      let(:event_count) { 10000 + rand(500) }
-      let(:parent) { "not_implemented" }
-      let(:config) { "not_implemented" }
-      let(:parent_id) { "test" }
-      let(:join_field) { "join_field" }
-      let(:parent_relation) { "parent_type" }
-      let(:child_relation) { "child_type" }
-      let(:default_headers) {
-        {"Content-Type" => "application/json"}
-      }
-      subject { LogStash::Outputs::ElasticSearch.new(config) }
+    let(:event_count) { 10000 + rand(500) }
+    let(:parent) { "not_implemented" }
+    let(:config) { "not_implemented" }
+    let(:parent_id) { "test" }
+    let(:join_field) { "join_field" }
+    let(:parent_relation) { "parent_type" }
+    let(:child_relation) { "child_type" }
+    let(:default_headers) {
+      {"Content-Type" => "application/json"}
+    }
+    subject { LogStash::Outputs::ElasticSearch.new(config) }
 
-      before do
-        # Add mapping and a parent document
-        index_url = "http://#{get_host_port()}/#{index}"
+    before do
+      # Add mapping and a parent document
+      index_url = "http://#{get_host_port()}/#{index}"
 
-        properties = {
-          "properties" => {
-            join_field => {
-              "type" => "join",
-              "relations" => { parent_relation => child_relation }
-            }
+      properties = {
+        "properties" => {
+          join_field => {
+            "type" => "join",
+            "relations" => { parent_relation => child_relation }
           }
         }
+      }
 
-        mapping = ESHelper.es_version_satisfies?('<7') ? { "mappings" => { type => properties } }
-                                                       : { "mappings" => properties}
+      mapping = ESHelper.es_version_satisfies?('<7') ? { "mappings" => { type => properties } }
+                                                     : { "mappings" => properties}
 
-        if ESHelper.es_version_satisfies?('<6')
-          mapping.merge!({
-            "settings" => {
-              "mapping.single_type" => true
-            }})
-        end
-        Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
-        pdoc = { "message" => "ohayo", join_field => parent_relation }
-        Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
+      Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
+      pdoc = { "message" => "ohayo", join_field => parent_relation }
+      Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
 
-        subject.register
-        subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
-      end
+      subject.register
+      subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
+    end
 
 
-      it "ships events" do
-        index_url = "http://#{get_host_port()}/#{index}"
+    it "ships events" do
+      index_url = "http://#{get_host_port()}/#{index}"
 
-        Manticore.post("#{index_url}/_refresh").call
+      Manticore.post("#{index_url}/_refresh").call
 
-        # Wait until all events are available.
-        Stud::try(10.times) do
-          query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
-          response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
-          data = response.body
-          result = LogStash::Json.load(data)
-          cur_count = result["count"]
-          expect(cur_count).to eq(event_count)
-        end
+      # Wait until all events are available.
+      Stud::try(10.times) do
+        query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
+        response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
+        data = response.body
+        result = LogStash::Json.load(data)
+        cur_count = result["count"]
+        expect(cur_count).to eq(event_count)
       end
     end
+  end
 
-    describe "(http protocol) index events with static parent" do
-      it_behaves_like 'a join field based parent indexer' do
-        let(:config) {
-          {
-            "hosts" => get_host_port,
-            "index" => index,
-            "parent" => parent_id,
-            "document_type" => type,
-            "join_field" => join_field,
-            "manage_template" => false
-          }
+  describe "(http protocol) index events with static parent" do
+    it_behaves_like 'a join field based parent indexer' do
+      let(:config) {
+        {
+          "hosts" => get_host_port,
+          "index" => index,
+          "parent" => parent_id,
+          "document_type" => type,
+          "join_field" => join_field,
+          "manage_template" => false
+        }
         }
-      end
+      }
     end
+  end
 
-    describe "(http_protocol) index events with fieldref in parent value" do
-      it_behaves_like 'a join field based parent indexer' do
-        let(:config) {
-          {
-            "hosts" => get_host_port,
-            "index" => index,
-            "parent" => "%{link_to}",
-            "document_type" => type,
-            "join_field" => join_field,
-            "manage_template" => false
-          }
+  describe "(http_protocol) index events with fieldref in parent value" do
+    it_behaves_like 'a join field based parent indexer' do
+      let(:config) {
+        {
+          "hosts" => get_host_port,
+          "index" => index,
+          "parent" => "%{link_to}",
+          "document_type" => type,
+          "join_field" => join_field,
+          "manage_template" => false
+        }
         }
-      end
+      }
     end
   end
 end

data/spec/integration/outputs/retry_spec.rb
@@ -5,7 +5,7 @@ describe "failures in bulk class expected behavior", :integration => true do
   let(:template) { '{"template" : "not important, will be updated by :index"}' }
   let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
   let(:action1) do
-    if ESHelper.es_version_satisfies?(">= 6", "< 7")
+    if ESHelper.es_version_satisfies?("< 7")
       ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1.to_hash])
     else
       ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event1.to_hash])
@@ -13,7 +13,7 @@ describe "failures in bulk class expected behavior", :integration => true do
   end
   let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
   let(:action2) do
-    if ESHelper.es_version_satisfies?(">= 6", "< 7")
+    if ESHelper.es_version_satisfies?("< 7")
       ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event2.to_hash])
     else
       ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event2.to_hash])

data/spec/integration/outputs/sniffer_spec.rb
@@ -35,7 +35,7 @@ describe "pool sniffer", :integration => true do
   end
 
   it "should return the correct sniff URL" do
-    if ESHelper.es_version_satisfies?(">= 2", "<7")
+    if ESHelper.es_version_satisfies?("<7")
       # We do a more thorough check on these versions because we can more reliably guess the ip
       uris = subject.check_sniff
 
@@ -49,42 +49,6 @@
     end
   end
 
-  if ESHelper.es_version_satisfies?("<= 2")
-    describe("Complex sniff parsing ES 2x/1x") do
-      before(:each) do
-        response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/2x_1x.json"))
-        allow(subject).to receive(:perform_request).and_return([nil, { version: "2.0" }, response_double])
-        subject.start
-      end
-
-      context "with multiple nodes but single http-enabled data node" do
-        it "should execute a sniff without error" do
-          expect { subject.check_sniff }.not_to raise_error
-        end
-
-        it "should return one sniff URL" do
-          uris = subject.check_sniff
-
-          expect(uris.size).to eq(1)
-        end
-
-        it "should return the correct sniff URL" do
-          if ESHelper.es_version_satisfies?(">= 2")
-            # We do a more thorough check on these versions because we can more reliably guess the ip
-            uris = subject.check_sniff
-
-            expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"))
-          else
-            # ES 1.x returned the public hostname by default. This is hard to approximate
-            # so for ES1.x we don't check the *exact* hostname
-            skip
-          end
-        end
-      end
-    end
-  end
-
-
   if ESHelper.es_version_satisfies?(">= 7")
     describe("Complex sniff parsing ES 7x") do
       before(:each) do
@@ -107,25 +71,23 @@
       end
     end
   end
-  if ESHelper.es_version_satisfies?(">= 5")
-    describe("Complex sniff parsing ES 6x/5x") do
-      before(:each) do
-        response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_6x.json"))
-        allow(subject).to receive(:perform_request).and_return([nil, { version: "5.0" }, response_double])
-        subject.start
-      end
+  describe("Complex sniff parsing ES") do
+    before(:each) do
+      response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/6x.json"))
+      allow(subject).to receive(:perform_request).and_return([nil, { version: "6.8" }, response_double])
+      subject.start
+    end
 
-      context "with mixed master-only, data-only, and data + master nodes" do
-        it "should execute a sniff without error" do
-          expect { subject.check_sniff }.not_to raise_error
-        end
+    context "with mixed master-only, data-only, and data + master nodes" do
+      it "should execute a sniff without error" do
+        expect { subject.check_sniff }.not_to raise_error
+      end
 
-        it "should return the correct sniff URLs" do
-          # ie. without the master-only node
-          uris = subject.check_sniff
+      it "should return the correct sniff URLs" do
+        # ie. without the master-only node
+        uris = subject.check_sniff
 
-          expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
-        end
+        expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
       end
     end
   end
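
The sniffer specs above stub `perform_request` with a canned `_nodes/http` response (now the single `6x.json` fixture) and expect the pool to surface only nodes that can hold data, skipping the master-only node. For orientation, the response shape such a fixture models looks roughly like the following; this is an illustrative sketch of the ES `GET _nodes/http` output, not the literal contents of `spec/fixtures/_nodes/6x.json`:

```ruby
# Illustrative only: rough shape of an ES 6.x+ `GET _nodes/http` response.
# The sniffer builds its URL list from each node's http.publish_address and,
# per the spec above, leaves out master-only nodes (hence 9201-9203 expected).
require "json"

nodes_response = {
  "nodes" => {
    "master-only"     => { "roles" => ["master"],           "http" => { "publish_address" => "127.0.0.1:9200" } },
    "data-only"       => { "roles" => ["data"],             "http" => { "publish_address" => "127.0.0.1:9201" } },
    "data-and-master" => { "roles" => ["master", "data"],   "http" => { "publish_address" => "127.0.0.1:9202" } },
    "data-and-ingest" => { "roles" => ["data", "ingest"],   "http" => { "publish_address" => "127.0.0.1:9203" } }
  }
}

puts JSON.pretty_generate(nodes_response)
```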

data/spec/integration/outputs/templates_spec.rb
@@ -1,98 +1,96 @@
 require_relative "../../../spec/es_spec_helper"
 
-if ESHelper.es_version_satisfies?("< 5")
-  describe "index template expected behavior", :integration => true do
-    subject! do
-      require "logstash/outputs/elasticsearch"
-      settings = {
-        "manage_template" => true,
-        "template_overwrite" => true,
-        "hosts" => "#{get_host_port()}"
-      }
-      next LogStash::Outputs::ElasticSearch.new(settings)
-    end
+describe "index template expected behavior", :integration => true do
+  subject! do
+    require "logstash/outputs/elasticsearch"
+    settings = {
+      "manage_template" => true,
+      "template_overwrite" => true,
+      "hosts" => "#{get_host_port()}"
+    }
+    next LogStash::Outputs::ElasticSearch.new(settings)
+  end
 
-    before :each do
-      # Delete all templates first.
-      require "elasticsearch"
-
-      # Clean ES of data before we start.
-      @es = get_client
-      @es.indices.delete_template(:name => "*")
-
-      # This can fail if there are no indexes, ignore failure.
-      @es.indices.delete(:index => "*") rescue nil
-
-      subject.register
-
-      subject.multi_receive([
-        LogStash::Event.new("message" => "sample message here"),
-        LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
-        LogStash::Event.new("somevalue" => 100),
-        LogStash::Event.new("somevalue" => 10),
-        LogStash::Event.new("somevalue" => 1),
-        LogStash::Event.new("country" => "us"),
-        LogStash::Event.new("country" => "at"),
-        LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
-      ])
-
-      @es.indices.refresh
-
-      # Wait or fail until everything's indexed.
-      Stud::try(20.times) do
-        r = @es.search(index: 'logstash-*')
-        expect(r).to have_hits(8)
-      end
-    end
+  before :each do
+    # Delete all templates first.
+    require "elasticsearch"
 
-    it "permits phrase searching on string fields" do
-      results = @es.search(:q => "message:\"sample message\"")
-      expect(results).to have_hits(1)
-      expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
-    end
+    # Clean ES of data before we start.
+    @es = get_client
+    @es.indices.delete_template(:name => "*")
 
-    it "numbers dynamically map to a numeric type and permit range queries" do
-      results = @es.search(:q => "somevalue:[5 TO 105]")
-      expect(results).to have_hits(2)
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
 
-      values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
-      expect(values).to include(10)
-      expect(values).to include(100)
-      expect(values).to_not include(1)
-    end
+    subject.register
 
-    it "does not create .raw field for the message field" do
-      results = @es.search(:q => "message.raw:\"sample message here\"")
-      expect(results).to have_hits(0)
-    end
+    subject.multi_receive([
+      LogStash::Event.new("message" => "sample message here"),
+      LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+      LogStash::Event.new("somevalue" => 100),
+      LogStash::Event.new("somevalue" => 10),
+      LogStash::Event.new("somevalue" => 1),
+      LogStash::Event.new("country" => "us"),
+      LogStash::Event.new("country" => "at"),
+      LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+    ])
 
-    it "creates .raw field for nested message fields" do
-      results = @es.search(:q => "somemessage.message.raw:\"sample nested message here\"")
-      expect(results).to have_hits(1)
+    @es.indices.refresh
+
+    # Wait or fail until everything's indexed.
+    Stud::try(20.times) do
+      r = @es.search(index: 'logstash-*')
+      expect(r).to have_hits(8)
     end
+  end
 
-    it "creates .raw field from any string field which is not_analyzed" do
-      results = @es.search(:q => "country.raw:\"us\"")
-      expect(results).to have_hits(1)
-      expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
+  it "permits phrase searching on string fields" do
+    results = @es.search(:q => "message:\"sample message\"")
+    expect(results).to have_hits(1)
+    expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
+  end
 
-      # partial or terms should not work.
-      results = @es.search(:q => "country.raw:\"u\"")
-      expect(results).to have_hits(0)
-    end
+  it "numbers dynamically map to a numeric type and permit range queries" do
+    results = @es.search(:q => "somevalue:[5 TO 105]")
+    expect(results).to have_hits(2)
 
-    it "make [geoip][location] a geo_point" do
-      expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
-    end
+    values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+    expect(values).to include(10)
+    expect(values).to include(100)
+    expect(values).to_not include(1)
+  end
+
+  it "does not create .keyword field for top-level message field" do
+    results = @es.search(:q => "message.keyword:\"sample message here\"")
+    expect(results).to have_hits(0)
+  end
 
-    it "aggregate .raw results correctly " do
-      results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.raw" } } } })["aggregations"]["my_agg"]
-      terms = results["buckets"].collect { |b| b["key"] }
+  it "creates .keyword field for nested message fields" do
+    results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
+    expect(results).to have_hits(1)
+  end
 
-      expect(terms).to include("us")
+  it "creates .keyword field from any string field which is not_analyzed" do
+    results = @es.search(:q => "country.keyword:\"us\"")
+    expect(results).to have_hits(1)
+    expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
 
-      # 'at' is a stopword, make sure stopwords are not ignored.
-      expect(terms).to include("at")
-    end
+    # partial or terms should not work.
+    results = @es.search(:q => "country.keyword:\"u\"")
+    expect(results).to have_hits(0)
+  end
+
+  it "make [geoip][location] a geo_point" do
+    expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
+  end
+
+  it "aggregate .keyword results correctly " do
+    results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
+    terms = results["buckets"].collect { |b| b["key"] }
+
+    expect(terms).to include("us")
+
+    # 'at' is a stopword, make sure stopwords are not ignored.
+    expect(terms).to include("at")
   end
 end
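
The rewritten template spec also swaps the hand-rolled `@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]...` lookup for a `field_properties_from_template` helper, since the `_default_` mapping type no longer exists on supported ES versions. The helper itself ships in `spec/es_spec_helper.rb`; the sketch below only illustrates one way it could be written, assuming a legacy index template and an elasticsearch-ruby client held in `@es`:

```ruby
# Illustrative only: one possible shape of the field_properties_from_template
# helper used above. Assumes @es is an elasticsearch-ruby client and the
# template is a legacy index template; ES 6.x nests mapping properties under
# a single type name, ES 7+ does not.
def field_properties_from_template(template_name, field)
  template = @es.indices.get_template(name: template_name)[template_name]
  mappings = template["mappings"]
  # Unwrap the mapping type (e.g. "_doc") when the template still has one.
  mappings = mappings.values.first unless mappings.key?("properties")
  mappings["properties"][field]["properties"]
end

# Usage, mirroring the spec above:
# expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
```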