logstash-output-elasticsearch 11.0.2-java → 11.0.3-java

Files changed (36)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +3 -0
  3. data/lib/logstash/outputs/elasticsearch.rb +3 -14
  4. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +2 -28
  5. data/lib/logstash/outputs/elasticsearch/ilm.rb +2 -33
  6. data/lib/logstash/outputs/elasticsearch/template_manager.rb +1 -1
  7. data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-8x.json +1 -0
  8. data/logstash-output-elasticsearch.gemspec +2 -2
  9. data/spec/es_spec_helper.rb +4 -6
  10. data/spec/fixtures/_nodes/{5x_6x.json → 6x.json} +5 -5
  11. data/spec/integration/outputs/compressed_indexing_spec.rb +47 -46
  12. data/spec/integration/outputs/delete_spec.rb +49 -51
  13. data/spec/integration/outputs/ilm_spec.rb +230 -246
  14. data/spec/integration/outputs/index_spec.rb +5 -2
  15. data/spec/integration/outputs/index_version_spec.rb +78 -82
  16. data/spec/integration/outputs/ingest_pipeline_spec.rb +58 -60
  17. data/spec/integration/outputs/painless_update_spec.rb +74 -164
  18. data/spec/integration/outputs/parent_spec.rb +67 -75
  19. data/spec/integration/outputs/retry_spec.rb +2 -2
  20. data/spec/integration/outputs/sniffer_spec.rb +15 -53
  21. data/spec/integration/outputs/templates_spec.rb +79 -81
  22. data/spec/integration/outputs/update_spec.rb +99 -101
  23. data/spec/spec_helper.rb +1 -5
  24. data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +0 -14
  25. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +30 -37
  26. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +9 -9
  27. data/spec/unit/outputs/elasticsearch_spec.rb +1 -8
  28. metadata +8 -22
  29. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json +0 -95
  30. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json +0 -46
  31. data/spec/fixtures/_nodes/2x_1x.json +0 -27
  32. data/spec/fixtures/scripts/groovy/scripted_update.groovy +0 -2
  33. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +0 -2
  34. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +0 -2
  35. data/spec/integration/outputs/groovy_update_spec.rb +0 -150
  36. data/spec/integration/outputs/templates_5x_spec.rb +0 -98
data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json (deleted)
@@ -1,95 +0,0 @@
- {
-   "template" : "logstash-*",
-   "settings" : {
-     "index.refresh_interval" : "5s"
-   },
-   "mappings" : {
-     "_default_" : {
-       "_all" : {"enabled" : true, "omit_norms" : true},
-       "dynamic_templates" : [ {
-         "message_field" : {
-           "path_match" : "message",
-           "match_mapping_type" : "string",
-           "mapping" : {
-             "type" : "string", "index" : "analyzed", "omit_norms" : true,
-             "fielddata" : { "format" : "disabled" }
-           }
-         }
-       }, {
-         "string_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "string",
-           "mapping" : {
-             "type" : "string", "index" : "analyzed", "omit_norms" : true,
-             "fielddata" : { "format" : "disabled" },
-             "fields" : {
-               "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256}
-             }
-           }
-         }
-       }, {
-         "float_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "float",
-           "mapping" : { "type" : "float", "doc_values" : true }
-         }
-       }, {
-         "double_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "double",
-           "mapping" : { "type" : "double", "doc_values" : true }
-         }
-       }, {
-         "byte_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "byte",
-           "mapping" : { "type" : "byte", "doc_values" : true }
-         }
-       }, {
-         "short_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "short",
-           "mapping" : { "type" : "short", "doc_values" : true }
-         }
-       }, {
-         "integer_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "integer",
-           "mapping" : { "type" : "integer", "doc_values" : true }
-         }
-       }, {
-         "long_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "long",
-           "mapping" : { "type" : "long", "doc_values" : true }
-         }
-       }, {
-         "date_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "date",
-           "mapping" : { "type" : "date", "doc_values" : true }
-         }
-       }, {
-         "geo_point_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "geo_point",
-           "mapping" : { "type" : "geo_point", "doc_values" : true }
-         }
-       } ],
-       "properties" : {
-         "@timestamp": { "type": "date", "doc_values" : true },
-         "@version": { "type": "string", "index": "not_analyzed", "doc_values" : true },
-         "geoip" : {
-           "type" : "object",
-           "dynamic": true,
-           "properties" : {
-             "ip": { "type": "ip", "doc_values" : true },
-             "location" : { "type" : "geo_point", "doc_values" : true },
-             "latitude" : { "type" : "float", "doc_values" : true },
-             "longitude" : { "type" : "float", "doc_values" : true }
-           }
-         }
-       }
-     }
-   }
- }
data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json (deleted)
@@ -1,46 +0,0 @@
- {
-   "template" : "logstash-*",
-   "version" : 50001,
-   "settings" : {
-     "index.refresh_interval" : "5s"
-   },
-   "mappings" : {
-     "_default_" : {
-       "_all" : {"enabled" : true, "norms" : false},
-       "dynamic_templates" : [ {
-         "message_field" : {
-           "path_match" : "message",
-           "match_mapping_type" : "string",
-           "mapping" : {
-             "type" : "text",
-             "norms" : false
-           }
-         }
-       }, {
-         "string_fields" : {
-           "match" : "*",
-           "match_mapping_type" : "string",
-           "mapping" : {
-             "type" : "text", "norms" : false,
-             "fields" : {
-               "keyword" : { "type": "keyword", "ignore_above": 256 }
-             }
-           }
-         }
-       } ],
-       "properties" : {
-         "@timestamp": { "type": "date", "include_in_all": false },
-         "@version": { "type": "keyword", "include_in_all": false },
-         "geoip" : {
-           "dynamic": true,
-           "properties" : {
-             "ip": { "type": "ip" },
-             "location" : { "type" : "geo_point" },
-             "latitude" : { "type" : "half_float" },
-             "longitude" : { "type" : "half_float" }
-           }
-         }
-       }
-     }
-   }
- }
data/spec/fixtures/_nodes/2x_1x.json (deleted)
@@ -1,27 +0,0 @@
- {
-   "cluster_name" : "dev",
-   "nodes" : {
-     "Ur_68iBvTlm7Xr1HgSsh6w" : {
-       "name" : "dev-es-master01",
-       "transport_address" : "http://localhost:9200",
-       "host" : "127.0.0.1",
-       "ip" : "127.0.0.1",
-       "version" : "2.4.6",
-       "build" : "5376dca"
-     },
-     "sari4do3RG-mgh2CIZeHwA" : {
-       "name" : "dev-es-data01",
-       "transport_address" : "http://localhost:9201",
-       "host" : "127.0.0.1",
-       "ip" : "127.0.0.1",
-       "version" : "2.4.6",
-       "build" : "5376dca",
-       "http_address" : "127.0.0.1:9201",
-       "http" : {
-         "bound_address" : [ "[::1]:9201", "127.0.0.1:9201" ],
-         "publish_address" : "127.0.0.1:9201",
-         "max_content_length_in_bytes" : 104857600
-       }
-     }
-   }
- }
data/spec/fixtures/scripts/groovy/scripted_update.groovy (deleted)
@@ -1,2 +0,0 @@
- ctx._source.counter += event["count"]
-
data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy (deleted)
@@ -1,2 +0,0 @@
- ctx._source.counter += event["data"]["count"]
-
data/spec/fixtures/scripts/groovy/scripted_upsert.groovy (deleted)
@@ -1,2 +0,0 @@
- ctx._source.counter = event["counter"]
-
data/spec/integration/outputs/groovy_update_spec.rb (deleted)
@@ -1,150 +0,0 @@
- require_relative "../../../spec/es_spec_helper"
-
- if ESHelper.es_version_satisfies?('>= 2', '< 6')
-   describe "Update actions using groovy scripts", :integration => true, :update_tests => 'groovy' do
-     require "logstash/outputs/elasticsearch"
-
-     def get_es_output( options={} )
-       settings = {
-         "manage_template" => true,
-         "index" => "logstash-update",
-         "template_overwrite" => true,
-         "hosts" => get_host_port(),
-         "action" => "update",
-         "script_lang" => "groovy"
-       }
-       LogStash::Outputs::ElasticSearch.new(settings.merge!(options))
-     end
-
-     before :each do
-       @es = get_client
-       # Delete all templates first.
-       # Clean ES of data before we start.
-       @es.indices.delete_template(:name => "*")
-       # This can fail if there are no indexes, ignore failure.
-       @es.indices.delete(:index => "*") rescue nil
-       @es.index(
-         :index => 'logstash-update',
-         :type => doc_type,
-         :id => "123",
-         :body => { :message => 'Test', :counter => 1 }
-       )
-       @es.indices.refresh
-     end
-
-     context "scripted updates" do
-       it "should increment a counter with event/doc 'count' variable" do
-         subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update', 'script_type' => 'file' })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("count" => 2)])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(3)
-       end
-
-       it "should increment a counter with event/doc '[data][count]' nested variable" do
-         subject = get_es_output({ 'document_id' => "123", 'script' => 'scripted_update_nested', 'script_type' => 'file' })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("data" => { "count" => 3 })])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(4)
-       end
-
-       it "should increment a counter with event/doc 'count' variable with inline script" do
-         subject = get_es_output({
-           'document_id' => "123",
-           'script' => 'ctx._source.counter += event["counter"]',
-           'script_lang' => 'groovy',
-           'script_type' => 'inline'
-         })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("counter" => 3 )])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(4)
-       end
-
-       it "should increment a counter with event/doc 'count' variable with event/doc as upsert and inline script" do
-         subject = get_es_output({
-           'document_id' => "123",
-           'doc_as_upsert' => true,
-           'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += event["counter"]; } else { ctx._source.counter = event["counter"]; }',
-           'script_lang' => 'groovy',
-           'script_type' => 'inline'
-         })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("counter" => 3 )])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(4)
-       end
-
-       it "should, with new doc, set a counter with event/doc 'count' variable with event/doc as upsert and inline script" do
-         subject = get_es_output({
-           'document_id' => "456",
-           'doc_as_upsert' => true,
-           'script' => 'if( ctx._source.containsKey("counter") ){ ctx._source.counter += event["count"]; } else { ctx._source.counter = event["count"]; }',
-           'script_lang' => 'groovy',
-           'script_type' => 'inline'
-         })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("counter" => 3 )])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(3)
-       end
-
-       it "should increment a counter with event/doc 'count' variable with indexed script" do
-         @es.put_script lang: 'groovy', id: 'indexed_update', body: { script: 'ctx._source.counter += event["count"]' }
-         subject = get_es_output({
-           'document_id' => "123",
-           'script' => 'indexed_update',
-           'script_lang' => 'groovy',
-           'script_type' => 'indexed'
-         })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("count" => 4 )])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(5)
-       end
-     end
-
-     context "when update with upsert" do
-       it "should create new documents with provided upsert" do
-         subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
-         expect(r["_source"]["message"]).to eq('upsert message')
-       end
-
-       it "should create new documents with event/doc as upsert" do
-         subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
-         expect(r["_source"]["message"]).to eq('sample message here')
-       end
-
-       it "should fail on documents with event/doc as upsert at external version" do
-         subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true, 'version' => 999, "version_type" => "external" })
-         expect { subject.register }.to raise_error(LogStash::ConfigurationError)
-       end
-     end
-
-     context "updates with scripted upsert" do
-       it "should create new documents with upsert content" do
-         subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_update', 'upsert' => '{"message": "upsert message"}', 'script_type' => 'file' })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
-         expect(r["_source"]["message"]).to eq('upsert message')
-       end
-
-       it "should create new documents with event/doc as script params" do
-         subject = get_es_output({ 'document_id' => "456", 'script' => 'scripted_upsert', 'scripted_upsert' => true, 'script_type' => 'file' })
-         subject.register
-         subject.multi_receive([LogStash::Event.new("counter" => 1)])
-         @es.indices.refresh
-         r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
-         expect(r["_source"]["counter"]).to eq(1)
-       end
-     end
-   end
- end
data/spec/integration/outputs/templates_5x_spec.rb (deleted)
@@ -1,98 +0,0 @@
- require_relative "../../../spec/es_spec_helper"
-
- if ESHelper.es_version_satisfies?(">= 5")
-   describe "index template expected behavior for 5.x", :integration => true do
-     subject! do
-       require "logstash/outputs/elasticsearch"
-       settings = {
-         "manage_template" => true,
-         "template_overwrite" => true,
-         "hosts" => "#{get_host_port()}"
-       }
-       next LogStash::Outputs::ElasticSearch.new(settings)
-     end
-
-     before :each do
-       # Delete all templates first.
-       require "elasticsearch"
-
-       # Clean ES of data before we start.
-       @es = get_client
-       @es.indices.delete_template(:name => "*")
-
-       # This can fail if there are no indexes, ignore failure.
-       @es.indices.delete(:index => "*") rescue nil
-
-       subject.register
-
-       subject.multi_receive([
-         LogStash::Event.new("message" => "sample message here"),
-         LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
-         LogStash::Event.new("somevalue" => 100),
-         LogStash::Event.new("somevalue" => 10),
-         LogStash::Event.new("somevalue" => 1),
-         LogStash::Event.new("country" => "us"),
-         LogStash::Event.new("country" => "at"),
-         LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
-       ])
-
-       @es.indices.refresh
-
-       # Wait or fail until everything's indexed.
-       Stud::try(20.times) do
-         r = @es.search(index: 'logstash-*')
-         expect(r).to have_hits(8)
-       end
-     end
-
-     it "permits phrase searching on string fields" do
-       results = @es.search(:q => "message:\"sample message\"")
-       expect(results).to have_hits(1)
-       expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
-     end
-
-     it "numbers dynamically map to a numeric type and permit range queries" do
-       results = @es.search(:q => "somevalue:[5 TO 105]")
-       expect(results).to have_hits(2)
-
-       values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
-       expect(values).to include(10)
-       expect(values).to include(100)
-       expect(values).to_not include(1)
-     end
-
-     it "does not create .keyword field for top-level message field" do
-       results = @es.search(:q => "message.keyword:\"sample message here\"")
-       expect(results).to have_hits(0)
-     end
-
-     it "creates .keyword field for nested message fields" do
-       results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
-       expect(results).to have_hits(1)
-     end
-
-     it "creates .keyword field from any string field which is not_analyzed" do
-       results = @es.search(:q => "country.keyword:\"us\"")
-       expect(results).to have_hits(1)
-       expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
-
-       # partial or terms should not work.
-       results = @es.search(:q => "country.keyword:\"u\"")
-       expect(results).to have_hits(0)
-     end
-
-     it "make [geoip][location] a geo_point" do
-       expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
-     end
-
-     it "aggregate .keyword results correctly " do
-       results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
-       terms = results["buckets"].collect { |b| b["key"] }
-
-       expect(terms).to include("us")
-
-       # 'at' is a stopword, make sure stopwords are not ignored.
-       expect(terms).to include("at")
-     end
-   end
- end