logstash-output-elasticsearch 10.8.6-java → 11.0.3-java

Files changed (49)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +17 -0
  3. data/docs/index.asciidoc +132 -22
  4. data/lib/logstash/outputs/elasticsearch.rb +125 -64
  5. data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +233 -0
  6. data/lib/logstash/outputs/elasticsearch/http_client.rb +9 -7
  7. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +49 -62
  8. data/lib/logstash/outputs/elasticsearch/ilm.rb +13 -45
  9. data/lib/logstash/outputs/elasticsearch/license_checker.rb +26 -23
  10. data/lib/logstash/outputs/elasticsearch/template_manager.rb +4 -6
  11. data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-8x.json +1 -0
  12. data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +157 -153
  13. data/lib/logstash/plugin_mixins/elasticsearch/common.rb +71 -58
  14. data/logstash-output-elasticsearch.gemspec +3 -3
  15. data/spec/es_spec_helper.rb +7 -12
  16. data/spec/fixtures/_nodes/{5x_6x.json → 6x.json} +5 -5
  17. data/spec/integration/outputs/compressed_indexing_spec.rb +47 -46
  18. data/spec/integration/outputs/data_stream_spec.rb +61 -0
  19. data/spec/integration/outputs/delete_spec.rb +49 -51
  20. data/spec/integration/outputs/ilm_spec.rb +236 -248
  21. data/spec/integration/outputs/index_spec.rb +5 -2
  22. data/spec/integration/outputs/index_version_spec.rb +78 -82
  23. data/spec/integration/outputs/ingest_pipeline_spec.rb +58 -58
  24. data/spec/integration/outputs/painless_update_spec.rb +74 -164
  25. data/spec/integration/outputs/parent_spec.rb +67 -75
  26. data/spec/integration/outputs/retry_spec.rb +6 -6
  27. data/spec/integration/outputs/sniffer_spec.rb +15 -54
  28. data/spec/integration/outputs/templates_spec.rb +79 -81
  29. data/spec/integration/outputs/update_spec.rb +99 -101
  30. data/spec/spec_helper.rb +10 -0
  31. data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +528 -0
  32. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +1 -0
  33. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +36 -29
  34. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +2 -3
  35. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +10 -12
  36. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +1 -2
  37. data/spec/unit/outputs/elasticsearch_spec.rb +176 -41
  38. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +1 -2
  39. data/spec/unit/outputs/error_whitelist_spec.rb +3 -2
  40. data/spec/unit/outputs/license_check_spec.rb +0 -16
  41. metadata +29 -36
  42. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json +0 -95
  43. data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json +0 -46
  44. data/spec/fixtures/_nodes/2x_1x.json +0 -27
  45. data/spec/fixtures/scripts/groovy/scripted_update.groovy +0 -2
  46. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +0 -2
  47. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +0 -2
  48. data/spec/integration/outputs/groovy_update_spec.rb +0 -150
  49. data/spec/integration/outputs/templates_5x_spec.rb +0 -98
data/spec/integration/outputs/parent_spec.rb
@@ -1,102 +1,94 @@
  require_relative "../../../spec/es_spec_helper"
  require "logstash/outputs/elasticsearch"

- if ESHelper.es_version_satisfies?(">= 5.6")
- context "when using elasticsearch 5.6 and above", :integration => true do
+ describe "join type field", :integration => true do

- shared_examples "a join field based parent indexer" do
- let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+ shared_examples "a join field based parent indexer" do
+ let(:index) { 10.times.collect { rand(10).to_s }.join("") }

- let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+ let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }

- let(:event_count) { 10000 + rand(500) }
- let(:parent) { "not_implemented" }
- let(:config) { "not_implemented" }
- let(:parent_id) { "test" }
- let(:join_field) { "join_field" }
- let(:parent_relation) { "parent_type" }
- let(:child_relation) { "child_type" }
- let(:default_headers) {
- {"Content-Type" => "application/json"}
- }
- subject { LogStash::Outputs::ElasticSearch.new(config) }
+ let(:event_count) { 10000 + rand(500) }
+ let(:parent) { "not_implemented" }
+ let(:config) { "not_implemented" }
+ let(:parent_id) { "test" }
+ let(:join_field) { "join_field" }
+ let(:parent_relation) { "parent_type" }
+ let(:child_relation) { "child_type" }
+ let(:default_headers) {
+ {"Content-Type" => "application/json"}
+ }
+ subject { LogStash::Outputs::ElasticSearch.new(config) }

- before do
- # Add mapping and a parent document
- index_url = "http://#{get_host_port()}/#{index}"
+ before do
+ # Add mapping and a parent document
+ index_url = "http://#{get_host_port()}/#{index}"

- properties = {
- "properties" => {
- join_field => {
- "type" => "join",
- "relations" => { parent_relation => child_relation }
- }
+ properties = {
+ "properties" => {
+ join_field => {
+ "type" => "join",
+ "relations" => { parent_relation => child_relation }
  }
  }
+ }

- mapping = ESHelper.es_version_satisfies?('<7') ? { "mappings" => { type => properties } }
- : { "mappings" => properties}
+ mapping = ESHelper.es_version_satisfies?('<7') ? { "mappings" => { type => properties } }
+ : { "mappings" => properties}

- if ESHelper.es_version_satisfies?('<6')
- mapping.merge!({
- "settings" => {
- "mapping.single_type" => true
- }})
- end
- Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
- pdoc = { "message" => "ohayo", join_field => parent_relation }
- Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
+ Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
+ pdoc = { "message" => "ohayo", join_field => parent_relation }
+ Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call

- subject.register
- subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
- end
+ subject.register
+ subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
+ end


- it "ships events" do
- index_url = "http://#{get_host_port()}/#{index}"
+ it "ships events" do
+ index_url = "http://#{get_host_port()}/#{index}"

- Manticore.post("#{index_url}/_refresh").call
+ Manticore.post("#{index_url}/_refresh").call

- # Wait until all events are available.
- Stud::try(10.times) do
- query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
- response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
- data = response.body
- result = LogStash::Json.load(data)
- cur_count = result["count"]
- expect(cur_count).to eq(event_count)
- end
+ # Wait until all events are available.
+ Stud::try(10.times) do
+ query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
+ response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
+ data = response.body
+ result = LogStash::Json.load(data)
+ cur_count = result["count"]
+ expect(cur_count).to eq(event_count)
  end
  end
+ end

- describe "(http protocol) index events with static parent" do
- it_behaves_like 'a join field based parent indexer' do
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => parent_id,
- "document_type" => type,
- "join_field" => join_field,
- "manage_template" => false
- }
+ describe "(http protocol) index events with static parent" do
+ it_behaves_like 'a join field based parent indexer' do
+ let(:config) {
+ {
+ "hosts" => get_host_port,
+ "index" => index,
+ "parent" => parent_id,
+ "document_type" => type,
+ "join_field" => join_field,
+ "manage_template" => false
  }
- end
+ }
  end
+ end

- describe "(http_protocol) index events with fieldref in parent value" do
- it_behaves_like 'a join field based parent indexer' do
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => "%{link_to}",
- "document_type" => type,
- "join_field" => join_field,
- "manage_template" => false
- }
+ describe "(http_protocol) index events with fieldref in parent value" do
+ it_behaves_like 'a join field based parent indexer' do
+ let(:config) {
+ {
+ "hosts" => get_host_port,
+ "index" => index,
+ "parent" => "%{link_to}",
+ "document_type" => type,
+ "join_field" => join_field,
+ "manage_template" => false
  }
- end
+ }
  end
  end
  end
data/spec/integration/outputs/retry_spec.rb
@@ -5,18 +5,18 @@ describe "failures in bulk class expected behavior", :integration => true do
  let(:template) { '{"template" : "not important, will be updated by :index"}' }
  let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action1) do
- if ESHelper.es_version_satisfies?(">= 6", "< 7")
- ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1])
+ if ESHelper.es_version_satisfies?("< 7")
+ ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1.to_hash])
  else
- ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event1])
+ ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event1.to_hash])
  end
  end
  let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action2) do
- if ESHelper.es_version_satisfies?(">= 6", "< 7")
- ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event2])
+ if ESHelper.es_version_satisfies?("< 7")
+ ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event2.to_hash])
  else
- ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event2])
+ ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17" }, event2.to_hash])
  end
  end
  let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }
data/spec/integration/outputs/sniffer_spec.rb
@@ -1,4 +1,3 @@
- require "logstash/devutils/rspec/spec_helper"
  require_relative "../../../spec/es_spec_helper"
  require "logstash/outputs/elasticsearch/http_client"
  require "json"
@@ -36,7 +35,7 @@ describe "pool sniffer", :integration => true do
  end

  it "should return the correct sniff URL" do
- if ESHelper.es_version_satisfies?(">= 2", "<7")
+ if ESHelper.es_version_satisfies?("<7")
  # We do a more thorough check on these versions because we can more reliably guess the ip
  uris = subject.check_sniff

@@ -50,42 +49,6 @@ describe "pool sniffer", :integration => true do
  end
  end

- if ESHelper.es_version_satisfies?("<= 2")
- describe("Complex sniff parsing ES 2x/1x") do
- before(:each) do
- response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/2x_1x.json"))
- allow(subject).to receive(:perform_request).and_return([nil, { version: "2.0" }, response_double])
- subject.start
- end
-
- context "with multiple nodes but single http-enabled data node" do
- it "should execute a sniff without error" do
- expect { subject.check_sniff }.not_to raise_error
- end
-
- it "should return one sniff URL" do
- uris = subject.check_sniff
-
- expect(uris.size).to eq(1)
- end
-
- it "should return the correct sniff URL" do
- if ESHelper.es_version_satisfies?(">= 2")
- # We do a more thorough check on these versions because we can more reliably guess the ip
- uris = subject.check_sniff
-
- expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"))
- else
- # ES 1.x returned the public hostname by default. This is hard to approximate
- # so for ES1.x we don't check the *exact* hostname
- skip
- end
- end
- end
- end
- end
-
-
  if ESHelper.es_version_satisfies?(">= 7")
  describe("Complex sniff parsing ES 7x") do
  before(:each) do
@@ -108,25 +71,23 @@ describe "pool sniffer", :integration => true do
  end
  end
  end
- if ESHelper.es_version_satisfies?(">= 5")
- describe("Complex sniff parsing ES 6x/5x") do
- before(:each) do
- response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_6x.json"))
- allow(subject).to receive(:perform_request).and_return([nil, { version: "5.0" }, response_double])
- subject.start
- end
+ describe("Complex sniff parsing ES") do
+ before(:each) do
+ response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/6x.json"))
+ allow(subject).to receive(:perform_request).and_return([nil, { version: "6.8" }, response_double])
+ subject.start
+ end

- context "with mixed master-only, data-only, and data + master nodes" do
- it "should execute a sniff without error" do
- expect { subject.check_sniff }.not_to raise_error
- end
+ context "with mixed master-only, data-only, and data + master nodes" do
+ it "should execute a sniff without error" do
+ expect { subject.check_sniff }.not_to raise_error
+ end

- it "should return the correct sniff URLs" do
- # ie. without the master-only node
- uris = subject.check_sniff
+ it "should return the correct sniff URLs" do
+ # ie. without the master-only node
+ uris = subject.check_sniff

- expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
- end
+ expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
  end
  end
  end
data/spec/integration/outputs/templates_spec.rb
@@ -1,98 +1,96 @@
  require_relative "../../../spec/es_spec_helper"

- if ESHelper.es_version_satisfies?("< 5")
- describe "index template expected behavior", :integration => true do
- subject! do
- require "logstash/outputs/elasticsearch"
- settings = {
- "manage_template" => true,
- "template_overwrite" => true,
- "hosts" => "#{get_host_port()}"
- }
- next LogStash::Outputs::ElasticSearch.new(settings)
- end
+ describe "index template expected behavior", :integration => true do
+ subject! do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "manage_template" => true,
+ "template_overwrite" => true,
+ "hosts" => "#{get_host_port()}"
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end

- before :each do
- # Delete all templates first.
- require "elasticsearch"
-
- # Clean ES of data before we start.
- @es = get_client
- @es.indices.delete_template(:name => "*")
-
- # This can fail if there are no indexes, ignore failure.
- @es.indices.delete(:index => "*") rescue nil
-
- subject.register
-
- subject.multi_receive([
- LogStash::Event.new("message" => "sample message here"),
- LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
- LogStash::Event.new("somevalue" => 100),
- LogStash::Event.new("somevalue" => 10),
- LogStash::Event.new("somevalue" => 1),
- LogStash::Event.new("country" => "us"),
- LogStash::Event.new("country" => "at"),
- LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
- ])
-
- @es.indices.refresh
-
- # Wait or fail until everything's indexed.
- Stud::try(20.times) do
- r = @es.search(index: 'logstash-*')
- expect(r).to have_hits(8)
- end
- end
+ before :each do
+ # Delete all templates first.
+ require "elasticsearch"

- it "permits phrase searching on string fields" do
- results = @es.search(:q => "message:\"sample message\"")
- expect(results).to have_hits(1)
- expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
- end
+ # Clean ES of data before we start.
+ @es = get_client
+ @es.indices.delete_template(:name => "*")

- it "numbers dynamically map to a numeric type and permit range queries" do
- results = @es.search(:q => "somevalue:[5 TO 105]")
- expect(results).to have_hits(2)
+ # This can fail if there are no indexes, ignore failure.
+ @es.indices.delete(:index => "*") rescue nil

- values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
- expect(values).to include(10)
- expect(values).to include(100)
- expect(values).to_not include(1)
- end
+ subject.register

- it "does not create .raw field for the message field" do
- results = @es.search(:q => "message.raw:\"sample message here\"")
- expect(results).to have_hits(0)
- end
+ subject.multi_receive([
+ LogStash::Event.new("message" => "sample message here"),
+ LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+ LogStash::Event.new("somevalue" => 100),
+ LogStash::Event.new("somevalue" => 10),
+ LogStash::Event.new("somevalue" => 1),
+ LogStash::Event.new("country" => "us"),
+ LogStash::Event.new("country" => "at"),
+ LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+ ])

- it "creates .raw field for nested message fields" do
- results = @es.search(:q => "somemessage.message.raw:\"sample nested message here\"")
- expect(results).to have_hits(1)
+ @es.indices.refresh
+
+ # Wait or fail until everything's indexed.
+ Stud::try(20.times) do
+ r = @es.search(index: 'logstash-*')
+ expect(r).to have_hits(8)
  end
+ end

- it "creates .raw field from any string field which is not_analyzed" do
- results = @es.search(:q => "country.raw:\"us\"")
- expect(results).to have_hits(1)
- expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
+ it "permits phrase searching on string fields" do
+ results = @es.search(:q => "message:\"sample message\"")
+ expect(results).to have_hits(1)
+ expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
+ end

- # partial or terms should not work.
- results = @es.search(:q => "country.raw:\"u\"")
- expect(results).to have_hits(0)
- end
+ it "numbers dynamically map to a numeric type and permit range queries" do
+ results = @es.search(:q => "somevalue:[5 TO 105]")
+ expect(results).to have_hits(2)

- it "make [geoip][location] a geo_point" do
- expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
- end
+ values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+ expect(values).to include(10)
+ expect(values).to include(100)
+ expect(values).to_not include(1)
+ end
+
+ it "does not create .keyword field for top-level message field" do
+ results = @es.search(:q => "message.keyword:\"sample message here\"")
+ expect(results).to have_hits(0)
+ end

- it "aggregate .raw results correctly " do
- results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.raw" } } } })["aggregations"]["my_agg"]
- terms = results["buckets"].collect { |b| b["key"] }
+ it "creates .keyword field for nested message fields" do
+ results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
+ expect(results).to have_hits(1)
+ end

- expect(terms).to include("us")
+ it "creates .keyword field from any string field which is not_analyzed" do
+ results = @es.search(:q => "country.keyword:\"us\"")
+ expect(results).to have_hits(1)
+ expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")

- # 'at' is a stopword, make sure stopwords are not ignored.
- expect(terms).to include("at")
- end
+ # partial or terms should not work.
+ results = @es.search(:q => "country.keyword:\"u\"")
+ expect(results).to have_hits(0)
+ end
+
+ it "make [geoip][location] a geo_point" do
+ expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
+ end
+
+ it "aggregate .keyword results correctly " do
+ results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
+ terms = results["buckets"].collect { |b| b["key"] }
+
+ expect(terms).to include("us")
+
+ # 'at' is a stopword, make sure stopwords are not ignored.
+ expect(terms).to include("at")
  end
  end