logstash-output-elasticsearch 7.4.3-java → 8.0.0-java

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (28)
  1. checksums.yaml +5 -5
  2. data/CHANGELOG.md +5 -18
  3. data/docs/index.asciidoc +13 -50
  4. data/lib/logstash/outputs/elasticsearch/common.rb +39 -43
  5. data/lib/logstash/outputs/elasticsearch/common_configs.rb +2 -11
  6. data/lib/logstash/outputs/elasticsearch/http_client.rb +22 -27
  7. data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +2 -2
  8. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +12 -31
  9. data/lib/logstash/outputs/elasticsearch/template_manager.rb +6 -4
  10. data/logstash-output-elasticsearch.gemspec +1 -1
  11. data/spec/es_spec_helper.rb +0 -6
  12. data/spec/integration/outputs/compressed_indexing_spec.rb +44 -46
  13. data/spec/integration/outputs/delete_spec.rb +49 -51
  14. data/spec/integration/outputs/groovy_update_spec.rb +129 -131
  15. data/spec/integration/outputs/index_version_spec.rb +81 -82
  16. data/spec/integration/outputs/ingest_pipeline_spec.rb +49 -51
  17. data/spec/integration/outputs/painless_update_spec.rb +130 -170
  18. data/spec/integration/outputs/parent_spec.rb +55 -149
  19. data/spec/integration/outputs/sniffer_spec.rb +2 -5
  20. data/spec/integration/outputs/templates_5x_spec.rb +82 -81
  21. data/spec/integration/outputs/templates_spec.rb +81 -81
  22. data/spec/integration/outputs/update_spec.rb +99 -101
  23. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +5 -30
  24. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +0 -3
  25. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +12 -11
  26. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +25 -13
  27. data/spec/unit/outputs/elasticsearch_spec.rb +1 -10
  28. metadata +4 -6
data/spec/integration/outputs/parent_spec.rb
@@ -1,165 +1,71 @@
  require_relative "../../../spec/es_spec_helper"
  require "logstash/outputs/elasticsearch"

- if ESHelper.es_version_satisfies?("< 6")
- context "when using elasticsearch 5.x and before", :integration => true do
- shared_examples "a type based parent indexer" do
- let(:index) { 10.times.collect { rand(10).to_s }.join("") }
- let(:type) { 10.times.collect { rand(10).to_s }.join("") }
- let(:event_count) { 10000 + rand(500) }
- let(:parent) { "not_implemented" }
- let(:config) { "not_implemented" }
- let(:default_headers) {
- {"Content-Type" => "application/json"}
- }
- subject { LogStash::Outputs::ElasticSearch.new(config) }
-
- before do
- # Add mapping and a parent document
- index_url = "http://#{get_host_port()}/#{index}"
- mapping = { "mappings" => { "#{type}" => { "_parent" => { "type" => "#{type}_parent" } } } }
- Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
- pdoc = { "foo" => "bar" }
- Manticore.put("#{index_url}/#{type}_parent/test", {:body => pdoc.to_json, :headers => default_headers}).call
-
- subject.register
- subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => "test", "message" => "Hello World!", "type" => type) })
- end
-
+ shared_examples "a parent indexer" do
+ let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+ let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+ let(:event_count) { 10000 + rand(500) }
+ let(:parent) { "not_implemented" }
+ let(:config) { "not_implemented" }
+ let(:default_headers) {
+ {"Content-Type" => "application/json"}
+ }
+ subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+ before do
+ # Add mapping and a parent document
+ index_url = "http://#{get_host_port()}/#{index}"
+ ftw = FTW::Agent.new
+ mapping = { "mappings" => { "#{type}" => { "_parent" => { "type" => "#{type}_parent" } } } }
+ ftw.put!("#{index_url}", {:body => mapping.to_json, :headers => default_headers})
+ pdoc = { "foo" => "bar" }
+ ftw.put!("#{index_url}/#{type}_parent/test", {:body => pdoc.to_json, :headers => default_headers})
+
+ subject.register
+ subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => "test", "message" => "Hello World!", "type" => type) })
+ end

- it "ships events" do
- index_url = "http://#{get_host_port()}/#{index}"

- Manticore.post("#{index_url}/_refresh").call
+ it "ships events" do
+ index_url = "http://#{get_host_port()}/#{index}"

- # Wait until all events are available.
- Stud::try(10.times) do
- query = { "query" => { "has_parent" => { "type" => "#{type}_parent", "query" => { "match" => { "foo" => "bar" } } } } }
- response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
- data = response.body
- result = LogStash::Json.load(data)
- cur_count = result["count"]
- insist { cur_count } == event_count
- end
- end
- end
+ ftw = FTW::Agent.new
+ ftw.post!("#{index_url}/_refresh")

- describe "(http protocol) index events with static parent" do
- it_behaves_like 'a type based parent indexer' do
- let(:parent) { "test" }
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => parent
- }
- }
+ # Wait until all events are available.
+ Stud::try(10.times) do
+ query = { "query" => { "has_parent" => { "type" => "#{type}_parent", "query" => { "match" => { "foo" => "bar" } } } } }
+ data = ""
+ response = ftw.post!("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
+ response.read_body { |chunk| data << chunk }
+ result = LogStash::Json.load(data)
+ cur_count = result["count"]
+ insist { cur_count } == event_count
  end
  end
+ end

- describe "(http_protocol) index events with fieldref in parent value" do
- it_behaves_like 'a type based parent indexer' do
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => "%{link_to}"
- }
- }
- end
- end
+ describe "(http protocol) index events with static parent", :integration => true do
+ it_behaves_like 'a parent indexer' do
+ let(:parent) { "test" }
+ let(:config) {
+ {
+ "hosts" => get_host_port,
+ "index" => index,
+ "parent" => parent
+ }
+ }
  end
  end

- if ESHelper.es_version_satisfies?(">= 5.6")
- context "when using elasticsearch 5.6 and above", :integration => true do
- shared_examples "a join field based parent indexer" do
- let(:index) { 10.times.collect { rand(10).to_s }.join("") }
- let(:type) { 10.times.collect { rand(10).to_s }.join("") }
- let(:event_count) { 10000 + rand(500) }
- let(:parent) { "not_implemented" }
- let(:config) { "not_implemented" }
- let(:parent_id) { "test" }
- let(:join_field) { "join_field" }
- let(:parent_relation) { "parent_type" }
- let(:child_relation) { "child_type" }
- let(:default_headers) {
- {"Content-Type" => "application/json"}
+ describe "(http_protocol) index events with fieldref in parent value", :integration => true do
+ it_behaves_like 'a parent indexer' do
+ let(:config) {
+ {
+ "hosts" => get_host_port,
+ "index" => index,
+ "parent" => "%{link_to}"
  }
- subject { LogStash::Outputs::ElasticSearch.new(config) }
-
- before do
- # Add mapping and a parent document
- index_url = "http://#{get_host_port()}/#{index}"
- mapping = {
- "mappings" => {
- type => {
- "properties" => {
- join_field => {
- "type" => "join",
- "relations" => { parent_relation => child_relation }
- }
- }
- }
- }
- }
- if ESHelper.es_version_satisfies?('<6')
- mapping.merge!({ "settings" => { "mapping.single_type" => true }})
- end
- Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
- pdoc = { "message" => "ohayo", join_field => parent_relation }
- Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
-
- subject.register
- subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
- end
-
-
- it "ships events" do
- index_url = "http://#{get_host_port()}/#{index}"
-
- Manticore.post("#{index_url}/_refresh").call
-
- # Wait until all events are available.
- Stud::try(10.times) do
- query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
- response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
- data = response.body
- result = LogStash::Json.load(data)
- cur_count = result["count"]
- insist { cur_count } == event_count
- end
- end
- end
-
- describe "(http protocol) index events with static parent" do
- it_behaves_like 'a join field based parent indexer' do
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => parent_id,
- "document_type" => type,
- "join_field" => join_field,
- "manage_template" => false
- }
- }
- end
- end
-
- describe "(http_protocol) index events with fieldref in parent value" do
- it_behaves_like 'a join field based parent indexer' do
- let(:config) {
- {
- "hosts" => get_host_port,
- "index" => index,
- "parent" => "%{link_to}",
- "document_type" => type,
- "join_field" => join_field,
- "manage_template" => false
- }
- }
- end
- end
+ }
  end
  end
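For context, the hunk above exercises the plugin's `parent` option, which routes each child event to its parent document and accepts field references resolved per event. A minimal usage sketch follows; the host, index, and field values are placeholders for illustration and are not taken from this diff:

    require "logstash/outputs/elasticsearch"

    # Illustrative sketch only (placeholder values): each event's "link_to"
    # field is resolved to the parent document id at index time.
    output = LogStash::Outputs::ElasticSearch.new(
      "hosts"  => "localhost:9200",   # placeholder host
      "index"  => "children",         # placeholder index
      "parent" => "%{link_to}"        # field reference resolved per event
    )
    output.register
    output.multi_receive([LogStash::Event.new("link_to" => "parent-1", "message" => "hello")])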
data/spec/integration/outputs/sniffer_spec.rb
@@ -38,10 +38,7 @@ describe "pool sniffer", :integration => true do
  end

  # We do a more thorough check on these versions because we can more reliably guess the ip
-
- if ESHelper.es_version_satisfies?(">= 2")
- describe("Complex sniff parsing ES 6x/5x/2x") do
- include_examples("sniff parsing", true)
- end
+ describe("Complex sniff parsing ES 6x/5x/2x", :version_greater_than_equal_to_2x => true) do
+ include_examples("sniff parsing", true)
  end
  end
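The sniffer hunk above shows the pattern repeated across these integration specs: inline `ESHelper.es_version_satisfies?` guards are dropped in favor of RSpec metadata tags such as `:version_greater_than_equal_to_2x`. A minimal sketch of how such tags can be filtered at configuration time follows; it assumes a version helper like `ESHelper.es_version_satisfies?` is still reachable from the spec helper, which is an assumption for illustration rather than the actual wiring in 8.0.0:

    # Illustrative sketch only: exclude tagged examples when the running ES
    # version does not match. filter_run_excluding is standard RSpec API;
    # the version check shown here mirrors the removed guards.
    RSpec.configure do |config|
      config.filter_run_excluding :version_greater_than_equal_to_2x => true unless ESHelper.es_version_satisfies?(">= 2")
      config.filter_run_excluding :version_greater_than_equal_to_5x => true unless ESHelper.es_version_satisfies?(">= 5")
      config.filter_run_excluding :version_less_than_5x => true unless ESHelper.es_version_satisfies?("< 5")
    end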
data/spec/integration/outputs/templates_5x_spec.rb
@@ -1,98 +1,99 @@
  require_relative "../../../spec/es_spec_helper"

- if ESHelper.es_version_satisfies?(">= 5")
- describe "index template expected behavior for 5.x", :integration => true do
- subject! do
- require "logstash/outputs/elasticsearch"
- settings = {
- "manage_template" => true,
- "template_overwrite" => true,
- "hosts" => "#{get_host_port()}"
- }
- next LogStash::Outputs::ElasticSearch.new(settings)
- end
+ # This file is a copy of template test for 2.x. We can DRY this up later.
+ describe "index template expected behavior for 5.x", :integration => true, :version_greater_than_equal_to_5x => true do
+ subject! do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "manage_template" => true,
+ "template_overwrite" => true,
+ "hosts" => "#{get_host_port()}"
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end

- before :each do
- # Delete all templates first.
- require "elasticsearch"
-
- # Clean ES of data before we start.
- @es = get_client
- @es.indices.delete_template(:name => "*")
-
- # This can fail if there are no indexes, ignore failure.
- @es.indices.delete(:index => "*") rescue nil
-
- subject.register
-
- subject.multi_receive([
- LogStash::Event.new("message" => "sample message here"),
- LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
- LogStash::Event.new("somevalue" => 100),
- LogStash::Event.new("somevalue" => 10),
- LogStash::Event.new("somevalue" => 1),
- LogStash::Event.new("country" => "us"),
- LogStash::Event.new("country" => "at"),
- LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
- ])
-
- @es.indices.refresh
-
- # Wait or fail until everything's indexed.
- Stud::try(20.times) do
- r = @es.search
- insist { r["hits"]["total"] } == 8
- end
- end
+ before :each do
+ # Delete all templates first.
+ require "elasticsearch"

- it "permits phrase searching on string fields" do
- results = @es.search(:q => "message:\"sample message\"")
- insist { results["hits"]["total"] } == 1
- insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
- end
+ # Clean ES of data before we start.
+ @es = get_client
+ @es.indices.delete_template(:name => "*")

- it "numbers dynamically map to a numeric type and permit range queries" do
- results = @es.search(:q => "somevalue:[5 TO 105]")
- insist { results["hits"]["total"] } == 2
+ # This can fail if there are no indexes, ignore failure.
+ @es.indices.delete(:index => "*") rescue nil

- values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
- insist { values }.include?(10)
- insist { values }.include?(100)
- reject { values }.include?(1)
- end
+ subject.register

- it "does not create .keyword field for top-level message field" do
- results = @es.search(:q => "message.keyword:\"sample message here\"")
- insist { results["hits"]["total"] } == 0
- end
+ subject.multi_receive([
+ LogStash::Event.new("message" => "sample message here"),
+ LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+ LogStash::Event.new("somevalue" => 100),
+ LogStash::Event.new("somevalue" => 10),
+ LogStash::Event.new("somevalue" => 1),
+ LogStash::Event.new("country" => "us"),
+ LogStash::Event.new("country" => "at"),
+ LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+ ])

- it "creates .keyword field for nested message fields" do
- results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
- insist { results["hits"]["total"] } == 1
+ @es.indices.refresh
+
+ # Wait or fail until everything's indexed.
+ Stud::try(20.times) do
+ r = @es.search
+ insist { r["hits"]["total"] } == 8
  end
+ end

- it "creates .keyword field from any string field which is not_analyzed" do
- results = @es.search(:q => "country.keyword:\"us\"")
- insist { results["hits"]["total"] } == 1
- insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+ it "permits phrase searching on string fields" do
+ results = @es.search(:q => "message:\"sample message\"")
+ insist { results["hits"]["total"] } == 1
+ insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
+ end

- # partial or terms should not work.
- results = @es.search(:q => "country.keyword:\"u\"")
- insist { results["hits"]["total"] } == 0
- end
+ it "numbers dynamically map to a numeric type and permit range queries" do
+ results = @es.search(:q => "somevalue:[5 TO 105]")
+ insist { results["hits"]["total"] } == 2

- it "make [geoip][location] a geo_point" do
- expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
- end
+ values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+ insist { values }.include?(10)
+ insist { values }.include?(100)
+ reject { values }.include?(1)
+ end

- it "aggregate .keyword results correctly " do
- results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
- terms = results["buckets"].collect { |b| b["key"] }
+ it "does not create .keyword field for top-level message field" do
+ results = @es.search(:q => "message.keyword:\"sample message here\"")
+ insist { results["hits"]["total"] } == 0
+ end

- insist { terms }.include?("us")
+ it "creates .keyword field for nested message fields" do
+ results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
+ insist { results["hits"]["total"] } == 1
+ end

- # 'at' is a stopword, make sure stopwords are not ignored.
- insist { terms }.include?("at")
- end
+ it "creates .keyword field from any string field which is not_analyzed" do
+ results = @es.search(:q => "country.keyword:\"us\"")
+ insist { results["hits"]["total"] } == 1
+ insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+
+ # partial or terms should not work.
+ results = @es.search(:q => "country.keyword:\"u\"")
+ insist { results["hits"]["total"] } == 0
+ end
+
+ it "make [geoip][location] a geo_point" do
+ expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
+ end
+
+ it "aggregate .keyword results correctly " do
+ results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
+ terms = results["buckets"].collect { |b| b["key"] }
+
+ insist { terms }.include?("us")
+
+ # 'at' is a stopword, make sure stopwords are not ignored.
+ insist { terms }.include?("at")
  end
  end
+
+
data/spec/integration/outputs/templates_spec.rb
@@ -1,98 +1,98 @@
  require_relative "../../../spec/es_spec_helper"

- if ESHelper.es_version_satisfies?("< 5")
- describe "index template expected behavior", :integration => true do
- subject! do
- require "logstash/outputs/elasticsearch"
- settings = {
- "manage_template" => true,
- "template_overwrite" => true,
- "hosts" => "#{get_host_port()}"
- }
- next LogStash::Outputs::ElasticSearch.new(settings)
- end
+ describe "index template expected behavior", :integration => true, :version_less_than_5x => true do
+ subject! do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "manage_template" => true,
+ "template_overwrite" => true,
+ "hosts" => "#{get_host_port()}"
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end

- before :each do
- # Delete all templates first.
- require "elasticsearch"
-
- # Clean ES of data before we start.
- @es = get_client
- @es.indices.delete_template(:name => "*")
-
- # This can fail if there are no indexes, ignore failure.
- @es.indices.delete(:index => "*") rescue nil
-
- subject.register
-
- subject.multi_receive([
- LogStash::Event.new("message" => "sample message here"),
- LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
- LogStash::Event.new("somevalue" => 100),
- LogStash::Event.new("somevalue" => 10),
- LogStash::Event.new("somevalue" => 1),
- LogStash::Event.new("country" => "us"),
- LogStash::Event.new("country" => "at"),
- LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
- ])
-
- @es.indices.refresh
-
- # Wait or fail until everything's indexed.
- Stud::try(20.times) do
- r = @es.search
- insist { r["hits"]["total"] } == 8
- end
- end
+ before :each do
+ # Delete all templates first.
+ require "elasticsearch"

- it "permits phrase searching on string fields" do
- results = @es.search(:q => "message:\"sample message\"")
- insist { results["hits"]["total"] } == 1
- insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
- end
+ # Clean ES of data before we start.
+ @es = get_client
+ @es.indices.delete_template(:name => "*")

- it "numbers dynamically map to a numeric type and permit range queries" do
- results = @es.search(:q => "somevalue:[5 TO 105]")
- insist { results["hits"]["total"] } == 2
+ # This can fail if there are no indexes, ignore failure.
+ @es.indices.delete(:index => "*") rescue nil

- values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
- insist { values }.include?(10)
- insist { values }.include?(100)
- reject { values }.include?(1)
- end
+ subject.register

- it "does not create .raw field for the message field" do
- results = @es.search(:q => "message.raw:\"sample message here\"")
- insist { results["hits"]["total"] } == 0
- end
+ subject.multi_receive([
+ LogStash::Event.new("message" => "sample message here"),
+ LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+ LogStash::Event.new("somevalue" => 100),
+ LogStash::Event.new("somevalue" => 10),
+ LogStash::Event.new("somevalue" => 1),
+ LogStash::Event.new("country" => "us"),
+ LogStash::Event.new("country" => "at"),
+ LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+ ])

- it "creates .raw field for nested message fields" do
- results = @es.search(:q => "somemessage.message.raw:\"sample nested message here\"")
- insist { results["hits"]["total"] } == 1
+ @es.indices.refresh
+
+ # Wait or fail until everything's indexed.
+ Stud::try(20.times) do
+ r = @es.search
+ insist { r["hits"]["total"] } == 8
  end
+ end

- it "creates .raw field from any string field which is not_analyzed" do
- results = @es.search(:q => "country.raw:\"us\"")
- insist { results["hits"]["total"] } == 1
- insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+ it "permits phrase searching on string fields" do
+ results = @es.search(:q => "message:\"sample message\"")
+ insist { results["hits"]["total"] } == 1
+ insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
+ end

- # partial or terms should not work.
- results = @es.search(:q => "country.raw:\"u\"")
- insist { results["hits"]["total"] } == 0
- end
+ it "numbers dynamically map to a numeric type and permit range queries" do
+ results = @es.search(:q => "somevalue:[5 TO 105]")
+ insist { results["hits"]["total"] } == 2

- it "make [geoip][location] a geo_point" do
- expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
- end
+ values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+ insist { values }.include?(10)
+ insist { values }.include?(100)
+ reject { values }.include?(1)
+ end

- it "aggregate .raw results correctly " do
- results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.raw" } } } })["aggregations"]["my_agg"]
- terms = results["buckets"].collect { |b| b["key"] }
+ it "does not create .raw field for the message field" do
+ results = @es.search(:q => "message.raw:\"sample message here\"")
+ insist { results["hits"]["total"] } == 0
+ end

- insist { terms }.include?("us")
+ it "creates .raw field for nested message fields" do
+ results = @es.search(:q => "somemessage.message.raw:\"sample nested message here\"")
+ insist { results["hits"]["total"] } == 1
+ end

- # 'at' is a stopword, make sure stopwords are not ignored.
- insist { terms }.include?("at")
- end
+ it "creates .raw field from any string field which is not_analyzed" do
+ results = @es.search(:q => "country.raw:\"us\"")
+ insist { results["hits"]["total"] } == 1
+ insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+
+ # partial or terms should not work.
+ results = @es.search(:q => "country.raw:\"u\"")
+ insist { results["hits"]["total"] } == 0
+ end
+
+ it "make [geoip][location] a geo_point" do
+ expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
+ end
+
+ it "aggregate .raw results correctly " do
+ results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.raw" } } } })["aggregations"]["my_agg"]
+ terms = results["buckets"].collect { |b| b["key"] }
+
+ insist { terms }.include?("us")
+
+ # 'at' is a stopword, make sure stopwords are not ignored.
+ insist { terms }.include?("at")
  end
  end
+
+