logstash-output-elasticsearch-test 10.3.0-x86_64-linux

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +397 -0
  3. data/CONTRIBUTORS +33 -0
  4. data/Gemfile +15 -0
  5. data/LICENSE +13 -0
  6. data/NOTICE.TXT +5 -0
  7. data/README.md +106 -0
  8. data/docs/index.asciidoc +899 -0
  9. data/lib/logstash/outputs/elasticsearch/common.rb +441 -0
  10. data/lib/logstash/outputs/elasticsearch/common_configs.rb +167 -0
  11. data/lib/logstash/outputs/elasticsearch/default-ilm-policy.json +14 -0
  12. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es2x.json +95 -0
  13. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es5x.json +46 -0
  14. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es6x.json +45 -0
  15. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json +44 -0
  16. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es8x.json +44 -0
  17. data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +131 -0
  18. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +495 -0
  19. data/lib/logstash/outputs/elasticsearch/http_client.rb +432 -0
  20. data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +159 -0
  21. data/lib/logstash/outputs/elasticsearch/ilm.rb +113 -0
  22. data/lib/logstash/outputs/elasticsearch/template_manager.rb +61 -0
  23. data/lib/logstash/outputs/elasticsearch.rb +263 -0
  24. data/logstash-output-elasticsearch.gemspec +33 -0
  25. data/spec/es_spec_helper.rb +189 -0
  26. data/spec/fixtures/_nodes/2x_1x.json +27 -0
  27. data/spec/fixtures/_nodes/5x_6x.json +81 -0
  28. data/spec/fixtures/_nodes/7x.json +92 -0
  29. data/spec/fixtures/htpasswd +2 -0
  30. data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
  31. data/spec/fixtures/scripts/groovy/scripted_update.groovy +2 -0
  32. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +2 -0
  33. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +2 -0
  34. data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
  35. data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
  36. data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
  37. data/spec/fixtures/template-with-policy-es6x.json +48 -0
  38. data/spec/fixtures/template-with-policy-es7x.json +45 -0
  39. data/spec/fixtures/test_certs/ca/ca.crt +32 -0
  40. data/spec/fixtures/test_certs/ca/ca.key +51 -0
  41. data/spec/fixtures/test_certs/test.crt +36 -0
  42. data/spec/fixtures/test_certs/test.key +51 -0
  43. data/spec/integration/outputs/compressed_indexing_spec.rb +69 -0
  44. data/spec/integration/outputs/create_spec.rb +67 -0
  45. data/spec/integration/outputs/delete_spec.rb +65 -0
  46. data/spec/integration/outputs/groovy_update_spec.rb +150 -0
  47. data/spec/integration/outputs/ilm_spec.rb +531 -0
  48. data/spec/integration/outputs/index_spec.rb +178 -0
  49. data/spec/integration/outputs/index_version_spec.rb +102 -0
  50. data/spec/integration/outputs/ingest_pipeline_spec.rb +74 -0
  51. data/spec/integration/outputs/metrics_spec.rb +70 -0
  52. data/spec/integration/outputs/no_es_on_startup_spec.rb +58 -0
  53. data/spec/integration/outputs/painless_update_spec.rb +189 -0
  54. data/spec/integration/outputs/parent_spec.rb +102 -0
  55. data/spec/integration/outputs/retry_spec.rb +169 -0
  56. data/spec/integration/outputs/routing_spec.rb +61 -0
  57. data/spec/integration/outputs/sniffer_spec.rb +133 -0
  58. data/spec/integration/outputs/templates_5x_spec.rb +98 -0
  59. data/spec/integration/outputs/templates_spec.rb +98 -0
  60. data/spec/integration/outputs/update_spec.rb +116 -0
  61. data/spec/support/elasticsearch/api/actions/delete_ilm_policy.rb +19 -0
  62. data/spec/support/elasticsearch/api/actions/get_alias.rb +18 -0
  63. data/spec/support/elasticsearch/api/actions/get_ilm_policy.rb +18 -0
  64. data/spec/support/elasticsearch/api/actions/put_alias.rb +24 -0
  65. data/spec/support/elasticsearch/api/actions/put_ilm_policy.rb +25 -0
  66. data/spec/unit/http_client_builder_spec.rb +185 -0
  67. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +149 -0
  68. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +274 -0
  69. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +250 -0
  70. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +25 -0
  71. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +72 -0
  72. data/spec/unit/outputs/elasticsearch_spec.rb +675 -0
  73. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +82 -0
  74. data/spec/unit/outputs/error_whitelist_spec.rb +54 -0
  75. metadata +300 -0
data/spec/integration/outputs/parent_spec.rb
@@ -0,0 +1,102 @@
+ require_relative "../../../spec/es_spec_helper"
+ require "logstash/outputs/elasticsearch"
+
+ if ESHelper.es_version_satisfies?(">= 5.6")
+   context "when using elasticsearch 5.6 and above", :integration => true do
+
+     shared_examples "a join field based parent indexer" do
+       let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+
+       let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+
+       let(:event_count) { 10000 + rand(500) }
+       let(:parent) { "not_implemented" }
+       let(:config) { "not_implemented" }
+       let(:parent_id) { "test" }
+       let(:join_field) { "join_field" }
+       let(:parent_relation) { "parent_type" }
+       let(:child_relation) { "child_type" }
+       let(:default_headers) {
+         {"Content-Type" => "application/json"}
+       }
+       subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+       before do
+         # Add mapping and a parent document
+         index_url = "http://#{get_host_port()}/#{index}"
+
+         properties = {
+           "properties" => {
+             join_field => {
+               "type" => "join",
+               "relations" => { parent_relation => child_relation }
+             }
+           }
+         }
+
+         mapping = ESHelper.es_version_satisfies?('<7') ? { "mappings" => { type => properties } }
+                                                        : { "mappings" => properties }
+
+         if ESHelper.es_version_satisfies?('<6')
+           mapping.merge!({
+             "settings" => {
+               "mapping.single_type" => true
+             }})
+         end
+         Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
+         pdoc = { "message" => "ohayo", join_field => parent_relation }
+         Manticore.put("#{index_url}/#{type}/#{parent_id}", {:body => pdoc.to_json, :headers => default_headers}).call
+
+         subject.register
+         subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => parent_id, "message" => "Hello World!", join_field => child_relation) })
+       end
+
+
+       it "ships events" do
+         index_url = "http://#{get_host_port()}/#{index}"
+
+         Manticore.post("#{index_url}/_refresh").call
+
+         # Wait until all events are available.
+         Stud::try(10.times) do
+           query = { "query" => { "has_parent" => { "parent_type" => parent_relation, "query" => { "match_all" => { } } } } }
+           response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
+           data = response.body
+           result = LogStash::Json.load(data)
+           cur_count = result["count"]
+           expect(cur_count).to eq(event_count)
+         end
+       end
+     end
+
+     describe "(http protocol) index events with static parent" do
+       it_behaves_like 'a join field based parent indexer' do
+         let(:config) {
+           {
+             "hosts" => get_host_port,
+             "index" => index,
+             "parent" => parent_id,
+             "document_type" => type,
+             "join_field" => join_field,
+             "manage_template" => false
+           }
+         }
+       end
+     end
+
+     describe "(http_protocol) index events with fieldref in parent value" do
+       it_behaves_like 'a join field based parent indexer' do
+         let(:config) {
+           {
+             "hosts" => get_host_port,
+             "index" => index,
+             "parent" => "%{link_to}",
+             "document_type" => type,
+             "join_field" => join_field,
+             "manage_template" => false
+           }
+         }
+       end
+     end
+   end
+ end
data/spec/integration/outputs/retry_spec.rb
@@ -0,0 +1,169 @@
+ require "logstash/outputs/elasticsearch"
+ require_relative "../../../spec/es_spec_helper"
+
+ describe "failures in bulk class expected behavior", :integration => true do
+   let(:template) { '{"template" : "not important, will be updated by :index"}' }
+   let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
+   let(:action1) { ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1]) }
+   let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
+   let(:action2) { ESHelper.action_for_version(["index", {:_id=>nil, routing_field_name =>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event2]) }
+   let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }
+
+   def mock_actions_with_response(*resp)
+     raise ArgumentError, "Cannot mock actions until subject is registered and has a client!" unless subject.client
+
+     expanded_responses = resp.map do |resp|
+       items = resp["statuses"] && resp["statuses"].map do |status|
+         {"create" => {"status" => status, "error" => "Error for #{status}"}}
+       end
+
+       {
+         "errors" => resp["errors"],
+         "items" => items
+       }
+     end
+
+     allow(subject.client).to receive(:bulk).and_return(*expanded_responses)
+   end
+
+   subject! do
+     settings = {
+       "manage_template" => true,
+       "index" => "logstash-2014.11.17",
+       "template_overwrite" => true,
+       "hosts" => get_host_port(),
+       "retry_max_interval" => 64,
+       "retry_initial_interval" => 2
+     }
+     next LogStash::Outputs::ElasticSearch.new(settings)
+   end
+
+   before :each do
+     # Delete all templates first.
+     require "elasticsearch"
+     allow(Stud).to receive(:stoppable_sleep)
+
+     # Clean ES of data before we start.
+     @es = get_client
+     @es.indices.delete_template(:name => "*")
+     @es.indices.delete(:index => "*")
+     @es.indices.refresh
+   end
+
+   after :each do
+     subject.close
+   end
+
+   it "should retry exactly once if all bulk actions are successful" do
+     expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
+     subject.register
+     mock_actions_with_response({"errors" => false})
+     subject.multi_receive([event1, event2])
+   end
+
+   it "retry exceptions within the submit body" do
+     call_count = 0
+     subject.register
+
+     expect(subject.client).to receive(:bulk).with(anything).exactly(3).times do
+       if (call_count += 1) <= 2
+         raise "error first two times"
+       else
+         {"errors" => false}
+       end
+     end
+
+     subject.multi_receive([event1])
+   end
+
+   it "should retry actions with response status of 503" do
+     expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original
+     expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original
+     expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original
+
+     subject.register
+     mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]},
+                                {"errors" => true, "statuses" => [200, 503]},
+                                {"errors" => false})
+
+     subject.multi_receive([event1, event1, event1, event2])
+   end
+
+   retryable_codes = [429, 502, 503]
+
+   retryable_codes.each do |code|
+     it "should retry actions with response status of #{code}" do
+       subject.register
+
+       mock_actions_with_response({"errors" => true, "statuses" => [code]},
+                                  {"errors" => false})
+       expect(subject).to receive(:submit).with([action1]).twice.and_call_original
+
+       subject.multi_receive([event1])
+     end
+   end
+
+   it "should retry an event infinitely until a non retryable status occurs" do
+     expect(subject).to receive(:submit).with([action1]).exactly(6).times.and_call_original
+     subject.register
+
+     mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [400]})
+
+     subject.multi_receive([event1])
+   end
+
+   it "should sleep for an exponentially increasing amount of time on each retry, capped by the max" do
+     [2, 4, 8, 16, 32, 64, 64].each_with_index do |interval, i|
+       expect(Stud).to receive(:stoppable_sleep).with(interval).ordered
+     end
+
+     subject.register
+
+     mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [429]},
+                                {"errors" => true, "statuses" => [400]})
+
+     subject.multi_receive([event1])
+   end
+
+   it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do
+     subject.register
+     expect(subject).to receive(:submit).once.and_call_original
+     subject.multi_receive([invalid_event])
+     subject.close
+
+     @es.indices.refresh
+     r = @es.search
+     expect(r).to have_hits(0)
+   end
+
+   it "successful requests should not be appended to retry queue" do
+     expect(subject).to receive(:submit).once.and_call_original
+
+     subject.register
+     subject.multi_receive([event1])
+     subject.close
+     @es.indices.refresh
+     r = @es.search
+     expect(r).to have_hits(1)
+   end
+
+   it "should only index proper events" do
+     subject.register
+     subject.multi_receive([invalid_event, event1])
+     subject.close
+
+     @es.indices.refresh
+     r = @es.search
+     expect(r).to have_hits(1)
+   end
+ end
data/spec/integration/outputs/routing_spec.rb
@@ -0,0 +1,61 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ shared_examples "a routing indexer" do
+   let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:type) { 10.times.collect { rand(10).to_s }.join("") }
+   let(:event_count) { 10000 + rand(500) }
+   let(:routing) { "not_implemented" }
+   let(:config) { "not_implemented" }
+   subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+   before do
+     subject.register
+     event_count.times do
+       subject.multi_receive([LogStash::Event.new("message" => "test", "type" => type)])
+     end
+   end
+
+
+   it "ships events" do
+     index_url = "http://#{get_host_port()}/#{index}"
+
+     client = Manticore::Client.new
+     client.post("#{index_url}/_refresh").call
+
+     # Wait until all events are available.
+     Stud::try(10.times) do
+       data = ""
+
+       response = client.get("#{index_url}/_count?q=*&routing=#{routing}").call
+       result = LogStash::Json.load(response.body)
+       cur_count = result["count"]
+       expect(cur_count).to eq(event_count)
+     end
+   end
+ end
+
+ describe "(http protocol) index events with static routing", :integration => true do
+   it_behaves_like 'a routing indexer' do
+     let(:routing) { "test" }
+     let(:config) {
+       {
+         "hosts" => get_host_port,
+         "index" => index,
+         "routing" => routing
+       }
+     }
+   end
+ end
+
+ describe "(http_protocol) index events with fieldref in routing value", :integration => true do
+   it_behaves_like 'a routing indexer' do
+     let(:routing) { "test" }
+     let(:config) {
+       {
+         "hosts" => get_host_port,
+         "index" => index,
+         "routing" => "%{message}"
+       }
+     }
+   end
+ end
data/spec/integration/outputs/sniffer_spec.rb
@@ -0,0 +1,133 @@
+ require "logstash/devutils/rspec/spec_helper"
+ require_relative "../../../spec/es_spec_helper"
+ require "logstash/outputs/elasticsearch/http_client"
+ require "json"
+ require "socket"
+
+ describe "pool sniffer", :integration => true do
+   let(:logger) { Cabin::Channel.get }
+   let(:adapter) { LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter.new(logger) }
+   let(:es_host) { get_host_port.split(":").first }
+   let(:es_port) { get_host_port.split(":").last }
+   let(:es_ip) { IPSocket.getaddress(es_host) }
+   let(:initial_urls) { [::LogStash::Util::SafeURI.new("http://#{get_host_port}")] }
+   let(:options) do
+     {
+       :resurrect_delay => 2, # Shorten the delay a bit to speed up tests
+       :url_normalizer => proc {|u| u},
+       :metric => ::LogStash::Instrument::NullMetric.new(:dummy).namespace(:alsodummy)
+     }
+   end
+
+   subject { LogStash::Outputs::ElasticSearch::HttpClient::Pool.new(logger, adapter, initial_urls, options) }
+
+   describe("Simple sniff parsing") do
+     before(:each) { subject.start }
+
+     context "with single node" do
+       it "should execute a sniff without error" do
+         expect { subject.check_sniff }.not_to raise_error
+       end
+
+       it "should return single sniff URL" do
+         uris = subject.check_sniff
+
+         expect(uris.size).to eq(1)
+       end
+
+       it "should return the correct sniff URL" do
+         if ESHelper.es_version_satisfies?(">= 2", "<7")
+           # We do a more thorough check on these versions because we can more reliably guess the ip
+           uris = subject.check_sniff
+
+           expect(uris).to include(::LogStash::Util::SafeURI.new("//#{es_ip}:#{es_port}"))
+         else
+           # ES 1.x (and ES 7.x) returned the public hostname by default. This is hard to approximate
+           # so for ES 1.x and 7.x we don't check the *exact* hostname
+           skip
+         end
+       end
+     end
+   end
+
+   if ESHelper.es_version_satisfies?("<= 2")
+     describe("Complex sniff parsing ES 2x/1x") do
+       before(:each) do
+         response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/2x_1x.json"))
+         allow(subject).to receive(:perform_request).and_return([nil, { version: "2.0" }, response_double])
+         subject.start
+       end
+
+       context "with multiple nodes but single http-enabled data node" do
+         it "should execute a sniff without error" do
+           expect { subject.check_sniff }.not_to raise_error
+         end
+
+         it "should return one sniff URL" do
+           uris = subject.check_sniff
+
+           expect(uris.size).to eq(1)
+         end
+
+         it "should return the correct sniff URL" do
+           if ESHelper.es_version_satisfies?(">= 2")
+             # We do a more thorough check on these versions because we can more reliably guess the ip
+             uris = subject.check_sniff
+
+             expect(uris).to include(::LogStash::Util::SafeURI.new("http://localhost:9201"))
+           else
+             # ES 1.x returned the public hostname by default. This is hard to approximate
+             # so for ES 1.x we don't check the *exact* hostname
+             skip
+           end
+         end
+       end
+     end
+   end
+
+   if ESHelper.es_version_satisfies?(">= 7")
+     describe("Complex sniff parsing ES 7x") do
+       before(:each) do
+         response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/7x.json"))
+         allow(subject).to receive(:perform_request).and_return([nil, { version: "7.0" }, response_double])
+         subject.start
+       end
+
+       context "with mixed master-only, data-only, and data + master nodes" do
+         it "should execute a sniff without error" do
+           expect { subject.check_sniff }.not_to raise_error
+         end
+
+         it "should return the correct sniff URLs" do
+           # ie. with the master-only node, and with the node name correctly set.
+           uris = subject.check_sniff
+
+           expect(uris).to include(::LogStash::Util::SafeURI.new("//dev-masterdata:9201"), ::LogStash::Util::SafeURI.new("//dev-data:9202"))
+         end
+       end
+     end
+   end
+
+   if ESHelper.es_version_satisfies?(">= 5")
+     describe("Complex sniff parsing ES 6x/5x") do
+       before(:each) do
+         response_double = double("_nodes/http", body: File.read("spec/fixtures/_nodes/5x_6x.json"))
+         allow(subject).to receive(:perform_request).and_return([nil, { version: "5.0" }, response_double])
+         subject.start
+       end
+
+       context "with mixed master-only, data-only, and data + master nodes" do
+         it "should execute a sniff without error" do
+           expect { subject.check_sniff }.not_to raise_error
+         end
+
+         it "should return the correct sniff URLs" do
+           # ie. without the master-only node
+           uris = subject.check_sniff
+
+           expect(uris).to include(::LogStash::Util::SafeURI.new("//127.0.0.1:9201"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9202"), ::LogStash::Util::SafeURI.new("//127.0.0.1:9203"))
+         end
+       end
+     end
+   end
+ end
data/spec/integration/outputs/templates_5x_spec.rb
@@ -0,0 +1,98 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ if ESHelper.es_version_satisfies?(">= 5")
+   describe "index template expected behavior for 5.x", :integration => true do
+     subject! do
+       require "logstash/outputs/elasticsearch"
+       settings = {
+         "manage_template" => true,
+         "template_overwrite" => true,
+         "hosts" => "#{get_host_port()}"
+       }
+       next LogStash::Outputs::ElasticSearch.new(settings)
+     end
+
+     before :each do
+       # Delete all templates first.
+       require "elasticsearch"
+
+       # Clean ES of data before we start.
+       @es = get_client
+       @es.indices.delete_template(:name => "*")
+
+       # This can fail if there are no indexes, ignore failure.
+       @es.indices.delete(:index => "*") rescue nil
+
+       subject.register
+
+       subject.multi_receive([
+         LogStash::Event.new("message" => "sample message here"),
+         LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+         LogStash::Event.new("somevalue" => 100),
+         LogStash::Event.new("somevalue" => 10),
+         LogStash::Event.new("somevalue" => 1),
+         LogStash::Event.new("country" => "us"),
+         LogStash::Event.new("country" => "at"),
+         LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+       ])
+
+       @es.indices.refresh
+
+       # Wait or fail until everything's indexed.
+       Stud::try(20.times) do
+         r = @es.search
+         expect(r).to have_hits(8)
+       end
+     end
+
+     it "permits phrase searching on string fields" do
+       results = @es.search(:q => "message:\"sample message\"")
+       expect(results).to have_hits(1)
+       expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
+     end
+
+     it "numbers dynamically map to a numeric type and permit range queries" do
+       results = @es.search(:q => "somevalue:[5 TO 105]")
+       expect(results).to have_hits(2)
+
+       values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+       expect(values).to include(10)
+       expect(values).to include(100)
+       expect(values).to_not include(1)
+     end
+
+     it "does not create .keyword field for top-level message field" do
+       results = @es.search(:q => "message.keyword:\"sample message here\"")
+       expect(results).to have_hits(0)
+     end
+
+     it "creates .keyword field for nested message fields" do
+       results = @es.search(:q => "somemessage.message.keyword:\"sample nested message here\"")
+       expect(results).to have_hits(1)
+     end
+
+     it "creates .keyword field from any string field which is not_analyzed" do
+       results = @es.search(:q => "country.keyword:\"us\"")
+       expect(results).to have_hits(1)
+       expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
+
+       # Partial matches or term queries should not work.
+       results = @es.search(:q => "country.keyword:\"u\"")
+       expect(results).to have_hits(0)
+     end
+
+     it "makes [geoip][location] a geo_point" do
+       expect(field_properties_from_template("logstash", "geoip")["location"]["type"]).to eq("geo_point")
+     end
+
+     it "aggregates .keyword results correctly" do
+       results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
+       terms = results["buckets"].collect { |b| b["key"] }
+
+       expect(terms).to include("us")
+
+       # 'at' is a stopword; make sure stopwords are not ignored.
+       expect(terms).to include("at")
+     end
+   end
+ end
data/spec/integration/outputs/templates_spec.rb
@@ -0,0 +1,98 @@
+ require_relative "../../../spec/es_spec_helper"
+
+ if ESHelper.es_version_satisfies?("< 5")
+   describe "index template expected behavior", :integration => true do
+     subject! do
+       require "logstash/outputs/elasticsearch"
+       settings = {
+         "manage_template" => true,
+         "template_overwrite" => true,
+         "hosts" => "#{get_host_port()}"
+       }
+       next LogStash::Outputs::ElasticSearch.new(settings)
+     end
+
+     before :each do
+       # Delete all templates first.
+       require "elasticsearch"
+
+       # Clean ES of data before we start.
+       @es = get_client
+       @es.indices.delete_template(:name => "*")
+
+       # This can fail if there are no indexes, ignore failure.
+       @es.indices.delete(:index => "*") rescue nil
+
+       subject.register
+
+       subject.multi_receive([
+         LogStash::Event.new("message" => "sample message here"),
+         LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+         LogStash::Event.new("somevalue" => 100),
+         LogStash::Event.new("somevalue" => 10),
+         LogStash::Event.new("somevalue" => 1),
+         LogStash::Event.new("country" => "us"),
+         LogStash::Event.new("country" => "at"),
+         LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+       ])
+
+       @es.indices.refresh
+
+       # Wait or fail until everything's indexed.
+       Stud::try(20.times) do
+         r = @es.search
+         expect(r).to have_hits(8)
+       end
+     end
+
+     it "permits phrase searching on string fields" do
+       results = @es.search(:q => "message:\"sample message\"")
+       expect(results).to have_hits(1)
+       expect(results["hits"]["hits"][0]["_source"]["message"]).to eq("sample message here")
+     end
+
+     it "numbers dynamically map to a numeric type and permit range queries" do
+       results = @es.search(:q => "somevalue:[5 TO 105]")
+       expect(results).to have_hits(2)
+
+       values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+       expect(values).to include(10)
+       expect(values).to include(100)
+       expect(values).to_not include(1)
+     end
+
+     it "does not create .raw field for the message field" do
+       results = @es.search(:q => "message.raw:\"sample message here\"")
+       expect(results).to have_hits(0)
+     end
+
+     it "creates .raw field for nested message fields" do
+       results = @es.search(:q => "somemessage.message.raw:\"sample nested message here\"")
+       expect(results).to have_hits(1)
+     end
+
+     it "creates .raw field from any string field which is not_analyzed" do
+       results = @es.search(:q => "country.raw:\"us\"")
+       expect(results).to have_hits(1)
+       expect(results["hits"]["hits"][0]["_source"]["country"]).to eq("us")
+
+       # Partial matches or term queries should not work.
+       results = @es.search(:q => "country.raw:\"u\"")
+       expect(results).to have_hits(0)
+     end
+
+     it "makes [geoip][location] a geo_point" do
+       expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
+     end
+
+     it "aggregates .raw results correctly" do
+       results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.raw" } } } })["aggregations"]["my_agg"]
+       terms = results["buckets"].collect { |b| b["key"] }
+
+       expect(terms).to include("us")
+
+       # 'at' is a stopword; make sure stopwords are not ignored.
+       expect(terms).to include("at")
+     end
+   end
+ end