logstash-output-elasticsearch-leprechaun-fork 1.0.8

Sign up to get free protection for your applications and to get access to all the features.
Files changed (45) hide show
  1. checksums.yaml +7 -0
  2. data/.gitignore +5 -0
  3. data/CHANGELOG.md +30 -0
  4. data/CONTRIBUTORS +31 -0
  5. data/Gemfile +3 -0
  6. data/LICENSE +13 -0
  7. data/NOTICE.TXT +5 -0
  8. data/README.md +98 -0
  9. data/Rakefile +1 -0
  10. data/lib/logstash-output-elasticsearch_jars.rb +5 -0
  11. data/lib/logstash/outputs/elasticsearch.rb +784 -0
  12. data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +41 -0
  13. data/lib/logstash/outputs/elasticsearch/protocol.rb +339 -0
  14. data/logstash-output-elasticsearch.gemspec +40 -0
  15. data/spec/es_spec_helper.rb +65 -0
  16. data/spec/integration/outputs/elasticsearch/node_spec.rb +36 -0
  17. data/spec/integration/outputs/index_spec.rb +90 -0
  18. data/spec/integration/outputs/retry_spec.rb +156 -0
  19. data/spec/integration/outputs/routing_spec.rb +114 -0
  20. data/spec/integration/outputs/secure_spec.rb +113 -0
  21. data/spec/integration/outputs/templates_spec.rb +97 -0
  22. data/spec/integration/outputs/transport_create_spec.rb +94 -0
  23. data/spec/integration/outputs/update_spec.rb +87 -0
  24. data/spec/unit/outputs/elasticsearch/protocol_spec.rb +54 -0
  25. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +59 -0
  26. data/spec/unit/outputs/elasticsearch_spec.rb +183 -0
  27. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +82 -0
  28. data/vendor/jar-dependencies/runtime-jars/antlr-runtime-3.5.jar +0 -0
  29. data/vendor/jar-dependencies/runtime-jars/asm-4.1.jar +0 -0
  30. data/vendor/jar-dependencies/runtime-jars/asm-commons-4.1.jar +0 -0
  31. data/vendor/jar-dependencies/runtime-jars/elasticsearch-1.7.0.jar +0 -0
  32. data/vendor/jar-dependencies/runtime-jars/lucene-analyzers-common-4.10.4.jar +0 -0
  33. data/vendor/jar-dependencies/runtime-jars/lucene-core-4.10.4.jar +0 -0
  34. data/vendor/jar-dependencies/runtime-jars/lucene-grouping-4.10.4.jar +0 -0
  35. data/vendor/jar-dependencies/runtime-jars/lucene-highlighter-4.10.4.jar +0 -0
  36. data/vendor/jar-dependencies/runtime-jars/lucene-join-4.10.4.jar +0 -0
  37. data/vendor/jar-dependencies/runtime-jars/lucene-memory-4.10.4.jar +0 -0
  38. data/vendor/jar-dependencies/runtime-jars/lucene-misc-4.10.4.jar +0 -0
  39. data/vendor/jar-dependencies/runtime-jars/lucene-queries-4.10.4.jar +0 -0
  40. data/vendor/jar-dependencies/runtime-jars/lucene-queryparser-4.10.4.jar +0 -0
  41. data/vendor/jar-dependencies/runtime-jars/lucene-sandbox-4.10.4.jar +0 -0
  42. data/vendor/jar-dependencies/runtime-jars/lucene-spatial-4.10.4.jar +0 -0
  43. data/vendor/jar-dependencies/runtime-jars/lucene-suggest-4.10.4.jar +0 -0
  44. data/vendor/jar-dependencies/runtime-jars/spatial4j-0.4.1.jar +0 -0
  45. metadata +246 -0
@@ -0,0 +1,97 @@
1
require_relative "../../../spec/es_spec_helper"

# Integration checks for the dynamic template shipped with the plugin,
# exercised over both the transport and http protocols.
describe "index template expected behavior", :integration => true do
  %w(transport http).each do |protocol|
    context "with protocol => #{protocol}" do

      subject! do
        require "logstash/outputs/elasticsearch"
        LogStash::Outputs::ElasticSearch.new(
          "manage_template"    => true,
          "template_overwrite" => true,
          "protocol"           => protocol,
          "host"               => get_host().to_s,
          "port"               => get_port(protocol).to_s
        )
      end

      before :each do
        require "elasticsearch"

        # Wipe any templates and indexes left over from earlier runs.
        @es = get_client
        @es.indices.delete_template(:name => "*")
        # This can fail if there are no indexes, ignore failure.
        @es.indices.delete(:index => "*") rescue nil

        subject.register

        # Seed a small, varied corpus so every expectation below has data.
        [
          { "message" => "sample message here" },
          { "somevalue" => 100 },
          { "somevalue" => 10 },
          { "somevalue" => 1 },
          { "country" => "us" },
          { "country" => "at" },
          { "geoip" => { "location" => [ 0.0, 0.0 ] } }
        ].each { |fields| subject.receive(LogStash::Event.new(fields)) }
        subject.buffer_flush(:final => true)
        @es.indices.refresh

        # Block until all seven documents are searchable, or give up.
        Stud::try(20.times) do
          r = @es.search
          insist { r["hits"]["total"] } == 7
        end
      end

      it "permits phrase searching on string fields" do
        results = @es.search(:q => 'message:"sample message"')
        insist { results["hits"]["total"] } == 1
        insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
      end

      it "numbers dynamically map to a numeric type and permit range queries" do
        results = @es.search(:q => "somevalue:[5 TO 105]")
        insist { results["hits"]["total"] } == 2

        values = results["hits"]["hits"].collect { |hit| hit["_source"]["somevalue"] }
        insist { values }.include?(10)
        insist { values }.include?(100)
        reject { values }.include?(1)
      end

      it "does not create .raw field for the message field" do
        results = @es.search(:q => 'message.raw:"sample message here"')
        insist { results["hits"]["total"] } == 0
      end

      it "creates .raw field from any string field which is not_analyzed" do
        results = @es.search(:q => 'country.raw:"us"')
        insist { results["hits"]["total"] } == 1
        insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"

        # Partial terms must not match against the not_analyzed .raw field.
        results = @es.search(:q => 'country.raw:"u"')
        insist { results["hits"]["total"] } == 0
      end

      it "make [geoip][location] a geo_point" do
        geo_filter = { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } }
        results = @es.search(:body => { "filter" => geo_filter })
        insist { results["hits"]["total"] } == 1
        insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
      end

      it "should index stopwords like 'at' " do
        aggregation = { "my_agg" => { "terms" => { "field" => "country" } } }
        results = @es.search(:body => { "aggregations" => aggregation })["aggregations"]["my_agg"]
        terms = results["buckets"].collect { |bucket| bucket["key"] }

        insist { terms }.include?("us")

        # 'at' is a stopword, make sure stopwords are not ignored.
        insist { terms }.include?("at")
      end
    end
  end
end
@@ -0,0 +1,94 @@
1
require_relative "../../../spec/es_spec_helper"

# Integration specs for the "create" and "create_unless_exists" bulk actions
# of the elasticsearch output when running over the transport protocol.
describe "transport client create actions", :integration => true do
  require "logstash/outputs/elasticsearch"
  require "elasticsearch"

  # Build an output configured for the transport protocol against the test
  # cluster. `action` selects the bulk action; `id`, when given, pins the
  # document id (omitted from the settings otherwise).
  def get_es_output(action, id = nil)
    settings = {
      "manage_template" => true,
      "index" => "logstash-create",
      "template_overwrite" => true,
      "protocol" => "transport",
      "host" => get_host(),
      "port" => get_port('transport'),
      "action" => action
    }
    settings['document_id'] = id unless id.nil?
    LogStash::Outputs::ElasticSearch.new(settings)
  end

  before :each do
    @es = get_client
    # Delete all templates first.
    # Clean ES of data before we start.
    @es.indices.delete_template(:name => "*")
    # This can fail if there are no indexes, ignore failure.
    @es.indices.delete(:index => "*") rescue nil
  end

  context "when action => create" do
    it "should create new documents with or without id" do
      subject = get_es_output("create", "id123")
      subject.register
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.buffer_flush(:final => true)
      @es.indices.refresh
      # Wait or fail until everything's indexed.
      Stud::try(3.times) do
        r = @es.search
        insist { r["hits"]["total"] } == 1
      end
    end

    it "should create new documents without id" do
      subject = get_es_output("create")
      subject.register
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.buffer_flush(:final => true)
      @es.indices.refresh
      # Wait or fail until everything's indexed.
      Stud::try(3.times) do
        r = @es.search
        insist { r["hits"]["total"] } == 1
      end
    end
  end

  context "when action => create_unless_exists" do
    it "should create new documents when specific id is specified" do
      subject = get_es_output("create_unless_exists", "id123")
      subject.register
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.buffer_flush(:final => true)
      @es.indices.refresh
      # Wait or fail until everything's indexed.
      Stud::try(3.times) do
        r = @es.search
        insist { r["hits"]["total"] } == 1
      end
    end

    it "should fail to create a document when no id is specified" do
      # Hand-build the action tuple so we can force a nil _id past the
      # config layer straight into flush.
      event = LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0})
      action = ["create_unless_exists", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event]
      subject = get_es_output(action[0])
      subject.register
      # NOTE(review): bare raise_error matches any StandardError; consider
      # pinning the expected exception class.
      expect { subject.flush([action]) }.to raise_error
    end

    it "should unsuccessfully submit two records with the same document id" do
      subject = get_es_output("create_unless_exists", "id123")
      subject.register
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.receive(LogStash::Event.new("message" => "sample message here")) # 400 status failure (same id)
      subject.buffer_flush(:final => true)
      @es.indices.refresh
      # Wait or fail until everything's indexed; only the first document
      # should have been accepted.
      Stud::try(3.times) do
        r = @es.search
        insist { r["hits"]["total"] } == 1
      end
    end
  end
end
@@ -0,0 +1,87 @@
1
require_relative "../../../spec/es_spec_helper"

# Integration specs for the "update" action (plain update, upsert, and
# doc_as_upsert) across the node, transport, and http protocols.
describe "all protocols update actions", :integration => true do
  require "logstash/outputs/elasticsearch"
  require "elasticsearch"

  # Build an output configured for `action => update`. Optional arguments:
  # `id` pins the document id, `upsert` supplies an upsert document (JSON
  # string), and `doc_as_upsert` enables using the event itself as upsert.
  def get_es_output( protocol, id = nil, upsert = nil, doc_as_upsert=nil)
    settings = {
      "manage_template" => true,
      "index" => "logstash-update",
      "template_overwrite" => true,
      "protocol" => protocol,
      "host" => get_host(),
      "port" => get_port(protocol),
      "action" => "update"
    }
    settings['upsert'] = upsert unless upsert.nil?
    settings['document_id'] = id unless id.nil?
    settings['doc_as_upsert'] = doc_as_upsert unless doc_as_upsert.nil?
    LogStash::Outputs::ElasticSearch.new(settings)
  end

  before :each do
    @es = get_client
    # Delete all templates first.
    # Clean ES of data before we start.
    @es.indices.delete_template(:name => "*")
    # This can fail if there are no indexes, ignore failure.
    @es.indices.delete(:index => "*") rescue nil
    # Seed one known document ("123") that the update specs mutate.
    @es.index(
      :index => 'logstash-update',
      :type => 'logs',
      :id => "123",
      :body => { :message => 'Test' }
    )
    @es.indices.refresh
  end

  ["node", "transport", "http"].each do |protocol|
    context "update only with #{protocol} protocol" do
      it "should fail without a document_id" do
        event = LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0})
        action = ["update", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event]
        subject = get_es_output(protocol)
        subject.register
        expect { subject.flush([action]) }.to raise_error
      end

      it "should not create new document" do
        # Updating an id that does not exist must not upsert it.
        subject = get_es_output(protocol, "456")
        subject.register
        subject.receive(LogStash::Event.new("message" => "sample message here"))
        subject.buffer_flush(:final => true)
        expect {@es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
      end

      it "should update existing document" do
        subject = get_es_output(protocol, "123")
        subject.register
        subject.receive(LogStash::Event.new("message" => "updated message here"))
        subject.buffer_flush(:final => true)
        r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
        insist { r["_source"]["message"] } == 'updated message here'
      end
    end

    context "upsert with #{protocol} protocol" do
      it "should create new documents with upsert content" do
        # The configured upsert body wins over the event for a missing id.
        subject = get_es_output(protocol, "456", '{"message": "upsert message"}')
        subject.register
        subject.receive(LogStash::Event.new("message" => "sample message here"))
        subject.buffer_flush(:final => true)
        r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
        insist { r["_source"]["message"] } == 'upsert message'
      end

      it "should create new documents with event/doc as upsert" do
        subject = get_es_output(protocol, "456", nil, true)
        subject.register
        subject.receive(LogStash::Event.new("message" => "sample message here"))
        subject.buffer_flush(:final => true)
        r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
        insist { r["_source"]["message"] } == 'sample message here'
      end
    end
  end
end
@@ -0,0 +1,54 @@
1
require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/elasticsearch/protocol"
require "java"

# Unit specs for normalize_bulk_response on the native (Java) node client.
describe LogStash::Outputs::Elasticsearch::Protocols::NodeClient do
  context "successful" do
    it "should map correctly" do
      # One successful item per bulk action type, wrapped in a BulkResponse;
      # the normalized hash must report no errors.
      idx_resp = org.elasticsearch.action.index.IndexResponse.new("my_index", "my_type", "my_id", 123, true)
      upd_resp = org.elasticsearch.action.update.UpdateResponse.new("my_index", "my_type", "my_id", 123, false)
      del_resp = org.elasticsearch.action.delete.DeleteResponse.new("my_index", "my_type", "my_id", 123, true)
      items = [
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", idx_resp),
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "update", upd_resp),
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", del_resp)
      ]
      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new(items, 0)
      normalized = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
      insist { normalized } == {"errors" => false}
    end
  end

  context "contains failures" do
    it "should map correctly" do
      # A single shared Failure attached to all three action types should
      # surface one status and one message per item.
      failure = org.elasticsearch.action.bulk.BulkItemResponse::Failure.new("my_index", "my_type", "my_id", "error message", org.elasticsearch.rest.RestStatus::BAD_REQUEST)
      items = [
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", failure),
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "update", failure),
        org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", failure)
      ]
      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new(items, 0)
      normalized = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
      insist { normalized } == {"errors" => true, "statuses" => [400, 400, 400], "error_messages" => ["error message", "error message", "error message"]}
    end
  end
end
32
+
33
# Unit specs for normalize_bulk_response on the HTTP client, which receives
# the already-parsed JSON bulk response as a plain Ruby hash.
describe LogStash::Outputs::Elasticsearch::Protocols::HTTPClient do
  context "successful" do
    it "should map correctly" do
      bulk_response = {
        "took" => 74,
        "errors" => false,
        "items" => [
          { "create" => { "_index" => "logstash-2014.11.17",
                          "_type" => "logs",
                          "_id" => "AUxTS2C55Jrgi-hC6rQF",
                          "_version" => 1,
                          "status" => 201 } }
        ]
      }
      normalized = LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(bulk_response)
      insist { normalized } == {"errors"=> false}
    end
  end

  context "contains failures" do
    it "should map correctly" do
      bulk_response = {
        "took" => 71,
        "errors" => true,
        "items" => [
          { "create" => { "_index" => "logstash-2014.11.17",
                          "_type" => "logs",
                          "_id" => "AUxTQ_OI5Jrgi-hC6rQB",
                          "status" => 400,
                          "error" => "MapperParsingException[failed to parse]..." } }
        ]
      }
      normalized = LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(bulk_response)
      insist { normalized } == {"errors"=> true, "statuses"=> [400], "error_messages" => ["MapperParsingException[failed to parse]..."]}
    end
  end
end
@@ -0,0 +1,59 @@
1
require_relative "../../../spec/es_spec_helper"
require 'stud/temporary'
require 'elasticsearch'
require "logstash/outputs/elasticsearch"

# Specs for the `proxy` option: string and hash forms are forwarded to the
# Elasticsearch client's transport options; anything else is rejected.
describe "Proxy option" do
  let(:settings) do
    { "protocol" => "http", "host" => "node01", "proxy" => proxy }
  end

  subject { LogStash::Outputs::ElasticSearch.new(settings) }

  before do
    # Stub out client construction so register never opens a connection.
    allow(::Elasticsearch::Client).to receive(:new).with(any_args)
  end

  describe "valid configs" do
    before { subject.register }

    context "when specified as a string" do
      let(:proxy) { "http://127.0.0.1:1234" }

      it "should set the proxy to the exact value" do
        expect(::Elasticsearch::Client).to have_received(:new) do |options|
          expect(options[:transport_options][:proxy]).to eql(proxy)
        end
      end
    end

    context "when specified as a hash" do
      let(:proxy) { {"host" => "127.0.0.1", "protocol" => "http"} }

      it "should pass through the proxy values as symbols" do
        expected = {:host => proxy["host"], :protocol => proxy["protocol"]}
        expect(::Elasticsearch::Client).to have_received(:new) do |options|
          expect(options[:transport_options][:proxy]).to eql(expected)
        end
      end
    end
  end

  describe "invalid configs" do
    let(:proxy) { ["bad", "stuff"] }

    it "should have raised an exception" do
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end
  end

end
@@ -0,0 +1,183 @@
1
require_relative "../../../spec/es_spec_helper"

# Unit specs for the elasticsearch output plugin: registration, auth,
# HTTP client path handling, and transport-protocol client settings.
describe "outputs/elasticsearch" do
  context "registration" do
    it "should register" do
      output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")
      # register will try to load jars and raise if it cannot find jars
      expect {output.register}.to_not raise_error
    end

    it "should fail to register when protocol => http, action => create_unless_exists" do
      # create_unless_exists is only supported by the native protocols.
      output = LogStash::Plugin.lookup("output", "elasticsearch").new("protocol" => "http", "action" => "create_unless_exists")
      expect {output.register}.to raise_error
    end
  end

  describe "Authentication option" do
    ["node", "transport"].each do |protocol|
      context "with protocol => #{protocol}" do
        subject do
          require "logstash/outputs/elasticsearch"
          settings = {
            "protocol" => protocol,
            "node_name" => "logstash",
            "cluster" => "elasticsearch",
            "host" => "node01",
            "user" => "test",
            "password" => "test"
          }
          next LogStash::Outputs::ElasticSearch.new(settings)
        end

        # user/password auth is not supported on the native protocols.
        it "should fail in register" do
          expect {subject.register}.to raise_error
        end
      end
    end
  end

  describe "http client create" do
    require "logstash/outputs/elasticsearch"
    require "elasticsearch"

    let(:options) {
      {
        "protocol" => "http",
        "index" => "my-index",
        "host" => "localhost",
        "path" => "some-path"
      }
    }

    let(:eso) {LogStash::Outputs::ElasticSearch.new(options)}

    # The host entry actually handed to the underlying Manticore transport.
    let(:manticore_host) {
      eso.client.first.send(:client).transport.options[:host].first
    }

    around(:each) do |block|
      # register spawns a background thread; kill it after each example.
      thread = eso.register
      block.call()
      thread.kill()
    end

    describe "with path" do
      it "should properly create a URI with the path" do
        expect(eso.path).to eql(options["path"])
      end


      it "should properly set the path on the HTTP client adding slashes" do
        expect(manticore_host).to include("/" + options["path"] + "/")
      end

      context "with extra slashes" do
        let(:path) { "/slashed-path/ "}
        let(:eso) {
          LogStash::Outputs::ElasticSearch.new(options.merge("path" => "/some-path/"))
        }

        it "should properly set the path on the HTTP client without adding slashes" do
          expect(manticore_host).to include(options["path"])
        end
      end


    end
  end

  describe "transport protocol" do
    context "host not configured" do
      subject do
        require "logstash/outputs/elasticsearch"
        settings = {
          "protocol" => "transport",
          "node_name" => "mynode"
        }
        next LogStash::Outputs::ElasticSearch.new(settings)
      end

      it "should set host to localhost" do
        expect(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:new).with({
          :host => "localhost",
          :port => "9300-9305",
          :protocol => "transport",
          :client_settings => {
            "client.transport.sniff" => false,
            "node.name" => "mynode"
          }
        })
        subject.register
      end
    end

    context "sniffing => true" do
      subject do
        require "logstash/outputs/elasticsearch"
        settings = {
          "host" => "node01",
          "protocol" => "transport",
          "sniffing" => true
        }
        next LogStash::Outputs::ElasticSearch.new(settings)
      end

      it "should set the sniffing property to true" do
        # Stub the real client so register never connects anywhere.
        expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
        subject.register
        client = subject.instance_eval("@current_client")
        settings = client.instance_eval("@settings")

        expect(settings.build.getAsMap["client.transport.sniff"]).to eq("true")
      end
    end
  end

  context "allow_mapping_mangling" do
    context 'elasticsearch borks on mapping mismatch' do
      context 'logstash was configured to allow mapping mangling' do
        subject do
          require "logstash/outputs/elasticsearch"
          settings = {
            "host" => "node01",
            "protocol" => "transport"
          }
          next LogStash::Outputs::ElasticSearch.new(settings)
        end

        # NOTE(review): this example has no assertions yet — it only
        # registers the plugin.
        it 'should identify mapping mismatch errors correctly' do
          error_message = "MapperParsingException[failed to parse]...dsdfsf"
          expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
          subject.register
          client = subject.instance_eval("@current_client")
        end

        # NOTE(review): placeholder spec; body is just `true`.
        it 'should retry with a semi-randomized type when configured to do so' do
          true
        end
      end
    end
  end

  context "sniffing => false" do
    subject do
      require "logstash/outputs/elasticsearch"
      settings = {
        "host" => "node01",
        "protocol" => "transport",
        "sniffing" => false
      }
      next LogStash::Outputs::ElasticSearch.new(settings)
    end

    it "should set the sniffing property to false" do
      expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
      subject.register
      client = subject.instance_eval("@current_client")
      settings = client.instance_eval("@settings")

      expect(settings.build.getAsMap["client.transport.sniff"]).to eq("false")
    end
  end
end