logstash-output-elasticsearch 1.1.0-java → 2.0.0.beta4-java
This diff shows the published contents of the two package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/CHANGELOG.md +10 -3
- data/README.md +4 -4
- data/lib/logstash/outputs/elasticsearch/http_client.rb +144 -0
- data/lib/logstash/outputs/elasticsearch.rb +93 -319
- data/logstash-output-elasticsearch.gemspec +1 -3
- data/spec/es_spec_helper.rb +38 -34
- data/spec/integration/outputs/create_spec.rb +56 -0
- data/spec/integration/outputs/index_spec.rb +5 -7
- data/spec/integration/outputs/retry_spec.rb +118 -126
- data/spec/integration/outputs/routing_spec.rb +5 -33
- data/spec/integration/outputs/secure_spec.rb +4 -9
- data/spec/integration/outputs/templates_spec.rb +85 -91
- data/spec/integration/outputs/update_spec.rb +41 -46
- data/spec/unit/outputs/elasticsearch/protocol_spec.rb +45 -36
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +3 -4
- data/spec/unit/outputs/elasticsearch_spec.rb +2 -151
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +38 -63
- metadata +67 -101
- data/lib/logstash/outputs/elasticsearch/protocol.rb +0 -333
- data/lib/logstash-output-elasticsearch_jars.rb +0 -5
- data/spec/integration/outputs/elasticsearch/node_spec.rb +0 -36
- data/spec/integration/outputs/transport_create_spec.rb +0 -94
- data/vendor/jar-dependencies/runtime-jars/antlr-runtime-3.5.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/asm-4.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/asm-commons-4.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/elasticsearch-1.7.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-analyzers-common-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-core-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-grouping-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-highlighter-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-join-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-memory-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-misc-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-queries-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-queryparser-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-sandbox-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-spatial-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-suggest-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/spatial4j-0.4.1.jar +0 -0
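The file list tells the main story: the vendored Elasticsearch and Lucene jars are gone along with the node/transport protocol layer (protocol.rb), replaced by a single HTTP client (http_client.rb), and the specs below show the `protocol` and singular `host` options giving way to `hosts`. A minimal migration sketch based only on the option names visible in the specs that follow (host and port values are illustrative):

    require "logstash/outputs/elasticsearch"

    # 1.x style, no longer valid in 2.0.0.beta4:
    #   "protocol" => "transport", "host" => "node01"
    output = LogStash::Outputs::ElasticSearch.new(
      "hosts" => "node01",   # note the plural
      "port"  => 9200
    )
    output.register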
data/spec/integration/outputs/templates_spec.rb
@@ -1,97 +1,91 @@
 require_relative "../../../spec/es_spec_helper"
 
 describe "index template expected behavior", :integration => true do
-    ... (old lines 4-41, the removed per-protocol setup, are not rendered in the source diff) ...
-      # Wait or fail until everything's indexed.
-      Stud::try(20.times) do
-        r = @es.search
-        insist { r["hits"]["total"] } == 7
-      end
-    end
-
-    it "permits phrase searching on string fields" do
-      results = @es.search(:q => "message:\"sample message\"")
-      insist { results["hits"]["total"] } == 1
-      insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
-    end
-
-    it "numbers dynamically map to a numeric type and permit range queries" do
-      results = @es.search(:q => "somevalue:[5 TO 105]")
-      insist { results["hits"]["total"] } == 2
-
-      values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
-      insist { values }.include?(10)
-      insist { values }.include?(100)
-      reject { values }.include?(1)
-    end
-
-    it "does not create .raw field for the message field" do
-      results = @es.search(:q => "message.raw:\"sample message here\"")
-      insist { results["hits"]["total"] } == 0
-    end
-
-    it "creates .raw field from any string field which is not_analyzed" do
-      results = @es.search(:q => "country.raw:\"us\"")
-      insist { results["hits"]["total"] } == 1
-      insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
-
-      # partial or terms should not work.
-      results = @es.search(:q => "country.raw:\"u\"")
-      insist { results["hits"]["total"] } == 0
-    end
-
-    it "make [geoip][location] a geo_point" do
-      results = @es.search(:body => { "filter" => { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } } })
-      insist { results["hits"]["total"] } == 1
-      insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
-    end
-
-    it "should index stopwords like 'at' " do
-      results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country" } } } })["aggregations"]["my_agg"]
-      terms = results["buckets"].collect { |b| b["key"] }
-
-      insist { terms }.include?("us")
-
-      # 'at' is a stopword, make sure stopwords are not ignored.
-      insist { terms }.include?("at")
-    end
+  subject! do
+    require "logstash/outputs/elasticsearch"
+    settings = {
+      "manage_template" => true,
+      "template_overwrite" => true,
+      "hosts" => "#{get_host()}",
+      "port" => "#{get_port()}"
+    }
+    next LogStash::Outputs::ElasticSearch.new(settings)
+  end
+
+  before :each do
+    # Delete all templates first.
+    require "elasticsearch"
+
+    # Clean ES of data before we start.
+    @es = get_client
+    @es.indices.delete_template(:name => "*")
+
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
+
+    subject.register
+
+    subject.receive(LogStash::Event.new("message" => "sample message here"))
+    subject.receive(LogStash::Event.new("somevalue" => 100))
+    subject.receive(LogStash::Event.new("somevalue" => 10))
+    subject.receive(LogStash::Event.new("somevalue" => 1))
+    subject.receive(LogStash::Event.new("country" => "us"))
+    subject.receive(LogStash::Event.new("country" => "at"))
+    subject.receive(LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }))
+    subject.buffer_flush(:final => true)
+    @es.indices.refresh
+
+    # Wait or fail until everything's indexed.
+    Stud::try(20.times) do
+      r = @es.search
+      insist { r["hits"]["total"] } == 7
     end
   end
+
+  it "permits phrase searching on string fields" do
+    results = @es.search(:q => "message:\"sample message\"")
+    insist { results["hits"]["total"] } == 1
+    insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
+  end
+
+  it "numbers dynamically map to a numeric type and permit range queries" do
+    results = @es.search(:q => "somevalue:[5 TO 105]")
+    insist { results["hits"]["total"] } == 2
+
+    values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
+    insist { values }.include?(10)
+    insist { values }.include?(100)
+    reject { values }.include?(1)
+  end
+
+  it "does not create .raw field for the message field" do
+    results = @es.search(:q => "message.raw:\"sample message here\"")
+    insist { results["hits"]["total"] } == 0
+  end
+
+  it "creates .raw field from any string field which is not_analyzed" do
+    results = @es.search(:q => "country.raw:\"us\"")
+    insist { results["hits"]["total"] } == 1
+    insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
+
+    # partial or terms should not work.
+    results = @es.search(:q => "country.raw:\"u\"")
+    insist { results["hits"]["total"] } == 0
+  end
+
+  it "make [geoip][location] a geo_point" do
+    results = @es.search(:body => { "filter" => { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } } })
+    insist { results["hits"]["total"] } == 1
+    insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
+  end
+
+  it "should index stopwords like 'at' " do
+    results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country" } } } })["aggregations"]["my_agg"]
+    terms = results["buckets"].collect { |b| b["key"] }
+
+    insist { terms }.include?("us")
+
+    # 'at' is a stopword, make sure stopwords are not ignored.
+    insist { terms }.include?("at")
+  end
 end
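The rewritten spec above also documents the plugin lifecycle the 2.0 integration tests rely on: instantiate with a settings hash, call register, feed events with receive, then force the buffered bulk request out with buffer_flush(:final => true). As a standalone sketch (host, port, and event values are illustrative):

    require "logstash/outputs/elasticsearch"

    out = LogStash::Outputs::ElasticSearch.new("hosts" => "127.0.0.1", "port" => 9200)
    out.register                                          # connect and install the index template
    out.receive(LogStash::Event.new("country" => "us"))   # events accumulate in the buffer
    out.buffer_flush(:final => true)                      # flush the pending bulk request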
data/spec/integration/outputs/update_spec.rb
@@ -4,14 +4,13 @@ describe "all protocols update actions", :integration => true do
   require "logstash/outputs/elasticsearch"
   require "elasticsearch"
 
-  def get_es_output(protocol, id = nil, upsert = nil, doc_as_upsert=nil)
+  def get_es_output(id = nil, upsert = nil, doc_as_upsert=nil)
     settings = {
       "manage_template" => true,
       "index" => "logstash-update",
       "template_overwrite" => true,
-      "protocol" => protocol,
-      "host" => get_host(),
-      "port" => get_port(protocol),
+      "hosts" => get_host(),
+      "port" => get_port(),
       "action" => "update"
     }
     settings['upsert'] = upsert unless upsert.nil?
@@ -36,52 +35,48 @@ describe "all protocols update actions", :integration => true do
     @es.indices.refresh
   end
 
-    ... (old lines 39-45 are not rendered in the source diff) ...
-      expect { subject.flush([action]) }.to raise_error
-    end
+  it "should fail without a document_id" do
+    event = LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0})
+    action = ["update", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event]
+    subject = get_es_output
+    subject.register
+    expect { subject.flush([action]) }.to raise_error
+  end
 
-    ... (old lines 49-55 are not rendered in the source diff) ...
+  it "should not create new document" do
+    subject = get_es_output("456")
+    subject.register
+    subject.receive(LogStash::Event.new("message" => "sample message here"))
+    subject.buffer_flush(:final => true)
+    expect {@es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
+  end
 
-    ... (old lines 57-64 are not rendered in the source diff) ...
-    end
+  it "should update existing document" do
+    subject = get_es_output("123")
+    subject.register
+    subject.receive(LogStash::Event.new("message" => "updated message here"))
+    subject.buffer_flush(:final => true)
+    r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
+    insist { r["_source"]["message"] } == 'updated message here'
+  end
 
-    ... (old lines 67-75 are not rendered in the source diff) ...
+  context "upsert with protocol" do
+    it "should create new documents with upsert content" do
+      subject = get_es_output("456", '{"message": "upsert message"}')
+      subject.register
+      subject.receive(LogStash::Event.new("message" => "sample message here"))
+      subject.buffer_flush(:final => true)
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["message"] } == 'upsert message'
+    end
 
-    ... (old lines 77-83 are not rendered in the source diff) ...
-    end
+    it "should create new documents with event/doc as upsert" do
+      subject = get_es_output("456", nil, true)
+      subject.register
+      subject.receive(LogStash::Event.new("message" => "sample message here"))
+      subject.buffer_flush(:final => true)
+      r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "456", :refresh => true)
+      insist { r["_source"]["message"] } == 'sample message here'
    end
  end
 end
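The signature change in get_es_output above summarizes the 2.0 update API: the protocol argument is gone, and the remaining knobs are the document id, an upsert document, and doc_as_upsert. A sketch of the three settings combinations these tests exercise (the "document_id" key is an assumption; only the "upsert" assignment is visible in the rendered hunk, and host/port values are illustrative):

    base = { "action" => "update", "index" => "logstash-update",
             "hosts" => "127.0.0.1", "port" => 9200 }

    # Fails when the target document does not exist:
    update_only   = base.merge("document_id" => "123")
    # Inserts the given document when the target is missing:
    static_upsert = base.merge("document_id" => "456",
                               "upsert" => '{"message": "upsert message"}')
    # Inserts the event itself when the target is missing:
    doc_as_upsert = base.merge("document_id" => "456", "doc_as_upsert" => true)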
data/spec/unit/outputs/elasticsearch/protocol_spec.rb
@@ -1,54 +1,63 @@
 require "logstash/devutils/rspec/spec_helper"
-require "logstash/outputs/elasticsearch/protocol"
+require "logstash/outputs/elasticsearch/http_client"
 require "java"
 
-describe LogStash::Outputs::Elasticsearch::Protocols::NodeClient do
-  context "successful" do
-    it "should map correctly" do
-      index_response = org.elasticsearch.action.index.IndexResponse.new("my_index", "my_type", "my_id", 123, true)
-      update_response = org.elasticsearch.action.update.UpdateResponse.new("my_index", "my_type", "my_id", 123, false)
-      delete_response = org.elasticsearch.action.delete.DeleteResponse.new("my_index", "my_type", "my_id", 123, true)
-      bulk_item_response_index = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", index_response)
-      bulk_item_response_update = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "update", update_response)
-      bulk_item_response_delete = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", delete_response)
-      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new([bulk_item_response_index, bulk_item_response_update, bulk_item_response_delete], 0)
-      ret = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
-      insist { ret } == {"errors" => false}
-    end
-  end
-
-  context "contains failures" do
-    it "should map correctly" do
-      failure = org.elasticsearch.action.bulk.BulkItemResponse::Failure.new("my_index", "my_type", "my_id", "error message", org.elasticsearch.rest.RestStatus::BAD_REQUEST)
-      bulk_item_response_index = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "index", failure)
-      bulk_item_response_update = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "update", failure)
-      bulk_item_response_delete = org.elasticsearch.action.bulk.BulkItemResponse.new(32, "delete", failure)
-      bulk_response = org.elasticsearch.action.bulk.BulkResponse.new([bulk_item_response_index, bulk_item_response_update, bulk_item_response_delete], 0)
-      actual = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.normalize_bulk_response(bulk_response)
-      insist { actual } == {"errors" => true, "statuses" => [400, 400, 400]}
-    end
-  end
-end
-
-describe LogStash::Outputs::Elasticsearch::Protocols::HTTPClient do
+describe LogStash::Outputs::Elasticsearch::HttpClient do
   context "successful" do
     it "should map correctly" do
       bulk_response = {"took"=>74, "errors"=>false, "items"=>[{"create"=>{"_index"=>"logstash-2014.11.17",
                        "_type"=>"logs", "_id"=>"AUxTS2C55Jrgi-hC6rQF",
                        "_version"=>1, "status"=>201}}]}
-      actual = LogStash::Outputs::Elasticsearch::Protocols::HTTPClient.normalize_bulk_response(bulk_response)
+      actual = LogStash::Outputs::Elasticsearch::HttpClient.normalize_bulk_response(bulk_response)
       insist { actual } == {"errors"=> false}
     end
   end
 
   context "contains failures" do
     it "should map correctly" do
+      item_response = {"_index"=>"logstash-2014.11.17",
+                       "_type"=>"logs", "_id"=>"AUxTQ_OI5Jrgi-hC6rQB", "status"=>400,
+                       "error"=>"MapperParsingException[failed to parse]..."}
       bulk_response = {"took"=>71, "errors"=>true,
-                       "items"=>[{"create"=>…
-      ... (old lines 48-51 are not rendered in the source diff) ...
+                       "items"=>[{"create"=>item_response}]}
+      actual = LogStash::Outputs::Elasticsearch::HttpClient.normalize_bulk_response(bulk_response)
+      insist { actual } == {"errors"=> true, "statuses"=> [400], "details" => [item_response]}
+    end
+  end
+
+  describe "sniffing" do
+    let(:base_options) { {:hosts => ["127.0.0.1"] }}
+    let(:client) { LogStash::Outputs::Elasticsearch::HttpClient.new(base_options.merge(client_opts)) }
+    let(:transport) { client.client.transport }
+
+    before do
+      allow(transport).to receive(:reload_connections!)
+    end
+
+    context "with sniffing enabled" do
+      let(:client_opts) { {:sniffing => true, :sniffing_delay => 1 } }
+
+      after do
+        client.stop_sniffing!
+      end
+
+      it "should start the sniffer" do
+        expect(client.sniffer_thread).to be_a(Thread)
+      end
+
+      it "should periodically sniff the client" do
+        sleep 2
+        expect(transport).to have_received(:reload_connections!)
+      end
+    end
+
+    context "with sniffing disabled" do
+      let(:client_opts) { {:sniffing => false} }
+
+      it "should not start the sniffer" do
+        expect(client.sniffer_thread).to be_nil
+      end
     end
+
   end
 end
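Two behaviors pinned down by the rewritten unit spec: normalize_bulk_response collapses an Elasticsearch bulk reply into a small summary hash, and sniffing now runs on a background thread owned by the client. A behavioral sketch of the normalizer implied by the assertions above (not the plugin's actual implementation; filtering on the "error" key is an assumption):

    def normalize_bulk_response(bulk_response)
      # Cheap happy path: Elasticsearch already says whether anything failed.
      return {"errors" => false} unless bulk_response["errors"]

      # Each item is a one-entry hash keyed by action ("create", "index", ...).
      items  = bulk_response["items"].map { |item| item.values.first }
      failed = items.select { |i| i["error"] }
      {"errors"   => true,
       "statuses" => failed.map { |i| i["status"] },
       "details"  => failed}
    end

And the new sniffing surface, as exercised by the spec:

    client = LogStash::Outputs::Elasticsearch::HttpClient.new(
      :hosts => ["127.0.0.1"], :sniffing => true, :sniffing_delay => 1)
    client.sniffer_thread  # a Thread reloading connections every sniffing_delay seconds
    client.stop_sniffing!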
data/spec/unit/outputs/elasticsearch_proxy_spec.rb
@@ -6,8 +6,7 @@ require "logstash/outputs/elasticsearch"
 describe "Proxy option" do
   let(:settings) {
     {
-      "protocol" => "http",
-      "host" => "node01",
+      "hosts" => "node01",
       "proxy" => proxy
     }
   }
@@ -35,10 +34,10 @@ describe "Proxy option" do
   end
 
   context "when specified as a hash" do
-    let(:proxy) { {"host" => "127.0.0.1", "protocol" => "http"} }
+    let(:proxy) { {"hosts" => "127.0.0.1", "protocol" => "http"} }
 
     it "should pass through the proxy values as symbols" do
-      expected = {:host => proxy["host"], :protocol => proxy["protocol"]}
+      expected = {:hosts => proxy["hosts"], :protocol => proxy["protocol"]}
       expect(::Elasticsearch::Client).to have_received(:new) do |options|
         expect(options[:transport_options][:proxy]).to eql(expected)
       end
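The hash form of the proxy option shown above is string-keyed in the plugin settings but reaches Elasticsearch::Client with symbolized keys under :transport_options => {:proxy => ...}. A sketch of the wiring the spec asserts (the proxy address is illustrative):

    output = LogStash::Outputs::ElasticSearch.new(
      "hosts" => "node01",
      "proxy" => {"hosts" => "127.0.0.1", "protocol" => "http"})
    # After register, the underlying client is built roughly as:
    #   Elasticsearch::Client.new(..., :transport_options => {
    #     :proxy => {:hosts => "127.0.0.1", :protocol => "http"}})
    output.register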
data/spec/unit/outputs/elasticsearch_spec.rb
@@ -1,52 +1,14 @@
 require_relative "../../../spec/es_spec_helper"
-require "flores/random"
 
 describe "outputs/elasticsearch" do
-  context "registration" do
-    it "should register" do
-      output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")
-      # register will try to load jars and raise if it cannot find jars
-      expect {output.register}.to_not raise_error
-    end
-
-    it "should fail to register when protocol => http, action => create_unless_exists" do
-      output = LogStash::Plugin.lookup("output", "elasticsearch").new("protocol" => "http", "action" => "create_unless_exists")
-      expect {output.register}.to raise_error
-    end
-  end
-
-  describe "Authentication option" do
-    ["node", "transport"].each do |protocol|
-      context "with protocol => #{protocol}" do
-        subject do
-          require "logstash/outputs/elasticsearch"
-          settings = {
-            "protocol" => protocol,
-            "node_name" => "logstash",
-            "cluster" => "elasticsearch",
-            "host" => "node01",
-            "user" => "test",
-            "password" => "test"
-          }
-          next LogStash::Outputs::ElasticSearch.new(settings)
-        end
-
-        it "should fail in register" do
-          expect {subject.register}.to raise_error
-        end
-      end
-    end
-  end
-
   describe "http client create" do
     require "logstash/outputs/elasticsearch"
     require "elasticsearch"
 
     let(:options) {
       {
-        "protocol" => "http",
         "index" => "my-index",
-        "host" => "localhost",
+        "hosts" => "localhost",
         "path" => "some-path"
       }
     }
@@ -54,7 +16,7 @@ describe "outputs/elasticsearch" do
     let(:eso) {LogStash::Outputs::ElasticSearch.new(options)}
 
     let(:manticore_host) {
-      eso.client.  ... (rest of the line is not rendered in the source diff) ...
+      eso.client.send(:client).transport.options[:hosts].first
     }
 
     around(:each) do |block|
@@ -83,117 +45,6 @@ describe "outputs/elasticsearch" do
           expect(manticore_host).to include(options["path"])
         end
       end
-
-
-    end
-  end
-
-  describe "transport protocol" do
-    context "host not configured" do
-      subject do
-        require "logstash/outputs/elasticsearch"
-        settings = {
-          "protocol" => "transport",
-          "node_name" => "mynode"
-        }
-        next LogStash::Outputs::ElasticSearch.new(settings)
-      end
-
-      it "should set host to localhost" do
-        expect(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:new).with({
-          :host => "localhost",
-          :port => "9300-9305",
-          :protocol => "transport",
-          :client_settings => {
-            "client.transport.sniff" => false,
-            "node.name" => "mynode"
-          }
-        })
-        subject.register
-      end
-    end
-
-    context "sniffing => true" do
-      subject do
-        require "logstash/outputs/elasticsearch"
-        settings = {
-          "host" => "node01",
-          "protocol" => "transport",
-          "sniffing" => true
-        }
-        next LogStash::Outputs::ElasticSearch.new(settings)
-      end
-
-      it "should set the sniffing property to true" do
-        expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
-        subject.register
-        client = subject.instance_eval("@current_client")
-        settings = client.instance_eval("@settings")
-
-        expect(settings.build.getAsMap["client.transport.sniff"]).to eq("true")
-      end
-    end
-
-    context "sniffing => false" do
-      subject do
-        require "logstash/outputs/elasticsearch"
-        settings = {
-          "host" => "node01",
-          "protocol" => "transport",
-          "sniffing" => false
-        }
-        next LogStash::Outputs::ElasticSearch.new(settings)
-      end
-
-      it "should set the sniffing property to true" do
-        expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
-        subject.register
-        client = subject.instance_eval("@current_client")
-        settings = client.instance_eval("@settings")
-
-        expect(settings.build.getAsMap["client.transport.sniff"]).to eq("false")
-      end
-    end
-  end
-
-
-  # TODO(sissel): Improve this. I'm not a fan of using message expectations (expect().to receive...)
-  # especially with respect to logging to verify a failure/retry has occurred. For now, this
-  # should suffice, though.
-  context "with timeout set" do
-    let(:listener) { Flores::Random.tcp_listener }
-    let(:port) { listener[2] }
-    let(:options) do
-      {
-        "protocol" => "http",
-        "manage_template" => false,
-        "host" => "localhost",
-        "port" => port,
-        "flush_size" => 1,
-        "timeout" => 1,
-      }
-    end
-    let(:eso) {LogStash::Outputs::ElasticSearch.new(options)}
-
-    before do
-      eso.logger = Cabin::Channel.get
-      eso.register
-
-      # Expect a timeout to be logged.
-      expect(eso.logger).to receive(:warn).with("Failed to flush outgoing items",
-                                                hash_including(:exception => "Manticore::SocketTimeout"))
-    end
-
-    after do
-      listener[0].close
-      eso.close
-    end
-
-    it "should fail after the timeout" do
-      Thread.new { eso.receive(LogStash::Event.new) }
-
-      # Allow the timeout to occur.
-      sleep(options["timeout"] + 0.5)
    end
  end
 end
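What survives in this file is the HTTP-only client construction, including the check that a configured "path" ends up in the host URL handed to the Manticore transport. A condensed sketch of that surviving assertion (values are illustrative, and send(:client) reaches into a private reader exactly as the spec does):

    eso = LogStash::Outputs::ElasticSearch.new(
      "hosts" => "localhost", "index" => "my-index", "path" => "some-path")
    eso.register
    manticore_host = eso.client.send(:client).transport.options[:hosts].first
    manticore_host.include?("some-path")  # => true, per the spec above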