logstash-output-elasticsearch 1.1.0-java → 2.0.0.beta4-java
This diff compares the contents of the two published package versions as they appear in their respective public registries and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/CHANGELOG.md +10 -3
- data/README.md +4 -4
- data/lib/logstash/outputs/elasticsearch/http_client.rb +144 -0
- data/lib/logstash/outputs/elasticsearch.rb +93 -319
- data/logstash-output-elasticsearch.gemspec +1 -3
- data/spec/es_spec_helper.rb +38 -34
- data/spec/integration/outputs/create_spec.rb +56 -0
- data/spec/integration/outputs/index_spec.rb +5 -7
- data/spec/integration/outputs/retry_spec.rb +118 -126
- data/spec/integration/outputs/routing_spec.rb +5 -33
- data/spec/integration/outputs/secure_spec.rb +4 -9
- data/spec/integration/outputs/templates_spec.rb +85 -91
- data/spec/integration/outputs/update_spec.rb +41 -46
- data/spec/unit/outputs/elasticsearch/protocol_spec.rb +45 -36
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +3 -4
- data/spec/unit/outputs/elasticsearch_spec.rb +2 -151
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +38 -63
- metadata +67 -101
- data/lib/logstash/outputs/elasticsearch/protocol.rb +0 -333
- data/lib/logstash-output-elasticsearch_jars.rb +0 -5
- data/spec/integration/outputs/elasticsearch/node_spec.rb +0 -36
- data/spec/integration/outputs/transport_create_spec.rb +0 -94
- data/vendor/jar-dependencies/runtime-jars/antlr-runtime-3.5.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/asm-4.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/asm-commons-4.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/elasticsearch-1.7.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-analyzers-common-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-core-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-grouping-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-highlighter-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-join-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-memory-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-misc-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-queries-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-queryparser-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-sandbox-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-spatial-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lucene-suggest-4.10.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/spatial4j-0.4.1.jar +0 -0
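The theme of the 2.0.0.beta4 pre-release is visible in the file list above: the Java `node`/`transport` protocol layer (`protocol.rb`, `logstash-output-elasticsearch_jars.rb`, and the vendored Elasticsearch/Lucene jars) is removed in favor of a plain HTTP client (`http_client.rb`), and the specs now configure the output with `hosts`/`port` instead of the 1.x `host`/`protocol` pair. As a rough sketch of the new-style configuration, mirroring the updated spec helpers and assuming a local Elasticsearch listening on 127.0.0.1:9200 (the values are illustrative, not taken from the diff), the plugin can be exercised directly from Ruby:

```ruby
require "logstash/outputs/elasticsearch"
require "logstash/event"

# 2.0-style settings: "hosts"/"port" replace the 1.x "host"/"protocol" options.
settings = {
  "hosts"              => "127.0.0.1",   # assumed local node, not from the diff
  "port"               => 9200,
  "index"              => "logstash-create",
  "manage_template"    => true,
  "template_overwrite" => true,
  "action"             => "index"
}

output = LogStash::Outputs::ElasticSearch.new(settings)
output.register
output.receive(LogStash::Event.new("message" => "sample message here"))
output.buffer_flush(:final => true)   # flush the buffered bulk request, as the specs do
```

In a pipeline configuration the same change shows up as `hosts => ...` and `port => ...` inside the `elasticsearch { }` output block, which is exactly what the index and routing specs below switch to.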
data/spec/es_spec_helper.rb
CHANGED
```diff
@@ -9,57 +9,61 @@ CONTAINER_NAME = "logstash-output-elasticsearch-#{rand(999).to_s}"
 CONTAINER_IMAGE = "elasticsearch"
 CONTAINER_TAG = "1.6"
 
-
+DOCKER_INTEGRATION = ENV["DOCKER_INTEGRATION"]
 
+module ESHelper
   def get_host
-    Longshoreman.new.get_host_ip
+    DOCKER_INTEGRATION ? Longshoreman.new.get_host_ip : "127.0.0.1"
   end
 
-  def get_port
+  def get_port
+    return 9200 unless DOCKER_INTEGRATION
+
     container = Longshoreman::Container.new
     container.get(CONTAINER_NAME)
-
-    when "http"
-      container.rport(9200)
-    when "transport", "node"
-      container.rport(9300)
-    end
+    container.rport(9200)
   end
 
   def get_client
-    Elasticsearch::Client.new(:host => "#{get_host}:#{get_port
+    Elasticsearch::Client.new(:host => "#{get_host}:#{get_port}")
   end
 end
 
+
 RSpec.configure do |config|
   config.include ESHelper
 
-
-
-  #
-
-
-
-
-
-
+
+  if DOCKER_INTEGRATION
+    # this :all hook gets run before every describe block that is tagged with :integration => true.
+    config.before(:all, :integration => true) do
+
+
+      # check if container exists already before creating new one.
+      begin
+        ls = Longshoreman::new
+        ls.container.get(CONTAINER_NAME)
+      rescue Docker::Error::NotFoundError
+        Longshoreman.new("#{CONTAINER_IMAGE}:#{CONTAINER_TAG}", CONTAINER_NAME)
+        # TODO(talevy): verify ES is running instead of static timeout
+        sleep 10
+      end
     end
-  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # we want to do a final cleanup after all :integration runs,
+    # but we don't want to clean up before the last block.
+    # This is a final blind check to see if the ES docker container is running and
+    # needs to be cleaned up. If no container can be found and/or docker is not
+    # running on the system, we do nothing.
+    config.after(:suite) do
+      # only cleanup docker container if system has docker and the container is running
+      begin
+        ls = Longshoreman::new
+        ls.container.get(CONTAINER_NAME)
+        ls.cleanup
+      rescue Docker::Error::NotFoundError, Excon::Errors::SocketError
+        # do nothing
+      end
     end
   end
 end
```
data/spec/integration/outputs/create_spec.rb
ADDED
```diff
@@ -0,0 +1,56 @@
+require_relative "../../../spec/es_spec_helper"
+
+describe "client create actions", :integration => true do
+  require "logstash/outputs/elasticsearch"
+  require "elasticsearch"
+
+  def get_es_output(action, id = nil)
+    settings = {
+      "manage_template" => true,
+      "index" => "logstash-create",
+      "template_overwrite" => true,
+      "hosts" => get_host(),
+      "port" => get_port(),
+      "action" => action
+    }
+    settings['document_id'] = id unless id.nil?
+    LogStash::Outputs::ElasticSearch.new(settings)
+  end
+
+  before :each do
+    @es = get_client
+    # Delete all templates first.
+    # Clean ES of data before we start.
+    @es.indices.delete_template(:name => "*")
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
+  end
+
+  context "when action => create" do
+    it "should create new documents with or without id" do
+      subject = get_es_output("create", "id123")
+      subject.register
+      subject.receive(LogStash::Event.new("message" => "sample message here"))
+      subject.buffer_flush(:final => true)
+      @es.indices.refresh
+      # Wait or fail until everything's indexed.
+      Stud::try(3.times) do
+        r = @es.search
+        insist { r["hits"]["total"] } == 1
+      end
+    end
+
+    it "should create new documents without id" do
+      subject = get_es_output("create")
+      subject.register
+      subject.receive(LogStash::Event.new("message" => "sample message here"))
+      subject.buffer_flush(:final => true)
+      @es.indices.refresh
+      # Wait or fail until everything's indexed.
+      Stud::try(3.times) do
+        r = @es.search
+        insist { r["hits"]["total"] } == 1
+      end
+    end
+  end
+end
```
data/spec/integration/outputs/index_spec.rb
CHANGED
```diff
@@ -13,7 +13,7 @@ shared_examples "an indexer" do
     pipeline = LogStash::Pipeline.new(config)
     pipeline.run
 
-    index_url = "http://#{get_host}:#{get_port
+    index_url = "http://#{get_host}:#{get_port}/#{index}"
 
     ftw = FTW::Agent.new
     ftw.post!("#{index_url}/_refresh")
@@ -52,9 +52,8 @@ describe "an indexer with custom index_type", :integration => true do
       }
       output {
         elasticsearch {
-
-          port => "#{get_port
-          protocol => "http"
+          hosts => "#{get_host()}"
+          port => "#{get_port}"
           index => "#{index}"
           flush_size => #{flush_size}
         }
@@ -77,9 +76,8 @@ describe "an indexer with no type value set (default to logs)", :integration => true do
       }
      output {
         elasticsearch {
-
-          port => "#{get_port
-          protocol => "http"
+          hosts => "#{get_host()}"
+          port => "#{get_port}"
           index => "#{index}"
           flush_size => #{flush_size}
         }
```
data/spec/integration/outputs/retry_spec.rb
CHANGED
```diff
@@ -11,146 +11,138 @@ describe "failures in bulk class expected behavior", :integration => true do
   let(:max_retries) { 3 }
 
   def mock_actions_with_response(*resp)
-    LogStash::Outputs::Elasticsearch::
-      .any_instance.stub(:bulk).and_return(*resp)
-    LogStash::Outputs::Elasticsearch::Protocols::NodeClient
+    LogStash::Outputs::Elasticsearch::HttpClient
       .any_instance.stub(:bulk).and_return(*resp)
   end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    next LogStash::Outputs::ElasticSearch.new(settings)
-  end
+  subject! do
+    settings = {
+      "manage_template" => true,
+      "index" => "logstash-2014.11.17",
+      "template_overwrite" => true,
+      "hosts" => get_host(),
+      "port" => get_port(),
+      "retry_max_items" => 10,
+      "retry_max_interval" => 1,
+      "max_retries" => max_retries
+    }
+    next LogStash::Outputs::ElasticSearch.new(settings)
+  end
 
-
-
-
+  before :each do
+    # Delete all templates first.
+    require "elasticsearch"
 
-
-
-
-
-
-
+    # Clean ES of data before we start.
+    @es = get_client
+    @es.indices.delete_template(:name => "*")
+    @es.indices.delete(:index => "*")
+    @es.indices.refresh
+  end
 
-
-
-
-
-
-
-
-
-
+  it "should return no errors if all bulk actions are successful" do
+    mock_actions_with_response({"errors" => false})
+    expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
+    subject.register
+    subject.receive(event1)
+    subject.receive(event2)
+    subject.buffer_flush(:final => true)
+    sleep(2)
+  end
 
-
-
-
-
-
-
-
-      end
-    end
-    subject.register
-    subject.receive(event1)
-    subject.teardown
+  it "should raise exception and be retried by stud::buffer" do
+    call_count = 0
+    expect(subject).to receive(:submit).with([action1]).exactly(3).times do
+      if (call_count += 1) <= 2
+        raise "error first two times"
+      else
+        {"errors" => false}
       end
+    end
+    subject.register
+    subject.receive(event1)
+    subject.teardown
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  it "should retry actions with response status of 503" do
+    mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]},
+                               {"errors" => true, "statuses" => [200, 503]},
+                               {"errors" => false})
+    expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original
+    expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original
+    expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original
+
+    subject.register
+    subject.receive(event1)
+    subject.receive(event1)
+    subject.receive(event1)
+    subject.receive(event2)
+    subject.buffer_flush(:final => true)
+    sleep(3)
+  end
 
-
-
-
-
-
-
-
-
-
+  it "should retry actions with response status of 429" do
+    mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                               {"errors" => false})
+    expect(subject).to receive(:submit).with([action1]).twice.and_call_original
+    subject.register
+    subject.receive(event1)
+    subject.buffer_flush(:final => true)
+    sleep(3)
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+  it "should retry an event until max_retries reached" do
+    mock_actions_with_response({"errors" => true, "statuses" => [429]},
+                               {"errors" => true, "statuses" => [429]},
+                               {"errors" => true, "statuses" => [429]},
+                               {"errors" => true, "statuses" => [429]},
+                               {"errors" => true, "statuses" => [429]},
+                               {"errors" => true, "statuses" => [429]})
+    expect(subject).to receive(:submit).with([action1]).exactly(max_retries).times.and_call_original
+    subject.register
+    subject.receive(event1)
+    subject.buffer_flush(:final => true)
+    sleep(3)
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+  it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunately)" do
+    subject.register
+    subject.receive(invalid_event)
+    expect(subject).not_to receive(:retry_push)
+    subject.teardown
+
+    @es.indices.refresh
+    sleep(5)
+    Stud::try(10.times) do
+      r = @es.search
+      insist { r["hits"]["total"] } == 0
+    end
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-  end
+  it "successful requests should not be appended to retry queue" do
+    subject.register
+    subject.receive(event1)
+    expect(subject).not_to receive(:retry_push)
+    subject.teardown
+    @es.indices.refresh
+    sleep(5)
+    Stud::try(10.times) do
+      r = @es.search
+      insist { r["hits"]["total"] } == 1
+    end
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-    end
-  end
+  it "should only index proper events" do
+    subject.register
+    subject.receive(invalid_event)
+    subject.receive(event1)
+    subject.teardown
+
+    @es.indices.refresh
+    sleep(5)
+    Stud::try(10.times) do
+      r = @es.search
+      insist { r["hits"]["total"] } == 1
     end
   end
 end
```
data/spec/integration/outputs/routing_spec.rb
CHANGED
```diff
@@ -15,7 +15,7 @@ shared_examples "a routing indexer" do
     pipeline = LogStash::Pipeline.new(config)
     pipeline.run
 
-    index_url = "http://#{get_host()}:#{get_port(
+    index_url = "http://#{get_host()}:#{get_port()}/#{index}"
 
     ftw = FTW::Agent.new
     ftw.post!("#{index_url}/_refresh")
@@ -46,9 +46,8 @@ describe "(http protocol) index events with static routing", :integration => true do
       }
       output {
         elasticsearch {
-
-          port => "#{get_port(
-          protocol => "http"
+          hosts => "#{get_host()}"
+          port => "#{get_port()}"
           index => "#{index}"
           flush_size => #{flush_size}
           routing => "#{routing}"
@@ -73,9 +72,8 @@ describe "(http_protocol) index events with fieldref in routing value", :integration => true do
       }
       output {
         elasticsearch {
-
-          port => "#{get_port(
-          protocol => "http"
+          hosts => "#{get_host()}"
+          port => "#{get_port()}"
           index => "#{index}"
           flush_size => #{flush_size}
           routing => "%{message}"
@@ -86,29 +84,3 @@ describe "(http_protocol) index events with fieldref in routing value", :integration => true do
   end
 end
 
-describe "(transport protocol) index events with fieldref in routing value", :integration => true do
-  it_behaves_like 'a routing indexer' do
-    let(:routing) { "test" }
-    let(:config) {
-      <<-CONFIG
-      input {
-        generator {
-          message => "#{routing}"
-          count => #{event_count}
-          type => "#{type}"
-        }
-      }
-      output {
-        elasticsearch {
-          host => "#{get_host()}"
-          port => "#{get_port('transport')}"
-          protocol => "transport"
-          index => "#{index}"
-          flush_size => #{flush_size}
-          routing => "%{message}"
-        }
-      }
-      CONFIG
-    }
-  end
-end
```
data/spec/integration/outputs/secure_spec.rb
CHANGED
```diff
@@ -4,10 +4,9 @@ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
   subject do
     require "logstash/outputs/elasticsearch"
     settings = {
-      "protocol" => "http",
       "node_name" => "logstash",
       "cluster" => "elasticsearch",
-      "
+      "hosts" => "node01",
       "user" => "user",
       "password" => "changeme",
       "ssl" => true,
@@ -35,9 +34,8 @@ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
   subject do
     require "logstash/outputs/elasticsearch"
     settings = {
-      "protocol" => "http",
       "cluster" => "elasticsearch",
-      "
+      "hosts" => "node01",
       "user" => "user",
       "password" => "changeme",
     }
@@ -60,10 +58,9 @@ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
   subject do
     require "logstash/outputs/elasticsearch"
     settings = {
-      "protocol" => "http",
       "node_name" => "logstash",
       "cluster" => "elasticsearch",
-      "
+      "hosts" => "node01",
       "user" => "user",
       "password" => "changeme",
       "ssl" => true,
@@ -91,9 +88,7 @@ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
   subject do
     require "logstash/outputs/elasticsearch"
     settings = {
-      "
-      "cluster" => "elasticsearch",
-      "host" => "node01",
+      "hosts" => "node01",
       "user" => "user",
       "password" => "changeme",
     }
```