logstash-output-elasticsearch 0.2.8-java → 0.2.9-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/CHANGELOG.md +3 -0
- data/Gemfile +1 -0
- data/NOTICE.TXT +5 -0
- data/README.md +14 -2
- data/lib/logstash/outputs/elasticsearch.rb +42 -2
- data/lib/logstash/outputs/elasticsearch/protocol.rb +1 -1
- data/logstash-output-elasticsearch.gemspec +2 -1
- data/spec/es_spec_helper.rb +65 -0
- data/spec/integration/outputs/elasticsearch/node_spec.rb +36 -0
- data/spec/integration/outputs/index_spec.rb +90 -0
- data/spec/integration/outputs/retry_spec.rb +156 -0
- data/spec/integration/outputs/routing_spec.rb +114 -0
- data/spec/integration/outputs/secure_spec.rb +113 -0
- data/spec/integration/outputs/templates_spec.rb +97 -0
- data/spec/integration/outputs/transport_create_spec.rb +94 -0
- data/spec/{outputs → unit/outputs}/elasticsearch/protocol_spec.rb +0 -1
- data/spec/unit/outputs/elasticsearch_spec.rb +157 -0
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +51 -0
- metadata +39 -6
- data/spec/outputs/elasticsearch_spec.rb +0 -1059
@@ -0,0 +1,51 @@
|
|
1
|
+
require_relative "../../../spec/es_spec_helper"
|
2
|
+
|
3
|
+
describe "SSL option" do
|
4
|
+
["node", "transport"].each do |protocol|
|
5
|
+
context "with protocol => #{protocol}" do
|
6
|
+
subject do
|
7
|
+
require "logstash/outputs/elasticsearch"
|
8
|
+
settings = {
|
9
|
+
"protocol" => protocol,
|
10
|
+
"node_name" => "logstash",
|
11
|
+
"cluster" => "elasticsearch",
|
12
|
+
"host" => "node01",
|
13
|
+
"ssl" => true
|
14
|
+
}
|
15
|
+
next LogStash::Outputs::ElasticSearch.new(settings)
|
16
|
+
end
|
17
|
+
|
18
|
+
it "should fail in register" do
|
19
|
+
expect {subject.register}.to raise_error
|
20
|
+
end
|
21
|
+
end
|
22
|
+
end
|
23
|
+
|
24
|
+
context "when using http protocol" do
|
25
|
+
protocol = "http"
|
26
|
+
context "when using ssl without cert verification" do
|
27
|
+
subject do
|
28
|
+
require "logstash/outputs/elasticsearch"
|
29
|
+
settings = {
|
30
|
+
"protocol" => protocol,
|
31
|
+
"host" => "node01",
|
32
|
+
"ssl" => true,
|
33
|
+
"ssl_certificate_verification" => false
|
34
|
+
}
|
35
|
+
next LogStash::Outputs::ElasticSearch.new(settings)
|
36
|
+
end
|
37
|
+
|
38
|
+
it "should pass the flag to the ES client" do
|
39
|
+
expect(::Elasticsearch::Client).to receive(:new) do |args|
|
40
|
+
expect(args[:ssl]).to eq(:verify => false)
|
41
|
+
end
|
42
|
+
subject.register
|
43
|
+
end
|
44
|
+
|
45
|
+
it "print a warning" do
|
46
|
+
expect(subject.logger).to receive(:warn)
|
47
|
+
subject.register
|
48
|
+
end
|
49
|
+
end
|
50
|
+
end
|
51
|
+
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-elasticsearch
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.2.
|
4
|
+
version: 0.2.9
|
5
5
|
platform: java
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-06-
|
11
|
+
date: 2015-06-19 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: concurrent-ruby
|
@@ -154,6 +154,20 @@ dependencies:
|
|
154
154
|
version: '0'
|
155
155
|
prerelease: false
|
156
156
|
type: :development
|
157
|
+
- !ruby/object:Gem::Dependency
|
158
|
+
name: longshoreman
|
159
|
+
version_requirements: !ruby/object:Gem::Requirement
|
160
|
+
requirements:
|
161
|
+
- - '>='
|
162
|
+
- !ruby/object:Gem::Version
|
163
|
+
version: '0'
|
164
|
+
requirement: !ruby/object:Gem::Requirement
|
165
|
+
requirements:
|
166
|
+
- - '>='
|
167
|
+
- !ruby/object:Gem::Version
|
168
|
+
version: '0'
|
169
|
+
prerelease: false
|
170
|
+
type: :development
|
157
171
|
description: Output events to elasticsearch
|
158
172
|
email: info@elastic.co
|
159
173
|
executables: []
|
@@ -165,6 +179,7 @@ files:
|
|
165
179
|
- CONTRIBUTORS
|
166
180
|
- Gemfile
|
167
181
|
- LICENSE
|
182
|
+
- NOTICE.TXT
|
168
183
|
- README.md
|
169
184
|
- Rakefile
|
170
185
|
- lib/logstash-output-elasticsearch_jars.rb
|
@@ -172,8 +187,17 @@ files:
|
|
172
187
|
- lib/logstash/outputs/elasticsearch/elasticsearch-template.json
|
173
188
|
- lib/logstash/outputs/elasticsearch/protocol.rb
|
174
189
|
- logstash-output-elasticsearch.gemspec
|
175
|
-
- spec/
|
176
|
-
- spec/outputs/
|
190
|
+
- spec/es_spec_helper.rb
|
191
|
+
- spec/integration/outputs/elasticsearch/node_spec.rb
|
192
|
+
- spec/integration/outputs/index_spec.rb
|
193
|
+
- spec/integration/outputs/retry_spec.rb
|
194
|
+
- spec/integration/outputs/routing_spec.rb
|
195
|
+
- spec/integration/outputs/secure_spec.rb
|
196
|
+
- spec/integration/outputs/templates_spec.rb
|
197
|
+
- spec/integration/outputs/transport_create_spec.rb
|
198
|
+
- spec/unit/outputs/elasticsearch/protocol_spec.rb
|
199
|
+
- spec/unit/outputs/elasticsearch_spec.rb
|
200
|
+
- spec/unit/outputs/elasticsearch_ssl_spec.rb
|
177
201
|
- vendor/jar-dependencies/runtime-jars/antlr-runtime-3.5.jar
|
178
202
|
- vendor/jar-dependencies/runtime-jars/asm-4.1.jar
|
179
203
|
- vendor/jar-dependencies/runtime-jars/asm-commons-4.1.jar
|
@@ -218,5 +242,14 @@ signing_key:
|
|
218
242
|
specification_version: 4
|
219
243
|
summary: Logstash Output to Elasticsearch
|
220
244
|
test_files:
|
221
|
-
- spec/
|
222
|
-
- spec/outputs/
|
245
|
+
- spec/es_spec_helper.rb
|
246
|
+
- spec/integration/outputs/elasticsearch/node_spec.rb
|
247
|
+
- spec/integration/outputs/index_spec.rb
|
248
|
+
- spec/integration/outputs/retry_spec.rb
|
249
|
+
- spec/integration/outputs/routing_spec.rb
|
250
|
+
- spec/integration/outputs/secure_spec.rb
|
251
|
+
- spec/integration/outputs/templates_spec.rb
|
252
|
+
- spec/integration/outputs/transport_create_spec.rb
|
253
|
+
- spec/unit/outputs/elasticsearch/protocol_spec.rb
|
254
|
+
- spec/unit/outputs/elasticsearch_spec.rb
|
255
|
+
- spec/unit/outputs/elasticsearch_ssl_spec.rb
|
@@ -1,1059 +0,0 @@
|
|
1
|
-
require "logstash/devutils/rspec/spec_helper"
|
2
|
-
require "ftw"
|
3
|
-
require "logstash/plugin"
|
4
|
-
require "logstash/json"
|
5
|
-
require "stud/try"
|
6
|
-
|
7
|
-
describe "outputs/elasticsearch" do
|
8
|
-
|
9
|
-
context "registration" do
|
10
|
-
|
11
|
-
it "should register" do
|
12
|
-
output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")
|
13
|
-
|
14
|
-
# register will try to load jars and raise if it cannot find jars
|
15
|
-
expect {output.register}.to_not raise_error
|
16
|
-
end
|
17
|
-
|
18
|
-
it "should fail to register when protocol => http, action => create_unless_exists" do
|
19
|
-
output = LogStash::Plugin.lookup("output", "elasticsearch").new("protocol" => "http", "action" => "create_unless_exists")
|
20
|
-
|
21
|
-
expect {output.register}.to raise_error
|
22
|
-
end
|
23
|
-
end
|
24
|
-
|
25
|
-
describe "ship lots of events w/ default index_type", :elasticsearch => true do
|
26
|
-
# Generate a random index name
|
27
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
28
|
-
type = 10.times.collect { rand(10).to_s }.join("")
|
29
|
-
|
30
|
-
# Write about 10000 events. Add jitter to increase likeliness of finding
|
31
|
-
# boundary-related bugs.
|
32
|
-
event_count = 10000 + rand(500)
|
33
|
-
flush_size = rand(200) + 1
|
34
|
-
|
35
|
-
config <<-CONFIG
|
36
|
-
input {
|
37
|
-
generator {
|
38
|
-
message => "hello world"
|
39
|
-
count => #{event_count}
|
40
|
-
type => "#{type}"
|
41
|
-
}
|
42
|
-
}
|
43
|
-
output {
|
44
|
-
elasticsearch {
|
45
|
-
host => "127.0.0.1"
|
46
|
-
index => "#{index}"
|
47
|
-
flush_size => #{flush_size}
|
48
|
-
}
|
49
|
-
}
|
50
|
-
CONFIG
|
51
|
-
|
52
|
-
agent do
|
53
|
-
# Try a few times to check if we have the correct number of events stored
|
54
|
-
# in ES.
|
55
|
-
#
|
56
|
-
# We try multiple times to allow final agent flushes as well as allowing
|
57
|
-
# elasticsearch to finish processing everything.
|
58
|
-
ftw = FTW::Agent.new
|
59
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
60
|
-
|
61
|
-
# Wait until all events are available.
|
62
|
-
Stud::try(10.times) do
|
63
|
-
data = ""
|
64
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
65
|
-
response.read_body { |chunk| data << chunk }
|
66
|
-
result = LogStash::Json.load(data)
|
67
|
-
count = result["count"]
|
68
|
-
insist { count } == event_count
|
69
|
-
end
|
70
|
-
|
71
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
|
72
|
-
data = ""
|
73
|
-
response.read_body { |chunk| data << chunk }
|
74
|
-
result = LogStash::Json.load(data)
|
75
|
-
result["hits"]["hits"].each do |doc|
|
76
|
-
# With no 'index_type' set, the document type should be the type
|
77
|
-
# set on the input
|
78
|
-
insist { doc["_type"] } == type
|
79
|
-
insist { doc["_index"] } == index
|
80
|
-
insist { doc["_source"]["message"] } == "hello world"
|
81
|
-
end
|
82
|
-
end
|
83
|
-
end
|
84
|
-
|
85
|
-
describe "ship lots of events w/ default index_type and fixed routing key using http protocol", :elasticsearch => true do
|
86
|
-
# Generate a random index name
|
87
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
88
|
-
type = 10.times.collect { rand(10).to_s }.join("")
|
89
|
-
|
90
|
-
# Write 900 events so that we can verify these have been routed correctly.
|
91
|
-
event_count = 900
|
92
|
-
flush_size = rand(200) + 1
|
93
|
-
|
94
|
-
config <<-CONFIG
|
95
|
-
input {
|
96
|
-
generator {
|
97
|
-
message => "hello world"
|
98
|
-
count => #{event_count}
|
99
|
-
type => "#{type}"
|
100
|
-
}
|
101
|
-
}
|
102
|
-
output {
|
103
|
-
elasticsearch {
|
104
|
-
host => "127.0.0.1"
|
105
|
-
index => "#{index}"
|
106
|
-
flush_size => #{flush_size}
|
107
|
-
routing => "test"
|
108
|
-
protocol => "http"
|
109
|
-
}
|
110
|
-
}
|
111
|
-
CONFIG
|
112
|
-
|
113
|
-
agent do
|
114
|
-
# Try a few times to check if we have the correct number of events stored
|
115
|
-
# in ES.
|
116
|
-
#
|
117
|
-
# We try multiple times to allow final agent flushes as well as allowing
|
118
|
-
# elasticsearch to finish processing everything.
|
119
|
-
ftw = FTW::Agent.new
|
120
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
121
|
-
|
122
|
-
# Wait until all events are available.
|
123
|
-
Stud::try(10.times) do
|
124
|
-
data = ""
|
125
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
126
|
-
response.read_body { |chunk| data << chunk }
|
127
|
-
result = LogStash::Json.load(data)
|
128
|
-
count = result["count"]
|
129
|
-
insist { count } == event_count
|
130
|
-
end
|
131
|
-
|
132
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*&routing=test")
|
133
|
-
data = ""
|
134
|
-
response.read_body { |chunk| data << chunk }
|
135
|
-
result = LogStash::Json.load(data)
|
136
|
-
count = result["count"]
|
137
|
-
insist { count } == event_count
|
138
|
-
end
|
139
|
-
end
|
140
|
-
|
141
|
-
describe "ship lots of events w/ default index_type and dynamic routing key using http protocol", :elasticsearch => true do
|
142
|
-
# Generate a random index name
|
143
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
144
|
-
type = 10.times.collect { rand(10).to_s }.join("")
|
145
|
-
|
146
|
-
# Write 900 events so that we can verify these have been routed correctly.
|
147
|
-
event_count = 900
|
148
|
-
flush_size = rand(200) + 1
|
149
|
-
|
150
|
-
config <<-CONFIG
|
151
|
-
input {
|
152
|
-
generator {
|
153
|
-
message => "test"
|
154
|
-
count => #{event_count}
|
155
|
-
type => "#{type}"
|
156
|
-
}
|
157
|
-
}
|
158
|
-
output {
|
159
|
-
elasticsearch {
|
160
|
-
host => "127.0.0.1"
|
161
|
-
index => "#{index}"
|
162
|
-
flush_size => #{flush_size}
|
163
|
-
routing => "%{message}"
|
164
|
-
protocol => "http"
|
165
|
-
}
|
166
|
-
}
|
167
|
-
CONFIG
|
168
|
-
|
169
|
-
agent do
|
170
|
-
# Try a few times to check if we have the correct number of events stored
|
171
|
-
# in ES.
|
172
|
-
#
|
173
|
-
# We try multiple times to allow final agent flushes as well as allowing
|
174
|
-
# elasticsearch to finish processing everything.
|
175
|
-
ftw = FTW::Agent.new
|
176
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
177
|
-
|
178
|
-
# Wait until all events are available.
|
179
|
-
Stud::try(10.times) do
|
180
|
-
data = ""
|
181
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
182
|
-
response.read_body { |chunk| data << chunk }
|
183
|
-
result = LogStash::Json.load(data)
|
184
|
-
count = result["count"]
|
185
|
-
insist { count } == event_count
|
186
|
-
end
|
187
|
-
|
188
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*&routing=test")
|
189
|
-
data = ""
|
190
|
-
response.read_body { |chunk| data << chunk }
|
191
|
-
result = LogStash::Json.load(data)
|
192
|
-
count = result["count"]
|
193
|
-
insist { count } == event_count
|
194
|
-
end
|
195
|
-
end
|
196
|
-
|
197
|
-
describe "ship lots of events w/ default index_type and fixed routing key using transport protocol", :elasticsearch => true do
|
198
|
-
# Generate a random index name
|
199
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
200
|
-
type = 10.times.collect { rand(10).to_s }.join("")
|
201
|
-
|
202
|
-
# Write 900 events so that we can verify these have been routed correctly.
|
203
|
-
event_count = 900
|
204
|
-
flush_size = rand(200) + 1
|
205
|
-
|
206
|
-
config <<-CONFIG
|
207
|
-
input {
|
208
|
-
generator {
|
209
|
-
message => "hello world"
|
210
|
-
count => #{event_count}
|
211
|
-
type => "#{type}"
|
212
|
-
}
|
213
|
-
}
|
214
|
-
output {
|
215
|
-
elasticsearch {
|
216
|
-
host => "127.0.0.1"
|
217
|
-
index => "#{index}"
|
218
|
-
flush_size => #{flush_size}
|
219
|
-
routing => "test"
|
220
|
-
protocol => "transport"
|
221
|
-
}
|
222
|
-
}
|
223
|
-
CONFIG
|
224
|
-
|
225
|
-
agent do
|
226
|
-
# Try a few times to check if we have the correct number of events stored
|
227
|
-
# in ES.
|
228
|
-
#
|
229
|
-
# We try multiple times to allow final agent flushes as well as allowing
|
230
|
-
# elasticsearch to finish processing everything.
|
231
|
-
ftw = FTW::Agent.new
|
232
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
233
|
-
|
234
|
-
# Wait until all events are available.
|
235
|
-
Stud::try(10.times) do
|
236
|
-
data = ""
|
237
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
238
|
-
response.read_body { |chunk| data << chunk }
|
239
|
-
result = LogStash::Json.load(data)
|
240
|
-
count = result["count"]
|
241
|
-
insist { count } == event_count
|
242
|
-
end
|
243
|
-
|
244
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*&routing=test")
|
245
|
-
data = ""
|
246
|
-
response.read_body { |chunk| data << chunk }
|
247
|
-
result = LogStash::Json.load(data)
|
248
|
-
count = result["count"]
|
249
|
-
insist { count } == event_count
|
250
|
-
end
|
251
|
-
end
|
252
|
-
|
253
|
-
describe "ship lots of events w/ default index_type and fixed routing key using node protocol", :elasticsearch => true do
|
254
|
-
# Generate a random index name
|
255
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
256
|
-
type = 10.times.collect { rand(10).to_s }.join("")
|
257
|
-
|
258
|
-
# Write 900 events so that we can verify these have been routed correctly.
|
259
|
-
event_count = 900
|
260
|
-
flush_size = rand(200) + 1
|
261
|
-
|
262
|
-
config <<-CONFIG
|
263
|
-
input {
|
264
|
-
generator {
|
265
|
-
message => "hello world"
|
266
|
-
count => #{event_count}
|
267
|
-
type => "#{type}"
|
268
|
-
}
|
269
|
-
}
|
270
|
-
output {
|
271
|
-
elasticsearch {
|
272
|
-
host => "127.0.0.1"
|
273
|
-
index => "#{index}"
|
274
|
-
flush_size => #{flush_size}
|
275
|
-
routing => "test"
|
276
|
-
protocol => "node"
|
277
|
-
}
|
278
|
-
}
|
279
|
-
CONFIG
|
280
|
-
|
281
|
-
agent do
|
282
|
-
# Try a few times to check if we have the correct number of events stored
|
283
|
-
# in ES.
|
284
|
-
#
|
285
|
-
# We try multiple times to allow final agent flushes as well as allowing
|
286
|
-
# elasticsearch to finish processing everything.
|
287
|
-
ftw = FTW::Agent.new
|
288
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
289
|
-
|
290
|
-
# Wait until all events are available.
|
291
|
-
Stud::try(10.times) do
|
292
|
-
data = ""
|
293
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
294
|
-
response.read_body { |chunk| data << chunk }
|
295
|
-
result = LogStash::Json.load(data)
|
296
|
-
count = result["count"]
|
297
|
-
insist { count } == event_count
|
298
|
-
end
|
299
|
-
|
300
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*&routing=test")
|
301
|
-
data = ""
|
302
|
-
response.read_body { |chunk| data << chunk }
|
303
|
-
result = LogStash::Json.load(data)
|
304
|
-
count = result["count"]
|
305
|
-
insist { count } == event_count
|
306
|
-
end
|
307
|
-
end
|
308
|
-
|
309
|
-
describe "node client create actions", :elasticsearch => true do
|
310
|
-
require "logstash/outputs/elasticsearch"
|
311
|
-
require "elasticsearch"
|
312
|
-
let(:es) { Elasticsearch::Client.new }
|
313
|
-
|
314
|
-
def get_es_output(action, id = nil)
|
315
|
-
settings = {
|
316
|
-
"manage_template" => true,
|
317
|
-
"index" => "logstash-create",
|
318
|
-
"template_overwrite" => true,
|
319
|
-
"protocol" => "node",
|
320
|
-
"host" => "localhost",
|
321
|
-
"action" => action
|
322
|
-
}
|
323
|
-
settings['document_id'] = id unless id.nil?
|
324
|
-
LogStash::Outputs::ElasticSearch.new(settings)
|
325
|
-
end
|
326
|
-
|
327
|
-
before :each do
|
328
|
-
# Delete all templates first.
|
329
|
-
# Clean ES of data before we start.
|
330
|
-
es.indices.delete_template(:name => "*")
|
331
|
-
# This can fail if there are no indexes, ignore failure.
|
332
|
-
es.indices.delete(:index => "*") rescue nil
|
333
|
-
end
|
334
|
-
|
335
|
-
context "when action => create" do
|
336
|
-
it "should create new documents with or without id" do
|
337
|
-
subject = get_es_output("create", "id123")
|
338
|
-
subject.register
|
339
|
-
subject.receive(LogStash::Event.new("message" => "sample message here"))
|
340
|
-
subject.buffer_flush(:final => true)
|
341
|
-
es.indices.refresh
|
342
|
-
# Wait or fail until everything's indexed.
|
343
|
-
Stud::try(3.times) do
|
344
|
-
r = es.search
|
345
|
-
insist { r["hits"]["total"] } == 1
|
346
|
-
end
|
347
|
-
end
|
348
|
-
|
349
|
-
it "should create new documents without id" do
|
350
|
-
subject = get_es_output("create")
|
351
|
-
subject.register
|
352
|
-
subject.receive(LogStash::Event.new("message" => "sample message here"))
|
353
|
-
subject.buffer_flush(:final => true)
|
354
|
-
es.indices.refresh
|
355
|
-
# Wait or fail until everything's indexed.
|
356
|
-
Stud::try(3.times) do
|
357
|
-
r = es.search
|
358
|
-
insist { r["hits"]["total"] } == 1
|
359
|
-
end
|
360
|
-
end
|
361
|
-
end
|
362
|
-
|
363
|
-
context "when action => create_unless_exists" do
|
364
|
-
it "should create new documents when specific id is specified" do
|
365
|
-
subject = get_es_output("create_unless_exists", "id123")
|
366
|
-
subject.register
|
367
|
-
subject.receive(LogStash::Event.new("message" => "sample message here"))
|
368
|
-
subject.buffer_flush(:final => true)
|
369
|
-
es.indices.refresh
|
370
|
-
# Wait or fail until everything's indexed.
|
371
|
-
Stud::try(3.times) do
|
372
|
-
r = es.search
|
373
|
-
insist { r["hits"]["total"] } == 1
|
374
|
-
end
|
375
|
-
end
|
376
|
-
|
377
|
-
it "should fail to create a document when no id is specified" do
|
378
|
-
event = LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0})
|
379
|
-
action = ["create_unless_exists", {:_id=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event]
|
380
|
-
subject = get_es_output(action[0])
|
381
|
-
subject.register
|
382
|
-
expect { subject.flush([action]) }.to raise_error
|
383
|
-
end
|
384
|
-
|
385
|
-
it "should unsuccesfully submit two records with the same document id" do
|
386
|
-
subject = get_es_output("create_unless_exists", "id123")
|
387
|
-
subject.register
|
388
|
-
subject.receive(LogStash::Event.new("message" => "sample message here"))
|
389
|
-
subject.receive(LogStash::Event.new("message" => "sample message here")) # 400 status failure (same id)
|
390
|
-
subject.buffer_flush(:final => true)
|
391
|
-
es.indices.refresh
|
392
|
-
# Wait or fail until everything's indexed.
|
393
|
-
Stud::try(3.times) do
|
394
|
-
r = es.search
|
395
|
-
insist { r["hits"]["total"] } == 1
|
396
|
-
end
|
397
|
-
end
|
398
|
-
end
|
399
|
-
end
|
400
|
-
|
401
|
-
describe "testing index_type", :elasticsearch => true do
|
402
|
-
describe "no type value" do
|
403
|
-
# Generate a random index name
|
404
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
405
|
-
event_count = 100 + rand(100)
|
406
|
-
flush_size = rand(200) + 1
|
407
|
-
|
408
|
-
config <<-CONFIG
|
409
|
-
input {
|
410
|
-
generator {
|
411
|
-
message => "hello world"
|
412
|
-
count => #{event_count}
|
413
|
-
}
|
414
|
-
}
|
415
|
-
output {
|
416
|
-
elasticsearch {
|
417
|
-
host => "127.0.0.1"
|
418
|
-
index => "#{index}"
|
419
|
-
flush_size => #{flush_size}
|
420
|
-
}
|
421
|
-
}
|
422
|
-
CONFIG
|
423
|
-
|
424
|
-
agent do
|
425
|
-
ftw = FTW::Agent.new
|
426
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
427
|
-
|
428
|
-
# Wait until all events are available.
|
429
|
-
Stud::try(10.times) do
|
430
|
-
data = ""
|
431
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
432
|
-
response.read_body { |chunk| data << chunk }
|
433
|
-
result = LogStash::Json.load(data)
|
434
|
-
count = result["count"]
|
435
|
-
insist { count } == event_count
|
436
|
-
end
|
437
|
-
|
438
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
|
439
|
-
data = ""
|
440
|
-
response.read_body { |chunk| data << chunk }
|
441
|
-
result = LogStash::Json.load(data)
|
442
|
-
result["hits"]["hits"].each do |doc|
|
443
|
-
insist { doc["_type"] } == "logs"
|
444
|
-
end
|
445
|
-
end
|
446
|
-
end
|
447
|
-
|
448
|
-
describe "default event type value" do
|
449
|
-
# Generate a random index name
|
450
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
451
|
-
event_count = 100 + rand(100)
|
452
|
-
flush_size = rand(200) + 1
|
453
|
-
|
454
|
-
config <<-CONFIG
|
455
|
-
input {
|
456
|
-
generator {
|
457
|
-
message => "hello world"
|
458
|
-
count => #{event_count}
|
459
|
-
type => "generated"
|
460
|
-
}
|
461
|
-
}
|
462
|
-
output {
|
463
|
-
elasticsearch {
|
464
|
-
host => "127.0.0.1"
|
465
|
-
index => "#{index}"
|
466
|
-
flush_size => #{flush_size}
|
467
|
-
}
|
468
|
-
}
|
469
|
-
CONFIG
|
470
|
-
|
471
|
-
agent do
|
472
|
-
ftw = FTW::Agent.new
|
473
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
474
|
-
|
475
|
-
# Wait until all events are available.
|
476
|
-
Stud::try(10.times) do
|
477
|
-
data = ""
|
478
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
479
|
-
response.read_body { |chunk| data << chunk }
|
480
|
-
result = LogStash::Json.load(data)
|
481
|
-
count = result["count"]
|
482
|
-
insist { count } == event_count
|
483
|
-
end
|
484
|
-
|
485
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
|
486
|
-
data = ""
|
487
|
-
response.read_body { |chunk| data << chunk }
|
488
|
-
result = LogStash::Json.load(data)
|
489
|
-
result["hits"]["hits"].each do |doc|
|
490
|
-
insist { doc["_type"] } == "generated"
|
491
|
-
end
|
492
|
-
end
|
493
|
-
end
|
494
|
-
end
|
495
|
-
|
496
|
-
describe "action => ...", :elasticsearch => true do
|
497
|
-
index_name = 10.times.collect { rand(10).to_s }.join("")
|
498
|
-
|
499
|
-
config <<-CONFIG
|
500
|
-
input {
|
501
|
-
generator {
|
502
|
-
message => "hello world"
|
503
|
-
count => 100
|
504
|
-
}
|
505
|
-
}
|
506
|
-
output {
|
507
|
-
elasticsearch {
|
508
|
-
host => "127.0.0.1"
|
509
|
-
index => "#{index_name}"
|
510
|
-
}
|
511
|
-
}
|
512
|
-
CONFIG
|
513
|
-
|
514
|
-
|
515
|
-
agent do
|
516
|
-
ftw = FTW::Agent.new
|
517
|
-
ftw.post!("http://localhost:9200/#{index_name}/_refresh")
|
518
|
-
|
519
|
-
# Wait until all events are available.
|
520
|
-
Stud::try(10.times) do
|
521
|
-
data = ""
|
522
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index_name}/_count?q=*")
|
523
|
-
response.read_body { |chunk| data << chunk }
|
524
|
-
result = LogStash::Json.load(data)
|
525
|
-
count = result["count"]
|
526
|
-
insist { count } == 100
|
527
|
-
end
|
528
|
-
|
529
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index_name}/_search?q=*&size=1000")
|
530
|
-
data = ""
|
531
|
-
response.read_body { |chunk| data << chunk }
|
532
|
-
result = LogStash::Json.load(data)
|
533
|
-
result["hits"]["hits"].each do |doc|
|
534
|
-
insist { doc["_type"] } == "logs"
|
535
|
-
end
|
536
|
-
end
|
537
|
-
|
538
|
-
describe "default event type value", :elasticsearch => true do
|
539
|
-
# Generate a random index name
|
540
|
-
index = 10.times.collect { rand(10).to_s }.join("")
|
541
|
-
event_count = 100 + rand(100)
|
542
|
-
flush_size = rand(200) + 1
|
543
|
-
|
544
|
-
config <<-CONFIG
|
545
|
-
input {
|
546
|
-
generator {
|
547
|
-
message => "hello world"
|
548
|
-
count => #{event_count}
|
549
|
-
type => "generated"
|
550
|
-
}
|
551
|
-
}
|
552
|
-
output {
|
553
|
-
elasticsearch {
|
554
|
-
host => "127.0.0.1"
|
555
|
-
index => "#{index}"
|
556
|
-
flush_size => #{flush_size}
|
557
|
-
}
|
558
|
-
}
|
559
|
-
CONFIG
|
560
|
-
|
561
|
-
agent do
|
562
|
-
ftw = FTW::Agent.new
|
563
|
-
ftw.post!("http://localhost:9200/#{index}/_refresh")
|
564
|
-
|
565
|
-
# Wait until all events are available.
|
566
|
-
Stud::try(10.times) do
|
567
|
-
data = ""
|
568
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
|
569
|
-
response.read_body { |chunk| data << chunk }
|
570
|
-
result = LogStash::Json.load(data)
|
571
|
-
count = result["count"]
|
572
|
-
insist { count } == event_count
|
573
|
-
end
|
574
|
-
|
575
|
-
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
|
576
|
-
data = ""
|
577
|
-
response.read_body { |chunk| data << chunk }
|
578
|
-
result = LogStash::Json.load(data)
|
579
|
-
result["hits"]["hits"].each do |doc|
|
580
|
-
insist { doc["_type"] } == "generated"
|
581
|
-
end
|
582
|
-
end
|
583
|
-
end
|
584
|
-
end
|
585
|
-
|
586
|
-
describe "index template expected behavior", :elasticsearch => true do
|
587
|
-
["node", "transport", "http"].each do |protocol|
|
588
|
-
context "with protocol => #{protocol}" do
|
589
|
-
subject do
|
590
|
-
require "logstash/outputs/elasticsearch"
|
591
|
-
settings = {
|
592
|
-
"manage_template" => true,
|
593
|
-
"template_overwrite" => true,
|
594
|
-
"protocol" => protocol,
|
595
|
-
"host" => "localhost"
|
596
|
-
}
|
597
|
-
next LogStash::Outputs::ElasticSearch.new(settings)
|
598
|
-
end
|
599
|
-
|
600
|
-
before :each do
|
601
|
-
# Delete all templates first.
|
602
|
-
require "elasticsearch"
|
603
|
-
|
604
|
-
# Clean ES of data before we start.
|
605
|
-
@es = Elasticsearch::Client.new
|
606
|
-
@es.indices.delete_template(:name => "*")
|
607
|
-
|
608
|
-
# This can fail if there are no indexes, ignore failure.
|
609
|
-
@es.indices.delete(:index => "*") rescue nil
|
610
|
-
|
611
|
-
subject.register
|
612
|
-
|
613
|
-
subject.receive(LogStash::Event.new("message" => "sample message here"))
|
614
|
-
subject.receive(LogStash::Event.new("somevalue" => 100))
|
615
|
-
subject.receive(LogStash::Event.new("somevalue" => 10))
|
616
|
-
subject.receive(LogStash::Event.new("somevalue" => 1))
|
617
|
-
subject.receive(LogStash::Event.new("country" => "us"))
|
618
|
-
subject.receive(LogStash::Event.new("country" => "at"))
|
619
|
-
subject.receive(LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }))
|
620
|
-
subject.buffer_flush(:final => true)
|
621
|
-
@es.indices.refresh
|
622
|
-
|
623
|
-
# Wait or fail until everything's indexed.
|
624
|
-
Stud::try(20.times) do
|
625
|
-
r = @es.search
|
626
|
-
insist { r["hits"]["total"] } == 7
|
627
|
-
end
|
628
|
-
end
|
629
|
-
|
630
|
-
it "permits phrase searching on string fields" do
|
631
|
-
results = @es.search(:q => "message:\"sample message\"")
|
632
|
-
insist { results["hits"]["total"] } == 1
|
633
|
-
insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
|
634
|
-
end
|
635
|
-
|
636
|
-
it "numbers dynamically map to a numeric type and permit range queries" do
|
637
|
-
results = @es.search(:q => "somevalue:[5 TO 105]")
|
638
|
-
insist { results["hits"]["total"] } == 2
|
639
|
-
|
640
|
-
values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
|
641
|
-
insist { values }.include?(10)
|
642
|
-
insist { values }.include?(100)
|
643
|
-
reject { values }.include?(1)
|
644
|
-
end
|
645
|
-
|
646
|
-
it "does not create .raw field for the message field" do
|
647
|
-
results = @es.search(:q => "message.raw:\"sample message here\"")
|
648
|
-
insist { results["hits"]["total"] } == 0
|
649
|
-
end
|
650
|
-
|
651
|
-
it "creates .raw field from any string field which is not_analyzed" do
|
652
|
-
results = @es.search(:q => "country.raw:\"us\"")
|
653
|
-
insist { results["hits"]["total"] } == 1
|
654
|
-
insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
|
655
|
-
|
656
|
-
# partial or terms should not work.
|
657
|
-
results = @es.search(:q => "country.raw:\"u\"")
|
658
|
-
insist { results["hits"]["total"] } == 0
|
659
|
-
end
|
660
|
-
|
661
|
-
it "make [geoip][location] a geo_point" do
|
662
|
-
results = @es.search(:body => { "filter" => { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } } })
|
663
|
-
insist { results["hits"]["total"] } == 1
|
664
|
-
insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
|
665
|
-
end
|
666
|
-
|
667
|
-
it "should index stopwords like 'at' " do
|
668
|
-
results = @es.search(:body => { "facets" => { "t" => { "terms" => { "field" => "country" } } } })["facets"]["t"]
|
669
|
-
terms = results["terms"].collect { |t| t["term"] }
|
670
|
-
|
671
|
-
insist { terms }.include?("us")
|
672
|
-
|
673
|
-
# 'at' is a stopword, make sure stopwords are not ignored.
|
674
|
-
insist { terms }.include?("at")
|
675
|
-
end
|
676
|
-
end
|
677
|
-
end
|
678
|
-
end
|
679
|
-
|
680
|
-
# Exercises the retry machinery of the elasticsearch output: bulk responses
# carrying per-action HTTP statuses 429/503 must be re-submitted, non-retryable
# errors (e.g. 400 mapping failures) must be dropped, and retries stop after
# max_retries. Requires a live local Elasticsearch (:elasticsearch tag).
describe "failures in bulk class expected behavior", :elasticsearch => true do
  let(:template) { '{"template" : "not important, will be updated by :index"}' }
  # Two well-formed events plus the bulk "index" action tuples the output is
  # expected to build for them (retry_count seeded to 0 in @metadata).
  let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action1) { ["index", {:_id=>nil, :_routing=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event1] }
  let(:event2) { LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0] }, "@timestamp" => "2014-11-17T20:37:17.223Z", "@metadata" => {"retry_count" => 0}) }
  let(:action2) { ["index", {:_id=>nil, :_routing=>nil, :_index=>"logstash-2014.11.17", :_type=>"logs"}, event2] }
  # geoip.location is not a valid lat/lon, so ES rejects this with a 400.
  let(:invalid_event) { LogStash::Event.new("geoip" => { "location" => "notlatlon" }, "@timestamp" => "2014-11-17T20:37:17.223Z") }
  let(:max_retries) { 3 }

  # Stubs #bulk on both client implementations so each successive call
  # returns the next canned response from *resp.
  def mock_actions_with_response(*resp)
    LogStash::Outputs::Elasticsearch::Protocols::HTTPClient
      .any_instance.stub(:bulk).and_return(*resp)
    LogStash::Outputs::Elasticsearch::Protocols::NodeClient
      .any_instance.stub(:bulk).and_return(*resp)
  end

  # The retry semantics must hold for every protocol implementation.
  ["node", "transport", "http"].each do |protocol|
    context "with protocol => #{protocol}" do
      subject do
        require "logstash/outputs/elasticsearch"
        settings = {
          "manage_template" => true,
          "index" => "logstash-2014.11.17",
          "template_overwrite" => true,
          "protocol" => protocol,
          "host" => "localhost",
          "retry_max_items" => 10,
          "retry_max_interval" => 1,
          "max_retries" => max_retries
        }
        next LogStash::Outputs::ElasticSearch.new(settings)
      end

      before :each do
        # Delete all templates first.
        require "elasticsearch"

        # Clean ES of data before we start.
        @es = Elasticsearch::Client.new
        @es.indices.delete_template(:name => "*")
        @es.indices.delete(:index => "*")
        @es.indices.refresh
      end

      it "should return no errors if all bulk actions are successful" do
        mock_actions_with_response({"errors" => false})
        expect(subject).to receive(:submit).with([action1, action2]).once.and_call_original
        subject.register
        subject.receive(event1)
        subject.receive(event2)
        subject.buffer_flush(:final => true)
        # Give the async retry thread time to (not) fire.
        sleep(2)
      end

      it "should raise exception and be retried by stud::buffer" do
        call_count = 0
        # First two submits raise; Stud::Buffer keeps retrying until the
        # third attempt succeeds.
        expect(subject).to receive(:submit).with([action1]).exactly(3).times do
          if (call_count += 1) <= 2
            raise "error first two times"
          else
            {"errors" => false}
          end
        end
        subject.register
        subject.receive(event1)
        subject.teardown
      end

      it "should retry actions with response status of 503" do
        # Per-action statuses: only the 503 entries get re-queued, so each
        # subsequent submit carries one fewer action.
        mock_actions_with_response({"errors" => true, "statuses" => [200, 200, 503, 503]},
                                   {"errors" => true, "statuses" => [200, 503]},
                                   {"errors" => false})
        expect(subject).to receive(:submit).with([action1, action1, action1, action2]).ordered.once.and_call_original
        expect(subject).to receive(:submit).with([action1, action2]).ordered.once.and_call_original
        expect(subject).to receive(:submit).with([action2]).ordered.once.and_call_original

        subject.register
        subject.receive(event1)
        subject.receive(event1)
        subject.receive(event1)
        subject.receive(event2)
        subject.buffer_flush(:final => true)
        sleep(3)
      end

      it "should retry actions with response status of 429" do
        mock_actions_with_response({"errors" => true, "statuses" => [429]},
                                   {"errors" => false})
        expect(subject).to receive(:submit).with([action1]).twice.and_call_original
        subject.register
        subject.receive(event1)
        subject.buffer_flush(:final => true)
        sleep(3)
      end

      it "should retry an event until max_retries reached" do
        # Six straight 429s, but only max_retries (3) submits may happen.
        mock_actions_with_response({"errors" => true, "statuses" => [429]},
                                   {"errors" => true, "statuses" => [429]},
                                   {"errors" => true, "statuses" => [429]},
                                   {"errors" => true, "statuses" => [429]},
                                   {"errors" => true, "statuses" => [429]},
                                   {"errors" => true, "statuses" => [429]})
        expect(subject).to receive(:submit).with([action1]).exactly(max_retries).times.and_call_original
        subject.register
        subject.receive(event1)
        subject.buffer_flush(:final => true)
        sleep(3)
      end

      it "non-retryable errors like mapping errors (400) should be dropped and not be retried (unfortunetly)" do
        subject.register
        subject.receive(invalid_event)
        expect(subject).not_to receive(:retry_push)
        subject.teardown

        @es.indices.refresh
        sleep(5)
        # The invalid event must never land in ES.
        Stud::try(10.times) do
          r = @es.search
          insist { r["hits"]["total"] } == 0
        end
      end

      it "successful requests should not be appended to retry queue" do
        subject.register
        subject.receive(event1)
        expect(subject).not_to receive(:retry_push)
        subject.teardown

        @es.indices.refresh
        sleep(5)
        # Exactly the one successfully-indexed event should be searchable.
        Stud::try(10.times) do
          r = @es.search
          insist { r["hits"]["total"] } == 1
        end
      end
    end
  end
end
|
819
|
-
|
820
|
-
# Verifies NodeClient's host-string construction for every supported
# combination of :host (string or array, with or without an embedded port)
# and :port (single number or "low-high" range).
describe "elasticsearch protocol", :elasticsearch => true do
  # Load elasticsearch protocol
  #LogStash::Environment.load_elasticsearch_jars!
  require "logstash/outputs/elasticsearch/protocol"

  describe "elasticsearch node client" do
    # Reference: http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
    it "should support hosts in both string and array" do
      # *hosts* is private on NodeClient, so we exercise it via #send.
      client = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.new

      # Each entry maps the {:host, :port} input to the expected host list.
      # A port embedded in :host always wins over the :port option, and a
      # port range expands to one entry per port.
      expectations = [
        # --- :host given as a string ---
        [{ :host => "host",      :port => 9300 },        "host:9300"],
        [{ :host => "host",      :port => "9300-9302" }, "host:9300,host:9301,host:9302"],
        [{ :host => "host:9303", :port => 9300 },        "host:9303"],
        [{ :host => "host:9303", :port => "9300-9302" }, "host:9303"],
        # --- :host given as an array ---
        [{ :host => ["host"],             :port => 9300 },        "host:9300"],
        [{ :host => ["host1", "host2"],   :port => 9300 },        "host1:9300,host2:9300"],
        [{ :host => ["host1", "host2"],   :port => "9300-9302" }, "host1:9300,host1:9301,host1:9302,host2:9300,host2:9301,host2:9302"],
        [{ :host => ["host1", "host2:9303"], :port => 9300 },        "host1:9300,host2:9303"],
        [{ :host => ["host1", "host2:9303"], :port => "9300-9302" }, "host1:9300,host1:9301,host1:9302,host2:9303"]
      ]

      expectations.each do |options, expected|
        insist { client.send(:hosts, options) } == expected
      end
    end
  end
end
|
859
|
-
|
860
|
-
# user/password authentication is only supported by the HTTP protocol;
# the node and transport protocols must refuse to register with credentials.
describe "Authentication option" do
  %w(node transport).each do |protocol|
    context "with protocol => #{protocol}" do
      subject do
        require "logstash/outputs/elasticsearch"
        LogStash::Outputs::ElasticSearch.new(
          "protocol" => protocol,
          "node_name" => "logstash",
          "cluster" => "elasticsearch",
          "host" => "node01",
          "user" => "test",
          "password" => "test"
        )
      end

      it "should fail in register" do
        expect { subject.register }.to raise_error
      end
    end
  end
end
|
882
|
-
|
883
|
-
# SSL is only supported over the HTTP protocol: node/transport must fail to
# register with ssl => true, while HTTP passes the SSL options through to the
# underlying Elasticsearch::Client (and warns when verification is disabled).
describe "SSL option" do
  %w(node transport).each do |protocol|
    context "with protocol => #{protocol}" do
      subject do
        require "logstash/outputs/elasticsearch"
        LogStash::Outputs::ElasticSearch.new(
          "protocol" => protocol,
          "node_name" => "logstash",
          "cluster" => "elasticsearch",
          "host" => "node01",
          "ssl" => true
        )
      end

      it "should fail in register" do
        expect { subject.register }.to raise_error
      end
    end
  end

  context "when using http protocol" do
    protocol = "http"

    context "when using ssl without cert verification" do
      subject do
        require "logstash/outputs/elasticsearch"
        LogStash::Outputs::ElasticSearch.new(
          "protocol" => protocol,
          "host" => "node01",
          "ssl" => true,
          "ssl_certificate_verification" => false
        )
      end

      it "should pass the flag to the ES client" do
        # Intercept client construction and check the :ssl options hash.
        expect(::Elasticsearch::Client).to receive(:new) do |args|
          expect(args[:ssl]).to eq(:verify => false)
        end
        subject.register
      end

      it "print a warning" do
        # Disabling certificate verification is insecure, so register
        # is expected to log a warning.
        expect(subject.logger).to receive(:warn)
        subject.register
      end
    end
  end
end
|
932
|
-
|
933
|
-
# End-to-end HTTPS smoke test against a TLS-enabled node; requires the
# :elasticsearch_secure environment (CA cert at the path below).
describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
  subject do
    require "logstash/outputs/elasticsearch"
    LogStash::Outputs::ElasticSearch.new(
      "protocol" => "http",
      "node_name" => "logstash",
      "cluster" => "elasticsearch",
      "host" => "node01",
      "user" => "user",
      "password" => "changeme",
      "ssl" => true,
      "cacert" => "/tmp/ca/certs/cacert.pem"
      # Alternatively a JKS truststore may be used instead of the CA cert:
      #"truststore" => "/tmp/ca/truststore.jks",
      #"truststore_password" => "testeteste"
    )
  end

  before :each do
    subject.register
  end

  it "sends events to ES" do
    expect {
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.buffer_flush(:final => true)
    }.to_not raise_error
  end
end
|
963
|
-
|
964
|
-
# End-to-end basic-auth smoke test over plain HTTP; requires the
# :elasticsearch_secure environment with matching credentials.
describe "connect using HTTP Authentication", :elasticsearch_secure => true do
  subject do
    require "logstash/outputs/elasticsearch"
    LogStash::Outputs::ElasticSearch.new(
      "protocol" => "http",
      "cluster" => "elasticsearch",
      "host" => "node01",
      "user" => "user",
      "password" => "changeme"
    )
  end

  before :each do
    subject.register
  end

  it "sends events to ES" do
    expect {
      subject.receive(LogStash::Event.new("message" => "sample message here"))
      subject.buffer_flush(:final => true)
    }.to_not raise_error
  end
end
|
988
|
-
|
989
|
-
# Unit tests for transport-protocol client construction: default host/port
# selection and propagation of the "sniffing" option into the native
# TransportClient settings.
describe "transport protocol" do

  context "host not configured" do
    subject do
      require "logstash/outputs/elasticsearch"
      settings = {
        "protocol" => "transport",
        "node_name" => "mynode"
      }
      next LogStash::Outputs::ElasticSearch.new(settings)
    end

    it "should set host to localhost" do
      # With no host configured the plugin must fall back to localhost
      # and the default transport port range 9300-9305.
      expect(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:new).with({
        :host => "localhost",
        :port => "9300-9305",
        :protocol => "transport",
        :client_settings => {
          "client.transport.sniff" => false,
          "node.name" => "mynode"
        }
      })
      subject.register
    end
  end

  context "sniffing => true" do

    subject do
      require "logstash/outputs/elasticsearch"
      settings = {
        "host" => "node01",
        "protocol" => "transport",
        "sniffing" => true
      }
      next LogStash::Outputs::ElasticSearch.new(settings)
    end

    it "should set the sniffing property to true" do
      # Stub out the real client so register never opens a connection;
      # we only inspect the settings the plugin built.
      expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
      subject.register
      client = subject.instance_eval("@current_client")
      settings = client.instance_eval("@settings")

      expect(settings.build.getAsMap["client.transport.sniff"]).to eq("true")
    end
  end

  context "sniffing => false" do

    subject do
      require "logstash/outputs/elasticsearch"
      settings = {
        "host" => "node01",
        "protocol" => "transport",
        "sniffing" => false
      }
      next LogStash::Outputs::ElasticSearch.new(settings)
    end

    # BUG FIX: the example was mistitled "…to true" (copy-paste from the
    # context above) while asserting "false"; the description now matches
    # the assertion.
    it "should set the sniffing property to false" do
      expect_any_instance_of(LogStash::Outputs::Elasticsearch::Protocols::TransportClient).to receive(:client).and_return(nil)
      subject.register
      client = subject.instance_eval("@current_client")
      settings = client.instance_eval("@settings")

      expect(settings.build.getAsMap["client.transport.sniff"]).to eq("false")
    end
  end
end
|
1059
|
-
end
|