logstash-output-elasticsearch 0.1.6 → 3.0.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -13
- data/CHANGELOG.md +117 -0
- data/CONTRIBUTORS +32 -0
- data/Gemfile +4 -4
- data/LICENSE +1 -1
- data/NOTICE.TXT +5 -0
- data/README.md +110 -0
- data/lib/logstash/outputs/elasticsearch.rb +97 -425
- data/lib/logstash/outputs/elasticsearch/buffer.rb +124 -0
- data/lib/logstash/outputs/elasticsearch/common.rb +205 -0
- data/lib/logstash/outputs/elasticsearch/common_configs.rb +164 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +36 -24
- data/lib/logstash/outputs/elasticsearch/http_client.rb +236 -0
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +106 -0
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +35 -0
- data/logstash-output-elasticsearch.gemspec +17 -15
- data/spec/es_spec_helper.rb +77 -0
- data/spec/fixtures/scripts/scripted_update.groovy +2 -0
- data/spec/fixtures/scripts/scripted_update_nested.groovy +2 -0
- data/spec/fixtures/scripts/scripted_upsert.groovy +2 -0
- data/spec/integration/outputs/create_spec.rb +55 -0
- data/spec/integration/outputs/index_spec.rb +68 -0
- data/spec/integration/outputs/parent_spec.rb +73 -0
- data/spec/integration/outputs/pipeline_spec.rb +75 -0
- data/spec/integration/outputs/retry_spec.rb +163 -0
- data/spec/integration/outputs/routing_spec.rb +65 -0
- data/spec/integration/outputs/secure_spec.rb +108 -0
- data/spec/integration/outputs/templates_spec.rb +90 -0
- data/spec/integration/outputs/update_spec.rb +188 -0
- data/spec/unit/buffer_spec.rb +118 -0
- data/spec/unit/http_client_builder_spec.rb +27 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +133 -0
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +58 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +227 -0
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +55 -0
- metadata +137 -51
- data/.gitignore +0 -4
- data/Rakefile +0 -6
- data/lib/logstash/outputs/elasticsearch/protocol.rb +0 -253
- data/rakelib/publish.rake +0 -9
- data/rakelib/vendor.rake +0 -169
- data/spec/outputs/elasticsearch.rb +0 -518
data/rakelib/publish.rake
DELETED
@@ -1,9 +0,0 @@
-require "gem_publisher"
-
-desc "Publish gem to RubyGems.org"
-task :publish_gem do |t|
-  gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
-  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
-  puts "Published #{gem}" if gem
-end
-
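The deleted task was the plugin's old release hook, normally run as `rake publish_gem` from the gem root. A minimal standalone sketch of the same flow, assuming the gem_publisher gem is installed (the gemspec filename below is illustrative, not taken from the diff):

    # Publish the gem to RubyGems.org only when the gemspec version
    # is newer than the latest released version.
    require "gem_publisher"

    gemspec = "logstash-output-elasticsearch.gemspec"  # illustrative path
    gem = GemPublisher.publish_if_updated(gemspec, :rubygems)
    puts "Published #{gem}" if gem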
data/rakelib/vendor.rake
DELETED
@@ -1,169 +0,0 @@
-require "net/http"
-require "uri"
-require "digest/sha1"
-
-def vendor(*args)
-  return File.join("vendor", *args)
-end
-
-directory "vendor/" => ["vendor"] do |task, args|
-  mkdir task.name
-end
-
-def fetch(url, sha1, output)
-
-  puts "Downloading #{url}"
-  actual_sha1 = download(url, output)
-
-  if actual_sha1 != sha1
-    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
-  end
-end # def fetch
-
-def file_fetch(url, sha1)
-  filename = File.basename( URI(url).path )
-  output = "vendor/#{filename}"
-  task output => [ "vendor/" ] do
-    begin
-      actual_sha1 = file_sha1(output)
-      if actual_sha1 != sha1
-        fetch(url, sha1, output)
-      end
-    rescue Errno::ENOENT
-      fetch(url, sha1, output)
-    end
-  end.invoke
-
-  return output
-end
-
-def file_sha1(path)
-  digest = Digest::SHA1.new
-  fd = File.new(path, "r")
-  while true
-    begin
-      digest << fd.sysread(16384)
-    rescue EOFError
-      break
-    end
-  end
-  return digest.hexdigest
-ensure
-  fd.close if fd
-end
-
-def download(url, output)
-  uri = URI(url)
-  digest = Digest::SHA1.new
-  tmp = "#{output}.tmp"
-  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
-    request = Net::HTTP::Get.new(uri.path)
-    http.request(request) do |response|
-      fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
-      size = (response["content-length"].to_i || -1).to_f
-      count = 0
-      File.open(tmp, "w") do |fd|
-        response.read_body do |chunk|
-          fd.write(chunk)
-          digest << chunk
-          if size > 0 && $stdout.tty?
-            count += chunk.bytesize
-            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
-          end
-        end
-      end
-      $stdout.write("\r \r") if $stdout.tty?
-    end
-  end
-
-  File.rename(tmp, output)
-
-  return digest.hexdigest
-rescue SocketError => e
-  puts "Failure while downloading #{url}: #{e}"
-  raise
-ensure
-  File.unlink(tmp) if File.exist?(tmp)
-end # def download
-
-def untar(tarball, &block)
-  require "archive/tar/minitar"
-  tgz = Zlib::GzipReader.new(File.open(tarball))
-  # Pull out typesdb
-  tar = Archive::Tar::Minitar::Input.open(tgz)
-  tar.each do |entry|
-    path = block.call(entry)
-    next if path.nil?
-    parent = File.dirname(path)
-
-    mkdir_p parent unless File.directory?(parent)
-
-    # Skip this file if the output file is the same size
-    if entry.directory?
-      mkdir path unless File.directory?(path)
-    else
-      entry_mode = entry.instance_eval { @mode } & 0777
-      if File.exists?(path)
-        stat = File.stat(path)
-        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
-        # expose headers in the entry.
-        entry_size = entry.instance_eval { @size }
-        # If file sizes are same, skip writing.
-        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
-      end
-      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
-      File.open(path, "w") do |fd|
-        # eof? check lets us skip empty files. Necessary because the API provided by
-        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
-        # IO object. Something about empty files in this EntryStream causes
-        # IO.copy_stream to throw "can't convert nil into String" on JRuby
-        # TODO(sissel): File a bug about this.
-        while !entry.eof?
-          chunk = entry.read(16384)
-          fd.write(chunk)
-        end
-        #IO.copy_stream(entry, fd)
-      end
-      File.chmod(entry_mode, path)
-    end
-  end
-  tar.close
-  File.unlink(tarball) if File.file?(tarball)
-end # def untar
-
-def ungz(file)
-
-  outpath = file.gsub('.gz', '')
-  tgz = Zlib::GzipReader.new(File.open(file))
-  begin
-    File.open(outpath, "w") do |out|
-      IO::copy_stream(tgz, out)
-    end
-    File.unlink(file)
-  rescue
-    File.unlink(outpath) if File.file?(outpath)
-    raise
-  end
-  tgz.close
-end
-
-desc "Process any vendor files required for this plugin"
-task "vendor" do |task, args|
-
-  @files.each do |file|
-    download = file_fetch(file['url'], file['sha1'])
-    if download =~ /.tar.gz/
-      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
-      untar(download) do |entry|
-        if !file['files'].nil?
-          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
-          out = entry.full_name.split("/").last
-        end
-        File.join('vendor', out)
-      end
-    elsif download =~ /.gz/
-      ungz(download)
-    end
-  end

-end
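Two details worth noting in the deleted `download` helper: the status check `fail ... if [200, 301].include?(response.code)` is inverted as written (it would fail on success), and `Net::HTTP` response codes are Strings, so the `include?` test never matches either way. A minimal corrected sketch of the same fetch-then-verify pattern (redirects are not followed, and all names here are illustrative, not from the diff):

    require "net/http"
    require "uri"
    require "digest/sha1"
    require "fileutils"

    # Download `url` to `output`, failing unless the file's SHA1
    # matches `expected_sha1`.
    def fetch_verified(url, expected_sha1, output)
      uri = URI(url)
      tmp = "#{output}.tmp"
      Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == "https") do |http|
        http.request(Net::HTTP::Get.new(uri.path)) do |response|
          # Net::HTTP codes are Strings; compare against "200", not 200.
          fail "HTTP fetch failed for #{url}: #{response.code}" unless response.code == "200"
          File.open(tmp, "wb") do |fd|
            response.read_body { |chunk| fd.write(chunk) }
          end
        end
      end
      actual = Digest::SHA1.file(tmp).hexdigest
      fail "SHA1 mismatch (expected #{expected_sha1}, got #{actual})" unless actual == expected_sha1
      FileUtils.mv(tmp, output)
    ensure
      File.unlink(tmp) if File.exist?(tmp)
    end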
data/spec/outputs/elasticsearch.rb
DELETED
@@ -1,518 +0,0 @@
-require "spec_helper"
-require "ftw"
-require "logstash/plugin"
-require "logstash/json"
-
-describe "outputs/elasticsearch" do
-
-  it "should register" do
-    output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")
-
-    # register will try to load jars and raise if it cannot find jars
-    expect {output.register}.to_not raise_error
-  end
-
-
-  describe "ship lots of events w/ default index_type", :elasticsearch => true do
-    # Generate a random index name
-    index = 10.times.collect { rand(10).to_s }.join("")
-    type = 10.times.collect { rand(10).to_s }.join("")
-
-    # Write about 10000 events. Add jitter to increase likeliness of finding
-    # boundary-related bugs.
-    event_count = 10000 + rand(500)
-    flush_size = rand(200) + 1
-
-    config <<-CONFIG
-      input {
-        generator {
-          message => "hello world"
-          count => #{event_count}
-          type => "#{type}"
-        }
-      }
-      output {
-        elasticsearch {
-          host => "127.0.0.1"
-          index => "#{index}"
-          flush_size => #{flush_size}
-        }
-      }
-    CONFIG
-
-    agent do
-      # Try a few times to check if we have the correct number of events stored
-      # in ES.
-      #
-      # We try multiple times to allow final agent flushes as well as allowing
-      # elasticsearch to finish processing everything.
-      ftw = FTW::Agent.new
-      ftw.post!("http://localhost:9200/#{index}/_refresh")
-
-      # Wait until all events are available.
-      Stud::try(10.times) do
-        data = ""
-        response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
-        response.read_body { |chunk| data << chunk }
-        result = LogStash::Json.load(data)
-        count = result["count"]
-        insist { count } == event_count
-      end
-
-      response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
-      data = ""
-      response.read_body { |chunk| data << chunk }
-      result = LogStash::Json.load(data)
-      result["hits"]["hits"].each do |doc|
-        # With no 'index_type' set, the document type should be the type
-        # set on the input
-        insist { doc["_type"] } == type
-        insist { doc["_index"] } == index
-        insist { doc["_source"]["message"] } == "hello world"
-      end
-    end
-  end
-
-  describe "testing index_type", :elasticsearch => true do
-    describe "no type value" do
-      # Generate a random index name
-      index = 10.times.collect { rand(10).to_s }.join("")
-      event_count = 100 + rand(100)
-      flush_size = rand(200) + 1
-
-      config <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-          }
-        }
-        output {
-          elasticsearch {
-            host => "127.0.0.1"
-            index => "#{index}"
-            flush_size => #{flush_size}
-          }
-        }
-      CONFIG
-
-      agent do
-        ftw = FTW::Agent.new
-        ftw.post!("http://localhost:9200/#{index}/_refresh")
-
-        # Wait until all events are available.
-        Stud::try(10.times) do
-          data = ""
-          response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
-          response.read_body { |chunk| data << chunk }
-          result = LogStash::Json.load(data)
-          count = result["count"]
-          insist { count } == event_count
-        end
-
-        response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
-        data = ""
-        response.read_body { |chunk| data << chunk }
-        result = LogStash::Json.load(data)
-        result["hits"]["hits"].each do |doc|
-          insist { doc["_type"] } == "logs"
-        end
-      end
-    end
-
-    describe "default event type value" do
-      # Generate a random index name
-      index = 10.times.collect { rand(10).to_s }.join("")
-      event_count = 100 + rand(100)
-      flush_size = rand(200) + 1
-
-      config <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-            type => "generated"
-          }
-        }
-        output {
-          elasticsearch {
-            host => "127.0.0.1"
-            index => "#{index}"
-            flush_size => #{flush_size}
-          }
-        }
-      CONFIG
-
-      agent do
-        ftw = FTW::Agent.new
-        ftw.post!("http://localhost:9200/#{index}/_refresh")
-
-        # Wait until all events are available.
-        Stud::try(10.times) do
-          data = ""
-          response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
-          response.read_body { |chunk| data << chunk }
-          result = LogStash::Json.load(data)
-          count = result["count"]
-          insist { count } == event_count
-        end
-
-        response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
-        data = ""
-        response.read_body { |chunk| data << chunk }
-        result = LogStash::Json.load(data)
-        result["hits"]["hits"].each do |doc|
-          insist { doc["_type"] } == "generated"
-        end
-      end
-    end
-  end
-
-  describe "action => ...", :elasticsearch => true do
-    index_name = 10.times.collect { rand(10).to_s }.join("")
-
-    config <<-CONFIG
-      input {
-        generator {
-          message => "hello world"
-          count => 100
-        }
-      }
-      output {
-        elasticsearch {
-          host => "127.0.0.1"
-          index => "#{index_name}"
-        }
-      }
-    CONFIG
-
-
-    agent do
-      ftw = FTW::Agent.new
-      ftw.post!("http://localhost:9200/#{index_name}/_refresh")
-
-      # Wait until all events are available.
-      Stud::try(10.times) do
-        data = ""
-        response = ftw.get!("http://127.0.0.1:9200/#{index_name}/_count?q=*")
-        response.read_body { |chunk| data << chunk }
-        result = LogStash::Json.load(data)
-        count = result["count"]
-        insist { count } == 100
-      end
-
-      response = ftw.get!("http://127.0.0.1:9200/#{index_name}/_search?q=*&size=1000")
-      data = ""
-      response.read_body { |chunk| data << chunk }
-      result = LogStash::Json.load(data)
-      result["hits"]["hits"].each do |doc|
-        insist { doc["_type"] } == "logs"
-      end
-    end
-
-    describe "default event type value", :elasticsearch => true do
-      # Generate a random index name
-      index = 10.times.collect { rand(10).to_s }.join("")
-      event_count = 100 + rand(100)
-      flush_size = rand(200) + 1
-
-      config <<-CONFIG
-        input {
-          generator {
-            message => "hello world"
-            count => #{event_count}
-            type => "generated"
-          }
-        }
-        output {
-          elasticsearch {
-            host => "127.0.0.1"
-            index => "#{index}"
-            flush_size => #{flush_size}
-          }
-        }
-      CONFIG
-
-      agent do
-        ftw = FTW::Agent.new
-        ftw.post!("http://localhost:9200/#{index}/_refresh")
-
-        # Wait until all events are available.
-        Stud::try(10.times) do
-          data = ""
-          response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
-          response.read_body { |chunk| data << chunk }
-          result = LogStash::Json.load(data)
-          count = result["count"]
-          insist { count } == event_count
-        end
-
-        response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
-        data = ""
-        response.read_body { |chunk| data << chunk }
-        result = LogStash::Json.load(data)
-        result["hits"]["hits"].each do |doc|
-          insist { doc["_type"] } == "generated"
-        end
-      end
-    end
-  end
-
-  describe "wildcard substitution in index templates", :todo => true do
-    require "logstash/outputs/elasticsearch"
-
-    let(:template) { '{"template" : "not important, will be updated by :index"}' }
-
-    def settings_with_index(index)
-      return {
-        "manage_template" => true,
-        "template_overwrite" => true,
-        "protocol" => "http",
-        "host" => "localhost",
-        "index" => "#{index}"
-      }
-    end
-
-    it "should substitude placeholders" do
-      IO.stub(:read).with(anything) { template }
-      es_output = LogStash::Outputs::ElasticSearch.new(settings_with_index("index-%{YYYY}"))
-      insist { es_output.get_template['template'] } == "index-*"
-    end
-
-    it "should do nothing to an index with no placeholder" do
-      IO.stub(:read).with(anything) { template }
-      es_output = LogStash::Outputs::ElasticSearch.new(settings_with_index("index"))
-      insist { es_output.get_template['template'] } == "index"
-    end
-  end
-
-  describe "index template expected behavior", :elasticsearch => true do
-    ["node", "transport", "http"].each do |protocol|
-      context "with protocol => #{protocol}" do
-        subject do
-          require "logstash/outputs/elasticsearch"
-          settings = {
-            "manage_template" => true,
-            "template_overwrite" => true,
-            "protocol" => protocol,
-            "host" => "localhost"
-          }
-          next LogStash::Outputs::ElasticSearch.new(settings)
-        end
-
-        before :each do
-          # Delete all templates first.
-          require "elasticsearch"
-
-          # Clean ES of data before we start.
-          @es = Elasticsearch::Client.new
-          @es.indices.delete_template(:name => "*")
-
-          # This can fail if there are no indexes, ignore failure.
-          @es.indices.delete(:index => "*") rescue nil
-
-          subject.register
-
-          subject.receive(LogStash::Event.new("message" => "sample message here"))
-          subject.receive(LogStash::Event.new("somevalue" => 100))
-          subject.receive(LogStash::Event.new("somevalue" => 10))
-          subject.receive(LogStash::Event.new("somevalue" => 1))
-          subject.receive(LogStash::Event.new("country" => "us"))
-          subject.receive(LogStash::Event.new("country" => "at"))
-          subject.receive(LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] }))
-          subject.buffer_flush(:final => true)
-          @es.indices.refresh
-
-          # Wait or fail until everything's indexed.
-          Stud::try(20.times) do
-            r = @es.search
-            insist { r["hits"]["total"] } == 7
-          end
-        end
-
-        it "permits phrase searching on string fields" do
-          results = @es.search(:q => "message:\"sample message\"")
-          insist { results["hits"]["total"] } == 1
-          insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
-        end
-
-        it "numbers dynamically map to a numeric type and permit range queries" do
-          results = @es.search(:q => "somevalue:[5 TO 105]")
-          insist { results["hits"]["total"] } == 2
-
-          values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
-          insist { values }.include?(10)
-          insist { values }.include?(100)
-          reject { values }.include?(1)
-        end
-
-        it "creates .raw field fro any string field which is not_analyzed" do
-          results = @es.search(:q => "message.raw:\"sample message here\"")
-          insist { results["hits"]["total"] } == 1
-          insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
-
-          # partial or terms should not work.
-          results = @es.search(:q => "message.raw:\"sample\"")
-          insist { results["hits"]["total"] } == 0
-        end
-
-        it "make [geoip][location] a geo_point" do
-          results = @es.search(:body => { "filter" => { "geo_distance" => { "distance" => "1000km", "geoip.location" => { "lat" => 0.5, "lon" => 0.5 } } } })
-          insist { results["hits"]["total"] } == 1
-          insist { results["hits"]["hits"][0]["_source"]["geoip"]["location"] } == [ 0.0, 0.0 ]
-        end
-
-        it "should index stopwords like 'at' " do
-          results = @es.search(:body => { "facets" => { "t" => { "terms" => { "field" => "country" } } } })["facets"]["t"]
-          terms = results["terms"].collect { |t| t["term"] }
-
-          insist { terms }.include?("us")
-
-          # 'at' is a stopword, make sure stopwords are not ignored.
-          insist { terms }.include?("at")
-        end
-      end
-    end
-  end
-
-  describe "elasticsearch protocol" do
-    # ElasticSearch related jars
-    #LogStash::Environment.load_elasticsearch_jars!
-    # Load elasticsearch protocol
-    require "logstash/outputs/elasticsearch/protocol"
-
-    describe "elasticsearch node client" do
-      # Test ElasticSearch Node Client
-      # Reference: http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
-
-      it "should support hosts in both string and array" do
-        # Because we defined *hosts* method in NodeClient as private,
-        # we use *obj.send :method,[args...]* to call method *hosts*
-        client = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.new
-
-        # Node client should support host in string
-        # Case 1: default :host in string
-        insist { client.send :hosts, :host => "host",:port => 9300 } == "host:9300"
-        # Case 2: :port =~ /^\d+_\d+$/
-        insist { client.send :hosts, :host => "host",:port => "9300-9302"} == "host:9300,host:9301,host:9302"
-        # Case 3: :host =~ /^.+:.+$/
-        insist { client.send :hosts, :host => "host:9303",:port => 9300 } == "host:9303"
-        # Case 4: :host =~ /^.+:.+$/ and :port =~ /^\d+_\d+$/
-        insist { client.send :hosts, :host => "host:9303",:port => "9300-9302"} == "host:9303"
-
-        # Node client should support host in array
-        # Case 5: :host in array with single item
-        insist { client.send :hosts, :host => ["host"],:port => 9300 } == ("host:9300")
-        # Case 6: :host in array with more than one items
-        insist { client.send :hosts, :host => ["host1","host2"],:port => 9300 } == "host1:9300,host2:9300"
-        # Case 7: :host in array with more than one items and :port =~ /^\d+_\d+$/
-        insist { client.send :hosts, :host => ["host1","host2"],:port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9300,host2:9301,host2:9302"
-        # Case 8: :host in array with more than one items and some :host =~ /^.+:.+$/
-        insist { client.send :hosts, :host => ["host1","host2:9303"],:port => 9300 } == "host1:9300,host2:9303"
-        # Case 9: :host in array with more than one items, :port =~ /^\d+_\d+$/ and some :host =~ /^.+:.+$/
-        insist { client.send :hosts, :host => ["host1","host2:9303"],:port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9303"
-      end
-    end
-  end
-
-  describe "Authentication option" do
-    ["node", "transport"].each do |protocol|
-      context "with protocol => #{protocol}" do
-        subject do
-          require "logstash/outputs/elasticsearch"
-          settings = {
-            "protocol" => protocol,
-            "node_name" => "logstash",
-            "cluster" => "elasticsearch",
-            "host" => "node01",
-            "user" => "test",
-            "password" => "test"
-          }
-          next LogStash::Outputs::ElasticSearch.new(settings)
-        end
-
-        it "should fail in register" do
-          expect {subject.register}.to raise_error
-        end
-      end
-    end
-  end
-
-  describe "SSL option" do
-    ["node", "transport"].each do |protocol|
-      context "with protocol => #{protocol}" do
-        subject do
-          require "logstash/outputs/elasticsearch"
-          settings = {
-            "protocol" => protocol,
-            "node_name" => "logstash",
-            "cluster" => "elasticsearch",
-            "host" => "node01",
-            "ssl" => true
-          }
-          next LogStash::Outputs::ElasticSearch.new(settings)
-        end
-
-        it "should fail in register" do
-          expect {subject.register}.to raise_error
-        end
-      end
-    end
-  end
-
-  describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
-    subject do
-      require "logstash/outputs/elasticsearch"
-      settings = {
-        "protocol" => "http",
-        "node_name" => "logstash",
-        "cluster" => "elasticsearch",
-        "host" => "node01",
-        "user" => "user",
-        "password" => "changeme",
-        "ssl" => true,
-        "cacert" => "/tmp/ca/certs/cacert.pem",
-        # or
-        #"truststore" => "/tmp/ca/truststore.jks",
-        #"truststore_password" => "testeteste"
-      }
-      next LogStash::Outputs::ElasticSearch.new(settings)
-    end
-
-    before :each do
-      subject.register
-    end
-
-    it "sends events to ES" do
-      expect {
-        subject.receive(LogStash::Event.new("message" => "sample message here"))
-        subject.buffer_flush(:final => true)
-      }.to_not raise_error
-    end
-  end
-
-  describe "connect using HTTP Authentication", :elasticsearch_secure => true do
-    subject do
-      require "logstash/outputs/elasticsearch"
-      settings = {
-        "protocol" => "http",
-        "cluster" => "elasticsearch",
-        "host" => "node01",
-        "user" => "user",
-        "password" => "changeme",
-      }
-      next LogStash::Outputs::ElasticSearch.new(settings)
-    end
-
-    before :each do
-      subject.register
-    end
-
-    it "sends events to ES" do
-      expect {
-        subject.receive(LogStash::Event.new("message" => "sample message here"))
-        subject.buffer_flush(:final => true)
-      }.to_not raise_error
-    end
-  end
-end
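For comparison with the 0.1.x configs exercised above: alongside the removal of data/lib/logstash/outputs/elasticsearch/protocol.rb, the 3.x plugin drops the node and transport protocols, speaks HTTP only, and replaces the single `host` option with a `hosts` array. A minimal pipeline against the new version would look roughly like this (index name illustrative):

    input {
      generator {
        message => "hello world"
        count   => 100
      }
    }
    output {
      elasticsearch {
        hosts => ["127.0.0.1:9200"]
        index => "sample-index"
      }
    }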