logstash-output-googlecloudstorage 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +73 -0
- data/CONTRIBUTORS +20 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +98 -0
- data/docs/index.asciidoc +147 -0
- data/lib/logstash/outputs/googlecloudstorage.rb +475 -0
- data/logstash-output-googlecloudstorage.gemspec +31 -0
- data/spec/outputs/googlecloudstorage_spec.rb +467 -0
- data/spec/spec_helper.rb +3 -0
- metadata +164 -0
@@ -0,0 +1,31 @@
|
|
1
|
+
# Gem specification for the logstash-output-googlecloudstorage Logstash
# output plugin. Installed on top of a Logstash core pipeline via
# `$LS_HOME/bin/logstash-plugin install logstash-output-googlecloudstorage`.
Gem::Specification.new do |s|

  s.name = 'logstash-output-googlecloudstorage'
  s.version = '2.1.0'
  # NOTE(review): 'Apache License (2.0)' is not a valid SPDX identifier;
  # newer RubyGems versions warn on it. Consider 'Apache-2.0' -- left
  # unchanged here to avoid altering published metadata semantics.
  s.licenses = ['Apache License (2.0)']
  # Fixed: removed the stray trailing space and normalized the product
  # name capitalization ("Google Cloud Storage").
  s.summary = "Writes events to Google Cloud Storage"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Elastic"]
  s.email = 'shailesh@kontext.in'
  s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
  s.require_paths = ["lib"]

  # Files shipped in the gem (sources, specs, docs, vendored jars).
  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Gem dependencies
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 2.0.0", "< 2.99"
  s.add_runtime_dependency 'logstash-codec-json_lines'
  s.add_runtime_dependency 'logstash-codec-line'
  s.add_runtime_dependency 'google-api-client', '~> 0.8.7' # version 0.9.x works only with ruby 2.x

  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'flores'
  s.add_development_dependency 'logstash-input-generator'
end
|
@@ -0,0 +1,467 @@
|
|
1
|
+
# encoding: UTF-8
|
2
|
+
require "logstash/devutils/rspec/spec_helper"
|
3
|
+
require "logstash/outputs/googlecloudstorage"
|
4
|
+
require "logstash/codecs/line"
|
5
|
+
require "logstash/codecs/json_lines"
|
6
|
+
require "logstash/event"
|
7
|
+
require "logstash/json"
|
8
|
+
require "stud/temporary"
|
9
|
+
require "tempfile"
|
10
|
+
require "uri"
|
11
|
+
require "fileutils"
|
12
|
+
require "flores/random"
|
13
|
+
|
14
|
+
describe LogStash::Outputs::GoogleCloudStorage do
|
15
|
+
  # End-to-end pipeline test: generate a fixed number of events and verify
  # that every one of them reaches the output file, in sequence order.
  #
  # NOTE(review): the output stanza uses the `file` plugin, not
  # `googlecloudstorage` -- this spec looks copied from logstash-output-file;
  # confirm it exercises the intended plugin.
  describe "ship lots of events to a file" do
    tmp_file = Tempfile.new('logstash-spec-output-file')
    # Randomized count guards against off-by-one assumptions on fixed sizes.
    event_count = 10000 + rand(500)

    config <<-CONFIG
      input {
        generator {
          message => "hello world"
          count => #{event_count}
          type => "generator"
        }
      }
      output {
        file {
          path => "#{tmp_file.path}"
        }
      }
    CONFIG

    agent do
      line_num = 0

      # Now check all events for order and correctness.
      events = tmp_file.map {|line| LogStash::Event.new(LogStash::Json.load(line))}
      sorted = events.sort_by {|e| e.get('sequence')}
      sorted.each do |event|
        insist {event.get("message")} == "hello world"
        insist {event.get("sequence")} == line_num
        line_num += 1
      end

      # Every generated event must have been written exactly once.
      insist {line_num} == event_count
    end # agent
  end
|
49
|
+
|
50
|
+
  # Same end-to-end test as above, but with gzip compression enabled on
  # the output file; events are read back through a GzipReader.
  describe "ship lots of events to a file gzipped" do
    # NOTE(review): Stud::Temporary.file runs at example-group definition
    # time rather than inside an example -- confirm this is intentional.
    Stud::Temporary.file('logstash-spec-output-file') do |tmp_file|
      event_count = 100000 + rand(500)

      config <<-CONFIG
        input {
          generator {
            message => "hello world"
            count => #{event_count}
            type => "generator"
          }
        }
        output {
          file {
            path => "#{tmp_file.path}"
            gzip => true
          }
        }
      CONFIG

      agent do
        line_num = 0
        # Now check all events for order and correctness.
        # NOTE(review): Zlib is not required at the top of this file;
        # presumably loaded by the logstash runtime -- confirm.
        events = Zlib::GzipReader.open(tmp_file.path).map {|line| LogStash::Event.new(LogStash::Json.load(line)) }
        sorted = events.sort_by {|e| e.get("sequence")}
        sorted.each do |event|
          insist {event.get("message")} == "hello world"
          insist {event.get("sequence")} == line_num
          line_num += 1
        end
        insist {line_num} == event_count
      end # agent
    end
  end
|
84
|
+
|
85
|
+
describe "#register" do
|
86
|
+
let(:path) { '/%{name}' }
|
87
|
+
let(:output) { LogStash::Outputs::GoogleCloudStorage.new({ "path" => path }) }
|
88
|
+
|
89
|
+
it 'doesnt allow the path to start with a dynamic string' do
|
90
|
+
expect { output.register }.to raise_error(LogStash::ConfigurationError)
|
91
|
+
output.close
|
92
|
+
end
|
93
|
+
|
94
|
+
context 'doesnt allow the root directory to have some dynamic part' do
|
95
|
+
['/a%{name}/',
|
96
|
+
'/a %{name}/',
|
97
|
+
'/a- %{name}/',
|
98
|
+
'/a- %{name}'].each do |test_path|
|
99
|
+
it "with path: #{test_path}" do
|
100
|
+
path = test_path
|
101
|
+
expect { output.register }.to raise_error(LogStash::ConfigurationError)
|
102
|
+
output.close
|
103
|
+
end
|
104
|
+
end
|
105
|
+
end
|
106
|
+
|
107
|
+
it 'allow to have dynamic part after the file root' do
|
108
|
+
path = '/tmp/%{name}'
|
109
|
+
output = LogStash::Outputs::GoogleCloudStorage.new({ "path" => path })
|
110
|
+
expect { output.register }.not_to raise_error
|
111
|
+
end
|
112
|
+
end
|
113
|
+
|
114
|
+
describe "receiving events" do
|
115
|
+
|
116
|
+
    # With write_behavior => "overwrite" the output truncates the target
    # file per batch instead of appending, so only the last event written
    # should remain on disk.
    context "when write_behavior => 'overwrite'" do
      let(:tmp) { Stud::Temporary.pathname }
      let(:config) {
        {
          "write_behavior" => "overwrite",
          "path" => tmp,
          "codec" => LogStash::Codecs::JSONLines.new,
          # flush immediately so the file can be read right after receive
          "flush_interval" => 0
        }
      }
      let(:output) { LogStash::Outputs::GoogleCloudStorage.new(config) }

      # NOTE(review): `count` appears unused by the examples below.
      let(:count) { Flores::Random.integer(1..10) }
      # Randomized batch size via Flores.
      let(:events) do
        Flores::Random.iterations(1..10).collect do |i|
          LogStash::Event.new("value" => i)
        end
      end

      before do
        output.register
      end

      after do
        File.unlink(tmp) if File.exist?(tmp)
      end

      it "should write only the last event of a batch" do
        output.multi_receive(events)
        result = LogStash::Json.load(File.read(tmp))
        expect(result["value"]).to be == events.last.get("value")
      end

      context "the file" do
        it "should only contain the last event received" do
          # Receiving one-event batches repeatedly must leave exactly the
          # most recent event in the file each time.
          events.each do |event|
            output.multi_receive([event])
            result = LogStash::Json.load(File.read(tmp))
            expect(result["value"]).to be == event.get("value")
          end
        end
      end
    end
|
159
|
+
|
160
|
+
    # Behaviour when the target file disappears mid-stream: by default the
    # plugin recreates it; with create_if_deleted => false it diverts
    # events to the failure path instead.
    context "when the output file is deleted" do

      let(:temp_file) { Tempfile.new('logstash-spec-output-file_deleted') }

      let(:config) do
        { "path" => temp_file.path, "flush_interval" => 0 }
      end

      it "should recreate the required file if deleted" do
        output = LogStash::Outputs::GoogleCloudStorage.new(config)
        output.register

        10.times do |i|
          event = LogStash::Event.new("event_id" => i)
          output.multi_receive([event])
        end
        # Delete the file out from under the plugin...
        FileUtils.rm(temp_file)
        10.times do |i|
          event = LogStash::Event.new("event_id" => i+10)
          output.multi_receive([event])
        end

        # ...and expect it to have been recreated with content.
        expect(FileTest.size(temp_file.path)).to be > 0
      end

      context "when appending to the error log" do
        let(:config) do
          { "path" => temp_file.path, "flush_interval" => 0, "create_if_deleted" => false }
        end

        it "should append the events to the filename_failure location" do
          output = LogStash::Outputs::GoogleCloudStorage.new(config)
          output.register

          10.times do |i|
            event = LogStash::Event.new("event_id" => i)
            output.multi_receive([event])
          end
          FileUtils.rm(temp_file)
          10.times do |i|
            event = LogStash::Event.new("event_id" => i+10)
            output.multi_receive([event])
          end
          # Original file must stay deleted; failures accumulate elsewhere.
          expect(FileTest.exist?(temp_file.path)).to be_falsey
          expect(FileTest.size(output.failure_path)).to be > 0
        end

      end

    end
|
211
|
+
|
212
|
+
    # Path-interpolation safety: %{field} references in `path` must not be
    # able to escape the configured root directory (directory traversal),
    # while legitimate nested dynamic paths are honored.
    context "when using an interpolated path" do
      context "when trying to write outside the files root directory" do
        # Event whose interpolated field attempts a directory traversal.
        let(:bad_event) do
          event = LogStash::Event.new
          event.set('error', '../uncool/directory')
          event
        end

        it 'writes the bad event in the specified error file' do
          Stud::Temporary.directory('filepath_error') do |path|
            config = {
              "path" => "#{path}/%{error}",
              "filename_failure" => "_error"
            }

            # Trying to write outside the file root
            outside_path = "#{'../' * path.split(File::SEPARATOR).size}notcool"
            bad_event.set("error", outside_path)


            output = LogStash::Outputs::GoogleCloudStorage.new(config)
            output.register
            output.multi_receive([bad_event])

            # Escaping events land in the configured failure file instead.
            error_file = File.join(path, config["filename_failure"])

            expect(File.exist?(error_file)).to eq(true)
            output.close
          end
        end

        it 'doesnt decode relatives paths urlencoded' do
          Stud::Temporary.directory('filepath_error') do |path|
            # URL-encoded traversal attempts must be treated literally,
            # not decoded into '../' sequences.
            encoded_once = "%2E%2E%2ftest" # ../test
            encoded_twice = "%252E%252E%252F%252E%252E%252Ftest" # ../../test

            output = LogStash::Outputs::GoogleCloudStorage.new({ "path" => "/#{path}/%{error}"})
            output.register

            bad_event.set('error', encoded_once)
            output.multi_receive([bad_event])

            bad_event.set('error', encoded_twice)
            output.multi_receive([bad_event])

            # Both events stay inside the root: two files created there.
            expect(Dir.glob(File.join(path, "*")).size).to eq(2)
            output.close
          end
        end

        it 'doesnt write outside the file if the path is double escaped' do
          Stud::Temporary.directory('filepath_error') do |path|
            output = LogStash::Outputs::GoogleCloudStorage.new({ "path" => "/#{path}/%{error}"})
            output.register

            bad_event.set('error', '../..//test')
            output.multi_receive([bad_event])

            expect(Dir.glob(File.join(path, "*")).size).to eq(1)
            output.close
          end
        end
      end

      context 'when trying to write inside the file root directory' do
        it 'write the event to the generated filename' do
          good_event = LogStash::Event.new
          good_event.set('error', '42.txt')

          Stud::Temporary.directory do |path|
            config = { "path" => "#{path}/%{error}" }
            output = LogStash::Outputs::GoogleCloudStorage.new(config)
            output.register
            output.multi_receive([good_event])

            good_file = File.join(path, good_event.get('error'))
            expect(File.exist?(good_file)).to eq(true)
            output.close
          end
        end

        it 'write the events to a file when some part of a folder or file is dynamic' do
          t = Time.now.utc
          good_event = LogStash::Event.new("@timestamp" => t)

          Stud::Temporary.directory do |path|
            # %{+YYYY-MM-dd} is joda-style date interpolation of @timestamp.
            dynamic_path = "#{path}/failed_syslog-%{+YYYY-MM-dd}"
            expected_path = "#{path}/failed_syslog-#{t.strftime("%Y-%m-%d")}"

            config = { "path" => dynamic_path }
            output = LogStash::Outputs::GoogleCloudStorage.new(config)
            output.register
            output.multi_receive([good_event])

            expect(File.exist?(expected_path)).to eq(true)
            output.close
          end
        end

        it 'write the events to the generated path containing multiples fieldref' do
          t = Time.now.utc
          good_event = LogStash::Event.new("error" => 42,
                                           "@timestamp" => t,
                                           "level" => "critical",
                                           "weird_path" => '/inside/../deep/nested')

          Stud::Temporary.directory do |path|
            # '..' inside an interpolated value is normalized away as long
            # as the result stays under the root.
            dynamic_path = "#{path}/%{error}/%{level}/%{weird_path}/failed_syslog-%{+YYYY-MM-dd}"
            expected_path = "#{path}/42/critical/deep/nested/failed_syslog-#{t.strftime("%Y-%m-%d")}"

            config = { "path" => dynamic_path }

            output = LogStash::Outputs::GoogleCloudStorage.new(config)
            output.register
            output.multi_receive([good_event])

            expect(File.exist?(expected_path)).to eq(true)
            output.close
          end
        end

        it 'write the event to the generated filename with multiple deep' do
          good_event = LogStash::Event.new
          good_event.set('error', '/inside/errors/42.txt')

          Stud::Temporary.directory do |path|
            config = { "path" => "#{path}/%{error}" }
            output = LogStash::Outputs::GoogleCloudStorage.new(config)
            output.register
            output.multi_receive([good_event])

            good_file = File.join(path, good_event.get('error'))
            expect(File.exist?(good_file)).to eq(true)
            output.close
          end
        end
      end
    end
|
350
|
+
context "output string format" do
|
351
|
+
context "when using default configuration" do
|
352
|
+
it 'write the event as a json line' do
|
353
|
+
good_event = LogStash::Event.new
|
354
|
+
good_event.set('message', 'hello world')
|
355
|
+
|
356
|
+
Stud::Temporary.directory do |path|
|
357
|
+
config = { "path" => "#{path}/output.txt" }
|
358
|
+
output = LogStash::Outputs::GoogleCloudStorage.new(config)
|
359
|
+
output.register
|
360
|
+
output.multi_receive([good_event])
|
361
|
+
good_file = File.join(path, 'output.txt')
|
362
|
+
expect(File.exist?(good_file)).to eq(true)
|
363
|
+
output.close #teardown first to allow reading the file
|
364
|
+
File.open(good_file) {|f|
|
365
|
+
event = LogStash::Event.new(LogStash::Json.load(f.readline))
|
366
|
+
expect(event.get("message")).to eq("hello world")
|
367
|
+
}
|
368
|
+
end
|
369
|
+
end
|
370
|
+
end
|
371
|
+
context "when using line codec" do
|
372
|
+
it 'writes event using specified format' do
|
373
|
+
good_event = LogStash::Event.new
|
374
|
+
good_event.set('message', "hello world")
|
375
|
+
|
376
|
+
Stud::Temporary.directory do |path|
|
377
|
+
config = { "path" => "#{path}/output.txt" }
|
378
|
+
output = LogStash::Outputs::GoogleCloudStorage.new(config.merge("codec" => LogStash::Codecs::Line.new({ "format" => "Custom format: %{message}"})))
|
379
|
+
output.register
|
380
|
+
output.multi_receive([good_event])
|
381
|
+
good_file = File.join(path, 'output.txt')
|
382
|
+
expect(File.exist?(good_file)).to eq(true)
|
383
|
+
output.close #teardown first to allow reading the file
|
384
|
+
File.open(good_file) {|f|
|
385
|
+
line = f.readline
|
386
|
+
expect(line).to eq("Custom format: hello world\n")
|
387
|
+
}
|
388
|
+
end
|
389
|
+
end
|
390
|
+
end
|
391
|
+
context "when using file and dir modes" do
|
392
|
+
it 'dirs and files are created with correct atypical permissions' do
|
393
|
+
good_event = LogStash::Event.new
|
394
|
+
good_event.set('message', "hello world")
|
395
|
+
|
396
|
+
Stud::Temporary.directory do |path|
|
397
|
+
config = {
|
398
|
+
"path" => "#{path}/is/nested/output.txt",
|
399
|
+
"dir_mode" => 0751,
|
400
|
+
"file_mode" => 0610,
|
401
|
+
}
|
402
|
+
output = LogStash::Outputs::GoogleCloudStorage.new(config)
|
403
|
+
output.register
|
404
|
+
output.multi_receive([good_event])
|
405
|
+
good_file = File.join(path, 'is/nested/output.txt')
|
406
|
+
expect(File.exist?(good_file)).to eq(true)
|
407
|
+
expect(File.stat(good_file).mode.to_s(8)[-3..-1]).to eq('610')
|
408
|
+
first_good_dir = File.join(path, 'is')
|
409
|
+
expect(File.stat(first_good_dir).mode.to_s(8)[-3..-1]).to eq('751')
|
410
|
+
second_good_dir = File.join(path, 'is/nested')
|
411
|
+
expect(File.stat(second_good_dir).mode.to_s(8)[-3..-1]).to eq('751')
|
412
|
+
output.close #teardown first to allow reading the file
|
413
|
+
File.open(good_file) {|f|
|
414
|
+
event = LogStash::Event.new(LogStash::Json.load(f.readline))
|
415
|
+
expect(event.get("message")).to eq("hello world")
|
416
|
+
}
|
417
|
+
end
|
418
|
+
end
|
419
|
+
end
|
420
|
+
end
|
421
|
+
|
422
|
+
context "with non-zero flush interval" do
|
423
|
+
let(:temporary_output_file) { Stud::Temporary.pathname }
|
424
|
+
|
425
|
+
let(:event_count) { 10 }
|
426
|
+
let(:flush_interval) { 5 }
|
427
|
+
|
428
|
+
let(:events) do
|
429
|
+
event_count.times.map do |idx|
|
430
|
+
LogStash::Event.new("value" => idx)
|
431
|
+
end
|
432
|
+
end
|
433
|
+
|
434
|
+
let(:config) {
|
435
|
+
{
|
436
|
+
"path" => temporary_output_file,
|
437
|
+
"codec" => LogStash::Codecs::JSONLines.new,
|
438
|
+
"flush_interval" => flush_interval
|
439
|
+
}
|
440
|
+
}
|
441
|
+
let(:output) { LogStash::Outputs::GoogleCloudStorage.new(config) }
|
442
|
+
|
443
|
+
before(:each) { output.register }
|
444
|
+
after(:each) do
|
445
|
+
output.close
|
446
|
+
File.exist?(temporary_output_file) && File.unlink(temporary_output_file)
|
447
|
+
end
|
448
|
+
|
449
|
+
it 'eventually flushes without receiving additional events' do
|
450
|
+
output.multi_receive(events)
|
451
|
+
|
452
|
+
# events should not all be flushed just yet...
|
453
|
+
expect(File.read(temporary_output_file)).to satisfy("have less than #{event_count} lines") do |contents|
|
454
|
+
contents && contents.lines.count < event_count
|
455
|
+
end
|
456
|
+
|
457
|
+
# wait for the flusher to run...
|
458
|
+
sleep(flush_interval + 1)
|
459
|
+
|
460
|
+
# events should all be flushed
|
461
|
+
expect(File.read(temporary_output_file)).to satisfy("have exactly #{event_count} lines") do |contents|
|
462
|
+
contents && contents.lines.count == event_count
|
463
|
+
end
|
464
|
+
end
|
465
|
+
end
|
466
|
+
end
|
467
|
+
end
|