logstash-input-s3-test 4.0.0

@@ -0,0 +1,20 @@
+ # This is a patch related to autoloading and Ruby.
+ #
+ # The fix exists in JRuby 9k but not in the current JRuby; it is unclear when
+ # or if it will be backported.
+ # https://github.com/jruby/jruby/issues/3645
+ #
+ # AWS does tricky name discovery in the module to generate the correct error
+ # class, and that strategy breaks on JRuby; `eager_autoload` doesn't fix it.
+ #
+ # This should be a short-lived patch, since AWS is removing the need for it.
+ # see: https://github.com/aws/aws-sdk-ruby/issues/1301#issuecomment-261115960
+ require "stringio"
+ old_stderr = $stderr
+
+ $stderr = StringIO.new
+ begin
+   module Aws
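+     # Evaluating `Aws::S3` here resolves the autoload; re-registering the
+     # constant with const_set then emits an "already initialized constant"
+     # warning, which is why $stderr is swapped out around this block.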
+     const_set(:S3, Aws::S3)
+   end
+ ensure
+   $stderr = old_stderr
+ end
@@ -0,0 +1,32 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-input-s3-test'
+   s.version = '4.0.0'
+   s.licenses = ['Apache-2.0']
+   s.summary = "Streams events from files in an S3 bucket"
+   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+   s.authors = ["Sukhbir"]
+   s.email = 'sukhbir947@gmail.com'
+   s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 2.1.12", "<= 2.99"
+   s.add_runtime_dependency 'stud', '~> 0.0.18'
+   s.add_runtime_dependency 'aws-sdk-s3', '~> 1'
+
+   s.add_development_dependency 'logstash-devutils'
+   s.add_development_dependency "logstash-codec-json"
+   s.add_development_dependency "logstash-codec-plain"
+   s.add_development_dependency "logstash-codec-multiline"
+ end
+
@@ -0,0 +1,4 @@
+ #Version: 1.0
+ #Fields: date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url c-user-agent x-sname x-sname-query x-file-ext x-sid
+ 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+ 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
@@ -0,0 +1,2 @@
+ 2015-01-01T02:52:45.866722Z no "GET http://www.logstash.com:80/utfmadness/��4od HTTP/1.1"
+
@@ -0,0 +1,2 @@
+ { "hello": "world" }
+ { "hello": "awesome world" }
@@ -0,0 +1,2 @@
+ { "message": ["GET", 32, "/health"] }
+ { "message": true }
@@ -0,0 +1,6 @@
+ __SEPARATOR__
+ file:1 record:1 line:1
+ file:1 record:1 line:2
+ __SEPARATOR__
+ file:1 record:2 line:1
+ file:1 record:2 line:2
@@ -0,0 +1,2 @@
+ 2010-03-12 23:51:20 SEA4 192.0.2.147 connect 2014 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 - - - -
+ 2010-03-12 23:51:21 SEA4 192.0.2.222 play 3914 OK bfd8a98bee0840d9b871b7f6ade9908f rtmp://shqshne4jdp4b6.cloudfront.net/cfx/st key=value http://player.longtailvideo.com/player.swf http://www.longtailvideo.com/support/jw-player-setup-wizard?example=204 LNX%2010,0,32,18 myvideo p=2&q=4 flv 1
@@ -0,0 +1,532 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/devutils/rspec/shared_examples"
+ require "logstash/inputs/s3"
+ require "logstash/codecs/multiline"
+ require "logstash/errors"
+ require_relative "../support/helpers"
+ require "stud/temporary"
+ require "aws-sdk-s3"
+ require "fileutils"
+
+ describe LogStash::Inputs::S3 do
+   let(:temporary_directory) { Stud::Temporary.pathname }
+   let(:sincedb_path) { Stud::Temporary.pathname }
+   let(:day) { 3600 * 24 }
+   let(:creds) { Aws::Credentials.new('1234', 'secret') }
+   let(:config) {
+     {
+       "access_key_id" => "1234",
+       "secret_access_key" => "secret",
+       "bucket" => "logstash-test",
+       "temporary_directory" => temporary_directory,
+       "sincedb_path" => File.join(sincedb_path, ".sincedb")
+     }
+   }
+
+   before do
+     FileUtils.mkdir_p(sincedb_path)
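+     # Stub every AWS SDK call so the specs never touch the network.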
+     Aws.config[:stub_responses] = true
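+     # Surface exceptions raised in the plugin's background threads instead of
+     # silently losing them.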
+     Thread.abort_on_exception = true
+   end
+
+   context "when interrupting the plugin" do
+     let(:config) { super().merge({ "interval" => 5 }) }
+
+     before do
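+       # TestInfiniteS3Object comes from spec/support/helpers (required above)
+       # and stands in for a never-ending bucket listing, so the shared example
+       # can prove the plugin shuts down cleanly mid-run.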
+       expect_any_instance_of(LogStash::Inputs::S3).to receive(:list_new_files).and_return(TestInfiniteS3Object.new)
+     end
+
+     it_behaves_like "an interruptible input plugin"
+   end
+
+   describe "#register" do
+     subject { LogStash::Inputs::S3.new(config) }
+
+     context "with temporary directory" do
+       let(:temporary_directory) { Stud::Temporary.pathname }
+
+       it "creates the directory when it doesn't exist" do
+         expect { subject.register }.to change { Dir.exist?(temporary_directory) }.from(false).to(true)
+       end
+     end
+   end
+
+   describe '#get_s3object' do
+     subject { LogStash::Inputs::S3.new(settings) }
+
+     context 'with modern access key options' do
+       let(:settings) {
+         {
+           "access_key_id" => "1234",
+           "secret_access_key" => "secret",
+           "proxy_uri" => "http://example.com",
+           "bucket" => "logstash-test",
+         }
+       }
+
+       it 'should instantiate Aws::S3 clients with a proxy set' do
+         expect(Aws::S3::Resource).to receive(:new).with({
+           :credentials => kind_of(Aws::Credentials),
+           :http_proxy => 'http://example.com',
+           :region => subject.region
+         })
+
+         subject.send(:get_s3object)
+       end
+     end
+
+     describe "additional_settings" do
+       context 'when force_path_style is set' do
+         let(:settings) {
+           {
+             "access_key_id" => "1234",
+             "secret_access_key" => "secret",
+             "additional_settings" => { "force_path_style" => true },
+             "bucket" => "logstash-test",
+           }
+         }
+
+         it 'should instantiate Aws::S3 clients with force_path_style set' do
+           expect(Aws::S3::Resource).to receive(:new).with({
+             :credentials => kind_of(Aws::Credentials),
+             :region => subject.region,
+             :force_path_style => true
+           }).and_call_original
+
+           subject.send(:get_s3object)
+         end
+       end
+
+       context 'when an unknown setting is given' do
+         let(:settings) {
+           {
+             "additional_settings" => { "this_setting_doesnt_exist" => true },
+             "bucket" => "logstash-test",
+           }
+         }
+
+         it 'should raise an error' do
+           expect { subject.send(:get_s3object) }.to raise_error(ArgumentError)
+         end
+       end
+     end
+   end
+
+   describe "#list_new_files" do
+     before { allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects_list } }
+
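+     # Doubles standing in for S3 object summaries; objects stored in GLACIER
+     # or DEEP_ARCHIVE should only be listed once their restore has completed.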
+     let!(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now, :content_length => 10, :storage_class => 'STANDARD', :object => double(:data => double(:restore => nil))) }
+     let!(:archived_object) { double(:key => 'this-should-be-archived', :last_modified => Time.now, :content_length => 10, :storage_class => 'GLACIER', :object => double(:data => double(:restore => nil))) }
+     let!(:deep_archived_object) { double(:key => 'this-should-be-deep-archived', :last_modified => Time.now, :content_length => 10, :storage_class => 'DEEP_ARCHIVE', :object => double(:data => double(:restore => nil))) }
+     let!(:restored_object) { double(:key => 'this-should-be-restored-from-archive', :last_modified => Time.now, :content_length => 10, :storage_class => 'GLACIER', :object => double(:data => double(:restore => 'ongoing-request="false", expiry-date="Thu, 01 Jan 2099 00:00:00 GMT"'))) }
+     let!(:deep_restored_object) { double(:key => 'this-should-be-restored-from-deep-archive', :last_modified => Time.now, :content_length => 10, :storage_class => 'DEEP_ARCHIVE', :object => double(:data => double(:restore => 'ongoing-request="false", expiry-date="Thu, 01 Jan 2099 00:00:00 GMT"'))) }
+     let(:objects_list) {
+       [
+         double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100, :storage_class => 'STANDARD'),
+         double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50, :storage_class => 'STANDARD'),
+         archived_object,
+         restored_object,
+         deep_restored_object,
+         present_object
+       ]
+     }
+
+     it 'should allow user to exclude files from the s3 bucket' do
+       plugin = LogStash::Inputs::S3.new(config.merge({ "exclude_pattern" => "^exclude" }))
+       plugin.register
+
+       files = plugin.list_new_files
+       expect(files).to include(present_object.key)
+       expect(files).to include(restored_object.key)
+       expect(files).to include(deep_restored_object.key)
+       expect(files).to_not include('exclude-this-file-1') # matches exclude pattern
+       expect(files).to_not include('exclude/logstash') # matches exclude pattern
+       expect(files).to_not include(archived_object.key) # archived
+       expect(files).to_not include(deep_archived_object.key) # archived
+       expect(files.size).to eq(3)
+     end
+
+     it 'should support not providing an exclude pattern' do
+       plugin = LogStash::Inputs::S3.new(config)
+       plugin.register
+
+       files = plugin.list_new_files
+       expect(files).to include(present_object.key)
+       expect(files).to include(restored_object.key)
+       expect(files).to include(deep_restored_object.key)
+       expect(files).to include('exclude-this-file-1') # no exclude pattern given
+       expect(files).to include('exclude/logstash') # no exclude pattern given
+       expect(files).to_not include(archived_object.key) # archived
+       expect(files).to_not include(deep_archived_object.key) # archived
+       expect(files.size).to eq(5)
+     end
+
+     context 'when all files are excluded from a bucket' do
+       let(:objects_list) {
+         [
+           double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100, :storage_class => 'STANDARD'),
+           double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50, :storage_class => 'STANDARD'),
+         ]
+       }
+
+       it 'should not log that no files were found in the bucket' do
+         plugin = LogStash::Inputs::S3.new(config.merge({ "exclude_pattern" => "^exclude" }))
+         plugin.register
+         allow(plugin.logger).to receive(:debug).with(anything, anything)
+
+         expect(plugin.logger).not_to receive(:info).with(/No files found/, anything)
+         expect(plugin.logger).to receive(:debug).with(/Ignoring/, anything)
+         expect(plugin.list_new_files).to be_empty
+       end
+     end
+
+     context 'with an empty bucket' do
+       let(:objects_list) { [] }
+
+       it 'should log that no files were found in the bucket' do
+         plugin = LogStash::Inputs::S3.new(config)
+         plugin.register
+         expect(plugin.logger).to receive(:info).with(/No files found/, anything)
+         expect(plugin.list_new_files).to be_empty
+       end
+     end
+
+     context "when the bucket is the same as the backup bucket" do
+       it 'should ignore files from the bucket if they match the backup prefix' do
+         objects_list = [
+           double(:key => 'mybackup-log-1', :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
+           present_object
+         ]
+
+         allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects_list }
+
+         plugin = LogStash::Inputs::S3.new(config.merge({ 'backup_add_prefix' => 'mybackup',
+                                                          'backup_to_bucket' => config['bucket'] }))
+         plugin.register
+
+         files = plugin.list_new_files
+         expect(files).to include(present_object.key)
+         expect(files).to_not include('mybackup-log-1') # matches backup prefix
+         expect(files.size).to eq(1)
+       end
+     end
+
+     it 'should ignore files last modified before the sincedb marker' do
+       plugin = LogStash::Inputs::S3.new(config.merge({ 'backup_add_prefix' => 'exclude-this-file' }))
+
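+       # Pretend the sincedb already recorded a read one day ago, so anything
+       # last modified before that must be skipped.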
+       allow_any_instance_of(LogStash::Inputs::S3::SinceDB::File).to receive(:read).and_return(Time.now - day)
+       plugin.register
+
+       files = plugin.list_new_files
+       expect(files).to include(present_object.key)
+       expect(files).to include(restored_object.key)
+       expect(files).to include(deep_restored_object.key)
+       expect(files).to_not include('exclude-this-file-1') # too old
+       expect(files).to_not include('exclude/logstash') # too old
+       expect(files).to_not include(archived_object.key) # archived
+       expect(files).to_not include(deep_archived_object.key) # archived
+       expect(files.size).to eq(3)
+     end
+
+     it 'should ignore the file if it matches the prefix' do
+       prefix = 'mysource/'
+
+       objects_list = [
+         double(:key => prefix, :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
+         present_object
+       ]
+
+       allow_any_instance_of(Aws::S3::Bucket).to receive(:objects).with(:prefix => prefix) { objects_list }
+
+       plugin = LogStash::Inputs::S3.new(config.merge({ 'prefix' => prefix }))
+       plugin.register
+       expect(plugin.list_new_files).to eq([present_object.key])
+     end
+
+     it 'should return objects sorted by last_modified date, oldest first' do
+       objects = [
+         double(:key => 'YESTERDAY', :last_modified => Time.now - day, :content_length => 5, :storage_class => 'STANDARD'),
+         double(:key => 'TODAY', :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
+         double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD')
+       ]
+
+       allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }
+
+       plugin = LogStash::Inputs::S3.new(config)
+       plugin.register
+       expect(plugin.list_new_files).to eq(['TWO_DAYS_AGO', 'YESTERDAY', 'TODAY'])
+     end
+
+     describe "when doing backup on the s3" do
+       it 'should copy to another s3 bucket when keeping the original file' do
+         plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup" }))
+         plugin.register
+
+         s3object = Aws::S3::Object.new('mybucket', 'testkey')
+         expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+         expect(s3object).to_not receive(:delete)
+
+         plugin.backup_to_bucket(s3object)
+       end
+
+       it 'should copy to another s3 bucket when deleting the original file' do
+         plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup", "delete" => true }))
+         plugin.register
+
+         s3object = Aws::S3::Object.new('mybucket', 'testkey')
+         expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+         expect(s3object).to receive(:delete)
+
+         plugin.backup_to_bucket(s3object)
+       end
+
+       it 'should add the specified prefix to the backup file' do
+         plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_bucket" => "mybackup",
+                                                          "backup_add_prefix" => 'backup-' }))
+         plugin.register
+
+         s3object = Aws::S3::Object.new('mybucket', 'testkey')
+         expect_any_instance_of(Aws::S3::Object).to receive(:copy_from).with(:copy_source => "mybucket/testkey")
+         expect(s3object).to_not receive(:delete)
+
+         plugin.backup_to_bucket(s3object)
+       end
+     end
+
+     it 'should support doing local backup of files' do
+       Stud::Temporary.directory do |backup_dir|
+         Stud::Temporary.file do |source_file|
+           backup_file = File.join(backup_dir.to_s, Pathname.new(source_file.path).basename.to_s)
+
+           plugin = LogStash::Inputs::S3.new(config.merge({ "backup_to_dir" => backup_dir }))
+
+           plugin.backup_to_dir(source_file)
+
+           expect(File.exist?(backup_file)).to eq(true)
+         end
+       end
+     end
+   end
+
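+   # Shared assertions: every including context supplies `config`, a `log`
+   # double, and a fixture `log_file` that the stubbed S3 calls return.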
+   shared_examples "generated events" do
+     let(:events_to_process) { 2 }
+
+     it 'should process events' do
+       events = fetch_events(config)
+       expect(events.size).to eq(events_to_process)
+       expect(events[0].get("[@metadata][s3][key]")).to eql log.key
+     end
+
+     it "deletes the temporary file" do
+       fetch_events(config)
+       expect(Dir.glob(File.join(temporary_directory, "*")).size).to eq(0)
+     end
+   end
+
+   context 'while communicating with s3' do
+     let(:config) {
+       {
+         "access_key_id" => "1234",
+         "secret_access_key" => "secret",
+         "bucket" => "logstash-test",
+         "codec" => "json",
+       }
+     }
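+     # Handing stub_responses an error name (e.g. 'AccessDenied') makes the
+     # stubbed client raise that error class when the operation is called.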
+     %w(AccessDenied NotFound).each do |error|
+       context "while listing bucket contents, #{error} is returned" do
+         before do
+           Aws.config[:s3] = {
+             stub_responses: {
+               list_objects: error
+             }
+           }
+         end
+
+         it 'should not crash the plugin' do
+           events = fetch_events(config)
+           expect(events.size).to eq(0)
+         end
+       end
+     end
+
+     %w(AccessDenied NoSuchKey).each do |error|
+       context "when retrieving an object, #{error} is returned" do
+         let(:objects) { [log] }
+         let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
+
+         let(:config) {
+           {
+             "access_key_id" => "1234",
+             "secret_access_key" => "secret",
+             "bucket" => "logstash-test",
+             "codec" => "json",
+           }
+         }
+         before do
+           Aws.config[:s3] = {
+             stub_responses: {
+               get_object: error
+             }
+           }
+           allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }
+         end
+
+         it 'should not crash the plugin' do
+           events = fetch_events(config)
+           expect(events.size).to eq(0)
+         end
+       end
+     end
+   end
+
+   context 'when working with logs' do
+     let(:objects) { [log] }
+     let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5, :data => { "etag" => 'c2c966251da0bc3229d12c2642ba50a4' }, :storage_class => 'STANDARD') }
+     let(:data) { File.read(log_file) }
+
+     before do
+       Aws.config[:s3] = {
+         stub_responses: {
+           get_object: { body: data }
+         }
+       }
+       allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }
+       allow_any_instance_of(Aws::S3::Bucket).to receive(:object).with(log.key) { log }
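+       # Simulate Aws::S3::Object#get by writing the fixture's bytes to the
+       # requested :response_target path.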
+       expect(log).to receive(:get).with(instance_of(Hash)) do |arg|
+         File.open(arg[:response_target], 'wb') { |s3file| s3file.write(data) }
+       end
+     end
+
+     context "when event doesn't have a `message` field" do
+       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'json.log') }
+       let(:config) {
+         {
+           "access_key_id" => "1234",
+           "secret_access_key" => "secret",
+           "bucket" => "logstash-test",
+           "codec" => "json",
+         }
+       }
+
+       include_examples "generated events"
+     end
+
+     context "when event does have a `message` field" do
+       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'json_with_message.log') }
+       let(:config) {
+         {
+           "access_key_id" => "1234",
+           "secret_access_key" => "secret",
+           "bucket" => "logstash-test",
+           "codec" => "json",
+         }
+       }
+
+       include_examples "generated events"
+     end
+
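+     # A gzip file made of several concatenated gzip members; every stream in
+     # the file should be decoded, not just the first one.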
+ context "multiple compressed streams" do
433
+ let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
434
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'multiple_compressed_streams.gz') }
435
+
436
+ include_examples "generated events" do
437
+ let(:events_to_process) { 16 }
438
+ end
439
+ end
440
+
441
+ context 'compressed' do
442
+ let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
443
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gz') }
444
+
445
+ include_examples "generated events"
446
+ end
447
+
448
+ context 'compressed with gzip extension and using default gzip_pattern option' do
449
+ let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
450
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gzip') }
451
+
452
+ include_examples "generated events"
453
+ end
454
+
455
+ context 'compressed with gzip extension and using custom gzip_pattern option' do
456
+ let(:config) { super.merge({ "gzip_pattern" => "gee.zip$" }) }
457
+ let(:log) { double(:key => 'log.gee.zip', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
458
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gee.zip') }
459
+ include_examples "generated events"
460
+ end
461
+
462
+ context 'plain text' do
463
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
464
+
465
+ include_examples "generated events"
466
+ end
467
+
468
+ context 'multi-line' do
469
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'multiline.log') }
470
+ let(:config) {
471
+ {
472
+ "access_key_id" => "1234",
473
+ "secret_access_key" => "secret",
474
+ "bucket" => "logstash-test",
475
+ "codec" => LogStash::Codecs::Multiline.new( {"pattern" => "__SEPARATOR__", "negate" => "true", "what" => "previous"})
476
+ }
477
+ }
478
+
479
+ include_examples "generated events"
480
+ end
481
+
482
+ context 'encoded' do
483
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'invalid_utf8.gbk.log') }
484
+
485
+ include_examples "generated events"
486
+ end
487
+
488
+ context 'cloudfront' do
489
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'cloudfront.log') }
490
+
491
+ it 'should extract metadata from cloudfront log' do
492
+ events = fetch_events(config)
493
+
494
+ events.each do |event|
495
+ expect(event.get('cloudfront_fields')).to eq('date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url​ c-user-agent x-sname x-sname-query x-file-ext x-sid')
496
+ expect(event.get('cloudfront_version')).to eq('1.0')
497
+ end
498
+ end
499
+
500
+ include_examples "generated events"
501
+ end
502
+
503
+ context 'when include_object_properties is set to true' do
504
+ let(:config) { super.merge({ "include_object_properties" => true }) }
505
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
506
+
507
+ it 'should extract object properties onto [@metadata][s3]' do
508
+ events = fetch_events(config)
509
+ events.each do |event|
510
+ expect(event.get('[@metadata][s3]')).to include(log.data)
511
+ end
512
+ end
513
+
514
+ include_examples "generated events"
515
+ end
516
+
517
+ context 'when include_object_properties is set to false' do
518
+ let(:config) { super.merge({ "include_object_properties" => false }) }
519
+ let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
520
+
521
+ it 'should NOT extract object properties onto [@metadata][s3]' do
522
+ events = fetch_events(config)
523
+ events.each do |event|
524
+ expect(event.get('[@metadata][s3]')).to_not include(log.data)
525
+ end
526
+ end
527
+
528
+ include_examples "generated events"
529
+ end
530
+
531
+ end
532
+ end