logstash-input-s3 3.3.7 → 3.7.0

logstash-input-s3.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-s3'
-  s.version = '3.3.7'
+  s.version = '3.7.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Streams events from files in a S3 bucket"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -27,4 +27,5 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'logstash-devutils'
   s.add_development_dependency "logstash-codec-json"
   s.add_development_dependency "logstash-codec-multiline"
+  s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.1'
 end
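
Note: the new logstash-mixin-ecs_compatibility_support runtime dependency backs the ECS changes exercised in the specs below. A minimal sketch of how an input plugin typically consumes that mixin, assuming its documented API; the plugin's actual wiring lives in lib/logstash/inputs/s3.rb and is not shown in this diff:

# Sketch only, assuming the documented mixin API; field names mirror the
# cloudfront spec expectations below, the instance variable is illustrative.
require 'logstash/plugin_mixins/ecs_compatibility_support'

class LogStash::Inputs::S3 < LogStash::Inputs::Base
  # Declares the supported modes; `ecs_compatibility` then resolves to
  # :disabled or :v1 at runtime from pipeline/plugin settings.
  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1)

  def register
    @cloudfront_fields_key = ecs_compatibility == :disabled ? 'cloudfront_fields' : '[@metadata][s3][cloudfront][fields]'
  end
end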
spec/inputs/s3_spec.rb CHANGED
@@ -1,5 +1,6 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
+require "logstash/devutils/rspec/shared_examples"
 require "logstash/inputs/s3"
 require "logstash/codecs/multiline"
 require "logstash/errors"
@@ -8,6 +9,7 @@ require_relative "../support/helpers"
 require "stud/temporary"
 require "aws-sdk"
 require "fileutils"
+require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
 
 describe LogStash::Inputs::S3 do
   let(:temporary_directory) { Stud::Temporary.pathname }
@@ -23,6 +25,7 @@ describe LogStash::Inputs::S3 do
       "sincedb_path" => File.join(sincedb_path, ".sincedb")
     }
   }
+  let(:cutoff) { LogStash::Inputs::S3::CUTOFF_SECOND }
 
 
   before do
@@ -32,10 +35,11 @@ describe LogStash::Inputs::S3 do
   end
 
   context "when interrupting the plugin" do
-    let(:config) { super.merge({ "interval" => 5 }) }
+    let(:config) { super().merge({ "interval" => 5 }) }
+    let(:s3_obj) { double(:key => "awesome-key", :last_modified => Time.now.round, :content_length => 10, :storage_class => 'STANDARD', :object => double(:data => double(:restore => nil)) ) }
 
     before do
-      expect_any_instance_of(LogStash::Inputs::S3).to receive(:list_new_files).and_return(TestInfiniteS3Object.new)
+      expect_any_instance_of(LogStash::Inputs::S3).to receive(:list_new_files).and_return(TestInfiniteS3Object.new(s3_obj))
     end
 
     it_behaves_like "an interruptible input plugin"
@@ -114,32 +118,61 @@ describe LogStash::Inputs::S3 do
   describe "#list_new_files" do
     before { allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects_list } }
 
-    let!(:present_object) { double(:key => 'this-should-be-present', :last_modified => Time.now, :content_length => 10) }
+    let!(:present_object_after_cutoff) {double(:key => 'this-should-not-be-present', :last_modified => Time.now, :content_length => 10, :storage_class => 'STANDARD', :object => double(:data => double(:restore => nil)) ) }
+    let!(:present_object) {double(:key => 'this-should-be-present', :last_modified => Time.now - cutoff, :content_length => 10, :storage_class => 'STANDARD', :object => double(:data => double(:restore => nil)) ) }
+    let!(:archived_object) {double(:key => 'this-should-be-archived', :last_modified => Time.now - cutoff, :content_length => 10, :storage_class => 'GLACIER', :object => double(:data => double(:restore => nil)) ) }
+    let!(:deep_archived_object) {double(:key => 'this-should-be-archived', :last_modified => Time.now - cutoff, :content_length => 10, :storage_class => 'GLACIER', :object => double(:data => double(:restore => nil)) ) }
+    let!(:restored_object) {double(:key => 'this-should-be-restored-from-archive', :last_modified => Time.now - cutoff, :content_length => 10, :storage_class => 'GLACIER', :object => double(:data => double(:restore => 'ongoing-request="false", expiry-date="Thu, 01 Jan 2099 00:00:00 GMT"')) ) }
+    let!(:deep_restored_object) {double(:key => 'this-should-be-restored-from-deep-archive', :last_modified => Time.now - cutoff, :content_length => 10, :storage_class => 'DEEP_ARCHIVE', :object => double(:data => double(:restore => 'ongoing-request="false", expiry-date="Thu, 01 Jan 2099 00:00:00 GMT"')) ) }
     let(:objects_list) {
       [
-        double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100),
-        double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50),
-        present_object
+        double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100, :storage_class => 'STANDARD'),
+        double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50, :storage_class => 'STANDARD'),
+        archived_object,
+        restored_object,
+        deep_restored_object,
+        present_object,
+        present_object_after_cutoff
       ]
     }
 
     it 'should allow user to exclude files from the s3 bucket' do
       plugin = LogStash::Inputs::S3.new(config.merge({ "exclude_pattern" => "^exclude" }))
       plugin.register
-      expect(plugin.list_new_files).to eq([present_object.key])
+
+      files = plugin.list_new_files.map { |item| item.key }
+      expect(files).to include(present_object.key)
+      expect(files).to include(restored_object.key)
+      expect(files).to include(deep_restored_object.key)
+      expect(files).to_not include('exclude-this-file-1') # matches exclude pattern
+      expect(files).to_not include('exclude/logstash') # matches exclude pattern
+      expect(files).to_not include(archived_object.key) # archived
+      expect(files).to_not include(deep_archived_object.key) # archived
+      expect(files).to_not include(present_object_after_cutoff.key) # after cutoff
+      expect(files.size).to eq(3)
     end
 
     it 'should support not providing a exclude pattern' do
       plugin = LogStash::Inputs::S3.new(config)
       plugin.register
-      expect(plugin.list_new_files).to eq(objects_list.map(&:key))
+
+      files = plugin.list_new_files.map { |item| item.key }
+      expect(files).to include(present_object.key)
+      expect(files).to include(restored_object.key)
+      expect(files).to include(deep_restored_object.key)
+      expect(files).to include('exclude-this-file-1') # no exclude pattern given
+      expect(files).to include('exclude/logstash') # no exclude pattern given
+      expect(files).to_not include(archived_object.key) # archived
+      expect(files).to_not include(deep_archived_object.key) # archived
+      expect(files).to_not include(present_object_after_cutoff.key) # after cutoff
+      expect(files.size).to eq(5)
    end
 
    context 'when all files are excluded from a bucket' do
      let(:objects_list) {
        [
-          double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100),
-          double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50),
+          double(:key => 'exclude-this-file-1', :last_modified => Time.now - 2 * day, :content_length => 100, :storage_class => 'STANDARD'),
+          double(:key => 'exclude/logstash', :last_modified => Time.now - 2 * day, :content_length => 50, :storage_class => 'STANDARD'),
        ]
      }
 
@@ -168,7 +201,7 @@ describe LogStash::Inputs::S3 do
     context "If the bucket is the same as the backup bucket" do
       it 'should ignore files from the bucket if they match the backup prefix' do
         objects_list = [
-          double(:key => 'mybackup-log-1', :last_modified => Time.now, :content_length => 5),
+          double(:key => 'mybackup-log-1', :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
           present_object
         ]
 
@@ -177,24 +210,38 @@ describe LogStash::Inputs::S3 do
         plugin = LogStash::Inputs::S3.new(config.merge({ 'backup_add_prefix' => 'mybackup',
                                                          'backup_to_bucket' => config['bucket']}))
         plugin.register
-        expect(plugin.list_new_files).to eq([present_object.key])
+
+        files = plugin.list_new_files.map { |item| item.key }
+        expect(files).to include(present_object.key)
+        expect(files).to_not include('mybackup-log-1') # matches backup prefix
+        expect(files.size).to eq(1)
       end
     end
 
     it 'should ignore files older than X' do
       plugin = LogStash::Inputs::S3.new(config.merge({ 'backup_add_prefix' => 'exclude-this-file'}))
 
-      expect_any_instance_of(LogStash::Inputs::S3::SinceDB::File).to receive(:read).exactly(objects_list.size) { Time.now - day }
+
+      allow_any_instance_of(LogStash::Inputs::S3::SinceDB::File).to receive(:read).and_return(Time.now - day)
       plugin.register
 
-      expect(plugin.list_new_files).to eq([present_object.key])
+      files = plugin.list_new_files.map { |item| item.key }
+      expect(files).to include(present_object.key)
+      expect(files).to include(restored_object.key)
+      expect(files).to include(deep_restored_object.key)
+      expect(files).to_not include('exclude-this-file-1') # too old
+      expect(files).to_not include('exclude/logstash') # too old
+      expect(files).to_not include(archived_object.key) # archived
+      expect(files).to_not include(deep_archived_object.key) # archived
+      expect(files).to_not include(present_object_after_cutoff.key) # after cutoff
+      expect(files.size).to eq(3)
     end
 
     it 'should ignore file if the file match the prefix' do
       prefix = 'mysource/'
 
       objects_list = [
-        double(:key => prefix, :last_modified => Time.now, :content_length => 5),
+        double(:key => prefix, :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
        present_object
      ]
 
@@ -202,14 +249,15 @@ describe LogStash::Inputs::S3 do
 
       plugin = LogStash::Inputs::S3.new(config.merge({ 'prefix' => prefix }))
       plugin.register
-      expect(plugin.list_new_files).to eq([present_object.key])
+      expect(plugin.list_new_files.map { |item| item.key }).to eq([present_object.key])
     end
 
     it 'should sort return object sorted by last_modification date with older first' do
       objects = [
-        double(:key => 'YESTERDAY', :last_modified => Time.now - day, :content_length => 5),
-        double(:key => 'TODAY', :last_modified => Time.now, :content_length => 5),
-        double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now - 2 * day, :content_length => 5)
+        double(:key => 'YESTERDAY', :last_modified => Time.now - day, :content_length => 5, :storage_class => 'STANDARD'),
+        double(:key => 'TODAY', :last_modified => Time.now, :content_length => 5, :storage_class => 'STANDARD'),
+        double(:key => 'TODAY_BEFORE_CUTOFF', :last_modified => Time.now - cutoff, :content_length => 5, :storage_class => 'STANDARD'),
+        double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD')
       ]
 
       allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { objects }
@@ -217,7 +265,7 @@ describe LogStash::Inputs::S3 do
 
       plugin = LogStash::Inputs::S3.new(config)
       plugin.register
-      expect(plugin.list_new_files).to eq(['TWO_DAYS_AGO', 'YESTERDAY', 'TODAY'])
+      expect(plugin.list_new_files.map { |item| item.key }).to eq(['TWO_DAYS_AGO', 'YESTERDAY', 'TODAY_BEFORE_CUTOFF'])
     end
 
     describe "when doing backup on the s3" do
@@ -277,7 +325,7 @@ describe LogStash::Inputs::S3 do
     it 'should process events' do
       events = fetch_events(config)
       expect(events.size).to eq(events_to_process)
-      insist { events[0].get("[@metadata][s3][key]") } == log.key
+      expect(events[0].get("[@metadata][s3][key]")).to eql log.key
     end
 
     it "deletes the temporary file" do
@@ -315,7 +363,7 @@ describe LogStash::Inputs::S3 do
   %w(AccessDenied NoSuchKey).each do |error|
     context "when retrieving an object, #{error} is returned" do
       let(:objects) { [log] }
-      let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5) }
+      let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
 
       let(:config) {
         {
@@ -344,7 +392,7 @@ describe LogStash::Inputs::S3 do
 
   context 'when working with logs' do
     let(:objects) { [log] }
-    let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5, :data => { "etag" => 'c2c966251da0bc3229d12c2642ba50a4' }) }
+    let(:log) { double(:key => 'uncompressed.log', :last_modified => Time.now - 2 * day, :content_length => 5, :data => { "etag" => 'c2c966251da0bc3229d12c2642ba50a4' }, :storage_class => 'STANDARD') }
     let(:data) { File.read(log_file) }
 
     before do
@@ -389,28 +437,35 @@ describe LogStash::Inputs::S3 do
     end
 
     context "multiple compressed streams" do
-      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5) }
+      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'multiple_compressed_streams.gz') }
 
       include_examples "generated events" do
         let(:events_to_process) { 16 }
       end
     end
-
+    
     context 'compressed' do
-      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5) }
+      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gz') }
 
       include_examples "generated events"
     end
 
-    context 'compressed with gzip extension' do
-      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5) }
+    context 'compressed with gzip extension and using default gzip_pattern option' do
+      let(:log) { double(:key => 'log.gz', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gzip') }
 
       include_examples "generated events"
     end
 
+    context 'compressed with gzip extension and using custom gzip_pattern option' do
+      let(:config) { super().merge({ "gzip_pattern" => "gee.zip$" }) }
+      let(:log) { double(:key => 'log.gee.zip', :last_modified => Time.now - 2 * day, :content_length => 5, :storage_class => 'STANDARD') }
+      let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'compressed.log.gee.zip') }
+      include_examples "generated events"
+    end
+
     context 'plain text' do
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
 
@@ -440,12 +495,20 @@ describe LogStash::Inputs::S3 do
     context 'cloudfront' do
       let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'cloudfront.log') }
 
-      it 'should extract metadata from cloudfront log' do
-        events = fetch_events(config)
+      describe "metadata", :ecs_compatibility_support, :aggregate_failures do
+        ecs_compatibility_matrix(:disabled, :v1) do |ecs_select|
+          before(:each) do
+            allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
+          end
 
-        events.each do |event|
-          expect(event.get('cloudfront_fields')).to eq('date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url​ c-user-agent x-sname x-sname-query x-file-ext x-sid')
-          expect(event.get('cloudfront_version')).to eq('1.0')
+          it 'should extract metadata from cloudfront log' do
+            events = fetch_events(config)
+
+            events.each do |event|
+              expect(event.get ecs_select[disabled: "cloudfront_fields", v1: "[@metadata][s3][cloudfront][fields]"] ).to eq('date time x-edge-location c-ip x-event sc-bytes x-cf-status x-cf-client-id cs-uri-stem cs-uri-query c-referrer x-page-url​ c-user-agent x-sname x-sname-query x-file-ext x-sid')
+              expect(event.get ecs_select[disabled: "cloudfront_version", v1: "[@metadata][s3][cloudfront][version]"] ).to eq('1.0')
+            end
+          end
         end
       end
 
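Note: ecs_compatibility_matrix and ecs_select come from the spec helper required at the top of this file. The matrix runs the enclosed examples once per listed mode, and ecs_select picks the value registered for the active mode; roughly, as a sketch of the helper's behaviour:

ecs_compatibility_matrix(:disabled, :v1) do |ecs_select|
  # when the current run's ecs_compatibility is :disabled:
  ecs_select[disabled: "cloudfront_fields", v1: "[@metadata][s3][cloudfront][fields]"]
  # => "cloudfront_fields"
  # when it is :v1:
  # => "[@metadata][s3][cloudfront][fields]"
end
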
@@ -453,7 +516,7 @@ describe LogStash::Inputs::S3 do
     end
 
     context 'when include_object_properties is set to true' do
-      let(:config) { super.merge({ "include_object_properties" => true }) }
+      let(:config) { super().merge({ "include_object_properties" => true }) }
      let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
 
      it 'should extract object properties onto [@metadata][s3]' do
@@ -467,7 +530,7 @@ describe LogStash::Inputs::S3 do
     end
 
     context 'when include_object_properties is set to false' do
-      let(:config) { super.merge({ "include_object_properties" => false }) }
+      let(:config) { super().merge({ "include_object_properties" => false }) }
      let(:log_file) { File.join(File.dirname(__FILE__), '..', 'fixtures', 'uncompressed.log') }
 
      it 'should NOT extract object properties onto [@metadata][s3]' do
@@ -479,6 +542,67 @@ describe LogStash::Inputs::S3 do
 
       include_examples "generated events"
     end
+  end
+
+  describe "data loss" do
+    let(:s3_plugin) { LogStash::Inputs::S3.new(config) }
+    let(:queue) { [] }
+
+    before do
+      s3_plugin.register
+    end
 
+    context 'events come after cutoff time' do
+      it 'should be processed in next cycle' do
+        s3_objects = [
+          double(:key => 'TWO_DAYS_AGO', :last_modified => Time.now.round - 2 * day, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'YESTERDAY', :last_modified => Time.now.round - day, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'TODAY_BEFORE_CUTOFF', :last_modified => Time.now.round - cutoff, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'TODAY', :last_modified => Time.now.round, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'TODAY', :last_modified => Time.now.round, :content_length => 5, :storage_class => 'STANDARD')
+        ]
+        size = s3_objects.length
+
+        allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { s3_objects }
+        allow_any_instance_of(Aws::S3::Bucket).to receive(:object).and_return(*s3_objects)
+        expect(s3_plugin).to receive(:process_log).at_least(size).and_call_original
+        expect(s3_plugin).to receive(:stop?).and_return(false).at_least(size)
+        expect(s3_plugin).to receive(:download_remote_file).and_return(true).at_least(size)
+        expect(s3_plugin).to receive(:process_local_log).and_return(true).at_least(size)
+
+        # first iteration
+        s3_plugin.process_files(queue)
+
+        # second iteration
+        sleep(cutoff + 1)
+        s3_plugin.process_files(queue)
+      end
+    end
+
+    context 's3 object updated after getting summary' do
+      it 'should not update sincedb' do
+        s3_summary = [
+          double(:key => 'YESTERDAY', :last_modified => Time.now.round - day, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'TODAY', :last_modified => Time.now.round - (cutoff * 10), :content_length => 5, :storage_class => 'STANDARD')
+        ]
+
+        s3_objects = [
+          double(:key => 'YESTERDAY', :last_modified => Time.now.round - day, :content_length => 5, :storage_class => 'STANDARD'),
+          double(:key => 'TODAY_UPDATED', :last_modified => Time.now.round, :content_length => 5, :storage_class => 'STANDARD')
+        ]
+
+        size = s3_objects.length
+
+        allow_any_instance_of(Aws::S3::Bucket).to receive(:objects) { s3_summary }
+        allow_any_instance_of(Aws::S3::Bucket).to receive(:object).and_return(*s3_objects)
+        expect(s3_plugin).to receive(:process_log).at_least(size).and_call_original
+        expect(s3_plugin).to receive(:stop?).and_return(false).at_least(size)
+        expect(s3_plugin).to receive(:download_remote_file).and_return(true).at_least(size)
+        expect(s3_plugin).to receive(:process_local_log).and_return(true).at_least(size)
+
+        s3_plugin.process_files(queue)
+        expect(s3_plugin.send(:sincedb).read).to eq(s3_summary[0].last_modified)
+      end
+    end
   end
 end
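
Note: the two "data loss" contexts pin down when the sincedb watermark may advance. A sketch of the guard they verify, assuming it matches the plugin's process_log (the logger message wording is illustrative; the real code is in lib/logstash/inputs/s3.rb):

# Illustrative sketch of the guard the "data loss" specs exercise.
def process_log(queue, log)
  object = @s3bucket.object(log.key) # fresh view; may be newer than the listing summary
  filename = File.join(temporary_directory, File.basename(log.key))
  if download_remote_file(object, filename) && process_local_log(queue, filename, object)
    if object.last_modified == log.last_modified
      sincedb.write(log.last_modified) # unchanged since listing: safe to advance
    else
      # rewritten after the listing was taken; leave sincedb alone so the
      # new version is picked up on the next cycle
      @logger.info("#{log.key} was updated after listing, reprocessing next cycle")
    end
  end
end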
spec/integration/s3_spec.rb CHANGED
@@ -10,6 +10,7 @@ describe LogStash::Inputs::S3, :integration => true, :s3 => true do
 
     upload_file('../fixtures/uncompressed.log' , "#{prefix}uncompressed_1.log")
     upload_file('../fixtures/compressed.log.gz', "#{prefix}compressed_1.log.gz")
+    sleep(LogStash::Inputs::S3::CUTOFF_SECOND + 1)
   end
 
   after do
@@ -28,6 +29,7 @@ describe LogStash::Inputs::S3, :integration => true, :s3 => true do
                              "prefix" => prefix,
                              "temporary_directory" => temporary_directory } }
   let(:backup_prefix) { "backup/" }
+  let(:backup_bucket) { "logstash-s3-input-backup" }
 
   it "support prefix to scope the remote files" do
     events = fetch_events(minimal_settings)
@@ -49,13 +51,17 @@ describe LogStash::Inputs::S3, :integration => true, :s3 => true do
   end
 
   context "remote backup" do
+    before do
+      create_bucket(backup_bucket)
+    end
+
     it "another bucket" do
-      fetch_events(minimal_settings.merge({ "backup_to_bucket" => "logstash-s3-input-backup"}))
-      expect(list_remote_files("", "logstash-s3-input-backup").size).to eq(2)
+      fetch_events(minimal_settings.merge({ "backup_to_bucket" => backup_bucket}))
+      expect(list_remote_files("", backup_bucket).size).to eq(2)
     end
 
     after do
-      delete_bucket("logstash-s3-input-backup")
+      delete_bucket(backup_bucket)
     end
   end
 end
spec/support/helpers.rb CHANGED
@@ -23,6 +23,10 @@ def list_remote_files(prefix, target_bucket = ENV['AWS_LOGSTASH_TEST_BUCKET'])
   bucket.objects(:prefix => prefix).collect(&:key)
 end
 
+def create_bucket(name)
+  s3object.bucket(name).create
+end
+
 def delete_bucket(name)
   s3object.bucket(name).objects.map(&:delete)
   s3object.bucket(name).delete
@@ -33,13 +37,16 @@ def s3object
 end
 
 class TestInfiniteS3Object
+  def initialize(s3_obj)
+    @s3_obj = s3_obj
+  end
+
   def each
     counter = 1
 
     loop do
-      yield "awesome-#{counter}"
+      yield @s3_obj
       counter +=1
     end
   end
-end
-
+end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-s3
 version: !ruby/object:Gem::Version
-  version: 3.3.7
+  version: 3.7.0
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-07-20 00:00:00.000000000 Z
+date: 2021-06-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -100,6 +100,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
+  name: logstash-mixin-ecs_compatibility_support
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
 description: This gem is a Logstash plugin required to be installed on top of the
   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
   gem is not a stand-alone program
@@ -119,6 +133,7 @@ files:
 - lib/logstash/inputs/s3/patch.rb
 - logstash-input-s3.gemspec
 - spec/fixtures/cloudfront.log
+- spec/fixtures/compressed.log.gee.zip
 - spec/fixtures/compressed.log.gz
 - spec/fixtures/compressed.log.gzip
 - spec/fixtures/invalid_utf8.gbk.log
@@ -159,6 +174,7 @@ specification_version: 4
 summary: Streams events from files in a S3 bucket
 test_files:
 - spec/fixtures/cloudfront.log
+- spec/fixtures/compressed.log.gee.zip
 - spec/fixtures/compressed.log.gz
 - spec/fixtures/compressed.log.gzip
 - spec/fixtures/invalid_utf8.gbk.log