logstash-output-s3 3.2.0 → 4.0.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (38)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +21 -0
  3. data/lib/logstash/outputs/s3.rb +188 -308
  4. data/lib/logstash/outputs/s3/file_repository.rb +120 -0
  5. data/lib/logstash/outputs/s3/patch.rb +22 -0
  6. data/lib/logstash/outputs/s3/path_validator.rb +18 -0
  7. data/lib/logstash/outputs/s3/size_and_time_rotation_policy.rb +24 -0
  8. data/lib/logstash/outputs/s3/size_rotation_policy.rb +26 -0
  9. data/lib/logstash/outputs/s3/temporary_file.rb +71 -0
  10. data/lib/logstash/outputs/s3/temporary_file_factory.rb +123 -0
  11. data/lib/logstash/outputs/s3/time_rotation_policy.rb +26 -0
  12. data/lib/logstash/outputs/s3/uploader.rb +59 -0
  13. data/lib/logstash/outputs/s3/writable_directory_validator.rb +17 -0
  14. data/lib/logstash/outputs/s3/write_bucket_permission_validator.rb +49 -0
  15. data/logstash-output-s3.gemspec +2 -2
  16. data/spec/integration/dynamic_prefix_spec.rb +92 -0
  17. data/spec/integration/gzip_file_spec.rb +62 -0
  18. data/spec/integration/gzip_size_rotation_spec.rb +63 -0
  19. data/spec/integration/restore_from_crash_spec.rb +39 -0
  20. data/spec/integration/size_rotation_spec.rb +59 -0
  21. data/spec/integration/stress_test_spec.rb +60 -0
  22. data/spec/integration/time_based_rotation_with_constant_write_spec.rb +60 -0
  23. data/spec/integration/time_based_rotation_with_stale_write_spec.rb +60 -0
  24. data/spec/integration/upload_current_file_on_shutdown_spec.rb +51 -0
  25. data/spec/outputs/s3/file_repository_spec.rb +146 -0
  26. data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb +77 -0
  27. data/spec/outputs/s3/size_rotation_policy_spec.rb +41 -0
  28. data/spec/outputs/s3/temporary_file_factory_spec.rb +85 -0
  29. data/spec/outputs/s3/temporary_file_spec.rb +40 -0
  30. data/spec/outputs/s3/time_rotation_policy_spec.rb +60 -0
  31. data/spec/outputs/s3/uploader_spec.rb +57 -0
  32. data/spec/outputs/s3/writable_directory_validator_spec.rb +40 -0
  33. data/spec/outputs/s3/write_bucket_permission_validator_spec.rb +38 -0
  34. data/spec/outputs/s3_spec.rb +52 -335
  35. data/spec/spec_helper.rb +6 -0
  36. data/spec/supports/helpers.rb +33 -9
  37. metadata +65 -4
  38. data/spec/integration/s3_spec.rb +0 -97
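The new files above split the previously monolithic s3.rb into small collaborators: a temporary-file abstraction, size/time rotation policies, a thread-pool-backed uploader, and validators for the temporary directory and bucket permissions. Below is a minimal sketch of how these pieces appear to fit together, inferred only from the constructor signatures and calls exercised by the specs in this diff (TemporaryFile.new(key, io, dir), TimeRotationPolicy.new(minutes), rotate?(file), Uploader.new(bucket, logger, threadpool), upload(file, :on_complete => ...)). It is not the plugin's actual internal wiring; the bucket name, key, and logger are illustrative, and it assumes the plugin and its dependencies are on the load path (i.e. inside a Logstash/JRuby environment).

# Hypothetical wiring sketch -- not the plugin's real code path.
require "logstash/outputs/s3/temporary_file"
require "logstash/outputs/s3/time_rotation_policy"
require "logstash/outputs/s3/uploader"
require "concurrent"
require "aws-sdk"
require "stud/temporary"
require "fileutils"
require "logger"

temporary_directory = Stud::Temporary.directory
key = "logs/example.txt"

# TemporaryFile wraps an open IO that lives under the temporary directory.
FileUtils.mkdir_p(::File.join(temporary_directory, ::File.dirname(key)))
io = ::File.open(::File.join(temporary_directory, key), "w+")
file = LogStash::Outputs::S3::TemporaryFile.new(key, io, temporary_directory)

file.write("hello world\n")
file.fsync

# Rotation policies answer rotate?(file); the time policy takes minutes.
policy = LogStash::Outputs::S3::TimeRotationPolicy.new(15)

# The uploader pushes files to the bucket on a thread pool and can fire a callback.
bucket = Aws::S3::Bucket.new("my-bucket", :client => Aws::S3::Client.new(stub_responses: true))
pool = Concurrent::ThreadPoolExecutor.new(:min_threads => 1, :max_threads => 8,
                                          :max_queue => 1, :fallback_policy => :caller_runs)
uploader = LogStash::Outputs::S3::Uploader.new(bucket, Logger.new(STDOUT), pool)

uploader.upload(file, :on_complete => ->(f) { f.delete! }) if policy.rotate?(file)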
data/spec/outputs/s3/temporary_file_spec.rb
@@ -0,0 +1,40 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/s3/temporary_file"
+require "stud/temporary"
+require "fileutils"
+require "securerandom"
+
+describe LogStash::Outputs::S3::TemporaryFile do
+  let(:content) { "hello world" }
+  let(:key) { "foo" }
+  let(:uuid) { SecureRandom.uuid }
+  let(:temporary_file) { ::File.open(::File.join(temporary_directory, uuid, key), "w+") }
+  let(:temporary_directory) { Stud::Temporary.directory }
+
+  before :each do
+    FileUtils.mkdir_p(::File.join(temporary_directory, uuid))
+  end
+
+  subject { described_class.new(key, temporary_file, temporary_directory) }
+
+  it "returns the key of the file" do
+    expect(subject.key).to eq(key)
+  end
+
+  it "saves content to a file" do
+    subject.write(content)
+    subject.close
+    expect(File.read(subject.path).strip).to eq(content)
+  end
+
+  it "deletes a file" do
+    expect(File.exist?(subject.path)).to be_truthy
+    subject.delete!
+    expect(File.exist?(subject.path)).to be_falsey
+  end
+
+  it "returns the creation time" do
+    expect(subject.ctime).to be < Time.now + 0.5
+  end
+end
data/spec/outputs/s3/time_rotation_policy_spec.rb
@@ -0,0 +1,60 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/s3/time_rotation_policy"
+require "logstash/outputs/s3/temporary_file"
+
+describe LogStash::Outputs::S3::TimeRotationPolicy do
+  subject { described_class.new(max_time) }
+
+  let(:max_time) { 1 }
+  let(:temporary_directory) { Stud::Temporary.directory }
+  let(:temporary_file) { Stud::Temporary.file }
+  let(:name) { "foobar" }
+  let(:content) { "hello" * 1000 }
+  let(:file) { LogStash::Outputs::S3::TemporaryFile.new(name, temporary_file, temporary_directory) }
+
+  it "raises an exception if the `file_time` is set to 0" do
+    expect { described_class.new(0) }.to raise_error(LogStash::ConfigurationError, /`time_file` need to be greather than 0/)
+  end
+
+  it "raises an exception if the `file_time` is < 0" do
+    expect { described_class.new(-100) }.to raise_error(LogStash::ConfigurationError, /`time_file` need to be greather than 0/)
+  end
+
+  context "when the size of the file is superior to 0" do
+    before :each do
+      file.write(content)
+      file.fsync
+    end
+
+    it "returns true if the file old enough" do
+      allow(file).to receive(:ctime).and_return(Time.now - (max_time * 2 * 60))
+      expect(subject.rotate?(file)).to be_truthy
+    end
+
+    it "returns false is not old enough" do
+      expect(subject.rotate?(file)).to be_falsey
+    end
+  end
+
+  context "When the size of the file is 0" do
+    it "returns false if the file old enough" do
+      allow(file).to receive(:ctime).and_return(Time.now - (max_time * 2 * 60))
+      expect(subject.rotate?(file)).to be_falsey
+    end
+
+    it "returns false is not old enough" do
+      expect(subject.rotate?(file)).to be_falsey
+    end
+  end
+
+  context "#needs_periodic?" do
+    it "return false" do
+      expect(subject.needs_periodic?).to be_truthy
+    end
+  end
+
+  it "convert minute into seconds" do
+    expect(subject.time_file).to eq(60)
+  end
+end
data/spec/outputs/s3/uploader_spec.rb
@@ -0,0 +1,57 @@
+# Encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/s3/uploader"
+require "logstash/outputs/s3/temporary_file"
+require "aws-sdk"
+require "stud/temporary"
+
+describe LogStash::Outputs::S3::Uploader do
+  let(:logger) { spy(:logger ) }
+  let(:max_upload_workers) { 1 }
+  let(:bucket_name) { "foobar-bucket" }
+  let(:client) { Aws::S3::Client.new(stub_responses: true) }
+  let(:bucket) { Aws::S3::Bucket.new(bucket_name, :client => client) }
+  let(:temporary_directory) { Stud::Temporary.pathname }
+  let(:temporary_file) { Stud::Temporary.file }
+  let(:key) { "foobar" }
+  let(:upload_options) { {} }
+  let(:threadpool) do
+    Concurrent::ThreadPoolExecutor.new({
+      :min_threads => 1,
+      :max_threads => 8,
+      :max_queue => 1,
+      :fallback_policy => :caller_runs
+    })
+  end
+
+  let(:file) do
+    f = LogStash::Outputs::S3::TemporaryFile.new(key, temporary_file, temporary_directory)
+    f.write("random content")
+    f.fsync
+    f
+  end
+
+  subject { described_class.new(bucket, logger, threadpool) }
+
+  it "upload file to the s3 bucket" do
+    expect { subject.upload(file) }.not_to raise_error
+  end
+
+  it "execute a callback when the upload is complete" do
+    callback = proc { |f| }
+
+    expect(callback).to receive(:call).with(file)
+    subject.upload(file, { :on_complete => callback })
+  end
+
+  it "retries errors indefinitively" do
+    s3 = double("s3").as_null_object
+
+    expect(logger).to receive(:error).with(any_args).once
+    expect(bucket).to receive(:object).with(file.key).and_return(s3).twice
+    expect(s3).to receive(:upload_file).with(any_args).and_raise(StandardError)
+    expect(s3).to receive(:upload_file).with(any_args).and_return(true)
+
+    subject.upload(file)
+  end
+end
data/spec/outputs/s3/writable_directory_validator_spec.rb
@@ -0,0 +1,40 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/s3/writable_directory_validator"
+require "stud/temporary"
+
+describe LogStash::Outputs::S3::WritableDirectoryValidator do
+  let(:temporary_directory) { File.join(Stud::Temporary.directory, Time.now.to_i.to_s) }
+
+  subject { described_class }
+
+  context "when the directory doesn't exists" do
+    it "creates the directory" do
+      expect(Dir.exist?(temporary_directory)).to be_falsey
+      expect(subject.valid?(temporary_directory)).to be_truthy
+      expect(Dir.exist?(temporary_directory)).to be_truthy
+    end
+  end
+
+  context "when the directory exist" do
+    before do
+      FileUtils.mkdir_p(temporary_directory)
+    end
+
+    it "doesn't change the directory" do
+      expect(Dir.exist?(temporary_directory)).to be_truthy
+      expect(subject.valid?(temporary_directory)).to be_truthy
+      expect(Dir.exist?(temporary_directory)).to be_truthy
+    end
+  end
+
+  it "return false if the directory is not writable" do
+    expect(::File).to receive(:writable?).with(temporary_directory).and_return(false)
+    expect(subject.valid?(temporary_directory)).to be_falsey
+  end
+
+  it "return true if the directory is writable" do
+    expect(::File).to receive(:writable?).with(temporary_directory).and_return(true)
+    expect(subject.valid?(temporary_directory)).to be_truthy
+  end
+end
data/spec/outputs/s3/write_bucket_permission_validator_spec.rb
@@ -0,0 +1,38 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/s3/write_bucket_permission_validator"
+require "aws-sdk"
+
+describe LogStash::Outputs::S3::WriteBucketPermissionValidator do
+  let(:bucket_name) { "foobar" }
+  let(:obj) { double("s3_object") }
+  let(:client) { Aws::S3::Client.new(stub_responses: true) }
+  let(:bucket) { Aws::S3::Bucket.new(bucket_name, :client => client) }
+
+  subject { described_class }
+
+  before do
+    expect(bucket).to receive(:object).with(any_args).and_return(obj)
+  end
+
+  context "when permissions are sufficient" do
+    it "returns true" do
+      expect(obj).to receive(:upload_file).with(any_args).and_return(true)
+      expect(obj).to receive(:delete).and_return(true)
+      expect(subject.valid?(bucket)).to be_truthy
+    end
+
+    it "hides delete errors" do
+      expect(obj).to receive(:upload_file).with(any_args).and_return(true)
+      expect(obj).to receive(:delete).and_raise(StandardError)
+      expect(subject.valid?(bucket)).to be_truthy
+    end
+  end
+
+  context "when permission aren't sufficient" do
+    it "returns false" do
+      expect(obj).to receive(:upload_file).with(any_args).and_raise(StandardError)
+      expect(subject.valid?(bucket)).to be_falsey
+    end
+  end
+end
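Both validator specs above exercise a class-level valid? method, and the rewritten s3_spec below stubs WriteBucketPermissionValidator.valid? during register, skips it when validate_credentials_on_root_bucket is false, and expects a LogStash::ConfigurationError when WritableDirectoryValidator.valid? returns false. A hedged sketch of register-time checks consistent with that behavior follows; the method name and error messages are illustrative, not the plugin's actual #register code.

# Hypothetical register-time validation, inferred from the specs -- not the
# plugin's actual implementation. Messages are made up for illustration.
def validate_destination!(bucket_resource, temporary_directory, validate_credentials = true)
  unless LogStash::Outputs::S3::WritableDirectoryValidator.valid?(temporary_directory)
    raise LogStash::ConfigurationError,
          "Logstash must have write permissions on the temporary directory: #{temporary_directory}"
  end

  if validate_credentials && !LogStash::Outputs::S3::WriteBucketPermissionValidator.valid?(bucket_resource)
    raise LogStash::ConfigurationError,
          "Logstash was unable to write a test object to the bucket; check credentials and permissions"
  end
end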
data/spec/outputs/s3_spec.rb
@@ -1,371 +1,88 @@
 # encoding: utf-8
-require "logstash/devutils/rspec/spec_helper"
 require "logstash/outputs/s3"
+require "logstash/event"
 require "logstash/codecs/line"
-require "logstash/pipeline"
-require "aws-sdk"
-require "fileutils"
-require_relative "../supports/helpers"
+require "stud/temporary"

 describe LogStash::Outputs::S3 do
+  let(:prefix) { "super/%{server}" }
+  let(:region) { "us-east-1" }
+  let(:bucket_name) { "mybucket" }
+  let(:options) { { "region" => region,
+                    "bucket" => bucket_name,
+                    "prefix" => prefix,
+                    "restore" => false,
+                    "access_key_id" => "access_key_id",
+                    "secret_access_key" => "secret_access_key"
+                  } }
+  let(:client) { Aws::S3::Client.new(stub_responses: true) }
+  let(:mock_bucket) { Aws::S3::Bucket.new(:name => bucket_name, :stub_responses => true, :client => client) }
+  let(:event) { LogStash::Event.new({ "server" => "overwatch" }) }
+  let(:event_encoded) { "super hype" }
+  let(:events_and_encoded) { { event => event_encoded } }
+
+  subject { described_class.new(options) }
+
   before do
-    # We stub all the calls from S3, for more information see:
-    # http://ruby.awsblog.com/post/Tx2SU6TYJWQQLC3/Stubbing-AWS-Responses
-    AWS.stub!
-    Thread.abort_on_exception = true
+    allow(subject).to receive(:bucket_resource).and_return(mock_bucket)
+    allow(LogStash::Outputs::S3::WriteBucketPermissionValidator).to receive(:valid?).with(mock_bucket).and_return(true)
   end

-  let(:minimal_settings) { { "access_key_id" => "1234",
-                             "secret_access_key" => "secret",
-                             "bucket" => "my-bucket" } }
-
-  describe "configuration" do
-    let!(:config) { { "region" => "sa-east-1" } }
-
+  context "#register configuration validation" do
     describe "signature version" do
       it "should set the signature version if specified" do
-        s3 = LogStash::Outputs::S3.new(config.merge({ "signature_version" => 'v4' }))
-        expect(s3.full_options[:s3_signature_version]).to eql('v4')
+        ["v2", "v4"].each do |version|
+          s3 = described_class.new(options.merge({ "signature_version" => version }))
+          expect(s3.full_options).to include(:s3_signature_version => version)
+        end
       end

       it "should omit the option completely if not specified" do
-        s3 = LogStash::Outputs::S3.new(config)
+        s3 = described_class.new(options)
         expect(s3.full_options.has_key?(:s3_signature_version)).to eql(false)
       end
     end
-  end
-
-  describe "#register" do
-    it "should create the tmp directory if it doesn't exist" do
-      temporary_directory = Stud::Temporary.pathname("temporary_directory")
-
-      config = {
-        "access_key_id" => "1234",
-        "secret_access_key" => "secret",
-        "bucket" => "logstash",
-        "size_file" => 10,
-        "temporary_directory" => temporary_directory
-      }
-
-      s3 = LogStash::Outputs::S3.new(config)
-      allow(s3).to receive(:test_s3_write)
-      s3.register
-
-      expect(Dir.exist?(temporary_directory)).to eq(true)
-      s3.close
-      FileUtils.rm_r(temporary_directory)
-    end
-
-    it "should raise a ConfigurationError if the prefix contains one or more '\^`><' characters" do
-      config = {
-        "prefix" => "`no\><^"
-      }
-
-      s3 = LogStash::Outputs::S3.new(config)
-
-      expect {
-        s3.register
-      }.to raise_error(LogStash::ConfigurationError)
-    end
-  end
-
-  describe "#generate_temporary_filename" do
-    before do
-      allow(Socket).to receive(:gethostname) { "logstash.local" }
-    end
-
-    it "should add tags to the filename if present" do
-      config = minimal_settings.merge({ "tags" => ["elasticsearch", "logstash", "kibana"], "temporary_directory" => "/tmp/logstash"})
-      s3 = LogStash::Outputs::S3.new(config)
-      expect(s3.get_temporary_filename).to match(/^ls\.s3\.logstash\.local\.\d{4}-\d{2}\-\d{2}T\d{2}\.\d{2}\.tag_#{config["tags"].join("\.")}\.part0\.txt\Z/)
-    end
-
-    it "should not add the tags to the filename" do
-      config = minimal_settings.merge({ "tags" => [], "temporary_directory" => "/tmp/logstash" })
-      s3 = LogStash::Outputs::S3.new(config)
-      expect(s3.get_temporary_filename(3)).to match(/^ls\.s3\.logstash\.local\.\d{4}-\d{2}\-\d{2}T\d{2}\.\d{2}\.part3\.txt\Z/)
-    end
-
-    it "normalized the temp directory to include the trailing slash if missing" do
-      s3 = LogStash::Outputs::S3.new(minimal_settings.merge({ "temporary_directory" => "/tmp/logstash" }))
-      expect(s3.get_temporary_filename).to match(/^ls\.s3\.logstash\.local\.\d{4}-\d{2}\-\d{2}T\d{2}\.\d{2}\.part0\.txt\Z/)
-    end
-  end
-
-  describe "#write_on_bucket" do
-    let!(:fake_data) { Stud::Temporary.file }
-
-    let(:fake_bucket) do
-      s3 = double('S3Object')
-      allow(s3).to receive(:write)
-      s3
-    end
-
-    it "should prefix the file on the bucket if a prefix is specified" do
-      prefix = "my-prefix"
-
-      config = minimal_settings.merge({
-        "prefix" => prefix,
-        "bucket" => "my-bucket"
-      })
-
-      expect_any_instance_of(AWS::S3::ObjectCollection).to receive(:[]).with("#{prefix}#{File.basename(fake_data)}") { fake_bucket }
-
-      s3 = LogStash::Outputs::S3.new(config)
-      allow(s3).to receive(:test_s3_write)
-      s3.register
-      s3.write_on_bucket(fake_data)
-    end
-
-    it 'should use the same local filename if no prefix is specified' do
-      config = minimal_settings.merge({
-        "bucket" => "my-bucket"
-      })
-
-      expect_any_instance_of(AWS::S3::ObjectCollection).to receive(:[]).with(File.basename(fake_data)) { fake_bucket }
-
-      s3 = LogStash::Outputs::S3.new(minimal_settings)
-      allow(s3).to receive(:test_s3_write)
-      s3.register
-      s3.write_on_bucket(fake_data)
-    end
-  end
-
-  describe "#write_events_to_multiple_files?" do
-    it 'returns true if the size_file is != 0 ' do
-      s3 = LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 200 }))
-      expect(s3.write_events_to_multiple_files?).to eq(true)
-    end
-
-    it 'returns false if size_file is zero or not set' do
-      s3 = LogStash::Outputs::S3.new(minimal_settings)
-      expect(s3.write_events_to_multiple_files?).to eq(false)
-    end
-  end
-
-  describe "#write_to_tempfile" do
-    it "should append the event to a file" do
-      Stud::Temporary.file("logstash", "a+") do |tmp|
-        s3 = LogStash::Outputs::S3.new(minimal_settings)
-        allow(s3).to receive(:test_s3_write)
-        s3.register
-        s3.tempfile = tmp
-        s3.write_to_tempfile("test-write")
-        tmp.rewind
-        expect(tmp.read).to eq("test-write")
-      end
-    end
-  end
-
-  describe "#rotate_events_log" do
-
-    context "having a single worker" do
-      let(:s3) { LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 1024 })) }
-
-      before(:each) do
-        s3.register
-      end
-
-      it "returns true if the tempfile is over the file_size limit" do
-        Stud::Temporary.file do |tmp|
-          allow(tmp).to receive(:size) { 2024001 }
-
-          s3.tempfile = tmp
-          expect(s3.rotate_events_log?).to be(true)
-        end
-      end
-
-      it "returns false if the tempfile is under the file_size limit" do
-        Stud::Temporary.file do |tmp|
-          allow(tmp).to receive(:size) { 100 }
-
-          s3.tempfile = tmp
-          expect(s3.rotate_events_log?).to eq(false)
-        end
-      end
-    end

-    context "having periodic rotations" do
-      let(:s3) { LogStash::Outputs::S3.new(minimal_settings.merge({ "size_file" => 1024, "time_file" => 6e-10 })) }
-      let(:tmp) { Tempfile.new('s3_rotation_temp_file') }
-
-      before(:each) do
-        s3.tempfile = tmp
-        s3.register
-      end
+    describe "temporary directory" do
+      let(:temporary_directory) { Stud::Temporary.pathname }
+      let(:options) { super.merge({ "temporary_directory" => temporary_directory }) }

-      after(:each) do
-        s3.close
-        tmp.close
-        tmp.unlink
+      it "creates the directory when it doesn't exist" do
+        expect(Dir.exist?(temporary_directory)).to be_falsey
+        subject.register
+        expect(Dir.exist?(temporary_directory)).to be_truthy
       end

-      it "raises no error when periodic rotation happen" do
-        1000.times do
-          expect { s3.rotate_events_log? }.not_to raise_error
-        end
+      it "raises an error if we cannot write to the directory" do
+        expect(LogStash::Outputs::S3::WritableDirectoryValidator).to receive(:valid?).with(temporary_directory).and_return(false)
+        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
       end
     end
-  end
-
-  describe "#move_file_to_bucket" do
-    subject { LogStash::Outputs::S3.new(minimal_settings) }

-    it "should always delete the source file" do
-      tmp = Stud::Temporary.file
-
-      allow(File).to receive(:zero?).and_return(true)
-      expect(File).to receive(:delete).with(tmp)
-
-      subject.move_file_to_bucket(tmp)
+    it "validates the prefix" do
+      s3 = described_class.new(options.merge({ "prefix" => "`no\><^" }))
+      expect { s3.register }.to raise_error(LogStash::ConfigurationError)
     end

-    it 'should not upload the file if the size of the file is zero' do
-      temp_file = Stud::Temporary.file
-      allow(temp_file).to receive(:zero?).and_return(true)
-
-      expect(subject).not_to receive(:write_on_bucket)
-      subject.move_file_to_bucket(temp_file)
-    end
-
-    it "should upload the file if the size > 0" do
-      tmp = Stud::Temporary.file
-
-      allow(File).to receive(:zero?).and_return(false)
-      expect(subject).to receive(:write_on_bucket)
-
-      subject.move_file_to_bucket(tmp)
-    end
-  end
-
-  describe "#restore_from_crashes" do
-    it "read the temp directory and upload the matching file to s3" do
-      s3 = LogStash::Outputs::S3.new(minimal_settings.merge({ "temporary_directory" => "/tmp/logstash/" }))
-
-      expect(Dir).to receive(:[]).with("/tmp/logstash/*.txt").and_return(["/tmp/logstash/01.txt"])
-      expect(s3).to receive(:move_file_to_bucket_async).with("/tmp/logstash/01.txt")
-
-
-      s3.restore_from_crashes
-    end
-  end
-
-  describe "#receive" do
-    it "should send the event through the codecs" do
-      data = {"foo" => "bar", "baz" => {"bah" => ["a","b","c"]}, "@timestamp" => "2014-05-30T02:52:17.929Z"}
-      event = LogStash::Event.new(data)
-
-      expect_any_instance_of(LogStash::Codecs::Line).to receive(:encode).with(event)
-
-      s3 = LogStash::Outputs::S3.new(minimal_settings)
-      allow(s3).to receive(:test_s3_write)
+    it "allow to not validate credentials" do
+      s3 = described_class.new(options.merge({"validate_credentials_on_root_bucket" => false}))
+      expect(LogStash::Outputs::S3::WriteBucketPermissionValidator).not_to receive(:valid?).with(any_args)
       s3.register
-
-      s3.receive(event)
     end
   end

-  describe "when rotating the temporary file" do
-    before { allow(File).to receive(:delete) }
-
-    it "doesn't skip events if using the size_file option" do
-      Stud::Temporary.directory do |temporary_directory|
-        size_file = rand(200..20000)
-        event_count = rand(300..15000)
-
-        config = %Q[
-        input {
-          generator {
-            count => #{event_count}
-          }
-        }
-        output {
-          s3 {
-            access_key_id => "1234"
-            secret_access_key => "secret"
-            size_file => #{size_file}
-            codec => line
-            temporary_directory => '#{temporary_directory}'
-            bucket => 'testing'
-          }
-        }
-        ]
-
-        pipeline = LogStash::Pipeline.new(config)
-
-        pipeline_thread = Thread.new { pipeline.run }
-        sleep 0.1 while !pipeline.ready?
-        pipeline_thread.join
-
-        events_written_count = events_in_files(Dir[File.join(temporary_directory, 'ls.*.txt')])
-        expect(events_written_count).to eq(event_count)
-      end
+  context "receiving events" do
+    before do
+      subject.register
     end

-    describe "closing" do
-      let(:options) do
-        {
-          "access_key_id" => 1234,
-          "secret_access_key" => "secret",
-          "bucket" => "mahbucket"
-        }
-      end
-      subject do
-        ::LogStash::Outputs::S3.new(options)
-      end
-
-      before do
-        subject.register
-      end
-
-      it "should be clean" do
-        subject.do_close
-      end
-
-      it "should remove all worker threads" do
-        subject.do_close
-        sleep 1
-        expect(subject.upload_workers.map(&:thread).any?(&:alive?)).to be false
-      end
+    after do
+      subject.close
     end

-    it "doesn't skip events if using the time_file option", :tag => :slow do
-      Stud::Temporary.directory do |temporary_directory|
-        time_file = rand(1..2)
-        number_of_rotation = rand(2..5)
-
-        config = {
-          "time_file" => time_file,
-          "codec" => "line",
-          "temporary_directory" => temporary_directory,
-          "bucket" => "testing"
-        }
-
-        s3 = LogStash::Outputs::S3.new(minimal_settings.merge(config))
-        # Make the test run in seconds intead of minutes..
-        expect(s3).to receive(:periodic_interval).and_return(time_file)
-        s3.register
-
-        # Force to have a few files rotation
-        stop_time = Time.now + (number_of_rotation * time_file)
-        event_count = 0
-
-        event = LogStash::Event.new("message" => "Hello World")
-
-        until Time.now > stop_time do
-          s3.receive(event)
-          event_count += 1
-        end
-        s3.close
-
-        generated_files = Dir[File.join(temporary_directory, 'ls.*.txt')]
-
-        events_written_count = events_in_files(generated_files)
-
-        # Skew times can affect the number of rotation..
-        expect(generated_files.count).to be_within(number_of_rotation).of(number_of_rotation + 1)
-        expect(events_written_count).to eq(event_count)
-      end
+    it "uses `Event#sprintf` for the prefix" do
+      expect(event).to receive(:sprintf).with(prefix).and_return("super/overwatch")
+      subject.multi_receive_encoded(events_and_encoded)
     end
   end
 end
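For reference, the rewritten spec above drives the plugin through its new public surface: events arrive pre-encoded via multi_receive_encoded rather than one at a time through receive, and the prefix is resolved per event with Event#sprintf. A minimal usage sketch along the same lines follows; it uses the same options as the spec, the trailing newline on the encoded payload is an assumption, and it presumes a Logstash runtime where LogStash::Outputs::S3 and LogStash::Event are loadable.

# Sketch of driving the 4.0.0 output the way the spec does -- illustrative only.
require "logstash/outputs/s3"
require "logstash/event"

output = LogStash::Outputs::S3.new(
  "region"            => "us-east-1",
  "bucket"            => "mybucket",
  "prefix"            => "super/%{server}",   # resolved per event via Event#sprintf
  "restore"           => false,
  "access_key_id"     => "access_key_id",
  "secret_access_key" => "secret_access_key"
)
output.register

event = LogStash::Event.new("server" => "overwatch")
# multi_receive_encoded takes event => encoded-string pairs, as in the spec.
output.multi_receive_encoded({ event => "super hype\n" })

output.close

Note that against a real bucket, register performs the write-permission check shown in the spec unless validate_credentials_on_root_bucket is set to false, so valid credentials are required for this to run outside of stubbed tests.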