logstash-output-s3 3.2.0 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -0
- data/lib/logstash/outputs/s3.rb +188 -308
- data/lib/logstash/outputs/s3/file_repository.rb +120 -0
- data/lib/logstash/outputs/s3/patch.rb +22 -0
- data/lib/logstash/outputs/s3/path_validator.rb +18 -0
- data/lib/logstash/outputs/s3/size_and_time_rotation_policy.rb +24 -0
- data/lib/logstash/outputs/s3/size_rotation_policy.rb +26 -0
- data/lib/logstash/outputs/s3/temporary_file.rb +71 -0
- data/lib/logstash/outputs/s3/temporary_file_factory.rb +123 -0
- data/lib/logstash/outputs/s3/time_rotation_policy.rb +26 -0
- data/lib/logstash/outputs/s3/uploader.rb +59 -0
- data/lib/logstash/outputs/s3/writable_directory_validator.rb +17 -0
- data/lib/logstash/outputs/s3/write_bucket_permission_validator.rb +49 -0
- data/logstash-output-s3.gemspec +2 -2
- data/spec/integration/dynamic_prefix_spec.rb +92 -0
- data/spec/integration/gzip_file_spec.rb +62 -0
- data/spec/integration/gzip_size_rotation_spec.rb +63 -0
- data/spec/integration/restore_from_crash_spec.rb +39 -0
- data/spec/integration/size_rotation_spec.rb +59 -0
- data/spec/integration/stress_test_spec.rb +60 -0
- data/spec/integration/time_based_rotation_with_constant_write_spec.rb +60 -0
- data/spec/integration/time_based_rotation_with_stale_write_spec.rb +60 -0
- data/spec/integration/upload_current_file_on_shutdown_spec.rb +51 -0
- data/spec/outputs/s3/file_repository_spec.rb +146 -0
- data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb +77 -0
- data/spec/outputs/s3/size_rotation_policy_spec.rb +41 -0
- data/spec/outputs/s3/temporary_file_factory_spec.rb +85 -0
- data/spec/outputs/s3/temporary_file_spec.rb +40 -0
- data/spec/outputs/s3/time_rotation_policy_spec.rb +60 -0
- data/spec/outputs/s3/uploader_spec.rb +57 -0
- data/spec/outputs/s3/writable_directory_validator_spec.rb +40 -0
- data/spec/outputs/s3/write_bucket_permission_validator_spec.rb +38 -0
- data/spec/outputs/s3_spec.rb +52 -335
- data/spec/spec_helper.rb +6 -0
- data/spec/supports/helpers.rb +33 -9
- metadata +65 -4
- data/spec/integration/s3_spec.rb +0 -97
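The new files listed above break the monolithic s3.rb into a file repository, rotation policies (size, time, size-and-time), a temporary file factory, an uploader, and path/permission validators, and the integration specs reproduced below all drive the plugin through the same register → multi_receive_encoded → close lifecycle. A minimal sketch of that lifecycle, assuming a full Logstash runtime and valid AWS credentials; the bucket name, prefix, and paths here are placeholders, not values taken from the diff:

```ruby
# Sketch only: exercises the 4.0.0 output the way the integration specs do.
require "logstash/outputs/s3"
require "logstash/event"

options = {
  "bucket"              => "my-test-bucket",      # hypothetical bucket
  "prefix"              => "ci/%{+YYYY-MM-dd}/",  # dynamic prefixes are supported in 4.0.0
  "temporary_directory" => "/tmp/logstash-s3",
  "rotation_strategy"   => "size_and_time",
  "size_file"           => 1024 * 1024,           # rotate once the part reaches ~1 MiB ...
  "time_file"           => 15                     # ... or is 15 minutes old
}

output = LogStash::Outputs::S3.new(options)
output.register

# multi_receive_encoded receives (event, encoded payload) pairs, as in the specs below.
events = 10.times.map { LogStash::Event.new("message" => "Hello world") }
output.multi_receive_encoded(events.map { |event| [event, "Hello world\n"] })

output.close # uploads the in-flight temporary file on shutdown
```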
data/spec/integration/time_based_rotation_with_constant_write_spec.rb (new file)
@@ -0,0 +1,60 @@
# encoding: utf-8
require_relative "../spec_helper"
require "logstash/outputs/s3"
require "logstash/codecs/line"
require "stud/temporary"

describe "File Time rotation with constant write", :integration => true do
  include_context "setup plugin"

  let(:time_file) { 0.004 }
  let(:options) { main_options.merge({ "rotation_strategy" => "time" }) }
  let(:number_of_events) { 5000 }
  let(:batch_size) { 125 }
  let(:event_encoded) { "Hello world" }
  let(:batch) do
    b = {}
    number_of_events.times do
      event = LogStash::Event.new({ "message" => event_encoded })
      b[event] = "#{event_encoded}\n"
    end
    b
  end
  let(:minimum_number_of_time_rotation) { 3 }
  let(:batch_step) { (number_of_events / minimum_number_of_time_rotation).ceil }

  before do
    clean_remote_files(prefix)
    subject.register

    # simulate batch read/write
    batch.each_slice(batch_step) do |batch_time|
      batch_time.each_slice(batch_size) do |smaller_batch|
        subject.multi_receive_encoded(smaller_batch)
      end
      sleep(1)
    end

    subject.close
  end

  it "creates multiple files" do
    # using close will upload the current file
    expect(bucket_resource.objects(:prefix => prefix).count).to be_between(minimum_number_of_time_rotation, minimum_number_of_time_rotation + 1).inclusive
  end

  it "persists all events" do
    download_directory = Stud::Temporary.pathname

    FileUtils.rm_rf(download_directory)
    FileUtils.mkdir_p(download_directory)

    counter = 0
    bucket_resource.objects(:prefix => prefix).each do |object|
      target = File.join(download_directory, "#{counter}.txt")
      object.get(:response_target => target)
      counter += 1
    end
    expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
  end
end
data/spec/integration/time_based_rotation_with_stale_write_spec.rb (new file)
@@ -0,0 +1,60 @@
# encoding: utf-8
require_relative "../spec_helper"
require "logstash/outputs/s3"
require "logstash/codecs/line"
require "stud/temporary"

describe "File Time rotation with stale write", :integration => true do
  include_context "setup plugin"

  let(:time_file) { 0.0004 }
  let(:options) { main_options.merge({ "rotation_strategy" => "time" }) }
  let(:number_of_events) { 5000 }
  let(:batch_size) { 125 }
  let(:event_encoded) { "Hello world" }
  let(:batch) do
    b = {}
    number_of_events.times do
      event = LogStash::Event.new({ "message" => event_encoded })
      b[event] = "#{event_encoded}\n"
    end
    b
  end

  before do
    stub_const('LogStash::Outputs::S3::PERIODIC_CHECK_INTERVAL_IN_SECONDS', 1)
    clean_remote_files(prefix)
    subject.register
    subject.multi_receive_encoded(batch)
    sleep(1) # the periodic check should have kicked in
  end

  after do
    subject.close
  end

  it "creates one file" do
    # using close will upload the current file
    try(20) do
      expect(bucket_resource.objects(:prefix => prefix).count).to eq(1)
    end
  end

  it "persists all events" do
    download_directory = Stud::Temporary.pathname

    FileUtils.rm_rf(download_directory)
    FileUtils.mkdir_p(download_directory)

    counter = 0
    bucket_resource.objects(:prefix => prefix).each do |object|
      target = File.join(download_directory, "#{counter}.txt")
      object.get(:response_target => target)
      counter += 1
    end

    try(20) do
      expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
    end
  end
end
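The stale-write spec stubs PERIODIC_CHECK_INTERVAL_IN_SECONDS and then simply waits, which implies a background timer that rotates and uploads files even when no new events arrive. Below is one way such a periodic sweep can be wired up with concurrent-ruby (bundled with Logstash); ToyRepository, each_due_file, and the upload stub are illustrative stand-ins, not the plugin's internals:

```ruby
require "concurrent"

# Toy stand-in for the plugin's file repository: yields files that are due for upload.
class ToyRepository
  def initialize(files)
    @files = files
  end

  def each_due_file(&block)
    @files.each(&block)
  end
end

class StaleFileSweeper
  def initialize(repository, interval_seconds)
    @repository = repository
    # TimerTask re-runs the block every interval_seconds on a background thread.
    @task = Concurrent::TimerTask.new(:execution_interval => interval_seconds) { sweep }
  end

  def start
    @task.execute
  end

  def stop
    @task.shutdown
  end

  private

  def sweep
    @repository.each_due_file { |file| puts "would upload #{file}" }
  end
end

sweeper = StaleFileSweeper.new(ToyRepository.new(["part0.txt"]), 1)
sweeper.start
sleep 2.5 # let the timer fire a couple of times
sweeper.stop
```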
data/spec/integration/upload_current_file_on_shutdown_spec.rb (new file)
@@ -0,0 +1,51 @@
# encoding: utf-8
require_relative "../spec_helper"
require "logstash/outputs/s3"
require "logstash/codecs/line"
require "stud/temporary"

describe "Upload current file on shutdown", :integration => true do
  include_context "setup plugin"
  let(:options) { main_options }

  let(:size_file) { 1000000 }
  let(:time_file) { 100000 }
  let(:number_of_events) { 5000 }
  let(:batch_size) { 125 }
  let(:event_encoded) { "Hello world" }
  let(:batch) do
    b = {}
    number_of_events.times do
      event = LogStash::Event.new({ "message" => event_encoded })
      b[event] = "#{event_encoded}\n"
    end
    b
  end

  before do
    clean_remote_files(prefix)
    subject.register
    subject.multi_receive_encoded(batch)
    subject.close
  end

  it "creates a specific quantity of files" do
    # since time_file and size_file are both very large, only the file uploaded on close should exist
    expect(bucket_resource.objects(:prefix => prefix).count).to eq(1)
  end

  it "persists all events" do
    download_directory = Stud::Temporary.pathname

    FileUtils.rm_rf(download_directory)
    FileUtils.mkdir_p(download_directory)

    counter = 0
    bucket_resource.objects(:prefix => prefix).each do |object|
      target = File.join(download_directory, "#{counter}.txt")
      object.get(:response_target => target)
      counter += 1
    end
    expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
  end
end
data/spec/outputs/s3/file_repository_spec.rb (new file)
@@ -0,0 +1,146 @@
# encoding: utf-8
require "logstash/outputs/s3"
require "stud/temporary"
require "fileutils"
require_relative "../../spec_helper"

describe LogStash::Outputs::S3::FileRepository do
  let(:tags) { ["secret", "service"] }
  let(:encoding) { "none" }
  let(:temporary_directory) { Stud::Temporary.pathname }
  let(:prefix_key) { "a-key" }

  before do
    FileUtils.mkdir_p(temporary_directory)
  end

  subject { described_class.new(tags, encoding, temporary_directory) }

  it "returns a temporary file" do
    subject.get_file(prefix_key) do |file|
      expect(file).to be_kind_of(LogStash::Outputs::S3::TemporaryFile)
    end
  end

  it "returns the same file for the same prefix key" do
    file_path = nil

    subject.get_file(prefix_key) do |file|
      file_path = file.path
    end

    subject.get_file(prefix_key) do |file|
      expect(file.path).to eq(file_path)
    end
  end

  it "returns the same file for the same dynamic prefix key" do
    prefix = "%{type}/%{+YYYY}/%{+MM}/%{+dd}/"
    event = LogStash::Event.new({ "type" => "syslog" })
    key = event.sprintf(prefix)
    file_path = nil

    subject.get_file(key) do |file|
      file_path = file.path
    end

    subject.get_file(key) do |file|
      expect(file.path).to eq(file_path)
    end
  end

  it "returns a different file for different prefix keys" do
    file_path = nil

    subject.get_file(prefix_key) do |file|
      file_path = file.path
    end

    subject.get_file("another_prefix_key") do |file|
      expect(file.path).not_to eq(file_path)
    end
  end

  it "allows getting the file factory for a specific prefix" do
    subject.get_factory(prefix_key) do |factory|
      expect(factory).to be_kind_of(LogStash::Outputs::S3::TemporaryFileFactory)
    end
  end

  it "returns a different file factory for different prefix keys" do
    factory = nil

    subject.get_factory(prefix_key) do |f|
      factory = f
    end

    subject.get_factory("another_prefix_key") do |f|
      expect(factory).not_to eq(f)
    end
  end

  it "returns the number of prefix keys" do
    expect(subject.size).to eq(0)
    subject.get_file(prefix_key) { |file| file.write("something") }
    expect(subject.size).to eq(1)
  end

  it "returns all available keys" do
    subject.get_file(prefix_key) { |file| file.write("something") }
    expect(subject.keys.toArray).to include(prefix_key)
    expect(subject.keys.toArray.size).to eq(1)
  end

  it "cleans stale factories" do
    @file_repository = described_class.new(tags, encoding, temporary_directory, 1, 1)
    expect(@file_repository.size).to eq(0)
    path = ""
    @file_repository.get_factory(prefix_key) do |factory|
      factory.current.write("hello")
      # force a rotation so we get an empty file that will get stale.
      factory.rotate!
      path = factory.current.temp_path
    end

    @file_repository.get_file("another-prefix") { |file| file.write("hello") }
    expect(@file_repository.size).to eq(2)
    @file_repository.keys.each do |k|
      puts k
    end
    try(10) { expect(@file_repository.size).to eq(1) }
    expect(File.directory?(path)).to be_falsey
  end
end


describe LogStash::Outputs::S3::FileRepository::PrefixedValue do
  let(:factory) { spy("factory", :current => file) }
  subject { described_class.new(factory, 1) }

  context "#stale?" do
    context "when the file is empty and older than the stale time" do
      let(:file) { double("file", :size => 0, :ctime => Time.now - 5) }

      it "returns true" do
        expect(subject.stale?).to be_truthy
      end
    end

    context "when the file has data in it" do
      let(:file) { double("file", :size => 200, :ctime => Time.now - 5) }

      it "returns false" do
        expect(subject.stale?).to be_falsey
      end
    end

    context "when the file is not old enough" do
      let(:file) { double("file", :size => 0, :ctime => Time.now + 100) }

      it "returns false" do
        expect(subject.stale?).to be_falsey
      end
    end
  end
end
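Boiled down, the FileRepository spec asserts three things: one temporary file per prefix key, the same file handed back on repeated lookups for that key, and entries that stay empty past a stale window get swept out. A minimal single-threaded sketch of that contract (the real class keeps a concurrent map of per-prefix factories; TinyFileRepository and its helpers are made up for illustration):

```ruby
require "tempfile"

class TinyFileRepository
  Entry = Struct.new(:file, :created_at)

  def initialize(stale_after_seconds)
    @stale_after = stale_after_seconds
    @entries = {}
  end

  # Hands back the same Tempfile for the same prefix key.
  def get_file(prefix)
    entry = (@entries[prefix] ||= Entry.new(Tempfile.new(prefix.tr("/%{}+", "_")), Time.now))
    yield entry.file
  end

  def size
    @entries.size
  end

  def keys
    @entries.keys
  end

  # Drop entries that are still empty after the stale window.
  def remove_stale!
    @entries.delete_if do |_, entry|
      entry.file.size.zero? && (Time.now - entry.created_at) > @stale_after
    end
  end
end

repo = TinyFileRepository.new(60)
repo.get_file("a-key") { |f| f.write("something"); f.flush }
repo.get_file("a-key") { |f| puts f.path } # same file for the same prefix key
puts repo.size                             # => 1
```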
data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb (new file)
@@ -0,0 +1,77 @@
# encoding: utf-8
require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/s3/size_and_time_rotation_policy"
require "logstash/outputs/s3/temporary_file"

describe LogStash::Outputs::S3::SizeAndTimeRotationPolicy do
  let(:file_size) { 10 }
  let(:time_file) { 1 }
  subject { described_class.new(file_size, time_file) }

  let(:temporary_directory) { Stud::Temporary.pathname }
  let(:temporary_file) { Stud::Temporary.file }
  let(:name) { "foobar" }
  let(:content) { "hello" * 1000 }
  let(:file) { LogStash::Outputs::S3::TemporaryFile.new(name, temporary_file, temporary_directory) }

  it "raises an exception if the `time_file` is set to 0" do
    expect { described_class.new(100, 0) }.to raise_error(LogStash::ConfigurationError, /time_file/)
  end

  it "raises an exception if the `time_file` is < 0" do
    expect { described_class.new(100, -100) }.to raise_error(LogStash::ConfigurationError, /time_file/)
  end

  it "raises an exception if the `size_file` is 0" do
    expect { described_class.new(0, 100) }.to raise_error(LogStash::ConfigurationError, /size_file/)
  end

  it "raises an exception if the `size_file` is < 0" do
    expect { described_class.new(-100, 100) }.to raise_error(LogStash::ConfigurationError, /size_file/)
  end

  it "returns true if the size on disk is higher than the `file_size`" do
    file.write(content)
    file.fsync
    expect(subject.rotate?(file)).to be_truthy
  end

  it "returns false if the size is inferior to the `file_size`" do
    expect(subject.rotate?(file)).to be_falsey
  end

  context "when the size of the file is greater than 0" do
    let(:file_size) { 10000 }

    before :each do
      file.write(content)
      file.fsync
    end

    it "returns true if the file is old enough" do
      allow(file).to receive(:ctime).and_return(Time.now - (time_file * 2 * 60))
      expect(subject.rotate?(file)).to be_truthy
    end

    it "returns false if it is not old enough" do
      allow(file).to receive(:ctime).and_return(Time.now + time_file * 10)
      expect(subject.rotate?(file)).to be_falsey
    end
  end

  context "when the size of the file is 0" do
    it "returns false even if the file is old enough" do
      expect(subject.rotate?(file)).to be_falsey
    end

    it "returns false if it is not old enough" do
      expect(subject.rotate?(file)).to be_falsey
    end
  end

  context "#needs_periodic?" do
    it "returns true" do
      expect(subject.needs_periodic?).to be_truthy
    end
  end
end
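The assertions above pin the rotation rule down: rotate once the file reaches the size limit, or once it is old enough and non-empty, and needs_periodic? is true so the time half still fires when no new writes arrive. A compact sketch of that predicate in plain Ruby (using ArgumentError where the plugin raises LogStash::ConfigurationError; time_file is in minutes, as in the plugin options):

```ruby
class SketchSizeAndTimeRotationPolicy
  def initialize(size_file, time_file)
    raise ArgumentError, "size_file must be greater than 0" unless size_file > 0
    raise ArgumentError, "time_file must be greater than 0" unless time_file > 0
    @size_file = size_file
    @time_file_seconds = time_file * 60
  end

  # `file` only needs to respond to #size (bytes) and #ctime (Time).
  def rotate?(file)
    file.size >= @size_file ||
      (file.size > 0 && (Time.now - file.ctime) >= @time_file_seconds)
  end

  def needs_periodic?
    true # time-based rotation must be re-checked even without new writes
  end
end

policy = SketchSizeAndTimeRotationPolicy.new(10, 1)
StubFile = Struct.new(:size, :ctime)
puts policy.rotate?(StubFile.new(5000, Time.now))     # => true  (over the size limit)
puts policy.rotate?(StubFile.new(0, Time.now - 3600)) # => false (old but empty)
```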
data/spec/outputs/s3/size_rotation_policy_spec.rb (new file)
@@ -0,0 +1,41 @@
# encoding: utf-8
require "logstash/devutils/rspec/spec_helper"
require "logstash/outputs/s3/size_rotation_policy"
require "logstash/outputs/s3/temporary_file"
require "fileutils"

describe LogStash::Outputs::S3::SizeRotationPolicy do
  subject { described_class.new(size_file) }

  let(:temporary_directory) { Stud::Temporary.directory }
  let(:temporary_file) { Stud::Temporary.file }
  let(:name) { "foobar" }
  let(:content) { "hello" * 1000 }
  let(:size_file) { 10 } # in bytes
  let(:file) { LogStash::Outputs::S3::TemporaryFile.new(name, temporary_file, temporary_directory) }

  it "returns true if the size on disk is higher than the `size_file`" do
    file.write(content)
    file.fsync
    expect(subject.rotate?(file)).to be_truthy
  end

  it "returns false if the size is inferior to the `size_file`" do
    expect(subject.rotate?(file)).to be_falsey
  end

  it "raises an exception if the `size_file` is 0" do
    expect { described_class.new(0) }.to raise_error(LogStash::ConfigurationError, /need to be greather than 0/)
  end

  it "raises an exception if the `size_file` is < 0" do
    expect { described_class.new(-100) }.to raise_error(LogStash::ConfigurationError, /need to be greather than 0/)
  end

  context "#needs_periodic?" do
    it "returns false" do
      expect(subject.needs_periodic?).to be_falsey
    end
  end

end
data/spec/outputs/s3/temporary_file_factory_spec.rb (new file)
@@ -0,0 +1,85 @@
# encoding: utf-8
require "logstash/outputs/s3/temporary_file_factory"
require "logstash/outputs/s3/temporary_file"
require "stud/temporary"
require "fileutils"

describe LogStash::Outputs::S3::TemporaryFileFactory do
  let(:prefix) { "foobar" }
  let(:tags) { [] }
  let(:temporary_directory) { Stud::Temporary.pathname }

  before do
    FileUtils.mkdir_p(temporary_directory)
  end

  subject { described_class.new(prefix, tags, encoding, temporary_directory) }

  shared_examples "file factory" do
    it "creates the file on disk" do
      expect(File.exist?(subject.current.path)).to be_truthy
    end

    it "creates a temporary file when initialized" do
      expect(subject.current).to be_kind_of(LogStash::Outputs::S3::TemporaryFile)
    end

    it "creates a file in the right format" do
      expect(subject.current.path).to match(extension)
    end

    it "allows rotating the file" do
      file_path = subject.current.path
      expect(subject.rotate!.path).not_to eq(file_path)
    end

    it "increments the part name on rotation" do
      expect(subject.current.path).to match(/part0/)
      expect(subject.rotate!.path).to match(/part1/)
    end

    it "includes the date" do
      n = Time.now
      expect(subject.current.path).to include(n.strftime("%Y-%m-%dT"))
    end

    it "includes the file key in the path" do
      file = subject.current
      expect(file.path).to match(/#{file.key}/)
    end

    it "creates a unique directory in the temporary directory for each file" do
      uuid = "hola"
      expect(SecureRandom).to receive(:uuid).and_return(uuid).twice
      expect(subject.current.path).to include(uuid)
    end

    context "with tags supplied" do
      let(:tags) { ["secret", "service"] }

      it "adds tags to the filename" do
        expect(subject.current.path).to match(/tag_#{tags.join('.')}.part/)
      end
    end

    context "without tags" do
      it "doesn't add tags to the filename" do
        expect(subject.current.path).not_to match(/tag_/)
      end
    end
  end

  context "when gzip" do
    let(:encoding) { "gzip" }
    let(:extension) { /\.txt.gz$/ }

    include_examples "file factory"
  end

  context "when encoding set to `none`" do
    let(:encoding) { "none" }
    let(:extension) { /\.txt$/ }

    include_examples "file factory"
  end
end
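Taken together, the factory spec fixes the on-disk layout: each temporary file lives in its own random directory under temporary_directory, and its name carries the date, optional tag_ markers, an incrementing partN counter, and a .txt or .txt.gz extension, with the prefix key appearing in the path. A sketch of just the name-building part; the "ls.s3." literal and the exact field order are assumptions for illustration, not lifted from the plugin:

```ruby
require "securerandom"

# Illustrative only: builds a path shaped like the ones the spec matches against.
def sketch_temporary_file_path(temporary_directory, prefix, tags, part, gzip)
  name = "ls.s3.#{SecureRandom.uuid}.#{Time.now.strftime('%Y-%m-%dT%H.%M')}"
  name += ".tag_#{tags.join('.')}" unless tags.empty?
  name += ".part#{part}.txt"
  name += ".gz" if gzip
  # one random directory per file keeps concurrent writers from colliding
  File.join(temporary_directory, SecureRandom.uuid, prefix, name)
end

puts sketch_temporary_file_path("/tmp/ls-s3", "logs/app", %w[secret service], 0, true)
# e.g. /tmp/ls-s3/<uuid>/logs/app/ls.s3.<uuid>.<date>.tag_secret.service.part0.txt.gz
```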