logstash-output-googlecloudstorage 0.1.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +55 -0
- data/CONTRIBUTORS +18 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +98 -0
- data/docs/index.asciidoc +272 -0
- data/lib/logstash/outputs/gcs/log_rotate.rb +77 -0
- data/lib/logstash/outputs/gcs/path_factory.rb +119 -0
- data/lib/logstash/outputs/gcs/temp_log_file.rb +111 -0
- data/lib/logstash/outputs/gcs/worker_pool.rb +47 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +304 -0
- data/logstash-output-googlecloudstorage.gemspec +32 -0
- data/spec/outputs/gcs/log_rotate_spec.rb +129 -0
- data/spec/outputs/gcs/path_factory_spec.rb +189 -0
- data/spec/outputs/gcs/temp_log_file_spec.rb +155 -0
- data/spec/outputs/gcs/worker_pool_spec.rb +29 -0
- data/spec/outputs/google_cloud_storage_spec.rb +29 -0
- data/spec/spec_helper.rb +3 -0
- metadata +176 -0
data/logstash-output-googlecloudstorage.gemspec (new file):

```diff
@@ -0,0 +1,32 @@
+Gem::Specification.new do |s|
+  s.name = 'logstash-output-googlecloudstorage'
+  s.version = '0.1.0'
+  s.licenses = ['Apache-2.0']
+  s.summary = "plugin to upload log events to Google Cloud Storage (GCS)"
+  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+  s.authors = ["Shailesh"]
+  s.email = 'shailesh17mar@gmail.com'
+  s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
+  s.require_paths = ["lib"]
+  s.platform = Gem::Platform::JAVA if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'
+
+  # Files
+  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
+
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+  # Gem dependencies
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_development_dependency 'logstash-devutils'
+
+  s.add_runtime_dependency 'stud'
+  s.add_runtime_dependency 'google-api-client', '~> 0.8.7' # version 0.9.x works only with ruby 2.x
+  s.add_runtime_dependency 'logstash-codec-plain'
+  s.add_runtime_dependency 'mime-types', '~> 2' # last version compatible with ruby 2.x
+  s.add_runtime_dependency 'concurrent-ruby', '1.0.5'
+end
+
```
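The `-java` suffix in the release name at the top of this page comes from the `s.platform = Gem::Platform::JAVA` line: published from JRuby, the gem ships as a java-platform gem. Per the description, it is meant to be installed into an existing Logstash with `$LS_HOME/bin/logstash-plugin install logstash-output-googlecloudstorage`; the Gemfile form of the same dependency is sketched below (illustrative only, not part of this diff):

```ruby
# Gemfile of a Logstash installation (illustrative sketch). The supported route is
# `bin/logstash-plugin install logstash-output-googlecloudstorage`, which adds an
# entry like this and resolves the java-platform gem under JRuby.
source 'https://rubygems.org'

gem 'logstash-output-googlecloudstorage', '0.1.0', platforms: :jruby
```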
data/spec/outputs/gcs/log_rotate_spec.rb (new file):

```diff
@@ -0,0 +1,129 @@
+# encoding: utf-8
+require 'logstash/outputs/gcs/log_rotate'
+require 'logstash/outputs/gcs/path_factory'
+require 'logstash/outputs/gcs/temp_log_file'
+
+describe LogStash::Outputs::Gcs::LogRotate do
+  let(:tempdir){ Stud::Temporary.directory }
+  let(:path_factory) do
+    LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+      builder.set_directory tempdir
+      builder.set_prefix 'prefix'
+      builder.set_include_host true
+      builder.set_date_pattern ''
+      builder.set_include_part true
+      builder.set_include_uuid true
+      builder.set_is_gzipped true
+    end
+  end
+  let(:open_file_1) { double('open-temp-1', :size => 5, :path => 'one', :close! => true, :time_since_sync => 10, :fsync => true)}
+  let(:open_file_2) { double('open-temp-2', :size => 5, :path => 'two', :close! => true, :time_since_sync => 60, :fsync => true)}
+
+  describe '#initialize' do
+    it 'opens the first file' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+
+      LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30)
+    end
+  end
+
+  describe '#writeln' do
+    subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+    it 'does not rotate if size is small and path is the same' do
+      expect(path_factory).to receive(:should_rotate?).and_return(false)
+      # once for init
+      expect(path_factory).to receive(:rotate_path!).once
+
+      subject.writeln('foo')
+    end
+
+    it 'rotates the file if the size is too big' do
+      # once for init, once for writeln
+      expect(path_factory).to receive(:rotate_path!).twice
+
+      subject.writeln('this line is longer than ten characters' * 1000)
+      subject.writeln('flush')
+    end
+
+    it 'rotates the file if the path changed' do
+      expect(path_factory).to receive(:should_rotate?).and_return(true)
+      # once for init, once for writeln
+      expect(path_factory).to receive(:rotate_path!).twice
+
+      subject.writeln('foo')
+    end
+
+    it 'writes the message' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+      expect(open_file_1).to receive(:write).with('foo', "\n")
+
+      subject.writeln('foo')
+    end
+
+    it 'does not write nil messages' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+      expect(open_file_1).not_to receive(:write)
+
+      subject.writeln(nil)
+    end
+
+    it 'does not fsync if delta less than limit' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+      expect(open_file_1).not_to receive(:fsync)
+
+      subject.writeln(nil)
+    end
+
+    it 'fsyncs if delta greater than limit' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_2)
+      expect(open_file_2).to receive(:fsync)
+
+      subject.writeln(nil)
+    end
+  end
+
+  describe '#rotate_log!' do
+    subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+    before :each do
+      allow(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1, open_file_2)
+    end
+
+    it 'closes the old file' do
+      expect(open_file_1).to receive(:close!)
+
+      subject.rotate_log!
+    end
+
+    it 'calls the callback with the old file name' do
+      value = nil
+      subject.on_rotate { |old_path| value = old_path }
+
+      subject.rotate_log!
+      expect(value).to eq(open_file_1.path)
+    end
+
+    it 'opens a new file based on the new path' do
+      expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1, open_file_2)
+      expect(open_file_2).to receive(:write).with('foo', "\n")
+
+      subject.rotate_log!
+      subject.writeln('foo')
+    end
+  end
+
+  describe '#on_rotate' do
+    subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+    it 'replaces an existing callback' do
+      value = :none
+
+      subject.on_rotate { value = :first }
+      subject.on_rotate { value = :second }
+
+      subject.rotate_log!
+      expect(value).to eq(:second)
+    end
+  end
+end
```
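Read together, these examples pin down the LogRotate contract: the constructor takes a path factory plus three rotation parameters (the spec passes `10, false, 30` positionally; reading them as max size in bytes, gzip flag, and fsync interval in seconds is an assumption), `writeln` appends a line and rotates when the size limit is hit or the path changes, and `rotate_log!` closes the current file and hands its path to a single `on_rotate` callback. A minimal usage sketch under that reading, not taken from the package itself:

```ruby
require 'logstash/outputs/gcs/log_rotate'
require 'logstash/outputs/gcs/path_factory'
require 'logstash/outputs/gcs/temp_log_file'
require 'stud/temporary'

# Build a path factory the same way the spec's `let(:path_factory)` does.
path_factory = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
  builder.set_directory Stud::Temporary.directory
  builder.set_prefix 'prefix'
  builder.set_include_host true
  builder.set_date_pattern '%Y-%m-%d'   # illustrative strftime pattern
  builder.set_include_part true
  builder.set_include_uuid true
  builder.set_is_gzipped false
end

# Positional arguments mirror the spec: path factory, max size in bytes,
# gzip flag, fsync interval in seconds (the parameter names are assumptions).
rotater = LogStash::Outputs::Gcs::LogRotate.new(path_factory, 1024, false, 30)

# Only one callback is kept; a later on_rotate call replaces an earlier one.
rotater.on_rotate { |old_path| puts "closed and ready for upload: #{old_path}" }

rotater.writeln('{"message":"hello"}')  # rotates when the file grows too big or the path changes
rotater.rotate_log!                     # force a rotation; the callback fires with the old path
```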
data/spec/outputs/gcs/path_factory_spec.rb (new file):

```diff
@@ -0,0 +1,189 @@
+# encoding: utf-8
+require 'logstash/outputs/gcs/path_factory'
+
+describe LogStash::Outputs::Gcs::PathFactory do
+  describe '#initialize' do
+    it 'includes optional fields if requested' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'path/to/directory'
+        builder.set_prefix 'prefix'
+        builder.set_include_host true
+        builder.set_date_pattern ''
+        builder.set_include_part true
+        builder.set_include_uuid true
+        builder.set_is_gzipped true
+      end
+
+      vars = {
+        prefix: 'prefix',
+        host: 'hostname',
+        date: '2018-01-01',
+        uuid: '00000000-0000-0000-0000-000000000000',
+        partf: '333'
+      }
+
+      expected = 'prefix_hostname_2018-01-01.part333.00000000-0000-0000-0000-000000000000.log.gz'
+      expected = File.join('path/to/directory', expected)
+
+      actual = pf.current_path(vars)
+
+      expect(actual).to eq(expected)
+    end
+
+    it 'excludes optional fields if not requested' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'path/to/directory'
+        builder.set_prefix 'prefix'
+        builder.set_include_host false
+        builder.set_date_pattern ''
+        builder.set_include_part false
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+
+      vars = {
+        prefix: 'prefix',
+        host: 'hostname',
+        date: '2018-01-01',
+        uuid: '00000000-0000-0000-0000-000000000000',
+        partf: '333'
+      }
+
+      expected = 'prefix_2018-01-01.log'
+      expected = File.join('path/to/directory', expected)
+
+      actual = pf.current_path(vars)
+
+      expect(actual).to eq(expected)
+    end
+
+    it 'loads a path immediately' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory ''
+        builder.set_prefix ''
+        builder.set_include_host false
+        builder.set_date_pattern ''
+        builder.set_include_part false
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+
+      expect(pf.current_path).to_not eq(nil)
+    end
+
+    it 'recovers the starting part number' do
+      contents = ['pre_date.part009.log.gz', 'pre_date.part091.log.gz', 'pre_date.part000.log.gz']
+
+      allow(::File).to receive(:directory?).with('dir').and_return(true)
+      allow(Dir).to receive(:glob).and_return(contents)
+
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'dir'
+        builder.set_prefix 'pre'
+        builder.set_include_host false
+        builder.set_date_pattern 'date'
+        builder.set_include_part true
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+
+      expect(pf.current_path).to include('part092')
+    end
+  end
+
+  describe 'rotate_path!' do
+    it 'increments the part number if the base has not changed' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'dir'
+        builder.set_prefix 'pre'
+        builder.set_include_host false
+        builder.set_date_pattern 'date'
+        builder.set_include_part true
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+
+      expect(pf.current_path).to eq(File.join('dir', 'pre_date.part000.log'))
+
+      pf.rotate_path!
+      expect(pf.current_path).to eq(File.join('dir', 'pre_date.part001.log'))
+    end
+
+    it 'resets the part number if the base has changed' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'dir'
+        builder.set_prefix 'pre'
+        builder.set_include_host false
+        builder.set_date_pattern '%N'
+        builder.set_include_part true
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+      expect(pf.current_path).to include('part000')
+
+      pf.rotate_path!
+      expect(pf.current_path).to include('part000')
+    end
+
+    it 'returns the path being rotated out' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'dir'
+        builder.set_prefix 'pre'
+        builder.set_include_host false
+        builder.set_date_pattern 'date'
+        builder.set_include_part true
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+      last = pf.current_path
+      after = pf.rotate_path!
+      expect(after).to eq(last)
+    end
+  end
+
+  describe 'should_rotate?' do
+    it 'returns false when the times in the bases are the same' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory ''
+        builder.set_prefix ''
+        builder.set_include_host false
+        builder.set_date_pattern ''
+        builder.set_include_part false
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+      sleep 1.0
+      expect(pf.should_rotate?).to eq(false)
+    end
+
+    it 'returns true when the times in the bases are different' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory ''
+        builder.set_prefix ''
+        builder.set_include_host false
+        builder.set_date_pattern '%N'
+        builder.set_include_part false
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+      sleep 1.0
+      expect(pf.should_rotate?).to eq(true)
+    end
+  end
+
+  describe 'current_path' do
+    it 'joins the directory and filename' do
+      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+        builder.set_directory 'dir'
+        builder.set_prefix 'pre'
+        builder.set_include_host false
+        builder.set_date_pattern 'date'
+        builder.set_include_part false
+        builder.set_include_uuid false
+        builder.set_is_gzipped false
+      end
+
+      expect(pf.current_path).to eq(File.join('dir', 'pre_date.log'))
+    end
+  end
+end
```
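Taken together, these examples document the path grammar: optional host, part, and uuid segments, a `.gz` suffix when gzipped, part-number recovery from files already in the directory, and rotation that increments the part number only while the base name stays the same. A small usage sketch along those lines (the concrete builder values are illustrative, not plugin defaults; the date pattern is assumed to be strftime, as the spec's use of `%N` suggests):

```ruby
require 'logstash/outputs/gcs/path_factory'
require 'stud/temporary'

# Every optional piece enabled, mirroring the first example above.
pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
  builder.set_directory Stud::Temporary.directory
  builder.set_prefix 'prefix'
  builder.set_include_host true
  builder.set_date_pattern '%Y-%m-%d'
  builder.set_include_part true
  builder.set_include_uuid true
  builder.set_is_gzipped true
end

# Shape per the spec: <prefix>_<host>_<date>.part<NNN>.<uuid>.log.gz
puts pf.current_path

pf.rotate_path!       # same base within the day, so only the part number moves on
puts pf.current_path  # part000 becomes part001 when the directory started out empty
```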
data/spec/outputs/gcs/temp_log_file_spec.rb (new file):

```diff
@@ -0,0 +1,155 @@
+# encoding: utf-8
+require 'logstash/outputs/gcs/temp_log_file'
+require 'stud/temporary'
+require 'zlib'
+
+shared_examples 'a log file' do
+  describe '#initialize' do
+    it 'opens a file' do
+      expect{subject.fd}.to_not raise_error
+      expect(subject.fd).to_not be_nil
+    end
+
+    it 'sets the path' do
+      expect{subject.path}.to_not raise_error
+      expect(subject.path).to_not be_nil
+    end
+
+    it 'sets last sync' do
+      expect{subject.time_since_sync}.to_not raise_error
+    end
+  end
+
+  describe '#write' do
+    it 'writes the content' do
+      expect(subject.fd).to receive(:write).with('foo')
+      expect(subject.fd).to receive(:write).with("\n")
+
+      subject.write('foo', "\n")
+    end
+
+    it 'fails if the file is closed' do
+      subject.close!
+
+      expect{ subject.write('foo') }.to raise_error(IOError)
+    end
+  end
+
+  describe '#fsync' do
+    it 'fails if the file is closed' do
+      subject.close!
+
+      expect{ subject.fsync }.to raise_error(IOError)
+    end
+  end
+
+  describe '#close!' do
+    it 'fails if the file is closed' do
+      subject.close!
+
+      expect{ subject.close! }.to raise_error(IOError)
+    end
+  end
+
+  describe '#size' do
+    it 'gets the size of the file on disk' do
+      subject.write('hello, world!')
+      subject.fsync
+
+      expect(subject.size).to eq(File.stat(subject.path).size)
+    end
+
+    it 'does not fail if the file is closed' do
+      subject.close!
+
+      expect{ subject.size }.to_not raise_error
+    end
+  end
+
+  describe '#time_since_sync' do
+    it 'returns a delta' do
+      expect(Time).to receive(:now).and_return(Time.at(30), Time.at(40), Time.at(50))
+
+      subject.fsync
+
+      expect(subject.time_since_sync).to eq(10)
+    end
+  end
+end
+
+describe LogStash::Outputs::Gcs::PlainLogFile do
+  let(:tempdir) { Stud::Temporary.directory }
+  let(:path) { ::File.join(tempdir, 'logfile.log') }
+  subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, false, false) }
+
+  it_behaves_like 'a log file'
+
+  it 'creates a valid plain text file' do
+    subject.write('Hello, world!')
+    subject.close!
+    data = File.read(path)
+
+    expect(data).to eq('Hello, world!')
+  end
+end
+
+describe LogStash::Outputs::Gcs::GzipLogFile do
+  let(:tempdir) { Stud::Temporary.directory }
+  let(:path) { ::File.join(tempdir, 'logfile.log') }
+  subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, true, false) }
+
+  it_behaves_like 'a log file'
+
+  it 'creates a valid gzip' do
+    subject.write('Hello, world!')
+    subject.close!
+
+    Zlib::GzipReader.open(path) do |gz|
+      expect(gz.read).to eq('Hello, world!')
+    end
+  end
+end
+
+describe LogStash::Outputs::Gcs::SynchronizedLogFile do
+  let(:tempdir) { Stud::Temporary.directory }
+  let(:path) { ::File.join(tempdir, 'logfile.log') }
+  subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, false, true) }
+
+  it_behaves_like 'a log file'
+end
+
+describe 'gzip encoded file' do
+  let(:tempdir) { Stud::Temporary.directory }
+  let(:path) { ::File.join(tempdir, 'logfile.log') }
+  subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, false, false, true) }
+
+  it_behaves_like 'a log file'
+
+  it 'creates a valid gzip' do
+    subject.write('Hello, world!')
+    subject.close!
+
+    Zlib::GzipReader.open(path) do |gz|
+      expect(gz.read).to eq('Hello, world!')
+    end
+  end
+end
+
+describe 'double gzip encoded file' do
+  let(:tempdir) { Stud::Temporary.directory }
+  let(:path) { ::File.join(tempdir, 'logfile.log') }
+  subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, true, false, true) }
+
+  it_behaves_like 'a log file'
+
+  it 'creates a valid double gzip' do
+    subject.write('Hello, world!')
+    subject.close!
+
+    Zlib::GzipReader.open(path) do |outer|
+      Zlib::GzipReader.new(outer) do |inner|
+        expect(inner.read).to eq('Hello, world!')
+      end
+    end
+  end
+end
```
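The four concrete `describe` blocks above exercise the same factory with different flags, which reads as `LogFileFactory.create(path, gzip, synchronized[, gzip_encoded])` returning objects that share a `write`/`fsync`/`size`/`close!`/`time_since_sync` surface. A short sketch of that usage follows; the flag names are inferred from how the spec varies them, not taken from the implementation:

```ruby
require 'logstash/outputs/gcs/temp_log_file'
require 'stud/temporary'
require 'zlib'

path = ::File.join(Stud::Temporary.directory, 'logfile.log')

# Arguments as the spec passes them: path, gzip-on-disk, synchronized
# (two examples add a fourth gzip-encoding flag). Names here are inferred.
file = LogStash::Outputs::Gcs::LogFileFactory.create(path, true, false)

file.write('Hello, world!', "\n")  # write accepts one or more string parts
file.fsync                         # flushes and resets time_since_sync
puts file.size                     # size of the file on disk
file.close!                        # further write/fsync/close! raise IOError

Zlib::GzipReader.open(path) { |gz| puts gz.read }  # prints "Hello, world!" plus the newline
```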