logstash-output-azure 0.3.0 → 1.0.0
- checksums.yaml +5 -5
- data/CHANGELOG.md +4 -1
- data/CONTRIBUTORS +1 -0
- data/lib/logstash/outputs/azure.rb +59 -81
- data/lib/logstash/outputs/blob/file_repository.rb +33 -18
- data/lib/logstash/outputs/blob/path_validator.rb +3 -3
- data/lib/logstash/outputs/blob/size_and_time_rotation_policy.rb +6 -4
- data/lib/logstash/outputs/blob/size_rotation_policy.rb +5 -4
- data/lib/logstash/outputs/blob/temporary_file.rb +28 -19
- data/lib/logstash/outputs/blob/temporary_file_factory.rb +28 -16
- data/lib/logstash/outputs/blob/time_rotation_policy.rb +5 -4
- data/lib/logstash/outputs/blob/uploader.rb +29 -22
- data/lib/logstash/outputs/blob/writable_directory_validator.rb +6 -7
- data/logstash-output-azure.gemspec +10 -10
- data/spec/outputs/azure_spec.rb +16 -18
- data/spec/outputs/blob/file_repository_spec.rb +35 -38
- data/spec/outputs/blob/size_and_time_rotation_policy_spec.rb +20 -21
- data/spec/outputs/blob/size_rotation_policy_spec.rb +13 -15
- data/spec/outputs/blob/temporary_file_factory_spec.rb +27 -28
- data/spec/outputs/blob/temporary_file_spec.rb +14 -15
- data/spec/outputs/blob/time_rotation_policy_spec.rb +17 -18
- data/spec/outputs/blob/uploader_spec.rb +28 -32
- data/spec/outputs/blob/writable_directory_validator_spec.rb +8 -9
- data/spec/spec_helper.rb +4 -5
- data/spec/supports/helpers.rb +12 -15
- metadata +9 -9
data/spec/outputs/azure_spec.rb
CHANGED
@@ -1,5 +1,3 @@
-# encoding: utf-8
-
 require 'logstash/devutils/rspec/spec_helper'
 require 'logstash/outputs/azure'
 require 'logstash/codecs/plain'
@@ -12,7 +10,7 @@ describe LogStash::Outputs::LogstashAzureBlobOutput do
     {
       storage_account_name: ENV['AZURE_STORAGE_ACCOUNT'],
       storage_access_key: ENV['AZURE_STORAGE_ACCESS_KEY'],
-      container_name:
+      container_name: 'test',
       size_file: 5242880,
       time_file: 15,
       restore: true,
@@ -22,30 +20,30 @@ describe LogStash::Outputs::LogstashAzureBlobOutput do
       upload_workers_count: (Concurrent.processor_count * 0.5).ceil,
       rotation_strategy: 'size_and_time',
       tags: [],
-      encoding:
+      encoding: 'none'
     }
   end
   let(:sample_event) { LogStash::Event.new(source: 'alguna', tags: %w[tag1 tag2], fields: { field1: 1, field2: true }) }

-  #
-
-
-
-
+  # let(:output) { described_class.new() }
+
+  # before do
+  # output.register
+  # end

-
-
-
-
-
-
-
+  # it 'should create' do
+  # blober = described_class.new
+  # blober.register
+  # expect(blober.storage_account_name).not_to be_nil
+  # expect(blober.storage_access_key).not_to be_nil
+  # expect(blober.container_name).not_to be_nil
+  # end

   describe 'receive message' do
     subject { output.receive(sample_event) }
-    #xit 'should return the blob sent to Azure' do
+    # xit 'should return the blob sent to Azure' do
     # md5 = Digest::MD5.base64digest(sample_event.to_json)
     # expect(subject.properties[:content_md5]).to eq(md5)
-    #end
+    # end
   end
 end
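One thing worth noting in the rewritten spec: the surviving `describe 'receive message'` block still calls `output.receive(sample_event)`, but the `let(:output)` definition is now commented out, so the block only passes because its examples are disabled. A minimal sketch of the setup that would have to come back before re-enabling them, mirroring the commented-out lines above (it would also need real AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_ACCESS_KEY credentials):

    # Sketch only -- mirrors the commented-out lines in the diff above.
    let(:output) { described_class.new }  # the disabled code calls new with no arguments

    before do
      output.register                     # register needs valid Azure credentials
    end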
data/spec/outputs/blob/file_repository_spec.rb
CHANGED
@@ -1,14 +1,13 @@
-
-require
-require
-
-require_relative "../../spec_helper"
+require 'logstash/outputs/azure'
+require 'stud/temporary'
+require 'fileutils'
+require_relative '../../spec_helper'

 describe LogStash::Outputs::LogstashAzureBlobOutput::FileRepository do
   let(:tags) { [] }
-  let(:encoding) {
+  let(:encoding) { 'none' }
   let(:temporary_directory) { Stud::Temporary.pathname }
-  let(:prefix_key) {
+  let(:prefix_key) { 'a-key' }

   before do
     FileUtils.mkdir_p(temporary_directory)
@@ -16,13 +15,13 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::FileRepository do

   subject { described_class.new(tags, encoding, temporary_directory) }

-  it
+  it 'returns a temporary file' do
     subject.get_file(prefix_key) do |file|
       expect(file).to be_kind_of(LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile)
     end
   end

-  it
+  it 'returns the same file for the same prefix key' do
     file_path = nil

     subject.get_file(prefix_key) do |file|
@@ -34,13 +33,12 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::FileRepository do
     end
   end

-  it
-    prefix =
-    event = LogStash::Event.new(
+  it 'returns the same file for the same dynamic prefix key' do
+    prefix = '%{type}/%{+YYYY}/%{+MM}/%{+dd}/'
+    event = LogStash::Event.new('type' => 'syslog')
     key = event.sprintf(prefix)
     file_path = nil

-
     subject.get_file(key) do |file|
       file_path = file.path
     end
@@ -50,92 +48,91 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::FileRepository do
     end
   end

-  it
+  it 'returns different file for different prefix keys' do
     file_path = nil

     subject.get_file(prefix_key) do |file|
       file_path = file.path
     end

-    subject.get_file(
+    subject.get_file('another_prefix_key') do |file|
       expect(file.path).not_to eq(file_path)
     end
   end

-  it
+  it 'allows to get the file factory for a specific prefix' do
     subject.get_factory(prefix_key) do |factory|
       expect(factory).to be_kind_of(LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFileFactory)
     end
   end

-  it
+  it 'returns a different file factory for a different prefix keys' do
     factory = nil

     subject.get_factory(prefix_key) do |f|
       factory = f
     end

-    subject.get_factory(
+    subject.get_factory('another_prefix_key') do |f|
       expect(factory).not_to eq(f)
     end
   end

-  it
+  it 'returns the number of prefix keys' do
     expect(subject.size).to eq(0)
-    subject.get_file(prefix_key) { |file| file.write(
+    subject.get_file(prefix_key) { |file| file.write('something') }
     expect(subject.size).to eq(1)
   end

-  it
-    subject.get_file(prefix_key) { |file| file.write(
+  it 'returns all available keys' do
+    subject.get_file(prefix_key) { |file| file.write('something') }
     expect(subject.keys.toArray).to include(prefix_key)
     expect(subject.keys.toArray.size).to eq(1)
   end

-  it
+  it 'clean stale factories' do
     @file_repository = described_class.new(tags, encoding, temporary_directory, 1, 1)
     expect(@file_repository.size).to eq(0)
-    path =
+    path = ''
     @file_repository.get_factory(prefix_key) do |factory|
-      factory.current.write(
+      factory.current.write('hello')
       # force a rotation so we get an empty file that will get stale.
       factory.rotate!
       path = factory.current.temp_path
     end

-    @file_repository.get_file(
+    @file_repository.get_file('another-prefix') { |file| file.write('hello') }
     expect(@file_repository.size).to eq(2)
     try(10) { expect(@file_repository.size).to eq(1) }
     expect(File.directory?(path)).to be_falsey
   end
 end

-
 describe LogStash::Outputs::LogstashAzureBlobOutput::FileRepository::PrefixedValue do
-  let(:factory) { spy(
+  let(:factory) { spy('factory', current: file) }
   subject { described_class.new(factory, 1) }

-  context
-  context
-  let(:file) { double(
+  context '#stale?' do
+    context 'the file is empty and older than stale time' do
+      let(:file) { double('file', size: 0, ctime: Time.now - 5) }

-      it
+      it 'returns true' do
         expect(subject.stale?).to be_truthy
       end
     end

-    context
-    let(:file) { double(
+    context 'when the file has data in it' do
+      let(:file) { double('file', size: 200, ctime: Time.now - 5) }

-      it
+      it 'returns false' do
         expect(subject.stale?).to be_falsey
       end
     end

-    context
-    let(:file) { double(
+    context 'when the file is not old enough' do
+      let(:file) { double('file', size: 0, ctime: Time.now + 100) }

-      it
+      it 'returns false' do
         expect(subject.stale?).to be_falsey
       end
     end
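Read together, the expectations above describe the FileRepository contract: files are looked up by prefix key, the same key yields the same temporary file, and stale (empty, old) entries are evicted in the background. A condensed usage sketch, using only calls exercised in this spec and hypothetical values:

    # Hypothetical usage, assembled from the calls asserted above.
    require 'logstash/outputs/azure'
    require 'stud/temporary'
    require 'fileutils'

    dir = Stud::Temporary.pathname
    FileUtils.mkdir_p(dir)
    repo = LogStash::Outputs::LogstashAzureBlobOutput::FileRepository.new([], 'none', dir)

    repo.get_file('a-key') { |file| file.write('something') }  # same key => same TemporaryFile
    repo.size                                                  # => 1 (number of tracked prefix keys)
    repo.keys.toArray                                          # => ['a-key'] (a Java collection, hence toArray)
    repo.get_factory('a-key') { |factory| factory.rotate! }    # per-prefix TemporaryFileFactory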
data/spec/outputs/blob/size_and_time_rotation_policy_spec.rb
CHANGED
@@ -1,7 +1,6 @@
-
-require
-require
-require "logstash/outputs/blob/temporary_file"
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/outputs/blob/size_and_time_rotation_policy'
+require 'logstash/outputs/blob/temporary_file'

 describe LogStash::Outputs::LogstashAzureBlobOutput::SizeAndTimeRotationPolicy do
   let(:file_size) { 10 }
@@ -10,37 +9,37 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::SizeAndTimeRotationPolicy do

   let(:temporary_directory) { Stud::Temporary.pathname }
   let(:temporary_file) { Stud::Temporary.file }
-  let(:name) {
-  let(:content) {
+  let(:name) { 'foobar' }
+  let(:content) { 'hello' * 1000 }
   let(:file) { LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile.new(name, temporary_file, temporary_directory) }

-  it
+  it 'raises an exception if the `time_file` is set to 0' do
     expect { described_class.new(100, 0) }.to raise_error(LogStash::ConfigurationError, /time_file/)
   end

-  it
+  it 'raises an exception if the `time_file` is < 0' do
     expect { described_class.new(100, -100) }.to raise_error(LogStash::ConfigurationError, /time_file/)
   end

-  it
+  it 'raises an exception if the `size_file` is 0' do
     expect { described_class.new(0, 100) }.to raise_error(LogStash::ConfigurationError, /size_file/)
   end

-  it
+  it 'raises an exception if the `size_file` is < 0' do
     expect { described_class.new(-100, 100) }.to raise_error(LogStash::ConfigurationError, /size_file/)
   end

-  it
+  it 'returns true if the size on disk is higher than the `file_size`' do
     file.write(content)
     file.fsync
     expect(subject.rotate?(file)).to be_truthy
   end

-  it
+  it 'returns false if the size is inferior than the `file_size`' do
     expect(subject.rotate?(file)).to be_falsey
   end

-  context
+  context 'when the size of the file is superior to 0' do
     let(:file_size) { 10000 }

     before :each do
@@ -48,29 +47,29 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::SizeAndTimeRotationPolicy do
       file.fsync
     end

-    it
-      allow(file).to receive(:ctime).and_return(Time.now - (time_file * 2 * 60)
+    it 'returns true if the file old enough' do
+      allow(file).to receive(:ctime).and_return(Time.now - (time_file * 2 * 60))
       expect(subject.rotate?(file)).to be_truthy
     end

-    it
+    it 'returns false is not old enough' do
       allow(file).to receive(:ctime).and_return(Time.now + time_file * 10)
       expect(subject.rotate?(file)).to be_falsey
     end
   end

-  context
-  it
+  context 'When the size of the file is 0' do
+    it 'returns false if the file old enough' do
      expect(subject.rotate?(file)).to be_falsey
    end

-    it
+    it 'returns false is not old enough' do
      expect(subject.rotate?(file)).to be_falsey
    end
  end

-  context
-  it
+  context '#needs_periodic?' do
+    it 'return true' do
      expect(subject.needs_periodic?).to be_truthy
    end
  end
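The behaviour pinned down above amounts to: `SizeAndTimeRotationPolicy.new(size_file, time_file)` rejects non-positive arguments, rotates once a non-empty file is either large enough or old enough, and asks for periodic checks. A short sketch based only on these expectations (the argument order and the minutes interpretation of `time_file` are inferred from the spec, e.g. the `time_file * 2 * 60` seconds backdating):

    # Sketch based only on the expectations above; requires the plugin and stud gems.
    require 'logstash/outputs/blob/size_and_time_rotation_policy'
    require 'logstash/outputs/blob/temporary_file'
    require 'stud/temporary'

    file = LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile.new('foobar', Stud::Temporary.file, Stud::Temporary.pathname)
    policy = LogStash::Outputs::LogstashAzureBlobOutput::SizeAndTimeRotationPolicy.new(1024, 15)

    policy.rotate?(file)    # false while the file is empty; true once it has data and is big or old enough
    policy.needs_periodic?  # => true, so time-based rotation is re-checked periodically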
data/spec/outputs/blob/size_rotation_policy_spec.rb
CHANGED
@@ -1,41 +1,39 @@
-
-require
-require
-require
-require "fileutils"
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/outputs/blob/size_rotation_policy'
+require 'logstash/outputs/blob/temporary_file'
+require 'fileutils'

 describe LogStash::Outputs::LogstashAzureBlobOutput::SizeRotationPolicy do
   subject { described_class.new(size_file) }

-  let(:temporary_directory) {
+  let(:temporary_directory) { Stud::Temporary.directory }
   let(:temporary_file) { Stud::Temporary.file }
-  let(:name) {
-  let(:content) {
+  let(:name) { 'foobar' }
+  let(:content) { 'hello' * 1000 }
   let(:size_file) { 10 } # in bytes
   let(:file) { LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile.new(name, temporary_file, temporary_directory) }

-  it
+  it 'returns true if the size on disk is higher than the `size_file`' do
     file.write(content)
     file.fsync
     expect(subject.rotate?(file)).to be_truthy
   end

-  it
+  it 'returns false if the size is inferior than the `size_file`' do
     expect(subject.rotate?(file)).to be_falsey
   end

-  it
+  it 'raises an exception if the `size_file` is 0' do
     expect { described_class.new(0) }.to raise_error(LogStash::ConfigurationError, /need to be greather than 0/)
   end

-  it
+  it 'raises an exception if the `size_file` is < 0' do
     expect { described_class.new(-100) }.to raise_error(LogStash::ConfigurationError, /need to be greather than 0/)
   end

-  context
-  it
+  context '#needs_periodic?' do
+    it 'return false' do
      expect(subject.needs_periodic?).to be_falsey
    end
  end
-
 end
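The size-only policy is the same idea minus the time dimension; sketched from the spec above:

    # Sketch: size-only rotation, mirroring the constructor and calls used in the spec.
    require 'logstash/outputs/blob/size_rotation_policy'
    require 'logstash/outputs/blob/temporary_file'
    require 'stud/temporary'

    file = LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile.new('foobar', Stud::Temporary.file, Stud::Temporary.directory)
    policy = LogStash::Outputs::LogstashAzureBlobOutput::SizeRotationPolicy.new(10) # bytes
    policy.rotate?(file)    # true once the bytes written and fsynced exceed size_file
    policy.needs_periodic?  # => false, unlike the size_and_time policy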
data/spec/outputs/blob/temporary_file_factory_spec.rb
CHANGED
@@ -1,11 +1,10 @@
-
-require
-require
-require
-require "fileutils"
+require 'logstash/outputs/blob/temporary_file_factory'
+require 'logstash/outputs/blob/temporary_file'
+require 'stud/temporary'
+require 'fileutils'

 describe LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFileFactory do
-  let(:prefix) {
+  let(:prefix) { 'foobar' }
   let(:tags) { [] }
   let(:temporary_directory) { Stud::Temporary.pathname }

@@ -15,75 +14,75 @@ describe LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFileFactory do

   subject { described_class.new(prefix, tags, encoding, temporary_directory) }

-  shared_examples
-  it
+  shared_examples 'file factory' do
+    it 'creates the file on disk' do
       expect(File.exist?(subject.current.path)).to be_truthy
     end

-    it
+    it 'returns a size equal to zero after file creation' do
       expect(subject.current.size).to eq(0)
     end

-    it
+    it 'create a temporary file when initialized' do
       expect(subject.current).to be_kind_of(LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFile)
     end

-    it
+    it 'create a file in the right format' do
       expect(subject.current.path).to match(extension)
     end

-    it
+    it 'allow to rotate the file' do
       file_path = subject.current.path
       expect(subject.rotate!.path).not_to eq(file_path)
     end

-    it
+    it 'increments the part name on rotation' do
       expect(subject.current.path).to match(/part0/)
       expect(subject.rotate!.path).to match(/part1/)
     end

-    it
+    it 'includes the date' do
       n = Time.now
-      expect(subject.current.path).to include(n.strftime(
+      expect(subject.current.path).to include(n.strftime('%Y-%m-%dT'))
     end

-    it
+    it 'include the file key in the path' do
       file = subject.current
       expect(file.path).to match(/#{file.key}/)
     end

-    it
-      uuid =
+    it 'create a unique directory in the temporary directory for each file' do
+      uuid = 'hola'
       expect(SecureRandom).to receive(:uuid).and_return(uuid).twice
       expect(subject.current.path).to include(uuid)
     end

-    context
-    let(:tags) { [
+    context 'with tags supplied' do
+      let(:tags) { %w[secret service] }

-      it
+      it 'adds tags to the filename' do
         expect(subject.current.path).to match(/tag_#{tags.join('.')}.part/)
       end
     end

-    context
+    context 'without tags' do
       it "doesn't add tags to the filename" do
         expect(subject.current.path).not_to match(/tag_/)
       end
     end
   end

-  context
-  let(:encoding) {
+  context 'when gzip' do
+    let(:encoding) { 'gzip' }
     let(:extension) { /\.txt.gz$/ }

-    include_examples
+    include_examples 'file factory'
   end

-  context
-  let(:encoding) {
+  context 'when encoding set to `none`' do
+    let(:encoding) { 'none' }
     let(:extension) { /\.txt$/ }

-    include_examples
+    include_examples 'file factory'
   end
 end
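The factory behaviour asserted above can be summarized as follows (a sketch with hypothetical values; the path layout is inferred from the matchers in the spec):

    # Hypothetical walk-through of TemporaryFileFactory, limited to behaviour asserted above.
    require 'logstash/outputs/blob/temporary_file_factory'
    require 'stud/temporary'

    factory = LogStash::Outputs::LogstashAzureBlobOutput::TemporaryFileFactory.new(
      'foobar', %w[secret service], 'gzip', Stud::Temporary.pathname
    )

    factory.current.path  # contains the date, a random per-file directory, 'tag_secret.service', 'part0' and '.txt.gz'
    factory.rotate!       # returns a new TemporaryFile whose path contains 'part1' instead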