fluent-plugin-azurestorage 0.0.7 → 0.0.8
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.gitignore +11 -11
- data/.travis.yml +19 -21
- data/Gemfile +3 -3
- data/LICENSE.txt +22 -22
- data/README.md +210 -210
- data/Rakefile +14 -14
- data/VERSION +1 -1
- data/fluent-plugin-azurestorage.gemspec +26 -26
- data/lib/fluent/plugin/azurestorage_compressor_gzip_command.rb +51 -51
- data/lib/fluent/plugin/azurestorage_compressor_lzma2.rb +34 -34
- data/lib/fluent/plugin/azurestorage_compressor_lzo.rb +34 -34
- data/lib/fluent/plugin/out_azurestorage.rb +248 -248
- data/lib/fluent/plugin/upload_service.rb +207 -207
- data/test/test_out_azurestorage.rb +224 -224
- metadata +32 -27
data/Rakefile
CHANGED
@@ -1,14 +1,14 @@
|
|
1
|
-
|
2
|
-
require 'bundler'
|
3
|
-
Bundler::GemHelper.install_tasks
|
4
|
-
|
5
|
-
require 'rake/testtask'
|
6
|
-
|
7
|
-
Rake::TestTask.new(:test) do |test|
|
8
|
-
test.libs << 'lib' << 'test'
|
9
|
-
test.test_files = FileList['test/test_*.rb']
|
10
|
-
test.verbose = true
|
11
|
-
end
|
12
|
-
|
13
|
-
task :default => [:build]
|
14
|
-
|
1
|
+
|
2
|
+
require 'bundler'
|
3
|
+
Bundler::GemHelper.install_tasks
|
4
|
+
|
5
|
+
require 'rake/testtask'
|
6
|
+
|
7
|
+
Rake::TestTask.new(:test) do |test|
|
8
|
+
test.libs << 'lib' << 'test'
|
9
|
+
test.test_files = FileList['test/test_*.rb']
|
10
|
+
test.verbose = true
|
11
|
+
end
|
12
|
+
|
13
|
+
task :default => [:build]
|
14
|
+
|
data/VERSION
CHANGED
@@ -1 +1 @@
|
|
1
|
-
0.0.7
|
1
|
+
0.0.8
|
@@ -1,26 +1,26 @@
|
|
1
|
-
# encoding: utf-8
|
2
|
-
$:.push File.expand_path('../lib', __FILE__)
|
3
|
-
|
4
|
-
Gem::Specification.new do |gem|
|
5
|
-
gem.name = "fluent-plugin-azurestorage"
|
6
|
-
gem.description = "Azure Storage output plugin for Fluentd event collector"
|
7
|
-
gem.license = "Apache-2.0"
|
8
|
-
gem.homepage = "https://github.com/htgc/fluent-plugin-azurestorage"
|
9
|
-
gem.summary = gem.description
|
10
|
-
gem.version = File.read("VERSION").strip
|
11
|
-
gem.authors = ["Hidemasa Togashi"]
|
12
|
-
gem.email = ["togachiro@gmail.com"]
|
13
|
-
gem.has_rdoc = false
|
14
|
-
#gem.platform = Gem::Platform::RUBY
|
15
|
-
gem.files = `git ls-files`.split("\n")
|
16
|
-
gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
|
17
|
-
gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
|
18
|
-
gem.require_paths = ['lib']
|
19
|
-
|
20
|
-
gem.add_dependency "fluentd", [">= 0.
|
21
|
-
gem.add_dependency "azure", "0.
|
22
|
-
gem.add_dependency "fluent-mixin-config-placeholders", ">= 0.3.0"
|
23
|
-
gem.add_development_dependency "rake", ">= 0.9.2"
|
24
|
-
gem.add_development_dependency "test-unit", ">= 3.0.8"
|
25
|
-
gem.add_development_dependency "test-unit-rr", ">= 1.0.3"
|
26
|
-
end
|
1
|
+
# encoding: utf-8
|
2
|
+
$:.push File.expand_path('../lib', __FILE__)
|
3
|
+
|
4
|
+
Gem::Specification.new do |gem|
|
5
|
+
gem.name = "fluent-plugin-azurestorage"
|
6
|
+
gem.description = "Azure Storage output plugin for Fluentd event collector"
|
7
|
+
gem.license = "Apache-2.0"
|
8
|
+
gem.homepage = "https://github.com/htgc/fluent-plugin-azurestorage"
|
9
|
+
gem.summary = gem.description
|
10
|
+
gem.version = File.read("VERSION").strip
|
11
|
+
gem.authors = ["Hidemasa Togashi"]
|
12
|
+
gem.email = ["togachiro@gmail.com"]
|
13
|
+
gem.has_rdoc = false
|
14
|
+
#gem.platform = Gem::Platform::RUBY
|
15
|
+
gem.files = `git ls-files`.split("\n")
|
16
|
+
gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
|
17
|
+
gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
|
18
|
+
gem.require_paths = ['lib']
|
19
|
+
|
20
|
+
gem.add_dependency "fluentd", [">= 0.12.0", "< 2"]
|
21
|
+
gem.add_dependency "azure", [">= 0.7.1", "<= 0.7.7"]
|
22
|
+
gem.add_dependency "fluent-mixin-config-placeholders", ">= 0.3.0"
|
23
|
+
gem.add_development_dependency "rake", ">= 0.9.2"
|
24
|
+
gem.add_development_dependency "test-unit", ">= 3.0.8"
|
25
|
+
gem.add_development_dependency "test-unit-rr", ">= 1.0.3"
|
26
|
+
end
|
@@ -1,51 +1,51 @@
|
|
1
|
-
module Fluent
|
2
|
-
class AzureStorageOutput
|
3
|
-
class GzipCommandCompressor < Compressor
|
4
|
-
AzureStorageOutput.register_compressor('gzip_command', self)
|
5
|
-
|
6
|
-
config_param :command_parameter, :string, :default => ''
|
7
|
-
|
8
|
-
def configure(conf)
|
9
|
-
super
|
10
|
-
check_command('gzip')
|
11
|
-
end
|
12
|
-
|
13
|
-
def ext
|
14
|
-
'gz'.freeze
|
15
|
-
end
|
16
|
-
|
17
|
-
def content_type
|
18
|
-
'application/x-gzip'.freeze
|
19
|
-
end
|
20
|
-
|
21
|
-
def compress(chunk, tmp)
|
22
|
-
chunk_is_file = @buffer_type == 'file'
|
23
|
-
path = if chunk_is_file
|
24
|
-
chunk.path
|
25
|
-
else
|
26
|
-
w = Tempfile.new("chunk-gzip-tmp")
|
27
|
-
chunk.write_to(w)
|
28
|
-
w.close
|
29
|
-
w.path
|
30
|
-
end
|
31
|
-
|
32
|
-
res = system "gzip #{@command_parameter} -c #{path} > #{tmp.path}"
|
33
|
-
unless res
|
34
|
-
log.warn "failed to execute gzip command. Fallback to GzipWriter. status = #{$?}"
|
35
|
-
begin
|
36
|
-
tmp.truncate(0)
|
37
|
-
gw = Zlib::GzipWriter.new(tmp)
|
38
|
-
chunk.write_to(gw)
|
39
|
-
gw.close
|
40
|
-
ensure
|
41
|
-
gw.close rescue nil
|
42
|
-
end
|
43
|
-
end
|
44
|
-
ensure
|
45
|
-
unless chunk_is_file
|
46
|
-
w.close(true) rescue nil
|
47
|
-
end
|
48
|
-
end
|
49
|
-
end
|
50
|
-
end
|
51
|
-
end
|
1
|
+
module Fluent
|
2
|
+
class AzureStorageOutput
|
3
|
+
class GzipCommandCompressor < Compressor
|
4
|
+
AzureStorageOutput.register_compressor('gzip_command', self)
|
5
|
+
|
6
|
+
config_param :command_parameter, :string, :default => ''
|
7
|
+
|
8
|
+
def configure(conf)
|
9
|
+
super
|
10
|
+
check_command('gzip')
|
11
|
+
end
|
12
|
+
|
13
|
+
def ext
|
14
|
+
'gz'.freeze
|
15
|
+
end
|
16
|
+
|
17
|
+
def content_type
|
18
|
+
'application/x-gzip'.freeze
|
19
|
+
end
|
20
|
+
|
21
|
+
def compress(chunk, tmp)
|
22
|
+
chunk_is_file = @buffer_type == 'file'
|
23
|
+
path = if chunk_is_file
|
24
|
+
chunk.path
|
25
|
+
else
|
26
|
+
w = Tempfile.new("chunk-gzip-tmp")
|
27
|
+
chunk.write_to(w)
|
28
|
+
w.close
|
29
|
+
w.path
|
30
|
+
end
|
31
|
+
|
32
|
+
res = system "gzip #{@command_parameter} -c #{path} > #{tmp.path}"
|
33
|
+
unless res
|
34
|
+
log.warn "failed to execute gzip command. Fallback to GzipWriter. status = #{$?}"
|
35
|
+
begin
|
36
|
+
tmp.truncate(0)
|
37
|
+
gw = Zlib::GzipWriter.new(tmp)
|
38
|
+
chunk.write_to(gw)
|
39
|
+
gw.close
|
40
|
+
ensure
|
41
|
+
gw.close rescue nil
|
42
|
+
end
|
43
|
+
end
|
44
|
+
ensure
|
45
|
+
unless chunk_is_file
|
46
|
+
w.close(true) rescue nil
|
47
|
+
end
|
48
|
+
end
|
49
|
+
end
|
50
|
+
end
|
51
|
+
end
|
@@ -1,34 +1,34 @@
|
|
1
|
-
module Fluent
|
2
|
-
class AzureStorageOutput
|
3
|
-
class LZMA2Compressor < Compressor
|
4
|
-
AzureStorageOutput.register_compressor('lzma2', self)
|
5
|
-
|
6
|
-
config_param :command_parameter, :string, :default => '-qf0'
|
7
|
-
|
8
|
-
def configure(conf)
|
9
|
-
super
|
10
|
-
check_command('xz', 'LZMA2')
|
11
|
-
end
|
12
|
-
|
13
|
-
def ext
|
14
|
-
'xz'.freeze
|
15
|
-
end
|
16
|
-
|
17
|
-
def content_type
|
18
|
-
'application/x-xz'.freeze
|
19
|
-
end
|
20
|
-
|
21
|
-
def compress(chunk, tmp)
|
22
|
-
w = Tempfile.new("chunk-xz-tmp")
|
23
|
-
chunk.write_to(w)
|
24
|
-
w.close
|
25
|
-
|
26
|
-
# We don't check the return code because we can't recover lzop failure.
|
27
|
-
system "xz #{@command_parameter} -c #{w.path} > #{tmp.path}"
|
28
|
-
ensure
|
29
|
-
w.close rescue nil
|
30
|
-
w.unlink rescue nil
|
31
|
-
end
|
32
|
-
end
|
33
|
-
end
|
34
|
-
end
|
1
|
+
module Fluent
|
2
|
+
class AzureStorageOutput
|
3
|
+
class LZMA2Compressor < Compressor
|
4
|
+
AzureStorageOutput.register_compressor('lzma2', self)
|
5
|
+
|
6
|
+
config_param :command_parameter, :string, :default => '-qf0'
|
7
|
+
|
8
|
+
def configure(conf)
|
9
|
+
super
|
10
|
+
check_command('xz', 'LZMA2')
|
11
|
+
end
|
12
|
+
|
13
|
+
def ext
|
14
|
+
'xz'.freeze
|
15
|
+
end
|
16
|
+
|
17
|
+
def content_type
|
18
|
+
'application/x-xz'.freeze
|
19
|
+
end
|
20
|
+
|
21
|
+
def compress(chunk, tmp)
|
22
|
+
w = Tempfile.new("chunk-xz-tmp")
|
23
|
+
chunk.write_to(w)
|
24
|
+
w.close
|
25
|
+
|
26
|
+
# We don't check the return code because we can't recover lzop failure.
|
27
|
+
system "xz #{@command_parameter} -c #{w.path} > #{tmp.path}"
|
28
|
+
ensure
|
29
|
+
w.close rescue nil
|
30
|
+
w.unlink rescue nil
|
31
|
+
end
|
32
|
+
end
|
33
|
+
end
|
34
|
+
end
|
@@ -1,34 +1,34 @@
|
|
1
|
-
module Fluent
|
2
|
-
class AzureStorageOutput
|
3
|
-
class LZOCompressor < Compressor
|
4
|
-
AzureStorageOutput.register_compressor('lzo', self)
|
5
|
-
|
6
|
-
config_param :command_parameter, :string, :default => '-qf1'
|
7
|
-
|
8
|
-
def configure(conf)
|
9
|
-
super
|
10
|
-
check_command('lzop', 'LZO')
|
11
|
-
end
|
12
|
-
|
13
|
-
def ext
|
14
|
-
'lzo'.freeze
|
15
|
-
end
|
16
|
-
|
17
|
-
def content_type
|
18
|
-
'application/x-lzop'.freeze
|
19
|
-
end
|
20
|
-
|
21
|
-
def compress(chunk, tmp)
|
22
|
-
w = Tempfile.new("chunk-tmp")
|
23
|
-
chunk.write_to(w)
|
24
|
-
w.close
|
25
|
-
|
26
|
-
# We don't check the return code because we can't recover lzop failure.
|
27
|
-
system "lzop #{@command_parameter} -o #{tmp.path} #{w.path}"
|
28
|
-
ensure
|
29
|
-
w.close rescue nil
|
30
|
-
w.unlink rescue nil
|
31
|
-
end
|
32
|
-
end
|
33
|
-
end
|
34
|
-
end
|
1
|
+
module Fluent
|
2
|
+
class AzureStorageOutput
|
3
|
+
class LZOCompressor < Compressor
|
4
|
+
AzureStorageOutput.register_compressor('lzo', self)
|
5
|
+
|
6
|
+
config_param :command_parameter, :string, :default => '-qf1'
|
7
|
+
|
8
|
+
def configure(conf)
|
9
|
+
super
|
10
|
+
check_command('lzop', 'LZO')
|
11
|
+
end
|
12
|
+
|
13
|
+
def ext
|
14
|
+
'lzo'.freeze
|
15
|
+
end
|
16
|
+
|
17
|
+
def content_type
|
18
|
+
'application/x-lzop'.freeze
|
19
|
+
end
|
20
|
+
|
21
|
+
def compress(chunk, tmp)
|
22
|
+
w = Tempfile.new("chunk-tmp")
|
23
|
+
chunk.write_to(w)
|
24
|
+
w.close
|
25
|
+
|
26
|
+
# We don't check the return code because we can't recover lzop failure.
|
27
|
+
system "lzop #{@command_parameter} -o #{tmp.path} #{w.path}"
|
28
|
+
ensure
|
29
|
+
w.close rescue nil
|
30
|
+
w.unlink rescue nil
|
31
|
+
end
|
32
|
+
end
|
33
|
+
end
|
34
|
+
end
|
@@ -1,248 +1,248 @@
|
|
1
|
-
module Fluent
|
2
|
-
require 'fluent/mixin/config_placeholders'
|
3
|
-
|
4
|
-
class AzureStorageOutput < Fluent::TimeSlicedOutput
|
5
|
-
Fluent::Plugin.register_output('azurestorage', self)
|
6
|
-
|
7
|
-
def initialize
|
8
|
-
super
|
9
|
-
require 'azure'
|
10
|
-
require 'fluent/plugin/upload_service'
|
11
|
-
require 'zlib'
|
12
|
-
require 'time'
|
13
|
-
require 'tempfile'
|
14
|
-
|
15
|
-
@compressor = nil
|
16
|
-
end
|
17
|
-
|
18
|
-
config_param :path, :string, :default => ""
|
19
|
-
config_param :azure_storage_account, :string, :default => nil
|
20
|
-
config_param :azure_storage_access_key, :string, :default => nil, :secret => true
|
21
|
-
config_param :azure_container, :string, :default => nil
|
22
|
-
config_param :azure_storage_type, :string, :default => "blob"
|
23
|
-
config_param :azure_object_key_format, :string, :default => "%{path}%{time_slice}_%{index}.%{file_extension}"
|
24
|
-
config_param :store_as, :string, :default => "gzip"
|
25
|
-
config_param :auto_create_container, :bool, :default => true
|
26
|
-
config_param :format, :string, :default => "out_file"
|
27
|
-
config_param :command_parameter, :string, :default => nil
|
28
|
-
|
29
|
-
attr_reader :bs
|
30
|
-
|
31
|
-
include Fluent::Mixin::ConfigPlaceholders
|
32
|
-
|
33
|
-
def placeholders
|
34
|
-
[:percent]
|
35
|
-
end
|
36
|
-
|
37
|
-
def configure(conf)
|
38
|
-
super
|
39
|
-
|
40
|
-
begin
|
41
|
-
@compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
|
42
|
-
rescue => e
|
43
|
-
$log.warn "#{@store_as} not found. Use 'text' instead"
|
44
|
-
@compressor = TextCompressor.new
|
45
|
-
end
|
46
|
-
@compressor.configure(conf)
|
47
|
-
|
48
|
-
@formatter = Plugin.new_formatter(@format)
|
49
|
-
@formatter.configure(conf)
|
50
|
-
|
51
|
-
if @localtime
|
52
|
-
@path_slicer = Proc.new {|path|
|
53
|
-
Time.now.strftime(path)
|
54
|
-
}
|
55
|
-
else
|
56
|
-
@path_slicer = Proc.new {|path|
|
57
|
-
Time.now.utc.strftime(path)
|
58
|
-
}
|
59
|
-
end
|
60
|
-
|
61
|
-
if @azure_container.nil?
|
62
|
-
raise ConfigError, 'azure_container is needed'
|
63
|
-
end
|
64
|
-
|
65
|
-
@storage_type = case @azure_storage_type
|
66
|
-
when 'tables'
|
67
|
-
raise NotImplementedError
|
68
|
-
when 'queues'
|
69
|
-
raise NotImplementedError
|
70
|
-
else
|
71
|
-
'blob'
|
72
|
-
end
|
73
|
-
end
|
74
|
-
|
75
|
-
def start
|
76
|
-
super
|
77
|
-
|
78
|
-
if (!@azure_storage_account.nil? && !@azure_storage_access_key.nil?)
|
79
|
-
Azure.configure do |config|
|
80
|
-
config.storage_account_name = @azure_storage_account
|
81
|
-
config.storage_access_key = @azure_storage_access_key
|
82
|
-
end
|
83
|
-
end
|
84
|
-
@bs = Azure::BlobService.new
|
85
|
-
@bs.extend UploadService
|
86
|
-
|
87
|
-
ensure_container
|
88
|
-
end
|
89
|
-
|
90
|
-
def format(tag, time, record)
|
91
|
-
@formatter.format(tag, time, record)
|
92
|
-
end
|
93
|
-
|
94
|
-
def write(chunk)
|
95
|
-
i = 0
|
96
|
-
previous_path = nil
|
97
|
-
|
98
|
-
begin
|
99
|
-
path = @path_slicer.call(@path)
|
100
|
-
values_for_object_key = {
|
101
|
-
"path" => path,
|
102
|
-
"time_slice" => chunk.key,
|
103
|
-
"file_extension" => @compressor.ext,
|
104
|
-
"index" => i,
|
105
|
-
"uuid_flush" => uuid_random
|
106
|
-
}
|
107
|
-
storage_path = @azure_object_key_format.gsub(%r(%{[^}]+})) { |expr|
|
108
|
-
values_for_object_key[expr[2...expr.size-1]]
|
109
|
-
}
|
110
|
-
if (i > 0) && (storage_path == previous_path)
|
111
|
-
raise "duplicated path is generated. use %{index} in azure_object_key_format: path = #{storage_path}"
|
112
|
-
end
|
113
|
-
|
114
|
-
i += 1
|
115
|
-
previous_path = storage_path
|
116
|
-
end while blob_exists?(@azure_container, storage_path)
|
117
|
-
|
118
|
-
tmp = Tempfile.new("azure-")
|
119
|
-
begin
|
120
|
-
@compressor.compress(chunk, tmp)
|
121
|
-
tmp.close
|
122
|
-
|
123
|
-
options = {}
|
124
|
-
options[:content_type] = @compressor.content_type
|
125
|
-
options[:container] = @azure_container
|
126
|
-
options[:blob] = storage_path
|
127
|
-
|
128
|
-
@bs.upload(tmp.path, options)
|
129
|
-
end
|
130
|
-
end
|
131
|
-
|
132
|
-
private
|
133
|
-
def ensure_container
|
134
|
-
if ! @bs.list_containers.find { |c| c.name == @azure_container }
|
135
|
-
if @auto_create_container
|
136
|
-
@bs.create_container(@azure_container)
|
137
|
-
else
|
138
|
-
raise "The specified container does not exist: container = #{@azure_container}"
|
139
|
-
end
|
140
|
-
end
|
141
|
-
end
|
142
|
-
|
143
|
-
class Compressor
|
144
|
-
include Configurable
|
145
|
-
|
146
|
-
def initialize(opts = {})
|
147
|
-
super()
|
148
|
-
@buffer_type = opts[:buffer_type]
|
149
|
-
@log = opts[:log]
|
150
|
-
end
|
151
|
-
|
152
|
-
attr_reader :buffer_type, :log
|
153
|
-
|
154
|
-
def configure(conf)
|
155
|
-
super
|
156
|
-
end
|
157
|
-
|
158
|
-
def ext
|
159
|
-
end
|
160
|
-
|
161
|
-
def content_type
|
162
|
-
end
|
163
|
-
|
164
|
-
def compress(chunk, tmp)
|
165
|
-
end
|
166
|
-
|
167
|
-
private
|
168
|
-
|
169
|
-
def check_command(command, algo = nil)
|
170
|
-
require 'open3'
|
171
|
-
|
172
|
-
algo = command if algo.nil?
|
173
|
-
begin
|
174
|
-
Open3.capture3("#{command} -V")
|
175
|
-
rescue Errno::ENOENT
|
176
|
-
raise ConfigError, "'#{command}' utility must be in PATH for #{algo} compression"
|
177
|
-
end
|
178
|
-
end
|
179
|
-
end
|
180
|
-
|
181
|
-
class GzipCompressor < Compressor
|
182
|
-
def ext
|
183
|
-
'gz'.freeze
|
184
|
-
end
|
185
|
-
|
186
|
-
def content_type
|
187
|
-
'application/x-gzip'.freeze
|
188
|
-
end
|
189
|
-
|
190
|
-
def compress(chunk, tmp)
|
191
|
-
w = Zlib::GzipWriter.new(tmp)
|
192
|
-
chunk.write_to(w)
|
193
|
-
w.finish
|
194
|
-
ensure
|
195
|
-
w.finish rescue nil
|
196
|
-
end
|
197
|
-
end
|
198
|
-
|
199
|
-
class TextCompressor < Compressor
|
200
|
-
def ext
|
201
|
-
'txt'.freeze
|
202
|
-
end
|
203
|
-
|
204
|
-
def content_type
|
205
|
-
'text/plain'.freeze
|
206
|
-
end
|
207
|
-
|
208
|
-
def compress(chunk, tmp)
|
209
|
-
chunk.write_to(tmp)
|
210
|
-
end
|
211
|
-
end
|
212
|
-
|
213
|
-
class JsonCompressor < TextCompressor
|
214
|
-
def ext
|
215
|
-
'json'.freeze
|
216
|
-
end
|
217
|
-
|
218
|
-
def content_type
|
219
|
-
'application/json'.freeze
|
220
|
-
end
|
221
|
-
end
|
222
|
-
|
223
|
-
COMPRESSOR_REGISTRY = Registry.new(:azurestorage_compressor_type, 'fluent/plugin/azurestorage_compressor_')
|
224
|
-
{
|
225
|
-
'gzip' => GzipCompressor,
|
226
|
-
'json' => JsonCompressor,
|
227
|
-
'text' => TextCompressor
|
228
|
-
}.each { |name, compressor|
|
229
|
-
COMPRESSOR_REGISTRY.register(name, compressor)
|
230
|
-
}
|
231
|
-
|
232
|
-
def self.register_compressor(name, compressor)
|
233
|
-
COMPRESSOR_REGISTRY.register(name, compressor)
|
234
|
-
end
|
235
|
-
|
236
|
-
def blob_exists?(container, blob)
|
237
|
-
begin
|
238
|
-
@bs.get_blob_properties(container, blob)
|
239
|
-
true
|
240
|
-
rescue Azure::Core::Http::HTTPError => ex
|
241
|
-
raise if ex.status_code != 404
|
242
|
-
false
|
243
|
-
rescue Exception => e
|
244
|
-
raise e.message
|
245
|
-
end
|
246
|
-
end
|
247
|
-
end
|
248
|
-
end
|
1
|
+
module Fluent
|
2
|
+
require 'fluent/mixin/config_placeholders'
|
3
|
+
|
4
|
+
class AzureStorageOutput < Fluent::TimeSlicedOutput
|
5
|
+
Fluent::Plugin.register_output('azurestorage', self)
|
6
|
+
|
7
|
+
def initialize
|
8
|
+
super
|
9
|
+
require 'azure'
|
10
|
+
require 'fluent/plugin/upload_service'
|
11
|
+
require 'zlib'
|
12
|
+
require 'time'
|
13
|
+
require 'tempfile'
|
14
|
+
|
15
|
+
@compressor = nil
|
16
|
+
end
|
17
|
+
|
18
|
+
config_param :path, :string, :default => ""
|
19
|
+
config_param :azure_storage_account, :string, :default => nil
|
20
|
+
config_param :azure_storage_access_key, :string, :default => nil, :secret => true
|
21
|
+
config_param :azure_container, :string, :default => nil
|
22
|
+
config_param :azure_storage_type, :string, :default => "blob"
|
23
|
+
config_param :azure_object_key_format, :string, :default => "%{path}%{time_slice}_%{index}.%{file_extension}"
|
24
|
+
config_param :store_as, :string, :default => "gzip"
|
25
|
+
config_param :auto_create_container, :bool, :default => true
|
26
|
+
config_param :format, :string, :default => "out_file"
|
27
|
+
config_param :command_parameter, :string, :default => nil
|
28
|
+
|
29
|
+
attr_reader :bs
|
30
|
+
|
31
|
+
include Fluent::Mixin::ConfigPlaceholders
|
32
|
+
|
33
|
+
def placeholders
|
34
|
+
[:percent]
|
35
|
+
end
|
36
|
+
|
37
|
+
def configure(conf)
|
38
|
+
super
|
39
|
+
|
40
|
+
begin
|
41
|
+
@compressor = COMPRESSOR_REGISTRY.lookup(@store_as).new(:buffer_type => @buffer_type, :log => log)
|
42
|
+
rescue => e
|
43
|
+
$log.warn "#{@store_as} not found. Use 'text' instead"
|
44
|
+
@compressor = TextCompressor.new
|
45
|
+
end
|
46
|
+
@compressor.configure(conf)
|
47
|
+
|
48
|
+
@formatter = Plugin.new_formatter(@format)
|
49
|
+
@formatter.configure(conf)
|
50
|
+
|
51
|
+
if @localtime
|
52
|
+
@path_slicer = Proc.new {|path|
|
53
|
+
Time.now.strftime(path)
|
54
|
+
}
|
55
|
+
else
|
56
|
+
@path_slicer = Proc.new {|path|
|
57
|
+
Time.now.utc.strftime(path)
|
58
|
+
}
|
59
|
+
end
|
60
|
+
|
61
|
+
if @azure_container.nil?
|
62
|
+
raise ConfigError, 'azure_container is needed'
|
63
|
+
end
|
64
|
+
|
65
|
+
@storage_type = case @azure_storage_type
|
66
|
+
when 'tables'
|
67
|
+
raise NotImplementedError
|
68
|
+
when 'queues'
|
69
|
+
raise NotImplementedError
|
70
|
+
else
|
71
|
+
'blob'
|
72
|
+
end
|
73
|
+
end
|
74
|
+
|
75
|
+
def start
|
76
|
+
super
|
77
|
+
|
78
|
+
if (!@azure_storage_account.nil? && !@azure_storage_access_key.nil?)
|
79
|
+
Azure.configure do |config|
|
80
|
+
config.storage_account_name = @azure_storage_account
|
81
|
+
config.storage_access_key = @azure_storage_access_key
|
82
|
+
end
|
83
|
+
end
|
84
|
+
@bs = Azure::Blob::BlobService.new
|
85
|
+
@bs.extend UploadService
|
86
|
+
|
87
|
+
ensure_container
|
88
|
+
end
|
89
|
+
|
90
|
+
def format(tag, time, record)
|
91
|
+
@formatter.format(tag, time, record)
|
92
|
+
end
|
93
|
+
|
94
|
+
def write(chunk)
|
95
|
+
i = 0
|
96
|
+
previous_path = nil
|
97
|
+
|
98
|
+
begin
|
99
|
+
path = @path_slicer.call(@path)
|
100
|
+
values_for_object_key = {
|
101
|
+
"path" => path,
|
102
|
+
"time_slice" => chunk.key,
|
103
|
+
"file_extension" => @compressor.ext,
|
104
|
+
"index" => i,
|
105
|
+
"uuid_flush" => uuid_random
|
106
|
+
}
|
107
|
+
storage_path = @azure_object_key_format.gsub(%r(%{[^}]+})) { |expr|
|
108
|
+
values_for_object_key[expr[2...expr.size-1]]
|
109
|
+
}
|
110
|
+
if (i > 0) && (storage_path == previous_path)
|
111
|
+
raise "duplicated path is generated. use %{index} in azure_object_key_format: path = #{storage_path}"
|
112
|
+
end
|
113
|
+
|
114
|
+
i += 1
|
115
|
+
previous_path = storage_path
|
116
|
+
end while blob_exists?(@azure_container, storage_path)
|
117
|
+
|
118
|
+
tmp = Tempfile.new("azure-")
|
119
|
+
begin
|
120
|
+
@compressor.compress(chunk, tmp)
|
121
|
+
tmp.close
|
122
|
+
|
123
|
+
options = {}
|
124
|
+
options[:content_type] = @compressor.content_type
|
125
|
+
options[:container] = @azure_container
|
126
|
+
options[:blob] = storage_path
|
127
|
+
|
128
|
+
@bs.upload(tmp.path, options)
|
129
|
+
end
|
130
|
+
end
|
131
|
+
|
132
|
+
private
|
133
|
+
def ensure_container
|
134
|
+
if ! @bs.list_containers.find { |c| c.name == @azure_container }
|
135
|
+
if @auto_create_container
|
136
|
+
@bs.create_container(@azure_container)
|
137
|
+
else
|
138
|
+
raise "The specified container does not exist: container = #{@azure_container}"
|
139
|
+
end
|
140
|
+
end
|
141
|
+
end
|
142
|
+
|
143
|
+
class Compressor
|
144
|
+
include Configurable
|
145
|
+
|
146
|
+
def initialize(opts = {})
|
147
|
+
super()
|
148
|
+
@buffer_type = opts[:buffer_type]
|
149
|
+
@log = opts[:log]
|
150
|
+
end
|
151
|
+
|
152
|
+
attr_reader :buffer_type, :log
|
153
|
+
|
154
|
+
def configure(conf)
|
155
|
+
super
|
156
|
+
end
|
157
|
+
|
158
|
+
def ext
|
159
|
+
end
|
160
|
+
|
161
|
+
def content_type
|
162
|
+
end
|
163
|
+
|
164
|
+
def compress(chunk, tmp)
|
165
|
+
end
|
166
|
+
|
167
|
+
private
|
168
|
+
|
169
|
+
def check_command(command, algo = nil)
|
170
|
+
require 'open3'
|
171
|
+
|
172
|
+
algo = command if algo.nil?
|
173
|
+
begin
|
174
|
+
Open3.capture3("#{command} -V")
|
175
|
+
rescue Errno::ENOENT
|
176
|
+
raise ConfigError, "'#{command}' utility must be in PATH for #{algo} compression"
|
177
|
+
end
|
178
|
+
end
|
179
|
+
end
|
180
|
+
|
181
|
+
class GzipCompressor < Compressor
|
182
|
+
def ext
|
183
|
+
'gz'.freeze
|
184
|
+
end
|
185
|
+
|
186
|
+
def content_type
|
187
|
+
'application/x-gzip'.freeze
|
188
|
+
end
|
189
|
+
|
190
|
+
def compress(chunk, tmp)
|
191
|
+
w = Zlib::GzipWriter.new(tmp)
|
192
|
+
chunk.write_to(w)
|
193
|
+
w.finish
|
194
|
+
ensure
|
195
|
+
w.finish rescue nil
|
196
|
+
end
|
197
|
+
end
|
198
|
+
|
199
|
+
class TextCompressor < Compressor
|
200
|
+
def ext
|
201
|
+
'txt'.freeze
|
202
|
+
end
|
203
|
+
|
204
|
+
def content_type
|
205
|
+
'text/plain'.freeze
|
206
|
+
end
|
207
|
+
|
208
|
+
def compress(chunk, tmp)
|
209
|
+
chunk.write_to(tmp)
|
210
|
+
end
|
211
|
+
end
|
212
|
+
|
213
|
+
class JsonCompressor < TextCompressor
|
214
|
+
def ext
|
215
|
+
'json'.freeze
|
216
|
+
end
|
217
|
+
|
218
|
+
def content_type
|
219
|
+
'application/json'.freeze
|
220
|
+
end
|
221
|
+
end
|
222
|
+
|
223
|
+
COMPRESSOR_REGISTRY = Registry.new(:azurestorage_compressor_type, 'fluent/plugin/azurestorage_compressor_')
|
224
|
+
{
|
225
|
+
'gzip' => GzipCompressor,
|
226
|
+
'json' => JsonCompressor,
|
227
|
+
'text' => TextCompressor
|
228
|
+
}.each { |name, compressor|
|
229
|
+
COMPRESSOR_REGISTRY.register(name, compressor)
|
230
|
+
}
|
231
|
+
|
232
|
+
def self.register_compressor(name, compressor)
|
233
|
+
COMPRESSOR_REGISTRY.register(name, compressor)
|
234
|
+
end
|
235
|
+
|
236
|
+
def blob_exists?(container, blob)
|
237
|
+
begin
|
238
|
+
@bs.get_blob_properties(container, blob)
|
239
|
+
true
|
240
|
+
rescue Azure::Core::Http::HTTPError => ex
|
241
|
+
raise if ex.status_code != 404
|
242
|
+
false
|
243
|
+
rescue Exception => e
|
244
|
+
raise e.message
|
245
|
+
end
|
246
|
+
end
|
247
|
+
end
|
248
|
+
end
|