fluent-plugin-azurestorage 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +2 -0
- data/VERSION +1 -1
- data/fluent-plugin-azurestorage.gemspec +2 -0
- data/lib/fluent/plugin/out_azurestorage.rb +9 -2
- data/lib/fluent/plugin/upload_service.rb +208 -0
- metadata +31 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 920277cb0b8ab63f16a0626a8c1e9ea341e62b28
+  data.tar.gz: 0a86562c7713af3c45f8863a503e29bdb5aed1cc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 61f809feb8edb5810251a3ee39daeb6a169b4178e932bb02c7ceadc821755ac0bfb20c0a4cc2b3ea4ba4e7e77606c8cccb4b06aafc64cbc0cc4cf07dc65f25a4
+  data.tar.gz: 477d61ae9f6faad494a1ef3d169929ad4416fa09ca7af21c450f70e0eaa70b1c3e85b9d3cff68dfa431ef204dac3216165e7691704bc571da99f0ce6a3c461d1
data/README.md
CHANGED
@@ -1,5 +1,7 @@
 # Azure Storage output plugin for Fluentd
 
+[](https://travis-ci.org/htgc/fluent-plugin-azurestorage)
+
 ## Overview
 
 Azure Storate output plugin buffers logs in local file and upload them to Azure Storage periodically.
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.6
+0.0.7
data/fluent-plugin-azurestorage.gemspec
CHANGED
@@ -21,4 +21,6 @@ Gem::Specification.new do |gem|
   gem.add_dependency "azure", "0.6.4"
   gem.add_dependency "fluent-mixin-config-placeholders", ">= 0.3.0"
   gem.add_development_dependency "rake", ">= 0.9.2"
+  gem.add_development_dependency "test-unit", ">= 3.0.8"
+  gem.add_development_dependency "test-unit-rr", ">= 1.0.3"
 end
data/lib/fluent/plugin/out_azurestorage.rb
CHANGED
@@ -7,6 +7,7 @@ module Fluent
     def initialize
       super
       require 'azure'
+      require 'fluent/plugin/upload_service'
       require 'zlib'
       require 'time'
       require 'tempfile'
@@ -81,6 +82,7 @@ module Fluent
         end
       end
       @bs = Azure::BlobService.new
+      @bs.extend UploadService
 
       ensure_container
     end
@@ -117,8 +119,13 @@ module Fluent
       begin
         @compressor.compress(chunk, tmp)
         tmp.close
-
-
+
+        options = {}
+        options[:content_type] = @compressor.content_type
+        options[:container] = @azure_container
+        options[:blob] = storage_path
+
+        @bs.upload(tmp.path, options)
       end
     end
 
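Taken together, these hunks change the flush path: the plugin now mixes UploadService into its Azure::BlobService instance via `extend` and hands every compressed chunk to `@bs.upload`, along with the compressor's content type and the target container and blob path. The short sketch below is not part of the gem; it only illustrates the same extension pattern outside Fluentd, and the container and blob names are placeholder values.

    require 'azure'
    require 'fluent/plugin/upload_service'

    # `extend` adds UploadService's methods to this one BlobService instance,
    # so `upload` can call the instance's own create_block_blob and
    # commit_blob_blocks internally.
    bs = Azure::BlobService.new
    bs.extend UploadService

    bs.upload('/tmp/chunk.gz',
              content_type: 'application/gzip',   # what the compressor would report
              container:    'fluentd-logs',       # placeholder container name
              blob:         'logs/20151011_0.gz') # placeholder blob path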
data/lib/fluent/plugin/upload_service.rb
ADDED
@@ -0,0 +1,208 @@
+require 'pathname'
+require 'thread'
+
+module UploadService
+  MAX_BLOCK_SIZE = 4 * 1024 * 1024 # 4MB
+  MAX_PUT_SIZE = 64 * 1024 * 1024 # 64MB
+  THREAD_COUNT = 10
+
+  def self.extended(base)
+  end
+
+  def upload(source, options = {})
+    @thread_count = options[:thread_count] || THREAD_COUNT
+
+    size = File.size(source)
+
+    if size <= MAX_PUT_SIZE
+      content = File.open(source, 'rb') { |file| file.read }
+      self.create_block_blob(options[:container], options[:blob], content)
+    else
+      blocks = upload_blocks(source, options)
+      complete_upload(blocks, options)
+    end
+  end
+
+  def complete_upload(blocks, options)
+    options[:blob_content_type] = options[:content_type]
+
+    self.commit_blob_blocks(options[:container], options[:blob], blocks.map{ |block| [block[:block_id], :uncommitted] }, options)
+  end
+
+  def upload_blocks(source, options)
+    pending = BlockList.new(compute_blocks(source, options))
+    completed = BlockList.new
+    errors = upload_in_threads(pending, completed)
+    if errors.empty?
+      completed.to_a.sort_by { |block| block[:block_number] }
+    else
+      msg = "multipart upload failed: #{errors.map(&:message).join("; ")}"
+      raise BlockUploadError.new(msg, errors)
+    end
+  end
+
+  def compute_blocks(source, options)
+    size = File.size(source)
+    offset = 0
+    block_number = 1
+    blocks = []
+    while offset < size
+      blocks << {
+        container: options[:container],
+        blob: options[:blob],
+        block_id: block_number.to_s.rjust(5, '0'),
+        block_number: block_number,
+        body: FilePart.new(
+          source: source,
+          offset: offset,
+          size: block_size(size, MAX_BLOCK_SIZE, offset)
+        )
+      }
+      block_number += 1
+      offset += MAX_BLOCK_SIZE
+    end
+    blocks
+  end
+
+  def upload_in_threads(pending, completed)
+    threads = []
+    @thread_count.times do
+      thread = Thread.new do
+        begin
+          while block = pending.shift
+            content = block[:body].read
+            block[:body].close
+
+            options = {}
+            options[:content_md5] = Base64.strict_encode64(Digest::MD5.digest(content))
+            options[:timeout] = 30
+
+            content_md5 = self.create_blob_block(block[:container], block[:blob], block[:block_id], content, options)
+
+            if content_md5 != options[:content_md5]
+              raise "The block is corrupt: block = #{block[:block_id]}"
+            end
+
+            completed.push(block_id: block[:block_id], block_number: block[:block_number])
+          end
+          nil
+        rescue => error
+          # keep other threads from uploading other parts
+          pending.clear!
+          error
+        end
+      end
+      thread.abort_on_exception = true
+      threads << thread
+    end
+    threads.map(&:value).compact
+  end
+
+  def block_size(total_size, block_size, offset)
+    if offset + block_size > total_size
+      total_size - offset
+    else
+      block_size
+    end
+  end
+
+  # @api private
+  class BlockList
+
+    def initialize(blocks = [])
+      @blocks = blocks
+      @mutex = Mutex.new
+    end
+
+    def push(block)
+      @mutex.synchronize { @blocks.push(block) }
+    end
+
+    def shift
+      @mutex.synchronize { @blocks.shift }
+    end
+
+    def clear!
+      @mutex.synchronize { @blocks.clear }
+    end
+
+    def to_a
+      @mutex.synchronize { @blocks.dup }
+    end
+
+  end
+
+  class BlockUploadError < StandardError
+
+    def initialize(message, errors)
+      @errors = errors
+      super(message)
+    end
+
+    attr_reader :errors
+
+  end
+
+  class FilePart
+
+    def initialize(options = {})
+      @source = options[:source]
+      @first_byte = options[:offset]
+      @last_byte = @first_byte + options[:size]
+      @size = options[:size]
+      @file = nil
+    end
+
+    # @return [String,Pathname,File,Tempfile]
+    attr_reader :source
+
+    # @return [Integer]
+    attr_reader :first_byte
+
+    # @return [Integer]
+    attr_reader :last_byte
+
+    # @return [Integer]
+    attr_reader :size
+
+    def read(bytes = nil, output_buffer = nil)
+      open_file unless @file
+      read_from_file(bytes, output_buffer)
+    end
+
+    def rewind
+      if @file
+        @file.seek(@first_byte)
+        @position = @first_byte
+      end
+      0
+    end
+
+    def close
+      @file.close if @file
+    end
+
+    private
+
+    def open_file
+      @file = File.open(@source, 'rb')
+      rewind
+    end
+
+    def read_from_file(bytes, output_buffer)
+      if bytes
+        data = @file.read([remaining_bytes, bytes].min)
+        data = nil if data == ''
+      else
+        data = @file.read(remaining_bytes)
+      end
+      @position += data ? data.bytesize : 0
+      output_buffer ? output_buffer.replace(data || '') : data
+    end
+
+    def remaining_bytes
+      @last_byte - @position
+    end
+
+  end
+end
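upload_service.rb implements a simple multipart upload on top of the azure gem: blobs of 64 MB or less are written with a single create_block_blob call, while larger files are split into 4 MB blocks (each wrapped in a FilePart covering an offset/size window of the source), uploaded by up to 10 threads with a per-block MD5 check, and finally assembled in block-number order with commit_blob_blocks. The standalone snippet below is not part of the gem; it only illustrates how compute_blocks would slice a source file, using a made-up 10 MB size.

    # How compute_blocks above slices a 10 MB source file into 4 MB blocks:
    MAX_BLOCK_SIZE = 4 * 1024 * 1024
    size   = 10 * 1024 * 1024
    offset = 0
    block_number = 1
    while offset < size
      len = offset + MAX_BLOCK_SIZE > size ? size - offset : MAX_BLOCK_SIZE
      puts "block #{block_number.to_s.rjust(5, '0')}: offset=#{offset}, size=#{len}"
      block_number += 1
      offset += MAX_BLOCK_SIZE
    end
    # block 00001: offset=0,       size=4194304
    # block 00002: offset=4194304, size=4194304
    # block 00003: offset=8388608, size=2097152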
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-azurestorage
 version: !ruby/object:Gem::Version
-  version: 0.0.6
+  version: 0.0.7
 platform: ruby
 authors:
 - Hidemasa Togashi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-10-
+date: 2015-10-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -72,6 +72,34 @@ dependencies:
     - - '>='
       - !ruby/object:Gem::Version
         version: 0.9.2
+- !ruby/object:Gem::Dependency
+  name: test-unit
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: 3.0.8
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: 3.0.8
+- !ruby/object:Gem::Dependency
+  name: test-unit-rr
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: 1.0.3
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: 1.0.3
 description: Azure Storage output plugin for Fluentd event collector
 email:
 - togachiro@gmail.com
@@ -91,6 +119,7 @@ files:
 - lib/fluent/plugin/azurestorage_compressor_lzma2.rb
 - lib/fluent/plugin/azurestorage_compressor_lzo.rb
 - lib/fluent/plugin/out_azurestorage.rb
+- lib/fluent/plugin/upload_service.rb
 - test/test_out_azurestorage.rb
 homepage: https://github.com/htgc/fluent-plugin-azurestorage
 licenses: