fluent-plugin-azurestorage 0.0.7 → 0.0.8

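Two files change in this release. The `UploadService` module below is removed and re-added with no character-level differences visible in this extract (presumably a whitespace or line-ending normalization). The test suite's `create_driver` switches from `Fluent::Test::BufferedOutputTestDriver` to `Fluent::Test::TimeSlicedOutputTestDriver`, and `test_chunk_to_write` now expects an array of per-slice chunk payloads instead of a single string.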
@@ -1,208 +1,208 @@
- require 'pathname'
- require 'thread'
-
- module UploadService
-   MAX_BLOCK_SIZE = 4 * 1024 * 1024 # 4MB
-   MAX_PUT_SIZE = 64 * 1024 * 1024 # 64MB
-   THREAD_COUNT = 10
-
-   def self.extended(base)
-   end
-
-   def upload(source, options = {})
-     @thread_count = options[:thread_count] || THREAD_COUNT
-
-     size = File.size(source)
-
-     if size <= MAX_PUT_SIZE
-       content = File.open(source, 'rb') { |file| file.read }
-       self.create_block_blob(options[:container], options[:blob], content)
-     else
-       blocks = upload_blocks(source, options)
-       complete_upload(blocks, options)
-     end
-   end
-
-   def complete_upload(blocks, options)
-     options[:blob_content_type] = options[:content_type]
-
-     self.commit_blob_blocks(options[:container], options[:blob], blocks.map{ |block| [block[:block_id], :uncommitted] }, options)
-   end
-
-   def upload_blocks(source, options)
-     pending = BlockList.new(compute_blocks(source, options))
-     completed = BlockList.new
-     errors = upload_in_threads(pending, completed)
-     if errors.empty?
-       completed.to_a.sort_by { |block| block[:block_number] }
-     else
-       msg = "multipart upload failed: #{errors.map(&:message).join("; ")}"
-       raise BlockUploadError.new(msg, errors)
-     end
-   end
-
-   def compute_blocks(source, options)
-     size = File.size(source)
-     offset = 0
-     block_number = 1
-     blocks = []
-     while offset < size
-       blocks << {
-         container: options[:container],
-         blob: options[:blob],
-         block_id: block_number.to_s.rjust(5, '0'),
-         block_number: block_number,
-         body: FilePart.new(
-           source: source,
-           offset: offset,
-           size: block_size(size, MAX_BLOCK_SIZE, offset)
-         )
-       }
-       block_number += 1
-       offset += MAX_BLOCK_SIZE
-     end
-     blocks
-   end
-
-   def upload_in_threads(pending, completed)
-     threads = []
-     @thread_count.times do
-       thread = Thread.new do
-         begin
-           while block = pending.shift
-             content = block[:body].read
-             block[:body].close
-
-             options = {}
-             options[:content_md5] = Base64.strict_encode64(Digest::MD5.digest(content))
-             options[:timeout] = 30
-
-             content_md5 = self.create_blob_block(block[:container], block[:blob], block[:block_id], content, options)
-
-             if content_md5 != options[:content_md5]
-               raise "The block is corrupt: block = #{block[:block_id]}"
-             end
-
-             completed.push(block_id: block[:block_id], block_number: block[:block_number])
-           end
-           nil
-         rescue => error
-           # keep other threads from uploading other parts
-           pending.clear!
-           error
-         end
-       end
-       thread.abort_on_exception = true
-       threads << thread
-     end
-     threads.map(&:value).compact
-   end
-
-   def block_size(total_size, block_size, offset)
-     if offset + block_size > total_size
-       total_size - offset
-     else
-       block_size
-     end
-   end
-
-   # @api private
-   class BlockList
-
-     def initialize(blocks = [])
-       @blocks = blocks
-       @mutex = Mutex.new
-     end
-
-     def push(block)
-       @mutex.synchronize { @blocks.push(block) }
-     end
-
-     def shift
-       @mutex.synchronize { @blocks.shift }
-     end
-
-     def clear!
-       @mutex.synchronize { @blocks.clear }
-     end
-
-     def to_a
-       @mutex.synchronize { @blocks.dup }
-     end
-
-   end
-
-   class BlockUploadError < StandardError
-
-     def initialize(message, errors)
-       @errors = errors
-       super(message)
-     end
-
-     attr_reader :errors
-
-   end
-
-   class FilePart
-
-     def initialize(options = {})
-       @source = options[:source]
-       @first_byte = options[:offset]
-       @last_byte = @first_byte + options[:size]
-       @size = options[:size]
-       @file = nil
-     end
-
-     # @return [String,Pathname,File,Tempfile]
-     attr_reader :source
-
-     # @return [Integer]
-     attr_reader :first_byte
-
-     # @return [Integer]
-     attr_reader :last_byte
-
-     # @return [Integer]
-     attr_reader :size
-
-     def read(bytes = nil, output_buffer = nil)
-       open_file unless @file
-       read_from_file(bytes, output_buffer)
-     end
-
-     def rewind
-       if @file
-         @file.seek(@first_byte)
-         @position = @first_byte
-       end
-       0
-     end
-
-     def close
-       @file.close if @file
-     end
-
-     private
-
-     def open_file
-       @file = File.open(@source, 'rb')
-       rewind
-     end
-
-     def read_from_file(bytes, output_buffer)
-       if bytes
-         data = @file.read([remaining_bytes, bytes].min)
-         data = nil if data == ''
-       else
-         data = @file.read(remaining_bytes)
-       end
-       @position += data ? data.bytesize : 0
-       output_buffer ? output_buffer.replace(data || '') : data
-     end
-
-     def remaining_bytes
-       @last_byte - @position
-     end
-
-   end
+ require 'pathname'
+ require 'thread'
+
+ module UploadService
+   MAX_BLOCK_SIZE = 4 * 1024 * 1024 # 4MB
+   MAX_PUT_SIZE = 64 * 1024 * 1024 # 64MB
+   THREAD_COUNT = 10
+
+   def self.extended(base)
+   end
+
+   def upload(source, options = {})
+     @thread_count = options[:thread_count] || THREAD_COUNT
+
+     size = File.size(source)
+
+     if size <= MAX_PUT_SIZE
+       content = File.open(source, 'rb') { |file| file.read }
+       self.create_block_blob(options[:container], options[:blob], content)
+     else
+       blocks = upload_blocks(source, options)
+       complete_upload(blocks, options)
+     end
+   end
+
+   def complete_upload(blocks, options)
+     options[:blob_content_type] = options[:content_type]
+
+     self.commit_blob_blocks(options[:container], options[:blob], blocks.map{ |block| [block[:block_id], :uncommitted] }, options)
+   end
+
+   def upload_blocks(source, options)
+     pending = BlockList.new(compute_blocks(source, options))
+     completed = BlockList.new
+     errors = upload_in_threads(pending, completed)
+     if errors.empty?
+       completed.to_a.sort_by { |block| block[:block_number] }
+     else
+       msg = "multipart upload failed: #{errors.map(&:message).join("; ")}"
+       raise BlockUploadError.new(msg, errors)
+     end
+   end
+
+   def compute_blocks(source, options)
+     size = File.size(source)
+     offset = 0
+     block_number = 1
+     blocks = []
+     while offset < size
+       blocks << {
+         container: options[:container],
+         blob: options[:blob],
+         block_id: block_number.to_s.rjust(5, '0'),
+         block_number: block_number,
+         body: FilePart.new(
+           source: source,
+           offset: offset,
+           size: block_size(size, MAX_BLOCK_SIZE, offset)
+         )
+       }
+       block_number += 1
+       offset += MAX_BLOCK_SIZE
+     end
+     blocks
+   end
+
+   def upload_in_threads(pending, completed)
+     threads = []
+     @thread_count.times do
+       thread = Thread.new do
+         begin
+           while block = pending.shift
+             content = block[:body].read
+             block[:body].close
+
+             options = {}
+             options[:content_md5] = Base64.strict_encode64(Digest::MD5.digest(content))
+             options[:timeout] = 30
+
+             content_md5 = self.create_blob_block(block[:container], block[:blob], block[:block_id], content, options)
+
+             if content_md5 != options[:content_md5]
+               raise "The block is corrupt: block = #{block[:block_id]}"
+             end
+
+             completed.push(block_id: block[:block_id], block_number: block[:block_number])
+           end
+           nil
+         rescue => error
+           # keep other threads from uploading other parts
+           pending.clear!
+           error
+         end
+       end
+       thread.abort_on_exception = true
+       threads << thread
+     end
+     threads.map(&:value).compact
+   end
+
+   def block_size(total_size, block_size, offset)
+     if offset + block_size > total_size
+       total_size - offset
+     else
+       block_size
+     end
+   end
+
+   # @api private
+   class BlockList
+
+     def initialize(blocks = [])
+       @blocks = blocks
+       @mutex = Mutex.new
+     end
+
+     def push(block)
+       @mutex.synchronize { @blocks.push(block) }
+     end
+
+     def shift
+       @mutex.synchronize { @blocks.shift }
+     end
+
+     def clear!
+       @mutex.synchronize { @blocks.clear }
+     end
+
+     def to_a
+       @mutex.synchronize { @blocks.dup }
+     end
+
+   end
+
+   class BlockUploadError < StandardError
+
+     def initialize(message, errors)
+       @errors = errors
+       super(message)
+     end
+
+     attr_reader :errors
+
+   end
+
+   class FilePart
+
+     def initialize(options = {})
+       @source = options[:source]
+       @first_byte = options[:offset]
+       @last_byte = @first_byte + options[:size]
+       @size = options[:size]
+       @file = nil
+     end
+
+     # @return [String,Pathname,File,Tempfile]
+     attr_reader :source
+
+     # @return [Integer]
+     attr_reader :first_byte
+
+     # @return [Integer]
+     attr_reader :last_byte
+
+     # @return [Integer]
+     attr_reader :size
+
+     def read(bytes = nil, output_buffer = nil)
+       open_file unless @file
+       read_from_file(bytes, output_buffer)
+     end
+
+     def rewind
+       if @file
+         @file.seek(@first_byte)
+         @position = @first_byte
+       end
+       0
+     end
+
+     def close
+       @file.close if @file
+     end
+
+     private
+
+     def open_file
+       @file = File.open(@source, 'rb')
+       rewind
+     end
+
+     def read_from_file(bytes, output_buffer)
+       if bytes
+         data = @file.read([remaining_bytes, bytes].min)
+         data = nil if data == ''
+       else
+         data = @file.read(remaining_bytes)
+       end
+       @position += data ? data.bytesize : 0
+       output_buffer ? output_buffer.replace(data || '') : data
+     end
+
+     def remaining_bytes
+       @last_byte - @position
+     end
+
+   end
  end
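
For context, `UploadService` is written to be `extend`ed onto a blob client that already provides `create_block_blob`, `create_blob_block`, and `commit_blob_blocks`. The sketch below is illustrative only: the stub client is hypothetical scaffolding, not part of the plugin, and exists to show the contract the module relies on. Files up to MAX_PUT_SIZE (64MB) go through a single `create_block_blob` call; anything larger is split into MAX_BLOCK_SIZE (4MB) blocks uploaded by THREAD_COUNT (10) worker threads and then committed in block order.

    require 'base64'
    require 'digest'

    # Hypothetical stand-in for an Azure blob client; UploadService only
    # needs these three methods on whatever object extends it.
    class StubBlobClient
      def create_block_blob(container, blob, content)
        puts "put #{container}/#{blob} (#{content.bytesize} bytes, single call)"
      end

      def create_blob_block(container, blob, block_id, content, options)
        puts "put block #{block_id} (#{content.bytesize} bytes)"
        options[:content_md5] # echo the MD5 back, as the service does on success
      end

      def commit_blob_blocks(container, blob, block_list, options)
        puts "commit #{block_list.size} blocks as #{container}/#{blob}"
      end
    end

    client = StubBlobClient.new.extend(UploadService)
    client.upload('/tmp/big.log',
                  container: 'logs',
                  blob: 'big.log',
                  content_type: 'text/plain')

Note that `upload_in_threads` computes each block's MD5 and raises unless `create_blob_block` returns the same digest, and that `UploadService` itself only requires 'pathname' and 'thread', so Base64 and Digest must already be loaded by the host.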
@@ -1,224 +1,224 @@
- require 'fluent/test'
- require 'fluent/plugin/out_azurestorage'
-
- require 'test/unit/rr'
- require 'zlib'
- require 'fileutils'
-
- class AzureStorageOutputTest < Test::Unit::TestCase
-   def setup
-     require 'azure'
-     Fluent::Test.setup
-   end
-
-   CONFIG = %[
-     azure_storage_account test_storage_account
-     azure_storage_access_key dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=
-     azure_container test_container
-     path log
-     utc
-     buffer_type memory
-   ]
-
-   def create_driver(conf = CONFIG)
-     Fluent::Test::BufferedOutputTestDriver.new(Fluent::AzureStorageOutput) do
-       def write(chunk)
-         chunk.read
-       end
-
-       private
-
-       def ensure_container
-       end
-
-     end.configure(conf)
-   end
-
-   def test_configure
-     d = create_driver
-     assert_equal 'test_storage_account', d.instance.azure_storage_account
-     assert_equal 'dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=', d.instance.azure_storage_access_key
-     assert_equal 'test_container', d.instance.azure_container
-     assert_equal 'log', d.instance.path
-     assert_equal 'gz', d.instance.instance_variable_get(:@compressor).ext
-     assert_equal 'application/x-gzip', d.instance.instance_variable_get(:@compressor).content_type
-   end
-
-   def test_configure_with_mime_type_json
-     conf = CONFIG.clone
-     conf << "\nstore_as json\n"
-     d = create_driver(conf)
-     assert_equal 'json', d.instance.instance_variable_get(:@compressor).ext
-     assert_equal 'application/json', d.instance.instance_variable_get(:@compressor).content_type
-   end
-
-   def test_configure_with_mime_type_text
-     conf = CONFIG.clone
-     conf << "\nstore_as text\n"
-     d = create_driver(conf)
-     assert_equal 'txt', d.instance.instance_variable_get(:@compressor).ext
-     assert_equal 'text/plain', d.instance.instance_variable_get(:@compressor).content_type
-   end
-
-   def test_configure_with_mime_type_lzo
-     conf = CONFIG.clone
-     conf << "\nstore_as lzo\n"
-     d = create_driver(conf)
-     assert_equal 'lzo', d.instance.instance_variable_get(:@compressor).ext
-     assert_equal 'application/x-lzop', d.instance.instance_variable_get(:@compressor).content_type
-   rescue => e
-     # TODO: replace code with disable lzop command
-     assert(e.is_a?(Fluent::ConfigError))
-   end
-
-   def test_path_slicing
-     config = CONFIG.clone.gsub(/path\slog/, "path log/%Y/%m/%d")
-     d = create_driver(config)
-     path_slicer = d.instance.instance_variable_get(:@path_slicer)
-     path = d.instance.instance_variable_get(:@path)
-     slice = path_slicer.call(path)
-     assert_equal slice, Time.now.utc.strftime("log/%Y/%m/%d")
-   end
-
-   def test_path_slicing_utc
-     config = CONFIG.clone.gsub(/path\slog/, "path log/%Y/%m/%d")
-     config << "\nutc\n"
-     d = create_driver(config)
-     path_slicer = d.instance.instance_variable_get(:@path_slicer)
-     path = d.instance.instance_variable_get(:@path)
-     slice = path_slicer.call(path)
-     assert_equal slice, Time.now.utc.strftime("log/%Y/%m/%d")
-   end
-
-   def test_format
-     d = create_driver
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n]
-     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]
-
-     d.run
-   end
-
-   def test_format_included_tag_and_time
-     config = [CONFIG, 'include_tag_key true', 'include_time_key true'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
-     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
-
-     d.run
-   end
-
-   def test_format_with_format_ltsv
-     config = [CONFIG, 'format ltsv'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1, "b"=>1}, time)
-     d.emit({"a"=>2, "b"=>2}, time)
-
-     d.expect_format %[a:1\tb:1\n]
-     d.expect_format %[a:2\tb:2\n]
-
-     d.run
-   end
-
-   def test_format_with_format_json
-     config = [CONFIG, 'format json'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[{"a":1}\n]
-     d.expect_format %[{"a":2}\n]
-
-     d.run
-   end
-
-   def test_format_with_format_json_included_tag
-     config = [CONFIG, 'format json', 'include_tag_key true'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[{"a":1,"tag":"test"}\n]
-     d.expect_format %[{"a":2,"tag":"test"}\n]
-
-     d.run
-   end
-
-   def test_format_with_format_json_included_time
-     config = [CONFIG, 'format json', 'include_time_key true'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[{"a":1,"time":"2011-01-02T13:14:15Z"}\n]
-     d.expect_format %[{"a":2,"time":"2011-01-02T13:14:15Z"}\n]
-
-     d.run
-   end
-
-   def test_format_with_format_json_included_tag_and_time
-     config = [CONFIG, 'format json', 'include_tag_key true', 'include_time_key true'].join("\n")
-     d = create_driver(config)
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     d.expect_format %[{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
-     d.expect_format %[{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
-
-     d.run
-   end
-
-   def test_chunk_to_write
-     d = create_driver
-
-     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-     d.emit({"a"=>1}, time)
-     d.emit({"a"=>2}, time)
-
-     # AzureStorageOutputTest#write returns chunk.read
-     data = d.run
-
-     assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
-                  %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
-                  data
-   end
-
-   CONFIG_TIME_SLICE = %[
-     hostname testing.node.local
-     azure_storage_account test_storage_account
-     azure_storage_access_key dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=
-     azure_container test_container
-     azure_object_key_format %{path}/events/ts=%{time_slice}/events_%{index}-%{hostname}.%{file_extension}
-     time_slice_format %Y%m%d-%H
-     path log
-     utc
-     buffer_type memory
-     log_level debug
-   ]
-
-   def create_time_sliced_driver(conf = CONFIG_TIME_SLICE)
-     d = Fluent::Test::TimeSlicedOutputTestDriver.new(Fluent::AzureStorageOutput) do
-     end.configure(conf)
-     d
-   end
-
- end
+ require 'fluent/test'
+ require 'fluent/plugin/out_azurestorage'
+
+ require 'test/unit/rr'
+ require 'zlib'
+ require 'fileutils'
+
+ class AzureStorageOutputTest < Test::Unit::TestCase
+   def setup
+     require 'azure'
+     Fluent::Test.setup
+   end
+
+   CONFIG = %[
+     azure_storage_account test_storage_account
+     azure_storage_access_key dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=
+     azure_container test_container
+     path log
+     utc
+     buffer_type memory
+   ]
+
+   def create_driver(conf = CONFIG)
+     Fluent::Test::TimeSlicedOutputTestDriver.new(Fluent::AzureStorageOutput) do
+       def write(chunk)
+         chunk.read
+       end
+
+       private
+
+       def ensure_container
+       end
+
+     end.configure(conf)
+   end
+
+   def test_configure
+     d = create_driver
+     assert_equal 'test_storage_account', d.instance.azure_storage_account
+     assert_equal 'dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=', d.instance.azure_storage_access_key
+     assert_equal 'test_container', d.instance.azure_container
+     assert_equal 'log', d.instance.path
+     assert_equal 'gz', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/x-gzip', d.instance.instance_variable_get(:@compressor).content_type
+   end
+
+   def test_configure_with_mime_type_json
+     conf = CONFIG.clone
+     conf << "\nstore_as json\n"
+     d = create_driver(conf)
+     assert_equal 'json', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/json', d.instance.instance_variable_get(:@compressor).content_type
+   end
+
+   def test_configure_with_mime_type_text
+     conf = CONFIG.clone
+     conf << "\nstore_as text\n"
+     d = create_driver(conf)
+     assert_equal 'txt', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'text/plain', d.instance.instance_variable_get(:@compressor).content_type
+   end
+
+   def test_configure_with_mime_type_lzo
+     conf = CONFIG.clone
+     conf << "\nstore_as lzo\n"
+     d = create_driver(conf)
+     assert_equal 'lzo', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/x-lzop', d.instance.instance_variable_get(:@compressor).content_type
+   rescue => e
+     # TODO: replace code with disable lzop command
+     assert(e.is_a?(Fluent::ConfigError))
+   end
+
+   def test_path_slicing
+     config = CONFIG.clone.gsub(/path\slog/, "path log/%Y/%m/%d")
+     d = create_driver(config)
+     path_slicer = d.instance.instance_variable_get(:@path_slicer)
+     path = d.instance.instance_variable_get(:@path)
+     slice = path_slicer.call(path)
+     assert_equal slice, Time.now.utc.strftime("log/%Y/%m/%d")
+   end
+
+   def test_path_slicing_utc
+     config = CONFIG.clone.gsub(/path\slog/, "path log/%Y/%m/%d")
+     config << "\nutc\n"
+     d = create_driver(config)
+     path_slicer = d.instance.instance_variable_get(:@path_slicer)
+     path = d.instance.instance_variable_get(:@path)
+     slice = path_slicer.call(path)
+     assert_equal slice, Time.now.utc.strftime("log/%Y/%m/%d")
+   end
+
+   def test_format
+     d = create_driver
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n]
+     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]
+
+     d.run
+   end
+
+   def test_format_included_tag_and_time
+     config = [CONFIG, 'include_tag_key true', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+     d.expect_format %[2011-01-02T13:14:15Z\ttest\t{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+
+     d.run
+   end
+
+   def test_format_with_format_ltsv
+     config = [CONFIG, 'format ltsv'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1, "b"=>1}, time)
+     d.emit({"a"=>2, "b"=>2}, time)
+
+     d.expect_format %[a:1\tb:1\n]
+     d.expect_format %[a:2\tb:2\n]
+
+     d.run
+   end
+
+   def test_format_with_format_json
+     config = [CONFIG, 'format json'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[{"a":1}\n]
+     d.expect_format %[{"a":2}\n]
+
+     d.run
+   end
+
+   def test_format_with_format_json_included_tag
+     config = [CONFIG, 'format json', 'include_tag_key true'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[{"a":1,"tag":"test"}\n]
+     d.expect_format %[{"a":2,"tag":"test"}\n]
+
+     d.run
+   end
+
+   def test_format_with_format_json_included_time
+     config = [CONFIG, 'format json', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[{"a":1,"time":"2011-01-02T13:14:15Z"}\n]
+     d.expect_format %[{"a":2,"time":"2011-01-02T13:14:15Z"}\n]
+
+     d.run
+   end
+
+   def test_format_with_format_json_included_tag_and_time
+     config = [CONFIG, 'format json', 'include_tag_key true', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     d.expect_format %[{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+     d.expect_format %[{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+
+     d.run
+   end
+
+   def test_chunk_to_write
+     d = create_driver
+
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+
+     # AzureStorageOutputTest#write returns chunk.read
+     data = d.run
+
+     assert_equal [%[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
+                   %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]],
+                  data
+   end
+
+   CONFIG_TIME_SLICE = %[
+     hostname testing.node.local
+     azure_storage_account test_storage_account
+     azure_storage_access_key dGVzdF9zdG9yYWdlX2FjY2Vzc19rZXk=
+     azure_container test_container
+     azure_object_key_format %{path}/events/ts=%{time_slice}/events_%{index}-%{hostname}.%{file_extension}
+     time_slice_format %Y%m%d-%H
+     path log
+     utc
+     buffer_type memory
+     log_level debug
+   ]
+
+   def create_time_sliced_driver(conf = CONFIG_TIME_SLICE)
+     d = Fluent::Test::TimeSlicedOutputTestDriver.new(Fluent::AzureStorageOutput) do
+     end.configure(conf)
+     d
+   end
+
+ end
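
The functional change in this file is in `create_driver`: the tests now run under `Fluent::Test::TimeSlicedOutputTestDriver` instead of `Fluent::Test::BufferedOutputTestDriver`. Consistent with that, `test_chunk_to_write` wraps its expected payload in an array, since the time-sliced driver's `run` collects one `write` result per time slice; both records here are emitted at the same timestamp, so a single chunk is expected. A condensed before/after of the assertion:

    # 0.0.7 (BufferedOutputTestDriver): run returns the single write result
    assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
                 %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
                 data

    # 0.0.8 (TimeSlicedOutputTestDriver): run returns an array, one entry
    # per time slice that received events
    assert_equal [%[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
                  %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]],
                 data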