fluent-plugin-td 0.10.17 → 0.10.18
- checksums.yaml +7 -0
- data/.travis.yml +11 -0
- data/AUTHORS +1 -0
- data/ChangeLog +9 -0
- data/Gemfile.fluentd.lt.0.10.43 +4 -0
- data/README.rdoc +3 -1
- data/Rakefile +1 -2
- data/VERSION +1 -1
- data/fluent-plugin-td.gemspec +2 -1
- data/lib/fluent/plugin/out_tditem.rb +105 -0
- data/lib/fluent/plugin/out_tdlog.rb +28 -13
- data/lib/fluent/plugin/td_plugin_util.rb +107 -0
- data/test/plugin/test_out_tditem.rb +84 -0
- data/test/plugin/test_out_tdlog.rb +106 -0
- data/test/test_helper.rb +71 -0
- metadata +35 -27
- data/test/out_tdlog.rb +0 -67
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 04dd7b7e2f098cf317a600be1125138cc6d9a440
+  data.tar.gz: 3b211b46e282e9e94189dcc4a388223b45bbc72d
+SHA512:
+  metadata.gz: 3c65e15dcaf7ab967da39f5479088d30a22c37de275e2e50a6fb17577e3eb6202167f0bec453eb5729cd7eab745d3c2a926533aef020cb4093ee779ef3e4e77e
+  data.tar.gz: dc56feaad2c8a2c554a03fc95a64cf8e9dd84941101efd44f43b5af5258da74f3308af54687ad42ed936359770ac5b8b9ee7a7feab4dedc387621dabfec8fe3b
data/.travis.yml
ADDED
data/AUTHORS
CHANGED
data/ChangeLog
CHANGED
@@ -1,3 +1,12 @@
+Release 0.10.18 - 2014/03/31
+
+* use_ssl is true by default
+* Support log_level option
+* Add endpoint option to set treasuredata.com for the future
+* Add experimental out_tditem plugin for item table
+* Update td-client to v0.8.58
+
+
 Release 0.10.17 - 2013/12/05
 
 * Change table create / check mechanizm
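For reference, the options this release introduces (endpoint, the new use_ssl default, and log_level support on Fluentd >= 0.10.43) can be exercised the same way the bundled tests further below do. A minimal sketch, assuming fluentd and this plugin are installed; the endpoint value and buffer path are placeholders rather than defaults taken from the source:

    require 'fluent/test'
    require 'fluent/plugin/out_tdlog'

    Fluent::Test.setup

    # Hypothetical driver setup mirroring test/plugin/test_out_tdlog.rb,
    # extended with the endpoint and use_ssl options added in 0.10.18.
    conf = %[
      apikey testkey
      buffer_path /tmp/td-buffer
      database test
      table table
      endpoint api.treasuredata.com
      use_ssl true
    ]

    driver = Fluent::Test::BufferedOutputTestDriver.new(Fluent::TreasureDataLogOutput).configure(conf)
    driver.instance.instance_variable_get(:@use_ssl)   # => true
    driver.instance.instance_variable_get(:@endpoint)  # => "api.treasuredata.com"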
data/README.rdoc
CHANGED
data/Rakefile
CHANGED
data/VERSION
CHANGED
@@ -1 +1 @@
-0.10.17
+0.10.18
data/fluent-plugin-td.gemspec
CHANGED
@@ -17,6 +17,7 @@ Gem::Specification.new do |gem|
   gem.require_paths = ['lib']
 
   gem.add_dependency "fluentd", "~> 0.10.27"
-  gem.add_dependency "td-client", "~> 0.8.
+  gem.add_dependency "td-client", "~> 0.8.58"
   gem.add_development_dependency "rake", ">= 0.9.2"
+  gem.add_development_dependency "webmock", "~> 1.16"
 end
data/lib/fluent/plugin/out_tditem.rb
ADDED
@@ -0,0 +1,105 @@
+module Fluent
+  class TreasureDataItemOutput < BufferedOutput
+    Plugin.register_output('tditem', self)
+
+    require_relative 'td_plugin_util'
+    include TDPluginUtil
+
+    IMPORT_SIZE_LIMIT = 32 * 1024 * 1024
+
+    # To support log_level option since Fluentd v0.10.43
+    unless method_defined?(:log)
+      define_method(:log) { $log }
+    end
+
+    config_param :apikey, :string
+    config_param :database, :string
+    config_param :table, :string
+    config_param :tmpdir, :string, :default => nil
+    #config_param :auto_create_table, :bool, :default => true # TODO: implement if user wants this feature
+
+    config_param :endpoint, :string, :default => nil
+    config_param :use_ssl, :bool, :default => true
+    config_param :http_proxy, :string, :default => nil
+    config_param :connect_timeout, :integer, :default => nil
+    config_param :read_timeout, :integer, :default => nil
+    config_param :send_timeout, :integer, :default => nil
+
+    def initialize
+      super
+
+      @auto_create_table = false
+      @tmpdir_prefix = 'tditem-'.freeze
+      @key_num_limit = 1024 # Item table default limitation
+      @record_size_limit = 32 * 1024 * 1024 # TODO
+      @empty_gz_data = TreasureData::API.create_empty_gz_data
+      @user_agent = "fluent-plugin-td-item: 0.10.18".freeze # TODO: automatic increment version
+
+      # overwrite default configurations
+      @buffer_type = 'file'
+      @flush_interval = 300
+    end
+
+    def configure(conf)
+      super
+
+      # overwrite default value of buffer_chunk_limit
+      if @buffer.respond_to?(:buffer_chunk_limit=) && !conf.has_key?('buffer_chunk_limit')
+        @buffer.buffer_chunk_limit = IMPORT_SIZE_LIMIT
+      end
+
+      validate_database_and_table_name(@database, @table, conf)
+      @key = "#{@database}.#{@table}".freeze
+      @use_ssl = parse_bool_parameter(@use_ssl) if @use_ssl.instance_of?(String)
+      FileUtils.mkdir_p(@tmpdir) unless @tmpdir.nil?
+
+      if @endpoint.nil?
+        $log.warn "tditem plugin will change the API endpoint from api.treasure-data.com to api.treasuredata.com"
+        $log.warn "If want to keep api.treasure-data.com, please set 'endpoint api.treasure-data.com' in tditem configuration"
+      end
+    end
+
+    def start
+      super
+
+      client_opts = {
+        :ssl => @use_ssl, :http_proxy => @http_proxy, :user_agent => @user_agent, :endpoint => @endpoint,
+        :connect_timeout => @connect_timeout, :read_timeout => @read_timeout, :send_timeout => @send_timeout
+      }
+      @client = TreasureData::Client.new(@apikey, client_opts)
+
+      check_table_existence(@database, @table)
+    end
+
+    def emit(tag, es, chain)
+      super(tag, es, chain, @key)
+    end
+
+    def format_stream(tag, es)
+      out = ''
+      off = out.bytesize
+      es.each { |time, record|
+        if record.size > @key_num_limit
+          log.error "Too many number of keys (#{record.size} keys)" # TODO include summary of the record
+          next
+        end
+
+        begin
+          record.to_msgpack(out)
+        rescue RangeError
+          TreasureData::API.normalized_msgpack(record, out)
+        end
+
+        noff = out.bytesize
+        sz = noff - off
+        if sz > @record_size_limit
+          # TODO don't raise error
+          #raise "Size of a record too large (#{sz} bytes)" # TODO include summary of the record
+          log.warn "Size of a record too large (#{sz} bytes): #{summarize_record(record)}"
+        end
+        off = noff
+      }
+      out
+    end
+  end
+end
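format_stream above packs every record into one MessagePack byte stream and only logs when a record exceeds the key or size limits. A small standalone sketch of that packing step, assuming only the msgpack gem (the plugin itself passes the buffer into to_msgpack to append in place):

    require 'msgpack'

    # Append each record to a single buffer: the same stream the plugin
    # later gzips and ships as msgpack.gz.
    out = ''
    records = [{ "id" => 1, "name" => "foo" }, { "id" => 2, "name" => "bar" }]

    records.each do |record|
      off = out.bytesize
      out << record.to_msgpack            # one MessagePack object per record
      puts "record packed into #{out.bytesize - off} bytes"
    end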
data/lib/fluent/plugin/out_tdlog.rb
CHANGED
@@ -60,6 +60,13 @@ class TreasureDataLogOutput < BufferedOutput
     end
   end
 
+  # To support log_level option since Fluentd v0.10.43
+  unless method_defined?(:log)
+    define_method(:log) { $log }
+  end
+
+  config_param :endpoint, :string, :default => nil
+
   config_param :connect_timeout, :integer, :default => nil
   config_param :read_timeout, :integer, :default => nil
   config_param :send_timeout, :integer, :default => nil
@@ -83,7 +90,7 @@ class TreasureDataLogOutput < BufferedOutput
     @record_size_limit = 32*1024*1024 # TODO
     @table_list = {}
     @auto_create_table = true
-    @use_ssl =
+    @use_ssl = true
     @buffer_type = 'file' # overwrite default buffer_type
     @flush_interval = 300 # overwrite default flush_interval to 5mins
     @empty_gz_data = create_empty_gz_data
@@ -159,14 +166,22 @@ class TreasureDataLogOutput < BufferedOutput
     @anonymizes = nil if @anonymizes.empty?
 
     @http_proxy = conf['http_proxy']
-    @user_agent = "fluent-plugin-td: 0.10.
+    @user_agent = "fluent-plugin-td: 0.10.18" # TODO: automatic increment version
+
+    if @endpoint.nil?
+      $log.warn "tdlog plugin will change the API endpoint from api.treasure-data.com to api.treasuredata.com"
+      $log.warn "If want to keep api.treasure-data.com, please set 'endpoint api.treasure-data.com' in tdlog configuration"
+    end
   end
 
   def start
     super
 
-
-
+    client_opts = {
+      :ssl => @use_ssl, :http_proxy => @http_proxy, :user_agent => @user_agent, :endpoint => @endpoint,
+      :connect_timeout => @connect_timeout, :read_timeout => @read_timeout, :send_timeout => @send_timeout
+    }
+    @client = TreasureData::Client.new(@apikey, client_opts)
 
     if @key
       if @auto_create_table
@@ -218,8 +233,8 @@ class TreasureDataLogOutput < BufferedOutput
         # TODO (a) Remove the transaction mechanism of fluentd
         # or (b) keep transaction boundaries in in/out_forward.
         # This code disables the transaction mechanism (a).
-
-
+        log.error "#{$!}: #{summarize_record(record)}"
+        log.error_backtrace $!.backtrace
         next
       end
 
@@ -234,7 +249,7 @@ class TreasureDataLogOutput < BufferedOutput
       if sz > @record_size_limit
         # TODO don't raise error
         #raise "Size of a record too large (#{sz} bytes)" # TODO include summary of the record
-
+        log.warn "Size of a record too large (#{sz} bytes): #{summarize_record(record)}"
       end
       off = noff
     }
@@ -279,7 +294,7 @@ class TreasureDataLogOutput < BufferedOutput
 
   def upload(database, table, io, size, unique_id)
     unique_str = unique_id.unpack('C*').map {|x| "%02x" % x }.join
-
+    log.trace { "uploading logs to Treasure Data database=#{database} table=#{table} (#{size}bytes)" }
 
     begin
       begin
@@ -295,7 +310,7 @@ class TreasureDataLogOutput < BufferedOutput
       end
     rescue => e
       elapsed = Time.now - start
-      ne = RuntimeError.new("Failed to upload to
+      ne = RuntimeError.new("Failed to upload to Treasure Data '#{database}.#{table}' table: #{$!} (#{size} bytes; #{elapsed} seconds)")
       ne.set_backtrace(e.backtrace)
       raise ne
     end
@@ -304,7 +319,7 @@ class TreasureDataLogOutput < BufferedOutput
   def check_table_exists(key)
     unless @table_list.has_key?(key)
       database, table = key.split('.',2)
-
+      log.debug "checking whether table '#{database}.#{table}' exists on Treasure Data"
       io = StringIO.new(@empty_gz_data)
       begin
         @client.import(database, table, "msgpack.gz", io, io.size)
@@ -312,8 +327,8 @@ class TreasureDataLogOutput < BufferedOutput
       rescue TreasureData::NotFoundError
         raise "Table #{key.inspect} does not exist on Treasure Data. Use 'td table:create #{database} #{table}' to create it."
       rescue
-
-
+        log.warn "failed to check existence of '#{database}.#{table}' table on Treasure Data", :error=>$!.to_s
+        log.debug_backtrace $!
      end
     end
   end
@@ -332,7 +347,7 @@ class TreasureDataLogOutput < BufferedOutput
   end
 
   def ensure_database_and_table(database, table)
-
+    log.info "Creating table #{database}.#{table} on TreasureData"
    begin
      @client.create_log_table(database, table)
    rescue TreasureData::NotFoundError
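Both outputs now assemble their API connection from the single client_opts hash shown above. A standalone sketch of the equivalent td-client call, assuming td-client ~> 0.8.58 and an API key in the TD_API_KEY environment variable; the endpoint string is illustrative, not a value taken from the plugin:

    require 'td-client'

    # SSL is on by default in 0.10.18; endpoint/proxy/timeouts are optional
    # pass-throughs, so nil keeps the library defaults.
    client_opts = {
      :ssl             => true,
      :endpoint        => 'api.treasuredata.com',
      :http_proxy      => nil,
      :user_agent      => 'fluent-plugin-td: 0.10.18',
      :connect_timeout => nil, :read_timeout => nil, :send_timeout => nil
    }

    client = TreasureData::Client.new(ENV['TD_API_KEY'], client_opts)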
data/lib/fluent/plugin/td_plugin_util.rb
ADDED
@@ -0,0 +1,107 @@
+module Fluent
+  module TDPluginUtil
+    require 'fileutils'
+    require 'stringio'
+    require 'tempfile'
+    require 'zlib'
+    require 'td-client'
+
+    def validate_database_and_table_name(database, table, conf)
+      begin
+        TreasureData::API.validate_database_name(database)
+      rescue => e
+        raise ConfigError, "Invalid database name #{database.inspect}: #{e}: #{conf}"
+      end
+      begin
+        TreasureData::API.validate_table_name(table)
+      rescue => e
+        raise ConfigError, "Invalid table name #{table.inspect}: #{e}: #{conf}"
+      end
+    end
+
+    def parse_bool_parameter(param)
+      if param.empty?
+        true
+      else
+        param = Config.bool_value(param)
+        raise ConfigError, "'true' or 'false' is required for #{key} option on tdlog output" if param.nil?
+        param
+      end
+    end
+
+    def summarize_record(record)
+      json = record.to_json
+      if json.size > 100
+        json[0..97] + "..."
+      else
+        json
+      end
+    end
+
+    def check_table_existence(database, table)
+      @table_list ||= {}
+      key = "#{database}.#{table}"
+      unless @table_list.has_key?(key)
+        log.debug "checking whether table '#{key}' exists on Treasure Data"
+        io = StringIO.new(@empty_gz_data)
+        begin
+          # here doesn't check whether target table is item table or not because import-only user can't read the table status.
+          # So I use empty import request to check table existence.
+          @client.import(database, table, "msgpack.gz", io, io.size)
+          @table_list[key] = true
+        rescue TreasureData::NotFoundError
+          args = self.class == TreasureDataItemOutput ? ' -t item' : ''
+          raise "Table #{key.inspect} does not exist on Treasure Data. Use 'td table:create #{database} #{table}#{args}' to create it."
+        rescue => e
+          log.warn "failed to check table existence on Treasure Data", :error => e.to_s
+          log.debug_backtrace e
+        end
+      end
+    end
+
+    def write(chunk)
+      unique_id = chunk.unique_id
+      database, table = chunk.key.split('.', 2)
+
+      FileUtils.mkdir_p(@tmpdir) unless @tmpdir.nil?
+      f = Tempfile.new(@tmpdir_prefix, @tmpdir)
+      w = Zlib::GzipWriter.new(f)
+
+      chunk.write_to(w)
+      w.finish
+      w = nil
+
+      size = f.pos
+      f.pos = 0
+      upload(database, table, f, size, unique_id)
+    ensure
+      w.close if w
+      f.close if f
+    end
+
+    # assume @client and @auto_create_table variable exist
+    def upload(database, table, io, size, unique_id)
+      unique_str = unique_id.unpack('C*').map {|x| "%02x" % x }.join
+      log.trace { "uploading logs to Treasure Data database=#{database} table=#{table} (#{size}bytes)" }
+
+      begin
+        begin
+          start = Time.now
+          @client.import(database, table, "msgpack.gz", io, size, unique_str)
+        rescue TreasureData::NotFoundError => e
+          unless @auto_create_table
+            raise e
+          end
+          ensure_database_and_table(database, table)
+          io.pos = 0
+          retry
+        end
+      rescue => e
+        elapsed = Time.now - start
+        ne = RuntimeError.new("Failed to upload to TreasureData: #{e} (#{size} bytes; #{elapsed} seconds)")
+        ne.set_backtrace(e.backtrace)
+        raise ne
+      end
+    end
+  end
+end
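check_table_existence above probes a table by pushing an empty msgpack.gz import rather than reading table metadata, because an import-only API key cannot list tables. A standalone sketch of the same probe, assuming td-client is installed and TD_API_KEY is set; the database and table names are placeholders:

    require 'td-client'
    require 'stringio'

    # Import zero records; a NotFoundError means the table is missing.
    client = TreasureData::Client.new(ENV['TD_API_KEY'], :ssl => true)
    io = StringIO.new(TreasureData::API.create_empty_gz_data)

    begin
      client.import('example_db', 'example_table', 'msgpack.gz', io, io.size)
      puts "table exists (empty import accepted)"
    rescue TreasureData::NotFoundError
      warn "table missing: run 'td table:create example_db example_table'"
    end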
data/test/plugin/test_out_tditem.rb
ADDED
@@ -0,0 +1,84 @@
+require 'test_helper'
+require 'fluent/plugin/out_tditem'
+
+class TreasureDataItemOutputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  DEFAULT_CONFIG = %[
+    database test
+    table table
+  ]
+
+  def create_driver(conf = DEFAULT_CONFIG)
+    config = %[
+      apikey testkey
+      buffer_type memory
+    ] + conf
+
+    Fluent::Test::BufferedOutputTestDriver.new(Fluent::TreasureDataItemOutput) do
+      def write(chunk)
+        # TestDriver doesn't call acutual Output#emit so set key to get database and table in this place.
+        chunk.instance_variable_set(:@key, @key)
+        super(chunk)
+      end
+    end.configure(config)
+  end
+
+  def test_configure
+    d = create_driver
+
+    assert_equal(d.instance.apikey, 'testkey')
+    assert_equal(d.instance.database, 'test')
+    assert_equal(d.instance.table, 'table')
+    assert_equal(d.instance.use_ssl, true)
+  end
+
+  def test_configure_with_invalid_database
+    assert_raise(Fluent::ConfigError) {
+      create_driver(%[
+        database a
+        table table
+      ])
+    }
+  end
+
+  def test_configure_with_invalid_table
+    assert_raise(Fluent::ConfigError) {
+      create_driver(%[
+        database test
+        table 1
+      ])
+    }
+  end
+
+  def test_emit
+    d = create_driver
+
+    time, records = stub_seed_values
+    stub_td_import_request(stub_request_body(records), d.instance.database, d.instance.table)
+
+    records.each { |record|
+      d.emit(record, time)
+    }
+    d.run
+
+    assert_equal(@auth_header, 'TD1 testkey')
+  end
+
+  def test_emit
+    d = create_driver(DEFAULT_CONFIG + "endpoint foo.bar.baz")
+    opts = {:endpoint => 'foo.bar.baz'}
+
+    time, records = stub_seed_values
+    stub_td_import_request(stub_request_body(records), d.instance.database, d.instance.table, opts)
+
+    records.each { |record|
+      d.emit(record, time)
+    }
+    d.run
+
+    assert_equal(@auth_header, 'TD1 testkey')
+  end
+end
data/test/plugin/test_out_tdlog.rb
ADDED
@@ -0,0 +1,106 @@
+require 'fluent/test'
+require 'fluent/plugin/out_tdlog'
+
+class TreasureDataLogOutputTest < Test::Unit::TestCase
+  # BufferedOutputTestDriver uses module_eval, not inheritance.
+  # This DummyOutput is for testing actual write method with webmock
+  class TreasureDataLogDummyOutput < Fluent::TreasureDataLogOutput
+  end
+
+  def setup
+    Fluent::Test.setup
+  end
+
+  TMP_DIR = File.dirname(__FILE__) + "/tmp"
+
+  DEFAULT_CONFIG = %[
+    database test
+    table table
+  ]
+
+  def create_driver(conf = DEFAULT_CONFIG)
+    config = %[
+      apikey testkey
+      buffer_path #{TMP_DIR}/buffer
+    ] + conf
+
+    Fluent::Test::BufferedOutputTestDriver.new(TreasureDataLogDummyOutput) do
+      def write(chunk)
+        chunk.instance_variable_set(:@key, @key)
+        super(chunk)
+      end
+    end.configure(config)
+  end
+
+  def test_configure
+    d = create_driver
+
+    {:@apikey => 'testkey', :@use_ssl => true, :@auto_create_table => true,
+     :@buffer_type => 'file', :@flush_interval => 300}.each { |k, v|
+      assert_equal(d.instance.instance_variable_get(k), v)
+    }
+  end
+
+  def test_emit
+    d = create_driver
+
+    time, records = stub_seed_values
+    database, table = d.instance.instance_variable_get(:@key).split(".", 2)
+    stub_td_table_create_request(database, table)
+    stub_td_import_request(stub_request_body(records, time), database, table)
+
+    records.each { |record|
+      d.emit(record, time)
+    }
+    d.run
+
+    assert_equal(@auth_header, 'TD1 testkey')
+  end
+
+  def test_emit_with_endpoint
+    d = create_driver(DEFAULT_CONFIG + "endpoint foo.bar.baz")
+    opts = {:endpoint => 'foo.bar.baz'}
+    time, records = stub_seed_values
+    database, table = d.instance.instance_variable_get(:@key).split(".", 2)
+    stub_td_table_create_request(database, table, opts)
+    stub_td_import_request(stub_request_body(records, time), database, table, opts)
+
+    records.each { |record|
+      d.emit(record, time)
+    }
+    d.run
+  end
+
+  # TODO: add normalized_msgpack / key_num_limit / tag split test
+
+  ## TODO invalid names are normalized
+  # def test_invalid_name
+  #   d = create_driver
+  #   d.instance.start
+  #
+  #   es = Fluent::OneEventStream.new(Time.now.to_i, {})
+  #   chain = Fluent::NullOutputChain.instance
+  #   assert_raise(RuntimeError) do
+  #     d.instance.emit("test.invalid-name", es, chain)
+  #   end
+  #   assert_raise(RuntimeError) do
+  #     d.instance.emit("empty", es, chain)
+  #   end
+  #   assert_raise(RuntimeError) do
+  #     d.instance.emit("", es, chain)
+  #   end
+  # end
+
+  ## TODO invalid data is ignored
+  # def test_invalid_data
+  #   d = create_driver
+  #   d.instance.start
+  #
+  #   es = Fluent::OneEventStream.new(Time.now.to_i, "invalid")
+  #   chain = Fluent::NullOutputChain.instance
+  #   assert_nothing_raised do
+  #     d.instance.emit("test.name", es, chain)
+  #   end
+  # end
+end
+
data/test/test_helper.rb
ADDED
@@ -0,0 +1,71 @@
+require 'json'
+require 'msgpack'
+require 'fluent/test'
+require 'webmock/test_unit'
+require 'stringio'
+require 'td-client'
+require 'zlib'
+
+def e(s)
+  require 'cgi'
+  CGI.escape(s.to_s)
+end
+
+class Test::Unit::TestCase
+  def stub_seed_values
+    time = Time.parse("2014-01-01 00:00:00 UTC").to_i
+    records = [{"a" => 1}, {"a" => 2}]
+    return time, records
+  end
+
+  def stub_request_body(records, time = nil)
+    out = ''
+    records.each { |record|
+      r = record.dup
+      if time
+        r['time'] = time
+      end
+      r.to_msgpack(out)
+    }
+
+    io = StringIO.new
+    gz = Zlib::GzipWriter.new(io)
+    FileUtils.copy_stream(StringIO.new(out), gz)
+    gz.finish
+    io.string
+  end
+
+  def stub_gzip_unwrap(body)
+    io = StringIO.new(body)
+    gz = Zlib::GzipReader.new(io)
+    gz.read
+  end
+
+  def stub_td_table_create_request(database, table, opts = {})
+    opts[:use_ssl] = true unless opts.has_key?(:use_ssl)
+    schema = opts[:use_ssl] ? 'https' : 'http'
+    response = {"database" => database, "table" => table}.to_json
+    endpoint = opts[:endpoint] ? opts[:endpoint] : TreasureData::API::DEFAULT_ENDPOINT
+
+    url = "#{schema}://#{endpoint}/v3/table/create/#{e(database)}/#{e(table)}/log"
+    stub_request(:post, url).to_return(:status => 200, :body => response)
+  end
+
+  def stub_td_import_request(body, db, table, opts = {})
+    opts[:use_ssl] = true unless opts.has_key?(:use_ssl)
+    format = opts[:format] || 'msgpack.gz'
+    schema = opts[:use_ssl] ? 'https' : 'http'
+    response = {"database" => db, "table" => table, "elapsed_time" => 0}.to_json
+    endpoint = opts[:endpoint] ? opts[:endpoint] : TreasureData::API::DEFAULT_IMPORT_ENDPOINT
+
+    # for check_table_existence
+    url_with_empty = "#{schema}://#{endpoint}//v3/table/import/#{e(db)}/#{e(table)}/#{format}"
+    stub_request(:put, url_with_empty).to_return(:status => 200, :body => response)
+
+    url_with_unique = Regexp.compile("#{schema}://#{endpoint}//v3/table/import_with_id/#{e(db)}/#{e(table)}/.*/#{format}")
+    stub_request(:put, url_with_unique).with(:headers => {'Content-Type' => 'application/octet-stream'}) { |req|
+      @auth_header = req.headers["Authorization"]
+      stub_gzip_unwrap(req.body) == stub_gzip_unwrap(body)
+    }.to_return(:status => 200, :body => response)
+  end
+end
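The import stub above matches a request by gunzipping both the expected and the actual bodies and comparing the raw MessagePack bytes. A small sketch of that round trip using only stdlib zlib/stringio and the msgpack gem:

    require 'msgpack'
    require 'stringio'
    require 'zlib'

    # Pack two records, gzip them as stub_request_body does, then unwrap
    # them the way stub_gzip_unwrap does and compare.
    packed = ''
    [{ "a" => 1 }, { "a" => 2 }].each { |r| packed << r.to_msgpack }

    io = StringIO.new
    gz = Zlib::GzipWriter.new(io)
    gz.write(packed)
    gz.finish
    body = io.string

    unwrapped = Zlib::GzipReader.new(StringIO.new(body)).read
    puts unwrapped == packed   # => true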
metadata
CHANGED
@@ -1,20 +1,18 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-td
 version: !ruby/object:Gem::Version
-  version: 0.10.17
-  prerelease:
+  version: 0.10.18
 platform: ruby
 authors:
 - Treasure Data, Inc.
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2014-03-31 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -22,7 +20,6 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -30,35 +27,45 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: td-client
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 0.8.
+        version: 0.8.58
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        version: 0.8.
+        version: 0.8.58
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: 0.9.2
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: 0.9.2
+- !ruby/object:Gem::Dependency
+  name: webmock
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.16'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.16'
 description: Treasure Data Cloud Data Service plugin for Fluentd
 email: support@treasure-data.com
 executables: []
@@ -66,45 +73,46 @@ extensions: []
 extra_rdoc_files: []
 files:
 - .gitignore
+- .travis.yml
 - AUTHORS
 - ChangeLog
 - Gemfile
+- Gemfile.fluentd.lt.0.10.43
 - README.rdoc
 - Rakefile
 - VERSION
 - example.conf
 - fluent-plugin-td.gemspec
+- lib/fluent/plugin/out_tditem.rb
 - lib/fluent/plugin/out_tdlog.rb
-- test/out_tdlog.rb
+- lib/fluent/plugin/td_plugin_util.rb
+- test/plugin/test_out_tditem.rb
+- test/plugin/test_out_tdlog.rb
+- test/test_helper.rb
 homepage: http://www.treasuredata.com/
 licenses: []
+metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
-  segments:
-  - 0
-  hash: 79461157441894892
 required_rubygems_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
-  segments:
-  - 0
-  hash: 79461157441894892
 requirements: []
 rubyforge_project:
-rubygems_version:
+rubygems_version: 2.0.2
 signing_key:
-specification_version:
+specification_version: 4
 summary: Treasure Data Cloud Data Service plugin for Fluentd
 test_files:
-- test/out_tdlog.rb
+- test/plugin/test_out_tditem.rb
+- test/plugin/test_out_tdlog.rb
+- test/test_helper.rb
data/test/out_tdlog.rb
DELETED
@@ -1,67 +0,0 @@
-require 'fluent/test'
-require 'fluent/plugin/out_tdlog'
-
-class TreasureDataLogOutputTest < Test::Unit::TestCase
-  def setup
-    Fluent::Test.setup
-  end
-
-  TMP_DIR = File.dirname(__FILE__) + "/tmp"
-
-  CONFIG = %[
-    apikey testkey
-    buffer_path #{TMP_DIR}/buffer
-  ]
-
-  def create_driver(conf = CONFIG)
-    Fluent::Test::BufferedOutputTestDriver.new(Fluent::TreasureDataLogOutput) do
-      def start
-        super
-      end
-
-      def write(chunk)
-        chunk.read
-      end
-    end.configure(conf)
-  end
-
-  def test_emit
-    d = create_driver
-
-    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
-    d.emit({"a"=>1}, time)
-    d.emit({"a"=>2}, time)
-    d.run
-  end
-
-  ## TODO invalid names are normalized
-  # def test_invalid_name
-  #   d = create_driver
-  #   d.instance.start
-  #
-  #   es = Fluent::OneEventStream.new(Time.now.to_i, {})
-  #   chain = Fluent::NullOutputChain.instance
-  #   assert_raise(RuntimeError) do
-  #     d.instance.emit("test.invalid-name", es, chain)
-  #   end
-  #   assert_raise(RuntimeError) do
-  #     d.instance.emit("empty", es, chain)
-  #   end
-  #   assert_raise(RuntimeError) do
-  #     d.instance.emit("", es, chain)
-  #   end
-  # end
-
-  ## TODO invalid data is ignored
-  # def test_invalid_data
-  #   d = create_driver
-  #   d.instance.start
-  #
-  #   es = Fluent::OneEventStream.new(Time.now.to_i, "invalid")
-  #   chain = Fluent::NullOutputChain.instance
-  #   assert_nothing_raised do
-  #     d.instance.emit("test.name", es, chain)
-  #   end
-  # end
-end
-