fluent-plugin-td 0.11.0.rc1 → 1.0.0.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +0 -4
- data/ChangeLog +6 -0
- data/fluent-plugin-td.gemspec +2 -2
- data/lib/fluent/plugin/out_tdlog.rb +79 -101
- data/lib/fluent/plugin/td_plugin_version.rb +4 -2
- data/test/plugin/test_out_tdlog.rb +107 -89
- data/test/test_helper.rb +9 -4
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 41182a2b265e1d4e1cfa4a1fdc6936c972b83a03
+  data.tar.gz: 5cec317a97fa59bd1f71a05f5cf72d2900618220
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 36d0dea559b0481a8b262cd1f7aad273d7ae5c213179d010623fcbf78f4fe94f1f58b91d83acd86f7093b9b27d661a0dad57c7fd59b2aa21576dc079c7f23182
+  data.tar.gz: 0cd0f5890c5132cfe622bd3cdb5a1ba14ca5b5a801d78befe2ca12a97847134bb256b28307c5618d5a11b09f2feb732876cc58f2b4575984adbea02ac8e6575f
data/.travis.yml
CHANGED
@@ -7,7 +7,6 @@ rvm:
 
 gemfile:
   - Gemfile
-  - Gemfile.v0.12
 
 before_install: gem update bundler
 script: bundle exec rake test
@@ -17,6 +16,3 @@ sudo: false
 matrix:
   allow_failures:
     - rvm: ruby-head
-  exclude:
-    - rvm: 2.4.0
-      gemfile: Gemfile.v0.12
data/ChangeLog
CHANGED
data/fluent-plugin-td.gemspec
CHANGED
@@ -7,7 +7,7 @@ Gem::Specification.new do |gem|
   gem.description = "Treasure Data Cloud Data Service plugin for Fluentd"
   gem.homepage = "http://www.treasuredata.com/"
   gem.summary = gem.description
-  gem.version = Fluent::TreasureDataPlugin::VERSION
+  gem.version = Fluent::Plugin::TreasureDataPlugin::VERSION
   gem.authors = ["Treasure Data, Inc."]
   gem.email = "support@treasure-data.com"
   gem.has_rdoc = false
@@ -18,7 +18,7 @@ Gem::Specification.new do |gem|
   gem.require_paths = ['lib']
   gem.license = "Apache-2.0"
 
-  gem.add_dependency "fluentd", [">= 0.
+  gem.add_dependency "fluentd", [">= 0.14.13", "< 2"]
   gem.add_dependency "td-client", "~> 1.0"
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "webmock", "~> 1.16"
data/lib/fluent/plugin/out_tdlog.rb
CHANGED
@@ -1,41 +1,44 @@
+require 'fileutils'
+require 'tempfile'
+require 'zlib'
+require 'stringio'
 require 'td-client'
-
+
+require 'fluent/plugin/output'
 require 'fluent/plugin/td_plugin_version'
 
-module Fluent
-  class TreasureDataLogOutput <
-    Plugin.register_output('tdlog', self)
+module Fluent::Plugin
+  class TreasureDataLogOutput < Output
+    Fluent::Plugin.register_output('tdlog', self)
 
     IMPORT_SIZE_LIMIT = 32 * 1024 * 1024
+    UPLOAD_EXT = 'msgpack.gz'.freeze
 
-
-    unless method_defined?(:log)
-      define_method(:log) { $log }
-    end
+    helpers :event_emitter, :compat_parameters
 
     config_param :apikey, :string, :secret => true
     config_param :auto_create_table, :bool, :default => true
+    config_param :database, :string, :default => nil
+    config_param :table, :string, :default => nil
     config_param :use_gzip_command, :bool, :default => false
 
     config_param :endpoint, :string, :default => TreasureData::API::NEW_DEFAULT_ENDPOINT
     config_param :use_ssl, :bool, :default => true
+    config_param :tmpdir, :string, :default => nil
+    config_param :http_proxy, :string, :default => nil
     config_param :connect_timeout, :integer, :default => nil
     config_param :read_timeout, :integer, :default => nil
     config_param :send_timeout, :integer, :default => nil
-
+
+    config_section :buffer do
+      config_set_default :@type, 'file'
+      config_set_default :chunk_keys, ['tag']
+      config_set_default :flush_interval, 300
+      config_set_default :chunk_limit_size, IMPORT_SIZE_LIMIT
+    end
 
     def initialize
-      require 'fileutils'
-      require 'tempfile'
-      require 'zlib'
-      require 'net/http'
-      require 'json'
-      require 'cgi' # CGI.escape
-      require 'time' # Time#rfc2822
-      require 'digest/md5'
-      require 'stringio'
       super
-      @tmpdir = nil
       @key = nil
       @key_num_limit = 512 # TODO: Our one-time import has the restriction about the number of record keys.
       @record_size_limit = 32 * 1024 * 1024 # TODO
@@ -45,15 +48,7 @@ module Fluent
     end
 
     def configure(conf)
-
-      unless conf.has_key?('buffer_chunk_limit')
-        conf['buffer_chunk_limit'] = IMPORT_SIZE_LIMIT
-      end
-
-      # v0.14 seems to have a bug of config_set_default: https://github.com/treasure-data/fluent-plugin-td/pull/22#issuecomment-230782005
-      unless conf.has_key?('buffer_type')
-        conf['buffer_type'] = 'file'
-      end
+      compat_parameters_convert(conf, :buffer, default_chunk_key: 'tag')
 
       super
 
@@ -67,19 +62,16 @@ module Fluent
         end
       end
 
-
-        @tmpdir = conf['tmpdir']
-        FileUtils.mkdir_p(@tmpdir)
-      end
+      FileUtils.mkdir_p(@tmpdir) if @tmpdir
 
-      database
-
-
-
-      @
+      if @database && @table
+        validate_database_and_table_name(@database, @table)
+        @key = "#{@database}.#{@table}"
+      else
+        unless @chunk_key_tag
+          raise Fluent::ConfigError, "'tag' must be included in <buffer ARG> when database and table are not specified"
+        end
       end
-
-      @http_proxy = conf['http_proxy']
     end
 
     def start
@@ -93,72 +85,51 @@ module Fluent
 
       if @key
         if @auto_create_table
-          database, table
-          ensure_database_and_table(database, table)
+          ensure_database_and_table(@database, @table)
         else
          check_table_exists(@key)
        end
      end
    end
 
-    def
-
-        key = @key
-      else
-        database, table = tag.split('.')[-2,2]
-        database = TreasureData::API.normalize_database_name(database)
-        table = TreasureData::API.normalize_table_name(table)
-        key = "#{database}.#{table}"
-      end
-
-      unless @auto_create_table
-        check_table_exists(key)
-      end
-
-      super(tag, es, chain, key)
+    def multi_workers_ready?
+      true
     end
 
-    def
-
-
-      es.each { |time, record|
-        # Applications may send non-hash record or broken chunk may generate non-hash record so such records should be skipped
-        next unless record.is_a?(Hash)
+    def formatted_to_msgpack_binary
+      true
+    end
 
-
-
-
+    def format(tag, time, record)
+      begin
+        record['time'] = time.to_i
+        record.delete(:time) if record.has_key?(:time)
 
-
-
-
-
-        # TODO (a) Remove the transaction mechanism of fluentd
-        #      or (b) keep transaction boundaries in in/out_forward.
-        # This code disables the transaction mechanism (a).
-        log.warn "Skipped a broken record (#{e}): #{summarize_record(record)}"
-        log.warn_backtrace e.backtrace
-        next
+        if record.size > @key_num_limit
+          # TODO include summary of the record
+          router.emit_error_event(tag, time, record, RuntimeError.new("too many number of keys (#{record.size} keys)"))
+          return nil
+        end
         end
+      rescue => e
+        router.emit_error_event(tag, time, {'record' => record}, RuntimeError.new("skipped a broken record: #{e}"))
+        return nil
+      end
 
-
-
-
-
-
-
-
+      begin
+        result = record.to_msgpack
+      rescue RangeError
+        result = TreasureData::API.normalized_msgpack(record)
+      rescue => e
+        router.emit_error_event(tag, time, {'record' => record}, RuntimeError.new("can't convert record to msgpack: #{e}"))
+        return nil
+      end
 
-
-
-
-
-
-
-        end
-        off = noff
-      }
-      out.to_s
+      if result.bytesize > @record_size_limit
+        # Don't raise error. Large size is not critical for streaming import
+        log.warn "Size of a record too large (#{result.bytesize} bytes): #{summarize_record(record)}"
+      end
+
+      result
     end
 
     def summarize_record(record)
@@ -172,10 +143,17 @@ module Fluent
 
     def write(chunk)
       unique_id = chunk.unique_id
-
+
+      if @key
+        database, table = @database, @table
+      else
+        database, table = chunk.metadata.tag.split('.')[-2, 2]
+        database = TreasureData::API.normalize_database_name(database)
+        table = TreasureData::API.normalize_table_name(table)
+      end
 
       FileUtils.mkdir_p(@tmpdir) unless @tmpdir.nil?
-      f = Tempfile.new("tdlog-#{chunk.
+      f = Tempfile.new("tdlog-#{chunk.metadata.tag}-", @tmpdir)
       f.binmode
 
       size = if @use_gzip_command
@@ -191,11 +169,11 @@ module Fluent
 
     # TODO: Share this routine with s3 compressors
     def gzip_by_command(chunk, tmp)
-      chunk_is_file = @
+      chunk_is_file = @buffer_config['@type'] == 'file'
      path = if chunk_is_file
               chunk.path
             else
-               w = Tempfile.new("gzip-tdlog-#{chunk.
+               w = Tempfile.new("gzip-tdlog-#{chunk.metadata.tag}-", @tmpdir)
               w.binmode
               chunk.write_to(w)
               w.close
@@ -235,7 +213,7 @@ module Fluent
      begin
        begin
          start = Time.now
-          @client.import(database, table,
+          @client.import(database, table, UPLOAD_EXT, io, size, unique_str)
        rescue TreasureData::NotFoundError => e
          unless @auto_create_table
            raise e
@@ -258,7 +236,7 @@ module Fluent
      log.debug "checking whether table '#{database}.#{table}' exists on Treasure Data"
      io = StringIO.new(@empty_gz_data)
      begin
-        @client.import(database, table,
+        @client.import(database, table, UPLOAD_EXT, io, io.size)
        @table_list[key] = true
      rescue TreasureData::NotFoundError
        raise "Table #{key.inspect} does not exist on Treasure Data. Use 'td table:create #{database} #{table}' to create it."
@@ -269,16 +247,16 @@ module Fluent
      end
    end
 
-    def validate_database_and_table_name(database, table
+    def validate_database_and_table_name(database, table)
      begin
        TreasureData::API.validate_database_name(database)
      rescue => e
-        raise ConfigError, "Invalid database name #{database.inspect}: #{e}
+        raise ConfigError, "Invalid database name #{database.inspect}: #{e}"
      end
      begin
        TreasureData::API.validate_table_name(table)
      rescue => e
-        raise ConfigError, "Invalid table name #{table.inspect}: #{e}
+        raise ConfigError, "Invalid table name #{table.inspect}: #{e}"
      end
    end
 
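The hunks above port out_tdlog.rb from the v0.12 output API to the Fluentd v0.14/v1 plugin API: buffering is declared through config_section :buffer with 'tag' as the default chunk key, per-event serialization moves into format, the upload happens in write on a whole chunk whose tag is read from chunk.metadata.tag, and broken records are routed through router.emit_error_event instead of being logged and skipped. What follows is a minimal sketch of that same output-plugin pattern, not the TD plugin itself; the plugin name example_tagged, the memory buffer type, and the log message are invented for illustration.

require 'msgpack'
require 'fluent/plugin/output'

module Fluent::Plugin
  # Minimal illustration of the v0.14-style buffered-output pattern used by the
  # rewritten out_tdlog.rb. Plugin name and behavior are hypothetical.
  class ExampleTaggedOutput < Output
    Fluent::Plugin.register_output('example_tagged', self)

    helpers :event_emitter

    # Chunk by tag so write() can recover it from chunk.metadata.tag, as tdlog does.
    # A memory buffer keeps the sketch self-contained (tdlog defaults to 'file').
    config_section :buffer do
      config_set_default :@type, 'memory'
      config_set_default :chunk_keys, ['tag']
    end

    # Serialize each event; a bad record goes to the error stream instead of
    # aborting the whole chunk, mirroring the new format() in the diff.
    def format(tag, time, record)
      record['time'] = time.to_i
      record.to_msgpack
    rescue => e
      router.emit_error_event(tag, time, {'record' => record}, e)
      nil
    end

    def formatted_to_msgpack_binary
      true
    end

    # Called with a whole buffer chunk; the tag is available because 'tag' is a chunk key.
    def write(chunk)
      log.info "flushing #{chunk.read.bytesize} bytes for tag #{chunk.metadata.tag}"
    end
  end
end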
data/test/plugin/test_out_tdlog.rb
CHANGED
@@ -1,26 +1,36 @@
 require 'fluent/test'
+require 'fluent/test/driver/output'
 require 'fluent/plugin/out_tdlog'
 require 'test_helper.rb'
 
 class TreasureDataLogOutputTest < Test::Unit::TestCase
+  TMP_DIR = File.dirname(__FILE__) + "/tmp"
+
   def setup
+    super
     Fluent::Test.setup
+    FileUtils.rm_rf(TMP_DIR, secure: true)
+    FileUtils.mkdir_p(TMP_DIR)
   end
 
-
+  def teardown
+    super
+    Fluent::Engine.stop
+  end
 
+  BASE_CONFIG = %[
+    apikey testkey
+    buffer_path #{TMP_DIR}/buffer
+  ]
   DEFAULT_CONFIG = %[
     database test
     table table
   ]
 
   def create_driver(conf = DEFAULT_CONFIG)
-    config =
-      apikey testkey
-      buffer_path #{TMP_DIR}/buffer
-    ] + conf
+    config = BASE_CONFIG + conf
 
-    Fluent::Test::
+    Fluent::Test::Driver::Output.new(Fluent::Plugin::TreasureDataLogOutput) do
      def write(chunk)
        chunk.instance_variable_set(:@key, @key)
        def chunk.key
@@ -34,15 +44,30 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
   def test_configure
     d = create_driver
 
-    {:@apikey => 'testkey', :@use_ssl => true, :@auto_create_table => true,
-     :@buffer_type => 'file', :@flush_interval => 300, :@use_gzip_command => false}.each { |k, v|
+    {:@apikey => 'testkey', :@use_ssl => true, :@auto_create_table => true, :@use_gzip_command => false}.each { |k, v|
       assert_equal(d.instance.instance_variable_get(k), v)
     }
+    {:@chunk_keys => ['tag'], :@flush_interval => 300, :@chunk_limit_size => Fluent::Plugin::TreasureDataLogOutput::IMPORT_SIZE_LIMIT}.each { |k, v|
+      assert_equal(d.instance.buffer.instance_variable_get(k), v)
+    }
+  end
+
+  def test_configure_for_chunk_key_tag
+    assert_raise Fluent::ConfigError.new("'tag' must be included in <buffer ARG> when database and table are not specified") do
+      Fluent::Test::Driver::Output.new(Fluent::Plugin::TreasureDataLogOutput).configure(%[
+        apikey testkey
+        <buffer []>
+          flush_interval 10s
+          path #{TMP_DIR}/buffer
+        </buffer>
+      ])
+    end
   end
 
-  def
+  data('evet_time' => 'event_time', 'int_time' => 'int')
+  def test_emit(time_class)
     d = create_driver
-    time, records = stub_seed_values
+    time, records = stub_seed_values(time_class)
     database, table = d.instance.instance_variable_get(:@key).split(".", 2)
     stub_td_table_create_request(database, table)
     stub_td_import_request(stub_request_body(records, time), database, table)
@@ -51,10 +76,11 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
       # We need actual gzipped content to verify compressed body is correct or not.
       dont_allow(d.instance).gzip_by_command(anything, is_a(Tempfile))
 
-
-
+      d.run(default_tag: 'test') {
+        records.each { |record|
+          d.feed(time, record)
+        }
      }
-      d.run
    }
 
    assert_equal('TD1 testkey', @auth_header)
@@ -69,11 +95,11 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    assert_rr {
      # same as test_emit
      dont_allow(d.instance).gzip_by_writer(anything, is_a(Tempfile))
-
-
-
+      d.run(default_tag: 'test') {
+        records.each { |record|
+          d.feed(time, record)
+        }
      }
-      d.run
    }
 
    assert_equal('TD1 testkey', @auth_header)
@@ -88,14 +114,14 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    stub_td_table_create_request(database, table)
    stub_td_import_request(stub_request_body(records, time), database, table)
 
-
-    d.
+    d.run(default_tag: 'test') {
+      d.feed_to_plugin('test', Fluent::ArrayEventStream.new(records.map { |e| [time, e] }))
    }
-    d.run
 
-
-
-
+    error_events = d.error_events(tag: 'test')
+    assert_equal 2, error_events.size
+    assert_equal nil, error_events[0][2]['record']
+    assert_equal "string", error_events[1][2]['record']
   end
 
   def test_emit_with_bigint_record
@@ -111,29 +137,11 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    test_time, test_records = stub_seed_values
    test_records[1]['k'] = ['hogehoge' * 1000]
    test_records[1]['kk'] = n
-
-
-
-
-  end
-
-  def test_emit_with_event_time
-    omit "EventTime is not implemented with current Fluentd version" unless Fluent.const_defined?('EventTime')
-
-    event_time_klass = Fluent.const_get('EventTime')
-
-    event_time = event_time_klass.now
-    d = create_driver
-    _time, records = stub_seed_values
-    database, table = d.instance.instance_variable_get(:@key).split(".", 2)
-    stub_td_table_create_request(database, table)
-    stub_td_import_request(stub_request_body(records, event_time.to_i), database, table)
-
-    _test_time, test_records = stub_seed_values
-    test_records.each { |record|
-      d.emit(record, event_time)
+    d.run(default_tag: 'test') {
+      test_records.each { |record|
+        d.feed(test_time, record)
+      }
    }
-    d.run
   end
 
   def test_emit_with_time_symbole
@@ -143,11 +151,12 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    stub_td_table_create_request(database, table)
    stub_td_import_request(stub_request_body(records, time), database, table)
 
-
-
-
+    d.run(default_tag: 'test') {
+      records.each { |record|
+        record[:time] = Time.now.to_i # emit removes this :time key
+        d.feed(time, record)
+      }
    }
-    d.run
 
    assert_equal('TD1 testkey', @auth_header)
  end
@@ -160,10 +169,11 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    stub_td_table_create_request(database, table, opts)
    stub_td_import_request(stub_request_body(records, time), database, table, opts)
 
-
-
+    d.run(default_tag: 'test') {
+      records.each { |record|
+        d.feed(time, record)
+      }
    }
-    d.run
  end
 
  def test_emit_with_too_many_keys
@@ -174,45 +184,53 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
    stub_td_table_create_request(database, table, opts)
    stub_td_import_request(stub_request_body([], time), database, table, opts)
 
-    d.
-
+    d.run(default_tag: 'test') {
+      d.feed(time, create_too_many_keys_record)
+    }
 
-    assert_equal 0, d.
-
-      line =~ /Too many number of keys/
-    }.size == 1, "too many keys error is not logged"
+    assert_equal 0, d.events.size
+    assert_equal 1, d.error_events.size
  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  sub_test_case 'tag splitting for database and table' do
+    def create_driver(conf = %[auto_create_table true])
+      config = BASE_CONFIG + conf
+
+      Fluent::Test::Driver::Output.new(Fluent::Plugin::TreasureDataLogOutput).configure(config)
+    end
+
+    data('evet_time' => 'event_time', 'int_time' => 'int')
+    def test_tag_split(time_class)
+      d = create_driver
+
+      time, records = stub_seed_values(time_class)
+      database = 'db1'
+      table = 'table1'
+      stub_td_table_create_request(database, table)
+      stub_td_import_request(stub_request_body(records, time), database, table)
+
+      d.run(default_tag: 'td.db1.table1') {
+        records.each { |record|
+          d.feed(time, record)
+        }
+      }
+    end
+
+    def test_tag_split_with_normalization
+      d = create_driver
+
+      time, records = stub_seed_values
+      database = 'db_'
+      table = 'tb_'
+      stub_td_table_create_request(database, table)
+      stub_td_import_request(stub_request_body(records, time), database, table)
+
+      d.run(default_tag: 'td.db.tb') {
+        records.each { |record|
+          d.feed(time, record)
+        }
+      }
+    end
+  end
 end
 
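The test rewrite above replaces the old BufferedOutputTestDriver style (d.emit, then a bare d.run) with Fluent::Test::Driver::Output, where events are fed inside d.run(default_tag: ...) via d.feed and results are inspected through d.events and d.error_events. Below is a small sketch of the configure-only half of that pattern, which needs no webmock stubbing of the TD API because nothing is flushed; the test class name and the buffer path are invented for the example.

require 'test/unit'
require 'fluent/test'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_tdlog'

class TdlogDriverSketch < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Configure-only check in the new driver style; mirrors test_configure above.
  def test_buffer_defaults
    d = Fluent::Test::Driver::Output.new(Fluent::Plugin::TreasureDataLogOutput)
    d.configure(%[
      apikey testkey
      database test
      table table
      buffer_path /tmp/tdlog-sketch-buffer
    ])

    # With database and table given, @key is fixed at configure time ...
    assert_equal 'test.table', d.instance.instance_variable_get(:@key)
    # ... and buffer settings now live on the plugin's buffer object.
    assert_equal ['tag'], d.instance.buffer.instance_variable_get(:@chunk_keys)
  end
end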
data/test/test_helper.rb
CHANGED
@@ -1,26 +1,31 @@
 require 'json'
 require 'msgpack'
-require 'fluent/test'
 require 'webmock/test_unit'
 require 'stringio'
 require 'td-client'
 require 'zlib'
 require 'test/unit/rr'
 
+require 'fluent/test'
+require 'fluent/test/helpers'
+
 def e(s)
   require 'cgi'
   CGI.escape(s.to_s)
 end
 
 class Test::Unit::TestCase
+  include Fluent::Test::Helpers
+
   def create_too_many_keys_record
     record = {}
     5012.times { |i| record["k#{i}"] = i }
     record
   end
 
-  def stub_seed_values
-    time =
+  def stub_seed_values(time_class = 'int')
+    time = event_time("2014-01-01 00:00:00 UTC")
+    time = time.to_i if time_class == 'int'
    records = [{"a" => 1}, {"a" => 2}]
    return time, records
  end
@@ -32,7 +37,7 @@ class Test::Unit::TestCase
 
      r = record.dup
      if time
-        r['time'] = time
+        r['time'] = time.to_i
      end
      r.to_msgpack(out)
    }
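test_helper.rb now mixes in Fluent::Test::Helpers so stub_seed_values can hand back either an integer epoch or a Fluent::EventTime, which is what the data('evet_time' => ..., 'int_time' => ...) parametrization in the output tests switches on, and stub_request_body normalizes both with time.to_i. A tiny standalone sketch of that helper; the printed values are what I would expect, not captured output.

require 'fluent/test'
require 'fluent/test/helpers'
include Fluent::Test::Helpers

# event_time returns a Fluent::EventTime; to_i collapses both representations
# to the same epoch integer, which is why stub_request_body now calls time.to_i.
t_event = event_time("2014-01-01 00:00:00 UTC")
puts t_event.class   # expected: Fluent::EventTime
puts t_event.to_i    # expected: 1388534400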
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-td
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 1.0.0.rc1
 platform: ruby
 authors:
 - Treasure Data, Inc.
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-02-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -16,7 +16,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.14.13
     - - "<"
       - !ruby/object:Gem::Version
         version: '2'
@@ -26,7 +26,7 @@ dependencies:
     requirements:
    - - ">="
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.14.13
    - - "<"
      - !ruby/object:Gem::Version
        version: '2'