fluent-plugin-td 0.10.20 → 0.10.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.travis.yml +0 -1
- data/ChangeLog +6 -0
- data/VERSION +1 -1
- data/lib/fluent/plugin/out_tditem.rb +1 -1
- data/lib/fluent/plugin/out_tdlog.rb +274 -285
- data/test/plugin/test_out_tditem.rb +16 -3
- data/test/plugin/test_out_tdlog.rb +18 -2
- data/test/test_helper.rb +6 -0
- metadata +17 -34
- data/Gemfile.fluentd.lt.0.10.43 +0 -4
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 45aa1dc756f40504543010be6622b6af518fa46c
+  data.tar.gz: 72c08e7060e8e6e1fb3af3144a746f92e78f4cf0
+SHA512:
+  metadata.gz: 3d494ed57200fb989b24db7552a917f690ce5e96b83a8a108e56d03d2d41274c1a2b37ee610494790ea4f21be41cefcbcd783fad74cd1df03fa5fbd84caa079e
+  data.tar.gz: e62a326f85674fe4acdb78f7f9b7df0c27cc45e569b5af171b1754e99204222e8ff0cb286d0a4667ae6c64d83611b4aec742c98a5c95fdc77bf6f3fc5c45b884
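These are the standard RubyGems package digests (SHA1 and SHA512 of the gem's metadata.gz and data.tar.gz). As an illustration only, they could be recomputed from an unpacked copy of the .gem with a sketch like the one below; the file paths are assumptions, not part of this package.

require 'digest'

# Illustrative only: recompute the checksums.yaml digests for the two
# archives inside an unpacked .gem (paths are assumed).
%w[metadata.gz data.tar.gz].each do |name|
  data = File.binread(name)
  puts "#{name} SHA1:   #{Digest::SHA1.hexdigest(data)}"
  puts "#{name} SHA512: #{Digest::SHA512.hexdigest(data)}"
end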
data/.travis.yml
CHANGED
data/ChangeLog
CHANGED
data/VERSION
CHANGED
@@ -1 +1 @@
-0.10.20
+0.10.21
data/lib/fluent/plugin/out_tditem.rb
CHANGED
@@ -37,7 +37,7 @@ module Fluent
       @key_num_limit = 1024 # Item table default limitation
       @record_size_limit = 32 * 1024 * 1024 # TODO
       @empty_gz_data = TreasureData::API.create_empty_gz_data
-      @user_agent = "fluent-plugin-td-item: 0.10.20".freeze # TODO: automatic increment version
+      @user_agent = "fluent-plugin-td-item: 0.10.21".freeze # TODO: automatic increment version
     end

     def configure(conf)
data/lib/fluent/plugin/out_tdlog.rb
CHANGED
@@ -1,358 +1,347 @@
 require 'td-client'

 module Fluent
+  class TreasureDataLogOutput < BufferedOutput
+    Plugin.register_output('tdlog', self)

+    IMPORT_SIZE_LIMIT = 32 * 1024 * 1024

-class
-  IMPORT_SIZE_LIMIT = 32*1024*1024
-  class Anonymizer
-    include Configurable
-  end
+    class Anonymizer
+      include Configurable
+    end

+    class RawAnonymizer < Anonymizer
+      def anonymize(obj)
+        if obj.nil?
+          nil
+        elsif obj.is_a?(String)
+          anonymize_raw obj
+        elsif obj.is_a?(Numeric)
+          anonymize_raw obj.to_s
+        else
+          # boolean, array, map
+          anonymize_raw MessagePack.pack(obj)
+        end
       end
     end
-  end

+    class MD5Anonymizer < RawAnonymizer
+      def anonymize_raw(raw)
+        Digest::MD5.hexdigest(raw)
+      end
     end
-  end

+    class IPXORAnonymizer < RawAnonymizer
+      config_param :xor_key, :string

+      def configure(conf)
+        super

+        a1, a2, a3, a4 = @xor_key.split('.')
+        @xor_keys = [a1.to_i, a2.to_i, a3.to_i, a4.to_i]

+        if @xor_keys == [0, 0, 0, 0]
+          raise ConfigError, "'xor_key' must be IPv4 address"
+        end
       end
-    end

+      def anonymize_raw(raw)
+        m = /\A(\d+)\.(\d+)\.(\d+)\.(\d+)/.match(raw)
+        return nil unless m

+        k1, k2, k3, k4 = @xor_keys

+        o1 = m[1].to_i ^ k1
+        o2 = m[2].to_i ^ k2
+        o3 = m[3].to_i ^ k3
+        o4 = m[4].to_i ^ k4

+        "#{o1}.#{o2}.#{o3}.#{o4}"
+      end
     end
-  end

+    # To support log_level option since Fluentd v0.10.43
+    unless method_defined?(:log)
+      define_method(:log) { $log }
+    end

+    config_param :endpoint, :string, :default => TreasureData::API::NEW_DEFAULT_ENDPOINT
+
+    config_param :connect_timeout, :integer, :default => nil
+    config_param :read_timeout, :integer, :default => nil
+    config_param :send_timeout, :integer, :default => nil
+    config_set_default :buffer_type, 'file'
+    config_set_default :flush_interval, 300
+
+    def initialize
+      require 'fileutils'
+      require 'tempfile'
+      require 'zlib'
+      require 'net/http'
+      require 'json'
+      require 'cgi' # CGI.escape
+      require 'time' # Time#rfc2822
+      require 'digest/md5'
+      require 'stringio'
+      super
+      @tmpdir = nil
+      @apikey = nil
+      @key = nil
+      @key_num_limit = 512 # TODO: Our one-time import has the restriction about the number of record keys.
+      @record_size_limit = 32 * 1024 * 1024 # TODO
+      @table_list = {}
+      @auto_create_table = true
+      @use_ssl = true
+      @empty_gz_data = TreasureData::API.create_empty_gz_data
+    end

+    def configure(conf)
+      super

+      # overwrite default value of buffer_chunk_limit
+      if @buffer.respond_to?(:buffer_chunk_limit=) && !conf['buffer_chunk_limit']
+        @buffer.buffer_chunk_limit = IMPORT_SIZE_LIMIT
+      end

+      if conf.has_key?('tmpdir')
+        @tmpdir = conf['tmpdir']
+        FileUtils.mkdir_p(@tmpdir)
+      end

+      @apikey = conf['apikey']
+      unless @apikey
+        raise ConfigError, "'apikey' parameter is required on tdlog output"
+      end

+      if auto_create_table = conf['auto_create_table']
+        if auto_create_table.empty?
+          @auto_create_table = true
+        else
+          @auto_create_table = Config.bool_value(auto_create_table)
+          if @auto_create_table == nil
+            raise ConfigError, "'true' or 'false' is required for auto_create_table option on tdlog output"
+          end
         end
       end
-    end

+      if use_ssl = conf['use_ssl']
+        if use_ssl.empty?
+          @use_ssl = true
+        else
+          @use_ssl = Config.bool_value(use_ssl)
+          if @use_ssl == nil
+            raise ConfigError, "'true' or 'false' is required for use_ssl option on tdlog output"
+          end
         end
       end
-    end

-    end
-    @anonymizes = {}
-    conf.elements.select {|e|
-      e.name == 'anonymize'
-    }.each {|e|
-      key = e['key']
-      method = e['method']
-      case method
-      when 'md5'
-        scr = MD5Anonymizer.new
-      when 'ip_xor'
-        scr = IPXORAnonymizer.new
-      else
-        raise ConfigError, "Unknown anonymize method: #{method}"
+      database = conf['database']
+      table = conf['table']
+      if database && table
+        validate_database_and_table_name(database, table, conf)
+        @key = "#{database}.#{table}"
       end

+      @anonymizes = {}
+      conf.elements.select { |e|
+        e.name == 'anonymize'
+      }.each { |e|
+        key = e['key']
+        method = e['method']
+
+        case method
+        when 'md5'
+          scr = MD5Anonymizer.new
+        when 'ip_xor'
+          scr = IPXORAnonymizer.new
+        else
+          raise ConfigError, "Unknown anonymize method: #{method}"
+        end

-    }
-    @anonymizes = nil if @anonymizes.empty?
+        scr.configure(e)

+        @anonymizes[key] = scr
+      }
+      @anonymizes = nil if @anonymizes.empty?

+      @http_proxy = conf['http_proxy']
+      @user_agent = "fluent-plugin-td: 0.10.21" # TODO: automatic increment version
+    end

-      :connect_timeout => @connect_timeout, :read_timeout => @read_timeout, :send_timeout => @send_timeout
-    }
-    @client = TreasureData::Client.new(@apikey, client_opts)
+    def start
+      super

+      client_opts = {
+        :ssl => @use_ssl, :http_proxy => @http_proxy, :user_agent => @user_agent, :endpoint => @endpoint,
+        :connect_timeout => @connect_timeout, :read_timeout => @read_timeout, :send_timeout => @send_timeout
+      }
+      @client = TreasureData::Client.new(@apikey, client_opts)
+
+      if @key
+        if @auto_create_table
+          database, table = @key.split('.',2)
+          ensure_database_and_table(database, table)
+        else
+          check_table_exists(@key)
+        end
       end
     end
-  end

+    def emit(tag, es, chain)
+      if @key
+        key = @key
+      else
+        database, table = tag.split('.')[-2,2]
+        database = TreasureData::API.normalize_database_name(database)
+        table = TreasureData::API.normalize_table_name(table)
+        key = "#{database}.#{table}"
+      end

+      unless @auto_create_table
+        check_table_exists(key)
+      end

+      super(tag, es, chain, key)
+    end

+    def format_stream(tag, es)
+      out = ''
+      off = out.bytesize
+      es.each { |time, record|
+        begin
+          if @anonymizes
+            @anonymizes.each_pair { |key, scr|
+              if value = record[key]
+                record[key] = scr.anonymize(value)
+              end
+            }
+          end
+
+          record['time'] = time
+
+          if record.size > @key_num_limit
+            raise "Too many number of keys (#{record.size} keys)" # TODO include summary of the record
+          end
+        rescue => e
+          # TODO (a) Remove the transaction mechanism of fluentd
+          # or (b) keep transaction boundaries in in/out_forward.
+          # This code disables the transaction mechanism (a).
+          log.error "#{e}: #{summarize_record(record)}"
+          log.error_backtrace e.backtrace
+          next
         end

+        begin
+          record.to_msgpack(out)
+        rescue RangeError
+          TreasureData::API.normalized_msgpack(record, out)
         end

-        rescue RangeError
-          TreasureData::API.normalized_msgpack(record, out)
-        end
+        noff = out.bytesize
+        sz = noff - off
+        if sz > @record_size_limit
+          # TODO don't raise error
+          #raise "Size of a record too large (#{sz} bytes)" # TODO include summary of the record
+          log.warn "Size of a record too large (#{sz} bytes): #{summarize_record(record)}"
+        end
+        off = noff
+      }
+      out
+    end

-      if
+    def summarize_record(record)
+      json = Yajl.dump(record)
+      if json.size > 100
+        json[0..97] + "..."
+      else
+        json
       end
-      off = noff
-    }
-    out
-  end
-
-  def summarize_record(record)
-    json = record.to_json
-    if json.size > 100
-      json[0..97]+"..."
-    else
-      json
     end
-  end

+    def write(chunk)
+      unique_id = chunk.unique_id
+      database, table = chunk.key.split('.', 2)

+      FileUtils.mkdir_p(@tmpdir) unless @tmpdir.nil?
+      f = Tempfile.new("tdlog-", @tmpdir)
+      w = Zlib::GzipWriter.new(f)

+      chunk.write_to(w)
+      w.finish
+      w = nil

-    ensure
-      w.close if w
-      f.close if f
-    end
+      size = f.pos
+      f.pos = 0
+      upload(database, table, f, size, unique_id)

+    ensure
+      w.close if w
+      f.close if f
+    end

+    def upload(database, table, io, size, unique_id)
+      unique_str = unique_id.unpack('C*').map { |x| "%02x" % x }.join
+      log.trace { "uploading logs to Treasure Data database=#{database} table=#{table} (#{size}bytes)" }

-    begin
       begin
+        begin
+          start = Time.now
+          @client.import(database, table, "msgpack.gz", io, size, unique_str)
+        rescue TreasureData::NotFoundError => e
+          unless @auto_create_table
+            raise e
+          end
+          ensure_database_and_table(database, table)
+          io.pos = 0
+          retry
         end
+      rescue => e
+        elapsed = Time.now - start
+        ne = RuntimeError.new("Failed to upload to Treasure Data '#{database}.#{table}' table: #{$!} (#{size} bytes; #{elapsed} seconds)")
+        ne.set_backtrace(e.backtrace)
+        raise ne
       end
-    rescue => e
-      elapsed = Time.now - start
-      ne = RuntimeError.new("Failed to upload to Treasure Data '#{database}.#{table}' table: #{$!} (#{size} bytes; #{elapsed} seconds)")
-      ne.set_backtrace(e.backtrace)
-      raise ne
     end
-  end

+    def check_table_exists(key)
+      unless @table_list.has_key?(key)
+        database, table = key.split('.', 2)
+        log.debug "checking whether table '#{database}.#{table}' exists on Treasure Data"
+        io = StringIO.new(@empty_gz_data)
+        begin
+          @client.import(database, table, "msgpack.gz", io, io.size)
+          @table_list[key] = true
+        rescue TreasureData::NotFoundError
+          raise "Table #{key.inspect} does not exist on Treasure Data. Use 'td table:create #{database} #{table}' to create it."
+        rescue => e
+          log.warn "failed to check existence of '#{database}.#{table}' table on Treasure Data", :error => e.to_s
+          log.debug_backtrace e.backtrace
+        end
       end
     end
-  end

+    def validate_database_and_table_name(database, table, conf)
+      begin
+        TreasureData::API.validate_database_name(database)
+      rescue => e
+        raise ConfigError, "Invalid database name #{database.inspect}: #{e}: #{conf}"
+      end
+      begin
+        TreasureData::API.validate_table_name(table)
+      rescue => e
+        raise ConfigError, "Invalid table name #{table.inspect}: #{e}: #{conf}"
+      end
     end
-  end

+    def ensure_database_and_table(database, table)
+      log.info "Creating table #{database}.#{table} on TreasureData"
+      begin
+        @client.create_log_table(database, table)
+      rescue TreasureData::NotFoundError
+        @client.create_database(database)
+        @client.create_log_table(database, table)
+      rescue TreasureData::AlreadyExistsError
+      end
     end
   end
 end
-end
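The ip_xor anonymizer in the rewritten out_tdlog.rb masks each IPv4 octet by XOR-ing it with the corresponding octet of the configured xor_key, so applying the same key twice restores the original address. The snippet below is a standalone, illustrative sketch of that logic; the xor_ip helper, key, and sample address are made up for demonstration and are not part of the plugin.

# Illustrative sketch of the per-octet XOR used by IPXORAnonymizer#anonymize_raw.
def xor_ip(raw, xor_keys)
  m = /\A(\d+)\.(\d+)\.(\d+)\.(\d+)/.match(raw)
  return nil unless m
  (1..4).map { |i| m[i].to_i ^ xor_keys[i - 1] }.join('.')
end

key = [10, 20, 30, 40]                # stands in for a parsed xor_key such as "10.20.30.40"
masked = xor_ip("192.168.0.1", key)   # => "202.188.30.41"
xor_ip(masked, key)                   # => "192.168.0.1" (the same key reverses the mapping)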
data/test/plugin/test_out_tditem.rb
CHANGED
@@ -57,7 +57,6 @@ class TreasureDataItemOutputTest < Test::Unit::TestCase

   def test_emit
     d = create_driver
-
     time, records = stub_seed_values
     stub_td_import_request(stub_request_body(records), d.instance.database, d.instance.table)

@@ -69,10 +68,9 @@ class TreasureDataItemOutputTest < Test::Unit::TestCase
     assert_equal('TD1 testkey', @auth_header)
   end

-  def
+  def test_emit_with_endpoint
     d = create_driver(DEFAULT_CONFIG + "endpoint foo.bar.baz")
     opts = {:endpoint => 'foo.bar.baz'}
-
     time, records = stub_seed_values
     stub_td_import_request(stub_request_body(records), d.instance.database, d.instance.table, opts)

@@ -83,4 +81,19 @@ class TreasureDataItemOutputTest < Test::Unit::TestCase

     assert_equal('TD1 testkey', @auth_header)
   end
+
+  def test_emit_with_too_many_keys
+    d = create_driver(DEFAULT_CONFIG + "endpoint foo.bar.baz")
+    opts = {:endpoint => 'foo.bar.baz'}
+    time, _ = stub_seed_values
+    stub_td_import_request(stub_request_body([]), d.instance.database, d.instance.table, opts)
+
+    d.emit(create_too_many_keys_record, time)
+    d.run
+
+    assert_equal 0, d.emits.size
+    assert d.instance.log.logs.select{ |line|
+      line =~ / \[error\]: Too many number of keys/
+    }.size == 1, "too many keys error is not logged"
+  end
 end
data/test/plugin/test_out_tdlog.rb
CHANGED
@@ -43,7 +43,6 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase

   def test_emit
     d = create_driver
-
     time, records = stub_seed_values
     database, table = d.instance.instance_variable_get(:@key).split(".", 2)
     stub_td_table_create_request(database, table)
@@ -71,7 +70,24 @@ class TreasureDataLogOutputTest < Test::Unit::TestCase
     d.run
   end

-
+  def test_emit_with_too_many_keys
+    d = create_driver(DEFAULT_CONFIG + "endpoint foo.bar.baz")
+    opts = {:endpoint => 'foo.bar.baz'}
+    time, records = stub_seed_values
+    database, table = d.instance.instance_variable_get(:@key).split(".", 2)
+    stub_td_table_create_request(database, table, opts)
+    stub_td_import_request(stub_request_body([], time), database, table, opts)
+
+    d.emit(create_too_many_keys_record, time)
+    d.run
+
+    assert_equal 0, d.emits.size
+    assert d.instance.log.logs.select{ |line|
+      line =~ / \[error\]: Too many number of keys/
+    }.size == 1, "too many keys error is not logged"
+  end
+
+  # TODO: add normalized_msgpack / tag split test

   ## TODO invalid names are normalized
   # def test_invalid_name
data/test/test_helper.rb
CHANGED
@@ -12,6 +12,12 @@ def e(s)
 end

 class Test::Unit::TestCase
+  def create_too_many_keys_record
+    record = {}
+    5012.times { |i| record["k#{i}"] = i }
+    record
+  end
+
   def stub_seed_values
     time = Time.parse("2014-01-01 00:00:00 UTC").to_i
     records = [{"a" => 1}, {"a" => 2}]
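The new create_too_many_keys_record helper builds a record with far more keys than the 512-key limit set in TreasureDataLogOutput#initialize, so the tests can confirm that format_stream now logs and skips such a record instead of failing the whole chunk. A minimal, illustrative sketch of that guard in plain Ruby (not the plugin itself):

# Illustrative only: mirrors the per-record key-count check in format_stream.
key_num_limit = 512                      # value set in TreasureDataLogOutput#initialize
record = {}
5012.times { |i| record["k#{i}"] = i }   # same shape as create_too_many_keys_record

if record.size > key_num_limit
  # In the plugin this condition raises, the error is caught, logged via
  # log.error, and only that record is skipped.
  puts "Too many number of keys (#{record.size} keys)"
end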
metadata
CHANGED
@@ -1,78 +1,69 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-td
 version: !ruby/object:Gem::Version
-  version: 0.10.20
-  prerelease:
+  version: 0.10.21
 platform: ruby
 authors:
 - Treasure Data, Inc.
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-
+date: 2014-07-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
         version: 0.10.27
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
      - !ruby/object:Gem::Version
        version: 0.10.27
 - !ruby/object:Gem::Dependency
   name: td-client
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
         version: 0.8.61
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
         version: 0.8.61
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - ">="
       - !ruby/object:Gem::Version
         version: 0.9.2
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - ">="
       - !ruby/object:Gem::Version
         version: 0.9.2
 - !ruby/object:Gem::Dependency
   name: webmock
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '1.16'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - - ~>
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '1.16'
 description: Treasure Data Cloud Data Service plugin for Fluentd
@@ -81,12 +72,11 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- .gitignore
-- .travis.yml
+- ".gitignore"
+- ".travis.yml"
 - AUTHORS
 - ChangeLog
 - Gemfile
-- Gemfile.fluentd.lt.0.10.43
 - README.rdoc
 - Rakefile
 - VERSION
@@ -100,33 +90,26 @@ files:
 - test/test_helper.rb
 homepage: http://www.treasuredata.com/
 licenses: []
+metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - ">="
   - !ruby/object:Gem::Version
     version: '0'
-      segments:
-      - 0
-      hash: 4219119697860923650
 required_rubygems_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - ">="
   - !ruby/object:Gem::Version
     version: '0'
-      segments:
-      - 0
-      hash: 4219119697860923650
 requirements: []
 rubyforge_project:
-rubygems_version:
+rubygems_version: 2.2.2
 signing_key:
-specification_version:
+specification_version: 4
 summary: Treasure Data Cloud Data Service plugin for Fluentd
 test_files:
 - test/plugin/test_out_tditem.rb
data/Gemfile.fluentd.lt.0.10.43
DELETED