fluentd 0.12.0.pre.1 → 0.12.0.pre.2
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/.gitignore +1 -1
- data/.travis.yml +1 -0
- data/ChangeLog +21 -0
- data/README.md +10 -2
- data/Rakefile +4 -13
- data/example/v1_literal_example.conf +36 -0
- data/fluentd.gemspec +4 -1
- data/lib/fluent/buffer.rb +73 -46
- data/lib/fluent/command/fluentd.rb +7 -2
- data/lib/fluent/config/basic_parser.rb +5 -0
- data/lib/fluent/config/element.rb +2 -5
- data/lib/fluent/config/literal_parser.rb +26 -7
- data/lib/fluent/config/section.rb +2 -0
- data/lib/fluent/config/v1_parser.rb +9 -2
- data/lib/fluent/formatter.rb +2 -1
- data/lib/fluent/mixin.rb +22 -7
- data/lib/fluent/output.rb +17 -8
- data/lib/fluent/parser.rb +14 -3
- data/lib/fluent/plugin/buf_file.rb +30 -15
- data/lib/fluent/plugin/filter_grep.rb +69 -0
- data/lib/fluent/plugin/filter_record_transformer.rb +183 -0
- data/lib/fluent/plugin/in_exec.rb +6 -0
- data/lib/fluent/plugin/in_forward.rb +34 -4
- data/lib/fluent/plugin/in_http.rb +1 -1
- data/lib/fluent/plugin/out_exec.rb +1 -1
- data/lib/fluent/plugin/out_exec_filter.rb +8 -1
- data/lib/fluent/plugin/out_forward.rb +82 -4
- data/lib/fluent/supervisor.rb +1 -1
- data/lib/fluent/timezone.rb +131 -0
- data/lib/fluent/version.rb +1 -1
- data/test/config/assertions.rb +42 -0
- data/test/config/test_config_parser.rb +385 -0
- data/test/config/test_configurable.rb +530 -0
- data/test/config/test_configure_proxy.rb +99 -0
- data/test/config/test_dsl.rb +237 -0
- data/test/config/test_literal_parser.rb +293 -0
- data/test/config/test_section.rb +112 -0
- data/test/config/test_system_config.rb +49 -0
- data/test/helper.rb +25 -0
- data/test/plugin/test_buf_file.rb +604 -0
- data/test/plugin/test_buf_memory.rb +204 -0
- data/test/plugin/test_filter_grep.rb +124 -0
- data/test/plugin/test_filter_record_transformer.rb +251 -0
- data/test/plugin/test_in_exec.rb +1 -0
- data/test/plugin/test_in_forward.rb +205 -2
- data/test/plugin/test_in_gc_stat.rb +1 -0
- data/test/plugin/test_in_http.rb +58 -2
- data/test/plugin/test_in_object_space.rb +1 -0
- data/test/plugin/test_in_status.rb +1 -0
- data/test/plugin/test_in_stream.rb +1 -1
- data/test/plugin/test_in_syslog.rb +1 -1
- data/test/plugin/test_in_tail.rb +1 -0
- data/test/plugin/test_in_tcp.rb +1 -1
- data/test/plugin/test_in_udp.rb +1 -1
- data/test/plugin/test_out_copy.rb +1 -0
- data/test/plugin/test_out_exec.rb +1 -0
- data/test/plugin/test_out_exec_filter.rb +1 -0
- data/test/plugin/test_out_file.rb +36 -0
- data/test/plugin/test_out_forward.rb +279 -8
- data/test/plugin/test_out_roundrobin.rb +1 -0
- data/test/plugin/test_out_stdout.rb +1 -0
- data/test/plugin/test_out_stream.rb +1 -1
- data/test/test_buffer.rb +530 -0
- data/test/test_config.rb +1 -1
- data/test/test_configdsl.rb +1 -1
- data/test/test_formatter.rb +223 -0
- data/test/test_match.rb +1 -2
- data/test/test_mixin.rb +74 -2
- data/test/test_parser.rb +7 -1
- metadata +88 -35
- data/lib/fluent/plugin/buf_zfile.rb +0 -75
- data/spec/config/config_parser_spec.rb +0 -314
- data/spec/config/configurable_spec.rb +0 -524
- data/spec/config/configure_proxy_spec.rb +0 -96
- data/spec/config/dsl_spec.rb +0 -239
- data/spec/config/helper.rb +0 -49
- data/spec/config/literal_parser_spec.rb +0 -222
- data/spec/config/section_spec.rb +0 -97
- data/spec/config/system_config_spec.rb +0 -49
- data/spec/spec_helper.rb +0 -60
data/test/plugin/test_in_exec.rb
CHANGED
data/test/plugin/test_in_forward.rb
CHANGED
@@ -1,9 +1,11 @@
-require 'fluent/test'
 require 'helper'
+require 'fluent/test'
+require 'base64'

 class ForwardInputTest < Test::Unit::TestCase
   def setup
     Fluent::Test.setup
+    @responses = [] # for testing responses after sending data
   end

   PORT = unused_port
@@ -198,13 +200,214 @@ class ForwardInputTest < Test::Unit::TestCase
     }.size == 1, "large chunk warning is not logged"
   end

-  def
+  def test_respond_to_message_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag2", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    expected_acks = []
+
+    d.run do
+      events.each {|tag,time,record|
+        op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
+        expected_acks << op['chunk']
+        send_data [tag, time, record, op].to_msgpack, true
+      }
+    end
+
+    assert_equal events, d.emits
+    assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
+  end
+
+  # FIX: response is not pushed into @responses because IO.select has been blocked until InputForward shutdowns
+  def test_respond_to_forward_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag1", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    expected_acks = []
+
+    d.run do
+      entries = []
+      events.each {|tag,time,record|
+        entries << [time, record]
+      }
+      op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
+      expected_acks << op['chunk']
+      send_data ["tag1", entries, op].to_msgpack, true
+    end
+
+    assert_equal events, d.emits
+    assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
+  end
+
+  def test_respond_to_packed_forward_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag1", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    expected_acks = []
+
+    d.run do
+      entries = ''
+      events.each {|tag,time,record|
+        [time, record].to_msgpack(entries)
+      }
+      op = { 'chunk' => Base64.encode64(entries.object_id.to_s) }
+      expected_acks << op['chunk']
+      send_data ["tag1", entries, op].to_msgpack, true
+    end
+
+    assert_equal events, d.emits
+    assert_equal expected_acks, @responses.map { |res| MessagePack.unpack(res)['ack'] }
+  end
+
+  def test_respond_to_message_json_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag2", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    expected_acks = []
+
+    d.run do
+      events.each {|tag,time,record|
+        op = { 'chunk' => Base64.encode64(record.object_id.to_s) }
+        expected_acks << op['chunk']
+        send_data [tag, time, record, op].to_json, true
+      }
+    end
+
+    assert_equal events, d.emits
+    assert_equal expected_acks, @responses.map { |res| JSON.parse(res)['ack'] }
+
+  end
+
+  def test_not_respond_to_message_not_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag2", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    d.run do
+      events.each {|tag,time,record|
+        send_data [tag, time, record].to_msgpack, true
+      }
+    end
+
+    assert_equal events, d.emits
+    assert_equal [nil, nil], @responses
+  end
+
+  def test_not_respond_to_forward_not_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag1", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    d.run do
+      entries = []
+      events.each {|tag,time,record|
+        entries << [time, record]
+      }
+      send_data ["tag1", entries].to_msgpack, true
+    end
+
+    assert_equal events, d.emits
+    assert_equal [nil], @responses
+  end
+
+  def test_not_respond_to_packed_forward_not_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag1", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    d.run do
+      entries = ''
+      events.each {|tag,time,record|
+        [time, record].to_msgpack(entries)
+      }
+      send_data ["tag1", entries].to_msgpack, true
+    end
+
+    assert_equal events, d.emits
+    assert_equal [nil], @responses
+  end
+
+  def test_not_respond_to_message_json_not_requiring_ack
+    d = create_driver
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    events = [
+      ["tag1", time, {"a"=>1}],
+      ["tag2", time, {"a"=>2}]
+    ]
+    d.expected_emits_length = events.length
+
+    d.run do
+      events.each {|tag,time,record|
+        send_data [tag, time, record].to_json, true
+      }
+    end
+
+    assert_equal events, d.emits
+    assert_equal [nil, nil], @responses
+  end
+
+  def send_data(data, try_to_receive_response=false, response_timeout=1)
     io = connect
     begin
       io.write data
+      if try_to_receive_response
+        if IO.select([io], nil, nil, response_timeout)
+          res = io.recv(1024)
+        end
+        # timeout means no response, so push nil to @responses
+      end
     ensure
       io.close
     end
+    @responses << res if try_to_receive_response
   end

   # TODO heartbeat
data/test/plugin/test_in_http.rb
CHANGED
@@ -1,5 +1,5 @@
-require 'fluent/test'
 require 'helper'
+require 'fluent/test'
 require 'net/http'

 class HttpInputTest < Test::Unit::TestCase
@@ -157,7 +157,7 @@ class HttpInputTest < Test::Unit::TestCase

   def test_with_regexp
     d = create_driver(CONFIG + %[
-      format /^(?<field_1
+      format /^(?<field_1>\\d+):(?<field_2>\\w+)$/
       types field_1:integer
     ])

@@ -199,6 +199,62 @@ class HttpInputTest < Test::Unit::TestCase
     end
   end

+  def test_if_content_type_is_initialized_properly
+    # This test is to check if Fluent::HttpInput::Handler's @content_type is initialized properly.
+    # Especially when in Keep-Alive and the second request has no 'Content-Type'.
+    #
+    # Actually, in the current implementation of in_http, we can't test it directly.
+    # So we replace Fluent::HttpInput::Handler temporally with the extended Handler
+    # in order to collect @content_type(s) per request.
+    # Finally, we check those collected @content_type(s).
+
+    # Save the original Handler
+    orig_handler = Fluent::HttpInput::Handler
+
+    begin
+      # Create the extended Handler which can store @content_type per request
+      ext_handler = Class.new(Fluent::HttpInput::Handler) do
+        @@content_types = []
+
+        def self.content_types
+          @@content_types
+        end
+
+        def on_message_complete
+          @@content_types << @content_type
+          super
+        end
+      end
+
+      # Replace the original Handler temporally with the extended one
+      Fluent::HttpInput.module_eval do
+        remove_const(:Handler) if const_defined?(:Handler)
+        const_set(:Handler, ext_handler)
+      end
+
+      d = create_driver
+
+      d.run do
+        # Send two requests the second one has no Content-Type in Keep-Alive
+        Net::HTTP.start("127.0.0.1", PORT) do |http|
+          req = Net::HTTP::Post.new("/foodb/bartbl", {"connection" => "keepalive", "content-type" => "application/json"})
+          res = http.request(req)
+
+          req = Net::HTTP::Get.new("/foodb/bartbl", {"connection" => "keepalive"})
+          res = http.request(req)
+        end
+
+        assert_equal(['application/json', ''], ext_handler.content_types)
+      end
+    ensure
+      # Revert the original Handler
+      Fluent::HttpInput.module_eval do
+        remove_const(:Handler) if const_defined?(:Handler)
+        const_set(:Handler, orig_handler)
+      end
+    end
+  end
+
   def post(path, params, header = {})
     http = Net::HTTP.new("127.0.0.1", PORT)
     req = Net::HTTP::Post.new(path, header)
data/test/plugin/test_in_tail.rb
CHANGED
data/test/plugin/test_in_tcp.rb
CHANGED
data/test/plugin/test_in_udp.rb
CHANGED
data/test/plugin/test_out_file.rb
CHANGED
@@ -1,3 +1,4 @@
+require 'helper'
 require 'fluent/test'
 require 'fileutils'
 require 'time'
@@ -63,6 +64,41 @@ class FileOutputTest < Test::Unit::TestCase
     d.run
   end

+  def test_timezone_1
+    d = create_driver %[
+      path #{TMP_DIR}/out_file_test
+      timezone Asia/Taipei
+    ]
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    d.emit({"a"=>1}, time)
+    d.expect_format %[2011-01-02T21:14:15+08:00\ttest\t{"a":1}\n]
+    d.run
+  end
+
+  def test_timezone_2
+    d = create_driver %[
+      path #{TMP_DIR}/out_file_test
+      timezone -03:30
+    ]
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    d.emit({"a"=>1}, time)
+    d.expect_format %[2011-01-02T09:44:15-03:30\ttest\t{"a":1}\n]
+    d.run
+  end
+
+  def test_timezone_invalid
+    assert_raise(Fluent::ConfigError) do
+      create_driver %[
+        path #{TMP_DIR}/out_file_test
+        timezone Invalid/Invalid
+      ]
+    end
+  end
+
   def check_gzipped_result(path, expect)
     # Zlib::GzipReader has a bug of concatenated file: https://bugs.ruby-lang.org/issues/9790
     # Following code from https://www.ruby-forum.com/topic/971591#979520
data/test/plugin/test_out_forward.rb
CHANGED
@@ -1,26 +1,45 @@
-require 'fluent/test'
 require 'helper'
+require 'fluent/test'

 class ForwardOutputTest < Test::Unit::TestCase
   def setup
     Fluent::Test.setup
   end

+  TARGET_HOST = '127.0.0.1'
+  TARGET_PORT = 13999
   CONFIG = %[
     send_timeout 51
     <server>
       name test
-      host
-      port
+      host #{TARGET_HOST}
+      port #{TARGET_PORT}
     </server>
   ]

+  TARGET_CONFIG = %[
+    port #{TARGET_PORT}
+    bind #{TARGET_HOST}
+  ]
+
   def create_driver(conf=CONFIG)
-    Fluent::Test::OutputTestDriver.new(Fluent::ForwardOutput)
-
-
+    Fluent::Test::OutputTestDriver.new(Fluent::ForwardOutput) {
+      attr_reader :responses, :exceptions
+
+      def initialize
+        super
+        @responses = []
+        @exceptions = []
       end
-
+
+      def send_data(node, tag, chunk)
+        # Original #send_data returns nil when it does not wait for responses or when on response timeout.
+        @responses << super(node, tag, chunk)
+      rescue => e
+        @exceptions << e
+        raise e
+      end
+    }.configure(conf)
   end

   def test_configure
@@ -52,5 +71,257 @@ class ForwardOutputTest < Test::Unit::TestCase
     node.tick
     assert_equal node.available, false
   end
-end

+  def test_wait_response_timeout_config
+    d = create_driver(CONFIG)
+    assert_equal false, d.instance.extend_internal_protocol
+    assert_equal false, d.instance.require_ack_response
+    assert_equal 190, d.instance.ack_response_timeout
+
+    d = create_driver(CONFIG + %[
+      require_ack_response true
+      ack_response_timeout 2s
+    ])
+    assert d.instance.extend_internal_protocol
+    assert d.instance.require_ack_response
+    assert_equal 2, d.instance.ack_response_timeout
+  end
+
+  def test_send_to_a_node_supporting_responses
+    target_input_driver = create_target_input_driver(true)
+
+    d = create_driver(CONFIG + %[flush_interval 1s])
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    records = [
+      {"a" => 1},
+      {"a" => 2}
+    ]
+    d.register_run_post_condition do
+      d.instance.responses.length == 1
+    end
+
+    target_input_driver.run do
+      d.run do
+        records.each do |record|
+          d.emit record, time
+        end
+      end
+    end
+
+    emits = target_input_driver.emits
+    assert_equal ['test', time, records[0]], emits[0]
+    assert_equal ['test', time, records[1]], emits[1]
+
+    assert_equal [nil], d.instance.responses # not attempt to receive responses, so nil is returned
+    assert_empty d.instance.exceptions
+  end
+
+  def test_send_to_a_node_not_supporting_responses
+    target_input_driver = create_target_input_driver
+
+    d = create_driver(CONFIG + %[flush_interval 1s])
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    records = [
+      {"a" => 1},
+      {"a" => 2}
+    ]
+    d.register_run_post_condition do
+      d.instance.responses.length == 1
+    end
+
+    target_input_driver.run do
+      d.run do
+        records.each do |record|
+          d.emit record, time
+        end
+      end
+    end
+
+    emits = target_input_driver.emits
+    assert_equal ['test', time, records[0]], emits[0]
+    assert_equal ['test', time, records[1]], emits[1]
+
+    assert_equal [nil], d.instance.responses # not attempt to receive responses, so nil is returned
+    assert_empty d.instance.exceptions
+  end
+
+  def test_require_a_node_supporting_responses_to_respond_with_ack
+    target_input_driver = create_target_input_driver(true)
+
+    d = create_driver(CONFIG + %[
+      flush_interval 1s
+      require_ack_response true
+      ack_response_timeout 1s
+    ])
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    records = [
+      {"a" => 1},
+      {"a" => 2}
+    ]
+    d.register_run_post_condition do
+      d.instance.responses.length == 1
+    end
+
+    target_input_driver.run do
+      d.run do
+        records.each do |record|
+          d.emit record, time
+        end
+      end
+    end
+
+    emits = target_input_driver.emits
+    assert_equal ['test', time, records[0]], emits[0]
+    assert_equal ['test', time, records[1]], emits[1]
+
+    assert_equal 1, d.instance.responses.length
+    assert d.instance.responses[0].has_key?('ack')
+    assert_empty d.instance.exceptions
+  end
+
+  def test_require_a_node_not_supporting_responses_to_respond_with_ack
+    target_input_driver = create_target_input_driver
+
+    d = create_driver(CONFIG + %[
+      flush_interval 1s
+      require_ack_response true
+      ack_response_timeout 1s
+    ])
+
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+
+    records = [
+      {"a" => 1},
+      {"a" => 2}
+    ]
+    d.register_run_post_condition do
+      d.instance.responses.length == 1
+    end
+
+    target_input_driver.run do
+      d.run do
+        records.each do |record|
+          d.emit record, time
+        end
+      end
+    end
+
+    emits = target_input_driver.emits
+    assert_equal ['test', time, records[0]], emits[0]
+    assert_equal ['test', time, records[1]], emits[1]
+
+    node = d.instance.nodes.first
+    assert_equal false, node.available # node is regarded as unavailable when timeout
+
+    assert_empty d.instance.responses # send_data() raises exception, so response is missing
+    assert_equal 1, d.instance.exceptions.size
+  end
+
+  def create_target_input_driver(do_respond=false, conf=TARGET_CONFIG)
+    require 'fluent/plugin/in_forward'
+
+    DummyEngineDriver.new(Fluent::ForwardInput) {
+      handler_class = Class.new(Fluent::ForwardInput::Handler) { |klass|
+        attr_reader :chunk_counter # for checking if received data is successfully deserialized
+
+        def initialize(sock, log, on_message)
+          @sock = sock
+          @log = log
+          @chunk_counter = 0
+          @on_message = on_message
+        end
+
+        if do_respond
+          def write(data)
+            @sock.write data
+          rescue => e
+            @sock.close
+          end
+        else
+          def write(data)
+            # do nothing
+          end
+        end
+
+        def close
+          @sock.close
+        end
+      }
+
+      define_method(:start) do
+        @thread = Thread.new do
+          Socket.tcp_server_loop(@host, @port) do |sock, client_addrinfo|
+            begin
+              handler = handler_class.new(sock, @log, method(:on_message))
+              loop do
+                raw_data = sock.recv(1024)
+                handler.on_read(raw_data)
+                # chunk_counter is reset to zero only after all the data have been received and successfully deserialized.
+                break if handler.chunk_counter == 0
+              end
+              sleep # wait for connection to be closed by client
+            ensure
+              sock.close
+            end
+          end
+        end
+      end
+
+      def shutdown
+        @thread.kill
+        @thread.join
+      end
+    }.configure(conf).inject_router()
+  end
+
+  class DummyEngineDriver < Fluent::Test::TestDriver
+    def initialize(klass, &block)
+      super(klass, &block)
+      @engine = DummyEngineClass.new
+      @klass = klass
+      # To avoid accessing Fluent::Engine, set Engine as a plugin's class constant (Fluent::SomePlugin::Engine).
+      # But this makes it impossible to run tests concurrently by threading in a process.
+      @klass.const_set(:Engine, @engine)
+    end
+
+    def inject_router
+      @instance.router = @engine
+      self
+    end
+
+    def run(&block)
+      super(&block)
+      @klass.class_eval do
+        remove_const(:Engine)
+      end
+    end
+
+    def emits
+      all = []
+      @engine.emit_streams.each {|tag,events|
+        events.each {|time,record|
+          all << [tag, time, record]
+        }
+      }
+      all
+    end
+
+    class DummyEngineClass
+      attr_reader :emit_streams
+
+      def initialize
+        @emit_streams ||= []
+      end
+
+      def emit_stream(tag, es)
+        @emit_streams << [tag, es.to_a]
+      end
+    end
+  end
+end