fluent-plugin-kafka 0.16.1 → 0.17.1
This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- checksums.yaml +4 -4
- data/.github/ISSUE_TEMPLATE/bug_report.yaml +71 -0
- data/.github/ISSUE_TEMPLATE/config.yml +5 -0
- data/.github/ISSUE_TEMPLATE/feature_request.yaml +38 -0
- data/.github/workflows/linux.yml +12 -0
- data/.github/workflows/stale-actions.yml +22 -0
- data/ChangeLog +16 -0
- data/Gemfile +2 -0
- data/README.md +31 -4
- data/ci/prepare-kafka-server.sh +33 -0
- data/fluent-plugin-kafka.gemspec +4 -2
- data/lib/fluent/plugin/in_kafka.rb +5 -4
- data/lib/fluent/plugin/in_kafka_group.rb +5 -5
- data/lib/fluent/plugin/out_kafka.rb +12 -6
- data/lib/fluent/plugin/out_kafka2.rb +25 -8
- data/lib/fluent/plugin/out_kafka_buffered.rb +12 -6
- data/lib/fluent/plugin/out_rdkafka.rb +15 -9
- data/lib/fluent/plugin/out_rdkafka2.rb +26 -8
- data/test/helper.rb +7 -0
- data/test/plugin/test_in_kafka.rb +66 -0
- data/test/plugin/test_in_kafka_group.rb +67 -0
- data/test/plugin/test_kafka_plugin_util.rb +18 -12
- data/test/plugin/test_out_kafka.rb +10 -0
- data/test/plugin/test_out_kafka2.rb +116 -0
- data/test/plugin/test_out_kafka_buffered.rb +68 -0
- data/test/plugin/test_out_rdkafka2.rb +120 -0
- metadata +48 -6
- data/.travis.yml +0 -21
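Functionally, the headline changes in this range are partitioner_hash_function (0.17.0), which lets the producer hash message keys with murmur2, the same function the Java client's default partitioner uses, and exclude_fields (0.17.1), which drops record fields addressed by JSONPath before the record is formatted for Kafka. The sketch below shows how the two options might be combined, written as a %[ ] config string in the style of the test files further down; the broker, topic, and field names are placeholders, not values from this diff:

conf = %[
  @type kafka2
  brokers localhost:9092
  default_topic my-topic
  # 0.17.0: crc32 (the previous behavior and default) or murmur2
  partitioner_hash_function murmur2
  # 0.17.1: JSONPath of a field to strip before formatting
  exclude_fields $.credentials.apikey
  <format>
    @type json
  </format>
]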
data/lib/fluent/plugin/out_rdkafka.rb CHANGED

@@ -48,7 +48,7 @@ Set true to remove partition from data
 DESC
   config_param :exclude_message_key, :bool, :default => false,
                :desc => <<-DESC
-Set true to remove
+Set true to remove message key from data
 DESC
   config_param :exclude_topic_key, :bool, :default => false,
                :desc => <<-DESC
@@ -91,23 +91,29 @@ DESC
   def configure(conf)
     super
     log.instance_eval {
-      def add(level,
-
+      def add(level, message = nil)
+        if message.nil?
+          if block_given?
+            message = yield
+          else
+            return
+          end
+        end
 
         # Follow rdkakfa's log level. See also rdkafka-ruby's bindings.rb: https://github.com/appsignal/rdkafka-ruby/blob/e5c7261e3f2637554a5c12b924be297d7dca1328/lib/rdkafka/bindings.rb#L117
         case level
         when Logger::FATAL
-          self.fatal(
+          self.fatal(message)
         when Logger::ERROR
-          self.error(
+          self.error(message)
         when Logger::WARN
-          self.warn(
+          self.warn(message)
         when Logger::INFO
-          self.info(
+          self.info(message)
         when Logger::DEBUG
-          self.debug(
+          self.debug(message)
         else
-          self.trace(
+          self.trace(message)
         end
       end
     }
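The rewritten shim changes add so the log message can arrive either as a positional argument or lazily through a block; judging by this release's ChangeLog entry about accepting ruby-kafka v1.4 or later, the block form is presumably what newer ruby-kafka versions use. A self-contained sketch of the two call shapes the new signature tolerates (the shim object is a stand-in for the plugin's log, not code from the diff):

require 'logger'

# Stand-in for the plugin's log object; not the plugin's actual class.
shim = Object.new
def shim.add(severity, message = nil)
  if message.nil?
    return unless block_given?
    message = yield              # block form: evaluated only on demand
  end
  puts "severity=#{severity} #{message}"
end

shim.add(Logger::WARN, "message passed positionally")    # old-style call
shim.add(Logger::WARN) { "message supplied via block" }  # now also accepted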
data/lib/fluent/plugin/out_rdkafka2.rb CHANGED

@@ -56,6 +56,8 @@ DESC
                :desc => <<-DESC
 Set true to remove topic key from data
 DESC
+  config_param :exclude_fields, :array, :default => [], value_type: :string,
+               :desc => 'Fields to remove from data where the value is a jsonpath to a record value'
   config_param :headers, :hash, default: {}, symbolize_keys: true, value_type: :string,
                :desc => 'Kafka message headers'
   config_param :headers_from_record, :hash, default: {}, symbolize_keys: true, value_type: :string,
@@ -110,23 +112,29 @@ DESC
   def configure(conf)
     super
     log.instance_eval {
-      def add(level,
-
+      def add(level, message = nil)
+        if message.nil?
+          if block_given?
+            message = yield
+          else
+            return
+          end
+        end
 
         # Follow rdkakfa's log level. See also rdkafka-ruby's bindings.rb: https://github.com/appsignal/rdkafka-ruby/blob/e5c7261e3f2637554a5c12b924be297d7dca1328/lib/rdkafka/bindings.rb#L117
         case level
         when Logger::FATAL
-          self.fatal(
+          self.fatal(message)
         when Logger::ERROR
-          self.error(
+          self.error(message)
         when Logger::WARN
-          self.warn(
+          self.warn(message)
         when Logger::INFO
-          self.info(
+          self.info(message)
         when Logger::DEBUG
-          self.debug(
+          self.debug(message)
         else
-          self.trace(
+          self.trace(message)
         end
       end
     }
@@ -158,6 +166,10 @@ DESC
     @headers_from_record.each do |key, value|
       @headers_from_record_accessors[key] = record_accessor_create(value)
     end
+
+    @exclude_field_accessors = @exclude_fields.map do |field|
+      record_accessor_create(field)
+    end
   end
 
   def build_config
@@ -305,6 +317,12 @@ DESC
             headers[key] = header_accessor.call(record)
           end
 
+          unless @exclude_fields.empty?
+            @exclude_field_accessors.each do |exclude_field_acessor|
+              exclude_field_acessor.delete(record)
+            end
+          end
+
           record_buf = @formatter_proc.call(tag, time, record)
           record_buf_bytes = record_buf.bytesize
           if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
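Together with the two hunks above it, this completes the exclude_fields plumbing: declare the parameter, compile each JSONPath into a record accessor once in configure, then call each accessor's delete on every record just before it is formatted. A stripped-down sketch of the same mechanism as a standalone Fluentd output; the class and its registered name are illustrative, not part of fluent-plugin-kafka:

require 'fluent/plugin/output'

# Illustrative sketch, not fluent-plugin-kafka's class: shows how an
# exclude_fields-style option maps onto Fluentd's record_accessor helper.
class ExcludeFieldsSketch < Fluent::Plugin::Output
  Fluent::Plugin.register_output('exclude_fields_sketch', self)
  helpers :record_accessor

  config_param :exclude_fields, :array, default: [], value_type: :string

  def configure(conf)
    super
    # Compile each JSONPath once; the accessors are reused for every record.
    @exclude_field_accessors = @exclude_fields.map { |f| record_accessor_create(f) }
  end

  def process(tag, es)
    es.each do |time, record|
      # Accessor#delete removes the addressed key (possibly nested) in place.
      @exclude_field_accessors.each { |accessor| accessor.delete(record) }
      log.info "record after exclusion: #{record}"
    end
  end
end

With exclude_fields set to $.foo, a record {'a' => 'b', 'foo' => 'bar'} leaves process as {'a' => 'b'}, which is exactly what the new kafka2/rdkafka2 tests below assert.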
data/test/helper.rb CHANGED

@@ -8,6 +8,7 @@ rescue Bundler::BundlerError => e
   exit e.status_code
 end
 require 'test/unit'
+require 'test/unit/rr'
 
 $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
 $LOAD_PATH.unshift(File.dirname(__FILE__))

@@ -22,6 +23,12 @@ unless ENV.has_key?('VERBOSE')
 end
 
 require 'fluent/plugin/out_kafka'
+require 'fluent/plugin/out_kafka_buffered'
+require 'fluent/plugin/out_kafka2'
+require 'fluent/plugin/in_kafka'
+require 'fluent/plugin/in_kafka_group'
+
+require "fluent/test/driver/output"
 
 class Test::Unit::TestCase
 end
data/test/plugin/test_in_kafka.rb ADDED

@@ -0,0 +1,66 @@
+require 'helper'
+require 'fluent/test/driver/input'
+require 'securerandom'
+
+class KafkaInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+  CONFIG = %[
+    @type kafka
+    brokers localhost:9092
+    format text
+    @label @kafka
+    topics #{TOPIC_NAME}
+  ]
+
+  def create_driver(conf = CONFIG)
+    Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+  end
+
+
+  def test_configure
+    d = create_driver
+    assert_equal TOPIC_NAME, d.instance.topics
+    assert_equal 'text', d.instance.format
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_multi_worker_support
+    d = create_driver
+    assert_false d.instance.multi_workers_ready?
+  end
+
+  class ConsumeTest < self
+    def setup
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+      @producer = @kafka.producer
+    end
+
+    def teardown
+      @kafka.delete_topic(TOPIC_NAME)
+      @kafka.close
+    end
+
+    def test_consume
+      conf = %[
+        @type kafka
+        brokers localhost:9092
+        format text
+        @label @kafka
+        topics #{TOPIC_NAME}
+      ]
+      d = create_driver
+
+      d.run(expect_records: 1, timeout: 10) do
+        @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+        @producer.deliver_messages
+      end
+      expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+      assert_equal expected, d.events[0][2]
+    end
+  end
+end
data/test/plugin/test_in_kafka_group.rb ADDED

@@ -0,0 +1,67 @@
+require 'helper'
+require 'fluent/test/driver/input'
+require 'securerandom'
+
+class KafkaGroupInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+  CONFIG = %[
+    @type kafka
+    brokers localhost:9092
+    consumer_group fluentd
+    format text
+    @label @kafka
+    topics #{TOPIC_NAME}
+  ]
+
+  def create_driver(conf = CONFIG)
+    Fluent::Test::Driver::Input.new(Fluent::KafkaGroupInput).configure(conf)
+  end
+
+
+  def test_configure
+    d = create_driver
+    assert_equal [TOPIC_NAME], d.instance.topics
+    assert_equal 'text', d.instance.format
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_multi_worker_support
+    d = create_driver
+    assert_true d.instance.multi_workers_ready?
+  end
+
+  class ConsumeTest < self
+    def setup
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+      @producer = @kafka.producer
+    end
+
+    def teardown
+      @kafka.delete_topic(TOPIC_NAME)
+      @kafka.close
+    end
+
+    def test_consume
+      conf = %[
+        @type kafka
+        brokers localhost:9092
+        format text
+        @label @kafka
+        topics #{TOPIC_NAME}
+      ]
+      d = create_driver
+
+      d.run(expect_records: 1, timeout: 10) do
+        @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+        @producer.deliver_messages
+      end
+      expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+      assert_equal expected, d.events[0][2]
+    end
+  end
+end
data/test/plugin/test_kafka_plugin_util.rb CHANGED

@@ -1,14 +1,8 @@
 require 'helper'
 require 'fluent/plugin/kafka_plugin_util'
 
-class File
-  def File::read(path)
-    path
-  end
-end
-
 class KafkaPluginUtilTest < Test::Unit::TestCase
-
+
   def self.config_param(name, type, options)
   end
   include Fluent::KafkaPluginUtil::SSLSettings

@@ -20,19 +14,31 @@ class KafkaPluginUtilTest < Test::Unit::TestCase
   end
 
   def test_read_ssl_file_when_nil
-
+    stub(File).read(anything) do |path|
+      path
+    end
+    assert_equal(nil, read_ssl_file(nil))
   end
 
   def test_read_ssl_file_when_empty_string
-
+    stub(File).read(anything) do |path|
+      path
+    end
+    assert_equal(nil, read_ssl_file(""))
   end
 
   def test_read_ssl_file_when_non_empty_path
-
+    stub(File).read(anything) do |path|
+      path
+    end
+    assert_equal("path", read_ssl_file("path"))
  end
 
   def test_read_ssl_file_when_non_empty_array
-
+    stub(File).read(anything) do |path|
+      path
+    end
+    assert_equal(["a","b"], read_ssl_file(["a","b"]))
   end
 
-end
+end
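This rewrite swaps a global monkey patch of File.read, which leaked into every other test in the process, for per-test stubs provided by test-unit-rr (hence the new require 'test/unit/rr' in test/helper.rb above). A minimal, self-contained example of the pattern; the test class and path string are illustrative:

require 'test/unit'
require 'test/unit/rr'

class StubScopeExampleTest < Test::Unit::TestCase
  def test_file_read_stub_lasts_only_for_this_test
    # rr restores the real File.read automatically when the test ends.
    stub(File).read(anything) { |path| path }
    assert_equal("some/cert/path", File.read("some/cert/path"))
  end
end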
data/test/plugin/test_out_kafka.rb CHANGED

@@ -43,6 +43,16 @@ class KafkaOutputTest < Test::Unit::TestCase
     d = create_driver
   end
 
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
   def test_mutli_worker_support
     d = create_driver
     assert_equal true, d.instance.multi_workers_ready?
data/test/plugin/test_out_kafka2.rb ADDED

@@ -0,0 +1,116 @@
+require 'helper'
+require 'fluent/test/helpers'
+require 'fluent/test/driver/input'
+require 'fluent/test/driver/output'
+require 'securerandom'
+
+class Kafka2OutputTest < Test::Unit::TestCase
+  include Fluent::Test::Helpers
+
+  def setup
+    Fluent::Test.setup
+  end
+
+  def base_config
+    config_element('ROOT', '', {"@type" => "kafka2"}, [
+                     config_element('format', "", {"@type" => "json"})
+                   ])
+  end
+
+  def config(default_topic: "kitagawakeiko")
+    base_config + config_element('ROOT', '', {"default_topic" => default_topic,
+                                              "brokers" => "localhost:9092"}, [
+                                 ])
+  end
+
+  def create_driver(conf = config, tag='test')
+    Fluent::Test::Driver::Output.new(Fluent::Kafka2Output).configure(conf)
+  end
+
+  def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(base_config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+    }
+
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal ['localhost:9092'], d.instance.brokers
+  end
+
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(config + config_element('ROOT', '', {"partitioner_hash_function" => hash_type}))
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
+  def test_mutli_worker_support
+    d = create_driver
+    assert_equal true, d.instance.multi_workers_ready?
+  end
+
+  class WriteTest < self
+    TOPIC_NAME = "kafka-output-#{SecureRandom.uuid}"
+
+    INPUT_CONFIG = %[
+      @type kafka
+      brokers localhost:9092
+      format json
+      @label @kafka
+      topics #{TOPIC_NAME}
+    ]
+
+    def create_target_driver(conf = INPUT_CONFIG)
+      Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+    end
+
+    def setup
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+    end
+
+    def teardown
+      @kafka.delete_topic(TOPIC_NAME)
+      @kafka.close
+    end
+
+    def test_write
+      target_driver = create_target_driver
+      expected_message = {"a" => 2}
+      target_driver.run(expect_records: 1, timeout: 5) do
+        sleep 2
+        d = create_driver(config(default_topic: TOPIC_NAME))
+        d.run do
+          d.feed("test", event_time, expected_message)
+        end
+      end
+      actual_messages = target_driver.events.collect { |event| event[2] }
+      assert_equal([expected_message], actual_messages)
+    end
+
+    def test_exclude_fields
+      conf = config(default_topic: TOPIC_NAME) +
+             config_element('ROOT', '', {"exclude_fields" => "$.foo"}, [])
+      target_driver = create_target_driver
+      target_driver.run(expect_records: 1, timeout: 5) do
+        sleep 2
+        d = create_driver(conf)
+        d.run do
+          d.feed('test', event_time, {'a' => 'b', 'foo' => 'bar', 'message' => 'test'})
+        end
+      end
+      actual_messages = target_driver.events.collect { |event| event[2] }
+      assert_equal([{'a' => 'b', 'message' => 'test'}], actual_messages)
+    end
+  end
+end
data/test/plugin/test_out_kafka_buffered.rb ADDED

@@ -0,0 +1,68 @@
+require 'helper'
+require 'fluent/output'
+
+class KafkaBufferedOutputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  BASE_CONFIG = %[
+    type kafka_buffered
+  ]
+
+  CONFIG = BASE_CONFIG + %[
+    default_topic kitagawakeiko
+    brokers localhost:9092
+  ]
+
+  def create_driver(conf = CONFIG, tag='test')
+    Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutputBuffered, tag).configure(conf)
+  end
+
+  def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(BASE_CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG + %[
+        buffer_type memory
+      ])
+    }
+
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_format
+    d = create_driver
+  end
+
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
+  def test_mutli_worker_support
+    d = create_driver
+    assert_equal true, d.instance.multi_workers_ready?
+
+  end
+
+  def test_write
+    d = create_driver
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+    d.emit({"a"=>1}, time)
+    d.emit({"a"=>2}, time)
+  end
+end
data/test/plugin/test_out_rdkafka2.rb ADDED

@@ -0,0 +1,120 @@
+require 'helper'
+require 'fluent/test/helpers'
+require 'fluent/test/driver/input'
+require 'fluent/test/driver/output'
+require 'securerandom'
+
+class Rdkafka2OutputTest < Test::Unit::TestCase
+  include Fluent::Test::Helpers
+
+  def have_rdkafka
+    begin
+      require 'fluent/plugin/out_rdkafka2'
+      true
+    rescue LoadError
+      false
+    end
+  end
+
+  def setup
+    omit_unless(have_rdkafka, "rdkafka isn't installed")
+    Fluent::Test.setup
+  end
+
+  def base_config
+    config_element('ROOT', '', {"@type" => "rdkafka2"}, [
+                     config_element('format', "", {"@type" => "json"})
+                   ])
+  end
+
+  def config(default_topic: "kitagawakeiko")
+    base_config + config_element('ROOT', '', {"default_topic" => default_topic,
+                                              "brokers" => "localhost:9092"}, [
+                                 ])
+  end
+
+  def create_driver(conf = config, tag='test')
+    Fluent::Test::Driver::Output.new(Fluent::Rdkafka2Output).configure(conf)
+  end
+
+  def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(base_config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+    }
+
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_mutli_worker_support
+    d = create_driver
+    assert_equal true, d.instance.multi_workers_ready?
+  end
+
+  class WriteTest < self
+    TOPIC_NAME = "kafka-output-#{SecureRandom.uuid}"
+
+    INPUT_CONFIG = %[
+      @type kafka
+      brokers localhost:9092
+      format json
+      @label @kafka
+      topics #{TOPIC_NAME}
+    ]
+
+    def create_target_driver(conf = INPUT_CONFIG)
+      Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+    end
+
+    def setup
+      @kafka = nil
+      omit_unless(have_rdkafka, "rdkafka isn't installed")
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+    end
+
+    def teardown
+      if @kafka
+        @kafka.delete_topic(TOPIC_NAME)
+        @kafka.close
+      end
+    end
+
+    def test_write
+      target_driver = create_target_driver
+      expected_message = {"a" => 2}
+      target_driver.run(expect_records: 1, timeout: 5) do
+        sleep 2
+        d = create_driver(config(default_topic: TOPIC_NAME))
+        d.run do
+          d.feed("test", event_time, expected_message)
+        end
+      end
+      actual_messages = target_driver.events.collect { |event| event[2] }
+      assert_equal([expected_message], actual_messages)
+    end
+
+    def test_exclude_fields
+      conf = config(default_topic: TOPIC_NAME) +
+             config_element('ROOT', '', {"exclude_fields" => "$.foo"}, [])
+      target_driver = create_target_driver
+      target_driver.run(expect_records: 1, timeout: 5) do
+        sleep 2
+        d = create_driver(conf)
+        d.run do
+          d.feed('test', event_time, {'a' => 'b', 'foo' => 'bar', 'message' => 'test'})
+        end
+      end
+      actual_messages = target_driver.events.collect { |event| event[2] }
+      assert_equal([{'a' => 'b', 'message' => 'test'}], actual_messages)
+    end
+  end
+end