fluent-plugin-kafka-xst 0.19.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. checksums.yaml +7 -0
  2. data/.github/ISSUE_TEMPLATE/bug_report.yaml +72 -0
  3. data/.github/ISSUE_TEMPLATE/config.yml +5 -0
  4. data/.github/ISSUE_TEMPLATE/feature_request.yaml +39 -0
  5. data/.github/dependabot.yml +6 -0
  6. data/.github/workflows/linux.yml +45 -0
  7. data/.github/workflows/stale-actions.yml +24 -0
  8. data/.gitignore +2 -0
  9. data/ChangeLog +344 -0
  10. data/Gemfile +6 -0
  11. data/LICENSE +14 -0
  12. data/README.md +594 -0
  13. data/Rakefile +12 -0
  14. data/ci/prepare-kafka-server.sh +33 -0
  15. data/examples/README.md +3 -0
  16. data/examples/out_kafka2/dynamic_topic_based_on_tag.conf +32 -0
  17. data/examples/out_kafka2/protobuf-formatter.conf +23 -0
  18. data/examples/out_kafka2/record_key.conf +31 -0
  19. data/fluent-plugin-kafka.gemspec +27 -0
  20. data/lib/fluent/plugin/in_kafka.rb +388 -0
  21. data/lib/fluent/plugin/in_kafka_group.rb +394 -0
  22. data/lib/fluent/plugin/in_rdkafka_group.rb +305 -0
  23. data/lib/fluent/plugin/kafka_plugin_util.rb +84 -0
  24. data/lib/fluent/plugin/kafka_producer_ext.rb +308 -0
  25. data/lib/fluent/plugin/out_kafka.rb +268 -0
  26. data/lib/fluent/plugin/out_kafka2.rb +427 -0
  27. data/lib/fluent/plugin/out_kafka_buffered.rb +374 -0
  28. data/lib/fluent/plugin/out_rdkafka.rb +324 -0
  29. data/lib/fluent/plugin/out_rdkafka2.rb +526 -0
  30. data/test/helper.rb +34 -0
  31. data/test/plugin/test_in_kafka.rb +66 -0
  32. data/test/plugin/test_in_kafka_group.rb +69 -0
  33. data/test/plugin/test_kafka_plugin_util.rb +44 -0
  34. data/test/plugin/test_out_kafka.rb +68 -0
  35. data/test/plugin/test_out_kafka2.rb +138 -0
  36. data/test/plugin/test_out_kafka_buffered.rb +68 -0
  37. data/test/plugin/test_out_rdkafka2.rb +182 -0
  38. metadata +214 -0

data/test/plugin/test_kafka_plugin_util.rb
@@ -0,0 +1,44 @@
+ require 'helper'
+ require 'fluent/plugin/kafka_plugin_util'
+
+ class KafkaPluginUtilTest < Test::Unit::TestCase
+
+   def self.config_param(name, type, options)
+   end
+   include Fluent::KafkaPluginUtil::SSLSettings
+
+   def config_param
+   end
+   def setup
+     Fluent::Test.setup
+   end
+
+   def test_read_ssl_file_when_nil
+     stub(File).read(anything) do |path|
+       path
+     end
+     assert_equal(nil, read_ssl_file(nil))
+   end
+
+   def test_read_ssl_file_when_empty_string
+     stub(File).read(anything) do |path|
+       path
+     end
+     assert_equal(nil, read_ssl_file(""))
+   end
+
+   def test_read_ssl_file_when_non_empty_path
+     stub(File).read(anything) do |path|
+       path
+     end
+     assert_equal("path", read_ssl_file("path"))
+   end
+
+   def test_read_ssl_file_when_non_empty_array
+     stub(File).read(anything) do |path|
+       path
+     end
+     assert_equal(["a","b"], read_ssl_file(["a","b"]))
+   end
+
+ end
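
The stubbed File.read above simply echoes back whatever path it receives, so these four cases pin down the contract of read_ssl_file from Fluent::KafkaPluginUtil::SSLSettings: nil and the empty string yield nil, a single path is read as one file, and an array of paths is read element-wise. A minimal sketch of that contract, for orientation only (this is not the shipped implementation, which reads real certificate files from disk):

    # Illustrative sketch of the behaviour asserted by the tests above;
    # not the plugin source.
    def read_ssl_file(path)
      return nil if path.nil? || path == ""

      if path.is_a?(Array)
        path.map { |p| File.read(p) }   # read every listed file
      else
        File.read(path)                 # read the single file
      end
    end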

data/test/plugin/test_out_kafka.rb
@@ -0,0 +1,68 @@
+ require 'helper'
+ require 'fluent/output'
+
+ class KafkaOutputTest < Test::Unit::TestCase
+   def setup
+     Fluent::Test.setup
+   end
+
+   BASE_CONFIG = %[
+     type kafka_buffered
+   ]
+
+   CONFIG = BASE_CONFIG + %[
+     default_topic kitagawakeiko
+     brokers localhost:9092
+   ]
+
+   def create_driver(conf = CONFIG, tag='test')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutput, tag).configure(conf)
+   end
+
+   def test_configure
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(BASE_CONFIG)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(CONFIG)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(CONFIG + %[
+         buffer_type memory
+       ])
+     }
+
+     d = create_driver
+     assert_equal 'kitagawakeiko', d.instance.default_topic
+     assert_equal 'localhost:9092', d.instance.brokers
+   end
+
+   def test_format
+     d = create_driver
+   end
+
+   data("crc32" => "crc32",
+        "murmur2" => "murmur2")
+   def test_partitioner_hash_function(data)
+     hash_type = data
+     d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+     assert_nothing_raised do
+       d.instance.refresh_client
+     end
+   end
+
+   def test_mutli_worker_support
+     d = create_driver
+     assert_equal true, d.instance.multi_workers_ready?
+
+   end
+
+   def test_write
+     d = create_driver
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+   end
+ end
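
The data("crc32" => "crc32", "murmur2" => "murmur2") block paired with def test_partitioner_hash_function(data) above is test-unit's data-driven test form: each labelled datum produces its own test case, and the datum is passed to the method as its argument. A small self-contained illustration of the same pattern (hypothetical test class, not part of this package):

    require 'test/unit'

    # Each entry in data() becomes a separate test case; its value is
    # handed to the test method as `name`.
    class HashFunctionNameTest < Test::Unit::TestCase
      data("crc32"   => "crc32",
           "murmur2" => "murmur2")
      def test_known_hash_function_name(name)
        assert(%w[crc32 murmur2].include?(name))
      end
    end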

data/test/plugin/test_out_kafka2.rb
@@ -0,0 +1,138 @@
+ require 'helper'
+ require 'fluent/test/helpers'
+ require 'fluent/test/driver/input'
+ require 'fluent/test/driver/output'
+ require 'securerandom'
+
+ class Kafka2OutputTest < Test::Unit::TestCase
+   include Fluent::Test::Helpers
+
+   def setup
+     Fluent::Test.setup
+   end
+
+   def base_config
+     config_element('ROOT', '', {"@type" => "kafka2"}, [
+                      config_element('format', "", {"@type" => "json"})
+                    ])
+   end
+
+   def config(default_topic: "kitagawakeiko")
+     base_config + config_element('ROOT', '', {"default_topic" => default_topic,
+                                               "brokers" => "localhost:9092"}, [
+                                  ])
+   end
+
+   def create_driver(conf = config, tag='test')
+     Fluent::Test::Driver::Output.new(Fluent::Kafka2Output).configure(conf)
+   end
+
+   def test_configure
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(base_config)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(config)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+     }
+
+     d = create_driver
+     assert_equal 'kitagawakeiko', d.instance.default_topic
+     assert_equal ['localhost:9092'], d.instance.brokers
+   end
+
+   data("crc32" => "crc32",
+        "murmur2" => "murmur2")
+   def test_partitioner_hash_function(data)
+     hash_type = data
+     d = create_driver(config + config_element('ROOT', '', {"partitioner_hash_function" => hash_type}))
+     assert_nothing_raised do
+       d.instance.refresh_client
+     end
+   end
+
+   def test_mutli_worker_support
+     d = create_driver
+     assert_equal true, d.instance.multi_workers_ready?
+   end
+
+   def test_resolve_seed_brokers
+     d = create_driver(config + config_element('ROOT', '', {"resolve_seed_brokers" => true}))
+     assert_nothing_raised do
+       d.instance.refresh_client
+     end
+   end
+
+   class WriteTest < self
+     TOPIC_NAME = "kafka-output-#{SecureRandom.uuid}"
+
+     INPUT_CONFIG = %[
+       @type kafka
+       brokers localhost:9092
+       format json
+       @label @kafka
+       topics #{TOPIC_NAME}
+     ]
+
+     def create_target_driver(conf = INPUT_CONFIG)
+       Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+     end
+
+     def setup
+       @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+     end
+
+     def teardown
+       @kafka.delete_topic(TOPIC_NAME)
+       @kafka.close
+     end
+
+     def test_write
+       target_driver = create_target_driver
+       expected_message = {"a" => 2}
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(config(default_topic: TOPIC_NAME))
+         d.run do
+           d.feed("test", event_time, expected_message)
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([expected_message], actual_messages)
+     end
+
+     def test_record_key
+       conf = config(default_topic: TOPIC_NAME) +
+              config_element('ROOT', '', {"record_key" => "$.data"}, [])
+       target_driver = create_target_driver
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(conf)
+         d.run do
+           d.feed('test', event_time, {'data' => {'a' => 'b', 'foo' => 'bar', 'message' => 'test'}, 'message_key' => '123456'})
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([{'a' => 'b', 'foo' => 'bar', 'message' => 'test'}], actual_messages)
+     end
+
+     def test_exclude_fields
+       conf = config(default_topic: TOPIC_NAME) +
+              config_element('ROOT', '', {"exclude_fields" => "$.foo"}, [])
+       target_driver = create_target_driver
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(conf)
+         d.run do
+           d.feed('test', event_time, {'a' => 'b', 'foo' => 'bar', 'message' => 'test'})
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([{'a' => 'b', 'message' => 'test'}], actual_messages)
+     end
+   end
+ end
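
For the record_key case above, the output is configured with record_key $.data, so the value the consuming driver reads back is expected to be the sub-hash addressed by that accessor rather than the whole event; the message_key field does not appear in the published value. An illustration of that expectation, using the exact event the test feeds (illustrative only):

    # The event fed by test_record_key and the message value the
    # consuming driver is expected to observe.
    event = { 'data' => { 'a' => 'b', 'foo' => 'bar', 'message' => 'test' },
              'message_key' => '123456' }
    published_value = event['data']
    # => {'a' => 'b', 'foo' => 'bar', 'message' => 'test'}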

data/test/plugin/test_out_kafka_buffered.rb
@@ -0,0 +1,68 @@
+ require 'helper'
+ require 'fluent/output'
+
+ class KafkaBufferedOutputTest < Test::Unit::TestCase
+   def setup
+     Fluent::Test.setup
+   end
+
+   BASE_CONFIG = %[
+     type kafka_buffered
+   ]
+
+   CONFIG = BASE_CONFIG + %[
+     default_topic kitagawakeiko
+     brokers localhost:9092
+   ]
+
+   def create_driver(conf = CONFIG, tag='test')
+     Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutputBuffered, tag).configure(conf)
+   end
+
+   def test_configure
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(BASE_CONFIG)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(CONFIG)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(CONFIG + %[
+         buffer_type memory
+       ])
+     }
+
+     d = create_driver
+     assert_equal 'kitagawakeiko', d.instance.default_topic
+     assert_equal 'localhost:9092', d.instance.brokers
+   end
+
+   def test_format
+     d = create_driver
+   end
+
+   data("crc32" => "crc32",
+        "murmur2" => "murmur2")
+   def test_partitioner_hash_function(data)
+     hash_type = data
+     d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+     assert_nothing_raised do
+       d.instance.refresh_client
+     end
+   end
+
+   def test_mutli_worker_support
+     d = create_driver
+     assert_equal true, d.instance.multi_workers_ready?
+
+   end
+
+   def test_write
+     d = create_driver
+     time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+     d.emit({"a"=>1}, time)
+     d.emit({"a"=>2}, time)
+   end
+ end

data/test/plugin/test_out_rdkafka2.rb
@@ -0,0 +1,182 @@
+ require 'helper'
+ require 'fluent/test/helpers'
+ require 'fluent/test/driver/input'
+ require 'fluent/test/driver/output'
+ require 'securerandom'
+
+ class Rdkafka2OutputTest < Test::Unit::TestCase
+   include Fluent::Test::Helpers
+
+   def have_rdkafka
+     begin
+       require 'fluent/plugin/out_rdkafka2'
+       true
+     rescue LoadError
+       false
+     end
+   end
+
+   def setup
+     omit_unless(have_rdkafka, "rdkafka isn't installed")
+     Fluent::Test.setup
+   end
+
+   def base_config
+     config_element('ROOT', '', {"@type" => "rdkafka2"}, [
+                      config_element('format', "", {"@type" => "json"})
+                    ])
+   end
+
+   def config(default_topic: "kitagawakeiko")
+     base_config + config_element('ROOT', '', {"default_topic" => default_topic,
+                                               "brokers" => "localhost:9092"}, [
+                                  ])
+   end
+
+   def create_driver(conf = config, tag='test')
+     Fluent::Test::Driver::Output.new(Fluent::Rdkafka2Output).configure(conf)
+   end
+
+   def test_configure
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(base_config)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(config)
+     }
+
+     assert_nothing_raised(Fluent::ConfigError) {
+       create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+     }
+
+     d = create_driver
+     assert_equal 'kitagawakeiko', d.instance.default_topic
+     assert_equal 'localhost:9092', d.instance.brokers
+   end
+
+   def test_mutli_worker_support
+     d = create_driver
+     assert_equal true, d.instance.multi_workers_ready?
+   end
+
+   class WriteTest < self
+     TOPIC_NAME = "kafka-output-#{SecureRandom.uuid}"
+
+     INPUT_CONFIG = %[
+       @type kafka
+       brokers localhost:9092
+       format json
+       @label @kafka
+       topics #{TOPIC_NAME}
+     ]
+
+     def create_target_driver(conf = INPUT_CONFIG)
+       Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+     end
+
+     def setup
+       @kafka = nil
+       omit_unless(have_rdkafka, "rdkafka isn't installed")
+       @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+     end
+
+     def teardown
+       if @kafka
+         @kafka.delete_topic(TOPIC_NAME)
+         @kafka.close
+       end
+     end
+
+     def test_write
+       target_driver = create_target_driver
+       expected_message = {"a" => 2}
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(config(default_topic: TOPIC_NAME))
+         d.run do
+           d.feed("test", event_time, expected_message)
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([expected_message], actual_messages)
+     end
+
+     def test_write_with_use_event_time
+       input_config = %[
+         @type kafka
+         brokers localhost:9092
+         format json
+         @label @kafka
+         topics #{TOPIC_NAME}
+         time_source kafka
+       ]
+       target_driver = create_target_driver(input_config)
+       expected_message = {"a" => 2}
+       now = event_time
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(config(default_topic: TOPIC_NAME) + config_element('ROOT', '', {"use_event_time" => true}))
+         d.run do
+           d.feed("test", now, expected_message)
+         end
+       end
+       actual_time = target_driver.events.collect { |event| event[1] }.last
+       assert_in_delta(actual_time, now, 0.001) # expects millseconds precision
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([expected_message], actual_messages)
+     end
+
+     def test_exclude_fields
+       conf = config(default_topic: TOPIC_NAME) +
+              config_element('ROOT', '', {"exclude_fields" => "$.foo"}, [])
+       target_driver = create_target_driver
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(conf)
+         d.run do
+           d.feed('test', event_time, {'a' => 'b', 'foo' => 'bar', 'message' => 'test'})
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([{'a' => 'b', 'message' => 'test'}], actual_messages)
+     end
+
+     def test_max_enqueue_bytes_per_second
+       conf = config(default_topic: TOPIC_NAME) +
+              config_element('ROOT', '', {"max_enqueue_bytes_per_second" => 32 * 3}, [])
+       target_driver = create_target_driver
+       expected_messages = []
+       target_driver.run(expect_records: 9, timeout: 10) do
+         sleep 2
+         d = create_driver(conf)
+         start_time = Fluent::Clock.now
+         d.run do
+           9.times do |i|
+             message = {"message" => "32bytes message: #{i}"}
+             d.feed("test", event_time, message)
+             expected_messages << message
+           end
+         end
+         assert_in_delta(2.0, Fluent::Clock.now - start_time, 0.5)
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal(expected_messages, actual_messages)
+     end
+
+     def test_record_key
+       conf = config(default_topic: TOPIC_NAME) +
+              config_element('ROOT', '', {"record_key" => "$.data"}, [])
+       target_driver = create_target_driver
+       target_driver.run(expect_records: 1, timeout: 5) do
+         sleep 2
+         d = create_driver(conf)
+         d.run do
+           d.feed('test', event_time, {'data' => {'a' => 'b', 'foo' => 'bar', 'message' => 'test'}, 'message_key' => '123456'})
+         end
+       end
+       actual_messages = target_driver.events.collect { |event| event[2] }
+       assert_equal([{'a' => 'b', 'foo' => 'bar', 'message' => 'test'}], actual_messages)
+     end
+   end
+ end
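
A note on the timing assertion in test_max_enqueue_bytes_per_second above: each record renders to roughly 32 bytes of JSON and the configured limit is 32 * 3 = 96 bytes per second, so nine records amount to about three seconds' worth of quota; assuming the limiter grants the first second's allowance immediately, roughly two seconds of waiting remain, which is what assert_in_delta(2.0, ..., 0.5) checks. The back-of-the-envelope arithmetic, under those stated assumptions (not taken from the plugin source):

    # Rough check of the expected enqueue duration under the test's settings.
    bytes_per_record = 32         # {"message":"32bytes message: N"} is ~32 bytes
    records          = 9
    limit_per_sec    = 32 * 3     # max_enqueue_bytes_per_second in the test

    total_bytes   = bytes_per_record * records        # => 288
    quota_seconds = total_bytes.to_f / limit_per_sec  # => 3.0
    expected_wait = quota_seconds - 1                 # ~2.0 s if the first
                                                      # second's quota is free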