fluent-plugin-kafka 0.15.2 → 0.16.3
- checksums.yaml +4 -4
- data/.github/workflows/linux.yml +36 -0
- data/ChangeLog +20 -0
- data/README.md +4 -1
- data/ci/prepare-kafka-server.sh +33 -0
- data/fluent-plugin-kafka.gemspec +2 -1
- data/lib/fluent/plugin/in_kafka.rb +17 -2
- data/lib/fluent/plugin/in_kafka_group.rb +104 -41
- data/lib/fluent/plugin/kafka_plugin_util.rb +1 -1
- data/lib/fluent/plugin/out_kafka.rb +1 -1
- data/lib/fluent/plugin/out_kafka_buffered.rb +1 -1
- data/test/helper.rb +2 -0
- data/test/plugin/test_in_kafka.rb +66 -0
- data/test/plugin/test_in_kafka_group.rb +67 -0
- data/test/plugin/test_kafka_plugin_util.rb +38 -0
- metadata +25 -4
- data/.travis.yml +0 -21
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1cc628766df718a6fff17debcf2cffdcc041419cb96c1c9b6ce1fee532807b58
+  data.tar.gz: 543ccbd91345f45ee75c3e5e84ef5b3414420a0e2557c76a40afc8e308d70735
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7cb27a4fe28ccc0f31e36449046faf8d4d3389be7820f7cfce8f612aad84b3239234663a71b50523cb08a13e145f78fdc86e93cf9cec28d35d82765b96c9dddc
+  data.tar.gz: 98f52a5d84fb348178f9df6ede6592412ddccd43891c95e9d6937622bff8b02bb4cbb3231c7ca7d709a3ae6cdbfb90d3f00074eefe4cc0703379315098424809
data/.github/workflows/linux.yml ADDED
@@ -0,0 +1,36 @@
+name: linux
+on:
+  - push
+  - pull_request
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        ruby: [ '2.4', '2.5', '2.6', '2.7', '3.0' ]
+        os:
+          - ubuntu-latest
+    name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v2
+      - uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: ${{ matrix.ruby }}
+      - name: Install confluent-kafka
+        run: |
+          sudo apt install -V -y gnupg2 wget
+          wget https://packages.confluent.io/deb/6.0/archive.key
+          sudo gpg2 --homedir /tmp --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/confluent-archive-keyring.gpg --import archive.key
+          sudo chmod 644 /usr/share/keyrings/confluent-archive-keyring.gpg
+          sudo sh -c 'echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/confluent-archive-keyring.gpg] https://packages.confluent.io/deb/6.0 stable main" > /etc/apt/sources.list.d/confluent.list'
+          sudo apt update
+          sudo apt install -y confluent-community-2.13 openjdk-11-jre netcat-openbsd
+      - name: unit testing
+        env:
+          CI: true
+        run: |
+          sudo ./ci/prepare-kafka-server.sh
+          gem install bundler rake
+          bundle install --jobs 4 --retry 3
+          bundle exec rake test
data/ChangeLog CHANGED
@@ -1,3 +1,22 @@
+Release 0.16.3 - 2021/05/17
+* in_kafka_group: Fix one more Ruby 3.0 keyword arguments issue
+
+Release 0.16.2 - 2021/05/17
+* in_kafka, in_kafka_group: Support Ruby 3.0 keyword arguments interop
+
+Release 0.16.1 - 2021/04/14
+* out_kafka/out_kafka_buffered: Support Ruby 3.0.0 keyword arguments interop
+* kafka_plugin_util: Treat empty string in read_ssl_file as nil
+
+Release 0.16.0 - 2021/01/25
+
+* input: Add `tag_source` and `record_tag_key` parameters for using record field as tag
+* in_kafka_group: Use NumericParser for floating point
+
+Release 0.15.3 - 2020/12/08
+
+* in_kafka: Fix `record_time_key` parameter not working
+
 Release 0.15.2 - 2020/09/30
 
 * input: Support 3rd party parser
@@ -28,6 +47,7 @@ Release 0.14.0 - 2020/08/07
 Release 0.13.1 - 2020/07/17
 
 * in_kafka_group: Support ssl_verify_hostname parameter
+* in_kafka_group: Support regex based topics
 * out_kafka2/out_rdkafka2: Support topic parameter with placeholders
 
 Release 0.13.0 - 2020/03/09
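Most of the 0.16.x entries above deal with Ruby 3.0's stricter separation of positional and keyword arguments. As a minimal illustration of the breakage (hypothetical code, not taken from the gem): a Hash passed positionally no longer auto-converts into keyword parameters, so option hashes must be splatted explicitly with `**`.

    # Hypothetical sketch of the Ruby 3.0 behavior behind these fixes.
    def fetch(min_bytes: 1, max_wait_time: 5)
      [min_bytes, max_wait_time]
    end

    opts = { min_bytes: 2, max_wait_time: 10 }
    fetch(**opts)  # => [2, 10] on Ruby 2.x and 3.x alike
    # fetch(opts)  # Ruby 2.7: deprecation warning; Ruby 3.0: ArgumentError

This is exactly the shape of the changes below, e.g. `@kafka.consumer(**@consumer_opts)`, `@kafka.fetch_messages(**@fetch_args)`, and `@kafka.producer(**@producer_opts)`.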
data/README.md CHANGED
@@ -1,6 +1,7 @@
 # fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)
 
-[![
+[![GitHub Actions Status](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml/badge.svg)](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml)
+
 
 A fluentd plugin to both consume and produce data for Apache Kafka.
 
@@ -139,6 +140,8 @@ Consume events by kafka consumer group features..
 
 See also [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka options.
 
+`topics` supports regex pattern since v0.13.1. If you want to use regex pattern, use `/pattern/` like `/foo.*/`.
+
 Consuming topic name is used for event tag. So when the target topic name is `app_event`, the tag is `app_event`. If you want to modify tag, use `add_prefix` or `add_suffix` parameter. With `add_prefix kafka`, the tag is `kafka.app_event`.
 
 ### Input plugin (@type 'rdkafka_group', supports kafka consumer groups, uses rdkafka-ruby)
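The regex form documented above plugs straight into a consumer-group source. A sketch (the broker address and group name are placeholders, and `format json` is just an example):

    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group fluentd
      topics /foo.*/
      format json
    </source>

Every topic matching `/foo.*/` is consumed, and each message is tagged with the name of the topic it actually came from.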
data/ci/prepare-kafka-server.sh ADDED
@@ -0,0 +1,33 @@
+#!/bin/sh
+
+export KAFKA_OPTS=-Dzookeeper.4lw.commands.whitelist=ruok
+/usr/bin/zookeeper-server-start /etc/kafka/zookeeper.properties &
+N_POLLING=30
+n=1
+while true ; do
+  sleep 1
+  status=$(echo ruok | nc localhost 2181)
+  if [ "$status" = "imok" ]; then
+    break
+  fi
+  n=$((n + 1))
+  if [ $n -ge $N_POLLING ]; then
+    echo "failed to get response from zookeeper-server"
+    exit 1
+  fi
+done
+/usr/bin/kafka-server-start /etc/kafka/server.properties &
+n=1
+while true ; do
+  sleep 1
+  status=$(/usr/bin/zookeeper-shell localhost:2181 ls /brokers/ids | sed -n 6p)
+  if [ "$status" = "[0]" ]; then
+    break
+  fi
+  n=$((n + 1))
+  if [ $n -ge $N_POLLING ]; then
+    echo "failed to get response from kafka-server"
+    exit 1
+  fi
+done
+/usr/bin/kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
   gem.name          = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version       = '0.15.2'
+  gem.version       = '0.16.3'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -21,4 +21,5 @@ Gem::Specification.new do |gem|
   gem.add_dependency 'ruby-kafka', '>= 1.2.0', '< 2'
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
+  gem.add_development_dependency "webrick"
 end
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -31,6 +31,10 @@ class Fluent::KafkaInput < Fluent::Input
   config_param :add_suffix, :string, :default => nil,
                :desc => "tag suffix"
   config_param :add_offset_in_record, :bool, :default => false
+  config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+               :desc => "Source for the fluentd event tag"
+  config_param :record_tag_key, :string, :default => 'tag',
+               :desc => "Tag field when tag_source is 'record'"
 
   config_param :offset_zookeeper, :string, :default => nil
   config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
@@ -224,6 +228,9 @@ class Fluent::KafkaInput < Fluent::Input
          router,
          @kafka_message_key,
          @time_source,
+         @record_time_key,
+         @tag_source,
+         @record_tag_key,
          opt)
     }
     @topic_watchers.each {|tw|
@@ -248,7 +255,7 @@ class Fluent::KafkaInput < Fluent::Input
   end
 
   class TopicWatcher < Coolio::TimerWatcher
-    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, options={})
+    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, record_time_key, tag_source, record_tag_key, options={})
       @topic_entry = topic_entry
       @kafka = kafka
       @callback = method(:consume)
@@ -260,6 +267,9 @@ class Fluent::KafkaInput < Fluent::Input
       @router = router
       @kafka_message_key = kafka_message_key
       @time_source = time_source
+      @record_time_key = record_time_key
+      @tag_source = tag_source
+      @record_tag_key = record_tag_key
 
       @next_offset = @topic_entry.offset
       if @topic_entry.offset == -1 && offset_manager
@@ -284,7 +294,7 @@ class Fluent::KafkaInput < Fluent::Input
     def consume
       offset = @next_offset
       @fetch_args[:offset] = offset
-      messages = @kafka.fetch_messages(
+      messages = @kafka.fetch_messages(**@fetch_args)
 
       return if messages.size.zero?
 
@@ -296,6 +306,11 @@ class Fluent::KafkaInput < Fluent::Input
       messages.each { |msg|
         begin
           record = @parser.call(msg, @topic_entry)
+          if @tag_source == :record
+            tag = record[@record_tag_key]
+            tag = @add_prefix + "." + tag if @add_prefix
+            tag = tag + "." + @add_suffix if @add_suffix
+          end
           case @time_source
           when :kafka
             record_time = Fluent::EventTime.from_time(msg.create_time)
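Taken together, the new `tag_source` and `record_tag_key` parameters let a field of the record drive routing. A sketch of such a source (the broker address and `format json` are placeholder assumptions):

    <source>
      @type kafka
      brokers localhost:9092
      format json
      topics app_event
      tag_source record
      record_tag_key tag
      add_prefix kafka
    </source>

With this configuration, a message whose record is `{"tag":"app_event", ...}` is emitted with tag `kafka.app_event`, per the prefix/suffix logic added to the consume loop above.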
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -36,6 +36,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
   config_param :get_kafka_client_log, :bool, :default => false
   config_param :time_format, :string, :default => nil,
                :desc => "Time format to be used to parse 'time' field."
+  config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic,
+               :desc => "Source for the fluentd event tag"
+  config_param :record_tag_key, :string, :default => 'tag',
+               :desc => "Tag field when tag_source is 'record'"
   config_param :kafka_message_key, :string, :default => nil,
                :desc => "Set kafka's message key to this field"
   config_param :connect_timeout, :integer, :default => nil,
@@ -138,6 +142,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
         @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
       end
     end
+
+    if @time_source == :record && defined?(Fluent::NumericTimeParser)
+      @float_numeric_parse = Fluent::NumericTimeParser.new(:float)
+    end
   end
 
   def setup_parser(conf)
@@ -209,7 +217,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
   end
 
   def setup_consumer
-    consumer = @kafka.consumer(
+    consumer = @kafka.consumer(**@consumer_opts)
     @topics.each { |topic|
       if m = /^\/(.+)\/$/.match(topic)
         topic_or_regex = Regexp.new(m[1])
@@ -244,49 +252,104 @@ class Fluent::KafkaGroupInput < Fluent::Input
     end
   end
 
-  def
+  def process_batch_with_record_tag(batch)
+    es = {}
+    batch.messages.each { |msg|
       begin
-        @
+        record = @parser_proc.call(msg)
+        tag = record[@record_tag_key]
+        tag = @add_prefix + "." + tag if @add_prefix
+        tag = tag + "." + @add_suffix if @add_suffix
+        es[tag] ||= Fluent::MultiEventStream.new
+        case @time_source
+        when :kafka
+          record_time = Fluent::EventTime.from_time(msg.create_time)
+        when :now
+          record_time = Fluent::Engine.now
+        when :record
+          if @time_format
+            record_time = @time_parser.parse(record[@record_time_key].to_s)
+          else
+            record_time = record[@record_time_key]
+          end
+        else
+          log.fatal "BUG: invalid time_source: #{@time_source}"
+        end
+        if @kafka_message_key
+          record[@kafka_message_key] = msg.key
+        end
+        if @add_headers
+          msg.headers.each_pair { |k, v|
+            record[k] = v
+          }
+        end
+        es[tag].add(record_time, record)
+      rescue => e
+        log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+        log.debug_backtrace
+      end
+    }
+
+    unless es.empty?
+      es.each { |tag,es|
+        emit_events(tag, es)
+      }
+    end
+  end
+
+  def process_batch(batch)
+    es = Fluent::MultiEventStream.new
+    tag = batch.topic
+    tag = @add_prefix + "." + tag if @add_prefix
+    tag = tag + "." + @add_suffix if @add_suffix
+
+    batch.messages.each { |msg|
+      begin
+        record = @parser_proc.call(msg)
+        case @time_source
+        when :kafka
+          record_time = Fluent::EventTime.from_time(msg.create_time)
+        when :now
+          record_time = Fluent::Engine.now
+        when :record
+          record_time = record[@record_time_key]
+
+          if @time_format
+            record_time = @time_parser.parse(record_time.to_s)
+          elsif record_time.is_a?(Float) && @float_numeric_parse
+            record_time = @float_numeric_parse.parse(record_time)
+          end
+        else
+          log.fatal "BUG: invalid time_source: #{@time_source}"
+        end
+        if @kafka_message_key
+          record[@kafka_message_key] = msg.key
+        end
+        if @add_headers
+          msg.headers.each_pair { |k, v|
+            record[k] = v
           }
+        end
+        es.add(record_time, record)
+      rescue => e
+        log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
+        log.debug_backtrace
+      end
+    }
+
+    unless es.empty?
+      emit_events(tag, es)
+    end
+  end
 
+  def run
+    while @consumer
+      begin
+        @consumer.each_batch(**@fetch_opts) { |batch|
+          if @tag_source == :record
+            process_batch_with_record_tag(batch)
+          else
+            process_batch(batch)
           end
         }
       rescue ForShutdown
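The new `Fluent::NumericTimeParser` branch matters when `time_source record` is used without a `time_format` and the record's time field arrives as a float. A sketch of a configuration that hits that path (broker, group, and topic are placeholders, and this assumes the record time field, `record_time_key`, is left at its default):

    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group fluentd
      topics app_event
      format json
      time_source record
    </source>

A record such as `{"time":1621234567.89, ...}` then takes the `elsif record_time.is_a?(Float) && @float_numeric_parse` path in `process_batch` and is converted to a proper `Fluent::EventTime` instead of being passed through raw.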
data/lib/fluent/plugin/out_kafka.rb CHANGED
@@ -215,7 +215,7 @@ DESC
     chain.next
 
     # out_kafka is mainly for testing so don't need the performance unlike out_kafka_buffered.
-    producer = @kafka.producer(
+    producer = @kafka.producer(**@producer_opts)
 
     es.each do |time, record|
       if @output_include_time
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -239,7 +239,7 @@ DESC
     @producers_mutex.synchronize {
       producer = @producers[Thread.current.object_id]
       unless producer
-        producer = @kafka.producer(
+        producer = @kafka.producer(**@producer_opts)
         @producers[Thread.current.object_id] = producer
       end
       producer
data/test/helper.rb CHANGED
data/test/plugin/test_in_kafka.rb ADDED
@@ -0,0 +1,66 @@
+require 'helper'
+require 'fluent/test/driver/input'
+require 'securerandom'
+
+class KafkaInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+  CONFIG = %[
+    @type kafka
+    brokers localhost:9092
+    format text
+    @label @kafka
+    topics #{TOPIC_NAME}
+  ]
+
+  def create_driver(conf = CONFIG)
+    Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+  end
+
+
+  def test_configure
+    d = create_driver
+    assert_equal TOPIC_NAME, d.instance.topics
+    assert_equal 'text', d.instance.format
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_multi_worker_support
+    d = create_driver
+    assert_false d.instance.multi_workers_ready?
+  end
+
+  class ConsumeTest < self
+    def setup
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+      @producer = @kafka.producer
+    end
+
+    def teardown
+      @kafka.delete_topic(TOPIC_NAME)
+      @kafka.close
+    end
+
+    def test_consume
+      conf = %[
+        @type kafka
+        brokers localhost:9092
+        format text
+        @label @kafka
+        topics #{TOPIC_NAME}
+      ]
+      d = create_driver
+
+      d.run(expect_records: 1, timeout: 10) do
+        @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+        @producer.deliver_messages
+      end
+      expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+      assert_equal expected, d.events[0][2]
+    end
+  end
+end
data/test/plugin/test_in_kafka_group.rb ADDED
@@ -0,0 +1,67 @@
+require 'helper'
+require 'fluent/test/driver/input'
+require 'securerandom'
+
+class KafkaGroupInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+  CONFIG = %[
+    @type kafka
+    brokers localhost:9092
+    consumer_group fluentd
+    format text
+    @label @kafka
+    topics #{TOPIC_NAME}
+  ]
+
+  def create_driver(conf = CONFIG)
+    Fluent::Test::Driver::Input.new(Fluent::KafkaGroupInput).configure(conf)
+  end
+
+
+  def test_configure
+    d = create_driver
+    assert_equal [TOPIC_NAME], d.instance.topics
+    assert_equal 'text', d.instance.format
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_multi_worker_support
+    d = create_driver
+    assert_true d.instance.multi_workers_ready?
+  end
+
+  class ConsumeTest < self
+    def setup
+      @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+      @producer = @kafka.producer
+    end
+
+    def teardown
+      @kafka.delete_topic(TOPIC_NAME)
+      @kafka.close
+    end
+
+    def test_consume
+      conf = %[
+        @type kafka
+        brokers localhost:9092
+        format text
+        @label @kafka
+        topics #{TOPIC_NAME}
+      ]
+      d = create_driver
+
+      d.run(expect_records: 1, timeout: 10) do
+        @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+        @producer.deliver_messages
+      end
+      expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+      assert_equal expected, d.events[0][2]
+    end
+  end
+end
data/test/plugin/test_kafka_plugin_util.rb ADDED
@@ -0,0 +1,38 @@
+require 'helper'
+require 'fluent/plugin/kafka_plugin_util'
+
+class File
+  def File::read(path)
+    path
+  end
+end
+
+class KafkaPluginUtilTest < Test::Unit::TestCase
+
+  def self.config_param(name, type, options)
+  end
+  include Fluent::KafkaPluginUtil::SSLSettings
+
+  def config_param
+  end
+  def setup
+    Fluent::Test.setup
+  end
+
+  def test_read_ssl_file_when_nil
+    assert_equal(nil, read_ssl_file(nil))
+  end
+
+  def test_read_ssl_file_when_empty_string
+    assert_equal(nil, read_ssl_file(""))
+  end
+
+  def test_read_ssl_file_when_non_empty_path
+    assert_equal("path", read_ssl_file("path"))
+  end
+
+  def test_read_ssl_file_when_non_empty_array
+    assert_equal(["a","b"], read_ssl_file(["a","b"]))
+  end
+
+end
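These tests stub `File.read` to return the path itself, so they pin down the contract of `read_ssl_file` rather than real file I/O: `nil` and `""` yield `nil` (the 0.16.1 fix), an array is read entry by entry, and a plain path is read as a file. A sketch of an implementation satisfying exactly these assertions (inferred from the tests, not the gem's verbatim code):

    # Sketch inferred from the assertions above, not copied from the gem.
    def read_ssl_file(path)
      return nil if path.nil? || (path.respond_to?(:empty?) && path.empty?)
      return path.map { |p| File.read(p) } if path.is_a?(Array)  # ["a","b"] => ["a","b"] under the stub
      File.read(path)  # "path" => "path" under the stub
    end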
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.15.2
+  version: 0.16.3
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-05-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -93,6 +93,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: 3.0.8
+- !ruby/object:Gem::Dependency
+  name: webrick
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Fluentd plugin for Apache Kafka > 0.8
 email:
 - togachiro@gmail.com
@@ -101,13 +115,14 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- ".github/workflows/linux.yml"
 - ".gitignore"
-- ".travis.yml"
 - ChangeLog
 - Gemfile
 - LICENSE
 - README.md
 - Rakefile
+- ci/prepare-kafka-server.sh
 - fluent-plugin-kafka.gemspec
 - lib/fluent/plugin/in_kafka.rb
 - lib/fluent/plugin/in_kafka_group.rb
@@ -120,6 +135,9 @@ files:
 - lib/fluent/plugin/out_rdkafka.rb
 - lib/fluent/plugin/out_rdkafka2.rb
 - test/helper.rb
+- test/plugin/test_in_kafka.rb
+- test/plugin/test_in_kafka_group.rb
+- test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb
 homepage: https://github.com/fluent/fluent-plugin-kafka
 licenses:
@@ -140,10 +158,13 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.2.5
 signing_key:
 specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8
 test_files:
 - test/helper.rb
+- test/plugin/test_in_kafka.rb
+- test/plugin/test_in_kafka_group.rb
+- test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb
data/.travis.yml DELETED
@@ -1,21 +0,0 @@
-language: ruby
-
-rvm:
-  - 2.1
-  - 2.2
-  - 2.3.1
-  - 2.4.1
-  - 2.5.0
-  - ruby-head
-
-before_install:
-  - gem update --system=2.7.8
-script:
-  - bundle exec rake test
-
-sudo: false
-
-matrix:
-  allow_failures:
-    - rvm: ruby-head
-