fluent-plugin-kafka 0.16.3 → 0.17.0
- checksums.yaml +4 -4
- data/.github/ISSUE_TEMPLATE/bug_report.yaml +71 -0
- data/.github/ISSUE_TEMPLATE/config.yml +5 -0
- data/.github/ISSUE_TEMPLATE/feature_request.yaml +38 -0
- data/.github/workflows/stale-actions.yml +22 -0
- data/ChangeLog +5 -0
- data/README.md +9 -0
- data/fluent-plugin-kafka.gemspec +4 -2
- data/lib/fluent/plugin/in_kafka.rb +3 -3
- data/lib/fluent/plugin/in_kafka_group.rb +3 -3
- data/lib/fluent/plugin/out_kafka.rb +11 -6
- data/lib/fluent/plugin/out_kafka2.rb +12 -7
- data/lib/fluent/plugin/out_kafka_buffered.rb +11 -6
- data/lib/fluent/plugin/out_rdkafka.rb +1 -1
- data/test/helper.rb +5 -0
- data/test/plugin/test_kafka_plugin_util.rb +18 -12
- data/test/plugin/test_out_kafka.rb +10 -0
- data/test/plugin/test_out_kafka2.rb +60 -0
- data/test/plugin/test_out_kafka_buffered.rb +68 -0
- metadata +44 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 15805411e4029813123b9b636b6faadb937cf38c9841adcd9d998a0f54d8b687
+  data.tar.gz: 75fef11595c86beb4a54d2d2ff659f77075e328426463f7ef830982c9724ff16
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: db08ffbde4fe36ce38abe6eebf83e6e0dc157bc5d8dd95141ef911d0c5b59074a208c3a8f836e258ffddc34f73fda303006eaef1cd8fc12ae4dd4c79d101d0c7
+  data.tar.gz: 9b3b098b1bc58654924d50ac8d685af007763942c1cc68187bbd6d39510a27ca31ae795df0de312d97ad67463ae0c428882118640fce052119288e27e3fb0df5
data/.github/ISSUE_TEMPLATE/bug_report.yaml
ADDED
@@ -0,0 +1,71 @@
+name: Bug Report
+description: Create a report with a procedure for reproducing the bug
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Check [README](https://github.com/fluent/fluent-plugin-kafka/#faq) first and here is the list to help us investigate the problem.
+  - type: textarea
+    id: description
+    attributes:
+      label: Describe the bug
+      description: A clear and concise description of what the bug is
+    validations:
+      required: true
+  - type: textarea
+    id: reproduce
+    attributes:
+      label: To Reproduce
+      description: Steps to reproduce the behavior
+    validations:
+      required: true
+  - type: textarea
+    id: expected
+    attributes:
+      label: Expected behavior
+      description: A clear and concise description of what you expected to happen
+    validations:
+      required: true
+  - type: textarea
+    id: environment
+    attributes:
+      label: Your Environment
+      description: |
+        - Fluentd or td-agent version: `fluentd --version` or `td-agent --version`
+        - Operating system: `cat /etc/os-release`
+        - Kernel version: `uname -r`
+
+        Tip: If you hit the problem with older fluentd version, try latest version first.
+      value: |
+        - Fluentd version:
+        - TD Agent version:
+        - fluent-plugin-kafka version:
+        - ruby-kafka version:
+        - Operating system:
+        - Kernel version:
+      render: markdown
+    validations:
+      required: true
+  - type: textarea
+    id: configuration
+    attributes:
+      label: Your Configuration
+      description: |
+        Write your configuration here. Minimum reproducible fluentd.conf is recommended.
+    validations:
+      required: true
+  - type: textarea
+    id: logs
+    attributes:
+      label: Your Error Log
+      description: Write your ALL error log here
+      render: shell
+    validations:
+      required: true
+  - type: textarea
+    id: addtional-context
+    attributes:
+      label: Additional context
+      description: Add any other context about the problem here.
+    validations:
+      required: false
data/.github/ISSUE_TEMPLATE/feature_request.yaml
ADDED
@@ -0,0 +1,38 @@
+name: Feature request
+description: Suggest an idea for this project
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Check [README.md](https://github.com/fluent/fluent-plugin-kafka/blob/master/README.md) first and here is the list to help us investigate the problem.
+  - type: textarea
+    id: description
+    attributes:
+      label: Is your feature request related to a problem? Please describe.
+      description: |
+        A clear and concise description of what the problem is.
+        Ex. I'm always frustrated when [...]
+    validations:
+      required: true
+  - type: textarea
+    id: solution
+    attributes:
+      label: Describe the solution you'd like
+      description: A clear and concise description of what you want to happen.
+    validations:
+      required: true
+  - type: textarea
+    id: alternative
+    attributes:
+      label: Describe alternatives you've considered
+      description: A clear and concise description of any alternative solutions or features you've considered.
+    validations:
+      required: true
+  - type: textarea
+    id: addtional-context
+    attributes:
+      label: Additional context
+      description: Add any other context or screenshots about the feature request here.
+    validations:
+      required: false
+
data/.github/workflows/stale-actions.yml
ADDED
@@ -0,0 +1,22 @@
+name: "Mark or close stale issues and PRs"
+on:
+  schedule:
+    - cron: "00 10 * * *"
+
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v3
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          days-before-stale: 90
+          days-before-close: 30
+          stale-issue-message: "This issue has been automatically marked as stale because it has been open 90 days with no activity. Remove stale label or comment or this issue will be closed in 30 days"
+          stale-pr-message: "This PR has been automatically marked as stale because it has been open 90 days with no activity. Remove stale label or comment or this PR will be closed in 30 days"
+          close-issue-message: "This issue was automatically closed because of stale in 30 days"
+          close-pr-message: "This PR was automatically closed because of stale in 30 days"
+          stale-pr-label: "stale"
+          stale-issue-label: "stale"
+          exempt-issue-labels: "bug,enhancement,help wanted"
+          exempt-pr-labels: "bug,enhancement,help wanted"
data/ChangeLog
CHANGED
@@ -1,3 +1,8 @@
+Release 0.17.0 - 2021/08/30
+	* out_kafka/out_kafka_buffered/out_kafka2: Provide murmur2 partitioner hash function choice
+	* in_kafka/in_kafka_group/out_kafka/out_kafka_buffered/out_kafka2: Use Ruby Kafka's ssl_ca_cert_file_path parameter to feed the CA certs
+	* out_kafka/out_kafka2: fix description for `exclude_message_key` option
+
 Release 0.16.3 - 2021/05/17
 	* in_kafka_group: Fix one more Ruby 3.0 keyword arguments issue
 
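Both 0.17.0 features surface in how each plugin constructs its ruby-kafka client, as the per-file diffs below show. A minimal sketch of the resulting call, assuming ruby-kafka >= 1.4.0; the broker address and CA path are placeholders:

    require "kafka"

    # ssl_ca_cert_file_path hands ruby-kafka the path to the CA file so the
    # client reads the certificate itself, instead of the plugin slurping the
    # PEM contents with File.read.
    kafka = Kafka.new(
      seed_brokers: ["localhost:9092"],          # placeholder broker
      client_id: "fluentd",
      ssl_ca_cert_file_path: "/path/to/ca.crt",  # placeholder path
      partitioner: Kafka::Partitioner.new(hash_function: :murmur2)
    )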
data/README.md
CHANGED
@@ -202,6 +202,7 @@ If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin inst
 headers_from_record (hash) :default => {}
 use_default_for_unknown_topic (bool) :default => false
 discard_kafka_delivery_failed (bool) :default => false (No discard)
+partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
 <format>
   @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
@@ -230,6 +231,8 @@ If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin inst
 
 The `<formatter name>` in `<format>` uses fluentd's formatter plugins. See [formatter article](https://docs.fluentd.org/v/1.0/formatter).
 
+**Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
 ruby-kafka sometimes returns `Kafka::DeliveryFailed` error without good information.
 In this case, `get_kafka_client_log` is useful for identifying the error cause.
 ruby-kafka's log is routed to fluentd log so you can see ruby-kafka's log in fluentd logs.
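For example, a minimal `kafka2` section opting into the new hash function could look like the sketch below (tag, topic, and broker are placeholders; `murmur2` only works once the `digest-murmurhash` gem is installed):

    <match app.**>
      @type kafka2
      brokers localhost:9092
      default_topic events
      partitioner_hash_function murmur2
      <format>
        @type json
      </format>
    </match>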
@@ -343,6 +346,7 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
 exclude_topic_key (bool) :default => false
 exclude_partition_key (bool) :default => false
 get_kafka_client_log (bool) :default => false
+partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
 # See fluentd document for buffer related parameters: https://docs.fluentd.org/v/0.12/buffer
 
@@ -365,6 +369,8 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
 - kafka_agg_max_bytes - default: 4096 - Maximum value of total message size to be included in one batch transmission.
 - kafka_agg_max_messages - default: nil - Maximum number of messages to include in one batch transmission.
 
+**Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
 ### Non-buffered output plugin
 
 This plugin uses ruby-kafka producer for writing data. For performance and reliability concerns, use `kafka_bufferd` output instead. This is mainly for testing.
@@ -385,6 +391,7 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
 output_include_time (bool) :default => false
 exclude_topic_key (bool) :default => false
 exclude_partition_key (bool) :default => false
+partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
 # ruby-kafka producer options
 max_send_retries (integer) :default => 1
@@ -397,6 +404,8 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
 
 This plugin also supports ruby-kafka related parameters. See Buffered output plugin section.
 
+**Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
 ### rdkafka based output plugin
 
 This plugin uses `rdkafka` instead of `ruby-kafka` for kafka client.
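Since the gemspec below adds `digest-murmurhash` only as a development dependency, a deployment that enables `murmur2` has to pull the gem in itself; a minimal Gemfile sketch:

    # digest-murmurhash is needed at runtime only when
    # partitioner_hash_function is set to murmur2
    gem "fluent-plugin-kafka", "0.17.0"
    gem "digest-murmurhash"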
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -13,13 +13,15 @@ Gem::Specification.new do |gem|
   gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
   gem.name          = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version       = '0.16.3'
+  gem.version       = '0.17.0'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
   gem.add_dependency 'ltsv'
-  gem.add_dependency 'ruby-kafka', '>= 1.
+  gem.add_dependency 'ruby-kafka', '>= 1.4.0', '< 2'
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
+  gem.add_development_dependency "test-unit-rr", "~> 1.0"
   gem.add_development_dependency "webrick"
+  gem.add_development_dependency "digest-murmurhash"
 end
data/lib/fluent/plugin/in_kafka.rb
CHANGED
@@ -197,17 +197,17 @@ class Fluent::KafkaInput < Fluent::Input
 
     logger = @get_kafka_client_log ? log : nil
     if @scram_mechanism != nil && @username != nil && @password != nil
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
         sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
     elsif @username != nil && @password != nil
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
         ssl_ca_certs_from_system: @ssl_ca_certs_from_system,sasl_plain_username: @username, sasl_plain_password: @password,
         sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
     else
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
        ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
        ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
        ssl_verify_hostname: @ssl_verify_hostname)
data/lib/fluent/plugin/in_kafka_group.rb
CHANGED
@@ -183,17 +183,17 @@ class Fluent::KafkaGroupInput < Fluent::Input
 
     logger = @get_kafka_client_log ? log : nil
     if @scram_mechanism != nil && @username != nil && @password != nil
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
        ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
        ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
        sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
     elsif @username != nil && @password != nil
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
        ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
        ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password,
        sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
     else
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
        ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
        ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
        ssl_verify_hostname: @ssl_verify_hostname)
data/lib/fluent/plugin/out_kafka.rb
CHANGED
@@ -19,6 +19,8 @@ DESC
   config_param :default_message_key, :string, :default => nil
   config_param :default_partition_key, :string, :default => nil
   config_param :default_partition, :integer, :default => nil
+  config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+               :desc => "Specify kafka patrtitioner hash algorithm"
   config_param :client_id, :string, :default => 'kafka'
   config_param :sasl_over_ssl, :bool, :default => true,
                :desc => <<-DESC
@@ -106,18 +108,21 @@ DESC
     begin
       if @seed_brokers.length > 0
         if @scram_mechanism != nil && @username != nil && @password != nil
-          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id,
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
            ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
            sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl,
-           ssl_verify_hostname: @ssl_verify_hostname)
+           ssl_verify_hostname: @ssl_verify_hostname,
+           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
         elsif @username != nil && @password != nil
-          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id,
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
            ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
-           sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+           sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
         else
-          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id,
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
            ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
-           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+           partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
         end
         log.info "initialized kafka producer: #{@client_id}"
       else
data/lib/fluent/plugin/out_kafka2.rb
CHANGED
@@ -24,6 +24,8 @@ DESC
   config_param :partition_key_key, :string, :default => 'partition_key', :desc => "Field for kafka partition key"
   config_param :default_partition_key, :string, :default => nil
   config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
+  config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+               :desc => "Specify kafka patrtitioner hash algorithm"
   config_param :default_partition, :integer, :default => nil
   config_param :use_default_for_unknown_topic, :bool, :default => false, :desc => "If true, default_topic is used when topic not found"
   config_param :client_id, :string, :default => 'fluentd'
@@ -37,7 +39,7 @@ DESC
   config_param :exclude_partition, :bool, :default => false,
                :desc => 'Set true to remove partition from data'
   config_param :exclude_message_key, :bool, :default => false,
-               :desc => 'Set true to remove
+               :desc => 'Set true to remove message key from data'
   config_param :exclude_topic_key, :bool, :default => false,
                :desc => 'Set true to remove topic name key from data'
   config_param :use_event_time, :bool, :default => false, :desc => 'Use fluentd event time for kafka create_time'
@@ -96,20 +98,23 @@ DESC
     begin
       logger = @get_kafka_client_log ? log : nil
       if @scram_mechanism != nil && @username != nil && @password != nil
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
          ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
-         sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+         sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
       elsif @username != nil && @password != nil
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl,
-         ssl_verify_hostname: @ssl_verify_hostname)
+         ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
       else
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl,
-         ssl_verify_hostname: @ssl_verify_hostname)
+         ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
       end
       log.info "initialized kafka producer: #{@client_id}"
     rescue Exception => e
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -26,6 +26,8 @@ DESC
   config_param :default_partition_key, :string, :default => nil
   config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
   config_param :default_partition, :integer, :default => nil
+  config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+               :desc => "Specify kafka patrtitioner hash algorithm"
   config_param :client_id, :string, :default => 'kafka'
   config_param :idempotent, :bool, :default => false, :desc => 'Enable idempotent producer'
   config_param :sasl_over_ssl, :bool, :default => true,
@@ -130,18 +132,21 @@ DESC
     if @seed_brokers.length > 0
       logger = @get_kafka_client_log ? log : nil
       if @scram_mechanism != nil && @username != nil && @password != nil
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
         sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl,
-         ssl_verify_hostname: @ssl_verify_hostname)
+         ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
      elsif @username != nil && @password != nil
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
-         sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+         sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
       else
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger,
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
-         sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+         sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+         partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
       end
       log.info "initialized kafka producer: #{@client_id}"
     else
data/lib/fluent/plugin/out_rdkafka.rb
CHANGED
@@ -48,7 +48,7 @@ Set true to remove partition from data
 DESC
   config_param :exclude_message_key, :bool, :default => false,
                :desc => <<-DESC
-Set true to remove
+Set true to remove message key from data
 DESC
   config_param :exclude_topic_key, :bool, :default => false,
                :desc => <<-DESC
data/test/helper.rb
CHANGED
@@ -8,6 +8,7 @@ rescue Bundler::BundlerError => e
   exit e.status_code
 end
 require 'test/unit'
+require 'test/unit/rr'
 
 $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
 $LOAD_PATH.unshift(File.dirname(__FILE__))
@@ -22,8 +23,12 @@ unless ENV.has_key?('VERBOSE')
 end
 
 require 'fluent/plugin/out_kafka'
+require 'fluent/plugin/out_kafka_buffered'
+require 'fluent/plugin/out_kafka2'
 require 'fluent/plugin/in_kafka'
 require 'fluent/plugin/in_kafka_group'
 
+require "fluent/test/driver/output"
+
 class Test::Unit::TestCase
 end
data/test/plugin/test_kafka_plugin_util.rb
CHANGED
@@ -1,14 +1,8 @@
 require 'helper'
 require 'fluent/plugin/kafka_plugin_util'
 
-class File
-  def File::read(path)
-    path
-  end
-end
-
 class KafkaPluginUtilTest < Test::Unit::TestCase
-
+
   def self.config_param(name, type, options)
   end
   include Fluent::KafkaPluginUtil::SSLSettings
@@ -20,19 +14,31 @@ class KafkaPluginUtilTest < Test::Unit::TestCase
   end
 
   def test_read_ssl_file_when_nil
-
+    stub(File).read(anything) do |path|
+      path
+    end
     assert_equal(nil, read_ssl_file(nil))
   end
 
   def test_read_ssl_file_when_empty_string
-
+    stub(File).read(anything) do |path|
+      path
+    end
     assert_equal(nil, read_ssl_file(""))
   end
 
   def test_read_ssl_file_when_non_empty_path
-
+    stub(File).read(anything) do |path|
+      path
+    end
     assert_equal("path", read_ssl_file("path"))
   end
 
   def test_read_ssl_file_when_non_empty_array
-
+    stub(File).read(anything) do |path|
+      path
+    end
     assert_equal(["a","b"], read_ssl_file(["a","b"]))
   end
-end
+end
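The rewrite above matters because the old version reopened `File` globally, so the fake `File.read` leaked into every other test; with `test-unit-rr`, the stub is torn down after each test method. The same pattern in isolation (test class name is illustrative):

    require 'test/unit'
    require 'test/unit/rr'

    class StubScopeTest < Test::Unit::TestCase
      def test_file_read_is_stubbed_only_here
        # rr replaces File.read for the duration of this test method only
        stub(File).read(anything) { |path| path }
        assert_equal('some/path', File.read('some/path'))
      end
    end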
data/test/plugin/test_out_kafka.rb
CHANGED
@@ -43,6 +43,16 @@ class KafkaOutputTest < Test::Unit::TestCase
     d = create_driver
   end
 
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
   def test_mutli_worker_support
     d = create_driver
     assert_equal true, d.instance.multi_workers_ready?
data/test/plugin/test_out_kafka2.rb
ADDED
@@ -0,0 +1,60 @@
+require 'helper'
+require 'fluent/test/helpers'
+require 'fluent/output'
+
+class Kafka2OutputTest < Test::Unit::TestCase
+  include Fluent::Test::Helpers
+
+  def setup
+    Fluent::Test.setup
+  end
+
+  def base_config
+    config_element('ROOT', '', {"@type" => "kafka2"}, [
+                     config_element('format', "", {"@type" => "json"})
+                   ])
+  end
+
+  def config
+    base_config + config_element('ROOT', '', {"default_topic" => "kitagawakeiko",
+                                              "brokers" => "localhost:9092"}, [
+                                 ])
+  end
+
+  def create_driver(conf = config, tag='test')
+    Fluent::Test::Driver::Output.new(Fluent::Kafka2Output).configure(conf)
+  end
+
+  def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(base_config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+    }
+
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal ['localhost:9092'], d.instance.brokers
+  end
+
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(config + config_element('ROOT', '', {"partitioner_hash_function" => hash_type}))
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
+  def test_mutli_worker_support
+    d = create_driver
+    assert_equal true, d.instance.multi_workers_ready?
+  end
+end
data/test/plugin/test_out_kafka_buffered.rb
ADDED
@@ -0,0 +1,68 @@
+require 'helper'
+require 'fluent/output'
+
+class KafkaBufferedOutputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  BASE_CONFIG = %[
+    type kafka_buffered
+  ]
+
+  CONFIG = BASE_CONFIG + %[
+    default_topic kitagawakeiko
+    brokers localhost:9092
+  ]
+
+  def create_driver(conf = CONFIG, tag='test')
+    Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutputBuffered, tag).configure(conf)
+  end
+
+  def test_configure
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(BASE_CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG)
+    }
+
+    assert_nothing_raised(Fluent::ConfigError) {
+      create_driver(CONFIG + %[
+        buffer_type memory
+      ])
+    }
+
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_format
+    d = create_driver
+  end
+
+  data("crc32" => "crc32",
+       "murmur2" => "murmur2")
+  def test_partitioner_hash_function(data)
+    hash_type = data
+    d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+    assert_nothing_raised do
+      d.instance.refresh_client
+    end
+  end
+
+  def test_mutli_worker_support
+    d = create_driver
+    assert_equal true, d.instance.multi_workers_ready?
+
+  end
+
+  def test_write
+    d = create_driver
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+    d.emit({"a"=>1}, time)
+    d.emit({"a"=>2}, time)
+  end
+end
metadata
CHANGED
@@ -1,15 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.16.3
+  version: 0.17.0
 platform: ruby
 authors:
 - Hidemasa Togashi
 - Masahiro Nakagawa
-autorequire:
+autorequire: 
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-08-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -51,7 +51,7 @@ dependencies:
     requirements:
     - - ">="
      - !ruby/object:Gem::Version
-        version: 1.
+        version: 1.4.0
    - - "<"
      - !ruby/object:Gem::Version
        version: '2'
@@ -61,7 +61,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-        version: 1.
+        version: 1.4.0
    - - "<"
      - !ruby/object:Gem::Version
        version: '2'
@@ -93,6 +93,20 @@ dependencies:
    - - ">="
      - !ruby/object:Gem::Version
        version: 3.0.8
+- !ruby/object:Gem::Dependency
+  name: test-unit-rr
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
 - !ruby/object:Gem::Dependency
   name: webrick
   requirement: !ruby/object:Gem::Requirement
@@ -107,6 +121,20 @@ dependencies:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
+- !ruby/object:Gem::Dependency
+  name: digest-murmurhash
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Fluentd plugin for Apache Kafka > 0.8
 email:
 - togachiro@gmail.com
@@ -115,7 +143,11 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- ".github/ISSUE_TEMPLATE/bug_report.yaml"
+- ".github/ISSUE_TEMPLATE/config.yml"
+- ".github/ISSUE_TEMPLATE/feature_request.yaml"
 - ".github/workflows/linux.yml"
+- ".github/workflows/stale-actions.yml"
 - ".gitignore"
 - ChangeLog
 - Gemfile
@@ -139,11 +171,13 @@ files:
 - test/plugin/test_in_kafka_group.rb
 - test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb
+- test/plugin/test_out_kafka2.rb
+- test/plugin/test_out_kafka_buffered.rb
 homepage: https://github.com/fluent/fluent-plugin-kafka
 licenses:
 - Apache-2.0
 metadata: {}
-post_install_message:
+post_install_message: 
 rdoc_options: []
 require_paths:
 - lib
@@ -158,8 +192,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.1.4
+signing_key: 
 specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8
 test_files:
@@ -168,3 +202,5 @@ test_files:
 - test/plugin/test_in_kafka_group.rb
 - test/plugin/test_kafka_plugin_util.rb
 - test/plugin/test_out_kafka.rb
+- test/plugin/test_out_kafka2.rb
+- test/plugin/test_out_kafka_buffered.rb