fluent-plugin-kafka 0.16.0 → 0.17.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7fcaf3e8fbb836ab8db3fa21003a713f9048de076097e2c95a9f30e5d1b05c08
- data.tar.gz: 8437b2c9401238d811973422a65d2c9ee34bd8afc513f32412a22f93e03204a0
+ metadata.gz: 15805411e4029813123b9b636b6faadb937cf38c9841adcd9d998a0f54d8b687
+ data.tar.gz: 75fef11595c86beb4a54d2d2ff659f77075e328426463f7ef830982c9724ff16
  SHA512:
- metadata.gz: 6d407e6f12dbafc6f5fad59ac6cbae4b13fa5f0a212cf82dba604f871afc286899290972b3afca5d1031888ee2f141c12d5d2e3d6b7324c7246317029149c491
- data.tar.gz: b278441842361cc53836fce087e87e6d2d800bccbe9b307c2f0b39d3c4c357aae96275d2bfb808ab2455ebce692948d1241f4f991a5036473da1d31da699832b
+ metadata.gz: db08ffbde4fe36ce38abe6eebf83e6e0dc157bc5d8dd95141ef911d0c5b59074a208c3a8f836e258ffddc34f73fda303006eaef1cd8fc12ae4dd4c79d101d0c7
+ data.tar.gz: 9b3b098b1bc58654924d50ac8d685af007763942c1cc68187bbd6d39510a27ca31ae795df0de312d97ad67463ae0c428882118640fce052119288e27e3fb0df5
data/.github/ISSUE_TEMPLATE/bug_report.yaml ADDED
@@ -0,0 +1,71 @@
+ name: Bug Report
+ description: Create a report with a procedure for reproducing the bug
+ body:
+ - type: markdown
+ attributes:
+ value: |
+ Check [README](https://github.com/fluent/fluent-plugin-kafka/#faq) first and here is the list to help us investigate the problem.
+ - type: textarea
+ id: description
+ attributes:
+ label: Describe the bug
+ description: A clear and concise description of what the bug is
+ validations:
+ required: true
+ - type: textarea
+ id: reproduce
+ attributes:
+ label: To Reproduce
+ description: Steps to reproduce the behavior
+ validations:
+ required: true
+ - type: textarea
+ id: expected
+ attributes:
+ label: Expected behavior
+ description: A clear and concise description of what you expected to happen
+ validations:
+ required: true
+ - type: textarea
+ id: environment
+ attributes:
+ label: Your Environment
+ description: |
+ - Fluentd or td-agent version: `fluentd --version` or `td-agent --version`
+ - Operating system: `cat /etc/os-release`
+ - Kernel version: `uname -r`
+
+ Tip: If you hit the problem with older fluentd version, try latest version first.
+ value: |
+ - Fluentd version:
+ - TD Agent version:
+ - fluent-plugin-kafka version:
+ - ruby-kafka version:
+ - Operating system:
+ - Kernel version:
+ render: markdown
+ validations:
+ required: true
+ - type: textarea
+ id: configuration
+ attributes:
+ label: Your Configuration
+ description: |
+ Write your configuration here. Minimum reproducible fluentd.conf is recommended.
+ validations:
+ required: true
+ - type: textarea
+ id: logs
+ attributes:
+ label: Your Error Log
+ description: Write your ALL error log here
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+ id: addtional-context
+ attributes:
+ label: Additional context
+ description: Add any other context about the problem here.
+ validations:
+ required: false
data/.github/ISSUE_TEMPLATE/config.yml ADDED
@@ -0,0 +1,5 @@
+ blank_issues_enabled: false
+ contact_links:
+ - name: Ask a Question
+ url: https://discuss.fluentd.org/
+ about: I have questions about fluent-plugin-kafka. Please ask and answer questions at https://discuss.fluentd.org/.
data/.github/ISSUE_TEMPLATE/feature_request.yaml ADDED
@@ -0,0 +1,38 @@
+ name: Feature request
+ description: Suggest an idea for this project
+ body:
+ - type: markdown
+ attributes:
+ value: |
+ Check [README.md](https://github.com/fluent/fluent-plugin-kafka/blob/master/README.md) first and here is the list to help us investigate the problem.
+ - type: textarea
+ id: description
+ attributes:
+ label: Is your feature request related to a problem? Please describe.
+ description: |
+ A clear and concise description of what the problem is.
+ Ex. I'm always frustrated when [...]
+ validations:
+ required: true
+ - type: textarea
+ id: solution
+ attributes:
+ label: Describe the solution you'd like
+ description: A clear and concise description of what you want to happen.
+ validations:
+ required: true
+ - type: textarea
+ id: alternative
+ attributes:
+ label: Describe alternatives you've considered
+ description: A clear and concise description of any alternative solutions or features you've considered.
+ validations:
+ required: true
+ - type: textarea
+ id: addtional-context
+ attributes:
+ label: Additional context
+ description: Add any other context or screenshots about the feature request here.
+ validations:
+ required: false
+
data/.github/workflows/linux.yml CHANGED
@@ -17,10 +17,20 @@ jobs:
  - uses: ruby/setup-ruby@v1
  with:
  ruby-version: ${{ matrix.ruby }}
+ - name: Install confluent-kafka
+ run: |
+ sudo apt install -V -y gnupg2 wget
+ wget https://packages.confluent.io/deb/6.0/archive.key
+ sudo gpg2 --homedir /tmp --no-default-keyring --keyring gnupg-ring:/usr/share/keyrings/confluent-archive-keyring.gpg --import archive.key
+ sudo chmod 644 /usr/share/keyrings/confluent-archive-keyring.gpg
+ sudo sh -c 'echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/confluent-archive-keyring.gpg] https://packages.confluent.io/deb/6.0 stable main" > /etc/apt/sources.list.d/confluent.list'
+ sudo apt update
+ sudo apt install -y confluent-community-2.13 openjdk-11-jre netcat-openbsd
  - name: unit testing
  env:
  CI: true
  run: |
+ sudo ./ci/prepare-kafka-server.sh
  gem install bundler rake
  bundle install --jobs 4 --retry 3
  bundle exec rake test
data/.github/workflows/stale-actions.yml ADDED
@@ -0,0 +1,22 @@
+ name: "Mark or close stale issues and PRs"
+ on:
+ schedule:
+ - cron: "00 10 * * *"
+
+ jobs:
+ stale:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/stale@v3
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ days-before-stale: 90
+ days-before-close: 30
+ stale-issue-message: "This issue has been automatically marked as stale because it has been open 90 days with no activity. Remove stale label or comment or this issue will be closed in 30 days"
+ stale-pr-message: "This PR has been automatically marked as stale because it has been open 90 days with no activity. Remove stale label or comment or this PR will be closed in 30 days"
+ close-issue-message: "This issue was automatically closed because of stale in 30 days"
+ close-pr-message: "This PR was automatically closed because of stale in 30 days"
+ stale-pr-label: "stale"
+ stale-issue-label: "stale"
+ exempt-issue-labels: "bug,enhancement,help wanted"
+ exempt-pr-labels: "bug,enhancement,help wanted"
data/ChangeLog CHANGED
@@ -1,3 +1,18 @@
+ Release 0.17.0 - 2021/08/30
+ * out_kafka/out_kafka_buffered/out_kafka2: Provide murmur2 partitioner hash function choice
+ * in_kafka/in_kafka_group/out_kafka/out_kafka_buffered/out_kafka2: Use Ruby Kafka's ssl_ca_cert_file_path parameter to feed the CA certs
+ * out_kafka/out_kafka2: fix description for `exclude_message_key` option
+
+ Release 0.16.3 - 2021/05/17
+ * in_kafka_group: Fix one more Ruby 3.0 keyword arguments issue
+
+ Release 0.16.2 - 2021/05/17
+ * in_kafka, in_kafka_group: Support Ruby 3.0 keyword arguments interop
+
+ Release 0.16.1 - 2021/04/14
+ * out_kafka/out_kafka_buffered: Support Ruby 3.0.0 keyword arguments interop
+ * kafka_plugin_util: Treat empty string in read_ssl_file as nil
+
  Release 0.16.0 - 2021/01/25
 
  * input: Add `tag_source` and `record_tag_key` parameters for using record field as tag
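The `ssl_ca_cert_file_path` entry means the plugins now hand the configured CA path straight to ruby-kafka instead of reading the PEM contents themselves (see the in_kafka/in_kafka_group/out_kafka* hunks below). A minimal sketch of the difference, assuming a hypothetical certificate path:

```ruby
require "kafka"  # ruby-kafka >= 1.4.0

ca_path = "/etc/ssl/certs/my-ca.crt"  # hypothetical path, for illustration only

# Up to 0.16.x: the plugin read the file and passed the PEM contents.
old_client = Kafka.new(["localhost:9092"], client_id: "fluentd",
                       ssl_ca_cert: File.read(ca_path))

# From 0.17.0: the path itself is passed and ruby-kafka reads the file.
new_client = Kafka.new(["localhost:9092"], client_id: "fluentd",
                       ssl_ca_cert_file_path: ca_path)
```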
data/README.md CHANGED
@@ -1,6 +1,7 @@
  # fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)
 
- [![Build Status](https://travis-ci.org/fluent/fluent-plugin-kafka.svg?branch=master)](https://travis-ci.org/fluent/fluent-plugin-kafka)
+ [![GitHub Actions Status](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml/badge.svg)](https://github.com/fluent/fluent-plugin-kafka/actions/workflows/linux.yml)
+
 
  A fluentd plugin to both consume and produce data for Apache Kafka.
 
@@ -201,6 +202,7 @@ If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin inst
  headers_from_record (hash) :default => {}
  use_default_for_unknown_topic (bool) :default => false
  discard_kafka_delivery_failed (bool) :default => false (No discard)
+ partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
  <format>
  @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
@@ -229,6 +231,8 @@ If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin inst
 
  The `<formatter name>` in `<format>` uses fluentd's formatter plugins. See [formatter article](https://docs.fluentd.org/v/1.0/formatter).
 
+ **Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
  ruby-kafka sometimes returns `Kafka::DeliveryFailed` error without good information.
  In this case, `get_kafka_client_log` is useful for identifying the error cause.
  ruby-kafka's log is routed to fluentd log so you can see ruby-kafka's log in fluentd logs.
@@ -342,6 +346,7 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
  exclude_topic_key (bool) :default => false
  exclude_partition_key (bool) :default => false
  get_kafka_client_log (bool) :default => false
+ partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
  # See fluentd document for buffer related parameters: https://docs.fluentd.org/v/0.12/buffer
 
@@ -364,6 +369,8 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
  - kafka_agg_max_bytes - default: 4096 - Maximum value of total message size to be included in one batch transmission.
  - kafka_agg_max_messages - default: nil - Maximum number of messages to include in one batch transmission.
 
+ **Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
  ### Non-buffered output plugin
 
  This plugin uses ruby-kafka producer for writing data. For performance and reliability concerns, use `kafka_bufferd` output instead. This is mainly for testing.
@@ -384,6 +391,7 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
  output_include_time (bool) :default => false
  exclude_topic_key (bool) :default => false
  exclude_partition_key (bool) :default => false
+ partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
 
  # ruby-kafka producer options
  max_send_retries (integer) :default => 1
@@ -396,6 +404,8 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
 
  This plugin also supports ruby-kafka related parameters. See Buffered output plugin section.
 
+ **Note:** Java based Kafka client uses `murmur2` as partitioner function by default. If you want to use same partitioning behavior with fluent-plugin-kafka, change it to `murmur2` instead of `crc32`. Note that for using `murmur2` hash partitioner function, you must install `digest-murmurhash` gem.
+
  ### rdkafka based output plugin
 
  This plugin uses `rdkafka` instead of `ruby-kafka` for kafka client.
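For reference, a minimal way to exercise the new `partitioner_hash_function` option on the kafka2 output, written the way this release's new tests drive plugins; the broker address, topic, and driver usage are illustrative assumptions, not documentation:

```ruby
require "fluent/test"
require "fluent/test/driver/output"
require "fluent/plugin/out_kafka2"

# Sketch only: murmur2 additionally needs the digest-murmurhash gem installed.
conf = %[
  @type kafka2
  brokers localhost:9092
  default_topic my-topic
  partitioner_hash_function murmur2
  <format>
    @type json
  </format>
]

d = Fluent::Test::Driver::Output.new(Fluent::Kafka2Output).configure(conf)
d.instance.refresh_client  # builds the ruby-kafka client with the chosen partitioner
```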
data/ci/prepare-kafka-server.sh ADDED
@@ -0,0 +1,33 @@
+ #!/bin/sh
+
+ export KAFKA_OPTS=-Dzookeeper.4lw.commands.whitelist=ruok
+ /usr/bin/zookeeper-server-start /etc/kafka/zookeeper.properties &
+ N_POLLING=30
+ n=1
+ while true ; do
+ sleep 1
+ status=$(echo ruok | nc localhost 2181)
+ if [ "$status" = "imok" ]; then
+ break
+ fi
+ n=$((n + 1))
+ if [ $n -ge $N_POLLING ]; then
+ echo "failed to get response from zookeeper-server"
+ exit 1
+ fi
+ done
+ /usr/bin/kafka-server-start /etc/kafka/server.properties &
+ n=1
+ while true ; do
+ sleep 1
+ status=$(/usr/bin/zookeeper-shell localhost:2181 ls /brokers/ids | sed -n 6p)
+ if [ "$status" = "[0]" ]; then
+ break
+ fi
+ n=$((n + 1))
+ if [ $n -ge $N_POLLING ]; then
+ echo "failed to get response from kafka-server"
+ exit 1
+ fi
+ done
+ /usr/bin/kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,13 +13,15 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.16.0'
+ gem.version = '0.17.0'
  gem.required_ruby_version = ">= 2.1.0"
 
  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
  gem.add_dependency 'ltsv'
- gem.add_dependency 'ruby-kafka', '>= 1.2.0', '< 2'
+ gem.add_dependency 'ruby-kafka', '>= 1.4.0', '< 2'
  gem.add_development_dependency "rake", ">= 0.9.2"
  gem.add_development_dependency "test-unit", ">= 3.0.8"
+ gem.add_development_dependency "test-unit-rr", "~> 1.0"
  gem.add_development_dependency "webrick"
+ gem.add_development_dependency "digest-murmurhash"
  end
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -197,17 +197,17 @@ class Fluent::KafkaInput < Fluent::Input
 
  logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system,sasl_plain_username: @username, sasl_plain_password: @password,
  sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  else
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
  ssl_verify_hostname: @ssl_verify_hostname)
@@ -294,7 +294,7 @@ class Fluent::KafkaInput < Fluent::Input
  def consume
  offset = @next_offset
  @fetch_args[:offset] = offset
- messages = @kafka.fetch_messages(@fetch_args)
+ messages = @kafka.fetch_messages(**@fetch_args)
 
  return if messages.size.zero?
 
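The `**@fetch_args` change above is the Ruby 3.0 keyword-argument interop fix listed in the ChangeLog. A standalone sketch with a stand-in method (not the plugin's API) shows why the explicit double splat is needed:

```ruby
# Ruby 3.0 no longer converts a trailing Hash positional argument into
# keyword arguments, so a stored options hash must be splatted explicitly.
def fetch(topic:, offset: 0)
  [topic, offset]
end

fetch_args = { topic: "test", offset: 5 }

fetch(**fetch_args)   # => ["test", 5] on Ruby 2.x and 3.x
# fetch(fetch_args)   # ArgumentError on Ruby 3.x: the hash stays a positional argument
```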
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -183,17 +183,17 @@ class Fluent::KafkaGroupInput < Fluent::Input
 
  logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password,
  sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  else
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab,
  ssl_verify_hostname: @ssl_verify_hostname)
@@ -217,7 +217,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
  end
 
  def setup_consumer
- consumer = @kafka.consumer(@consumer_opts)
+ consumer = @kafka.consumer(**@consumer_opts)
  @topics.each { |topic|
  if m = /^\/(.+)\/$/.match(topic)
  topic_or_regex = Regexp.new(m[1])
@@ -345,7 +345,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
  def run
  while @consumer
  begin
- @consumer.each_batch(@fetch_opts) { |batch|
+ @consumer.each_batch(**@fetch_opts) { |batch|
  if @tag_source == :record
  process_batch_with_record_tag(batch)
  else
data/lib/fluent/plugin/kafka_plugin_util.rb CHANGED
@@ -33,7 +33,7 @@ module Fluent
  end
 
  def read_ssl_file(path)
- return nil if path.nil?
+ return nil if path.nil? || path.respond_to?(:strip) && path.strip.empty?
 
  if path.is_a?(Array)
  path.map { |fp| File.read(fp) }
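With the extra guard (added in 0.16.1 per the ChangeLog), a blank `ssl_ca_cert`-style setting no longer triggers a file read. A standalone restatement for illustration only; the non-array branch is assumed to be a plain `File.read`, which is what the new test_kafka_plugin_util.rb below exercises:

```ruby
# Illustrative restatement, not the plugin module itself.
def read_ssl_file(path)
  # nil and blank strings are both treated as "no certificate configured"
  return nil if path.nil? || (path.respond_to?(:strip) && path.strip.empty?)

  if path.is_a?(Array)
    path.map { |fp| File.read(fp) }
  else
    File.read(path)
  end
end

read_ssl_file(nil)  # => nil
read_ssl_file("")   # => nil (previously this attempted File.read("") and raised)
```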
data/lib/fluent/plugin/out_kafka.rb CHANGED
@@ -19,6 +19,8 @@ DESC
  config_param :default_message_key, :string, :default => nil
  config_param :default_partition_key, :string, :default => nil
  config_param :default_partition, :integer, :default => nil
+ config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+ :desc => "Specify kafka patrtitioner hash algorithm"
  config_param :client_id, :string, :default => 'kafka'
  config_param :sasl_over_ssl, :bool, :default => true,
  :desc => <<-DESC
@@ -106,18 +108,21 @@ DESC
  begin
  if @seed_brokers.length > 0
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
  sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl,
- ssl_verify_hostname: @ssl_verify_hostname)
+ ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
- sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+ sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  else
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
- sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+ sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  end
  log.info "initialized kafka producer: #{@client_id}"
  else
@@ -215,7 +220,7 @@ DESC
  chain.next
 
  # out_kafka is mainly for testing so don't need the performance unlike out_kafka_buffered.
- producer = @kafka.producer(@producer_opts)
+ producer = @kafka.producer(**@producer_opts)
 
  es.each do |time, record|
  if @output_include_time
data/lib/fluent/plugin/out_kafka2.rb CHANGED
@@ -24,6 +24,8 @@ DESC
  config_param :partition_key_key, :string, :default => 'partition_key', :desc => "Field for kafka partition key"
  config_param :default_partition_key, :string, :default => nil
  config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
+ config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+ :desc => "Specify kafka patrtitioner hash algorithm"
  config_param :default_partition, :integer, :default => nil
  config_param :use_default_for_unknown_topic, :bool, :default => false, :desc => "If true, default_topic is used when topic not found"
  config_param :client_id, :string, :default => 'fluentd'
@@ -37,7 +39,7 @@ DESC
  config_param :exclude_partition, :bool, :default => false,
  :desc => 'Set true to remove partition from data'
  config_param :exclude_message_key, :bool, :default => false,
- :desc => 'Set true to remove partition key from data'
+ :desc => 'Set true to remove message key from data'
  config_param :exclude_topic_key, :bool, :default => false,
  :desc => 'Set true to remove topic name key from data'
  config_param :use_event_time, :bool, :default => false, :desc => 'Use fluentd event time for kafka create_time'
@@ -96,20 +98,23 @@ DESC
  begin
  logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
- sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+ sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl,
- ssl_verify_hostname: @ssl_verify_hostname)
+ ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  else
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl,
- ssl_verify_hostname: @ssl_verify_hostname)
+ ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  end
  log.info "initialized kafka producer: #{@client_id}"
  rescue Exception => e
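All of the ruby-kafka based outputs now build the client with an explicit `Kafka::Partitioner`. Taken out of the plugin, the call reduces to roughly the following (the broker address is a placeholder):

```ruby
require "kafka"  # ruby-kafka >= 1.4.0, which accepts a custom partitioner

# :murmur2 matches the Java client's default; it requires the
# digest-murmurhash gem at runtime, while :crc32 does not.
partitioner = Kafka::Partitioner.new(hash_function: :murmur2)

kafka = Kafka.new(seed_brokers: ["localhost:9092"], client_id: "fluentd",
                  partitioner: partitioner)
```

This is also why the gemspec above bumps the ruby-kafka floor to 1.4.0 and adds digest-murmurhash as a development dependency.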
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -26,6 +26,8 @@ DESC
  config_param :default_partition_key, :string, :default => nil
  config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
  config_param :default_partition, :integer, :default => nil
+ config_param :partitioner_hash_function, :enum, list: [:crc32, :murmur2], :default => :crc32,
+ :desc => "Specify kafka patrtitioner hash algorithm"
  config_param :client_id, :string, :default => 'kafka'
  config_param :idempotent, :bool, :default => false, :desc => 'Enable idempotent producer'
  config_param :sasl_over_ssl, :bool, :default => true,
@@ -130,18 +132,21 @@ DESC
  if @seed_brokers.length > 0
  logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
  sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl,
- ssl_verify_hostname: @ssl_verify_hostname)
+ ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
- sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+ sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  else
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert_file_path: @ssl_ca_cert,
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
- sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
+ sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname,
+ partitioner: Kafka::Partitioner.new(hash_function: @partitioner_hash_function))
  end
  log.info "initialized kafka producer: #{@client_id}"
  else
@@ -239,7 +244,7 @@ DESC
  @producers_mutex.synchronize {
  producer = @producers[Thread.current.object_id]
  unless producer
- producer = @kafka.producer(@producer_opts)
+ producer = @kafka.producer(**@producer_opts)
  @producers[Thread.current.object_id] = producer
  end
  producer
@@ -48,7 +48,7 @@ Set true to remove partition from data
  DESC
  config_param :exclude_message_key, :bool, :default => false,
  :desc => <<-DESC
- Set true to remove partition key from data
+ Set true to remove message key from data
  DESC
  config_param :exclude_topic_key, :bool, :default => false,
  :desc => <<-DESC
data/test/helper.rb CHANGED
@@ -8,6 +8,7 @@ rescue Bundler::BundlerError => e
  exit e.status_code
  end
  require 'test/unit'
+ require 'test/unit/rr'
 
  $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
  $LOAD_PATH.unshift(File.dirname(__FILE__))
@@ -22,6 +23,12 @@ unless ENV.has_key?('VERBOSE')
  end
 
  require 'fluent/plugin/out_kafka'
+ require 'fluent/plugin/out_kafka_buffered'
+ require 'fluent/plugin/out_kafka2'
+ require 'fluent/plugin/in_kafka'
+ require 'fluent/plugin/in_kafka_group'
+
+ require "fluent/test/driver/output"
 
  class Test::Unit::TestCase
  end
data/test/plugin/test_in_kafka.rb ADDED
@@ -0,0 +1,66 @@
+ require 'helper'
+ require 'fluent/test/driver/input'
+ require 'securerandom'
+
+ class KafkaInputTest < Test::Unit::TestCase
+ def setup
+ Fluent::Test.setup
+ end
+
+ TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+ CONFIG = %[
+ @type kafka
+ brokers localhost:9092
+ format text
+ @label @kafka
+ topics #{TOPIC_NAME}
+ ]
+
+ def create_driver(conf = CONFIG)
+ Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
+ end
+
+
+ def test_configure
+ d = create_driver
+ assert_equal TOPIC_NAME, d.instance.topics
+ assert_equal 'text', d.instance.format
+ assert_equal 'localhost:9092', d.instance.brokers
+ end
+
+ def test_multi_worker_support
+ d = create_driver
+ assert_false d.instance.multi_workers_ready?
+ end
+
+ class ConsumeTest < self
+ def setup
+ @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+ @producer = @kafka.producer
+ end
+
+ def teardown
+ @kafka.delete_topic(TOPIC_NAME)
+ @kafka.close
+ end
+
+ def test_consume
+ conf = %[
+ @type kafka
+ brokers localhost:9092
+ format text
+ @label @kafka
+ topics #{TOPIC_NAME}
+ ]
+ d = create_driver
+
+ d.run(expect_records: 1, timeout: 10) do
+ @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+ @producer.deliver_messages
+ end
+ expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+ assert_equal expected, d.events[0][2]
+ end
+ end
+ end
data/test/plugin/test_in_kafka_group.rb ADDED
@@ -0,0 +1,67 @@
+ require 'helper'
+ require 'fluent/test/driver/input'
+ require 'securerandom'
+
+ class KafkaGroupInputTest < Test::Unit::TestCase
+ def setup
+ Fluent::Test.setup
+ end
+
+ TOPIC_NAME = "kafka-input-#{SecureRandom.uuid}"
+
+ CONFIG = %[
+ @type kafka
+ brokers localhost:9092
+ consumer_group fluentd
+ format text
+ @label @kafka
+ topics #{TOPIC_NAME}
+ ]
+
+ def create_driver(conf = CONFIG)
+ Fluent::Test::Driver::Input.new(Fluent::KafkaGroupInput).configure(conf)
+ end
+
+
+ def test_configure
+ d = create_driver
+ assert_equal [TOPIC_NAME], d.instance.topics
+ assert_equal 'text', d.instance.format
+ assert_equal 'localhost:9092', d.instance.brokers
+ end
+
+ def test_multi_worker_support
+ d = create_driver
+ assert_true d.instance.multi_workers_ready?
+ end
+
+ class ConsumeTest < self
+ def setup
+ @kafka = Kafka.new(["localhost:9092"], client_id: 'kafka')
+ @producer = @kafka.producer
+ end
+
+ def teardown
+ @kafka.delete_topic(TOPIC_NAME)
+ @kafka.close
+ end
+
+ def test_consume
+ conf = %[
+ @type kafka
+ brokers localhost:9092
+ format text
+ @label @kafka
+ topics #{TOPIC_NAME}
+ ]
+ d = create_driver
+
+ d.run(expect_records: 1, timeout: 10) do
+ @producer.produce("Hello, fluent-plugin-kafka!", topic: TOPIC_NAME)
+ @producer.deliver_messages
+ end
+ expected = {'message' => 'Hello, fluent-plugin-kafka!'}
+ assert_equal expected, d.events[0][2]
+ end
+ end
+ end
data/test/plugin/test_kafka_plugin_util.rb ADDED
@@ -0,0 +1,44 @@
+ require 'helper'
+ require 'fluent/plugin/kafka_plugin_util'
+
+ class KafkaPluginUtilTest < Test::Unit::TestCase
+
+ def self.config_param(name, type, options)
+ end
+ include Fluent::KafkaPluginUtil::SSLSettings
+
+ def config_param
+ end
+ def setup
+ Fluent::Test.setup
+ end
+
+ def test_read_ssl_file_when_nil
+ stub(File).read(anything) do |path|
+ path
+ end
+ assert_equal(nil, read_ssl_file(nil))
+ end
+
+ def test_read_ssl_file_when_empty_string
+ stub(File).read(anything) do |path|
+ path
+ end
+ assert_equal(nil, read_ssl_file(""))
+ end
+
+ def test_read_ssl_file_when_non_empty_path
+ stub(File).read(anything) do |path|
+ path
+ end
+ assert_equal("path", read_ssl_file("path"))
+ end
+
+ def test_read_ssl_file_when_non_empty_array
+ stub(File).read(anything) do |path|
+ path
+ end
+ assert_equal(["a","b"], read_ssl_file(["a","b"]))
+ end
+
+ end
data/test/plugin/test_out_kafka.rb CHANGED
@@ -43,6 +43,16 @@ class KafkaOutputTest < Test::Unit::TestCase
  d = create_driver
  end
 
+ data("crc32" => "crc32",
+ "murmur2" => "murmur2")
+ def test_partitioner_hash_function(data)
+ hash_type = data
+ d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+ assert_nothing_raised do
+ d.instance.refresh_client
+ end
+ end
+
  def test_mutli_worker_support
  d = create_driver
  assert_equal true, d.instance.multi_workers_ready?
data/test/plugin/test_out_kafka2.rb ADDED
@@ -0,0 +1,60 @@
+ require 'helper'
+ require 'fluent/test/helpers'
+ require 'fluent/output'
+
+ class Kafka2OutputTest < Test::Unit::TestCase
+ include Fluent::Test::Helpers
+
+ def setup
+ Fluent::Test.setup
+ end
+
+ def base_config
+ config_element('ROOT', '', {"@type" => "kafka2"}, [
+ config_element('format', "", {"@type" => "json"})
+ ])
+ end
+
+ def config
+ base_config + config_element('ROOT', '', {"default_topic" => "kitagawakeiko",
+ "brokers" => "localhost:9092"}, [
+ ])
+ end
+
+ def create_driver(conf = config, tag='test')
+ Fluent::Test::Driver::Output.new(Fluent::Kafka2Output).configure(conf)
+ end
+
+ def test_configure
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(base_config)
+ }
+
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(config)
+ }
+
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(config + config_element('buffer', "", {"@type" => "memory"}))
+ }
+
+ d = create_driver
+ assert_equal 'kitagawakeiko', d.instance.default_topic
+ assert_equal ['localhost:9092'], d.instance.brokers
+ end
+
+ data("crc32" => "crc32",
+ "murmur2" => "murmur2")
+ def test_partitioner_hash_function(data)
+ hash_type = data
+ d = create_driver(config + config_element('ROOT', '', {"partitioner_hash_function" => hash_type}))
+ assert_nothing_raised do
+ d.instance.refresh_client
+ end
+ end
+
+ def test_mutli_worker_support
+ d = create_driver
+ assert_equal true, d.instance.multi_workers_ready?
+ end
+ end
data/test/plugin/test_out_kafka_buffered.rb ADDED
@@ -0,0 +1,68 @@
+ require 'helper'
+ require 'fluent/output'
+
+ class KafkaBufferedOutputTest < Test::Unit::TestCase
+ def setup
+ Fluent::Test.setup
+ end
+
+ BASE_CONFIG = %[
+ type kafka_buffered
+ ]
+
+ CONFIG = BASE_CONFIG + %[
+ default_topic kitagawakeiko
+ brokers localhost:9092
+ ]
+
+ def create_driver(conf = CONFIG, tag='test')
+ Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutputBuffered, tag).configure(conf)
+ end
+
+ def test_configure
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(BASE_CONFIG)
+ }
+
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(CONFIG)
+ }
+
+ assert_nothing_raised(Fluent::ConfigError) {
+ create_driver(CONFIG + %[
+ buffer_type memory
+ ])
+ }
+
+ d = create_driver
+ assert_equal 'kitagawakeiko', d.instance.default_topic
+ assert_equal 'localhost:9092', d.instance.brokers
+ end
+
+ def test_format
+ d = create_driver
+ end
+
+ data("crc32" => "crc32",
+ "murmur2" => "murmur2")
+ def test_partitioner_hash_function(data)
+ hash_type = data
+ d = create_driver(CONFIG + %[partitioner_hash_function #{hash_type}])
+ assert_nothing_raised do
+ d.instance.refresh_client
+ end
+ end
+
+ def test_mutli_worker_support
+ d = create_driver
+ assert_equal true, d.instance.multi_workers_ready?
+
+ end
+
+ def test_write
+ d = create_driver
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+ d.emit({"a"=>1}, time)
+ d.emit({"a"=>2}, time)
+ end
+ end
metadata CHANGED
@@ -1,15 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.16.0
+ version: 0.17.0
  platform: ruby
  authors:
  - Hidemasa Togashi
  - Masahiro Nakagawa
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-01-25 00:00:00.000000000 Z
+ date: 2021-08-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -51,7 +51,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.2.0
+ version: 1.4.0
  - - "<"
  - !ruby/object:Gem::Version
  version: '2'
@@ -61,7 +61,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.2.0
+ version: 1.4.0
  - - "<"
  - !ruby/object:Gem::Version
  version: '2'
@@ -93,6 +93,20 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: 3.0.8
+ - !ruby/object:Gem::Dependency
+ name: test-unit-rr
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.0'
  - !ruby/object:Gem::Dependency
  name: webrick
  requirement: !ruby/object:Gem::Requirement
@@ -107,6 +121,20 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ name: digest-murmurhash
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  description: Fluentd plugin for Apache Kafka > 0.8
  email:
  - togachiro@gmail.com
@@ -115,14 +143,18 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - ".github/ISSUE_TEMPLATE/bug_report.yaml"
+ - ".github/ISSUE_TEMPLATE/config.yml"
+ - ".github/ISSUE_TEMPLATE/feature_request.yaml"
  - ".github/workflows/linux.yml"
+ - ".github/workflows/stale-actions.yml"
  - ".gitignore"
- - ".travis.yml"
  - ChangeLog
  - Gemfile
  - LICENSE
  - README.md
  - Rakefile
+ - ci/prepare-kafka-server.sh
  - fluent-plugin-kafka.gemspec
  - lib/fluent/plugin/in_kafka.rb
  - lib/fluent/plugin/in_kafka_group.rb
@@ -135,12 +167,17 @@ files:
  - lib/fluent/plugin/out_rdkafka.rb
  - lib/fluent/plugin/out_rdkafka2.rb
  - test/helper.rb
+ - test/plugin/test_in_kafka.rb
+ - test/plugin/test_in_kafka_group.rb
+ - test/plugin/test_kafka_plugin_util.rb
  - test/plugin/test_out_kafka.rb
+ - test/plugin/test_out_kafka2.rb
+ - test/plugin/test_out_kafka_buffered.rb
  homepage: https://github.com/fluent/fluent-plugin-kafka
  licenses:
  - Apache-2.0
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -155,10 +192,15 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3
- signing_key:
+ rubygems_version: 3.1.4
+ signing_key:
  specification_version: 4
  summary: Fluentd plugin for Apache Kafka > 0.8
  test_files:
  - test/helper.rb
+ - test/plugin/test_in_kafka.rb
+ - test/plugin/test_in_kafka_group.rb
+ - test/plugin/test_kafka_plugin_util.rb
  - test/plugin/test_out_kafka.rb
+ - test/plugin/test_out_kafka2.rb
+ - test/plugin/test_out_kafka_buffered.rb
data/.travis.yml DELETED
@@ -1,21 +0,0 @@
- language: ruby
-
- rvm:
- - 2.1
- - 2.2
- - 2.3.1
- - 2.4.1
- - 2.5.0
- - ruby-head
-
- before_install:
- - gem update --system=2.7.8
- script:
- - bundle exec rake test
-
- sudo: false
-
- matrix:
- allow_failures:
- - rvm: ruby-head
-