fluent-plugin-kafka 0.19.2 → 0.19.4
This diff compares the content of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.github/workflows/add-to-project.yml +17 -0
- data/.github/workflows/linux.yml +13 -3
- data/.github/workflows/stale-actions.yml +1 -1
- data/ChangeLog +13 -0
- data/README.md +3 -1
- data/fluent-plugin-kafka.gemspec +5 -1
- data/lib/fluent/plugin/kafka_producer_ext.rb +7 -2
- data/lib/fluent/plugin/out_rdkafka2.rb +22 -42
- data/lib/fluent/plugin/rdkafka_patch/0_11_0.rb +15 -0
- data/lib/fluent/plugin/rdkafka_patch/0_12_0.rb +27 -0
- data/lib/fluent/plugin/rdkafka_patch/0_14_0.rb +44 -0
- data/lib/fluent/plugin/rdkafka_patch/0_16_0.rb +55 -0
- metadata +25 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 242a6673fffdacfbab759884cadaf66e22b33e72cbb60ff49cc9201a28366125
+  data.tar.gz: 0ac9e110d4f43cb6d25e9cf416084ebcb05d3467671db28bae38310d6c8741dc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5ed4bc59126e9dbb6c59c4dc35c7b94d837652ed0f48a4fecdba62071ab4b284389adbe0d2cf493e926757d8aaedfc8283f6192e7e9d71c882176bd3d044d1c2
+  data.tar.gz: e2e1a07d54ac22a342b2f27ddcacb69e5973d6f606000c0e2fcd601f97a6f9c6ed8c488d759a9af3c95e16a70c6f6aca691567734a2fae852b9acc01d34cae46
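These digests can be checked locally when auditing the upgrade: a .gem file is a tar archive whose metadata.gz and data.tar.gz members are exactly what checksums.yaml covers. A minimal Ruby sketch using only the standard library (it assumes the unpacked members sit in the current directory):

    require "digest"

    # After `gem fetch fluent-plugin-kafka -v 0.19.4` and
    # `tar -xf fluent-plugin-kafka-0.19.4.gem`, these hexdigests
    # should match the `+` SHA256 values in the hunk above.
    %w[metadata.gz data.tar.gz].each do |member|
      puts format("%s: %s", member, Digest::SHA256.file(member).hexdigest)
    end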
data/.github/workflows/add-to-project.yml
ADDED
@@ -0,0 +1,17 @@
+name: Add bugs to fluent project
+
+on:
+  issues:
+    types:
+      - opened
+
+jobs:
+  add-to-project:
+    name: Add issue to project
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/add-to-project@v1.0.2
+        with:
+          project-url: https://github.com/orgs/fluent/projects/4
+          github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
+          labeled: waiting-for-triage
data/.github/workflows/linux.yml
CHANGED
@@ -12,12 +12,22 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        ruby: [ '3.
+        ruby: [ '3.4', '3.3', '3.2', '3.1', '3.0' ]
         os:
           - ubuntu-latest
         rdkafka_versions:
           - { min: '>= 0.6.0', max: '< 0.12.0' }
-          - { min: '>= 0.12.0', max: '
+          - { min: '>= 0.12.0', max: '< 0.14.0' }
+          - { min: '>= 0.14.0', max: '< 0.16.0' }
+          - { min: '>= 0.16.0', max: '>= 0.16.0' }
+        # rdkafka 0.15.2 is the last version which supports Ruby 2.7
+        include:
+          - ruby: '2.7'
+            os: ubuntu-latest
+            rdkafka_versions: { min: '>= 0.6.0', max: '< 0.12.0' }
+          - ruby: '2.7'
+            os: ubuntu-latest
+            rdkafka_versions: { min: '>= 0.12.0', max: '= 0.15.2' }
     name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }} with rdkafka gem version (min ${{ matrix.rdkafka_versions.min }} max ${{ matrix.rdkafka_versions.max }})
     steps:
       - uses: actions/checkout@v4
@@ -40,6 +50,6 @@ jobs:
         RDKAFKA_VERSION_MAX_RANGE: ${{ matrix.rdkafka_versions.max }}
       run: |
         sudo ./ci/prepare-kafka-server.sh
-        gem install
+        gem install rake
         bundle install --jobs 4 --retry 3
         bundle exec rake test
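The matrix only exports RDKAFKA_VERSION_MIN_RANGE and RDKAFKA_VERSION_MAX_RANGE; wiring them into the bundle happens in the Gemfile, which is not part of this diff. A plausible sketch of that consumption, with the fallback defaults being assumptions:

    # Gemfile (sketch; the project's actual Gemfile is not shown in this diff)
    source "https://rubygems.org"
    gemspec

    # Constrain rdkafka to the matrix cell's range; default to unconstrained.
    min = ENV.fetch("RDKAFKA_VERSION_MIN_RANGE", ">= 0")
    max = ENV.fetch("RDKAFKA_VERSION_MAX_RANGE", ">= 0")
    gem "rdkafka", min, max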
data/ChangeLog
CHANGED
@@ -1,3 +1,16 @@
+Release 0.19.4 - 2025/03/24
+	* Support Ruby 3.4. (#526)
+
+Release 0.19.3 - 2024/08/02
+	* out_rdkafka2: Add `unrecoverable_error_codes` parameter to handle
+	  specific error codes as unrecoverable errors. `topic_authorization_failed`
+	  and `msg_size_too_large` are treated as unrecoverable errors by default. (#510)
+	* out_rdkafka2: Add missing closing timeout feature to keep compatibility with
+	  rdkafka-ruby 0.12.x or later. (#505)
+	* out_rdkafka2: Add `idempotent` parameter to enable idempotence in Kafka producer. (#501)
+	* out_kafka2: Fix errors while sending data to EventHub by adding
+	  broker pool to take care of fetching metadata (#503)
+
 Release 0.19.2 - 2023/10/13
 	* out_rdkafka2: Add `discard_kafka_delivery_failed_regex`
 
data/README.md
CHANGED
@@ -212,6 +212,7 @@ If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin inst
     discard_kafka_delivery_failed (bool) :default => false (No discard)
     partitioner_hash_function (enum) (crc32|murmur2) :default => 'crc32'
     share_producer (bool) :default => false
+    idempotent (bool) :default => false
 
     # If you intend to rely on AWS IAM auth to MSK with long lived credentials
     # https://docs.aws.amazon.com/msk/latest/developerguide/iam-access-control.html
@@ -509,7 +510,6 @@ You need to install rdkafka gem.
     partition_key (string) :default => 'partition'
     partition_key_key (string) :default => 'partition_key'
     message_key_key (string) :default => 'message_key'
-    default_topic (string) :default => nil
     use_default_for_unknown_topic (bool) :default => false
     use_default_for_unknown_partition_error (bool) :default => false
     default_partition_key (string) :default => nil
@@ -558,6 +558,8 @@ You need to install rdkafka gem.
     # load of both Fluentd and Kafka when excessive messages are attempted
     # to send. Default is no limit.
     max_enqueue_bytes_per_second (integer) :default => nil
+    unrecoverable_error_codes (array) :default => ["topic_authorization_failed", "msg_size_too_large"]
+
 </match>
 
 `rdkafka2` supports `discard_kafka_delivery_failed_regex` parameter:
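The two parameters documented above (both new in 0.19.3) fit naturally in a single output block. A minimal `<match>` sketch in the README's own config style; the broker and topic values are placeholders, not values from this diff:

    <match app.**>
      @type rdkafka2
      brokers broker1:9092
      default_topic events
      # maps to librdkafka's enable.idempotence
      idempotent true
      # treat only oversized records as unrecoverable; keep retrying auth errors
      unrecoverable_error_codes ["msg_size_too_large"]
      <format>
        @type json
      </format>
    </match>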
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -13,12 +13,16 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.19.2'
+  gem.version = '0.19.4'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
   gem.add_dependency 'ltsv'
   gem.add_dependency 'ruby-kafka', '>= 1.5.0', '< 2'
+
+  # gems that aren't default gems as of Ruby 3.4
+  gem.add_dependency("bigdecimal", ["~> 3.1"])
+
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
   gem.add_development_dependency "test-unit-rr", "~> 1.0"
data/lib/fluent/plugin/kafka_producer_ext.rb
CHANGED
@@ -93,6 +93,10 @@ module Kafka
     @max_buffer_bytesize = max_buffer_bytesize
     @compressor = compressor
     @partitioner = partitioner
+
+    # The set of topics that are produced to.
+    @target_topics = Set.new
+
     # A buffer organized by topic/partition.
     @buffer = MessageBuffer.new
 
@@ -116,7 +120,8 @@ module Kafka
     if @transaction_manager.transactional? && !@transaction_manager.in_transaction?
       raise 'You must trigger begin_transaction before producing messages'
     end
-
+
+    @target_topics.add(topic)
     @pending_message_queue.write(message)
 
     nil
@@ -187,7 +192,7 @@ module Kafka
   def deliver_messages_with_retries
     attempt = 0
 
-
+    @cluster.add_target_topics(@target_topics)
 
     operation = ProduceOperation.new(
       cluster: @cluster,
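The three hunks implement one pattern for #503: record every topic at produce() time, then register the whole set with the cluster before delivery, so partition metadata (EventHub's included) is fetched for all pending topics at once. A stripped-down sketch of the flow; Cluster here is a stand-in for illustration, not the ruby-kafka class:

    require "set"

    # Stand-in: ruby-kafka's cluster can only fetch partition metadata
    # for topics it has been told to target.
    class Cluster
      def initialize
        @topics = Set.new
      end

      def add_target_topics(topics)
        @topics.merge(topics)
      end

      def knows?(topic)
        @topics.include?(topic)
      end
    end

    cluster = Cluster.new
    target_topics = Set.new

    # produce() only records the topic...
    %w[events events audit].each { |topic| target_topics.add(topic) }

    # ...and deliver_messages_with_retries registers the set up front.
    cluster.add_target_topics(target_topics)
    cluster.knows?("audit") # => true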
data/lib/fluent/plugin/out_rdkafka2.rb
CHANGED
@@ -5,48 +5,19 @@ require 'fluent/plugin/kafka_plugin_util'
 
 require 'rdkafka'
 
-# This is required for `rdkafka` version >= 0.12.0
-# Overriding the close method in order to provide a time limit for when it should be forcibly closed
-class Rdkafka::Producer::Client
-  # return false if producer is forcefully closed, otherwise return true
-  def close(timeout=nil)
-    return unless @native
-
-    # Indicate to polling thread that we're closing
-    @polling_thread[:closing] = true
-    # Wait for the polling thread to finish up
-    thread = @polling_thread.join(timeout)
-
-    Rdkafka::Bindings.rd_kafka_destroy(@native)
-
-    @native = nil
-
-    return !thread.nil?
-  end
-end
-
-class Rdkafka::Producer
-  # return false if producer is forcefully closed, otherwise return true
-  def close(timeout = nil)
-    rdkafka_version = Rdkafka::VERSION || '0.0.0'
-    # Rdkafka version >= 0.12.0 changed its internals
-    if Gem::Version::create(rdkafka_version) >= Gem::Version.create('0.12.0')
-      ObjectSpace.undefine_finalizer(self)
-
-      return @client.close(timeout)
-    end
-
-    @closing = true
-    # Wait for the polling thread to finish up
-    # If the broker isn't alive, the thread doesn't exit
-    if timeout
-      thr = @polling_thread.join(timeout)
-      return !!thr
-    else
-      @polling_thread.join
-      return true
-    end
+begin
+  rdkafka_version = Gem::Version::create(Rdkafka::VERSION)
+  if rdkafka_version < Gem::Version.create('0.12.0')
+    require_relative 'rdkafka_patch/0_11_0'
+  elsif rdkafka_version == Gem::Version.create('0.12.0')
+    require_relative 'rdkafka_patch/0_12_0'
+  elsif rdkafka_version >= Gem::Version.create('0.14.0')
+    require_relative 'rdkafka_patch/0_14_0'
+  elsif rdkafka_version >= Gem::Version.create('0.16.0')
+    require_relative 'rdkafka_patch/0_16_0'
   end
+rescue LoadError, NameError
+  raise "unable to patch rdkafka."
 end
 
 module Fluent::Plugin
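One subtlety in this dispatch is branch order: Gem::Version comparisons chained with elsif are tested top to bottom, and since any version >= 0.16.0 also satisfies >= 0.14.0, the 0_14_0 branch above is the one that fires for rdkafka 0.16 and later. A standalone sketch of the same dispatch with the floors tested highest-first; patch_for is a hypothetical helper, not plugin code:

    # Plain Ruby; Gem::Version ships with RubyGems.
    def patch_for(version_string)
      v = Gem::Version.create(version_string)
      if v >= Gem::Version.create("0.16.0")
        "rdkafka_patch/0_16_0"
      elsif v >= Gem::Version.create("0.14.0")
        "rdkafka_patch/0_14_0"
      elsif v == Gem::Version.create("0.12.0")
        "rdkafka_patch/0_12_0"
      elsif v < Gem::Version.create("0.12.0")
        "rdkafka_patch/0_11_0"
      end
    end

    patch_for("0.11.1") # => "rdkafka_patch/0_11_0"
    patch_for("0.15.2") # => "rdkafka_patch/0_14_0"
    patch_for("0.16.1") # => "rdkafka_patch/0_16_0"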
@@ -71,6 +42,7 @@ DESC
     config_param :default_message_key, :string, :default => nil
     config_param :partition_key, :string, :default => 'partition', :desc => "Field for kafka partition"
     config_param :default_partition, :integer, :default => nil
+    config_param :idempotent, :bool, :default => false, :desc => 'Enable idempotent producer'
     config_param :output_data_type, :string, :default => 'json', :obsoleted => "Use <format> section instead"
     config_param :output_include_tag, :bool, :default => false, :obsoleted => "Use <inject> section instead"
     config_param :output_include_time, :bool, :default => false, :obsoleted => "Use <inject> section instead"
@@ -126,6 +98,8 @@ DESC
     config_param :max_enqueue_bytes_per_second, :size, :default => nil, :desc => 'The maximum number of enqueueing bytes per second'
 
     config_param :service_name, :string, :default => nil, :desc => 'Used for sasl.kerberos.service.name'
+    config_param :unrecoverable_error_codes, :array, :default => ["topic_authorization_failed", "msg_size_too_large"],
+                 :desc => 'Handle some of the error codes should be unrecoverable if specified'
 
     config_section :buffer do
       config_set_default :chunk_keys, ["topic"]
@@ -313,6 +287,7 @@ DESC
       config[:"batch.num.messages"] = @rdkafka_message_max_num if @rdkafka_message_max_num
       config[:"sasl.username"] = @username if @username
       config[:"sasl.password"] = @password if @password
+      config[:"enable.idempotence"] = @idempotent if @idempotent
 
       @rdkafka_options.each { |k, v|
         config[k.to_sym] = v
@@ -522,7 +497,12 @@ DESC
 
           raise e
         else
-          raise e
+          if unrecoverable_error_codes.include?(e.code.to_s)
+            # some of the errors should be handled as an unrecoverable error
+            raise Fluent::UnrecoverableError, "Rejected due to #{e}"
+          else
+            raise e
+          end
         end
       end
     end
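The classification rule in the last hunk is small but load-bearing: errors whose code is listed in unrecoverable_error_codes are re-raised as Fluent::UnrecoverableError, which tells Fluentd to stop retrying the chunk (and route it to a <secondary> if one is configured) instead of retrying a send that can never succeed. A minimal sketch of the rule in isolation; the names here are illustrative, not plugin internals:

    UNRECOVERABLE = %w[topic_authorization_failed msg_size_too_large]

    def classify(error_code)
      UNRECOVERABLE.include?(error_code.to_s) ? :give_up : :retry
    end

    classify(:msg_size_too_large)   # => :give_up (no retry can shrink the record)
    classify(:leader_not_available) # => :retry   (transient broker state)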
data/lib/fluent/plugin/rdkafka_patch/0_11_0.rb
ADDED
@@ -0,0 +1,15 @@
+class Rdkafka::Producer
+  # return false if producer is forcefully closed, otherwise return true
+  def close(timeout = nil)
+    @closing = true
+    # Wait for the polling thread to finish up
+    # If the broker isn't alive, the thread doesn't exit
+    if timeout
+      thr = @polling_thread.join(timeout)
+      return !!thr
+    else
+      @polling_thread.join
+      return true
+    end
+  end
+end
data/lib/fluent/plugin/rdkafka_patch/0_12_0.rb
ADDED
@@ -0,0 +1,27 @@
+# This is required for `rdkafka` version >= 0.12.0
+# Overriding the close method in order to provide a time limit for when it should be forcibly closed
+class Rdkafka::Producer::Client
+  # return false if producer is forcefully closed, otherwise return true
+  def close(timeout=nil)
+    return unless @native
+
+    # Indicate to polling thread that we're closing
+    @polling_thread[:closing] = true
+    # Wait for the polling thread to finish up
+    thread = @polling_thread.join(timeout)
+
+    Rdkafka::Bindings.rd_kafka_destroy(@native)
+
+    @native = nil
+
+    return !thread.nil?
+  end
+end
+
+class Rdkafka::Producer
+  def close(timeout = nil)
+    ObjectSpace.undefine_finalizer(self)
+
+    return @client.close(timeout)
+  end
+end
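Every variant of the patch keeps the same calling contract, which out_rdkafka2's shutdown path relies on: close(timeout) blocks up to timeout seconds for the poll thread and returns false when the producer had to be forcefully closed. Illustrative use, where producer and log are assumed to exist in the caller's scope:

    if producer.close(10) # give the polling thread up to 10 seconds
      log.info "rdkafka producer shut down cleanly"
    else
      log.warn "rdkafka producer was forcefully closed"
    end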
data/lib/fluent/plugin/rdkafka_patch/0_14_0.rb
ADDED
@@ -0,0 +1,44 @@
+class Rdkafka::NativeKafka
+  # return false if producer is forcefully closed, otherwise return true
+  def close(timeout=nil, object_id=nil)
+    return true if closed?
+
+    synchronize do
+      # Indicate to the outside world that we are closing
+      @closing = true
+
+      thread_status = :unknown
+      if @polling_thread
+        # Indicate to polling thread that we're closing
+        @polling_thread[:closing] = true
+
+        # Wait for the polling thread to finish up,
+        # this can be aborted in practice if this
+        # code runs from a finalizer.
+        thread_status = @polling_thread.join(timeout)
+      end
+
+      # Destroy the client after locking both mutexes
+      @poll_mutex.lock
+
+      # This check prevents a race condition, where we would enter the close in two threads
+      # and after unlocking the primary one that hold the lock but finished, ours would be unlocked
+      # and would continue to run, trying to destroy inner twice
+      if @inner
+        Rdkafka::Bindings.rd_kafka_destroy(@inner)
+        @inner = nil
+        @opaque = nil
+      end
+
+      !thread_status.nil?
+    end
+  end
+end
+
+class Rdkafka::Producer
+  def close(timeout = nil)
+    return true if closed?
+    ObjectSpace.undefine_finalizer(self)
+    @native_kafka.close(timeout)
+  end
+end
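Both of the newer patches call ObjectSpace.undefine_finalizer(self) before closing: rdkafka-ruby registers a finalizer so an abandoned producer still gets destroyed at GC, and an explicit close must drop that finalizer to avoid destroying the native handle twice. A self-contained sketch of the pattern with a stand-in class, not rdkafka-ruby code:

    class Handle
      def initialize
        # GC safety net: runs only if the user never calls close.
        ObjectSpace.define_finalizer(self, self.class.finalizer)
      end

      # The proc must not capture `self`, or the object never becomes collectable.
      def self.finalizer
        proc { puts "finalizer: destroying native handle" }
      end

      def close
        # Explicit close: remove the safety net so the handle isn't destroyed twice.
        ObjectSpace.undefine_finalizer(self)
        puts "close: destroying native handle"
      end
    end

    Handle.new.close # prints the explicit-close line only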
data/lib/fluent/plugin/rdkafka_patch/0_16_0.rb
ADDED
@@ -0,0 +1,55 @@
+class Rdkafka::NativeKafka
+  # return false if producer is forcefully closed, otherwise return true
+  def close(timeout=nil, object_id=nil)
+    return true if closed?
+
+    synchronize do
+      # Indicate to the outside world that we are closing
+      @closing = true
+
+      thread_status = :unknown
+      if @polling_thread
+        # Indicate to polling thread that we're closing
+        @polling_thread[:closing] = true
+
+        # Wait for the polling thread to finish up,
+        # this can be aborted in practice if this
+        # code runs from a finalizer.
+        thread_status = @polling_thread.join(timeout)
+      end
+
+      # Destroy the client after locking both mutexes
+      @poll_mutex.lock
+
+      # This check prevents a race condition, where we would enter the close in two threads
+      # and after unlocking the primary one that hold the lock but finished, ours would be unlocked
+      # and would continue to run, trying to destroy inner twice
+      return unless @inner
+
+      Rdkafka::Bindings.rd_kafka_destroy(@inner)
+      @inner = nil
+      @opaque = nil
+
+      !thread_status.nil?
+    end
+  end
+end
+
+class Rdkafka::Producer
+  def close(timeout = nil)
+    return true if closed?
+    ObjectSpace.undefine_finalizer(self)
+
+    @native_kafka.close(timeout) do
+      # We need to remove the topics references objects before we destroy the producer,
+      # otherwise they would leak out
+      @topics_refs_map.each_value do |refs|
+        refs.each_value do |ref|
+          Rdkafka::Bindings.rd_kafka_topic_destroy(ref)
+        end
+      end
+    end
+
+    @topics_refs_map.clear
+  end
+end
metadata
CHANGED
@@ -1,15 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.19.2
+  version: 0.19.4
 platform: ruby
 authors:
 - Hidemasa Togashi
 - Masahiro Nakagawa
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2025-03-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -65,6 +65,20 @@ dependencies:
   - - "<"
     - !ruby/object:Gem::Version
       version: '2'
+- !ruby/object:Gem::Dependency
+  name: bigdecimal
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.1'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.1'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -147,6 +161,7 @@ files:
 - ".github/ISSUE_TEMPLATE/config.yml"
 - ".github/ISSUE_TEMPLATE/feature_request.yaml"
 - ".github/dependabot.yml"
+- ".github/workflows/add-to-project.yml"
 - ".github/workflows/linux.yml"
 - ".github/workflows/stale-actions.yml"
 - ".gitignore"
@@ -171,6 +186,10 @@ files:
 - lib/fluent/plugin/out_kafka_buffered.rb
 - lib/fluent/plugin/out_rdkafka.rb
 - lib/fluent/plugin/out_rdkafka2.rb
+- lib/fluent/plugin/rdkafka_patch/0_11_0.rb
+- lib/fluent/plugin/rdkafka_patch/0_12_0.rb
+- lib/fluent/plugin/rdkafka_patch/0_14_0.rb
+- lib/fluent/plugin/rdkafka_patch/0_16_0.rb
 - test/helper.rb
 - test/plugin/test_in_kafka.rb
 - test/plugin/test_in_kafka_group.rb
@@ -183,7 +202,7 @@ homepage: https://github.com/fluent/fluent-plugin-kafka
 licenses:
 - Apache-2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -198,8 +217,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.5.22
+signing_key:
 specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8
 test_files: