fluent-plugin-kafka 0.18.0 → 0.18.1
- checksums.yaml +4 -4
- data/ChangeLog +3 -0
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/kafka_producer_ext.rb +14 -12
- data/lib/fluent/plugin/out_kafka2.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8b47d0dafccd1d84a2adcef4e9a0830b26998fd21c4095a974887ffa201fc64c
+  data.tar.gz: 744716215237149802687884a5c306e4684f1d2e89959d13d04b64f3116a65f2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0215e2229d272a24abef39f8179457a4585d582a388f8b03bc70a1d4a066d27422878c91277d5b7412b3ddd930716b337ff56a75e4fb8de00366063b7c47c52a
+  data.tar.gz: 6f47fa553a9cf2cf2d55b725d5b8f562e6aef1c3ef92aee597bbf9f84aac7e0fb7fac5d67864d935c02b1dc5a890563a9b530b75ae47d3bac1763fbc021f20bc
data/ChangeLog
CHANGED
@@ -1,3 +1,6 @@
+Release 0.18.1 - 2022/08/17
+	* out_kafka2: Fix a bug that it doesn't respect `chunk_limit_records` and `chunk_limit_size`
+
 Release 0.18.0 - 2022/07/21
 	* out_kafka2: Keep alive Kafka connections between flushes
 	* out_rdkafka2: Enable to set SASL credentials via `username` and `password` parameters
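With this fix, out_kafka2 users bound the producer's batching solely in the Fluentd <buffer> section. A minimal configuration sketch (the broker address, tag pattern, and topic name are placeholders, not taken from this diff):

<match app.**>
  @type kafka2
  brokers localhost:9092
  default_topic app-events
  <format>
    @type json
  </format>
  <buffer>
    # These two limits are now respected when chunks are produced to Kafka
    chunk_limit_size 1m
    chunk_limit_records 1000
    flush_interval 3s
  </buffer>
</match>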
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.18.0'
+  gem.version = '0.18.1'
   gem.required_ruby_version = ">= 2.1.0"

   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]

data/lib/fluent/plugin/kafka_producer_ext.rb
CHANGED
@@ -38,9 +38,15 @@ module Kafka
   end

 # for out_kafka2
+# Majority (if not all) of this code is lifted from https://github.com/zendesk/ruby-kafka/blob/master/lib/kafka/producer.rb
+# with the main difference where we have removed any checks regarding max_buffer_bytesize and max_buffer_size
+# The reason for doing this is to provide a better UX for our users where they only need to set those bounds in
+# the Buffer section using `chunk_limit_size` and `chunk_limit_records`.
+#
+# We should reconsider this in the future in case the `ruby-kafka` library drastically changes its internal.
 module Kafka
   class Client
-    def
+    def custom_producer(compression_codec: nil, compression_threshold: 1, ack_timeout: 5, required_acks: :all, max_retries: 2, retry_backoff: 1, max_buffer_size: 1000, max_buffer_bytesize: 10_000_000, idempotent: false, transactional: false, transactional_id: nil, transactional_timeout: 60)
       cluster = initialize_cluster
       compressor = Compressor.new(
         codec_name: compression_codec,
@@ -57,8 +63,7 @@ module Kafka
         transactional_timeout: transactional_timeout,
       )

-
-        cluster: cluster,
+      CustomProducer.new(cluster: cluster,
         transaction_manager: transaction_manager,
         logger: @logger,
         instrumenter: @instrumenter,
@@ -74,8 +79,8 @@ module Kafka
     end
   end

-  class
-    def initialize(
+  class CustomProducer
+    def initialize(cluster:, transaction_manager:, logger:, instrumenter:, compressor:, ack_timeout:, required_acks:, max_retries:, retry_backoff:, max_buffer_size:, max_buffer_bytesize:, partitioner:)
       @cluster = cluster
       @transaction_manager = transaction_manager
       @logger = logger
@@ -88,10 +93,6 @@ module Kafka
       @max_buffer_bytesize = max_buffer_bytesize
       @compressor = compressor
       @partitioner = partitioner
-
-      @topic = topic
-      @cluster.add_target_topics(Set.new([topic]))
-
       # A buffer organized by topic/partition.
       @buffer = MessageBuffer.new

@@ -99,12 +100,12 @@ module Kafka
       @pending_message_queue = PendingMessageQueue.new
     end

-    def produce(value, key: nil, partition: nil, partition_key: nil, headers: EMPTY_HEADER, create_time: Time.now)
+    def produce(value, key: nil, partition: nil, partition_key: nil, headers: EMPTY_HEADER, create_time: Time.now, topic: nil)
       message = PendingMessage.new(
         value: value,
         key: key,
         headers: headers,
-        topic:
+        topic: topic,
         partition: partition,
         partition_key: partition_key,
         create_time: create_time
@@ -245,12 +246,13 @@ module Kafka

     def assign_partitions!
       failed_messages = []
-      partition_count = @cluster.partitions_for(@topic).count

       @pending_message_queue.each do |message|
         partition = message.partition

         begin
+          partition_count = @cluster.partitions_for(message.topic).count
+
           if partition.nil?
             partition = @partitioner.call(partition_count, message)
           end
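Taken together, these changes swap the old per-topic producer for a single topic-agnostic CustomProducer: the topic now travels on each pending message, and assign_partitions! looks up the partition count per message's topic rather than once per producer. A rough usage sketch, assuming a ruby-kafka client object and the deliver_messages API lifted from ruby-kafka's Producer; the events collection and topic_for helper are illustrative only, not part of the gem:

# One producer can now buffer records destined for several topics,
# because produce accepts the topic per message.
producer = kafka_client.custom_producer(required_acks: :all, max_retries: 2)
events.each do |event|
  producer.produce(event.to_json, topic: topic_for(event))  # topic: keyword added in 0.18.1
end
producer.deliver_messages  # flushes the whole multi-topic buffer in one pass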
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.18.0
+  version: 0.18.1
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-07-21 00:00:00.000000000 Z
+date: 2022-08-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd