fluent-plugin-kafka 0.14.1 → 0.14.2
- checksums.yaml +4 -4
- data/ChangeLog +5 -0
- data/README.md +6 -1
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/in_kafka_group.rb +7 -0
- data/lib/fluent/plugin/out_kafka2.rb +11 -1
- data/lib/fluent/plugin/out_rdkafka2.rb +8 -3
- metadata +2 -2
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 14cde202b38079778e0d694692f05c19c9576be8622f35a8896df35f33ea9733
+  data.tar.gz: 80cbc1050e85239dabfe78dbffd97a100c939ec4e9227bd68956a7b3d15aa75e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5ec7ed5f16a7d78a0dcd5f6eb15ecd94dd47adc7ae77208896a0ede1d341ede1f5a668141e7ac4e5413734105e276bd483f3f2b8c2041d329a4d619a9c469a76
+  data.tar.gz: 11877dd67f3b0f714b38153368611c8f234b6a96976a00e21117e9523fa5bcde210fd13fa5e22086cf604ee61fc94a3472bc4946c69a6f5623eeace486e7eb75
data/ChangeLog CHANGED
data/README.md CHANGED

@@ -118,6 +118,8 @@ Consume events by kafka consumer group features..
     topics <listening topics(separate with comma',')>
     format <input text type (text|json|ltsv|msgpack)> :default => json
     message_key <key (Optional, for text format only, default is message)>
+    kafka_message_key <key (Optional, If specified, set kafka's message key to this key)>
+    add_headers <If true, add kafka's message headers to record>
     add_prefix <tag prefix (Optional)>
     add_suffix <tag suffix (Optional)>
     retry_emit_limit <Wait retry_emit_limit x 1s when BufferQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>
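Putting the two new consumer parameters together, a minimal `<source>` sketch might look like the following; the broker list, group name, and topic are placeholders, not values from this diff. Note that `add_headers`, per the in_kafka_group.rb hunk further down, copies each header into the top level of the record (`record[k] = v`), so a header sharing a name with an existing record field overwrites it.

```
<source>
  @type kafka_group
  brokers broker1:9092,broker2:9092   # placeholder broker list
  consumer_group my_consumer_group    # placeholder group name
  topics app_events                   # placeholder topic
  format json
  # store each message's Kafka key in the record under "k_key"
  kafka_message_key k_key
  # new in 0.14.2: merge Kafka message headers into the record
  add_headers true
</source>
```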
@@ -141,7 +143,8 @@ Consuming topic name is used for event tag. So when the target topic name is `ap
 
 ### Output plugin
 
-This `kafka2` plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the future.
+This `kafka2` plugin is for fluentd v1 or later. This plugin uses `ruby-kafka` producer for writing data.
+If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin instead. This will be `out_kafka` plugin in the future.
 
     <match app.**>
       @type kafka2
@@ -162,6 +165,7 @@ This `kafka2` plugin is for fluentd v1.0 or later. This will be `out_kafka` plug
       headers (hash) :default => {}
       headers_from_record (hash) :default => {}
       use_default_for_unknown_topic (bool) :default => false
+      discard_kafka_delivery_failed (bool) :default => false (No discard)
 
       <format>
         @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
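As a usage sketch (broker and topic names are placeholders), the new flag is an ordinary top-level parameter of the `<match>` block:

```
<match app.**>
  @type kafka2
  brokers broker1:9092      # placeholder
  default_topic app_events  # placeholder
  # new in 0.14.2: log and drop events whose delivery fails
  # instead of raising Kafka::DeliveryFailed and retrying the chunk
  discard_kafka_delivery_failed true
  <format>
    @type json
  </format>
</match>
```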
@@ -385,6 +389,7 @@ You need to install rdkafka gem.
       default_message_key (string) :default => nil
       exclude_topic_key (bool) :default => false
       exclude_partition_key (bool) :default => false
+      discard_kafka_delivery_failed (bool) :default => false (No discard)
 
       # same with kafka2
       headers (hash) :default => {}
data/fluent-plugin-kafka.gemspec CHANGED

@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.14.1'
+  gem.version = '0.14.2'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/in_kafka_group.rb CHANGED

@@ -18,6 +18,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "Supported format: (json|text|ltsv|msgpack)"
   config_param :message_key, :string, :default => 'message',
                :desc => "For 'text' format only."
+  config_param :add_headers, :bool, :default => false,
+               :desc => "Add kafka's message headers to event record"
   config_param :add_prefix, :string, :default => nil,
                :desc => "Tag prefix (Optional)"
   config_param :add_suffix, :string, :default => nil,
@@ -263,6 +265,11 @@ class Fluent::KafkaGroupInput < Fluent::Input
             if @kafka_message_key
               record[@kafka_message_key] = msg.key
             end
+            if @add_headers
+              msg.headers.each_pair { |k, v|
+                record[k] = v
+              }
+            end
             es.add(record_time, record)
           rescue => e
             log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
data/lib/fluent/plugin/out_kafka2.rb CHANGED

@@ -69,6 +69,7 @@ The codec the producer uses to compress messages.
 Supported codecs depends on ruby-kafka: https://github.com/zendesk/ruby-kafka#compression
 DESC
   config_param :max_send_limit_bytes, :size, :default => nil
+  config_param :discard_kafka_delivery_failed, :bool, :default => false
   config_param :active_support_notification_regex, :string, :default => nil,
                :desc => <<-DESC
 Add a regular expression to capture ActiveSupport notifications from the Kafka client
@@ -267,7 +268,16 @@ DESC
 
       if messages > 0
         log.debug { "#{messages} messages send." }
-        producer.deliver_messages
+        if @discard_kafka_delivery_failed
+          begin
+            producer.deliver_messages
+          rescue Kafka::DeliveryFailed => e
+            log.warn "DeliveryFailed occurred. Discard broken event:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
+            producer.clear_buffer
+          end
+        else
+          producer.deliver_messages
+        end
       end
     rescue Kafka::UnknownTopicOrPartition
       if @use_default_for_unknown_topic && topic != @default_topic
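The flag only changes what happens when `Kafka::DeliveryFailed` is raised: off (the default), the exception propagates and Fluentd retries the buffer chunk as usual; on, the producer buffer is cleared and the chunk counts as flushed, so the affected events are lost rather than retried. A sketch of how this sits next to ordinary buffer retry settings (path and limits are placeholders):

```
<match app.**>
  @type kafka2
  brokers broker1:9092      # placeholder
  default_topic app_events  # placeholder
  discard_kafka_delivery_failed true
  <buffer topic>
    @type file
    path /var/log/fluent/kafka-buffer   # placeholder path
    # retries still apply to other errors (e.g. connection failures);
    # DeliveryFailed no longer reaches them while the flag is true
    retry_max_times 10
    retry_wait 5s
  </buffer>
  <format>
    @type json
  </format>
</match>
```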
data/lib/fluent/plugin/out_rdkafka2.rb CHANGED

@@ -73,6 +73,7 @@ The codec the producer uses to compress messages. Used for compression.codec
 Supported codecs: (gzip|snappy)
 DESC
   config_param :max_send_limit_bytes, :size, :default => nil
+  config_param :discard_kafka_delivery_failed, :bool, :default => false
   config_param :rdkafka_buffering_max_ms, :integer, :default => nil, :desc => 'Used for queue.buffering.max.ms'
   config_param :rdkafka_buffering_max_messages, :integer, :default => nil, :desc => 'Used for queue.buffering.max.messages'
   config_param :rdkafka_message_max_bytes, :integer, :default => nil, :desc => 'Used for message.max.bytes'
@@ -325,9 +326,13 @@ DESC
         }
       end
     rescue Exception => e
-      log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
-      # Raise exception to retry sending messages
-      raise e
+      if @discard_kafka_delivery_failed
+        log.warn "Delivery failed. Discard events:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
+      else
+        log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
+        # Raise exception to retry sending messages
+        raise e
+      end
     end
 
     def enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
metadata CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.14.1
+  version: 0.14.2
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-08-
+date: 2020-08-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd