fluent-plugin-kafka 0.14.1 → 0.14.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 27a59b5835dff5d64dcf78bd5d3bf945341c9b734de476b16f9217afa2839a22
-  data.tar.gz: fdac125fa11e88712059f0f0794ca199ef248d562b1e1aad389e6b0dace9c777
+  metadata.gz: 14cde202b38079778e0d694692f05c19c9576be8622f35a8896df35f33ea9733
+  data.tar.gz: 80cbc1050e85239dabfe78dbffd97a100c939ec4e9227bd68956a7b3d15aa75e
 SHA512:
-  metadata.gz: faf2abd472b6af6b010409750d6b0e3483ba8748af8930c4dad05d6e6b4d9aca5dc7c55d29b2e4ed3b092b12612f0d19e46b9878463ecbe417e417d7c3ee522b
-  data.tar.gz: f51803596ea03e0f6dfc9f83abaf070179136f497dc89ad48eed0582c8d580ad88e105f5f4e075096421f6dc0acf8bf1e58fd75738e7c49636acca01ec347a46
+  metadata.gz: 5ec7ed5f16a7d78a0dcd5f6eb15ecd94dd47adc7ae77208896a0ede1d341ede1f5a668141e7ac4e5413734105e276bd483f3f2b8c2041d329a4d619a9c469a76
+  data.tar.gz: 11877dd67f3b0f714b38153368611c8f234b6a96976a00e21117e9523fa5bcde210fd13fa5e22086cf604ee61fc94a3472bc4946c69a6f5623eeace486e7eb75
data/ChangeLog CHANGED
@@ -1,3 +1,8 @@
+Release 0.14.2 - 2020/08/26
+
+  * in_kafka_group: Add `add_headers` parameter
+  * out_kafka2/out_rdkafka2: Support `discard_kafka_delivery_failed` parameter
+
 Release 0.14.1 - 2020/08/11
 
   * kafka_producer_ext: Fix regression by v0.14.0 changes
data/README.md CHANGED
@@ -118,6 +118,8 @@ Consume events by kafka consumer group features..
   topics <listening topics(separate with comma',')>
   format <input text type (text|json|ltsv|msgpack)> :default => json
   message_key <key (Optional, for text format only, default is message)>
+  kafka_message_key <key (Optional, If specified, set kafka's message key to this key)>
+  add_headers <If true, add kafka's message headers to record>
   add_prefix <tag prefix (Optional)>
   add_suffix <tag suffix (Optional)>
   retry_emit_limit <Wait retry_emit_limit x 1s when BufferQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>
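As an illustrative sketch (the broker address, consumer group, and topic names below are placeholders, not defaults), the two key-related options added here would be wired into a `kafka_group` source like so:

    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group my_consumer_group
      topics app_events
      format json
      kafka_message_key message_key  # copy Kafka's message key into the record under this key
      add_headers true               # new in 0.14.2: merge Kafka message headers into the record
    </source>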
@@ -141,7 +143,8 @@ Consuming topic name is used for event tag. So when the target topic name is `ap
 
 ### Output plugin
 
-This `kafka2` plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the future.
+This `kafka2` plugin is for fluentd v1 or later. This plugin uses `ruby-kafka` producer for writing data.
+If `ruby-kafka` doesn't fit your kafka environment, check `rdkafka2` plugin instead. This will be `out_kafka` plugin in the future.
 
     <match app.**>
       @type kafka2
@@ -162,6 +165,7 @@ This `kafka2` plugin is for fluentd v1.0 or later. This will be `out_kafka` plug
   headers (hash) :default => {}
   headers_from_record (hash) :default => {}
   use_default_for_unknown_topic (bool) :default => false
+  discard_kafka_delivery_failed (bool) :default => false (No discard)
 
   <format>
     @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
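A minimal sketch of enabling the new flag on a `kafka2` match (broker and topic values are placeholders); the same parameter is also added to `rdkafka2` in the hunk below:

    <match app.**>
      @type kafka2
      brokers localhost:9092
      default_topic app_events
      # new in 0.14.2: on Kafka::DeliveryFailed, log and drop the buffered
      # messages instead of raising so that Fluentd would retry the chunk
      discard_kafka_delivery_failed true
      <format>
        @type json
      </format>
    </match>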
@@ -385,6 +389,7 @@ You need to install rdkafka gem.
   default_message_key (string) :default => nil
   exclude_topic_key (bool) :default => false
   exclude_partition_key (bool) :default => false
+  discard_kafka_delivery_failed (bool) :default => false (No discard)
 
   # same with kafka2
   headers (hash) :default => {}
fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.14.1'
+  gem.version = '0.14.2'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -18,6 +18,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "Supported format: (json|text|ltsv|msgpack)"
   config_param :message_key, :string, :default => 'message',
                :desc => "For 'text' format only."
+  config_param :add_headers, :bool, :default => false,
+               :desc => "Add kafka's message headers to event record"
   config_param :add_prefix, :string, :default => nil,
                :desc => "Tag prefix (Optional)"
   config_param :add_suffix, :string, :default => nil,
@@ -263,6 +265,11 @@ class Fluent::KafkaGroupInput < Fluent::Input
             if @kafka_message_key
               record[@kafka_message_key] = msg.key
             end
+            if @add_headers
+              msg.headers.each_pair { |k, v|
+                record[k] = v
+              }
+            end
             es.add(record_time, record)
           rescue => e
             log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
lib/fluent/plugin/out_kafka2.rb CHANGED
@@ -69,6 +69,7 @@ The codec the producer uses to compress messages.
 Supported codecs depends on ruby-kafka: https://github.com/zendesk/ruby-kafka#compression
 DESC
   config_param :max_send_limit_bytes, :size, :default => nil
+  config_param :discard_kafka_delivery_failed, :bool, :default => false
   config_param :active_support_notification_regex, :string, :default => nil,
                :desc => <<-DESC
 Add a regular expression to capture ActiveSupport notifications from the Kafka client
@@ -267,7 +268,16 @@ DESC
 
         if messages > 0
           log.debug { "#{messages} messages send." }
-          producer.deliver_messages
+          if @discard_kafka_delivery_failed
+            begin
+              producer.deliver_messages
+            rescue Kafka::DeliveryFailed => e
+              log.warn "DeliveryFailed occurred. Discard broken event:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
+              producer.clear_buffer
+            end
+          else
+            producer.deliver_messages
+          end
         end
       rescue Kafka::UnknownTopicOrPartition
         if @use_default_for_unknown_topic && topic != @default_topic
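The essence of the change: with the flag off, `Kafka::DeliveryFailed` propagates and Fluentd retries the whole chunk; with it on, the error is logged and the producer buffer is cleared so the chunk counts as flushed. A standalone sketch of that decision (the helper name and keyword arguments are illustrative, not plugin API; `deliver_messages` and `clear_buffer` are real ruby-kafka producer methods):

    require 'kafka'

    # Illustrative helper: deliver buffered messages and either discard
    # them on failure or re-raise so the caller retries the chunk.
    def deliver_or_discard(producer, discard_on_failure:, logger:)
      producer.deliver_messages
    rescue Kafka::DeliveryFailed => e
      raise unless discard_on_failure   # default behavior: let the chunk be retried
      logger.warn("DeliveryFailed occurred. Discarding buffer: #{e}")
      producer.clear_buffer             # drop the failed messages for good
    end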
lib/fluent/plugin/out_rdkafka2.rb CHANGED
@@ -73,6 +73,7 @@ The codec the producer uses to compress messages. Used for compression.codec
 Supported codecs: (gzip|snappy)
 DESC
   config_param :max_send_limit_bytes, :size, :default => nil
+  config_param :discard_kafka_delivery_failed, :bool, :default => false
   config_param :rdkafka_buffering_max_ms, :integer, :default => nil, :desc => 'Used for queue.buffering.max.ms'
   config_param :rdkafka_buffering_max_messages, :integer, :default => nil, :desc => 'Used for queue.buffering.max.messages'
   config_param :rdkafka_message_max_bytes, :integer, :default => nil, :desc => 'Used for message.max.bytes'
@@ -325,9 +326,13 @@ DESC
         }
       end
     rescue Exception => e
-      log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
-      # Raise exception to retry sending messages
-      raise e
+      if @discard_kafka_delivery_failed
+        log.warn "Delivery failed. Discard events:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
+      else
+        log.warn "Send exception occurred: #{e} at #{e.backtrace.first}"
+        # Raise exception to retry sending messages
+        raise e
+      end
     end
 
     def enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
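Unlike the `kafka2` change, this rescue catches `Exception` broadly, so with the flag on any send-time error is swallowed, not only delivery failures. A sketch of a matching configuration (all values are placeholders, not recommendations; the `rdkafka_*` knobs are the pass-throughs declared in the hunk above):

    <match app.**>
      @type rdkafka2
      brokers localhost:9092
      default_topic app_events
      # new in 0.14.2: log and drop events on send errors instead of raising
      discard_kafka_delivery_failed true
      # librdkafka tuning, passed through to queue.buffering.* / message.max.bytes
      rdkafka_buffering_max_ms 100
      rdkafka_buffering_max_messages 100000
      rdkafka_message_max_bytes 1000000
      <format>
        @type json
      </format>
    </match>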
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.14.1
+  version: 0.14.2
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-08-11 00:00:00.000000000 Z
+date: 2020-08-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd