fluent-plugin-kafka 0.10.0 → 0.11.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e2c8a3efaae4c3daecbc360c8092aa17a46a0bbcd2e533243f6f7ed375eb3aa4
- data.tar.gz: e2ae435c6923d71ea46e4fe3a5bfdd599cfb08aeea309da461ab15196cebdc07
+ metadata.gz: 51192d8576ab4ca9653af26130f08f51b61b2004ebeb64759139f08cd90844c9
+ data.tar.gz: 731fea3399de38b4f0b6133704db5abbfe7d904a22ac1d71bb03875770a02e7d
  SHA512:
- metadata.gz: 233159921bb7c2cd699b2e10939d26bd90e3280bfae56b9e5146dc7ac75619e3d91256e1f404c5ab412726bcf74e980f61b0e52600290b0b927579969aa21937
- data.tar.gz: f20ce22e379eb1785667d19ff95089b70c9cd33bbe0517f52e4c67b733cebc8128e6096b7376468fe15c1aea4aea3c9347f3413ad1c8535be58579f16f409583
+ metadata.gz: b00872ab8b675af0d00df8d11e14709036d1694ad98613fdbe5ff6ce9de1dd69c47cfe00af3ebc66fcf9a9093566c492145e100436fa032c60dea14192b952c6
+ data.tar.gz: 41a4cda769893e18dcfaa213d96f0c270bd7e059a1a003dda7fb9a5b2ac2cdbc61c49698ee47ad6003d59c652694fb4a4f0a13451e6354b1cb024a695e7f8828
data/ChangeLog CHANGED
@@ -1,3 +1,10 @@
+ Release 0.11.0 - 2019/07/17
+
+ * out_kafka2: Add connect_timeout / socket_timeout parameters
+ * out_kafka2/out_rdkafka2: Add headers / headers_from_record parameters
+ * output: Add use_event_time parameter to use fluentd event time for CreateTime
+ * input: Add get_kafka_client_log parameter. Default is `false`
+
  Release 0.10.0 - 2019/07/03

  * output: Fixed max_send_limit_bytes parameter to be in all kafka outputs
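
For orientation, the sketch below combines the output-side additions from this release in a single kafka2 block. It is illustrative only and not taken from the gem: the tag pattern, broker address, topic, timeout values, and header names are placeholders, and every parameter shown is optional (connect_timeout and socket_timeout default to nil, use_event_time to false, and the two header hashes to empty, as the diffs below show).

    # Minimal sketch; all values are placeholders.
    <match app.**>
      @type kafka2
      brokers localhost:9092
      default_topic app_events
      # New in 0.11.0: use the fluentd event time as the Kafka CreateTime.
      use_event_time true
      # New in 0.11.0: timeouts handed to the ruby-kafka client (seconds).
      connect_timeout 30
      socket_timeout 30
      # New in 0.11.0: static headers and headers taken from record fields.
      headers environment:production
      headers_from_record source_ip:$.source.ip
      <format>
        @type json
      </format>
    </match>
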
data/README.md CHANGED
@@ -158,6 +158,8 @@ This plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the
  exclude_topic_key (bool) :default => false
  exclude_partition_key (bool) :default => false
  get_kafka_client_log (bool) :default => false
+ headers (hash) :default => {}
+ headers_from_record (hash) :default => {}
  use_default_for_unknown_topic (bool) :default => false

  <format>
@@ -245,6 +247,33 @@ If key name `partition_key_key` exists in a message, this plugin set the value o

  If key name `message_key_key` exists in a message, this plugin publishes the value of message_key_key to kafka and can be read by consumers. Same message key will be assigned to all messages by setting `default_message_key` in config file. If message_key_key exists and if partition_key_key is not set explicitly, message_key_key will be used for partitioning.

+ #### Headers
+ It is possible to set headers on Kafka messages. This only works for the kafka2 and rdkafka2 output plugins.
+
+ The format is like key1:value1,key2:value2. For example:
+
+ <match app.**>
+   @type kafka2
+   [...]
+   headers some_header_name:some_header_value
+ </match>
+
+ You may set header values based on a value of a fluentd record field. For example, imagine a fluentd record like:
+
+   {"source": { "ip": "127.0.0.1" }, "payload": "hello world" }
+
+ And the following fluentd config:
+
+ <match app.**>
+   @type kafka2
+   [...]
+   headers_from_record source_ip:$.source.ip
+ </match>
+
+ The Kafka message will have a header of source_ip=127.0.0.1.
+
+ The configuration format is jsonpath. It is described in https://docs.fluentd.org/plugin-helper-overview/api-plugin-helper-record_accessor
+

  ### Buffered output plugin

  This plugin uses ruby-kafka producer for writing data. This plugin works with recent kafka versions. This plugin is for v0.12. If you use v1, see `kafka2`.
@@ -344,6 +373,8 @@ You need to install rdkafka gem.
  default_message_key (string) :default => nil
  exclude_topic_key (bool) :default => false
  exclude_partition_key (bool) :default => false
+ headers (hash) :default => {}
+ headers_from_record (hash) :default => {}

  <format>
  @type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
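
The rdkafka2 section above gains the same two header parameters as kafka2. A minimal sketch of how they might be set, reusing the README's own [...] convention for the remaining options; the header names and values are placeholders:

    <match app.**>
      @type rdkafka2
      [...]
      # Same semantics as the kafka2 parameters above.
      headers environment:production
      headers_from_record source_ip:$.source.ip
    </match>
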
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.10.0'
+ gem.version = '0.11.0'
  gem.required_ruby_version = ">= 2.1.0"

  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -36,6 +36,7 @@ class Fluent::KafkaInput < Fluent::Input
  config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
  config_param :use_record_time, :bool, :default => false,
  :desc => "Replace message timestamp with contents of 'time' field."
+ config_param :get_kafka_client_log, :bool, :default => false
  config_param :time_format, :string, :default => nil,
  :desc => "Time format to be used to parse 'time' filed."
  config_param :kafka_message_key, :string, :default => nil,
@@ -175,17 +176,18 @@ class Fluent::KafkaInput < Fluent::Input
  opt[:max_wait_time] = @max_wait_time if @max_wait_time
  opt[:min_bytes] = @min_bytes if @min_bytes

+ logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system,sasl_plain_username: @username, sasl_plain_password: @password)
  else
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
  end
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -26,6 +26,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
  :desc => "How long to stop event consuming when BufferQueueLimitError happens. Wait retry_emit_limit x 1s. The default is waiting until BufferQueueLimitError is resolved"
  config_param :use_record_time, :bool, :default => false,
  :desc => "Replace message timestamp with contents of 'time' field."
+ config_param :get_kafka_client_log, :bool, :default => false
  config_param :time_format, :string, :default => nil,
  :desc => "Time format to be used to parse 'time' filed."
  config_param :kafka_message_key, :string, :default => nil,
@@ -155,17 +156,18 @@ class Fluent::KafkaGroupInput < Fluent::Input
  def start
  super

+ logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password)
  else
- @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: log, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
  end
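
Both input plugins patched above gain get_kafka_client_log: left at its default of false, the ruby-kafka client is created without a logger; set to true, the client's own logs are routed through fluentd. A minimal sketch for the consumer-group input; the broker, topic, and group names are placeholders:

    # Sketch only; connection values are placeholders.
    <source>
      @type kafka_group
      brokers localhost:9092
      consumer_group fluentd-consumer
      topics app_events
      format json
      # New in 0.11.0: forward ruby-kafka's own client logs to fluentd.
      get_kafka_client_log true
    </source>
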
data/lib/fluent/plugin/kafka_producer_ext.rb CHANGED
@@ -12,9 +12,7 @@ module Kafka
  EMPTY_HEADER = {}

  class Producer
- def produce_for_buffered(value, key: nil, topic:, partition: nil, partition_key: nil)
- create_time = Time.now
-
+ def produce_for_buffered(value, key: nil, topic:, partition: nil, partition_key: nil, create_time: Time.now)
  message = PendingMessage.new(
  value: value,
  key: key,
@@ -99,13 +97,11 @@ module Kafka
  @pending_message_queue = PendingMessageQueue.new
  end

- def produce(value, key: nil, partition: nil, partition_key: nil)
- create_time = Time.now
-
+ def produce(value, key: nil, partition: nil, partition_key: nil, headers: EMPTY_HEADER, create_time: Time.now)
  message = PendingMessage.new(
  value: value,
  key: key,
- headers: EMPTY_HEADER,
+ headers: headers,
  topic: @topic,
  partition: partition,
  partition_key: partition_key,
data/lib/fluent/plugin/out_kafka2.rb CHANGED
@@ -8,7 +8,7 @@ module Fluent::Plugin
  class Fluent::Kafka2Output < Output
  Fluent::Plugin.register_output('kafka2', self)

- helpers :inject, :formatter, :event_emitter
+ helpers :inject, :formatter, :event_emitter, :record_accessor

  config_param :brokers, :array, :value_type => :string, :default => ['localhost:9092'],
  :desc => <<-DESC
@@ -39,12 +39,22 @@ DESC
  :desc => 'Set true to remove partition key from data'
  config_param :exclude_topic_key, :bool, :default => false,
  :desc => 'Set true to remove topic name key from data'
+ config_param :use_event_time, :bool, :default => false, :desc => 'Use fluentd event time for kafka create_time'
+ config_param :headers, :hash, default: {}, symbolize_keys: true, value_type: :string,
+ :desc => 'Kafka message headers'
+ config_param :headers_from_record, :hash, default: {}, symbolize_keys: true, value_type: :string,
+ :desc => 'Kafka message headers where the header value is a jsonpath to a record value'

  config_param :get_kafka_client_log, :bool, :default => false

  config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
  config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when ignore exception occured"

+ config_param :connect_timeout, :integer, :default => nil,
+ :desc => "The timeout setting for connecting to brokers"
+ config_param :socket_timeout, :integer, :default => nil,
+ :desc => "The timeout setting for socket connection"
+
  # ruby-kafka producer options
  config_param :max_send_retries, :integer, :default => 2,
  :desc => "Number of times to retry sending of messages to a leader."
@@ -84,17 +94,17 @@ DESC
  begin
  logger = @get_kafka_client_log ? log : nil
  if @scram_mechanism != nil && @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
  sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname)
  elsif @username != nil && @password != nil
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl,
  ssl_verify_hostname: @ssl_verify_hostname)
  else
- @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+ @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_client_cert_chain: read_ssl_file(@ssl_client_cert_chain),
  ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, sasl_over_ssl: @sasl_over_ssl,
  ssl_verify_hostname: @ssl_verify_hostname)
@@ -155,6 +165,11 @@ DESC
  end

  @topic_key_sym = @topic_key.to_sym
+
+ @headers_from_record_accessors = {}
+ @headers_from_record.each do |key, value|
+ @headers_from_record_accessors[key] = record_accessor_create(value)
+ end
  end

  def multi_workers_ready?
@@ -205,6 +220,8 @@ DESC
  messages = 0
  record_buf = nil

+ headers = @headers.clone
+
  begin
  producer = @kafka.topic_producer(topic, @producer_opts)
  chunk.msgpack_each { |time, record|
@@ -215,6 +232,10 @@ DESC
  partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
  message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key

+ @headers_from_record_accessors.each do |key, header_accessor|
+ headers[key] = header_accessor.call(record)
+ end
+
  record_buf = @formatter_proc.call(tag, time, record)
  record_buf_bytes = record_buf.bytesize
  if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
@@ -229,7 +250,8 @@ DESC
  log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
  messages += 1

- producer.produce(record_buf, key: message_key, partition_key: partition_key, partition: partition)
+ producer.produce(record_buf, key: message_key, partition_key: partition_key, partition: partition, headers: headers,
+ create_time: @use_event_time ? Time.at(time) : Time.now)
  }

  if messages > 0
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -46,14 +46,15 @@ DESC
  :desc => <<-DESC
  Set true to remove partition from data
  DESC
- config_param :exclude_message_key, :bool, :default => false,
+ config_param :exclude_message_key, :bool, :default => false,
  :desc => <<-DESC
  Set true to remove message key from data
  DESC
- config_param :exclude_topic_key, :bool, :default => false,
+ config_param :exclude_topic_key, :bool, :default => false,
  :desc => <<-DESC
  Set true to remove topic name key from data
  DESC
+ config_param :use_event_time, :bool, :default => false, :desc => 'Use fluentd event time for kafka create_time'

  config_param :kafka_agg_max_bytes, :size, :default => 4*1024 #4k
  config_param :kafka_agg_max_messages, :integer, :default => nil
@@ -339,7 +340,8 @@ DESC
  end
  log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
  messages += 1
- producer.produce_for_buffered(record_buf, topic: topic, key: message_key, partition_key: partition_key, partition: partition)
+ producer.produce_for_buffered(record_buf, topic: topic, key: message_key, partition_key: partition_key, partition: partition,
+ create_time: @use_event_time ? Time.at(time) : Time.now)
  messages_bytes += record_buf_bytes

  records_by_topic[topic] += 1
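
use_event_time works the same way here as in kafka2: when enabled, each message's CreateTime is the fluentd event time instead of Time.now at send. A minimal sketch for this v0.12-style buffered output; the broker list, topic, and tag pattern are placeholders:

    # Sketch only; connection values are placeholders.
    <match app.**>
      @type kafka_buffered
      brokers localhost:9092
      default_topic app_events
      output_data_type json
      # New in 0.11.0: stamp messages with the fluentd event time rather than Time.now.
      use_event_time true
    </match>
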
data/lib/fluent/plugin/out_rdkafka2.rb CHANGED
@@ -24,7 +24,7 @@ module Fluent::Plugin
  class Fluent::Rdkafka2Output < Output
  Fluent::Plugin.register_output('rdkafka2', self)

- helpers :inject, :formatter
+ helpers :inject, :formatter, :record_accessor

  config_param :brokers, :string, :default => 'localhost:9092',
  :desc => <<-DESC
@@ -54,6 +54,11 @@ DESC
  :desc => <<-DESC
  Set true to remove topic key from data
  DESC
+ config_param :headers, :hash, default: {}, symbolize_keys: true, value_type: :string,
+ :desc => 'Kafka message headers'
+ config_param :headers_from_record, :hash, default: {}, symbolize_keys: true, value_type: :string,
+ :desc => 'Kafka message headers where the header value is a jsonpath to a record value'
+
  config_param :max_send_retries, :integer, :default => 2,
  :desc => "Number of times to retry sending of messages to a leader. Used for message.send.max.retries"
  config_param :required_acks, :integer, :default => -1,
@@ -128,6 +133,11 @@ DESC
  end
  @formatter_proc = setup_formatter(formatter_conf)
  @topic_key_sym = @topic_key.to_sym
+
+ @headers_from_record_accessors = {}
+ @headers_from_record.each do |key, value|
+ @headers_from_record_accessors[key] = record_accessor_create(value)
+ end
  end

  def build_config
@@ -236,6 +246,8 @@ DESC
  record_buf = nil
  record_buf_bytes = nil

+ headers = @headers.clone
+
  begin
  producer = get_producer
  chunk.msgpack_each { |time, record|
@@ -245,6 +257,10 @@ DESC
  partition = (@exclude_partition ? record.delete(@partition_key) : record[@partition_key]) || @default_partition
  message_key = (@exclude_message_key ? record.delete(@message_key_key) : record[@message_key_key]) || @default_message_key

+ @headers_from_record_accessors.each do |key, header_accessor|
+ headers[key] = header_accessor.call(record)
+ end
+
  record_buf = @formatter_proc.call(tag, time, record)
  record_buf_bytes = record_buf.bytesize
  if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
@@ -256,7 +272,7 @@ DESC
  next
  end

- handlers << enqueue_with_retry(producer, topic, record_buf, message_key, partition)
+ handlers << enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
  }
  handlers.each { |handler|
  handler.wait(@rdkafka_delivery_handle_poll_timeout) if @rdkafka_delivery_handle_poll_timeout != 0
@@ -268,11 +284,11 @@ DESC
  raise e
  end

- def enqueue_with_retry(producer, topic, record_buf, message_key, partition)
+ def enqueue_with_retry(producer, topic, record_buf, message_key, partition, headers)
  attempt = 0
  loop do
  begin
- return producer.produce(topic: topic, payload: record_buf, key: message_key, partition: partition)
+ return producer.produce(topic: topic, payload: record_buf, key: message_key, partition: partition, headers: headers)
  rescue Exception => e
  if e.respond_to?(:code) && e.code == :queue_full
  if attempt <= @max_enqueue_retries
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.10.0
+ version: 0.11.0
  platform: ruby
  authors:
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-07-03 00:00:00.000000000 Z
+ date: 2019-07-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd