fluent-plugin-kafka 0.5.4 → 0.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 891e2173b1a15ec7e20c6793aa9fd05dae0fdfbc
-  data.tar.gz: 0f810a44129c6a274eb0b486a86c09d5e723b68d
+  metadata.gz: 25a3a2247250b5f3c839378914e21ff82906986f
+  data.tar.gz: e1f643b5798a8b07f12f7874391e449c5cd16f05
 SHA512:
-  metadata.gz: '09895588da32e7b8d7faf12dcae9b0fe76f7ab038c811b2674a588d88853e69e55e1ede7976a585250136bb8eacc522455b6a45ba2c555cf61638bd91a6e0bcb'
-  data.tar.gz: c9ce862ec1619447d67f204c3266b9b74491a47c43d4caa25e3bba111e8c58f18ce7d0973f5915e75ead3fa37170246b841698583376179b14345e4335a57366
+  metadata.gz: 3dae82784298f2b431568805c5b4955cb64bb11e0e09f41bb95068c58f815fbd0b370dc5262a3254e196c28a08cf77f822c9b6ad91d2829901ef4983b4e80424
+  data.tar.gz: 589d4ad5267d694732a2e5cf93d669516ea21f25d1d6872c4410ecd652fdd6cb24319e19db0ebf3aaba5639e44743c3c6fd4579c9d3e2fd1f61e6bb463a59df3
data/.travis.yml CHANGED
@@ -4,6 +4,7 @@ rvm:
 - 2.1
 - 2.2
 - 2.3.1
+- 2.4.1
 - ruby-head
 
 script:
data/ChangeLog CHANGED
@@ -1,3 +1,8 @@
+Release 0.5.5 - 2017/04/19
+
+	* output: Some trace log level changed to debug
+	* out_kafka_buffered: Add discard_kafka_delivery_failed parameter
+
 Release 0.5.4 - 2017/04/12
 
 	* out_kafka_buffered: Add max_send_limit_bytes parameter
data/README.md CHANGED
@@ -137,11 +137,12 @@ This plugin uses ruby-kafka producer for writing data. This plugin works with re
   # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
 
   # ruby-kafka producer options
-  max_send_retries (integer) :default => 1
-  required_acks (integer) :default => -1
-  ack_timeout (integer) :default => nil (Use default of ruby-kafka)
-  compression_codec (gzip|snappy) :default => nil (No compression)
-  max_send_limit_bytes (integer) :default => nil (No drop)
+  max_send_retries (integer)           :default => 1
+  required_acks (integer)              :default => -1
+  ack_timeout (integer)                :default => nil (Use default of ruby-kafka)
+  compression_codec (gzip|snappy)      :default => nil (No compression)
+  max_send_limit_bytes (integer)       :default => nil (No drop)
+  discard_kafka_delivery_failed (bool) :default => false (No discard)
 </match>
 
 `<formatter name>` of `output_data_type` uses fluentd's formatter plugins. See [formatter article](http://docs.fluentd.org/articles/formatter-plugin-overview).
@@ -157,6 +158,7 @@ Supports following ruby-kafka's producer options.
 - ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
 - compression_codec - default: nil - The codec the producer uses to compress messages.
 - max_send_limit_bytes - default: nil - Max byte size to send message to avoid MessageSizeTooLarge. For example, if you set 1000000(message.max.bytes in kafka), Message more than 1000000 byes will be dropped.
+- discard_kafka_delivery_failed - default: false - discard the record where [Kafka::DeliveryFailed](http://www.rubydoc.info/gems/ruby-kafka/Kafka/DeliveryFailed) occurred
 
 See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about ruby-kafka.
 
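For reference, a minimal `<match>` section enabling the new parameter could look like the sketch below (the tag pattern, broker address, and topic are illustrative assumptions, not taken from this diff):

  <match app.**>
    @type kafka_buffered
    brokers localhost:9092
    default_topic my-topic
    output_data_type json
    discard_kafka_delivery_failed true
  </match>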
data/fluent-plugin-kafka.gemspec CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.5.4'
+  gem.version = '0.5.5'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -191,7 +191,7 @@ DESC
 
       value = @formatter_proc.call(tag, time, record)
 
-      log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{value}.") }
+      log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{value}." }
       begin
         producer.produce(value, topic: topic, key: message_key, partition: partition, partition_key: partition_key)
       rescue Kafka::BufferOverflow => e
@@ -162,14 +162,14 @@ DESC
         next
       end
 
-      log.on_trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
+      log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
       messages += 1
 
       producer.produce(record_buf, message_key, partition, partition_key)
     }
 
     if messages > 0
-      log.trace { "#{messages} messages send." }
+      log.debug { "#{messages} messages send." }
       producer.deliver_messages
     end
   end
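A note on the logging change: Fluentd's `log.on_trace` runs its block only when the trace level is enabled, but it does not log the block's return value. So the removed `log.on_trace { "..." }` form here built the message string and silently discarded it, while the `log.on_trace { log.trace(...) }` form in the previous hunk was merely redundant. The replacement `log.trace { ... }` both defers the string interpolation and actually emits it. A minimal sketch of the distinction (illustrative, not part of the diff):

  # Runs the block at trace level, but the resulting string is discarded.
  log.on_trace { "message will send to #{topic}" }

  # Evaluated lazily at trace level and actually logged.
  log.trace { "message will send to #{topic}" }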
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -63,6 +63,7 @@ The codec the producer uses to compress messages.
 Supported codecs: (gzip|snappy)
 DESC
   config_param :max_send_limit_bytes, :size, :default => nil
+  config_param :discard_kafka_delivery_failed, :bool, :default => false
 
   config_param :time_format, :string, :default => nil
 
@@ -142,6 +143,11 @@ DESC
     @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks}
     @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
     @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
+
+    if @discard_kafka_delivery_failed
+      log.warn "'discard_kafka_delivery_failed' option discards events which cause delivery failure, e.g. invalid topic or something."
+      log.warn "If this is unexpected, you need to check your configuration or data."
+    end
   end
 
   def start
@@ -209,6 +215,19 @@ DESC
     end
   end
 
+  def deliver_messages(producer, tag)
+    if @discard_kafka_delivery_failed
+      begin
+        producer.deliver_messages
+      rescue Kafka::DeliveryFailed => e
+        log.warn "DeliveryFailed occurred. Discard broken event:", :error => e.to_s, :error_class => e.class.to_s, :tag => tag
+        producer.clear_buffer
+      end
+    else
+      producer.deliver_messages
+    end
+  end
+
   def write(chunk)
     tag = chunk.key
     def_topic = @default_topic || tag
@@ -253,12 +272,12 @@ DESC
         end
 
         if (messages > 0) and (messages_bytes + record_buf_bytes > @kafka_agg_max_bytes)
-          log.on_trace { log.trace("#{messages} messages send.") }
-          producer.deliver_messages
+          log.debug { "#{messages} messages send because reaches kafka_agg_max_bytes" }
+          deliver_messages(producer, tag)
           messages = 0
           messages_bytes = 0
         end
-        log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}.") }
+        log.trace { "message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}." }
         messages += 1
         producer.produce2(record_buf, topic: topic, key: message_key, partition_key: partition_key, partition: partition)
         messages_bytes += record_buf_bytes
@@ -267,8 +286,8 @@ DESC
         bytes_by_topic[topic] += record_buf_bytes
       }
       if messages > 0
-        log.trace { "#{messages} messages send." }
-        producer.deliver_messages
+        log.debug { "#{messages} messages send." }
+        deliver_messages(producer, tag)
       end
       log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
     end
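The effect of the new wrapper: with `discard_kafka_delivery_failed true`, a `Kafka::DeliveryFailed` raised by `deliver_messages` is logged and the producer buffer is cleared, so the entire pending batch for that chunk is dropped instead of being retried; without the option, the exception propagates and Fluentd retries the chunk. A standalone sketch of the same pattern against the ruby-kafka API (the broker address and topic name are assumptions for illustration):

  require "kafka"

  kafka = Kafka.new(seed_brokers: ["localhost:9092"]) # assumed broker
  producer = kafka.producer(max_retries: 1, required_acks: -1)
  producer.produce("hello", topic: "events") # assumed topic

  begin
    producer.deliver_messages # raises Kafka::DeliveryFailed if messages cannot be delivered
  rescue Kafka::DeliveryFailed => e
    # Mirrors the plugin's discard path: log the failure and drop the pending batch.
    puts "delivery failed: #{e}"
    producer.clear_buffer
  end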
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.5.4
+  version: 0.5.5
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-04-12 00:00:00.000000000 Z
+date: 2017-04-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd