fluent-plugin-kafka 0.5.3 → 0.5.4
- checksums.yaml +4 -4
- data/ChangeLog +5 -0
- data/README.md +12 -8
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/out_kafka.rb +17 -2
- data/lib/fluent/plugin/out_kafka_buffered.rb +5 -0
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 891e2173b1a15ec7e20c6793aa9fd05dae0fdfbc
+  data.tar.gz: 0f810a44129c6a274eb0b486a86c09d5e723b68d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '09895588da32e7b8d7faf12dcae9b0fe76f7ab038c811b2674a588d88853e69e55e1ede7976a585250136bb8eacc522455b6a45ba2c555cf61638bd91a6e0bcb'
+  data.tar.gz: c9ce862ec1619447d67f204c3266b9b74491a47c43d4caa25e3bba111e8c58f18ce7d0973f5915e75ead3fa37170246b841698583376179b14345e4335a57366
data/ChangeLog
CHANGED
data/README.md
CHANGED
@@ -137,10 +137,11 @@ This plugin uses ruby-kafka producer for writing data. This plugin works with re
     # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
 
     # ruby-kafka producer options
-    max_send_retries
-    required_acks
-    ack_timeout
-    compression_codec
+    max_send_retries (integer) :default => 1
+    required_acks (integer) :default => -1
+    ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+    compression_codec (gzip|snappy) :default => nil (No compression)
+    max_send_limit_bytes (integer) :default => nil (No drop)
   </match>
 
 `<formatter name>` of `output_data_type` uses fluentd's formatter plugins. See [formatter article](http://docs.fluentd.org/articles/formatter-plugin-overview).
@@ -155,6 +156,7 @@ Supports following ruby-kafka's producer options.
 - required_acks - default: -1 - The number of acks required per request. If you need flush performance, set lower value, e.g. 1, 2.
 - ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
 - compression_codec - default: nil - The codec the producer uses to compress messages.
+- max_send_limit_bytes - default: nil - Max byte size to send messages, to avoid MessageSizeTooLarge errors. For example, if you set 1000000 (message.max.bytes in Kafka), messages larger than 1000000 bytes will be dropped.
 
 See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about ruby-kafka.
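For illustration, a minimal sketch of the new option in a buffered `<match>` block; the broker address, topic, and the 1000000 limit are placeholder assumptions, not values from this diff:

    <match app.**>
      @type kafka_buffered
      brokers localhost:9092
      default_topic messages
      output_data_type json
      # Skip events whose serialized size exceeds the broker's message.max.bytes
      max_send_limit_bytes 1000000
    </match>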
@@ -204,10 +206,12 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
     exclude_partition_key (bool) :default => false
 
     # ruby-kafka producer options
-    max_send_retries
-    required_acks
-    ack_timeout
-    compression_codec
+    max_send_retries (integer) :default => 1
+    required_acks (integer) :default => -1
+    ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+    compression_codec (gzip|snappy) :default => nil
+    max_buffer_size (integer) :default => nil (Use default of ruby-kafka)
+    max_buffer_bytesize (integer) :default => nil (Use default of ruby-kafka)
   </match>
 
 This plugin also supports ruby-kafka related parameters. See Buffered output plugin section.
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.5.3'
+  gem.version = '0.5.4'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/out_kafka.rb
CHANGED
@@ -54,6 +54,12 @@ DESC
 
   config_param :time_format, :string, :default => nil
 
+  config_param :max_buffer_size, :integer, :default => nil,
+               :desc => "Number of messages to be buffered by the kafka producer."
+
+  config_param :max_buffer_bytesize, :integer, :default => nil,
+               :desc => "Maximum size in bytes to be buffered."
+
   include Fluent::KafkaPluginUtil::SSLSettings
 
   attr_accessor :output_data_type
@@ -122,6 +128,8 @@ DESC
     @producer_opts = {max_retries: @max_send_retries, required_acks: @required_acks}
     @producer_opts[:ack_timeout] = @ack_timeout if @ack_timeout
     @producer_opts[:compression_codec] = @compression_codec.to_sym if @compression_codec
+    @producer_opts[:max_buffer_size] = @max_buffer_size if @max_buffer_size
+    @producer_opts[:max_buffer_bytesize] = @max_buffer_bytesize if @max_buffer_bytesize
   end
 
   def start
@@ -183,8 +191,15 @@ DESC
 
       value = @formatter_proc.call(tag, time, record)
 
-      log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{value}.") }
-      producer.produce(value, topic: topic, key: message_key, partition: partition, partition_key: partition_key)
+      log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{value}.") }
+      begin
+        producer.produce(value, topic: topic, key: message_key, partition: partition, partition_key: partition_key)
+      rescue Kafka::BufferOverflow => e
+        log.warn "BufferOverflow occurred: #{e}"
+        log.info "Trying to deliver the messages to prevent the buffer from overflowing again."
+        producer.deliver_messages
+        log.info "Recovered from BufferOverflow successfully"
+      end
     end
 
     producer.deliver_messages
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -62,6 +62,7 @@ DESC
 The codec the producer uses to compress messages.
 Supported codecs: (gzip|snappy)
 DESC
+  config_param :max_send_limit_bytes, :size, :default => nil
 
   config_param :time_format, :string, :default => nil
 
@@ -242,6 +243,10 @@ DESC
 
         record_buf = @formatter_proc.call(tag, time, record)
         record_buf_bytes = record_buf.bytesize
+        if @max_send_limit_bytes && record_buf_bytes > @max_send_limit_bytes
+          log.warn "record size exceeds max_send_limit_bytes. Skip event:", :time => time, :record => record
+          next
+        end
       rescue StandardError => e
         log.warn "unexpected error during format record. Skip broken event:", :error => e.to_s, :error_class => e.class.to_s, :time => time, :record => record
         next
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.5.3
+  version: 0.5.4
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-04-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -131,7 +131,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
   requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
 summary: Fluentd plugin for Apache Kafka > 0.8