fluent-plugin-kafka 0.12.3 → 0.12.4
- checksums.yaml +4 -4
- data/ChangeLog +4 -0
- data/README.md +14 -5
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/out_rdkafka.rb +16 -1
- data/lib/fluent/plugin/out_rdkafka2.rb +16 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8b9d681257730f610d10ef3f3ab0f219d3167df23482370bcc3c89b01cf12098
+  data.tar.gz: f628e31d41fdc36f51d93a4fb4f7e68321ad0857765a6088a9b46ce593c435c8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 57b996881b2981fb4366c31f25f27b4ca19b4d8ca2c4e27227ac4b5d603ae7a6b2719e7a049d8b668ace11620786832b7e970c90c3536de1ed6ba3da1723dd09
+  data.tar.gz: 21ddc3ab9a074b3910719ae59cdf826423f3cd52c1b9179ab86ccdf6db60166776cd3b7218f6e04da1ac101dd130c87486e30658736e7f6347b161776c7bc7ee
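These values can be reproduced locally; a minimal Ruby sketch, assuming the gem archive has already been unpacked (e.g. `tar xf fluent-plugin-kafka-0.12.4.gem`) into the current directory:

    require 'digest'

    # A .gem file is a tar archive containing metadata.gz and data.tar.gz;
    # their digests should match the SHA256 entries above.
    %w[metadata.gz data.tar.gz].each do |name|
      puts "#{name}: #{Digest::SHA256.file(name).hexdigest}"
    end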
data/ChangeLog
CHANGED
data/README.md
CHANGED
@@ -140,7 +140,7 @@ Consuming topic name is used for event tag. So when the target topic name is `ap
 
 ### Output plugin
 
-This plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the future.
+This `kafka2` plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the future.
 
     <match app.**>
       @type kafka2
@@ -155,6 +155,8 @@ This plugin is for fluentd v1.0 or later. This will be `out_kafka` plugin in the
   default_message_key (string) :default => nil
   exclude_topic_key (bool) :default => false
   exclude_partition_key (bool) :default => false
+  exclude_partition (bool) :default => false
+  exclude_message_key (bool) :default => false
   get_kafka_client_log (bool) :default => false
   headers (hash) :default => {}
   headers_from_record (hash) :default => {}
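The newly documented `exclude_partition` and `exclude_message_key` flags remove the routing fields from the record body once they have been used. A rough Ruby sketch of the idea; the field names and helper below are illustrative, not the plugin's actual internals:

    # Hypothetical illustration: strip routing fields after use.
    def strip_routing_fields(record, exclude_partition: false, exclude_message_key: false)
      record = record.dup
      record.delete('partition')   if exclude_partition    # field that picked the partition
      record.delete('message_key') if exclude_message_key  # field used as the Kafka message key
      record
    end

    strip_routing_fields({ 'partition' => 0, 'message_key' => 'k', 'msg' => 'hi' },
                         exclude_partition: true, exclude_message_key: true)
    # => {"msg"=>"hi"}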
@@ -197,8 +199,6 @@ Supports following ruby-kafka's producer options.
 - required_acks - default: -1 - The number of acks required per request. If you need flush performance, set lower value, e.g. 1, 2.
 - ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
 - compression_codec - default: nil - The codec the producer uses to compress messages.
-- kafka_agg_max_bytes - default: 4096 - Maximum value of total message size to be included in one batch transmission.
-- kafka_agg_max_messages - default: nil - Maximum number of messages to include in one batch transmission.
 - max_send_limit_bytes - default: nil - Max byte size to send message to avoid MessageSizeTooLarge. For example, if you set 1000000(message.max.bytes in kafka), Message more than 1000000 byes will be dropped.
 - discard_kafka_delivery_failed - default: false - discard the record where [Kafka::DeliveryFailed](http://www.rubydoc.info/gems/ruby-kafka/Kafka/DeliveryFailed) occurred
 - monitoring_list - default: [] - library to be used to monitor. statsd and datadog are supported
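Of these, `max_send_limit_bytes` acts as a pre-send guard against the broker's MessageSizeTooLarge error; conceptually it is a size filter, roughly like this hedged sketch (not the plugin's code):

    # Drop any serialized record larger than the broker's message.max.bytes.
    max_send_limit_bytes = 1_000_000  # mirror message.max.bytes on the broker
    records = ['small payload', 'x' * 2_000_000]
    records.reject! { |r| r.bytesize > max_send_limit_bytes }
    # records now contains only the payloads that fit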
@@ -292,6 +292,10 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
   default_topic (string) :default => nil
   default_partition_key (string) :default => nil
   default_message_key (string) :default => nil
+  exclude_topic_key (bool) :default => false
+  exclude_partition_key (bool) :default => false
+  exclude_partition (bool) :default => false
+  exclude_message_key (bool) :default => false
   output_data_type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
   output_include_tag (bool) :default => false
   output_include_time (bool) :default => false
@@ -315,6 +319,11 @@ Support of fluentd v0.12 has ended. `kafka_buffered` will be an alias of `kafka2
   monitoring_list (array) :default => []
 </match>
 
+`kafka_buffered` has two additional parameters:
+
+- kafka_agg_max_bytes - default: 4096 - Maximum value of total message size to be included in one batch transmission.
+- kafka_agg_max_messages - default: nil - Maximum number of messages to include in one batch transmission.
+
 ### Non-buffered output plugin
 
 This plugin uses ruby-kafka producer for writing data. For performance and reliability concerns, use `kafka_bufferd` output instead. This is mainly for testing.
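The `kafka_agg_max_bytes` and `kafka_agg_max_messages` parameters moved above cap how much `kafka_buffered` aggregates into one transmission; a minimal sketch of that cut-off logic (method name and structure are illustrative, not the plugin's internals):

    # Hypothetical batching: start a new batch when adding a message would
    # exceed kafka_agg_max_bytes, or when the batch already holds
    # kafka_agg_max_messages entries.
    def build_batches(messages, agg_max_bytes: 4096, agg_max_messages: nil)
      batches, current, bytes = [], [], 0
      messages.each do |msg|
        if !current.empty? &&
           (bytes + msg.bytesize > agg_max_bytes ||
            (agg_max_messages && current.size >= agg_max_messages))
          batches << current
          current, bytes = [], 0
        end
        current << msg
        bytes += msg.bytesize
      end
      batches << current unless current.empty?
      batches
    end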
|
@@ -349,10 +358,10 @@ This plugin also supports ruby-kafka related parameters. See Buffered output plu
|
|
349
358
|
|
350
359
|
### rdkafka based output plugin
|
351
360
|
|
352
|
-
This plugin uses `rdkafka` instead of `ruby-kafka` for
|
361
|
+
This plugin uses `rdkafka` instead of `ruby-kafka` for kafka client.
|
353
362
|
You need to install rdkafka gem.
|
354
363
|
|
355
|
-
# rdkafka is C extension library
|
364
|
+
# rdkafka is C extension library. Need to install development tools like ruby-devel, gcc and etc
|
356
365
|
# for v0.12 or later
|
357
366
|
$ gem install rdkafka --no-document
|
358
367
|
# for v0.11 or earlier
|
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.12.3'
+  gem.version = '0.12.4'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/out_rdkafka.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'thread'
+require 'logger'
 require 'fluent/output'
 require 'fluent/plugin/kafka_plugin_util'
 
@@ -91,8 +92,22 @@ DESC
     super
     log.instance_eval {
       def add(level, &block)
-        if block
+        return unless block
+
+        # Follow rdkakfa's log level. See also rdkafka-ruby's bindings.rb: https://github.com/appsignal/rdkafka-ruby/blob/e5c7261e3f2637554a5c12b924be297d7dca1328/lib/rdkafka/bindings.rb#L117
+        case level
+        when Logger::FATAL
+          self.fatal(block.call)
+        when Logger::ERROR
+          self.error(block.call)
+        when Logger::WARN
+          self.warn(block.call)
+        when Logger::INFO
           self.info(block.call)
+        when Logger::DEBUG
+          self.debug(block.call)
+        else
+          self.trace(block.call)
         end
       end
     }
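The added `case` maps Ruby `Logger` severities coming from librdkafka onto Fluentd's logger methods, routing anything below `DEBUG` to `trace`. A standalone sketch of the same mapping (the `FluentLoggerLike` stand-in is hypothetical, not Fluentd's real logger):

    require 'logger'

    # Stand-in with Fluentd-style severity methods, for illustration only.
    class FluentLoggerLike
      %i[fatal error warn info debug trace].each do |m|
        define_method(m) { |msg| puts "#{m}: #{msg}" }
      end

      def add(level, &block)
        return unless block
        case level
        when Logger::FATAL then fatal(block.call)
        when Logger::ERROR then error(block.call)
        when Logger::WARN  then warn(block.call)
        when Logger::INFO  then info(block.call)
        when Logger::DEBUG then debug(block.call)
        else                    trace(block.call)
        end
      end
    end

    FluentLoggerLike.new.add(Logger::WARN) { 'librdkafka message' }
    # => warn: librdkafka message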
data/lib/fluent/plugin/out_rdkafka2.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'thread'
+require 'logger'
 require 'fluent/plugin/output'
 require 'fluent/plugin/kafka_plugin_util'
 
@@ -108,8 +109,22 @@ DESC
     super
     log.instance_eval {
       def add(level, &block)
-        if block
+        return unless block
+
+        # Follow rdkakfa's log level. See also rdkafka-ruby's bindings.rb: https://github.com/appsignal/rdkafka-ruby/blob/e5c7261e3f2637554a5c12b924be297d7dca1328/lib/rdkafka/bindings.rb#L117
+        case level
+        when Logger::FATAL
+          self.fatal(block.call)
+        when Logger::ERROR
+          self.error(block.call)
+        when Logger::WARN
+          self.warn(block.call)
+        when Logger::INFO
           self.info(block.call)
+        when Logger::DEBUG
+          self.debug(block.call)
+        else
+          self.trace(block.call)
         end
       end
     }
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.12.3
+  version: 0.12.4
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-03-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd