fluent-plugin-kafka 0.4.0 → 0.4.1
- checksums.yaml +4 -4
- data/ChangeLog +8 -0
- data/README.md +2 -0
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/out_kafka.rb +9 -2
- data/lib/fluent/plugin/out_kafka_buffered.rb +8 -2
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9f3bd136ea663d18398ab38653018906d9294e46
+  data.tar.gz: e927e2731e379645077d2d43006df2da17f7827a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7835b7639afd419afc4a6647bde7d513f0576dbb91a46acee60825d41dc48875a1dbd9f8c8a875df124fc85c216dd3572e25c1e4350b031e2147045679bd45cf
+  data.tar.gz: 3e0b247d8efd4208200aa81398f1afae7d36b32415d9e62addd91e416d6baeb06c3eb930800d0e5f24afb4283357576cb37b3eee29ddd46087ff9dfdece0f2c6
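These are the standard RubyGems checksums for the new release's metadata.gz and data.tar.gz archives. A minimal Ruby sketch for recomputing the SHA512 of data.tar.gz from a locally downloaded copy of the gem (the file path is illustrative; a .gem file is a plain tar containing metadata.gz, data.tar.gz, and checksums.yaml.gz):

    require 'digest'
    require 'rubygems/package'

    # Hypothetical local path to the downloaded gem archive.
    gem_path = 'fluent-plugin-kafka-0.4.1.gem'

    expected_sha512 = '3e0b247d8efd4208200aa81398f1afae7d36b32415d9e62addd91e416d6baeb0' \
                      '6c3eb930800d0e5f24afb4283357576cb37b3eee29ddd46087ff9dfdece0f2c6'

    File.open(gem_path, 'rb') do |io|
      Gem::Package::TarReader.new(io) do |tar|
        tar.each do |entry|
          next unless entry.full_name == 'data.tar.gz'
          actual = Digest::SHA512.hexdigest(entry.read)
          puts(actual == expected_sha512 ? 'data.tar.gz checksum OK' : 'checksum mismatch!')
        end
      end
    end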
data/ChangeLog
CHANGED
data/README.md
CHANGED
@@ -20,6 +20,8 @@ Or install it yourself as:
 
     $ gem install fluent-plugin-kafka
 
+If you want to use zookeeper related parameters, you also need to install zookeeper gem. zookeeper gem includes native extension, so development tools are needed, e.g. gcc, make and etc.
+
 ## Requirements
 
 - Ruby 2.1 or later
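The zookeeper note added above implies an extra install step. Following the README's own command style, and assuming a Debian-style host where gcc and make come from build-essential (the package name is illustrative and differs on other systems):

    $ sudo apt-get install build-essential
    $ gem install zookeeper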
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.4.0'
+  gem.version = '0.4.1'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/out_kafka.rb
CHANGED
@@ -18,6 +18,7 @@ DESC
                :desc => "Output topic."
   config_param :default_message_key, :string, :default => nil
   config_param :default_partition_key, :string, :default => nil
+  config_param :default_partition, :integer, :default => nil
   config_param :client_id, :string, :default => 'kafka'
   config_param :output_data_type, :string, :default => 'json',
                :desc => "Supported format: (json|ltsv|msgpack|attr:<record name>|<formatter name>)"
@@ -27,6 +28,11 @@ DESC
                :desc => <<-DESC
 Set true to remove partition key from data
 DESC
+  config_param :exclude_partition, :bool, :default => false,
+               :desc => <<-DESC
+Set true to remove partition from data
+DESC
+
   config_param :exclude_message_key, :bool, :default => false,
                :desc => <<-DESC
 Set true to remove message key from data
@@ -172,12 +178,13 @@ DESC
       record['tag'] = tag if @output_include_tag
       topic = (@exclude_topic_key ? record.delete('topic') : record['topic']) || @default_topic || tag
       partition_key = (@exclude_partition_key ? record.delete('partition_key') : record['partition_key']) || @default_partition_key
+      partition = (@exclude_partition ? record.delete('partition'.freeze) : record['partition'.freeze]) || @default_partition
      message_key = (@exclude_message_key ? record.delete('message_key') : record['message_key']) || @default_message_key
 
       value = @formatter_proc.call(tag, time, record)
 
-      log.on_trace { log.trace("message send to #{topic} with partition_key: #{partition_key}, message_key: #{message_key} and value: #{
-      producer.produce(value, topic: topic, key: message_key, partition_key: partition_key)
+      log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}.") }
+      producer.produce(value, topic: topic, key: message_key, partition: partition, partition_key: partition_key)
     end
 
     producer.deliver_messages
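Taken together, out_kafka now resolves a per-message partition from the record's 'partition' field, falls back to default_partition when the field is absent, and strips the field from the payload when exclude_partition is true. A minimal configuration sketch; default_partition and exclude_partition come from the diff above, while the match tag, broker address, and topic name are placeholder values:

    <match app.**>
      @type kafka
      brokers localhost:9092
      default_topic events
      default_partition 0       # fallback when a record carries no 'partition' field
      exclude_partition true    # remove the 'partition' field before formatting the payload
      output_data_type json
    </match>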
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -22,6 +22,7 @@ DESC
                :desc => "Output topic"
   config_param :default_message_key, :string, :default => nil
   config_param :default_partition_key, :string, :default => nil
+  config_param :default_partition, :integer, :default => nil
   config_param :client_id, :string, :default => 'kafka'
   config_param :output_data_type, :string, :default => 'json',
                :desc => <<-DESC
@@ -32,6 +33,10 @@ DESC
   config_param :exclude_partition_key, :bool, :default => false,
                :desc => <<-DESC
 Set true to remove partition key from data
+DESC
+  config_param :exclude_partition, :bool, :default => false,
+               :desc => <<-DESC
+Set true to remove partition from data
 DESC
   config_param :exclude_message_key, :bool, :default => false,
                :desc => <<-DESC
@@ -229,6 +234,7 @@ DESC
         record['tag'] = tag if @output_include_tag
         topic = (@exclude_topic_key ? record.delete('topic'.freeze) : record['topic'.freeze]) || def_topic
         partition_key = (@exclude_partition_key ? record.delete('partition_key'.freeze) : record['partition_key'.freeze]) || @default_partition_key
+        partition = (@exclude_partition ? record.delete('partition'.freeze) : record['partition'.freeze]) || @default_partition
         message_key = (@exclude_message_key ? record.delete('message_key'.freeze) : record['message_key'.freeze]) || @default_message_key
 
         records_by_topic[topic] ||= 0
@@ -247,9 +253,9 @@ DESC
           messages = 0
           messages_bytes = 0
         end
-        log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, message_key: #{message_key} and value: #{record_buf}.") }
+        log.on_trace { log.trace("message will send to #{topic} with partition_key: #{partition_key}, partition: #{partition}, message_key: #{message_key} and value: #{record_buf}.") }
         messages += 1
-        producer.produce2(record_buf, topic: topic, key: message_key, partition_key: partition_key)
+        producer.produce2(record_buf, topic: topic, key: message_key, partition_key: partition_key, partition: partition)
         messages_bytes += record_buf_bytes
 
         records_by_topic[topic] += 1
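out_kafka_buffered picks up the same two parameters, and the added line resolves the partition per record while writing to the buffer. A small Ruby illustration of that lookup, mirroring the ternary in the diff (the record contents and settings are invented for the example):

    record            = { 'message' => 'hello', 'partition' => 2 }
    exclude_partition = true
    default_partition = 0

    # Delete the field when excluding it, otherwise just read it,
    # and fall back to the configured default when it is missing.
    partition = (exclude_partition ? record.delete('partition') : record['partition']) || default_partition

    partition  #=> 2 (taken from the record)
    record     #=> {"message"=>"hello"} (the field was removed because exclude_partition is true)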
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.4.0
+  version: 0.4.1
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-12-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd