fluent-plugin-kafka 0.3.0.rc1 → 0.3.0
- checksums.yaml +4 -4
- data/README.md +76 -74
- data/fluent-plugin-kafka.gemspec +4 -4
- data/lib/fluent/plugin/in_kafka.rb +7 -13
- data/lib/fluent/plugin/in_kafka_group.rb +6 -10
- data/lib/fluent/plugin/kafka_plugin_util.rb +1 -0
- data/lib/fluent/plugin/out_kafka.rb +14 -22
- data/lib/fluent/plugin/out_kafka_buffered.rb +22 -32
- metadata +7 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: eac190de45814d289eb9e82f469f4d5a2af50296
+  data.tar.gz: 1b2217fa4de6d4be5d5ba7e05618fbb9dd0b6c92
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 840c3ee94d3830a43bc6d503bad1676f340730b0a15b4f47bfec31f838fd09aae7029d093cda3b37e39ec3a1fe82908ed8f9ad49e877acb2d49ebee82d4fe520
+  data.tar.gz: 242b6180a85333ba0536f5e0598db0b121ab03a14dae605188845b1b020542787ce8198a1dbac22286fbd854d7e19b6cf38553574ead496b3edf19b356f73287
data/README.md
CHANGED
@@ -25,105 +25,103 @@ Or install it yourself as:
 ### Input plugin (@type 'kafka')
 
     <source>
-      @type
-
-
+      @type kafka
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
       topics <listening topics(separate with comma',')>
       format <input text type (text|json|ltsv|msgpack)>
       message_key <key (Optional, for text format only, default is message)>
       add_prefix <tag prefix (Optional)>
       add_suffix <tag suffix (Optional)>
-      max_bytes (integer) :default => nil (Use default of Poseidon)
-      max_wait_ms (integer) :default => nil (Use default of Poseidon)
-      min_bytes (integer) :default => nil (Use default of Poseidon)
-      socket_timeout_ms (integer) :default => nil (Use default of Poseidon)
-    </source>
 
-
+      # Optionally, you can manage topic offset by using zookeeper
+      offset_zookeeper <zookeeper node list (<zookeeper1_host>:<zookeeper1_port>,<zookeeper2_host>:<zookeeper2_port>,..)>
+      offset_zk_root_node <offset path in zookeeper> default => '/fluent-plugin-kafka'
 
--
-
-
-
+      # ruby-kafka consumer options
+      max_bytes (integer) :default => nil (Use default of ruby-kafka)
+      max_wait_time (integer) :default => nil (Use default of ruby-kafka)
+      min_bytes (integer) :default => nil (Use default of ruby-kafka)
+    </source>
 
 Supports a start of processing from the assigned offset for specific topics.
 
     <source>
-      @type
-
-
+      @type kafka
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
       format <input text type (text|json|ltsv|msgpack)>
       <topic>
-        topic
-        partition
-        offset
+        topic <listening topic>
+        partition <listening partition: default=0>
+        offset <listening start offset: default=-1>
       </topic>
       <topic>
-        topic
-        partition
-        offset
+        topic <listening topic>
+        partition <listening partition: default=0>
+        offset <listening start offset: default=-1>
       </topic>
     </source>
 
-See also [
+See also [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka.
 
 ### Input plugin (@type 'kafka_group', supports kafka group)
 
     <source>
-      @type
-
-
+      @type kafka_group
+
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
       consumer_group <consumer group name, must set>
       topics <listening topics(separate with comma',')>
       format <input text type (text|json|ltsv|msgpack)>
       message_key <key (Optional, for text format only, default is message)>
       add_prefix <tag prefix (Optional)>
       add_suffix <tag suffix (Optional)>
-      max_bytes (integer) :default => nil (Use default of Poseidon)
-      max_wait_ms (integer) :default => nil (Use default of Poseidon)
-      min_bytes (integer) :default => nil (Use default of Poseidon)
-      socket_timeout_ms (integer) :default => nil (Use default of Poseidon)
-    </source>
 
-
-
-
-
-
-
+      # ruby-kafka consumer options
+      max_bytes (integer) :default => nil (Use default of ruby-kafka)
+      max_wait_time (integer) :default => nil (Use default of ruby-kafka)
+      min_bytes (integer) :default => nil (Use default of ruby-kafka)
+      offset_commit_interval (integer) :default => nil (Use default of ruby-kafka)
+      offset_commit_threshold (integer) :default => nil (Use default of ruby-kafka)
+      start_from_beginning (bool) :default => true
+    </source>
 
-See also [
+See also [ruby-kafka README](https://github.com/zendesk/ruby-kafka#consuming-messages-from-kafka) for more detailed documentation about ruby-kafka options.
 
 ### Output plugin (non-buffered)
 
-This plugin uses
+This plugin uses ruby-kafka producer for writing data. For performance and reliability concerns, use `kafka_buffered` output instead.
 
     <match *.**>
-      @type
+      @type kafka
 
       # Brokers: you can choose either brokers or zookeeper.
-      brokers
-      zookeeper
-      zookeeper_path
-
-
-
-
-
-
-
-
-
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
+      zookeeper <zookeeper_host>:<zookeeper_port> # Set brokers via Zookeeper
+      zookeeper_path <broker path in zookeeper> :default => /brokers/ids # Set path in zookeeper for kafka
+
+      default_topic (string) :default => nil
+      default_partition_key (string) :default => nil
+      output_data_type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+      output_include_tag (bool) :default => false
+      output_include_time (bool) :default => false
+
+      # ruby-kafka producer options
+      max_send_retries (integer) :default => 1
+      required_acks (integer) :default => 0
+      ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+      compression_codec (gzip|snappy) :default => nil
     </match>
 
-Supports following
+Supports following ruby-kafka::Producer options.
 
-- max_send_retries
-- required_acks
--
-- compression_codec - default:
+- max_send_retries - default: 1 - Number of times to retry sending of messages to a leader.
+- required_acks - default: 0 - The number of acks required per request.
+- ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
+- compression_codec - default: nil - The codec the producer uses to compress messages.
 
-See also [
+See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about ruby-kafka.
 
 This plugin supports compression codec "snappy" also.
 Install snappy module before you use snappy compression.
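The README only tells readers to install the snappy module. A minimal Gemfile sketch (assuming the `snappy` gem, which ruby-kafka loads for this codec; the version pin matches this release):

    # Gemfile (illustrative)
    gem 'fluent-plugin-kafka', '0.3.0'
    gem 'snappy' # native bindings needed before using compression_codec snappy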
@@ -148,33 +146,37 @@ If key name `partition_key` exists in a message, this plugin set its value of pa
 This plugin uses ruby-kafka producer for writing data. This plugin works with recent kafka versions.
 
     <match *.**>
-      @type
+      @type kafka_buffered
 
       # Brokers: you can choose either brokers or zookeeper.
       brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,.. # Set brokers directly
       zookeeper <zookeeper_host>:<zookeeper_port> # Set brokers via Zookeeper
       zookeeper_path <broker path in zookeeper> :default => /brokers/ids # Set path in zookeeper for kafka
-
-
-
-
-
-
-
-
-
-
-
+
+      default_topic (string) :default => nil
+      default_partition_key (string) :default => nil
+      output_data_type (json|ltsv|msgpack|attr:<record name>|<formatter name>) :default => json
+      output_include_tag (bool) :default => false
+      output_include_time (bool) :default => false
+
+      # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
+
+      # ruby-kafka producer options
+      max_send_retries (integer) :default => 1
+      required_acks (integer) :default => 0
+      ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+      compression_codec (gzip|snappy) :default => nil (No compression)
     </match>
 
 Supports following ruby-kafka's producer options.
 
-- max_send_retries
-- required_acks
-- ack_timeout
+- max_send_retries - default: 1 - Number of times to retry sending of messages to a leader.
+- required_acks - default: 0 - The number of acks required per request.
+- ack_timeout - default: nil - How long the producer waits for acks. The unit is seconds.
 - compression_codec - default: nil - The codec the producer uses to compress messages.
 
-See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about
+See also [Kafka::Client](http://www.rubydoc.info/gems/ruby-kafka/Kafka/Client) for more detailed documentation about ruby-kafka.
+
 
 This plugin supports compression codec "snappy" also.
 Install snappy module before you use snappy compression.
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -1,18 +1,18 @@
 # -*- encoding: utf-8 -*-
 
 Gem::Specification.new do |gem|
-  gem.authors = ["Hidemasa Togashi"]
-  gem.email = ["togachiro@gmail.com"]
+  gem.authors = ["Hidemasa Togashi", "Masahiro Nakagawa"]
+  gem.email = ["togachiro@gmail.com", "repeatedly@gmail.com"]
   gem.description = %q{Fluentd plugin for Apache Kafka > 0.8}
   gem.summary = %q{Fluentd plugin for Apache Kafka > 0.8}
-  gem.homepage = "https://github.com/
+  gem.homepage = "https://github.com/fluent/fluent-plugin-kafka"
 
   gem.files = `git ls-files`.split($\)
   gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version
+  gem.version = '0.3.0'
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
   gem.add_dependency 'ltsv'
   gem.add_dependency 'zookeeper'
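The gemspec keeps its compound bound on fluentd. A quick sketch of how RubyGems evaluates `[">= 0.10.58", "< 2"]` (the probe versions are illustrative):

    require 'rubygems'

    req = Gem::Requirement.new('>= 0.10.58', '< 2')
    req.satisfied_by?(Gem::Version.new('0.12.26')) # => true
    req.satisfied_by?(Gem::Version.new('2.0.0'))   # => false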
data/lib/fluent/plugin/in_kafka.rb
CHANGED
@@ -1,10 +1,8 @@
 require 'fluent/input'
 require 'fluent/plugin/kafka_plugin_util'
 
-
-
-class KafkaInput < Input
-  Plugin.register_input('kafka', self)
+class Fluent::KafkaInput < Fluent::Input
+  Fluent::Plugin.register_input('kafka', self)
 
   config_param :format, :string, :default => 'json',
                :desc => "Supported format: (json|text|ltsv|msgpack)"
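The class moves from an implicit `module Fluent` wrapper to explicitly qualified names, which is why the stray trailing `end` disappears in the last hunk of this file and why bare constants such as `ConfigError` and `Engine` now need the `Fluent::` prefix. A minimal sketch of the two styles (illustrative, not the plugin's full code):

    require 'fluent/input'

    # Nested style (before): bare constants resolve against the enclosing module.
    module Fluent
      class KafkaInput < Input
        Plugin.register_input('kafka', self)
      end
    end

    # Explicit style (after): every Fluent constant is fully qualified.
    class Fluent::KafkaInput < Fluent::Input
      Fluent::Plugin.register_input('kafka', self)
    end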
@@ -41,10 +39,8 @@ class KafkaInput < Input
                :desc => "How long to block until the server sends us data."
   config_param :min_bytes, :integer, :default => nil,
                :desc => "Smallest amount of data the server should send us."
-  config_param :socket_timeout_ms, :integer, :default => nil,
-               :desc => "How long to wait for reply from server. Should be higher than max_wait_ms."
 
-  include KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SSLSettings
 
   unless method_defined?(:router)
     define_method("router") { Fluent::Engine }
@@ -66,7 +62,7 @@ class KafkaInput < Input
     else
       conf.elements.select { |element| element.name == 'topic' }.each do |element|
         unless element.has_key?('topic')
-          raise ConfigError, "kafka: 'topic' is a require parameter in 'topic element'."
+          raise Fluent::ConfigError, "kafka: 'topic' is a require parameter in 'topic element'."
         end
         partition = element.has_key?('partition') ? element['partition'].to_i : 0
         offset = element.has_key?('offset') ? element['offset'].to_i : -1
@@ -75,7 +71,7 @@ class KafkaInput < Input
     end
 
     if @topic_list.empty?
-      raise ConfigError, "kafka: 'topics' or 'topic element' is a require parameter"
+      raise Fluent::ConfigError, "kafka: 'topics' or 'topic element' is a require parameter"
     end
 
     # For backward compatibility
@@ -228,14 +224,14 @@ class KafkaInput < Input
 
       return if messages.size.zero?
 
-      es = MultiEventStream.new
+      es = Fluent::MultiEventStream.new
       tag = @topic_entry.topic
       tag = @add_prefix + "." + tag if @add_prefix
       tag = tag + "." + @add_suffix if @add_suffix
 
       messages.each { |msg|
         begin
-          es.add(Engine.now, @parser.call(msg, @topic_entry))
+          es.add(Fluent::Engine.now, @parser.call(msg, @topic_entry))
         rescue => e
           $log.warn "parser error in #{@topic_entry.topic}/#{@topic_entry.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
           $log.debug_backtrace
@@ -289,5 +285,3 @@ class KafkaInput < Input
     end
   end
 end
-
-end
data/lib/fluent/plugin/in_kafka_group.rb
CHANGED
@@ -1,10 +1,8 @@
 require 'fluent/input'
 require 'fluent/plugin/kafka_plugin_util'
 
-
-
-class KafkaGroupInput < Input
-  Plugin.register_input('kafka_group', self)
+class Fluent::KafkaGroupInput < Fluent::Input
+  Fluent::Plugin.register_input('kafka_group', self)
 
   config_param :brokers, :string, :default => 'localhost:9092',
                :desc => "List of broker-host:port, separate with comma, must set."
@@ -35,7 +33,7 @@ class KafkaGroupInput < Input
   config_param :start_from_beginning, :bool, :default => true,
                :desc => "Whether to start from the beginning of the topic or just subscribe to new messages being produced"
 
-  include KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SSLSettings
 
   unless method_defined?(:router)
     define_method("router") { Fluent::Engine }
@@ -49,7 +47,7 @@ class KafkaGroupInput < Input
   def _config_to_array(config)
     config_array = config.split(',').map {|k| k.strip }
     if config_array.empty?
-      raise ConfigError, "kafka_group: '#{config}' is a required parameter"
+      raise Fluent::ConfigError, "kafka_group: '#{config}' is a required parameter"
    end
    config_array
  end
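For reference, `_config_to_array` is what turns comma-separated settings such as `topics` into a list. A self-contained sketch of the helper shown above, with an illustrative call (the plugin raises `Fluent::ConfigError` instead of a bare error):

    def _config_to_array(config)
      config_array = config.split(',').map { |k| k.strip }
      raise "'#{config}' is a required parameter" if config_array.empty?
      config_array
    end

    _config_to_array('topic1, topic2,topic3') # => ["topic1", "topic2", "topic3"]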
@@ -120,14 +118,14 @@ class KafkaGroupInput < Input
 
   def run
     @consumer.each_batch(@fetch_opts) { |batch|
-      es = MultiEventStream.new
+      es = Fluent::MultiEventStream.new
       tag = batch.topic
       tag = @add_prefix + "." + tag if @add_prefix
       tag = tag + "." + @add_suffix if @add_suffix
 
       batch.messages.each { |msg|
         begin
-          es.add(Engine.now, @parser_proc.call(msg))
+          es.add(Fluent::Engine.now, @parser_proc.call(msg))
         rescue => e
           $log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
           $log.debug_backtrace
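As the hunk above shows, each Kafka batch becomes a single `Fluent::MultiEventStream` that is emitted as one unit rather than record by record. A minimal sketch of the pattern; the `emit_batch` helper is hypothetical, and `router` is what fluentd provides inside real input plugins:

    require 'fluent/engine'
    require 'fluent/event'

    # Batch several records into one event stream, as `run` does per Kafka batch.
    def emit_batch(router, tag, records)
      es = Fluent::MultiEventStream.new
      now = Fluent::Engine.now
      records.each { |record| es.add(now, record) }
      router.emit_stream(tag, es) # one emit per batch, not per message
    end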
@@ -143,5 +141,3 @@ class KafkaGroupInput < Input
       $log.error_backtrace
     end
   end
-
-end
data/lib/fluent/plugin/kafka_plugin_util.rb
CHANGED
@@ -3,6 +3,7 @@ module Fluent
   module SSLSettings
     def self.included(klass)
       klass.instance_eval {
+        # https://github.com/zendesk/ruby-kafka#encryption-and-authentication-using-ssl
         config_param :ssl_ca_cert, :string, :default => nil,
                      :desc => "a PEM encoded CA cert to use with and SSL connection."
         config_param :ssl_client_cert, :string, :default => nil,
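This is the module whose `include` replaces the duplicated `ssl_*` declarations in both output plugins below: the `self.included` hook runs `config_param` in the context of each including class, so one definition serves every plugin. A stripped-down sketch of the pattern (the module and class names here are hypothetical):

    require 'fluent/output'

    module SharedSSLSettings
      def self.included(klass)
        klass.instance_eval {
          # each including plugin class gets its own copy of these params
          config_param :ssl_ca_cert, :string, :default => nil
        }
      end
    end

    class MyKafkaPlugin < Fluent::Output
      include SharedSSLSettings # ssl_ca_cert is now a config_param of MyKafkaPlugin
    end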
data/lib/fluent/plugin/out_kafka.rb
CHANGED
@@ -1,14 +1,9 @@
+require 'fluent/output'
+require 'fluent/plugin/kafka_plugin_util'
+
 class Fluent::KafkaOutput < Fluent::Output
   Fluent::Plugin.register_output('kafka', self)
 
-  def initialize
-    super
-
-    require 'kafka'
-
-    @kafka = nil
-  end
-
   config_param :brokers, :string, :default => 'localhost:9092',
                :desc => <<-DESC
 Set brokers directly
@@ -28,15 +23,7 @@ DESC
   config_param :output_include_tag, :bool, :default => false
   config_param :output_include_time, :bool, :default => false
 
-  #
-  config_param :ssl_ca_cert, :string, :default => nil,
-               :desc => "a PEM encoded CA cert to use with and SSL connection."
-  config_param :ssl_client_cert, :string, :default => nil,
-               :desc => "a PEM encoded client cert to use with and SSL connection. Must be used in combination with ssl_client_cert_key."
-  config_param :ssl_client_cert_key, :string, :default => nil,
-               :desc => "a PEM encoded client cert key to use with and SSL connection. Must be used in combination with ssl_client_cert."
-
-  # poseidon producer options
+  # ruby-kafka producer options
   config_param :max_send_retries, :integer, :default => 1,
                :desc => "Number of times to retry sending of messages to a leader."
   config_param :required_acks, :integer, :default => 0,
@@ -48,6 +35,8 @@ DESC
 
   config_param :time_format, :string, :default => nil
 
+  include Fluent::KafkaPluginUtil::SSLSettings
+
   attr_accessor :output_data_type
   attr_accessor :field_separator
 
@@ -55,6 +44,14 @@ DESC
     define_method("log") { $log }
   end
 
+  def initialize
+    super
+
+    require 'kafka'
+
+    @kafka = nil
+  end
+
   def refresh_client
     if @zookeeper
       @seed_brokers = []
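Note that `require 'kafka'` stays inside `initialize`; only its position in the file changes. Keeping the require there defers loading the ruby-kafka gem until fluentd actually instantiates the plugin. A sketch of the idiom with a hypothetical plugin class:

    require 'fluent/output'

    class MyLazyOutput < Fluent::Output
      def initialize
        super
        require 'kafka' # heavy dependency loaded only when the plugin is used
        @kafka = nil
      end
    end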
@@ -79,11 +76,6 @@ DESC
     end
   end
 
-  def read_ssl_file(path)
-    return nil if path.nil?
-    File.read(path)
-  end
-
   def configure(conf)
     super
 
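The removed `read_ssl_file` now comes from the shared SSLSettings module instead of being duplicated per plugin. A hedged sketch of how the helper feeds ruby-kafka's client; the keyword names follow the ruby-kafka SSL documentation linked earlier, and the paths are illustrative:

    require 'kafka'

    def read_ssl_file(path)
      return nil if path.nil? # unset option keeps the connection plain-text
      File.read(path)
    end

    kafka = Kafka.new(
      seed_brokers: ['broker1:9092'],
      ssl_ca_cert: read_ssl_file('/etc/ssl/ca.pem'),
      ssl_client_cert: read_ssl_file(nil)
    )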
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -1,20 +1,10 @@
-# encode: utf-8
 require 'thread'
+require 'fluent/output'
+require 'fluent/plugin/kafka_plugin_util'
 
 class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
   Fluent::Plugin.register_output('kafka_buffered', self)
 
-  def initialize
-    super
-
-    require 'kafka'
-    require 'fluent/plugin/kafka_producer_ext'
-
-    @kafka = nil
-    @producers = {}
-    @producers_mutex = Mutex.new
-  end
-
   config_param :brokers, :string, :default => 'localhost:9092',
                :desc => <<-DESC
 Set brokers directly:
@@ -40,15 +30,7 @@ DESC
   config_param :output_include_time, :bool, :default => false
   config_param :kafka_agg_max_bytes, :size, :default => 4*1024 #4k
 
-  #
-  config_param :ssl_ca_cert, :string, :default => nil,
-               :desc => "a PEM encoded CA cert to use with and SSL connection."
-  config_param :ssl_client_cert, :string, :default => nil,
-               :desc => "a PEM encoded client cert to use with and SSL connection. Must be used in combination with ssl_client_cert_key."
-  config_param :ssl_client_cert_key, :string, :default => nil,
-               :desc => "a PEM encoded client cert key to use with and SSL connection. Must be used in combination with ssl_client_cert."
-
-  # poseidon producer options
+  # ruby-kafka producer options
   config_param :max_send_retries, :integer, :default => 1,
                :desc => "Number of times to retry sending of messages to a leader."
   config_param :required_acks, :integer, :default => 0,
@@ -63,6 +45,8 @@ DESC
 
   config_param :time_format, :string, :default => nil
 
+  include Fluent::KafkaPluginUtil::SSLSettings
+
   attr_accessor :output_data_type
   attr_accessor :field_separator
 
@@ -70,6 +54,17 @@ DESC
     define_method("log") { $log }
   end
 
+  def initialize
+    super
+
+    require 'kafka'
+    require 'fluent/plugin/kafka_producer_ext'
+
+    @kafka = nil
+    @producers = {}
+    @producers_mutex = Mutex.new
+  end
+
   def refresh_client(raise_error = true)
     if @zookeeper
       @seed_brokers = []
@@ -98,11 +93,6 @@ DESC
     end
   end
 
-  def read_ssl_file(path)
-    return nil if path.nil?
-    File.read(path)
-  end
-
   def configure(conf)
     super
 
@@ -210,15 +200,15 @@ DESC
       chunk.msgpack_each { |time, record|
         if @output_include_time
           if @time_format
-            record['time'] = Time.at(time).strftime(@time_format)
+            record['time'.freeze] = Time.at(time).strftime(@time_format)
           else
-            record['time'] = time
+            record['time'.freeze] = time
           end
         end
 
         record['tag'] = tag if @output_include_tag
-        topic = record['topic'] || def_topic
-        partition_key = record['partition_key'] || @default_partition_key
+        topic = record['topic'.freeze] || def_topic
+        partition_key = record['partition_key'.freeze] || @default_partition_key
 
         records_by_topic[topic] ||= 0
         bytes_by_topic[topic] ||= 0
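The `.freeze` changes in this hunk are a small hot-path optimization: `chunk.msgpack_each` runs once per record, and a frozen string literal lets MRI (2.1 and later) reuse a single shared key object instead of allocating a new `String` on every iteration. A sketch:

    records = [{}, {}, {}]
    records.each do |record|
      # a bare 'time' literal would allocate a fresh String per pass;
      # 'time'.freeze is deduplicated to one frozen object by the interpreter
      record['time'.freeze] = Time.now.to_i
    end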
@@ -240,10 +230,10 @@ DESC
         bytes_by_topic[topic] += record_buf_bytes
       }
       if messages > 0
-        log.trace
+        log.trace { "#{messages} messages send." }
         producer.deliver_messages
       end
-      log.debug "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})"
+      log.debug { "(records|bytes) (#{records_by_topic}|#{bytes_by_topic})" }
     end
   rescue Exception => e
     log.warn "Send exception occurred: #{e}"
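The switch to block-form logging matters for the same per-chunk reasons: the block is only evaluated when the level is enabled, so the string interpolation costs nothing at higher log levels. A self-contained sketch with Ruby's stdlib Logger, which uses the same convention:

    require 'logger'

    log = Logger.new($stdout)
    log.level = Logger::INFO
    messages = 42

    log.debug("#{messages} messages send.")    # string is built, then discarded
    log.debug { "#{messages} messages send." } # block never runs at INFO level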
metadata
CHANGED
@@ -1,14 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.3.0.rc1
+  version: 0.3.0
 platform: ruby
 authors:
 - Hidemasa Togashi
+- Masahiro Nakagawa
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-08-
+date: 2016-08-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -103,6 +104,7 @@ dependencies:
 description: Fluentd plugin for Apache Kafka > 0.8
 email:
 - togachiro@gmail.com
+- repeatedly@gmail.com
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -122,7 +124,7 @@ files:
 - lib/fluent/plugin/out_kafka_buffered.rb
 - test/helper.rb
 - test/plugin/test_out_kafka.rb
-homepage: https://github.com/
+homepage: https://github.com/fluent/fluent-plugin-kafka
 licenses: []
 metadata: {}
 post_install_message:
@@ -136,9 +138,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">="
   - !ruby/object:Gem::Version
-    version:
+    version: '0'
 requirements: []
 rubyforge_project:
 rubygems_version: 2.5.1