fluent-plugin-kafka 0.0.17 → 0.0.18
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/in_kafka.rb +72 -12
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 35a323b016500c1df85bc9631d27aceefe63f633
|
4
|
+
data.tar.gz: c7d13cda1a73ad277400c8aa4ff58e27fdf31a49
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 6524c04bcd2fb424fe0332be575116bf1106eb517f7a25297dd7430da0a46f2901a5417b84c9244a56f626860c02a6ce05ec74303946c5c5b9d5584575bc3c7d
|
7
|
+
data.tar.gz: 0aba3a07b0a558c8d172a861634f7aae517b1827b069041198e78b27bc2988be045b85784d3d9a99f8e9bea8449ec1ab8e1c0fc11f7e05cc6916cce31c3fa2db
|
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
|
|
12
12
|
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
|
13
13
|
gem.name = "fluent-plugin-kafka"
|
14
14
|
gem.require_paths = ["lib"]
|
15
|
-
gem.version = '0.0.17'
|
15
|
+
gem.version = '0.0.18'
|
16
16
|
gem.add_dependency 'fluentd'
|
17
17
|
gem.add_dependency 'poseidon'
|
18
18
|
gem.add_dependency 'ltsv'
|
@@ -16,6 +16,9 @@ class KafkaInput < Input
|
|
16
16
|
config_param :add_suffix, :string, :default => nil
|
17
17
|
config_param :add_offset_in_record, :bool, :default => false
|
18
18
|
|
19
|
+
config_param :offset_zookeeper, :string, :default => nil
|
20
|
+
config_param :offset_zk_root_node, :string, :default => '/fluent-plugin-kafka'
|
21
|
+
|
19
22
|
# poseidon PartitionConsumer options
|
20
23
|
config_param :max_bytes, :integer, :default => nil
|
21
24
|
config_param :max_wait_ms, :integer, :default => nil
|
@@ -25,6 +28,7 @@ class KafkaInput < Input
|
|
25
28
|
def initialize
|
26
29
|
super
|
27
30
|
require 'poseidon'
|
31
|
+
require 'zookeeper'
|
28
32
|
end
|
29
33
|
|
30
34
|
def configure(conf)
|
@@ -68,7 +72,10 @@ class KafkaInput < Input
|
|
68
72
|
opt[:min_bytes] = @min_bytes if @min_bytes
|
69
73
|
opt[:socket_timeout_ms] = @socket_timeout_ms if @socket_timeout_ms
|
70
74
|
|
75
|
+
@zookeeper = Zookeeper.new(@offset_zookeeper) if @offset_zookeeper
|
76
|
+
|
71
77
|
@topic_watchers = @topic_list.map {|topic_entry|
|
78
|
+
offset_manager = OffsetManager.new(topic_entry, @zookeeper, @offset_zk_root_node) if @offset_zookeeper
|
72
79
|
TopicWatcher.new(
|
73
80
|
topic_entry,
|
74
81
|
@host,
|
@@ -80,6 +87,7 @@ class KafkaInput < Input
|
|
80
87
|
@add_offset_in_record,
|
81
88
|
@add_prefix,
|
82
89
|
@add_suffix,
|
90
|
+
offset_manager,
|
83
91
|
opt)
|
84
92
|
}
|
85
93
|
@topic_watchers.each {|tw|
|
@@ -90,6 +98,7 @@ class KafkaInput < Input
|
|
90
98
|
|
91
99
|
def shutdown
|
92
100
|
@loop.stop
|
101
|
+
@zookeeper.close! if @zookeeper
|
93
102
|
end
|
94
103
|
|
95
104
|
def run
|
@@ -100,24 +109,26 @@ class KafkaInput < Input
|
|
100
109
|
end
|
101
110
|
|
102
111
|
class TopicWatcher < Coolio::TimerWatcher
|
103
|
-
def initialize(topic_entry, host, port, client_id, interval, format, message_key, add_offset_in_record, add_prefix, add_suffix, options={})
|
112
|
+
def initialize(topic_entry, host, port, client_id, interval, format, message_key, add_offset_in_record, add_prefix, add_suffix, offset_manager, options={})
|
104
113
|
@topic_entry = topic_entry
|
105
|
-
@
|
114
|
+
@host = host
|
115
|
+
@port = port
|
116
|
+
@client_id = client_id
|
106
117
|
@callback = method(:consume)
|
107
118
|
@format = format
|
108
119
|
@message_key = message_key
|
120
|
+
@add_offset_in_record = add_offset_in_record
|
109
121
|
@add_prefix = add_prefix
|
110
122
|
@add_suffix = add_suffix
|
111
|
-
@
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
123
|
+
@options = options
|
124
|
+
@offset_manager = offset_manager
|
125
|
+
|
126
|
+
@next_offset = @topic_entry.offset
|
127
|
+
if @topic_entry.offset == -1 && offset_manager
|
128
|
+
@next_offset = offset_manager.next_offset
|
129
|
+
end
|
130
|
+
@consumer = create_consumer(@next_offset)
|
131
|
+
|
121
132
|
super(interval, true)
|
122
133
|
end
|
123
134
|
|
@@ -134,6 +145,11 @@ class KafkaInput < Input
|
|
134
145
|
tag = @topic_entry.topic
|
135
146
|
tag = @add_prefix + "." + tag if @add_prefix
|
136
147
|
tag = tag + "." + @add_suffix if @add_suffix
|
148
|
+
|
149
|
+
if @offset_manager && @consumer.next_offset != @next_offset
|
150
|
+
@consumer = create_consumer(@next_offset)
|
151
|
+
end
|
152
|
+
|
137
153
|
@consumer.fetch.each { |msg|
|
138
154
|
begin
|
139
155
|
msg_record = parse_line(msg.value)
|
@@ -147,9 +163,28 @@ class KafkaInput < Input
|
|
147
163
|
|
148
164
|
unless es.empty?
|
149
165
|
Engine.emit_stream(tag, es)
|
166
|
+
|
167
|
+
if @offset_manager
|
168
|
+
next_offset = @consumer.next_offset
|
169
|
+
@offset_manager.save_offset(next_offset)
|
170
|
+
@next_offset = next_offset
|
171
|
+
end
|
150
172
|
end
|
151
173
|
end
|
152
174
|
|
175
|
+
def create_consumer(offset)
|
176
|
+
@consumer.close if @consumer
|
177
|
+
Poseidon::PartitionConsumer.new(
|
178
|
+
@client_id, # client_id
|
179
|
+
@host, # host
|
180
|
+
@port, # port
|
181
|
+
@topic_entry.topic, # topic
|
182
|
+
@topic_entry.partition, # partition
|
183
|
+
offset, # offset
|
184
|
+
@options # options
|
185
|
+
)
|
186
|
+
end
|
187
|
+
|
153
188
|
def parse_line(record)
|
154
189
|
parsed_record = {}
|
155
190
|
case @format
|
@@ -197,6 +232,31 @@ class KafkaInput < Input
|
|
197
232
|
attr_reader :topic, :partition, :offset
|
198
233
|
end
|
199
234
|
|
235
|
+
class OffsetManager
|
236
|
+
def initialize(topic_entry, zookeeper, zk_root_node)
|
237
|
+
@zookeeper = zookeeper
|
238
|
+
@zk_path = "#{zk_root_node}/#{topic_entry.topic}/#{topic_entry.partition}/next_offset"
|
239
|
+
create_node(@zk_path, topic_entry.topic, topic_entry.partition)
|
240
|
+
end
|
241
|
+
|
242
|
+
def create_node(zk_path, topic, partition)
|
243
|
+
path = ""
|
244
|
+
zk_path.split(/(\/[^\/]+)/).reject(&:empty?).each { |dir|
|
245
|
+
path = path + dir
|
246
|
+
@zookeeper.create(:path => "#{path}")
|
247
|
+
}
|
248
|
+
$log.trace "use zk offset node : #{path}"
|
249
|
+
end
|
250
|
+
|
251
|
+
def next_offset
|
252
|
+
@zookeeper.get(:path => @zk_path)[:data].to_i
|
253
|
+
end
|
254
|
+
|
255
|
+
def save_offset(offset)
|
256
|
+
@zookeeper.set(:path => @zk_path, :data => offset.to_s)
|
257
|
+
$log.trace "update zk offset node : #{offset.to_s}"
|
258
|
+
end
|
259
|
+
end
|
200
260
|
end
|
201
261
|
|
202
262
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: fluent-plugin-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0.17
|
4
|
+
version: 0.0.18
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Hidemasa Togashi
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-
|
11
|
+
date: 2015-10-29 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: fluentd
|