fluent-plugin-kafka 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/fluent-plugin-kafka.gemspec +1 -1
- data/lib/fluent/plugin/in_kafka.rb +13 -9
- data/lib/fluent/plugin/in_kafka_group.rb +14 -11
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: d78b37f9ea216d97ac377d5a67e1be3e33e17daf
|
4
|
+
data.tar.gz: cb214827bea96f3e38c5e1c45de1aa9c2acdd47d
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: a3f1e852d3f66feb00446175c9ced7a89a85d6af850d84fa8dc2cd4a67a557616af5b91734074f1fb399704fa5a9a8b9413f93fd7d92ec39e940bcb3d8fe4972
|
7
|
+
data.tar.gz: b8218fb071b44874c7a941504ee048d76064a5c8a913347d171903dd559c60d12e81fc87d6916ed0f8dde5bc80887bc3fe29612c10d092bc8f59071c4b933e46
|
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |gem|
|
|
12
12
|
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
|
13
13
|
gem.name = "fluent-plugin-kafka"
|
14
14
|
gem.require_paths = ["lib"]
|
15
|
-
gem.version = '0.1.0'
|
15
|
+
gem.version = '0.1.1'
|
16
16
|
gem.add_dependency 'fluentd'
|
17
17
|
gem.add_dependency 'poseidon_cluster'
|
18
18
|
gem.add_dependency 'ltsv'
|
@@ -25,6 +25,10 @@ class KafkaInput < Input
|
|
25
25
|
config_param :min_bytes, :integer, :default => nil
|
26
26
|
config_param :socket_timeout_ms, :integer, :default => nil
|
27
27
|
|
28
|
+
unless method_defined?(:router)
|
29
|
+
define_method("router") { Fluent::Engine }
|
30
|
+
end
|
31
|
+
|
28
32
|
def initialize
|
29
33
|
super
|
30
34
|
require 'poseidon'
|
@@ -88,6 +92,7 @@ class KafkaInput < Input
|
|
88
92
|
@add_prefix,
|
89
93
|
@add_suffix,
|
90
94
|
offset_manager,
|
95
|
+
router,
|
91
96
|
opt)
|
92
97
|
}
|
93
98
|
@topic_watchers.each {|tw|
|
@@ -109,7 +114,7 @@ class KafkaInput < Input
|
|
109
114
|
end
|
110
115
|
|
111
116
|
class TopicWatcher < Coolio::TimerWatcher
|
112
|
-
def initialize(topic_entry, host, port, client_id, interval, format, message_key, add_offset_in_record, add_prefix, add_suffix, offset_manager, options={})
|
117
|
+
def initialize(topic_entry, host, port, client_id, interval, format, message_key, add_offset_in_record, add_prefix, add_suffix, offset_manager, router, options={})
|
113
118
|
@topic_entry = topic_entry
|
114
119
|
@host = host
|
115
120
|
@port = port
|
@@ -122,6 +127,7 @@ class KafkaInput < Input
|
|
122
127
|
@add_suffix = add_suffix
|
123
128
|
@options = options
|
124
129
|
@offset_manager = offset_manager
|
130
|
+
@router = router
|
125
131
|
|
126
132
|
@next_offset = @topic_entry.offset
|
127
133
|
if @topic_entry.offset == -1 && offset_manager
|
@@ -154,7 +160,7 @@ class KafkaInput < Input
|
|
154
160
|
begin
|
155
161
|
msg_record = parse_line(msg.value)
|
156
162
|
msg_record = decorate_offset(msg_record, msg.offset) if @add_offset_in_record
|
157
|
-
es.add(
|
163
|
+
es.add(Engine.now, msg_record)
|
158
164
|
rescue
|
159
165
|
$log.warn msg_record.to_s, :error=>$!.to_s
|
160
166
|
$log.debug_backtrace
|
@@ -162,7 +168,7 @@ class KafkaInput < Input
|
|
162
168
|
}
|
163
169
|
|
164
170
|
unless es.empty?
|
165
|
-
Engine.emit_stream(tag, es)
|
171
|
+
@router.emit_stream(tag, es)
|
166
172
|
|
167
173
|
if @offset_manager
|
168
174
|
next_offset = @consumer.next_offset
|
@@ -186,18 +192,16 @@ class KafkaInput < Input
|
|
186
192
|
end
|
187
193
|
|
188
194
|
def parse_line(record)
|
189
|
-
parsed_record = {}
|
190
195
|
case @format
|
191
196
|
when 'json'
|
192
|
-
|
197
|
+
Yajl::Parser.parse(record)
|
193
198
|
when 'ltsv'
|
194
|
-
parsed_record = LTSV.parse(record)
|
199
|
+
LTSV.parse(record)
|
195
200
|
when 'msgpack'
|
196
|
-
parsed_record = MessagePack.unpack(record)
|
201
|
+
MessagePack.unpack(record)
|
197
202
|
when 'text'
|
198
|
-
parsed_record = {@message_key => record}
|
203
|
+
{@message_key => record}
|
199
204
|
end
|
200
|
-
parsed_record
|
201
205
|
end
|
202
206
|
|
203
207
|
def decorate_offset(record, offset)
|
@@ -1,6 +1,6 @@
|
|
1
1
|
module Fluent
|
2
2
|
|
3
|
-
class KafkaInput < Input
|
3
|
+
class KafkaGroupInput < Input
|
4
4
|
Plugin.register_input('kafka_group', self)
|
5
5
|
|
6
6
|
config_param :brokers, :string
|
@@ -19,6 +19,10 @@ class KafkaInput < Input
|
|
19
19
|
config_param :min_bytes, :integer, :default => nil
|
20
20
|
config_param :socket_timeout_ms, :integer, :default => nil
|
21
21
|
|
22
|
+
unless method_defined?(:router)
|
23
|
+
define_method("router") { Fluent::Engine }
|
24
|
+
end
|
25
|
+
|
22
26
|
def initialize
|
23
27
|
super
|
24
28
|
require 'poseidon_cluster'
|
@@ -68,7 +72,7 @@ class KafkaInput < Input
|
|
68
72
|
@topic_watchers = @topic_list.map {|topic|
|
69
73
|
TopicWatcher.new(topic, @broker_list, @zookeeper_list, @consumer_group,
|
70
74
|
interval, @format, @message_key, @add_prefix,
|
71
|
-
@add_suffix, opt)
|
75
|
+
@add_suffix, router, opt)
|
72
76
|
}
|
73
77
|
@topic_watchers.each {|tw|
|
74
78
|
tw.attach(@loop)
|
@@ -90,13 +94,14 @@ class KafkaInput < Input
|
|
90
94
|
class TopicWatcher < Coolio::TimerWatcher
|
91
95
|
def initialize(topic, broker_list, zookeeper_list, consumer_group,
|
92
96
|
interval, format, message_key, add_prefix, add_suffix,
|
93
|
-
options)
|
97
|
+
router, options)
|
94
98
|
@topic = topic
|
95
99
|
@callback = method(:consume)
|
96
100
|
@format = format
|
97
101
|
@message_key = message_key
|
98
102
|
@add_prefix = add_prefix
|
99
103
|
@add_suffix = add_suffix
|
104
|
+
@router = router
|
100
105
|
|
101
106
|
@consumer = Poseidon::ConsumerGroup.new(
|
102
107
|
consumer_group,
|
@@ -127,7 +132,7 @@ class KafkaInput < Input
|
|
127
132
|
bulk.each do |msg|
|
128
133
|
begin
|
129
134
|
msg_record = parse_line(msg.value)
|
130
|
-
es.add(
|
135
|
+
es.add(Engine.now, msg_record)
|
131
136
|
rescue
|
132
137
|
$log.warn msg_record.to_s, :error=>$!.to_s
|
133
138
|
$log.debug_backtrace
|
@@ -136,23 +141,21 @@ class KafkaInput < Input
|
|
136
141
|
end
|
137
142
|
|
138
143
|
unless es.empty?
|
139
|
-
Engine.emit_stream(tag, es)
|
144
|
+
@router.emit_stream(tag, es)
|
140
145
|
end
|
141
146
|
end
|
142
147
|
|
143
148
|
def parse_line(record)
|
144
|
-
parsed_record = {}
|
145
149
|
case @format
|
146
150
|
when 'json'
|
147
|
-
parsed_record = Yajl::Parser.parse(record)
|
151
|
+
Yajl::Parser.parse(record)
|
148
152
|
when 'ltsv'
|
149
|
-
parsed_record = LTSV.parse(record)
|
153
|
+
LTSV.parse(record)
|
150
154
|
when 'msgpack'
|
151
|
-
parsed_record = MessagePack.unpack(record)
|
155
|
+
MessagePack.unpack(record)
|
152
156
|
when 'text'
|
153
|
-
parsed_record = {@message_key => record}
|
157
|
+
{@message_key => record}
|
154
158
|
end
|
155
|
-
parsed_record
|
156
159
|
end
|
157
160
|
end
|
158
161
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: fluent-plugin-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.0
|
4
|
+
version: 0.1.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Hidemasa Togashi
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-
|
11
|
+
date: 2015-12-01 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: fluentd
|