fluent-plugin-kafka 0.0.5
- checksums.yaml +7 -0
- data/Gemfile +4 -0
- data/LICENSE +22 -0
- data/README.md +60 -0
- data/Rakefile +11 -0
- data/fluent-plugin-kafka.gemspec +20 -0
- data/lib/fluent/plugin/in_kafka.rb +117 -0
- data/lib/fluent/plugin/out_kafka.rb +83 -0
- data/lib/fluent/plugin/out_kafka_buffered.rb +97 -0
- data/test/helper.rb +27 -0
- data/test/plugin/test_out_kafka.rb +33 -0
- metadata +111 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: e7ea6df9ab262c3d1a76fd1bf8084dcacfed5e13
+  data.tar.gz: a4e75c5d1c30425d35a7cc57a802256dd28901fb
+SHA512:
+  metadata.gz: 12b5a1579ffd62f45279983cbbbc3c13c4659a0c736b29b6f8e40c765b839ed3a0e6424be27e12917b674f6f62bff076c5879de3cfe133dc9b8b269f8aebc489
+  data.tar.gz: 963e4236af8e79c47eecfdfd0b58f73c6a9e68f3161b1c984d48691ae025c64d3d5e57bb18f8a20ce91430d11ad4007ed6893c7e20aa31efded4ad7d9dd24192
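These digests can be reproduced locally. A minimal Ruby sketch, assuming you have unpacked the .gem archive (itself a tar containing metadata.gz and data.tar.gz) into the current directory:

    require 'digest'

    # A .gem file is a tar archive holding metadata.gz and data.tar.gz;
    # the checksums above are plain SHA1/SHA512 digests of those members.
    %w[metadata.gz data.tar.gz].each do |name|
      bytes = File.binread(name)
      puts "#{name} SHA1:   #{Digest::SHA1.hexdigest(bytes)}"
      puts "#{name} SHA512: #{Digest::SHA512.hexdigest(bytes)}"
    end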
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,22 @@
+Copyright (c) 2014 htgc
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,60 @@
+# Fluent::Plugin::Kafka
+
+TODO: Write a gem description
+TODO: Also, I need to write tests
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'fluent-plugin-kafka'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install fluent-plugin-kafka
+
+## Usage
+
+### Input plugin
+
+    <source>
+      type kafka
+      host <broker host>
+      port <broker port: default=9092>
+      topics <listening topics (separated by comma ',')>
+      format <input text type (text|json)>
+      add_prefix <tag prefix (Optional)>
+      add_suffix <tag suffix (Optional)>
+    </source>
+
+### Output plugin (non-buffered)
+
+    <match *.**>
+      type kafka
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+      default_topic <output topic>
+      output_data_type (json|ltsv|attr:<record name>)
+    </match>
+
+### Buffered output plugin
+
+    <match *.**>
+      type kafka_buffered
+      brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>,..
+      default_topic <output topic>
+      flush_interval <flush interval (sec) :default => 60>
+      buffer_type (file|memory)
+      output_data_type (json|ltsv|attr:<record name>)
+    </match>
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Added some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create a new Pull Request
data/Rakefile
ADDED
data/fluent-plugin-kafka.gemspec
ADDED
@@ -0,0 +1,20 @@
+# -*- encoding: utf-8 -*-
+
+Gem::Specification.new do |gem|
+  gem.authors       = ["Hidemasa Togashi"]
+  gem.email         = ["togachiro@gmail.com"]
+  gem.description   = %q{Fluentd plugin for Apache Kafka > 0.8}
+  gem.summary       = %q{Fluentd plugin for Apache Kafka > 0.8}
+  gem.homepage      = "https://github.com/htgc/fluent-plugin-kafka"
+
+  gem.files         = `git ls-files`.split($\)
+  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
+  gem.name          = "fluent-plugin-kafka"
+  gem.require_paths = ["lib"]
+  gem.version       = '0.0.5'
+  gem.add_dependency 'fluentd'
+  gem.add_dependency 'poseidon'
+  gem.add_dependency 'ltsv'
+  gem.add_dependency 'json'
+end
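As a quick sanity check that the spec above evaluates cleanly, one can load it with RubyGems' own API. A sketch, run from the repository root (because `gem.files` shells out to `git ls-files`, this needs a git checkout):

    # Loads the gemspec and prints its resolved name/version and
    # runtime dependencies (fluentd, poseidon, ltsv, json).
    spec = Gem::Specification.load('fluent-plugin-kafka.gemspec')
    puts spec.full_name                                # fluent-plugin-kafka-0.0.5
    puts spec.runtime_dependencies.map(&:name).join(', ')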
data/lib/fluent/plugin/in_kafka.rb
ADDED
@@ -0,0 +1,117 @@
+module Fluent
+
+class KafkaInput < Input
+  Plugin.register_input('kafka', self)
+
+  config_param :format, :string, :default => 'json' # (json|text)
+  config_param :host, :string, :default => 'localhost'
+  config_param :port, :integer, :default => 9092
+  config_param :interval, :integer, :default => 1 # seconds
+  config_param :topics, :string
+  config_param :client_id, :string, :default => 'kafka'
+  config_param :partition, :integer, :default => 0
+  config_param :offset, :integer, :default => -1
+  config_param :add_prefix, :string, :default => nil
+  config_param :add_suffix, :string, :default => nil
+
+  def initialize
+    super
+    require 'poseidon'
+  end
+
+  def configure(conf)
+    super
+    @topic_list = @topics.split(',').map {|topic| topic.strip }
+    if @topic_list.empty?
+      raise ConfigError, "kafka: 'topics' is a required parameter"
+    end
+
+    case @format
+    when 'json'
+      require 'json'
+    end
+  end
+
+  def start
+    @loop = Coolio::Loop.new
+    @topic_watchers = @topic_list.map {|topic|
+      TopicWatcher.new(topic, @host, @port, @client_id, @partition, @offset, interval, @format, @add_prefix, @add_suffix)
+    }
+    @topic_watchers.each {|tw|
+      tw.attach(@loop)
+    }
+    @thread = Thread.new(&method(:run))
+  end
+
+  def shutdown
+    @loop.stop
+  end
+
+  def run
+    @loop.run
+  rescue
+    $log.error "unexpected error", :error=>$!.to_s
+    $log.error_backtrace
+  end
+
+  class TopicWatcher < Coolio::TimerWatcher
+    def initialize(topic, host, port, client_id, partition, offset, interval, format, add_prefix, add_suffix)
+      @topic = topic
+      @callback = method(:consume)
+      @format = format
+      @add_prefix = add_prefix
+      @add_suffix = add_suffix
+      @consumer = Poseidon::PartitionConsumer.new(
+        client_id, # client_id
+        host,      # host
+        port,      # port
+        topic,     # topic
+        partition, # partition
+        offset     # offset
+      )
+
+      super(interval, true)
+    end
+
+    def on_timer
+      @callback.call
+    rescue
+      # TODO log?
+      $log.error $!.to_s
+      $log.error_backtrace
+    end
+
+    def consume
+      es = MultiEventStream.new
+      tag = @topic
+      tag = @add_prefix + "." + tag if @add_prefix
+      tag = tag + "." + @add_suffix if @add_suffix
+      @consumer.fetch.each { |msg|
+        begin
+          msg_record = parse_line(msg.value)
+          es.add(Time.now.to_i, msg_record)
+        rescue
+          $log.warn msg.value.to_s, :error=>$!.to_s
+          $log.debug_backtrace
+        end
+      }
+
+      unless es.empty?
+        Engine.emit_stream(tag, es)
+      end
+    end
+
+    def parse_line(record)
+      parsed_record = {}
+      case @format
+      when 'json'
+        parsed_record = JSON.parse(record)
+      when 'text'
+        parsed_record = record
+      end
+      parsed_record
+    end
+  end
+end
+
+end
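Each TopicWatcher above boils down to a periodic Poseidon fetch. A standalone sketch of one polling loop, with example broker address, client id, and topic (illustrative values, not the plugin's defaults):

    require 'json'
    require 'poseidon'

    # One consumer per topic/partition, built exactly as TopicWatcher does:
    # client_id, host, port, topic, partition, offset.
    consumer = Poseidon::PartitionConsumer.new(
      'example-client', 'localhost', 9092, 'example-topic', 0, :earliest_offset)

    loop do
      consumer.fetch.each { |msg| p JSON.parse(msg.value) }  # format 'json'
      sleep 1  # stands in for the plugin's Coolio timer interval
    end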
data/lib/fluent/plugin/out_kafka.rb
ADDED
@@ -0,0 +1,83 @@
+class Fluent::KafkaOutput < Fluent::Output
+  Fluent::Plugin.register_output('kafka', self)
+
+  def initialize
+    super
+    require 'poseidon'
+  end
+
+  config_param :brokers, :string, :default => 'localhost:9092'
+  config_param :default_topic, :string, :default => nil
+  config_param :default_partition, :integer, :default => 0
+  config_param :client_id, :string, :default => 'kafka'
+  config_param :output_data_type, :string, :default => 'json'
+  attr_accessor :output_data_type
+  attr_accessor :field_separator
+
+  def configure(conf)
+    super
+    @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
+    @producers = {} # keyed by topic
+    case @output_data_type
+    when 'json'
+      require 'json'
+    when 'ltsv'
+      require 'ltsv'
+    end
+
+    @f_separator = case @field_separator
+                   when /SPACE/i then ' '
+                   when /COMMA/i then ','
+                   when /SOH/i then "\x01"
+                   else "\t"
+                   end
+
+    @custom_attributes = if @output_data_type == 'json'
+                           nil
+                         elsif @output_data_type == 'ltsv'
+                           nil
+                         elsif @output_data_type =~ /^attr:(.*)$/
+                           $1.split(',').map(&:strip).reject(&:empty?)
+                         else
+                           nil
+                         end
+
+  end
+
+  def start
+    super
+  end
+
+  def shutdown
+    super
+  end
+
+  def parse_record(record)
+    if @custom_attributes.nil?
+      case @output_data_type
+      when 'json'
+        JSON.dump(record)
+      when 'ltsv'
+        LTSV.dump(record)
+      else
+        record.to_s
+      end
+    else
+      @custom_attributes.map { |attr|
+        record[attr].nil? ? '' : record[attr].to_s
+      }.join(@f_separator)
+    end
+  end
+
+  def emit(tag, es, chain)
+    chain.next
+    es.each do |time,record|
+      topic = record['topic'] || self.default_topic || tag
+      partition = record['partition'] || self.default_partition
+      message = Poseidon::MessageToSend.new(topic, parse_record(record))
+      @producers[topic] ||= Poseidon::Producer.new(@seed_brokers, self.client_id)
+      @producers[topic].send_messages([message])
+    end
+  end
+
+end
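Stripped of the Fluentd plumbing, each iteration of `emit` above reduces to a few Poseidon calls. A sketch with illustrative topic and record values:

    require 'json'
    require 'poseidon'

    # The plugin creates one producer per topic lazily and reuses it.
    producer = Poseidon::Producer.new(['localhost:9092'], 'fluentd-example')

    record  = { 'message' => 'hello', 'topic' => 'logs' }
    topic   = record['topic'] || 'default-topic'   # record > default_topic > tag
    payload = JSON.dump(record)                    # output_data_type 'json'
    producer.send_messages([Poseidon::MessageToSend.new(topic, payload)])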
data/lib/fluent/plugin/out_kafka_buffered.rb
ADDED
@@ -0,0 +1,97 @@
+class Fluent::KafkaOutputBuffered < Fluent::BufferedOutput
+  Fluent::Plugin.register_output('kafka_buffered', self)
+
+  def initialize
+    super
+    require 'poseidon'
+  end
+
+  config_param :brokers, :string, :default => 'localhost:9092'
+  config_param :default_topic, :string, :default => nil
+  config_param :default_partition, :integer, :default => 0
+  config_param :client_id, :string, :default => 'kafka'
+  config_param :output_data_type, :string, :default => 'json'
+  attr_accessor :output_data_type
+  attr_accessor :field_separator
+
+  def configure(conf)
+    super
+    @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
+    @producers = {} # keyed by topic
+    case @output_data_type
+    when 'json'
+      require 'json'
+    when 'ltsv'
+      require 'ltsv'
+    end
+
+    @f_separator = case @field_separator
+                   when /SPACE/i then ' '
+                   when /COMMA/i then ','
+                   when /SOH/i then "\x01"
+                   else "\t"
+                   end
+
+    @custom_attributes = if @output_data_type == 'json'
+                           nil
+                         elsif @output_data_type == 'ltsv'
+                           nil
+                         elsif @output_data_type =~ /^attr:(.*)$/
+                           $1.split(',').map(&:strip).reject(&:empty?)
+                         else
+                           nil
+                         end
+  end
+
+  def start
+    super
+  end
+
+  def shutdown
+    super
+  end
+
+  def format(tag, time, record)
+    [tag, time, record].to_msgpack
+  end
+
+  def parse_record(record)
+    if @custom_attributes.nil?
+      case @output_data_type
+      when 'json'
+        JSON.dump(record)
+      when 'ltsv'
+        LTSV.dump(record)
+      else
+        record.to_s
+      end
+    else
+      @custom_attributes.map { |attr|
+        record[attr].nil? ? '' : record[attr].to_s
+      }.join(@f_separator)
+    end
+  end
+
+  def write(chunk)
+    records_by_topic = {}
+    chunk.msgpack_each { |tag, time, record|
+      topic = record['topic'] || self.default_topic || tag
+      partition = record['partition'] || self.default_partition
+      message = Poseidon::MessageToSend.new(topic, parse_record(record))
+      records_by_topic[topic] ||= []
+      records_by_topic[topic][partition] ||= []
+      records_by_topic[topic][partition] << message
+    }
+    publish(records_by_topic)
+  end
+
+  def publish(records_by_topic)
+    records_by_topic.each { |topic, partitions|
+      partitions.each_with_index { |messages, partition|
+        next unless messages
+        @producers[topic] ||= Poseidon::Producer.new(@seed_brokers, self.client_id)
+        @producers[topic].send_messages(messages)
+      }
+    }
+  end
+end
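The buffered plugin's format/write split can be exercised without Fluentd: `format` packs each event as a msgpack `[tag, time, record]` triple, and `write` later unpacks a chunk and groups messages per topic and partition before publishing. A sketch with made-up events:

    require 'msgpack'
    require 'stringio'

    # Simulate a buffer chunk: concatenated msgpack [tag, time, record] triples.
    events = [['app.log', 0, {'a' => 1}], ['app.log', 0, {'a' => 2}]]
    chunk  = events.map(&:to_msgpack).join

    # The same topic -> partition -> [messages] grouping write() builds.
    records_by_topic = {}
    MessagePack::Unpacker.new(StringIO.new(chunk)).each do |tag, _time, record|
      topic     = record['topic'] || tag
      partition = record['partition'] || 0
      (records_by_topic[topic] ||= [])[partition] ||= []
      records_by_topic[topic][partition] << record
    end
    p records_by_topic  # {"app.log"=>[[{"a"=>1}, {"a"=>2}]]}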
data/test/helper.rb
ADDED
@@ -0,0 +1,27 @@
+require 'rubygems'
+require 'bundler'
+begin
+  Bundler.setup(:default, :development)
+rescue Bundler::BundlerError => e
+  $stderr.puts e.message
+  $stderr.puts "Run `bundle install` to install missing gems"
+  exit e.status_code
+end
+require 'test/unit'
+
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+$LOAD_PATH.unshift(File.dirname(__FILE__))
+require 'fluent/test'
+unless ENV.has_key?('VERBOSE')
+  nulllogger = Object.new
+  nulllogger.instance_eval {|obj|
+    def method_missing(method, *args)
+    end
+  }
+  $log = nulllogger
+end
+
+require 'fluent/plugin/out_kafka'
+
+class Test::Unit::TestCase
+end
data/test/plugin/test_out_kafka.rb
ADDED
@@ -0,0 +1,33 @@
+require 'helper'
+
+class KafkaOutputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+  end
+
+  CONFIG = %[
+    default_topic kitagawakeiko
+    brokers localhost:9092
+  ]
+
+  def create_driver(conf = CONFIG, tag='test')
+    Fluent::Test::BufferedOutputTestDriver.new(Fluent::KafkaOutput, tag).configure(conf)
+  end
+
+  def test_configure
+    d = create_driver
+    assert_equal 'kitagawakeiko', d.instance.default_topic
+    assert_equal 'localhost:9092', d.instance.brokers
+  end
+
+  def test_format
+    d = create_driver
+  end
+
+  def test_write
+    d = create_driver
+    time = Time.parse("2011-01-02 13:14:15 UTC").to_i
+    d.emit({"a"=>1}, time)
+    d.emit({"a"=>2}, time)
+  end
+end
metadata
ADDED
@@ -0,0 +1,111 @@
+--- !ruby/object:Gem::Specification
+name: fluent-plugin-kafka
+version: !ruby/object:Gem::Version
+  version: 0.0.5
+platform: ruby
+authors:
+- Hidemasa Togashi
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2014-02-05 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: fluentd
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: poseidon
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: ltsv
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: json
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+description: Fluentd plugin for Apache Kafka > 0.8
+email:
+- togachiro@gmail.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- LICENSE
+- README.md
+- Rakefile
+- fluent-plugin-kafka.gemspec
+- lib/fluent/plugin/in_kafka.rb
+- lib/fluent/plugin/out_kafka.rb
+- lib/fluent/plugin/out_kafka_buffered.rb
+- test/helper.rb
+- test/plugin/test_out_kafka.rb
+homepage: https://github.com/htgc/fluent-plugin-kafka
+licenses: []
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.0.3
+signing_key:
+specification_version: 4
+summary: Fluentd plugin for Apache Kafka > 0.8
+test_files:
+- test/helper.rb
+- test/plugin/test_out_kafka.rb