fluent-plugin-kafkaclient 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +11 -0
- data/CODE_OF_CONDUCT.md +49 -0
- data/Gemfile +4 -0
- data/README.md +112 -0
- data/Rakefile +10 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/fluent-plugin-kafkaclient.gemspec +29 -0
- data/lib/fluent/plugin/kafka_out_buffered.rb +137 -0
- metadata +171 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: 06d9d7dd61b1f86d40e0ac9970982a079cdabf93
|
4
|
+
data.tar.gz: fe644a4924d928183bfbcaa29272f8e5bc49412c
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 122625d7a86924f1af1a795610d98207cd93a9118c5e85d8938818dcaf8d257ddb7f781b57c07bdc8fb8603b9537bdea3b8aba7e1863aa7435316439801399bd
|
7
|
+
data.tar.gz: 5b7771c0bc3d951070fe147929b6a30d78f7e470fd12dd5d5859bcfb3ec7b4b27ee43017c2d844fd4f1f38c02fad528ed592c9decb8e7b9cbadf4eaed01ee657
|
data/.gitignore
ADDED
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,49 @@
|
|
1
|
+
# Contributor Code of Conduct
|
2
|
+
|
3
|
+
As contributors and maintainers of this project, and in the interest of
|
4
|
+
fostering an open and welcoming community, we pledge to respect all people who
|
5
|
+
contribute through reporting issues, posting feature requests, updating
|
6
|
+
documentation, submitting pull requests or patches, and other activities.
|
7
|
+
|
8
|
+
We are committed to making participation in this project a harassment-free
|
9
|
+
experience for everyone, regardless of level of experience, gender, gender
|
10
|
+
identity and expression, sexual orientation, disability, personal appearance,
|
11
|
+
body size, race, ethnicity, age, religion, or nationality.
|
12
|
+
|
13
|
+
Examples of unacceptable behavior by participants include:
|
14
|
+
|
15
|
+
* The use of sexualized language or imagery
|
16
|
+
* Personal attacks
|
17
|
+
* Trolling or insulting/derogatory comments
|
18
|
+
* Public or private harassment
|
19
|
+
* Publishing other's private information, such as physical or electronic
|
20
|
+
addresses, without explicit permission
|
21
|
+
* Other unethical or unprofessional conduct
|
22
|
+
|
23
|
+
Project maintainers have the right and responsibility to remove, edit, or
|
24
|
+
reject comments, commits, code, wiki edits, issues, and other contributions
|
25
|
+
that are not aligned to this Code of Conduct, or to ban temporarily or
|
26
|
+
permanently any contributor for other behaviors that they deem inappropriate,
|
27
|
+
threatening, offensive, or harmful.
|
28
|
+
|
29
|
+
By adopting this Code of Conduct, project maintainers commit themselves to
|
30
|
+
fairly and consistently applying these principles to every aspect of managing
|
31
|
+
this project. Project maintainers who do not follow or enforce the Code of
|
32
|
+
Conduct may be permanently removed from the project team.
|
33
|
+
|
34
|
+
This code of conduct applies both within project spaces and in public spaces
|
35
|
+
when an individual is representing the project or its community.
|
36
|
+
|
37
|
+
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
38
|
+
reported by contacting a project maintainer at minamiya@r.recruit.co.jp. All
|
39
|
+
complaints will be reviewed and investigated and will result in a response that
|
40
|
+
is deemed necessary and appropriate to the circumstances. Maintainers are
|
41
|
+
obligated to maintain confidentiality with regard to the reporter of an
|
42
|
+
incident.
|
43
|
+
|
44
|
+
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
45
|
+
version 1.3.0, available at
|
46
|
+
[http://contributor-covenant.org/version/1/3/0/][version]
|
47
|
+
|
48
|
+
[homepage]: http://contributor-covenant.org
|
49
|
+
[version]: http://contributor-covenant.org/version/1/3/0/
|
data/Gemfile
ADDED
data/README.md
ADDED
@@ -0,0 +1,112 @@
|
|
1
|
+
# Fluent::Plugin::Kafkaclient
|
2
|
+
Fluentd plugin for Apache Kafka.
|
3
|
+
This plugin uses [ruby-kafka](https://github.com/zendesk/ruby-kafka) as the Ruby client library.
|
4
|
+
|
5
|
+
## Installation
|
6
|
+
|
7
|
+
Add this line to your application's Gemfile:
|
8
|
+
|
9
|
+
```ruby
|
10
|
+
gem 'fluent-plugin-kafkaclient'
|
11
|
+
```
|
12
|
+
|
13
|
+
And then execute:
|
14
|
+
|
15
|
+
$ bundle
|
16
|
+
|
17
|
+
Or install it yourself as:
|
18
|
+
|
19
|
+
$ gem install fluent-plugin-kafkaclient
|
20
|
+
|
21
|
+
## Usage
|
22
|
+
|
23
|
+
### Producing Messages to Kafka (Buffered Output Plugin)
|
24
|
+
This is basic configuration.
|
25
|
+
|
26
|
+
```
|
27
|
+
<match *.**>
|
28
|
+
@type kafka_out_buffered
|
29
|
+
client_id <client id> :default => producer_000
|
30
|
+
brokers <broker1_host>:<broker1_port>,<broker2_host>:<broker2_port>
|
31
|
+
topic <kafka topic>
|
32
|
+
producer_type (sync|async) :default => sync
|
33
|
+
partition_key (string) :default => nil
|
34
|
+
buffer_type (file|memory)
|
35
|
+
output_data_type (none|json|msgpack) :default => none
|
36
|
+
output_include_tag (true|false) :default => false
|
37
|
+
output_include_time (true|false) :default => false
|
38
|
+
</match>
|
39
|
+
```
|
40
|
+
|
41
|
+
#### Sync Producer
|
42
|
+
|
43
|
+
These Parameters are used for Sync Producer.
|
44
|
+
|
45
|
+
```
|
46
|
+
required_acks, :integer, :default => 1
|
47
|
+
ack_timeout, :integer, :default => 2
|
48
|
+
compression_codec, (snappy|gzip|nil), :default => nil
|
49
|
+
compression_threshold, :integer, :default => 1
|
50
|
+
max_retries, :string, :integer, :default => 2
|
51
|
+
retry_backoff, :string, :integer, :default => 1
|
52
|
+
max_buffer_size, :integer, :default => 1000
|
53
|
+
```
|
54
|
+
- ```required_acks``` The number of acknowledgments the producer requires the leader to have received before considering a request complete.
|
55
|
+
- ```ack_timeout``` a timeout executed by a broker when the client is sending messages to it
|
56
|
+
- ```compression_codec``` you can choose snappy or gzip to compress.
|
57
|
+
- ```compression_threshold``` the number of messages to compress one time.
|
58
|
+
- ```max_retries``` the maximum number of retries to attempt
|
59
|
+
- ```retry_backoff``` the number of seconds to wait after a failed attempt to send messages to a Kafka broker before retrying.
|
60
|
+
- ```max_buffer_size``` the maximum size of the producer buffer.
|
61
|
+
|
62
|
+
#### Async Producer
|
63
|
+
These Parameters can be used for Async Producer.
|
64
|
+
|
65
|
+
```
|
66
|
+
max_queue_size, :integer, :default => 1000
|
67
|
+
delivery_threshold, :integer, :default => 0
|
68
|
+
delivery_interval, :integer, :default => 0
|
69
|
+
```
|
70
|
+
- ```delivery_threshold``` Trigger a delivery once 'delivery_threshold' messages have been buffered.
|
71
|
+
- ```delivery_interval``` Trigger a delivery every 'delivery_interval' seconds.
|
72
|
+
|
73
|
+
### Encryption and Authentication using TLS
|
74
|
+
|
75
|
+
```
|
76
|
+
encryption :bool, :default => false
|
77
|
+
authentication :bool, :default => false
|
78
|
+
ca_cert_path <path to ca_cert>:default => nil
|
79
|
+
client_cert_path <path to client cert> :default => nil
|
80
|
+
client_cert_key_path <path to client cert key> :default => nil
|
81
|
+
```
|
82
|
+
|
83
|
+
In order to encrypt messages, you just need to
|
84
|
+
1. make param of encryption true
|
85
|
+
2. set a valid CA certificate path to param of ca_cert_path
|
86
|
+
|
87
|
+
In order to authenticate the client to the cluster,
|
88
|
+
1. make param of authentication true
|
89
|
+
2. set each path of a certificate and key created for the client and trusted by the brokers.
|
90
|
+
|
91
|
+
For details, Please see document about [ruby-kafka](https://github.com/zendesk/ruby-kafka) and [Apache Kafka](http://kafka.apache.org)
|
92
|
+
|
93
|
+
## Development
|
94
|
+
|
95
|
+
After checking out the repo, run `bin/setup` to install dependencies. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
|
96
|
+
|
97
|
+
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
|
98
|
+
|
99
|
+
## Contributing
|
100
|
+
|
101
|
+
1. Fork it
|
102
|
+
2. Create your feature branch (git checkout -b my-new-feature)
|
103
|
+
3. Commit your changes (git commit -am 'Add some feature')
|
104
|
+
4. Push to the branch (git push origin my-new-feature)
|
105
|
+
5. Create new Pull Request
|
106
|
+
|
107
|
+
## License
|
108
|
+
Copyright 2015 Kazuki Minamiya
|
109
|
+
|
110
|
+
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
|
111
|
+
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
112
|
+
|
data/Rakefile
ADDED
data/bin/console
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
|
3
|
+
require "bundler/setup"
|
4
|
+
require "fluent/plugin/kafkaclient"
|
5
|
+
|
6
|
+
# You can add fixtures and/or initialization code here to make experimenting
|
7
|
+
# with your gem easier. You can also use a different console, if you like.
|
8
|
+
|
9
|
+
# (If you use this, don't forget to add pry to your Gemfile!)
|
10
|
+
# require "pry"
|
11
|
+
# Pry.start
|
12
|
+
|
13
|
+
require "irb"
|
14
|
+
IRB.start
|
data/bin/setup
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

Gem::Specification.new do |spec|
  spec.name          = "fluent-plugin-kafkaclient"
  spec.version       = "0.0.1"
  spec.authors       = ["Kazuki Minamiya"]
  spec.email         = ["minami.ind@gmail.com"]

  spec.summary       = %q{Kafka client plugin which supports version 0.9 of kafka.}
  spec.description   = %q{Kafka client Plugin which supports version 0.9 of kafka.}
  spec.homepage      = "https://github.com/nantani/fluent-plugin-kafkaclient"
  # "APLv2" is not a recognized license identifier; the README states
  # Apache License 2.0, whose SPDX identifier is "Apache-2.0".
  spec.license       = "Apache-2.0"

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.required_ruby_version = '>= 2.2.0'

  spec.add_dependency "fluentd"
  spec.add_dependency "ruby-kafka"
  # The gem that provides `require 'yajl'` is published as "yajl-ruby",
  # not "yajl" — depending on "yajl" would not install the JSON encoder
  # the plugin actually loads.
  spec.add_dependency "yajl-ruby"
  spec.add_dependency "msgpack"
  spec.add_dependency "activesupport", ">= 4.2.0", "< 5.1"

  spec.add_development_dependency "test-unit"
  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
end
|
@@ -0,0 +1,137 @@
|
|
1
|
+
# encoding: utf-8

# Buffered Fluentd output plugin that produces records to Apache Kafka
# via the ruby-kafka client. Supports both sync and async producers and
# optional TLS encryption/authentication.
class Fluent::KafkaOutBuffered < Fluent::BufferedOutput
  Fluent::Plugin.register_output('kafka_out_buffered', self)

  # ruby-kafka plugin main options
  config_param :client_id, :string, :default => 'producer_000'
  config_param :brokers, :string, :default => 'localhost:9092'
  config_param :topic, :string, :default => nil
  config_param :partition_key, :string, :default => nil
  config_param :output_data_type, :string, :default => nil
  config_param :output_include_tag, :bool, :default => false
  config_param :output_include_time, :bool, :default => false
  config_param :producer_type, :string, :default => 'sync'

  # Sync Producer options
  config_param :required_acks, :integer, :default => 1
  config_param :ack_timeout, :integer, :default => 2
  config_param :compression_codec, :string, :default => nil
  config_param :compression_threshold, :integer, :default => 1
  # NOTE: the original declarations passed two types
  # (:string, :integer) to config_param, which is invalid — these are
  # integer parameters per the README.
  config_param :max_retries, :integer, :default => 2
  config_param :retry_backoff, :integer, :default => 1
  config_param :max_buffer_size, :integer, :default => 1000

  # Async Producer options
  config_param :max_queue_size, :integer, :default => 1000
  config_param :delivery_threshold, :integer, :default => 0
  config_param :delivery_interval, :integer, :default => 0

  # encryption and authentication options
  config_param :encryption, :bool, :default => false
  config_param :authentication, :bool, :default => false
  config_param :ca_cert_path, :string, :default => nil
  config_param :client_cert_path, :string, :default => nil
  config_param :client_cert_key_path, :string, :default => nil

  def initialize
    super
    require 'kafka'
    require "active_support/notifications"
    # 'Yajl' (capitalized) fails on case-sensitive filesystems; the
    # yajl-ruby gem is loaded as 'yajl'.
    require 'yajl'
  end

  # Validates TLS-related parameters and preloads certificate material.
  # Raises Fluent::ConfigError when a required file is missing/unreadable.
  def configure(conf)
    super

    if @encryption
      raise Fluent::ConfigError, "CA cert file is not found or invalid" unless File.readable?(@ca_cert_path)
      @ca_cert = File.read(@ca_cert_path)
      $log.info "ca_cert is valid"
    end

    if @authentication
      raise Fluent::ConfigError, "Client cert file is not found or invalid" unless File.readable?(@client_cert_path)
      raise Fluent::ConfigError, "Client cert key is not found or invalid" unless File.readable?(@client_cert_key_path)
      @client_cert = File.read(@client_cert_path)
      @client_cert_key = File.read(@client_cert_key_path)
      $log.info "client cert and client cert key is valid"
    end
  end

  # Creates the Kafka connection and a sync or async producer according
  # to @producer_type. Raises Fluent::ConfigError on an unknown type.
  def build_producer()
    @kafka = Kafka.new(
      seed_brokers: @brokers.split(','),
      # client_id was configured but never forwarded in the original code.
      client_id: @client_id,
      ssl_ca_cert: @ca_cert,
      ssl_client_cert: @client_cert,
      ssl_client_cert_key: @client_cert_key
    )

    if @producer_type == 'sync'
      @producer = @kafka.producer(
        required_acks: @required_acks,
        ack_timeout: @ack_timeout,
        compression_codec: @compression_codec,
        compression_threshold: @compression_threshold,
        max_retries: @max_retries,
        retry_backoff: @retry_backoff,
        max_buffer_size: @max_buffer_size
      )
    elsif @producer_type == 'async'
      @producer = @kafka.async_producer(
        max_queue_size: @max_queue_size,
        delivery_threshold: @delivery_threshold,
        delivery_interval: @delivery_interval
      )
    else
      # The log line was previously placed after this raise and was
      # therefore unreachable; log first, then fail.
      raise Fluent::ConfigError, "Producer type parameter, #{@producer_type}, is invalid"
    end

    $log.info "producer type is #{@producer_type}"
  end

  def start
    super
    build_producer()
  end

  def shutdown
    super
    @producer.shutdown
  end

  # Serializes an event into the buffer chunk as [tag, time, record].
  def format(tag, time, record)
    [tag, time, record].to_msgpack
  end

  # Encodes a record according to @output_data_type
  # (msgpack | json | anything else => passthrough).
  def encode(record)
    if @output_data_type == 'msgpack'
      record.to_msgpack
    elsif @output_data_type == 'json'
      Yajl::Encoder.encode(record)
    else
      # The original `elsif none` raised NameError for the default
      # ("none") case; fall through to the raw record instead.
      record
    end
  end

  # Drains a buffer chunk: optionally injects tag/time into each record,
  # encodes it, and produces it to @topic.
  def write(chunk)
    chunk.msgpack_each { |(tag, time, record)|
      record['tag'] = tag if @output_include_tag
      record['time'] = time if @output_include_time
      encoded_record = encode(record)

      @producer.produce(
        encoded_record,
        topic: @topic,
        partition_key: @partition_key
      )

      @producer.deliver_messages
    }
  end

end
|
metadata
ADDED
@@ -0,0 +1,171 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: fluent-plugin-kafkaclient
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.1
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Kazuki Minamiya
|
8
|
+
autorequire:
|
9
|
+
bindir: exe
|
10
|
+
cert_chain: []
|
11
|
+
date: 2016-04-03 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: fluentd
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - ">="
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '0'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - ">="
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '0'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: ruby-kafka
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - ">="
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '0'
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - ">="
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '0'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: yajl
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - ">="
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - ">="
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: msgpack
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - ">="
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :runtime
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ">="
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: activesupport
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - ">="
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: 4.2.0
|
76
|
+
- - "<"
|
77
|
+
- !ruby/object:Gem::Version
|
78
|
+
version: '5.1'
|
79
|
+
type: :runtime
|
80
|
+
prerelease: false
|
81
|
+
version_requirements: !ruby/object:Gem::Requirement
|
82
|
+
requirements:
|
83
|
+
- - ">="
|
84
|
+
- !ruby/object:Gem::Version
|
85
|
+
version: 4.2.0
|
86
|
+
- - "<"
|
87
|
+
- !ruby/object:Gem::Version
|
88
|
+
version: '5.1'
|
89
|
+
- !ruby/object:Gem::Dependency
|
90
|
+
name: test-unit
|
91
|
+
requirement: !ruby/object:Gem::Requirement
|
92
|
+
requirements:
|
93
|
+
- - ">="
|
94
|
+
- !ruby/object:Gem::Version
|
95
|
+
version: '0'
|
96
|
+
type: :development
|
97
|
+
prerelease: false
|
98
|
+
version_requirements: !ruby/object:Gem::Requirement
|
99
|
+
requirements:
|
100
|
+
- - ">="
|
101
|
+
- !ruby/object:Gem::Version
|
102
|
+
version: '0'
|
103
|
+
- !ruby/object:Gem::Dependency
|
104
|
+
name: bundler
|
105
|
+
requirement: !ruby/object:Gem::Requirement
|
106
|
+
requirements:
|
107
|
+
- - "~>"
|
108
|
+
- !ruby/object:Gem::Version
|
109
|
+
version: '1.11'
|
110
|
+
type: :development
|
111
|
+
prerelease: false
|
112
|
+
version_requirements: !ruby/object:Gem::Requirement
|
113
|
+
requirements:
|
114
|
+
- - "~>"
|
115
|
+
- !ruby/object:Gem::Version
|
116
|
+
version: '1.11'
|
117
|
+
- !ruby/object:Gem::Dependency
|
118
|
+
name: rake
|
119
|
+
requirement: !ruby/object:Gem::Requirement
|
120
|
+
requirements:
|
121
|
+
- - "~>"
|
122
|
+
- !ruby/object:Gem::Version
|
123
|
+
version: '10.0'
|
124
|
+
type: :development
|
125
|
+
prerelease: false
|
126
|
+
version_requirements: !ruby/object:Gem::Requirement
|
127
|
+
requirements:
|
128
|
+
- - "~>"
|
129
|
+
- !ruby/object:Gem::Version
|
130
|
+
version: '10.0'
|
131
|
+
description: Kafka client Plugin which supports version 0.9 of kafka.
|
132
|
+
email:
|
133
|
+
- minami.ind@gmail.com
|
134
|
+
executables: []
|
135
|
+
extensions: []
|
136
|
+
extra_rdoc_files: []
|
137
|
+
files:
|
138
|
+
- ".gitignore"
|
139
|
+
- CODE_OF_CONDUCT.md
|
140
|
+
- Gemfile
|
141
|
+
- README.md
|
142
|
+
- Rakefile
|
143
|
+
- bin/console
|
144
|
+
- bin/setup
|
145
|
+
- fluent-plugin-kafkaclient.gemspec
|
146
|
+
- lib/fluent/plugin/kafka_out_buffered.rb
|
147
|
+
homepage: https://github.com/nantani/fluent-plugin-kafkaclient
|
148
|
+
licenses:
|
149
|
+
- APLv2
|
150
|
+
metadata: {}
|
151
|
+
post_install_message:
|
152
|
+
rdoc_options: []
|
153
|
+
require_paths:
|
154
|
+
- lib
|
155
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
156
|
+
requirements:
|
157
|
+
- - ">="
|
158
|
+
- !ruby/object:Gem::Version
|
159
|
+
version: 2.2.0
|
160
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
161
|
+
requirements:
|
162
|
+
- - ">="
|
163
|
+
- !ruby/object:Gem::Version
|
164
|
+
version: '0'
|
165
|
+
requirements: []
|
166
|
+
rubyforge_project:
|
167
|
+
rubygems_version: 2.4.5
|
168
|
+
signing_key:
|
169
|
+
specification_version: 4
|
170
|
+
summary: Kafka client plugin which supports version 0.9 of kafka.
|
171
|
+
test_files: []
|