fluent-plugin-kafka 0.5.7 → 0.6.0
- checksums.yaml +4 -4
- data/ChangeLog +4 -0
- data/README.md +10 -1
- data/fluent-plugin-kafka.gemspec +2 -2
- data/lib/fluent/plugin/in_kafka.rb +3 -1
- data/lib/fluent/plugin/in_kafka_group.rb +3 -1
- data/lib/fluent/plugin/kafka_plugin_util.rb +11 -0
- data/lib/fluent/plugin/out_kafka.rb +3 -1
- data/lib/fluent/plugin/out_kafka2.rb +3 -1
- data/lib/fluent/plugin/out_kafka_buffered.rb +3 -1
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 724da4a2036ca9c067bb14f5dce7a4011120b486
+  data.tar.gz: dcf10e1242df2a44b7cab4ac3ca48df545131bdb
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5fa1fdc5af0678ea8212488021b261c5c1f06b129c6a88b796c24cc6dcc306647f770cae276003249e0f1b36a27a5f0d95e085cf2af2e6af0ebf7fad1a6a4257
+  data.tar.gz: e304d72397ffb647e511d62e53ce467083f583f67e020ce376fc4d1270a957f47908b0204a27184a12b4c9f7b96a2ae724ca7e15872235663ef6c840d5de4e67
data/ChangeLog
CHANGED
data/README.md
CHANGED
@@ -32,12 +32,21 @@ If you want to use zookeeper related parameters, you also need to install zookee
 
 ### Common parameters
 
+#### SSL authentication
+
 - ssl_ca_cert
 - ssl_client_cert
 - ssl_client_cert_key
 
 Set path to SSL related files. See [Encryption and Authentication using SSL](https://github.com/zendesk/ruby-kafka#encryption-and-authentication-using-ssl) for more detail.
 
+#### SASL authentication
+
+- principal
+- keytab
+
+Set principal and path to keytab for SASL/GSSAPI authentication. See [Authentication using SASL](https://github.com/zendesk/ruby-kafka#authentication-using-sasl) for more details.
+
 ### Input plugin (@type 'kafka')
 
 Consume events by single consumer.
@@ -150,7 +159,7 @@ This plugin uses ruby-kafka producer for writing data. This plugin works with re
 `<formatter name>` of `output_data_type` uses fluentd's formatter plugins. See [formatter article](http://docs.fluentd.org/articles/formatter-plugin-overview).
 
 ruby-kafka sometimes returns `Kafka::DeliveryFailed` error without good information.
-In this case, `get_kafka_client_log` is useful for identifying the error cause.
+In this case, `get_kafka_client_log` is useful for identifying the error cause.
 ruby-kafka's log is routed to fluentd log so you can see ruby-kafka's log in fluentd logs.
 
 Supports following ruby-kafka's producer options.
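A hedged sketch of how the new README parameters reach ruby-kafka: the plugins in this diff forward them to Kafka.new as the ssl_* and sasl_gssapi_* options shown below. The broker address, file paths, and principal are illustrative placeholders, not values from this release.

# Sketch only: option names mirror the Kafka.new calls added in this diff;
# the broker, paths and principal below are placeholder values.
require 'kafka'

kafka = Kafka.new(
  seed_brokers: ['localhost:9092'],
  ssl_ca_cert: File.read('/path/to/ca.crt'),              # ssl_ca_cert
  ssl_client_cert: File.read('/path/to/client.crt'),      # ssl_client_cert
  ssl_client_cert_key: File.read('/path/to/client.key'),  # ssl_client_cert_key
  sasl_gssapi_principal: 'fluentd/host@EXAMPLE.COM',      # principal
  sasl_gssapi_keytab: '/path/to/fluentd.keytab'           # keytab
)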
data/fluent-plugin-kafka.gemspec
CHANGED
@@ -12,12 +12,12 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.5.7'
+  gem.version = '0.6.0'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
   gem.add_dependency 'ltsv'
-  gem.add_dependency 'ruby-kafka', '~> 0.
+  gem.add_dependency 'ruby-kafka', '~> 0.4.0'
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
 end
data/lib/fluent/plugin/in_kafka.rb
CHANGED
@@ -46,6 +46,7 @@ class Fluent::KafkaInput < Fluent::Input
                :desc => "Smallest amount of data the server should send us."
 
   include Fluent::KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SaslSettings
 
   unless method_defined?(:router)
     define_method("router") { Fluent::Engine }
@@ -159,7 +160,8 @@ class Fluent::KafkaInput < Fluent::Input
     @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id,
                        ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                        ssl_client_cert: read_ssl_file(@ssl_client_cert),
-                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
+                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                       sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
     @zookeeper = Zookeeper.new(@offset_zookeeper) if @offset_zookeeper
 
     @topic_watchers = @topic_list.map {|topic_entry|
data/lib/fluent/plugin/in_kafka_group.rb
CHANGED
@@ -43,6 +43,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "Whether to start from the beginning of the topic or just subscribe to new messages being produced"
 
   include Fluent::KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SaslSettings
 
   class ForShutdown < StandardError
   end
@@ -125,7 +126,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
     @kafka = Kafka.new(seed_brokers: @brokers,
                        ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
                        ssl_client_cert: read_ssl_file(@ssl_client_cert),
-                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
+                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                       sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
     @consumer = setup_consumer
     @thread = Thread.new(&method(:run))
   end
data/lib/fluent/plugin/kafka_plugin_util.rb
CHANGED
@@ -18,5 +18,16 @@ module Fluent
         File.read(path)
       end
     end
+
+    module SaslSettings
+      def self.included(klass)
+        klass.instance_eval {
+          config_param :principal, :string, :default => nil,
+                       :desc => "a Kerberos principal to use with SASL authentication (GSSAPI)."
+          config_param :keytab, :string, :default => nil,
+                       :desc => "a filepath to Kerberos keytab. Must be used with principal."
+        }
+      end
+    end
   end
 end
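The SaslSettings module above relies on Ruby's included hook to declare the same config_param entries on every plugin class that mixes it in. Below is a minimal standalone sketch of that pattern; config_param here is a simplified stand-in (the real one comes from Fluentd's plugin base classes).

# Standalone sketch of the included-hook pattern; `config_param` is a
# simplified stand-in that only records a default value.
module SaslSettingsSketch
  def self.included(klass)
    # Runs with `self` set to the including class, so the parameters are
    # declared once per plugin (inputs and outputs alike).
    klass.instance_eval {
      config_param :principal, :string, :default => nil
      config_param :keytab, :string, :default => nil
    }
  end
end

class FakePlugin
  # Stand-in for Fluentd's config_param: expose a reader returning the default.
  def self.config_param(name, _type, opts = {})
    define_method(name) { opts[:default] }
  end

  include SaslSettingsSketch
end

p FakePlugin.new.principal  # => nil until overridden by configuration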
data/lib/fluent/plugin/out_kafka.rb
CHANGED
@@ -67,6 +67,7 @@ requires activesupport gem - records will be generated under fluent_kafka_stats.
 DESC
 
   include Fluent::KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SaslSettings
 
   attr_accessor :output_data_type
   attr_accessor :field_separator
@@ -97,7 +98,8 @@ DESC
     begin
       if @seed_brokers.length > 0
         @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
         log.info "initialized kafka producer: #{@client_id}"
       else
         log.warn "No brokers found on Zookeeper"
data/lib/fluent/plugin/out_kafka2.rb
CHANGED
@@ -59,6 +59,7 @@ DESC
   end
 
   include Fluent::KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SaslSettings
 
   def initialize
     super
@@ -70,7 +71,8 @@ DESC
     begin
       logger = @get_kafka_client_log ? log : nil
       @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
       log.info "initialized kafka producer: #{@client_id}"
     rescue Exception => e
       if raise_error # During startup, error should be reported to engine and stop its phase for safety.
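For reference, the get_kafka_client_log toggle seen above is a plain conditional: when enabled, the plugin hands its own Fluentd logger to ruby-kafka so client-side errors surface in the fluentd log; otherwise the client gets no logger. A minimal sketch, using a standard Logger as a stand-in for the plugin's log object and a placeholder broker address.

# Sketch of the logger toggle; Logger stands in for the plugin's `log`.
require 'logger'
require 'kafka'

get_kafka_client_log = true
log = Logger.new($stdout)

logger = get_kafka_client_log ? log : nil  # nil keeps ruby-kafka quiet
kafka = Kafka.new(seed_brokers: ['localhost:9092'], client_id: 'fluentd', logger: logger)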
data/lib/fluent/plugin/out_kafka_buffered.rb
CHANGED
@@ -75,6 +75,7 @@ requires activesupport gem - records will be generated under fluent_kafka_stats.
 DESC
 
   include Fluent::KafkaPluginUtil::SSLSettings
+  include Fluent::KafkaPluginUtil::SaslSettings
 
   attr_accessor :output_data_type
   attr_accessor :field_separator
@@ -109,7 +110,8 @@ DESC
       if @seed_brokers.length > 0
         logger = @get_kafka_client_log ? log : nil
         @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key))
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
         log.info "initialized kafka producer: #{@client_id}"
       else
         log.warn "No brokers found on Zookeeper"
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.5.7
+  version: 0.6.0
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-07-
+date: 2017-07-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -51,14 +51,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.4.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.4.0
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement