fluent-plugin-kafka 0.6.6 → 0.7.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: f03db16ea9d9f3e937b8f2e5ee3840ac92929f98
-  data.tar.gz: 8f5db823ac54b6154c38571585e18915b38d0e71
+  metadata.gz: 82c28fd4a4b133b7ad2685d4bd65b49ce4640b51
+  data.tar.gz: 9e215032a9f21a622abd34185e0d4137daa69fd6
 SHA512:
-  metadata.gz: 5a032b3b1cbabc0ec9678257151cba70d98338999bd028c865419790817259e63eb7172c24a9956db3bfe0ee2b35a6c27615bbb1e6c39e466dfac39cf9282bb0
-  data.tar.gz: fa7da134390f0532f399b5e0d802c377e21d917d4ed606e0ec6d1df1b0a1e81c54e37b7897643fc191cf3d41ba420e1859424170b3fbe07fd7039413300c61c5
+  metadata.gz: 2cabd4dd25b39d84cd1785d4f28b0733719504ceef7c8e39fe55c4179232869de53db740007f423c0825a316a324a52d9bf806795fc3581b6e0f9b59bbcd133b
+  data.tar.gz: 4ccf05ed9a4776a6380988b8ab9c603205cb111c39715c591baaf9473103d882e7535b4369a8189ad33779d6543bc6c01b97b5ac3682d38976ad9fbe179af4b2
data/ChangeLog CHANGED
@@ -1,3 +1,9 @@
+Release 0.7.0 - 2018/02/27
+
+  * output: SASL PLAIN/SCRAM support
+  * input: Fix TimeParser location bug in v1
+  * out_kafka2: Fix warning condition for tag chunk key
+
 Release 0.6.6 - 2017/12/25
 
   * output: Make topic/partition/partition_key/message_key configurable
data/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # fluent-plugin-kafka, a plugin for [Fluentd](http://fluentd.org)
2
2
 
3
- [![Build Status](https://travis-ci.org/htgc/fluent-plugin-kafka.svg?branch=master)](https://travis-ci.org/htgc/fluent-plugin-kafka)
3
+ [![Build Status](https://travis-ci.org/fluent/fluent-plugin-kafka.svg?branch=master)](https://travis-ci.org/fluent/fluent-plugin-kafka)
4
4
 
5
5
  A fluentd plugin to both consume and produce data for Apache Kafka.
6
6
 
@@ -213,7 +213,7 @@ If key name `message_key` exists in a message, this plugin publishes the value o
 
 ### Output plugin
 
-This plugin is for v1.0. This will be `out_kafka` plugin in the feature.
+This plugin is for v1.0. This will be `out_kafka` plugin in the future.
 
     <match app.**>
      @type kafka2
@@ -242,7 +242,7 @@ This plugin is for v1.0. This will be `out_kafka` plugin in the feature.
      # See fluentd document for buffer related parameters: http://docs.fluentd.org/articles/buffer-plugin-overview
      # Buffer chunk key should be same with topic_key. If value is not found in the record, default_topic is used.
      <buffer topic>
-      flush_interavl 10s
+      flush_interval 10s
      </buffer>
 
      # ruby-kafka producer options
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.6.6'
+  gem.version = '0.7.0'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -108,7 +108,11 @@ class Fluent::KafkaInput < Fluent::Input
     @parser_proc = setup_parser
 
     if @use_record_time and @time_format
-      @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
+      if defined?(Fluent::TimeParser)
+        @time_parser = Fluent::TimeParser.new(@time_format)
+      else
+        @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
+      end
     end
   end
 
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -108,7 +108,11 @@ class Fluent::KafkaGroupInput < Fluent::Input
     @fetch_opts[:min_bytes] = @min_bytes if @min_bytes
 
     if @use_record_time and @time_format
-      @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
+      if defined?(Fluent::TimeParser)
+        @time_parser = Fluent::TimeParser.new(@time_format)
+      else
+        @time_parser = Fluent::TextParser::TimeParser.new(@time_format)
+      end
     end
   end
 
data/lib/fluent/plugin/kafka_plugin_util.rb CHANGED
@@ -31,6 +31,12 @@ module Fluent
         :desc => "a Kerberos principal to use with SASL authentication (GSSAPI)."
       config_param :keytab, :string, :default => nil,
         :desc => "a filepath to Kerberos keytab. Must be used with principal."
+      config_param :username, :string, :default => nil,
+        :desc => "a username when using PLAIN/SCRAM SASL authentication"
+      config_param :password, :string, :default => nil,
+        :desc => "a password when using PLAIN/SCRAM SASL authentication"
+      config_param :scram_mechanism, :string, :default => nil,
+        :desc => "if set, use SCRAM authentication with specified mechanism. When unset, default to PLAIN authentication"
       }
     end
   end
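These three parameters drive the SASL mode selection in the output plugins below: `username`/`password` alone select PLAIN, and adding `scram_mechanism` selects SCRAM. A hypothetical out_kafka2 configuration using the new options (broker address, topic, and credentials are placeholders):

    <match app.**>
      @type kafka2
      brokers broker1:9092
      default_topic logs

      username alice
      password secret
      scram_mechanism sha256   # omit this line to authenticate with PLAIN
    </match>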
data/lib/fluent/plugin/out_kafka.rb CHANGED
@@ -97,9 +97,19 @@ DESC
     end
     begin
       if @seed_brokers.length > 0
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
-                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+        if @scram_mechanism != nil && @username != nil && @password != nil
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
+        elsif @username != nil && @password != nil
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_plain_username: @username, sasl_plain_password: @password)
+        else
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+        end
         log.info "initialized kafka producer: #{@client_id}"
       else
         log.warn "No brokers found on Zookeeper"
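The branch order gives SCRAM precedence when `scram_mechanism`, `username`, and `password` are all set, falls back to PLAIN when only `username` and `password` are set, and otherwise passes the GSSAPI options (which may be nil, meaning no SASL at all). The same block recurs in each output plugin below; a condensed sketch of the selection logic (`sasl_options` is a hypothetical helper, not part of the plugin):

    # Hypothetical helper mirroring the branch order used above:
    # SCRAM > PLAIN > GSSAPI, selected from which credentials are present.
    def sasl_options(username, password, scram_mechanism, principal, keytab)
      if scram_mechanism && username && password
        { sasl_scram_username: username, sasl_scram_password: password,
          sasl_scram_mechanism: scram_mechanism }
      elsif username && password
        { sasl_plain_username: username, sasl_plain_password: password }
      else
        { sasl_gssapi_principal: principal, sasl_gssapi_keytab: keytab }
      end
    end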
@@ -115,7 +125,7 @@ DESC
     if @zookeeper
       require 'zookeeper'
     else
-      @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
+      @seed_brokers = @brokers.split(",")
       log.info "brokers has been set directly: #{@seed_brokers}"
     end
 
data/lib/fluent/plugin/out_kafka2.rb CHANGED
@@ -74,9 +74,19 @@ DESC
   def refresh_client(raise_error = true)
     begin
       logger = @get_kafka_client_log ? log : nil
-      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
-                         sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+      if @scram_mechanism != nil && @username != nil && @password != nil
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
+      elsif @username != nil && @password != nil
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           sasl_plain_username: @username, sasl_plain_password: @password)
+      else
+        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+      end
       log.info "initialized kafka producer: #{@client_id}"
     rescue Exception => e
       if raise_error # During startup, error should be reported to engine and stop its phase for safety.
@@ -106,11 +116,11 @@ DESC
     @formatter_proc = setup_formatter(formatter_conf)
 
     if @default_topic.nil?
-      if @chunk_keys.include?('topic') && !@chunk_keys.include?('tag')
+      if @chunk_keys.include?('topic') && !@chunk_key_tag
         log.warn "Use 'topic' field of event record for topic but no fallback. Recommend to set default_topic or set 'tag' in buffer chunk keys like <buffer topic,tag>"
       end
     else
-      if @chunk_keys.include?('tag')
+      if @chunk_key_tag
         log.warn "default_topic is set. Fluentd's event tag is not used for topic"
       end
     end
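In Fluentd v1, `tag` among the buffer chunk keys is exposed as the separate `@chunk_key_tag` flag rather than as an entry in `@chunk_keys`, so the old `@chunk_keys.include?('tag')` test could never be true and the warning condition mis-fired. With the fix, a buffer section like this illustrative one no longer triggers the missing-fallback warning:

    <buffer topic,tag>
      flush_interval 10s
    </buffer>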
data/lib/fluent/plugin/out_kafka_buffered.rb CHANGED
@@ -119,9 +119,19 @@ DESC
     begin
       if @seed_brokers.length > 0
         logger = @get_kafka_client_log ? log : nil
-        @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
-                           sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+        if @scram_mechanism != nil && @username != nil && @password != nil
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
+        elsif @username != nil && @password != nil
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_plain_username: @username, sasl_plain_password: @password)
+        else
+          @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                             ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                             sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+        end
         log.info "initialized kafka producer: #{@client_id}"
       else
         log.warn "No brokers found on Zookeeper"
@@ -141,7 +151,7 @@ DESC
     if @zookeeper
       require 'zookeeper'
     else
-      @seed_brokers = @brokers.match(",").nil? ? [@brokers] : @brokers.split(",")
+      @seed_brokers = @brokers.split(",")
       log.info "brokers has been set directly: #{@seed_brokers}"
     end
 
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.6.6
+  version: 0.7.0
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-12-25 00:00:00.000000000 Z
+date: 2018-02-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd