fluent-plugin-kafka 0.7.6 → 0.7.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 1f69043ece5ca612bdca6988b3fea137e958832b
-  data.tar.gz: 1b784d0fca451efd86fa532f868033f5605b21e7
+  metadata.gz: 44307aa17ae864f8e892b8c0eeb95066fe50c89c
+  data.tar.gz: 13f83473d1835e7613ec989f21e017f99236c0e3
 SHA512:
-  metadata.gz: c081880f8953d5e97c7ff31f90a84ead32ea538103d215f8d3cb65287a87f39e3acf03be0a5f2af15ce4ff9a2816c114bf5b2d715afde7e90ebd6663cc237c0c
-  data.tar.gz: e1d65842eaadb2edc711be10612cc8a5d69f648e0934e49c5544360aaca798842cbac021cd21080fcdc4700c943773a571aedc2083f23df8d32fed3959f7c95a
+  metadata.gz: f62828d69cc47006dfc5fba8ff31ecc6db33c867144c9a348ff952288aed48d3c5e8ebb7c3b625369cb694b3c0f5d98bb80df193e6545de4d8d6db6338c58539
+  data.tar.gz: f2a2d16d81a2eeabbfe597d15cd2124e7a0f1b2e625f136d5b45fae7ff0ba94563ade1383dd9638c23be79ee8326bef9520ce135be56553f95010784fc9045ed
data/ChangeLog CHANGED
@@ -1,3 +1,10 @@
+Release 0.7.7 - 2018/08/27
+
+ * in_kafka_group: Add retry_limit and disable_retry_limit parameters
+ * input: Support SSL parameters
+ * Add ssl_ca_certs_from_system parameter
+ * Make password parameter secret
+
 Release 0.7.6 - 2018/08/15
 
  * out_kafka2: Add ignore_exceptions and exception_backup parameters
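The new in_kafka_group retry parameters are easiest to read in context. Below is a minimal configuration sketch, not taken from this release; the broker list, consumer group, and topic are placeholders, and parameter names outside this diff (brokers, consumer_group, format) follow the plugin's documented in_kafka_group usage.

    <source>
      @type kafka_group
      brokers broker1:9092,broker2:9092   # placeholder broker list
      consumer_group fluentd-consumer     # placeholder group name
      topics app_log                      # placeholder topic
      format json

      # New in 0.7.7: reconnection to the broker is retried up to retry_limit
      # times (default 10), sleeping retry_wait_seconds between attempts ...
      retry_wait_seconds 30
      retry_limit 10
      # ... or keep retrying indefinitely by lifting the limit:
      # disable_retry_limit true
    </source>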
data/README.md CHANGED
@@ -37,6 +37,7 @@ If you want to use zookeeper related parameters, you also need to install zookee
 - ssl_ca_cert
 - ssl_client_cert
 - ssl_client_cert_key
+ - ssl_ca_certs_from_system
 
 Set path to SSL related files. See [Encryption and Authentication using SSL](https://github.com/zendesk/ruby-kafka#encryption-and-authentication-using-ssl) for more detail.
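As a rough usage sketch (not part of the upstream README), the new switch lets a section trust the operating system's CA bundle instead of pointing ssl_ca_cert at a file. The plugin type, broker, topic, and paths below are placeholders:

    <match app.**>
      @type kafka_buffered
      brokers broker1:9093        # placeholder TLS listener
      default_topic app_log       # placeholder topic

      # Look up CA certificates in the system store
      # (OpenSSL::X509::DEFAULT_CERT_FILE) instead of an explicit CA file:
      ssl_ca_certs_from_system true
      # ssl_ca_cert /path/to/ca.pem            # pre-existing alternative
      # ssl_client_cert /path/to/client.pem    # still available for client auth
      # ssl_client_cert_key /path/to/client.key
    </match>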
@@ -280,10 +281,10 @@ This plugin uses ruby-kafka producer for writing data. For performance and relia
   # ruby-kafka producer options
   max_send_retries (integer) :default => 1
   required_acks (integer) :default => -1
-  ack_timeout (integer) :default => nil (Use default of ruby-kafka)
+  ack_timeout (integer) :default => nil (Use default of ruby-kafka)
   compression_codec (gzip|snappy) :default => nil
   max_buffer_size (integer) :default => nil (Use default of ruby-kafka)
-  max_buffer_bytesize (integer) :default => nil (Use default of ruby-kafka)
+  max_buffer_bytesize (integer) :default => nil (Use default of ruby-kafka)
 </match>
 
 This plugin also supports ruby-kafka related parameters. See Buffered output plugin section.
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
   gem.name = "fluent-plugin-kafka"
   gem.require_paths = ["lib"]
-  gem.version = '0.7.6'
+  gem.version = '0.7.7'
   gem.required_ruby_version = ">= 2.1.0"
 
   gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
@@ -173,11 +173,21 @@ class Fluent::KafkaInput < Fluent::Input
     opt[:max_wait_time] = @max_wait_time if @max_wait_time
     opt[:min_bytes] = @min_bytes if @min_bytes
 
-    @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id,
-                       ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                       ssl_client_cert: read_ssl_file(@ssl_client_cert),
-                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
-                       sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+    if @scram_mechanism != nil && @username != nil && @password != nil
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
+                         sasl_scram_mechanism: @scram_mechanism)
+    elsif @username != nil && @password != nil
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password)
+    else
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+    end
+
     @zookeeper = Zookeeper.new(@offset_zookeeper) if @offset_zookeeper
 
     @topic_watchers = @topic_list.map {|topic_entry|
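The branching above picks SCRAM when scram_mechanism, username, and password are all set, falls back to PLAIN when only username and password are given, and otherwise keeps the original GSSAPI path. A hedged sketch of the SCRAM case for the input follows; broker, topic, and credentials are placeholders, and sha256 is one of the mechanism names ruby-kafka accepts:

    <source>
      @type kafka
      brokers broker1:9093      # placeholder SASL/TLS listener
      topics app_log            # placeholder topic
      format json

      # All three set => the SCRAM branch is taken;
      # drop scram_mechanism to fall back to SASL PLAIN.
      username alice            # placeholder
      password secret           # masked in dumped config since 0.7.7 (secret: true)
      scram_mechanism sha256
      ssl_ca_certs_from_system true
    </source>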
@@ -11,7 +11,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "Consumer group name, must set."
   config_param :topics, :string,
                :desc => "Listening topics(separate with comma',')."
-  config_param :client_id, :string, :default => 'kafka'
+  config_param :client_id, :string, :default => 'kafka'
   config_param :format, :string, :default => 'json',
                :desc => "Supported format: (json|text|ltsv|msgpack)"
   config_param :message_key, :string, :default => 'message',
@@ -30,6 +30,10 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "Set kafka's message key to this field"
 
   config_param :retry_wait_seconds, :integer, :default => 30
+  config_param :disable_retry_limit, :bool, :default => false,
+               :desc => "If set true, it disables retry_limit and make Fluentd retry indefinitely (default: false)"
+  config_param :retry_limit, :integer, :default => 10,
+               :desc => "The maximum number of retries for connecting kafka (default: 10)"
   # Kafka consumer options
   config_param :max_bytes, :integer, :default => 1048576,
                :desc => "Maximum number of bytes to fetch."
@@ -67,6 +71,7 @@ class Fluent::KafkaGroupInput < Fluent::Input
     require 'kafka'
 
     @time_parser = nil
+    @retry_count = 1
   end
 
   def _config_to_array(config)
@@ -141,11 +146,21 @@ class Fluent::KafkaGroupInput < Fluent::Input
   def start
     super
 
-    @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id,
-                       ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                       ssl_client_cert: read_ssl_file(@ssl_client_cert),
-                       ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
-                       sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+    if @scram_mechanism != nil && @username != nil && @password != nil
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password,
+                         sasl_scram_mechanism: @scram_mechanism)
+    elsif @username != nil && @password != nil
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password)
+    else
+      @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
+    end
+
     @consumer = setup_consumer
     @thread = Thread.new(&method(:run))
   end
@@ -170,21 +185,28 @@ class Fluent::KafkaGroupInput < Fluent::Input
     }
     consumer
   end
-
+
   def reconnect_consumer
     log.warn "Stopping Consumer"
     consumer = @consumer
     @consumer = nil
-    consumer.stop
-    log.warn "Could not connect to broker. Next retry will be in #{@retry_wait_seconds} seconds"
+    if consumer
+      consumer.stop
+    end
+    log.warn "Could not connect to broker. retry_time:#{@retry_count}. Next retry will be in #{@retry_wait_seconds} seconds"
+    @retry_count = @retry_count + 1
     sleep @retry_wait_seconds
     @consumer = setup_consumer
     log.warn "Re-starting consumer #{Time.now.to_s}"
+    @retry_count = 0
   rescue =>e
     log.error "unexpected error during re-starting consumer object access", :error => e.to_s
     log.error_backtrace
+    if @retry_count <= @retry_limit or disable_retry_limit
+      reconnect_consumer
+    end
   end
-
+
   def run
     while @consumer
       begin
@@ -254,3 +276,4 @@ class Fluent::KafkaGroupInput < Fluent::Input
     end
   end
 end
+
@@ -10,6 +10,8 @@ module Fluent
                :desc => "a PEM encoded client cert to use with and SSL connection. Must be used in combination with ssl_client_cert_key."
   config_param :ssl_client_cert_key, :string, :default => nil,
                :desc => "a PEM encoded client cert key to use with and SSL connection. Must be used in combination with ssl_client_cert."
+  config_param :ssl_ca_certs_from_system, :bool, :default => false,
+               :desc => "this configures the store to look up CA certificates from the system default certificate store on an as needed basis. The location of the store can usually be determined by: OpenSSL::X509::DEFAULT_CERT_FILE."
     }
   end
 
@@ -39,7 +41,7 @@ module Fluent
                :desc => "a filepath to Kerberos keytab. Must be used with principal."
   config_param :username, :string, :default => nil,
                :desc => "a username when using PLAIN/SCRAM SASL authentication"
-  config_param :password, :string, :default => nil,
+  config_param :password, :string, :default => nil, secret: true,
                :desc => "a password when using PLAIN/SCRAM SASL authentication"
   config_param :scram_mechanism, :string, :default => nil,
                :desc => "if set, use SCRAM authentication with specified mechanism. When unset, default to PLAIN authentication"
@@ -103,15 +103,15 @@ DESC
     if @seed_brokers.length > 0
       if @scram_mechanism != nil && @username != nil && @password != nil
         @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                            sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
       elsif @username != nil && @password != nil
         @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                            sasl_plain_username: @username, sasl_plain_password: @password)
       else
         @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                           ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                            sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
       end
       log.info "initialized kafka producer: #{@client_id}"
@@ -79,15 +79,15 @@ DESC
     logger = @get_kafka_client_log ? log : nil
     if @scram_mechanism != nil && @username != nil && @password != nil
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
     elsif @username != nil && @password != nil
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_plain_username: @username, sasl_plain_password: @password)
     else
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
     end
     log.info "initialized kafka producer: #{@client_id}"
@@ -125,15 +125,15 @@ DESC
     logger = @get_kafka_client_log ? log : nil
     if @scram_mechanism != nil && @username != nil && @password != nil
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism)
     elsif @username != nil && @password != nil
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_plain_username: @username, sasl_plain_password: @password)
     else
       @kafka = Kafka.new(seed_brokers: @seed_brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
-                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
+                         ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system,
                          sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab)
     end
     log.info "initialized kafka producer: #{@client_id}"
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-kafka
 version: !ruby/object:Gem::Version
-  version: 0.7.6
+  version: 0.7.7
 platform: ruby
 authors:
 - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-08-16 00:00:00.000000000 Z
+date: 2018-08-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd