fluent-plugin-kafka 0.12.1 → 0.12.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 497c5a450bc4f55ddaf1b46454ed80f028c283775666e2a38ee0d91f16d391ef
- data.tar.gz: 5f40c26e75f06e4ee73cc0b3ba32ef8438da48bea465549fb95f4bcb89e98819
+ metadata.gz: 9dd9e59977840b92e4db04a067c6a5af66c63b982f808511b1118d33a37604a5
+ data.tar.gz: fb6445978009096d7461b6da7605e0e51d0402a43c56690a8c0912844555af3b
  SHA512:
- metadata.gz: dcac3281832905427ad5799b6b1d7c9dab9b13562c19ba8283fa4eaf2ea1b88e7c782714be7cec9546878e26ab223725b0cbc06259bc8e375eff5e407575bb3f
- data.tar.gz: deef97208222254e9fc4b0279f0dd9045c177354fa1a8dcca3a304d3b2a5d1090e1e7970b5cd9066d7ff917e8020b798d4dd674623285c52041c173fe37d48f5
+ metadata.gz: 5ccfca260f9a688203bb44a64ea9ff07abacd1ce0089aed664bfb67e5001b24e4e5a848c373273f0fed3727fa4dae125c959e27c7628b34a3db12efe030c5ad8
+ data.tar.gz: b6a79e20ed5e19434161864f9dfd96df3d5a140fdbadd6242b88992a1b9327b24089b519275f9c7e2fc256d824430818068e6613317b22e3a31a0ecaf3e0e7f7
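The digests above are the per-archive checksums stored inside the .gem file: a gem is a tar archive containing metadata.gz and data.tar.gz, and checksums.yaml records their SHA256/SHA512 sums. A minimal Ruby sketch for recomputing the SHA256 values from a locally fetched copy (the filename is a placeholder):

```ruby
require "digest"
require "rubygems/package"

# Walk the top-level entries of the .gem tar archive and print the SHA256
# of the two members listed in checksums.yaml.
File.open("fluent-plugin-kafka-0.12.2.gem", "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
      puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
    end
  end
end
```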
data/ChangeLog CHANGED
@@ -1,3 +1,7 @@
+ Release 0.12.2 - 2020/01/07
+
+ * input: Refer sasl_over_ssl parameter in plain SASL
+
  Release 0.12.1 - 2019/10/14
 
  * input: Add time_source parameter to replace use_record_time
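The single functional change in this release is that the input plugins now forward the sasl_over_ssl setting when plain SASL (username/password) authentication is configured; previously the flag was accepted but not passed to ruby-kafka in that branch, so ruby-kafka's own default (SASL over SSL) still applied. A minimal configuration sketch for the kafka_group input, assuming its documented parameter names; brokers, topic, group, and credentials are placeholders:

```
<source>
  @type kafka_group
  # placeholder broker list, consumer group, and topic
  brokers broker1:9092,broker2:9092
  consumer_group fluentd
  topics app_events
  format json
  # placeholder SASL/PLAIN credentials
  username kafka-user
  password kafka-pass
  # honoured in the plain SASL branch only as of 0.12.2
  sasl_over_ssl false
</source>
```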
data/README.md CHANGED
@@ -4,8 +4,6 @@
 
  A fluentd plugin to both consume and produce data for Apache Kafka.
 
- TODO: Also, I need to write tests
-
  ## Installation
 
  Add this line to your application's Gemfile:
@@ -426,6 +424,18 @@ If you use v0.12, use `rdkafka` instead.
  }
  </match>
 
+ ## FAQ
+
+ ### Why can't fluent-plugin-kafka send data to our kafka cluster?
+
+ We get lots of similar questions. In most cases, the problem is a version mismatch between ruby-kafka and the kafka cluster.
+ See the ruby-kafka README for more details: https://github.com/zendesk/ruby-kafka#compatibility
+
+ To avoid the problem, there are two approaches:
+
+ - Upgrade your kafka cluster to the latest version. This is the better option because recent versions are faster and more robust.
+ - Downgrade ruby-kafka/fluent-plugin-kafka to a version that works with your older kafka.
+
  ## Contributing
 
  1. Fork it
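For the second approach in the FAQ above (downgrading to match an older cluster), one option is to pin both gems explicitly so an upgrade of one cannot drift away from what the brokers support. A Gemfile sketch; the version constraints are placeholders and should be taken from the ruby-kafka compatibility table linked above:

```ruby
# Gemfile sketch -- version pins are placeholders, not tested recommendations.
source "https://rubygems.org"

gem "fluent-plugin-kafka", "0.12.2"
# Pick the ruby-kafka line that matches your broker version
# (see https://github.com/zendesk/ruby-kafka#compatibility).
gem "ruby-kafka", "~> 0.7"
```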
data/fluent-plugin-kafka.gemspec CHANGED
@@ -13,7 +13,7 @@ Gem::Specification.new do |gem|
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "fluent-plugin-kafka"
  gem.require_paths = ["lib"]
- gem.version = '0.12.1'
+ gem.version = '0.12.2'
  gem.required_ruby_version = ">= 2.1.0"
 
  gem.add_dependency "fluentd", [">= 0.10.58", "< 2"]
data/lib/fluent/plugin/in_kafka.rb CHANGED
@@ -190,7 +190,8 @@ class Fluent::KafkaInput < Fluent::Input
  elsif @username != nil && @password != nil
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
- ssl_ca_certs_from_system: @ssl_ca_certs_from_system,sasl_plain_username: @username, sasl_plain_password: @password)
+ ssl_ca_certs_from_system: @ssl_ca_certs_from_system,sasl_plain_username: @username, sasl_plain_password: @password,
+ sasl_over_ssl: @sasl_over_ssl)
  else
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
data/lib/fluent/plugin/in_kafka_group.rb CHANGED
@@ -170,7 +170,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
  elsif @username != nil && @password != nil
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
- ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password)
+ ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password,
+ sasl_over_ssl: @sasl_over_ssl)
  else
  @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert),
  ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key),
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-kafka
  version: !ruby/object:Gem::Version
- version: 0.12.1
+ version: 0.12.2
  platform: ruby
  authors:
  - Hidemasa Togashi
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-10-15 00:00:00.000000000 Z
+ date: 2020-01-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd