logstash-input-kafka 3.0.0.beta1 → 3.0.0.beta2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 9c608112bad7e897363c4d3328b7979746f5825e
4
- data.tar.gz: 67cc809f94c52b10bad640ab916601958824ed83
3
+ metadata.gz: 22b41bd3a4cc464aa8fb16bad0a6d87cb29d9c2e
4
+ data.tar.gz: 3e6751e3ce4323a44b74f1234b5c69fcb89f841d
5
5
  SHA512:
6
- metadata.gz: 2df9dd8df82666322cbbced05a00cb930b489c640a121d57dbe947d2ecb9ba9f025c9457fba0ae085bbe0c4fb47c95cebca3727531da36b70df43d6d54001c20
7
- data.tar.gz: b3c4d7f98f3a81932279bc157665862a71ae1f3f62e6d88f0fb8a56dac5f82cb3984fa00ae904496dad4eef1528e0b885f2ff56046e671f6293956d606c02998
6
+ metadata.gz: e242a64f3b7e2f99e31c02c6cc5f7d9eecd781e1f36549ed61914176361f2f1ebac2a0827e83585733322613b6ec0d33da4799c3c056873aee675c5730c0f991
7
+ data.tar.gz: 9e8b29fa5c1412190e12ef6b0cefdf105b6e0c9aca494c54b66d1c51c98296bfaed6b91a47d38e5ac93bcccc6d2e810de1a979ac22fc03abce5bfbf76b15ebf4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,10 @@
1
+ # 3.0.0.beta2
2
+ - Added SSL/TLS connection support to Kafka
3
+
4
+ # 3.0.0.beta1
5
+ - Refactor to use new Java based consumer, bypassing jruby-kafka
6
+ - Change configuration to match Kafka's configuration. This version is not backward compatible
7
+
1
8
  ## 2.0.0
2
9
  - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
3
10
  instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
@@ -6,10 +6,11 @@ require 'logstash-input-kafka_jars.rb'
6
6
 
7
7
 # This input will read events from a Kafka topic. It uses the newly designed
8
8
  # 0.9 version of consumer API[https://cwiki.apache.org/confluence/display/KAFKA/Kafka+0.9+Consumer+Rewrite+Design]
9
- # provided by Kafka to read messages from the broker. This consumer is backward compatible and can
10
- # be used with 0.8.x brokers.
9
+ # provided by Kafka to read messages from the broker.
11
10
  #
12
- # The Logstash consumer handles group management and uses the default Kafka offset management
11
+ # NOTE: This consumer is not backward compatible with 0.8.x brokers and needs a 0.9 broker.
12
+ #
13
+ # The Logstash Kafka consumer handles group management and uses the default offset management
13
14
  # strategy using Kafka topics.
14
15
  #
15
16
  # Logstash instances by default form a single logical group to subscribe to Kafka topics
@@ -25,10 +26,13 @@ require 'logstash-input-kafka_jars.rb'
25
26
  #
26
27
  # Kafka consumer configuration: http://kafka.apache.org/documentation.html#consumerconfigs
27
28
  #
29
+ # This version also adds support for SSL/TLS security connection to Kafka. By default SSL is
30
+ # disabled but can be turned on as needed.
31
+ #
28
32
  class LogStash::Inputs::Kafka < LogStash::Inputs::Base
29
33
  config_name 'kafka'
30
34
 
31
- default :codec, 'plain'
35
+ default :codec, 'json'
32
36
 
33
37
  # The frequency in milliseconds that the consumer offsets are committed to Kafka.
34
38
  config :auto_commit_interval_ms, :validate => :string, :default => "10"
@@ -109,7 +113,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
109
113
  config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
110
114
  # Ideally you should have as many threads as the number of partitions for a perfect
111
115
  # balance — more threads than partitions means that some threads will be idle
112
- config :num_threads, :validate => :number, :default => 1
116
+ config :consumer_threads, :validate => :number, :default => 1
113
117
  # A list of topics to subscribe to.
114
118
  config :topics, :validate => :array, :required => true
115
119
  # Time kafka consumer will wait to receive new messages from topics
@@ -134,7 +138,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
134
138
 
135
139
  public
136
140
  def run(logstash_queue)
137
- @runner_consumers = num_threads.times.map { || create_consumer }
141
+ @runner_consumers = consumer_threads.times.map { || create_consumer }
138
142
  @runner_threads = @runner_consumers.map { |consumer| thread_runner(logstash_queue, consumer) }
139
143
  @runner_threads.each { |t| t.join }
140
144
  end # def run
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-input-kafka'
4
- s.version = '3.0.0.beta1'
4
+ s.version = '3.0.0.beta2'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
7
7
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 3.0.0.beta1
4
+ version: 3.0.0.beta2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elasticsearch
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-01-14 00:00:00.000000000 Z
11
+ date: 2016-02-04 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement