logstash-input-kafka 0.1.0 → 0.1.2

checksums.yaml CHANGED
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-    N2M1OTFjOGRmNTgyNWEyNjg3NTM0NDhkM2Y5NzNhNDUxYmQ5NjY1OQ==
+    MGFmYTZjOTlmYzU5ZWIxMjVhOWQwZWVlOGQ5NjgzZTYxNzk1YzQ1Mg==
   data.tar.gz: !binary |-
-    NGYzOTRhZmYyMDExYzc1MTAxMGI1ODM3ODc4OWYwZGUyODI5NGY0Yw==
+    YjZiMmRkY2U1ZjE3MWZjNDg3NDU1ZWFmZTA4MDQyNjczZmQ4Y2UyOQ==
 SHA512:
   metadata.gz: !binary |-
-    OGJhZTZlYWZmOTc1OWYxYzg1YWVjNzYzMzJjMzQ0MTgyODczYjlhODUyZDhl
-    Zjc2YWY5NmY4Y2NmNmM5MDJjNTI2ZmU0ZGE2MTIwMGNhZTk1MmM4NGMwZTY4
-    MzQ4MjE2N2Q4NWExYmIxODY3MzE4ZDk4YjdkNDU5ZGY0MTU2NWI=
+    YzkzMWU4YzU4ZTYyZDQzY2U0NTk3MmJiYTVhZGUyMDlkY2Q2ZDI2YzdmYjhh
+    MmZjMTFhYjIzZWZkYTA4YjNlMWI4YzI4ZGFjN2MxNmM4NDkwZTI0ZTA0MGVm
+    ZGU2ZTE3NGRhNGQ5ZTE3OGNhODcwYmQ1NjA1NGZlOWViMjI0MWY=
   data.tar.gz: !binary |-
-    YzY1Mjg1NjM2MmUwMTc5MzNlNWQyODRiNmQzMTEyYTQ2MDU2N2ZlZmZmZTMx
-    ZGRiNjRlNmRjZWU3MjcxZmE3NDFiMzAzOTUxOTNkNTQyYTljNjNhZTFhNjY4
-    ZjhiNzVlMmIzZmNmMzZmNzQxZTY5ZTY3MjY3OTFjZmY3NWU0ODM=
+    NzRkMTI1ZWM1ZDU4NTE2MGY5N2ZiMWFiNmY5MTU4YWQxNDMzZjU0ZjY4N2I3
+    Njk3ODY3YjFlZDkwMWJmMWY2NTg2NmI0MTViYjc5NzAxMmIxMmU5ODg4ZmRi
+    MWZiMDBmMTIwNDZlYTkyYzY3YjllOTFkYjBiNmY5YTY3YmE2NjQ=
lib/logstash/inputs/kafka.rb CHANGED
@@ -24,13 +24,13 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
   # Specifies the ZooKeeper connection string in the form hostname:port where host and port are
   # the host and port of a ZooKeeper server. You can also specify multiple hosts in the form
-  # hostname1:port1,hostname2:port2,hostname3:port3.
+  # `hostname1:port1,hostname2:port2,hostname3:port3`.
   #
   # The server may also have a ZooKeeper chroot path as part of it's ZooKeeper connection string
   # which puts its data under some path in the global ZooKeeper namespace. If so the consumer
   # should use the same chroot path in its connection string. For example to give a chroot path of
-  # /chroot/path you would give the connection string as
-  # hostname1:port1,hostname2:port2,hostname3:port3/chroot/path.
+  # `/chroot/path` you would give the connection string as
+  # `hostname1:port1,hostname2:port2,hostname3:port3/chroot/path`.
   config :zk_connect, :validate => :string, :default => 'localhost:2181'
   # A string that uniquely identifies the group of consumer processes to which this consumer
   # belongs. By setting the same group id multiple processes indicate that they are all part of
@@ -39,12 +39,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # The topic to consume messages from
   config :topic_id, :validate => :string, :required => true
   # Specify whether to jump to beginning of the queue when there is no initial offset in
-  # ZooKeeper, or if an offset is out of range. If this is false, messages are consumed
+  # ZooKeeper, or if an offset is out of range. If this is `false`, messages are consumed
   # from the latest offset
   #
-  # If reset_beginning is true, the consumer will check ZooKeeper to see if any other group members
+  # If `reset_beginning` is true, the consumer will check ZooKeeper to see if any other group members
   # are present and active. If not, the consumer deletes any offset information in the ZooKeeper
-  # and starts at the smallest offset. If other group members are present reset_beginning will not
+  # and starts at the smallest offset. If other group members are present `reset_beginning` will not
   # work and the consumer threads will rejoin the consumer group.
   config :reset_beginning, :validate => :boolean, :default => false
   # Number of threads to read from the partitions. Ideally you should have as many threads as the
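The options documented above can be exercised directly. A minimal sketch, constructing the input the same way the plugin's own spec does, with an explicit chroot-style `zk_connect`; the hosts, chroot path, and topic name are placeholders, not values from this diff:

    # Sketch only: placeholder hosts, chroot path, and topic name.
    require 'logstash/inputs/kafka'

    kafka = LogStash::Inputs::Kafka.new(
      'topic_id'        => 'logs',
      'zk_connect'      => 'zk1:2181,zk2:2181,zk3:2181/chroot/path',
      'reset_beginning' => false  # no stored offset => consume from the latest offset
    )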
logstash-input-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-kafka'
-  s.version = '0.1.0'
+  s.version = '0.1.2'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
@@ -20,12 +20,15 @@ Gem::Specification.new do |s|
   s.metadata = { 'logstash_plugin' => 'true', 'group' => 'input'}
 
   # Jar dependencies
-  s.requirements << "jar 'org.apache.kafka:kafka_2.10', '0.8.1.1'"
+  s.requirements << "jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'"
+  s.requirements << "jar 'log4j:log4j', '1.2.14'"
 
   # Gem dependencies
   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
-  s.add_runtime_dependency 'jar-dependencies', ['~> 0.1.0']
+  s.add_runtime_dependency 'logstash-codec-json'
+  s.add_runtime_dependency 'logstash-codec-plain'
 
+  s.add_runtime_dependency 'jar-dependencies', ['~> 0.1.0']
   s.add_runtime_dependency 'jruby-kafka', ['>=0.2.1']
 
 end
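Each jar entry in `s.requirements` above is a plain string of the form `jar 'group:artifact', 'version'`, which the jar-dependencies gem scans to fetch the matching Maven artifacts when the gem is vendored. A rough sketch of what one such string encodes (illustrative parsing only, not jar-dependencies' actual API):

    # Sketch only: extract the Maven coordinates from a jar requirement string.
    req = "jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'"
    coordinate, version = req.scan(/'([^']+)'/).flatten
    group_id, artifact_id = coordinate.split(':')
    puts "#{group_id}:#{artifact_id}:#{version}"
    # => org.apache.kafka:kafka_2.9.2:0.8.1.1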
spec/inputs/kafka.rb → spec/inputs/kafka_spec.rb RENAMED
@@ -1,15 +1,14 @@
 # encoding: utf-8
+require 'spec_helper'
 
-require 'rspec'
-require 'insist'
-require 'logstash/namespace'
-require 'logstash/inputs/kafka'
-require 'logstash/errors'
+describe 'inputs/kafka' do
 
-describe LogStash::Inputs::Kafka do
-  extend LogStash::RSpec
+  let (:kafka_config) {{'topic_id' => 'test'}}
 
-  let (:kafka_config) {{:topic_id => 'test'}}
+  it "should register" do
+    input = LogStash::Plugin.lookup("input", "kafka").new(kafka_config)
+    expect {input.register}.to_not raise_error
+  end
 
   it 'should populate kafka config with default values' do
     kafka = LogStash::Inputs::Kafka.new(kafka_config)
@@ -19,11 +18,6 @@ describe LogStash::Inputs::Kafka do
     !insist { kafka.reset_beginning }
   end
 
-  it 'should register and load kafka jars without errors' do
-    kafka = LogStash::Inputs::Kafka.new(kafka_config)
-    kafka.register
-  end
-
   it 'should retrieve event from kafka' do
     # Extend class to control behavior
     class LogStash::Inputs::TestKafka < LogStash::Inputs::Kafka
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.2
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-05 00:00:00.000000000 Z
+date: 2014-11-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash
@@ -30,6 +30,34 @@ dependencies:
     - - <
       - !ruby/object:Gem::Version
         version: 2.0.0
+- !ruby/object:Gem::Dependency
+  name: logstash-codec-json
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: logstash-codec-plain
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: jar-dependencies
   requirement: !ruby/object:Gem::Requirement
@@ -74,7 +102,7 @@ files:
 - logstash-input-kafka.gemspec
 - rakelib/publish.rake
 - rakelib/vendor.rake
-- spec/inputs/kafka.rb
+- spec/inputs/kafka_spec.rb
 homepage: http://logstash.net/
 licenses:
 - Apache License (2.0)
@@ -96,7 +124,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements:
-- jar 'org.apache.kafka:kafka_2.10', '0.8.1.1'
+- jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'
+- jar 'log4j:log4j', '1.2.14'
 rubyforge_project:
 rubygems_version: 2.4.1
 signing_key:
@@ -104,4 +133,4 @@ specification_version: 4
 summary: This input will read events from a Kafka topic. It uses the high level consumer
   API provided by Kafka to read messages from the broker
 test_files:
-- spec/inputs/kafka.rb
+- spec/inputs/kafka_spec.rb