logstash-input-kafka 0.1.3 → 0.1.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: e920e4baec6da1b94c89c5ad1b692d1e383c664c
-  data.tar.gz: ecb86971196992a7e46a8a728ccc46110adb6233
+  metadata.gz: 05f5cd7c72a186b093819f66e218371c7df835db
+  data.tar.gz: 6332d4de7ee712dbc73d5e07d0e399126663ace4
 SHA512:
-  metadata.gz: a8842c0c7219606784c4697719714b9dd330af85c9b8d20cad5a84df594b8343c5f2c4791edc0cb66df4dbf481f7da18051b28f8a5a09d5191e665a255c5dd61
-  data.tar.gz: a81cbf3fab3914f4ff1949568821301e63e5739009383ac7618ae1f78221fd5b2848e7fc61fcf09651288ed043cb1ac5733dcb2e90ff4fc98a6ab50bc8caf9ff
+  metadata.gz: 20abe7a55e1c94457d183047216dc3e6038828a8df9a97e35835780e95a62ca558e219f6c94c4bbe6ad18299a1612b75177367cc787fbeb9abae7dbe49f6ca0f
+  data.tar.gz: ff2d4349cb26dfea4df53d5e31d9e9236fbae62b14b063a12bceb5442327aeaadc597ac458de7a52f7a514d0638135e1620b99469afcda08baa85f08420f4341
logstash-input-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-kafka'
-  s.version = '0.1.3'
+  s.version = '0.1.5'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
spec/inputs/kafka_spec.rb CHANGED
@@ -1,8 +1,19 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
+require "logstash/inputs/kafka"
+
+class LogStash::Inputs::TestKafka < LogStash::Inputs::Kafka
+  milestone 1
+  private
+  def queue_event(msg, output_queue)
+    super(msg, output_queue)
+    # need to raise exception here to stop the infinite loop
+    raise LogStash::ShutdownSignal
+  end
+end
 
-describe 'inputs/kafka' do
 
+describe 'inputs/kafka' do
   let (:kafka_config) {{'topic_id' => 'test'}}
 
   it "should register" do
@@ -11,7 +22,7 @@ describe 'inputs/kafka' do
   end
 
   it 'should populate kafka config with default values' do
-    kafka = LogStash::Inputs::Kafka.new(kafka_config)
+    kafka = LogStash::Inputs::TestKafka.new(kafka_config)
     insist {kafka.zk_connect} == 'localhost:2181'
     insist {kafka.topic_id} == 'test'
     insist {kafka.group_id} == 'logstash'
@@ -19,25 +30,11 @@ describe 'inputs/kafka' do
   end
 
   it 'should retrieve event from kafka' do
-    # Extend class to control behavior
-    class LogStash::Inputs::TestKafka < LogStash::Inputs::Kafka
-      milestone 1
-      private
-      def queue_event(msg, output_queue)
-        super(msg, output_queue)
-        # need to raise exception here to stop the infinite loop
-        raise LogStash::ShutdownSignal
-      end
-    end
-
     kafka = LogStash::Inputs::TestKafka.new(kafka_config)
     kafka.register
 
-    class Kafka::Group
-      public
-      def run(a_num_threads, a_queue)
-        a_queue << 'Kafka message'
-      end
+    expect_any_instance_of(Kafka::Group).to receive(:run) do |a_num_threads, a_queue|
+      a_queue << 'Kafka message'
     end
 
     logstash_queue = Queue.new
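
Side note on the spec refactor above: the TestKafka subclass is lifted to the top of the file, and the inline monkey-patch of Kafka::Group#run is replaced by an RSpec any-instance stub, so the example runs without a broker. A rough sketch of how the refactored example fits together follows (assumptions: jruby-kafka's Kafka::Group, the insist matcher from logstash-devutils, and the standard run(queue) plumbing of a Logstash input; the final assertion is illustrative, since the diff is cut off after logstash_queue is created).

# Sketch only, not the verbatim spec file.
describe 'inputs/kafka' do
  let (:kafka_config) {{'topic_id' => 'test'}}

  it 'should retrieve event from kafka' do
    kafka = LogStash::Inputs::TestKafka.new(kafka_config)
    kafka.register

    # Stub Kafka::Group#run: instead of consuming from a broker, push one
    # canned message onto the queue the consumer thread would normally fill.
    expect_any_instance_of(Kafka::Group).to receive(:run) do |a_num_threads, a_queue|
      a_queue << 'Kafka message'
    end

    logstash_queue = Queue.new
    kafka.run(logstash_queue)  # TestKafka#queue_event raises ShutdownSignal after one event
    insist { logstash_queue.pop['message'] } == 'Kafka message'  # illustrative assertion
  end
end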
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 0.1.3
+  version: 0.1.5
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-19 00:00:00.000000000 Z
+date: 2014-12-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash
@@ -138,7 +138,7 @@ requirements:
 - jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'
 - jar 'log4j:log4j', '1.2.14'
 rubyforge_project:
-rubygems_version: 2.2.2
+rubygems_version: 2.1.9
 signing_key:
 specification_version: 4
 summary: This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker
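
A note on the "requirements:" entries near the end of the metadata: they are the gemspec's free-form requirement strings, which this plugin uses to record the Java jars it needs. A minimal sketch of how such entries are declared is below (values copied from the metadata above; everything else is an assumption, not the actual gemspec).

# Minimal sketch of a gemspec carrying jar requirements; the real
# logstash-input-kafka.gemspec has more fields and dependencies.
Gem::Specification.new do |s|
  s.name    = 'logstash-input-kafka'
  s.version = '0.1.5'
  s.summary = 'Kafka input plugin for Logstash'
  s.authors = ['Elasticsearch']
  s.files   = Dir['lib/**/*.rb']
  # Free-form requirement strings end up in the gem metadata's "requirements:" list.
  s.requirements << "jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'"
  s.requirements << "jar 'log4j:log4j', '1.2.14'"
end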