logstash-kafka 0.8.1-java → 0.8.2-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: 1e358d058f86eef4b5fc6fc9f270972593722a11
-   data.tar.gz: ea695e3d5437794f85b30bd27eb0b6ea049360bf
+   metadata.gz: 2e632483abd563894f4eb6aa602d38ac66aa5371
+   data.tar.gz: b17b598b5845a8007ecd45cbf9feb704692b9d1b
  SHA512:
-   metadata.gz: 36e0046ed5591ca0664fbb0b68cefe7b7cee969b1b2ce16894a3f00d30205c79dabc788aa45bc7c45e5b6f95e2f9bebc6a781c25fd89c14597bba74d960b2058
-   data.tar.gz: b87e7e46523bbb18df4a42a1183dfc066145f0a67925b094a74e69d692b758e579e31f62778e0f3894ecddd0076d8a06fc015ce9eb8c8ca760a9f44d33a21708
+   metadata.gz: 62ad722dda07f70aea2aae515da1789f171547a5f87ff3e8d87697aae2d08749fe2fd41c0671a1399cd829a5c069091c15c1cf01377a88363cf26b946a68608e
+   data.tar.gz: 2eb29abf8aa79a5d3fd90b1393b72a970d105184793560079b4d556eb54caaeda796b6fd67ece6eddad45036f466a61100b75b86057a64bffe7b4e6caa84f9f3
spec/inputs/kafka_spec.rb ADDED
@@ -0,0 +1,101 @@
+ require "test_utils"
+ require "logstash/inputs/kafka"
+ require 'jruby-kafka'
+
+ class LogStash::Inputs::TestKafka < LogStash::Inputs::Kafka
+   private
+   def queue_event(msg, output_queue)
+     super(msg, output_queue)
+     # need to raise exception here to stop the infinite loop
+     raise LogStash::ShutdownSignal
+   end
+ end
+
+ class TestMessageAndMetadata
+   attr_reader :topic, :partition, :key, :message
+   def initialize(topic, partition, key, message)
+     @topic = topic
+     @partition = partition
+     @key = key
+     @message = message
+   end
+ end
+
+
+ class TestKafkaGroup < Kafka::Group
+   def run(a_num_threads, a_queue)
+     blah = TestMessageAndMetadata.new(@topic, 0, nil, 'Kafka message')
+     a_queue << blah
+   end
+ end
+
+ describe 'inputs/kafka' do
+   let (:kafka_config) {{'topic_id' => 'test'}}
+   let (:empty_config) {{}}
+   let (:bad_kafka_config) {{'topic_id' => 'test', 'white_list' => 'other_topic'}}
+   let (:white_list_kafka_config) {{'white_list' => 'other_topic'}}
+   let (:decorated_kafka_config) {{'topic_id' => 'test', 'decorate_events' => true}}
+
+   it "should register" do
+     input = LogStash::Plugin.lookup("input", "kafka").new(kafka_config)
+     expect {input.register}.to_not raise_error
+   end
+
+   it "should register with whitelist" do
+     input = LogStash::Plugin.lookup("input", "kafka").new(white_list_kafka_config)
+     expect {input.register}.to_not raise_error
+   end
+
+   it "should fail with multiple topic configs" do
+     input = LogStash::Plugin.lookup("input", "kafka").new(bad_kafka_config)
+     expect {input.register}.to raise_error
+   end
+
+   it "should fail without topic configs" do
+     input = LogStash::Plugin.lookup("input", "kafka").new(empty_config)
+     expect {input.register}.to raise_error
+   end
+
+   it 'should populate kafka config with default values' do
+     kafka = LogStash::Inputs::TestKafka.new(kafka_config)
+     insist {kafka.zk_connect} == 'localhost:2181'
+     insist {kafka.topic_id} == 'test'
+     insist {kafka.group_id} == 'logstash'
+     !insist { kafka.reset_beginning }
+   end
+
+   it 'should retrieve event from kafka' do
+     kafka = LogStash::Inputs::TestKafka.new(kafka_config)
+     expect(kafka).to receive(:create_consumer_group) do |options|
+       TestKafkaGroup.new(options)
+     end
+     kafka.register
+
+     logstash_queue = Queue.new
+     kafka.run logstash_queue
+     e = logstash_queue.pop
+     insist { e['message'] } == 'Kafka message'
+     # no metadata by default
+     insist { e['kafka'] } == nil
+   end
+
+   it 'should retrieve a decorated event from kafka' do
+     kafka = LogStash::Inputs::TestKafka.new(decorated_kafka_config)
+     expect(kafka).to receive(:create_consumer_group) do |options|
+       TestKafkaGroup.new(options)
+     end
+     kafka.register
+
+     logstash_queue = Queue.new
+     kafka.run logstash_queue
+     e = logstash_queue.pop
+     insist { e['message'] } == 'Kafka message'
+     # metadata should be populated when decorate_events is enabled
+     insist { e['kafka']['topic'] } == 'test'
+     insist { e['kafka']['consumer_group'] } == 'logstash'
+     insist { e['kafka']['msg_size'] } == 13
+     insist { e['kafka']['partition'] } == 0
+     insist { e['kafka']['key'] } == nil
+   end
+
+ end
spec/outputs/kafka_spec.rb ADDED
@@ -0,0 +1,37 @@
+ require "test_utils"
+ require 'logstash/outputs/kafka'
+ require 'jruby-kafka'
+ require 'json'
+
+ describe "outputs/kafka" do
+   let (:simple_kafka_config) {{'topic_id' => 'test'}}
+   let (:event) { LogStash::Event.new({'message' => 'hello', 'topic_name' => 'my_topic', 'host' => '172.0.0.1',
+                                       '@timestamp' => "2011-08-18T13:00:14.000Z"}) }
+
+   context 'when initializing' do
+     it "should register" do
+       output = LogStash::Plugin.lookup("output", "kafka").new(simple_kafka_config)
+       expect {output.register}.to_not raise_error
+     end
+
+     it 'should populate kafka config with default values' do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       insist {kafka.broker_list} == 'localhost:9092'
+       insist {kafka.topic_id} == 'test'
+       insist {kafka.compression_codec} == 'none'
+       insist {kafka.serializer_class} == 'kafka.serializer.StringEncoder'
+       insist {kafka.partitioner_class} == 'kafka.producer.DefaultPartitioner'
+       insist {kafka.producer_type} == 'sync'
+     end
+   end
+
+   context 'when outputting messages' do
+     it 'should send logstash event to kafka broker' do
+       expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
+         .with(simple_kafka_config['topic_id'], nil, event.to_hash.to_json)
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       kafka.register
+       kafka.receive(event)
+     end
+   end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: logstash-kafka
  version: !ruby/object:Gem::Version
-   version: 0.8.1
+   version: 0.8.2
  platform: java
  authors:
  - Joseph Lawson
@@ -33,6 +33,8 @@ extra_rdoc_files: []
  files:
  - lib/logstash/inputs/kafka.rb
  - lib/logstash/outputs/kafka.rb
+ - spec/inputs/kafka_spec.rb
+ - spec/outputs/kafka_spec.rb
  homepage: https://github.com/joekiller/logstash-kafka
  licenses:
  - Apache 2.0
@@ -57,4 +59,6 @@ rubygems_version: 2.4.6
  signing_key:
  specification_version: 4
  summary: Provides input and output plugin functionality for Logstash 1.4.X
- test_files: []
+ test_files:
+ - spec/inputs/kafka_spec.rb
+ - spec/outputs/kafka_spec.rb