logstash-output-kafka 7.0.3 → 7.0.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: a5ccedfbfedaaf72bad1ec4696411dd1859e1922
-  data.tar.gz: c4180be5c8a6e66d49b421ebf46e962948c03c35
+  metadata.gz: 773d049a3eb04a9cfdfef72b6e36c260461de0a9
+  data.tar.gz: a717066ae5f896259fd073ebb74ee2dad270b14b
 SHA512:
-  metadata.gz: 3fe5acfc009c895074de3d3c84fc188c5bbaba6297f5a0b1c2286fbd25da7688f6f56ad28d9f60c5c4d6ac01ba0bafccb4e5bdccc5b960e8d0c934c21a5312b9
-  data.tar.gz: 4916c1b9c9aac493d83e63a5448b3776603ec2f4dbcaca58e37850e8a19a8e8a0acb8de7e68b6c61a6fc5304c8ad2a92b0b6dce1f48fff8e1cf4d3da3e3abdf1
+  metadata.gz: f1986831b306f4744e625980988fc5e3d936c93aaee1e7ee98e99e92f95d79ea7f2e527974a9c2d73aba5fec53ed84426a219ebca761f85c1cc729cf9a7897a4
+  data.tar.gz: 573738c79bbb910037c942809ac71f4f1ea4b28ce5e49f5ec724db758150eba9f5762462d23bebb9f73d901caabf17b4a32cb9a922e00fb6e97c29029bd3aa63
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 7.0.4
+  - Bugfix: Fixed a bug that broke using `org.apache.kafka.common.serialization.ByteArraySerializer` as the `value_serializer`
+
 ## 7.0.3
   - Bugfix: Sends are now retried until successful. Previously, failed transmissions to Kafka
     could have been lost by the KafkaProducer library. Now we verify transmission explicitly.
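For context, the new entry fixes configurations that set `value_serializer` to the Kafka ByteArraySerializer. A minimal sketch of such a setup, assuming a JRuby session with logstash-core and this plugin on the load path (the option values are illustrative, not from the source):

    # Hypothetical reproduction of the setup that 7.0.4 fixes.
    require 'logstash/outputs/kafka'

    output = LogStash::Outputs::Kafka.new(
      'topic_id'          => 'logs',
      'bootstrap_servers' => 'localhost:9092',
      'value_serializer'  => 'org.apache.kafka.common.serialization.ByteArraySerializer'
    )
    output.register
    # Before 7.0.4, events written through this output broke at send time: the
    # codec handed the producer a value the byte-array serializer could not
    # serialize. The fix below converts the payload with to_java_bytes first.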
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -3,6 +3,8 @@ require 'logstash/outputs/base'
 require 'java'
 require 'logstash-output-kafka_jars.rb'
 
+java_import org.apache.kafka.clients.producer.ProducerRecord
+
 # Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
 # the broker.
 #
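Two JRuby mechanics carry this change: `java_import` binds a Java class to a short Ruby constant (so the code below can write `ProducerRecord.new(...)` without the package prefix), and `String#to_java_bytes` converts a Ruby String into a Java `byte[]`. A quick illustration, runnable in any JRuby session:

    require 'java'

    java_import java.util.ArrayList        # ArrayList now resolves without the package path
    list = ArrayList.new
    list.add('x')

    bytes = 'hello'.to_java_bytes          # Ruby String -> Java byte[]
    puts bytes.length                      # => 5
    puts String.from_java_bytes(bytes)     # Java byte[] -> Ruby String; prints "hello"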
@@ -188,22 +190,20 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
 
     @producer = create_producer
-    @codec.on_event do |event, data|
-      begin
-        if @message_key.nil?
-          record = org.apache.kafka.clients.producer.ProducerRecord.new(event.sprintf(@topic_id), data)
-        else
-          record = org.apache.kafka.clients.producer.ProducerRecord.new(event.sprintf(@topic_id), event.sprintf(@message_key), data)
-        end
-        prepare(record)
-      rescue LogStash::ShutdownSignal
-        @logger.debug('Kafka producer got shutdown signal')
-      rescue => e
-        @logger.warn('kafka producer threw exception, restarting',
-                     :exception => e)
+    if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
+      @codec.on_event do |event, data|
+        write_to_kafka(event, data)
+      end
+    elsif value_serializer == 'org.apache.kafka.common.serialization.ByteArraySerializer'
+      @codec.on_event do |event, data|
+        write_to_kafka(event, data.to_java_bytes)
       end
+    else
+      raise ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
     end
-  end # def register
+  end
+
+  # def register
 
   def prepare(record)
     # This output is threadsafe, so we need to keep a batch per thread.
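The comment on `prepare` points at the design that makes this output threadsafe: each pipeline worker thread accumulates records into its own batch, so the send path needs no locking. A minimal sketch of that pattern using concurrent-ruby (the names are illustrative, not the plugin's exact internals):

    require 'concurrent'

    # The map itself is safe to mutate from many threads; each thread only ever
    # reads and writes the entry keyed by its own Thread object.
    thread_batch_map = Concurrent::Hash.new

    worker = Thread.new do
      batch = (thread_batch_map[Thread.current] ||= [])
      batch << 'record-1'                  # no lock needed: this batch is thread-local
    end
    worker.join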
@@ -293,6 +293,21 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
   end
 
   private
+
+  def write_to_kafka(event, serialized_data)
+    if @message_key.nil?
+      record = ProducerRecord.new(event.sprintf(@topic_id), serialized_data)
+    else
+      record = ProducerRecord.new(event.sprintf(@topic_id), event.sprintf(@message_key), serialized_data)
+    end
+    prepare(record)
+  rescue LogStash::ShutdownSignal
+    @logger.debug('Kafka producer got shutdown signal')
+  rescue => e
+    @logger.warn('kafka producer threw exception, restarting',
+                 :exception => e)
+  end
+
   def create_producer
     begin
       props = java.util.Properties.new
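`write_to_kafka` uses the two standard `ProducerRecord` constructors, and Logstash's `Event#sprintf` lets `topic_id` and `message_key` reference event fields. A sketch, assuming a JRuby session where logstash-core, the Kafka client jars, and the `java_import` from the hunk above are already loaded:

    event = LogStash::Event.new('type' => 'nginx', 'message' => 'hello')
    topic = event.sprintf('logs-%{type}')                  # => "logs-nginx"
    payload = event.to_s

    ProducerRecord.new(topic, payload)                     # unkeyed: the partitioner picks the partition
    ProducerRecord.new(topic, event.sprintf('%{type}'), payload)  # keyed: partition chosen by key hash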
data/logstash-output-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name            = 'logstash-output-kafka'
-  s.version         = '7.0.3'
+  s.version         = '7.0.4'
   s.licenses        = ['Apache License (2.0)']
   s.summary         = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
   s.description     = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
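As the description notes, the gem is not a stand-alone program; it is installed into a Logstash installation with `$LS_HOME/bin/logstash-plugin install logstash-output-kafka`.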
data/spec/integration/outputs/kafka_spec.rb CHANGED
@@ -13,7 +13,7 @@ describe "outputs/kafka", :integration => true do
   let(:event) { LogStash::Event.new({'message' => '183.60.215.50 - - [11/Sep/2014:22:00:00 +0000] "GET /scripts/netcat-webserver HTTP/1.1" 200 182 "-" "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"', '@timestamp' => LogStash::Timestamp.at(0) }) }
 
 
-  context 'when outputting messages' do
+  context 'when outputting messages serialized as String' do
     let(:test_topic) { 'topic1' }
     let(:num_events) { 3 }
     let(:consumer) do
@@ -38,6 +38,36 @@ describe "outputs/kafka", :integration => true do
 
   end
 
+  context 'when outputting messages serialized as Byte Array' do
+    let(:test_topic) { 'topic1b' }
+    let(:num_events) { 3 }
+    let(:consumer) do
+      Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                      test_topic, 0, :earliest_offset)
+    end
+    subject do
+      consumer.fetch
+    end
+
+    before :each do
+      config = base_config.merge(
+        {
+          "topic_id" => test_topic,
+          "value_serializer" => 'org.apache.kafka.common.serialization.ByteArraySerializer'
+        }
+      )
+      load_kafka_data(config)
+    end
+
+    it 'should have data integrity' do
+      expect(subject.size).to eq(num_events)
+      subject.each do |m|
+        expect(m.value).to eq(event.to_s)
+      end
+    end
+
+  end
+
   context 'when setting message_key' do
     let(:num_events) { 10 }
     let(:test_topic) { 'topic2' }
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-kafka
 version: !ruby/object:Gem::Version
-  version: 7.0.3
+  version: 7.0.4
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-10-09 00:00:00.000000000 Z
+date: 2017-10-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
  requirement: !ruby/object:Gem::Requirement