logstash-integration-kafka 10.0.0-java → 10.4.0-java

@@ -34,6 +34,68 @@ describe LogStash::Inputs::Kafka do
   subject { LogStash::Inputs::Kafka.new(config) }
 
   it "should register" do
-    expect {subject.register}.to_not raise_error
+    expect { subject.register }.to_not raise_error
+  end
+
+  context 'with client_rack' do
+    let(:config) { super.merge('client_rack' => 'EU-R1') }
+
+    it "sets broker rack parameter" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('client.rack' => 'EU-R1')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-0') ).to be kafka_client
+    end
+  end
+
+  context 'string integer config' do
+    let(:config) { super.merge('session_timeout_ms' => '25000', 'max_poll_interval_ms' => '345000') }
+
+    it "sets integer values" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('session.timeout.ms' => '25000', 'max.poll.interval.ms' => '345000')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-1') ).to be kafka_client
+    end
+  end
+
+  context 'integer config' do
+    let(:config) { super.merge('session_timeout_ms' => 25200, 'max_poll_interval_ms' => 123_000) }
+
+    it "sets integer values" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('session.timeout.ms' => '25200', 'max.poll.interval.ms' => '123000')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-2') ).to be kafka_client
+    end
+  end
+
+  context 'string boolean config' do
+    let(:config) { super.merge('enable_auto_commit' => 'false', 'check_crcs' => 'true') }
+
+    it "sets parameters" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('enable.auto.commit' => 'false', 'check.crcs' => 'true')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-3') ).to be kafka_client
+      expect( subject.enable_auto_commit ).to be false
+    end
+  end
+
+  context 'boolean config' do
+    let(:config) { super.merge('enable_auto_commit' => true, 'check_crcs' => false) }
+
+    it "sets parameters" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('enable.auto.commit' => 'true', 'check.crcs' => 'false')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-4') ).to be kafka_client
+      expect( subject.enable_auto_commit ).to be true
+    end
   end
 end
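
The new options exercised above are passed straight through to the Kafka consumer: client_rack becomes client.rack, session_timeout_ms becomes session.timeout.ms, and string, integer, and boolean forms are all coerced to what the client expects. A minimal sketch of how these options might appear in a pipeline config; the broker address and topic are illustrative placeholders, not part of this diff:

  input {
    kafka {
      bootstrap_servers => "localhost:9092"   # placeholder broker
      topics => ["example-topic"]             # placeholder topic
      client_rack => "EU-R1"                  # forwarded as client.rack
      session_timeout_ms => 25000             # integer or string, coerced per the specs above
      enable_auto_commit => false             # boolean or string, coerced per the specs above
    }
  }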
@@ -16,9 +16,9 @@ describe "outputs/kafka" do
 
     it 'should populate kafka config with default values' do
       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
-      insist {kafka.bootstrap_servers} == 'localhost:9092'
-      insist {kafka.topic_id} == 'test'
-      insist {kafka.key_serializer} == 'org.apache.kafka.common.serialization.StringSerializer'
+      expect(kafka.bootstrap_servers).to eql 'localhost:9092'
+      expect(kafka.topic_id).to eql 'test'
+      expect(kafka.key_serializer).to eql 'org.apache.kafka.common.serialization.StringSerializer'
     end
   end
 
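A note on the change above: the insist { ... } == ... assertions come from the insist matcher gem historically used in Logstash specs; the replacements are the equivalent built-in RSpec expectations, so insist { kafka.topic_id } == 'test' and expect(kafka.topic_id).to eql 'test' verify the same value.
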
@@ -55,7 +55,7 @@ describe "outputs/kafka" do
       expect { kafka.register }.to raise_error(LogStash::ConfigurationError, /ssl_truststore_location must be set when SSL is enabled/)
     end
   end
-
+
   context "when KafkaProducer#send() raises an exception" do
     let(:failcount) { (rand * 10).to_i }
     let(:sendcount) { failcount + 1 }
@@ -97,7 +97,7 @@ describe "outputs/kafka" do
     let(:sendcount) { failcount + 1 }
 
     it "should retry until successful" do
-      count = 0;
+      count = 0
 
       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
         .exactly(sendcount).times
@@ -189,4 +189,25 @@ describe "outputs/kafka" do
       end
     end
   end
+
+  context 'when ssl endpoint identification disabled' do
+
+    let(:config) do
+      simple_kafka_config.merge('ssl_endpoint_identification_algorithm' => '', 'security_protocol' => 'SSL')
+    end
+
+    subject { LogStash::Outputs::Kafka.new(config) }
+
+    it 'does not configure truststore' do
+      expect(org.apache.kafka.clients.producer.KafkaProducer).
+          to receive(:new).with(hash_excluding('ssl.truststore.location' => anything))
+      subject.register
+    end
+
+    it 'sets empty ssl.endpoint.identification.algorithm' do
+      expect(org.apache.kafka.clients.producer.KafkaProducer).
+          to receive(:new).with(hash_including('ssl.endpoint.identification.algorithm' => ''))
+      subject.register
+    end
+  end
 end
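
Setting ssl_endpoint_identification_algorithm to an empty string turns off hostname verification of the broker certificate, and the specs above pin down both behaviors: the empty value is passed through to the producer, and no truststore location is forced. A sketch of the corresponding output configuration, with placeholder host and topic:

  output {
    kafka {
      bootstrap_servers => "broker.internal:9093"    # placeholder broker
      topic_id => "example-topic"                    # placeholder topic
      security_protocol => "SSL"
      ssl_endpoint_identification_algorithm => ""    # empty string disables hostname verification
    }
  }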
metadata CHANGED
@@ -1,19 +1,19 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 10.0.0
+  version: 10.4.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-10-15 00:00:00.000000000 Z
+date: 2020-07-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: 0.3.12
   name: jar-dependencies
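
The requirement change above relaxes the jar-dependencies pin: in RubyGems syntax, "~> 0.3.12" is a pessimistic constraint (equivalent to >= 0.3.12 and < 0.4.0), while ">= 0.3.12" accepts any later version.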
@@ -21,7 +21,7 @@ dependencies:
   type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: 0.3.12
 - !ruby/object:Gem::Dependency
@@ -140,7 +140,7 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  name: poseidon
+  name: ruby-kafka
   prerelease: false
   type: :development
   version_requirements: !ruby/object:Gem::Requirement
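
For context on the swap above: poseidon is a long-unmaintained Ruby Kafka client, and the development (test) dependency moves to ruby-kafka in its place.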
@@ -188,10 +188,10 @@ files:
 - spec/integration/outputs/kafka_spec.rb
 - spec/unit/inputs/kafka_spec.rb
 - spec/unit/outputs/kafka_spec.rb
-- vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.2-1/zstd-jni-1.4.2-1.jar
-- vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.3.0/kafka-clients-2.3.0.jar
+- vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.3-1/zstd-jni-1.4.3-1.jar
+- vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.4.1/kafka-clients-2.4.1.jar
 - vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar
-- vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26.jar
+- vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar
 - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
@@ -217,7 +217,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.14
+rubygems_version: 2.6.13
 signing_key:
 specification_version: 4
 summary: Integration with Kafka - input and output plugins