logstash-integration-kafka 10.8.2-java → 10.9.0-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/lib/logstash/inputs/kafka.rb +15 -12
- data/logstash-integration-kafka.gemspec +2 -1
- data/spec/integration/inputs/kafka_spec.rb +9 -12
- data/spec/unit/inputs/kafka_spec.rb +25 -5
- metadata +16 -2
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: df9c89cdfcc2db6702409ec45ffb8d9f8f2b9274304889df1527e0697ccfcf95
+  data.tar.gz: e907ad2e277d27c8cdbe98ebd203af86a3505a85574d64ace4402607c075a69e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bac93eb957af9028a6efc6e31a66c94818ae61333fa738daa6606abdd325dbea206fea2cd905d891f2b341a7bc983f8eaf5a5471015ac9548bc902f941b4a0d9
+  data.tar.gz: 456739a2409ef5a42f007a23c8d0dbfceb3518e8e65b528c0f48266f2a219c2415a83a507fdab3ba028cbc5493d645c080ce191a0d39d7c1787557abded9a0e1
data/CHANGELOG.md CHANGED

@@ -1,3 +1,8 @@
+## 10.9.0
+  - Refactor: leverage codec when using schema registry [#106](https://github.com/logstash-plugins/logstash-integration-kafka/pull/106)
+
+    Previously using `schema_registry_url` parsed the payload as JSON even if `codec => 'plain'` was set, this is no longer the case.
+
 ## 10.8.2
   - [DOC] Updates description of `enable_auto_commit=false` to clarify that the commit happens after data is fetched AND written to the queue [#90](https://github.com/logstash-plugins/logstash-integration-kafka/pull/90)
   - Fix: update to Gradle 7 [#104](https://github.com/logstash-plugins/logstash-integration-kafka/pull/104)
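The net effect is that the codec default now tracks the schema-registry setting. A minimal sketch of the new defaults, modeled on the unit specs further down in this diff (standalone instantiation and option values are illustrative only):

```ruby
require 'logstash/inputs/kafka'

# No schema registry configured: the input keeps its historical plain default.
plain_input = LogStash::Inputs::Kafka.new('topics' => ['logstash'])
plain_input.codec.class # => LogStash::Codecs::Plain

# schema_registry_url set and no explicit codec: the default flips to json.
json_input = LogStash::Inputs::Kafka.new(
  'schema_registry_url' => 'http://localhost:8081',
  'topics' => ['logstash']
)
json_input.codec.class # => LogStash::Codecs::JSON
```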
data/lib/logstash/inputs/kafka.rb CHANGED

@@ -63,7 +63,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
   config_name 'kafka'
 
-  default :codec, 'plain'
+  # default :codec, 'plain' or 'json' depending whether schema registry is used
+  #
+  # @override LogStash::Inputs::Base - removing the `:default => :plain`
+  config :codec, :validate => :codec
+  # NOTE: isn't necessary due the params['codec'] = ... done in #initialize
+  # having the `nil` default explicit makes the behavior more noticeable.
 
   # The frequency in milliseconds that the consumer offsets are committed to Kafka.
   config :auto_commit_interval_ms, :validate => :number, :default => 5000 # Kafka default
@@ -249,6 +254,15 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
   attr_reader :metadata_mode
 
+  # @overload based on schema registry change the codec default
+  def initialize(params = {})
+    unless params.key?('codec')
+      params['codec'] = params.key?('schema_registry_url') ? 'json' : 'plain'
+    end
+
+    super(params)
+  end
+
   public
   def register
     @runner_threads = []
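Because the override only assigns a codec when the `codec` key is absent from `params`, a user-supplied codec always wins over the schema-registry-driven default. A sketch of the precedence (illustrative values, not from this diff):

```ruby
# Explicit codec => 'plain' is honored even with a schema registry,
# which is exactly the behavior change described in the changelog.
input = LogStash::Inputs::Kafka.new(
  'schema_registry_url' => 'http://localhost:8081',
  'topics' => ['logstash'],
  'codec' => 'plain'
)
input.codec.class # => LogStash::Codecs::Plain
```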
@@ -341,22 +355,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   def handle_record(record, codec_instance, queue)
     codec_instance.decode(record.value.to_s) do |event|
       decorate(event)
-      maybe_apply_schema(event, record)
       maybe_set_metadata(event, record)
       queue << event
     end
   end
 
-  def maybe_apply_schema(event, record)
-    if schema_registry_url
-      json = LogStash::Json.load(record.value.to_s)
-      json.each do |k, v|
-        event.set(k, v)
-      end
-      event.remove("message")
-    end
-  end
-
   def maybe_set_metadata(event, record)
     if @metadata_mode.include?(:record_props)
       event.set("[@metadata][kafka][topic]", record.topic)
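The removed `maybe_apply_schema` re-parsed every record with `LogStash::Json.load` and copied the keys onto an event the codec had already decoded; with json as the schema-registry default codec, one decode produces the same fields. A rough sketch of the equivalence, assuming a registry-deserialized JSON payload:

```ruby
require 'logstash/codecs/json'

codec = LogStash::Codecs::JSON.new
codec.decode('{"name":"value"}') do |event|
  event.get('name') # => "value" -- the fields the old helper set one by one
  # On a successful parse no "message" field is created, so the old
  # event.remove("message") cleanup is no longer needed either.
end
```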
data/logstash-integration-kafka.gemspec CHANGED

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '10.
+  s.version = '10.9.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
@@ -50,6 +50,7 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency 'logstash-mixin-deprecation_logger_support', '~>1.0'
 
   s.add_development_dependency 'logstash-devutils'
+  s.add_development_dependency 'logstash-codec-line'
   s.add_development_dependency 'rspec-wait'
   s.add_development_dependency 'digest-crc', '~> 0.5.1' # 0.6.0 started using a C-ext
   s.add_development_dependency 'ruby-kafka' # depends on digest-crc
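The new `logstash-codec-line` development dependency backs the unit spec added below, which exercises an explicit `codec => 'line'` option against the new default-codec logic.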
data/spec/integration/inputs/kafka_spec.rb CHANGED

@@ -16,38 +16,38 @@ describe "inputs/kafka", :integration => true do
   let(:group_id_5) {rand(36**8).to_s(36)}
   let(:group_id_6) {rand(36**8).to_s(36)}
   let(:plain_config) do
-    { 'topics' => ['logstash_integration_topic_plain'], '
+    { 'topics' => ['logstash_integration_topic_plain'], 'group_id' => group_id_1,
       'auto_offset_reset' => 'earliest' }
   end
   let(:multi_consumer_config) do
     plain_config.merge({"group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3})
   end
   let(:snappy_config) do
-    { 'topics' => ['logstash_integration_topic_snappy'], '
+    { 'topics' => ['logstash_integration_topic_snappy'], 'group_id' => group_id_1,
       'auto_offset_reset' => 'earliest' }
   end
   let(:lz4_config) do
-    { 'topics' => ['logstash_integration_topic_lz4'], '
+    { 'topics' => ['logstash_integration_topic_lz4'], 'group_id' => group_id_1,
       'auto_offset_reset' => 'earliest' }
   end
   let(:pattern_config) do
-    { 'topics_pattern' => 'logstash_integration_topic_.*', 'group_id' => group_id_2,
+    { 'topics_pattern' => 'logstash_integration_topic_.*', 'group_id' => group_id_2,
       'auto_offset_reset' => 'earliest' }
   end
   let(:decorate_config) do
-    { 'topics' => ['logstash_integration_topic_plain'], '
+    { 'topics' => ['logstash_integration_topic_plain'], 'group_id' => group_id_3,
       'auto_offset_reset' => 'earliest', 'decorate_events' => 'true' }
   end
   let(:decorate_headers_config) do
-    { 'topics' => ['logstash_integration_topic_plain_with_headers'], '
+    { 'topics' => ['logstash_integration_topic_plain_with_headers'], 'group_id' => group_id_3,
       'auto_offset_reset' => 'earliest', 'decorate_events' => 'extended' }
  end
   let(:decorate_bad_headers_config) do
-    { 'topics' => ['logstash_integration_topic_plain_with_headers_badly'], '
+    { 'topics' => ['logstash_integration_topic_plain_with_headers_badly'], 'group_id' => group_id_3,
       'auto_offset_reset' => 'earliest', 'decorate_events' => 'extended' }
   end
   let(:manual_commit_config) do
-    { 'topics' => ['logstash_integration_topic_plain'], '
+    { 'topics' => ['logstash_integration_topic_plain'], 'group_id' => group_id_5,
       'auto_offset_reset' => 'earliest', 'enable_auto_commit' => 'false' }
   end
   let(:timeout_seconds) { 30 }
@@ -352,10 +352,7 @@ describe "Deserializing with the schema registry", :integration => true do
 
   let(:base_config) do
     {
-      'topics' => [avro_topic_name],
-      'codec' => 'plain',
-      'group_id' => group_id_1,
-      'auto_offset_reset' => 'earliest'
+      'topics' => [avro_topic_name], 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'
     }
   end
 
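Dropping the explicit `'codec' => 'plain'` from the avro `base_config` leans on the new behavior: with `schema_registry_url` set, the input now defaults to the json codec, so the spec no longer needs to pass a codec whose output the old `maybe_apply_schema` overwrote anyway.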
data/spec/unit/inputs/kafka_spec.rb CHANGED

@@ -177,7 +177,23 @@ describe LogStash::Inputs::Kafka do
     end
   end
 
-
+  it 'uses plain codec by default' do
+    expect( subject.codec ).to respond_to :decode
+    expect( subject.codec.class ).to be LogStash::Codecs::Plain
+  end
+
+  context 'with codec option' do
+
+    let(:config) { super().merge 'codec' => 'line' }
+
+    it 'uses specified codec' do
+      expect( subject.codec ).to respond_to :decode
+      expect( subject.codec.class ).to be LogStash::Codecs::Line
+    end
+
+  end
+
+
   describe "schema registry" do
     let(:base_config) do {
       'schema_registry_url' => 'http://localhost:8081',
       'topics' => ['logstash'],
@@ -186,7 +202,7 @@ describe LogStash::Inputs::Kafka do
     end
 
     context "schema_registry_url" do
-
+      let(:config) { base_config }
 
       it "conflict with value_deserializer_class should fail" do
         config['value_deserializer_class'] = 'my.fantasy.Deserializer'
@@ -197,6 +213,11 @@ describe LogStash::Inputs::Kafka do
        config['topics_pattern'] = 'topic_.*'
        expect { subject.register }.to raise_error LogStash::ConfigurationError, /Option schema_registry_url prohibit the customization of topics_pattern/
      end
+
+      it 'switches default codec to json' do
+        expect( subject.codec ).to respond_to :decode
+        expect( subject.codec.class ).to be LogStash::Codecs::JSON
+      end
     end
 
     context 'when kerberos auth is used' do
@@ -204,9 +225,8 @@ describe LogStash::Inputs::Kafka do
       context "with #{protocol}" do
         ['auto', 'skip'].each do |vsr|
           context "when validata_schema_registry is #{vsr}" do
-            let(:config) { base_config.merge({'security_protocol' => protocol,
-
-            }
+            let(:config) { base_config.merge({'security_protocol' => protocol, 'schema_registry_validation' => vsr}) }
+
             it 'skips verification' do
               expect(subject).not_to receive(:check_for_schema_registry_connectivity_and_subjects)
               expect { subject.register }.not_to raise_error
metadata CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 10.
+  version: 10.9.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-12-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -154,6 +154,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-codec-line
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
   requirements: