logstash-integration-kafka 10.0.0-java → 10.0.1-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/CHANGELOG.md +6 -2
- data/docs/index.asciidoc +4 -1
- data/docs/input-kafka.asciidoc +4 -1
- data/lib/logstash/outputs/kafka.rb +3 -2
- data/logstash-integration-kafka.gemspec +2 -2
- data/spec/unit/outputs/kafka_spec.rb +4 -4
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: c46d0081cceeb47ad2a3718ab295b38350edbedce2448d6423257b9696d5f4ff
|
4
|
+
data.tar.gz: 258b4f5bd38c5e4acfa848a23292dd54db45b0bc77dbf473821eb4caa228294c
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: a8ee6bb607a1636544679a647f1ed20d0ed5d022dcad5af08297b016cd19dc5a01251b2a00e9c5fe984df6bb5c4d9ec831113e94ce3a6c84503caaf966d12491
|
7
|
+
data.tar.gz: 8d315bdf003dbb5b4c9bd0af488426c3f8a84d5aebf9084728fda9f62ac2f8a269c563750f4be5cf4ab51befb372c80f4d91717396c9a8e9a3682e2c314ecbee
|
data/CHANGELOG.md
CHANGED
@@ -1,6 +1,10 @@
|
|
1
|
+
## 10.0.1
|
2
|
+
- Fix links in changelog pointing to stand-alone plugin changelogs.
|
3
|
+
- Refactor: scope java_import to plugin class
|
4
|
+
|
1
5
|
## 10.0.0
|
2
6
|
- Initial release of the Kafka Integration Plugin, which combines
|
3
7
|
previously-separate Kafka plugins and shared dependencies into a single
|
4
8
|
codebase; independent changelogs for previous versions can be found:
|
5
|
-
- [Kafka Input Plugin @9.1.0](https://github.com/logstash-plugins/logstash-input-
|
6
|
-
- [Kafka Output Plugin @8.1.0](https://github.com/logstash-plugins/logstash-output-
|
9
|
+
- [Kafka Input Plugin @9.1.0](https://github.com/logstash-plugins/logstash-input-kafka/blob/v9.1.0/CHANGELOG.md)
|
10
|
+
- [Kafka Output Plugin @8.1.0](https://github.com/logstash-plugins/logstash-output-kafka/blob/v8.1.0/CHANGELOG.md)
|
data/docs/index.asciidoc
CHANGED
@@ -23,6 +23,9 @@ include::{include_path}/plugin_header.asciidoc[]
|
|
23
23
|
|
24
24
|
The Kafka Integration Plugin provides integrated plugins for working with the https://kafka.apache.org/[Kafka] distributed streaming platform.
|
25
25
|
|
26
|
-
|
26
|
+
- {logstash-ref}/plugins-inputs-kafka.html[Kafka Input Plugin]
|
27
|
+
- {logstash-ref}/plugins-outputs-kafka.html[Kafka Output Plugin]
|
28
|
+
|
29
|
+
This plugin uses Kafka Client 2.3.0. For broker compatibility, see the official https://cwiki.apache.org/confluence/display/KAFKA/Compatibility+Matrix[Kafka compatibility reference]. If the linked compatibility wiki is not up-to-date, please contact Kafka support/community to confirm compatibility.
|
27
30
|
|
28
31
|
:no_codec!:
|
data/docs/input-kafka.asciidoc
CHANGED
@@ -220,10 +220,13 @@ This will add a field named `kafka` to the logstash event containing the followi
|
|
220
220
|
* Value type is <<string,string>>
|
221
221
|
* Default value is `"true"`
|
222
222
|
|
223
|
-
If true, periodically commit to Kafka the offsets of messages already returned by the consumer.
|
224
223
|
This committed offset will be used when the process fails as the position from
|
225
224
|
which the consumption will begin.
|
226
225
|
|
226
|
+
If true, periodically commit to Kafka the offsets of messages already returned by
|
227
|
+
the consumer. If value is `false` however, the offset is committed every time the
|
228
|
+
consumer fetches the data from the topic.
|
229
|
+
|
227
230
|
[id="plugins-{type}s-{plugin}-exclude_internal_topics"]
|
228
231
|
===== `exclude_internal_topics`
|
229
232
|
|
data/lib/logstash/outputs/kafka.rb
CHANGED
@@ -3,8 +3,6 @@ require 'logstash/outputs/base'
|
|
3
3
|
require 'java'
|
4
4
|
require 'logstash-integration-kafka_jars.rb'
|
5
5
|
|
6
|
-
java_import org.apache.kafka.clients.producer.ProducerRecord
|
7
|
-
|
8
6
|
# Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
|
9
7
|
# the broker.
|
10
8
|
#
|
@@ -49,6 +47,9 @@ java_import org.apache.kafka.clients.producer.ProducerRecord
|
|
49
47
|
#
|
50
48
|
# Kafka producer configuration: http://kafka.apache.org/documentation.html#newproducerconfigs
|
51
49
|
class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
50
|
+
|
51
|
+
java_import org.apache.kafka.clients.producer.ProducerRecord
|
52
|
+
|
52
53
|
declare_threadsafe!
|
53
54
|
|
54
55
|
config_name 'kafka'
|
data/logstash-integration-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-integration-kafka'
|
3
|
-
s.version = '10.0.0'
|
3
|
+
s.version = '10.0.1'
|
4
4
|
s.licenses = ['Apache-2.0']
|
5
5
|
s.summary = "Integration with Kafka - input and output plugins"
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
|
@@ -35,7 +35,7 @@ Gem::Specification.new do |s|
|
|
35
35
|
"integration_plugins" => "logstash-input-kafka,logstash-output-kafka"
|
36
36
|
}
|
37
37
|
|
38
|
-
s.add_development_dependency 'jar-dependencies', '~> 0.3.12'
|
38
|
+
s.add_development_dependency 'jar-dependencies', '>= 0.3.12'
|
39
39
|
|
40
40
|
s.platform = RUBY_PLATFORM
|
41
41
|
|
data/spec/unit/outputs/kafka_spec.rb
CHANGED
@@ -16,9 +16,9 @@ describe "outputs/kafka" do
|
|
16
16
|
|
17
17
|
it 'should populate kafka config with default values' do
|
18
18
|
kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
|
19
|
-
|
20
|
-
|
21
|
-
|
19
|
+
expect(kafka.bootstrap_servers).to eql 'localhost:9092'
|
20
|
+
expect(kafka.topic_id).to eql 'test'
|
21
|
+
expect(kafka.key_serializer).to eql 'org.apache.kafka.common.serialization.StringSerializer'
|
22
22
|
end
|
23
23
|
end
|
24
24
|
|
@@ -55,7 +55,7 @@ describe "outputs/kafka" do
|
|
55
55
|
expect { kafka.register }.to raise_error(LogStash::ConfigurationError, /ssl_truststore_location must be set when SSL is enabled/)
|
56
56
|
end
|
57
57
|
end
|
58
|
-
|
58
|
+
|
59
59
|
context "when KafkaProducer#send() raises an exception" do
|
60
60
|
let(:failcount) { (rand * 10).to_i }
|
61
61
|
let(:sendcount) { failcount + 1 }
|
metadata
CHANGED
@@ -1,19 +1,19 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-integration-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 10.0.0
|
4
|
+
version: 10.0.1
|
5
5
|
platform: java
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2020-03-12 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
15
15
|
requirements:
|
16
|
-
- - "~>"
|
16
|
+
- - ">="
|
17
17
|
- !ruby/object:Gem::Version
|
18
18
|
version: 0.3.12
|
19
19
|
name: jar-dependencies
|
@@ -21,7 +21,7 @@ dependencies:
|
|
21
21
|
type: :development
|
22
22
|
version_requirements: !ruby/object:Gem::Requirement
|
23
23
|
requirements:
|
24
|
-
- - "~>"
|
24
|
+
- - ">="
|
25
25
|
- !ruby/object:Gem::Version
|
26
26
|
version: 0.3.12
|
27
27
|
- !ruby/object:Gem::Dependency
|
@@ -217,7 +217,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
217
217
|
version: '0'
|
218
218
|
requirements: []
|
219
219
|
rubyforge_project:
|
220
|
-
rubygems_version: 2.6.
|
220
|
+
rubygems_version: 2.6.13
|
221
221
|
signing_key:
|
222
222
|
specification_version: 4
|
223
223
|
summary: Integration with Kafka - input and output plugins
|