logstash-integration-kafka 10.12.0-java → 11.0.0-java

Files changed (26)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +6 -1
  3. data/docs/index.asciidoc +1 -1
  4. data/docs/input-kafka.asciidoc +19 -8
  5. data/docs/output-kafka.asciidoc +8 -2
  6. data/lib/logstash/inputs/kafka.rb +4 -2
  7. data/lib/logstash/outputs/kafka.rb +3 -1
  8. data/lib/logstash/plugin_mixins/kafka/common.rb +8 -0
  9. data/lib/logstash-integration-kafka_jars.rb +8 -8
  10. data/logstash-integration-kafka.gemspec +2 -2
  11. data/spec/unit/inputs/kafka_spec.rb +10 -0
  12. data/spec/unit/outputs/kafka_spec.rb +8 -0
  13. data/vendor/jar-dependencies/io/confluent/common-config/{6.2.2/common-config-6.2.2.jar → 7.3.0/common-config-7.3.0.jar} +0 -0
  14. data/vendor/jar-dependencies/io/confluent/common-utils/{6.2.2/common-utils-6.2.2.jar → 7.3.0/common-utils-7.3.0.jar} +0 -0
  15. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/7.3.0/kafka-avro-serializer-7.3.0.jar +0 -0
  16. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/7.3.0/kafka-schema-registry-client-7.3.0.jar +0 -0
  17. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/7.3.0/kafka-schema-serializer-7.3.0.jar +0 -0
  18. data/vendor/jar-dependencies/org/apache/kafka/kafka-clients/{2.8.1/kafka-clients-2.8.1.jar → 3.3.1/kafka-clients-3.3.1.jar} +0 -0
  19. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/3.3.1/kafka_2.12-3.3.1.jar +0 -0
  20. data/vendor/jar-dependencies/org/lz4/lz4-java/1.8.0/lz4-java-1.8.0.jar +0 -0
  21. metadata +12 -12
  22. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/6.2.2/kafka-avro-serializer-6.2.2.jar +0 -0
  23. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/6.2.2/kafka-schema-registry-client-6.2.2.jar +0 -0
  24. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/6.2.2/kafka-schema-serializer-6.2.2.jar +0 -0
  25. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.8.1/kafka_2.12-2.8.1.jar +0 -0
  26. data/vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: ad714dd5a1201d57e98cc740560fb96fdeb999b500065ba5c5dcb0d950cab3fa
- data.tar.gz: 148938661749ae182a0ea2e2482d846bf7225390c2e418605cd83ad1242ba9b0
+ metadata.gz: c0060f7684d8dd0787c1e106fdcc1b1b9673ec42cf2ffc01141dc5a2d9351967
+ data.tar.gz: a0d878319e3ffde777330f9a15c64c23e898f16f8cde797622a804e9f4cbf89c
  SHA512:
- metadata.gz: afac318b49b8a489b3c0e7919067417eee1cc3f6097c1e71e86647a66c1383c9054f81742fe70cc8673f2c6ba377c2734a17426d2541abecb0f941465a037f0f
- data.tar.gz: c8f7c382bbb00680fdb4a2c9d72a31ddcd231644d683503ba3eb0996da7236ac5fdd55a7aaa40f01a8737b9d3b5419052f1fd2b054ee3f6da1c23b2e1631ce99
+ metadata.gz: 721c864dff1a72f31cea49f7e3c674bdd827347b7b7a60c8ab1c2f5e88839205e3e94836cc4b664a55fd16107e0412b6885d15949c2ac1fd20798e82124fa502
+ data.tar.gz: 49823c88d015acbdaf7b91d0fe64a9975ff22c9c8691c79c9270bed33aa174d3ea4f770c841e074c64ed82a1b092814817a63447d62fa17c84f9e3666bf89203
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
+ ## 11.0.0
+ - Changed Kafka client to 3.3.1, requires Logstash >= 8.3.0.
+ - Deprecated the `default` value for the `client_dns_lookup` setting, forcing it to `use_all_dns_ips` when explicitly used [#130](https://github.com/logstash-plugins/logstash-integration-kafka/pull/130)
+ - Changed the consumer's poll to the overload that does not block on metadata retrieval [#136](https://github.com/logstash-plugins/logstash-integration-kafka/pull/136)
+
  ## 10.12.0
- - bump kafka client to 2.8.1 [#115](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/115)
+ - bump kafka client to 2.8.1 [#115](https://github.com/logstash-plugins/logstash-integration-kafka/pull/115)
 
  ## 10.11.0
  - Feat: added connections_max_idle_ms setting for output [#118](https://github.com/logstash-plugins/logstash-integration-kafka/pull/118)
data/docs/index.asciidoc CHANGED
@@ -1,7 +1,7 @@
  :plugin: kafka
  :type: integration
  :no_codec:
- :kafka_client: 2.8.1
+ :kafka_client: 3.3.1
 
  ///////////////////////////////////////////
  START - GENERATED VARIABLES, DO NOT EDIT!
data/docs/input-kafka.asciidoc CHANGED
@@ -2,8 +2,8 @@
  :plugin: kafka
  :type: input
  :default_codec: plain
- :kafka_client: 2.8
- :kafka_client_doc: 25
+ :kafka_client: 3.3
+ :kafka_client_doc: 33
 
  ///////////////////////////////////////////
  START - GENERATED VARIABLES, DO NOT EDIT!
@@ -143,10 +143,10 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more details
  | <<plugins-{type}s-{plugin}-ssl_key_password>> |<<password,password>>|No
  | <<plugins-{type}s-{plugin}-ssl_keystore_location>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-ssl_keystore_password>> |<<password,password>>|No
- | <<plugins-{type}s-{plugin}-ssl_keystore_type>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-ssl_keystore_type>> |<<string,string>>, one of `["jks", "PKCS12"]`|No
  | <<plugins-{type}s-{plugin}-ssl_truststore_location>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-ssl_truststore_password>> |<<password,password>>|No
- | <<plugins-{type}s-{plugin}-ssl_truststore_type>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-ssl_truststore_type>> |<<string,string>>, one of `["jks", "PKCS12"]`|No
  | <<plugins-{type}s-{plugin}-topics>> |<<array,array>>|No
  | <<plugins-{type}s-{plugin}-topics_pattern>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-value_deserializer_class>> |<<string,string>>|No
@@ -211,6 +211,12 @@ IP addresses for a hostname, they will all be attempted to connect to before failing the
  connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be
  resolved and expanded into a list of canonical names.
 
+ [NOTE]
+ ====
+ Starting with Kafka 3, the `default` value for `client.dns.lookup` has been removed.
+ If it is explicitly configured, it falls back to `use_all_dns_ips`.
+ ====
+
  [id="plugins-{type}s-{plugin}-client_id"]
  ===== `client_id`
 
@@ -659,7 +665,7 @@ If client authentication is required, this setting stores the keystore password
  * Value type is <<string,string>>
  * There is no default value for this setting.
 
- The keystore type.
+ The format of the keystore file. It must be either `jks` or `PKCS12`.
 
  [id="plugins-{type}s-{plugin}-ssl_truststore_location"]
  ===== `ssl_truststore_location`
@@ -683,7 +689,7 @@ The truststore password.
  * Value type is <<string,string>>
  * There is no default value for this setting.
 
- The truststore type.
+ The format of the truststore file. It must be either `jks` or `PKCS12`.
 
  [id="plugins-{type}s-{plugin}-topics"]
  ===== `topics`
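For readers updating SSL settings, a hypothetical Ruby sketch of how the two documented formats are passed to the input; the broker address and file paths are placeholders, not values from this changeset:

```ruby
# Hypothetical SSL configuration sketch; server and paths are placeholders.
# Per the docs above, ssl_keystore_type / ssl_truststore_type must be
# either "jks" or "PKCS12".
require "logstash/inputs/kafka"

input = LogStash::Inputs::Kafka.new(
  "bootstrap_servers"       => "localhost:9092",
  "security_protocol"       => "SSL",
  "ssl_keystore_location"   => "/etc/kafka/client.keystore.p12",   # placeholder path
  "ssl_keystore_type"       => "PKCS12",
  "ssl_truststore_location" => "/etc/kafka/client.truststore.jks", # placeholder path
  "ssl_truststore_type"     => "jks"
)
```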
@@ -699,8 +705,13 @@ A list of topics to subscribe to, defaults to ["logstash"].
  * Value type is <<string,string>>
  * There is no default value for this setting.
 
- A topic regex pattern to subscribe to.
- The topics configuration will be ignored when using this configuration.
+ A topic regular expression pattern to subscribe to.
+
+ Filtering by a regular expression is done by retrieving the full list of topic names from the broker and applying the pattern locally. On brokers with many topics this operation can be very slow, especially if there are many consumers.
+
+ NOTE: When some of the broker's topics have ACL rules that lack the DESCRIBE permission, the subscription still happens, but the broker logs that the configured user was denied subscription to those topics.
+
 
  [id="plugins-{type}s-{plugin}-value_deserializer_class"]
  ===== `value_deserializer_class`
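A small JRuby sketch (illustrative only, not plugin code; the topic names are invented) of why pattern subscription scales with the broker's topic count: the pattern is compiled with `java.util.regex.Pattern`, as in `inputs/kafka.rb`, and applied locally to the full topic list fetched from the broker.

```ruby
# Illustrative local pattern filtering; topic names are made up.
require "java"

pattern    = java.util.regex.Pattern.compile("logs-.*")
all_topics = ["logs-app", "logs-db", "metrics-app"] # assume fetched from the broker
subscribed = all_topics.select { |t| pattern.matcher(t).matches }
# => ["logs-app", "logs-db"]
```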
data/docs/output-kafka.asciidoc CHANGED
@@ -2,8 +2,8 @@
  :plugin: kafka
  :type: output
  :default_codec: plain
- :kafka_client: 2.8
- :kafka_client_doc: 25
+ :kafka_client: 3.3
+ :kafka_client_doc: 33
 
  ///////////////////////////////////////////
  START - GENERATED VARIABLES, DO NOT EDIT!
@@ -181,6 +181,12 @@ all IP addresses returned for a hostname before failing the connection.
  If set to `resolve_canonical_bootstrap_servers_only`, each entry will be
  resolved and expanded into a list of canonical names.
 
+ [NOTE]
+ ====
+ Starting with Kafka 3, the `default` value for `client.dns.lookup` has been removed.
+ If it is explicitly configured, it falls back to `use_all_dns_ips`.
+ ====
+
  [id="plugins-{type}s-{plugin}-client_id"]
  ===== `client_id`
 
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -92,7 +92,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # IP addresses for a hostname, they will all be attempted to connect to before failing the
  # connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be
  # resolved and expanded into a list of canonical names.
- config :client_dns_lookup, :validate => ["default", "use_all_dns_ips", "resolve_canonical_bootstrap_servers_only"], :default => "default"
+ # Starting with Kafka 3, the `default` value for `client.dns.lookup` has been removed. If explicitly configured, it falls back to `use_all_dns_ips`.
+ config :client_dns_lookup, :validate => ["default", "use_all_dns_ips", "resolve_canonical_bootstrap_servers_only"], :default => "use_all_dns_ips"
  # The id string to pass to the server when making requests. The purpose of this
  # is to be able to track the source of requests beyond just ip/port by allowing
  # a logical application name to be included.
@@ -257,6 +258,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  def register
    @runner_threads = []
    @metadata_mode = extract_metadata_level(@decorate_events)
+   reassign_dns_lookup
    @pattern ||= java.util.regex.Pattern.compile(@topics_pattern) unless @topics_pattern.nil?
    check_schema_registry_parameters
  end
@@ -329,7 +331,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  def do_poll(consumer)
    records = []
    begin
-     records = consumer.poll(poll_timeout_ms)
+     records = consumer.poll(java.time.Duration.ofMillis(poll_timeout_ms))
    rescue org.apache.kafka.common.errors.WakeupException => e
      logger.debug("Wake up from poll", :kafka_error_message => e)
      raise e unless stop?
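For context on this one-line change: `KafkaConsumer#poll(long)` was removed in kafka-clients 3.x, and it could block indefinitely while fetching metadata, whereas `poll(java.time.Duration)` bounds the total wait, metadata retrieval included. A minimal JRuby sketch, assuming `consumer` is an already-configured `org.apache.kafka.clients.consumer.KafkaConsumer`:

```ruby
# Minimal sketch: poll with a hard upper bound that includes metadata retrieval.
# `consumer` is assumed to be a configured KafkaConsumer instance.
records = consumer.poll(java.time.Duration.ofMillis(100))
records.each do |record|
  puts "#{record.topic}/#{record.partition}@#{record.offset} => #{record.value}"
end
```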
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -86,7 +86,8 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  # IP addresses for a hostname, they will all be attempted to connect to before failing the
  # connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be
  # resolved and expanded into a list of canonical names.
- config :client_dns_lookup, :validate => ["default", "use_all_dns_ips", "resolve_canonical_bootstrap_servers_only"], :default => "default"
+ # Starting with Kafka 3, the `default` value for `client.dns.lookup` has been removed. If explicitly configured, it falls back to `use_all_dns_ips`.
+ config :client_dns_lookup, :validate => ["default", "use_all_dns_ips", "resolve_canonical_bootstrap_servers_only"], :default => "use_all_dns_ips"
  # The id string to pass to the server when making requests.
  # The purpose of this is to be able to track the source of requests beyond just
  # ip/port by allowing a logical application name to be included with the request
@@ -190,6 +191,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
    logger.warn("Kafka output is configured with finite retry. This instructs Logstash to LOSE DATA after a set number of send attempts fails. If you do not want to lose data if Kafka is down, then you must remove the retry setting.", :retries => @retries)
  end
 
+ reassign_dns_lookup
 
  @producer = create_producer
  if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
data/lib/logstash/plugin_mixins/kafka/common.rb CHANGED
@@ -43,5 +43,13 @@ module LogStash module PluginMixins module Kafka
    props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
  end
 
+ def reassign_dns_lookup
+   if @client_dns_lookup == "default"
+     @client_dns_lookup = "use_all_dns_ips"
+     logger.warn("client_dns_lookup setting 'default' value is deprecated, forced to 'use_all_dns_ips', please update your configuration")
+     deprecation_logger.deprecated("Deprecated value `default` for `client_dns_lookup` option; use `use_all_dns_ips` instead.")
+   end
+ end
+
  end
end end end
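From a caller's perspective, this mixin makes the deprecated value self-correct at registration time. A sketch mirroring the unit specs further down:

```ruby
# Sketch of the fallback behavior (mirrors the specs in this changeset):
# the removed `default` value is rewritten to `use_all_dns_ips` during
# register, and a deprecation warning is logged.
require "logstash/inputs/kafka"

input = LogStash::Inputs::Kafka.new("client_dns_lookup" => "default")
input.register           # logs the deprecation warning via reassign_dns_lookup
input.client_dns_lookup  # => "use_all_dns_ips"
```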
data/lib/logstash-integration-kafka_jars.rb CHANGED
@@ -1,17 +1,17 @@
  # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
 
  require 'jar_dependencies'
- require_jar('io.confluent', 'kafka-avro-serializer', '6.2.2')
- require_jar('io.confluent', 'kafka-schema-serializer', '6.2.2')
- require_jar('io.confluent', 'common-config', '6.2.2')
+ require_jar('io.confluent', 'kafka-avro-serializer', '7.3.0')
+ require_jar('io.confluent', 'kafka-schema-serializer', '7.3.0')
+ require_jar('io.confluent', 'common-config', '7.3.0')
  require_jar('org.apache.avro', 'avro', '1.11.0')
- require_jar('io.confluent', 'kafka-schema-registry-client', '6.2.2')
- require_jar('org.apache.kafka', 'kafka_2.12', '2.8.1')
- require_jar('io.confluent', 'common-utils', '6.2.2')
+ require_jar('io.confluent', 'kafka-schema-registry-client', '7.3.0')
+ require_jar('org.apache.kafka', 'kafka_2.12', '3.3.1')
+ require_jar('io.confluent', 'common-utils', '7.3.0')
  require_jar('javax.ws.rs', 'javax.ws.rs-api', '2.1.1')
  require_jar('org.glassfish.jersey.core', 'jersey-common', '2.33')
- require_jar('org.apache.kafka', 'kafka-clients', '2.8.1')
+ require_jar('org.apache.kafka', 'kafka-clients', '3.3.1')
  require_jar('com.github.luben', 'zstd-jni', '1.5.2-2')
  require_jar('org.slf4j', 'slf4j-api', '1.7.36')
- require_jar('org.lz4', 'lz4-java', '1.7.1')
+ require_jar('org.lz4', 'lz4-java', '1.8.0')
  require_jar('org.xerial.snappy', 'snappy-java', '1.1.8.4')
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-integration-kafka'
-   s.version = '10.12.0'
+   s.version = '11.0.0'
    s.licenses = ['Apache-2.0']
    s.summary = "Integration with Kafka - input and output plugins"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
@@ -41,7 +41,7 @@ Gem::Specification.new do |s|
 
    # Gem dependencies
    s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
-   s.add_runtime_dependency "logstash-core", ">= 6.5.0"
+   s.add_runtime_dependency "logstash-core", ">= 8.3.0"
 
    s.add_runtime_dependency 'logstash-codec-json'
    s.add_runtime_dependency 'logstash-codec-plain'
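A quick plain-Ruby check (illustrative, using the stdlib `Gem::Requirement`) of what the tightened `logstash-core` constraint admits:

```ruby
# The new runtime dependency rejects Logstash cores older than 8.3.0.
req = Gem::Requirement.new(">= 8.3.0")
req.satisfied_by?(Gem::Version.new("8.2.3")) # => false
req.satisfied_by?(Gem::Version.new("8.3.0")) # => true
```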
data/spec/unit/inputs/kafka_spec.rb CHANGED
@@ -83,6 +83,16 @@ describe LogStash::Inputs::Kafka do
    it "should register" do
      expect { subject.register }.to_not raise_error
    end
+
+   context "when the deprecated `default` is specified" do
+     let(:config) { common_config.merge('client_dns_lookup' => 'default') }
+
+     it 'should fallback `client_dns_lookup` to `use_all_dns_ips`' do
+       subject.register
+
+       expect(subject.client_dns_lookup).to eq('use_all_dns_ips')
+     end
+   end
  end
 
  describe '#running' do
data/spec/unit/outputs/kafka_spec.rb CHANGED
@@ -22,6 +22,14 @@ describe "outputs/kafka" do
    expect(kafka.topic_id).to eql 'test'
    expect(kafka.key_serializer).to eql 'org.apache.kafka.common.serialization.StringSerializer'
  end
+
+ it 'should fallback `client_dns_lookup` to `use_all_dns_ips` when the deprecated `default` is specified' do
+   simple_kafka_config["client_dns_lookup"] = 'default'
+   kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+   kafka.register
+
+   expect(kafka.client_dns_lookup).to eq('use_all_dns_ips')
+ end
 end
 
 context 'when outputting messages' do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
-   version: 10.12.0
+   version: 11.0.0
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-05-09 00:00:00.000000000 Z
+ date: 2022-12-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -49,7 +49,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 6.5.0
+       version: 8.3.0
    name: logstash-core
    prerelease: false
    type: :runtime
@@ -57,7 +57,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 6.5.0
+       version: 8.3.0
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
    requirements:
@@ -258,17 +258,17 @@ files:
  - spec/unit/inputs/kafka_spec.rb
  - spec/unit/outputs/kafka_spec.rb
  - vendor/jar-dependencies/com/github/luben/zstd-jni/1.5.2-2/zstd-jni-1.5.2-2.jar
- - vendor/jar-dependencies/io/confluent/common-config/6.2.2/common-config-6.2.2.jar
- - vendor/jar-dependencies/io/confluent/common-utils/6.2.2/common-utils-6.2.2.jar
- - vendor/jar-dependencies/io/confluent/kafka-avro-serializer/6.2.2/kafka-avro-serializer-6.2.2.jar
- - vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/6.2.2/kafka-schema-registry-client-6.2.2.jar
- - vendor/jar-dependencies/io/confluent/kafka-schema-serializer/6.2.2/kafka-schema-serializer-6.2.2.jar
+ - vendor/jar-dependencies/io/confluent/common-config/7.3.0/common-config-7.3.0.jar
+ - vendor/jar-dependencies/io/confluent/common-utils/7.3.0/common-utils-7.3.0.jar
+ - vendor/jar-dependencies/io/confluent/kafka-avro-serializer/7.3.0/kafka-avro-serializer-7.3.0.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/7.3.0/kafka-schema-registry-client-7.3.0.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-serializer/7.3.0/kafka-schema-serializer-7.3.0.jar
  - vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar
  - vendor/jar-dependencies/org/apache/avro/avro/1.11.0/avro-1.11.0.jar
- - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.8.1/kafka-clients-2.8.1.jar
- - vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.8.1/kafka_2.12-2.8.1.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka-clients/3.3.1/kafka-clients-3.3.1.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka_2.12/3.3.1/kafka_2.12-3.3.1.jar
  - vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.33/jersey-common-2.33.jar
- - vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar
+ - vendor/jar-dependencies/org/lz4/lz4-java/1.8.0/lz4-java-1.8.0.jar
  - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.36/slf4j-api-1.7.36.jar
  - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.8.4/snappy-java-1.1.8.4.jar
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html