logstash-integration-kafka 11.4.1-java → 11.5.0-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: e5562239ccaed5a89b586942517c8cce2ed2142168ad22d5b8d9c55a3a6cb4d4
-  data.tar.gz: bfcf0ae13b30512324e9dce71bc2ce457c3e6656e2ce1c58bf544377c76add4b
+  metadata.gz: 3742372cdd8a8d7156590031aad63b4c5d820446d197d3a172dd8cc2dd44fed9
+  data.tar.gz: be0d8faf56ba103a9921223e82137c2fb0ad737f20abdc81ccddc5fa7db8808f
 SHA512:
-  metadata.gz: 02d67854eb10338192c737e32a79baf2926c976c3500f0e38b7f79593d1800e130dbb178ca72064981637c7160bfd6f8817ae3e28652fa86f6f6f4e4aee93c26
-  data.tar.gz: c4f0f576e44bdfcd907e0868efb13f8854a1540256253b5c426114bebecfedfd157faa9a275d64d5677fcdb765c16facda252af896e5a1db0d0c82fa3981f802
+  metadata.gz: bc2f4b1311c3e0d5742dfd8cfddb07eb22b5bf8fe53ba015953d4761916993903a890c08cbd1a5a7c208834c4ffed7e69736e15b7c8f856accd8387968493735
+  data.tar.gz: a821facccb4c252338bc4be58b082185017ab7e6493efeb12a625747186729881dfe01bae740b0e74a2f52aca36ff0db8f1beaabcbbc12106c0ce0a70d542e47
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+## 11.5.0
+  - Add "auto_create_topics" option to allow disabling of topic auto creation [#172](https://github.com/logstash-plugins/logstash-integration-kafka/pull/172)
+
+## 11.4.2
+  - Add default client_id of logstash to kafka output [#169](https://github.com/logstash-plugins/logstash-integration-kafka/pull/169)
+
 ## 11.4.1
   - [DOC] Match anchor ID and references for `message_headers` [#164](https://github.com/logstash-plugins/logstash-integration-kafka/pull/164)
 
docs/input-kafka.asciidoc CHANGED
@@ -98,6 +98,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
 |=======================================================================
 |Setting |Input type|Required
 | <<plugins-{type}s-{plugin}-auto_commit_interval_ms>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-auto_create_topics>> |<<boolean,boolean>>|No
 | <<plugins-{type}s-{plugin}-auto_offset_reset>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-bootstrap_servers>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-check_crcs>> |<<boolean,boolean>>|No
@@ -285,6 +286,17 @@ This will add a field named `kafka` to the logstash event containing the followi
 * `offset`: The offset from the partition this message is associated with
 * `key`: A ByteBuffer containing the message key
 
+
+[id="plugins-{type}s-{plugin}-auto_create_topics"]
+===== `auto_create_topics`
+
+* Value type is <<boolean,boolean>>
+* Default value is `true`
+
+Controls whether the topic is automatically created when subscribing to a non-existent topic.
+A topic will be auto-created only if this configuration is set to `true` and auto-topic creation is enabled on the broker using `auto.create.topics.enable`;
+otherwise auto-topic creation is not permitted.
+
 [id="plugins-{type}s-{plugin}-enable_auto_commit"]
 ===== `enable_auto_commit`
 
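For illustration, here is how the new option reads in a pipeline definition. This is a minimal sketch, not taken from the diff: the broker address and topic name are placeholders.

```
input {
  kafka {
    bootstrap_servers  => "localhost:9092"   # placeholder broker
    topics             => ["events"]         # placeholder topic
    auto_create_topics => false              # new in 11.5.0; default is true
  }
}
```

Note the two-sided gate described in the docs above: leaving this at its default of `true` still creates nothing unless the broker itself has `auto.create.topics.enable` switched on.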
@@ -789,7 +801,6 @@ Filtering by a regular expression is done by retrieving the full list of topic n
 NOTE: When the broker has some topics configured with ACL rules and they miss the DESCRIBE permission, then the subscription
 happens but on the broker side it is logged that the subscription of some topics was denied to the configured user.
 
-
 [id="plugins-{type}s-{plugin}-value_deserializer_class"]
 ===== `value_deserializer_class`
 
docs/output-kafka.asciidoc CHANGED
@@ -192,7 +192,7 @@ If not explicitly configured it defaults to `use_all_dns_ips`.
 ===== `client_id`
 
 * Value type is <<string,string>>
-* There is no default value for this setting.
+* Default value is `"logstash"`
 
 The id string to pass to the server when making requests.
 The purpose of this is to be able to track the source of requests beyond just
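For illustration, a minimal output sketch (broker address and topic are placeholders): with the new default, omitting `client_id` now sends `logstash` as the client id, and setting it explicitly still works as before.

```
output {
  kafka {
    bootstrap_servers => "localhost:9092"   # placeholder broker
    topic_id          => "events"           # placeholder topic
    client_id         => "indexer-01"       # optional; defaults to "logstash" since 11.4.2
  }
}
```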
lib/logstash/inputs/kafka.rb CHANGED
@@ -246,6 +246,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # `timestamp`: The timestamp of this message
   # While with `extended` it adds also all the key values present in the Kafka header if the key is valid UTF-8 else
   # silently skip it.
+  #
+  # Controls whether a kafka topic is automatically created when subscribing to a non-existent topic.
+  # A topic will be auto-created only if this configuration is set to `true` and auto-topic creation is enabled on the broker using `auto.create.topics.enable`;
+  # otherwise auto-topic creation is not permitted.
+  config :auto_create_topics, :validate => :boolean, :default => true
+
   config :decorate_events, :validate => %w(none basic extended false true), :default => "none"
 
   attr_reader :metadata_mode
@@ -410,6 +416,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
       props.put(kafka::AUTO_COMMIT_INTERVAL_MS_CONFIG, auto_commit_interval_ms.to_s) unless auto_commit_interval_ms.nil?
       props.put(kafka::AUTO_OFFSET_RESET_CONFIG, auto_offset_reset) unless auto_offset_reset.nil?
+      props.put(kafka::ALLOW_AUTO_CREATE_TOPICS_CONFIG, auto_create_topics) unless auto_create_topics.nil?
       props.put(kafka::BOOTSTRAP_SERVERS_CONFIG, bootstrap_servers)
       props.put(kafka::CHECK_CRCS_CONFIG, check_crcs.to_s) unless check_crcs.nil?
       props.put(kafka::CLIENT_DNS_LOOKUP_CONFIG, client_dns_lookup)
lib/logstash/outputs/kafka.rb CHANGED
@@ -91,7 +91,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
   # The id string to pass to the server when making requests.
   # The purpose of this is to be able to track the source of requests beyond just
   # ip/port by allowing a logical application name to be included with the request
-  config :client_id, :validate => :string
+  config :client_id, :validate => :string, :default => "logstash"
   # Serializer class for the key of the message
   config :key_serializer, :validate => :string, :default => 'org.apache.kafka.common.serialization.StringSerializer'
   # The producer groups together any records that arrive in between request
logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '11.4.1'
+  s.version = '11.5.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 11.4.1
+  version: 11.5.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-04-10 00:00:00.000000000 Z
+date: 2024-07-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement