logstash-integration-kafka 10.5.3-java → 10.6.0-java

Files changed (27)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +4 -0
  3. data/docs/input-kafka.asciidoc +46 -1
  4. data/lib/logstash-integration-kafka_jars.rb +13 -4
  5. data/lib/logstash/inputs/kafka.rb +29 -4
  6. data/lib/logstash/plugin_mixins/common.rb +92 -0
  7. data/logstash-integration-kafka.gemspec +1 -1
  8. data/spec/integration/inputs/kafka_spec.rb +186 -11
  9. data/spec/unit/inputs/avro_schema_fixture_payment.asvc +8 -0
  10. data/spec/unit/inputs/kafka_spec.rb +16 -0
  11. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar +0 -0
  12. data/vendor/jar-dependencies/io/confluent/common-config/5.5.1/common-config-5.5.1.jar +0 -0
  13. data/vendor/jar-dependencies/io/confluent/common-utils/5.5.1/common-utils-5.5.1.jar +0 -0
  14. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar +0 -0
  15. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar +0 -0
  16. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar +0 -0
  17. data/vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar +0 -0
  18. data/vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar +0 -0
  19. data/vendor/jar-dependencies/org/apache/kafka/kafka-clients/{2.4.1/kafka-clients-2.4.1.jar → 2.5.1/kafka-clients-2.5.1.jar} +0 -0
  20. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar +0 -0
  21. data/vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar +0 -0
  22. data/vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar +0 -0
  23. data/vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar +0 -0
  24. metadata +18 -6
  25. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.3-1/zstd-jni-1.4.3-1.jar +0 -0
  26. data/vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar +0 -0
  27. data/vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4383db6ec7c8fa26ef358d104c490f51620f615afb2f68359b6f6e98d4e58f8b
-  data.tar.gz: 040637202d15cb1e5784104ff505f10a6610e91187a57f104a8f81cd2b24475a
+  metadata.gz: 1d8b40d779e91e9c05dece65249660ab5c272b6833658cbca51b977d92936f42
+  data.tar.gz: 58323e216be645aede9f0b49c27824958b6627485bdf79a6774cd5f87b818245
 SHA512:
-  metadata.gz: 98da085bceebd241a6d45f9166aa4ff1a132551cd2cda8825ceab3c22ebbb5f78579f0d1a0b596aeaf3d504147d656cf2ef784570ef3fd9ab98c792ae6e15be4
-  data.tar.gz: f552e5ec8d84f3ae7d85b3d4bc4ba4f7309321ea81efaa9bfb69a4a493141868b2576a626f0ee061bb0ebe6cbc8071993dd8592bc53030478baad2b5173e1086
+  metadata.gz: 0f05eec028758745a2ab04b90d721128c088c46d0bb9a01923c389118c99a718a561d2ca2420fc2e206e4cf75e4e3695e1704a17d9eff9f4f18e66da3d3ccb85
+  data.tar.gz: 85e117a64d14d013674869ccadfb5487a04991ec83c6a9d6874496b862be200bf4e7203d3ac4fc779d67131ecbe82212fafd66884114337d17ed33dda7ad0963
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 10.6.0
+- Added functionality to Kafka input to use Avro deserializer in retrieving data from Kafka. The schema is retrieved
+  from an instance of Confluent's Schema Registry service [#51](https://github.com/logstash-plugins/logstash-integration-kafka/pull/51)
+
 ## 10.5.3
 - Fix: set (optional) truststore when endpoint id check disabled [#60](https://github.com/logstash-plugins/logstash-integration-kafka/pull/60).
   Since **10.1.0** disabling server host-name verification (`ssl_endpoint_identification_algorithm => ""`) did not allow
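
As a quick orientation for the 10.6.0 feature, here is a minimal sketch of the new option in use, written as the plain options hash the specs pass to LogStash::Inputs::Kafka.new; the broker address, registry URL, and topic name are illustrative assumptions, not values taken from this changeset.

    # Minimal sketch (assumed values): Avro deserialization backed by Confluent
    # Schema Registry. A subject named "<topic>-value" must already be registered,
    # and value_deserializer_class must stay at its default.
    config = {
      'bootstrap_servers'   => 'localhost:9092',         # assumption: local broker
      'topics'              => ['payments'],             # registry needs "payments-value"
      'schema_registry_url' => 'http://localhost:8081'   # assumption: local registry
    }
    kafka_input = LogStash::Inputs::Kafka.new(config)
    kafka_input.register  # validates registry connectivity and required subjects
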
data/docs/input-kafka.asciidoc CHANGED
@@ -124,6 +124,10 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
 | <<plugins-{type}s-{plugin}-sasl_jaas_config>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-sasl_kerberos_service_name>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-sasl_mechanism>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-schema_registry_key>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-schema_registry_proxy>> |<<uri,uri>>|No
+| <<plugins-{type}s-{plugin}-schema_registry_secret>> |<<string,string>>|No
+| <<plugins-{type}s-{plugin}-schema_registry_url>> |<<uri,uri>>|No
 | <<plugins-{type}s-{plugin}-security_protocol>> |<<string,string>>, one of `["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]`|No
 | <<plugins-{type}s-{plugin}-send_buffer_bytes>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-session_timeout_ms>> |<<number,number>>|No
@@ -528,6 +532,44 @@ http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used fo
 This may be any mechanism for which a security provider is available.
 GSSAPI is the default mechanism.
 
+[id="plugins-{type}s-{plugin}-schema_registry_key"]
+===== `schema_registry_key`
+
+* Value type is <<string,string>>
+* There is no default value for this setting.
+
+Set the username for basic authorization to access the remote Schema Registry.
+
+[id="plugins-{type}s-{plugin}-schema_registry_proxy"]
+===== `schema_registry_proxy`
+
+* Value type is <<uri,uri>>
+* There is no default value for this setting.
+
+Set the address of a forward HTTP proxy. An empty string is treated as if the proxy was not set.
+
+[id="plugins-{type}s-{plugin}-schema_registry_secret"]
+===== `schema_registry_secret`
+
+* Value type is <<string,string>>
+* There is no default value for this setting.
+
+Set the password for basic authorization to access the remote Schema Registry.
+
+[id="plugins-{type}s-{plugin}-schema_registry_url"]
+===== `schema_registry_url`
+
+* Value type is <<uri,uri>>
+
+The URI that points to an instance of the
+https://docs.confluent.io/current/schema-registry/index.html[Schema Registry] service,
+used to manage Avro schemas. Be sure that the Avro schemas for deserializing the data from
+the specified topics have been uploaded to the Schema Registry service.
+The schemas must follow a naming convention with the pattern <topic name>-value.
+
+Use either the Schema Registry config option or the
+<<plugins-{type}s-{plugin}-value_deserializer_class>> config option, but not both.
+
 [id="plugins-{type}s-{plugin}-security_protocol"]
 ===== `security_protocol`
 
@@ -641,7 +683,10 @@ The topics configuration will be ignored when using this configuration.
 * Value type is <<string,string>>
 * Default value is `"org.apache.kafka.common.serialization.StringDeserializer"`
 
-Java Class used to deserialize the record's value
+Java Class used to deserialize the record's value.
+A custom value deserializer can be used only if you are not using a Schema Registry.
+Use either the value_deserializer_class config option or the
+<<plugins-{type}s-{plugin}-schema_registry_url>> config option, but not both.
 
 [id="plugins-{type}s-{plugin}-common-options"]
 include::{include_path}/{type}.asciidoc[]
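
The `<topic name>-value` convention documented above is exactly what the plugin's startup check computes; a tiny sketch of the derivation (topic names here are made up):

    # Each configured topic must have a matching "<topic>-value" subject
    # registered on the Schema Registry before the input will start.
    topics = ['payments', 'temperature_stream']           # assumed topic names
    expected_subjects = topics.map { |t| "#{t}-value" }
    # => ["payments-value", "temperature_stream-value"]
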
data/lib/logstash-integration-kafka_jars.rb CHANGED
@@ -1,8 +1,17 @@
 # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
 
 require 'jar_dependencies'
-require_jar('org.apache.kafka', 'kafka-clients', '2.4.1')
-require_jar('com.github.luben', 'zstd-jni', '1.4.3-1')
-require_jar('org.slf4j', 'slf4j-api', '1.7.28')
-require_jar('org.lz4', 'lz4-java', '1.6.0')
+require_jar('io.confluent', 'kafka-avro-serializer', '5.5.1')
+require_jar('io.confluent', 'kafka-schema-serializer', '5.5.1')
+require_jar('io.confluent', 'common-config', '5.5.1')
+require_jar('org.apache.avro', 'avro', '1.9.2')
+require_jar('io.confluent', 'kafka-schema-registry-client', '5.5.1')
+require_jar('org.apache.kafka', 'kafka_2.12', '2.5.1')
+require_jar('io.confluent', 'common-utils', '5.5.1')
+require_jar('javax.ws.rs', 'javax.ws.rs-api', '2.1.1')
+require_jar('org.glassfish.jersey.core', 'jersey-common', '2.30')
+require_jar('org.apache.kafka', 'kafka-clients', '2.5.1')
+require_jar('com.github.luben', 'zstd-jni', '1.4.4-7')
+require_jar('org.slf4j', 'slf4j-api', '1.7.30')
+require_jar('org.lz4', 'lz4-java', '1.7.1')
 require_jar('org.xerial.snappy', 'snappy-java', '1.1.7.3')
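
Each require_jar call above resolves a Maven coordinate against the vendored tree, which is why the vendor/jar-dependencies entries in this release mirror this list one-to-one. A sketch of the mapping as I understand jar_dependencies (the helper name is hypothetical, not part of this diff):

    # vendored_jar_path('io.confluent', 'kafka-avro-serializer', '5.5.1')
    # => "vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar"
    def vendored_jar_path(group, artifact, version)
      File.join('vendor', 'jar-dependencies', *group.split('.'),
                artifact, version, "#{artifact}-#{version}.jar")
    end
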
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -4,6 +4,10 @@ require 'stud/interval'
 require 'java'
 require 'logstash-integration-kafka_jars.rb'
 require 'logstash/plugin_mixins/kafka_support'
+require "faraday"
+require "json"
+require "logstash/json"
+require_relative '../plugin_mixins/common'
 
 # This input will read events from a Kafka topic. It uses the 0.10 version of
 # the consumer API provided by Kafka to read messages from the broker.
@@ -50,7 +54,10 @@ require 'logstash/plugin_mixins/kafka_support'
 #
 class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
+  DEFAULT_DESERIALIZER_CLASS = "org.apache.kafka.common.serialization.StringDeserializer"
+
   include LogStash::PluginMixins::KafkaSupport
+  include ::LogStash::PluginMixins::KafkaAvroSchemaRegistry
 
   config_name 'kafka'
 
@@ -167,7 +174,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # and a rebalance operation is triggered for the group identified by `group_id`
   config :session_timeout_ms, :validate => :number, :default => 10_000 # (10s) Kafka default
   # Java Class used to deserialize the record's value
-  config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
+  config :value_deserializer_class, :validate => :string, :default => DEFAULT_DESERIALIZER_CLASS
   # A list of topics to subscribe to, defaults to ["logstash"].
   config :topics, :validate => :array, :default => ["logstash"]
   # A topic regex pattern to subscribe to.
@@ -236,11 +243,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # `timestamp`: The timestamp of this message
   config :decorate_events, :validate => :boolean, :default => false
 
-
   public
   def register
     @runner_threads = []
-  end # def register
+    check_schema_registry_parameters
+  end
 
   public
   def run(logstash_queue)
@@ -278,6 +285,13 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
         for record in records do
           codec_instance.decode(record.value.to_s) do |event|
             decorate(event)
+            if schema_registry_url
+              json = LogStash::Json.load(record.value.to_s)
+              json.each do |k, v|
+                event.set(k, v)
+              end
+              event.remove("message")
+            end
             if @decorate_events
               event.set("[@metadata][kafka][topic]", record.topic)
               event.set("[@metadata][kafka][consumer_group]", @group_id)
@@ -337,7 +351,18 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
     props.put(kafka::CLIENT_RACK_CONFIG, client_rack) unless client_rack.nil?
 
     props.put("security.protocol", security_protocol) unless security_protocol.nil?
-
+    if schema_registry_url
+      props.put(kafka::VALUE_DESERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroDeserializer.java_class)
+      serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.to_s)
+      if schema_registry_proxy && !schema_registry_proxy.empty?
+        props.put(serdes_config::PROXY_HOST, @schema_registry_proxy_host)
+        props.put(serdes_config::PROXY_PORT, @schema_registry_proxy_port)
+      end
+      if schema_registry_key && !schema_registry_key.empty?
+        props.put(serdes_config::USER_INFO_CONFIG, schema_registry_key + ":" + schema_registry_secret.value)
+      end
+    end
     if security_protocol == "SSL"
       set_trustore_keystore_config(props)
     elsif security_protocol == "SASL_PLAINTEXT"
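
For reference, the serdes constants used above correspond to Confluent client property names; to the best of my knowledge of the 5.5.x client they resolve roughly as sketched below, so treat the literal keys as assumptions rather than facts established by this diff.

    # Rough shape of the effective consumer properties when schema_registry_url,
    # a proxy, and key/secret are all configured (values are illustrative).
    props = {
      "value.deserializer"   => "io.confluent.kafka.serializers.KafkaAvroDeserializer",
      "schema.registry.url"  => "http://localhost:8081",   # SCHEMA_REGISTRY_URL_CONFIG
      "proxy.host"           => "proxy.example.net",       # PROXY_HOST (assumed key)
      "proxy.port"           => 3128,                      # PROXY_PORT (assumed key)
      "basic.auth.user.info" => "my_key:my_secret"         # USER_INFO_CONFIG (assumed key)
    }
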
data/lib/logstash/plugin_mixins/common.rb ADDED
@@ -0,0 +1,92 @@
+module LogStash
+  module PluginMixins
+    module KafkaAvroSchemaRegistry
+
+      def self.included(base)
+        base.extend(self)
+        base.setup_schema_registry_config
+      end
+
+      def setup_schema_registry_config
+        # Option to set key to access Schema Registry.
+        config :schema_registry_key, :validate => :string
+
+        # Option to set secret to access Schema Registry.
+        config :schema_registry_secret, :validate => :password
+
+        # Option to set the endpoint of the Schema Registry.
+        # This option permits the use of the Avro Kafka deserializer, which retrieves the schema of the Avro message from an
+        # instance of Schema Registry. When this option is set, neither `value_deserializer_class` nor `topics_pattern` may be customized.
+        config :schema_registry_url, :validate => :uri
+
+        # Option to set the proxy of the Schema Registry.
+        # This option permits defining a proxy to be used to reach the Schema Registry service instance.
+        config :schema_registry_proxy, :validate => :uri
+      end
+
+      def check_schema_registry_parameters
+        if @schema_registry_url
+          check_for_schema_registry_conflicts
+          @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
+          check_for_key_and_secret
+          check_for_schema_registry_connectivity_and_subjects
+        end
+      end
+
+      private
+      def check_for_schema_registry_conflicts
+        if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
+          raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
+        end
+        if @topics_pattern && !@topics_pattern.empty?
+          raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
+        end
+      end
+
+      private
+      def check_for_schema_registry_connectivity_and_subjects
+        client = Faraday.new(@schema_registry_url.to_s) do |conn|
+          if schema_registry_proxy && !schema_registry_proxy.empty?
+            conn.proxy = schema_registry_proxy.to_s
+          end
+          if schema_registry_key and !schema_registry_key.empty?
+            conn.basic_auth(schema_registry_key, schema_registry_secret.value)
+          end
+        end
+        begin
+          response = client.get('/subjects')
+        rescue Faraday::Error => e
+          raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
+        end
+        registered_subjects = JSON.parse response.body
+        expected_subjects = @topics.map { |t| "#{t}-value" }
+        if (expected_subjects & registered_subjects).size != expected_subjects.size
+          undefined_topic_subjects = expected_subjects - registered_subjects
+          raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
+        end
+      end
+
+      def split_proxy_into_host_and_port(proxy_uri)
+        return nil unless proxy_uri && !proxy_uri.empty?
+
+        port = proxy_uri.port
+
+        host_spec = ""
+        host_spec << (proxy_uri.scheme || "http")
+        host_spec << "://"
+        host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
+        host_spec << proxy_uri.host
+
+        [host_spec, port]
+      end
+
+      def check_for_key_and_secret
+        if schema_registry_key and !schema_registry_key.empty?
+          if !schema_registry_secret or schema_registry_secret.value.empty?
+            raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
+          end
+        end
+      end
+    end
+  end
+end
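
A quick sketch of what split_proxy_into_host_and_port returns, using Ruby's stdlib URI in place of the validated :uri config value (the proxy address is made up):

    require 'uri'

    proxy = URI.parse("http://user:pass@proxy.example.net:3128")
    port = proxy.port                                  # => 3128
    host_spec = ""
    host_spec << (proxy.scheme || "http")
    host_spec << "://"
    host_spec << "#{proxy.userinfo}@" if proxy.userinfo
    host_spec << proxy.host
    [host_spec, port]  # => ["http://user:pass@proxy.example.net", 3128]
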
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '10.5.3'
+  s.version = '10.6.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/integration/inputs/kafka_spec.rb CHANGED
@@ -2,6 +2,9 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/kafka"
 require "rspec/wait"
+require "stud/try"
+require "faraday"
+require "json"
 
 # Please run kafka_test_setup.sh prior to executing this integration test.
 describe "inputs/kafka", :integration => true do
@@ -120,20 +123,192 @@ describe "inputs/kafka", :integration => true do
       end
     end
   end
+end
+
+private
+
+def consume_messages(config, queue: Queue.new, timeout:, event_count:)
+  kafka_input = LogStash::Inputs::Kafka.new(config)
+  t = Thread.new { kafka_input.run(queue) }
+  begin
+    t.run
+    wait(timeout).for { queue.length }.to eq(event_count) unless timeout.eql?(false)
+    block_given? ? yield(queue, kafka_input) : queue
+  ensure
+    t.kill
+    t.join(30_000)
+  end
+end
+
+
+describe "schema registry connection options" do
+  context "remote endpoint validation" do
+    it "should fail if not reachable" do
+      config = {'schema_registry_url' => 'http://localnothost:8081'}
+      kafka_input = LogStash::Inputs::Kafka.new(config)
+      expect { kafka_input.register }.to raise_error LogStash::ConfigurationError, /Schema registry service doesn't respond.*/
+    end
+
+    it "should fail if any topic is not matched by a subject on the schema registry" do
+      config = {
+        'schema_registry_url' => 'http://localhost:8081',
+        'topics' => ['temperature_stream']
+      }
+
+      kafka_input = LogStash::Inputs::Kafka.new(config)
+      expect { kafka_input.register }.to raise_error LogStash::ConfigurationError, /The schema registry does not contain definitions for required topic subjects: \["temperature_stream-value"\]/
+    end
+
+    context "register with subject present" do
+      SUBJECT_NAME = "temperature_stream-value"
+
+      before(:each) do
+        response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), SUBJECT_NAME)
+        expect( response.status ).to be(200)
+      end
 
-  private
+      after(:each) do
+        schema_registry_client = Faraday.new('http://localhost:8081')
+        delete_remote_schema(schema_registry_client, SUBJECT_NAME)
+      end
 
-  def consume_messages(config, queue: Queue.new, timeout:, event_count:)
-    kafka_input = LogStash::Inputs::Kafka.new(config)
-    t = Thread.new { kafka_input.run(queue) }
-    begin
-      t.run
-      wait(timeout).for { queue.length }.to eq(event_count) unless timeout.eql?(false)
-      block_given? ? yield(queue, kafka_input) : queue
-    ensure
-      t.kill
-      t.join(30_000)
+      it "should correctly complete registration phase" do
+        config = {
+          'schema_registry_url' => 'http://localhost:8081',
+          'topics' => ['temperature_stream']
+        }
+        kafka_input = LogStash::Inputs::Kafka.new(config)
+        kafka_input.register
+      end
     end
   end
+end
 
+def save_avro_schema_to_schema_registry(schema_file, subject_name)
+  raw_schema = File.readlines(schema_file).map(&:chomp).join
+  raw_schema_quoted = raw_schema.gsub('"', '\"')
+  response = Faraday.post("http://localhost:8081/subjects/#{subject_name}/versions",
+                          '{"schema": "' + raw_schema_quoted + '"}',
+                          "Content-Type" => "application/vnd.schemaregistry.v1+json")
+  response
 end
+
+def delete_remote_schema(schema_registry_client, subject_name)
+  expect(schema_registry_client.delete("/subjects/#{subject_name}").status ).to be(200)
+  expect(schema_registry_client.delete("/subjects/#{subject_name}?permanent=true").status ).to be(200)
+end
+
+# AdminClientConfig = org.apache.kafka.clients.admin.AdminClientConfig
+
+describe "Schema registry API", :integration => true do
+
+  let(:schema_registry) { Faraday.new('http://localhost:8081') }
+
+  context 'listing subject on clean instance' do
+    it "should return an empty set" do
+      subjects = JSON.parse schema_registry.get('/subjects').body
+      expect( subjects ).to be_empty
+    end
+  end
+
+  context 'send a schema definition' do
+    it "save the definition" do
+      response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), "schema_test_1")
+      expect( response.status ).to be(200)
+      delete_remote_schema(schema_registry, "schema_test_1")
+    end
+
+    it "delete the schema just added" do
+      response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), "schema_test_1")
+      expect( response.status ).to be(200)
+
+      expect( schema_registry.delete('/subjects/schema_test_1?permanent=false').status ).to be(200)
+      sleep(1)
+      subjects = JSON.parse schema_registry.get('/subjects').body
+      expect( subjects ).to be_empty
+    end
+  end
+
+  context 'use the schema to serialize' do
+    after(:each) do
+      expect( schema_registry.delete('/subjects/topic_avro-value').status ).to be(200)
+      sleep 1
+      expect( schema_registry.delete('/subjects/topic_avro-value?permanent=true').status ).to be(200)
+
+      Stud.try(3.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+        wait(10).for do
+          subjects = JSON.parse schema_registry.get('/subjects').body
+          subjects.empty?
+        end.to be_truthy
+      end
+    end
+
+    let(:group_id_1) {rand(36**8).to_s(36)}
+
+    let(:avro_topic_name) { "topic_avro" }
+
+    let(:plain_config) do
+      { 'schema_registry_url' => 'http://localhost:8081',
+        'topics' => [avro_topic_name],
+        'codec' => 'plain',
+        'group_id' => group_id_1,
+        'auto_offset_reset' => 'earliest' }
+    end
+
+    def delete_topic_if_exists(topic_name)
+      props = java.util.Properties.new
+      props.put(Java::org.apache.kafka.clients.admin.AdminClientConfig::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+
+      admin_client = org.apache.kafka.clients.admin.AdminClient.create(props)
+      topics_list = admin_client.listTopics().names().get()
+      if topics_list.contains(topic_name)
+        result = admin_client.deleteTopics([topic_name])
+        result.values.get(topic_name).get()
+      end
+    end
+
+    def write_some_data_to(topic_name)
+      props = java.util.Properties.new
+      config = org.apache.kafka.clients.producer.ProducerConfig
+
+      serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081")
+
+      props.put(config::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+      props.put(config::KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.java_class)
+      props.put(config::VALUE_SERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroSerializer.java_class)
+
+      parser = org.apache.avro.Schema::Parser.new()
+      user_schema = '''{"type":"record",
+                        "name":"myrecord",
+                        "fields":[
+                          {"name":"str_field", "type": "string"},
+                          {"name":"map_field", "type": {"type": "map", "values": "string"}}
+                        ]}'''
+      schema = parser.parse(user_schema)
+      avro_record = org.apache.avro.generic.GenericData::Record.new(schema)
+      avro_record.put("str_field", "value1")
+      avro_record.put("map_field", {"inner_field" => "inner value"})
+
+      producer = org.apache.kafka.clients.producer.KafkaProducer.new(props)
+      record = org.apache.kafka.clients.producer.ProducerRecord.new(topic_name, "avro_key", avro_record)
+      producer.send(record)
+    end
+
+    it "stored a new schema using Avro Kafka serdes" do
+      delete_topic_if_exists avro_topic_name
+      write_some_data_to avro_topic_name
+
+      subjects = JSON.parse schema_registry.get('/subjects').body
+      expect( subjects ).to contain_exactly("topic_avro-value")
+
+      num_events = 1
+      queue = consume_messages(plain_config, timeout: 30, event_count: num_events)
+      expect(queue.length).to eq(num_events)
+      elem = queue.pop
+      expect( elem.to_hash).not_to include("message")
+      expect( elem.get("str_field") ).to eq("value1")
+      expect( elem.get("map_field")["inner_field"] ).to eq("inner value")
+    end
+  end
+end
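
For readers unfamiliar with the registry's REST surface exercised by these specs, here is a condensed sketch of the calls they lean on (localhost:8081 as in the spec; the subject name is illustrative):

    require 'faraday'
    require 'json'

    registry = Faraday.new('http://localhost:8081')
    JSON.parse(registry.get('/subjects').body)              # list registered subjects
    registry.post('/subjects/demo-value/versions',          # register a schema version
                  '{"schema": "{\"type\": \"string\"}"}',
                  'Content-Type' => 'application/vnd.schemaregistry.v1+json')
    registry.delete('/subjects/demo-value')                 # soft-delete the subject
    registry.delete('/subjects/demo-value?permanent=true')  # then hard-delete it
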
data/spec/unit/inputs/avro_schema_fixture_payment.asvc ADDED
@@ -0,0 +1,8 @@
+{"namespace": "io.confluent.examples.clients.basicavro",
+ "type": "record",
+ "name": "Payment",
+ "fields": [
+   {"name": "id", "type": "string"},
+   {"name": "amount", "type": "double"}
+ ]
+}
data/spec/unit/inputs/kafka_spec.rb CHANGED
@@ -37,6 +37,22 @@ describe LogStash::Inputs::Kafka do
     expect { subject.register }.to_not raise_error
   end
 
+  context "register parameter verification" do
+    let(:config) do
+      { 'schema_registry_url' => 'http://localhost:8081', 'topics' => ['logstash'], 'consumer_threads' => 4 }
+    end
+
+    it "schema_registry_url conflict with value_deserializer_class should fail" do
+      config['value_deserializer_class'] = 'my.fantasy.Deserializer'
+      expect { subject.register }.to raise_error LogStash::ConfigurationError, /Option schema_registry_url prohibit the customization of value_deserializer_class/
+    end
+
+    it "schema_registry_url conflict with topics_pattern should fail" do
+      config['topics_pattern'] = 'topic_.*'
+      expect { subject.register }.to raise_error LogStash::ConfigurationError, /Option schema_registry_url prohibit the customization of topics_pattern/
+    end
+  end
+
   context 'with client_rack' do
     let(:config) { super.merge('client_rack' => 'EU-R1') }
 
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 10.5.3
+  version: 10.6.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-10-21 00:00:00.000000000 Z
+date: 2020-10-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -183,17 +183,28 @@ files:
 - lib/logstash-integration-kafka_jars.rb
 - lib/logstash/inputs/kafka.rb
 - lib/logstash/outputs/kafka.rb
+- lib/logstash/plugin_mixins/common.rb
 - lib/logstash/plugin_mixins/kafka_support.rb
 - logstash-integration-kafka.gemspec
 - spec/fixtures/trust-store_stub.jks
 - spec/integration/inputs/kafka_spec.rb
 - spec/integration/outputs/kafka_spec.rb
+- spec/unit/inputs/avro_schema_fixture_payment.asvc
 - spec/unit/inputs/kafka_spec.rb
 - spec/unit/outputs/kafka_spec.rb
-- vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.3-1/zstd-jni-1.4.3-1.jar
-- vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.4.1/kafka-clients-2.4.1.jar
-- vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar
-- vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar
+- vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar
+- vendor/jar-dependencies/io/confluent/common-config/5.5.1/common-config-5.5.1.jar
+- vendor/jar-dependencies/io/confluent/common-utils/5.5.1/common-utils-5.5.1.jar
+- vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar
+- vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar
+- vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar
+- vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar
+- vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar
+- vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.5.1/kafka-clients-2.5.1.jar
+- vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar
+- vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.30/jersey-common-2.30.jar
+- vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar
+- vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar
 - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
@@ -227,5 +238,6 @@ test_files:
 - spec/fixtures/trust-store_stub.jks
 - spec/integration/inputs/kafka_spec.rb
 - spec/integration/outputs/kafka_spec.rb
+- spec/unit/inputs/avro_schema_fixture_payment.asvc
 - spec/unit/inputs/kafka_spec.rb
 - spec/unit/outputs/kafka_spec.rb