rdkafka 0.14.0 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci.yml +2 -3
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +25 -0
  7. data/README.md +44 -22
  8. data/docker-compose.yml +3 -1
  9. data/ext/Rakefile +43 -26
  10. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  11. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  12. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  13. data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
  14. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  15. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  16. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  17. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  18. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  19. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  20. data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
  21. data/lib/rdkafka/admin.rb +443 -0
  22. data/lib/rdkafka/bindings.rb +125 -2
  23. data/lib/rdkafka/callbacks.rb +196 -1
  24. data/lib/rdkafka/config.rb +24 -3
  25. data/lib/rdkafka/consumer/headers.rb +1 -1
  26. data/lib/rdkafka/consumer/topic_partition_list.rb +8 -7
  27. data/lib/rdkafka/consumer.rb +80 -29
  28. data/lib/rdkafka/producer/delivery_handle.rb +12 -1
  29. data/lib/rdkafka/producer/delivery_report.rb +16 -3
  30. data/lib/rdkafka/producer.rb +42 -12
  31. data/lib/rdkafka/version.rb +3 -3
  32. data/lib/rdkafka.rb +11 -0
  33. data/rdkafka.gemspec +2 -2
  34. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  35. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  36. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  37. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  38. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  39. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  40. data/spec/rdkafka/admin_spec.rb +204 -0
  41. data/spec/rdkafka/config_spec.rb +8 -0
  42. data/spec/rdkafka/consumer_spec.rb +89 -0
  43. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -0
  44. data/spec/rdkafka/producer_spec.rb +26 -2
  45. data/spec/spec_helper.rb +3 -1
  46. data.tar.gz.sig +0 -0
  47. metadata +29 -4
  48. metadata.gz.sig +0 -0
data/lib/rdkafka/producer.rb CHANGED
@@ -23,6 +23,9 @@ module Rdkafka
  attr_reader :delivery_callback_arity
 
  # @private
+ # @param native_kafka [NativeKafka]
+ # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
+ # the "consistent_random" default
  def initialize(native_kafka, partitioner_name)
  @native_kafka = native_kafka
  @partitioner_name = partitioner_name || "consistent_random"
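For context, a producer is normally obtained through Rdkafka::Config#producer, which passes the partitioner name down to this constructor and falls back to "consistent_random" when none is configured. A minimal sketch, assuming a locally reachable broker:

    require "rdkafka"

    # Build a producer via the public Config API; the partitioner name defaults
    # to "consistent_random" unless one is configured.
    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    producer = config.producer

    # ... produce messages ...

    producer.close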
@@ -37,10 +40,16 @@ module Rdkafka
  topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
  end
 
- cache[topic] = [
- monotonic_now,
- topic_metadata ? topic_metadata[:partition_count] : nil
- ]
+ partition_count = topic_metadata ? topic_metadata[:partition_count] : -1
+
+ # This approach caches the failure to fetch only for 1 second. This will make sure, that
+ # we do not cache the failure for too long but also "buys" us a bit of time in case there
+ # would be issues in the cluster so we won't overaload it with consecutive requests
+ cache[topic] = if partition_count.positive?
+ [monotonic_now, partition_count]
+ else
+ [monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
+ end
  end
  end
 
@@ -134,14 +143,15 @@ module Rdkafka
  # Partition count for a given topic.
  #
  # @param topic [String] The topic name.
- # @return [Integer] partition count for a given topic
+ # @return [Integer] partition count for a given topic or `-1` if it could not be obtained.
  #
  # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
  # auto-created after returning nil.
  #
  # @note We cache the partition count for a given topic for given time.
  # This prevents us in case someone uses `partition_key` from querying for the count with
- # each message. Instead we query once every 30 seconds at most
+ # each message. Instead we query once every 30 seconds at most if we have a valid partition
+ # count or every 5 seconds in case we were not able to obtain number of partitions
  def partition_count(topic)
  closed_producer_check(__method__)
 
@@ -164,11 +174,12 @@ module Rdkafka
  # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
  # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
  # @param headers [Hash<String,String>] Optional message headers
- #
- # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
+ # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
  #
  # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
- def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
+ #
+ # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
+ def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
  closed_producer_check(__method__)
 
  # Start by checking and converting the input
@@ -190,7 +201,7 @@ module Rdkafka
  if partition_key
  partition_count = partition_count(topic)
  # If the topic is not present, set to -1
- partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
+ partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
  end
 
  # If partition is nil, use -1 to let librdafka set the partition randomly or
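To make the change above concrete: when producing with a partition_key, the partitioner is only applied if the partition count is known (positive); if partition_count returned -1, librdkafka assigns the partition itself. A small sketch, assuming a reachable broker and an existing topic:

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

    # Messages sharing a partition_key land on the same partition as long as
    # partition_count("events") is positive; otherwise the key-based routing
    # is skipped for this message and librdkafka picks the partition.
    handle = producer.produce(
      topic: "events",
      payload: "body",
      partition_key: "user-42"
    )

    handle.wait(max_wait_timeout: 10)
    producer.close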
@@ -210,6 +221,7 @@ module Rdkafka
  end
 
  delivery_handle = DeliveryHandle.new
+ delivery_handle.label = label
  delivery_handle[:pending] = true
  delivery_handle[:response] = -1
  delivery_handle[:partition] = -1
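The new label: keyword travels with the message: it is assigned to the delivery handle here and, per the documentation above, is also part of the delivery report. A minimal sketch, assuming a reachable broker:

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

    # Attach an arbitrary Ruby object as a label; it is not sent to Kafka,
    # it only tags the in-flight delivery.
    handle = producer.produce(
      topic: "events",
      payload: "body",
      label: { request_id: "abc-123" }
    )

    handle.label          #=> { request_id: "abc-123" }
    report = handle.wait  # the label is carried over onto the delivery report
    report.label          #=> { request_id: "abc-123" }

    producer.close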
@@ -256,13 +268,27 @@ module Rdkafka
  delivery_handle
  end
 
+ # Calls (if registered) the delivery callback
+ #
+ # @param delivery_report [Producer::DeliveryReport]
+ # @param delivery_handle [Producer::DeliveryHandle]
  def call_delivery_callback(delivery_report, delivery_handle)
  return unless @delivery_callback
 
- args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
- @delivery_callback.call(*args)
+ case @delivery_callback_arity
+ when 0
+ @delivery_callback.call
+ when 1
+ @delivery_callback.call(delivery_report)
+ else
+ @delivery_callback.call(delivery_report, delivery_handle)
+ end
  end
 
+ # Figures out the arity of a given block/method
+ #
+ # @param callback [#call, Proc]
+ # @return [Integer] arity of the provided block/method
  def arity(callback)
  return callback.arity if callback.respond_to?(:arity)
 
@@ -271,6 +297,10 @@ module Rdkafka
 
  private
 
+ # Ensures, no operations can happen on a closed producer
+ #
+ # @param method [Symbol] name of the method that invoked producer
+ # @raise [Rdkafka::ClosedProducerError]
  def closed_producer_check(method)
  raise Rdkafka::ClosedProducerError.new(method) if closed?
  end
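The arity-based dispatch shown above means a registered delivery callback can accept zero, one, or two arguments. A sketch, assuming a reachable broker:

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

    # A one-argument callback only receives the delivery report...
    producer.delivery_callback = ->(report) do
      puts "Delivered to partition #{report.partition} at offset #{report.offset}"
    end

    # ...alternatively, a two-argument callable also receives the delivery handle
    # (and, via it, any label attached at produce time).
    producer.delivery_callback = lambda do |report, handle|
      puts "Delivered message labelled #{handle.label.inspect}"
    end

    producer.produce(topic: "events", payload: "body").wait
    producer.close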
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
  module Rdkafka
- VERSION = "0.14.0"
- LIBRDKAFKA_VERSION = "2.2.0"
- LIBRDKAFKA_SOURCE_SHA256 = "af9a820cbecbc64115629471df7c7cecd40403b6c34bfdbb9223152677a47226"
+ VERSION = "0.15.1"
+ LIBRDKAFKA_VERSION = "2.3.0"
+ LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
  end
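These constants can be read at runtime, which makes a simple boot-time sanity check possible, for example to assert that a deployment picked up the expected gem and bundled librdkafka build (a sketch):

    require "rdkafka"

    # Fail fast if an unexpected rdkafka release is loaded.
    unless Gem::Version.new(Rdkafka::VERSION) >= Gem::Version.new("0.15.1")
      raise "unexpected rdkafka version #{Rdkafka::VERSION}"
    end

    puts "rdkafka #{Rdkafka::VERSION} built against librdkafka #{Rdkafka::LIBRDKAFKA_VERSION}"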
data/lib/rdkafka.rb CHANGED
@@ -11,8 +11,19 @@ require "rdkafka/abstract_handle"
  require "rdkafka/admin"
  require "rdkafka/admin/create_topic_handle"
  require "rdkafka/admin/create_topic_report"
+ require "rdkafka/admin/delete_groups_handle"
+ require "rdkafka/admin/delete_groups_report"
  require "rdkafka/admin/delete_topic_handle"
  require "rdkafka/admin/delete_topic_report"
+ require "rdkafka/admin/create_partitions_handle"
+ require "rdkafka/admin/create_partitions_report"
+ require "rdkafka/admin/create_acl_handle"
+ require "rdkafka/admin/create_acl_report"
+ require "rdkafka/admin/delete_acl_handle"
+ require "rdkafka/admin/delete_acl_report"
+ require "rdkafka/admin/describe_acl_handle"
+ require "rdkafka/admin/describe_acl_report"
+ require "rdkafka/admin/acl_binding_result"
  require "rdkafka/bindings"
  require "rdkafka/callbacks"
  require "rdkafka/config"
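The newly required files back the ACL portion of the admin API (see data/lib/rdkafka/admin.rb in the file list). As a rough sketch of how these pieces fit together, assuming Admin#create_acl takes the resource/principal/operation attributes as keyword arguments mirroring the binding constants used in the specs further below (the exact keyword names should be checked against data/lib/rdkafka/admin.rb in this release):

    admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

    # Hypothetical invocation; keyword names assumed from the spec constants.
    handle = admin.create_acl(
      resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
      resource_name: "acl-test-topic",
      resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
      principal: "User:anonymous",
      host: "*",
      operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
      permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
    )

    report = handle.wait(max_wait_timeout: 15)
    puts report.rdkafka_response_string

    admin.close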
data/rdkafka.gemspec CHANGED
@@ -3,10 +3,10 @@
  require File.expand_path('lib/rdkafka/version', __dir__)
 
  Gem::Specification.new do |gem|
- gem.authors = ['Thijs Cadier']
+ gem.authors = ['Thijs Cadier', 'Maciej Mensfeld']
  gem.email = ["contact@karafka.io"]
  gem.description = "Modern Kafka client library for Ruby based on librdkafka"
- gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.4+."
+ gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.7+."
  gem.license = 'MIT'
 
  gem.files = `git ls-files`.split($\)
data/spec/rdkafka/admin/create_acl_handle_spec.rb ADDED
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::CreateAclHandle do
+ # If create acl was successful there is no error object
+ # the error code is set to RD_KAFKA_RESP_ERR_NO_ERRORa
+ # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L169
+ let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+
+ subject do
+ Rdkafka::Admin::CreateAclHandle.new.tap do |handle|
+ handle[:pending] = pending_handle
+ handle[:response] = response
+ # If create acl was successful there is no error object and the error_string is set to ""
+ # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L178
+ handle[:response_string] = FFI::MemoryPointer.from_string("")
+ end
+ end
+
+ describe "#wait" do
+ let(:pending_handle) { true }
+
+ it "should wait until the timeout and then raise an error" do
+ expect {
+ subject.wait(max_wait_timeout: 0.1)
+ }.to raise_error Rdkafka::Admin::CreateAclHandle::WaitTimeoutError, /create acl/
+ end
+
+ context "when not pending anymore and no error" do
+ let(:pending_handle) { false }
+
+ it "should return a create acl report" do
+ report = subject.wait
+
+ expect(report.rdkafka_response_string).to eq("")
+ end
+
+ it "should wait without a timeout" do
+ report = subject.wait(max_wait_timeout: nil)
+
+ expect(report.rdkafka_response_string).to eq("")
+ end
+ end
+ end
+
+ describe "#raise_error" do
+ let(:pending_handle) { false }
+
+ it "should raise the appropriate error" do
+ expect {
+ subject.raise_error
+ }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
+ end
+ end
+ end
data/spec/rdkafka/admin/create_acl_report_spec.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::CreateAclReport do
+ subject { Rdkafka::Admin::CreateAclReport.new(
+ rdkafka_response: Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR,
+ rdkafka_response_string: FFI::MemoryPointer.from_string("")
+ )}
+
+ it "should get RD_KAFKA_RESP_ERR_NO_ERROR " do
+ expect(subject.rdkafka_response).to eq(0)
+ end
+
+ it "should get empty string" do
+ expect(subject.rdkafka_response_string).to eq("")
+ end
+ end
data/spec/rdkafka/admin/delete_acl_handle_spec.rb ADDED
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::DeleteAclHandle do
+ let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+ let(:resource_name) {"acl-test-topic"}
+ let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+ let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+ let(:principal) {"User:anonymous"}
+ let(:host) {"*"}
+ let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+ let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+ let(:delete_acl_ptr) {FFI::Pointer::NULL}
+
+ subject do
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+ delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+ resource_type,
+ FFI::MemoryPointer.from_string(resource_name),
+ resource_pattern_type,
+ FFI::MemoryPointer.from_string(principal),
+ FFI::MemoryPointer.from_string(host),
+ operation,
+ permission_type,
+ error_buffer,
+ 256
+ )
+ if delete_acl_ptr.null?
+ raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+ end
+ pointer_array = [delete_acl_ptr]
+ delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+ delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+ Rdkafka::Admin::DeleteAclHandle.new.tap do |handle|
+ handle[:pending] = pending_handle
+ handle[:response] = response
+ handle[:response_string] = FFI::MemoryPointer.from_string("")
+ handle[:matching_acls] = delete_acls_array_ptr
+ handle[:matching_acls_count] = 1
+ end
+ end
+
+ after do
+ if delete_acl_ptr != FFI::Pointer::NULL
+ Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+ end
+ end
+
+ describe "#wait" do
+ let(:pending_handle) { true }
+
+ it "should wait until the timeout and then raise an error" do
+ expect {
+ subject.wait(max_wait_timeout: 0.1)
+ }.to raise_error Rdkafka::Admin::DeleteAclHandle::WaitTimeoutError, /delete acl/
+ end
+
+ context "when not pending anymore and no error" do
+ let(:pending_handle) { false }
+
+ it "should return a delete acl report" do
+ report = subject.wait
+
+ expect(report.deleted_acls.length).to eq(1)
+ end
+
+ it "should wait without a timeout" do
+ report = subject.wait(max_wait_timeout: nil)
+
+ expect(report.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+ end
+ end
+ end
+
+ describe "#raise_error" do
+ let(:pending_handle) { false }
+
+ it "should raise the appropriate error" do
+ expect {
+ subject.raise_error
+ }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
+ end
+ end
+ end
data/spec/rdkafka/admin/delete_acl_report_spec.rb ADDED
@@ -0,0 +1,72 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::DeleteAclReport do
+
+ let(:resource_name) {"acl-test-topic"}
+ let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+ let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+ let(:principal) {"User:anonymous"}
+ let(:host) {"*"}
+ let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+ let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+ let(:delete_acl_ptr) {FFI::Pointer::NULL}
+
+ subject do
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+ delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+ resource_type,
+ FFI::MemoryPointer.from_string(resource_name),
+ resource_pattern_type,
+ FFI::MemoryPointer.from_string(principal),
+ FFI::MemoryPointer.from_string(host),
+ operation,
+ permission_type,
+ error_buffer,
+ 256
+ )
+ if delete_acl_ptr.null?
+ raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+ end
+ pointer_array = [delete_acl_ptr]
+ delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+ delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+ Rdkafka::Admin::DeleteAclReport.new(matching_acls: delete_acls_array_ptr, matching_acls_count: 1)
+ end
+
+ after do
+ if delete_acl_ptr != FFI::Pointer::NULL
+ Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+ end
+ end
+
+ it "should get deleted acl resource type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC" do
+ expect(subject.deleted_acls[0].matching_acl_resource_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC)
+ end
+
+ it "should get deleted acl resource name as acl-test-topic" do
+ expect(subject.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+ end
+
+ it "should get deleted acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+ expect(subject.deleted_acls[0].matching_acl_resource_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+ expect(subject.deleted_acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+ end
+
+ it "should get deleted acl principal as User:anonymous" do
+ expect(subject.deleted_acls[0].matching_acl_principal).to eq("User:anonymous")
+ end
+
+ it "should get deleted acl host as * " do
+ expect(subject.deleted_acls[0].matching_acl_host).to eq("*")
+ end
+
+ it "should get deleted acl operation as Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ" do
+ expect(subject.deleted_acls[0].matching_acl_operation).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ)
+ end
+
+ it "should get deleted acl permission_type as Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW" do
+ expect(subject.deleted_acls[0].matching_acl_permission_type).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW)
+ end
+ end
data/spec/rdkafka/admin/describe_acl_handle_spec.rb ADDED
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::DescribeAclHandle do
+ let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+ let(:resource_name) {"acl-test-topic"}
+ let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+ let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+ let(:principal) {"User:anonymous"}
+ let(:host) {"*"}
+ let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+ let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+ let(:describe_acl_ptr) {FFI::Pointer::NULL}
+
+ subject do
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+ describe_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+ resource_type,
+ FFI::MemoryPointer.from_string(resource_name),
+ resource_pattern_type,
+ FFI::MemoryPointer.from_string(principal),
+ FFI::MemoryPointer.from_string(host),
+ operation,
+ permission_type,
+ error_buffer,
+ 256
+ )
+ if describe_acl_ptr.null?
+ raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+ end
+ pointer_array = [describe_acl_ptr]
+ describe_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+ describe_acls_array_ptr.write_array_of_pointer(pointer_array)
+ Rdkafka::Admin::DescribeAclHandle.new.tap do |handle|
+ handle[:pending] = pending_handle
+ handle[:response] = response
+ handle[:response_string] = FFI::MemoryPointer.from_string("")
+ handle[:acls] = describe_acls_array_ptr
+ handle[:acls_count] = 1
+ end
+ end
+
+ after do
+ if describe_acl_ptr != FFI::Pointer::NULL
+ Rdkafka::Bindings.rd_kafka_AclBinding_destroy(describe_acl_ptr)
+ end
+ end
+
+ describe "#wait" do
+ let(:pending_handle) { true }
+
+ it "should wait until the timeout and then raise an error" do
+ expect {
+ subject.wait(max_wait_timeout: 0.1)
+ }.to raise_error Rdkafka::Admin::DescribeAclHandle::WaitTimeoutError, /describe acl/
+ end
+
+ context "when not pending anymore and no error" do
+ let(:pending_handle) { false }
+
+ it "should return a describe acl report" do
+ report = subject.wait
+
+ expect(report.acls.length).to eq(1)
+ end
+
+ it "should wait without a timeout" do
+ report = subject.wait(max_wait_timeout: nil)
+
+ expect(report.acls[0].matching_acl_resource_name).to eq("acl-test-topic")
+ end
+ end
+ end
+
+ describe "#raise_error" do
+ let(:pending_handle) { false }
+
+ it "should raise the appropriate error" do
+ expect {
+ subject.raise_error
+ }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
+ end
+ end
+ end
data/spec/rdkafka/admin/describe_acl_report_spec.rb ADDED
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ require "spec_helper"
+
+ describe Rdkafka::Admin::DescribeAclReport do
+
+ let(:resource_name) {"acl-test-topic"}
+ let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+ let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+ let(:principal) {"User:anonymous"}
+ let(:host) {"*"}
+ let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+ let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+ let(:describe_acl_ptr) {FFI::Pointer::NULL}
+
+ subject do
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+ describe_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+ resource_type,
+ FFI::MemoryPointer.from_string(resource_name),
+ resource_pattern_type,
+ FFI::MemoryPointer.from_string(principal),
+ FFI::MemoryPointer.from_string(host),
+ operation,
+ permission_type,
+ error_buffer,
+ 256
+ )
+ if describe_acl_ptr.null?
+ raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+ end
+ pointer_array = [describe_acl_ptr]
+ describe_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+ describe_acls_array_ptr.write_array_of_pointer(pointer_array)
+ Rdkafka::Admin::DescribeAclReport.new(acls: describe_acls_array_ptr, acls_count: 1)
+ end
+
+ after do
+ if describe_acl_ptr != FFI::Pointer::NULL
+ Rdkafka::Bindings.rd_kafka_AclBinding_destroy(describe_acl_ptr)
+ end
+ end
+
+ it "should get matching acl resource type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC" do
+ expect(subject.acls[0].matching_acl_resource_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC)
+ end
+
+ it "should get matching acl resource name as acl-test-topic" do
+ expect(subject.acls[0].matching_acl_resource_name).to eq(resource_name)
+ end
+
+ it "should get matching acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+ expect(subject.acls[0].matching_acl_resource_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+ expect(subject.acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+ end
+
+ it "should get matching acl principal as User:anonymous" do
+ expect(subject.acls[0].matching_acl_principal).to eq("User:anonymous")
+ end
+
+ it "should get matching acl host as * " do
+ expect(subject.acls[0].matching_acl_host).to eq("*")
+ end
+
+ it "should get matching acl operation as Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ" do
+ expect(subject.acls[0].matching_acl_operation).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ)
+ end
+
+ it "should get matching acl permission_type as Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW" do
+ expect(subject.acls[0].matching_acl_permission_type).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW)
+ end
+ end
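Taken together, the specs above also document the shape of the ACL reports: DescribeAclReport#acls and DeleteAclReport#deleted_acls return AclBindingResult objects exposing the matching_acl_* accessors. A rough end-to-end sketch, assuming Admin#describe_acl and Admin#delete_acl accept the same keyword-style filter attributes used above (check data/lib/rdkafka/admin.rb for the exact signatures):

    admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

    # Hypothetical filter arguments mirroring the binding constants from the specs.
    filter = {
      resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
      resource_name: "acl-test-topic",
      resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
      principal: "User:anonymous",
      host: "*",
      operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
      permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
    }

    describe_report = admin.describe_acl(**filter).wait(max_wait_timeout: 15)
    describe_report.acls.each do |acl|
      puts "#{acl.matching_acl_principal} on #{acl.matching_acl_resource_name} (#{acl.matching_acl_host})"
    end

    delete_report = admin.delete_acl(**filter).wait(max_wait_timeout: 15)
    puts "deleted #{delete_report.deleted_acls.length} ACL binding(s)"

    admin.close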