karafka-rdkafka 0.14.0 → 0.14.1

Files changed (38)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/CHANGELOG.md +104 -96
  5. data/README.md +32 -22
  6. data/docker-compose.yml +2 -0
  7. data/lib/rdkafka/admin/acl_binding_result.rb +37 -0
  8. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  9. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  10. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  11. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  12. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  13. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  14. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  15. data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
  16. data/lib/rdkafka/admin.rb +377 -0
  17. data/lib/rdkafka/bindings.rb +108 -0
  18. data/lib/rdkafka/callbacks.rb +167 -0
  19. data/lib/rdkafka/consumer/headers.rb +1 -1
  20. data/lib/rdkafka/consumer/topic_partition_list.rb +8 -7
  21. data/lib/rdkafka/consumer.rb +18 -18
  22. data/lib/rdkafka/error.rb +13 -2
  23. data/lib/rdkafka/producer.rb +2 -2
  24. data/lib/rdkafka/version.rb +1 -1
  25. data/lib/rdkafka.rb +9 -0
  26. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  27. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  28. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  29. data/spec/rdkafka/admin/delete_acl_report_spec.rb +71 -0
  30. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  31. data/spec/rdkafka/admin/describe_acl_report_spec.rb +72 -0
  32. data/spec/rdkafka/admin_spec.rb +204 -0
  33. data/spec/rdkafka/consumer_spec.rb +9 -0
  34. data/spec/rdkafka/producer_spec.rb +0 -35
  35. data/spec/spec_helper.rb +1 -1
  36. data.tar.gz.sig +0 -0
  37. metadata +24 -2
  38. metadata.gz.sig +0 -0
data/lib/rdkafka/callbacks.rb CHANGED
@@ -23,6 +23,96 @@ module Rdkafka
      end
    end
 
+    class GroupResult
+      attr_reader :result_error, :error_string, :result_name
+      def initialize(group_result_pointer)
+        native_error = Rdkafka::Bindings.rd_kafka_group_result_error(group_result_pointer)
+
+        if native_error.null?
+          @result_error = 0
+          @error_string = FFI::Pointer::NULL
+        else
+          @result_error = native_error[:code]
+          @error_string = native_error[:errstr]
+        end
+
+        @result_name = Rdkafka::Bindings.rd_kafka_group_result_name(group_result_pointer)
+      end
+      def self.create_group_results_from_array(count, array_pointer)
+        (1..count).map do |index|
+          result_pointer = (array_pointer + (index - 1)).read_pointer
+          new(result_pointer)
+        end
+      end
+    end
+
+    # Extracts attributes of rd_kafka_acl_result_t
+    #
+    # @private
+    class CreateAclResult
+      attr_reader :result_error, :error_string
+
+      def initialize(acl_result_pointer)
+        rd_kafka_error_pointer = Bindings.rd_kafka_acl_result_error(acl_result_pointer)
+        @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
+        @error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
+      end
+
+      def self.create_acl_results_from_array(count, array_pointer)
+        (1..count).map do |index|
+          result_pointer = (array_pointer + (index - 1)).read_pointer
+          new(result_pointer)
+        end
+      end
+    end
+
+    # Extracts attributes of rd_kafka_DeleteAcls_result_response_t
+    #
+    # @private
+    class DeleteAclResult
+      attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
+
+      def initialize(acl_result_pointer)
+        @matching_acls=[]
+        rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_response_error(acl_result_pointer)
+        @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
+        @error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
+        if @result_error == 0
+          # Get the number of matching acls
+          pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+          @matching_acls = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_response_matching_acls(acl_result_pointer, pointer_to_size_t)
+          @matching_acls_count = pointer_to_size_t.read_int
+        end
+      end
+
+      def self.delete_acl_results_from_array(count, array_pointer)
+        (1..count).map do |index|
+          result_pointer = (array_pointer + (index - 1)).read_pointer
+          new(result_pointer)
+        end
+      end
+    end
+
+    # Extracts attributes of rd_kafka_DeleteAcls_result_response_t
+    #
+    # @private
+    class DescribeAclResult
+      attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
+
+      def initialize(event_ptr)
+        @matching_acls=[]
+        @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
+        @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
+        if @result_error == 0
+          acl_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeAcls_result(event_ptr)
+          # Get the number of matching acls
+          pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+          @matching_acls = Rdkafka::Bindings.rd_kafka_DescribeAcls_result_acls(acl_describe_result, pointer_to_size_t)
+          @matching_acls_count = pointer_to_size_t.read_int
+        end
+      end
+    end
+
    # FFI Function used for Create Topic and Delete Topic callbacks
    BackgroundEventCallbackFunction = FFI::Function.new(
      :void, [:pointer, :pointer, :pointer]
@@ -40,6 +130,14 @@ module Rdkafka
          process_delete_topic(event_ptr)
        elsif event_type == Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
          process_create_partitions(event_ptr)
+        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
+          process_create_acl(event_ptr)
+        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
+          process_delete_acl(event_ptr)
+        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
+          process_describe_acl(event_ptr)
+        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
+          process_delete_groups(event_ptr)
        end
      end
 
@@ -62,6 +160,23 @@ module Rdkafka
        end
      end
 
+      def self.process_delete_groups(event_ptr)
+        delete_groups_result = Rdkafka::Bindings.rd_kafka_event_DeleteGroups_result(event_ptr)
+
+        # Get the number of delete group results
+        pointer_to_size_t = FFI::MemoryPointer.new(:size_t)
+        delete_group_result_array = Rdkafka::Bindings.rd_kafka_DeleteGroups_result_groups(delete_groups_result, pointer_to_size_t)
+        delete_group_results = GroupResult.create_group_results_from_array(pointer_to_size_t.read_int, delete_group_result_array) # TODO fix this
+        delete_group_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if (delete_group_handle = Rdkafka::Admin::DeleteGroupsHandle.remove(delete_group_handle_ptr.address))
+          delete_group_handle[:response] = delete_group_results[0].result_error
+          delete_group_handle[:error_string] = delete_group_results[0].error_string
+          delete_group_handle[:result_name] = delete_group_results[0].result_name
+          delete_group_handle[:pending] = false
+        end
+      end
+
      def self.process_delete_topic(event_ptr)
        delete_topics_result = Rdkafka::Bindings.rd_kafka_event_DeleteTopics_result(event_ptr)
 
@@ -95,6 +210,57 @@ module Rdkafka
          create_partitions_handle[:pending] = false
        end
      end
+
+      def self.process_create_acl(event_ptr)
+        create_acls_result = Rdkafka::Bindings.rd_kafka_event_CreateAcls_result(event_ptr)
+
+        # Get the number of acl results
+        pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+        create_acl_result_array = Rdkafka::Bindings.rd_kafka_CreateAcls_result_acls(create_acls_result, pointer_to_size_t)
+        create_acl_results = CreateAclResult.create_acl_results_from_array(pointer_to_size_t.read_int, create_acl_result_array)
+        create_acl_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if create_acl_handle = Rdkafka::Admin::CreateAclHandle.remove(create_acl_handle_ptr.address)
+          create_acl_handle[:response] = create_acl_results[0].result_error
+          create_acl_handle[:response_string] = create_acl_results[0].error_string
+          create_acl_handle[:pending] = false
+        end
+      end
+
+      def self.process_delete_acl(event_ptr)
+        delete_acls_result = Rdkafka::Bindings.rd_kafka_event_DeleteAcls_result(event_ptr)
+
+        # Get the number of acl results
+        pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+        delete_acl_result_responses = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_responses(delete_acls_result, pointer_to_size_t)
+        delete_acl_results = DeleteAclResult.delete_acl_results_from_array(pointer_to_size_t.read_int, delete_acl_result_responses)
+        delete_acl_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if delete_acl_handle = Rdkafka::Admin::DeleteAclHandle.remove(delete_acl_handle_ptr.address)
+          delete_acl_handle[:response] = delete_acl_results[0].result_error
+          delete_acl_handle[:response_string] = delete_acl_results[0].error_string
+          delete_acl_handle[:pending] = false
+          if delete_acl_results[0].result_error == 0
+            delete_acl_handle[:matching_acls] = delete_acl_results[0].matching_acls
+            delete_acl_handle[:matching_acls_count] = delete_acl_results[0].matching_acls_count
+          end
+        end
+      end
+
+      def self.process_describe_acl(event_ptr)
+        describe_acl = DescribeAclResult.new(event_ptr)
+        describe_acl_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if describe_acl_handle = Rdkafka::Admin::DescribeAclHandle.remove(describe_acl_handle_ptr.address)
+          describe_acl_handle[:response] = describe_acl.result_error
+          describe_acl_handle[:response_string] = describe_acl.error_string
+          describe_acl_handle[:pending] = false
+          if describe_acl.result_error == 0
+            describe_acl_handle[:acls] = describe_acl.matching_acls
+            describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
+          end
+        end
+      end
    end
 
    # FFI Function used for Message Delivery callbacks
@@ -127,5 +293,6 @@ module Rdkafka
        end
      end
    end
+
  end
end
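The callback handlers above run on librdkafka's background event thread and only populate a waiting handle; they are driven by the ACL methods added to `Rdkafka::Admin` in this release (see `data/lib/rdkafka/admin.rb` in the file list). The sketch below is illustrative only: the broker address is a placeholder and the keyword names of `#create_acl` are assumptions inferred from the handle and spec code in this diff, not the definitive API.

```ruby
require "rdkafka"

# Hypothetical sketch: build an admin client and request an ACL. Keyword names
# mirror the AclBinding fields used in the specs below and may differ from the
# actual admin.rb signature.
admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

handle = admin.create_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         "acl-test-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             "User:anonymous",
  host:                  "*",
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
)

# process_create_acl (above) fills the handle once the broker answers;
# #wait blocks until then and returns a report or raises an RdkafkaError.
report = handle.wait(max_wait_timeout: 15.0)
puts report.rdkafka_response_string

admin.close
```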
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -20,7 +20,7 @@ module Rdkafka
      #
      # @private
      #
-      # @param [librdkakfa message] native_message
+      # @param [Rdkafka::Bindings::Message] native_message
      # @return [Hash<String, String>] headers Hash for the native_message
      # @raise [Rdkafka::RdkafkaError] when fail to read headers
      def self.from_native(native_message)
data/lib/rdkafka/consumer/topic_partition_list.rb CHANGED
@@ -36,6 +36,11 @@ module Rdkafka
    # Add a topic with optionally partitions to the list.
    # Calling this method multiple times for the same topic will overwrite the previous configuration.
    #
+    # @param topic [String] The topic's name
+    # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
+    #
+    # @return [nil]
+    #
    # @example Add a topic with unassigned partitions
    #   tpl.add_topic("topic")
    #
@@ -45,10 +50,6 @@ module Rdkafka
    # @example Add a topic with all topics up to a count
    #   tpl.add_topic("topic", 9)
    #
-    # @param topic [String] The topic's name
-    # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
-    #
-    # @return [nil]
    def add_topic(topic, partitions=nil)
      if partitions.nil?
        @data[topic.to_s] = nil
@@ -90,11 +91,11 @@ module Rdkafka
 
    # Create a new topic partition list based of a native one.
    #
+    # @private
+    #
    # @param pointer [FFI::Pointer] Optional pointer to an existing native list. Its contents will be copied.
    #
    # @return [TopicPartitionList]
-    #
-    # @private
    def self.from_native_tpl(pointer)
      # Data to be moved into the tpl
      data = {}
@@ -127,8 +128,8 @@ module Rdkafka
    #
    # The pointer will be cleaned by `rd_kafka_topic_partition_list_destroy` when GC releases it.
    #
-    # @return [FFI::Pointer]
    # @private
+    # @return [FFI::Pointer]
    def to_native_tpl
      tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(count)
 
data/lib/rdkafka/consumer.rb CHANGED
@@ -272,9 +272,9 @@ module Rdkafka
    #
    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
    #
-    # @raise [RdkafkaError] When getting the positions fails.
-    #
    # @return [TopicPartitionList]
+    #
+    # @raise [RdkafkaError] When getting the positions fails.
    def position(list=nil)
      if list.nil?
        list = assignment
@@ -449,9 +449,9 @@ module Rdkafka
    #
    # @param list [TopicPartitionList] The TopicPartitionList with timestamps instead of offsets
    #
-    # @raise [RdKafkaError] When the OffsetForTimes lookup fails
-    #
    # @return [TopicPartitionList]
+    #
+    # @raise [RdKafkaError] When the OffsetForTimes lookup fails
    def offsets_for_times(list, timeout_ms = 1000)
      closed_consumer_check(__method__)
 
@@ -522,18 +522,17 @@ module Rdkafka
      message_ptr = @native_kafka.with_inner do |inner|
        Rdkafka::Bindings.rd_kafka_consumer_poll(inner, timeout_ms)
      end
-      if message_ptr.null?
-        nil
-      else
-        # Create struct wrapper
-        native_message = Rdkafka::Bindings::Message.new(message_ptr)
-        # Raise error if needed
 
-        Rdkafka::RdkafkaError.validate!(native_message[:err])
+      return nil if message_ptr.null?
 
-        # Create a message to pass out
-        Rdkafka::Consumer::Message.new(native_message)
-      end
+      # Create struct wrapper
+      native_message = Rdkafka::Bindings::Message.new(message_ptr)
+
+      # Create a message to pass out
+      return Rdkafka::Consumer::Message.new(native_message) if native_message[:err].zero?
+
+      # Raise error if needed
+      Rdkafka::RdkafkaError.validate!(native_message)
    ensure
      # Clean up rdkafka message if there is one
      if message_ptr && !message_ptr.null?
@@ -573,9 +572,9 @@ module Rdkafka
    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
    # reached, so you probably want to disable that when using this method of iteration.
    #
-    # @raise [RdkafkaError] When polling fails
    # @yieldparam message [Message] Received message
    # @return [nil]
+    # @raise [RdkafkaError] When polling fails
    def each
      loop do
        message = poll(250)
@@ -630,14 +629,15 @@ module Rdkafka
    # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
    # @param timeout_ms [Integer] max time to wait for up to max_items
    #
-    # @raise [RdkafkaError] When polling fails
-    #
-    # @yield [messages, pending_exception]
    # @yieldparam messages [Array] An array of received Message
    # @yieldparam pending_exception [Exception] normally nil, or an exception
+    #
+    # @yield [messages, pending_exception]
    #   which will be propagated after processing of the partial batch is complete.
    #
    # @return [nil]
+    #
+    # @raise [RdkafkaError] When polling fails
    def each_batch(max_items: 100, bytes_threshold: Float::INFINITY, timeout_ms: 250, yield_on_error: false, &block)
      closed_consumer_check(__method__)
      slice = []
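The poll refactor above keeps the public contract of `#poll`: nil when nothing arrived within the timeout, a `Rdkafka::Consumer::Message` on success, and an `RdkafkaError` raised for any other native error code. A small consumption sketch follows; broker address, group id, and topic name are placeholders, not values from this release.

```ruby
require "rdkafka"

# Placeholder configuration; adjust bootstrap servers, group.id and topic.
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id"          => "example-group"
)

consumer = config.consumer
consumer.subscribe("example-topic")

loop do
  message = consumer.poll(250) # nil when no message arrived within 250 ms
  next unless message

  puts "#{message.topic}/#{message.partition}@#{message.offset}: #{message.payload}"
end
```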
data/lib/rdkafka/error.rb CHANGED
@@ -42,8 +42,19 @@ module Rdkafka
    end
 
    def build(response_ptr_or_code, message_prefix = nil, broker_message: nil)
-      if response_ptr_or_code.is_a?(Integer)
-        response_ptr_or_code.zero? ? false : new(response_ptr_or_code, message_prefix, broker_message: broker_message)
+      case response_ptr_or_code
+      when Integer
+        return false if response_ptr_or_code.zero?
+
+        new(response_ptr_or_code, message_prefix, broker_message: broker_message)
+      when Bindings::Message
+        return false if response_ptr_or_code[:err].zero?
+
+        unless response_ptr_or_code[:payload].null?
+          message_prefix ||= response_ptr_or_code[:payload].read_string(response_ptr_or_code[:len])
+        end
+
+        new(response_ptr_or_code[:err], message_prefix, broker_message: broker_message)
      else
        build_from_c(response_ptr_or_code, message_prefix)
      end
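The new `case` branches mean `RdkafkaError.build` now accepts either a plain integer code or a `Rdkafka::Bindings::Message` struct, reading the payload into the message prefix when one is present. A minimal sketch of both paths, assuming the gem is loaded; the integer code and the zeroed struct are illustrative values only.

```ruby
require "rdkafka"

# Integer path: zero short-circuits to false, any other code builds an error object.
Rdkafka::RdkafkaError.build(0)        #=> false
Rdkafka::RdkafkaError.build(1).class  #=> Rdkafka::RdkafkaError

# Native message path: the struct's :err field drives the same decision; a freshly
# allocated struct has :err == 0 and a NULL :payload, so build returns false here too.
native_message = Rdkafka::Bindings::Message.new
Rdkafka::RdkafkaError.build(native_message) #=> false
```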
data/lib/rdkafka/producer.rb CHANGED
@@ -200,9 +200,9 @@ module Rdkafka
    # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
    # @param headers [Hash<String,String>] Optional message headers
    #
-    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
-    #
    # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
+    #
+    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
      closed_producer_check(__method__)
 
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
# frozen_string_literal: true
 
module Rdkafka
-  VERSION = "0.14.0"
+  VERSION = "0.14.1"
  LIBRDKAFKA_VERSION = "2.3.0"
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
end
data/lib/rdkafka.rb CHANGED
@@ -11,10 +11,19 @@ require "rdkafka/abstract_handle"
require "rdkafka/admin"
require "rdkafka/admin/create_topic_handle"
require "rdkafka/admin/create_topic_report"
+require "rdkafka/admin/delete_groups_handle"
+require "rdkafka/admin/delete_groups_report"
require "rdkafka/admin/delete_topic_handle"
require "rdkafka/admin/delete_topic_report"
require "rdkafka/admin/create_partitions_handle"
require "rdkafka/admin/create_partitions_report"
+require "rdkafka/admin/create_acl_handle"
+require "rdkafka/admin/create_acl_report"
+require "rdkafka/admin/delete_acl_handle"
+require "rdkafka/admin/delete_acl_report"
+require "rdkafka/admin/describe_acl_handle"
+require "rdkafka/admin/describe_acl_report"
+require "rdkafka/admin/acl_binding_result"
require "rdkafka/bindings"
require "rdkafka/callbacks"
require "rdkafka/config"
data/spec/rdkafka/admin/create_acl_handle_spec.rb ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::CreateAclHandle do
+  # If create acl was successful there is no error object
+  # and the error code is set to RD_KAFKA_RESP_ERR_NO_ERROR
+  # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L169
+  let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+
+  subject do
+    Rdkafka::Admin::CreateAclHandle.new.tap do |handle|
+      handle[:pending] = pending_handle
+      handle[:response] = response
+      # If create acl was successful there is no error object and the error_string is set to ""
+      # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L178
+      handle[:response_string] = FFI::MemoryPointer.from_string("")
+    end
+  end
+
+  describe "#wait" do
+    let(:pending_handle) { true }
+
+    it "should wait until the timeout and then raise an error" do
+      expect {
+        subject.wait(max_wait_timeout: 0.1)
+      }.to raise_error Rdkafka::Admin::CreateAclHandle::WaitTimeoutError, /create acl/
+    end
+
+    context "when not pending anymore and no error" do
+      let(:pending_handle) { false }
+
+      it "should return a create acl report" do
+        report = subject.wait
+
+        expect(report.rdkafka_response_string).to eq("")
+      end
+
+      it "should wait without a timeout" do
+        report = subject.wait(max_wait_timeout: nil)
+
+        expect(report.rdkafka_response_string).to eq("")
+      end
+    end
+  end
+
+  describe "#raise_error" do
+    let(:pending_handle) { false }
+
+    it "should raise the appropriate error" do
+      expect {
+        subject.raise_error
+      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
+    end
+  end
+end
data/spec/rdkafka/admin/create_acl_report_spec.rb ADDED
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::CreateAclReport do
+  subject { Rdkafka::Admin::CreateAclReport.new(
+    rdkafka_response: Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR,
+    rdkafka_response_string: FFI::MemoryPointer.from_string("")
+  )}
+
+  it "should get RD_KAFKA_RESP_ERR_NO_ERROR" do
+    expect(subject.rdkafka_response).to eq(0)
+  end
+
+  it "should get empty string" do
+    expect(subject.rdkafka_response_string).to eq("")
+  end
+end
data/spec/rdkafka/admin/delete_acl_handle_spec.rb ADDED
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::DeleteAclHandle do
+  let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+  let(:resource_name) {"acl-test-topic"}
+  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+  let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+  let(:principal) {"User:anonymous"}
+  let(:host) {"*"}
+  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+  let(:delete_acl_ptr) {FFI::Pointer::NULL}
+
+  subject do
+    error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+    delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+      resource_type,
+      FFI::MemoryPointer.from_string(resource_name),
+      resource_pattern_type,
+      FFI::MemoryPointer.from_string(principal),
+      FFI::MemoryPointer.from_string(host),
+      operation,
+      permission_type,
+      error_buffer,
+      256
+    )
+    if delete_acl_ptr.null?
+      raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+    end
+    pointer_array = [delete_acl_ptr]
+    delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+    delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+    Rdkafka::Admin::DeleteAclHandle.new.tap do |handle|
+      handle[:pending] = pending_handle
+      handle[:response] = response
+      handle[:response_string] = FFI::MemoryPointer.from_string("")
+      handle[:matching_acls] = delete_acls_array_ptr
+      handle[:matching_acls_count] = 1
+    end
+  end
+
+  after do
+    if delete_acl_ptr != FFI::Pointer::NULL
+      Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+    end
+  end
+
+  describe "#wait" do
+    let(:pending_handle) { true }
+
+    it "should wait until the timeout and then raise an error" do
+      expect {
+        subject.wait(max_wait_timeout: 0.1)
+      }.to raise_error Rdkafka::Admin::DeleteAclHandle::WaitTimeoutError, /delete acl/
+    end
+
+    context "when not pending anymore and no error" do
+      let(:pending_handle) { false }
+
+      it "should return a delete acl report" do
+        report = subject.wait
+
+        expect(report.deleted_acls.length).to eq(1)
+      end
+
+      it "should wait without a timeout" do
+        report = subject.wait(max_wait_timeout: nil)
+
+        expect(report.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+      end
+    end
+  end
+
+  describe "#raise_error" do
+    let(:pending_handle) { false }
+
+    it "should raise the appropriate error" do
+      expect {
+        subject.raise_error
+      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
    end
+  end
+end
data/spec/rdkafka/admin/delete_acl_report_spec.rb ADDED
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::DeleteAclReport do
+
+  let(:resource_name) {"acl-test-topic"}
+  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+  let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+  let(:principal) {"User:anonymous"}
+  let(:host) {"*"}
+  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
+  let(:delete_acl_ptr) {FFI::Pointer::NULL}
+
+  subject do
+    error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+    delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+      resource_type,
+      FFI::MemoryPointer.from_string(resource_name),
+      resource_pattern_type,
+      FFI::MemoryPointer.from_string(principal),
+      FFI::MemoryPointer.from_string(host),
+      operation,
+      permission_type,
+      error_buffer,
+      256
+    )
+    if delete_acl_ptr.null?
+      raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+    end
+    pointer_array = [delete_acl_ptr]
+    delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+    delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+    Rdkafka::Admin::DeleteAclReport.new(matching_acls: delete_acls_array_ptr, matching_acls_count: 1)
+  end
+
+  after do
+    if delete_acl_ptr != FFI::Pointer::NULL
+      Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+    end
+  end
+
+  it "should get deleted acl resource type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC" do
+    expect(subject.deleted_acls[0].matching_acl_resource_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC)
+  end
+
+  it "should get deleted acl resource name as acl-test-topic" do
+    expect(subject.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+  end
+
+  it "should get deleted acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+    expect(subject.deleted_acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+  end
+
+  it "should get deleted acl principal as User:anonymous" do
+    expect(subject.deleted_acls[0].matching_acl_principal).to eq("User:anonymous")
+  end
+
+  it "should get deleted acl host as *" do
+    expect(subject.deleted_acls[0].matching_acl_host).to eq("*")
+  end
+
+  it "should get deleted acl operation as Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ" do
+    expect(subject.deleted_acls[0].matching_acl_operation).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ)
+  end
+
+  it "should get deleted acl permission_type as Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW" do
+    expect(subject.deleted_acls[0].matching_acl_permission_type).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW)
+  end
+end