rdkafka 0.13.0 → 0.15.1
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci.yml +57 -0
- data/.gitignore +4 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +155 -111
- data/{LICENSE → MIT-LICENSE} +2 -1
- data/README.md +60 -39
- data/certs/cert_chain.pem +26 -0
- data/docker-compose.yml +18 -15
- data/ext/README.md +1 -1
- data/ext/Rakefile +43 -26
- data/lib/rdkafka/abstract_handle.rb +40 -26
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
- data/lib/rdkafka/admin.rb +449 -7
- data/lib/rdkafka/bindings.rb +133 -7
- data/lib/rdkafka/callbacks.rb +196 -1
- data/lib/rdkafka/config.rb +53 -19
- data/lib/rdkafka/consumer/headers.rb +2 -4
- data/lib/rdkafka/consumer/topic_partition_list.rb +11 -8
- data/lib/rdkafka/consumer.rb +164 -74
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +22 -1
- data/lib/rdkafka/native_kafka.rb +6 -1
- data/lib/rdkafka/producer/delivery_handle.rb +12 -1
- data/lib/rdkafka/producer/delivery_report.rb +16 -3
- data/lib/rdkafka/producer.rb +121 -13
- data/lib/rdkafka/version.rb +3 -3
- data/lib/rdkafka.rb +21 -1
- data/rdkafka.gemspec +19 -5
- data/renovate.json +6 -0
- data/spec/rdkafka/abstract_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +205 -2
- data/spec/rdkafka/bindings_spec.rb +0 -1
- data/spec/rdkafka/callbacks_spec.rb +0 -2
- data/spec/rdkafka/config_spec.rb +8 -2
- data/spec/rdkafka/consumer/headers_spec.rb +0 -2
- data/spec/rdkafka/consumer/message_spec.rb +0 -2
- data/spec/rdkafka/consumer/partition_spec.rb +0 -2
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +19 -2
- data/spec/rdkafka/consumer_spec.rb +232 -39
- data/spec/rdkafka/error_spec.rb +0 -2
- data/spec/rdkafka/metadata_spec.rb +2 -3
- data/spec/rdkafka/native_kafka_spec.rb +2 -3
- data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
- data/spec/rdkafka/producer/delivery_report_spec.rb +4 -2
- data/spec/rdkafka/producer_spec.rb +183 -3
- data/spec/spec_helper.rb +3 -1
- data.tar.gz.sig +0 -0
- metadata +78 -14
- metadata.gz.sig +0 -0
- data/.semaphore/semaphore.yml +0 -27
data/lib/rdkafka/producer.rb
CHANGED
@@ -1,10 +1,15 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    include Helpers::Time
+
+    # Cache partitions count for 30 seconds
+    PARTITIONS_COUNT_TTL = 30
+
+    private_constant :PARTITIONS_COUNT_TTL
+
     # @private
     # Returns the current delivery callback, by default this is nil.
     #
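The `Helpers::Time` mixin included above comes from the new `data/lib/rdkafka/helpers/time.rb` (see the file list); it provides the `monotonic_now` used by the partition-count cache below. Its body is not shown in this diff; a minimal sketch of such a helper, assuming it wraps the monotonic clock, could look like:

# Hypothetical sketch of Rdkafka::Helpers::Time; the actual file is not shown in this diff.
module Rdkafka
  module Helpers
    module Time
      # The monotonic clock is immune to system clock adjustments, which makes
      # it safe for TTL and elapsed-time measurements
      def monotonic_now
        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
      end
    end
  end
end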
@@ -18,12 +23,41 @@
     attr_reader :delivery_callback_arity
 
     # @private
+    # @param native_kafka [NativeKafka]
+    # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
+    #   the "consistent_random" default
     def initialize(native_kafka, partitioner_name)
       @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
 
       # Makes sure, that native kafka gets closed before it gets GCed by Ruby
       ObjectSpace.define_finalizer(self, native_kafka.finalizer)
+
+      @_partitions_count_cache = Hash.new do |cache, topic|
+        topic_metadata = nil
+
+        @native_kafka.with_inner do |inner|
+          topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
+        end
+
+        partition_count = topic_metadata ? topic_metadata[:partition_count] : -1
+
+        # This approach caches the failure to fetch only for 5 seconds. This will make sure, that
+        # we do not cache the failure for too long but also "buys" us a bit of time in case there
+        # would be issues in the cluster so we won't overload it with consecutive requests
+        cache[topic] = if partition_count.positive?
+          [monotonic_now, partition_count]
+        else
+          [monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
+        end
+      end
+    end
+
+    # @return [String] producer name
+    def name
+      @name ||= @native_kafka.with_inner do |inner|
+        ::Rdkafka::Bindings.rd_kafka_name(inner)
+      end
     end
 
     # Set a callback that will be called every time a message is successfully produced.
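The constructor above seeds `@_partitions_count_cache` with a `Hash.new` default block: a miss triggers the metadata request and stores a `[timestamp, count]` pair, while `partition_count` (further down in this diff) evicts stale pairs before reading. A standalone sketch of that pattern with hypothetical names (`expensive_lookup` stands in for the metadata call; it is not part of the gem):

# Illustrative TTL cache built on Hash's default block, mirroring the pattern above
TTL = 30

def monotonic_now
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
end

def expensive_lookup(key)
  key.length # placeholder for a real, slow lookup
end

CACHE = Hash.new do |hash, key|
  # Runs only on a cache miss: remember when we fetched alongside the value
  hash[key] = [monotonic_now, expensive_lookup(key)]
end

def cached_fetch(key)
  # Evict stale entries so the default block recomputes them on next access
  CACHE.delete_if { |_, (fetched_at, _)| monotonic_now - fetched_at > TTL }
  CACHE[key].last
end

cached_fetch("example-topic") # computes once, then serves from the cache for 30s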
@@ -54,25 +88,78 @@
     # in seconds. Call this before closing a producer to ensure delivery of all messages.
     #
     # @param timeout_ms [Integer] how long should we wait for flush of all messages
+    # @return [Boolean] true if no more data and all was flushed, false in case there are still
+    #   outgoing messages after the timeout
+    #
+    # @note We raise an exception for other errors because based on the librdkafka docs, there
+    #   should be no other errors.
+    #
+    # @note For `timed_out` we do not raise an error to keep it backwards compatible
     def flush(timeout_ms=5_000)
       closed_producer_check(__method__)
 
+      code = nil
+
+      @native_kafka.with_inner do |inner|
+        code = Rdkafka::Bindings.rd_kafka_flush(inner, timeout_ms)
+      end
+
+      # Early skip not to build the error message
+      return true if code.zero?
+
+      error = Rdkafka::RdkafkaError.new(code)
+
+      return false if error.code == :timed_out
+
+      raise(error)
+    end
+
+    # Purges the outgoing queue and releases all resources.
+    #
+    # Useful when closing the producer with outgoing messages to unstable clusters or when for
+    # any other reasons waiting cannot go on anymore. This purges both the queue and all the
+    # inflight requests + updates the delivery handles statuses so they can be materialized into
+    # `purge_queue` errors.
+    def purge
+      closed_producer_check(__method__)
+
+      code = nil
+
       @native_kafka.with_inner do |inner|
-        Rdkafka::Bindings.rd_kafka_flush(inner, timeout_ms)
+        code = Bindings.rd_kafka_purge(
+          inner,
+          Bindings::RD_KAFKA_PURGE_F_QUEUE | Bindings::RD_KAFKA_PURGE_F_INFLIGHT
+        )
       end
+
+      code.zero? || raise(Rdkafka::RdkafkaError.new(code))
+
+      # Wait for the purge to affect everything
+      sleep(0.001) until flush(100)
+
+      true
     end
 
     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic or `-1` if it could not be obtained.
+    #
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @return partition count [Integer,nil]
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most if we have a valid partition
+    #   count or every 5 seconds in case we were not able to obtain number of partitions
     def partition_count(topic)
       closed_producer_check(__method__)
-      @native_kafka.with_inner do |inner|
-        Rdkafka::Metadata.new(inner, topic).topics&.first[:partition_count]
+
+      @_partitions_count_cache.delete_if do |_, cached|
+        monotonic_now - cached.first > PARTITIONS_COUNT_TTL
       end
+
+      @_partitions_count_cache[topic].last
     end
 
     # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
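A hedged usage sketch of the new `flush`/`purge` pair during shutdown (broker address and topic name are placeholders):

# Sketch: draining a producer on shutdown against a local broker
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

producer.produce(topic: "example", payload: "bye")

# Try to deliver everything within 5 seconds; flush returns false on timeout
unless producer.flush(5_000)
  # Give up on undelivered messages: clears the queue and in-flight requests
  # and fails their delivery handles with `purge_queue` errors
  producer.purge
end

producer.close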
@@ -87,11 +174,12 @@
     # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
     # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
     # @param headers [Hash<String,String>] Optional message headers
-    #
-    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
+    # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
-    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
+    #
+    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
+    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
       closed_producer_check(__method__)
 
       # Start by checking and converting the input
@@ -113,7 +201,7 @@
       if partition_key
         partition_count = partition_count(topic)
         # If the topic is not present, set to -1
-        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
+        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
       end
 
       # If partition is nil, use -1 to let librdkafka set the partition randomly or
@@ -133,6 +221,7 @@
       end
 
       delivery_handle = DeliveryHandle.new
+      delivery_handle.label = label
       delivery_handle[:pending] = true
       delivery_handle[:response] = -1
       delivery_handle[:partition] = -1
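With `delivery_handle.label = label` wired in above, a label given to `produce` travels with the handle and, per the `@param label` docs, surfaces on the delivery report as well. A hedged sketch (broker and topic are placeholders):

# Sketch: correlating a delivery report back to application state via label
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "example", payload: "hello", label: { request_id: 42 })
handle.label # => { request_id: 42 }

report = handle.wait(max_wait_timeout: 10)
report.label # => { request_id: 42 }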
@@ -179,13 +268,27 @@
       delivery_handle
     end
 
+    # Calls (if registered) the delivery callback
+    #
+    # @param delivery_report [Producer::DeliveryReport]
+    # @param delivery_handle [Producer::DeliveryHandle]
     def call_delivery_callback(delivery_report, delivery_handle)
       return unless @delivery_callback
 
-      args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
-      @delivery_callback.call(*args)
+      case @delivery_callback_arity
+      when 0
+        @delivery_callback.call
+      when 1
+        @delivery_callback.call(delivery_report)
+      else
+        @delivery_callback.call(delivery_report, delivery_handle)
+      end
     end
 
+    # Figures out the arity of a given block/method
+    #
+    # @param callback [#call, Proc]
+    # @return [Integer] arity of the provided block/method
     def arity(callback)
       return callback.arity if callback.respond_to?(:arity)
 
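Because the callback's arity is inspected, a registered delivery callback may accept zero, one, or two arguments, and the case statement above dispatches accordingly. A hedged sketch:

# Sketch: delivery callbacks with different arities
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# One argument: receives only the delivery report
producer.delivery_callback = ->(report) do
  puts "Delivered to partition #{report.partition} at offset #{report.offset}"
end

# Two arguments: also receives the delivery handle (and thus its label)
producer.delivery_callback = lambda do |report, handle|
  puts "Delivered message labeled #{handle.label.inspect}"
end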
@@ -193,6 +296,11 @@
     end
 
     private
+
+    # Ensures no operations can happen on a closed producer
+    #
+    # @param method [Symbol] name of the method that invoked producer
+    # @raise [Rdkafka::ClosedProducerError]
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
data/lib/rdkafka/version.rb
CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.13.0"
-  LIBRDKAFKA_VERSION = "2.0.2"
-  LIBRDKAFKA_SOURCE_SHA256 = "
+  VERSION = "0.15.1"
+  LIBRDKAFKA_VERSION = "2.3.0"
+  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
data/lib/rdkafka.rb
CHANGED
@@ -1,13 +1,29 @@
 # frozen_string_literal: true
 
-require "rdkafka/version"
+require "logger"
+require "objspace"
+require "ffi"
+require "json"
 
+require "rdkafka/version"
+require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
 require "rdkafka/admin/create_topic_report"
+require "rdkafka/admin/delete_groups_handle"
+require "rdkafka/admin/delete_groups_report"
 require "rdkafka/admin/delete_topic_handle"
 require "rdkafka/admin/delete_topic_report"
+require "rdkafka/admin/create_partitions_handle"
+require "rdkafka/admin/create_partitions_report"
+require "rdkafka/admin/create_acl_handle"
+require "rdkafka/admin/create_acl_report"
+require "rdkafka/admin/delete_acl_handle"
+require "rdkafka/admin/delete_acl_report"
+require "rdkafka/admin/describe_acl_handle"
+require "rdkafka/admin/describe_acl_report"
+require "rdkafka/admin/acl_binding_result"
 require "rdkafka/bindings"
 require "rdkafka/callbacks"
 require "rdkafka/config"
@@ -22,3 +38,7 @@
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
+
+# Main Rdkafka namespace of this gem
+module Rdkafka
+end
data/rdkafka.gemspec
CHANGED
@@ -3,12 +3,11 @@
 require File.expand_path('lib/rdkafka/version', __dir__)
 
 Gem::Specification.new do |gem|
-  gem.authors = ['Thijs Cadier']
-  gem.email = ["
+  gem.authors = ['Thijs Cadier', 'Maciej Mensfeld']
+  gem.email = ["contact@karafka.io"]
   gem.description = "Modern Kafka client library for Ruby based on librdkafka"
-  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.
+  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.7+."
   gem.license = 'MIT'
-  gem.homepage = 'https://github.com/thijsc/rdkafka-ruby'
 
   gem.files = `git ls-files`.split($\)
   gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
@@ -16,8 +15,13 @@
   gem.name = 'rdkafka'
   gem.require_paths = ['lib']
   gem.version = Rdkafka::VERSION
-  gem.required_ruby_version = '>= 2.
+  gem.required_ruby_version = '>= 2.7'
   gem.extensions = %w(ext/Rakefile)
+  gem.cert_chain = %w[certs/cert_chain.pem]
+
+  if $PROGRAM_NAME.end_with?('gem')
+    gem.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
+  end
 
   gem.add_dependency 'ffi', '~> 1.15'
   gem.add_dependency 'mini_portile2', '~> 2.6'
@@ -29,4 +33,14 @@
   gem.add_development_dependency 'simplecov'
   gem.add_development_dependency 'guard'
   gem.add_development_dependency 'guard-rspec'
+
+  gem.metadata = {
+    'funding_uri' => 'https://karafka.io/#become-pro',
+    'homepage_uri' => 'https://karafka.io',
+    'changelog_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/CHANGELOG.md',
+    'bug_tracker_uri' => 'https://github.com/karafka/rdkafka-ruby/issues',
+    'source_code_uri' => 'https://github.com/karafka/rdkafka-ruby',
+    'documentation_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/README.md',
+    'rubygems_mfa_required' => 'true'
+  }
 end
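The gemspec now declares a cert chain for signed releases and standard RubyGems metadata. A small hedged sketch of reading that metadata at runtime via the standard `Gem.loaded_specs` API:

# Sketch: inspecting the installed gem's metadata (spec is nil unless the gem is activated)
require "rdkafka"

spec = Gem.loaded_specs["rdkafka"]
puts spec.metadata["source_code_uri"] if spec # => https://github.com/karafka/rdkafka-ruby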
data/spec/rdkafka/admin/create_acl_handle_spec.rb
ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::CreateAclHandle do
+  # If create acl was successful there is no error object
+  # the error code is set to RD_KAFKA_RESP_ERR_NO_ERROR
+  # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L169
+  let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+
+  subject do
+    Rdkafka::Admin::CreateAclHandle.new.tap do |handle|
+      handle[:pending] = pending_handle
+      handle[:response] = response
+      # If create acl was successful there is no error object and the error_string is set to ""
+      # https://github.com/confluentinc/librdkafka/blob/1f9f245ac409f50f724695c628c7a0d54a763b9a/src/rdkafka_error.c#L178
+      handle[:response_string] = FFI::MemoryPointer.from_string("")
+    end
+  end
+
+  describe "#wait" do
+    let(:pending_handle) { true }
+
+    it "should wait until the timeout and then raise an error" do
+      expect {
+        subject.wait(max_wait_timeout: 0.1)
+      }.to raise_error Rdkafka::Admin::CreateAclHandle::WaitTimeoutError, /create acl/
+    end
+
+    context "when not pending anymore and no error" do
+      let(:pending_handle) { false }
+
+      it "should return a create acl report" do
+        report = subject.wait
+
+        expect(report.rdkafka_response_string).to eq("")
+      end
+
+      it "should wait without a timeout" do
+        report = subject.wait(max_wait_timeout: nil)
+
+        expect(report.rdkafka_response_string).to eq("")
+      end
+    end
+  end
+
+  describe "#raise_error" do
+    let(:pending_handle) { false }
+
+    it "should raise the appropriate error" do
+      expect {
+        subject.raise_error
+      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
+    end
+  end
+end
data/spec/rdkafka/admin/create_acl_report_spec.rb
ADDED
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::CreateAclReport do
+  subject { Rdkafka::Admin::CreateAclReport.new(
+    rdkafka_response: Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR,
+    rdkafka_response_string: FFI::MemoryPointer.from_string("")
+  )}
+
+  it "should get RD_KAFKA_RESP_ERR_NO_ERROR" do
+    expect(subject.rdkafka_response).to eq(0)
+  end
+
+  it "should get empty string" do
+    expect(subject.rdkafka_response_string).to eq("")
+  end
+end
data/spec/rdkafka/admin/delete_acl_handle_spec.rb
ADDED
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::DeleteAclHandle do
+  let(:response) { Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR }
+  let(:resource_name) { "acl-test-topic" }
+  let(:resource_type) { Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC }
+  let(:resource_pattern_type) { Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL }
+  let(:principal) { "User:anonymous" }
+  let(:host) { "*" }
+  let(:operation) { Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ }
+  let(:permission_type) { Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW }
+  let(:delete_acl_ptr) { FFI::Pointer::NULL }
+
+  subject do
+    error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+    delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+      resource_type,
+      FFI::MemoryPointer.from_string(resource_name),
+      resource_pattern_type,
+      FFI::MemoryPointer.from_string(principal),
+      FFI::MemoryPointer.from_string(host),
+      operation,
+      permission_type,
+      error_buffer,
+      256
+    )
+    if delete_acl_ptr.null?
+      raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+    end
+    pointer_array = [delete_acl_ptr]
+    delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+    delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+    Rdkafka::Admin::DeleteAclHandle.new.tap do |handle|
+      handle[:pending] = pending_handle
+      handle[:response] = response
+      handle[:response_string] = FFI::MemoryPointer.from_string("")
+      handle[:matching_acls] = delete_acls_array_ptr
+      handle[:matching_acls_count] = 1
+    end
+  end
+
+  after do
+    if delete_acl_ptr != FFI::Pointer::NULL
+      Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+    end
+  end
+
+  describe "#wait" do
+    let(:pending_handle) { true }
+
+    it "should wait until the timeout and then raise an error" do
+      expect {
+        subject.wait(max_wait_timeout: 0.1)
+      }.to raise_error Rdkafka::Admin::DeleteAclHandle::WaitTimeoutError, /delete acl/
+    end
+
+    context "when not pending anymore and no error" do
+      let(:pending_handle) { false }
+
+      it "should return a delete acl report" do
+        report = subject.wait
+
+        expect(report.deleted_acls.length).to eq(1)
+      end
+
+      it "should wait without a timeout" do
+        report = subject.wait(max_wait_timeout: nil)
+
+        expect(report.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+      end
+    end
+  end
+
+  describe "#raise_error" do
+    let(:pending_handle) { false }
+
+    it "should raise the appropriate error" do
+      expect {
+        subject.raise_error
+      }.to raise_exception(Rdkafka::RdkafkaError, /Success \(no_error\)/)
    end
+  end
+end
data/spec/rdkafka/admin/delete_acl_report_spec.rb
ADDED
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Admin::DeleteAclReport do
+
+  let(:resource_name) { "acl-test-topic" }
+  let(:resource_type) { Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC }
+  let(:resource_pattern_type) { Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL }
+  let(:principal) { "User:anonymous" }
+  let(:host) { "*" }
+  let(:operation) { Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ }
+  let(:permission_type) { Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW }
+  let(:delete_acl_ptr) { FFI::Pointer::NULL }
+
+  subject do
+    error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+    delete_acl_ptr = Rdkafka::Bindings.rd_kafka_AclBinding_new(
+      resource_type,
+      FFI::MemoryPointer.from_string(resource_name),
+      resource_pattern_type,
+      FFI::MemoryPointer.from_string(principal),
+      FFI::MemoryPointer.from_string(host),
+      operation,
+      permission_type,
+      error_buffer,
+      256
+    )
+    if delete_acl_ptr.null?
+      raise Rdkafka::Config::ConfigError.new(error_buffer.read_string)
+    end
+    pointer_array = [delete_acl_ptr]
+    delete_acls_array_ptr = FFI::MemoryPointer.new(:pointer)
+    delete_acls_array_ptr.write_array_of_pointer(pointer_array)
+    Rdkafka::Admin::DeleteAclReport.new(matching_acls: delete_acls_array_ptr, matching_acls_count: 1)
+  end
+
+  after do
+    if delete_acl_ptr != FFI::Pointer::NULL
+      Rdkafka::Bindings.rd_kafka_AclBinding_destroy(delete_acl_ptr)
+    end
+  end
+
+  it "should get deleted acl resource type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC" do
+    expect(subject.deleted_acls[0].matching_acl_resource_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC)
+  end
+
+  it "should get deleted acl resource name as acl-test-topic" do
+    expect(subject.deleted_acls[0].matching_acl_resource_name).to eq(resource_name)
+  end
+
+  it "should get deleted acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+    expect(subject.deleted_acls[0].matching_acl_resource_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+    expect(subject.deleted_acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
+  end
+
+  it "should get deleted acl principal as User:anonymous" do
+    expect(subject.deleted_acls[0].matching_acl_principal).to eq("User:anonymous")
+  end
+
+  it "should get deleted acl host as *" do
+    expect(subject.deleted_acls[0].matching_acl_host).to eq("*")
+  end
+
+  it "should get deleted acl operation as Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ" do
+    expect(subject.deleted_acls[0].matching_acl_operation).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ)
+  end
+
+  it "should get deleted acl permission_type as Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW" do
+    expect(subject.deleted_acls[0].matching_acl_permission_type).to eq(Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW)
+  end
+end
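Taken together, these specs outline the new admin ACL surface. An end-to-end hedged sketch of granting read access on a topic; the keyword arguments of `Admin#create_acl` are inferred from the parameters the specs exercise and may differ slightly from the released API:

# Sketch: creating an ACL via the admin client (argument names inferred from the specs above)
admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

handle = admin.create_acl(
  resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name: "acl-test-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal: "User:anonymous",
  host: "*",
  operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
)

report = handle.wait(max_wait_timeout: 15)
report.rdkafka_response_string # => "" on success

admin.close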