rdkafka 0.12.0 → 0.15.1
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci.yml +57 -0
- data/.gitignore +4 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +155 -93
- data/Gemfile +2 -0
- data/{LICENSE → MIT-LICENSE} +2 -1
- data/README.md +76 -29
- data/Rakefile +2 -0
- data/certs/cert_chain.pem +26 -0
- data/docker-compose.yml +18 -15
- data/ext/README.md +1 -1
- data/ext/Rakefile +46 -27
- data/lib/rdkafka/abstract_handle.rb +41 -25
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/create_topic_report.rb +2 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
- data/lib/rdkafka/admin.rb +494 -35
- data/lib/rdkafka/bindings.rb +180 -41
- data/lib/rdkafka/callbacks.rb +202 -1
- data/lib/rdkafka/config.rb +62 -25
- data/lib/rdkafka/consumer/headers.rb +24 -9
- data/lib/rdkafka/consumer/message.rb +3 -1
- data/lib/rdkafka/consumer/partition.rb +2 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +13 -8
- data/lib/rdkafka/consumer.rb +243 -111
- data/lib/rdkafka/error.rb +15 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +25 -2
- data/lib/rdkafka/native_kafka.rb +120 -0
- data/lib/rdkafka/producer/delivery_handle.rb +16 -2
- data/lib/rdkafka/producer/delivery_report.rb +22 -2
- data/lib/rdkafka/producer.rb +151 -21
- data/lib/rdkafka/version.rb +5 -3
- data/lib/rdkafka.rb +24 -2
- data/rdkafka.gemspec +21 -5
- data/renovate.json +6 -0
- data/spec/rdkafka/abstract_handle_spec.rb +1 -1
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +1 -1
- data/spec/rdkafka/admin/create_topic_report_spec.rb +1 -1
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +1 -1
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +1 -1
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +209 -5
- data/spec/rdkafka/bindings_spec.rb +2 -1
- data/spec/rdkafka/callbacks_spec.rb +1 -1
- data/spec/rdkafka/config_spec.rb +24 -3
- data/spec/rdkafka/consumer/headers_spec.rb +60 -0
- data/spec/rdkafka/consumer/message_spec.rb +1 -1
- data/spec/rdkafka/consumer/partition_spec.rb +1 -1
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +20 -1
- data/spec/rdkafka/consumer_spec.rb +352 -61
- data/spec/rdkafka/error_spec.rb +1 -1
- data/spec/rdkafka/metadata_spec.rb +4 -3
- data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -35
- data/spec/rdkafka/producer/delivery_handle_spec.rb +4 -1
- data/spec/rdkafka/producer/delivery_report_spec.rb +11 -3
- data/spec/rdkafka/producer_spec.rb +234 -22
- data/spec/spec_helper.rb +20 -2
- data.tar.gz.sig +0 -0
- metadata +81 -17
- metadata.gz.sig +0 -0
- data/.semaphore/semaphore.yml +0 -23
- data/bin/console +0 -11
- data/lib/rdkafka/producer/client.rb +0 -47
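To pick up these changes in an application, the dependency constraint only needs to move to the 0.15 line; a minimal Gemfile entry (Bundler assumed) would be:

```ruby
# Gemfile — pull in the 0.15.x series of the gem
gem "rdkafka", "~> 0.15.1"
```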
data/ext/Rakefile
CHANGED
@@ -1,38 +1,57 @@
+# frozen_string_literal: true
+
 require File.expand_path('../../lib/rdkafka/version', __FILE__)
-require "mini_portile2"
 require "fileutils"
 require "open-uri"
 
 task :default => :clean do
+  # For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
+  # managed by nix itself, so using the normal file paths doesn't work for nix users.
+  #
+  # Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
+  # accessible in the nix environment
+  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
+    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
+    require "mini_portile2"
+    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+
+    # Use default homebrew openssl if we're on mac and the directory exists
+    # and each of flags is not empty
+    if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+      ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+      ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+    end
 
+    recipe.files << {
+      :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+    }
+    recipe.configure_options = ["--host=#{recipe.host}"]
+    recipe.cook
+    # Move dynamic library we're interested in
+    if recipe.host.include?('darwin')
+      from_extension = '1.dylib'
+      to_extension = 'dylib'
+    else
+      from_extension = 'so.1'
+      to_extension = 'so'
+    end
+    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+    # Cleanup files created by miniportile we don't need in the gem
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
   else
+    # Otherwise, copy existing libraries to ./ext
+    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
+      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
+    end
+    files = [
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
+    ]
+    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
   end
-  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
-  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
-  # Cleanup files created by miniportile we don't need in the gem
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
 end
 
 task :clean do
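The rewritten build task branches on `RDKAFKA_EXT_PATH`: when it is unset, mini_portile2 downloads and compiles librdkafka; when it is set (the nix case described in the comments), prebuilt `librdkafka.so`/`librdkafka.dylib` files are copied from that prefix instead. A minimal sketch of driving the second path, assuming a hypothetical install prefix and a gem checkout as the working directory:

```ruby
# Hypothetical illustration: build the extension against an existing
# librdkafka instead of letting mini_portile2 compile one from source.
# "/opt/librdkafka" is an example prefix containing lib/librdkafka.so or .dylib.
ENV["RDKAFKA_EXT_PATH"] = "/opt/librdkafka"

# Equivalent to running `RDKAFKA_EXT_PATH=/opt/librdkafka rake` inside ext/.
Dir.chdir("ext") { system("rake", exception: true) }
```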
data/lib/rdkafka/abstract_handle.rb
CHANGED
@@ -1,26 +1,37 @@
+# frozen_string_literal: true
 
 module Rdkafka
+  # This class serves as an abstract base class to represent handles within the Rdkafka module.
+  # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle
+  # classes to inherit from, ensuring they adhere to a particular structure and behavior.
+  #
+  # Subclasses must define their own layout, and the layout must start with:
+  #
+  # layout :pending, :bool,
+  #        :response, :int
   class AbstractHandle < FFI::Struct
-    #
-    # layout :pending, :bool,
-    #        :response, :int
+    include Helpers::Time
 
+    # Registry for registering all the handles.
     REGISTRY = {}
 
+    class << self
+      # Adds handle to the register
+      #
+      # @param handle [AbstractHandle] any handle we want to register
+      def register(handle)
+        address = handle.to_ptr.address
+        REGISTRY[address] = handle
+      end
 
+      # Removes handle from the register based on the handle address
+      #
+      # @param address [Integer] address of the registered handle we want to remove
+      def remove(address)
+        REGISTRY.delete(address)
+      end
     end
 
-    def self.remove(address)
-      REGISTRY.delete(address)
-    end
 
     # Whether the handle is still pending.
     #
@@ -30,29 +41,34 @@ module Rdkafka
     end
 
     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
-    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
-    # In this case it is possible to call wait again.
+    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
+    # on the operation. In this case it is possible to call wait again.
     #
-    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
-    #
+    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
+    #   If this is nil it does not time out.
+    # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the
+    #   operation has completed
+    # @param raise_response_error [Boolean] should we raise error when waiting finishes
+    #
+    # @return [Object] Operation-specific result
     #
     # @raise [RdkafkaError] When the operation failed
     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
-    # @return [Object] Operation-specific result
-    def wait(max_wait_timeout: 60, wait_timeout: 0.1)
+    def wait(max_wait_timeout: 60, wait_timeout: 0.1, raise_response_error: true)
       timeout = if max_wait_timeout
+        monotonic_now + max_wait_timeout
       else
         nil
       end
       loop do
        if pending?
+          if timeout && timeout <= monotonic_now
+            raise WaitTimeoutError.new(
+              "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+            )
          end
          sleep wait_timeout
-        elsif self[:response] != 0
+        elsif self[:response] != 0 && raise_response_error
          raise_error
        else
          return create_result
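The reworked `wait` measures its deadline with the monotonic clock pulled in via `Helpers::Time` and gains a `raise_response_error:` flag; when the flag is `false`, a non-zero response no longer raises and the operation-specific result is returned instead. A minimal usage sketch, assuming a locally reachable broker (the address and topic are placeholders):

```ruby
require "rdkafka"

# Illustrative configuration; adjust the bootstrap server to your cluster.
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "example-topic", payload: "hello")

# Poll every 100 ms for up to 5 s. With raise_response_error: false a failed
# delivery does not raise; the handle still returns its operation-specific result.
report = handle.wait(max_wait_timeout: 5, wait_timeout: 0.1, raise_response_error: false)

producer.close
```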
data/lib/rdkafka/admin/acl_binding_result.rb
ADDED
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    # Extracts attributes of rd_kafka_AclBinding_t
+    #
+    class AclBindingResult
+      attr_reader :result_error, :error_string, :matching_acl_resource_type,
+                  :matching_acl_resource_name, :matching_acl_resource_pattern_type,
+                  :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
+                  :matching_acl_permission_type
+
+      # This attribute was initially released under the name that is now an alias
+      # We keep it for backwards compatibility but it was changed for the consistency
+      alias matching_acl_pattern_type matching_acl_resource_pattern_type
+
+      def initialize(matching_acl)
+        rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
+        @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
+        error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
+
+        if error_string != FFI::Pointer::NULL
+          @error_string = error_string.read_string
+        end
+
+        @matching_acl_resource_type = Rdkafka::Bindings.rd_kafka_AclBinding_restype(matching_acl)
+        matching_acl_resource_name = Rdkafka::Bindings.rd_kafka_AclBinding_name(matching_acl)
+
+        if matching_acl_resource_name != FFI::Pointer::NULL
+          @matching_acl_resource_name = matching_acl_resource_name.read_string
+        end
+
+        @matching_acl_resource_pattern_type = Rdkafka::Bindings.rd_kafka_AclBinding_resource_pattern_type(matching_acl)
+        matching_acl_principal = Rdkafka::Bindings.rd_kafka_AclBinding_principal(matching_acl)
+
+        if matching_acl_principal != FFI::Pointer::NULL
+          @matching_acl_principal = matching_acl_principal.read_string
+        end
+
+        matching_acl_host = Rdkafka::Bindings.rd_kafka_AclBinding_host(matching_acl)
+
+        if matching_acl_host != FFI::Pointer::NULL
+          @matching_acl_host = matching_acl_host.read_string
+        end
+
+        @matching_acl_operation = Rdkafka::Bindings.rd_kafka_AclBinding_operation(matching_acl)
+        @matching_acl_permission_type = Rdkafka::Bindings.rd_kafka_AclBinding_permission_type(matching_acl)
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/create_acl_handle.rb
ADDED
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class CreateAclHandle < AbstractHandle
+      layout :pending, :bool,
+             :response, :int,
+             :response_string, :pointer
+
+      # @return [String] the name of the operation
+      def operation_name
+        "create acl"
+      end
+
+      # @return [CreateAclReport] instance with rdkafka_response value as 0 and rdkafka_response_string value as empty string if the acl creation was successful
+      def create_result
+        CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: self[:response_string].read_string
+        )
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/create_acl_report.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class CreateAclReport
+
+      # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
+      # @return [Integer]
+      attr_reader :rdkafka_response
+
+
+      # Upon successful creation of Acl empty string will be returned as rdkafka_response_string
+      # @return [String]
+      attr_reader :rdkafka_response_string
+
+      def initialize(rdkafka_response:, rdkafka_response_string:)
+        @rdkafka_response = rdkafka_response
+        if rdkafka_response_string != FFI::Pointer::NULL
+          @rdkafka_response_string = rdkafka_response_string.read_string
+        end
+      end
+    end
+  end
+end
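These two classes back the ACL-creation flow added to `Rdkafka::Admin`: waiting on the handle yields a `CreateAclReport` carrying the rdkafka response code and string. A sketch of the intended usage, assuming the 0.15 admin API exposes `create_acl` with the keyword arguments and `Rdkafka::Bindings` constants shown here (they mirror librdkafka's ACL enums and are assumptions, not verified against this exact release):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# Allow principal User:alice to read topic "example-topic" from any host.
handle = admin.create_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         "example-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             "User:alice",
  host:                  "*",
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
)

report = handle.wait(max_wait_timeout: 15)
report.rdkafka_response        # => 0 (RD_KAFKA_RESP_ERR_NO_ERROR) on success
report.rdkafka_response_string # => "" on success

admin.close
```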
data/lib/rdkafka/admin/create_partitions_handle.rb
ADDED
@@ -0,0 +1,27 @@
+module Rdkafka
+  class Admin
+    class CreatePartitionsHandle < AbstractHandle
+      layout :pending, :bool,
+             :response, :int,
+             :error_string, :pointer,
+             :result_name, :pointer
+
+      # @return [String] the name of the operation
+      def operation_name
+        "create partitions"
+      end
+
+      # @return [Boolean] whether the create topic was successful
+      def create_result
+        CreatePartitionsReport.new(self[:error_string], self[:result_name])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
+        )
+      end
+    end
+  end
+end
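This handle backs partition growth for an existing topic and reuses the create-topic report fields. A sketch, assuming `Admin#create_partitions(topic_name, partition_count)` is the public entry point (the method name, signature, and broker address are assumptions for illustration):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# Grow "example-topic" to 8 partitions; wait raises RdkafkaError on failure.
admin.create_partitions("example-topic", 8).wait(max_wait_timeout: 30)

admin.close
```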
data/lib/rdkafka/admin/delete_acl_handle.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DeleteAclHandle < AbstractHandle
+      layout :pending, :bool,
+             :response, :int,
+             :response_string, :pointer,
+             :matching_acls, :pointer,
+             :matching_acls_count, :int
+
+      # @return [String] the name of the operation
+      def operation_name
+        "delete acl"
+      end
+
+      # @return [DeleteAclReport] instance with an array of matching_acls
+      def create_result
+        DeleteAclReport.new(matching_acls: self[:matching_acls], matching_acls_count: self[:matching_acls_count])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: self[:response_string].read_string
+        )
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/delete_acl_report.rb
ADDED
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DeleteAclReport
+
+      # deleted acls
+      # @return [Rdkafka::Bindings::AclBindingResult]
+      attr_reader :deleted_acls
+
+      def initialize(matching_acls:, matching_acls_count:)
+        @deleted_acls=[]
+        if matching_acls != FFI::Pointer::NULL
+          acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
+          (1..matching_acls_count).map do |matching_acl_index|
+            acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[matching_acl_index - 1])
+            @deleted_acls << acl_binding_result
+          end
+        end
+      end
+    end
+  end
+end
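Waiting on a `DeleteAclHandle` yields this report, whose `deleted_acls` array wraps each removed binding in an `AclBindingResult`. A sketch of the round trip, under the same assumed `delete_acl` keyword arguments and binding constants as the create example above:

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# Remove the read permission for User:alice on "example-topic" (filters mirror create_acl).
handle = admin.delete_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         "example-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             "User:alice",
  host:                  "*",
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
)

report = handle.wait(max_wait_timeout: 15)
report.deleted_acls.each do |acl|
  # Each entry is an AclBindingResult built from an rd_kafka_AclBinding_t pointer.
  puts [acl.matching_acl_principal, acl.matching_acl_resource_name, acl.matching_acl_operation].inspect
end

admin.close
```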
data/lib/rdkafka/admin/delete_groups_handle.rb
ADDED
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DeleteGroupsHandle < AbstractHandle
+      layout :pending, :bool, # TODO: ???
+             :response, :int,
+             :error_string, :pointer,
+             :result_name, :pointer
+
+      # @return [String] the name of the operation
+      def operation_name
+        "delete groups"
+      end
+
+      def create_result
+        DeleteGroupsReport.new(self[:error_string], self[:result_name])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: create_result.error_string
+        )
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/delete_groups_report.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DeleteGroupsReport
+      # Any error message generated from the DeleteTopic
+      # @return [String]
+      attr_reader :error_string
+
+      # The name of the topic deleted
+      # @return [String]
+      attr_reader :result_name
+
+      def initialize(error_string, result_name)
+        if error_string != FFI::Pointer::NULL
+          @error_string = error_string.read_string
+        end
+        if result_name != FFI::Pointer::NULL
+          @result_name = @result_name = result_name.read_string
+        end
+      end
+    end
+  end
+end
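The report's doc comments still talk about topics (they appear to be carried over from the delete-topic report), but the fields hold the outcome of a consumer-group deletion. A sketch, assuming `Admin#delete_group(group_id)` is the public entry point (the method name and group id are assumptions):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# Delete a consumer group; wait raises RdkafkaError if the broker rejects it.
report = admin.delete_group("example-consumer-group").wait(max_wait_timeout: 30)
report.result_name # => name of the deleted group

admin.close
```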
data/lib/rdkafka/admin/describe_acl_handle.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DescribeAclHandle < AbstractHandle
+      layout :pending, :bool,
+             :response, :int,
+             :response_string, :pointer,
+             :acls, :pointer,
+             :acls_count, :int
+
+      # @return [String] the name of the operation.
+      def operation_name
+        "describe acl"
+      end
+
+      # @return [DescribeAclReport] instance with an array of acls that matches the request filters.
+      def create_result
+        DescribeAclReport.new(acls: self[:acls], acls_count: self[:acls_count])
+      end
+
+      def raise_error
+        raise RdkafkaError.new(
+          self[:response],
+          broker_message: self[:response_string].read_string
+        )
+      end
+    end
+  end
+end
data/lib/rdkafka/admin/describe_acl_report.rb
ADDED
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  class Admin
+    class DescribeAclReport
+
+      # acls that exists in the cluster for the resource_type, resource_name and pattern_type filters provided in the request.
+      # @return [Rdkafka::Bindings::AclBindingResult] array of matching acls.
+      attr_reader :acls
+
+      def initialize(acls:, acls_count:)
+        @acls=[]
+        if acls != FFI::Pointer::NULL
+          acl_binding_result_pointers = acls.read_array_of_pointer(acls_count)
+          (1..acls_count).map do |acl_index|
+            acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[acl_index - 1])
+            @acls << acl_binding_result
+          end
+        end
+      end
+    end
+  end
+end
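Waiting on a `DescribeAclHandle` returns this report, whose `acls` array contains one `AclBindingResult` per binding that matched the requested filters. A closing sketch, assuming `describe_acl` takes the same style of keyword arguments as the other ACL calls and that the `_ANY` enum constants exist in `Rdkafka::Bindings` (both are assumptions):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# List every ACL bound to the topic named exactly "example-topic".
handle = admin.describe_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         "example-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             nil,
  host:                  nil,
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY
)

report = handle.wait(max_wait_timeout: 15)
report.acls.each do |acl|
  puts [acl.matching_acl_principal, acl.matching_acl_host, acl.matching_acl_operation].inspect
end

admin.close
```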