karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
  5. data/.github/workflows/ci_macos_arm64.yml +301 -0
  6. data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
  7. data/.github/workflows/push_ruby.yml +37 -0
  8. data/.github/workflows/verify-action-pins.yml +16 -0
  9. data/.gitignore +15 -0
  10. data/.rspec +2 -0
  11. data/.ruby-gemset +1 -0
  12. data/.ruby-version +1 -0
  13. data/.yardopts +2 -0
  14. data/CHANGELOG.md +323 -0
  15. data/Gemfile +5 -0
  16. data/MIT-LICENSE +22 -0
  17. data/README.md +177 -0
  18. data/Rakefile +96 -0
  19. data/docker-compose.yml +25 -0
  20. data/ext/README.md +19 -0
  21. data/ext/Rakefile +131 -0
  22. data/ext/build_common.sh +361 -0
  23. data/ext/build_linux_x86_64_gnu.sh +306 -0
  24. data/ext/build_macos_arm64.sh +550 -0
  25. data/ext/librdkafka.so +0 -0
  26. data/karafka-rdkafka.gemspec +61 -0
  27. data/lib/rdkafka/abstract_handle.rb +116 -0
  28. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  29. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  30. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  31. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  32. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  33. data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
  34. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  35. data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
  36. data/lib/rdkafka/admin/create_topic_report.rb +24 -0
  37. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  38. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  39. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  40. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  41. data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
  42. data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
  43. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  44. data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
  45. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  46. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  47. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  48. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  49. data/lib/rdkafka/admin.rb +832 -0
  50. data/lib/rdkafka/bindings.rb +582 -0
  51. data/lib/rdkafka/callbacks.rb +415 -0
  52. data/lib/rdkafka/config.rb +398 -0
  53. data/lib/rdkafka/consumer/headers.rb +79 -0
  54. data/lib/rdkafka/consumer/message.rb +86 -0
  55. data/lib/rdkafka/consumer/partition.rb +57 -0
  56. data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
  57. data/lib/rdkafka/consumer.rb +663 -0
  58. data/lib/rdkafka/error.rb +201 -0
  59. data/lib/rdkafka/helpers/oauth.rb +58 -0
  60. data/lib/rdkafka/helpers/time.rb +14 -0
  61. data/lib/rdkafka/metadata.rb +115 -0
  62. data/lib/rdkafka/native_kafka.rb +139 -0
  63. data/lib/rdkafka/producer/delivery_handle.rb +48 -0
  64. data/lib/rdkafka/producer/delivery_report.rb +45 -0
  65. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  66. data/lib/rdkafka/producer.rb +492 -0
  67. data/lib/rdkafka/version.rb +7 -0
  68. data/lib/rdkafka.rb +54 -0
  69. data/renovate.json +92 -0
  70. data/spec/rdkafka/abstract_handle_spec.rb +117 -0
  71. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  72. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  73. data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
  74. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  75. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  76. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  77. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
  78. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  79. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  80. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  81. data/spec/rdkafka/admin_spec.rb +769 -0
  82. data/spec/rdkafka/bindings_spec.rb +222 -0
  83. data/spec/rdkafka/callbacks_spec.rb +20 -0
  84. data/spec/rdkafka/config_spec.rb +258 -0
  85. data/spec/rdkafka/consumer/headers_spec.rb +73 -0
  86. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  87. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  88. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
  89. data/spec/rdkafka/consumer_spec.rb +1299 -0
  90. data/spec/rdkafka/error_spec.rb +95 -0
  91. data/spec/rdkafka/metadata_spec.rb +79 -0
  92. data/spec/rdkafka/native_kafka_spec.rb +130 -0
  93. data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
  94. data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
  95. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  96. data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
  97. data/spec/rdkafka/producer_spec.rb +1234 -0
  98. data/spec/spec_helper.rb +181 -0
  99. metadata +244 -0

data/lib/rdkafka/abstract_handle.rb
@@ -0,0 +1,116 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   # This class serves as an abstract base class to represent handles within the Rdkafka module.
+   # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle
+   # classes to inherit from, ensuring they adhere to a particular structure and behavior.
+   #
+   # Subclasses must define their own layout, and the layout must start with:
+   #
+   #   layout :pending, :bool,
+   #          :response, :int
+   class AbstractHandle < FFI::Struct
+     include Helpers::Time
+
+     # Registry for registering all the handles.
+     REGISTRY = {}
+     # Default wait timeout is 31 years
+     MAX_WAIT_TIMEOUT_FOREVER = 10_000_000_000
+
+     private_constant :MAX_WAIT_TIMEOUT_FOREVER
+
+     class << self
+       # Adds handle to the register
+       #
+       # @param handle [AbstractHandle] any handle we want to register
+       def register(handle)
+         address = handle.to_ptr.address
+         REGISTRY[address] = handle
+       end
+
+       # Removes handle from the register based on the handle address
+       #
+       # @param address [Integer] address of the registered handle we want to remove
+       def remove(address)
+         REGISTRY.delete(address)
+       end
+     end
+
+     def initialize
+       @mutex = Thread::Mutex.new
+       @resource = Thread::ConditionVariable.new
+
+       super
+     end
+
+     # Whether the handle is still pending.
+     #
+     # @return [Boolean]
+     def pending?
+       self[:pending]
+     end
+
+     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
+     # If there is a timeout this does not mean the operation failed, rdkafka might still be working
+     # on the operation. In this case it is possible to call wait again.
+     #
+     # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
+     #   If this is nil we will wait forever
+     # @param raise_response_error [Boolean] should we raise error when waiting finishes
+     #
+     # @return [Object] Operation-specific result
+     #
+     # @raise [RdkafkaError] When the operation failed
+     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
+     def wait(max_wait_timeout: 60, raise_response_error: true)
+       timeout = max_wait_timeout ? monotonic_now + max_wait_timeout : MAX_WAIT_TIMEOUT_FOREVER
+
+       @mutex.synchronize do
+         loop do
+           if pending?
+             to_wait = (timeout - monotonic_now)
+
+             if to_wait.positive?
+               @resource.wait(@mutex, to_wait)
+             else
+               raise WaitTimeoutError.new(
+                 "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+               )
+             end
+           elsif self[:response] != 0 && raise_response_error
+             raise_error
+           else
+             return create_result
+           end
+         end
+       end
+     end
+
+     # Unlock the resources
+     def unlock
+       @mutex.synchronize do
+         self[:pending] = false
+         @resource.broadcast
+       end
+     end
+
+     # @return [String] the name of the operation (e.g. "delivery")
+     def operation_name
+       raise "Must be implemented by subclass!"
+     end
+
+     # @return [Object] operation-specific result
+     def create_result
+       raise "Must be implemented by subclass!"
+     end
+
+     # Allow subclasses to override
+     def raise_error
+       RdkafkaError.validate!(self[:response])
+     end
+
+     # Error that is raised when waiting for the handle to complete
+     # takes longer than the specified timeout.
+     class WaitTimeoutError < RuntimeError; end
+   end
+ end
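
Usage note (not part of the diffed package): AbstractHandle is the synchronization primitive every asynchronous operation in this gem builds on. A callback thread fills in :response and calls #unlock while the caller blocks in #wait. A minimal sketch of that contract with a hypothetical ExampleHandle subclass:

    require "rdkafka"

    # Hypothetical subclass, only to illustrate the layout/operation_name/create_result contract
    class ExampleHandle < Rdkafka::AbstractHandle
      layout :pending, :bool,
             :response, :int

      def operation_name
        "example operation"
      end

      def create_result
        self[:response]
      end
    end

    handle = ExampleHandle.new
    handle[:pending] = true
    Rdkafka::AbstractHandle.register(handle)

    # Normally a librdkafka callback thread completes the handle; simulate that here
    Thread.new do
      sleep(0.1)
      handle[:response] = 0
      handle.unlock
    end

    handle.wait(max_wait_timeout: 5) # => 0, or raises WaitTimeoutError if never completed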

data/lib/rdkafka/admin/acl_binding_result.rb
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     # Extracts attributes of rd_kafka_AclBinding_t
+     #
+     class AclBindingResult
+       attr_reader :result_error, :error_string, :matching_acl_resource_type,
+                   :matching_acl_resource_name, :matching_acl_resource_pattern_type,
+                   :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
+                   :matching_acl_permission_type
+
+       # This attribute was initially released under the name that is now an alias
+       # We keep it for backwards compatibility, but it was renamed for consistency
+       alias matching_acl_pattern_type matching_acl_resource_pattern_type
+
+       def initialize(matching_acl)
+         rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
+         @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
+         error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
+
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+
+         @matching_acl_resource_type = Rdkafka::Bindings.rd_kafka_AclBinding_restype(matching_acl)
+         matching_acl_resource_name = Rdkafka::Bindings.rd_kafka_AclBinding_name(matching_acl)
+
+         if matching_acl_resource_name != FFI::Pointer::NULL
+           @matching_acl_resource_name = matching_acl_resource_name.read_string
+         end
+
+         @matching_acl_resource_pattern_type = Rdkafka::Bindings.rd_kafka_AclBinding_resource_pattern_type(matching_acl)
+         matching_acl_principal = Rdkafka::Bindings.rd_kafka_AclBinding_principal(matching_acl)
+
+         if matching_acl_principal != FFI::Pointer::NULL
+           @matching_acl_principal = matching_acl_principal.read_string
+         end
+
+         matching_acl_host = Rdkafka::Bindings.rd_kafka_AclBinding_host(matching_acl)
+
+         if matching_acl_host != FFI::Pointer::NULL
+           @matching_acl_host = matching_acl_host.read_string
+         end
+
+         @matching_acl_operation = Rdkafka::Bindings.rd_kafka_AclBinding_operation(matching_acl)
+         @matching_acl_permission_type = Rdkafka::Bindings.rd_kafka_AclBinding_permission_type(matching_acl)
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/config_binding_result.rb
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     # A single config binding result that represents its values extracted from C
+     class ConfigBindingResult
+       attr_reader :name, :value, :read_only, :default, :sensitive, :synonym, :synonyms
+
+       # @param config_ptr [FFI::Pointer] config pointer
+       def initialize(config_ptr)
+         @name = Bindings.rd_kafka_ConfigEntry_name(config_ptr)
+         @value = Bindings.rd_kafka_ConfigEntry_value(config_ptr)
+         @read_only = Bindings.rd_kafka_ConfigEntry_is_read_only(config_ptr)
+         @default = Bindings.rd_kafka_ConfigEntry_is_default(config_ptr)
+         @sensitive = Bindings.rd_kafka_ConfigEntry_is_sensitive(config_ptr)
+         @synonym = Bindings.rd_kafka_ConfigEntry_is_synonym(config_ptr)
+         @synonyms = []
+
+         # The code below builds up the config synonyms using the same config binding
+         pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+         synonym_ptr = Bindings.rd_kafka_ConfigEntry_synonyms(config_ptr, pointer_to_size_t)
+         synonyms_ptr = synonym_ptr.read_array_of_pointer(pointer_to_size_t.read_int)
+
+         (1..pointer_to_size_t.read_int).map do |ar|
+           @synonyms << self.class.new(synonyms_ptr[ar - 1])
+         end
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/config_resource_binding_result.rb
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     # A simple binding that represents the requested config resource
+     class ConfigResourceBindingResult
+       attr_reader :name, :type, :configs, :configs_count
+
+       def initialize(config_resource_ptr)
+         ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
+
+         @name = ffi_binding[:name]
+         @type = ffi_binding[:type]
+         @configs = []
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/create_acl_handle.rb
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class CreateAclHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create acl"
+       end
+
+       # @return [CreateAclReport] instance with rdkafka_response value as 0 and rdkafka_response_string value as empty string if the acl creation was successful
+       def create_result
+         CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/create_acl_report.rb
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class CreateAclReport
+
+       # Upon successful ACL creation, RD_KAFKA_RESP_ERR_NO_ERROR (0) is returned as rdkafka_response
+       # @return [Integer]
+       attr_reader :rdkafka_response
+
+
+       # Upon successful ACL creation, an empty string is returned as rdkafka_response_string
+       # @return [String]
+       attr_reader :rdkafka_response_string
+
+       def initialize(rdkafka_response:, rdkafka_response_string:)
+         @rdkafka_response = rdkafka_response
+         if rdkafka_response_string != FFI::Pointer::NULL
+           @rdkafka_response_string = rdkafka_response_string.read_string
+         end
+       end
+     end
+   end
+ end
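
Usage note (not part of the diffed package): CreateAclHandle and CreateAclReport back the ACL-creation flow of the Admin API. A sketch, assuming Admin#create_acl and the RD_KAFKA_* binding constants behave as in upstream rdkafka-ruby:

    require "rdkafka"

    admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin

    handle = admin.create_acl(
      resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
      resource_name: "example-topic",
      resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
      principal: "User:alice",
      host: "*",
      operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
      permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
    )

    # CreateAclHandle#wait raises RdkafkaError (with the broker message read from
    # :response_string) on failure, otherwise it returns a CreateAclReport
    report = handle.wait(max_wait_timeout: 15)
    report.rdkafka_response        # => 0 on success
    report.rdkafka_response_string # => "" on success

    admin.close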

data/lib/rdkafka/admin/create_partitions_handle.rb
@@ -0,0 +1,30 @@
+ module Rdkafka
+   class Admin
+     class CreatePartitionsHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create partitions"
+       end
+
+       # @return [CreatePartitionsReport] create partitions result report
+       def create_result
+         CreatePartitionsReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         RdkafkaError.validate!(
+           self[:response],
+           broker_message: CreatePartitionsReport.new(
+             self[:error_string],
+             self[:result_name]
+           ).error_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/create_partitions_report.rb
@@ -0,0 +1,6 @@
+ module Rdkafka
+   class Admin
+     class CreatePartitionsReport < CreateTopicReport
+     end
+   end
+ end

data/lib/rdkafka/admin/create_topic_handle.rb
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class CreateTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "create topic"
+       end
+
+       # @return [CreateTopicReport] create topic result report
+       def create_result
+         CreateTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         RdkafkaError.validate!(
+           self[:response],
+           broker_message: CreateTopicReport.new(
+             self[:error_string],
+             self[:result_name]
+           ).error_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/create_topic_report.rb
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class CreateTopicReport
+       # Any error message generated from the CreateTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic created
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end
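
Usage note (not part of the diffed package): the same handle/report pairing drives topic creation. A sketch, assuming Admin#create_topic(name, partition_count, replication_factor) as in upstream rdkafka-ruby:

    require "rdkafka"

    admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin

    handle = admin.create_topic("example-topic", 3, 1)

    # On failure, RdkafkaError is raised with CreateTopicReport#error_string as the broker message
    report = handle.wait(max_wait_timeout: 30)
    report.result_name  # => "example-topic"
    report.error_string # => nil when the broker reported no error

    admin.close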

data/lib/rdkafka/admin/delete_acl_handle.rb
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteAclHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer,
+              :matching_acls, :pointer,
+              :matching_acls_count, :int
+
+       # @return [String] the name of the operation
+       def operation_name
+         "delete acl"
+       end
+
+       # @return [DeleteAclReport] instance with an array of matching_acls
+       def create_result
+         DeleteAclReport.new(matching_acls: self[:matching_acls], matching_acls_count: self[:matching_acls_count])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/delete_acl_report.rb
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteAclReport
+
+       # Deleted ACLs
+       # @return [Array<AclBindingResult>]
+       attr_reader :deleted_acls
+
+       def initialize(matching_acls:, matching_acls_count:)
+         @deleted_acls = []
+         if matching_acls != FFI::Pointer::NULL
+           acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
+           (1..matching_acls_count).map do |matching_acl_index|
+             acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[matching_acl_index - 1])
+             @deleted_acls << acl_binding_result
+           end
+         end
+       end
+     end
+   end
+ end
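
Usage note (not part of the diffed package): DeleteAclReport wraps the matching_acls array returned by librdkafka into the AclBindingResult instances defined earlier in this diff. A sketch of reading it, assuming delete_acl_handle was returned by the Admin API's delete-ACL call:

    report = delete_acl_handle.wait(max_wait_timeout: 15)

    report.deleted_acls.each do |acl|
      # Attributes are extracted from rd_kafka_AclBinding_t, see AclBindingResult above
      puts [
        acl.matching_acl_principal,
        acl.matching_acl_resource_type,
        acl.matching_acl_resource_name,
        acl.matching_acl_operation,
        acl.matching_acl_permission_type
      ].join(" | ")
    end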

data/lib/rdkafka/admin/delete_groups_handle.rb
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteGroupsHandle < AbstractHandle
+       layout :pending, :bool, # TODO: ???
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "delete groups"
+       end
+
+       def create_result
+         DeleteGroupsReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: create_result.error_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/delete_groups_report.rb
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteGroupsReport
+       # Any error message generated from the DeleteGroups
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the group deleted
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/delete_topic_handle.rb
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteTopicHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :error_string, :pointer,
+              :result_name, :pointer
+
+       # @return [String] the name of the operation
+       def operation_name
+         "delete topic"
+       end
+
+       # @return [DeleteTopicReport] delete topic result report
+       def create_result
+         DeleteTopicReport.new(self[:error_string], self[:result_name])
+       end
+
+       def raise_error
+         RdkafkaError.validate!(
+           self[:response],
+           broker_message: DeleteTopicReport.new(
+             self[:error_string],
+             self[:result_name]
+           ).error_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/delete_topic_report.rb
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DeleteTopicReport
+       # Any error message generated from the DeleteTopic
+       # @return [String]
+       attr_reader :error_string
+
+       # The name of the topic deleted
+       # @return [String]
+       attr_reader :result_name
+
+       def initialize(error_string, result_name)
+         if error_string != FFI::Pointer::NULL
+           @error_string = error_string.read_string
+         end
+         if result_name != FFI::Pointer::NULL
+           @result_name = result_name.read_string
+         end
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/describe_acl_handle.rb
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeAclHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer,
+              :acls, :pointer,
+              :acls_count, :int
+
+       # @return [String] the name of the operation.
+       def operation_name
+         "describe acl"
+       end
+
+       # @return [DescribeAclReport] instance with an array of acls that matches the request filters.
+       def create_result
+         DescribeAclReport.new(acls: self[:acls], acls_count: self[:acls_count])
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/describe_acl_report.rb
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeAclReport
+
+       # ACLs that exist in the cluster for the resource_type, resource_name and pattern_type filters provided in the request.
+       # @return [Array<AclBindingResult>] array of matching ACLs.
+       attr_reader :acls
+
+       def initialize(acls:, acls_count:)
+         @acls = []
+
+         if acls != FFI::Pointer::NULL
+           acl_binding_result_pointers = acls.read_array_of_pointer(acls_count)
+           (1..acls_count).map do |acl_index|
+             acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[acl_index - 1])
+             @acls << acl_binding_result
+           end
+         end
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/describe_configs_handle.rb
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeConfigsHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer,
+              :config_entries, :pointer,
+              :entry_count, :int
+
+       # @return [String] the name of the operation.
+       def operation_name
+         "describe configs"
+       end
+
+       # @return [DescribeConfigsReport] instance with an array of config resources and their config entries.
+       def create_result
+         DescribeConfigsReport.new(
+           config_entries: self[:config_entries],
+           entry_count: self[:entry_count]
+         )
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end

data/lib/rdkafka/admin/describe_configs_report.rb
@@ -0,0 +1,48 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeConfigsReport
+       attr_reader :resources
+
+       def initialize(config_entries:, entry_count:)
+         @resources = []
+
+         return if config_entries == FFI::Pointer::NULL
+
+         config_entries
+           .read_array_of_pointer(entry_count)
+           .each { |config_resource_result_ptr| validate!(config_resource_result_ptr) }
+           .each do |config_resource_result_ptr|
+             config_resource_result = ConfigResourceBindingResult.new(config_resource_result_ptr)
+
+             pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+             configs_ptr = Bindings.rd_kafka_ConfigResource_configs(
+               config_resource_result_ptr,
+               pointer_to_size_t
+             )
+
+             configs_ptr
+               .read_array_of_pointer(pointer_to_size_t.read_int)
+               .map { |config_ptr| ConfigBindingResult.new(config_ptr) }
+               .each { |config_binding| config_resource_result.configs << config_binding }
+
+             @resources << config_resource_result
+           end
+       ensure
+         return if config_entries == FFI::Pointer::NULL
+
+         Bindings.rd_kafka_ConfigResource_destroy_array(config_entries, entry_count)
+       end
+
+       private
+
+       def validate!(config_resource_result_ptr)
+         RdkafkaError.validate!(
+           Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr),
+           Bindings.rd_kafka_ConfigResource_error_string(config_resource_result_ptr)
+         )
+       end
+     end
+   end
+ end
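
Usage note (not part of the diffed package): DescribeConfigsReport ties ConfigResourceBindingResult and ConfigBindingResult together: each resource carries its config entries, including synonyms. A sketch of consuming it, assuming describe_configs_handle was returned by the Admin API's describe-configs call:

    report = describe_configs_handle.wait(max_wait_timeout: 15)

    report.resources.each do |resource|
      puts "#{resource.type} #{resource.name}"

      resource.configs.each do |config|
        next if config.default # skip values the broker reports as defaults

        shown_value = config.sensitive ? "[REDACTED]" : config.value
        puts "  #{config.name} = #{shown_value} (read_only: #{config.read_only})"
      end
    end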