rdkafka 0.24.2 → 0.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/Gemfile +9 -0
  4. data/README.md +2 -1
  5. data/dist/{librdkafka-2.11.1.tar.gz → librdkafka-2.12.1.tar.gz} +0 -0
  6. data/docker-compose-ssl.yml +1 -1
  7. data/docker-compose.yml +1 -1
  8. data/lib/rdkafka/abstract_handle.rb +23 -5
  9. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  10. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  11. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  12. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  13. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  15. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  16. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  17. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  19. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  20. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  21. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  23. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  25. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  27. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  29. data/lib/rdkafka/admin.rb +108 -112
  30. data/lib/rdkafka/bindings.rb +62 -29
  31. data/lib/rdkafka/callbacks.rb +71 -11
  32. data/lib/rdkafka/config.rb +20 -8
  33. data/lib/rdkafka/consumer/headers.rb +3 -2
  34. data/lib/rdkafka/consumer/message.rb +7 -3
  35. data/lib/rdkafka/consumer/partition.rb +6 -2
  36. data/lib/rdkafka/consumer/topic_partition_list.rb +8 -8
  37. data/lib/rdkafka/consumer.rb +40 -28
  38. data/lib/rdkafka/defaults.rb +106 -0
  39. data/lib/rdkafka/error.rb +16 -1
  40. data/lib/rdkafka/helpers/oauth.rb +11 -5
  41. data/lib/rdkafka/metadata.rb +29 -5
  42. data/lib/rdkafka/native_kafka.rb +26 -2
  43. data/lib/rdkafka/producer/delivery_report.rb +6 -2
  44. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  45. data/lib/rdkafka/producer.rb +49 -23
  46. data/lib/rdkafka/version.rb +6 -3
  47. data/lib/rdkafka.rb +1 -0
  48. data/rdkafka.gemspec +0 -7
  49. data/renovate.json +1 -8
  50. metadata +4 -87
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5b0467e6f670f137f458bb1cb856f812ba2292c45a4c2a4b19477531fd59a3b7
- data.tar.gz: 9e18e607e5114b7ed31266565c308517729890e643a68b8535d19fb8df4f8256
+ metadata.gz: 6e080c8b1e6e04090b729bcea44d0d35ab53083c7878c952e0e09146318ccf66
+ data.tar.gz: aee7d2f5e8d71de67e25ff89192f8f02f065c3dea3978e1850dac7c823e4fb17
  SHA512:
- metadata.gz: d90b9aff5f184ec165c37451773845f66c71cfa47dc4dbf62d4764c009711ac28ab40fe4b3ecc083c2b599043f3906d552b52a1296fcbee089a8024a03c16e9e
- data.tar.gz: 91db6ebb4e3dde0d98395c1714205bae0a5027b31f4d1a1fa435171873d81998efa56c76bdb2c8a410f3cbb3d4348ae847d564d8c2348884d0a7ee080877e7a4
+ metadata.gz: fdcc94f6e3b2f9ac318f2e3146987c12b823f016add46e881ab772838df2238a627f84fbb6741fcf3c30ed34b1a00c8197e71faa543db2a9c6f47439dae7a083
+ data.tar.gz: 0e5f9095c34b21c2307d7736b786c1376a1ab976fc0f9cf0a6fb27c6c5e0382ce3ab005701333b50066f23c64cb5fea352f5443ae31c85abc59e1c0cdabf1660
data/CHANGELOG.md CHANGED
@@ -1,5 +1,15 @@
  # Rdkafka Changelog
 
+ ## 0.25.0 (2026-01-20)
+ - **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout:` (seconds) is deprecated in favor of `max_wait_timeout_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
+ - **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
+ - [Enhancement] Extract all timeout defaults to `Rdkafka::Defaults` module for discoverability and per-call overrides (#310). All time-related values are now in milliseconds for consistency.
+ - [Enhancement] Add `timeout_ms` parameter to `Consumer#each` for configurable poll timeout.
+ - [Enhancement] Extract non-time configuration values (`METADATA_MAX_RETRIES`, `PARTITIONS_COUNT_CACHE_TTL_MS`) to `Rdkafka::Defaults` module.
+ - [Enhancement] Bump librdkafka to `2.12.1`
+ - [Enhancement] Add descriptive error messages for glibc compatibility issues with instructions for resolution (#654)
+ - [Enhancement] Replace magic numbers with named constants throughout codebase for improved readability and maintainability
+
  ## 0.24.2 (2025-10-31)
  - [Enhancement] Force lock FFI to 1.17.1 or higher to include critical bug fixes around GCC, write barriers, and thread restarts for forks.
  - [Fix] Fix for Core dump when providing extensions to oauthbearer_set_token (dssjoblom)
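A hedged sketch of the millisecond-based poll timeout described in the changelog above. It is based only on the `Consumer#each` changelog entry; the broker address, group id, topic name, and timeout value are illustrative placeholders, not values from the gem's documentation.

```ruby
require "rdkafka"

# Illustrative configuration; broker, group id and topic are placeholders.
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id" => "example-group"
)

consumer = config.consumer
consumer.subscribe("example_topic")

# Per the 0.25.0 changelog, Consumer#each accepts a poll timeout in milliseconds.
consumer.each(timeout_ms: 250) do |message|
  puts "#{message.topic}[#{message.partition}] @ #{message.offset}: #{message.payload}"
end
```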
data/Gemfile CHANGED
@@ -3,3 +3,12 @@
  source "https://rubygems.org"
 
  gemspec
+
+ group :development do
+ gem 'ostruct'
+ gem 'pry'
+ gem 'rspec'
+ gem 'simplecov'
+ gem 'warning'
+ gem 'yard-lint', '~> 1.3.0'
+ end
data/README.md CHANGED
@@ -163,7 +163,8 @@ bundle exec rake produce_messages
 
  | rdkafka-ruby | librdkafka | patches |
  |-|-|-|
- | 0.24.x (Unreleased) | 2.11.1 (2025-08-08) | yes |
+ | 0.25.x (Unreleased) | 2.12.1 (2025-10-21) | yes |
+ | 0.24.x (2025-10-10) | 2.11.1 (2025-08-18) | yes |
  | 0.23.x (2025-09-04) | 2.11.0 (2025-07-03) | yes |
  | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
  | 0.21.x (2025-02-13) | 2.8.0 (2025-01-07) | yes |
data/docker-compose-ssl.yml CHANGED
@@ -1,7 +1,7 @@
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:8.1.0
+ image: confluentinc/cp-kafka:8.1.1
  ports:
  - 9092:9092 # Support PLAINTEXT so we can run one docker setup for SSL and PLAINTEXT
  - 9093:9093
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:8.1.0
+ image: confluentinc/cp-kafka:8.1.1
 
  ports:
  - 9092:9092
data/lib/rdkafka/abstract_handle.rb CHANGED
@@ -54,16 +54,34 @@ module Rdkafka
  # If there is a timeout this does not mean the operation failed, rdkafka might still be working
  # on the operation. In this case it is possible to call wait again.
  #
- # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
- # If this is nil we will wait forever
+ # @param max_wait_timeout [Numeric, nil] DEPRECATED: Use max_wait_timeout_ms instead.
+ # Amount of time in seconds to wait before timing out. Will be removed in v1.0.0.
+ # @param max_wait_timeout_ms [Numeric, nil] Amount of time in milliseconds to wait before
+ # timing out. If this is nil we will wait forever. Defaults to 60,000ms (60 seconds).
  # @param raise_response_error [Boolean] should we raise error when waiting finishes
  #
  # @return [Object] Operation-specific result
  #
  # @raise [RdkafkaError] When the operation failed
  # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
- def wait(max_wait_timeout: 60, raise_response_error: true)
- timeout = max_wait_timeout ? monotonic_now + max_wait_timeout : MAX_WAIT_TIMEOUT_FOREVER
+ def wait(max_wait_timeout: :not_provided, max_wait_timeout_ms: :not_provided, raise_response_error: true)
+ # Determine which timeout value to use
+ if max_wait_timeout != :not_provided && max_wait_timeout_ms != :not_provided
+ warn "DEPRECATION WARNING: Both max_wait_timeout and max_wait_timeout_ms were provided. " \
+ "Using max_wait_timeout_ms. The max_wait_timeout parameter is deprecated and will be removed in v1.0.0."
+ timeout_ms = max_wait_timeout_ms
+ elsif max_wait_timeout != :not_provided
+ warn "DEPRECATION WARNING: max_wait_timeout (seconds) is deprecated. " \
+ "Use max_wait_timeout_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
+ timeout_ms = max_wait_timeout ? (max_wait_timeout * 1000).to_i : nil
+ elsif max_wait_timeout_ms == :not_provided
+ timeout_ms = Defaults::HANDLE_WAIT_TIMEOUT_MS
+ else
+ timeout_ms = max_wait_timeout_ms
+ end
+
+ timeout_s = timeout_ms ? timeout_ms / 1000.0 : nil
+ timeout = timeout_s ? monotonic_now + timeout_s : MAX_WAIT_TIMEOUT_FOREVER
 
  @mutex.synchronize do
  loop do
@@ -74,7 +92,7 @@ module Rdkafka
  @resource.wait(@mutex, to_wait)
  else
  raise WaitTimeoutError.new(
- "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+ "Waiting for #{operation_name} timed out after #{timeout_ms} ms"
  )
  end
  elsif self[:response] != 0 && raise_response_error
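A minimal usage sketch of the new `wait` signature shown in the hunk above. The producer setup, broker address, and topic name are illustrative placeholders; when neither parameter is passed, the method falls back to `Defaults::HANDLE_WAIT_TIMEOUT_MS` as the diff shows.

```ruby
require "rdkafka"

# Illustrative producer setup; the broker address and topic are placeholders.
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
handle = producer.produce(topic: "example_topic", payload: "hello")

# New millisecond-based parameter introduced in this release:
handle.wait(max_wait_timeout_ms: 5_000)

# The old seconds-based parameter still works but emits a deprecation warning:
# handle.wait(max_wait_timeout: 5)
```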
data/lib/rdkafka/admin/acl_binding_result.rb CHANGED
@@ -3,7 +3,6 @@
  module Rdkafka
  class Admin
  # Extracts attributes of rd_kafka_AclBinding_t
- #
  class AclBindingResult
  attr_reader :result_error, :error_string, :matching_acl_resource_type,
  :matching_acl_resource_name, :matching_acl_resource_pattern_type,
@@ -14,6 +13,7 @@ module Rdkafka
  # We keep it for backwards compatibility but it was changed for the consistency
  alias matching_acl_pattern_type matching_acl_resource_pattern_type
 
+ # @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
  def initialize(matching_acl)
  rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
  @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
data/lib/rdkafka/admin/config_resource_binding_result.rb CHANGED
@@ -6,6 +6,7 @@ module Rdkafka
  class ConfigResourceBindingResult
  attr_reader :name, :type, :configs, :configs_count
 
+ # @param config_resource_ptr [FFI::Pointer] pointer to the config resource struct
  def initialize(config_resource_ptr)
  ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
 
data/lib/rdkafka/admin/create_acl_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for create ACL operation
  class CreateAclHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -17,6 +18,8 @@ module Rdkafka
  CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/create_acl_report.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Report for create ACL operation result
  class CreateAclReport
 
  # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
@@ -13,6 +14,8 @@ module Rdkafka
  # @return [String]
  attr_reader :rdkafka_response_string
 
+ # @param rdkafka_response [Integer] response code from librdkafka
+ # @param rdkafka_response_string [FFI::Pointer] pointer to response string
  def initialize(rdkafka_response:, rdkafka_response_string:)
  @rdkafka_response = rdkafka_response
  if rdkafka_response_string != FFI::Pointer::NULL
data/lib/rdkafka/admin/create_partitions_handle.rb CHANGED
@@ -1,5 +1,6 @@
  module Rdkafka
  class Admin
+ # Handle for create partitions operation
  class CreatePartitionsHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -16,6 +17,8 @@ module Rdkafka
  CreatePartitionsReport.new(self[:error_string], self[:result_name])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/create_partitions_report.rb CHANGED
@@ -1,5 +1,6 @@
  module Rdkafka
  class Admin
+ # Report for create partitions operation result
  class CreatePartitionsReport < CreateTopicReport
  end
  end
data/lib/rdkafka/admin/create_topic_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for create topic operation
  class CreateTopicHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -18,6 +19,8 @@ module Rdkafka
  CreateTopicReport.new(self[:error_string], self[:result_name])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/create_topic_report.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Report for create topic operation result
  class CreateTopicReport
  # Any error message generated from the CreateTopic
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
  # @return [String]
  attr_reader :result_name
 
+ # @param error_string [FFI::Pointer] pointer to error string
+ # @param result_name [FFI::Pointer] pointer to topic name
  def initialize(error_string, result_name)
  if error_string != FFI::Pointer::NULL
  @error_string = error_string.read_string
data/lib/rdkafka/admin/delete_acl_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for delete ACL operation
  class DeleteAclHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -19,6 +20,8 @@ module Rdkafka
  DeleteAclReport.new(matching_acls: self[:matching_acls], matching_acls_count: self[:matching_acls_count])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/delete_acl_report.rb CHANGED
@@ -2,12 +2,15 @@
 
  module Rdkafka
  class Admin
+ # Report for delete ACL operation result
  class DeleteAclReport
 
  # deleted acls
  # @return [Rdkafka::Bindings::AclBindingResult]
  attr_reader :deleted_acls
 
+ # @param matching_acls [FFI::Pointer] pointer to matching ACLs array
+ # @param matching_acls_count [Integer] number of matching ACLs
  def initialize(matching_acls:, matching_acls_count:)
  @deleted_acls=[]
  if matching_acls != FFI::Pointer::NULL
data/lib/rdkafka/admin/delete_groups_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for delete groups operation
  class DeleteGroupsHandle < AbstractHandle
  layout :pending, :bool, # TODO: ???
  :response, :int,
@@ -13,10 +14,14 @@ module Rdkafka
  "delete groups"
  end
 
+ # Creates the result report
+ # @return [DeleteGroupsReport]
  def create_result
  DeleteGroupsReport.new(self[:error_string], self[:result_name])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/delete_groups_report.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Report for delete groups operation result
  class DeleteGroupsReport
  # Any error message generated from the DeleteTopic
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
  # @return [String]
  attr_reader :result_name
 
+ # @param error_string [FFI::Pointer] pointer to error string
+ # @param result_name [FFI::Pointer] pointer to group name
  def initialize(error_string, result_name)
  if error_string != FFI::Pointer::NULL
  @error_string = error_string.read_string
data/lib/rdkafka/admin/delete_topic_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for delete topic operation
  class DeleteTopicHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -18,6 +19,8 @@ module Rdkafka
  DeleteTopicReport.new(self[:error_string], self[:result_name])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/delete_topic_report.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Report for delete topic operation result
  class DeleteTopicReport
  # Any error message generated from the DeleteTopic
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
  # @return [String]
  attr_reader :result_name
 
+ # @param error_string [FFI::Pointer] pointer to error string
+ # @param result_name [FFI::Pointer] pointer to topic name
  def initialize(error_string, result_name)
  if error_string != FFI::Pointer::NULL
  @error_string = error_string.read_string
data/lib/rdkafka/admin/describe_acl_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for describe ACL operation
  class DescribeAclHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -19,6 +20,8 @@ module Rdkafka
  DescribeAclReport.new(acls: self[:acls], acls_count: self[:acls_count])
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/describe_acl_report.rb CHANGED
@@ -2,12 +2,15 @@
 
  module Rdkafka
  class Admin
+ # Report for describe ACL operation result
  class DescribeAclReport
 
  # acls that exists in the cluster for the resource_type, resource_name and pattern_type filters provided in the request.
  # @return [Rdkafka::Bindings::AclBindingResult] array of matching acls.
  attr_reader :acls
 
+ # @param acls [FFI::Pointer] pointer to ACLs array
+ # @param acls_count [Integer] number of ACLs
  def initialize(acls:, acls_count:)
  @acls=[]
 
data/lib/rdkafka/admin/describe_configs_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for describe configs operation
  class DescribeConfigsHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -22,6 +23,8 @@ module Rdkafka
  )
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/describe_configs_report.rb CHANGED
@@ -2,9 +2,12 @@
 
  module Rdkafka
  class Admin
+ # Report for describe configs operation result
  class DescribeConfigsReport
  attr_reader :resources
 
+ # @param config_entries [FFI::Pointer] pointer to config entries array
+ # @param entry_count [Integer] number of config entries
  def initialize(config_entries:, entry_count:)
  @resources=[]
 
@@ -37,6 +40,9 @@ module Rdkafka
 
  private
 
+ # Validates the config resource result and raises an error if invalid
+ # @param config_resource_result_ptr [FFI::Pointer] pointer to the config resource result
+ # @raise [RdkafkaError] when the config resource has an error
  def validate!(config_resource_result_ptr)
  code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)
 
data/lib/rdkafka/admin/incremental_alter_configs_handle.rb CHANGED
@@ -2,6 +2,7 @@
 
  module Rdkafka
  class Admin
+ # Handle for incremental alter configs operation
  class IncrementalAlterConfigsHandle < AbstractHandle
  layout :pending, :bool,
  :response, :int,
@@ -22,6 +23,8 @@ module Rdkafka
  )
  end
 
+ # Raises an error if the operation failed
+ # @raise [RdkafkaError]
  def raise_error
  raise RdkafkaError.new(
  self[:response],
data/lib/rdkafka/admin/incremental_alter_configs_report.rb CHANGED
@@ -2,9 +2,12 @@
 
  module Rdkafka
  class Admin
+ # Report for incremental alter configs operation result
  class IncrementalAlterConfigsReport
  attr_reader :resources
 
+ # @param config_entries [FFI::Pointer] pointer to config entries array
+ # @param entry_count [Integer] number of config entries
  def initialize(config_entries:, entry_count:)
  @resources=[]
 
@@ -37,6 +40,9 @@ module Rdkafka
 
  private
 
+ # Validates the config resource result and raises an error if invalid
+ # @param config_resource_result_ptr [FFI::Pointer] pointer to the config resource result
+ # @raise [RdkafkaError] when the config resource has an error
  def validate!(config_resource_result_ptr)
  code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)