rdkafka 0.22.2 → 0.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +38 -3
  3. data/Gemfile +9 -0
  4. data/README.md +17 -14
  5. data/bin/verify_kafka_warnings +37 -0
  6. data/dist/{librdkafka-2.8.0.tar.gz → librdkafka-2.12.1.tar.gz} +0 -0
  7. data/docker-compose-ssl.yml +35 -0
  8. data/docker-compose.yml +1 -1
  9. data/lib/rdkafka/abstract_handle.rb +23 -5
  10. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  11. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  12. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  13. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  15. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  16. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  17. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  19. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  20. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  21. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  23. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  25. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  27. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  29. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  30. data/lib/rdkafka/admin.rb +108 -113
  31. data/lib/rdkafka/bindings.rb +76 -30
  32. data/lib/rdkafka/callbacks.rb +71 -11
  33. data/lib/rdkafka/config.rb +21 -12
  34. data/lib/rdkafka/consumer/headers.rb +3 -2
  35. data/lib/rdkafka/consumer/message.rb +7 -3
  36. data/lib/rdkafka/consumer/partition.rb +6 -2
  37. data/lib/rdkafka/consumer/topic_partition_list.rb +11 -7
  38. data/lib/rdkafka/consumer.rb +41 -29
  39. data/lib/rdkafka/defaults.rb +106 -0
  40. data/lib/rdkafka/error.rb +16 -1
  41. data/lib/rdkafka/helpers/oauth.rb +45 -12
  42. data/lib/rdkafka/metadata.rb +29 -5
  43. data/lib/rdkafka/native_kafka.rb +26 -2
  44. data/lib/rdkafka/producer/delivery_report.rb +8 -4
  45. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  46. data/lib/rdkafka/producer.rb +52 -26
  47. data/lib/rdkafka/version.rb +6 -3
  48. data/lib/rdkafka.rb +1 -0
  49. data/rdkafka.gemspec +35 -13
  50. data/renovate.json +6 -25
  51. metadata +23 -124
  52. data/.github/CODEOWNERS +0 -3
  53. data/.github/FUNDING.yml +0 -1
  54. data/.github/workflows/ci_linux_x86_64_gnu.yml +0 -271
  55. data/.github/workflows/ci_linux_x86_64_musl.yml +0 -194
  56. data/.github/workflows/ci_macos_arm64.yml +0 -284
  57. data/.github/workflows/push_linux_x86_64_gnu.yml +0 -65
  58. data/.github/workflows/push_linux_x86_64_musl.yml +0 -79
  59. data/.github/workflows/push_macos_arm64.yml +0 -54
  60. data/.github/workflows/push_ruby.yml +0 -37
  61. data/.github/workflows/verify-action-pins.yml +0 -16
  62. data/.gitignore +0 -14
  63. data/.rspec +0 -2
  64. data/.ruby-gemset +0 -1
  65. data/.ruby-version +0 -1
  66. data/.yardopts +0 -2
  67. data/ext/README.md +0 -19
  68. data/ext/build_common.sh +0 -361
  69. data/ext/build_linux_x86_64_gnu.sh +0 -306
  70. data/ext/build_linux_x86_64_musl.sh +0 -763
  71. data/ext/build_macos_arm64.sh +0 -550
  72. data/spec/rdkafka/abstract_handle_spec.rb +0 -117
  73. data/spec/rdkafka/admin/create_acl_handle_spec.rb +0 -56
  74. data/spec/rdkafka/admin/create_acl_report_spec.rb +0 -18
  75. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -52
  76. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -16
  77. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +0 -85
  78. data/spec/rdkafka/admin/delete_acl_report_spec.rb +0 -72
  79. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -52
  80. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -16
  81. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +0 -85
  82. data/spec/rdkafka/admin/describe_acl_report_spec.rb +0 -73
  83. data/spec/rdkafka/admin_spec.rb +0 -971
  84. data/spec/rdkafka/bindings_spec.rb +0 -199
  85. data/spec/rdkafka/callbacks_spec.rb +0 -20
  86. data/spec/rdkafka/config_spec.rb +0 -258
  87. data/spec/rdkafka/consumer/headers_spec.rb +0 -73
  88. data/spec/rdkafka/consumer/message_spec.rb +0 -139
  89. data/spec/rdkafka/consumer/partition_spec.rb +0 -57
  90. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -248
  91. data/spec/rdkafka/consumer_spec.rb +0 -1274
  92. data/spec/rdkafka/error_spec.rb +0 -89
  93. data/spec/rdkafka/metadata_spec.rb +0 -79
  94. data/spec/rdkafka/native_kafka_spec.rb +0 -130
  95. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -45
  96. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -25
  97. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +0 -359
  98. data/spec/rdkafka/producer_spec.rb +0 -1345
  99. data/spec/spec_helper.rb +0 -195
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6bdfd8912cec56ebe8303d0ad14f3ab7ab2e5e0565e9418a6265a0e48172bc31
4
- data.tar.gz: 646f8a174c1019a0417a571f5894c745701c5d670a0585beb40200a2ffe08193
3
+ metadata.gz: 6e080c8b1e6e04090b729bcea44d0d35ab53083c7878c952e0e09146318ccf66
4
+ data.tar.gz: aee7d2f5e8d71de67e25ff89192f8f02f065c3dea3978e1850dac7c823e4fb17
5
5
  SHA512:
6
- metadata.gz: 9d0711b10d02a1c6792fb21206d7729c958fadc186da5b6d8382d69b0d98183cddf23c123ea1602dd890f39603cd44341b864726c4a737966148fffb7e5e5dbd
7
- data.tar.gz: deeacf4c205c164d2bd2d786f4a512f62caf8aeda6d6fcc11c24d2d665f4b7bd0a7f44447dfa1ebffc1fe54e7a9cdf36c7bbd4ae71c01e167b46b36a298d7482
6
+ metadata.gz: fdcc94f6e3b2f9ac318f2e3146987c12b823f016add46e881ab772838df2238a627f84fbb6741fcf3c30ed34b1a00c8197e71faa543db2a9c6f47439dae7a083
7
+ data.tar.gz: 0e5f9095c34b21c2307d7736b786c1376a1ab976fc0f9cf0a6fb27c6c5e0382ce3ab005701333b50066f23c64cb5fea352f5443ae31c85abc59e1c0cdabf1660
data/CHANGELOG.md CHANGED
@@ -1,9 +1,44 @@
1
1
  # Rdkafka Changelog
2
2
 
3
+ ## 0.25.0 (2026-01-20)
4
+ - **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout:` (seconds) is deprecated in favor of `max_wait_timeout_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
5
+ - **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
6
+ - [Enhancement] Extract all timeout defaults to `Rdkafka::Defaults` module for discoverability and per-call overrides (#310). All time-related values are now in milliseconds for consistency.
7
+ - [Enhancement] Add `timeout_ms` parameter to `Consumer#each` for configurable poll timeout.
8
+ - [Enhancement] Extract non-time configuration values (`METADATA_MAX_RETRIES`, `PARTITIONS_COUNT_CACHE_TTL_MS`) to `Rdkafka::Defaults` module.
9
+ - [Enhancement] Bump librdkafka to `2.12.1`
10
+ - [Enhancement] Add descriptive error messages for glibc compatibility issues with instructions for resolution (#654)
11
+ - [Enhancement] Replace magic numbers with named constants throughout codebase for improved readability and maintainability
12
+
13
+ ## 0.24.2 (2025-10-31)
14
+ - [Enhancement] Force lock FFI to 1.17.1 or higher to include critical bug fixes around GCC, write barriers, and thread restarts for forks.
15
+ - [Fix] Fix for Core dump when providing extensions to oauthbearer_set_token (dssjoblom)
16
+
17
+ ## 0.24.1 (2025-10-10)
18
+ - [Fix] Fix Github Action Ruby reference preventing non-compiled releases.
19
+
20
+ ## 0.24.0 (2025-10-10)
21
+ - [Enhancement] Bump librdkafka to `2.11.1`
22
+
23
+ ## 0.23.1 (2025-09-25)
24
+ - [Enhancement] Improve sigstore attestation for precompiled releases.
25
+ - [Fix] Fix incorrectly set default SSL certs dir.
26
+ - [Fix] Disable OpenSSL Heartbeats during compilation.
27
+
28
+ ## 0.23.0 (2025-09-04)
29
+ - **[EOL]** Drop support for Ruby 3.1 to move forward with the fiber scheduler work.
30
+ - [Enhancement] Bump librdkafka to `2.11.0`
31
+ - [Enhancement] Support explicit Debian testing due to lib issues.
32
+ - [Enhancement] Support ARM64 Gnu precompilation.
33
+ - [Enhancement] Improve what symbols are exposed outside of the precompiled extensions.
34
+ - [Enhancement] Introduce an integration suite layer for non RSpec specs execution.
35
+ - [Fix] Add `json` gem as a dependency (was missing but used).
36
+
3
37
  ## 0.22.2 (2025-07-21)
4
38
  - [Enhancement] Drastically increase number of platforms in the integration suite
5
39
  - [Fix] Support Ubuntu `22.04` and older Alpine precompiled versions
6
40
  - [Fix] FFI::DynamicLibrary.load_library': Could not open library
41
+ - [Change] Add new CI action to trigger auto-doc refresh.
7
42
 
8
43
  ## 0.22.1 (2025-07-17)
9
44
  - [Fix] Fix `Rakefile` being available in the precompiled versions causing build failures.
@@ -38,7 +73,7 @@
38
73
  - [Patch] Retire no longer needed cooperative-sticky patch.
39
74
 
40
75
  ## 0.19.0 (2024-10-01)
41
- - **[Breaking]** Drop Ruby 3.0 support
76
+ - **[EOL]** Drop Ruby 3.0 support
42
77
  - [Enhancement] Update `librdkafka` to `2.5.3`
43
78
  - [Enhancement] Use default oauth callback if none is passed (bachmanity1)
44
79
  - [Fix] Fix incorrectly behaving CI on failures.
@@ -62,7 +97,7 @@
62
97
  - [Fix] Switch to local release of librdkafka to mitigate its unavailability.
63
98
 
64
99
  ## 0.16.0 (2024-06-13)
65
- - **[Breaking]** Retire support for Ruby 2.7.
100
+ - **[EOL]** Retire support for Ruby 2.7.
66
101
  - **[Breaking]** Messages without headers returned by `#poll` contain frozen empty hash.
67
102
  - **[Breaking]** `HashWithSymbolKeysTreatedLikeStrings` has been removed so headers are regular hashes with string keys.
68
103
  - **[Feature]** Support incremental config describe + alter API.
@@ -123,7 +158,7 @@
123
158
  - [Fix] Reference to Opaque is not released when Admin, Consumer or Producer is closed (mensfeld)
124
159
  - [Fix] Trigger `#poll` on native kafka creation to handle oauthbearer cb (mensfeld)
125
160
  - [Fix] `#flush` does not handle the timeouts errors by making it return `true` if all flushed or `false` if failed. We do **not** raise an exception here to keep it backwards compatible (mensfeld)
126
- - [Change] Remove support for Ruby 2.6 due to it being EOL and WeakMap incompatibilities (mensfeld)
161
+ - **[EOL]** Remove support for Ruby 2.6 due to it being EOL and WeakMap incompatibilities (mensfeld)
127
162
  - [Change] Update Kafka Docker with Confluent KRaft (mensfeld)
128
163
  - [Change] Update librdkafka repo reference from edenhill to confluentinc (mensfeld)
129
164
 
data/Gemfile CHANGED
@@ -3,3 +3,12 @@
3
3
  source "https://rubygems.org"
4
4
 
5
5
  gemspec
6
+
7
+ group :development do
8
+ gem 'ostruct'
9
+ gem 'pry'
10
+ gem 'rspec'
11
+ gem 'simplecov'
12
+ gem 'warning'
13
+ gem 'yard-lint', '~> 1.3.0'
14
+ end
data/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # Rdkafka
2
2
 
3
- [![Build Status](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci_linux_x86_64_gnu.yml/badge.svg)](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci_linux_x86_64_gnu.yml)
3
+ [![Build Status](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci_linux_ubuntu_x86_64_gnu.yml/badge.svg)](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci_linux_x86_64_gnu.yml)
4
4
  [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
5
5
  [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
6
6
 
@@ -163,16 +163,19 @@ bundle exec rake produce_messages
163
163
 
164
164
  | rdkafka-ruby | librdkafka | patches |
165
165
  |-|-|-|
166
- | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
167
- | 0.21.x (2025-02-13) | 2.8.0 (2025-01-07) | yes |
168
- | 0.20.0 (2025-01-07) | 2.6.1 (2024-11-18) | yes |
169
- | 0.19.0 (2024-10-01) | 2.5.3 (2024-09-02) | yes |
170
- | 0.18.0 (2024-09-02) | 2.5.0 (2024-06-10) | yes |
171
- | 0.17.0 (2024-08-03) | 2.4.0 (2024-05-07) | no |
172
- | 0.16.0 (2024-06-13) | 2.3.0 (2023-10-25) | no |
173
- | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) | no |
174
- | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) | no |
175
- | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) | no |
176
- | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) | no |
177
- | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) | no |
178
- | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) | no |
166
+ | 0.25.x (Unreleased) | 2.12.1 (2025-10-21) | yes |
167
+ | 0.24.x (2025-10-10) | 2.11.1 (2025-08-18) | yes |
168
+ | 0.23.x (2025-09-04) | 2.11.0 (2025-07-03) | yes |
169
+ | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
170
+ | 0.21.x (2025-02-13) | 2.8.0 (2025-01-07) | yes |
171
+ | 0.20.0 (2025-01-07) | 2.6.1 (2024-11-18) | yes |
172
+ | 0.19.0 (2024-10-01) | 2.5.3 (2024-09-02) | yes |
173
+ | 0.18.0 (2024-09-02) | 2.5.0 (2024-06-10) | yes |
174
+ | 0.17.0 (2024-08-03) | 2.4.0 (2024-05-07) | no |
175
+ | 0.16.0 (2024-06-13) | 2.3.0 (2023-10-25) | no |
176
+ | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) | no |
177
+ | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) | no |
178
+ | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) | no |
179
+ | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) | no |
180
+ | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) | no |
181
+ | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) | no |
@@ -0,0 +1,37 @@
1
+ #!/bin/bash
2
+
3
+ # Checks Kafka logs for unsupported warning patterns
4
+ # Only specified warnings are allowed, all others should trigger failure
5
+
6
+ allowed_patterns=(
7
+ "Performing controller activation"
8
+ "registered with feature metadata.version"
9
+ "Replayed TopicRecord for"
10
+ "Replayed PartitionRecord for"
11
+ "Previous leader None and previous leader epoch"
12
+ "Creating new"
13
+ "Unloaded transaction metadata"
14
+ "closing connection"
15
+ )
16
+
17
+ # Get all warnings
18
+ warnings=$(docker logs --since=0 kafka | grep "] WARN ")
19
+ exit_code=0
20
+
21
+ while IFS= read -r line; do
22
+ allowed=0
23
+ for pattern in "${allowed_patterns[@]}"; do
24
+ if echo "$line" | grep -q "$pattern"; then
25
+ allowed=1
26
+ break
27
+ fi
28
+ done
29
+
30
+ if [ $allowed -eq 0 ]; then
31
+ echo "Unexpected warning found:"
32
+ echo "$line"
33
+ exit_code=1
34
+ fi
35
+ done <<< "$warnings"
36
+
37
+ exit $exit_code
@@ -0,0 +1,35 @@
1
+ services:
2
+ kafka:
3
+ container_name: kafka
4
+ image: confluentinc/cp-kafka:8.1.1
5
+ ports:
6
+ - 9092:9092 # Support PLAINTEXT so we can run one docker setup for SSL and PLAINTEXT
7
+ - 9093:9093
8
+ volumes:
9
+ - ./ssl:/etc/kafka/secrets
10
+ environment:
11
+ CLUSTER_ID: kafka-docker-cluster-1
12
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
13
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
14
+ KAFKA_PROCESS_ROLES: broker,controller
15
+ KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
16
+ KAFKA_LISTENERS: PLAINTEXT://:9092,SSL://:9093,CONTROLLER://:9094
17
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL
18
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093
19
+ KAFKA_BROKER_ID: 1
20
+ KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9094
21
+ ALLOW_PLAINTEXT_LISTENER: 'yes'
22
+ KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
23
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
24
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
25
+ KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
26
+ KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
27
+
28
+ # SSL Configuration
29
+ KAFKA_SSL_KEYSTORE_FILENAME: kafka.server.keystore.jks
30
+ KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka_keystore_creds
31
+ KAFKA_SSL_KEY_CREDENTIALS: kafka_ssl_key_creds
32
+ KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.server.truststore.jks
33
+ KAFKA_SSL_TRUSTSTORE_CREDENTIALS: kafka_truststore_creds
34
+ KAFKA_SSL_CLIENT_AUTH: none
35
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:8.0.0
4
+ image: confluentinc/cp-kafka:8.1.1
5
5
 
6
6
  ports:
7
7
  - 9092:9092
@@ -54,16 +54,34 @@ module Rdkafka
54
54
  # If there is a timeout this does not mean the operation failed, rdkafka might still be working
55
55
  # on the operation. In this case it is possible to call wait again.
56
56
  #
57
- # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
58
- # If this is nil we will wait forever
57
+ # @param max_wait_timeout [Numeric, nil] DEPRECATED: Use max_wait_timeout_ms instead.
58
+ # Amount of time in seconds to wait before timing out. Will be removed in v1.0.0.
59
+ # @param max_wait_timeout_ms [Numeric, nil] Amount of time in milliseconds to wait before
60
+ # timing out. If this is nil we will wait forever. Defaults to 60,000ms (60 seconds).
59
61
  # @param raise_response_error [Boolean] should we raise error when waiting finishes
60
62
  #
61
63
  # @return [Object] Operation-specific result
62
64
  #
63
65
  # @raise [RdkafkaError] When the operation failed
64
66
  # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
65
- def wait(max_wait_timeout: 60, raise_response_error: true)
66
- timeout = max_wait_timeout ? monotonic_now + max_wait_timeout : MAX_WAIT_TIMEOUT_FOREVER
67
+ def wait(max_wait_timeout: :not_provided, max_wait_timeout_ms: :not_provided, raise_response_error: true)
68
+ # Determine which timeout value to use
69
+ if max_wait_timeout != :not_provided && max_wait_timeout_ms != :not_provided
70
+ warn "DEPRECATION WARNING: Both max_wait_timeout and max_wait_timeout_ms were provided. " \
71
+ "Using max_wait_timeout_ms. The max_wait_timeout parameter is deprecated and will be removed in v1.0.0."
72
+ timeout_ms = max_wait_timeout_ms
73
+ elsif max_wait_timeout != :not_provided
74
+ warn "DEPRECATION WARNING: max_wait_timeout (seconds) is deprecated. " \
75
+ "Use max_wait_timeout_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
76
+ timeout_ms = max_wait_timeout ? (max_wait_timeout * 1000).to_i : nil
77
+ elsif max_wait_timeout_ms == :not_provided
78
+ timeout_ms = Defaults::HANDLE_WAIT_TIMEOUT_MS
79
+ else
80
+ timeout_ms = max_wait_timeout_ms
81
+ end
82
+
83
+ timeout_s = timeout_ms ? timeout_ms / 1000.0 : nil
84
+ timeout = timeout_s ? monotonic_now + timeout_s : MAX_WAIT_TIMEOUT_FOREVER
67
85
 
68
86
  @mutex.synchronize do
69
87
  loop do
@@ -74,7 +92,7 @@ module Rdkafka
74
92
  @resource.wait(@mutex, to_wait)
75
93
  else
76
94
  raise WaitTimeoutError.new(
77
- "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
95
+ "Waiting for #{operation_name} timed out after #{timeout_ms} ms"
78
96
  )
79
97
  end
80
98
  elsif self[:response] != 0 && raise_response_error
@@ -3,7 +3,6 @@
3
3
  module Rdkafka
4
4
  class Admin
5
5
  # Extracts attributes of rd_kafka_AclBinding_t
6
- #
7
6
  class AclBindingResult
8
7
  attr_reader :result_error, :error_string, :matching_acl_resource_type,
9
8
  :matching_acl_resource_name, :matching_acl_resource_pattern_type,
@@ -14,6 +13,7 @@ module Rdkafka
14
13
  # We keep it for backwards compatibility but it was changed for the consistency
15
14
  alias matching_acl_pattern_type matching_acl_resource_pattern_type
16
15
 
16
+ # @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
17
17
  def initialize(matching_acl)
18
18
  rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
19
19
  @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
@@ -6,6 +6,7 @@ module Rdkafka
6
6
  class ConfigResourceBindingResult
7
7
  attr_reader :name, :type, :configs, :configs_count
8
8
 
9
+ # @param config_resource_ptr [FFI::Pointer] pointer to the config resource struct
9
10
  def initialize(config_resource_ptr)
10
11
  ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
11
12
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for create ACL operation
5
6
  class CreateAclHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -17,6 +18,8 @@ module Rdkafka
17
18
  CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
18
19
  end
19
20
 
21
+ # Raises an error if the operation failed
22
+ # @raise [RdkafkaError]
20
23
  def raise_error
21
24
  raise RdkafkaError.new(
22
25
  self[:response],
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for create ACL operation result
5
6
  class CreateAclReport
6
7
 
7
8
  # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
@@ -13,6 +14,8 @@ module Rdkafka
13
14
  # @return [String]
14
15
  attr_reader :rdkafka_response_string
15
16
 
17
+ # @param rdkafka_response [Integer] response code from librdkafka
18
+ # @param rdkafka_response_string [FFI::Pointer] pointer to response string
16
19
  def initialize(rdkafka_response:, rdkafka_response_string:)
17
20
  @rdkafka_response = rdkafka_response
18
21
  if rdkafka_response_string != FFI::Pointer::NULL
@@ -1,5 +1,6 @@
1
1
  module Rdkafka
2
2
  class Admin
3
+ # Handle for create partitions operation
3
4
  class CreatePartitionsHandle < AbstractHandle
4
5
  layout :pending, :bool,
5
6
  :response, :int,
@@ -16,6 +17,8 @@ module Rdkafka
16
17
  CreatePartitionsReport.new(self[:error_string], self[:result_name])
17
18
  end
18
19
 
20
+ # Raises an error if the operation failed
21
+ # @raise [RdkafkaError]
19
22
  def raise_error
20
23
  raise RdkafkaError.new(
21
24
  self[:response],
@@ -1,5 +1,6 @@
1
1
  module Rdkafka
2
2
  class Admin
3
+ # Report for create partitions operation result
3
4
  class CreatePartitionsReport < CreateTopicReport
4
5
  end
5
6
  end
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for create topic operation
5
6
  class CreateTopicHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -18,6 +19,8 @@ module Rdkafka
18
19
  CreateTopicReport.new(self[:error_string], self[:result_name])
19
20
  end
20
21
 
22
+ # Raises an error if the operation failed
23
+ # @raise [RdkafkaError]
21
24
  def raise_error
22
25
  raise RdkafkaError.new(
23
26
  self[:response],
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for create topic operation result
5
6
  class CreateTopicReport
6
7
  # Any error message generated from the CreateTopic
7
8
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
11
12
  # @return [String]
12
13
  attr_reader :result_name
13
14
 
15
+ # @param error_string [FFI::Pointer] pointer to error string
16
+ # @param result_name [FFI::Pointer] pointer to topic name
14
17
  def initialize(error_string, result_name)
15
18
  if error_string != FFI::Pointer::NULL
16
19
  @error_string = error_string.read_string
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for delete ACL operation
5
6
  class DeleteAclHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -19,6 +20,8 @@ module Rdkafka
19
20
  DeleteAclReport.new(matching_acls: self[:matching_acls], matching_acls_count: self[:matching_acls_count])
20
21
  end
21
22
 
23
+ # Raises an error if the operation failed
24
+ # @raise [RdkafkaError]
22
25
  def raise_error
23
26
  raise RdkafkaError.new(
24
27
  self[:response],
@@ -2,12 +2,15 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for delete ACL operation result
5
6
  class DeleteAclReport
6
7
 
7
8
  # deleted acls
8
9
  # @return [Rdkafka::Bindings::AclBindingResult]
9
10
  attr_reader :deleted_acls
10
11
 
12
+ # @param matching_acls [FFI::Pointer] pointer to matching ACLs array
13
+ # @param matching_acls_count [Integer] number of matching ACLs
11
14
  def initialize(matching_acls:, matching_acls_count:)
12
15
  @deleted_acls=[]
13
16
  if matching_acls != FFI::Pointer::NULL
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for delete groups operation
5
6
  class DeleteGroupsHandle < AbstractHandle
6
7
  layout :pending, :bool, # TODO: ???
7
8
  :response, :int,
@@ -13,10 +14,14 @@ module Rdkafka
13
14
  "delete groups"
14
15
  end
15
16
 
17
+ # Creates the result report
18
+ # @return [DeleteGroupsReport]
16
19
  def create_result
17
20
  DeleteGroupsReport.new(self[:error_string], self[:result_name])
18
21
  end
19
22
 
23
+ # Raises an error if the operation failed
24
+ # @raise [RdkafkaError]
20
25
  def raise_error
21
26
  raise RdkafkaError.new(
22
27
  self[:response],
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for delete groups operation result
5
6
  class DeleteGroupsReport
6
7
  # Any error message generated from the DeleteTopic
7
8
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
11
12
  # @return [String]
12
13
  attr_reader :result_name
13
14
 
15
+ # @param error_string [FFI::Pointer] pointer to error string
16
+ # @param result_name [FFI::Pointer] pointer to group name
14
17
  def initialize(error_string, result_name)
15
18
  if error_string != FFI::Pointer::NULL
16
19
  @error_string = error_string.read_string
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for delete topic operation
5
6
  class DeleteTopicHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -18,6 +19,8 @@ module Rdkafka
18
19
  DeleteTopicReport.new(self[:error_string], self[:result_name])
19
20
  end
20
21
 
22
+ # Raises an error if the operation failed
23
+ # @raise [RdkafkaError]
21
24
  def raise_error
22
25
  raise RdkafkaError.new(
23
26
  self[:response],
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for delete topic operation result
5
6
  class DeleteTopicReport
6
7
  # Any error message generated from the DeleteTopic
7
8
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
11
12
  # @return [String]
12
13
  attr_reader :result_name
13
14
 
15
+ # @param error_string [FFI::Pointer] pointer to error string
16
+ # @param result_name [FFI::Pointer] pointer to topic name
14
17
  def initialize(error_string, result_name)
15
18
  if error_string != FFI::Pointer::NULL
16
19
  @error_string = error_string.read_string
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for describe ACL operation
5
6
  class DescribeAclHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -19,6 +20,8 @@ module Rdkafka
19
20
  DescribeAclReport.new(acls: self[:acls], acls_count: self[:acls_count])
20
21
  end
21
22
 
23
+ # Raises an error if the operation failed
24
+ # @raise [RdkafkaError]
22
25
  def raise_error
23
26
  raise RdkafkaError.new(
24
27
  self[:response],
@@ -2,12 +2,15 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for describe ACL operation result
5
6
  class DescribeAclReport
6
7
 
7
8
  # acls that exists in the cluster for the resource_type, resource_name and pattern_type filters provided in the request.
8
9
  # @return [Rdkafka::Bindings::AclBindingResult] array of matching acls.
9
10
  attr_reader :acls
10
11
 
12
+ # @param acls [FFI::Pointer] pointer to ACLs array
13
+ # @param acls_count [Integer] number of ACLs
11
14
  def initialize(acls:, acls_count:)
12
15
  @acls=[]
13
16
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for describe configs operation
5
6
  class DescribeConfigsHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -22,6 +23,8 @@ module Rdkafka
22
23
  )
23
24
  end
24
25
 
26
+ # Raises an error if the operation failed
27
+ # @raise [RdkafkaError]
25
28
  def raise_error
26
29
  raise RdkafkaError.new(
27
30
  self[:response],
@@ -2,9 +2,12 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for describe configs operation result
5
6
  class DescribeConfigsReport
6
7
  attr_reader :resources
7
8
 
9
+ # @param config_entries [FFI::Pointer] pointer to config entries array
10
+ # @param entry_count [Integer] number of config entries
8
11
  def initialize(config_entries:, entry_count:)
9
12
  @resources=[]
10
13
 
@@ -37,6 +40,9 @@ module Rdkafka
37
40
 
38
41
  private
39
42
 
43
+ # Validates the config resource result and raises an error if invalid
44
+ # @param config_resource_result_ptr [FFI::Pointer] pointer to the config resource result
45
+ # @raise [RdkafkaError] when the config resource has an error
40
46
  def validate!(config_resource_result_ptr)
41
47
  code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)
42
48
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for incremental alter configs operation
5
6
  class IncrementalAlterConfigsHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
8
  :response, :int,
@@ -22,6 +23,8 @@ module Rdkafka
22
23
  )
23
24
  end
24
25
 
26
+ # Raises an error if the operation failed
27
+ # @raise [RdkafkaError]
25
28
  def raise_error
26
29
  raise RdkafkaError.new(
27
30
  self[:response],
@@ -2,9 +2,12 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for incremental alter configs operation result
5
6
  class IncrementalAlterConfigsReport
6
7
  attr_reader :resources
7
8
 
9
+ # @param config_entries [FFI::Pointer] pointer to config entries array
10
+ # @param entry_count [Integer] number of config entries
8
11
  def initialize(config_entries:, entry_count:)
9
12
  @resources=[]
10
13
 
@@ -37,6 +40,9 @@ module Rdkafka
37
40
 
38
41
  private
39
42
 
43
+ # Validates the config resource result and raises an error if invalid
44
+ # @param config_resource_result_ptr [FFI::Pointer] pointer to the config resource result
45
+ # @raise [RdkafkaError] when the config resource has an error
40
46
  def validate!(config_resource_result_ptr)
41
47
  code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)
42
48