rdkafka 0.24.2-aarch64-linux-gnu → 0.25.1-aarch64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +18 -0
  3. data/Gemfile +8 -0
  4. data/Gemfile.lint +14 -0
  5. data/Gemfile.lint.lock +123 -0
  6. data/README.md +2 -1
  7. data/Rakefile +21 -21
  8. data/docker-compose-ssl.yml +1 -1
  9. data/docker-compose.yml +1 -1
  10. data/ext/librdkafka.so +0 -0
  11. data/lib/rdkafka/abstract_handle.rb +23 -5
  12. data/lib/rdkafka/admin/acl_binding_result.rb +5 -5
  13. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  14. data/lib/rdkafka/admin/create_acl_handle.rb +7 -4
  15. data/lib/rdkafka/admin/create_acl_report.rb +3 -2
  16. data/lib/rdkafka/admin/create_partitions_handle.rb +8 -5
  17. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +8 -5
  19. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  20. data/lib/rdkafka/admin/delete_acl_handle.rb +9 -6
  21. data/lib/rdkafka/admin/delete_acl_report.rb +5 -3
  22. data/lib/rdkafka/admin/delete_groups_handle.rb +10 -5
  23. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  24. data/lib/rdkafka/admin/delete_topic_handle.rb +8 -5
  25. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  26. data/lib/rdkafka/admin/describe_acl_handle.rb +9 -6
  27. data/lib/rdkafka/admin/describe_acl_report.rb +5 -3
  28. data/lib/rdkafka/admin/describe_configs_handle.rb +7 -4
  29. data/lib/rdkafka/admin/describe_configs_report.rb +7 -1
  30. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +7 -4
  31. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +7 -1
  32. data/lib/rdkafka/admin.rb +194 -132
  33. data/lib/rdkafka/bindings.rb +155 -107
  34. data/lib/rdkafka/callbacks.rb +81 -21
  35. data/lib/rdkafka/config.rb +36 -24
  36. data/lib/rdkafka/consumer/headers.rb +3 -2
  37. data/lib/rdkafka/consumer/message.rb +12 -11
  38. data/lib/rdkafka/consumer/partition.rb +8 -4
  39. data/lib/rdkafka/consumer/topic_partition_list.rb +18 -18
  40. data/lib/rdkafka/consumer.rb +247 -42
  41. data/lib/rdkafka/defaults.rb +106 -0
  42. data/lib/rdkafka/error.rb +28 -13
  43. data/lib/rdkafka/helpers/oauth.rb +11 -6
  44. data/lib/rdkafka/helpers/time.rb +5 -0
  45. data/lib/rdkafka/metadata.rb +45 -21
  46. data/lib/rdkafka/native_kafka.rb +89 -4
  47. data/lib/rdkafka/producer/delivery_handle.rb +5 -5
  48. data/lib/rdkafka/producer/delivery_report.rb +8 -4
  49. data/lib/rdkafka/producer/partitions_count_cache.rb +29 -19
  50. data/lib/rdkafka/producer.rb +165 -79
  51. data/lib/rdkafka/version.rb +6 -3
  52. data/lib/rdkafka.rb +1 -0
  53. data/package-lock.json +331 -0
  54. data/package.json +9 -0
  55. data/rdkafka.gemspec +39 -47
  56. data/renovate.json +22 -8
  57. metadata +7 -86
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: cc0c11225b1bfbd6973f5996df200d4a005c6ab8a30e603e8e703cb41c254a59
4
- data.tar.gz: 186086ab53ab1153aee63fbdb54aaf8bfc020afe9eec351b13664c61910234d5
3
+ metadata.gz: cb4d5531f159f367ce708bf7f673ac532f3abc4b252cb27cc8c8048ee40f5469
4
+ data.tar.gz: d7470b6f2eb99a44750dcd9703e39d7f978764e4ab281c1ed4090c9d7834a6c0
5
5
  SHA512:
6
- metadata.gz: 62f9c4c201c116572a40593c144cb33691808c60990832af4b30610a7ad3798d9405f207ef2bf308ebbab82dbfd32560f4142a524c01b7a71bf167b7eef5bf48
7
- data.tar.gz: 643c4132f91354fba5e1e07504b93ee451b18efd90318582629fcea7d9ea1e3642b4fc15e5cf67e1c8cdbe596b01c9fc4443a7bc37b805b51c63898673073555
6
+ metadata.gz: 8c767c43a9bb81d866423289f79695d46f78e4e4afc85f5d5e8ae8444a2835ddb7381455463fccfcb60ec3759b0bdc9c8abcc974bfd26d440ba1ddee35cbec7a
7
+ data.tar.gz: 2721303ffa4c4bdcdd1a9c11ec13153003202e91968fd22fc882c24cd4efbc6c75ba093d1c43ba0047d0d3bb9aaca6f858a45177895516186db4bc35087d4685
data/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
1
1
  # Rdkafka Changelog
2
2
 
3
+ ## 0.25.1 (2026-02-26)
4
+ - [Enhancement] Use native ARM64 runners instead of QEMU emulation for Alpine musl aarch64 builds, improving build performance and reliability.
5
+ - [Enhancement] Enable parallel compilation (`make -j$(nproc)`) for ARM64 Alpine musl builds.
6
+ - [Enhancement] Add file descriptor API for fiber scheduler integration. Expose `enable_queue_io_events` and `enable_background_queue_io_events` on `Consumer`, `Producer`, and `Admin` to enable non-blocking monitoring with select/poll/epoll for integration with Ruby fiber schedulers (Falcon, Async) and custom event loops.
7
+ - [Enhancement] Add non-blocking poll methods (`poll_nb`, `events_poll_nb`) on `Consumer` that skip GVL release for efficient fiber scheduler integration when using `poll(0)`.
8
+ - [Enhancement] Add `events_poll_nb_each` method on `Producer`, `Consumer`, and `Admin` for polling events in a single GVL/mutex session. Yields count after each iteration, caller returns `:stop` to break.
9
+ - [Enhancement] Add `poll_nb_each` method on `Consumer` for non-blocking message polling with proper resource cleanup, yielding each message and supporting early termination via `:stop` return value.
10
+
11
+ ## 0.25.0 (2026-01-20)
12
+ - **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout:` (seconds) is deprecated in favor of `max_wait_timeout_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
13
+ - **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
14
+ - [Enhancement] Extract all timeout defaults to `Rdkafka::Defaults` module for discoverability and per-call overrides (#310). All time-related values are now in milliseconds for consistency.
15
+ - [Enhancement] Add `timeout_ms` parameter to `Consumer#each` for configurable poll timeout.
16
+ - [Enhancement] Extract non-time configuration values (`METADATA_MAX_RETRIES`, `PARTITIONS_COUNT_CACHE_TTL_MS`) to `Rdkafka::Defaults` module.
17
+ - [Enhancement] Bump librdkafka to `2.12.1`
18
+ - [Enhancement] Add descriptive error messages for glibc compatibility issues with instructions for resolution (#654)
19
+ - [Enhancement] Replace magic numbers with named constants throughout codebase for improved readability and maintainability
20
+
3
21
  ## 0.24.2 (2025-10-31)
4
22
  - [Enhancement] Force lock FFI to 1.17.1 or higher to include critical bug fixes around GCC, write barriers, and thread restarts for forks.
5
23
  - [Fix] Fix for Core dump when providing extensions to oauthbearer_set_token (dssjoblom)
data/Gemfile CHANGED
@@ -3,3 +3,11 @@
3
3
  source "https://rubygems.org"
4
4
 
5
5
  gemspec
6
+
7
+ group :development do
8
+ gem "ostruct"
9
+ gem "pry"
10
+ gem "rspec"
11
+ gem "simplecov"
12
+ gem "warning"
13
+ end
data/Gemfile.lint ADDED
@@ -0,0 +1,14 @@
1
+ # frozen_string_literal: true
2
+
3
+ source "https://rubygems.org"
4
+
5
+ # Documentation linting
6
+ gem "yard-lint"
7
+
8
+ # Code style (StandardRB via RuboCop)
9
+ gem "standard"
10
+ gem "standard-performance"
11
+ gem "rubocop-performance"
12
+ gem "rubocop-rspec"
13
+ gem "standard-rspec"
14
+ gem "rubocop-thread_safety"
data/Gemfile.lint.lock ADDED
@@ -0,0 +1,123 @@
1
+ GEM
2
+ remote: https://rubygems.org/
3
+ specs:
4
+ ast (2.4.3)
5
+ json (2.18.0)
6
+ language_server-protocol (3.17.0.5)
7
+ lint_roller (1.1.0)
8
+ parallel (1.27.0)
9
+ parser (3.3.10.1)
10
+ ast (~> 2.4.1)
11
+ racc
12
+ prism (1.8.0)
13
+ racc (1.8.1)
14
+ rainbow (3.1.1)
15
+ regexp_parser (2.11.3)
16
+ rubocop (1.82.1)
17
+ json (~> 2.3)
18
+ language_server-protocol (~> 3.17.0.2)
19
+ lint_roller (~> 1.1.0)
20
+ parallel (~> 1.10)
21
+ parser (>= 3.3.0.2)
22
+ rainbow (>= 2.2.2, < 4.0)
23
+ regexp_parser (>= 2.9.3, < 3.0)
24
+ rubocop-ast (>= 1.48.0, < 2.0)
25
+ ruby-progressbar (~> 1.7)
26
+ unicode-display_width (>= 2.4.0, < 4.0)
27
+ rubocop-ast (1.49.0)
28
+ parser (>= 3.3.7.2)
29
+ prism (~> 1.7)
30
+ rubocop-capybara (2.22.1)
31
+ lint_roller (~> 1.1)
32
+ rubocop (~> 1.72, >= 1.72.1)
33
+ rubocop-factory_bot (2.28.0)
34
+ lint_roller (~> 1.1)
35
+ rubocop (~> 1.72, >= 1.72.1)
36
+ rubocop-performance (1.26.1)
37
+ lint_roller (~> 1.1)
38
+ rubocop (>= 1.75.0, < 2.0)
39
+ rubocop-ast (>= 1.47.1, < 2.0)
40
+ rubocop-rspec (3.9.0)
41
+ lint_roller (~> 1.1)
42
+ rubocop (~> 1.81)
43
+ rubocop-rspec_rails (2.32.0)
44
+ lint_roller (~> 1.1)
45
+ rubocop (~> 1.72, >= 1.72.1)
46
+ rubocop-rspec (~> 3.5)
47
+ rubocop-thread_safety (0.7.3)
48
+ lint_roller (~> 1.1)
49
+ rubocop (~> 1.72, >= 1.72.1)
50
+ rubocop-ast (>= 1.44.0, < 2.0)
51
+ ruby-progressbar (1.13.0)
52
+ standard (1.53.0)
53
+ language_server-protocol (~> 3.17.0.2)
54
+ lint_roller (~> 1.0)
55
+ rubocop (~> 1.82.0)
56
+ standard-custom (~> 1.0.0)
57
+ standard-performance (~> 1.8)
58
+ standard-custom (1.0.2)
59
+ lint_roller (~> 1.0)
60
+ rubocop (~> 1.50)
61
+ standard-performance (1.9.0)
62
+ lint_roller (~> 1.1)
63
+ rubocop-performance (~> 1.26.0)
64
+ standard-rspec (0.3.1)
65
+ lint_roller (>= 1.0)
66
+ rubocop-capybara (~> 2.22)
67
+ rubocop-factory_bot (~> 2.27)
68
+ rubocop-rspec (~> 3.5)
69
+ rubocop-rspec_rails (~> 2.31)
70
+ unicode-display_width (3.2.0)
71
+ unicode-emoji (~> 4.1)
72
+ unicode-emoji (4.2.0)
73
+ yard (0.9.38)
74
+ yard-lint (1.4.0)
75
+ yard (~> 0.9)
76
+ zeitwerk (~> 2.6)
77
+ zeitwerk (2.7.4)
78
+
79
+ PLATFORMS
80
+ ruby
81
+ x86_64-linux
82
+
83
+ DEPENDENCIES
84
+ rubocop-performance
85
+ rubocop-rspec
86
+ rubocop-thread_safety
87
+ standard
88
+ standard-performance
89
+ standard-rspec
90
+ yard-lint
91
+
92
+ CHECKSUMS
93
+ ast (2.4.3) sha256=954615157c1d6a382bc27d690d973195e79db7f55e9765ac7c481c60bdb4d383
94
+ json (2.18.0) sha256=b10506aee4183f5cf49e0efc48073d7b75843ce3782c68dbeb763351c08fd505
95
+ language_server-protocol (3.17.0.5) sha256=fd1e39a51a28bf3eec959379985a72e296e9f9acfce46f6a79d31ca8760803cc
96
+ lint_roller (1.1.0) sha256=2c0c845b632a7d172cb849cc90c1bce937a28c5c8ccccb50dfd46a485003cc87
97
+ parallel (1.27.0) sha256=4ac151e1806b755fb4e2dc2332cbf0e54f2e24ba821ff2d3dcf86bf6dc4ae130
98
+ parser (3.3.10.1) sha256=06f6a725d2cd91e5e7f2b7c32ba143631e1f7c8ae2fb918fc4cebec187e6a688
99
+ prism (1.8.0) sha256=84453a16ef5530ea62c5f03ec16b52a459575ad4e7b9c2b360fd8ce2c39c1254
100
+ racc (1.8.1) sha256=4a7f6929691dbec8b5209a0b373bc2614882b55fc5d2e447a21aaa691303d62f
101
+ rainbow (3.1.1) sha256=039491aa3a89f42efa1d6dec2fc4e62ede96eb6acd95e52f1ad581182b79bc6a
102
+ regexp_parser (2.11.3) sha256=ca13f381a173b7a93450e53459075c9b76a10433caadcb2f1180f2c741fc55a4
103
+ rubocop (1.82.1) sha256=09f1a6a654a960eda767aebea33e47603080f8e9c9a3f019bf9b94c9cab5e273
104
+ rubocop-ast (1.49.0) sha256=49c3676d3123a0923d333e20c6c2dbaaae2d2287b475273fddee0c61da9f71fd
105
+ rubocop-capybara (2.22.1) sha256=ced88caef23efea53f46e098ff352f8fc1068c649606ca75cb74650970f51c0c
106
+ rubocop-factory_bot (2.28.0) sha256=4b17fc02124444173317e131759d195b0d762844a71a29fe8139c1105d92f0cb
107
+ rubocop-performance (1.26.1) sha256=cd19b936ff196df85829d264b522fd4f98b6c89ad271fa52744a8c11b8f71834
108
+ rubocop-rspec (3.9.0) sha256=8fa70a3619408237d789aeecfb9beef40576acc855173e60939d63332fdb55e2
109
+ rubocop-rspec_rails (2.32.0) sha256=4a0d641c72f6ebb957534f539d9d0a62c47abd8ce0d0aeee1ef4701e892a9100
110
+ rubocop-thread_safety (0.7.3) sha256=067cdd52fbf5deffc18995437e45b5194236eaff4f71de3375a1f6052e48f431
111
+ ruby-progressbar (1.13.0) sha256=80fc9c47a9b640d6834e0dc7b3c94c9df37f08cb072b7761e4a71e22cff29b33
112
+ standard (1.53.0) sha256=f3c9493385db7079d0abce6f7582f553122156997b81258cd361d3480eeacf9c
113
+ standard-custom (1.0.2) sha256=424adc84179a074f1a2a309bb9cf7cd6bfdb2b6541f20c6bf9436c0ba22a652b
114
+ standard-performance (1.9.0) sha256=49483d31be448292951d80e5e67cdcb576c2502103c7b40aec6f1b6e9c88e3f2
115
+ standard-rspec (0.3.1) sha256=67bc957281cacf24f0d88235ca1bf28a8995265b1a60eb519cd0451858b56a22
116
+ unicode-display_width (3.2.0) sha256=0cdd96b5681a5949cdbc2c55e7b420facae74c4aaf9a9815eee1087cb1853c42
117
+ unicode-emoji (4.2.0) sha256=519e69150f75652e40bf736106cfbc8f0f73aa3fb6a65afe62fefa7f80b0f80f
118
+ yard (0.9.38) sha256=721fb82afb10532aa49860655f6cc2eaa7130889df291b052e1e6b268283010f
119
+ yard-lint (1.4.0) sha256=7dd88fbb08fd77cb840bea899d58812817b36d92291b5693dd0eeb3af9f91f0f
120
+ zeitwerk (2.7.4) sha256=2bef90f356bdafe9a6c2bd32bcd804f83a4f9b8bc27f3600fff051eb3edcec8b
121
+
122
+ BUNDLED WITH
123
+ 4.0.3
data/README.md CHANGED
@@ -163,7 +163,8 @@ bundle exec rake produce_messages
163
163
 
164
164
  | rdkafka-ruby | librdkafka | patches |
165
165
  |-|-|-|
166
- | 0.24.x (Unreleased) | 2.11.1 (2025-08-08) | yes |
166
+ | 0.25.x (2026-01-21) | 2.12.1 (2025-10-21) | yes |
167
+ | 0.24.x (2025-10-10) | 2.11.1 (2025-08-18) | yes |
167
168
  | 0.23.x (2025-09-04) | 2.11.0 (2025-07-03) | yes |
168
169
  | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
169
170
  | 0.21.x (2025-02-13) | 2.8.0 (2025-01-07) | yes |
data/Rakefile CHANGED
@@ -1,11 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'bundler/gem_tasks'
3
+ require "bundler/gem_tasks"
4
4
  require "./lib/rdkafka"
5
5
 
6
- desc 'Generate some message traffic'
6
+ desc "Generate some message traffic"
7
7
  task :produce_messages do
8
- config = {:"bootstrap.servers" => "localhost:9092"}
8
+ config = { "bootstrap.servers": "localhost:9092" }
9
9
  if ENV["DEBUG"]
10
10
  config[:debug] = "broker,topic,msg"
11
11
  end
@@ -15,24 +15,24 @@ task :produce_messages do
15
15
  100.times do |i|
16
16
  puts "Producing message #{i}"
17
17
  delivery_handles << producer.produce(
18
- topic: "rake_test_topic",
19
- payload: "Payload #{i} from Rake",
20
- key: "Key #{i} from Rake"
18
+ topic: "rake_test_topic",
19
+ payload: "Payload #{i} from Rake",
20
+ key: "Key #{i} from Rake"
21
21
  )
22
22
  end
23
- puts 'Waiting for delivery'
23
+ puts "Waiting for delivery"
24
24
  delivery_handles.each(&:wait)
25
- puts 'Done'
25
+ puts "Done"
26
26
  end
27
27
 
28
- desc 'Consume some messages'
28
+ desc "Consume some messages"
29
29
  task :consume_messages do
30
30
  config = {
31
- :"bootstrap.servers" => "localhost:9092",
32
- :"group.id" => "rake_test",
33
- :"enable.partition.eof" => false,
34
- :"auto.offset.reset" => "earliest",
35
- :"statistics.interval.ms" => 10_000
31
+ "bootstrap.servers": "localhost:9092",
32
+ "group.id": "rake_test",
33
+ "enable.partition.eof": false,
34
+ "auto.offset.reset": "earliest",
35
+ "statistics.interval.ms": 10_000
36
36
  }
37
37
  if ENV["DEBUG"]
38
38
  config[:debug] = "cgrp,topic,fetch"
@@ -40,7 +40,7 @@ task :consume_messages do
40
40
  Rdkafka::Config.statistics_callback = lambda do |stats|
41
41
  puts stats
42
42
  end
43
- consumer = Rdkafka::Config.new(config).consumer
43
+ Rdkafka::Config.new(config).consumer
44
44
  consumer = Rdkafka::Config.new(config).consumer
45
45
  consumer.subscribe("rake_test_topic")
46
46
  consumer.each do |message|
@@ -48,14 +48,14 @@ task :consume_messages do
48
48
  end
49
49
  end
50
50
 
51
- desc 'Hammer down'
51
+ desc "Hammer down"
52
52
  task :load_test do
53
53
  puts "Starting load test"
54
54
 
55
55
  config = Rdkafka::Config.new(
56
- :"bootstrap.servers" => "localhost:9092",
57
- :"group.id" => "load-test",
58
- :"enable.partition.eof" => false
56
+ "bootstrap.servers": "localhost:9092",
57
+ "group.id": "load-test",
58
+ "enable.partition.eof": false
59
59
  )
60
60
 
61
61
  # Create a producer in a thread
@@ -65,9 +65,9 @@ task :load_test do
65
65
  handles = []
66
66
  1000.times do |i|
67
67
  handles.push(producer.produce(
68
- topic: "load_test_topic",
68
+ topic: "load_test_topic",
69
69
  payload: "Payload #{i}",
70
- key: "Key #{i}"
70
+ key: "Key #{i}"
71
71
  ))
72
72
  end
73
73
  handles.each(&:wait)
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:8.1.0
4
+ image: confluentinc/cp-kafka:8.1.1
5
5
  ports:
6
6
  - 9092:9092 # Support PLAINTEXT so we can run one docker setup for SSL and PLAINTEXT
7
7
  - 9093:9093
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:8.1.0
4
+ image: confluentinc/cp-kafka:8.1.1
5
5
 
6
6
  ports:
7
7
  - 9092:9092
data/ext/librdkafka.so CHANGED
Binary file
@@ -54,16 +54,34 @@ module Rdkafka
54
54
  # If there is a timeout this does not mean the operation failed, rdkafka might still be working
55
55
  # on the operation. In this case it is possible to call wait again.
56
56
  #
57
- # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
58
- # If this is nil we will wait forever
57
+ # @param max_wait_timeout [Numeric, nil] DEPRECATED: Use max_wait_timeout_ms instead.
58
+ # Amount of time in seconds to wait before timing out. Will be removed in v1.0.0.
59
+ # @param max_wait_timeout_ms [Numeric, nil] Amount of time in milliseconds to wait before
60
+ # timing out. If this is nil we will wait forever. Defaults to 60,000ms (60 seconds).
59
61
  # @param raise_response_error [Boolean] should we raise error when waiting finishes
60
62
  #
61
63
  # @return [Object] Operation-specific result
62
64
  #
63
65
  # @raise [RdkafkaError] When the operation failed
64
66
  # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
65
- def wait(max_wait_timeout: 60, raise_response_error: true)
66
- timeout = max_wait_timeout ? monotonic_now + max_wait_timeout : MAX_WAIT_TIMEOUT_FOREVER
67
+ def wait(max_wait_timeout: :not_provided, max_wait_timeout_ms: :not_provided, raise_response_error: true)
68
+ # Determine which timeout value to use
69
+ if max_wait_timeout != :not_provided && max_wait_timeout_ms != :not_provided
70
+ warn "DEPRECATION WARNING: Both max_wait_timeout and max_wait_timeout_ms were provided. " \
71
+ "Using max_wait_timeout_ms. The max_wait_timeout parameter is deprecated and will be removed in v1.0.0."
72
+ timeout_ms = max_wait_timeout_ms
73
+ elsif max_wait_timeout != :not_provided
74
+ warn "DEPRECATION WARNING: max_wait_timeout (seconds) is deprecated. " \
75
+ "Use max_wait_timeout_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
76
+ timeout_ms = max_wait_timeout ? (max_wait_timeout * 1000).to_i : nil
77
+ elsif max_wait_timeout_ms == :not_provided
78
+ timeout_ms = Defaults::HANDLE_WAIT_TIMEOUT_MS
79
+ else
80
+ timeout_ms = max_wait_timeout_ms
81
+ end
82
+
83
+ timeout_s = timeout_ms ? timeout_ms / 1000.0 : nil
84
+ timeout = timeout_s ? monotonic_now + timeout_s : MAX_WAIT_TIMEOUT_FOREVER
67
85
 
68
86
  @mutex.synchronize do
69
87
  loop do
@@ -74,7 +92,7 @@ module Rdkafka
74
92
  @resource.wait(@mutex, to_wait)
75
93
  else
76
94
  raise WaitTimeoutError.new(
77
- "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
95
+ "Waiting for #{operation_name} timed out after #{timeout_ms} ms"
78
96
  )
79
97
  end
80
98
  elsif self[:response] != 0 && raise_response_error
@@ -3,17 +3,17 @@
3
3
  module Rdkafka
4
4
  class Admin
5
5
  # Extracts attributes of rd_kafka_AclBinding_t
6
- #
7
6
  class AclBindingResult
8
7
  attr_reader :result_error, :error_string, :matching_acl_resource_type,
9
- :matching_acl_resource_name, :matching_acl_resource_pattern_type,
10
- :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
11
- :matching_acl_permission_type
8
+ :matching_acl_resource_name, :matching_acl_resource_pattern_type,
9
+ :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
10
+ :matching_acl_permission_type
12
11
 
13
12
  # This attribute was initially released under the name that is now an alias
14
13
  # We keep it for backwards compatibility but it was changed for the consistency
15
- alias matching_acl_pattern_type matching_acl_resource_pattern_type
14
+ alias_method :matching_acl_pattern_type, :matching_acl_resource_pattern_type
16
15
 
16
+ # @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
17
17
  def initialize(matching_acl)
18
18
  rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
19
19
  @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
@@ -6,6 +6,7 @@ module Rdkafka
6
6
  class ConfigResourceBindingResult
7
7
  attr_reader :name, :type, :configs, :configs_count
8
8
 
9
+ # @param config_resource_ptr [FFI::Pointer] pointer to the config resource struct
9
10
  def initialize(config_resource_ptr)
10
11
  ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
11
12
 
@@ -2,10 +2,11 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for create ACL operation
5
6
  class CreateAclHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
- :response, :int,
8
- :response_string, :pointer
8
+ :response, :int,
9
+ :response_string, :pointer
9
10
 
10
11
  # @return [String] the name of the operation
11
12
  def operation_name
@@ -17,10 +18,12 @@ module Rdkafka
17
18
  CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
18
19
  end
19
20
 
21
+ # Raises an error if the operation failed
22
+ # @raise [RdkafkaError]
20
23
  def raise_error
21
24
  raise RdkafkaError.new(
22
- self[:response],
23
- broker_message: self[:response_string].read_string
25
+ self[:response],
26
+ broker_message: self[:response_string].read_string
24
27
  )
25
28
  end
26
29
  end
@@ -2,17 +2,18 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for create ACL operation result
5
6
  class CreateAclReport
6
-
7
7
  # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
8
8
  # @return [Integer]
9
9
  attr_reader :rdkafka_response
10
10
 
11
-
12
11
  # Upon successful creation of Acl empty string will be returned as rdkafka_response_string
13
12
  # @return [String]
14
13
  attr_reader :rdkafka_response_string
15
14
 
15
+ # @param rdkafka_response [Integer] response code from librdkafka
16
+ # @param rdkafka_response_string [FFI::Pointer] pointer to response string
16
17
  def initialize(rdkafka_response:, rdkafka_response_string:)
17
18
  @rdkafka_response = rdkafka_response
18
19
  if rdkafka_response_string != FFI::Pointer::NULL
@@ -1,10 +1,11 @@
1
1
  module Rdkafka
2
2
  class Admin
3
+ # Handle for create partitions operation
3
4
  class CreatePartitionsHandle < AbstractHandle
4
5
  layout :pending, :bool,
5
- :response, :int,
6
- :error_string, :pointer,
7
- :result_name, :pointer
6
+ :response, :int,
7
+ :error_string, :pointer,
8
+ :result_name, :pointer
8
9
 
9
10
  # @return [String] the name of the operation
10
11
  def operation_name
@@ -16,10 +17,12 @@ module Rdkafka
16
17
  CreatePartitionsReport.new(self[:error_string], self[:result_name])
17
18
  end
18
19
 
20
+ # Raises an error if the operation failed
21
+ # @raise [RdkafkaError]
19
22
  def raise_error
20
23
  raise RdkafkaError.new(
21
- self[:response],
22
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
24
+ self[:response],
25
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
23
26
  )
24
27
  end
25
28
  end
@@ -1,5 +1,6 @@
1
1
  module Rdkafka
2
2
  class Admin
3
+ # Report for create partitions operation result
3
4
  class CreatePartitionsReport < CreateTopicReport
4
5
  end
5
6
  end
@@ -2,11 +2,12 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for create topic operation
5
6
  class CreateTopicHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
- :response, :int,
8
- :error_string, :pointer,
9
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
10
11
 
11
12
  # @return [String] the name of the operation
12
13
  def operation_name
@@ -18,10 +19,12 @@ module Rdkafka
18
19
  CreateTopicReport.new(self[:error_string], self[:result_name])
19
20
  end
20
21
 
22
+ # Raises an error if the operation failed
23
+ # @raise [RdkafkaError]
21
24
  def raise_error
22
25
  raise RdkafkaError.new(
23
- self[:response],
24
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
26
+ self[:response],
27
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
25
28
  )
26
29
  end
27
30
  end
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for create topic operation result
5
6
  class CreateTopicReport
6
7
  # Any error message generated from the CreateTopic
7
8
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
11
12
  # @return [String]
12
13
  attr_reader :result_name
13
14
 
15
+ # @param error_string [FFI::Pointer] pointer to error string
16
+ # @param result_name [FFI::Pointer] pointer to topic name
14
17
  def initialize(error_string, result_name)
15
18
  if error_string != FFI::Pointer::NULL
16
19
  @error_string = error_string.read_string
@@ -2,12 +2,13 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for delete ACL operation
5
6
  class DeleteAclHandle < AbstractHandle
6
7
  layout :pending, :bool,
7
- :response, :int,
8
- :response_string, :pointer,
9
- :matching_acls, :pointer,
10
- :matching_acls_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :matching_acls, :pointer,
11
+ :matching_acls_count, :int
11
12
 
12
13
  # @return [String] the name of the operation
13
14
  def operation_name
@@ -19,10 +20,12 @@ module Rdkafka
19
20
  DeleteAclReport.new(matching_acls: self[:matching_acls], matching_acls_count: self[:matching_acls_count])
20
21
  end
21
22
 
23
+ # Raises an error if the operation failed
24
+ # @raise [RdkafkaError]
22
25
  def raise_error
23
26
  raise RdkafkaError.new(
24
- self[:response],
25
- broker_message: self[:response_string].read_string
27
+ self[:response],
28
+ broker_message: self[:response_string].read_string
26
29
  )
27
30
  end
28
31
  end
@@ -2,16 +2,18 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for delete ACL operation result
5
6
  class DeleteAclReport
6
-
7
7
  # deleted acls
8
8
  # @return [Rdkafka::Bindings::AclBindingResult]
9
9
  attr_reader :deleted_acls
10
10
 
11
+ # @param matching_acls [FFI::Pointer] pointer to matching ACLs array
12
+ # @param matching_acls_count [Integer] number of matching ACLs
11
13
  def initialize(matching_acls:, matching_acls_count:)
12
- @deleted_acls=[]
14
+ @deleted_acls = []
13
15
  if matching_acls != FFI::Pointer::NULL
14
- acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
16
+ acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
15
17
  (1..matching_acls_count).map do |matching_acl_index|
16
18
  acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[matching_acl_index - 1])
17
19
  @deleted_acls << acl_binding_result
@@ -2,25 +2,30 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Handle for delete groups operation
5
6
  class DeleteGroupsHandle < AbstractHandle
6
7
  layout :pending, :bool, # TODO: ???
7
- :response, :int,
8
- :error_string, :pointer,
9
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
10
11
 
11
12
  # @return [String] the name of the operation
12
13
  def operation_name
13
14
  "delete groups"
14
15
  end
15
16
 
17
+ # Creates the result report
18
+ # @return [DeleteGroupsReport]
16
19
  def create_result
17
20
  DeleteGroupsReport.new(self[:error_string], self[:result_name])
18
21
  end
19
22
 
23
+ # Raises an error if the operation failed
24
+ # @raise [RdkafkaError]
20
25
  def raise_error
21
26
  raise RdkafkaError.new(
22
- self[:response],
23
- broker_message: create_result.error_string
27
+ self[:response],
28
+ broker_message: create_result.error_string
24
29
  )
25
30
  end
26
31
  end
@@ -2,6 +2,7 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ # Report for delete groups operation result
5
6
  class DeleteGroupsReport
6
7
  # Any error message generated from the DeleteTopic
7
8
  # @return [String]
@@ -11,6 +12,8 @@ module Rdkafka
11
12
  # @return [String]
12
13
  attr_reader :result_name
13
14
 
15
+ # @param error_string [FFI::Pointer] pointer to error string
16
+ # @param result_name [FFI::Pointer] pointer to group name
14
17
  def initialize(error_string, result_name)
15
18
  if error_string != FFI::Pointer::NULL
16
19
  @error_string = error_string.read_string