rdkafka 0.25.0-aarch64-linux-gnu → 0.25.1-aarch64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/Gemfile +5 -6
  4. data/Gemfile.lint +14 -0
  5. data/Gemfile.lint.lock +123 -0
  6. data/README.md +1 -1
  7. data/Rakefile +21 -21
  8. data/ext/librdkafka.so +0 -0
  9. data/lib/rdkafka/admin/acl_binding_result.rb +4 -4
  10. data/lib/rdkafka/admin/create_acl_handle.rb +4 -4
  11. data/lib/rdkafka/admin/create_acl_report.rb +0 -2
  12. data/lib/rdkafka/admin/create_partitions_handle.rb +5 -5
  13. data/lib/rdkafka/admin/create_topic_handle.rb +5 -5
  14. data/lib/rdkafka/admin/delete_acl_handle.rb +6 -6
  15. data/lib/rdkafka/admin/delete_acl_report.rb +2 -3
  16. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -5
  17. data/lib/rdkafka/admin/delete_topic_handle.rb +5 -5
  18. data/lib/rdkafka/admin/describe_acl_handle.rb +6 -6
  19. data/lib/rdkafka/admin/describe_acl_report.rb +2 -3
  20. data/lib/rdkafka/admin/describe_configs_handle.rb +4 -4
  21. data/lib/rdkafka/admin/describe_configs_report.rb +1 -1
  22. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +4 -4
  23. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +1 -1
  24. data/lib/rdkafka/admin.rb +86 -20
  25. data/lib/rdkafka/bindings.rb +97 -82
  26. data/lib/rdkafka/callbacks.rb +10 -10
  27. data/lib/rdkafka/config.rb +18 -18
  28. data/lib/rdkafka/consumer/message.rb +5 -8
  29. data/lib/rdkafka/consumer/partition.rb +2 -2
  30. data/lib/rdkafka/consumer/topic_partition_list.rb +10 -10
  31. data/lib/rdkafka/consumer.rb +207 -14
  32. data/lib/rdkafka/error.rb +13 -13
  33. data/lib/rdkafka/helpers/oauth.rb +0 -1
  34. data/lib/rdkafka/helpers/time.rb +5 -0
  35. data/lib/rdkafka/metadata.rb +16 -16
  36. data/lib/rdkafka/native_kafka.rb +63 -2
  37. data/lib/rdkafka/producer/delivery_handle.rb +5 -5
  38. data/lib/rdkafka/producer/delivery_report.rb +1 -1
  39. data/lib/rdkafka/producer/partitions_count_cache.rb +6 -6
  40. data/lib/rdkafka/producer.rb +117 -57
  41. data/lib/rdkafka/version.rb +1 -1
  42. data/package-lock.json +331 -0
  43. data/package.json +9 -0
  44. data/rdkafka.gemspec +39 -40
  45. data/renovate.json +21 -0
  46. metadata +5 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: ee8385e7bf5c04fd9d341fd737cbbcaf8b7acd6cf479c978c8b7a67b5ca64b2c
4
- data.tar.gz: ce0ccd9f7bc5ade5d0d5cefa77190b45ff6f3341104b901eade78d2ec8ff9b23
3
+ metadata.gz: cb4d5531f159f367ce708bf7f673ac532f3abc4b252cb27cc8c8048ee40f5469
4
+ data.tar.gz: d7470b6f2eb99a44750dcd9703e39d7f978764e4ab281c1ed4090c9d7834a6c0
5
5
  SHA512:
6
- metadata.gz: cd960c1ff7becd5c393382329e80b468103fd8e30f8a95a4ff9d942b451fdb56d6506c3998d7fe452dbae73ebe78c8cc3c4523837b771ae6ab599de8f2374a62
7
- data.tar.gz: 5f678c54591cd9e88fcb58c3550c49dc74024866ecef751ea4aad0355b0e26628082ba91ee7914c1f2df381dd0c0a2401d3168a912cdfb05cf099707df1b2ad6
6
+ metadata.gz: 8c767c43a9bb81d866423289f79695d46f78e4e4afc85f5d5e8ae8444a2835ddb7381455463fccfcb60ec3759b0bdc9c8abcc974bfd26d440ba1ddee35cbec7a
7
+ data.tar.gz: 2721303ffa4c4bdcdd1a9c11ec13153003202e91968fd22fc882c24cd4efbc6c75ba093d1c43ba0047d0d3bb9aaca6f858a45177895516186db4bc35087d4685
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Rdkafka Changelog
2
2
 
3
+ ## 0.25.1 (2026-02-26)
4
+ - [Enhancement] Use native ARM64 runners instead of QEMU emulation for Alpine musl aarch64 builds, improving build performance and reliability.
5
+ - [Enhancement] Enable parallel compilation (`make -j$(nproc)`) for ARM64 Alpine musl builds.
6
+ - [Enhancement] Add file descriptor API for fiber scheduler integration. Expose `enable_queue_io_events` and `enable_background_queue_io_events` on `Consumer`, `Producer`, and `Admin` to enable non-blocking monitoring with select/poll/epoll for integration with Ruby fiber schedulers (Falcon, Async) and custom event loops.
7
+ - [Enhancement] Add non-blocking poll methods (`poll_nb`, `events_poll_nb`) on `Consumer` that skip GVL release for efficient fiber scheduler integration when using `poll(0)`.
8
+ - [Enhancement] Add `events_poll_nb_each` method on `Producer`, `Consumer`, and `Admin` for polling events in a single GVL/mutex session. Yields count after each iteration, caller returns `:stop` to break.
9
+ - [Enhancement] Add `poll_nb_each` method on `Consumer` for non-blocking message polling with proper resource cleanup, yielding each message and supporting early termination via `:stop` return value.
10
+
3
11
  ## 0.25.0 (2026-01-20)
4
12
  - **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout:` (seconds) is deprecated in favor of `max_wait_timeout_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
5
13
  - **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
data/Gemfile CHANGED
@@ -5,10 +5,9 @@ source "https://rubygems.org"
5
5
  gemspec
6
6
 
7
7
  group :development do
8
- gem 'ostruct'
9
- gem 'pry'
10
- gem 'rspec'
11
- gem 'simplecov'
12
- gem 'warning'
13
- gem 'yard-lint', '~> 1.3.0'
8
+ gem "ostruct"
9
+ gem "pry"
10
+ gem "rspec"
11
+ gem "simplecov"
12
+ gem "warning"
14
13
  end
data/Gemfile.lint ADDED
@@ -0,0 +1,14 @@
1
+ # frozen_string_literal: true
2
+
3
+ source "https://rubygems.org"
4
+
5
+ # Documentation linting
6
+ gem "yard-lint"
7
+
8
+ # Code style (StandardRB via RuboCop)
9
+ gem "standard"
10
+ gem "standard-performance"
11
+ gem "rubocop-performance"
12
+ gem "rubocop-rspec"
13
+ gem "standard-rspec"
14
+ gem "rubocop-thread_safety"
data/Gemfile.lint.lock ADDED
@@ -0,0 +1,123 @@
1
+ GEM
2
+ remote: https://rubygems.org/
3
+ specs:
4
+ ast (2.4.3)
5
+ json (2.18.0)
6
+ language_server-protocol (3.17.0.5)
7
+ lint_roller (1.1.0)
8
+ parallel (1.27.0)
9
+ parser (3.3.10.1)
10
+ ast (~> 2.4.1)
11
+ racc
12
+ prism (1.8.0)
13
+ racc (1.8.1)
14
+ rainbow (3.1.1)
15
+ regexp_parser (2.11.3)
16
+ rubocop (1.82.1)
17
+ json (~> 2.3)
18
+ language_server-protocol (~> 3.17.0.2)
19
+ lint_roller (~> 1.1.0)
20
+ parallel (~> 1.10)
21
+ parser (>= 3.3.0.2)
22
+ rainbow (>= 2.2.2, < 4.0)
23
+ regexp_parser (>= 2.9.3, < 3.0)
24
+ rubocop-ast (>= 1.48.0, < 2.0)
25
+ ruby-progressbar (~> 1.7)
26
+ unicode-display_width (>= 2.4.0, < 4.0)
27
+ rubocop-ast (1.49.0)
28
+ parser (>= 3.3.7.2)
29
+ prism (~> 1.7)
30
+ rubocop-capybara (2.22.1)
31
+ lint_roller (~> 1.1)
32
+ rubocop (~> 1.72, >= 1.72.1)
33
+ rubocop-factory_bot (2.28.0)
34
+ lint_roller (~> 1.1)
35
+ rubocop (~> 1.72, >= 1.72.1)
36
+ rubocop-performance (1.26.1)
37
+ lint_roller (~> 1.1)
38
+ rubocop (>= 1.75.0, < 2.0)
39
+ rubocop-ast (>= 1.47.1, < 2.0)
40
+ rubocop-rspec (3.9.0)
41
+ lint_roller (~> 1.1)
42
+ rubocop (~> 1.81)
43
+ rubocop-rspec_rails (2.32.0)
44
+ lint_roller (~> 1.1)
45
+ rubocop (~> 1.72, >= 1.72.1)
46
+ rubocop-rspec (~> 3.5)
47
+ rubocop-thread_safety (0.7.3)
48
+ lint_roller (~> 1.1)
49
+ rubocop (~> 1.72, >= 1.72.1)
50
+ rubocop-ast (>= 1.44.0, < 2.0)
51
+ ruby-progressbar (1.13.0)
52
+ standard (1.53.0)
53
+ language_server-protocol (~> 3.17.0.2)
54
+ lint_roller (~> 1.0)
55
+ rubocop (~> 1.82.0)
56
+ standard-custom (~> 1.0.0)
57
+ standard-performance (~> 1.8)
58
+ standard-custom (1.0.2)
59
+ lint_roller (~> 1.0)
60
+ rubocop (~> 1.50)
61
+ standard-performance (1.9.0)
62
+ lint_roller (~> 1.1)
63
+ rubocop-performance (~> 1.26.0)
64
+ standard-rspec (0.3.1)
65
+ lint_roller (>= 1.0)
66
+ rubocop-capybara (~> 2.22)
67
+ rubocop-factory_bot (~> 2.27)
68
+ rubocop-rspec (~> 3.5)
69
+ rubocop-rspec_rails (~> 2.31)
70
+ unicode-display_width (3.2.0)
71
+ unicode-emoji (~> 4.1)
72
+ unicode-emoji (4.2.0)
73
+ yard (0.9.38)
74
+ yard-lint (1.4.0)
75
+ yard (~> 0.9)
76
+ zeitwerk (~> 2.6)
77
+ zeitwerk (2.7.4)
78
+
79
+ PLATFORMS
80
+ ruby
81
+ x86_64-linux
82
+
83
+ DEPENDENCIES
84
+ rubocop-performance
85
+ rubocop-rspec
86
+ rubocop-thread_safety
87
+ standard
88
+ standard-performance
89
+ standard-rspec
90
+ yard-lint
91
+
92
+ CHECKSUMS
93
+ ast (2.4.3) sha256=954615157c1d6a382bc27d690d973195e79db7f55e9765ac7c481c60bdb4d383
94
+ json (2.18.0) sha256=b10506aee4183f5cf49e0efc48073d7b75843ce3782c68dbeb763351c08fd505
95
+ language_server-protocol (3.17.0.5) sha256=fd1e39a51a28bf3eec959379985a72e296e9f9acfce46f6a79d31ca8760803cc
96
+ lint_roller (1.1.0) sha256=2c0c845b632a7d172cb849cc90c1bce937a28c5c8ccccb50dfd46a485003cc87
97
+ parallel (1.27.0) sha256=4ac151e1806b755fb4e2dc2332cbf0e54f2e24ba821ff2d3dcf86bf6dc4ae130
98
+ parser (3.3.10.1) sha256=06f6a725d2cd91e5e7f2b7c32ba143631e1f7c8ae2fb918fc4cebec187e6a688
99
+ prism (1.8.0) sha256=84453a16ef5530ea62c5f03ec16b52a459575ad4e7b9c2b360fd8ce2c39c1254
100
+ racc (1.8.1) sha256=4a7f6929691dbec8b5209a0b373bc2614882b55fc5d2e447a21aaa691303d62f
101
+ rainbow (3.1.1) sha256=039491aa3a89f42efa1d6dec2fc4e62ede96eb6acd95e52f1ad581182b79bc6a
102
+ regexp_parser (2.11.3) sha256=ca13f381a173b7a93450e53459075c9b76a10433caadcb2f1180f2c741fc55a4
103
+ rubocop (1.82.1) sha256=09f1a6a654a960eda767aebea33e47603080f8e9c9a3f019bf9b94c9cab5e273
104
+ rubocop-ast (1.49.0) sha256=49c3676d3123a0923d333e20c6c2dbaaae2d2287b475273fddee0c61da9f71fd
105
+ rubocop-capybara (2.22.1) sha256=ced88caef23efea53f46e098ff352f8fc1068c649606ca75cb74650970f51c0c
106
+ rubocop-factory_bot (2.28.0) sha256=4b17fc02124444173317e131759d195b0d762844a71a29fe8139c1105d92f0cb
107
+ rubocop-performance (1.26.1) sha256=cd19b936ff196df85829d264b522fd4f98b6c89ad271fa52744a8c11b8f71834
108
+ rubocop-rspec (3.9.0) sha256=8fa70a3619408237d789aeecfb9beef40576acc855173e60939d63332fdb55e2
109
+ rubocop-rspec_rails (2.32.0) sha256=4a0d641c72f6ebb957534f539d9d0a62c47abd8ce0d0aeee1ef4701e892a9100
110
+ rubocop-thread_safety (0.7.3) sha256=067cdd52fbf5deffc18995437e45b5194236eaff4f71de3375a1f6052e48f431
111
+ ruby-progressbar (1.13.0) sha256=80fc9c47a9b640d6834e0dc7b3c94c9df37f08cb072b7761e4a71e22cff29b33
112
+ standard (1.53.0) sha256=f3c9493385db7079d0abce6f7582f553122156997b81258cd361d3480eeacf9c
113
+ standard-custom (1.0.2) sha256=424adc84179a074f1a2a309bb9cf7cd6bfdb2b6541f20c6bf9436c0ba22a652b
114
+ standard-performance (1.9.0) sha256=49483d31be448292951d80e5e67cdcb576c2502103c7b40aec6f1b6e9c88e3f2
115
+ standard-rspec (0.3.1) sha256=67bc957281cacf24f0d88235ca1bf28a8995265b1a60eb519cd0451858b56a22
116
+ unicode-display_width (3.2.0) sha256=0cdd96b5681a5949cdbc2c55e7b420facae74c4aaf9a9815eee1087cb1853c42
117
+ unicode-emoji (4.2.0) sha256=519e69150f75652e40bf736106cfbc8f0f73aa3fb6a65afe62fefa7f80b0f80f
118
+ yard (0.9.38) sha256=721fb82afb10532aa49860655f6cc2eaa7130889df291b052e1e6b268283010f
119
+ yard-lint (1.4.0) sha256=7dd88fbb08fd77cb840bea899d58812817b36d92291b5693dd0eeb3af9f91f0f
120
+ zeitwerk (2.7.4) sha256=2bef90f356bdafe9a6c2bd32bcd804f83a4f9b8bc27f3600fff051eb3edcec8b
121
+
122
+ BUNDLED WITH
123
+ 4.0.3
data/README.md CHANGED
@@ -163,7 +163,7 @@ bundle exec rake produce_messages
163
163
 
164
164
  | rdkafka-ruby | librdkafka | patches |
165
165
  |-|-|-|
166
- | 0.25.x (Unreleased) | 2.12.1 (2025-10-21) | yes |
166
+ | 0.25.x (2026-01-21) | 2.12.1 (2025-10-21) | yes |
167
167
  | 0.24.x (2025-10-10) | 2.11.1 (2025-08-18) | yes |
168
168
  | 0.23.x (2025-09-04) | 2.11.0 (2025-07-03) | yes |
169
169
  | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
data/Rakefile CHANGED
@@ -1,11 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'bundler/gem_tasks'
3
+ require "bundler/gem_tasks"
4
4
  require "./lib/rdkafka"
5
5
 
6
- desc 'Generate some message traffic'
6
+ desc "Generate some message traffic"
7
7
  task :produce_messages do
8
- config = {:"bootstrap.servers" => "localhost:9092"}
8
+ config = { "bootstrap.servers": "localhost:9092" }
9
9
  if ENV["DEBUG"]
10
10
  config[:debug] = "broker,topic,msg"
11
11
  end
@@ -15,24 +15,24 @@ task :produce_messages do
15
15
  100.times do |i|
16
16
  puts "Producing message #{i}"
17
17
  delivery_handles << producer.produce(
18
- topic: "rake_test_topic",
19
- payload: "Payload #{i} from Rake",
20
- key: "Key #{i} from Rake"
18
+ topic: "rake_test_topic",
19
+ payload: "Payload #{i} from Rake",
20
+ key: "Key #{i} from Rake"
21
21
  )
22
22
  end
23
- puts 'Waiting for delivery'
23
+ puts "Waiting for delivery"
24
24
  delivery_handles.each(&:wait)
25
- puts 'Done'
25
+ puts "Done"
26
26
  end
27
27
 
28
- desc 'Consume some messages'
28
+ desc "Consume some messages"
29
29
  task :consume_messages do
30
30
  config = {
31
- :"bootstrap.servers" => "localhost:9092",
32
- :"group.id" => "rake_test",
33
- :"enable.partition.eof" => false,
34
- :"auto.offset.reset" => "earliest",
35
- :"statistics.interval.ms" => 10_000
31
+ "bootstrap.servers": "localhost:9092",
32
+ "group.id": "rake_test",
33
+ "enable.partition.eof": false,
34
+ "auto.offset.reset": "earliest",
35
+ "statistics.interval.ms": 10_000
36
36
  }
37
37
  if ENV["DEBUG"]
38
38
  config[:debug] = "cgrp,topic,fetch"
@@ -40,7 +40,7 @@ task :consume_messages do
40
40
  Rdkafka::Config.statistics_callback = lambda do |stats|
41
41
  puts stats
42
42
  end
43
- consumer = Rdkafka::Config.new(config).consumer
43
+ Rdkafka::Config.new(config).consumer
44
44
  consumer = Rdkafka::Config.new(config).consumer
45
45
  consumer.subscribe("rake_test_topic")
46
46
  consumer.each do |message|
@@ -48,14 +48,14 @@ task :consume_messages do
48
48
  end
49
49
  end
50
50
 
51
- desc 'Hammer down'
51
+ desc "Hammer down"
52
52
  task :load_test do
53
53
  puts "Starting load test"
54
54
 
55
55
  config = Rdkafka::Config.new(
56
- :"bootstrap.servers" => "localhost:9092",
57
- :"group.id" => "load-test",
58
- :"enable.partition.eof" => false
56
+ "bootstrap.servers": "localhost:9092",
57
+ "group.id": "load-test",
58
+ "enable.partition.eof": false
59
59
  )
60
60
 
61
61
  # Create a producer in a thread
@@ -65,9 +65,9 @@ task :load_test do
65
65
  handles = []
66
66
  1000.times do |i|
67
67
  handles.push(producer.produce(
68
- topic: "load_test_topic",
68
+ topic: "load_test_topic",
69
69
  payload: "Payload #{i}",
70
- key: "Key #{i}"
70
+ key: "Key #{i}"
71
71
  ))
72
72
  end
73
73
  handles.each(&:wait)
data/ext/librdkafka.so CHANGED
Binary file
data/lib/rdkafka/admin/acl_binding_result.rb CHANGED
@@ -5,13 +5,13 @@ module Rdkafka
5
5
  # Extracts attributes of rd_kafka_AclBinding_t
6
6
  class AclBindingResult
7
7
  attr_reader :result_error, :error_string, :matching_acl_resource_type,
8
- :matching_acl_resource_name, :matching_acl_resource_pattern_type,
9
- :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
10
- :matching_acl_permission_type
8
+ :matching_acl_resource_name, :matching_acl_resource_pattern_type,
9
+ :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
10
+ :matching_acl_permission_type
11
11
 
12
12
  # This attribute was initially released under the name that is now an alias
13
13
  # We keep it for backwards compatibility but it was changed for the consistency
14
- alias matching_acl_pattern_type matching_acl_resource_pattern_type
14
+ alias_method :matching_acl_pattern_type, :matching_acl_resource_pattern_type
15
15
 
16
16
  # @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
17
17
  def initialize(matching_acl)
data/lib/rdkafka/admin/create_acl_handle.rb CHANGED
@@ -5,8 +5,8 @@ module Rdkafka
5
5
  # Handle for create ACL operation
6
6
  class CreateAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer
8
+ :response, :int,
9
+ :response_string, :pointer
10
10
 
11
11
  # @return [String] the name of the operation
12
12
  def operation_name
@@ -22,8 +22,8 @@ module Rdkafka
22
22
  # @raise [RdkafkaError]
23
23
  def raise_error
24
24
  raise RdkafkaError.new(
25
- self[:response],
26
- broker_message: self[:response_string].read_string
25
+ self[:response],
26
+ broker_message: self[:response_string].read_string
27
27
  )
28
28
  end
29
29
  end
data/lib/rdkafka/admin/create_acl_report.rb CHANGED
@@ -4,12 +4,10 @@ module Rdkafka
4
4
  class Admin
5
5
  # Report for create ACL operation result
6
6
  class CreateAclReport
7
-
8
7
  # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
9
8
  # @return [Integer]
10
9
  attr_reader :rdkafka_response
11
10
 
12
-
13
11
  # Upon successful creation of Acl empty string will be returned as rdkafka_response_string
14
12
  # @return [String]
15
13
  attr_reader :rdkafka_response_string
data/lib/rdkafka/admin/create_partitions_handle.rb CHANGED
@@ -3,9 +3,9 @@ module Rdkafka
3
3
  # Handle for create partitions operation
4
4
  class CreatePartitionsHandle < AbstractHandle
5
5
  layout :pending, :bool,
6
- :response, :int,
7
- :error_string, :pointer,
8
- :result_name, :pointer
6
+ :response, :int,
7
+ :error_string, :pointer,
8
+ :result_name, :pointer
9
9
 
10
10
  # @return [String] the name of the operation
11
11
  def operation_name
@@ -21,8 +21,8 @@ module Rdkafka
21
21
  # @raise [RdkafkaError]
22
22
  def raise_error
23
23
  raise RdkafkaError.new(
24
- self[:response],
25
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
24
+ self[:response],
25
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
26
26
  )
27
27
  end
28
28
  end
data/lib/rdkafka/admin/create_topic_handle.rb CHANGED
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for create topic operation
6
6
  class CreateTopicHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -23,8 +23,8 @@ module Rdkafka
23
23
  # @raise [RdkafkaError]
24
24
  def raise_error
25
25
  raise RdkafkaError.new(
26
- self[:response],
27
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
26
+ self[:response],
27
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
28
28
  )
29
29
  end
30
30
  end
data/lib/rdkafka/admin/delete_acl_handle.rb CHANGED
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for delete ACL operation
6
6
  class DeleteAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :matching_acls, :pointer,
11
- :matching_acls_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :matching_acls, :pointer,
11
+ :matching_acls_count, :int
12
12
 
13
13
  # @return [String] the name of the operation
14
14
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: self[:response_string].read_string
27
+ self[:response],
28
+ broker_message: self[:response_string].read_string
29
29
  )
30
30
  end
31
31
  end
data/lib/rdkafka/admin/delete_acl_report.rb CHANGED
@@ -4,7 +4,6 @@ module Rdkafka
4
4
  class Admin
5
5
  # Report for delete ACL operation result
6
6
  class DeleteAclReport
7
-
8
7
  # deleted acls
9
8
  # @return [Rdkafka::Bindings::AclBindingResult]
10
9
  attr_reader :deleted_acls
@@ -12,9 +11,9 @@ module Rdkafka
12
11
  # @param matching_acls [FFI::Pointer] pointer to matching ACLs array
13
12
  # @param matching_acls_count [Integer] number of matching ACLs
14
13
  def initialize(matching_acls:, matching_acls_count:)
15
- @deleted_acls=[]
14
+ @deleted_acls = []
16
15
  if matching_acls != FFI::Pointer::NULL
17
- acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
16
+ acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
18
17
  (1..matching_acls_count).map do |matching_acl_index|
19
18
  acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[matching_acl_index - 1])
20
19
  @deleted_acls << acl_binding_result
data/lib/rdkafka/admin/delete_groups_handle.rb CHANGED
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for delete groups operation
6
6
  class DeleteGroupsHandle < AbstractHandle
7
7
  layout :pending, :bool, # TODO: ???
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: create_result.error_string
27
+ self[:response],
28
+ broker_message: create_result.error_string
29
29
  )
30
30
  end
31
31
  end
data/lib/rdkafka/admin/delete_topic_handle.rb CHANGED
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for delete topic operation
6
6
  class DeleteTopicHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -23,8 +23,8 @@ module Rdkafka
23
23
  # @raise [RdkafkaError]
24
24
  def raise_error
25
25
  raise RdkafkaError.new(
26
- self[:response],
27
- broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
26
+ self[:response],
27
+ broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
28
28
  )
29
29
  end
30
30
  end
data/lib/rdkafka/admin/describe_acl_handle.rb CHANGED
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for describe ACL operation
6
6
  class DescribeAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :acls, :pointer,
11
- :acls_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :acls, :pointer,
11
+ :acls_count, :int
12
12
 
13
13
  # @return [String] the name of the operation.
14
14
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: self[:response_string].read_string
27
+ self[:response],
28
+ broker_message: self[:response_string].read_string
29
29
  )
30
30
  end
31
31
  end
data/lib/rdkafka/admin/describe_acl_report.rb CHANGED
@@ -4,7 +4,6 @@ module Rdkafka
4
4
  class Admin
5
5
  # Report for describe ACL operation result
6
6
  class DescribeAclReport
7
-
8
7
  # acls that exists in the cluster for the resource_type, resource_name and pattern_type filters provided in the request.
9
8
  # @return [Rdkafka::Bindings::AclBindingResult] array of matching acls.
10
9
  attr_reader :acls
@@ -12,10 +11,10 @@ module Rdkafka
12
11
  # @param acls [FFI::Pointer] pointer to ACLs array
13
12
  # @param acls_count [Integer] number of ACLs
14
13
  def initialize(acls:, acls_count:)
15
- @acls=[]
14
+ @acls = []
16
15
 
17
16
  if acls != FFI::Pointer::NULL
18
- acl_binding_result_pointers = acls.read_array_of_pointer(acls_count)
17
+ acl_binding_result_pointers = acls.read_array_of_pointer(acls_count)
19
18
  (1..acls_count).map do |acl_index|
20
19
  acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[acl_index - 1])
21
20
  @acls << acl_binding_result
data/lib/rdkafka/admin/describe_configs_handle.rb CHANGED
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for describe configs operation
6
6
  class DescribeConfigsHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :config_entries, :pointer,
11
- :entry_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :config_entries, :pointer,
11
+ :entry_count, :int
12
12
 
13
13
  # @return [String] the name of the operation.
14
14
  def operation_name
data/lib/rdkafka/admin/describe_configs_report.rb CHANGED
@@ -9,7 +9,7 @@ module Rdkafka
9
9
  # @param config_entries [FFI::Pointer] pointer to config entries array
10
10
  # @param entry_count [Integer] number of config entries
11
11
  def initialize(config_entries:, entry_count:)
12
- @resources=[]
12
+ @resources = []
13
13
 
14
14
  return if config_entries == FFI::Pointer::NULL
15
15
 
data/lib/rdkafka/admin/incremental_alter_configs_handle.rb CHANGED
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for incremental alter configs operation
6
6
  class IncrementalAlterConfigsHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :config_entries, :pointer,
11
- :entry_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :config_entries, :pointer,
11
+ :entry_count, :int
12
12
 
13
13
  # @return [String] the name of the operation.
14
14
  def operation_name
data/lib/rdkafka/admin/incremental_alter_configs_report.rb CHANGED
@@ -9,7 +9,7 @@ module Rdkafka
9
9
  # @param config_entries [FFI::Pointer] pointer to config entries array
10
10
  # @param entry_count [Integer] number of config entries
11
11
  def initialize(config_entries:, entry_count:)
12
- @resources=[]
12
+ @resources = []
13
13
 
14
14
  return if config_entries == FFI::Pointer::NULL
15
15