rdkafka 0.25.0 → 0.25.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/Gemfile +5 -6
  4. data/Gemfile.lint +14 -0
  5. data/Gemfile.lint.lock +123 -0
  6. data/README.md +1 -1
  7. data/Rakefile +21 -21
  8. data/ext/Rakefile +27 -27
  9. data/lib/rdkafka/admin/acl_binding_result.rb +4 -4
  10. data/lib/rdkafka/admin/create_acl_handle.rb +4 -4
  11. data/lib/rdkafka/admin/create_acl_report.rb +0 -2
  12. data/lib/rdkafka/admin/create_partitions_handle.rb +5 -5
  13. data/lib/rdkafka/admin/create_topic_handle.rb +5 -5
  14. data/lib/rdkafka/admin/delete_acl_handle.rb +6 -6
  15. data/lib/rdkafka/admin/delete_acl_report.rb +2 -3
  16. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -5
  17. data/lib/rdkafka/admin/delete_topic_handle.rb +5 -5
  18. data/lib/rdkafka/admin/describe_acl_handle.rb +6 -6
  19. data/lib/rdkafka/admin/describe_acl_report.rb +2 -3
  20. data/lib/rdkafka/admin/describe_configs_handle.rb +4 -4
  21. data/lib/rdkafka/admin/describe_configs_report.rb +1 -1
  22. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +4 -4
  23. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +1 -1
  24. data/lib/rdkafka/admin.rb +86 -20
  25. data/lib/rdkafka/bindings.rb +97 -82
  26. data/lib/rdkafka/callbacks.rb +10 -10
  27. data/lib/rdkafka/config.rb +18 -18
  28. data/lib/rdkafka/consumer/message.rb +5 -8
  29. data/lib/rdkafka/consumer/partition.rb +2 -2
  30. data/lib/rdkafka/consumer/topic_partition_list.rb +10 -10
  31. data/lib/rdkafka/consumer.rb +207 -14
  32. data/lib/rdkafka/error.rb +13 -13
  33. data/lib/rdkafka/helpers/oauth.rb +0 -1
  34. data/lib/rdkafka/helpers/time.rb +5 -0
  35. data/lib/rdkafka/metadata.rb +16 -16
  36. data/lib/rdkafka/native_kafka.rb +63 -2
  37. data/lib/rdkafka/producer/delivery_handle.rb +5 -5
  38. data/lib/rdkafka/producer/delivery_report.rb +1 -1
  39. data/lib/rdkafka/producer/partitions_count_cache.rb +6 -6
  40. data/lib/rdkafka/producer.rb +117 -57
  41. data/lib/rdkafka/version.rb +1 -1
  42. data/package-lock.json +331 -0
  43. data/package.json +9 -0
  44. data/rdkafka.gemspec +39 -40
  45. data/renovate.json +21 -0
  46. metadata +5 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6e080c8b1e6e04090b729bcea44d0d35ab53083c7878c952e0e09146318ccf66
4
- data.tar.gz: aee7d2f5e8d71de67e25ff89192f8f02f065c3dea3978e1850dac7c823e4fb17
3
+ metadata.gz: 36d2a4ebad99bd98289d2af65b9dea7f244a70f5025d3d8dc96cff7020f762f3
4
+ data.tar.gz: fa243ec34891717494123b0b1ed012d3ac37d8c6541f1d9ad8c38982d57c90fe
5
5
  SHA512:
6
- metadata.gz: fdcc94f6e3b2f9ac318f2e3146987c12b823f016add46e881ab772838df2238a627f84fbb6741fcf3c30ed34b1a00c8197e71faa543db2a9c6f47439dae7a083
7
- data.tar.gz: 0e5f9095c34b21c2307d7736b786c1376a1ab976fc0f9cf0a6fb27c6c5e0382ce3ab005701333b50066f23c64cb5fea352f5443ae31c85abc59e1c0cdabf1660
6
+ metadata.gz: d15396b3341b8685d50167d9531e52be6c3fd96d3e9d4a9d34453fd982472c19bfb84c5754f71e91aa5bd1bfd6c9fc2c3bf6941ae84431c85165845f8ba86e57
7
+ data.tar.gz: a6be61394c84fd7c939b72c2d1283f415c66692890c5f55888573b56a2cee5e3038ba8de39087f1fb70e9480781b2b3052d980597d1e2d1350c070740bfd8f6c
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Rdkafka Changelog
2
2
 
3
+ ## 0.25.1 (2026-02-26)
4
+ - [Enhancement] Use native ARM64 runners instead of QEMU emulation for Alpine musl aarch64 builds, improving build performance and reliability.
5
+ - [Enhancement] Enable parallel compilation (`make -j$(nproc)`) for ARM64 Alpine musl builds.
6
+ - [Enhancement] Add file descriptor API for fiber scheduler integration. Expose `enable_queue_io_events` and `enable_background_queue_io_events` on `Consumer`, `Producer`, and `Admin` to enable non-blocking monitoring with select/poll/epoll for integration with Ruby fiber schedulers (Falcon, Async) and custom event loops.
7
+ - [Enhancement] Add non-blocking poll methods (`poll_nb`, `events_poll_nb`) on `Consumer` that skip GVL release for efficient fiber scheduler integration when using `poll(0)`.
8
+ - [Enhancement] Add `events_poll_nb_each` method on `Producer`, `Consumer`, and `Admin` for polling events in a single GVL/mutex session. Yields the count after each iteration; the caller returns `:stop` to break.
9
+ - [Enhancement] Add `poll_nb_each` method on `Consumer` for non-blocking message polling with proper resource cleanup, yielding each message and supporting early termination via a `:stop` return value.
10
+
3
11
  ## 0.25.0 (2026-01-20)
4
12
  - **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout:` (seconds) is deprecated in favor of `max_wait_timeout_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
5
13
  - **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms:` (milliseconds). The old parameter still works but will be removed in v1.0.0.
data/Gemfile CHANGED
@@ -5,10 +5,9 @@ source "https://rubygems.org"
5
5
  gemspec
6
6
 
7
7
  group :development do
8
- gem 'ostruct'
9
- gem 'pry'
10
- gem 'rspec'
11
- gem 'simplecov'
12
- gem 'warning'
13
- gem 'yard-lint', '~> 1.3.0'
8
+ gem "ostruct"
9
+ gem "pry"
10
+ gem "rspec"
11
+ gem "simplecov"
12
+ gem "warning"
14
13
  end
data/Gemfile.lint ADDED
@@ -0,0 +1,14 @@
1
+ # frozen_string_literal: true
2
+
3
+ source "https://rubygems.org"
4
+
5
+ # Documentation linting
6
+ gem "yard-lint"
7
+
8
+ # Code style (StandardRB via RuboCop)
9
+ gem "standard"
10
+ gem "standard-performance"
11
+ gem "rubocop-performance"
12
+ gem "rubocop-rspec"
13
+ gem "standard-rspec"
14
+ gem "rubocop-thread_safety"
data/Gemfile.lint.lock ADDED
@@ -0,0 +1,123 @@
1
+ GEM
2
+ remote: https://rubygems.org/
3
+ specs:
4
+ ast (2.4.3)
5
+ json (2.18.0)
6
+ language_server-protocol (3.17.0.5)
7
+ lint_roller (1.1.0)
8
+ parallel (1.27.0)
9
+ parser (3.3.10.1)
10
+ ast (~> 2.4.1)
11
+ racc
12
+ prism (1.8.0)
13
+ racc (1.8.1)
14
+ rainbow (3.1.1)
15
+ regexp_parser (2.11.3)
16
+ rubocop (1.82.1)
17
+ json (~> 2.3)
18
+ language_server-protocol (~> 3.17.0.2)
19
+ lint_roller (~> 1.1.0)
20
+ parallel (~> 1.10)
21
+ parser (>= 3.3.0.2)
22
+ rainbow (>= 2.2.2, < 4.0)
23
+ regexp_parser (>= 2.9.3, < 3.0)
24
+ rubocop-ast (>= 1.48.0, < 2.0)
25
+ ruby-progressbar (~> 1.7)
26
+ unicode-display_width (>= 2.4.0, < 4.0)
27
+ rubocop-ast (1.49.0)
28
+ parser (>= 3.3.7.2)
29
+ prism (~> 1.7)
30
+ rubocop-capybara (2.22.1)
31
+ lint_roller (~> 1.1)
32
+ rubocop (~> 1.72, >= 1.72.1)
33
+ rubocop-factory_bot (2.28.0)
34
+ lint_roller (~> 1.1)
35
+ rubocop (~> 1.72, >= 1.72.1)
36
+ rubocop-performance (1.26.1)
37
+ lint_roller (~> 1.1)
38
+ rubocop (>= 1.75.0, < 2.0)
39
+ rubocop-ast (>= 1.47.1, < 2.0)
40
+ rubocop-rspec (3.9.0)
41
+ lint_roller (~> 1.1)
42
+ rubocop (~> 1.81)
43
+ rubocop-rspec_rails (2.32.0)
44
+ lint_roller (~> 1.1)
45
+ rubocop (~> 1.72, >= 1.72.1)
46
+ rubocop-rspec (~> 3.5)
47
+ rubocop-thread_safety (0.7.3)
48
+ lint_roller (~> 1.1)
49
+ rubocop (~> 1.72, >= 1.72.1)
50
+ rubocop-ast (>= 1.44.0, < 2.0)
51
+ ruby-progressbar (1.13.0)
52
+ standard (1.53.0)
53
+ language_server-protocol (~> 3.17.0.2)
54
+ lint_roller (~> 1.0)
55
+ rubocop (~> 1.82.0)
56
+ standard-custom (~> 1.0.0)
57
+ standard-performance (~> 1.8)
58
+ standard-custom (1.0.2)
59
+ lint_roller (~> 1.0)
60
+ rubocop (~> 1.50)
61
+ standard-performance (1.9.0)
62
+ lint_roller (~> 1.1)
63
+ rubocop-performance (~> 1.26.0)
64
+ standard-rspec (0.3.1)
65
+ lint_roller (>= 1.0)
66
+ rubocop-capybara (~> 2.22)
67
+ rubocop-factory_bot (~> 2.27)
68
+ rubocop-rspec (~> 3.5)
69
+ rubocop-rspec_rails (~> 2.31)
70
+ unicode-display_width (3.2.0)
71
+ unicode-emoji (~> 4.1)
72
+ unicode-emoji (4.2.0)
73
+ yard (0.9.38)
74
+ yard-lint (1.4.0)
75
+ yard (~> 0.9)
76
+ zeitwerk (~> 2.6)
77
+ zeitwerk (2.7.4)
78
+
79
+ PLATFORMS
80
+ ruby
81
+ x86_64-linux
82
+
83
+ DEPENDENCIES
84
+ rubocop-performance
85
+ rubocop-rspec
86
+ rubocop-thread_safety
87
+ standard
88
+ standard-performance
89
+ standard-rspec
90
+ yard-lint
91
+
92
+ CHECKSUMS
93
+ ast (2.4.3) sha256=954615157c1d6a382bc27d690d973195e79db7f55e9765ac7c481c60bdb4d383
94
+ json (2.18.0) sha256=b10506aee4183f5cf49e0efc48073d7b75843ce3782c68dbeb763351c08fd505
95
+ language_server-protocol (3.17.0.5) sha256=fd1e39a51a28bf3eec959379985a72e296e9f9acfce46f6a79d31ca8760803cc
96
+ lint_roller (1.1.0) sha256=2c0c845b632a7d172cb849cc90c1bce937a28c5c8ccccb50dfd46a485003cc87
97
+ parallel (1.27.0) sha256=4ac151e1806b755fb4e2dc2332cbf0e54f2e24ba821ff2d3dcf86bf6dc4ae130
98
+ parser (3.3.10.1) sha256=06f6a725d2cd91e5e7f2b7c32ba143631e1f7c8ae2fb918fc4cebec187e6a688
99
+ prism (1.8.0) sha256=84453a16ef5530ea62c5f03ec16b52a459575ad4e7b9c2b360fd8ce2c39c1254
100
+ racc (1.8.1) sha256=4a7f6929691dbec8b5209a0b373bc2614882b55fc5d2e447a21aaa691303d62f
101
+ rainbow (3.1.1) sha256=039491aa3a89f42efa1d6dec2fc4e62ede96eb6acd95e52f1ad581182b79bc6a
102
+ regexp_parser (2.11.3) sha256=ca13f381a173b7a93450e53459075c9b76a10433caadcb2f1180f2c741fc55a4
103
+ rubocop (1.82.1) sha256=09f1a6a654a960eda767aebea33e47603080f8e9c9a3f019bf9b94c9cab5e273
104
+ rubocop-ast (1.49.0) sha256=49c3676d3123a0923d333e20c6c2dbaaae2d2287b475273fddee0c61da9f71fd
105
+ rubocop-capybara (2.22.1) sha256=ced88caef23efea53f46e098ff352f8fc1068c649606ca75cb74650970f51c0c
106
+ rubocop-factory_bot (2.28.0) sha256=4b17fc02124444173317e131759d195b0d762844a71a29fe8139c1105d92f0cb
107
+ rubocop-performance (1.26.1) sha256=cd19b936ff196df85829d264b522fd4f98b6c89ad271fa52744a8c11b8f71834
108
+ rubocop-rspec (3.9.0) sha256=8fa70a3619408237d789aeecfb9beef40576acc855173e60939d63332fdb55e2
109
+ rubocop-rspec_rails (2.32.0) sha256=4a0d641c72f6ebb957534f539d9d0a62c47abd8ce0d0aeee1ef4701e892a9100
110
+ rubocop-thread_safety (0.7.3) sha256=067cdd52fbf5deffc18995437e45b5194236eaff4f71de3375a1f6052e48f431
111
+ ruby-progressbar (1.13.0) sha256=80fc9c47a9b640d6834e0dc7b3c94c9df37f08cb072b7761e4a71e22cff29b33
112
+ standard (1.53.0) sha256=f3c9493385db7079d0abce6f7582f553122156997b81258cd361d3480eeacf9c
113
+ standard-custom (1.0.2) sha256=424adc84179a074f1a2a309bb9cf7cd6bfdb2b6541f20c6bf9436c0ba22a652b
114
+ standard-performance (1.9.0) sha256=49483d31be448292951d80e5e67cdcb576c2502103c7b40aec6f1b6e9c88e3f2
115
+ standard-rspec (0.3.1) sha256=67bc957281cacf24f0d88235ca1bf28a8995265b1a60eb519cd0451858b56a22
116
+ unicode-display_width (3.2.0) sha256=0cdd96b5681a5949cdbc2c55e7b420facae74c4aaf9a9815eee1087cb1853c42
117
+ unicode-emoji (4.2.0) sha256=519e69150f75652e40bf736106cfbc8f0f73aa3fb6a65afe62fefa7f80b0f80f
118
+ yard (0.9.38) sha256=721fb82afb10532aa49860655f6cc2eaa7130889df291b052e1e6b268283010f
119
+ yard-lint (1.4.0) sha256=7dd88fbb08fd77cb840bea899d58812817b36d92291b5693dd0eeb3af9f91f0f
120
+ zeitwerk (2.7.4) sha256=2bef90f356bdafe9a6c2bd32bcd804f83a4f9b8bc27f3600fff051eb3edcec8b
121
+
122
+ BUNDLED WITH
123
+ 4.0.3
data/README.md CHANGED
@@ -163,7 +163,7 @@ bundle exec rake produce_messages
163
163
 
164
164
  | rdkafka-ruby | librdkafka | patches |
165
165
  |-|-|-|
166
- | 0.25.x (Unreleased) | 2.12.1 (2025-10-21) | yes |
166
+ | 0.25.x (2026-01-21) | 2.12.1 (2025-10-21) | yes |
167
167
  | 0.24.x (2025-10-10) | 2.11.1 (2025-08-18) | yes |
168
168
  | 0.23.x (2025-09-04) | 2.11.0 (2025-07-03) | yes |
169
169
  | 0.22.x (2025-07-17) | 2.8.0 (2025-01-07) | yes |
data/Rakefile CHANGED
@@ -1,11 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'bundler/gem_tasks'
3
+ require "bundler/gem_tasks"
4
4
  require "./lib/rdkafka"
5
5
 
6
- desc 'Generate some message traffic'
6
+ desc "Generate some message traffic"
7
7
  task :produce_messages do
8
- config = {:"bootstrap.servers" => "localhost:9092"}
8
+ config = { "bootstrap.servers": "localhost:9092" }
9
9
  if ENV["DEBUG"]
10
10
  config[:debug] = "broker,topic,msg"
11
11
  end
@@ -15,24 +15,24 @@ task :produce_messages do
15
15
  100.times do |i|
16
16
  puts "Producing message #{i}"
17
17
  delivery_handles << producer.produce(
18
- topic: "rake_test_topic",
19
- payload: "Payload #{i} from Rake",
20
- key: "Key #{i} from Rake"
18
+ topic: "rake_test_topic",
19
+ payload: "Payload #{i} from Rake",
20
+ key: "Key #{i} from Rake"
21
21
  )
22
22
  end
23
- puts 'Waiting for delivery'
23
+ puts "Waiting for delivery"
24
24
  delivery_handles.each(&:wait)
25
- puts 'Done'
25
+ puts "Done"
26
26
  end
27
27
 
28
- desc 'Consume some messages'
28
+ desc "Consume some messages"
29
29
  task :consume_messages do
30
30
  config = {
31
- :"bootstrap.servers" => "localhost:9092",
32
- :"group.id" => "rake_test",
33
- :"enable.partition.eof" => false,
34
- :"auto.offset.reset" => "earliest",
35
- :"statistics.interval.ms" => 10_000
31
+ "bootstrap.servers": "localhost:9092",
32
+ "group.id": "rake_test",
33
+ "enable.partition.eof": false,
34
+ "auto.offset.reset": "earliest",
35
+ "statistics.interval.ms": 10_000
36
36
  }
37
37
  if ENV["DEBUG"]
38
38
  config[:debug] = "cgrp,topic,fetch"
@@ -40,7 +40,7 @@ task :consume_messages do
40
40
  Rdkafka::Config.statistics_callback = lambda do |stats|
41
41
  puts stats
42
42
  end
43
- consumer = Rdkafka::Config.new(config).consumer
43
+ Rdkafka::Config.new(config).consumer
44
44
  consumer = Rdkafka::Config.new(config).consumer
45
45
  consumer.subscribe("rake_test_topic")
46
46
  consumer.each do |message|
@@ -48,14 +48,14 @@ task :consume_messages do
48
48
  end
49
49
  end
50
50
 
51
- desc 'Hammer down'
51
+ desc "Hammer down"
52
52
  task :load_test do
53
53
  puts "Starting load test"
54
54
 
55
55
  config = Rdkafka::Config.new(
56
- :"bootstrap.servers" => "localhost:9092",
57
- :"group.id" => "load-test",
58
- :"enable.partition.eof" => false
56
+ "bootstrap.servers": "localhost:9092",
57
+ "group.id": "load-test",
58
+ "enable.partition.eof": false
59
59
  )
60
60
 
61
61
  # Create a producer in a thread
@@ -65,9 +65,9 @@ task :load_test do
65
65
  handles = []
66
66
  1000.times do |i|
67
67
  handles.push(producer.produce(
68
- topic: "load_test_topic",
68
+ topic: "load_test_topic",
69
69
  payload: "Payload #{i}",
70
- key: "Key #{i}"
70
+ key: "Key #{i}"
71
71
  ))
72
72
  end
73
73
  handles.each(&:wait)
data/ext/Rakefile CHANGED
@@ -1,53 +1,53 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require File.expand_path('../../lib/rdkafka/version', __FILE__)
3
+ require File.expand_path("../../lib/rdkafka/version", __FILE__)
4
4
  require "digest"
5
5
  require "fileutils"
6
6
  require "open-uri"
7
7
 
8
- task :default => :clean do
8
+ task default: :clean do
9
9
  # For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
10
10
  # managed by nix itself, so using the normal file paths doesn't work for nix users.
11
11
  #
12
12
  # Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
13
13
  # accessible in the nix environment
14
- if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
14
+ if ENV.fetch("RDKAFKA_EXT_PATH", "").empty?
15
15
  # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
16
16
  require "mini_portile2"
17
17
  recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
18
18
 
19
19
  # Use default homebrew openssl if we're on mac and the directory exists, is not using nix-prepared libraries
20
20
  # and each of flags is not already set
21
- if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}") && !ENV.key?("NIX_LDFLAGS")
21
+ if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?((homebrew_prefix = `brew --prefix openssl`.strip).to_s) && !ENV.key?("NIX_LDFLAGS")
22
22
  ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV.key?("CPPFLAGS")
23
23
  ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV.key?("LDFLAGS")
24
24
  end
25
25
 
26
- releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
26
+ releases = File.expand_path(File.join(File.dirname(__FILE__), "../dist"))
27
27
 
28
28
  recipe.files << {
29
- :url => "file://#{releases}/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
30
- :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
29
+ url: "file://#{releases}/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
30
+ sha256: Rdkafka::LIBRDKAFKA_SOURCE_SHA256
31
31
  }
32
32
  recipe.configure_options = ["--host=#{recipe.host}"]
33
33
 
34
- recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort
34
+ recipe.patch_files = Dir[File.join(releases, "patches", "*.patch")].sort
35
35
 
36
36
  # Disable using libc regex engine in favor of the embedded one
37
37
  # The default regex engine of librdkafka does not always work exactly as most of the users
38
38
  # would expect, hence this flag allows for changing it to the other one
39
- if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
40
- recipe.configure_options << '--disable-regex-ext'
39
+ if ENV.key?("RDKAFKA_DISABLE_REGEX_EXT")
40
+ recipe.configure_options << "--disable-regex-ext"
41
41
  end
42
42
 
43
43
  recipe.cook
44
44
  # Move dynamic library we're interested in
45
- if recipe.host.include?('darwin')
46
- from_extension = '1.dylib'
47
- to_extension = 'dylib'
45
+ if recipe.host.include?("darwin")
46
+ from_extension = "1.dylib"
47
+ to_extension = "dylib"
48
48
  else
49
- from_extension = 'so.1'
50
- to_extension = 'so'
49
+ from_extension = "so.1"
50
+ to_extension = "so"
51
51
  end
52
52
  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
53
53
  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
@@ -56,12 +56,12 @@ task :default => :clean do
56
56
  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
57
57
  else
58
58
  # Otherwise, copy existing libraries to ./ext
59
- if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
59
+ if ENV["RDKAFKA_EXT_PATH"].nil? || ENV["RDKAFKA_EXT_PATH"].empty?
60
60
  raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
61
61
  end
62
62
  files = [
63
- File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
64
- File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
63
+ File.join(ENV["RDKAFKA_EXT_PATH"], "lib", "librdkafka.dylib"),
64
+ File.join(ENV["RDKAFKA_EXT_PATH"], "lib", "librdkafka.so")
65
65
  ]
66
66
  files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
67
67
  end
@@ -76,28 +76,28 @@ end
76
76
 
77
77
  namespace :dist do
78
78
  task :dir do
79
- ENV["RDKAFKA_DIST_PATH"] ||= File.expand_path(File.join(File.dirname(__FILE__), '..', 'dist'))
79
+ ENV["RDKAFKA_DIST_PATH"] ||= File.expand_path(File.join(File.dirname(__FILE__), "..", "dist"))
80
80
  end
81
81
 
82
- task :file => "dist:dir" do
82
+ task file: "dist:dir" do
83
83
  ENV["RDKAFKA_DIST_FILE"] ||= File.join(ENV["RDKAFKA_DIST_PATH"], "librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz")
84
84
  end
85
85
 
86
- task :clean => "dist:file" do
87
- Dir.glob(File.join("#{ENV['RDKAFKA_DIST_PATH']}", "*")).each do |filename|
86
+ task clean: "dist:file" do
87
+ Dir.glob(File.join(ENV["RDKAFKA_DIST_PATH"].to_s, "*")).each do |filename|
88
88
  next if filename.include? ENV["RDKAFKA_DIST_FILE"]
89
89
 
90
90
  FileUtils.rm_rf filename
91
91
  end
92
92
  end
93
93
 
94
- task :download => "dist:file" do
94
+ task download: "dist:file" do
95
95
  version = Rdkafka::LIBRDKAFKA_VERSION
96
96
  librdkafka_download = "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{version}"
97
97
 
98
98
  URI.open(librdkafka_download) do |file|
99
99
  filename = ENV["RDKAFKA_DIST_FILE"]
100
- data = file.read
100
+ data = file.read
101
101
 
102
102
  if Digest::SHA256.hexdigest(data) != Rdkafka::LIBRDKAFKA_SOURCE_SHA256
103
103
  raise "SHA256 does not match downloaded file"
@@ -107,7 +107,7 @@ namespace :dist do
107
107
  end
108
108
  end
109
109
 
110
- task :update => %w[dist:download dist:clean]
110
+ task update: %w[dist:download dist:clean]
111
111
  end
112
112
 
113
113
  namespace :build do
@@ -118,8 +118,8 @@ namespace :build do
118
118
 
119
119
  recipe = MiniPortile.new("librdkafka", version)
120
120
  recipe.files << "https://github.com/confluentinc/librdkafka/archive/#{ref}.tar.gz"
121
- recipe.configure_options = ["--host=#{recipe.host}","--enable-static", "--enable-zstd"]
122
- recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort
121
+ recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]
122
+ recipe.patch_files = Dir[File.join(releases, "patches", "*.patch")].sort
123
123
  recipe.cook
124
124
 
125
125
  ext = recipe.host.include?("darwin") ? "dylib" : "so"
@@ -5,13 +5,13 @@ module Rdkafka
5
5
  # Extracts attributes of rd_kafka_AclBinding_t
6
6
  class AclBindingResult
7
7
  attr_reader :result_error, :error_string, :matching_acl_resource_type,
8
- :matching_acl_resource_name, :matching_acl_resource_pattern_type,
9
- :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
10
- :matching_acl_permission_type
8
+ :matching_acl_resource_name, :matching_acl_resource_pattern_type,
9
+ :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
10
+ :matching_acl_permission_type
11
11
 
12
12
  # This attribute was initially released under the name that is now an alias
13
13
  # We keep it for backwards compatibility but it was changed for the consistency
14
- alias matching_acl_pattern_type matching_acl_resource_pattern_type
14
+ alias_method :matching_acl_pattern_type, :matching_acl_resource_pattern_type
15
15
 
16
16
  # @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
17
17
  def initialize(matching_acl)
@@ -5,8 +5,8 @@ module Rdkafka
5
5
  # Handle for create ACL operation
6
6
  class CreateAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer
8
+ :response, :int,
9
+ :response_string, :pointer
10
10
 
11
11
  # @return [String] the name of the operation
12
12
  def operation_name
@@ -22,8 +22,8 @@ module Rdkafka
22
22
  # @raise [RdkafkaError]
23
23
  def raise_error
24
24
  raise RdkafkaError.new(
25
- self[:response],
26
- broker_message: self[:response_string].read_string
25
+ self[:response],
26
+ broker_message: self[:response_string].read_string
27
27
  )
28
28
  end
29
29
  end
@@ -4,12 +4,10 @@ module Rdkafka
4
4
  class Admin
5
5
  # Report for create ACL operation result
6
6
  class CreateAclReport
7
-
8
7
  # Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
9
8
  # @return [Integer]
10
9
  attr_reader :rdkafka_response
11
10
 
12
-
13
11
  # Upon successful creation of Acl empty string will be returned as rdkafka_response_string
14
12
  # @return [String]
15
13
  attr_reader :rdkafka_response_string
@@ -3,9 +3,9 @@ module Rdkafka
3
3
  # Handle for create partitions operation
4
4
  class CreatePartitionsHandle < AbstractHandle
5
5
  layout :pending, :bool,
6
- :response, :int,
7
- :error_string, :pointer,
8
- :result_name, :pointer
6
+ :response, :int,
7
+ :error_string, :pointer,
8
+ :result_name, :pointer
9
9
 
10
10
  # @return [String] the name of the operation
11
11
  def operation_name
@@ -21,8 +21,8 @@ module Rdkafka
21
21
  # @raise [RdkafkaError]
22
22
  def raise_error
23
23
  raise RdkafkaError.new(
24
- self[:response],
25
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
24
+ self[:response],
25
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
26
26
  )
27
27
  end
28
28
  end
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for create topic operation
6
6
  class CreateTopicHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -23,8 +23,8 @@ module Rdkafka
23
23
  # @raise [RdkafkaError]
24
24
  def raise_error
25
25
  raise RdkafkaError.new(
26
- self[:response],
27
- broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
26
+ self[:response],
27
+ broker_message: CreateTopicReport.new(self[:error_string], self[:result_name]).error_string
28
28
  )
29
29
  end
30
30
  end
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for delete ACL operation
6
6
  class DeleteAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :matching_acls, :pointer,
11
- :matching_acls_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :matching_acls, :pointer,
11
+ :matching_acls_count, :int
12
12
 
13
13
  # @return [String] the name of the operation
14
14
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: self[:response_string].read_string
27
+ self[:response],
28
+ broker_message: self[:response_string].read_string
29
29
  )
30
30
  end
31
31
  end
@@ -4,7 +4,6 @@ module Rdkafka
4
4
  class Admin
5
5
  # Report for delete ACL operation result
6
6
  class DeleteAclReport
7
-
8
7
  # deleted acls
9
8
  # @return [Rdkafka::Bindings::AclBindingResult]
10
9
  attr_reader :deleted_acls
@@ -12,9 +11,9 @@ module Rdkafka
12
11
  # @param matching_acls [FFI::Pointer] pointer to matching ACLs array
13
12
  # @param matching_acls_count [Integer] number of matching ACLs
14
13
  def initialize(matching_acls:, matching_acls_count:)
15
- @deleted_acls=[]
14
+ @deleted_acls = []
16
15
  if matching_acls != FFI::Pointer::NULL
17
- acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
16
+ acl_binding_result_pointers = matching_acls.read_array_of_pointer(matching_acls_count)
18
17
  (1..matching_acls_count).map do |matching_acl_index|
19
18
  acl_binding_result = AclBindingResult.new(acl_binding_result_pointers[matching_acl_index - 1])
20
19
  @deleted_acls << acl_binding_result
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for delete groups operation
6
6
  class DeleteGroupsHandle < AbstractHandle
7
7
  layout :pending, :bool, # TODO: ???
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: create_result.error_string
27
+ self[:response],
28
+ broker_message: create_result.error_string
29
29
  )
30
30
  end
31
31
  end
@@ -5,9 +5,9 @@ module Rdkafka
5
5
  # Handle for delete topic operation
6
6
  class DeleteTopicHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :error_string, :pointer,
10
- :result_name, :pointer
8
+ :response, :int,
9
+ :error_string, :pointer,
10
+ :result_name, :pointer
11
11
 
12
12
  # @return [String] the name of the operation
13
13
  def operation_name
@@ -23,8 +23,8 @@ module Rdkafka
23
23
  # @raise [RdkafkaError]
24
24
  def raise_error
25
25
  raise RdkafkaError.new(
26
- self[:response],
27
- broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
26
+ self[:response],
27
+ broker_message: DeleteTopicReport.new(self[:error_string], self[:result_name]).error_string
28
28
  )
29
29
  end
30
30
  end
@@ -5,10 +5,10 @@ module Rdkafka
5
5
  # Handle for describe ACL operation
6
6
  class DescribeAclHandle < AbstractHandle
7
7
  layout :pending, :bool,
8
- :response, :int,
9
- :response_string, :pointer,
10
- :acls, :pointer,
11
- :acls_count, :int
8
+ :response, :int,
9
+ :response_string, :pointer,
10
+ :acls, :pointer,
11
+ :acls_count, :int
12
12
 
13
13
  # @return [String] the name of the operation.
14
14
  def operation_name
@@ -24,8 +24,8 @@ module Rdkafka
24
24
  # @raise [RdkafkaError]
25
25
  def raise_error
26
26
  raise RdkafkaError.new(
27
- self[:response],
28
- broker_message: self[:response_string].read_string
27
+ self[:response],
28
+ broker_message: self[:response_string].read_string
29
29
  )
30
30
  end
31
31
  end