rdkafka 0.15.0 → 0.16.0.beta1

Files changed (41)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +4 -7
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +25 -1
  7. data/README.md +31 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +51 -26
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/acl_binding_result.rb +38 -24
  12. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  13. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  15. data/lib/rdkafka/admin.rb +15 -0
  16. data/lib/rdkafka/bindings.rb +44 -8
  17. data/lib/rdkafka/callbacks.rb +28 -12
  18. data/lib/rdkafka/config.rb +69 -15
  19. data/lib/rdkafka/consumer.rb +39 -17
  20. data/lib/rdkafka/helpers/oauth.rb +58 -0
  21. data/lib/rdkafka/native_kafka.rb +32 -19
  22. data/lib/rdkafka/producer/delivery_handle.rb +12 -1
  23. data/lib/rdkafka/producer/delivery_report.rb +16 -3
  24. data/lib/rdkafka/producer.rb +47 -10
  25. data/lib/rdkafka/version.rb +1 -1
  26. data/lib/rdkafka.rb +1 -0
  27. data/rdkafka.gemspec +2 -2
  28. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  29. data/spec/rdkafka/admin/delete_acl_report_spec.rb +1 -0
  30. data/spec/rdkafka/admin/describe_acl_report_spec.rb +1 -0
  31. data/spec/rdkafka/admin_spec.rb +53 -0
  32. data/spec/rdkafka/bindings_spec.rb +97 -0
  33. data/spec/rdkafka/config_spec.rb +53 -0
  34. data/spec/rdkafka/consumer_spec.rb +74 -0
  35. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  36. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -0
  37. data/spec/rdkafka/producer_spec.rb +69 -2
  38. data/spec/spec_helper.rb +16 -1
  39. data.tar.gz.sig +0 -0
  40. metadata +6 -4
  41. metadata.gz.sig +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: d906b2e71dae5b5f45459e915c48dc8cb88e0d51ebb90ded80cef3c8e5531b77
-  data.tar.gz: 8f0df2688bbc3b264de22b5943b18462ad41898781cc12e6e534804409133ce0
+  metadata.gz: 3135d4f2663517517330d165948e9761ffc0ecd20942f911a6ee9541c437ee7e
+  data.tar.gz: 9a489c2400c4054e9cec0d6c8d24f75bce407d370f8c393b7b31dcd0dbf7c361
 SHA512:
-  metadata.gz: c35d392b326f4d47077f419bced92b929436be548651afc9364f5ada2eda51883ad75feeeb30183369aa6b15db3ac4630410f408eae449d1b0cc5a007cf011fc
-  data.tar.gz: 1487bb54713e6330ce55fd95f656dffd2edc34bcd8bc151d94faf2d6f043b8183276e3407d5f45c6152e99463c8323ccb83c15fff7007ebb96c4b369533002d5
+  metadata.gz: be8eba2aec012af189d893ebf6ddcd4e4dec117aecd7f36afa76bc6ee4c2a3fa93f4ac4160efa16e8c91efd7011ee41cda00a5e2146e18044a166035850cd490
+  data.tar.gz: d1761a6ab9c7d9ee539d79679dc730cc392fc2d7369d3f78b9c8d8f1c800855ab15ae7c472259b43e95139aa30d4777026c0529671fd85f98f3e5fceeaf53e62
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/workflows/ci.yml CHANGED
@@ -22,25 +22,22 @@ jobs:
       fail-fast: false
       matrix:
         ruby:
-          - '3.3.0-preview2'
+          - '3.3'
           - '3.2'
           - '3.1'
-          - '3.1.0'
           - '3.0'
-          - '3.0.0'
           - '2.7'
-          - '2.7.0'
         include:
-          - ruby: '3.2'
+          - ruby: '3.3'
             coverage: 'true'
     steps:
       - uses: actions/checkout@v4
       - name: Install package dependencies
        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
-      - name: Start Kafka with docker-compose
+      - name: Start Kafka with docker compose
        run: |
-          docker-compose up -d || (sleep 5 && docker-compose up -d)
+          docker compose up -d || (sleep 5 && docker compose up -d)
 
       - name: Set up Ruby
        uses: ruby/setup-ruby@v1
data/.gitignore CHANGED
@@ -10,3 +10,5 @@ ext/librdkafka.*
 doc
 coverage
 vendor
+.idea/
+out/
data/.ruby-version CHANGED
@@ -1 +1 @@
-3.2.2
+3.3.1
data/CHANGELOG.md CHANGED
@@ -1,12 +1,36 @@
 # Rdkafka Changelog
 
+## 0.16.0 (Unreleased)
+- **[Feature]** Oauthbearer token refresh callback (bruce-szalwinski-he)
+- [Enhancement] Replace the time-poll based wait engine with an event-based one to improve response times on blocking operations and wait (nijikon + mensfeld)
+- [Enhancement] Allow for usage of the second regex engine of librdkafka by setting `RDKAFKA_DISABLE_REGEX_EXT` during build (mensfeld)
+- [Enhancement] Name the polling thread `rdkafka.native_kafka#<name>` (nijikon)
+- [Change] Allow deferring native Kafka thread operations and starting them manually for consumer, producer, and admin.
+- [Change] The `wait_timeout` argument in `AbstractHandle.wait` is deprecated and will be removed in future versions without replacement. We no longer rely on its value (nijikon)
+- [Fix] Background logger stops working after forking, causing memory leaks (mensfeld)
+- [Fix] Fix bogus case/when syntax. Levels 1, 2, and 6 previously defaulted to UNKNOWN (jjowdy)
+
+## 0.15.1 (2024-01-30)
+- [Enhancement] Provide support for Nix OS (alexandriainfantino)
+- [Enhancement] Replace `rd_kafka_offset_store` with `rd_kafka_offsets_store` (mensfeld)
+- [Enhancement] Alias `topic_name` as `topic` in the delivery report (mensfeld)
+- [Enhancement] Provide `label` producer handler and report reference for improved traceability (mensfeld)
+- [Enhancement] Include the error when invoking `create_result` on a producer handle (mensfeld)
+- [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld)
+- [Enhancement] Report `-1` instead of `nil` in case of `partition_count` failure (mensfeld)
+- [Fix] Fix return type on `#rd_kafka_poll` (mensfeld)
+- [Fix] `uint8_t` does not exist on Apple Silicon (mensfeld)
+- [Fix] Missing ACL `RD_KAFKA_RESOURCE_BROKER` constant reference (mensfeld)
+- [Fix] Partition cache caches an invalid nil result for `PARTITIONS_COUNT_TTL` (mensfeld)
+- [Change] Rename `matching_acl_pattern_type` to `matching_acl_resource_pattern_type` to align the whole API (mensfeld)
+
 ## 0.15.0 (2023-12-03)
 - **[Feature]** Add `Admin#metadata` (mensfeld)
 - **[Feature]** Add `Admin#create_partitions` (mensfeld)
 - **[Feature]** Add `Admin#delete_group` utility (piotaixr)
 - **[Feature]** Add Create and Delete ACL Feature To Admin Functions (vgnanasekaran)
 - **[Feature]** Support `#assignment_lost?` on a consumer to check for involuntary assignment revocation (mensfeld)
-- [Enhancement] Expose alternative way of managing consumer events via a separate queue (mensfeld)
+- [Enhancement] Expose alternative way of managing consumer events via a separate queue (mensfeld)
 - [Enhancement] **Bump** librdkafka to 2.3.0 (mensfeld)
 - [Enhancement] Increase the `#lag` and `#query_watermark_offsets` default timeouts from 100ms to 1000ms. This will compensate for network glitches and remote clusters operations (mensfeld)
 - [Change] Use `SecureRandom.uuid` instead of `random` for test consumer groups (mensfeld)
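
Worth a closer look before the file-by-file diffs: the headline 0.16.0 feature is the Oauthbearer token refresh callback. Below is a minimal sketch of the wiring, assuming the callback is registered through the `Rdkafka::Config.oauthbearer_token_refresh_callback` accessor referenced in the bindings diff further down, and that clients gain `oauthbearer_set_token` from the new `Helpers::OAuth` module; `fetch_fresh_token` is a hypothetical stand-in for an identity-provider call.

```ruby
require "rdkafka"

# Client registry maintained by the application; the callback is global and
# contextless, so it has to look up the right client by name itself.
INSTANCES = {}

# Assumed registration point (a Config-level accessor referenced by the
# OAuthbearerTokenRefreshCallback binding shown later in this diff).
Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |_config, client_name|
  client = INSTANCES.fetch(client_name)

  client.oauthbearer_set_token(
    token: fetch_fresh_token,                   # hypothetical IdP helper
    lifetime_ms: (Time.now.to_i + 3600) * 1000, # absolute expiry, in ms
    principal_name: "kafka-user"
  )
end
```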
data/README.md CHANGED
@@ -18,7 +18,7 @@ become EOL.
 
 `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.
 
-The most important pieces of a Kafka client are implemented, and we aim to provide all relevant consumer, producer, and admin APIs.
+The most essential pieces of a Kafka client are implemented, and we aim to provide all relevant consumer, producer, and admin APIs.
 
 ## Table of content
 
@@ -30,8 +30,10 @@ The most important pieces of a Kafka client are implemented, and we aim to provi
 - [Higher Level Libraries](#higher-level-libraries)
   * [Message Processing Frameworks](#message-processing-frameworks)
   * [Message Publishing Libraries](#message-publishing-libraries)
+- [Forking](#forking)
 - [Development](#development)
 - [Example](#example)
+- [Versions](#versions)
 
 ## Project Scope
 
@@ -46,12 +48,13 @@ While rdkafka-ruby aims to simplify the use of librdkafka in Ruby applications,
 
 ## Installation
 
-This gem downloads and compiles librdkafka when it is installed. If you
-If you have any problems installing the gem, please open an issue.
+When installed, this gem downloads and compiles librdkafka. If you have any problems installing the gem, please open an issue.
 
 ## Usage
 
-See the [documentation](https://karafka.io/docs/code/rdkafka-ruby/) for full details on how to use this gem. Two quick examples:
+Please see the [documentation](https://karafka.io/docs/code/rdkafka-ruby/) for full details on how to use this gem. Below are two quick examples.
+
+Unless you are seeking specific low-level capabilities, we **strongly** recommend using [Karafka](https://github.com/karafka/karafka) and [WaterDrop](https://github.com/karafka/waterdrop) when working with Kafka. These are higher-level libraries also maintained by us based on rdkafka-ruby.
 
 ### Consuming Messages
 
@@ -73,7 +76,7 @@ end
 
 ### Producing Messages
 
-Produce a number of messages, put the delivery handles in an array, and
+Produce several messages, put the delivery handles in an array, and
 wait for them before exiting. This way the messages will be batched and
 efficiently sent to Kafka.
 
@@ -94,13 +97,11 @@ end
 delivery_handles.each(&:wait)
 ```
 
-Note that creating a producer consumes some resources that will not be
-released until it `#close` is explicitly called, so be sure to call
-`Config#producer` only as necessary.
+Note that creating a producer consumes some resources that will not be released until `#close` is explicitly called, so be sure to call `Config#producer` only as necessary.
 
 ## Higher Level Libraries
 
-Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.
+Currently, there are two actively developed frameworks based on `rdkafka-ruby` that provide a higher-level API for working with Kafka messages, and one library for publishing messages.
 
 ### Message Processing Frameworks
 
@@ -111,6 +112,16 @@ Currently, there are two actively developed frameworks based on rdkafka-ruby, th
 
 * [WaterDrop](https://github.com/karafka/waterdrop) – Standalone Karafka library for producing Kafka messages.
 
+## Forking
+
+When working with `rdkafka-ruby`, it's essential to know that the underlying `librdkafka` library does not support fork-safe operations, even though it is thread-safe. Forking a process after initializing librdkafka clients can lead to unpredictable behavior due to inherited file descriptors and memory states. This limitation requires careful handling, especially in Ruby applications that rely on forking.
+
+To address this, it's highly recommended to:
+
+- Never initialize any `rdkafka-ruby` producers or consumers before forking to avoid state corruption.
+- Before forking, always close any open producers or consumers if you've opened any.
+- Use high-level libraries like [WaterDrop](https://github.com/karafka/waterdrop) and [Karafka](https://github.com/karafka/karafka/), which provide abstractions for handling librdkafka's intricacies.
+
 ## Development
 
 Contributors are encouraged to focus on enhancements that align with the core goal of the library. We appreciate contributions but will likely not accept pull requests for features that:
@@ -147,3 +158,14 @@ To see everything working, run these in separate tabs:
 bundle exec rake consume_messages
 bundle exec rake produce_messages
 ```
+
+## Versions
+
+| rdkafka-ruby | librdkafka |
+|-|-|
+| 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
+| 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
+| 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
+| 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) |
+| 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) |
+| 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) |
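
The new Forking section reduces to one rule: never carry a live librdkafka client across a fork. A minimal sketch of that discipline using the gem's documented `Config#producer` API (the fork layout itself is illustrative):

```ruby
require "rdkafka"

config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

# Parent: produce, then close BEFORE forking so no native state is inherited.
producer = config.producer
producer.produce(topic: "events", payload: "before fork").wait
producer.close

pid = fork do
  # Child: build a fresh client after the fork instead of reusing the parent's.
  child_producer = config.producer
  child_producer.produce(topic: "events", payload: "from child").wait
  child_producer.close
end

Process.wait(pid)
```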
data/docker-compose.yml CHANGED
@@ -3,7 +3,7 @@ version: '2'
 services:
   kafka:
     container_name: kafka
-    image: confluentinc/cp-kafka:7.5.2
+    image: confluentinc/cp-kafka:7.6.1
 
     ports:
       - 9092:9092
data/ext/Rakefile CHANGED
@@ -1,40 +1,65 @@
 # frozen_string_literal: true
 
 require File.expand_path('../../lib/rdkafka/version', __FILE__)
-require "mini_portile2"
 require "fileutils"
 require "open-uri"
 
 task :default => :clean do
-  # Download and compile librdkafka
-  recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+  # For nix users, nix can't locate the file paths because the packages it requires aren't managed
+  # by the system but by nix itself, so the normal file paths don't work for nix users.
+  #
+  # Mini_portile causes an issue because its dependencies are downloaded on the fly and therefore
+  # don't exist/aren't accessible in the nix environment
+  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
+    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
+    require "mini_portile2"
+    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
 
-  # Use default homebrew openssl if we're on mac and the directory exists
-  # and each of flags is not empty
-  if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
-    ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
-    ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
-  end
+    # Use default homebrew openssl if we're on mac, the directory exists,
+    # and each of the flags is not empty
+    if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+      ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+      ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+    end
+
+    recipe.files << {
+      :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+    }
+    recipe.configure_options = ["--host=#{recipe.host}"]
+
+    # Disable using the libc regex engine in favor of the embedded one.
+    # The default regex engine of librdkafka does not always work exactly as most users
+    # would expect, hence this flag allows for changing it to the other one
+    if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
+      recipe.configure_options << '--disable-regex-ext'
+    end
 
-  recipe.files << {
-    :url => "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
-    :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
-  }
-  recipe.configure_options = ["--host=#{recipe.host}"]
-  recipe.cook
-  # Move dynamic library we're interested in
-  if recipe.host.include?('darwin')
-    from_extension = '1.dylib'
-    to_extension = 'dylib'
+    recipe.cook
+    # Move the dynamic library we're interested in
+    if recipe.host.include?('darwin')
+      from_extension = '1.dylib'
+      to_extension = 'dylib'
+    else
+      from_extension = 'so.1'
+      to_extension = 'so'
+    end
+    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+    # Clean up files created by mini_portile that we don't need in the gem
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
   else
-    from_extension = 'so.1'
-    to_extension = 'so'
+    # Otherwise, copy existing libraries to ./ext
+    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
+      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
+    end
+    files = [
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
+    ]
+    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
   end
-  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
-  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
-  # Cleanup files created by miniportile we don't need in the gem
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
 end
 
 task :clean do
data/lib/rdkafka/abstract_handle.rb CHANGED
@@ -14,6 +14,13 @@ module Rdkafka
 
   # Registry for registering all the handles.
   REGISTRY = {}
+  # Max wait timeout used when no timeout is specified (roughly 317 years)
+  MAX_WAIT_TIMEOUT_FOREVER = 10_000_000_000
+  # Deprecation message for the wait_timeout argument in the wait method
+  WAIT_TIMEOUT_DEPRECATION_MESSAGE = "The 'wait_timeout' argument is deprecated and will be removed in future versions without replacement. " \
+    "We don't rely on its value anymore. Please refactor your code to remove references to it."
+
+  private_constant :MAX_WAIT_TIMEOUT_FOREVER
 
   class << self
     # Adds handle to the register
@@ -32,6 +39,12 @@ module Rdkafka
     end
   end
 
+  def initialize
+    @mutex = Thread::Mutex.new
+    @resource = Thread::ConditionVariable.new
+
+    super
+  end
 
   # Whether the handle is still pending.
   #
@@ -45,37 +58,48 @@ module Rdkafka
   # on the operation. In this case it is possible to call wait again.
   #
   # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
-  #   If this is nil it does not time out.
-  # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the
-  #   operation has completed
+  #   If this is nil we will wait forever
+  # @param wait_timeout [nil] deprecated
   # @param raise_response_error [Boolean] should we raise error when waiting finishes
   #
   # @return [Object] Operation-specific result
   #
   # @raise [RdkafkaError] When the operation failed
   # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
-  def wait(max_wait_timeout: 60, wait_timeout: 0.1, raise_response_error: true)
-    timeout = if max_wait_timeout
-                monotonic_now + max_wait_timeout
-              else
-                nil
-              end
-    loop do
-      if pending?
-        if timeout && timeout <= monotonic_now
-          raise WaitTimeoutError.new(
-            "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
-          )
-        end
-        sleep wait_timeout
-      elsif self[:response] != 0 && raise_response_error
-        raise_error
-      else
-        return create_result
+  def wait(max_wait_timeout: 60, wait_timeout: nil, raise_response_error: true)
+    Kernel.warn(WAIT_TIMEOUT_DEPRECATION_MESSAGE) unless wait_timeout.nil?
+
+    timeout = max_wait_timeout ? monotonic_now + max_wait_timeout : MAX_WAIT_TIMEOUT_FOREVER
+
+    @mutex.synchronize do
+      loop do
+        if pending?
+          to_wait = (timeout - monotonic_now)
+
+          if to_wait.positive?
+            @resource.wait(@mutex, to_wait)
+          else
+            raise WaitTimeoutError.new(
+              "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+            )
+          end
+        elsif self[:response] != 0 && raise_response_error
+          raise_error
+        else
+          return create_result
+        end
       end
     end
  end
 
+  # Unlock the resources
+  def unlock
+    @mutex.synchronize do
+      self[:pending] = false
+      @resource.broadcast
+    end
+  end
+
   # @return [String] the name of the operation (e.g. "delivery")
   def operation_name
     raise "Must be implemented by subclass!"
data/lib/rdkafka/admin/acl_binding_result.rb CHANGED
@@ -2,36 +2,50 @@
 
 module Rdkafka
   class Admin
-
     # Extracts attributes of rd_kafka_AclBinding_t
     #
     class AclBindingResult
-      attr_reader :result_error, :error_string, :matching_acl_resource_type, :matching_acl_resource_name, :matching_acl_pattern_type, :matching_acl_principal, :matching_acl_host, :matching_acl_operation, :matching_acl_permission_type
+      attr_reader :result_error, :error_string, :matching_acl_resource_type,
+                  :matching_acl_resource_name, :matching_acl_resource_pattern_type,
+                  :matching_acl_principal, :matching_acl_host, :matching_acl_operation,
+                  :matching_acl_permission_type
+
+      # This attribute was initially released under the name that is now an alias.
+      # We keep it for backwards compatibility, but it was changed for consistency.
+      alias matching_acl_pattern_type matching_acl_resource_pattern_type
 
       def initialize(matching_acl)
         rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
         @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
         error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
+
         if error_string != FFI::Pointer::NULL
           @error_string = error_string.read_string
         end
+
         @matching_acl_resource_type = Rdkafka::Bindings.rd_kafka_AclBinding_restype(matching_acl)
         matching_acl_resource_name = Rdkafka::Bindings.rd_kafka_AclBinding_name(matching_acl)
+
         if matching_acl_resource_name != FFI::Pointer::NULL
           @matching_acl_resource_name = matching_acl_resource_name.read_string
         end
-        @matching_acl_pattern_type = Rdkafka::Bindings.rd_kafka_AclBinding_resource_pattern_type(matching_acl)
+
+        @matching_acl_resource_pattern_type = Rdkafka::Bindings.rd_kafka_AclBinding_resource_pattern_type(matching_acl)
         matching_acl_principal = Rdkafka::Bindings.rd_kafka_AclBinding_principal(matching_acl)
+
         if matching_acl_principal != FFI::Pointer::NULL
           @matching_acl_principal = matching_acl_principal.read_string
         end
+
         matching_acl_host = Rdkafka::Bindings.rd_kafka_AclBinding_host(matching_acl)
+
         if matching_acl_host != FFI::Pointer::NULL
           @matching_acl_host = matching_acl_host.read_string
         end
+
         @matching_acl_operation = Rdkafka::Bindings.rd_kafka_AclBinding_operation(matching_acl)
         @matching_acl_permission_type = Rdkafka::Bindings.rd_kafka_AclBinding_permission_type(matching_acl)
       end
     end
   end
 end
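
Since the old reader survives as an alias, both names return the same value. A quick illustration (the `matching_acl` pointer comes from the admin describe/delete ACL flow):

```ruby
result = Rdkafka::Admin::AclBindingResult.new(matching_acl)

# New, API-consistent name:
result.matching_acl_resource_pattern_type
# Old name, kept as a backwards-compatible alias:
result.matching_acl_pattern_type
```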
data/lib/rdkafka/admin/create_topic_report.rb CHANGED
@@ -16,7 +16,7 @@ module Rdkafka
         @error_string = error_string.read_string
       end
       if result_name != FFI::Pointer::NULL
-        @result_name = @result_name = result_name.read_string
+        @result_name = result_name.read_string
       end
     end
   end
data/lib/rdkafka/admin/delete_groups_report.rb CHANGED
@@ -16,7 +16,7 @@ module Rdkafka
         @error_string = error_string.read_string
       end
       if result_name != FFI::Pointer::NULL
-        @result_name = @result_name = result_name.read_string
+        @result_name = result_name.read_string
       end
     end
   end
data/lib/rdkafka/admin/delete_topic_report.rb CHANGED
@@ -16,7 +16,7 @@ module Rdkafka
         @error_string = error_string.read_string
       end
       if result_name != FFI::Pointer::NULL
-        @result_name = @result_name = result_name.read_string
+        @result_name = result_name.read_string
       end
     end
   end
data/lib/rdkafka/admin.rb CHANGED
@@ -2,6 +2,8 @@
 
 module Rdkafka
   class Admin
+    include Helpers::OAuth
+
     # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
@@ -10,6 +12,19 @@ module Rdkafka
       ObjectSpace.define_finalizer(self, native_kafka.finalizer)
     end
 
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
+    # @return [String] admin name
+    def name
+      @name ||= @native_kafka.with_inner do |inner|
+        ::Rdkafka::Bindings.rd_kafka_name(inner)
+      end
+    end
+
     def finalizer
       ->(_) { close }
     end
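
With the deferred-start change from the changelog, the new `Admin#start` only matters when automatic startup was disabled. A sketch, with the caveat that the option name below is an assumption: the config.rb side of this change is not part of this excerpt, only `Admin#start` and `Admin#name` are.

```ruby
config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

# `native_kafka_auto_start: false` is an assumed option name (see caveat above).
admin = config.admin(native_kafka_auto_start: false)

# ... finish booting, daemonize, etc., before any native threads exist ...

admin.start # spawns the native polling thread and kicks off init polling
admin.name  # => the librdkafka handle name for this admin instance
```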
data/lib/rdkafka/bindings.rb CHANGED
@@ -17,6 +17,7 @@ module Rdkafka
 
   RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
   RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
+  RD_KAFKA_RESP_ERR__STATE = -172
   RD_KAFKA_RESP_ERR__NOENT = -156
   RD_KAFKA_RESP_ERR_NO_ERROR = 0
 
@@ -32,7 +33,7 @@ module Rdkafka
   # Polling
 
   attach_function :rd_kafka_flush, [:pointer, :int], :int, blocking: true
-  attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
+  attach_function :rd_kafka_poll, [:pointer, :int], :int, blocking: true
   attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true
 
   # Metadata
@@ -111,7 +112,10 @@ module Rdkafka
   callback :error_cb, [:pointer, :int, :string, :pointer], :void
   attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
   attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
-
+  callback :oauthbearer_token_refresh_cb, [:pointer, :string, :pointer], :void
+  attach_function :rd_kafka_conf_set_oauthbearer_token_refresh_cb, [:pointer, :oauthbearer_token_refresh_cb], :void
+  attach_function :rd_kafka_oauthbearer_set_token, [:pointer, :string, :int64, :pointer, :pointer, :int, :pointer, :int], :int
+  attach_function :rd_kafka_oauthbearer_set_token_failure, [:pointer, :string], :int
   # Log queue
   attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
   attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
@@ -120,19 +124,21 @@
     :void, [:pointer, :int, :string, :string]
   ) do |_client_ptr, level, _level_string, line|
     severity = case level
-               when 0 || 1 || 2
+               when 0, 1, 2
                  Logger::FATAL
                when 3
                  Logger::ERROR
                when 4
                  Logger::WARN
-               when 5 || 6
+               when 5, 6
                  Logger::INFO
                when 7
                  Logger::DEBUG
                else
                  Logger::UNKNOWN
                end
+
+    Rdkafka::Config.ensure_log_thread
     Rdkafka::Config.log_queue << [severity, "rdkafka: #{line}"]
   end
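
The case/when fix above deserves a second look, since the bug is easy to reintroduce: `when 0 || 1 || 2` evaluates the `||` chain first, so it is just `when 0`. A standalone demonstration:

```ruby
level = 2

buggy = case level
        when 0 || 1 || 2 then "FATAL" # `0 || 1 || 2` => 0, so this matches only 0
        else "UNKNOWN"                # levels 1 and 2 fell through to here
        end

fixed = case level
        when 0, 1, 2 then "FATAL"     # comma-separated values are each compared
        else "UNKNOWN"
        end

buggy # => "UNKNOWN"
fixed # => "FATAL"
```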
 
@@ -159,6 +165,32 @@ module Rdkafka
     end
   end
 
+  # The OAuth callback is currently global and contextless.
+  # This means that the callback will be called for all instances, and the callback must be able
+  # to determine to which instance it is associated.
+  # The instance name will be provided in the callback, allowing the callback to reference the
+  # correct instance.
+  #
+  # An example of how to use the instance name in the callback is given below.
+  # The `refresh_token` method is configured as the `oauthbearer_token_refresh_callback`.
+  # `instances` is a map of client names to client instances, maintained by the user.
+  #
+  # ```
+  # def refresh_token(config, client_name)
+  #   client = instances[client_name]
+  #   client.oauthbearer_set_token(
+  #     token: 'new-token-value',
+  #     lifetime_ms: token_lifetime_ms,
+  #     principal_name: 'principal-name'
+  #   )
+  # end
+  # ```
+  OAuthbearerTokenRefreshCallback = FFI::Function.new(
+    :void, [:pointer, :string, :pointer]
+  ) do |client_ptr, config, _opaque|
+    if Rdkafka::Config.oauthbearer_token_refresh_callback
+      Rdkafka::Config.oauthbearer_token_refresh_callback.call(config, Rdkafka::Bindings.rd_kafka_name(client_ptr))
+    end
+  end
+
   # Handle
 
   enum :kafka_type, [
@@ -185,12 +217,15 @@ module Rdkafka
   attach_function :rd_kafka_poll_set_consumer, [:pointer], :void, blocking: true
   attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer, blocking: true
   attach_function :rd_kafka_consumer_close, [:pointer], :void, blocking: true
-  attach_function :rd_kafka_offset_store, [:pointer, :int32, :int64], :int, blocking: true
+  attach_function :rd_kafka_offsets_store, [:pointer, :pointer], :int, blocking: true
   attach_function :rd_kafka_pause_partitions, [:pointer, :pointer], :int, blocking: true
   attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int, blocking: true
   attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int, blocking: true
   attach_function :rd_kafka_offsets_for_times, [:pointer, :pointer, :int], :int, blocking: true
   attach_function :rd_kafka_position, [:pointer, :pointer], :int, blocking: true
+  # These two are used for exactly-once semantics (EOS) support
+  attach_function :rd_kafka_consumer_group_metadata, [:pointer], :pointer, blocking: true
+  attach_function :rd_kafka_consumer_group_metadata_destroy, [:pointer], :void, blocking: true
 
   # Headers
   attach_function :rd_kafka_header_get_all, [:pointer, :size_t, :pointer, :pointer, SizePtr], :int
@@ -394,6 +429,7 @@ module Rdkafka
   RD_KAFKA_RESOURCE_ANY = 1
   RD_KAFKA_RESOURCE_TOPIC = 2
   RD_KAFKA_RESOURCE_GROUP = 3
+  RD_KAFKA_RESOURCE_BROKER = 4
 
   # rd_kafka_ResourcePatternType_t - https://github.com/confluentinc/librdkafka/blob/292d2a66b9921b783f08147807992e603c7af059/src/rdkafka.h#L7320
 
@@ -436,9 +472,9 @@ module Rdkafka
   class NativeError < FFI::Struct # rd_kafka_error_t
     layout :code, :int32,
            :errstr, :pointer,
-           :fatal, :uint8_t,
-           :retriable, :uint8_t,
-           :txn_requires_abort, :uint8_t
+           :fatal, :u_int8_t,
+           :retriable, :u_int8_t,
+           :txn_requires_abort, :u_int8_t
   end
 
   attach_function :rd_kafka_group_result_error, [:pointer], NativeError.by_ref # rd_kafka_group_result_t* => rd_kafka_error_t*