karafka-rdkafka 0.15.0.alpha1 → 0.15.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 2cd7ecb658a7aedb5953f4d39c8941a282d6dc7001c7600129d86ebb05a550ce
4
- data.tar.gz: 1dc2ebcb1deebe94197f216f0da3d78e2ed5d55de9b1be8b0654a1042e2d4289
3
+ metadata.gz: bc8e5edfc4eccc512562aa6937a6b3c7d6ad417b83fa136b9090af3f7bfe7bc3
4
+ data.tar.gz: f1b4645880ffe22d1512acee977d045d3390d3dc1f8573ca41a8db22d6c1e3d1
5
5
  SHA512:
6
- metadata.gz: a300e2dcdf5aac16b59b0289a71fec440747e9238c5a801572c7d1ce6a433843ab47e76915a239f860a24cd832323dab952e09b62c7184462c806183958b2a85
7
- data.tar.gz: c1a3b1d23522f2941b9d39d3b5955f40d3205a3820188c7cba02f7825b17bf534963fec68956271908394e443f7b8a84edcdf972021a74f3ed4bcf93d76b1b6d
6
+ metadata.gz: aeba97f575ceea5a67dcf6fe3e31fc9ec98ba6049ece2b23daa7af8d959e8074be5f20eca71134e32ec86076a75bb44dfc3ee9b0aa4eb85f27af6933b922138a
7
+ data.tar.gz: 2089b7f52c3a8a7fa58e682cc73f6c5a8d363affd419a9f4d7c96228fbdc3b96129a4e2b3739fba4ef82cf44c6fe2d00551aedfa33fa3b5d5d2a7ad80f91a18f
checksums.yaml.gz.sig CHANGED
Binary file
data/.gitignore CHANGED
@@ -10,3 +10,5 @@ ext/librdkafka.*
10
10
  doc
11
11
  coverage
12
12
  vendor
13
+ .idea/
14
+ out/
data/CHANGELOG.md CHANGED
@@ -1,8 +1,10 @@
1
1
  # Rdkafka Changelog
2
2
 
3
3
  ## 0.15.0 (Unreleased)
4
- - [Feature] Support incremental config describe + alter API.
4
+ - **[Feature]** Oauthbearer token refresh callback (bruce-szalwinski-he)
5
+ - **[Feature]** Support incremental config describe + alter API (mensfeld)
5
6
  - [Enhancement] Replace time poll based wait engine with an event based to improve response times on blocking operations and wait (nijikon + mensfeld)
7
+ - [Enhancement] Allow for usage of the second regex engine of librdkafka by setting `RDKAFKA_DISABLE_REGEX_EXT` during build (mensfeld)
6
8
  - [Change] The `wait_timeout` argument in `AbstractHandle.wait` method is deprecated and will be removed in future versions without replacement. We don't rely on its value anymore (nijikon)
7
9
 
8
10
  ## 0.14.10 (2024-02-08)
data/ext/Rakefile CHANGED
@@ -27,6 +27,14 @@ task :default => :clean do
27
27
  :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
28
28
  }
29
29
  recipe.configure_options = ["--host=#{recipe.host}"]
30
+
31
+ # Disable using libc regex engine in favor of the embedded one
32
+ # The default regex engine of librdkafka does not always work exactly as most of the users
33
+ # would expect, hence this flag allows for changing it to the other one
34
+ if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
35
+ recipe.configure_options << '--disable-regex-ext'
36
+ end
37
+
30
38
  recipe.cook
31
39
  # Move dynamic library we're interested in
32
40
  if recipe.host.include?('darwin')
data/lib/rdkafka/admin.rb CHANGED
@@ -2,6 +2,8 @@
2
2
 
3
3
  module Rdkafka
4
4
  class Admin
5
+ include Helpers::OAuth
6
+
5
7
  # @private
6
8
  def initialize(native_kafka)
7
9
  @native_kafka = native_kafka
@@ -26,6 +26,7 @@ module Rdkafka
26
26
 
27
27
  RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
28
28
  RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
29
+ RD_KAFKA_RESP_ERR__STATE = -172
29
30
  RD_KAFKA_RESP_ERR__NOENT = -156
30
31
  RD_KAFKA_RESP_ERR_NO_ERROR = 0
31
32
 
@@ -167,7 +168,10 @@ module Rdkafka
167
168
  callback :error_cb, [:pointer, :int, :string, :pointer], :void
168
169
  attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
169
170
  attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
170
-
171
+ callback :oauthbearer_token_refresh_cb, [:pointer, :string, :pointer], :void
172
+ attach_function :rd_kafka_conf_set_oauthbearer_token_refresh_cb, [:pointer, :oauthbearer_token_refresh_cb], :void
173
+ attach_function :rd_kafka_oauthbearer_set_token, [:pointer, :string, :int64, :pointer, :pointer, :int, :pointer, :int], :int
174
+ attach_function :rd_kafka_oauthbearer_set_token_failure, [:pointer, :string], :int
171
175
  # Log queue
172
176
  attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
173
177
  attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
@@ -217,6 +221,32 @@ module Rdkafka
217
221
  end
218
222
  end
219
223
 
224
+ # The OAuth callback is currently global and contextless.
225
+ # This means that the callback will be called for all instances, and the callback must be able to determine to which instance it is associated.
226
+ # The instance name will be provided in the callback, allowing the callback to reference the correct instance.
227
+ #
228
+ # An example of how to use the instance name in the callback is given below.
229
+ # The `refresh_token` is configured as the `oauthbearer_token_refresh_callback`.
230
+ # `instances` is a map of client names to client instances, maintained by the user.
231
+ #
232
+ # ```
233
+ # def refresh_token(config, client_name)
234
+ # client = instances[client_name]
235
+ # client.oauthbearer_set_token(
236
+ # token: 'new-token-value',
237
+ # lifetime_ms: token-lifetime-ms,
238
+ # principal_name: 'principal-name'
239
+ # )
240
+ # end
241
+ # ```
242
+ OAuthbearerTokenRefreshCallback = FFI::Function.new(
243
+ :void, [:pointer, :string, :pointer]
244
+ ) do |client_ptr, config, _opaque|
245
+ if Rdkafka::Config.oauthbearer_token_refresh_callback
246
+ Rdkafka::Config.oauthbearer_token_refresh_callback.call(config, Rdkafka::Bindings.rd_kafka_name(client_ptr))
247
+ end
248
+ end
249
+
220
250
  # Handle
221
251
 
222
252
  enum :kafka_type, [
@@ -15,12 +15,13 @@ module Rdkafka
15
15
  @@opaques = ObjectSpace::WeakMap.new
16
16
  # @private
17
17
  @@log_queue = Queue.new
18
- # @private
19
18
  # We memoize thread on the first log flush
20
19
  # This allows us also to restart logger thread on forks
21
20
  @@log_thread = nil
22
21
  # @private
23
22
  @@log_mutex = Mutex.new
23
+ # @private
24
+ @@oauthbearer_token_refresh_callback = nil
24
25
 
25
26
  # Returns the current logger, by default this is a logger to stdout.
26
27
  #
@@ -104,6 +105,24 @@ module Rdkafka
104
105
  @@error_callback
105
106
  end
106
107
 
108
+ # Sets the SASL/OAUTHBEARER token refresh callback.
109
+ # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
110
+ #
111
+ # @param callback [Proc, #call] The callback
112
+ #
113
+ # @return [nil]
114
+ def self.oauthbearer_token_refresh_callback=(callback)
115
+ raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
116
+ @@oauthbearer_token_refresh_callback = callback
117
+ end
118
+
119
+ # Returns the current oauthbearer_token_refresh_callback callback, by default this is nil.
120
+ #
121
+ # @return [Proc, nil]
122
+ def self.oauthbearer_token_refresh_callback
123
+ @@oauthbearer_token_refresh_callback
124
+ end
125
+
107
126
  # @private
108
127
  def self.opaques
109
128
  @@opaques
@@ -300,6 +319,9 @@ module Rdkafka
300
319
 
301
320
  # Set error callback
302
321
  Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
322
+
323
+ # Set oauth callback
324
+ Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
303
325
  end
304
326
  end
305
327
 
@@ -13,6 +13,7 @@ module Rdkafka
13
13
  class Consumer
14
14
  include Enumerable
15
15
  include Helpers::Time
16
+ include Helpers::OAuth
16
17
 
17
18
  # @private
18
19
  def initialize(native_kafka)
@@ -0,0 +1,58 @@
1
+ module Rdkafka
2
+ module Helpers
3
+
4
+ module OAuth
5
+
6
+ # Set the OAuthBearer token
7
+ #
8
+ # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
9
+ # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
10
+ # @param principal_name [String] the mandatory Kafka principal name associated with the token.
11
+ # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
12
+ # @return [Integer] 0 on success
13
+ def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
14
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
15
+
16
+ response = @native_kafka.with_inner do |inner|
17
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
18
+ inner, token, lifetime_ms, principal_name,
19
+ flatten_extensions(extensions), extension_size(extensions), error_buffer, 256
20
+ )
21
+ end
22
+
23
+ return response if response.zero?
24
+
25
+ oauthbearer_set_token_failure("Failed to set token: #{error_buffer.read_string}")
26
+
27
+ response
28
+ end
29
+
30
+ # Marks failed oauth token acquire in librdkafka
31
+ #
32
+ # @param reason [String] human readable error reason for failing to acquire token
33
+ def oauthbearer_set_token_failure(reason)
34
+ @native_kafka.with_inner do |inner|
35
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(
36
+ inner,
37
+ reason
38
+ )
39
+ end
40
+ end
41
+
42
+ private
43
+
44
+ # Flatten the extensions hash into a string according to the spec, https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
45
+ def flatten_extensions(extensions)
46
+ return nil unless extensions
47
+ "\x01#{extensions.map { |e| e.join("=") }.join("\x01")}"
48
+ end
49
+
50
+ # extension_size is the number of keys + values which should be a non-negative even number
51
+ # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
52
+ def extension_size(extensions)
53
+ return 0 unless extensions
54
+ extensions.size * 2
55
+ end
56
+ end
57
+ end
58
+ end
@@ -4,6 +4,7 @@ module Rdkafka
4
4
  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
5
5
  class Producer
6
6
  include Helpers::Time
7
+ include Helpers::OAuth
7
8
 
8
9
  # Cache partitions count for 30 seconds
9
10
  PARTITIONS_COUNT_TTL = 30
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
- VERSION = "0.15.0.alpha1"
4
+ VERSION = "0.15.0.beta1"
5
5
  LIBRDKAFKA_VERSION = "2.3.0"
6
6
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
7
7
  end
data/lib/rdkafka.rb CHANGED
@@ -7,6 +7,7 @@ require "json"
7
7
 
8
8
  require "rdkafka/version"
9
9
  require "rdkafka/helpers/time"
10
+ require "rdkafka/helpers/oauth"
10
11
  require "rdkafka/abstract_handle"
11
12
  require "rdkafka/admin"
12
13
  require "rdkafka/admin/create_topic_handle"
@@ -676,4 +676,41 @@ describe Rdkafka::Admin do
676
676
  end
677
677
  end
678
678
  end
679
+
680
+ describe '#oauthbearer_set_token' do
681
+ context 'when sasl not configured' do
682
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
683
+ response = admin.oauthbearer_set_token(
684
+ token: "foo",
685
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
686
+ principal_name: "kafka-cluster"
687
+ )
688
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
689
+ end
690
+ end
691
+
692
+ context 'when sasl configured' do
693
+ before do
694
+ config_sasl = rdkafka_config(
695
+ "security.protocol": "sasl_ssl",
696
+ "sasl.mechanisms": 'OAUTHBEARER'
697
+ )
698
+ $admin_sasl = config_sasl.admin
699
+ end
700
+
701
+ after do
702
+ $admin_sasl.close
703
+ end
704
+
705
+ it 'should succeed' do
706
+
707
+ response = $admin_sasl.oauthbearer_set_token(
708
+ token: "foo",
709
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
710
+ principal_name: "kafka-cluster"
711
+ )
712
+ expect(response).to eq(0)
713
+ end
714
+ end
715
+ end
679
716
  end
@@ -132,4 +132,86 @@ describe Rdkafka::Bindings do
132
132
  end
133
133
  end
134
134
  end
135
+
136
+ describe "oauthbearer set token" do
137
+
138
+ context "without args" do
139
+ it "should raise argument error" do
140
+ expect {
141
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
142
+ }.to raise_error(ArgumentError)
143
+ end
144
+ end
145
+
146
+ context "with args" do
147
+ before do
148
+ DEFAULT_TOKEN_EXPIRY_SECONDS = 900
149
+ $token_value = "token"
150
+ $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
151
+ $md_principal_name = "kafka-cluster"
152
+ $extensions = nil
153
+ $extension_size = 0
154
+ $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
155
+ end
156
+
157
+ it "should set token or capture failure" do
158
+ RdKafkaTestConsumer.with do |consumer_ptr|
159
+ response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
160
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
161
+ expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
162
+ end
163
+ end
164
+ end
165
+ end
166
+
167
+ describe "oauthbearer set token failure" do
168
+
169
+ context "without args" do
170
+
171
+ it "should fail" do
172
+ expect {
173
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
174
+ }.to raise_error(ArgumentError)
175
+ end
176
+ end
177
+
178
+ context "with args" do
179
+ it "should succeed" do
180
+ expect {
181
+ errstr = "error"
182
+ RdKafkaTestConsumer.with do |consumer_ptr|
183
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
184
+ end
185
+ }.to_not raise_error
186
+ end
187
+ end
188
+ end
189
+
190
+ describe "oauthbearer callback" do
191
+
192
+ context "without an oauthbearer callback" do
193
+ it "should do nothing" do
194
+ expect {
195
+ Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
196
+ }.not_to raise_error
197
+ end
198
+ end
199
+
200
+ context "with an oauthbearer callback" do
201
+ before do
202
+ Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
203
+ $received_config = config
204
+ $received_client_name = client_name
205
+ end
206
+ end
207
+
208
+ it "should call the oauth bearer callback and receive config and client name" do
209
+ RdKafkaTestConsumer.with do |consumer_ptr|
210
+ Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
211
+ expect($received_config).to eq("{}")
212
+ expect($received_client_name).to match(/consumer/)
213
+ end
214
+ end
215
+ end
216
+ end
135
217
  end
@@ -115,6 +115,39 @@ describe Rdkafka::Config do
115
115
  end
116
116
  end
117
117
 
118
+ context "oauthbearer callback" do
119
+ context "with a proc/lambda" do
120
+ it "should set the callback" do
121
+ expect {
122
+ Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
123
+ puts config
124
+ puts client_name
125
+ end
126
+ }.not_to raise_error
127
+ expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
128
+ end
129
+ end
130
+
131
+ context "with a callable object" do
132
+ it "should set the callback" do
133
+ callback = Class.new do
134
+ def call(config, client_name); end
135
+ end
136
+
137
+ expect {
138
+ Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
139
+ }.not_to raise_error
140
+ expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
141
+ end
142
+ end
143
+
144
+ it "should not accept a callback that's not callable" do
145
+ expect {
146
+ Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
147
+ }.to raise_error(TypeError)
148
+ end
149
+ end
150
+
118
151
  context "configuration" do
119
152
  it "should store configuration" do
120
153
  config = Rdkafka::Config.new
@@ -1329,4 +1329,40 @@ describe Rdkafka::Consumer do
1329
1329
  ])
1330
1330
  end
1331
1331
  end
1332
+
1333
+ describe '#oauthbearer_set_token' do
1334
+ context 'when sasl not configured' do
1335
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
1336
+ response = consumer.oauthbearer_set_token(
1337
+ token: "foo",
1338
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
1339
+ principal_name: "kafka-cluster"
1340
+ )
1341
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
1342
+ end
1343
+ end
1344
+
1345
+ context 'when sasl configured' do
1346
+ before do
1347
+ $consumer_sasl = rdkafka_producer_config(
1348
+ "security.protocol": "sasl_ssl",
1349
+ "sasl.mechanisms": 'OAUTHBEARER'
1350
+ ).consumer
1351
+ end
1352
+
1353
+ after do
1354
+ $consumer_sasl.close
1355
+ end
1356
+
1357
+ it 'should succeed' do
1358
+
1359
+ response = $consumer_sasl.oauthbearer_set_token(
1360
+ token: "foo",
1361
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
1362
+ principal_name: "kafka-cluster"
1363
+ )
1364
+ expect(response).to eq(0)
1365
+ end
1366
+ end
1367
+ end
1332
1368
  end
@@ -917,4 +917,34 @@ describe Rdkafka::Producer do
917
917
  end
918
918
  end
919
919
  end
920
+
921
+ describe '#oauthbearer_set_token' do
922
+ context 'when sasl not configured' do
923
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
924
+ response = producer.oauthbearer_set_token(
925
+ token: "foo",
926
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
927
+ principal_name: "kafka-cluster"
928
+ )
929
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
930
+ end
931
+ end
932
+
933
+ context 'when sasl configured' do
934
+ it 'should succeed' do
935
+ producer_sasl = rdkafka_producer_config(
936
+ {
937
+ "security.protocol": "sasl_ssl",
938
+ "sasl.mechanisms": 'OAUTHBEARER'
939
+ }
940
+ ).producer
941
+ response = producer_sasl.oauthbearer_set_token(
942
+ token: "foo",
943
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
944
+ principal_name: "kafka-cluster"
945
+ )
946
+ expect(response).to eq(0)
947
+ end
948
+ end
949
+ end
920
950
  end
data/spec/spec_helper.rb CHANGED
@@ -155,3 +155,18 @@ RSpec.configure do |config|
155
155
  end
156
156
  end
157
157
  end
158
+
159
+ class RdKafkaTestConsumer
160
+ def self.with
161
+ consumer = Rdkafka::Bindings.rd_kafka_new(
162
+ :rd_kafka_consumer,
163
+ nil,
164
+ nil,
165
+ 0
166
+ )
167
+ yield consumer
168
+ ensure
169
+ Rdkafka::Bindings.rd_kafka_consumer_close(consumer)
170
+ Rdkafka::Bindings.rd_kafka_destroy(consumer)
171
+ end
172
+ end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka-rdkafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.15.0.alpha1
4
+ version: 0.15.0.beta1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Thijs Cadier
@@ -36,7 +36,7 @@ cert_chain:
36
36
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
37
37
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
38
38
  -----END CERTIFICATE-----
39
- date: 2024-03-17 00:00:00.000000000 Z
39
+ date: 2024-03-22 00:00:00.000000000 Z
40
40
  dependencies:
41
41
  - !ruby/object:Gem::Dependency
42
42
  name: ffi
@@ -223,6 +223,7 @@ files:
223
223
  - lib/rdkafka/consumer/partition.rb
224
224
  - lib/rdkafka/consumer/topic_partition_list.rb
225
225
  - lib/rdkafka/error.rb
226
+ - lib/rdkafka/helpers/oauth.rb
226
227
  - lib/rdkafka/helpers/time.rb
227
228
  - lib/rdkafka/metadata.rb
228
229
  - lib/rdkafka/native_kafka.rb
metadata.gz.sig CHANGED
Binary file