rdkafka 0.15.0 → 0.16.0.beta1

This diff reflects the changes between two publicly released package versions, as published to their public registry, and is provided for informational purposes only.
Files changed (41)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +4 -7
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +25 -1
  7. data/README.md +31 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +51 -26
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/acl_binding_result.rb +38 -24
  12. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  13. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  15. data/lib/rdkafka/admin.rb +15 -0
  16. data/lib/rdkafka/bindings.rb +44 -8
  17. data/lib/rdkafka/callbacks.rb +28 -12
  18. data/lib/rdkafka/config.rb +69 -15
  19. data/lib/rdkafka/consumer.rb +39 -17
  20. data/lib/rdkafka/helpers/oauth.rb +58 -0
  21. data/lib/rdkafka/native_kafka.rb +32 -19
  22. data/lib/rdkafka/producer/delivery_handle.rb +12 -1
  23. data/lib/rdkafka/producer/delivery_report.rb +16 -3
  24. data/lib/rdkafka/producer.rb +47 -10
  25. data/lib/rdkafka/version.rb +1 -1
  26. data/lib/rdkafka.rb +1 -0
  27. data/rdkafka.gemspec +2 -2
  28. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  29. data/spec/rdkafka/admin/delete_acl_report_spec.rb +1 -0
  30. data/spec/rdkafka/admin/describe_acl_report_spec.rb +1 -0
  31. data/spec/rdkafka/admin_spec.rb +53 -0
  32. data/spec/rdkafka/bindings_spec.rb +97 -0
  33. data/spec/rdkafka/config_spec.rb +53 -0
  34. data/spec/rdkafka/consumer_spec.rb +74 -0
  35. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  36. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -0
  37. data/spec/rdkafka/producer_spec.rb +69 -2
  38. data/spec/spec_helper.rb +16 -1
  39. data.tar.gz.sig +0 -0
  40. metadata +6 -4
  41. metadata.gz.sig +0 -0
data/lib/rdkafka/producer.rb CHANGED
@@ -4,6 +4,7 @@ module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
     include Helpers::Time
+    include Helpers::OAuth

     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
@@ -23,6 +24,9 @@ module Rdkafka
     attr_reader :delivery_callback_arity

     # @private
+    # @param native_kafka [NativeKafka]
+    # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
+    #   the "consistent_random" default
     def initialize(native_kafka, partitioner_name)
       @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
@@ -37,13 +41,25 @@ module Rdkafka
           topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
         end

-        cache[topic] = [
-          monotonic_now,
-          topic_metadata ? topic_metadata[:partition_count] : nil
-        ]
+        partition_count = topic_metadata ? topic_metadata[:partition_count] : -1
+
+        # This approach caches the failure to fetch only for 1 second. This will make sure, that
+        # we do not cache the failure for too long but also "buys" us a bit of time in case there
+        # would be issues in the cluster so we won't overaload it with consecutive requests
+        cache[topic] = if partition_count.positive?
+          [monotonic_now, partition_count]
+        else
+          [monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
+        end
       end
     end

+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] producer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
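The backdated-TTL branch above is worth pausing on: a failed metadata lookup is cached as `-1`, but with a timestamp shifted into the past so the entry expires after about 5 seconds instead of the full 30. A minimal standalone sketch of the same idea (`cache_partition_count` and its callers are illustrative, not part of the gem):

```ruby
# Sketch of the backdated-TTL caching used in the hunk above.
PARTITIONS_COUNT_TTL = 30 # seconds

def monotonic_now
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
end

def cache_partition_count(cache, topic, partition_count)
  cache[topic] = if partition_count.positive?
    # Healthy result: keep it for the full TTL
    [monotonic_now, partition_count]
  else
    # Failure (-1): backdate the timestamp so the entry goes stale after
    # ~5 seconds, retrying soon without hammering a struggling cluster
    [monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
  end
end
```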
@@ -134,14 +150,15 @@ module Rdkafka
     # Partition count for a given topic.
     #
     # @param topic [String] The topic name.
-    # @return [Integer] partition count for a given topic
+    # @return [Integer] partition count for a given topic or `-1` if it could not be obtained.
     #
     # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
     #   auto-created after returning nil.
     #
     # @note We cache the partition count for a given topic for given time.
     #   This prevents us in case someone uses `partition_key` from querying for the count with
-    #   each message. Instead we query once every 30 seconds at most
+    #   each message. Instead we query once every 30 seconds at most if we have a valid partition
+    #   count or every 5 seconds in case we were not able to obtain number of partitions
     def partition_count(topic)
       closed_producer_check(__method__)

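Since `partition_count` now returns `-1` instead of `nil` when the count cannot be obtained, callers should guard with `Integer#positive?` rather than a nil check, mirroring what `#produce` does in the hunks below. A hedged caller-side sketch:

```ruby
# Hypothetical caller-side guard for the new -1 sentinel.
count = producer.partition_count("events")

partition =
  if count.positive?
    "user-123".hash % count # deterministic pick, illustrative only
  else
    -1                      # count unavailable; let librdkafka choose
  end

producer.produce(topic: "events", payload: "body", partition: partition)
```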
@@ -164,11 +181,12 @@ module Rdkafka
     # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
     # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
     # @param headers [Hash<String,String>] Optional message headers
+    # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
     #
     # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
-    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
+    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
       closed_producer_check(__method__)

       # Start by checking and converting the input
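The new `label:` keyword travels with the message: it is attached to the `DeliveryHandle` (see the `delivery_handle.label = label` hunk below) and, per the param docs, surfaces on the delivery report too. A minimal usage sketch; the report accessor is assumed from those docs:

```ruby
# Sketch: correlating a produced message with its delivery result via a label.
handle = producer.produce(
  topic: "events",
  payload: "body",
  label: { request_id: 42 } # any object; carried on the handle and the report
)

handle.label                              # => { request_id: 42 }
report = handle.wait(max_wait_timeout: 5)
report.label                              # => { request_id: 42 } (per the docs above)
```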
@@ -190,7 +208,7 @@ module Rdkafka
       if partition_key
         partition_count = partition_count(topic)
         # If the topic is not present, set to -1
-        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
+        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
       end

       # If partition is nil, use -1 to let librdafka set the partition randomly or
@@ -210,6 +228,7 @@ module Rdkafka
       end

       delivery_handle = DeliveryHandle.new
+      delivery_handle.label = label
       delivery_handle[:pending] = true
       delivery_handle[:response] = -1
       delivery_handle[:partition] = -1
@@ -256,13 +275,27 @@ module Rdkafka
       delivery_handle
     end

+    # Calls (if registered) the delivery callback
+    #
+    # @param delivery_report [Producer::DeliveryReport]
+    # @param delivery_handle [Producer::DeliveryHandle]
     def call_delivery_callback(delivery_report, delivery_handle)
       return unless @delivery_callback

-      args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
-      @delivery_callback.call(*args)
+      case @delivery_callback_arity
+      when 0
+        @delivery_callback.call
+      when 1
+        @delivery_callback.call(delivery_report)
+      else
+        @delivery_callback.call(delivery_report, delivery_handle)
+      end
     end

+    # Figures out the arity of a given block/method
+    #
+    # @param callback [#call, Proc]
+    # @return [Integer] arity of the provided block/method
     def arity(callback)
       return callback.arity if callback.respond_to?(:arity)

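The arity dispatch above means a registered delivery callback may take zero, one, or two arguments. A sketch of the three accepted shapes:

```ruby
# Sketch: the three callback arities supported by the dispatch above.
producer.delivery_callback = -> { puts "something was delivered" }  # arity 0

producer.delivery_callback = ->(report) { puts report.offset }      # arity 1

producer.delivery_callback = lambda do |report, handle|             # arity 2
  puts "#{handle.label}: partition #{report.partition}"
end
```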
@@ -271,6 +304,10 @@ module Rdkafka

     private

+    # Ensures, no operations can happen on a closed producer
+    #
+    # @param method [Symbol] name of the method that invoked producer
+    # @raise [Rdkafka::ClosedProducerError]
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 module Rdkafka
-  VERSION = "0.15.0"
+  VERSION = "0.16.0.beta1"
   LIBRDKAFKA_VERSION = "2.3.0"
   LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
data/lib/rdkafka.rb CHANGED
@@ -7,6 +7,7 @@ require "json"

 require "rdkafka/version"
 require "rdkafka/helpers/time"
+require "rdkafka/helpers/oauth"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
data/rdkafka.gemspec CHANGED
@@ -3,10 +3,10 @@
 require File.expand_path('lib/rdkafka/version', __dir__)

 Gem::Specification.new do |gem|
-  gem.authors = ['Thijs Cadier']
+  gem.authors = ['Thijs Cadier', 'Maciej Mensfeld']
   gem.email = ["contact@karafka.io"]
   gem.description = "Modern Kafka client library for Ruby based on librdkafka"
-  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.4+."
+  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.7+."
   gem.license = 'MIT'

   gem.files = `git ls-files`.split($\)
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
   end

   describe "#wait" do
-    let(:pending_handle) { true }
+    context 'when pending_handle true' do
+      let(:pending_handle) { true }

-    it "should wait until the timeout and then raise an error" do
-      expect {
-        subject.wait(max_wait_timeout: 0.1)
-      }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      it "should wait until the timeout and then raise an error" do
+        expect(Kernel).not_to receive(:warn)
+        expect {
+          subject.wait(max_wait_timeout: 0.1)
+        }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      end
     end

-    context "when not pending anymore and no error" do
+    context 'when pending_handle false' do
       let(:pending_handle) { false }
-      let(:result) { 1 }

-      it "should return a result" do
-        wait_result = subject.wait
-        expect(wait_result).to eq(result)
+      it 'should show a deprecation warning when wait_timeout is set' do
+        expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
+        subject.wait(wait_timeout: 0.1)
       end

-      it "should wait without a timeout" do
-        wait_result = subject.wait(max_wait_timeout: nil)
-        expect(wait_result).to eq(result)
+      context "without error" do
+        let(:result) { 1 }
+
+        it "should return a result" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait
+          expect(wait_result).to eq(result)
+        end
+
+        it "should wait without a timeout" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait(max_wait_timeout: nil)
+          expect(wait_result).to eq(result)
+        end
       end
-    end

-    context "when not pending anymore and there was an error" do
-      let(:pending_handle) { false }
-      let(:response) { 20 }
+      context "with error" do
+        let(:response) { 20 }

-      it "should raise an rdkafka error" do
-        expect {
-          subject.wait
-        }.to raise_error Rdkafka::RdkafkaError
+        it "should raise an rdkafka error" do
+          expect(Kernel).not_to receive(:warn)
+          expect {
+            subject.wait
+          }.to raise_error Rdkafka::RdkafkaError
+        end
       end
     end
   end
 end
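As these specs pin down, passing `wait_timeout:` to `#wait` now triggers a deprecation warning via `Kernel.warn`, while `max_wait_timeout:` stays silent. The two call styles, sketched:

```ruby
# Sketch: the wait API after this change.
handle = producer.produce(topic: "events", payload: "body")

handle.wait(max_wait_timeout: 5) # supported; raises WaitTimeoutError on timeout

handle.wait(wait_timeout: 0.1)   # deprecated; warns with WAIT_TIMEOUT_DEPRECATION_MESSAGE
```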
data/spec/rdkafka/admin/delete_acl_report_spec.rb CHANGED
@@ -50,6 +50,7 @@ describe Rdkafka::Admin::DeleteAclReport do
   end

   it "should get deleted acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+    expect(subject.deleted_acls[0].matching_acl_resource_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
     expect(subject.deleted_acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
   end

data/spec/rdkafka/admin/describe_acl_report_spec.rb CHANGED
@@ -51,6 +51,7 @@ describe Rdkafka::Admin::DescribeAclReport do
   end

   it "should get matching acl resource pattern type as Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL" do
+    expect(subject.acls[0].matching_acl_resource_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
     expect(subject.acls[0].matching_acl_pattern_type).to eq(Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL)
   end

data/spec/rdkafka/admin_spec.rb CHANGED
@@ -31,6 +31,19 @@ describe Rdkafka::Admin do
   let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
   let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}

+  describe 'admin without auto-start' do
+    let(:admin) { config.admin(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      admin.start
+      admin.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      admin.close
+    end
+  end
+
   describe "#create_topic" do
     describe "called with invalid input" do
       describe "with an invalid topic name" do
@@ -275,6 +288,9 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
   expect(create_acl_report.rdkafka_response).to eq(0)
   expect(create_acl_report.rdkafka_response_string).to eq("")

+  # Since we create and immediately check, this is slow on loaded CIs, hence we wait
+  sleep(2)
+
   #describe_acl
   describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
   describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
@@ -404,4 +420,41 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = admin.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        config_sasl = rdkafka_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        )
+        $admin_sasl = config_sasl.admin
+      end
+
+      after do
+        $admin_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $admin_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
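`#oauthbearer_set_token`, mixed in through the new `Helpers::OAuth` module, is available on admin, producer, and consumer clients alike; it succeeds only when `sasl.mechanisms` is `OAUTHBEARER` and returns `RD_KAFKA_RESP_ERR__STATE` otherwise. A usage sketch with a placeholder token:

```ruby
# Sketch: handing a SASL/OAUTHBEARER token to a client.
config = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",
  "security.protocol": "sasl_ssl",
  "sasl.mechanisms": "OAUTHBEARER"
)
admin = config.admin

result = admin.oauthbearer_set_token(
  token: "my-opaque-or-jwt-token",           # placeholder value
  lifetime_ms: (Time.now.to_i + 900) * 1000, # valid for ~15 minutes
  principal_name: "kafka-cluster"
)
raise "token not accepted: #{result}" unless result.zero?
```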
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -36,6 +36,16 @@ describe Rdkafka::Bindings do
     expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
   end

+  it "should log fatal messages" do
+    Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+    expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+  end
+
+  it "should log fatal messages" do
+    Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+    expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+  end
+
   it "should log error messages" do
     Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
     expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
@@ -51,6 +61,11 @@ describe Rdkafka::Bindings do
     expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
   end

+  it "should log info messages" do
+    Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+    expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+  end
+
   it "should log debug messages" do
     Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
     expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
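These additions pin down how librdkafka's syslog-style levels (0-7) map onto Ruby `Logger` severities. An illustrative reconstruction of the mapping the specs exercise (not the gem's literal source):

```ruby
# Syslog level => Logger severity, as exercised by the specs above.
def logger_severity(syslog_level)
  case syslog_level
  when 0..2 then Logger::FATAL # emerg, alert, crit
  when 3    then Logger::ERROR
  when 4    then Logger::WARN
  when 5, 6 then Logger::INFO  # notice, info
  else           Logger::DEBUG # 7
  end
end
```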
@@ -132,4 +147,86 @@ describe Rdkafka::Bindings do
       end
     end
   end
+
+  describe "oauthbearer set token" do
+
+    context "without args" do
+      it "should raise argument error" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+
+    context "without args" do
+
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
 end
data/spec/rdkafka/config_spec.rb CHANGED
@@ -22,6 +22,7 @@ describe Rdkafka::Config do
   it "supports logging queue" do
     log = StringIO.new
     Rdkafka::Config.logger = Logger.new(log)
+    Rdkafka::Config.ensure_log_thread

     Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
     20.times do
@@ -31,6 +32,25 @@ describe Rdkafka::Config do

     expect(log.string).to include "FATAL -- : I love testing"
   end
+
+  it "expect to start new logger thread after fork and work" do
+    reader, writer = IO.pipe
+
+    pid = fork do
+      $stdout.reopen(writer)
+      Rdkafka::Config.logger = Logger.new($stdout)
+      reader.close
+      producer = rdkafka_producer_config(debug: 'all').producer
+      producer.close
+      writer.close
+      sleep(1)
+    end
+
+    writer.close
+    Process.wait(pid)
+    output = reader.read
+    expect(output.split("\n").size).to be >= 20
+  end
 end

 context "statistics callback" do
@@ -95,6 +115,39 @@ describe Rdkafka::Config do
     end
   end

+  context "oauthbearer calllback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
   context "configuration" do
     it "should store configuration" do
       config = Rdkafka::Config.new
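Together with `Bindings::OAuthbearerTokenRefreshCallback` above, this callback is the hook for automatic token renewal: librdkafka asks for a fresh token, and the application answers by calling `oauthbearer_set_token` on the matching client. A hedged wiring sketch; `fetch_oauth_token` and the `CLIENTS` registry are hypothetical:

```ruby
# Sketch: answering token refresh requests with a fresh token.
Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
  token, expires_at = fetch_oauth_token # hypothetical: returns [String, epoch seconds]

  # Hypothetical registry mapping client names to client instances
  client = CLIENTS.fetch(client_name)
  client.oauthbearer_set_token(
    token: token,
    lifetime_ms: expires_at * 1000,
    principal_name: "kafka-cluster"
  )
end
```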
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -14,6 +14,19 @@ describe Rdkafka::Consumer do
     it { expect(consumer.name).to include('rdkafka#consumer-') }
   end

+  describe 'consumer without auto-start' do
+    let(:consumer) { rdkafka_consumer_config.consumer(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      consumer.start
+      consumer.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      consumer.close
+    end
+  end
+
   describe "#subscribe, #unsubscribe and #subscription" do
     it "should subscribe, unsubscribe and return the subscription" do
       expect(consumer.subscription).to be_empty
@@ -211,6 +224,11 @@ describe Rdkafka::Consumer do

       # 7. ensure same message is read again
       message2 = consumer.poll(timeout)
+
+      # This is needed because `enable.auto.offset.store` is true but when running in CI that
+      # is overloaded, offset store lags
+      sleep(2)
+
       consumer.commit
       expect(message1.offset).to eq message2.offset
       expect(message1.payload).to eq message2.payload
@@ -480,6 +498,8 @@ describe Rdkafka::Consumer do
   end

   describe "#store_offset" do
+    let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': false).consumer }
+
     before do
       config = {}
       config[:'enable.auto.offset.store'] = false
@@ -542,6 +562,14 @@ describe Rdkafka::Consumer do
          }.to raise_error(Rdkafka::RdkafkaError)
        end
      end
+
+      context "when trying to use with enable.auto.offset.store set to true" do
+        let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': true).consumer }
+
+        it "expect to raise invalid configuration error" do
+          expect { consumer.store_offset(message) }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
+        end
+      end
     end
   end
 end
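`#store_offset` now explicitly requires `enable.auto.offset.store` to be `false`; with the default of `true` it raises an `invalid_arg` error, as the new context shows. The manual-storage pattern, sketched:

```ruby
# Sketch: manual offset storage (requires enable.auto.offset.store: false).
consumer = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",
  "group.id": "example",
  "enable.auto.offset.store": false
).consumer

consumer.subscribe("events")

while (message = consumer.poll(250))
  handle_message(message)        # hypothetical processing step
  consumer.store_offset(message) # mark it for the next (auto) commit
end
```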
@@ -1123,6 +1151,16 @@ describe Rdkafka::Consumer do
     end
   end

+  describe '#consumer_group_metadata_pointer' do
+    let(:pointer) { consumer.consumer_group_metadata_pointer }
+
+    after { Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer) }
+
+    it 'expect to return a pointer' do
+      expect(pointer).to be_a(FFI::Pointer)
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
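`#consumer_group_metadata_pointer` exposes the raw librdkafka consumer group metadata handle as an `FFI::Pointer`. As the `after` hook shows, the caller owns it and must release it with `rd_kafka_consumer_group_metadata_destroy`. A sketch:

```ruby
# Sketch: borrowing and releasing the native group metadata handle.
pointer = consumer.consumer_group_metadata_pointer

begin
  # Pass `pointer` to lower-level librdkafka calls that need group
  # metadata (for example, transactional offset commits).
  use_group_metadata(pointer) # hypothetical
ensure
  Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer)
end
```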
@@ -1276,4 +1314,40 @@ describe Rdkafka::Consumer do
       ])
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = consumer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        $consumer_sasl = rdkafka_producer_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        ).consumer
+      end
+
+      after do
+        $consumer_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $consumer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
data/spec/rdkafka/native_kafka_spec.rb CHANGED
@@ -10,8 +10,9 @@ describe Rdkafka::NativeKafka do
   subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }

   before do
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_name).and_return('producer-1')
     allow(Thread).to receive(:new).and_return(thread)
-
+    allow(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
     allow(thread).to receive(:[]=).with(:closing, anything)
     allow(thread).to receive(:join)
     allow(thread).to receive(:abort_on_exception=).with(anything)
@@ -20,6 +21,12 @@ describe Rdkafka::NativeKafka do
   after { client.close }

   context "defaults" do
+    it "sets the thread name" do
+      expect(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
+
+      client
+    end
+
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)

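The polling thread now carries the name `rdkafka.native_kafka#<client name>`, which makes it easy to pick out in thread dumps. For instance:

```ruby
# Sketch: finding the native polling thread by its new name.
producer = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").producer

polling_thread = Thread.list.find do |thread|
  thread.name.to_s.start_with?("rdkafka.native_kafka#")
end

puts polling_thread.name # e.g. "rdkafka.native_kafka#rdkafka#producer-1"
```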
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -15,6 +15,10 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.topic_name).to eq "topic"
   end

+  it "should get the same topic name under topic alias" do
+    expect(subject.topic).to eq "topic"
+  end
+
   it "should get the error" do
     expect(subject.error).to eq -1
   end