rdkafka 0.15.2 → 0.16.0.beta1
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +2 -4
- data/.gitignore +2 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +10 -3
- data/README.md +19 -9
- data/docker-compose.yml +1 -1
- data/ext/Rakefile +1 -3
- data/lib/rdkafka/abstract_handle.rb +44 -20
- data/lib/rdkafka/admin/create_topic_report.rb +1 -1
- data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
- data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
- data/lib/rdkafka/admin.rb +15 -0
- data/lib/rdkafka/bindings.rb +35 -3
- data/lib/rdkafka/callbacks.rb +18 -10
- data/lib/rdkafka/config.rb +69 -15
- data/lib/rdkafka/consumer.rb +7 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/native_kafka.rb +32 -19
- data/lib/rdkafka/producer.rb +7 -0
- data/lib/rdkafka/version.rb +1 -1
- data/lib/rdkafka.rb +1 -0
- data/spec/rdkafka/abstract_handle_spec.rb +34 -21
- data/spec/rdkafka/admin_spec.rb +53 -0
- data/spec/rdkafka/bindings_spec.rb +97 -0
- data/spec/rdkafka/config_spec.rb +53 -0
- data/spec/rdkafka/consumer_spec.rb +54 -0
- data/spec/rdkafka/native_kafka_spec.rb +8 -1
- data/spec/rdkafka/producer_spec.rb +43 -0
- data/spec/spec_helper.rb +16 -1
- data.tar.gz.sig +0 -0
- metadata +4 -4
- metadata.gz.sig +0 -0
- data/dist/librdkafka_2.3.0.tar.gz +0 -0
data/lib/rdkafka/consumer.rb
CHANGED
@@ -13,12 +13,19 @@ module Rdkafka
   class Consumer
     include Enumerable
     include Helpers::Time
+    include Helpers::OAuth
 
     # @private
     def initialize(native_kafka)
      @native_kafka = native_kafka
     end
 
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] consumer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
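The new Consumer#start pairs with the native_kafka_auto_start: false option exercised in the specs further down. A minimal sketch of the deferred-start flow, assuming a reachable broker at localhost:9092 (placeholder address):

require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # placeholder broker
  "group.id" => "example-group"
)

# Build the consumer without kicking off the initial poll yet
consumer = config.consumer(native_kafka_auto_start: false)

# ... configure tokens or callbacks here, before any polling happens ...

consumer.start
consumer.close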
data/lib/rdkafka/helpers/oauth.rb
ADDED
@@ -0,0 +1,58 @@
+module Rdkafka
+  module Helpers
+
+    module OAuth
+
+      # Set the OAuthBearer token
+      #
+      # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
+      # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
+      # @param principal_name [String] the mandatory Kafka principal name associated with the token.
+      # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
+      # @return [Integer] 0 on success
+      def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
+        error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+
+        response = @native_kafka.with_inner do |inner|
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
+            inner, token, lifetime_ms, principal_name,
+            flatten_extensions(extensions), extension_size(extensions), error_buffer, 256
+          )
+        end
+
+        return response if response.zero?
+
+        oauthbearer_set_token_failure("Failed to set token: #{error_buffer.read_string}")
+
+        response
+      end
+
+      # Marks failed oauth token acquire in librdkafka
+      #
+      # @param reason [String] human readable error reason for failing to acquire token
+      def oauthbearer_set_token_failure(reason)
+        @native_kafka.with_inner do |inner|
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(
+            inner,
+            reason
+          )
+        end
+      end
+
+      private
+
+      # Flatten the extensions hash into a string according to the spec, https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
+      def flatten_extensions(extensions)
+        return nil unless extensions
+        "\x01#{extensions.map { |e| e.join("=") }.join("\x01")}"
+      end
+
+      # extension_size is the number of keys + values which should be a non-negative even number
+      # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
+      def extension_size(extensions)
+        return 0 unless extensions
+        extensions.size * 2
+      end
+    end
+  end
+end
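A hedged usage sketch of the new helper, mirroring the SASL setup used in the specs below; broker address, token, lifetime, and principal are placeholder values, and a real token would come from your identity provider:

require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # placeholder broker
  "security.protocol" => "sasl_ssl",
  "sasl.mechanisms"   => "OAUTHBEARER"
)
producer = config.producer

result = producer.oauthbearer_set_token(
  token: "placeholder-token",
  lifetime_ms: Time.now.to_i * 1000 + 900 * 1000, # expires in ~15 minutes
  principal_name: "kafka-cluster",
  extensions: { "traceId" => "abc123" } # optional, flattened per RFC 7628
)

# On a non-zero response the helper has already reported the failure to
# librdkafka via oauthbearer_set_token_failure.
producer.close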
data/lib/rdkafka/native_kafka.rb
CHANGED
@@ -4,7 +4,7 @@ module Rdkafka
   # @private
   # A wrapper around a native kafka that polls and cleanly exits
   class NativeKafka
-    def initialize(inner, run_polling_thread:, opaque:)
+    def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
       @inner = inner
       @opaque = opaque
       # Lock around external access
@@ -28,30 +28,43 @@ module Rdkafka
       # counter for operations in progress using inner
       @operations_in_progress = 0
 
-      # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
-      Rdkafka::Bindings.rd_kafka_poll(inner, 0)
+      @run_polling_thread = run_polling_thread
 
-      if run_polling_thread
-        # Start thread to poll client for delivery callbacks,
-        # not used in consumer.
-        @polling_thread = Thread.new do
-          loop do
-            @poll_mutex.synchronize do
-              Rdkafka::Bindings.rd_kafka_poll(inner, 100)
-            end
+      start if auto_start
 
-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner) == 0
-              break
+      @closing = false
+    end
+
+    def start
+      synchronize do
+        return if @started
+
+        @started = true
+
+        # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
+        Rdkafka::Bindings.rd_kafka_poll(@inner, 0)
+
+        if @run_polling_thread
+          # Start thread to poll client for delivery callbacks,
+          # not used in consumer.
+          @polling_thread = Thread.new do
+            loop do
+              @poll_mutex.synchronize do
+                Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
+              end
+
+              # Exit thread if closing and the poll queue is empty
+              if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(@inner) == 0
+                break
+              end
             end
           end
-        end
 
-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
+          @polling_thread.name = "rdkafka.native_kafka##{Rdkafka::Bindings.rd_kafka_name(@inner).gsub('rdkafka', '')}"
+          @polling_thread.abort_on_exception = true
+          @polling_thread[:closing] = false
+        end
       end
-
-      @closing = false
     end
 
     def with_inner
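One visible effect of moving the poll loop into #start: the polling thread now carries a descriptive name, which makes it easy to spot in thread dumps. An illustrative way to observe it, again assuming a reachable broker at a placeholder address:

require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# Thread name format introduced in this release: "rdkafka.native_kafka#<client name>"
poll_thread = Thread.list.find { |t| t.name.to_s.start_with?("rdkafka.native_kafka#") }
puts poll_thread&.name

producer.close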
data/lib/rdkafka/producer.rb
CHANGED
@@ -4,6 +4,7 @@ module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
     include Helpers::Time
+    include Helpers::OAuth
 
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
@@ -53,6 +54,12 @@ module Rdkafka
       end
     end
 
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] producer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
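Since Producer now exposes both #start and the OAuth helpers, one plausible pattern (an assumption, not documented guidance) is to seed a token before the initial poll runs:

require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # placeholder broker
  "security.protocol" => "sasl_ssl",
  "sasl.mechanisms"   => "OAUTHBEARER"
)

# Defer the init poll so the token is in place before callbacks fire
producer = config.producer(native_kafka_auto_start: false)

producer.oauthbearer_set_token(
  token: "placeholder-token",
  lifetime_ms: Time.now.to_i * 1000 + 900 * 1000,
  principal_name: "kafka-cluster"
)

producer.start
producer.close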
data/lib/rdkafka/version.rb
CHANGED
data/lib/rdkafka.rb
CHANGED
data/spec/rdkafka/abstract_handle_spec.rb
CHANGED
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
   end
 
   describe "#wait" do
-    let(:pending_handle) { true }
+    context 'when pending_handle true' do
+      let(:pending_handle) { true }
 
-    it "should wait until the timeout and then raise an error" do
-      expect {
-        subject.wait(max_wait_timeout: 0.1)
-      }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      it "should wait until the timeout and then raise an error" do
+        expect(Kernel).not_to receive(:warn)
+        expect {
+          subject.wait(max_wait_timeout: 0.1)
+        }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      end
     end
 
-    context "when not pending anymore and no error" do
+    context 'when pending_handle false' do
       let(:pending_handle) { false }
-      let(:result) { 1 }
 
-      it "should return a result" do
-        wait_result = subject.wait
-        expect(wait_result).to eq(result)
+      it 'should show a deprecation warning when wait_timeout is set' do
+        expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
+        subject.wait(wait_timeout: 0.1)
       end
 
-      it "should wait without a timeout" do
-        wait_result = subject.wait(max_wait_timeout: nil)
-        expect(wait_result).to eq(result)
+      context "without error" do
+        let(:result) { 1 }
+
+        it "should return a result" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait
+          expect(wait_result).to eq(result)
+        end
+
+        it "should wait without a timeout" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait(max_wait_timeout: nil)
+          expect(wait_result).to eq(result)
+        end
       end
-    end
 
-    context "when not pending anymore and there was an error" do
-      let(:pending_handle) { false }
-      let(:response) { 20 }
+      context "with error" do
+        let(:response) { 20 }
 
-      it "should raise an rdkafka error" do
-        expect {
-          subject.wait
-        }.to raise_error Rdkafka::RdkafkaError
+        it "should raise an rdkafka error" do
+          expect(Kernel).not_to receive(:warn)
+          expect {
+            subject.wait
+          }.to raise_error Rdkafka::RdkafkaError
+        end
       end
     end
   end
 end
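These spec changes document a deprecation: passing wait_timeout: to AbstractHandle#wait now emits WAIT_TIMEOUT_DEPRECATION_MESSAGE via Kernel.warn, while max_wait_timeout: remains the supported knob. A short sketch with a placeholder broker and topic:

require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "example", payload: "hi")

# Preferred: bounded wait in seconds; raises WaitTimeoutError on expiry
report = handle.wait(max_wait_timeout: 60)
puts report.offset

producer.close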
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -31,6 +31,19 @@ describe Rdkafka::Admin do
   let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
   let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
 
+  describe 'admin without auto-start' do
+    let(:admin) { config.admin(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      admin.start
+      admin.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      admin.close
+    end
+  end
+
   describe "#create_topic" do
     describe "called with invalid input" do
       describe "with an invalid topic name" do
@@ -275,6 +288,9 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
       expect(create_acl_report.rdkafka_response).to eq(0)
       expect(create_acl_report.rdkafka_response_string).to eq("")
 
+      # Since we create and immediately check, this is slow on loaded CIs, hence we wait
+      sleep(2)
+
       #describe_acl
       describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
       describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
@@ -404,4 +420,41 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = admin.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        config_sasl = rdkafka_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        )
+        $admin_sasl = config_sasl.admin
+      end
+
+      after do
+        $admin_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $admin_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
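The same deferred-start contract applies to the admin client; per the specs above, closing without ever starting is also safe. A minimal sketch with a placeholder broker:

require "rdkafka"

config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092") # placeholder broker
admin = config.admin(native_kafka_auto_start: false)
admin.close # safe even though start was never called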
data/spec/rdkafka/bindings_spec.rb
CHANGED
@@ -36,6 +36,16 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
     end
 
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
     it "should log error messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
@@ -51,6 +61,11 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
     end
 
+    it "should log info messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+    end
+
     it "should log debug messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
@@ -132,4 +147,86 @@ describe Rdkafka::Bindings do
       end
     end
   end
+
+  describe "oauthbearer set token" do
+
+    context "without args" do
+      it "should raise argument error" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+
+    context "without args" do
+
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
 end
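The callback exercised above is registered through Rdkafka::Config.oauthbearer_token_refresh_callback; per the specs it receives the client's config string and name, and any object responding to #call is accepted. A sketch with placeholder logic:

require "rdkafka"

Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
  # A real implementation would fetch a fresh token from an identity
  # provider and hand it to the matching client via oauthbearer_set_token;
  # locating that client by name is application-specific and left out here.
  puts "token refresh requested by #{client_name} (config: #{config})"
end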
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -22,6 +22,7 @@ describe Rdkafka::Config do
     it "supports logging queue" do
       log = StringIO.new
       Rdkafka::Config.logger = Logger.new(log)
+      Rdkafka::Config.ensure_log_thread
 
       Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
       20.times do
@@ -31,6 +32,25 @@ describe Rdkafka::Config do
 
       expect(log.string).to include "FATAL -- : I love testing"
     end
+
+    it "expect to start new logger thread after fork and work" do
+      reader, writer = IO.pipe
+
+      pid = fork do
+        $stdout.reopen(writer)
+        Rdkafka::Config.logger = Logger.new($stdout)
+        reader.close
+        producer = rdkafka_producer_config(debug: 'all').producer
+        producer.close
+        writer.close
+        sleep(1)
+      end
+
+      writer.close
+      Process.wait(pid)
+      output = reader.read
+      expect(output.split("\n").size).to be >= 20
+    end
   end
 
   context "statistics callback" do
@@ -95,6 +115,39 @@ describe Rdkafka::Config do
     end
   end
 
+  context "oauthbearer calllback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
   context "configuration" do
     it "should store configuration" do
       config = Rdkafka::Config.new
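The new Rdkafka::Config.ensure_log_thread exists because the log-dispatch thread does not survive fork; the fork spec above rebuilds logging in the child. A condensed sketch of the fork-safe pattern (assumes a platform with Kernel#fork and a placeholder broker):

require "rdkafka"
require "logger"

pid = fork do
  # The parent's log thread is gone in the child; reassigning the logger
  # (and, explicitly, ensure_log_thread) brings log dispatch back up.
  Rdkafka::Config.logger = Logger.new($stdout)
  Rdkafka::Config.ensure_log_thread

  producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
  producer.close
end

Process.wait(pid)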
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -14,6 +14,19 @@ describe Rdkafka::Consumer do
     it { expect(consumer.name).to include('rdkafka#consumer-') }
   end
 
+  describe 'consumer without auto-start' do
+    let(:consumer) { rdkafka_consumer_config.consumer(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      consumer.start
+      consumer.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      consumer.close
+    end
+  end
+
   describe "#subscribe, #unsubscribe and #subscription" do
     it "should subscribe, unsubscribe and return the subscription" do
       expect(consumer.subscription).to be_empty
@@ -211,6 +224,11 @@ describe Rdkafka::Consumer do
 
       # 7. ensure same message is read again
      message2 = consumer.poll(timeout)
+
+      # This is needed because `enable.auto.offset.store` is true but when running in CI that
+      # is overloaded, offset store lags
+      sleep(2)
+
      consumer.commit
      expect(message1.offset).to eq message2.offset
      expect(message1.payload).to eq message2.payload
@@ -1296,4 +1314,40 @@ describe Rdkafka::Consumer do
      ])
    end
  end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = consumer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        $consumer_sasl = rdkafka_producer_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        ).consumer
+      end
+
+      after do
+        $consumer_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $consumer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
data/spec/rdkafka/native_kafka_spec.rb
CHANGED
@@ -10,8 +10,9 @@ describe Rdkafka::NativeKafka do
   subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }
 
   before do
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_name).and_return('producer-1')
     allow(Thread).to receive(:new).and_return(thread)
-
+    allow(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
     allow(thread).to receive(:[]=).with(:closing, anything)
     allow(thread).to receive(:join)
     allow(thread).to receive(:abort_on_exception=).with(anything)
@@ -20,6 +21,12 @@ describe Rdkafka::NativeKafka do
   after { client.close }
 
   context "defaults" do
+    it "sets the thread name" do
+      expect(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
+
+      client
+    end
+
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
 
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -14,6 +14,19 @@ describe Rdkafka::Producer do
     consumer.close
   end
 
+  describe 'producer without auto-start' do
+    let(:producer) { rdkafka_producer_config.producer(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      producer.start
+      producer.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      producer.close
+    end
+  end
+
   describe '#name' do
     it { expect(producer.name).to include('rdkafka#producer-') }
   end
@@ -734,4 +747,34 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = producer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      it 'should succeed' do
+        producer_sasl = rdkafka_producer_config(
+          {
+            "security.protocol": "sasl_ssl",
+            "sasl.mechanisms": 'OAUTHBEARER'
+          }
+        ).producer
+        response = producer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end