karafka-rdkafka 0.14.10 → 0.15.0
This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +2 -4
- data/.gitignore +2 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +11 -0
- data/README.md +19 -9
- data/docker-compose.yml +1 -1
- data/ext/Rakefile +8 -0
- data/lib/rdkafka/abstract_handle.rb +44 -20
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_topic_report.rb +1 -1
- data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
- data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
- data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +174 -0
- data/lib/rdkafka/bindings.rb +75 -3
- data/lib/rdkafka/callbacks.rb +103 -19
- data/lib/rdkafka/config.rb +46 -9
- data/lib/rdkafka/consumer.rb +7 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/native_kafka.rb +32 -19
- data/lib/rdkafka/producer.rb +7 -0
- data/lib/rdkafka/version.rb +1 -1
- data/lib/rdkafka.rb +7 -0
- data/spec/rdkafka/abstract_handle_spec.rb +34 -21
- data/spec/rdkafka/admin_spec.rb +328 -3
- data/spec/rdkafka/bindings_spec.rb +97 -0
- data/spec/rdkafka/config_spec.rb +33 -0
- data/spec/rdkafka/consumer_spec.rb +50 -1
- data/spec/rdkafka/native_kafka_spec.rb +8 -1
- data/spec/rdkafka/producer_spec.rb +43 -0
- data/spec/spec_helper.rb +16 -1
- data.tar.gz.sig +0 -0
- metadata +10 -3
- metadata.gz.sig +0 -0
data/spec/rdkafka/bindings_spec.rb
CHANGED
@@ -36,6 +36,16 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
     end
 
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
     it "should log error messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
@@ -51,6 +61,11 @@ describe Rdkafka::Bindings do
       expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
     end
 
+    it "should log info messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+    end
+
     it "should log debug messages" do
       Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
       expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
@@ -132,4 +147,86 @@ describe Rdkafka::Bindings do
       end
     end
   end
+
+  describe "oauthbearer set token" do
+
+    context "without args" do
+      it "should raise argument error" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+
+    context "without args" do
+
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
 end
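Taken together, the three new describe blocks exercise the raw FFI entry points (rd_kafka_oauthbearer_set_token and rd_kafka_oauthbearer_set_token_failure) and the dispatch from OAuthbearerTokenRefreshCallback into Rdkafka::Config.oauthbearer_token_refresh_callback. In application code the pieces combine roughly as in the sketch below; fetch_token and client_for are hypothetical helpers, while the callback signature and the oauthbearer_set_token keywords come from the specs in this release.

    # Sketch of the intended OAuth flow; fetch_token and client_for are
    # hypothetical application helpers, not part of karafka-rdkafka.
    Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
      # config is the client's config string; client_name is e.g. "rdkafka#consumer-1"
      token, lifetime_s = fetch_token

      client_for(client_name).oauthbearer_set_token(
        token: token,
        lifetime_ms: Time.now.to_i * 1000 + lifetime_s * 1000,
        principal_name: "kafka-cluster"
      )
    end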
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -115,6 +115,39 @@ describe Rdkafka::Config do
     end
   end
 
+  context "oauthbearer calllback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
   context "configuration" do
     it "should store configuration" do
       config = Rdkafka::Config.new
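Since the setter accepts any object responding to call (and raises TypeError otherwise, per the last example), a reusable provider object works as well as a proc. A minimal sketch with an illustrative class:

    # Illustrative callable; anything responding to #call is accepted.
    class TokenRefresher
      def call(config, client_name)
        # fetch a fresh token and hand it to the client named client_name
      end
    end

    Rdkafka::Config.oauthbearer_token_refresh_callback = TokenRefresher.new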
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -14,6 +14,19 @@ describe Rdkafka::Consumer do
     it { expect(consumer.name).to include('rdkafka#consumer-') }
   end
 
+  describe 'consumer without auto-start' do
+    let(:consumer) { rdkafka_consumer_config.consumer(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      consumer.start
+      consumer.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      consumer.close
+    end
+  end
+
   describe "#subscribe, #unsubscribe and #subscription" do
     it "should subscribe, unsubscribe and return the subscription" do
       expect(consumer.subscription).to be_empty
@@ -214,7 +227,7 @@ describe Rdkafka::Consumer do
 
       # This is needed because `enable.auto.offset.store` is true but when running in CI that
       # is overloaded, offset store lags
-      sleep(
+      sleep(2)
 
       consumer.commit
       expect(message1.offset).to eq message2.offset
@@ -1329,4 +1342,40 @@ describe Rdkafka::Consumer do
       ])
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = consumer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        $consumer_sasl = rdkafka_producer_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        ).consumer
+      end
+
+      after do
+        $consumer_sasl.close
+      end
+
+      it 'should succeed' do
+
+        response = $consumer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
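The native_kafka_auto_start: false option exercised above defers starting the underlying native client until #start is called, and #close remains safe even if the client never started. A minimal sketch, assuming only a placeholder bootstrap server:

    # Deferred start, mirroring the spec above; the broker address is a placeholder.
    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    consumer = config.consumer(native_kafka_auto_start: false)

    consumer.start # start the native client explicitly when ready
    consumer.close # closing without ever starting is also supported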
data/spec/rdkafka/native_kafka_spec.rb
CHANGED
@@ -10,8 +10,9 @@ describe Rdkafka::NativeKafka do
   subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }
 
   before do
+    allow(Rdkafka::Bindings).to receive(:rd_kafka_name).and_return('producer-1')
     allow(Thread).to receive(:new).and_return(thread)
-
+    allow(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
     allow(thread).to receive(:[]=).with(:closing, anything)
     allow(thread).to receive(:join)
     allow(thread).to receive(:abort_on_exception=).with(anything)
@@ -20,6 +21,12 @@ describe Rdkafka::NativeKafka do
   after { client.close }
 
   context "defaults" do
+    it "sets the thread name" do
+      expect(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
+
+      client
+    end
+
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
 
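Because the polling thread now carries the name rdkafka.native_kafka#<client name>, it can be picked out of a thread dump with plain stdlib calls. For example:

    # Find rdkafka polling threads by their name (Thread#name may be nil).
    Thread.list.select { |t| t.name.to_s.start_with?("rdkafka.native_kafka#") }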
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -14,6 +14,19 @@ describe Rdkafka::Producer do
     consumer.close
   end
 
+  describe 'producer without auto-start' do
+    let(:producer) { rdkafka_producer_config.producer(native_kafka_auto_start: false) }
+
+    it 'expect to be able to start it later and close' do
+      producer.start
+      producer.close
+    end
+
+    it 'expect to be able to close it without starting' do
+      producer.close
+    end
+  end
+
   describe '#name' do
     it { expect(producer.name).to include('rdkafka#producer-') }
   end
@@ -917,4 +930,34 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = producer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      it 'should succeed' do
+        producer_sasl = rdkafka_producer_config(
+          {
+            "security.protocol": "sasl_ssl",
+            "sasl.mechanisms": 'OAUTHBEARER'
+          }
+        ).producer
+        response = producer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
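As on the consumer side, oauthbearer_set_token returns 0 only when SASL/OAUTHBEARER is the configured mechanism and RD_KAFKA_RESP_ERR__STATE otherwise. A sketch of the configuration the spec relies on, with a placeholder broker address:

    # Only a client configured for SASL/OAUTHBEARER accepts a token.
    producer = Rdkafka::Config.new(
      "bootstrap.servers" => "broker:9093", # placeholder
      "security.protocol" => "sasl_ssl",
      "sasl.mechanisms"   => "OAUTHBEARER"
    ).producer

    producer.oauthbearer_set_token(
      token: "foo",
      lifetime_ms: Time.now.to_i * 1000 + 900 * 1000,
      principal_name: "kafka-cluster"
    ) # => 0 on success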
data/spec/spec_helper.rb
CHANGED
@@ -139,7 +139,7 @@ RSpec.configure do |config|
     }.each do |topic, partitions|
       create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
       begin
-        create_topic_handle.wait(max_wait_timeout:
+        create_topic_handle.wait(max_wait_timeout: 1.0)
       rescue Rdkafka::RdkafkaError => ex
         raise unless ex.message.match?(/topic_already_exists/)
       end
@@ -155,3 +155,18 @@ RSpec.configure do |config|
     end
   end
 end
+
+class RdKafkaTestConsumer
+  def self.with
+    consumer = Rdkafka::Bindings.rd_kafka_new(
+      :rd_kafka_consumer,
+      nil,
+      nil,
+      0
+    )
+    yield consumer
+  ensure
+    Rdkafka::Bindings.rd_kafka_consumer_close(consumer)
+    Rdkafka::Bindings.rd_kafka_destroy(consumer)
+  end
+end
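RdKafkaTestConsumer.with yields a raw rd_kafka_t pointer and guarantees rd_kafka_consumer_close and rd_kafka_destroy run afterwards. The bindings specs above use it like this:

    # Usage taken from the bindings specs above.
    RdKafkaTestConsumer.with do |consumer_ptr|
      Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, "error")
    end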
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.14.10
+  version: 0.15.0
 platform: ruby
 authors:
 - Thijs Cadier
@@ -36,7 +36,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-
+date: 2024-04-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -194,6 +194,8 @@ files:
 - lib/rdkafka/abstract_handle.rb
 - lib/rdkafka/admin.rb
 - lib/rdkafka/admin/acl_binding_result.rb
+- lib/rdkafka/admin/config_binding_result.rb
+- lib/rdkafka/admin/config_resource_binding_result.rb
 - lib/rdkafka/admin/create_acl_handle.rb
 - lib/rdkafka/admin/create_acl_report.rb
 - lib/rdkafka/admin/create_partitions_handle.rb
@@ -208,6 +210,10 @@ files:
 - lib/rdkafka/admin/delete_topic_report.rb
 - lib/rdkafka/admin/describe_acl_handle.rb
 - lib/rdkafka/admin/describe_acl_report.rb
+- lib/rdkafka/admin/describe_configs_handle.rb
+- lib/rdkafka/admin/describe_configs_report.rb
+- lib/rdkafka/admin/incremental_alter_configs_handle.rb
+- lib/rdkafka/admin/incremental_alter_configs_report.rb
 - lib/rdkafka/bindings.rb
 - lib/rdkafka/callbacks.rb
 - lib/rdkafka/config.rb
@@ -217,6 +223,7 @@ files:
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
+- lib/rdkafka/helpers/oauth.rb
 - lib/rdkafka/helpers/time.rb
 - lib/rdkafka/metadata.rb
 - lib/rdkafka/native_kafka.rb
@@ -278,7 +285,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.9
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig
CHANGED
Binary file