karafka-rdkafka 0.19.5 → 0.20.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/ci_linux_x86_64_gnu.yml +271 -0
- data/.github/workflows/ci_linux_x86_64_musl.yml +194 -0
- data/.github/workflows/ci_macos_arm64.yml +284 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +65 -0
- data/.github/workflows/push_linux_x86_64_musl.yml +79 -0
- data/.github/workflows/push_macos_arm64.yml +54 -0
- data/.github/workflows/{push.yml → push_ruby.yml} +3 -3
- data/.gitignore +1 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +16 -0
- data/README.md +1 -0
- data/docker-compose.yml +1 -1
- data/ext/Rakefile +1 -1
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_linux_x86_64_musl.sh +763 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/karafka-rdkafka.gemspec +27 -5
- data/lib/rdkafka/bindings.rb +7 -5
- data/lib/rdkafka/producer.rb +11 -6
- data/lib/rdkafka/version.rb +1 -1
- data/renovate.json +74 -0
- data/spec/rdkafka/admin_spec.rb +216 -2
- data/spec/rdkafka/bindings_spec.rb +0 -25
- data/spec/rdkafka/consumer_spec.rb +31 -13
- data/spec/rdkafka/metadata_spec.rb +2 -2
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +1 -1
- data/spec/rdkafka/producer_spec.rb +300 -6
- data/spec/spec_helper.rb +21 -7
- metadata +37 -59
- data/.github/workflows/ci.yml +0 -99
- data/Guardfile +0 -19
- data/spec/rdkafka/producer/partitions_count_spec.rb +0 -359
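
Most of the new spec lines in this diff exercise the per-message `partitioner:` argument that `Rdkafka::Producer#produce` accepts alongside `partition_key:`. As a rough, hedged sketch of how that API is driven (the topic name and broker address below are placeholders, not values taken from this diff):

```ruby
# Illustrative sketch only, mirroring the calls made in the added specs.
# "example_topic" and the bootstrap address are assumed placeholders.
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers": "127.0.0.1:9092").producer

handle = producer.produce(
  topic: "example_topic",
  payload: "test payload",
  partition_key: "test-key-123",  # hashed by the chosen partitioner
  partitioner: "murmur2_random"   # one of the partitioner names listed in the new specs
)

report = handle.wait(max_wait_timeout: 5)
puts report.partition

producer.close
```

The added producer specs assert that each of the seven partitioner names routes such keys to a valid partition.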
@@ -53,7 +53,7 @@ describe Rdkafka::Producer do
   let(:producer) do
     rdkafka_producer_config(
       'message.timeout.ms': 1_000_000,
-      :"bootstrap.servers" => "
+      :"bootstrap.servers" => "127.0.0.1:9094",
     ).producer
   end
 
@@ -340,7 +340,7 @@ describe Rdkafka::Producer do
       )
     end
 
-    expect(messages[0].partition).to
+    expect(messages[0].partition).to be >= 0
     expect(messages[0].key).to eq 'a'
   end
 
@@ -665,7 +665,7 @@ describe Rdkafka::Producer do
   context "when not being able to deliver the message" do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "
+        "bootstrap.servers": "127.0.0.1:9093",
         "message.timeout.ms": 100
       ).producer
     end
@@ -682,7 +682,7 @@ describe Rdkafka::Producer do
   context "when topic does not exist and allow.auto.create.topics is false" do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "
+        "bootstrap.servers": "127.0.0.1:9092",
         "message.timeout.ms": 100,
         "allow.auto.create.topics": false
       ).producer
@@ -781,7 +781,7 @@ describe Rdkafka::Producer do
   context 'when it cannot flush due to a timeout' do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "
+        "bootstrap.servers": "127.0.0.1:9093",
         "message.timeout.ms": 2_000
       ).producer
     end
@@ -828,7 +828,7 @@ describe Rdkafka::Producer do
   context 'when there are outgoing things in the queue' do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "
+        "bootstrap.servers": "127.0.0.1:9093",
         "message.timeout.ms": 2_000
       ).producer
     end
@@ -1231,4 +1231,298 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  let(:producer) { rdkafka_producer_config.producer }
+  let(:all_partitioners) { %w(random consistent consistent_random murmur2 murmur2_random fnv1a fnv1a_random) }
+
+  describe "partitioner behavior through producer API" do
+    context "testing all partitioners with same key" do
+      it "should not return partition 0 for all partitioners" do
+        test_key = "test-key-123"
+        results = {}
+
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: test_key,
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          results[partitioner] = report.partition
+        end
+
+        # Should not all be the same partition (especially not all 0)
+        unique_partitions = results.values.uniq
+        expect(unique_partitions.size).to be > 1
+      end
+    end
+
+    context "empty string partition key" do
+      it "should produce message with empty partition key without crashing and go to partition 0 for all partitioners" do
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            key: "test-key",
+            partition_key: "",
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+        end
+      end
+    end
+
+    context "nil partition key" do
+      it "should handle nil partition key gracefully" do
+        handle = producer.produce(
+          topic: "partitioner_test_topic",
+          payload: "test payload",
+          key: "test-key",
+          partition_key: nil
+        )
+
+        report = handle.wait(max_wait_timeout: 5)
+        expect(report.partition).to be >= 0
+        expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+      end
+    end
+
+    context "various key types and lengths with different partitioners" do
+      it "should handle very short keys with all partitioners" do
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: "a",
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+
+      it "should handle very long keys with all partitioners" do
+        long_key = "a" * 1000
+
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: long_key,
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+
+      it "should handle unicode keys with all partitioners" do
+        unicode_key = "测试键值🚀"
+
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: unicode_key,
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+    end
+
+    context "consistency testing for deterministic partitioners" do
+      %w(consistent murmur2 fnv1a).each do |partitioner|
+        it "should consistently route same partition key to same partition with #{partitioner}" do
+          partition_key = "consistent-test-key"
+
+          # Produce multiple messages with same partition key
+          reports = 5.times.map do
+            handle = producer.produce(
+              topic: "partitioner_test_topic",
+              payload: "test payload #{Time.now.to_f}",
+              partition_key: partition_key,
+              partitioner: partitioner
+            )
+            handle.wait(max_wait_timeout: 5)
+          end
+
+          # All should go to same partition
+          partitions = reports.map(&:partition).uniq
+          expect(partitions.size).to eq(1)
+        end
+      end
+    end
+
+    context "randomness testing for random partitioners" do
+      %w(random consistent_random murmur2_random fnv1a_random).each do |partitioner|
+        it "should potentially distribute across partitions with #{partitioner}" do
+          # Note: random partitioners might still return same value by chance
+          partition_key = "random-test-key"
+
+          reports = 10.times.map do
+            handle = producer.produce(
+              topic: "partitioner_test_topic",
+              payload: "test payload #{Time.now.to_f}",
+              partition_key: partition_key,
+              partitioner: partitioner
+            )
+            handle.wait(max_wait_timeout: 5)
+          end
+
+          partitions = reports.map(&:partition)
+
+          # Just ensure they're valid partitions
+          partitions.each do |partition|
+            expect(partition).to be >= 0
+            expect(partition).to be < producer.partition_count("partitioner_test_topic")
+          end
+        end
+      end
+    end
+
+    context "comparing different partitioners with same key" do
+      it "should route different partition keys to potentially different partitions" do
+        keys = ["key1", "key2", "key3", "key4", "key5"]
+
+        all_partitioners.each do |partitioner|
+          reports = keys.map do |key|
+            handle = producer.produce(
+              topic: "partitioner_test_topic",
+              payload: "test payload",
+              partition_key: key,
+              partitioner: partitioner
+            )
+            handle.wait(max_wait_timeout: 5)
+          end
+
+          partitions = reports.map(&:partition).uniq
+
+          # Should distribute across multiple partitions for most partitioners
+          # (though some might hash all keys to same partition by chance)
+          expect(partitions.all? { |p| p >= 0 && p < producer.partition_count("partitioner_test_topic") }).to be true
+        end
+      end
+    end
+
+    context "partition key vs regular key behavior" do
+      it "should use partition key for partitioning when both key and partition_key are provided" do
+        # Use keys that would hash to different partitions
+        regular_key = "regular-key-123"
+        partition_key = "partition-key-456"
+
+        # Message with both keys
+        handle1 = producer.produce(
+          topic: "partitioner_test_topic",
+          payload: "test payload 1",
+          key: regular_key,
+          partition_key: partition_key
+        )
+
+        # Message with only partition key (should go to same partition)
+        handle2 = producer.produce(
+          topic: "partitioner_test_topic",
+          payload: "test payload 2",
+          partition_key: partition_key
+        )
+
+        # Message with only regular key (should go to different partition)
+        handle3 = producer.produce(
+          topic: "partitioner_test_topic",
+          payload: "test payload 3",
+          key: regular_key
+        )
+
+        report1 = handle1.wait(max_wait_timeout: 5)
+        report2 = handle2.wait(max_wait_timeout: 5)
+        report3 = handle3.wait(max_wait_timeout: 5)
+
+        # Messages 1 and 2 should go to same partition (both use partition_key)
+        expect(report1.partition).to eq(report2.partition)
+
+        # Message 3 should potentially go to different partition (uses regular key)
+        expect(report3.partition).not_to eq(report1.partition)
+      end
+    end
+
+    context "edge case combinations with different partitioners" do
+      it "should handle nil partition key with all partitioners" do
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            key: "test-key",
+            partition_key: nil,
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+
+      it "should handle whitespace-only partition key with all partitioners" do
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: " ",
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+
+      it "should handle newline characters in partition key with all partitioners" do
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "test payload",
+            partition_key: "key\nwith\nnewlines",
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          expect(report.partition).to be >= 0
+          expect(report.partition).to be < producer.partition_count("partitioner_test_topic")
+        end
+      end
+    end
+
+    context "debugging partitioner issues" do
+      it "should show if all partitioners return 0 (indicating a problem)" do
+        test_key = "debug-test-key"
+        zero_count = 0
+
+        all_partitioners.each do |partitioner|
+          handle = producer.produce(
+            topic: "partitioner_test_topic",
+            payload: "debug payload",
+            partition_key: test_key,
+            partitioner: partitioner
+          )
+
+          report = handle.wait(max_wait_timeout: 5)
+          zero_count += 1 if report.partition == 0
+        end
+
+        expect(zero_count).to be < all_partitioners.size
+      end
+    end
+  end
 end
data/spec/spec_helper.rb
CHANGED
@@ -17,7 +17,7 @@ def rdkafka_base_config
   {
     :"api.version.request" => false,
    :"broker.version.fallback" => "1.0",
-    :"bootstrap.servers" => "
+    :"bootstrap.servers" => "127.0.0.1:9092",
     # Display statistics and refresh often just to cover those in specs
     :'statistics.interval.ms' => 1_000,
     :'topic.metadata.refresh.interval.ms' => 1_000
@@ -78,18 +78,32 @@ end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
   new_consumer = consumer.nil?
-  consumer ||= rdkafka_consumer_config.consumer
+  consumer ||= rdkafka_consumer_config('allow.auto.create.topics': true).consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
+  retry_count = 0
+  max_retries = 10
+
   loop do
     if timeout <= Time.now.to_i
       raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
     end
-
-
-
-
-
+
+    begin
+      message = consumer.poll(100)
+      if message &&
+          message.partition == delivery_report.partition &&
+          message.offset == delivery_report.offset
+        return message
+      end
+    rescue Rdkafka::RdkafkaError => e
+      if e.code == :unknown_topic_or_part && retry_count < max_retries
+        retry_count += 1
+        sleep(0.1) # Small delay before retry
+        next
+      else
+        raise
+      end
     end
   end
 ensure
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.20.1
 platform: ruby
 authors:
 - Thijs Cadier
@@ -24,6 +24,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.15'
+- !ruby/object:Gem::Dependency
+  name: logger
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: mini_portile2
   requirement: !ruby/object:Gem::Requirement
@@ -53,7 +67,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '12'
 - !ruby/object:Gem::Dependency
-  name:
+  name: ostruct
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -67,21 +81,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name:
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '3.5'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '3.5'
-- !ruby/object:Gem::Dependency
-  name: rake
+  name: pry
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -95,21 +95,21 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.5'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.5'
 - !ruby/object:Gem::Dependency
-  name:
+  name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -123,7 +123,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: simplecov
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -146,8 +146,13 @@ extra_rdoc_files: []
 files:
 - ".github/CODEOWNERS"
 - ".github/FUNDING.yml"
-- ".github/workflows/
-- ".github/workflows/
+- ".github/workflows/ci_linux_x86_64_gnu.yml"
+- ".github/workflows/ci_linux_x86_64_musl.yml"
+- ".github/workflows/ci_macos_arm64.yml"
+- ".github/workflows/push_linux_x86_64_gnu.yml"
+- ".github/workflows/push_linux_x86_64_musl.yml"
+- ".github/workflows/push_macos_arm64.yml"
+- ".github/workflows/push_ruby.yml"
 - ".github/workflows/verify-action-pins.yml"
 - ".gitignore"
 - ".rspec"
@@ -156,7 +161,6 @@ files:
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
-- Guardfile
 - MIT-LICENSE
 - README.md
 - Rakefile
@@ -165,6 +169,10 @@ files:
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
+- ext/build_common.sh
+- ext/build_linux_x86_64_gnu.sh
+- ext/build_linux_x86_64_musl.sh
+- ext/build_macos_arm64.sh
 - karafka-rdkafka.gemspec
 - lib/rdkafka.rb
 - lib/rdkafka/abstract_handle.rb
@@ -235,7 +243,6 @@ files:
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer/partitions_count_cache_spec.rb
-- spec/rdkafka/producer/partitions_count_spec.rb
 - spec/rdkafka/producer_spec.rb
 - spec/spec_helper.rb
 licenses:
@@ -262,38 +269,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.6.
+rubygems_version: 3.6.9
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
   and Ruby 2.7+.
-test_files:
-- spec/rdkafka/abstract_handle_spec.rb
-- spec/rdkafka/admin/create_acl_handle_spec.rb
-- spec/rdkafka/admin/create_acl_report_spec.rb
-- spec/rdkafka/admin/create_topic_handle_spec.rb
-- spec/rdkafka/admin/create_topic_report_spec.rb
-- spec/rdkafka/admin/delete_acl_handle_spec.rb
-- spec/rdkafka/admin/delete_acl_report_spec.rb
-- spec/rdkafka/admin/delete_topic_handle_spec.rb
-- spec/rdkafka/admin/delete_topic_report_spec.rb
-- spec/rdkafka/admin/describe_acl_handle_spec.rb
-- spec/rdkafka/admin/describe_acl_report_spec.rb
-- spec/rdkafka/admin_spec.rb
-- spec/rdkafka/bindings_spec.rb
-- spec/rdkafka/callbacks_spec.rb
-- spec/rdkafka/config_spec.rb
-- spec/rdkafka/consumer/headers_spec.rb
-- spec/rdkafka/consumer/message_spec.rb
-- spec/rdkafka/consumer/partition_spec.rb
-- spec/rdkafka/consumer/topic_partition_list_spec.rb
-- spec/rdkafka/consumer_spec.rb
-- spec/rdkafka/error_spec.rb
-- spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/native_kafka_spec.rb
-- spec/rdkafka/producer/delivery_handle_spec.rb
-- spec/rdkafka/producer/delivery_report_spec.rb
-- spec/rdkafka/producer/partitions_count_cache_spec.rb
-- spec/rdkafka/producer/partitions_count_spec.rb
-- spec/rdkafka/producer_spec.rb
-- spec/spec_helper.rb
+test_files: []
data/.github/workflows/ci.yml
DELETED
@@ -1,99 +0,0 @@
-name: CI
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-on:
-  pull_request:
-    branches: [ main, master ]
-  push:
-    branches: [ main, master ]
-  schedule:
-    - cron: '0 1 * * *'
-
-permissions:
-  contents: read
-
-env:
-  BUNDLE_RETRY: 6
-  BUNDLE_JOBS: 4
-
-jobs:
-  specs:
-    timeout-minutes: 30
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        ruby:
-          - '3.4'
-          - '3.3'
-          - '3.2'
-          - '3.1'
-          - 'jruby-10.0'
-        include:
-          - ruby: '3.4'
-            coverage: 'true'
-          - ruby: 'jruby-10.0'
-            continue-on-error: true
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          fetch-depth: 0
-
-      - name: Install package dependencies
-        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
-
-      - name: Start Kafka with Docker Compose
-        run: |
-          docker compose up -d || (sleep 5 && docker compose up -d)
-
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@13e7a03dc3ac6c3798f4570bfead2aed4d96abfb # v1.244.0
-        with:
-          ruby-version: ${{matrix.ruby}}
-          bundler-cache: true
-
-      - name: Run all specs
-        env:
-          GITHUB_COVERAGE: ${{matrix.coverage}}
-        continue-on-error: ${{ matrix.continue-on-error || false }} # Use the matrix value if present
-        run: |
-          set -e
-          bundle install --jobs 4 --retry 3
-          cd ext && bundle exec rake
-          cd ..
-          bundle exec rspec
-
-  macos_build:
-    timeout-minutes: 30
-    runs-on: macos-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        ruby:
-          - '3.4'
-          - '3.3'
-          - '3.2'
-          - '3.1'
-          - 'jruby-9.4'
-        include:
-          - ruby: 'jruby-10.0'
-            continue-on-error: true
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@13e7a03dc3ac6c3798f4570bfead2aed4d96abfb # v1.244.0
-        with:
-          ruby-version: ${{matrix.ruby}}
-          bundler-cache: false
-
-      - name: Build rdkafka-ruby
-        continue-on-error: ${{ matrix.continue-on-error || false }}
-        run: |
-          set -e
-          bundle install --jobs 4 --retry 3
-          cd ext && bundle exec rake
data/Guardfile
DELETED
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-logger level: :error
-
-guard :rspec, cmd: "bundle exec rspec --format #{ENV.fetch("FORMAT", "documentation")}" do
-  require "guard/rspec/dsl"
-  dsl = Guard::RSpec::Dsl.new(self)
-
-  # Ruby files
-  ruby = dsl.ruby
-  dsl.watch_spec_files_for(ruby.lib_files)
-  watch(%r{^lib/(.+)\.rb}) { |m| "spec/#{m[1]}_spec.rb" }
-
-  # RSpec files
-  rspec = dsl.rspec
-  watch(rspec.spec_helper) { rspec.spec_dir }
-  watch(rspec.spec_support) { rspec.spec_dir }
-  watch(rspec.spec_files)
-end