karafka-rdkafka 0.21.0.rc1-arm64-darwin → 0.21.0.rc2-arm64-darwin

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/{ci_linux_x86_64_musl.yml → ci_linux_alpine_x86_64_musl.yml} +12 -9
  3. data/.github/workflows/ci_linux_alpine_x86_64_musl_complementary.yml +264 -0
  4. data/.github/workflows/ci_linux_debian_x86_64_gnu.yml +271 -0
  5. data/.github/workflows/ci_linux_debian_x86_64_gnu_complementary.yml +334 -0
  6. data/.github/workflows/{ci_linux_x86_64_gnu.yml → ci_linux_ubuntu_aarch64_gnu.yml} +15 -15
  7. data/.github/workflows/ci_linux_ubuntu_aarch64_gnu_complementary.yml +295 -0
  8. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +281 -0
  9. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu_complementary.yml +294 -0
  10. data/.github/workflows/ci_macos_arm64.yml +5 -5
  11. data/.github/workflows/push_linux_aarch64_gnu.yml +65 -0
  12. data/.github/workflows/push_linux_x86_64_gnu.yml +2 -2
  13. data/.github/workflows/push_linux_x86_64_musl.yml +3 -3
  14. data/.github/workflows/push_macos_arm64.yml +2 -2
  15. data/.github/workflows/push_ruby.yml +1 -1
  16. data/.github/workflows/trigger-wiki-refresh.yml +30 -0
  17. data/.github/workflows/verify-action-pins.yml +1 -1
  18. data/.gitignore +1 -0
  19. data/.rspec +1 -0
  20. data/CHANGELOG.md +7 -1
  21. data/README.md +34 -134
  22. data/dist/cyrus-sasl-2.1.28.tar.gz +0 -0
  23. data/dist/krb5-1.21.3.tar.gz +0 -0
  24. data/dist/openssl-3.0.16.tar.gz +0 -0
  25. data/dist/zlib-1.3.1.tar.gz +0 -0
  26. data/dist/zstd-1.5.7.tar.gz +0 -0
  27. data/docker-compose-ssl.yml +35 -0
  28. data/ext/build_common.sh +16 -1
  29. data/ext/build_linux_aarch64_gnu.sh +326 -0
  30. data/ext/build_linux_x86_64_gnu.sh +12 -1
  31. data/ext/build_linux_x86_64_musl.sh +18 -8
  32. data/ext/build_macos_arm64.sh +7 -0
  33. data/ext/generate-ssl-certs.sh +109 -0
  34. data/ext/librdkafka.dylib +0 -0
  35. data/karafka-rdkafka.gemspec +2 -0
  36. data/lib/rdkafka/bindings.rb +0 -1
  37. data/lib/rdkafka/consumer.rb +1 -1
  38. data/lib/rdkafka/version.rb +1 -1
  39. data/spec/integrations/ssl_stress_spec.rb +121 -0
  40. data/spec/{rdkafka → lib/rdkafka}/admin_spec.rb +16 -4
  41. data/spec/{rdkafka → lib/rdkafka}/consumer_spec.rb +50 -3
  42. data/spec/{rdkafka → lib/rdkafka}/metadata_spec.rb +2 -2
  43. data/spec/{rdkafka → lib/rdkafka}/producer/delivery_report_spec.rb +1 -1
  44. data/spec/{rdkafka → lib/rdkafka}/producer_spec.rb +6 -5
  45. data/spec/spec_helper.rb +45 -8
  46. metadata +76 -31
  47. /data/spec/{rdkafka → lib/rdkafka}/abstract_handle_spec.rb +0 -0
  48. /data/spec/{rdkafka → lib/rdkafka}/admin/create_acl_handle_spec.rb +0 -0
  49. /data/spec/{rdkafka → lib/rdkafka}/admin/create_acl_report_spec.rb +0 -0
  50. /data/spec/{rdkafka → lib/rdkafka}/admin/create_topic_handle_spec.rb +0 -0
  51. /data/spec/{rdkafka → lib/rdkafka}/admin/create_topic_report_spec.rb +0 -0
  52. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_acl_handle_spec.rb +0 -0
  53. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_acl_report_spec.rb +0 -0
  54. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_topic_handle_spec.rb +0 -0
  55. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_topic_report_spec.rb +0 -0
  56. /data/spec/{rdkafka → lib/rdkafka}/admin/describe_acl_handle_spec.rb +0 -0
  57. /data/spec/{rdkafka → lib/rdkafka}/admin/describe_acl_report_spec.rb +0 -0
  58. /data/spec/{rdkafka → lib/rdkafka}/bindings_spec.rb +0 -0
  59. /data/spec/{rdkafka → lib/rdkafka}/callbacks_spec.rb +0 -0
  60. /data/spec/{rdkafka → lib/rdkafka}/config_spec.rb +0 -0
  61. /data/spec/{rdkafka → lib/rdkafka}/consumer/headers_spec.rb +0 -0
  62. /data/spec/{rdkafka → lib/rdkafka}/consumer/message_spec.rb +0 -0
  63. /data/spec/{rdkafka → lib/rdkafka}/consumer/partition_spec.rb +0 -0
  64. /data/spec/{rdkafka → lib/rdkafka}/consumer/topic_partition_list_spec.rb +0 -0
  65. /data/spec/{rdkafka → lib/rdkafka}/error_spec.rb +0 -0
  66. /data/spec/{rdkafka → lib/rdkafka}/native_kafka_spec.rb +0 -0
  67. /data/spec/{rdkafka → lib/rdkafka}/producer/delivery_handle_spec.rb +0 -0
  68. /data/spec/{rdkafka → lib/rdkafka}/producer/partitions_count_cache_spec.rb +0 -0
data/ext/build_macos_arm64.sh CHANGED
@@ -448,6 +448,12 @@ log "Creating self-contained librdkafka.dylib with Kerberos support..."
 
 # Create self-contained shared library by linking all static dependencies (NOW INCLUDING KERBEROS)
 # This is the macOS equivalent of your Linux gcc -shared command
+
+# Write symbol export file (macOS equivalent of export.map)
+cat > export_symbols.txt <<'EOF'
+_rd_kafka_*
+EOF
+
 clang -dynamiclib -fPIC \
   -Wl,-force_load,src/librdkafka.a \
   -Wl,-force_load,"$SASL_PREFIX/lib/libsasl2.a" \
@@ -462,6 +468,7 @@ clang -dynamiclib -fPIC \
   -Wl,-force_load,"$ZSTD_PREFIX/lib/libzstd.a" \
   -o librdkafka.dylib \
   -lpthread -lc -arch $ARCH -lresolv \
+  -framework GSS -framework Kerberos \
  -install_name @rpath/librdkafka.dylib \
  -Wl,-undefined,dynamic_lookup
 
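With the dylib now linking the GSS/Kerberos frameworks and exporting `rd_kafka_*` symbols, a minimal FFI smoke test can confirm the rebuilt library still loads and resolves its public API. This is an editorial sketch, not part of the gem: the path is an assumption (run it next to the freshly built `librdkafka.dylib` in `data/ext/`); `rd_kafka_version_str` is librdkafka's public version accessor.

```ruby
require 'ffi'

# Hypothetical smoke test for the freshly built, self-contained dylib.
module LibrdkafkaSmoke
  extend FFI::Library
  ffi_lib File.expand_path('librdkafka.dylib', __dir__) # assumed location
  # const char *rd_kafka_version_str(void) -- public librdkafka API
  attach_function :rd_kafka_version_str, [], :string
end

puts LibrdkafkaSmoke.rd_kafka_version_str # expect "2.11.0" for this release
```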
data/ext/generate-ssl-certs.sh ADDED
@@ -0,0 +1,109 @@
+#!/bin/bash
+
+#==============================================================================
+# Kafka SSL Certificate Generator
+#==============================================================================
+#
+# DESCRIPTION:
+# Generates SSL certificates for testing Kafka with SSL/TLS encryption.
+# Creates both Java KeyStore (JKS) files for Kafka server and PEM files
+# for client applications like rdkafka.
+#
+# PURPOSE:
+# - Test SSL connectivity between Kafka clients and brokers
+# - Validate rdkafka SSL integration
+# - Enable encrypted communication for development/testing environments
+#
+# USAGE:
+# ./ext/generate-ssl-certs.sh
+# docker compose -f docker-compose-ssl.yml up
+#
+# REQUIREMENTS:
+# - OpenSSL (for certificate generation)
+# - Java keytool (usually included with JDK/JRE)
+# - Write permissions in current directory
+#
+# OUTPUT FILES (created in ./ssl/ directory):
+# ├── kafka.server.keystore.jks    # Kafka server's private key and certificate
+# ├── kafka.server.truststore.jks  # Trusted CA certificates for Kafka
+# ├── kafka_keystore_creds         # Password file for keystore
+# ├── kafka_truststore_creds       # Password file for truststore
+# ├── kafka_ssl_key_creds          # Password file for SSL keys
+# ├── ca-cert                      # CA certificate (for rdkafka clients)
+# └── ca-cert.pem                  # CA certificate in PEM format
+#
+# CONFIGURATION:
+# - Certificate validity: 365 days
+# - Password: "confluent" (all certificates use same password for simplicity)
+# - Subject: CN=localhost (suitable for local testing)
+# - CA Subject: CN=localhost-ca
+#
+# DOCKER COMPOSE INTEGRATION:
+# Use with docker-compose-ssl.yml that mounts ./ssl directory to
+# /etc/kafka/secrets inside the Kafka container.
+#
+# RDKAFKA CLIENT CONFIGURATION:
+# security.protocol=SSL
+# ssl.ca.location=./ssl/ca-cert
+# ssl.endpoint.identification.algorithm=none  # For localhost testing
+#
+# NOTES:
+# - Safe to run multiple times (cleans up existing files)
+# - Certificates are self-signed and suitable for testing only
+# - For production, use certificates signed by a trusted CA
+# - All passwords are set to "confluent" for simplicity
+#
+#==============================================================================
+
+# Create ssl directory and clean up any existing files
+mkdir -p ssl
+cd ssl
+
+# Clean up existing files
+rm -f kafka.server.keystore.jks kafka.server.truststore.jks
+rm -f kafka_keystore_creds kafka_truststore_creds kafka_ssl_key_creds
+rm -f ca-key ca-cert cert-file cert-signed ca-cert.srl ca-cert.pem
+
+echo "Cleaned up existing SSL files..."
+
+# Set variables
+VALIDITY_DAYS=365
+PASSWORD="confluent" # Use a simple, well-known password
+DNAME="CN=localhost,OU=Test,O=Test,L=Test,ST=Test,C=US"
+
+# Create password files (all same password for simplicity)
+echo "$PASSWORD" > kafka_keystore_creds
+echo "$PASSWORD" > kafka_truststore_creds
+echo "$PASSWORD" > kafka_ssl_key_creds
+
+# Step 1: Generate CA key and certificate
+openssl req -new -x509 -keyout ca-key -out ca-cert -days $VALIDITY_DAYS -subj "/CN=localhost-ca/OU=Test/O=Test/L=Test/S=Test/C=US" -passin pass:$PASSWORD -passout pass:$PASSWORD
+
+# Step 2: Create truststore and import the CA certificate
+keytool -keystore kafka.server.truststore.jks -alias CARoot -import -file ca-cert -storepass $PASSWORD -keypass $PASSWORD -noprompt
+
+# Step 3: Create keystore
+keytool -keystore kafka.server.keystore.jks -alias localhost -validity $VALIDITY_DAYS -genkey -keyalg RSA -dname "$DNAME" -storepass $PASSWORD -keypass $PASSWORD
+
+# Step 4: Create certificate signing request
+keytool -keystore kafka.server.keystore.jks -alias localhost -certreq -file cert-file -storepass $PASSWORD -keypass $PASSWORD
+
+# Step 5: Sign the certificate with the CA
+openssl x509 -req -CA ca-cert -CAkey ca-key -in cert-file -out cert-signed -days $VALIDITY_DAYS -CAcreateserial -passin pass:$PASSWORD
+
+# Step 6: Import CA certificate into keystore
+keytool -keystore kafka.server.keystore.jks -alias CARoot -import -file ca-cert -storepass $PASSWORD -keypass $PASSWORD -noprompt
+
+# Step 7: Import signed certificate into keystore
+keytool -keystore kafka.server.keystore.jks -alias localhost -import -file cert-signed -storepass $PASSWORD -keypass $PASSWORD -noprompt
+
+# Export CA certificate to PEM format for rdkafka
+cp ca-cert ca-cert.pem
+
+# Clean up intermediate files (but keep ca-cert.pem for rdkafka)
+rm ca-key cert-file cert-signed
+
+echo "SSL certificates generated successfully!"
+echo "Password: $PASSWORD"
+echo ""
+echo "For rdkafka, use ca-cert.pem or ca-cert files"
data/ext/librdkafka.dylib CHANGED
Binary file
data/karafka-rdkafka.gemspec CHANGED
@@ -41,6 +41,7 @@ Gem::Specification.new do |gem|
   end
 
   gem.add_dependency 'ffi', '~> 1.15'
+  gem.add_dependency 'json', '> 2.0'
   gem.add_dependency 'logger'
   gem.add_dependency 'mini_portile2', '~> 2.6'
   gem.add_dependency 'rake', '> 12'
@@ -50,6 +51,7 @@ Gem::Specification.new do |gem|
   gem.add_development_dependency 'rspec', '~> 3.5'
   gem.add_development_dependency 'rake'
   gem.add_development_dependency 'simplecov'
+  gem.add_development_dependency 'warning'
 
   gem.metadata = {
     'funding_uri' => 'https://karafka.io/#become-pro',
@@ -160,7 +160,6 @@ module Rdkafka
160
160
  attach_function :rd_kafka_error_is_retriable, [:pointer], :int
161
161
  attach_function :rd_kafka_error_txn_requires_abort, [:pointer], :int
162
162
  attach_function :rd_kafka_error_destroy, [:pointer], :void
163
- attach_function :rd_kafka_error_code, [:pointer], :int
164
163
  attach_function :rd_kafka_get_err_descs, [:pointer, :pointer], :void
165
164
 
166
165
  # Configuration
data/lib/rdkafka/consumer.rb CHANGED
@@ -344,7 +344,7 @@ module Rdkafka
       topic_out = {}
       partitions.each do |p|
         next if p.offset.nil?
-        _, high = query_watermark_offsets(
+        _low, high = query_watermark_offsets(
           topic,
           p.partition,
           watermark_timeout_ms
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.21.0.rc1"
+  VERSION = "0.21.0.rc2"
   LIBRDKAFKA_VERSION = "2.11.0"
   LIBRDKAFKA_SOURCE_SHA256 = "592a823dc7c09ad4ded1bc8f700da6d4e0c88ffaf267815c6f25e7450b9395ca"
 end
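Since this is a prerelease, Bundler will not select it from an unpinned `gem 'karafka-rdkafka'` entry; the rc has to be pinned explicitly, e.g.:

```ruby
# Gemfile: prerelease versions must be requested explicitly.
gem 'karafka-rdkafka', '0.21.0.rc2'
```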
data/spec/integrations/ssl_stress_spec.rb ADDED
@@ -0,0 +1,121 @@
+# ssl_stress_spec.rb
+#
+# This script is designed to stress-test the OpenSSL SSL/TLS layer under high concurrency
+# to help detect regressions like the one described in OpenSSL issue #28171:
+# https://github.com/openssl/openssl/issues/28171
+#
+# Issue summary:
+# - OpenSSL 3.0.17 introduced a concurrency-related regression.
+# - Multiple threads sharing the same SSL_CTX and making parallel TLS connections
+#   (often with certificate verification enabled) can cause segmentation faults
+#   due to race conditions in X509 store handling.
+# - Affected users include Python (httpx), Rust (reqwest, native-tls), and C applications.
+#
+# Script details:
+# - Starts 150 SSL servers using self-signed, in-memory certs on sequential localhost ports.
+# - Uses `rdkafka-ruby` to spin up batches of 100 consumers that continuously create and
+#   destroy SSL connections to these servers for a given duration.
+# - This mimics high TLS connection churn and aims to trigger latent SSL_CTX or X509_STORE
+#   threading bugs like double-frees, memory corruption, or segmentation faults.
+#
+# Goal:
+# - Catch regressions early by validating that heavy concurrent SSL use does not lead to crashes.
+# - Provide a minimal and repeatable reproducer when diagnosing OpenSSL-level SSL instability.
+#
+# On failure, the process segfaults.
+
+require 'rdkafka'
+require 'socket'
+require 'openssl'
+
+$stdout.sync = true
+
+STARTING_PORT = 19093
+NUM_PORTS = 150
+BATCHES = 100
+PORTS = STARTING_PORT...(STARTING_PORT + NUM_PORTS)
+
+CONFIG = {
+  'bootstrap.servers': Array.new(NUM_PORTS) { |i| "127.0.0.1:#{19093+i}" }.join(','),
+  'security.protocol': 'SSL',
+  'enable.ssl.certificate.verification': false
+}
+
+# Generate in-memory self-signed cert
+key = OpenSSL::PKey::RSA.new(2048)
+
+name = OpenSSL::X509::Name.parse("/CN=127.0.0.1")
+cert = OpenSSL::X509::Certificate.new
+cert.version = 2
+cert.serial = 1
+cert.subject = name
+cert.issuer = name
+cert.public_key = key.public_key
+cert.not_before = Time.now
+cert.not_after = Time.now + 3600
+cert.sign(key, OpenSSL::Digest::SHA256.new)
+
+# Start servers on multiple ports
+PORTS.map do |port|
+  Thread.new do
+    # Prepare SSL context
+    # We do not use a shared context for the server because the goal is to stress the librdkafka
+    # layer and not the Ruby SSL layer
+    ssl_context = OpenSSL::SSL::SSLContext.new
+    ssl_context.cert = cert
+    ssl_context.key = key
+
+    tcp_server = TCPServer.new('127.0.0.1', port)
+    ssl_server = OpenSSL::SSL::SSLServer.new(tcp_server, ssl_context)
+
+    loop do
+      begin
+        ssl_socket = ssl_server.accept
+        ssl_socket.close
+      rescue => e
+        # Some errors are expected and irrelevant
+        next if e.message.include?('unexpected eof while reading')
+
+        puts "Port #{port} SSL error: #{e}"
+      end
+    end
+  end
+end
+
+puts "Starting #{NUM_PORTS} SSL servers on ports #{PORTS.first} to #{PORTS.last}"
+
+timeout = 30
+start = Time.now
+
+# Wait for the servers to be available
+# We want to make sure that they are available so we are sure that librdkafka actually hammers
+# them
+loop do
+  all_up = PORTS.all? do |port|
+    TCPSocket.new('127.0.0.1', port).close
+    true
+  rescue
+    false
+  end
+
+  break if all_up
+
+  raise "Timeout waiting for SSL servers" if Time.now - start > timeout
+
+  sleep 0.1
+end
+
+puts "SSL servers ready"
+
+start_time = Time.now
+duration = 60 * 10 # 10 minutes - it should crash faster than that if the SSL layer is vulnerable
+attempts = 0
+
+while Time.now - start_time < duration do
+  configs = Array.new(BATCHES) { Rdkafka::Config.new(CONFIG) }
+  consumers = configs.map(&:consumer)
+  # This print is needed. No idea why, but it increases the chances of a segfault
+  p attempts += 1
+  sleep(1)
+  consumers.each(&:close)
+end
data/spec/{rdkafka → lib/rdkafka}/admin_spec.rb CHANGED
@@ -518,17 +518,17 @@ describe Rdkafka::Admin do
   before do
     # create topic for testing acl
     create_topic_handle = admin.create_topic(resource_name, topic_partition_count, topic_replication_factor)
-    create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
+    create_topic_handle.wait(max_wait_timeout: 15.0)
   end
 
   after do
     # delete acl
     delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
-    delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
+    delete_acl_handle.wait(max_wait_timeout: 15.0)
 
     # delete topic that was created for testing acl
     delete_topic_handle = admin.delete_topic(resource_name)
-    delete_topic_report = delete_topic_handle.wait(max_wait_timeout: 15.0)
+    delete_topic_handle.wait(max_wait_timeout: 15.0)
   end
 
   describe "#create_acl" do
@@ -876,7 +876,17 @@ describe Rdkafka::Admin do
   end
 
   describe '#create_partitions' do
-    let(:metadata) { admin.metadata(topic_name).topics.first }
+    let(:metadata) do
+      begin
+        admin.metadata(topic_name).topics.first
+      rescue Rdkafka::RdkafkaError
+        # We have to wait because if we query too fast after a topic creation request, the
+        # topic may not yet be available, raising an error.
+        # This occurs mostly on slow CIs
+        sleep(1)
+        admin.metadata(topic_name).topics.first
+      end
+    end
 
     context 'when topic does not exist' do
       it 'expect to fail due to unknown partition' do
@@ -898,6 +908,8 @@ describe Rdkafka::Admin do
 
       it 'expect not to change number of partitions' do
        expect { admin.create_partitions(topic_name, 2).wait }.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
+        # On slow CI this may still be propagating, thus we wait a bit
+        sleep(1)
        expect(metadata[:partition_count]).to eq(5)
      end
    end
data/spec/{rdkafka → lib/rdkafka}/consumer_spec.rb CHANGED
@@ -175,6 +175,7 @@ describe Rdkafka::Consumer do
   before do
     admin = rdkafka_producer_config.admin
     admin.create_topic(topic, 1, 1).wait
+    wait_for_topic(admin, topic)
     admin.close
   end
 
@@ -278,6 +279,7 @@ describe Rdkafka::Consumer do
   before do
     admin = rdkafka_producer_config.admin
     admin.create_topic(topic, 1, 1).wait
+    wait_for_topic(admin, topic)
     admin.close
   end
 
@@ -426,7 +428,7 @@ describe Rdkafka::Consumer do
   describe '#assignment_lost?' do
     it "should not return true as we do have an assignment" do
       consumer.subscribe("consume_test_topic")
-      expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+      Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
         list.add_topic("consume_test_topic")
       end
 
@@ -436,7 +438,7 @@ describe Rdkafka::Consumer do
 
     it "should not return true after voluntary unsubscribing" do
       consumer.subscribe("consume_test_topic")
-      expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+      Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
         list.add_topic("consume_test_topic")
       end
 
@@ -1276,7 +1278,6 @@ describe Rdkafka::Consumer do
 
     # Consume to the end
     consumer.subscribe("consume_test_topic")
-    eof_count = 0
     eof_error = nil
 
     loop do
@@ -1293,4 +1294,50 @@ describe Rdkafka::Consumer do
       expect(eof_error.code).to eq(:partition_eof)
     end
   end
+
+  describe "long running consumption" do
+    let(:consumer) { rdkafka_consumer_config.consumer }
+    let(:producer) { rdkafka_producer_config.producer }
+
+    after { consumer.close }
+    after { producer.close }
+
+    it "should consume messages continuously for 60 seconds" do
+      consumer.subscribe("consume_test_topic")
+      wait_for_assignment(consumer)
+
+      messages_consumed = 0
+      start_time = Time.now
+
+      # Producer thread - sends a message every second
+      producer_thread = Thread.new do
+        counter = 0
+        while Time.now - start_time < 60
+          producer.produce(
+            topic: "consume_test_topic",
+            payload: "payload #{counter}",
+            key: "key #{counter}",
+            partition: 0
+          ).wait
+          counter += 1
+          sleep(1)
+        end
+      end
+
+      # Consumer loop
+      while Time.now - start_time < 60
+        message = consumer.poll(1000)
+        if message
+          expect(message).to be_a Rdkafka::Consumer::Message
+          expect(message.topic).to eq("consume_test_topic")
+          messages_consumed += 1
+          consumer.commit if messages_consumed % 10 == 0
+        end
+      end
+
+      producer_thread.join
+
+      expect(messages_consumed).to be > 50 # Should consume most messages
+    end
+  end
 end
data/spec/{rdkafka → lib/rdkafka}/metadata_spec.rb CHANGED
@@ -31,7 +31,7 @@ describe Rdkafka::Metadata do
     expect(subject.brokers.length).to eq(1)
     expect(subject.brokers[0][:broker_id]).to eq(1)
     expect(%w[127.0.0.1 localhost]).to include(subject.brokers[0][:broker_name])
-    expect(subject.brokers[0][:broker_port]).to eq(9092)
+    expect(subject.brokers[0][:broker_port]).to eq(rdkafka_base_config[:'bootstrap.servers'].split(':').last.to_i)
   end
 
   it "#topics returns data on our test topic" do
@@ -54,7 +54,7 @@ describe Rdkafka::Metadata do
     expect(subject.brokers.length).to eq(1)
     expect(subject.brokers[0][:broker_id]).to eq(1)
     expect(%w[127.0.0.1 localhost]).to include(subject.brokers[0][:broker_name])
-    expect(subject.brokers[0][:broker_port]).to eq(9092)
+    expect(subject.brokers[0][:broker_port]).to eq(rdkafka_base_config[:'bootstrap.servers'].split(':').last.to_i)
   end
 
   it "#topics returns data about all of our test topics" do
data/spec/{rdkafka → lib/rdkafka}/producer/delivery_report_spec.rb CHANGED
@@ -20,6 +20,6 @@ describe Rdkafka::Producer::DeliveryReport do
   end
 
   it "should get the error" do
-    expect(subject.error).to eq -1
+    expect(subject.error).to eq(-1)
   end
 end
data/spec/{rdkafka → lib/rdkafka}/producer_spec.rb CHANGED
@@ -663,7 +663,7 @@ describe Rdkafka::Producer do
   context "when not being able to deliver the message" do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "127.0.0.1:9093",
+        "bootstrap.servers": "127.0.0.1:9095",
         "message.timeout.ms": 100
       ).producer
     end
@@ -779,14 +779,15 @@ describe Rdkafka::Producer do
   context 'when it cannot flush due to a timeout' do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "127.0.0.1:9093",
+        "bootstrap.servers": "127.0.0.1:9095",
         "message.timeout.ms": 2_000
       ).producer
     end
 
     after do
       # Allow rdkafka to evict message preventing memory-leak
-      sleep(2)
+      # We give it a bit more time as on slow CIs things take time
+      sleep(5)
     end
 
     it "should return false on flush when cannot deliver and beyond timeout" do
@@ -826,7 +827,7 @@ describe Rdkafka::Producer do
   context 'when there are outgoing things in the queue' do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "127.0.0.1:9093",
+        "bootstrap.servers": "127.0.0.1:9095",
         "message.timeout.ms": 2_000
       ).producer
     end
@@ -862,7 +863,7 @@ describe Rdkafka::Producer do
   before { producer.delivery_callback = delivery_callback }
 
   it "should run the callback" do
-    handle = producer.produce(
+    producer.produce(
       topic: "produce_test_topic",
       payload: "payload headers"
     )
data/spec/spec_helper.rb CHANGED
@@ -1,5 +1,19 @@
 # frozen_string_literal: true
 
+Warning[:performance] = true if RUBY_VERSION >= '3.3'
+Warning[:deprecated] = true
+$VERBOSE = true
+
+require 'warning'
+
+Warning.process do |warning|
+  next unless warning.include?(Dir.pwd)
+  # Allow OpenStruct usage only in specs
+  next if warning.include?('OpenStruct use') && warning.include?('_spec')
+
+  raise "Warning in your code: #{warning}"
+end
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -14,12 +28,25 @@ require "timeout"
14
28
  require "securerandom"
15
29
 
16
30
  def rdkafka_base_config
17
- {
18
- :"bootstrap.servers" => "localhost:9092",
19
- # Display statistics and refresh often just to cover those in specs
20
- :'statistics.interval.ms' => 1_000,
21
- :'topic.metadata.refresh.interval.ms' => 1_000
22
- }
31
+ if ENV['KAFKA_SSL_ENABLED'] == 'true'
32
+ {
33
+ :"bootstrap.servers" => "localhost:9093",
34
+ # Display statistics and refresh often just to cover those in specs
35
+ :'statistics.interval.ms' => 1_000,
36
+ :'topic.metadata.refresh.interval.ms' => 1_000,
37
+ # SSL Configuration
38
+ :'security.protocol' => 'SSL',
39
+ :'ssl.ca.location' => './ssl/ca-cert',
40
+ :'ssl.endpoint.identification.algorithm' => 'none'
41
+ }
42
+ else
43
+ {
44
+ :"bootstrap.servers" => "localhost:9092",
45
+ # Display statistics and refresh often just to cover those in specs
46
+ :'statistics.interval.ms' => 1_000,
47
+ :'topic.metadata.refresh.interval.ms' => 1_000
48
+ }
49
+ end
23
50
  end
24
51
 
25
52
  def rdkafka_config(config_overrides={})
@@ -122,6 +149,16 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def wait_for_topic(admin, topic)
+  admin.metadata(topic)
+rescue Rdkafka::RdkafkaError => e
+  raise unless e.code == :unknown_topic_or_part
+
+  sleep(0.5)
+
+  retry
+end
+
 def notify_listener(listener, &block)
   # 1. subscribe and poll
   consumer.subscribe("consume_test_topic")
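The new `wait_for_topic` helper is what the consumer specs above call right after topic creation. As a usage sketch (the topic name is illustrative), the pattern is:

```ruby
# Guard freshly created topics before using them, mirroring the consumer specs.
admin = rdkafka_producer_config.admin
admin.create_topic('example_topic', 1, 1).wait
wait_for_topic(admin, 'example_topic') # retries metadata until the topic exists
admin.close
```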
@@ -169,9 +206,9 @@ RSpec.configure do |config|
   end
 
   config.around(:each) do |example|
-    # Timeout specs after a minute. If they take longer
+    # Timeout specs after 1.5 minutes. If they take longer
     # they are probably stuck
-    Timeout::timeout(60) do
+    Timeout::timeout(90) do
       example.run
     end
   end