rdkafka 0.21.0 → 0.22.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/workflows/ci_linux_x86_64_gnu.yml +249 -0
  4. data/.github/workflows/ci_linux_x86_64_musl.yml +205 -0
  5. data/.github/workflows/ci_macos_arm64.yml +306 -0
  6. data/.github/workflows/push_linux_x86_64_gnu.yml +64 -0
  7. data/.github/workflows/push_linux_x86_64_musl.yml +77 -0
  8. data/.github/workflows/push_macos_arm64.yml +54 -0
  9. data/.github/workflows/push_ruby.yml +37 -0
  10. data/.github/workflows/verify-action-pins.yml +16 -0
  11. data/.ruby-version +1 -1
  12. data/CHANGELOG.md +17 -0
  13. data/README.md +2 -1
  14. data/Rakefile +0 -2
  15. data/docker-compose.yml +1 -1
  16. data/ext/Rakefile +1 -1
  17. data/ext/build_common.sh +361 -0
  18. data/ext/build_linux_x86_64_gnu.sh +306 -0
  19. data/ext/build_linux_x86_64_musl.sh +763 -0
  20. data/ext/build_macos_arm64.sh +550 -0
  21. data/lib/rdkafka/bindings.rb +30 -3
  22. data/lib/rdkafka/config.rb +8 -4
  23. data/lib/rdkafka/consumer/headers.rb +14 -3
  24. data/lib/rdkafka/native_kafka.rb +8 -2
  25. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  26. data/lib/rdkafka/producer.rb +59 -35
  27. data/lib/rdkafka/version.rb +1 -1
  28. data/lib/rdkafka.rb +1 -0
  29. data/rdkafka.gemspec +27 -8
  30. data/renovate.json +87 -1
  31. data/spec/rdkafka/admin_spec.rb +27 -11
  32. data/spec/rdkafka/bindings_spec.rb +0 -9
  33. data/spec/rdkafka/config_spec.rb +17 -15
  34. data/spec/rdkafka/consumer/headers_spec.rb +26 -10
  35. data/spec/rdkafka/consumer_spec.rb +74 -15
  36. data/spec/rdkafka/metadata_spec.rb +2 -2
  37. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  38. data/spec/rdkafka/producer_spec.rb +237 -7
  39. data/spec/spec_helper.rb +30 -7
  40. metadata +45 -87
  41. checksums.yaml.gz.sig +0 -0
  42. data/.github/workflows/ci.yml +0 -83
  43. data/Guardfile +0 -19
  44. data/certs/cert.pem +0 -26
  45. data.tar.gz.sig +0 -0
  46. metadata.gz.sig +0 -0
@@ -53,7 +53,7 @@ describe Rdkafka::Producer do
53
53
  let(:producer) do
54
54
  rdkafka_producer_config(
55
55
  'message.timeout.ms': 1_000_000,
56
- :"bootstrap.servers" => "localhost:9094",
56
+ :"bootstrap.servers" => "127.0.0.1:9094",
57
57
  ).producer
58
58
  end
59
59
 
@@ -364,6 +364,48 @@ describe Rdkafka::Producer do
364
364
  expect(message.key).to eq "key utf8"
365
365
  end
366
366
 
367
+ it "should produce a message to a non-existing topic with key and partition key" do
368
+ new_topic = "it-#{SecureRandom.uuid}"
369
+
370
+ handle = producer.produce(
371
+ # Needs to be a new topic each time
372
+ topic: new_topic,
373
+ payload: "payload",
374
+ key: "key",
375
+ partition_key: "partition_key",
376
+ label: "label"
377
+ )
378
+
379
+ # Should be pending at first
380
+ expect(handle.pending?).to be true
381
+ expect(handle.label).to eq "label"
382
+
383
+ # Check delivery handle and report
384
+ report = handle.wait(max_wait_timeout: 5)
385
+ expect(handle.pending?).to be false
386
+ expect(report).not_to be_nil
387
+ expect(report.partition).to eq 0
388
+ expect(report.offset).to be >= 0
389
+ expect(report.label).to eq "label"
390
+
391
+ # Flush and close producer
392
+ producer.flush
393
+ producer.close
394
+
395
+ # Consume message and verify its content
396
+ message = wait_for_message(
397
+ topic: new_topic,
398
+ delivery_report: report,
399
+ consumer: consumer
400
+ )
401
+ expect(message.partition).to eq 0
402
+ expect(message.payload).to eq "payload"
403
+ expect(message.key).to eq "key"
404
+ # Since api.version.request is on by default we will get
405
+ # the message creation timestamp if it's not set.
406
+ expect(message.timestamp).to be_within(10).of(Time.now)
407
+ end
408
+
367
409
  context "timestamp" do
368
410
  it "should raise a type error if not nil, integer or time" do
369
411
  expect {
@@ -623,7 +665,7 @@ describe Rdkafka::Producer do
623
665
  context "when not being able to deliver the message" do
624
666
  let(:producer) do
625
667
  rdkafka_producer_config(
626
- "bootstrap.servers": "localhost:9093",
668
+ "bootstrap.servers": "127.0.0.1:9093",
627
669
  "message.timeout.ms": 100
628
670
  ).producer
629
671
  end
@@ -637,6 +679,25 @@ describe Rdkafka::Producer do
637
679
  end
638
680
  end
639
681
 
682
+ context "when topic does not exist and allow.auto.create.topics is false" do
683
+ let(:producer) do
684
+ rdkafka_producer_config(
685
+ "bootstrap.servers": "127.0.0.1:9092",
686
+ "message.timeout.ms": 100,
687
+ "allow.auto.create.topics": false
688
+ ).producer
689
+ end
690
+
691
+ it "should contain the error in the response when not deliverable" do
692
+ handler = producer.produce(topic: "it-#{SecureRandom.uuid}", payload: nil, label: 'na')
693
+ # Wait for the async callbacks and delivery registry to update
694
+ sleep(2)
695
+ expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
696
+ expect(handler.create_result.error.code).to eq(:msg_timed_out)
697
+ expect(handler.create_result.label).to eq('na')
698
+ end
699
+ end
700
+
640
701
  describe '#partition_count' do
641
702
  it { expect(producer.partition_count('consume_test_topic')).to eq(3) }
642
703
 
@@ -654,12 +715,11 @@ describe Rdkafka::Producer do
654
715
 
655
716
  context 'when the partition count value was cached but time expired' do
656
717
  before do
657
- allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
658
- producer.partition_count('consume_test_topic')
718
+ ::Rdkafka::Producer.partitions_count_cache = Rdkafka::Producer::PartitionsCountCache.new
659
719
  allow(::Rdkafka::Metadata).to receive(:new).and_call_original
660
720
  end
661
721
 
662
- it 'expect not to query it again' do
722
+ it 'expect to query it again' do
663
723
  producer.partition_count('consume_test_topic')
664
724
  expect(::Rdkafka::Metadata).to have_received(:new)
665
725
  end
@@ -694,7 +754,7 @@ describe Rdkafka::Producer do
694
754
  context 'when it cannot flush due to a timeout' do
695
755
  let(:producer) do
696
756
  rdkafka_producer_config(
697
- "bootstrap.servers": "localhost:9093",
757
+ "bootstrap.servers": "127.0.0.1:9093",
698
758
  "message.timeout.ms": 2_000
699
759
  ).producer
700
760
  end
@@ -741,7 +801,7 @@ describe Rdkafka::Producer do
741
801
  context 'when there are outgoing things in the queue' do
742
802
  let(:producer) do
743
803
  rdkafka_producer_config(
744
- "bootstrap.servers": "localhost:9093",
804
+ "bootstrap.servers": "127.0.0.1:9093",
745
805
  "message.timeout.ms": 2_000
746
806
  ).producer
747
807
  end
@@ -819,4 +879,174 @@ describe Rdkafka::Producer do
819
879
  end
820
880
  end
821
881
  end
882
+
883
+ describe "#produce with headers" do
884
+ it "should produce a message with array headers" do
885
+ headers = {
886
+ "version" => ["2.1.3", "2.1.4"],
887
+ "type" => "String"
888
+ }
889
+
890
+ report = producer.produce(
891
+ topic: "consume_test_topic",
892
+ key: "key headers",
893
+ headers: headers
894
+ ).wait
895
+
896
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
897
+ expect(message).to be
898
+ expect(message.key).to eq('key headers')
899
+ expect(message.headers['type']).to eq('String')
900
+ expect(message.headers['version']).to eq(["2.1.3", "2.1.4"])
901
+ end
902
+
903
+ it "should produce a message with single value headers" do
904
+ headers = {
905
+ "version" => "2.1.3",
906
+ "type" => "String"
907
+ }
908
+
909
+ report = producer.produce(
910
+ topic: "consume_test_topic",
911
+ key: "key headers",
912
+ headers: headers
913
+ ).wait
914
+
915
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
916
+ expect(message).to be
917
+ expect(message.key).to eq('key headers')
918
+ expect(message.headers['type']).to eq('String')
919
+ expect(message.headers['version']).to eq('2.1.3')
920
+ end
921
+ end
922
+
923
+
924
+ describe 'with active statistics callback' do
925
+ let(:producer) do
926
+ rdkafka_producer_config('statistics.interval.ms': 1_000).producer
927
+ end
928
+
929
+ let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
930
+ let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
931
+ let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
932
+
933
+ context "when using partition key" do
934
+ before do
935
+ Rdkafka::Config.statistics_callback = ->(*) {}
936
+
937
+ # This call will make a blocking request to the metadata cache
938
+ producer.produce(
939
+ topic: "produce_test_topic",
940
+ payload: "payload headers",
941
+ partition_key: "test"
942
+ ).wait
943
+
944
+ pre_statistics_ttl
945
+
946
+ # We wait to make sure that statistics are triggered and that there is a refresh
947
+ sleep(1.5)
948
+
949
+ post_statistics_ttl
950
+ end
951
+
952
+ it 'expect to update ttl on the partitions count cache via statistics' do
953
+ expect(pre_statistics_ttl).to be < post_statistics_ttl
954
+ end
955
+ end
956
+
957
+ context "when not using partition key" do
958
+ before do
959
+ Rdkafka::Config.statistics_callback = ->(*) {}
960
+
961
+ # This call will make a blocking request to the metadata cache
962
+ producer.produce(
963
+ topic: "produce_test_topic",
964
+ payload: "payload headers"
965
+ ).wait
966
+
967
+ pre_statistics_ttl
968
+
969
+ # We wait to make sure that statistics are triggered and that there is a refresh
970
+ sleep(1.5)
971
+
972
+ # This will anyhow be populated from statistic
973
+ post_statistics_ttl
974
+ end
975
+
976
+ it 'expect not to update ttl on the partitions count cache via blocking but via use stats' do
977
+ expect(pre_statistics_ttl).to be_nil
978
+ expect(post_statistics_ttl).not_to be_nil
979
+ end
980
+ end
981
+ end
982
+
983
+ describe 'without active statistics callback' do
984
+ let(:producer) do
985
+ rdkafka_producer_config('statistics.interval.ms': 1_000).producer
986
+ end
987
+
988
+ let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
989
+ let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
990
+ let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
991
+
992
+ context "when using partition key" do
993
+ before do
994
+ # This call will make a blocking request to the metadata cache
995
+ producer.produce(
996
+ topic: "produce_test_topic",
997
+ payload: "payload headers",
998
+ partition_key: "test"
999
+ ).wait
1000
+
1001
+ pre_statistics_ttl
1002
+
1003
+ # We wait to make sure that statistics are triggered and that there is a refresh
1004
+ sleep(1.5)
1005
+
1006
+ post_statistics_ttl
1007
+ end
1008
+
1009
+ it 'expect not to update ttl on the partitions count cache via statistics' do
1010
+ expect(pre_statistics_ttl).to eq post_statistics_ttl
1011
+ end
1012
+ end
1013
+
1014
+ context "when not using partition key" do
1015
+ before do
1016
+ # This call will make a blocking request to the metadata cache
1017
+ producer.produce(
1018
+ topic: "produce_test_topic",
1019
+ payload: "payload headers"
1020
+ ).wait
1021
+
1022
+ pre_statistics_ttl
1023
+
1024
+ # We wait to make sure that statistics are triggered and that there is a refresh
1025
+ sleep(1.5)
1026
+
1027
+ # This should not be populated because stats are not in use
1028
+ post_statistics_ttl
1029
+ end
1030
+
1031
+ it 'expect not to update ttl on the partitions count cache via anything' do
1032
+ expect(pre_statistics_ttl).to be_nil
1033
+ expect(post_statistics_ttl).to be_nil
1034
+ end
1035
+ end
1036
+ end
1037
+
1038
+ describe 'with other fiber closing' do
1039
+ context 'when we create many fibers and close producer in some of them' do
1040
+ it 'expect not to crash ruby' do
1041
+ 10.times do |i|
1042
+ producer = rdkafka_producer_config.producer
1043
+
1044
+ Fiber.new do
1045
+ GC.start
1046
+ producer.close
1047
+ end.resume
1048
+ end
1049
+ end
1050
+ end
1051
+ end
822
1052
  end
data/spec/spec_helper.rb CHANGED
@@ -17,7 +17,10 @@ def rdkafka_base_config
17
17
  {
18
18
  :"api.version.request" => false,
19
19
  :"broker.version.fallback" => "1.0",
20
- :"bootstrap.servers" => "localhost:9092",
20
+ :"bootstrap.servers" => "127.0.0.1:9092",
21
+ # Display statistics and refresh often just to cover those in specs
22
+ :'statistics.interval.ms' => 1_000,
23
+ :'topic.metadata.refresh.interval.ms' => 1_000
21
24
  }
22
25
  end
23
26
 
@@ -75,18 +78,32 @@ end
75
78
 
76
79
  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
77
80
  new_consumer = consumer.nil?
78
- consumer ||= rdkafka_consumer_config.consumer
81
+ consumer ||= rdkafka_consumer_config('allow.auto.create.topics': true).consumer
79
82
  consumer.subscribe(topic)
80
83
  timeout = Time.now.to_i + timeout_in_seconds
84
+ retry_count = 0
85
+ max_retries = 10
86
+
81
87
  loop do
82
88
  if timeout <= Time.now.to_i
83
89
  raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
84
90
  end
85
- message = consumer.poll(100)
86
- if message &&
87
- message.partition == delivery_report.partition &&
88
- message.offset == delivery_report.offset
89
- return message
91
+
92
+ begin
93
+ message = consumer.poll(100)
94
+ if message &&
95
+ message.partition == delivery_report.partition &&
96
+ message.offset == delivery_report.offset
97
+ return message
98
+ end
99
+ rescue Rdkafka::RdkafkaError => e
100
+ if e.code == :unknown_topic_or_part && retry_count < max_retries
101
+ retry_count += 1
102
+ sleep(0.1) # Small delay before retry
103
+ next
104
+ else
105
+ raise
106
+ end
90
107
  end
91
108
  end
92
109
  ensure
@@ -125,6 +142,12 @@ RSpec.configure do |config|
125
142
  config.filter_run focus: true
126
143
  config.run_all_when_everything_filtered = true
127
144
 
145
+ config.before(:each) do
146
+ Rdkafka::Config.statistics_callback = nil
147
+ # We need to clear it so state does not leak between specs
148
+ Rdkafka::Producer.partitions_count_cache.to_h.clear
149
+ end
150
+
128
151
  config.before(:suite) do
129
152
  admin = rdkafka_config.admin
130
153
  {
metadata CHANGED
@@ -1,41 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: rdkafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.21.0
4
+ version: 0.22.0.beta1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Thijs Cadier
8
8
  - Maciej Mensfeld
9
9
  bindir: bin
10
- cert_chain:
11
- - |
12
- -----BEGIN CERTIFICATE-----
13
- MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
14
- YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
15
- MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
16
- dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
17
- bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
18
- PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
19
- ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
20
- Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
21
- JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
22
- T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
23
- flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
24
- vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
25
- iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
26
- HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
27
- sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
28
- bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
29
- eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
30
- Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
31
- agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
32
- TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
33
- Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
34
- oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
35
- i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
36
- ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
37
- -----END CERTIFICATE-----
38
- date: 2025-02-13 00:00:00.000000000 Z
10
+ cert_chain: []
11
+ date: 1980-01-02 00:00:00.000000000 Z
39
12
  dependencies:
40
13
  - !ruby/object:Gem::Dependency
41
14
  name: ffi
@@ -51,6 +24,20 @@ dependencies:
51
24
  - - "~>"
52
25
  - !ruby/object:Gem::Version
53
26
  version: '1.15'
27
+ - !ruby/object:Gem::Dependency
28
+ name: logger
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ">="
32
+ - !ruby/object:Gem::Version
33
+ version: '0'
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ">="
39
+ - !ruby/object:Gem::Version
40
+ version: '0'
54
41
  - !ruby/object:Gem::Dependency
55
42
  name: mini_portile2
56
43
  requirement: !ruby/object:Gem::Requirement
@@ -80,7 +67,7 @@ dependencies:
80
67
  - !ruby/object:Gem::Version
81
68
  version: '12'
82
69
  - !ruby/object:Gem::Dependency
83
- name: pry
70
+ name: ostruct
84
71
  requirement: !ruby/object:Gem::Requirement
85
72
  requirements:
86
73
  - - ">="
@@ -94,21 +81,7 @@ dependencies:
94
81
  - !ruby/object:Gem::Version
95
82
  version: '0'
96
83
  - !ruby/object:Gem::Dependency
97
- name: rspec
98
- requirement: !ruby/object:Gem::Requirement
99
- requirements:
100
- - - "~>"
101
- - !ruby/object:Gem::Version
102
- version: '3.5'
103
- type: :development
104
- prerelease: false
105
- version_requirements: !ruby/object:Gem::Requirement
106
- requirements:
107
- - - "~>"
108
- - !ruby/object:Gem::Version
109
- version: '3.5'
110
- - !ruby/object:Gem::Dependency
111
- name: rake
84
+ name: pry
112
85
  requirement: !ruby/object:Gem::Requirement
113
86
  requirements:
114
87
  - - ">="
@@ -122,21 +95,21 @@ dependencies:
122
95
  - !ruby/object:Gem::Version
123
96
  version: '0'
124
97
  - !ruby/object:Gem::Dependency
125
- name: simplecov
98
+ name: rspec
126
99
  requirement: !ruby/object:Gem::Requirement
127
100
  requirements:
128
- - - ">="
101
+ - - "~>"
129
102
  - !ruby/object:Gem::Version
130
- version: '0'
103
+ version: '3.5'
131
104
  type: :development
132
105
  prerelease: false
133
106
  version_requirements: !ruby/object:Gem::Requirement
134
107
  requirements:
135
- - - ">="
108
+ - - "~>"
136
109
  - !ruby/object:Gem::Version
137
- version: '0'
110
+ version: '3.5'
138
111
  - !ruby/object:Gem::Dependency
139
- name: guard
112
+ name: rake
140
113
  requirement: !ruby/object:Gem::Requirement
141
114
  requirements:
142
115
  - - ">="
@@ -150,7 +123,7 @@ dependencies:
150
123
  - !ruby/object:Gem::Version
151
124
  version: '0'
152
125
  - !ruby/object:Gem::Dependency
153
- name: guard-rspec
126
+ name: simplecov
154
127
  requirement: !ruby/object:Gem::Requirement
155
128
  requirements:
156
129
  - - ">="
@@ -171,8 +144,16 @@ extensions:
171
144
  - ext/Rakefile
172
145
  extra_rdoc_files: []
173
146
  files:
147
+ - ".github/CODEOWNERS"
174
148
  - ".github/FUNDING.yml"
175
- - ".github/workflows/ci.yml"
149
+ - ".github/workflows/ci_linux_x86_64_gnu.yml"
150
+ - ".github/workflows/ci_linux_x86_64_musl.yml"
151
+ - ".github/workflows/ci_macos_arm64.yml"
152
+ - ".github/workflows/push_linux_x86_64_gnu.yml"
153
+ - ".github/workflows/push_linux_x86_64_musl.yml"
154
+ - ".github/workflows/push_macos_arm64.yml"
155
+ - ".github/workflows/push_ruby.yml"
156
+ - ".github/workflows/verify-action-pins.yml"
176
157
  - ".gitignore"
177
158
  - ".rspec"
178
159
  - ".ruby-gemset"
@@ -180,16 +161,18 @@ files:
180
161
  - ".yardopts"
181
162
  - CHANGELOG.md
182
163
  - Gemfile
183
- - Guardfile
184
164
  - MIT-LICENSE
185
165
  - README.md
186
166
  - Rakefile
187
- - certs/cert.pem
188
167
  - dist/librdkafka-2.8.0.tar.gz
189
168
  - dist/patches/rdkafka_global_init.patch
190
169
  - docker-compose.yml
191
170
  - ext/README.md
192
171
  - ext/Rakefile
172
+ - ext/build_common.sh
173
+ - ext/build_linux_x86_64_gnu.sh
174
+ - ext/build_linux_x86_64_musl.sh
175
+ - ext/build_macos_arm64.sh
193
176
  - lib/rdkafka.rb
194
177
  - lib/rdkafka/abstract_handle.rb
195
178
  - lib/rdkafka/admin.rb
@@ -230,6 +213,7 @@ files:
230
213
  - lib/rdkafka/producer.rb
231
214
  - lib/rdkafka/producer/delivery_handle.rb
232
215
  - lib/rdkafka/producer/delivery_report.rb
216
+ - lib/rdkafka/producer/partitions_count_cache.rb
233
217
  - lib/rdkafka/version.rb
234
218
  - rdkafka.gemspec
235
219
  - renovate.json
@@ -258,6 +242,7 @@ files:
258
242
  - spec/rdkafka/native_kafka_spec.rb
259
243
  - spec/rdkafka/producer/delivery_handle_spec.rb
260
244
  - spec/rdkafka/producer/delivery_report_spec.rb
245
+ - spec/rdkafka/producer/partitions_count_cache_spec.rb
261
246
  - spec/rdkafka/producer_spec.rb
262
247
  - spec/spec_helper.rb
263
248
  licenses:
@@ -265,10 +250,10 @@ licenses:
265
250
  metadata:
266
251
  funding_uri: https://karafka.io/#become-pro
267
252
  homepage_uri: https://karafka.io
268
- changelog_uri: https://github.com/karafka/rdkafka-ruby/blob/main/CHANGELOG.md
253
+ changelog_uri: https://karafka.io/docs/Changelog-Rdkafka
269
254
  bug_tracker_uri: https://github.com/karafka/rdkafka-ruby/issues
270
255
  source_code_uri: https://github.com/karafka/rdkafka-ruby
271
- documentation_uri: https://github.com/karafka/rdkafka-ruby/blob/main/README.md
256
+ documentation_uri: https://karafka.io/docs
272
257
  rubygems_mfa_required: 'true'
273
258
  rdoc_options: []
274
259
  require_paths:
@@ -284,36 +269,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
284
269
  - !ruby/object:Gem::Version
285
270
  version: '0'
286
271
  requirements: []
287
- rubygems_version: 3.6.2
272
+ rubygems_version: 3.6.7
288
273
  specification_version: 4
289
274
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
290
275
  It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
291
276
  and Ruby 2.7+.
292
- test_files:
293
- - spec/rdkafka/abstract_handle_spec.rb
294
- - spec/rdkafka/admin/create_acl_handle_spec.rb
295
- - spec/rdkafka/admin/create_acl_report_spec.rb
296
- - spec/rdkafka/admin/create_topic_handle_spec.rb
297
- - spec/rdkafka/admin/create_topic_report_spec.rb
298
- - spec/rdkafka/admin/delete_acl_handle_spec.rb
299
- - spec/rdkafka/admin/delete_acl_report_spec.rb
300
- - spec/rdkafka/admin/delete_topic_handle_spec.rb
301
- - spec/rdkafka/admin/delete_topic_report_spec.rb
302
- - spec/rdkafka/admin/describe_acl_handle_spec.rb
303
- - spec/rdkafka/admin/describe_acl_report_spec.rb
304
- - spec/rdkafka/admin_spec.rb
305
- - spec/rdkafka/bindings_spec.rb
306
- - spec/rdkafka/callbacks_spec.rb
307
- - spec/rdkafka/config_spec.rb
308
- - spec/rdkafka/consumer/headers_spec.rb
309
- - spec/rdkafka/consumer/message_spec.rb
310
- - spec/rdkafka/consumer/partition_spec.rb
311
- - spec/rdkafka/consumer/topic_partition_list_spec.rb
312
- - spec/rdkafka/consumer_spec.rb
313
- - spec/rdkafka/error_spec.rb
314
- - spec/rdkafka/metadata_spec.rb
315
- - spec/rdkafka/native_kafka_spec.rb
316
- - spec/rdkafka/producer/delivery_handle_spec.rb
317
- - spec/rdkafka/producer/delivery_report_spec.rb
318
- - spec/rdkafka/producer_spec.rb
319
- - spec/spec_helper.rb
277
+ test_files: []
checksums.yaml.gz.sig DELETED
Binary file
@@ -1,83 +0,0 @@
1
- name: ci
2
-
3
- concurrency:
4
- group: ${{ github.workflow }}-${{ github.ref }}
5
- cancel-in-progress: true
6
-
7
- on:
8
- pull_request:
9
- push:
10
- schedule:
11
- - cron: '0 1 * * *'
12
-
13
- env:
14
- BUNDLE_RETRY: 6
15
- BUNDLE_JOBS: 4
16
-
17
- jobs:
18
- specs:
19
- timeout-minutes: 30
20
- runs-on: ubuntu-latest
21
- strategy:
22
- fail-fast: false
23
- matrix:
24
- ruby:
25
- - '3.4'
26
- - '3.3'
27
- - '3.2'
28
- - '3.1'
29
- include:
30
- - ruby: '3.4'
31
- coverage: 'true'
32
- steps:
33
- - uses: actions/checkout@v4
34
- - name: Install package dependencies
35
- run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
36
-
37
- - name: Start Kafka with docker compose
38
- run: |
39
- docker compose up -d || (sleep 5 && docker compose up -d)
40
-
41
- - name: Set up Ruby
42
- uses: ruby/setup-ruby@v1
43
- with:
44
- ruby-version: ${{matrix.ruby}}
45
- bundler-cache: true
46
-
47
- - name: Run all specs
48
- env:
49
- GITHUB_COVERAGE: ${{matrix.coverage}}
50
-
51
- run: |
52
- set -e
53
- bundle install --path vendor/bundle
54
- cd ext && bundle exec rake
55
- cd ..
56
- bundle exec rspec
57
-
58
-
59
- macos_build:
60
- timeout-minutes: 30
61
- runs-on: macos-latest
62
- strategy:
63
- fail-fast: false
64
- matrix:
65
- ruby:
66
- - '3.4'
67
- - '3.3'
68
- - '3.2'
69
- - '3.1'
70
- steps:
71
- - uses: actions/checkout@v4
72
-
73
- - name: Set up Ruby
74
- uses: ruby/setup-ruby@v1
75
- with:
76
- ruby-version: ${{matrix.ruby}}
77
- bundler-cache: false
78
-
79
- - name: Build rdkafka-ruby
80
- run: |
81
- set -e
82
- bundle install --path vendor/bundle
83
- cd ext && bundle exec rake