karafka-rdkafka 0.20.0.rc1 → 0.20.0.rc5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_x86_64_gnu.yml +249 -0
  3. data/.github/workflows/ci_linux_x86_64_musl.yml +205 -0
  4. data/.github/workflows/ci_macos_arm64.yml +306 -0
  5. data/.github/workflows/push_linux_x86_64_gnu.yml +64 -0
  6. data/.github/workflows/push_linux_x86_64_musl.yml +77 -0
  7. data/.github/workflows/push_macos_arm64.yml +54 -0
  8. data/.github/workflows/push_ruby.yml +37 -0
  9. data/.gitignore +1 -0
  10. data/.ruby-version +1 -1
  11. data/CHANGELOG.md +22 -1
  12. data/README.md +2 -3
  13. data/Rakefile +0 -2
  14. data/dist/{librdkafka-2.10.0.tar.gz → librdkafka-2.8.0.tar.gz} +0 -0
  15. data/docker-compose.yml +1 -1
  16. data/ext/Rakefile +1 -1
  17. data/ext/build_common.sh +361 -0
  18. data/ext/build_linux_x86_64_gnu.sh +306 -0
  19. data/ext/build_linux_x86_64_musl.sh +763 -0
  20. data/ext/build_macos_arm64.sh +550 -0
  21. data/karafka-rdkafka.gemspec +26 -9
  22. data/lib/rdkafka/bindings.rb +31 -4
  23. data/lib/rdkafka/error.rb +8 -1
  24. data/lib/rdkafka/native_kafka.rb +4 -0
  25. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  26. data/lib/rdkafka/producer.rb +40 -28
  27. data/lib/rdkafka/version.rb +3 -3
  28. data/lib/rdkafka.rb +1 -0
  29. data/renovate.json +74 -0
  30. data/spec/rdkafka/admin_spec.rb +15 -2
  31. data/spec/rdkafka/bindings_spec.rb +0 -1
  32. data/spec/rdkafka/consumer_spec.rb +34 -13
  33. data/spec/rdkafka/metadata_spec.rb +2 -2
  34. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  35. data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
  36. data/spec/rdkafka/producer_spec.rb +196 -7
  37. data/spec/spec_helper.rb +10 -1
  38. metadata +45 -88
  39. checksums.yaml.gz.sig +0 -0
  40. data/.github/workflows/ci.yml +0 -99
  41. data/Guardfile +0 -19
  42. data/certs/cert.pem +0 -26
  43. data.tar.gz.sig +0 -0
  44. metadata.gz.sig +0 -0
@@ -53,7 +53,7 @@ describe Rdkafka::Producer do
   let(:producer) do
     rdkafka_producer_config(
       'message.timeout.ms': 1_000_000,
-      :"bootstrap.servers" => "localhost:9094",
+      :"bootstrap.servers" => "127.0.0.1:9094",
     ).producer
   end
 
@@ -364,6 +364,48 @@ describe Rdkafka::Producer do
     expect(message.key).to eq "key utf8"
   end
 
+  it "should produce a message to a non-existing topic with key and partition key" do
+    new_topic = "it-#{SecureRandom.uuid}"
+
+    handle = producer.produce(
+      # Needs to be a new topic each time
+      topic: new_topic,
+      payload: "payload",
+      key: "key",
+      partition_key: "partition_key",
+      label: "label"
+    )
+
+    # Should be pending at first
+    expect(handle.pending?).to be true
+    expect(handle.label).to eq "label"
+
+    # Check delivery handle and report
+    report = handle.wait(max_wait_timeout: 5)
+    expect(handle.pending?).to be false
+    expect(report).not_to be_nil
+    expect(report.partition).to eq 0
+    expect(report.offset).to be >= 0
+    expect(report.label).to eq "label"
+
+    # Flush and close producer
+    producer.flush
+    producer.close
+
+    # Consume message and verify its content
+    message = wait_for_message(
+      topic: new_topic,
+      delivery_report: report,
+      consumer: consumer
+    )
+    expect(message.partition).to eq 0
+    expect(message.payload).to eq "payload"
+    expect(message.key).to eq "key"
+    # Since api.version.request is on by default we will get
+    # the message creation timestamp if it's not set.
+    expect(message.timestamp).to be_within(10).of(Time.now)
+  end
+
   context "timestamp" do
     it "should raise a type error if not nil, integer or time" do
       expect {
@@ -623,7 +665,7 @@ describe Rdkafka::Producer do
   context "when not being able to deliver the message" do
     let(:producer) do
       rdkafka_producer_config(
-        "bootstrap.servers": "localhost:9093",
+        "bootstrap.servers": "127.0.0.1:9093",
         "message.timeout.ms": 100
       ).producer
     end
@@ -637,6 +679,25 @@ describe Rdkafka::Producer do
     end
   end
 
+  context "when topic does not exist and allow.auto.create.topics is false" do
+    let(:producer) do
+      rdkafka_producer_config(
+        "bootstrap.servers": "127.0.0.1:9092",
+        "message.timeout.ms": 100,
+        "allow.auto.create.topics": false
+      ).producer
+    end
+
+    it "should contain the error in the response when not deliverable" do
+      handler = producer.produce(topic: "it-#{SecureRandom.uuid}", payload: nil, label: 'na')
+      # Wait for the async callbacks and delivery registry to update
+      sleep(2)
+      expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+      expect(handler.create_result.error.code).to eq(:msg_timed_out)
+      expect(handler.create_result.label).to eq('na')
+    end
+  end
+
   describe '#partition_count' do
     it { expect(producer.partition_count('example_topic')).to eq(1) }
 
@@ -654,12 +715,11 @@ describe Rdkafka::Producer do
 
     context 'when the partition count value was cached but time expired' do
       before do
-        allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
-        producer.partition_count('example_topic')
+        ::Rdkafka::Producer.partitions_count_cache = Rdkafka::Producer::PartitionsCountCache.new
         allow(::Rdkafka::Metadata).to receive(:new).and_call_original
       end
 
-      it 'expect not to query it again' do
+      it 'expect to query it again' do
         producer.partition_count('example_topic')
         expect(::Rdkafka::Metadata).to have_received(:new)
       end
@@ -721,7 +781,7 @@ describe Rdkafka::Producer do
     context 'when it cannot flush due to a timeout' do
       let(:producer) do
         rdkafka_producer_config(
-          "bootstrap.servers": "localhost:9093",
+          "bootstrap.servers": "127.0.0.1:9093",
           "message.timeout.ms": 2_000
         ).producer
       end
@@ -768,7 +828,7 @@ describe Rdkafka::Producer do
     context 'when there are outgoing things in the queue' do
       let(:producer) do
         rdkafka_producer_config(
-          "bootstrap.servers": "localhost:9093",
+          "bootstrap.servers": "127.0.0.1:9093",
           "message.timeout.ms": 2_000
         ).producer
       end
@@ -1042,4 +1102,133 @@ describe Rdkafka::Producer do
       expect(message.headers['version']).to eq('2.1.3')
     end
   end
+
+  describe 'with active statistics callback' do
+    let(:producer) do
+      rdkafka_producer_config('statistics.interval.ms': 1_000).producer
+    end
+
+    let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
+    let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
+    let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
+
+    context "when using partition key" do
+      before do
+        Rdkafka::Config.statistics_callback = ->(*) {}
+
+        # This call will make a blocking request to the metadata cache
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers",
+          partition_key: "test"
+        ).wait
+
+        pre_statistics_ttl
+
+        # We wait to make sure that statistics are triggered and that there is a refresh
+        sleep(1.5)
+
+        post_statistics_ttl
+      end
+
+      it 'expect to update ttl on the partitions count cache via statistics' do
+        expect(pre_statistics_ttl).to be < post_statistics_ttl
+      end
+    end
+
+    context "when not using partition key" do
+      before do
+        Rdkafka::Config.statistics_callback = ->(*) {}
+
+        # This call will make a blocking request to the metadata cache
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers"
+        ).wait
+
+        pre_statistics_ttl
+
+        # We wait to make sure that statistics are triggered and that there is a refresh
+        sleep(1.5)
+
+        # This will anyhow be populated from statistic
+        post_statistics_ttl
+      end
+
+      it 'expect not to update ttl on the partitions count cache via blocking but via use stats' do
+        expect(pre_statistics_ttl).to be_nil
+        expect(post_statistics_ttl).not_to be_nil
+      end
+    end
+  end
+
+  describe 'without active statistics callback' do
+    let(:producer) do
+      rdkafka_producer_config('statistics.interval.ms': 1_000).producer
+    end
+
+    let(:count_cache_hash) { described_class.partitions_count_cache.to_h }
+    let(:pre_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
+    let(:post_statistics_ttl) { count_cache_hash.fetch('produce_test_topic', [])[0] }
+
+    context "when using partition key" do
+      before do
+        # This call will make a blocking request to the metadata cache
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers",
+          partition_key: "test"
+        ).wait
+
+        pre_statistics_ttl
+
+        # We wait to make sure that statistics are triggered and that there is a refresh
+        sleep(1.5)
+
+        post_statistics_ttl
+      end
+
+      it 'expect not to update ttl on the partitions count cache via statistics' do
+        expect(pre_statistics_ttl).to eq post_statistics_ttl
+      end
+    end
+
+    context "when not using partition key" do
+      before do
+        # This call will make a blocking request to the metadata cache
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers"
+        ).wait
+
+        pre_statistics_ttl
+
+        # We wait to make sure that statistics are triggered and that there is a refresh
+        sleep(1.5)
+
+        # This should not be populated because stats are not in use
+        post_statistics_ttl
+      end
+
+      it 'expect not to update ttl on the partitions count cache via anything' do
+        expect(pre_statistics_ttl).to be_nil
+        expect(post_statistics_ttl).to be_nil
+      end
+    end
+  end
+
+  describe 'with other fiber closing' do
+    context 'when we create many fibers and close producer in some of them' do
+      it 'expect not to crash ruby' do
+        10.times do |i|
+          producer = rdkafka_producer_config.producer
+
+          Fiber.new do
+            GC.start
+            producer.close
+          end.resume
+        end
+      end
+    end
+  end
 end
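The specs above exercise the new `Rdkafka::Producer::PartitionsCountCache` (added in `lib/rdkafka/producer/partitions_count_cache.rb`): a process-wide, per-topic cache whose entries carry a refresh timestamp in slot `[0]`, which is why the old `Process.clock_gettime` stubbing disappears. The snippet below is a minimal sketch of such a TTL cache, matching only the interface the specs touch (`to_h`, plus hypothetical `get`/`set` helpers); it is not the gem's actual implementation, and the 30-second TTL is an assumption.

```ruby
# Minimal sketch only - not the gem's implementation. The interface mirrors what the
# specs read: a hash keyed by topic where entry[0] is the last refresh timestamp.
class NaivePartitionsCountCache
  TTL_SECONDS = 30 # assumed default; the real value lives in the gem

  def initialize
    @hash = {}
  end

  # Returns a fresh cached count or computes one via the block (e.g. a metadata call)
  def get(topic)
    timestamp, count = @hash[topic]
    return count if timestamp && monotonic_now - timestamp < TTL_SECONDS

    set(topic, yield)
    @hash[topic][1]
  end

  # Stores or refreshes a count; a statistics callback can call this to bump the timestamp
  def set(topic, count)
    @hash[topic] = [monotonic_now, count]
  end

  # Exposes the raw hash, which is what the specs inspect and clear between examples
  def to_h
    @hash
  end

  private

  def monotonic_now
    Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end
end
```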
data/spec/spec_helper.rb CHANGED
@@ -17,7 +17,10 @@ def rdkafka_base_config
   {
     :"api.version.request" => false,
     :"broker.version.fallback" => "1.0",
-    :"bootstrap.servers" => "localhost:9092",
+    :"bootstrap.servers" => "127.0.0.1:9092",
+    # Display statistics and refresh often just to cover those in specs
+    :'statistics.interval.ms' => 1_000,
+    :'topic.metadata.refresh.interval.ms' => 1_000
   }
 end
 
@@ -125,6 +128,12 @@ RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
 
+  config.before(:each) do
+    Rdkafka::Config.statistics_callback = nil
+    # We need to clear it so state does not leak between specs
+    Rdkafka::Producer.partitions_count_cache.to_h.clear
+  end
+
   config.before(:suite) do
     admin = rdkafka_config.admin
     {
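The `statistics.interval.ms => 1_000` setting above makes librdkafka emit a statistics payload roughly every second, and the per-example reset of `Rdkafka::Config.statistics_callback` keeps that state from leaking between specs. Below is a short usage sketch of registering such a callback; the exact keys of the stats hash are an assumption based on librdkafka's statistics JSON.

```ruby
# Sketch: register a statistics callback; with 'statistics.interval.ms' => 1_000
# the lambda is invoked about once per second with the parsed statistics hash.
Rdkafka::Config.statistics_callback = lambda do |stats|
  # 'topics' is expected in librdkafka's statistics payload; guard in case it is absent
  topic_names = (stats['topics'] || {}).keys
  puts "stats tick, topics seen: #{topic_names.join(', ')}"
end
```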
metadata CHANGED
@@ -1,40 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.20.0.rc1
+  version: 0.20.0.rc5
 platform: ruby
 authors:
 - Thijs Cadier
 - Maciej Mensfeld
 bindir: bin
-cert_chain:
-- |
-  -----BEGIN CERTIFICATE-----
-  MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
-  YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
-  MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
-  dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
-  bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
-  PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
-  ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
-  Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
-  JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
-  T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
-  flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
-  vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
-  iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
-  HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
-  sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
-  bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
-  eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
-  Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
-  agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
-  TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
-  Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
-  oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
-  i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
-  ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
-  -----END CERTIFICATE-----
+cert_chain: []
 date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -52,41 +25,41 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '1.15'
 - !ruby/object:Gem::Dependency
-  name: mini_portile2
+  name: logger
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '2.6'
+        version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
      - !ruby/object:Gem::Version
-        version: '2.6'
+        version: '0'
 - !ruby/object:Gem::Dependency
-  name: rake
+  name: mini_portile2
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ">"
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '12'
+        version: '2.6'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">"
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '12'
+        version: '2.6'
 - !ruby/object:Gem::Dependency
-  name: pry
+  name: ostruct
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :development
+  type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -94,21 +67,21 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: rspec
+  name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">"
       - !ruby/object:Gem::Version
-        version: '3.5'
-  type: :development
+        version: '12'
+  type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">"
       - !ruby/object:Gem::Version
-        version: '3.5'
+        version: '12'
 - !ruby/object:Gem::Dependency
-  name: rake
+  name: pry
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -122,21 +95,21 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: simplecov
+  name: rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '3.5'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '3.5'
 - !ruby/object:Gem::Dependency
-  name: guard
+  name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -150,7 +123,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: guard-rspec
+  name: simplecov
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -173,7 +146,13 @@ extra_rdoc_files: []
 files:
 - ".github/CODEOWNERS"
 - ".github/FUNDING.yml"
- - ".github/workflows/ci.yml"
+ - ".github/workflows/ci_linux_x86_64_gnu.yml"
+ - ".github/workflows/ci_linux_x86_64_musl.yml"
+ - ".github/workflows/ci_macos_arm64.yml"
+ - ".github/workflows/push_linux_x86_64_gnu.yml"
+ - ".github/workflows/push_linux_x86_64_musl.yml"
+ - ".github/workflows/push_macos_arm64.yml"
+ - ".github/workflows/push_ruby.yml"
 - ".github/workflows/verify-action-pins.yml"
 - ".gitignore"
 - ".rspec"
@@ -182,16 +161,18 @@ files:
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
- - Guardfile
 - MIT-LICENSE
 - README.md
 - Rakefile
- - certs/cert.pem
- - dist/librdkafka-2.10.0.tar.gz
+ - dist/librdkafka-2.8.0.tar.gz
 - dist/patches/rdkafka_global_init.patch
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
+ - ext/build_common.sh
+ - ext/build_linux_x86_64_gnu.sh
+ - ext/build_linux_x86_64_musl.sh
+ - ext/build_macos_arm64.sh
 - karafka-rdkafka.gemspec
 - lib/rdkafka.rb
 - lib/rdkafka/abstract_handle.rb
@@ -233,6 +214,7 @@ files:
 - lib/rdkafka/producer.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
+ - lib/rdkafka/producer/partitions_count_cache.rb
 - lib/rdkafka/version.rb
 - renovate.json
 - spec/rdkafka/abstract_handle_spec.rb
@@ -260,6 +242,8 @@ files:
 - spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
+ - spec/rdkafka/producer/partitions_count_cache_spec.rb
+ - spec/rdkafka/producer/partitions_count_spec.rb
 - spec/rdkafka/producer_spec.rb
 - spec/spec_helper.rb
 licenses:
@@ -267,7 +251,7 @@ licenses:
 metadata:
   funding_uri: https://karafka.io/#become-pro
   homepage_uri: https://karafka.io
-  changelog_uri: https://github.com/karafka/karafka-rdkafka/blob/master/CHANGELOG.md
+  changelog_uri: https://karafka.io/docs/Changelog-Karafka-Rdkafka/
   bug_tracker_uri: https://github.com/karafka/karafka-rdkafka/issues
   source_code_uri: https://github.com/karafka/karafka-rdkafka
   documentation_uri: https://karafka.io/docs
@@ -291,31 +275,4 @@ specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
   and Ruby 2.7+.
-test_files:
-- spec/rdkafka/abstract_handle_spec.rb
-- spec/rdkafka/admin/create_acl_handle_spec.rb
-- spec/rdkafka/admin/create_acl_report_spec.rb
-- spec/rdkafka/admin/create_topic_handle_spec.rb
-- spec/rdkafka/admin/create_topic_report_spec.rb
-- spec/rdkafka/admin/delete_acl_handle_spec.rb
-- spec/rdkafka/admin/delete_acl_report_spec.rb
-- spec/rdkafka/admin/delete_topic_handle_spec.rb
-- spec/rdkafka/admin/delete_topic_report_spec.rb
-- spec/rdkafka/admin/describe_acl_handle_spec.rb
-- spec/rdkafka/admin/describe_acl_report_spec.rb
-- spec/rdkafka/admin_spec.rb
-- spec/rdkafka/bindings_spec.rb
-- spec/rdkafka/callbacks_spec.rb
-- spec/rdkafka/config_spec.rb
-- spec/rdkafka/consumer/headers_spec.rb
-- spec/rdkafka/consumer/message_spec.rb
-- spec/rdkafka/consumer/partition_spec.rb
-- spec/rdkafka/consumer/topic_partition_list_spec.rb
-- spec/rdkafka/consumer_spec.rb
-- spec/rdkafka/error_spec.rb
-- spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/native_kafka_spec.rb
-- spec/rdkafka/producer/delivery_handle_spec.rb
-- spec/rdkafka/producer/delivery_report_spec.rb
-- spec/rdkafka/producer_spec.rb
-- spec/spec_helper.rb
+test_files: []
checksums.yaml.gz.sig DELETED
Binary file
data/.github/workflows/ci.yml DELETED
@@ -1,99 +0,0 @@
-name: ci
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-on:
-  pull_request:
-    branches: [ main, master ]
-  push:
-    branches: [ main, master ]
-  schedule:
-    - cron: '0 1 * * *'
-
-permissions:
-  contents: read
-
-env:
-  BUNDLE_RETRY: 6
-  BUNDLE_JOBS: 4
-
-jobs:
-  specs:
-    timeout-minutes: 30
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        ruby:
-          - '3.4'
-          - '3.3'
-          - '3.2'
-          - '3.1'
-          - 'jruby-10.0'
-        include:
-          - ruby: '3.4'
-            coverage: 'true'
-          - ruby: 'jruby-10.0'
-            continue-on-error: true
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          fetch-depth: 0
-
-      - name: Install package dependencies
-        run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
-
-      - name: Start Kafka with Docker Compose
-        run: |
-          docker compose up -d || (sleep 5 && docker compose up -d)
-
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@ca041f971d66735f3e5ff1e21cc13e2d51e7e535 # v1.233.0
-        with:
-          ruby-version: ${{matrix.ruby}}
-          bundler-cache: true
-
-      - name: Run all specs
-        env:
-          GITHUB_COVERAGE: ${{matrix.coverage}}
-        continue-on-error: ${{ matrix.continue-on-error || false }} # Use the matrix value if present
-        run: |
-          set -e
-          bundle install --jobs 4 --retry 3
-          cd ext && bundle exec rake
-          cd ..
-          bundle exec rspec
-
-  macos_build:
-    timeout-minutes: 30
-    runs-on: macos-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        ruby:
-          - '3.4'
-          - '3.3'
-          - '3.2'
-          - '3.1'
-          - 'jruby-9.4'
-        include:
-          - ruby: 'jruby-10.0'
-            continue-on-error: true
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-
-      - name: Set up Ruby
-        uses: ruby/setup-ruby@ca041f971d66735f3e5ff1e21cc13e2d51e7e535 # v1.233.0
-        with:
-          ruby-version: ${{matrix.ruby}}
-          bundler-cache: false
-
-      - name: Build rdkafka-ruby
-        continue-on-error: ${{ matrix.continue-on-error || false }}
-        run: |
-          set -e
-          bundle install --jobs 4 --retry 3
-          cd ext && bundle exec rake
data/Guardfile DELETED
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-logger level: :error
-
-guard :rspec, cmd: "bundle exec rspec --format #{ENV.fetch("FORMAT", "documentation")}" do
-  require "guard/rspec/dsl"
-  dsl = Guard::RSpec::Dsl.new(self)
-
-  # Ruby files
-  ruby = dsl.ruby
-  dsl.watch_spec_files_for(ruby.lib_files)
-  watch(%r{^lib/(.+)\.rb}) { |m| "spec/#{m[1]}_spec.rb" }
-
-  # RSpec files
-  rspec = dsl.rspec
-  watch(rspec.spec_helper) { rspec.spec_dir }
-  watch(rspec.spec_support) { rspec.spec_dir }
-  watch(rspec.spec_files)
-end
data/certs/cert.pem DELETED
@@ -1,26 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
-YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
-MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
-dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
-bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
-PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
-ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
-Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
-JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
-T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
-flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
-vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
-iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
-HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
-sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
-bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
-eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
-Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
-agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
-TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
-Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
-oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
-i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
-ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
------END CERTIFICATE-----
data.tar.gz.sig DELETED
Binary file
metadata.gz.sig DELETED
Binary file