rdkafka 0.21.1.alpha2 → 0.22.0.beta1

This diff shows the changes between package versions as published to their respective public registries and is provided for informational purposes only.
data/renovate.json CHANGED
@@ -14,5 +14,79 @@
  ],
  "minimumReleaseAge": "7 days"
  }
+ ],
+ "customManagers": [
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly OPENSSL_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "openssl/openssl",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^OpenSSL_(?<version>.*)$"
+ },
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly CYRUS_SASL_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "cyrusimap/cyrus-sasl",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^cyrus-sasl-(?<version>.*)$"
+ },
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly ZLIB_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "madler/zlib",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^v(?<version>.*)$"
+ },
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly ZSTD_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "facebook/zstd",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^v(?<version>.*)$"
+ },
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly KRB5_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "krb5/krb5",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^krb5-(?<version>.*)$"
+ },
+ {
+ "customType": "regex",
+ "managerFilePatterns": [
+ "/^ext/build_common\\.sh$/"
+ ],
+ "matchStrings": [
+ "readonly LIBRDKAFKA_VERSION=\"(?<currentValue>.*)\""
+ ],
+ "depNameTemplate": "confluentinc/librdkafka",
+ "datasourceTemplate": "github-releases",
+ "extractVersionTemplate": "^v(?<version>.*)$"
+ }
  ]
  }
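Each of these custom managers follows the same shape: a regex scans `ext/build_common.sh` for a `readonly <NAME>_VERSION="…"` assignment, captures the pinned value as `currentValue`, resolves it against a `github-releases` datasource, and strips the upstream tag prefix (`OpenSSL_`, `cyrus-sasl-`, `v`, `krb5-`) via `extractVersionTemplate`. A minimal Ruby sketch of what the first `matchStrings` pattern captures; the build script itself is not part of this diff, so the version value below is invented:

```ruby
# Hypothetical line from ext/build_common.sh (the real pinned version is not
# shown in this diff).
line = 'readonly OPENSSL_VERSION="3.0.16"'

# The matchStrings pattern from the first custom manager, as a Ruby regexp.
pattern = /readonly OPENSSL_VERSION="(?<currentValue>.*)"/

puts line.match(pattern)[:currentValue] # => 3.0.16
```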
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -295,6 +295,8 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
  expect(resources_results.first.type).to eq(2)
  expect(resources_results.first.name).to eq(topic_name)

+ sleep(1)
+
  ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
  config.name == 'delete.retention.ms'
  end
@@ -325,6 +327,9 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
  expect(resources_results.size).to eq(1)
  expect(resources_results.first.type).to eq(2)
  expect(resources_results.first.name).to eq(topic_name)
+
+ sleep(1)
+
  ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
  config.name == 'delete.retention.ms'
  end
@@ -356,6 +361,8 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
  expect(resources_results.first.type).to eq(2)
  expect(resources_results.first.name).to eq(topic_name)

+ sleep(1)
+
  ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
  config.name == 'cleanup.policy'
  end
@@ -387,6 +394,8 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
  expect(resources_results.first.type).to eq(2)
  expect(resources_results.first.name).to eq(topic_name)

+ sleep(1)
+
  ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
  config.name == 'cleanup.policy'
  end
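The `sleep(1)` calls added above all guard the same race: `alter_configs`/`incremental_alter_configs` propagate asynchronously, so a `describe_configs` issued immediately afterwards can still observe the old value. A sketch of the same idea without a fixed delay, reusing the specs' `admin` and `resources_with_configs` setup; the helper name and the `value` comparison are illustrative, not part of the gem:

```ruby
# Illustrative helper: poll describe_configs until the expected value is
# visible, instead of sleeping for a fixed second.
def wait_for_config(admin, resources_with_configs, name, expected, attempts: 10)
  attempts.times do
    config = admin
             .describe_configs(resources_with_configs)
             .wait
             .resources.first.configs
             .find { |c| c.name == name }

    return config if config && config.value == expected

    sleep(0.2)
  end

  raise "config #{name} never reached #{expected.inspect}"
end
```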
@@ -622,7 +631,12 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m

  consumer.subscribe(topic_name)
  wait_for_assignment(consumer)
- message = consumer.poll(100)
+
+ message = nil
+
+ 10.times do
+ message ||= consumer.poll(100)
+ end

  expect(message).to_not be_nil

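`Consumer#poll` returns `nil` when no message arrives within the timeout, so the single `poll(100)` this hunk replaces could come up empty right after assignment; the `10.times` loop keeps polling until a message shows up. The same pattern as a reusable helper (hypothetical, not part of the gem):

```ruby
# Hypothetical helper: retry Consumer#poll a bounded number of times and
# return the first message, or nil if none arrived.
def poll_first(consumer, attempts: 10, timeout_ms: 100)
  attempts.times do
    message = consumer.poll(timeout_ms)
    return message if message
  end

  nil
end
```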
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -170,8 +170,16 @@ describe Rdkafka::Consumer do
  end

  describe "#seek" do
+ let(:topic) { "it-#{SecureRandom.uuid}" }
+
+ before do
+ admin = rdkafka_producer_config.admin
+ admin.create_topic(topic, 1, 1).wait
+ admin.close
+ end
+
  it "should raise an error when seeking fails" do
- fake_msg = OpenStruct.new(topic: "consume_test_topic", partition: 0, offset: 0)
+ fake_msg = OpenStruct.new(topic: topic, partition: 0, offset: 0)

  expect(Rdkafka::Bindings).to receive(:rd_kafka_seek).and_return(20)
  expect {
@@ -181,9 +189,12 @@ describe Rdkafka::Consumer do

  context "subscription" do
  let(:timeout) { 1000 }
+ # Some specs here test the manual offset commit hence we want to ensure, that we have some
+ # offsets in-memory that we can manually commit
+ let(:consumer) { rdkafka_consumer_config('auto.commit.interval.ms': 60_000).consumer }

  before do
- consumer.subscribe("consume_test_topic")
+ consumer.subscribe(topic)

  # 1. partitions are assigned
  wait_for_assignment(consumer)
@@ -196,7 +207,7 @@ describe Rdkafka::Consumer do

  def send_one_message(val)
  producer.produce(
- topic: "consume_test_topic",
+ topic: topic,
  payload: "payload #{val}",
  key: "key 1",
  partition: 0
@@ -211,7 +222,7 @@ describe Rdkafka::Consumer do

  # 4. pause the subscription
  tpl = Rdkafka::Consumer::TopicPartitionList.new
- tpl.add_topic("consume_test_topic", 1)
+ tpl.add_topic(topic, 1)
  consumer.pause(tpl)

  # 5. seek to previous message
@@ -219,7 +230,7 @@ describe Rdkafka::Consumer do

  # 6. resume the subscription
  tpl = Rdkafka::Consumer::TopicPartitionList.new
- tpl.add_topic("consume_test_topic", 1)
+ tpl.add_topic(topic, 1)
  consumer.resume(tpl)

  # 7. ensure same message is read again
@@ -227,7 +238,7 @@ describe Rdkafka::Consumer do

  # This is needed because `enable.auto.offset.store` is true but when running in CI that
  # is overloaded, offset store lags
- sleep(2)
+ sleep(1)

  consumer.commit
  expect(message1.offset).to eq message2.offset
@@ -259,10 +270,17 @@ describe Rdkafka::Consumer do
  end

  describe "#seek_by" do
- let(:topic) { "consume_test_topic" }
+ let(:consumer) { rdkafka_consumer_config('auto.commit.interval.ms': 60_000).consumer }
+ let(:topic) { "it-#{SecureRandom.uuid}" }
  let(:partition) { 0 }
  let(:offset) { 0 }

+ before do
+ admin = rdkafka_producer_config.admin
+ admin.create_topic(topic, 1, 1).wait
+ admin.close
+ end
+
  it "should raise an error when seeking fails" do
  expect(Rdkafka::Bindings).to receive(:rd_kafka_seek).and_return(20)
  expect {
@@ -588,12 +606,18 @@ describe Rdkafka::Consumer do

  describe "#store_offset" do
  let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': false).consumer }
+ let(:metadata) { SecureRandom.uuid }
+ let(:group_id) { SecureRandom.uuid }
+ let(:base_config) do
+ {
+ 'group.id': group_id,
+ 'enable.auto.offset.store': false,
+ 'enable.auto.commit': false
+ }
+ end

  before do
- config = {}
- config[:'enable.auto.offset.store'] = false
- config[:'enable.auto.commit'] = false
- @new_consumer = rdkafka_consumer_config(config).consumer
+ @new_consumer = rdkafka_consumer_config(base_config).consumer
  @new_consumer.subscribe("consume_test_topic")
  wait_for_assignment(@new_consumer)
  end
@@ -795,12 +819,14 @@ describe Rdkafka::Consumer do
  end

  it "should return a message if there is one" do
+ topic = "it-#{SecureRandom.uuid}"
+
  producer.produce(
- topic: "consume_test_topic",
+ topic: topic,
  payload: "payload 1",
  key: "key 1"
  ).wait
- consumer.subscribe("consume_test_topic")
+ consumer.subscribe(topic)
  message = consumer.each {|m| break m}

  expect(message).to be_a Rdkafka::Consumer::Message
@@ -1000,7 +1026,7 @@ describe Rdkafka::Consumer do
  after { Rdkafka::Config.statistics_callback = nil }

  let(:consumer) do
- config = rdkafka_consumer_config('statistics.interval.ms': 100)
+ config = rdkafka_consumer_config('statistics.interval.ms': 500)
  config.consumer_poll_set = false
  config.consumer
  end
@@ -1097,7 +1123,8 @@ describe Rdkafka::Consumer do
  :assign => [ nil ],
  :assignment => nil,
  :committed => [],
- :query_watermark_offsets => [ nil, nil ]
+ :query_watermark_offsets => [ nil, nil ],
+ :assignment_lost? => []
  }.each do |method, args|
  it "raises an exception if #{method} is called" do
  expect {
@@ -1212,4 +1239,36 @@ describe Rdkafka::Consumer do
  end
  end
  end
+
+ describe "when reaching eof on a topic and eof reporting enabled" do
+ let(:consumer) { rdkafka_consumer_config(:"enable.partition.eof" => true).consumer }
+
+ it "should return proper details" do
+ (0..2).each do |i|
+ producer.produce(
+ topic: "consume_test_topic",
+ key: "key lag #{i}",
+ partition: i
+ ).wait
+ end
+
+ # Consume to the end
+ consumer.subscribe("consume_test_topic")
+ eof_count = 0
+ eof_error = nil
+
+ loop do
+ begin
+ consumer.poll(100)
+ rescue Rdkafka::RdkafkaError => error
+ if error.is_partition_eof?
+ eof_error = error
+ end
+ break if eof_error
+ end
+ end
+
+ expect(eof_error.code).to eq(:partition_eof)
+ end
+ end
  end
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -30,7 +30,7 @@ describe Rdkafka::Metadata do
  it "#brokers returns our single broker" do
  expect(subject.brokers.length).to eq(1)
  expect(subject.brokers[0][:broker_id]).to eq(1)
- expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
+ expect(%w[127.0.0.1 localhost]).to include(subject.brokers[0][:broker_name])
  expect(subject.brokers[0][:broker_port]).to eq(9092)
  end

@@ -53,7 +53,7 @@ describe Rdkafka::Metadata do
  it "#brokers returns our single broker" do
  expect(subject.brokers.length).to eq(1)
  expect(subject.brokers[0][:broker_id]).to eq(1)
- expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
+ expect(%w[127.0.0.1 localhost]).to include(subject.brokers[0][:broker_name])
  expect(subject.brokers[0][:broker_port]).to eq(9092)
  end

data/spec/rdkafka/producer/partitions_count_cache_spec.rb CHANGED
@@ -257,7 +257,7 @@ RSpec.describe Rdkafka::Producer::PartitionsCountCache do
  cache.get(topic) { partition_count }

  # Wait just under TTL
- sleep(default_ttl - 0.1)
+ sleep(default_ttl - 0.2)

  # Value should still be cached (block should not be called)
  result = cache.get(topic) { fail "Should not be called when cache is valid" }
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -53,7 +53,7 @@ describe Rdkafka::Producer do
  let(:producer) do
  rdkafka_producer_config(
  'message.timeout.ms': 1_000_000,
- :"bootstrap.servers" => "localhost:9094",
+ :"bootstrap.servers" => "127.0.0.1:9094",
  ).producer
  end

@@ -364,6 +364,48 @@ describe Rdkafka::Producer do
  expect(message.key).to eq "key utf8"
  end

+ it "should produce a message to a non-existing topic with key and partition key" do
+ new_topic = "it-#{SecureRandom.uuid}"
+
+ handle = producer.produce(
+ # Needs to be a new topic each time
+ topic: new_topic,
+ payload: "payload",
+ key: "key",
+ partition_key: "partition_key",
+ label: "label"
+ )
+
+ # Should be pending at first
+ expect(handle.pending?).to be true
+ expect(handle.label).to eq "label"
+
+ # Check delivery handle and report
+ report = handle.wait(max_wait_timeout: 5)
+ expect(handle.pending?).to be false
+ expect(report).not_to be_nil
+ expect(report.partition).to eq 0
+ expect(report.offset).to be >= 0
+ expect(report.label).to eq "label"
+
+ # Flush and close producer
+ producer.flush
+ producer.close
+
+ # Consume message and verify its content
+ message = wait_for_message(
+ topic: new_topic,
+ delivery_report: report,
+ consumer: consumer
+ )
+ expect(message.partition).to eq 0
+ expect(message.payload).to eq "payload"
+ expect(message.key).to eq "key"
+ # Since api.version.request is on by default we will get
+ # the message creation timestamp if it's not set.
+ expect(message.timestamp).to be_within(10).of(Time.now)
+ end
+
  context "timestamp" do
  it "should raise a type error if not nil, integer or time" do
  expect {
@@ -623,7 +665,7 @@ describe Rdkafka::Producer do
  context "when not being able to deliver the message" do
  let(:producer) do
  rdkafka_producer_config(
- "bootstrap.servers": "localhost:9093",
+ "bootstrap.servers": "127.0.0.1:9093",
  "message.timeout.ms": 100
  ).producer
  end
@@ -637,6 +679,25 @@ describe Rdkafka::Producer do
  end
  end

+ context "when topic does not exist and allow.auto.create.topics is false" do
+ let(:producer) do
+ rdkafka_producer_config(
+ "bootstrap.servers": "127.0.0.1:9092",
+ "message.timeout.ms": 100,
+ "allow.auto.create.topics": false
+ ).producer
+ end
+
+ it "should contain the error in the response when not deliverable" do
+ handler = producer.produce(topic: "it-#{SecureRandom.uuid}", payload: nil, label: 'na')
+ # Wait for the async callbacks and delivery registry to update
+ sleep(2)
+ expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+ expect(handler.create_result.error.code).to eq(:msg_timed_out)
+ expect(handler.create_result.label).to eq('na')
+ end
+ end
+
  describe '#partition_count' do
  it { expect(producer.partition_count('consume_test_topic')).to eq(3) }

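The new `allow.auto.create.topics` spec sleeps for a fixed two seconds so the delivery callback has fired before it inspects `handler.create_result`. An alternative shape, assuming `DeliveryHandle#wait` raises `Rdkafka::RdkafkaError` when delivery fails (a sketch, not what the spec does):

```ruby
# Sketch: surface the delivery failure by waiting on the handle and rescuing,
# rather than sleeping and then inspecting create_result.
begin
  handler.wait(max_wait_timeout: 5)
rescue Rdkafka::RdkafkaError => e
  e.code # expected to be :msg_timed_out for a topic that cannot be created
end
```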
@@ -693,7 +754,7 @@ describe Rdkafka::Producer do
  context 'when it cannot flush due to a timeout' do
  let(:producer) do
  rdkafka_producer_config(
- "bootstrap.servers": "localhost:9093",
+ "bootstrap.servers": "127.0.0.1:9093",
  "message.timeout.ms": 2_000
  ).producer
  end
@@ -740,7 +801,7 @@ describe Rdkafka::Producer do
  context 'when there are outgoing things in the queue' do
  let(:producer) do
  rdkafka_producer_config(
- "bootstrap.servers": "localhost:9093",
+ "bootstrap.servers": "127.0.0.1:9093",
  "message.timeout.ms": 2_000
  ).producer
  end
data/spec/spec_helper.rb CHANGED
@@ -17,7 +17,7 @@ def rdkafka_base_config
  {
  :"api.version.request" => false,
  :"broker.version.fallback" => "1.0",
- :"bootstrap.servers" => "localhost:9092",
+ :"bootstrap.servers" => "127.0.0.1:9092",
  # Display statistics and refresh often just to cover those in specs
  :'statistics.interval.ms' => 1_000,
  :'topic.metadata.refresh.interval.ms' => 1_000
@@ -78,18 +78,32 @@ end

  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
  new_consumer = consumer.nil?
- consumer ||= rdkafka_consumer_config.consumer
+ consumer ||= rdkafka_consumer_config('allow.auto.create.topics': true).consumer
  consumer.subscribe(topic)
  timeout = Time.now.to_i + timeout_in_seconds
+ retry_count = 0
+ max_retries = 10
+
  loop do
  if timeout <= Time.now.to_i
  raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
  end
- message = consumer.poll(100)
- if message &&
- message.partition == delivery_report.partition &&
- message.offset == delivery_report.offset
- return message
+
+ begin
+ message = consumer.poll(100)
+ if message &&
+ message.partition == delivery_report.partition &&
+ message.offset == delivery_report.offset
+ return message
+ end
+ rescue Rdkafka::RdkafkaError => e
+ if e.code == :unknown_topic_or_part && retry_count < max_retries
+ retry_count += 1
+ sleep(0.1) # Small delay before retry
+ next
+ else
+ raise
+ end
  end
  end
  ensure
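For reference, the call shape the producer specs above use for this helper; the topic name and payload are illustrative:

```ruby
# Illustrative usage of wait_for_message: produce, wait for the delivery
# report, then read the message back from the same topic.
topic = "it-#{SecureRandom.uuid}"
report = producer.produce(topic: topic, payload: "payload").wait

message = wait_for_message(topic: topic, delivery_report: report)
```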
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
- version: 0.21.1.alpha2
+ version: 0.22.0.beta1
  platform: ruby
  authors:
  - Thijs Cadier
@@ -24,6 +24,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '1.15'
+ - !ruby/object:Gem::Dependency
+ name: logger
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: mini_portile2
  requirement: !ruby/object:Gem::Requirement
@@ -53,7 +67,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '12'
  - !ruby/object:Gem::Dependency
- name: pry
+ name: ostruct
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -67,21 +81,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: rspec
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '3.5'
- type: :development
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: '3.5'
- - !ruby/object:Gem::Dependency
- name: rake
+ name: pry
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -95,21 +95,21 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: simplecov
+ name: rspec
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '3.5'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '3.5'
  - !ruby/object:Gem::Dependency
- name: guard
+ name: rake
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -123,7 +123,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: guard-rspec
+ name: simplecov
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -146,8 +146,13 @@ extra_rdoc_files: []
  files:
  - ".github/CODEOWNERS"
  - ".github/FUNDING.yml"
- - ".github/workflows/ci.yml"
- - ".github/workflows/push.yml"
+ - ".github/workflows/ci_linux_x86_64_gnu.yml"
+ - ".github/workflows/ci_linux_x86_64_musl.yml"
+ - ".github/workflows/ci_macos_arm64.yml"
+ - ".github/workflows/push_linux_x86_64_gnu.yml"
+ - ".github/workflows/push_linux_x86_64_musl.yml"
+ - ".github/workflows/push_macos_arm64.yml"
+ - ".github/workflows/push_ruby.yml"
  - ".github/workflows/verify-action-pins.yml"
  - ".gitignore"
  - ".rspec"
@@ -156,7 +161,6 @@ files:
  - ".yardopts"
  - CHANGELOG.md
  - Gemfile
- - Guardfile
  - MIT-LICENSE
  - README.md
  - Rakefile
@@ -165,6 +169,10 @@ files:
  - docker-compose.yml
  - ext/README.md
  - ext/Rakefile
+ - ext/build_common.sh
+ - ext/build_linux_x86_64_gnu.sh
+ - ext/build_linux_x86_64_musl.sh
+ - ext/build_macos_arm64.sh
  - lib/rdkafka.rb
  - lib/rdkafka/abstract_handle.rb
  - lib/rdkafka/admin.rb
@@ -234,7 +242,7 @@ files:
  - spec/rdkafka/native_kafka_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
- - spec/rdkafka/producer/partitions_count_spec.rb
+ - spec/rdkafka/producer/partitions_count_cache_spec.rb
  - spec/rdkafka/producer_spec.rb
  - spec/spec_helper.rb
  licenses:
@@ -266,32 +274,4 @@ specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
  It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
  and Ruby 2.7+.
- test_files:
- - spec/rdkafka/abstract_handle_spec.rb
- - spec/rdkafka/admin/create_acl_handle_spec.rb
- - spec/rdkafka/admin/create_acl_report_spec.rb
- - spec/rdkafka/admin/create_topic_handle_spec.rb
- - spec/rdkafka/admin/create_topic_report_spec.rb
- - spec/rdkafka/admin/delete_acl_handle_spec.rb
- - spec/rdkafka/admin/delete_acl_report_spec.rb
- - spec/rdkafka/admin/delete_topic_handle_spec.rb
- - spec/rdkafka/admin/delete_topic_report_spec.rb
- - spec/rdkafka/admin/describe_acl_handle_spec.rb
- - spec/rdkafka/admin/describe_acl_report_spec.rb
- - spec/rdkafka/admin_spec.rb
- - spec/rdkafka/bindings_spec.rb
- - spec/rdkafka/callbacks_spec.rb
- - spec/rdkafka/config_spec.rb
- - spec/rdkafka/consumer/headers_spec.rb
- - spec/rdkafka/consumer/message_spec.rb
- - spec/rdkafka/consumer/partition_spec.rb
- - spec/rdkafka/consumer/topic_partition_list_spec.rb
- - spec/rdkafka/consumer_spec.rb
- - spec/rdkafka/error_spec.rb
- - spec/rdkafka/metadata_spec.rb
- - spec/rdkafka/native_kafka_spec.rb
- - spec/rdkafka/producer/delivery_handle_spec.rb
- - spec/rdkafka/producer/delivery_report_spec.rb
- - spec/rdkafka/producer/partitions_count_spec.rb
- - spec/rdkafka/producer_spec.rb
- - spec/spec_helper.rb
+ test_files: []