karafka-rdkafka 0.12.4 → 0.13.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/CHANGELOG.md +21 -2
  5. data/Gemfile +2 -0
  6. data/README.md +26 -0
  7. data/Rakefile +2 -0
  8. data/certs/cert_chain.pem +21 -21
  9. data/certs/karafka-pro.pem +11 -0
  10. data/ext/Rakefile +26 -53
  11. data/karafka-rdkafka.gemspec +2 -0
  12. data/lib/rdkafka/abstract_handle.rb +2 -0
  13. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  14. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  15. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  16. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  17. data/lib/rdkafka/admin.rb +95 -73
  18. data/lib/rdkafka/bindings.rb +52 -37
  19. data/lib/rdkafka/callbacks.rb +2 -0
  20. data/lib/rdkafka/config.rb +13 -10
  21. data/lib/rdkafka/consumer/headers.rb +24 -7
  22. data/lib/rdkafka/consumer/message.rb +3 -1
  23. data/lib/rdkafka/consumer/partition.rb +2 -0
  24. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  25. data/lib/rdkafka/consumer.rb +100 -44
  26. data/lib/rdkafka/error.rb +9 -0
  27. data/lib/rdkafka/metadata.rb +25 -2
  28. data/lib/rdkafka/native_kafka.rb +83 -0
  29. data/lib/rdkafka/producer/delivery_handle.rb +2 -0
  30. data/lib/rdkafka/producer/delivery_report.rb +3 -1
  31. data/lib/rdkafka/producer.rb +75 -12
  32. data/lib/rdkafka/version.rb +3 -1
  33. data/lib/rdkafka.rb +3 -1
  34. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  35. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  38. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  39. data/spec/rdkafka/admin_spec.rb +4 -3
  40. data/spec/rdkafka/bindings_spec.rb +2 -0
  41. data/spec/rdkafka/callbacks_spec.rb +2 -0
  42. data/spec/rdkafka/config_spec.rb +17 -2
  43. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  44. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  45. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  46. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  47. data/spec/rdkafka/consumer_spec.rb +124 -22
  48. data/spec/rdkafka/error_spec.rb +2 -0
  49. data/spec/rdkafka/metadata_spec.rb +2 -0
  50. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  51. data/spec/rdkafka/producer/delivery_handle_spec.rb +2 -0
  52. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -2
  53. data/spec/rdkafka/producer_spec.rb +118 -17
  54. data/spec/spec_helper.rb +17 -1
  55. data.tar.gz.sig +0 -0
  56. metadata +33 -33
  57. metadata.gz.sig +0 -0
  58. data/bin/console +0 -11
  59. data/dist/librdkafka_2.0.2.tar.gz +0 -0
  60. data/lib/rdkafka/producer/client.rb +0 -47
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "ostruct"
3
5
  require 'securerandom'
@@ -9,6 +11,10 @@ describe Rdkafka::Consumer do
9
11
  after { consumer.close }
10
12
  after { producer.close }
11
13
 
14
+ describe '#name' do
15
+ it { expect(consumer.name).to include('rdkafka#consumer-') }
16
+ end
17
+
12
18
  describe "#subscribe, #unsubscribe and #subscription" do
13
19
  it "should subscribe, unsubscribe and return the subscription" do
14
20
  expect(consumer.subscription).to be_empty
@@ -53,7 +59,7 @@ describe Rdkafka::Consumer do
53
59
 
54
60
  describe "#pause and #resume" do
55
61
  context "subscription" do
56
- let(:timeout) { 1000 }
62
+ let(:timeout) { 2000 }
57
63
 
58
64
  before { consumer.subscribe("consume_test_topic") }
59
65
  after { consumer.unsubscribe }
@@ -268,6 +274,28 @@ describe Rdkafka::Consumer do
268
274
  end
269
275
  end
270
276
 
277
+ describe '#assignment_lost?' do
278
+ it "should not return true as we do have an assignment" do
279
+ consumer.subscribe("consume_test_topic")
280
+ expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
281
+ list.add_topic("consume_test_topic")
282
+ end
283
+
284
+ expect(consumer.assignment_lost?).to eq false
285
+ consumer.unsubscribe
286
+ end
287
+
288
+ it "should not return true after voluntary unsubscribing" do
289
+ consumer.subscribe("consume_test_topic")
290
+ expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
291
+ list.add_topic("consume_test_topic")
292
+ end
293
+
294
+ consumer.unsubscribe
295
+ expect(consumer.assignment_lost?).to eq false
296
+ end
297
+ end
298
+
271
299
  describe "#close" do
272
300
  it "should close a consumer" do
273
301
  consumer.subscribe("consume_test_topic")
@@ -593,7 +621,7 @@ describe Rdkafka::Consumer do
593
621
  end
594
622
 
595
623
  describe "#poll with headers" do
596
- it "should return message with headers" do
624
+ it "should return message with headers using string keys (when produced with symbol keys)" do
597
625
  report = producer.produce(
598
626
  topic: "consume_test_topic",
599
627
  key: "key headers",
@@ -603,7 +631,20 @@ describe Rdkafka::Consumer do
603
631
  message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
604
632
  expect(message).to be
605
633
  expect(message.key).to eq('key headers')
606
- expect(message.headers).to include(foo: 'bar')
634
+ expect(message.headers).to include('foo' => 'bar')
635
+ end
636
+
637
+ it "should return message with headers using string keys (when produced with string keys)" do
638
+ report = producer.produce(
639
+ topic: "consume_test_topic",
640
+ key: "key headers",
641
+ headers: { 'foo' => 'bar' }
642
+ ).wait
643
+
644
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
645
+ expect(message).to be
646
+ expect(message.key).to eq('key headers')
647
+ expect(message.headers).to include('foo' => 'bar')
607
648
  end
608
649
 
609
650
  it "should return message with no headers" do
@@ -698,7 +739,7 @@ describe Rdkafka::Consumer do
698
739
  n.times do |i|
699
740
  handles << producer.produce(
700
741
  topic: topic_name,
701
- payload: Time.new.to_f.to_s,
742
+ payload: i % 10 == 0 ? nil : Time.new.to_f.to_s,
702
743
  key: i.to_s,
703
744
  partition: 0
704
745
  )
@@ -723,7 +764,8 @@ describe Rdkafka::Consumer do
723
764
  #
724
765
  # This is, in effect, an integration test and the subsequent specs are
725
766
  # unit tests.
726
- create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
767
+ admin = rdkafka_config.admin
768
+ create_topic_handle = admin.create_topic(topic_name, 1, 1)
727
769
  create_topic_handle.wait(max_wait_timeout: 15.0)
728
770
  consumer.subscribe(topic_name)
729
771
  produce_n 42
@@ -736,6 +778,7 @@ describe Rdkafka::Consumer do
736
778
  expect(all_yields.flatten.size).to eq 42
737
779
  expect(all_yields.size).to be > 4
738
780
  expect(all_yields.flatten.map(&:key)).to eq (0..41).map { |x| x.to_s }
781
+ admin.close
739
782
  end
740
783
 
741
784
  it "should batch poll results and yield arrays of messages" do
@@ -778,13 +821,15 @@ describe Rdkafka::Consumer do
778
821
  end
779
822
 
780
823
  it "should yield [] if nothing is received before the timeout" do
781
- create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
824
+ admin = rdkafka_config.admin
825
+ create_topic_handle = admin.create_topic(topic_name, 1, 1)
782
826
  create_topic_handle.wait(max_wait_timeout: 15.0)
783
827
  consumer.subscribe(topic_name)
784
828
  consumer.each_batch do |batch|
785
829
  expect(batch).to eq([])
786
830
  break
787
831
  end
832
+ admin.close
788
833
  end
789
834
 
790
835
  it "should yield batchs of max_items in size if messages are already fetched" do
@@ -861,6 +906,7 @@ describe Rdkafka::Consumer do
861
906
  expect(batches_yielded.first.size).to eq 2
862
907
  expect(exceptions_yielded.flatten.size).to eq 1
863
908
  expect(exceptions_yielded.flatten.first).to be_instance_of(Rdkafka::RdkafkaError)
909
+ consumer.close
864
910
  end
865
911
  end
866
912
 
@@ -902,6 +948,7 @@ describe Rdkafka::Consumer do
902
948
  expect(each_batch_iterations).to eq 0
903
949
  expect(batches_yielded.size).to eq 0
904
950
  expect(exceptions_yielded.size).to eq 0
951
+ consumer.close
905
952
  end
906
953
  end
907
954
  end
@@ -916,11 +963,11 @@ describe Rdkafka::Consumer do
916
963
  context "with a working listener" do
917
964
  let(:listener) do
918
965
  Struct.new(:queue) do
919
- def on_partitions_assigned(consumer, list)
966
+ def on_partitions_assigned(list)
920
967
  collect(:assign, list)
921
968
  end
922
969
 
923
- def on_partitions_revoked(consumer, list)
970
+ def on_partitions_revoked(list)
924
971
  collect(:revoke, list)
925
972
  end
926
973
 
@@ -944,12 +991,12 @@ describe Rdkafka::Consumer do
944
991
  context "with a broken listener" do
945
992
  let(:listener) do
946
993
  Struct.new(:queue) do
947
- def on_partitions_assigned(consumer, list)
994
+ def on_partitions_assigned(list)
948
995
  queue << :assigned
949
996
  raise 'boom'
950
997
  end
951
998
 
952
- def on_partitions_revoked(consumer, list)
999
+ def on_partitions_revoked(list)
953
1000
  queue << :revoked
954
1001
  raise 'boom'
955
1002
  end
@@ -962,18 +1009,6 @@ describe Rdkafka::Consumer do
962
1009
  expect(listener.queue).to eq([:assigned, :revoked])
963
1010
  end
964
1011
  end
965
-
966
- def notify_listener(listener)
967
- # 1. subscribe and poll
968
- consumer.subscribe("consume_test_topic")
969
- wait_for_assignment(consumer)
970
- consumer.poll(100)
971
-
972
- # 2. unsubscribe
973
- consumer.unsubscribe
974
- wait_for_unassignment(consumer)
975
- consumer.close
976
- end
977
1012
  end
978
1013
 
979
1014
  context "methods that should not be called after a consumer has been closed" do
@@ -993,6 +1028,7 @@ describe Rdkafka::Consumer do
993
1028
  :assignment => nil,
994
1029
  :committed => [],
995
1030
  :query_watermark_offsets => [ nil, nil ],
1031
+ :assignment_lost? => []
996
1032
  }.each do |method, args|
997
1033
  it "raises an exception if #{method} is called" do
998
1034
  expect {
@@ -1005,4 +1041,70 @@ describe Rdkafka::Consumer do
1005
1041
  end
1006
1042
  end
1007
1043
  end
1044
+
1045
+ it "provides a finalizer that closes the native kafka client" do
1046
+ expect(consumer.closed?).to eq(false)
1047
+
1048
+ consumer.finalizer.call("some-ignored-object-id")
1049
+
1050
+ expect(consumer.closed?).to eq(true)
1051
+ end
1052
+
1053
+ context "when the rebalance protocol is cooperative" do
1054
+ let(:consumer) do
1055
+ config = rdkafka_consumer_config(
1056
+ {
1057
+ :"partition.assignment.strategy" => "cooperative-sticky",
1058
+ :"debug" => "consumer",
1059
+ }
1060
+ )
1061
+ config.consumer_rebalance_listener = listener
1062
+ config.consumer
1063
+ end
1064
+
1065
+ let(:listener) do
1066
+ Struct.new(:queue) do
1067
+ def on_partitions_assigned(list)
1068
+ collect(:assign, list)
1069
+ end
1070
+
1071
+ def on_partitions_revoked(list)
1072
+ collect(:revoke, list)
1073
+ end
1074
+
1075
+ def collect(name, list)
1076
+ partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
1077
+ queue << ([name] + partitions)
1078
+ end
1079
+ end.new([])
1080
+ end
1081
+
1082
+ it "should be able to assign and unassign partitions using the cooperative partition assignment APIs" do
1083
+ notify_listener(listener) do
1084
+ handles = []
1085
+ 10.times do
1086
+ handles << producer.produce(
1087
+ topic: "consume_test_topic",
1088
+ payload: "payload 1",
1089
+ key: "key 1",
1090
+ partition: 0
1091
+ )
1092
+ end
1093
+ handles.each(&:wait)
1094
+
1095
+ consumer.subscribe("consume_test_topic")
1096
+ # Check the first 10 messages. Then close the consumer, which
1097
+ # should break the each loop.
1098
+ consumer.each_with_index do |message, i|
1099
+ expect(message).to be_a Rdkafka::Consumer::Message
1100
+ break if i == 10
1101
+ end
1102
+ end
1103
+
1104
+ expect(listener.queue).to eq([
1105
+ [:assign, "consume_test_topic", 0, 1, 2],
1106
+ [:revoke, "consume_test_topic", 0, 1, 2]
1107
+ ])
1108
+ end
1109
+ end
1008
1110
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::RdkafkaError do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "securerandom"
3
5
 
@@ -1,17 +1,16 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
- describe Rdkafka::Producer::Client do
5
+ describe Rdkafka::NativeKafka do
4
6
  let(:config) { rdkafka_producer_config }
5
7
  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
6
8
  let(:closing) { false }
7
9
  let(:thread) { double(Thread) }
8
10
 
9
- subject(:client) { described_class.new(native) }
11
+ subject(:client) { described_class.new(native, run_polling_thread: true) }
10
12
 
11
13
  before do
12
- allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
13
- allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
14
- allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
15
14
  allow(Thread).to receive(:new).and_return(thread)
16
15
 
17
16
  allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +18,8 @@ describe Rdkafka::Producer::Client do
19
18
  allow(thread).to receive(:abort_on_exception=).with(anything)
20
19
  end
21
20
 
21
+ after { client.close }
22
+
22
23
  context "defaults" do
23
24
  it "sets the thread to abort on exception" do
24
25
  expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +40,12 @@ describe Rdkafka::Producer::Client do
39
40
 
40
41
  client
41
42
  end
42
-
43
- it "polls the native with default 250ms timeout" do
44
- polling_loop_expects do
45
- expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
46
- end
47
- end
48
-
49
- it "check the out queue of native client" do
50
- polling_loop_expects do
51
- expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
52
- end
53
- end
54
- end
55
-
56
- def polling_loop_expects(&block)
57
- Thread.current[:closing] = true # this forces the loop break with line #12
58
-
59
- allow(Thread).to receive(:new).and_yield do |_|
60
- block.call
61
- end.and_return(thread)
62
-
63
- client
64
43
  end
65
44
 
66
- it "exposes `native` client" do
67
- expect(client.native).to eq(native)
45
+ it "exposes the inner client" do
46
+ client.with_inner do |inner|
47
+ expect(inner).to eq(native)
48
+ end
68
49
  end
69
50
 
70
51
  context "when client was not yet closed (`nil`)" do
@@ -74,7 +55,7 @@ describe Rdkafka::Producer::Client do
74
55
 
75
56
  context "and attempt to close" do
76
57
  it "calls the `destroy` binding" do
77
- expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
58
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
78
59
 
79
60
  client.close
80
61
  end
@@ -94,7 +75,6 @@ describe Rdkafka::Producer::Client do
94
75
  it "closes and unassign the native client" do
95
76
  client.close
96
77
 
97
- expect(client.native).to eq(nil)
98
78
  expect(client.closed?).to eq(true)
99
79
  end
100
80
  end
@@ -109,7 +89,7 @@ describe Rdkafka::Producer::Client do
109
89
 
110
90
  context "and attempt to close again" do
111
91
  it "does not call the `destroy` binding" do
112
- expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
92
+ expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
113
93
 
114
94
  client.close
115
95
  end
@@ -129,13 +109,12 @@ describe Rdkafka::Producer::Client do
129
109
  it "does not close and unassign the native client again" do
130
110
  client.close
131
111
 
132
- expect(client.native).to eq(nil)
133
112
  expect(client.closed?).to eq(true)
134
113
  end
135
114
  end
136
115
  end
137
116
 
138
- it "provide a finalizer Proc that closes the `native` client" do
117
+ it "provides a finalizer that closes the native kafka client" do
139
118
  expect(client.closed?).to eq(false)
140
119
 
141
120
  client.finalizer.call("some-ignored-object-id")
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Producer::DeliveryHandle do
@@ -1,7 +1,9 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Producer::DeliveryReport do
4
- subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", "error") }
6
+ subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
5
7
 
6
8
  it "should get the partition" do
7
9
  expect(subject.partition).to eq 2
@@ -16,6 +18,6 @@ describe Rdkafka::Producer::DeliveryReport do
16
18
  end
17
19
 
18
20
  it "should get the error" do
19
- expect(subject.error).to eq "error"
21
+ expect(subject.error).to eq -1
20
22
  end
21
23
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "zlib"
3
5
 
@@ -7,11 +9,16 @@ describe Rdkafka::Producer do
7
9
 
8
10
  after do
9
11
  # Registry should always end up being empty
10
- expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
12
+ registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
13
+ expect(registry).to be_empty, registry.inspect
11
14
  producer.close
12
15
  consumer.close
13
16
  end
14
17
 
18
+ describe '#name' do
19
+ it { expect(producer.name).to include('rdkafka#producer-') }
20
+ end
21
+
15
22
  context "delivery callback" do
16
23
  context "with a proc/lambda" do
17
24
  it "should set the callback" do
@@ -182,10 +189,11 @@ describe Rdkafka::Producer do
182
189
  expect(report.partition).to eq 1
183
190
  expect(report.offset).to be >= 0
184
191
 
185
- # Close producer
192
+ # Flush and close producer
193
+ producer.flush
186
194
  producer.close
187
195
 
188
- # Consume message and verify it's content
196
+ # Consume message and verify its content
189
197
  message = wait_for_message(
190
198
  topic: "produce_test_topic",
191
199
  delivery_report: report,
@@ -209,7 +217,7 @@ describe Rdkafka::Producer do
209
217
  )
210
218
  report = handle.wait(max_wait_timeout: 5)
211
219
 
212
- # Consume message and verify it's content
220
+ # Consume message and verify its content
213
221
  message = wait_for_message(
214
222
  topic: "produce_test_topic",
215
223
  delivery_report: report,
@@ -253,6 +261,28 @@ describe Rdkafka::Producer do
253
261
  expect(messages[2].key).to eq key
254
262
  end
255
263
 
264
+ it "should produce a message with empty string without crashing" do
265
+ messages = [{key: 'a', partition_key: ''}]
266
+
267
+ messages = messages.map do |m|
268
+ handle = producer.produce(
269
+ topic: "partitioner_test_topic",
270
+ payload: "payload partition",
271
+ key: m[:key],
272
+ partition_key: m[:partition_key]
273
+ )
274
+ report = handle.wait(max_wait_timeout: 5)
275
+
276
+ wait_for_message(
277
+ topic: "partitioner_test_topic",
278
+ delivery_report: report,
279
+ )
280
+ end
281
+
282
+ expect(messages[0].partition).to eq 0
283
+ expect(messages[0].key).to eq 'a'
284
+ end
285
+
256
286
  it "should produce a message with utf-8 encoding" do
257
287
  handle = producer.produce(
258
288
  topic: "produce_test_topic",
@@ -261,7 +291,7 @@ describe Rdkafka::Producer do
261
291
  )
262
292
  report = handle.wait(max_wait_timeout: 5)
263
293
 
264
- # Consume message and verify it's content
294
+ # Consume message and verify its content
265
295
  message = wait_for_message(
266
296
  topic: "produce_test_topic",
267
297
  delivery_report: report,
@@ -294,7 +324,7 @@ describe Rdkafka::Producer do
294
324
  )
295
325
  report = handle.wait(max_wait_timeout: 5)
296
326
 
297
- # Consume message and verify it's content
327
+ # Consume message and verify its content
298
328
  message = wait_for_message(
299
329
  topic: "produce_test_topic",
300
330
  delivery_report: report,
@@ -315,7 +345,7 @@ describe Rdkafka::Producer do
315
345
  )
316
346
  report = handle.wait(max_wait_timeout: 5)
317
347
 
318
- # Consume message and verify it's content
348
+ # Consume message and verify its content
319
349
  message = wait_for_message(
320
350
  topic: "produce_test_topic",
321
351
  delivery_report: report,
@@ -335,7 +365,7 @@ describe Rdkafka::Producer do
335
365
  )
336
366
  report = handle.wait(max_wait_timeout: 5)
337
367
 
338
- # Consume message and verify it's content
368
+ # Consume message and verify its content
339
369
  message = wait_for_message(
340
370
  topic: "produce_test_topic",
341
371
  delivery_report: report,
@@ -353,7 +383,7 @@ describe Rdkafka::Producer do
353
383
  )
354
384
  report = handle.wait(max_wait_timeout: 5)
355
385
 
356
- # Consume message and verify it's content
386
+ # Consume message and verify its content
357
387
  message = wait_for_message(
358
388
  topic: "produce_test_topic",
359
389
  delivery_report: report,
@@ -373,7 +403,7 @@ describe Rdkafka::Producer do
373
403
  )
374
404
  report = handle.wait(max_wait_timeout: 5)
375
405
 
376
- # Consume message and verify it's content
406
+ # Consume message and verify its content
377
407
  message = wait_for_message(
378
408
  topic: "produce_test_topic",
379
409
  delivery_report: report,
@@ -382,9 +412,9 @@ describe Rdkafka::Producer do
382
412
 
383
413
  expect(message.payload).to eq "payload headers"
384
414
  expect(message.key).to eq "key headers"
385
- expect(message.headers[:foo]).to eq "bar"
386
- expect(message.headers[:baz]).to eq "foobar"
387
- expect(message.headers[:foobar]).to be_nil
415
+ expect(message.headers["foo"]).to eq "bar"
416
+ expect(message.headers["baz"]).to eq "foobar"
417
+ expect(message.headers["foobar"]).to be_nil
388
418
  end
389
419
 
390
420
  it "should produce a message with empty headers" do
@@ -396,7 +426,7 @@ describe Rdkafka::Producer do
396
426
  )
397
427
  report = handle.wait(max_wait_timeout: 5)
398
428
 
399
- # Consume message and verify it's content
429
+ # Consume message and verify its content
400
430
  message = wait_for_message(
401
431
  topic: "produce_test_topic",
402
432
  delivery_report: report,
@@ -434,10 +464,10 @@ describe Rdkafka::Producer do
434
464
  # wait for and check the message in the main process.
435
465
  reader, writer = IO.pipe
436
466
 
437
- fork do
467
+ pid = fork do
438
468
  reader.close
439
469
 
440
- # Avoids sharing the socket between processes.
470
+ # Avoid sharing the client between processes.
441
471
  producer = rdkafka_producer_config.producer
442
472
 
443
473
  handle = producer.produce(
@@ -456,8 +486,10 @@ describe Rdkafka::Producer do
456
486
 
457
487
  writer.write(report_json)
458
488
  writer.close
489
+ producer.flush
459
490
  producer.close
460
491
  end
492
+ Process.wait(pid)
461
493
 
462
494
  writer.close
463
495
  report_hash = JSON.parse(reader.read)
@@ -469,7 +501,7 @@ describe Rdkafka::Producer do
469
501
 
470
502
  reader.close
471
503
 
472
- # Consume message and verify it's content
504
+ # Consume message and verify its content
473
505
  message = wait_for_message(
474
506
  topic: "produce_test_topic",
475
507
  delivery_report: report,
@@ -526,4 +558,73 @@ describe Rdkafka::Producer do
526
558
  end
527
559
  end
528
560
  end
561
+
562
+ describe '#partition_count' do
563
+ it { expect(producer.partition_count('example_topic')).to eq(1) }
564
+
565
+ context 'when the partition count value is already cached' do
566
+ before do
567
+ producer.partition_count('example_topic')
568
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
569
+ end
570
+
571
+ it 'expect not to query it again' do
572
+ producer.partition_count('example_topic')
573
+ expect(::Rdkafka::Metadata).not_to have_received(:new)
574
+ end
575
+ end
576
+
577
+ context 'when the partition count value was cached but time expired' do
578
+ before do
579
+ allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
580
+ producer.partition_count('example_topic')
581
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
582
+ end
583
+
584
+ it 'expect not to query it again' do
585
+ producer.partition_count('example_topic')
586
+ expect(::Rdkafka::Metadata).to have_received(:new)
587
+ end
588
+ end
589
+
590
+ context 'when the partition count value was cached and time did not expire' do
591
+ before do
592
+ allow(::Process).to receive(:clock_gettime).and_return(0, 29.001)
593
+ producer.partition_count('example_topic')
594
+ allow(::Rdkafka::Metadata).to receive(:new).and_call_original
595
+ end
596
+
597
+ it 'expect not to query it again' do
598
+ producer.partition_count('example_topic')
599
+ expect(::Rdkafka::Metadata).not_to have_received(:new)
600
+ end
601
+ end
602
+ end
603
+
604
+ describe 'metadata fetch request recovery' do
605
+ subject(:partition_count) { producer.partition_count('example_topic') }
606
+
607
+ describe 'metadata initialization recovery' do
608
+ context 'when all good' do
609
+ it { expect(partition_count).to eq(1) }
610
+ end
611
+
612
+ context 'when we fail for the first time with handled error' do
613
+ before do
614
+ raised = false
615
+
616
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_wrap_original do |m, *args|
617
+ if raised
618
+ m.call(*args)
619
+ else
620
+ raised = true
621
+ -185
622
+ end
623
+ end
624
+ end
625
+
626
+ it { expect(partition_count).to eq(1) }
627
+ end
628
+ end
629
+ end
529
630
  end
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  unless ENV["CI"] == "true"
2
4
  require "simplecov"
3
5
  SimpleCov.start do
@@ -71,7 +73,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
71
73
  end
72
74
 
73
75
  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
74
- new_consumer = !!consumer
76
+ new_consumer = consumer.nil?
75
77
  consumer ||= rdkafka_consumer_config.consumer
76
78
  consumer.subscribe(topic)
77
79
  timeout = Time.now.to_i + timeout_in_seconds
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
104
106
  end
105
107
  end
106
108
 
109
+ def notify_listener(listener, &block)
110
+ # 1. subscribe and poll
111
+ consumer.subscribe("consume_test_topic")
112
+ wait_for_assignment(consumer)
113
+ consumer.poll(100)
114
+
115
+ block.call if block
116
+
117
+ # 2. unsubscribe
118
+ consumer.unsubscribe
119
+ wait_for_unassignment(consumer)
120
+ consumer.close
121
+ end
122
+
107
123
  RSpec.configure do |config|
108
124
  config.filter_run focus: true
109
125
  config.run_all_when_everything_filtered = true