rdkafka 0.12.0 → 0.13.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (54)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +7 -3
  3. data/CHANGELOG.md +18 -0
  4. data/Gemfile +2 -0
  5. data/README.md +26 -0
  6. data/Rakefile +2 -0
  7. data/ext/Rakefile +2 -0
  8. data/lib/rdkafka/abstract_handle.rb +2 -0
  9. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  10. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  11. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  12. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  13. data/lib/rdkafka/admin.rb +48 -31
  14. data/lib/rdkafka/bindings.rb +50 -37
  15. data/lib/rdkafka/callbacks.rb +7 -1
  16. data/lib/rdkafka/config.rb +13 -10
  17. data/lib/rdkafka/consumer/headers.rb +24 -7
  18. data/lib/rdkafka/consumer/message.rb +3 -1
  19. data/lib/rdkafka/consumer/partition.rb +2 -0
  20. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  21. data/lib/rdkafka/consumer.rb +86 -44
  22. data/lib/rdkafka/error.rb +15 -0
  23. data/lib/rdkafka/metadata.rb +4 -2
  24. data/lib/rdkafka/native_kafka.rb +115 -0
  25. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  26. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  27. data/lib/rdkafka/producer.rb +35 -13
  28. data/lib/rdkafka/version.rb +5 -3
  29. data/lib/rdkafka.rb +3 -1
  30. data/rdkafka.gemspec +2 -0
  31. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  32. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  33. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  34. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  35. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  36. data/spec/rdkafka/admin_spec.rb +4 -3
  37. data/spec/rdkafka/bindings_spec.rb +2 -0
  38. data/spec/rdkafka/callbacks_spec.rb +2 -0
  39. data/spec/rdkafka/config_spec.rb +17 -2
  40. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  41. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  42. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  43. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  44. data/spec/rdkafka/consumer_spec.rb +120 -22
  45. data/spec/rdkafka/error_spec.rb +2 -0
  46. data/spec/rdkafka/metadata_spec.rb +2 -0
  47. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  48. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  49. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  50. data/spec/rdkafka/producer_spec.rb +51 -19
  51. data/spec/spec_helper.rb +17 -1
  52. metadata +12 -12
  53. data/bin/console +0 -11
  54. data/lib/rdkafka/producer/client.rb +0 -47
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Admin::DeleteTopicHandle do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Admin::DeleteTopicReport do
@@ -1,9 +1,11 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "ostruct"
3
5
 
4
6
  describe Rdkafka::Admin do
5
- let(:config) { rdkafka_config }
6
- let(:admin) { config.admin }
7
+ let(:config) { rdkafka_config }
8
+ let(:admin) { config.admin }
7
9
 
8
10
  after do
9
11
  # Registry should always end up being empty
@@ -174,7 +176,6 @@ describe Rdkafka::Admin do
174
176
  end
175
177
  end
176
178
 
177
-
178
179
  it "deletes a topic that was newly created" do
179
180
  create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
180
181
  create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require 'zlib'
3
5
 
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Callbacks do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Config do
@@ -148,11 +150,24 @@ describe Rdkafka::Config do
148
150
  }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
149
151
  end
150
152
 
153
+ it "allows string partitioner key" do
154
+ expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
155
+ config = Rdkafka::Config.new("partitioner" => "murmur2")
156
+ config.producer.close
157
+ end
158
+
159
+ it "allows symbol partitioner key" do
160
+ expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
161
+ config = Rdkafka::Config.new(:partitioner => "murmur2")
162
+ config.producer.close
163
+ end
164
+
151
165
  it "should allow configuring zstd compression" do
152
166
  config = Rdkafka::Config.new('compression.codec' => 'zstd')
153
167
  begin
154
- expect(config.producer).to be_a Rdkafka::Producer
155
- config.producer.close
168
+ producer = config.producer
169
+ expect(producer).to be_a Rdkafka::Producer
170
+ producer.close
156
171
  rescue Rdkafka::Config::ConfigError => ex
157
172
  pending "Zstd compression not supported on this machine"
158
173
  raise ex
@@ -0,0 +1,62 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "spec_helper"
4
+
5
+ describe Rdkafka::Consumer::Headers do
6
+ let(:headers) do
7
+ { # Note String keys!
8
+ "version" => "2.1.3",
9
+ "type" => "String"
10
+ }
11
+ end
12
+ let(:native_message) { double('native message') }
13
+ let(:headers_ptr) { double('headers pointer') }
14
+
15
+ describe '.from_native' do
16
+ before do
17
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
18
+ expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
19
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
20
+ end
21
+
22
+ expect(Rdkafka::Bindings).to \
23
+ receive(:rd_kafka_header_get_all)
24
+ .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
25
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
26
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
27
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
28
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
29
+ end
30
+
31
+ expect(Rdkafka::Bindings).to \
32
+ receive(:rd_kafka_header_get_all)
33
+ .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
34
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
35
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
36
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
37
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
38
+ end
39
+
40
+ expect(Rdkafka::Bindings).to \
41
+ receive(:rd_kafka_header_get_all)
42
+ .with(headers_ptr, 2, anything, anything, anything)
43
+ .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
44
+ end
45
+
46
+ subject { described_class.from_native(native_message) }
47
+
48
+ it { is_expected.to eq(headers) }
49
+ it { is_expected.to be_frozen }
50
+
51
+ it 'allows String key' do
52
+ expect(subject['version']).to eq("2.1.3")
53
+ end
54
+
55
+ it 'allows Symbol key, but warns' do
56
+ expect(Kernel).to \
57
+ receive(:warn).with("rdkafka deprecation warning: header access with Symbol key :version treated as a String. " \
58
+ "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
59
+ expect(subject[:version]).to eq("2.1.3")
60
+ end
61
+ end
62
+ end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Consumer::Message do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Consumer::Partition do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Consumer::TopicPartitionList do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "ostruct"
3
5
  require 'securerandom'
@@ -53,7 +55,7 @@ describe Rdkafka::Consumer do
53
55
 
54
56
  describe "#pause and #resume" do
55
57
  context "subscription" do
56
- let(:timeout) { 1000 }
58
+ let(:timeout) { 2000 }
57
59
 
58
60
  before { consumer.subscribe("consume_test_topic") }
59
61
  after { consumer.unsubscribe }
@@ -284,6 +286,29 @@ describe Rdkafka::Consumer do
284
286
  consumer.poll(100)
285
287
  }.to raise_error(Rdkafka::ClosedConsumerError, /poll/)
286
288
  end
289
+
290
+ context 'when there are outgoing operations in other threads' do
291
+ it 'should wait and not crash' do
292
+ times = []
293
+
294
+ # Run a long running poll
295
+ thread = Thread.new do
296
+ times << Time.now
297
+ consumer.subscribe("empty_test_topic")
298
+ times << Time.now
299
+ consumer.poll(1_000)
300
+ times << Time.now
301
+ end
302
+
303
+ # Make sure it starts before we close
304
+ sleep(0.1)
305
+ consumer.close
306
+ close_time = Time.now
307
+ thread.join
308
+
309
+ times.each { |op_time| expect(op_time).to be < close_time }
310
+ end
311
+ end
287
312
  end
288
313
 
289
314
  describe "#commit, #committed and #store_offset" do
@@ -593,7 +618,7 @@ describe Rdkafka::Consumer do
593
618
  end
594
619
 
595
620
  describe "#poll with headers" do
596
- it "should return message with headers" do
621
+ it "should return message with headers using string keys (when produced with symbol keys)" do
597
622
  report = producer.produce(
598
623
  topic: "consume_test_topic",
599
624
  key: "key headers",
@@ -603,7 +628,20 @@ describe Rdkafka::Consumer do
603
628
  message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
604
629
  expect(message).to be
605
630
  expect(message.key).to eq('key headers')
606
- expect(message.headers).to include(foo: 'bar')
631
+ expect(message.headers).to include('foo' => 'bar')
632
+ end
633
+
634
+ it "should return message with headers using string keys (when produced with string keys)" do
635
+ report = producer.produce(
636
+ topic: "consume_test_topic",
637
+ key: "key headers",
638
+ headers: { 'foo' => 'bar' }
639
+ ).wait
640
+
641
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
642
+ expect(message).to be
643
+ expect(message.key).to eq('key headers')
644
+ expect(message.headers).to include('foo' => 'bar')
607
645
  end
608
646
 
609
647
  it "should return message with no headers" do
@@ -698,7 +736,7 @@ describe Rdkafka::Consumer do
698
736
  n.times do |i|
699
737
  handles << producer.produce(
700
738
  topic: topic_name,
701
- payload: Time.new.to_f.to_s,
739
+ payload: i % 10 == 0 ? nil : Time.new.to_f.to_s,
702
740
  key: i.to_s,
703
741
  partition: 0
704
742
  )
@@ -723,7 +761,8 @@ describe Rdkafka::Consumer do
723
761
  #
724
762
  # This is, in effect, an integration test and the subsequent specs are
725
763
  # unit tests.
726
- create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
764
+ admin = rdkafka_config.admin
765
+ create_topic_handle = admin.create_topic(topic_name, 1, 1)
727
766
  create_topic_handle.wait(max_wait_timeout: 15.0)
728
767
  consumer.subscribe(topic_name)
729
768
  produce_n 42
@@ -736,6 +775,7 @@ describe Rdkafka::Consumer do
736
775
  expect(all_yields.flatten.size).to eq 42
737
776
  expect(all_yields.size).to be > 4
738
777
  expect(all_yields.flatten.map(&:key)).to eq (0..41).map { |x| x.to_s }
778
+ admin.close
739
779
  end
740
780
 
741
781
  it "should batch poll results and yield arrays of messages" do
@@ -778,13 +818,15 @@ describe Rdkafka::Consumer do
778
818
  end
779
819
 
780
820
  it "should yield [] if nothing is received before the timeout" do
781
- create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
821
+ admin = rdkafka_config.admin
822
+ create_topic_handle = admin.create_topic(topic_name, 1, 1)
782
823
  create_topic_handle.wait(max_wait_timeout: 15.0)
783
824
  consumer.subscribe(topic_name)
784
825
  consumer.each_batch do |batch|
785
826
  expect(batch).to eq([])
786
827
  break
787
828
  end
829
+ admin.close
788
830
  end
789
831
 
790
832
  it "should yield batchs of max_items in size if messages are already fetched" do
@@ -861,6 +903,7 @@ describe Rdkafka::Consumer do
861
903
  expect(batches_yielded.first.size).to eq 2
862
904
  expect(exceptions_yielded.flatten.size).to eq 1
863
905
  expect(exceptions_yielded.flatten.first).to be_instance_of(Rdkafka::RdkafkaError)
906
+ consumer.close
864
907
  end
865
908
  end
866
909
 
@@ -902,6 +945,7 @@ describe Rdkafka::Consumer do
902
945
  expect(each_batch_iterations).to eq 0
903
946
  expect(batches_yielded.size).to eq 0
904
947
  expect(exceptions_yielded.size).to eq 0
948
+ consumer.close
905
949
  end
906
950
  end
907
951
  end
@@ -916,11 +960,11 @@ describe Rdkafka::Consumer do
916
960
  context "with a working listener" do
917
961
  let(:listener) do
918
962
  Struct.new(:queue) do
919
- def on_partitions_assigned(consumer, list)
963
+ def on_partitions_assigned(list)
920
964
  collect(:assign, list)
921
965
  end
922
966
 
923
- def on_partitions_revoked(consumer, list)
967
+ def on_partitions_revoked(list)
924
968
  collect(:revoke, list)
925
969
  end
926
970
 
@@ -944,12 +988,12 @@ describe Rdkafka::Consumer do
944
988
  context "with a broken listener" do
945
989
  let(:listener) do
946
990
  Struct.new(:queue) do
947
- def on_partitions_assigned(consumer, list)
991
+ def on_partitions_assigned(list)
948
992
  queue << :assigned
949
993
  raise 'boom'
950
994
  end
951
995
 
952
- def on_partitions_revoked(consumer, list)
996
+ def on_partitions_revoked(list)
953
997
  queue << :revoked
954
998
  raise 'boom'
955
999
  end
@@ -962,18 +1006,6 @@ describe Rdkafka::Consumer do
962
1006
  expect(listener.queue).to eq([:assigned, :revoked])
963
1007
  end
964
1008
  end
965
-
966
- def notify_listener(listener)
967
- # 1. subscribe and poll
968
- consumer.subscribe("consume_test_topic")
969
- wait_for_assignment(consumer)
970
- consumer.poll(100)
971
-
972
- # 2. unsubscribe
973
- consumer.unsubscribe
974
- wait_for_unassignment(consumer)
975
- consumer.close
976
- end
977
1009
  end
978
1010
 
979
1011
  context "methods that should not be called after a consumer has been closed" do
@@ -1005,4 +1037,70 @@ describe Rdkafka::Consumer do
1005
1037
  end
1006
1038
  end
1007
1039
  end
1040
+
1041
+ it "provides a finalizer that closes the native kafka client" do
1042
+ expect(consumer.closed?).to eq(false)
1043
+
1044
+ consumer.finalizer.call("some-ignored-object-id")
1045
+
1046
+ expect(consumer.closed?).to eq(true)
1047
+ end
1048
+
1049
+ context "when the rebalance protocol is cooperative" do
1050
+ let(:consumer) do
1051
+ config = rdkafka_consumer_config(
1052
+ {
1053
+ :"partition.assignment.strategy" => "cooperative-sticky",
1054
+ :"debug" => "consumer",
1055
+ }
1056
+ )
1057
+ config.consumer_rebalance_listener = listener
1058
+ config.consumer
1059
+ end
1060
+
1061
+ let(:listener) do
1062
+ Struct.new(:queue) do
1063
+ def on_partitions_assigned(list)
1064
+ collect(:assign, list)
1065
+ end
1066
+
1067
+ def on_partitions_revoked(list)
1068
+ collect(:revoke, list)
1069
+ end
1070
+
1071
+ def collect(name, list)
1072
+ partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
1073
+ queue << ([name] + partitions)
1074
+ end
1075
+ end.new([])
1076
+ end
1077
+
1078
+ it "should be able to assign and unassign partitions using the cooperative partition assignment APIs" do
1079
+ notify_listener(listener) do
1080
+ handles = []
1081
+ 10.times do
1082
+ handles << producer.produce(
1083
+ topic: "consume_test_topic",
1084
+ payload: "payload 1",
1085
+ key: "key 1",
1086
+ partition: 0
1087
+ )
1088
+ end
1089
+ handles.each(&:wait)
1090
+
1091
+ consumer.subscribe("consume_test_topic")
1092
+ # Check the first 10 messages. Then close the consumer, which
1093
+ # should break the each loop.
1094
+ consumer.each_with_index do |message, i|
1095
+ expect(message).to be_a Rdkafka::Consumer::Message
1096
+ break if i == 10
1097
+ end
1098
+ end
1099
+
1100
+ expect(listener.queue).to eq([
1101
+ [:assign, "consume_test_topic", 0, 1, 2],
1102
+ [:revoke, "consume_test_topic", 0, 1, 2]
1103
+ ])
1104
+ end
1105
+ end
1008
1106
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::RdkafkaError do
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
  require "securerandom"
3
5
 
@@ -1,17 +1,16 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
- describe Rdkafka::Producer::Client do
5
+ describe Rdkafka::NativeKafka do
4
6
  let(:config) { rdkafka_producer_config }
5
7
  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
6
8
  let(:closing) { false }
7
9
  let(:thread) { double(Thread) }
8
10
 
9
- subject(:client) { described_class.new(native) }
11
+ subject(:client) { described_class.new(native, run_polling_thread: true) }
10
12
 
11
13
  before do
12
- allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
13
- allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
14
- allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
15
14
  allow(Thread).to receive(:new).and_return(thread)
16
15
 
17
16
  allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +18,8 @@ describe Rdkafka::Producer::Client do
19
18
  allow(thread).to receive(:abort_on_exception=).with(anything)
20
19
  end
21
20
 
21
+ after { client.close }
22
+
22
23
  context "defaults" do
23
24
  it "sets the thread to abort on exception" do
24
25
  expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +40,12 @@ describe Rdkafka::Producer::Client do
39
40
 
40
41
  client
41
42
  end
42
-
43
- it "polls the native with default 250ms timeout" do
44
- polling_loop_expects do
45
- expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
46
- end
47
- end
48
-
49
- it "check the out queue of native client" do
50
- polling_loop_expects do
51
- expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
52
- end
53
- end
54
- end
55
-
56
- def polling_loop_expects(&block)
57
- Thread.current[:closing] = true # this forces the loop break with line #12
58
-
59
- allow(Thread).to receive(:new).and_yield do |_|
60
- block.call
61
- end.and_return(thread)
62
-
63
- client
64
43
  end
65
44
 
66
- it "exposes `native` client" do
67
- expect(client.native).to eq(native)
45
+ it "exposes the inner client" do
46
+ client.with_inner do |inner|
47
+ expect(inner).to eq(native)
48
+ end
68
49
  end
69
50
 
70
51
  context "when client was not yet closed (`nil`)" do
@@ -74,7 +55,7 @@ describe Rdkafka::Producer::Client do
74
55
 
75
56
  context "and attempt to close" do
76
57
  it "calls the `destroy` binding" do
77
- expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
58
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
78
59
 
79
60
  client.close
80
61
  end
@@ -94,7 +75,6 @@ describe Rdkafka::Producer::Client do
94
75
  it "closes and unassign the native client" do
95
76
  client.close
96
77
 
97
- expect(client.native).to eq(nil)
98
78
  expect(client.closed?).to eq(true)
99
79
  end
100
80
  end
@@ -109,7 +89,7 @@ describe Rdkafka::Producer::Client do
109
89
 
110
90
  context "and attempt to close again" do
111
91
  it "does not call the `destroy` binding" do
112
- expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
92
+ expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
113
93
 
114
94
  client.close
115
95
  end
@@ -129,13 +109,12 @@ describe Rdkafka::Producer::Client do
129
109
  it "does not close and unassign the native client again" do
130
110
  client.close
131
111
 
132
- expect(client.native).to eq(nil)
133
112
  expect(client.closed?).to eq(true)
134
113
  end
135
114
  end
136
115
  end
137
116
 
138
- it "provide a finalizer Proc that closes the `native` client" do
117
+ it "provides a finalizer that closes the native kafka client" do
139
118
  expect(client.closed?).to eq(false)
140
119
 
141
120
  client.finalizer.call("some-ignored-object-id")
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
9
11
  handle[:response] = response
10
12
  handle[:partition] = 2
11
13
  handle[:offset] = 100
14
+ handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
12
15
  end
13
16
  end
14
17
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
29
32
 
30
33
  expect(report.partition).to eq(2)
31
34
  expect(report.offset).to eq(100)
35
+ expect(report.topic_name).to eq("produce_test_topic")
32
36
  end
33
37
 
34
38
  it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
36
40
 
37
41
  expect(report.partition).to eq(2)
38
42
  expect(report.offset).to eq(100)
43
+ expect(report.topic_name).to eq("produce_test_topic")
39
44
  end
40
45
  end
41
46
  end
@@ -1,7 +1,9 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "spec_helper"
2
4
 
3
5
  describe Rdkafka::Producer::DeliveryReport do
4
- subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
6
+ subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
5
7
 
6
8
  it "should get the partition" do
7
9
  expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
11
13
  expect(subject.offset).to eq 100
12
14
  end
13
15
 
16
+ it "should get the topic_name" do
17
+ expect(subject.topic_name).to eq "topic"
18
+ end
19
+
14
20
  it "should get the error" do
15
- expect(subject.error).to eq "error"
21
+ expect(subject.error).to eq -1
16
22
  end
17
23
  end