rdkafka 0.12.0 → 0.13.0.beta.7

Files changed (54)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +7 -3
  3. data/CHANGELOG.md +18 -0
  4. data/Gemfile +2 -0
  5. data/README.md +26 -0
  6. data/Rakefile +2 -0
  7. data/ext/Rakefile +2 -0
  8. data/lib/rdkafka/abstract_handle.rb +2 -0
  9. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  10. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  11. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  12. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  13. data/lib/rdkafka/admin.rb +48 -31
  14. data/lib/rdkafka/bindings.rb +50 -37
  15. data/lib/rdkafka/callbacks.rb +7 -1
  16. data/lib/rdkafka/config.rb +13 -10
  17. data/lib/rdkafka/consumer/headers.rb +24 -7
  18. data/lib/rdkafka/consumer/message.rb +3 -1
  19. data/lib/rdkafka/consumer/partition.rb +2 -0
  20. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  21. data/lib/rdkafka/consumer.rb +84 -44
  22. data/lib/rdkafka/error.rb +9 -0
  23. data/lib/rdkafka/metadata.rb +4 -2
  24. data/lib/rdkafka/native_kafka.rb +83 -0
  25. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  26. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  27. data/lib/rdkafka/producer.rb +35 -13
  28. data/lib/rdkafka/version.rb +5 -3
  29. data/lib/rdkafka.rb +3 -1
  30. data/rdkafka.gemspec +2 -0
  31. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  32. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  33. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  34. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  35. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  36. data/spec/rdkafka/admin_spec.rb +4 -3
  37. data/spec/rdkafka/bindings_spec.rb +2 -0
  38. data/spec/rdkafka/callbacks_spec.rb +2 -0
  39. data/spec/rdkafka/config_spec.rb +17 -2
  40. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  41. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  42. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  43. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  44. data/spec/rdkafka/consumer_spec.rb +97 -22
  45. data/spec/rdkafka/error_spec.rb +2 -0
  46. data/spec/rdkafka/metadata_spec.rb +2 -0
  47. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  48. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  49. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  50. data/spec/rdkafka/producer_spec.rb +51 -19
  51. data/spec/spec_helper.rb +17 -1
  52. metadata +14 -14
  53. data/bin/console +0 -11
  54. data/lib/rdkafka/producer/client.rb +0 -47
data/spec/rdkafka/bindings_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require 'zlib'
 
data/spec/rdkafka/callbacks_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Callbacks do
data/spec/rdkafka/config_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Config do
@@ -148,11 +150,24 @@ describe Rdkafka::Config do
     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
   end
 
+  it "allows string partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+    config = Rdkafka::Config.new("partitioner" => "murmur2")
+    config.producer.close
+  end
+
+  it "allows symbol partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+    config = Rdkafka::Config.new(:partitioner => "murmur2")
+    config.producer.close
+  end
+
   it "should allow configuring zstd compression" do
     config = Rdkafka::Config.new('compression.codec' => 'zstd')
     begin
-      expect(config.producer).to be_a Rdkafka::Producer
-      config.producer.close
+      producer = config.producer
+      expect(producer).to be_a Rdkafka::Producer
+      producer.close
     rescue Rdkafka::Config::ConfigError => ex
       pending "Zstd compression not supported on this machine"
       raise ex
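
Note: as the new specs above show, 0.13.0 lets the partitioner be selected through the "partitioner" configuration key, passed with either a String or a Symbol key. A minimal sketch, with a placeholder broker address:

    require "rdkafka"

    # "partitioner" may be given with a String or Symbol key; both select
    # librdkafka's partitioner by name (here "murmur2").
    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092", # placeholder
      "partitioner"       => "murmur2"
    )

    producer = config.producer
    # ... produce messages ...
    producer.close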
data/spec/rdkafka/consumer/headers_spec.rb (new file)
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Consumer::Headers do
+  let(:headers) do
+    { # Note String keys!
+      "version" => "2.1.3",
+      "type" => "String"
+    }
+  end
+  let(:native_message) { double('native message') }
+  let(:headers_ptr) { double('headers pointer') }
+
+  describe '.from_native' do
+    before do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
+        expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+        expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
+        expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
+        expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+        expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
+        expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
+        expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 2, anything, anything, anything)
+          .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
+    end
+
+    subject { described_class.from_native(native_message) }
+
+    it { is_expected.to eq(headers) }
+    it { is_expected.to be_frozen }
+
+    it 'allows String key' do
+      expect(subject['version']).to eq("2.1.3")
+    end
+
+    it 'allows Symbol key, but warns' do
+      expect(Kernel).to \
+        receive(:warn).with("rdkafka deprecation warning: header access with Symbol key :version treated as a String. " \
+          "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
+      expect(subject[:version]).to eq("2.1.3")
+    end
+  end
+end
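
Note: this new spec pins down the consumer-side contract for headers. message.headers is now a frozen Hash with String keys; Symbol access still works but warns. A sketch of what calling code sees:

    # message is an Rdkafka::Consumer::Message returned by poll
    headers = message.headers

    headers["version"] # => "2.1.3" (String keys are the supported form)
    headers.frozen?    # => true

    # Symbol access is mapped to the String key but emits a deprecation
    # warning via Kernel.warn and will break in version 1:
    headers[:version]  # => "2.1.3"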
data/spec/rdkafka/consumer/message_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::Message do
data/spec/rdkafka/consumer/partition_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::Partition do
data/spec/rdkafka/consumer/topic_partition_list_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Consumer::TopicPartitionList do
data/spec/rdkafka/consumer_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "ostruct"
 require 'securerandom'
@@ -53,7 +55,7 @@ describe Rdkafka::Consumer do
 
   describe "#pause and #resume" do
     context "subscription" do
-      let(:timeout) { 1000 }
+      let(:timeout) { 2000 }
 
       before { consumer.subscribe("consume_test_topic") }
       after { consumer.unsubscribe }
@@ -593,7 +595,7 @@ describe Rdkafka::Consumer do
   end
 
   describe "#poll with headers" do
-    it "should return message with headers" do
+    it "should return message with headers using string keys (when produced with symbol keys)" do
       report = producer.produce(
         topic: "consume_test_topic",
         key: "key headers",
@@ -603,7 +605,20 @@ describe Rdkafka::Consumer do
       message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
      expect(message).to be
      expect(message.key).to eq('key headers')
-      expect(message.headers).to include(foo: 'bar')
+      expect(message.headers).to include('foo' => 'bar')
+    end
+
+    it "should return message with headers using string keys (when produced with string keys)" do
+      report = producer.produce(
+        topic: "consume_test_topic",
+        key: "key headers",
+        headers: { 'foo' => 'bar' }
+      ).wait
+
+      message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
+      expect(message).to be
+      expect(message.key).to eq('key headers')
+      expect(message.headers).to include('foo' => 'bar')
     end
 
     it "should return message with no headers" do
@@ -698,7 +713,7 @@ describe Rdkafka::Consumer do
       n.times do |i|
         handles << producer.produce(
           topic: topic_name,
-          payload: Time.new.to_f.to_s,
+          payload: i % 10 == 0 ? nil : Time.new.to_f.to_s,
           key: i.to_s,
           partition: 0
         )
@@ -723,7 +738,8 @@ describe Rdkafka::Consumer do
       #
       # This is, in effect, an integration test and the subsequent specs are
       # unit tests.
-      create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
+      admin = rdkafka_config.admin
+      create_topic_handle = admin.create_topic(topic_name, 1, 1)
       create_topic_handle.wait(max_wait_timeout: 15.0)
       consumer.subscribe(topic_name)
       produce_n 42
@@ -736,6 +752,7 @@ describe Rdkafka::Consumer do
       expect(all_yields.flatten.size).to eq 42
       expect(all_yields.size).to be > 4
       expect(all_yields.flatten.map(&:key)).to eq (0..41).map { |x| x.to_s }
+      admin.close
     end
 
     it "should batch poll results and yield arrays of messages" do
@@ -778,13 +795,15 @@ describe Rdkafka::Consumer do
     end
 
     it "should yield [] if nothing is received before the timeout" do
-      create_topic_handle = rdkafka_config.admin.create_topic(topic_name, 1, 1)
+      admin = rdkafka_config.admin
+      create_topic_handle = admin.create_topic(topic_name, 1, 1)
       create_topic_handle.wait(max_wait_timeout: 15.0)
       consumer.subscribe(topic_name)
       consumer.each_batch do |batch|
         expect(batch).to eq([])
         break
       end
+      admin.close
     end
 
     it "should yield batchs of max_items in size if messages are already fetched" do
@@ -861,6 +880,7 @@ describe Rdkafka::Consumer do
      expect(batches_yielded.first.size).to eq 2
      expect(exceptions_yielded.flatten.size).to eq 1
      expect(exceptions_yielded.flatten.first).to be_instance_of(Rdkafka::RdkafkaError)
+      consumer.close
     end
   end
 
@@ -902,6 +922,7 @@ describe Rdkafka::Consumer do
      expect(each_batch_iterations).to eq 0
      expect(batches_yielded.size).to eq 0
      expect(exceptions_yielded.size).to eq 0
+      consumer.close
     end
    end
  end
@@ -916,11 +937,11 @@ describe Rdkafka::Consumer do
   context "with a working listener" do
     let(:listener) do
       Struct.new(:queue) do
-        def on_partitions_assigned(consumer, list)
+        def on_partitions_assigned(list)
           collect(:assign, list)
         end
 
-        def on_partitions_revoked(consumer, list)
+        def on_partitions_revoked(list)
           collect(:revoke, list)
         end
 
@@ -944,12 +965,12 @@ describe Rdkafka::Consumer do
   context "with a broken listener" do
     let(:listener) do
       Struct.new(:queue) do
-        def on_partitions_assigned(consumer, list)
+        def on_partitions_assigned(list)
           queue << :assigned
           raise 'boom'
         end
 
-        def on_partitions_revoked(consumer, list)
+        def on_partitions_revoked(list)
           queue << :revoked
           raise 'boom'
         end
@@ -962,18 +983,6 @@ describe Rdkafka::Consumer do
       expect(listener.queue).to eq([:assigned, :revoked])
     end
   end
-
-  def notify_listener(listener)
-    # 1. subscribe and poll
-    consumer.subscribe("consume_test_topic")
-    wait_for_assignment(consumer)
-    consumer.poll(100)
-
-    # 2. unsubscribe
-    consumer.unsubscribe
-    wait_for_unassignment(consumer)
-    consumer.close
-  end
  end
 
  context "methods that should not be called after a consumer has been closed" do
@@ -1005,4 +1014,70 @@ describe Rdkafka::Consumer do
       end
     end
   end
+
+  it "provides a finalizer that closes the native kafka client" do
+    expect(consumer.closed?).to eq(false)
+
+    consumer.finalizer.call("some-ignored-object-id")
+
+    expect(consumer.closed?).to eq(true)
+  end
+
+  context "when the rebalance protocol is cooperative" do
+    let(:consumer) do
+      config = rdkafka_consumer_config(
+        {
+          :"partition.assignment.strategy" => "cooperative-sticky",
+          :"debug" => "consumer",
+        }
+      )
+      config.consumer_rebalance_listener = listener
+      config.consumer
+    end
+
+    let(:listener) do
+      Struct.new(:queue) do
+        def on_partitions_assigned(list)
+          collect(:assign, list)
+        end
+
+        def on_partitions_revoked(list)
+          collect(:revoke, list)
+        end
+
+        def collect(name, list)
+          partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
+          queue << ([name] + partitions)
+        end
+      end.new([])
+    end
+
+    it "should be able to assign and unassign partitions using the cooperative partition assignment APIs" do
+      notify_listener(listener) do
+        handles = []
+        10.times do
+          handles << producer.produce(
+            topic: "consume_test_topic",
+            payload: "payload 1",
+            key: "key 1",
+            partition: 0
+          )
+        end
+        handles.each(&:wait)
+
+        consumer.subscribe("consume_test_topic")
+        # Check the first 10 messages. Then close the consumer, which
+        # should break the each loop.
+        consumer.each_with_index do |message, i|
+          expect(message).to be_a Rdkafka::Consumer::Message
+          break if i == 10
+        end
+      end
+
+      expect(listener.queue).to eq([
+        [:assign, "consume_test_topic", 0, 1, 2],
+        [:revoke, "consume_test_topic", 0, 1, 2]
+      ])
+    end
+  end
 end
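
Note: two listener-facing changes are visible here. Rebalance callbacks now take a single list argument (the consumer is no longer passed in), and the cooperative-sticky assignment strategy is exercised end to end. A sketch of a listener under the new signature, with placeholder broker and group values:

    class RebalanceListener
      # Callbacks now receive only the topic partition list.
      def on_partitions_assigned(list)
        puts "assigned: #{list.to_h.keys}"
      end

      def on_partitions_revoked(list)
        puts "revoked: #{list.to_h.keys}"
      end
    end

    config = Rdkafka::Config.new(
      "bootstrap.servers"             => "localhost:9092",    # placeholder
      "group.id"                      => "example-group",     # placeholder
      "partition.assignment.strategy" => "cooperative-sticky" # incremental rebalancing
    )
    config.consumer_rebalance_listener = RebalanceListener.new
    consumer = config.consumer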
data/spec/rdkafka/error_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::RdkafkaError do
data/spec/rdkafka/metadata_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "securerandom"
 
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb}
@@ -1,17 +1,16 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
   let(:thread) { double(Thread) }
 
-  subject(:client) { described_class.new(native) }
+  subject(:client) { described_class.new(native, run_polling_thread: true) }
 
   before do
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
     allow(Thread).to receive(:new).and_return(thread)
 
     allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +18,8 @@ describe Rdkafka::NativeKafka do
     allow(thread).to receive(:abort_on_exception=).with(anything)
   end
 
+  after { client.close }
+
   context "defaults" do
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +40,12 @@ describe Rdkafka::NativeKafka do
 
       client
     end
-
-    it "polls the native with default 250ms timeout" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
-      end
-    end
-
-    it "check the out queue of native client" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
-      end
-    end
-  end
-
-  def polling_loop_expects(&block)
-    Thread.current[:closing] = true # this forces the loop break with line #12
-
-    allow(Thread).to receive(:new).and_yield do |_|
-      block.call
-    end.and_return(thread)
-
-    client
   end
 
-  it "exposes `native` client" do
-    expect(client.native).to eq(native)
+  it "exposes the inner client" do
+    client.with_inner do |inner|
+      expect(inner).to eq(native)
+    end
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -74,7 +55,7 @@ describe Rdkafka::NativeKafka do
 
     context "and attempt to close" do
      it "calls the `destroy` binding" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
 
        client.close
      end
@@ -94,7 +75,6 @@ describe Rdkafka::NativeKafka do
      it "closes and unassign the native client" do
        client.close
 
-        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
@@ -109,7 +89,7 @@ describe Rdkafka::NativeKafka do
 
    context "and attempt to close again" do
      it "does not call the `destroy` binding" do
-        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
 
        client.close
      end
@@ -129,13 +109,12 @@ describe Rdkafka::NativeKafka do
      it "does not close and unassign the native client again" do
        client.close
 
-        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
  end
 
-  it "provide a finalizer Proc that closes the `native` client" do
+  it "provides a finalizer that closes the native kafka client" do
    expect(client.closed?).to eq(false)
 
    client.finalizer.call("some-ignored-object-id")
 
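Note: Rdkafka::Producer::Client is renamed to Rdkafka::NativeKafka, and direct access via .native is replaced by block-scoped access via with_inner. A sketch of the new surface; the constructor is normally invoked for you by Rdkafka::Config, and native_handle stands in for the raw librdkafka handle:

    # run_polling_thread controls whether a background thread polls the
    # client for callbacks (producers need this).
    client = Rdkafka::NativeKafka.new(native_handle, run_polling_thread: true)

    # The handle is only reachable inside a block; there is no .native reader:
    client.with_inner do |inner|
      # inner is the FFI pointer to the librdkafka client
    end

    client.close    # destroys the native client and stops the polling thread
    client.closed?  # => true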
data/spec/rdkafka/producer/delivery_handle_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
      handle[:response] = response
      handle[:partition] = 2
      handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
    end
  end
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
        expect(report.partition).to eq(2)
        expect(report.offset).to eq(100)
+        expect(report.topic_name).to eq("produce_test_topic")
      end
 
      it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
        expect(report.partition).to eq(2)
        expect(report.offset).to eq(100)
+        expect(report.topic_name).to eq("produce_test_topic")
      end
    end
  end
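
Note: delivery handles now carry the topic name through to the delivery report, so waiting on a produce call also tells you which topic the message landed on. A sketch:

    handle = producer.produce(topic: "produce_test_topic", payload: "hi")

    # Wait (optionally bounded in seconds) for the broker acknowledgement:
    report = handle.wait(max_wait_timeout: 5)

    report.partition   # e.g. 2
    report.offset      # e.g. 100
    report.topic_name  # => "produce_test_topic" (new in 0.13.0)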
data/spec/rdkafka/producer/delivery_report_spec.rb
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
  it "should get the partition" do
    expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
    expect(subject.offset).to eq 100
  end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
  it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
  end
 end
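
Note: the DeliveryReport constructor changes shape here. The third positional argument is now the topic name, and the error moves to the fourth position as a numeric error code (or nil) rather than a message string:

    report = Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1)

    report.partition  # => 2
    report.offset     # => 100
    report.topic_name # => "topic" (new)
    report.error      # => -1 (an error code instead of a string)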