rdkafka 0.12.0 → 0.13.0.beta.2
- checksums.yaml +4 -4
- data/.semaphore/semaphore.yml +6 -2
- data/CHANGELOG.md +12 -0
- data/Gemfile +2 -0
- data/Rakefile +2 -0
- data/ext/Rakefile +2 -0
- data/lib/rdkafka/abstract_handle.rb +2 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/create_topic_report.rb +2 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
- data/lib/rdkafka/admin.rb +35 -35
- data/lib/rdkafka/bindings.rb +19 -5
- data/lib/rdkafka/callbacks.rb +7 -1
- data/lib/rdkafka/config.rb +7 -5
- data/lib/rdkafka/consumer/headers.rb +24 -7
- data/lib/rdkafka/consumer/message.rb +3 -1
- data/lib/rdkafka/consumer/partition.rb +2 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
- data/lib/rdkafka/consumer.rb +19 -10
- data/lib/rdkafka/error.rb +9 -0
- data/lib/rdkafka/metadata.rb +2 -0
- data/lib/rdkafka/native_kafka.rb +52 -0
- data/lib/rdkafka/producer/delivery_handle.rb +5 -2
- data/lib/rdkafka/producer/delivery_report.rb +9 -2
- data/lib/rdkafka/producer.rb +11 -10
- data/lib/rdkafka/version.rb +5 -3
- data/lib/rdkafka.rb +3 -1
- data/rdkafka.gemspec +2 -0
- data/spec/rdkafka/abstract_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
- data/spec/rdkafka/admin_spec.rb +4 -3
- data/spec/rdkafka/bindings_spec.rb +2 -0
- data/spec/rdkafka/callbacks_spec.rb +2 -0
- data/spec/rdkafka/config_spec.rb +14 -0
- data/spec/rdkafka/consumer/headers_spec.rb +62 -0
- data/spec/rdkafka/consumer/message_spec.rb +2 -0
- data/spec/rdkafka/consumer/partition_spec.rb +2 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
- data/spec/rdkafka/consumer_spec.rb +84 -15
- data/spec/rdkafka/error_spec.rb +2 -0
- data/spec/rdkafka/metadata_spec.rb +2 -0
- data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +8 -6
- data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
- data/spec/rdkafka/producer_spec.rb +45 -16
- data/spec/spec_helper.rb +16 -0
- metadata +13 -11
- data/lib/rdkafka/producer/client.rb +0 -47
data/lib/rdkafka/producer/delivery_report.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Rdkafka
   class Producer
     # Delivery report for a successfully produced message.
@@ -10,15 +12,20 @@ module Rdkafka
       # @return [Integer]
       attr_reader :offset
 
-      # Error in case happen during produce.
+      # The name of the topic this message was produced to.
       # @return [String]
+      attr_reader :topic_name
+
+      # Error in case happen during produce.
+      # @return [Integer]
       attr_reader :error
 
      private
 
-      def initialize(partition, offset, error = nil)
+      def initialize(partition, offset, topic_name = nil, error = nil)
        @partition = partition
        @offset = offset
+        @topic_name = topic_name
        @error = error
      end
    end
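The change above adds `topic_name` to `Rdkafka::Producer::DeliveryReport` and documents `error` as an integer code. A minimal sketch of reading the new field after a produce call; the broker address and topic name are assumptions, not part of the diff:

```ruby
require "rdkafka"

# Hypothetical local setup; adjust bootstrap.servers and the topic to your cluster.
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

handle = producer.produce(topic: "events", payload: "hello", key: "k1")
report = handle.wait(max_wait_timeout: 10)

# New in 0.13.0: the delivery report also carries the topic name.
puts "delivered to #{report.topic_name}[#{report.partition}] at offset #{report.offset}"

producer.close
```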
data/lib/rdkafka/producer.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "objspace"
 
 module Rdkafka
@@ -16,12 +18,12 @@ module Rdkafka
     attr_reader :delivery_callback_arity
 
     # @private
-    def initialize(client, partitioner_name)
-      @client = client
+    def initialize(native_kafka, partitioner_name)
+      @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
 
-      # Makes sure, that the producer gets closed before it gets GCed by Ruby
-      ObjectSpace.define_finalizer(self, client.finalizer)
+      # Makes sure, that native kafka gets closed before it gets GCed by Ruby
+      ObjectSpace.define_finalizer(self, native_kafka.finalizer)
     end
 
     # Set a callback that will be called every time a message is successfully produced.
@@ -40,7 +42,7 @@ module Rdkafka
     def close
       ObjectSpace.undefine_finalizer(self)
 
-      @client.close
+      @native_kafka.close
     end
 
     # Partition count for a given topic.
@@ -49,10 +51,9 @@ module Rdkafka
     # @param topic [String] The topic name.
     #
     # @return partition count [Integer,nil]
-    #
     def partition_count(topic)
       closed_producer_check(__method__)
-      Rdkafka::Metadata.new(@client.native, topic).topics&.first[:partition_count]
+      Rdkafka::Metadata.new(@native_kafka.inner, topic).topics&.first[:partition_count]
     end
 
     # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
@@ -144,7 +145,7 @@ module Rdkafka
 
       # Produce the message
       response = Rdkafka::Bindings.rd_kafka_producev(
-        @client.native,
+        @native_kafka.inner,
         *args
       )
 
@@ -157,7 +158,6 @@ module Rdkafka
       delivery_handle
     end
 
-    # @private
     def call_delivery_callback(delivery_report, delivery_handle)
       return unless @delivery_callback
 
@@ -171,8 +171,9 @@ module Rdkafka
       callback.method(:call).arity
     end
 
+    private
     def closed_producer_check(method)
-      raise Rdkafka::ClosedProducerError.new(method) if @client.closed?
+      raise Rdkafka::ClosedProducerError.new(method) if @native_kafka.closed?
     end
   end
 end
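The producer's internals now go through the new `NativeKafka` wrapper (`@native_kafka.inner`, `.close`, `.closed?`) instead of the old `Producer::Client`, while the public behaviour stays the same: producing on a closed producer still raises `Rdkafka::ClosedProducerError`. A rough sketch with the same assumed local broker and topic as above:

```ruby
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# partition_count fetches cluster metadata through the wrapped native handle.
puts producer.partition_count("events")

producer.close

begin
  producer.produce(topic: "events", payload: "too late")
rescue Rdkafka::ClosedProducerError => e
  puts e.message # the guard in closed_producer_check fires after close
end
```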
data/lib/rdkafka/version.rb
CHANGED
@@ -1,5 +1,7 @@
+# frozen_string_literal: true
+
 module Rdkafka
-  VERSION = "0.12.0"
-  LIBRDKAFKA_VERSION = "1.9.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "
+  VERSION = "0.13.0.beta.2"
+  LIBRDKAFKA_VERSION = "1.9.2"
+  LIBRDKAFKA_SOURCE_SHA256 = "3fba157a9f80a0889c982acdd44608be8a46142270a389008b22d921be1198ad"
 end
data/lib/rdkafka.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "rdkafka/version"
 
 require "rdkafka/abstract_handle"
@@ -16,7 +18,7 @@ require "rdkafka/consumer/partition"
 require "rdkafka/consumer/topic_partition_list"
 require "rdkafka/error"
 require "rdkafka/metadata"
+require "rdkafka/native_kafka"
 require "rdkafka/producer"
-require "rdkafka/producer/client"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
data/rdkafka.gemspec
CHANGED
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "ostruct"
 
 describe Rdkafka::Admin do
-  let(:config)
-  let(:admin)
+  let(:config) { rdkafka_config }
+  let(:admin) { config.admin }
 
   after do
     # Registry should always end up being empty
@@ -174,7 +176,6 @@ describe Rdkafka::Admin do
     end
   end
 
-
   it "deletes a topic that was newly created" do
     create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
     create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Config do
@@ -148,6 +150,18 @@ describe Rdkafka::Config do
     }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
   end
 
+  it "allows string partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2")
+    config = Rdkafka::Config.new("partitioner" => "murmur2")
+    config.producer
+  end
+
+  it "allows symbol partitioner key" do
+    expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2")
+    config = Rdkafka::Config.new(:partitioner => "murmur2")
+    config.producer
+  end
+
   it "should allow configuring zstd compression" do
     config = Rdkafka::Config.new('compression.codec' => 'zstd')
     begin
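The two new specs above cover the gem-level `partitioner` setting, which `Config` now accepts with either a String or a Symbol key and hands to the producer as its partitioner name. A usage sketch; the broker address and topic are assumptions:

```ruby
require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "partitioner"       => "murmur2" # :partitioner => "murmur2" works as well
)
producer = config.producer

# With a key given and no explicit partition, the murmur2 partitioner picks the partition.
producer.produce(topic: "events", payload: "v1", key: "user-42").wait
producer.close
```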
data/spec/rdkafka/consumer/headers_spec.rb
ADDED
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Consumer::Headers do
+  let(:headers) do
+    { # Note String keys!
+      "version" => "2.1.3",
+      "type" => "String"
+    }
+  end
+  let(:native_message) { double('native message') }
+  let(:headers_ptr) { double('headers pointer') }
+
+  describe '.from_native' do
+    before do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
+        expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+        .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+          expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
+          expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
+          expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
+          Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+        end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+        .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+          expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
+          expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
+          expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
+          Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+        end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+        .with(headers_ptr, 2, anything, anything, anything)
+        .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
+    end
+
+    subject { described_class.from_native(native_message) }
+
+    it { is_expected.to eq(headers) }
+    it { is_expected.to be_frozen }
+
+    it 'allows String key' do
+      expect(subject['version']).to eq("2.1.3")
+    end
+
+    it 'allows Symbol key, but warns' do
+      expect(Kernel).to \
+        receive(:warn).with("rdkafka deprecation warning: header access with Symbol key :version treated as a String. " \
+          "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
+      expect(subject[:version]).to eq("2.1.3")
+    end
+  end
+end
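Per this new spec, `Consumer::Headers.from_native` now returns a frozen Hash with String keys; Symbol access still resolves but emits a deprecation warning. A consuming sketch, assuming a local broker and messages on the topic that carry a "version" header:

```ruby
require "rdkafka"

consumer = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",
  "group.id"          => "headers-example"
).consumer
consumer.subscribe("consume_test_topic")

consumer.each do |message|
  headers = message.headers   # frozen Hash, e.g. { "version" => "2.1.3" }
  puts headers["version"]     # preferred: String key
  puts headers[:version]      # still returns the value, but warns about Symbol keys
  break
end

consumer.close
```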
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "ostruct"
 require 'securerandom'
@@ -593,7 +595,7 @@ describe Rdkafka::Consumer do
   end
 
   describe "#poll with headers" do
-    it "should return message with headers" do
+    it "should return message with headers using string keys (when produced with symbol keys)" do
       report = producer.produce(
         topic: "consume_test_topic",
         key: "key headers",
@@ -603,7 +605,20 @@ describe Rdkafka::Consumer do
       message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
       expect(message).to be
       expect(message.key).to eq('key headers')
-      expect(message.headers).to include(foo: 'bar')
+      expect(message.headers).to include('foo' => 'bar')
+    end
+
+    it "should return message with headers using string keys (when produced with string keys)" do
+      report = producer.produce(
+        topic: "consume_test_topic",
+        key: "key headers",
+        headers: { 'foo' => 'bar' }
+      ).wait
+
+      message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
+      expect(message).to be
+      expect(message.key).to eq('key headers')
+      expect(message.headers).to include('foo' => 'bar')
     end
 
     it "should return message with no headers" do
@@ -698,7 +713,7 @@ describe Rdkafka::Consumer do
       n.times do |i|
         handles << producer.produce(
           topic: topic_name,
-          payload: Time.new.to_f.to_s,
+          payload: i % 10 == 0 ? nil : Time.new.to_f.to_s,
           key: i.to_s,
           partition: 0
         )
@@ -962,18 +977,6 @@ describe Rdkafka::Consumer do
       expect(listener.queue).to eq([:assigned, :revoked])
     end
   end
-
-  def notify_listener(listener)
-    # 1. subscribe and poll
-    consumer.subscribe("consume_test_topic")
-    wait_for_assignment(consumer)
-    consumer.poll(100)
-
-    # 2. unsubscribe
-    consumer.unsubscribe
-    wait_for_unassignment(consumer)
-    consumer.close
-  end
 
   context "methods that should not be called after a consumer has been closed" do
@@ -1005,4 +1008,70 @@ describe Rdkafka::Consumer do
      end
    end
  end
+
+  it "provides a finalizer that closes the native kafka client" do
+    expect(consumer.closed?).to eq(false)
+
+    consumer.finalizer.call("some-ignored-object-id")
+
+    expect(consumer.closed?).to eq(true)
+  end
+
+  context "when the rebalance protocol is cooperative" do
+    let(:consumer) do
+      config = rdkafka_consumer_config(
+        {
+          :"partition.assignment.strategy" => "cooperative-sticky",
+          :"debug" => "consumer",
+        }
+      )
+      config.consumer_rebalance_listener = listener
+      config.consumer
+    end
+
+    let(:listener) do
+      Struct.new(:queue) do
+        def on_partitions_assigned(consumer, list)
+          collect(:assign, list)
+        end
+
+        def on_partitions_revoked(consumer, list)
+          collect(:revoke, list)
+        end
+
+        def collect(name, list)
+          partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
+          queue << ([name] + partitions)
+        end
+      end.new([])
+    end
+
+    it "should be able to assign and unassign partitions using the cooperative partition assignment APIs" do
+      notify_listener(listener) do
+        handles = []
+        10.times do
+          handles << producer.produce(
+            topic: "consume_test_topic",
+            payload: "payload 1",
+            key: "key 1",
+            partition: 0
+          )
+        end
+        handles.each(&:wait)
+
+        consumer.subscribe("consume_test_topic")
+        # Check the first 10 messages. Then close the consumer, which
+        # should break the each loop.
+        consumer.each_with_index do |message, i|
+          expect(message).to be_a Rdkafka::Consumer::Message
+          break if i == 10
+        end
+      end
+
+      expect(listener.queue).to eq([
+        [:assign, "consume_test_topic", 0, 1, 2],
+        [:revoke, "consume_test_topic", 0, 1, 2]
+      ])
+    end
+  end
 end
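The cooperative-sticky spec above doubles as a reference for wiring up a rebalance listener. A trimmed-down sketch; the listener class name, broker address, and topic are illustrative assumptions:

```ruby
require "rdkafka"

# Any object responding to these two callbacks can act as a rebalance listener.
class LoggingRebalanceListener
  def on_partitions_assigned(consumer, list)
    puts "assigned: #{list.to_h.map { |topic, parts| [topic, parts.map(&:partition)] }.to_h}"
  end

  def on_partitions_revoked(consumer, list)
    puts "revoked: #{list.to_h.keys}"
  end
end

config = Rdkafka::Config.new(
  "bootstrap.servers"             => "localhost:9092",
  "group.id"                      => "cooperative-example",
  "partition.assignment.strategy" => "cooperative-sticky"
)
config.consumer_rebalance_listener = LoggingRebalanceListener.new

consumer = config.consumer
consumer.subscribe("consume_test_topic")
consumer.each { |message| puts message.payload }
```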
data/spec/rdkafka/error_spec.rb
CHANGED
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb}
RENAMED
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
@@ -63,8 +65,8 @@ describe Rdkafka::Producer::Client do
     client
   end
 
-  it "exposes native client" do
-    expect(client.native).to eq(native)
+  it "exposes inner client" do
+    expect(client.inner).to eq(native)
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -94,7 +96,7 @@ describe Rdkafka::Producer::Client do
     it "closes and unassign the native client" do
       client.close
 
-      expect(client.native).to eq(nil)
+      expect(client.inner).to eq(nil)
       expect(client.closed?).to eq(true)
     end
   end
@@ -129,13 +131,13 @@ describe Rdkafka::Producer::Client do
     it "does not close and unassign the native client again" do
       client.close
 
-      expect(client.native).to eq(nil)
+      expect(client.inner).to eq(nil)
      expect(client.closed?).to eq(true)
    end
  end
 
-  it "
+  it "provides a finalizer that closes the native kafka client" do
    expect(client.closed?).to eq(false)
 
    client.finalizer.call("some-ignored-object-id")
data/spec/rdkafka/producer/delivery_handle_spec.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
    end
 
    it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
      expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
    end
  end
end
data/spec/rdkafka/producer/delivery_report_spec.rb
CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error reason") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
   it "should get the error" do
-    expect(subject.error).to eq "error reason"
+    expect(subject.error).to eq -1
   end
 end