rdkafka 0.7.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/rdkafka/producer/delivery_report.rb CHANGED
@@ -10,11 +10,16 @@ module Rdkafka
       # @return [Integer]
       attr_reader :offset
 
+      # Error that occurred while producing the message, if any.
+      # @return [String, nil]
+      attr_reader :error
+
       private
 
-      def initialize(partition, offset)
+      def initialize(partition, offset, error = nil)
         @partition = partition
         @offset = offset
+        @error = error
       end
     end
   end
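
The new `error` attribute surfaces produce failures on the delivery report itself, so a delivery callback can branch on it instead of assuming success. A minimal sketch, assuming a local broker and an `events` topic (both placeholders):

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer

# The report now carries partition, offset, and a (possibly nil) error.
producer.delivery_callback = lambda do |report|
  if report.error
    puts "Delivery failed: #{report.error}"
  else
    puts "Delivered to partition #{report.partition} at offset #{report.offset}"
  end
end

producer.produce(topic: "events", payload: "hello").wait
producer.close
```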
data/lib/rdkafka/producer.rb CHANGED
@@ -37,10 +37,25 @@ module Rdkafka
 
     # Close this producer and wait for the internal poll queue to empty.
     def close
+      return unless @native_kafka
+
       # Indicate to polling thread that we're closing
       @closing = true
       # Wait for the polling thread to finish up
       @polling_thread.join
+      Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
+      @native_kafka = nil
+    end
+
+    # Partition count for a given topic.
+    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
+    #
+    # @param topic [String] The topic name.
+    #
+    # @return [Integer, nil] the partition count
+    #
+    def partition_count(topic)
+      Rdkafka::Metadata.new(@native_kafka, topic).topics&.select { |x| x[:topic_name] == topic }&.dig(0, :partition_count)
     end
 
     # Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
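
Two behavioural changes are visible here: `#close` now releases the underlying native handle and is safe to call more than once, and `#partition_count` exposes topic metadata. A short sketch of both, assuming a local broker and a placeholder topic name:

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer

# Returns nil when the topic does not exist (yet); an Integer otherwise.
count = producer.partition_count("events")
puts count ? "events has #{count} partitions" : "topic not found"

producer.close
producer.close # safe: the guard clause returns early once @native_kafka is nil
```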
@@ -58,7 +73,7 @@ module Rdkafka
     # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
-    def produce(topic:, payload: nil, key: nil, partition: nil, timestamp: nil, headers: nil)
+    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
       # Start by checking and converting the input
 
       # Get payload length
@@ -75,9 +90,15 @@ module Rdkafka
         key.bytesize
       end
 
-      # If partition is nil use -1 to let Kafka set the partition based
-      # on the key/randomly if there is no key
-      partition = -1 if partition.nil?
+      if partition_key
+        partition_count = partition_count(topic)
+        # If the topic is not present, set to -1
+        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count) if partition_count
+      end
+
+      # If partition is nil, use -1 to let librdkafka set the partition randomly or
+      # based on the key when present.
+      partition ||= -1
 
       # If timestamp is nil use 0 and let Kafka set one. If an integer or time
       # use it.
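
The new `partition_key` argument routes messages through the native partitioner, so records sharing a partition key land on the same partition while the message `key` stays free for its usual role. A usage sketch (broker address, topic, and key values are placeholders):

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer

# All messages with partition_key "customer-42" map to one partition,
# independent of the message key attached to the payload itself.
handle = producer.produce(
  topic: "events",
  payload: "order created",
  key: "order-123",
  partition_key: "customer-42"
)
report = handle.wait(max_wait_timeout: 10)
puts "delivered to partition #{report.partition}, offset #{report.offset}"
producer.close
```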
data/lib/rdkafka/version.rb CHANGED
@@ -1,5 +1,5 @@
 module Rdkafka
-  VERSION = "0.7.0"
-  LIBRDKAFKA_VERSION = "1.2.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "eedde1c96104e4ac2d22a4230e34f35dd60d53976ae2563e3dd7c27190a96859"
+  VERSION = "0.8.0"
+  LIBRDKAFKA_VERSION = "1.4.0"
+  LIBRDKAFKA_SOURCE_SHA256 = "ae27ea3f3d0d32d29004e7f709efbba2666c5383a107cc45b3a1949486b2eb84"
 end
data/lib/rdkafka.rb CHANGED
@@ -8,6 +8,7 @@ require "rdkafka/consumer/message"
 require "rdkafka/consumer/partition"
 require "rdkafka/consumer/topic_partition_list"
 require "rdkafka/error"
+require "rdkafka/metadata"
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
data/rdkafka.gemspec CHANGED
@@ -19,7 +19,7 @@ Gem::Specification.new do |gem|
 
   gem.add_dependency 'ffi', '~> 1.9'
   gem.add_dependency 'mini_portile2', '~> 2.1'
-  gem.add_dependency 'rake', '~> 12.3'
+  gem.add_dependency 'rake', '>= 12.3'
 
   gem.add_development_dependency 'pry', '~> 0.10'
   gem.add_development_dependency 'rspec', '~> 3.5'
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -1,4 +1,5 @@
 require "spec_helper"
+require 'zlib'
 
 describe Rdkafka::Bindings do
   it "should load librdkafka" do
@@ -7,12 +8,12 @@ describe Rdkafka::Bindings do
 
   describe ".lib_extension" do
     it "should know the lib extension for darwin" do
-      expect(Gem::Platform.local).to receive(:os).and_return("darwin-aaa")
+      stub_const('RbConfig::CONFIG', 'host_os' => 'darwin')
       expect(Rdkafka::Bindings.lib_extension).to eq "dylib"
     end
 
     it "should know the lib extension for linux" do
-      expect(Gem::Platform.local).to receive(:os).and_return("linux")
+      stub_const('RbConfig::CONFIG', 'host_os' => 'linux')
       expect(Rdkafka::Bindings.lib_extension).to eq "so"
     end
   end
@@ -60,6 +61,23 @@ describe Rdkafka::Bindings do
     end
   end
 
+  describe "partitioner" do
+    let(:partition_key) { ('a'..'z').to_a.shuffle.take(15).join('') }
+    let(:partition_count) { rand(50) + 1 }
+
+    it "should return the same partition for a similar string and the same partition count" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      expect(result_1).to eq(result_2)
+    end
+
+    it "should match the old partitioner" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = (Zlib.crc32(partition_key) % partition_count)
+      expect(result_1).to eq(result_2)
+    end
+  end
+
   describe "stats callback" do
     context "without a stats callback" do
       it "should do nothing" do
data/spec/rdkafka/config_spec.rb CHANGED
@@ -50,7 +50,9 @@ describe Rdkafka::Config do
   end
 
   it "should create a consumer with valid config" do
-    expect(rdkafka_config.consumer).to be_a Rdkafka::Consumer
+    consumer = rdkafka_config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
   end
 
   it "should raise an error when creating a consumer with invalid config" do
@@ -76,7 +78,9 @@ describe Rdkafka::Config do
   end
 
   it "should create a producer with valid config" do
-    expect(rdkafka_config.producer).to be_a Rdkafka::Producer
+    producer = rdkafka_config.producer
+    expect(producer).to be_a Rdkafka::Producer
+    producer.close
   end
 
   it "should raise an error when creating a producer with invalid config" do
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -1,7 +1,8 @@
 require "spec_helper"
 
 describe Rdkafka::Consumer::Message do
-  let(:native_topic) { new_native_topic }
+  let(:native_client) { new_native_client }
+  let(:native_topic) { new_native_topic(native_client: native_client) }
   let(:payload) { nil }
   let(:key) { nil }
   let(:native_message) do
@@ -24,6 +25,10 @@ describe Rdkafka::Consumer::Message do
     end
   end
 
+  after(:each) do
+    Rdkafka::Bindings.rd_kafka_destroy(native_client)
+  end
+
   subject { Rdkafka::Consumer::Message.new(native_message) }
 
   before do
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -6,7 +6,10 @@ describe Rdkafka::Consumer do
   let(:consumer) { config.consumer }
   let(:producer) { config.producer }
 
-  describe "#subscripe, #unsubscribe and #subscription" do
+  after { consumer.close }
+  after { producer.close }
+
+  describe "#subscribe, #unsubscribe and #subscription" do
     it "should subscribe, unsubscribe and return the subscription" do
       expect(consumer.subscription).to be_empty
 
@@ -88,7 +91,6 @@ describe Rdkafka::Consumer do
       # 8. ensure that message is successfully consumed
       records = consumer.poll(timeout)
       expect(records).not_to be_nil
-      consumer.commit
     end
   end
 
@@ -205,8 +207,6 @@ describe Rdkafka::Consumer do
       expect(records&.payload).to eq "payload c"
       records = consumer.poll(timeout)
       expect(records).to be_nil
-
-      consumer.commit
     end
   end
 end
@@ -313,11 +313,11 @@ describe Rdkafka::Consumer do
      }.to raise_error TypeError
    end
 
-    context "with a commited consumer" do
+    context "with a committed consumer" do
      before :all do
-        # Make sure there are some message
-        producer = rdkafka_config.producer
+        # Make sure there are some messages.
        handles = []
+        producer = rdkafka_config.producer
        10.times do
          (0..2).each do |i|
            handles << producer.produce(
@@ -329,6 +329,7 @@ describe Rdkafka::Consumer do
          end
        end
        handles.each(&:wait)
+        producer.close
      end
 
      before do
@@ -389,20 +390,26 @@ describe Rdkafka::Consumer do
 
   describe "#store_offset" do
     before do
+      config = {}
       config[:'enable.auto.offset.store'] = false
       config[:'enable.auto.commit'] = false
-      consumer.subscribe("consume_test_topic")
-      wait_for_assignment(consumer)
+      @new_consumer = rdkafka_config(config).consumer
+      @new_consumer.subscribe("consume_test_topic")
+      wait_for_assignment(@new_consumer)
+    end
+
+    after do
+      @new_consumer.close
     end
 
     it "should store the offset for a message" do
-      consumer.store_offset(message)
-      consumer.commit
+      @new_consumer.store_offset(message)
+      @new_consumer.commit
 
       list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
         list.add_topic("consume_test_topic", [0, 1, 2])
       end
-      partitions = consumer.committed(list).to_h["consume_test_topic"]
+      partitions = @new_consumer.committed(list).to_h["consume_test_topic"]
       expect(partitions).not_to be_nil
       expect(partitions[message.partition].offset).to eq(message.offset + 1)
     end
@@ -410,7 +417,7 @@ describe Rdkafka::Consumer do
     it "should raise an error with invalid input" do
       allow(message).to receive(:partition).and_return(9999)
       expect {
-        consumer.store_offset(message)
+        @new_consumer.store_offset(message)
       }.to raise_error Rdkafka::RdkafkaError
     end
   end
@@ -554,12 +561,12 @@ describe Rdkafka::Consumer do
        payload: "payload 1",
        key: "key 1"
      ).wait
-
      consumer.subscribe("consume_test_topic")
-      message = consumer.poll(5000)
-      expect(message).to be_a Rdkafka::Consumer::Message
+      message = consumer.each {|m| break m}
 
-      # Message content is tested in producer spec
+      expect(message).to be_a Rdkafka::Consumer::Message
+      expect(message.payload).to eq('payload 1')
+      expect(message.key).to eq('key 1')
    end
 
    it "should raise an error when polling fails" do
data/spec/rdkafka/error_spec.rb CHANGED
@@ -71,15 +71,15 @@ describe Rdkafka::RdkafkaError do
     end
 
     it "should not equal another error with a different error code" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
     end
 
     it "should not equal another error with a different message" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
     end
 
     it "should not equal another error with no message" do
-      expect(subject).to eq Rdkafka::RdkafkaError.new(10)
+      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10)
     end
   end
 end
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,7 @@
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100) }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -10,4 +10,8 @@ describe Rdkafka::Producer::DeliveryReport do
   it "should get the offset" do
     expect(subject.offset).to eq 100
   end
+
+  it "should get the error" do
+    expect(subject.error).to eq "error"
+  end
 end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -2,10 +2,13 @@ require "spec_helper"
 
 describe Rdkafka::Producer do
   let(:producer) { rdkafka_config.producer }
+  let(:consumer) { rdkafka_config.consumer }
 
   after do
     # Registry should always end up being empty
     expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    producer.close
+    consumer.close
   end
 
   context "delivery callback" do
@@ -27,6 +30,7 @@ describe Rdkafka::Producer do
    it "should call the callback when a message is delivered" do
      @callback_called = false
 
+
      producer.delivery_callback = lambda do |report|
        expect(report).not_to be_nil
        expect(report.partition).to eq 1
@@ -42,7 +46,10 @@ describe Rdkafka::Producer do
      )
 
      # Wait for it to be delivered
-      handle.wait(max_wait_timeout: 5)
+      handle.wait(max_wait_timeout: 15)
+
+      # Join the producer thread.
+      producer.close
 
      # Callback should have been called
      expect(@callback_called).to be true
@@ -55,7 +62,7 @@ describe Rdkafka::Producer do
        payload: "payload",
        key: "key"
      )
-    }.to raise_error ArgumentError, "missing keyword: topic"
+    }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
  end
 
  it "should produce a message" do
@@ -82,14 +89,15 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
    expect(message.partition).to eq 1
    expect(message.payload).to eq "payload"
    expect(message.key).to eq "key"
    # Since api.version.request is on by default we will get
    # the message creation timestamp if it's not set.
-    expect(message.timestamp).to be_within(5).of(Time.now)
+    expect(message.timestamp).to be_within(10).of(Time.now)
  end
 
  it "should produce a message with a specified partition" do
@@ -105,12 +113,47 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
    expect(message.partition).to eq 1
    expect(message.key).to eq "key partition"
  end
 
+  it "should produce a message to the same partition with a similar partition key" do
+    # Avoid partitioner collisions.
+    while true
+      key = ('a'..'z').to_a.shuffle.take(10).join('')
+      partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
+      partition_count = producer.partition_count('partitioner_test_topic')
+      break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
+    end
+
+    # Produce a message with key, partition_key and key + partition_key
+    messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).not_to eq(messages[2].partition)
+    expect(messages[1].partition).to eq(messages[2].partition)
+    expect(messages[0].key).to eq key
+    expect(messages[1].key).to be_nil
+    expect(messages[2].key).to eq key
+  end
+
  it "should produce a message with utf-8 encoding" do
    handle = producer.produce(
      topic: "produce_test_topic",
@@ -122,7 +165,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.partition).to eq 1
@@ -154,7 +198,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.partition).to eq 2
@@ -174,7 +219,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.partition).to eq 2
@@ -193,7 +239,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.key).to be_nil
@@ -210,7 +257,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.key).to eq "key no payload"
@@ -229,7 +277,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.payload).to eq "payload headers"
@@ -251,7 +300,8 @@ describe Rdkafka::Producer do
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
    expect(message.payload).to eq "payload headers"
@@ -284,11 +334,17 @@ describe Rdkafka::Producer do
    # Fork, produce a message, send the report over a pipe and
    # wait for and check the message in the main process.
 
+    # Kernel#fork is not available in JRuby
+    skip if defined?(JRUBY_VERSION)
+
    reader, writer = IO.pipe
 
    fork do
      reader.close
 
+      # Avoids sharing the socket between processes.
+      producer = rdkafka_config.producer
+
      handle = producer.produce(
        topic: "produce_test_topic",
        payload: "payload-forked",
@@ -296,7 +352,6 @@ describe Rdkafka::Producer do
      )
 
      report = handle.wait(max_wait_timeout: 5)
-      producer.close
 
      report_json = JSON.generate(
        "partition" => report.partition,
@@ -304,20 +359,24 @@ describe Rdkafka::Producer do
      )
 
      writer.write(report_json)
+      writer.close
+      producer.close
    end
 
    writer.close
-
    report_hash = JSON.parse(reader.read)
    report = Rdkafka::Producer::DeliveryReport.new(
      report_hash["partition"],
      report_hash["offset"]
    )
 
+    reader.close
+
    # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
    expect(message.partition).to eq 0
    expect(message.payload).to eq "payload-forked"
data/spec/spec_helper.rb CHANGED
@@ -7,6 +7,14 @@ require "pry"
 require "rspec"
 require "rdkafka"
 
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic consume_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic empty_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic load_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic produce_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic rake_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic watermarks_test_topic`
+`docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 25 --if-not-exists --topic partitioner_test_topic`
+
 def rdkafka_config(config_overrides={})
   config = {
     :"api.version.request" => false,
@@ -25,12 +33,12 @@ def rdkafka_config(config_overrides={})
   Rdkafka::Config.new(config)
 end
 
-def native_client
+def new_native_client
   config = rdkafka_config
   config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
 end
 
-def new_native_topic(topic_name="topic_name")
+def new_native_topic(topic_name="topic_name", native_client:)
   Rdkafka::Bindings.rd_kafka_topic_new(
     native_client,
     topic_name,
@@ -39,7 +47,8 @@ def new_native_topic(topic_name="topic_name")
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  consumer = rdkafka_config.consumer if consumer.nil?
+  new_consumer = !consumer
+  consumer ||= rdkafka_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
   loop do
@@ -53,6 +62,8 @@ def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer:
       return message
     end
   end
+ensure
+  consumer.close if new_consumer
 end
 
 def wait_for_assignment(consumer)
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.7.0
+  version: 0.8.0
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-09-21 00:00:00.000000000 Z
+date: 2020-06-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -42,14 +42,14 @@ dependencies:
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: '12.3'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: '12.3'
 - !ruby/object:Gem::Dependency
@@ -136,6 +136,7 @@ files:
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
+- lib/rdkafka/metadata.rb
 - lib/rdkafka/producer.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
@@ -171,8 +172,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.7.6.2
+rubygems_version: 3.1.2
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.