rdkafka 0.6.0 → 0.9.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (47)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +23 -0
  3. data/CHANGELOG.md +27 -0
  4. data/README.md +9 -9
  5. data/docker-compose.yml +17 -11
  6. data/ext/README.md +10 -15
  7. data/ext/Rakefile +24 -3
  8. data/lib/rdkafka.rb +8 -0
  9. data/lib/rdkafka/abstract_handle.rb +82 -0
  10. data/lib/rdkafka/admin.rb +155 -0
  11. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  12. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  13. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  14. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  15. data/lib/rdkafka/bindings.rb +64 -18
  16. data/lib/rdkafka/callbacks.rb +106 -0
  17. data/lib/rdkafka/config.rb +38 -9
  18. data/lib/rdkafka/consumer.rb +221 -46
  19. data/lib/rdkafka/consumer/headers.rb +7 -5
  20. data/lib/rdkafka/consumer/partition.rb +1 -1
  21. data/lib/rdkafka/consumer/topic_partition_list.rb +6 -16
  22. data/lib/rdkafka/error.rb +35 -4
  23. data/lib/rdkafka/metadata.rb +92 -0
  24. data/lib/rdkafka/producer.rb +50 -24
  25. data/lib/rdkafka/producer/delivery_handle.rb +7 -49
  26. data/lib/rdkafka/producer/delivery_report.rb +7 -2
  27. data/lib/rdkafka/version.rb +3 -3
  28. data/rdkafka.gemspec +3 -3
  29. data/spec/rdkafka/abstract_handle_spec.rb +114 -0
  30. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  31. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  32. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  33. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  34. data/spec/rdkafka/admin_spec.rb +203 -0
  35. data/spec/rdkafka/bindings_spec.rb +28 -10
  36. data/spec/rdkafka/callbacks_spec.rb +20 -0
  37. data/spec/rdkafka/config_spec.rb +51 -9
  38. data/spec/rdkafka/consumer/message_spec.rb +6 -1
  39. data/spec/rdkafka/consumer_spec.rb +287 -20
  40. data/spec/rdkafka/error_spec.rb +7 -3
  41. data/spec/rdkafka/metadata_spec.rb +78 -0
  42. data/spec/rdkafka/producer/delivery_handle_spec.rb +3 -43
  43. data/spec/rdkafka/producer/delivery_report_spec.rb +5 -1
  44. data/spec/rdkafka/producer_spec.rb +220 -100
  45. data/spec/spec_helper.rb +34 -6
  46. metadata +37 -13
  47. data/.travis.yml +0 -34
@@ -11,6 +11,10 @@ describe Rdkafka::RdkafkaError do
11
11
  expect(Rdkafka::RdkafkaError.new(10, "message prefix").message_prefix).to eq "message prefix"
12
12
  end
13
13
 
14
+ it "should create an error with a broker message" do
15
+ expect(Rdkafka::RdkafkaError.new(10, broker_message: "broker message").broker_message).to eq "broker message"
16
+ end
17
+
14
18
  describe "#code" do
15
19
  it "should handle an invalid response" do
16
20
  expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
@@ -71,15 +75,15 @@ describe Rdkafka::RdkafkaError do
71
75
  end
72
76
 
73
77
  it "should not equal another error with a different error code" do
74
- expect(subject).to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
78
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
75
79
  end
76
80
 
77
81
  it "should not equal another error with a different message" do
78
- expect(subject).to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
82
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
79
83
  end
80
84
 
81
85
  it "should not equal another error with no message" do
82
- expect(subject).to eq Rdkafka::RdkafkaError.new(10)
86
+ expect(subject).not_to eq Rdkafka::RdkafkaError.new(10)
83
87
  end
84
88
  end
85
89
  end
@@ -0,0 +1,78 @@
1
+ require "spec_helper"
2
+ require "securerandom"
3
+
4
+ describe Rdkafka::Metadata do
5
+ let(:config) { rdkafka_config }
6
+ let(:native_config) { config.send(:native_config) }
7
+ let(:native_kafka) { config.send(:native_kafka, native_config, :rd_kafka_consumer) }
8
+
9
+ after do
10
+ Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
11
+ Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
12
+ end
13
+
14
+ context "passing in a topic name" do
15
+ context "that is non-existent topic" do
16
+ let(:topic_name) { SecureRandom.uuid.to_s }
17
+
18
+ it "raises an appropriate exception" do
19
+ expect {
20
+ described_class.new(native_kafka, topic_name)
21
+ }.to raise_exception(Rdkafka::RdkafkaError, "Broker: Leader not available (leader_not_available)")
22
+ end
23
+ end
24
+
25
+ context "that is one of our test topics" do
26
+ subject { described_class.new(native_kafka, topic_name) }
27
+ let(:topic_name) { "partitioner_test_topic" }
28
+
29
+ it "#brokers returns our single broker" do
30
+ expect(subject.brokers.length).to eq(1)
31
+ expect(subject.brokers[0][:broker_id]).to eq(1)
32
+ expect(subject.brokers[0][:broker_name]).to eq("localhost")
33
+ expect(subject.brokers[0][:broker_port]).to eq(9092)
34
+ end
35
+
36
+ it "#topics returns data on our test topic" do
37
+ expect(subject.topics.length).to eq(1)
38
+ expect(subject.topics[0][:partition_count]).to eq(25)
39
+ expect(subject.topics[0][:partitions].length).to eq(25)
40
+ expect(subject.topics[0][:topic_name]).to eq(topic_name)
41
+ end
42
+ end
43
+ end
44
+
45
+ context "not passing in a topic name" do
46
+ subject { described_class.new(native_kafka, topic_name) }
47
+ let(:topic_name) { nil }
48
+ let(:test_topics) {
49
+ %w(consume_test_topic empty_test_topic load_test_topic produce_test_topic rake_test_topic watermarks_test_topic partitioner_test_topic)
50
+ } # Test topics created in spec_helper.rb
51
+
52
+ it "#brokers returns our single broker" do
53
+ expect(subject.brokers.length).to eq(1)
54
+ expect(subject.brokers[0][:broker_id]).to eq(1)
55
+ expect(subject.brokers[0][:broker_name]).to eq("localhost")
56
+ expect(subject.brokers[0][:broker_port]).to eq(9092)
57
+ end
58
+
59
+ it "#topics returns data about all of our test topics" do
60
+ result = subject.topics.map { |topic| topic[:topic_name] }
61
+ expect(result).to include(*test_topics)
62
+ end
63
+ end
64
+
65
+ context "when a non-zero error code is returned" do
66
+ let(:topic_name) { SecureRandom.uuid.to_s }
67
+
68
+ before do
69
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_return(-165)
70
+ end
71
+
72
+ it "creating the instance raises an exception" do
73
+ expect {
74
+ described_class.new(native_kafka, topic_name)
75
+ }.to raise_error(Rdkafka::RdkafkaError, /Local: Required feature not supported by broker \(unsupported_feature\)/)
76
+ end
77
+ end
78
+ end
@@ -12,42 +12,13 @@ describe Rdkafka::Producer::DeliveryHandle do
12
12
  end
13
13
  end
14
14
 
15
- describe ".register and .remove" do
16
- let(:pending_handle) { true }
17
-
18
- it "should register and remove a delivery handle" do
19
- Rdkafka::Producer::DeliveryHandle.register(subject.to_ptr.address, subject)
20
- removed = Rdkafka::Producer::DeliveryHandle.remove(subject.to_ptr.address)
21
- expect(removed).to eq subject
22
- expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
23
- end
24
- end
25
-
26
- describe "#pending?" do
27
- context "when true" do
28
- let(:pending_handle) { true }
29
-
30
- it "should be true" do
31
- expect(subject.pending?).to be true
32
- end
33
- end
34
-
35
- context "when not true" do
36
- let(:pending_handle) { false }
37
-
38
- it "should be false" do
39
- expect(subject.pending?).to be false
40
- end
41
- end
42
- end
43
-
44
15
  describe "#wait" do
45
16
  let(:pending_handle) { true }
46
17
 
47
18
  it "should wait until the timeout and then raise an error" do
48
19
  expect {
49
- subject.wait(0.1)
50
- }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
20
+ subject.wait(max_wait_timeout: 0.1)
21
+ }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError, /delivery/
51
22
  end
52
23
 
53
24
  context "when not pending anymore and no error" do
@@ -61,22 +32,11 @@ describe Rdkafka::Producer::DeliveryHandle do
61
32
  end
62
33
 
63
34
  it "should wait without a timeout" do
64
- report = subject.wait(nil)
35
+ report = subject.wait(max_wait_timeout: nil)
65
36
 
66
37
  expect(report.partition).to eq(2)
67
38
  expect(report.offset).to eq(100)
68
39
  end
69
40
  end
70
-
71
- context "when not pending anymore and there was an error" do
72
- let(:pending_handle) { false }
73
- let(:response) { 20 }
74
-
75
- it "should raise an rdkafka error" do
76
- expect {
77
- subject.wait
78
- }.to raise_error Rdkafka::RdkafkaError
79
- end
80
- end
81
41
  end
82
42
  end
@@ -1,7 +1,7 @@
1
1
  require "spec_helper"
2
2
 
3
3
  describe Rdkafka::Producer::DeliveryReport do
4
- subject { Rdkafka::Producer::DeliveryReport.new(2, 100) }
4
+ subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
5
5
 
6
6
  it "should get the partition" do
7
7
  expect(subject.partition).to eq 2
@@ -10,4 +10,8 @@ describe Rdkafka::Producer::DeliveryReport do
10
10
  it "should get the offset" do
11
11
  expect(subject.offset).to eq 100
12
12
  end
13
+
14
+ it "should get the error" do
15
+ expect(subject.error).to eq "error"
16
+ end
13
17
  end
@@ -2,50 +2,102 @@ require "spec_helper"
2
2
 
3
3
  describe Rdkafka::Producer do
4
4
  let(:producer) { rdkafka_config.producer }
5
+ let(:consumer) { rdkafka_config.consumer }
5
6
 
6
7
  after do
7
8
  # Registry should always end up being empty
8
9
  expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
10
+ producer.close
11
+ consumer.close
9
12
  end
10
13
 
11
14
  context "delivery callback" do
12
- it "should set the callback" do
13
- expect {
14
- producer.delivery_callback = lambda do |delivery_handle|
15
- puts stats
15
+ context "with a proc/lambda" do
16
+ it "should set the callback" do
17
+ expect {
18
+ producer.delivery_callback = lambda do |delivery_handle|
19
+ puts delivery_handle
20
+ end
21
+ }.not_to raise_error
22
+ expect(producer.delivery_callback).to respond_to :call
23
+ end
24
+
25
+ it "should call the callback when a message is delivered" do
26
+ @callback_called = false
27
+
28
+ producer.delivery_callback = lambda do |report|
29
+ expect(report).not_to be_nil
30
+ expect(report.partition).to eq 1
31
+ expect(report.offset).to be >= 0
32
+ @callback_called = true
16
33
  end
17
- }.not_to raise_error
18
- expect(producer.delivery_callback).to be_a Proc
19
- end
20
34
 
21
- it "should not accept a callback that's not a proc" do
22
- expect {
23
- producer.delivery_callback = 'a string'
24
- }.to raise_error(TypeError)
25
- end
35
+ # Produce a message
36
+ handle = producer.produce(
37
+ topic: "produce_test_topic",
38
+ payload: "payload",
39
+ key: "key"
40
+ )
41
+
42
+ # Wait for it to be delivered
43
+ handle.wait(max_wait_timeout: 15)
26
44
 
27
- it "should call the callback when a message is delivered" do
28
- @callback_called = false
45
+ # Join the producer thread.
46
+ producer.close
29
47
 
30
- producer.delivery_callback = lambda do |report|
31
- expect(report).not_to be_nil
32
- expect(report.partition).to eq 1
33
- expect(report.offset).to be >= 0
34
- @callback_called = true
48
+ # Callback should have been called
49
+ expect(@callback_called).to be true
35
50
  end
51
+ end
36
52
 
37
- # Produce a message
38
- handle = producer.produce(
39
- topic: "produce_test_topic",
40
- payload: "payload",
41
- key: "key"
42
- )
53
+ context "with a callable object" do
54
+ it "should set the callback" do
55
+ callback = Class.new do
56
+ def call(stats); end
57
+ end
58
+ expect {
59
+ producer.delivery_callback = callback.new
60
+ }.not_to raise_error
61
+ expect(producer.delivery_callback).to respond_to :call
62
+ end
63
+
64
+ it "should call the callback when a message is delivered" do
65
+ called_report = []
66
+ callback = Class.new do
67
+ def initialize(called_report)
68
+ @called_report = called_report
69
+ end
70
+
71
+ def call(report)
72
+ @called_report << report
73
+ end
74
+ end
75
+ producer.delivery_callback = callback.new(called_report)
76
+
77
+ # Produce a message
78
+ handle = producer.produce(
79
+ topic: "produce_test_topic",
80
+ payload: "payload",
81
+ key: "key"
82
+ )
83
+
84
+ # Wait for it to be delivered
85
+ handle.wait(max_wait_timeout: 15)
43
86
 
44
- # Wait for it to be delivered
45
- handle.wait(5)
87
+ # Join the producer thread.
88
+ producer.close
46
89
 
47
- # Callback should have been called
48
- expect(@callback_called).to be true
90
+ # Callback should have been called
91
+ expect(called_report.first).not_to be_nil
92
+ expect(called_report.first.partition).to eq 1
93
+ expect(called_report.first.offset).to be >= 0
94
+ end
95
+ end
96
+
97
+ it "should not accept a callback that's not callable" do
98
+ expect {
99
+ producer.delivery_callback = 'a string'
100
+ }.to raise_error(TypeError)
49
101
  end
50
102
  end
51
103
 
@@ -55,7 +107,7 @@ describe Rdkafka::Producer do
55
107
  payload: "payload",
56
108
  key: "key"
57
109
  )
58
- }.to raise_error ArgumentError, "missing keyword: topic"
110
+ }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
59
111
  end
60
112
 
61
113
  it "should produce a message" do
@@ -70,7 +122,7 @@ describe Rdkafka::Producer do
70
122
  expect(handle.pending?).to be true
71
123
 
72
124
  # Check delivery handle and report
73
- report = handle.wait(5)
125
+ report = handle.wait(max_wait_timeout: 5)
74
126
  expect(handle.pending?).to be false
75
127
  expect(report).not_to be_nil
76
128
  expect(report.partition).to eq 1
@@ -82,14 +134,15 @@ describe Rdkafka::Producer do
82
134
  # Consume message and verify it's content
83
135
  message = wait_for_message(
84
136
  topic: "produce_test_topic",
85
- delivery_report: report
137
+ delivery_report: report,
138
+ consumer: consumer
86
139
  )
87
140
  expect(message.partition).to eq 1
88
141
  expect(message.payload).to eq "payload"
89
142
  expect(message.key).to eq "key"
90
143
  # Since api.version.request is on by default we will get
91
144
  # the message creation timestamp if it's not set.
92
- expect(message.timestamp).to be_within(5).of(Time.now)
145
+ expect(message.timestamp).to be_within(10).of(Time.now)
93
146
  end
94
147
 
95
148
  it "should produce a message with a specified partition" do
@@ -100,29 +153,65 @@ describe Rdkafka::Producer do
100
153
  key: "key partition",
101
154
  partition: 1
102
155
  )
103
- report = handle.wait(5)
156
+ report = handle.wait(max_wait_timeout: 5)
104
157
 
105
158
  # Consume message and verify it's content
106
159
  message = wait_for_message(
107
160
  topic: "produce_test_topic",
108
- delivery_report: report
161
+ delivery_report: report,
162
+ consumer: consumer
109
163
  )
110
164
  expect(message.partition).to eq 1
111
165
  expect(message.key).to eq "key partition"
112
166
  end
113
167
 
168
+ it "should produce a message to the same partition with a similar partition key" do
169
+ # Avoid partitioner collisions.
170
+ while true
171
+ key = ('a'..'z').to_a.shuffle.take(10).join('')
172
+ partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
173
+ partition_count = producer.partition_count('partitioner_test_topic')
174
+ break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
175
+ end
176
+
177
+ # Produce a message with key, partition_key and key + partition_key
178
+ messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
179
+
180
+ messages = messages.map do |m|
181
+ handle = producer.produce(
182
+ topic: "partitioner_test_topic",
183
+ payload: "payload partition",
184
+ key: m[:key],
185
+ partition_key: m[:partition_key]
186
+ )
187
+ report = handle.wait(max_wait_timeout: 5)
188
+
189
+ wait_for_message(
190
+ topic: "partitioner_test_topic",
191
+ delivery_report: report,
192
+ )
193
+ end
194
+
195
+ expect(messages[0].partition).not_to eq(messages[2].partition)
196
+ expect(messages[1].partition).to eq(messages[2].partition)
197
+ expect(messages[0].key).to eq key
198
+ expect(messages[1].key).to be_nil
199
+ expect(messages[2].key).to eq key
200
+ end
201
+
114
202
  it "should produce a message with utf-8 encoding" do
115
203
  handle = producer.produce(
116
204
  topic: "produce_test_topic",
117
205
  payload: "Τη γλώσσα μου έδωσαν ελληνική",
118
206
  key: "key utf8"
119
207
  )
120
- report = handle.wait(5)
208
+ report = handle.wait(max_wait_timeout: 5)
121
209
 
122
210
  # Consume message and verify it's content
123
211
  message = wait_for_message(
124
212
  topic: "produce_test_topic",
125
- delivery_report: report
213
+ delivery_report: report,
214
+ consumer: consumer
126
215
  )
127
216
 
128
217
  expect(message.partition).to eq 1
@@ -149,12 +238,13 @@ describe Rdkafka::Producer do
149
238
  key: "key timestamp",
150
239
  timestamp: 1505069646252
151
240
  )
152
- report = handle.wait(5)
241
+ report = handle.wait(max_wait_timeout: 5)
153
242
 
154
243
  # Consume message and verify it's content
155
244
  message = wait_for_message(
156
245
  topic: "produce_test_topic",
157
- delivery_report: report
246
+ delivery_report: report,
247
+ consumer: consumer
158
248
  )
159
249
 
160
250
  expect(message.partition).to eq 2
@@ -169,12 +259,13 @@ describe Rdkafka::Producer do
169
259
  key: "key timestamp",
170
260
  timestamp: Time.at(1505069646, 353_000)
171
261
  )
172
- report = handle.wait(5)
262
+ report = handle.wait(max_wait_timeout: 5)
173
263
 
174
264
  # Consume message and verify it's content
175
265
  message = wait_for_message(
176
266
  topic: "produce_test_topic",
177
- delivery_report: report
267
+ delivery_report: report,
268
+ consumer: consumer
178
269
  )
179
270
 
180
271
  expect(message.partition).to eq 2
@@ -188,12 +279,13 @@ describe Rdkafka::Producer do
188
279
  topic: "produce_test_topic",
189
280
  payload: "payload no key"
190
281
  )
191
- report = handle.wait(5)
282
+ report = handle.wait(max_wait_timeout: 5)
192
283
 
193
284
  # Consume message and verify it's content
194
285
  message = wait_for_message(
195
286
  topic: "produce_test_topic",
196
- delivery_report: report
287
+ delivery_report: report,
288
+ consumer: consumer
197
289
  )
198
290
 
199
291
  expect(message.key).to be_nil
@@ -205,12 +297,13 @@ describe Rdkafka::Producer do
205
297
  topic: "produce_test_topic",
206
298
  key: "key no payload"
207
299
  )
208
- report = handle.wait(5)
300
+ report = handle.wait(max_wait_timeout: 5)
209
301
 
210
302
  # Consume message and verify it's content
211
303
  message = wait_for_message(
212
304
  topic: "produce_test_topic",
213
- delivery_report: report
305
+ delivery_report: report,
306
+ consumer: consumer
214
307
  )
215
308
 
216
309
  expect(message.key).to eq "key no payload"
@@ -224,12 +317,13 @@ describe Rdkafka::Producer do
224
317
  key: "key headers",
225
318
  headers: { foo: :bar, baz: :foobar }
226
319
  )
227
- report = handle.wait(5)
320
+ report = handle.wait(max_wait_timeout: 5)
228
321
 
229
322
  # Consume message and verify it's content
230
323
  message = wait_for_message(
231
324
  topic: "produce_test_topic",
232
- delivery_report: report
325
+ delivery_report: report,
326
+ consumer: consumer
233
327
  )
234
328
 
235
329
  expect(message.payload).to eq "payload headers"
@@ -246,12 +340,13 @@ describe Rdkafka::Producer do
246
340
  key: "key headers",
247
341
  headers: {}
248
342
  )
249
- report = handle.wait(5)
343
+ report = handle.wait(max_wait_timeout: 5)
250
344
 
251
345
  # Consume message and verify it's content
252
346
  message = wait_for_message(
253
347
  topic: "produce_test_topic",
254
- delivery_report: report
348
+ delivery_report: report,
349
+ consumer: consumer
255
350
  )
256
351
 
257
352
  expect(message.payload).to eq "payload headers"
@@ -280,55 +375,58 @@ describe Rdkafka::Producer do
280
375
  end
281
376
  end
282
377
 
283
- # TODO this spec crashes if you create and use the producer before
284
- # forking like so:
285
- #
286
- # @producer = producer
287
- #
288
- # This will be added as part of https://github.com/appsignal/rdkafka-ruby/issues/19
289
- #it "should produce a message in a forked process" do
290
- # # Fork, produce a message, send the report of a pipe and
291
- # # wait for it in the main process.
292
-
293
- # reader, writer = IO.pipe
294
-
295
- # fork do
296
- # reader.close
297
-
298
- # handle = producer.produce(
299
- # topic: "produce_test_topic",
300
- # payload: "payload",
301
- # key: "key"
302
- # )
303
-
304
- # report = handle.wait(5)
305
- # producer.close
306
-
307
- # report_json = JSON.generate(
308
- # "partition" => report.partition,
309
- # "offset" => report.offset
310
- # )
311
-
312
- # writer.write(report_json)
313
- # end
314
-
315
- # writer.close
316
-
317
- # report_hash = JSON.parse(reader.read)
318
- # report = Rdkafka::Producer::DeliveryReport.new(
319
- # report_hash["partition"],
320
- # report_hash["offset"]
321
- # )
322
-
323
- # # Consume message and verify it's content
324
- # message = wait_for_message(
325
- # topic: "produce_test_topic",
326
- # delivery_report: report
327
- # )
328
- # expect(message.partition).to eq 1
329
- # expect(message.payload).to eq "payload"
330
- # expect(message.key).to eq "key"
331
- #end
378
+ it "should produce a message in a forked process" do
379
+ # Fork, produce a message, send the report over a pipe and
380
+ # wait for and check the message in the main process.
381
+
382
+ # Kernel#fork is not available in JRuby
383
+ skip if defined?(JRUBY_VERSION)
384
+
385
+ reader, writer = IO.pipe
386
+
387
+ fork do
388
+ reader.close
389
+
390
+ # Avoids sharing the socket between processes.
391
+ producer = rdkafka_config.producer
392
+
393
+ handle = producer.produce(
394
+ topic: "produce_test_topic",
395
+ payload: "payload-forked",
396
+ key: "key-forked"
397
+ )
398
+
399
+ report = handle.wait(max_wait_timeout: 5)
400
+
401
+ report_json = JSON.generate(
402
+ "partition" => report.partition,
403
+ "offset" => report.offset
404
+ )
405
+
406
+ writer.write(report_json)
407
+ writer.close
408
+ producer.close
409
+ end
410
+
411
+ writer.close
412
+ report_hash = JSON.parse(reader.read)
413
+ report = Rdkafka::Producer::DeliveryReport.new(
414
+ report_hash["partition"],
415
+ report_hash["offset"]
416
+ )
417
+
418
+ reader.close
419
+
420
+ # Consume message and verify it's content
421
+ message = wait_for_message(
422
+ topic: "produce_test_topic",
423
+ delivery_report: report,
424
+ consumer: consumer
425
+ )
426
+ expect(message.partition).to eq 0
427
+ expect(message.payload).to eq "payload-forked"
428
+ expect(message.key).to eq "key-forked"
429
+ end
332
430
 
333
431
  it "should raise an error when producing fails" do
334
432
  expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)
@@ -348,10 +446,32 @@ describe Rdkafka::Producer do
348
446
  key: "key timeout"
349
447
  )
350
448
  expect {
351
- handle.wait(0)
449
+ handle.wait(max_wait_timeout: 0)
352
450
  }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
353
451
 
354
452
  # Waiting a second time should work
355
- handle.wait(5)
453
+ handle.wait(max_wait_timeout: 5)
454
+ end
455
+
456
+ context "methods that should not be called after a producer has been closed" do
457
+ before do
458
+ producer.close
459
+ end
460
+
461
+ # Affected methods and a non-invalid set of parameters for the method
462
+ {
463
+ :produce => { topic: nil },
464
+ :partition_count => nil,
465
+ }.each do |method, args|
466
+ it "raises an exception if #{method} is called" do
467
+ expect {
468
+ if args.is_a?(Hash)
469
+ producer.public_send(method, **args)
470
+ else
471
+ producer.public_send(method, args)
472
+ end
473
+ }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
474
+ end
475
+ end
356
476
  end
357
477
  end