rdkafka 0.5.0 → 0.8.1

Files changed (47)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +23 -0
  3. data/CHANGELOG.md +23 -0
  4. data/README.md +9 -9
  5. data/docker-compose.yml +17 -11
  6. data/ext/README.md +3 -15
  7. data/ext/Rakefile +23 -3
  8. data/lib/rdkafka.rb +8 -0
  9. data/lib/rdkafka/abstract_handle.rb +82 -0
  10. data/lib/rdkafka/admin.rb +144 -0
  11. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  12. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  13. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  14. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  15. data/lib/rdkafka/bindings.rb +63 -17
  16. data/lib/rdkafka/callbacks.rb +106 -0
  17. data/lib/rdkafka/config.rb +18 -7
  18. data/lib/rdkafka/consumer.rb +162 -46
  19. data/lib/rdkafka/consumer/headers.rb +7 -5
  20. data/lib/rdkafka/consumer/partition.rb +1 -1
  21. data/lib/rdkafka/consumer/topic_partition_list.rb +6 -16
  22. data/lib/rdkafka/error.rb +35 -4
  23. data/lib/rdkafka/metadata.rb +92 -0
  24. data/lib/rdkafka/producer.rb +43 -15
  25. data/lib/rdkafka/producer/delivery_handle.rb +7 -49
  26. data/lib/rdkafka/producer/delivery_report.rb +7 -2
  27. data/lib/rdkafka/version.rb +3 -3
  28. data/rdkafka.gemspec +3 -3
  29. data/spec/rdkafka/abstract_handle_spec.rb +114 -0
  30. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  31. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  32. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  33. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  34. data/spec/rdkafka/admin_spec.rb +192 -0
  35. data/spec/rdkafka/bindings_spec.rb +20 -2
  36. data/spec/rdkafka/callbacks_spec.rb +20 -0
  37. data/spec/rdkafka/config_spec.rb +17 -2
  38. data/spec/rdkafka/consumer/message_spec.rb +6 -1
  39. data/spec/rdkafka/consumer_spec.rb +145 -19
  40. data/spec/rdkafka/error_spec.rb +7 -3
  41. data/spec/rdkafka/metadata_spec.rb +78 -0
  42. data/spec/rdkafka/producer/delivery_handle_spec.rb +3 -43
  43. data/spec/rdkafka/producer/delivery_report_spec.rb +5 -1
  44. data/spec/rdkafka/producer_spec.rb +147 -72
  45. data/spec/spec_helper.rb +34 -6
  46. metadata +34 -10
  47. data/.travis.yml +0 -34
data/spec/rdkafka/producer/delivery_handle_spec.rb

@@ -12,42 +12,13 @@ describe Rdkafka::Producer::DeliveryHandle do
     end
   end
 
-  describe ".register and .remove" do
-    let(:pending_handle) { true }
-
-    it "should register and remove a delivery handle" do
-      Rdkafka::Producer::DeliveryHandle.register(subject.to_ptr.address, subject)
-      removed = Rdkafka::Producer::DeliveryHandle.remove(subject.to_ptr.address)
-      expect(removed).to eq subject
-      expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
-    end
-  end
-
-  describe "#pending?" do
-    context "when true" do
-      let(:pending_handle) { true }
-
-      it "should be true" do
-        expect(subject.pending?).to be true
-      end
-    end
-
-    context "when not true" do
-      let(:pending_handle) { false }
-
-      it "should be false" do
-        expect(subject.pending?).to be false
-      end
-    end
-  end
-
   describe "#wait" do
     let(:pending_handle) { true }
 
     it "should wait until the timeout and then raise an error" do
       expect {
-        subject.wait(0.1)
-      }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
+        subject.wait(max_wait_timeout: 0.1)
+      }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError, /delivery/
     end
 
     context "when not pending anymore and no error" do
@@ -61,22 +32,11 @@ describe Rdkafka::Producer::DeliveryHandle do
       end
 
       it "should wait without a timeout" do
-        report = subject.wait(nil)
+        report = subject.wait(max_wait_timeout: nil)
 
         expect(report.partition).to eq(2)
         expect(report.offset).to eq(100)
       end
     end
-
-    context "when not pending anymore and there was an error" do
-      let(:pending_handle) { false }
-      let(:response) { 20 }
-
-      it "should raise an rdkafka error" do
-        expect {
-          subject.wait
-        }.to raise_error Rdkafka::RdkafkaError
-      end
-    end
   end
 end
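
The change above replaces DeliveryHandle#wait's positional timeout with a max_wait_timeout: keyword and adds a message match to the timeout error. A minimal usage sketch of the new signature (the broker address and topic name are placeholders, not taken from this diff):

    require "rdkafka"

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    producer = config.producer

    handle = producer.produce(topic: "example_topic", payload: "hello", key: "k1")

    begin
      # The timeout is now a keyword argument; pass nil to wait indefinitely.
      report = handle.wait(max_wait_timeout: 5)
      puts "Delivered to partition #{report.partition} at offset #{report.offset}"
    rescue Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
      # The handle can be waited on again; the message may still arrive.
      puts "Delivery not confirmed within 5 seconds"
    ensure
      producer.close
    end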

data/spec/rdkafka/producer/delivery_report_spec.rb

@@ -1,7 +1,7 @@
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100) }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -10,4 +10,8 @@ describe Rdkafka::Producer::DeliveryReport do
   it "should get the offset" do
     expect(subject.offset).to eq 100
   end
+
+  it "should get the error" do
+    expect(subject.error).to eq "error"
+  end
 end
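
DeliveryReport now takes a third constructor argument and exposes it as #error. A sketch of checking it from the delivery callback shown in the producer specs below (treating a nil error as success is an assumption, not something this diff states):

    producer.delivery_callback = lambda do |report|
      if report.error
        # Assumed: error is nil on success and set when delivery failed.
        warn "Delivery failed: #{report.error}"
      else
        puts "Delivered to partition #{report.partition} at offset #{report.offset}"
      end
    end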

data/spec/rdkafka/producer_spec.rb

@@ -2,10 +2,13 @@ require "spec_helper"
 
 describe Rdkafka::Producer do
   let(:producer) { rdkafka_config.producer }
+  let(:consumer) { rdkafka_config.consumer }
 
   after do
     # Registry should always end up being empty
     expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    producer.close
+    consumer.close
   end
 
   context "delivery callback" do
@@ -27,6 +30,7 @@ describe Rdkafka::Producer do
     it "should call the callback when a message is delivered" do
       @callback_called = false
 
+
       producer.delivery_callback = lambda do |report|
         expect(report).not_to be_nil
         expect(report.partition).to eq 1
@@ -42,7 +46,10 @@ describe Rdkafka::Producer do
       )
 
       # Wait for it to be delivered
-      handle.wait(5)
+      handle.wait(max_wait_timeout: 15)
+
+      # Join the producer thread.
+      producer.close
 
       # Callback should have been called
       expect(@callback_called).to be true
@@ -55,7 +62,7 @@ describe Rdkafka::Producer do
         payload: "payload",
         key: "key"
       )
-    }.to raise_error ArgumentError, "missing keyword: topic"
+    }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
   end
 
   it "should produce a message" do
@@ -70,7 +77,7 @@ describe Rdkafka::Producer do
     expect(handle.pending?).to be true
 
     # Check delivery handle and report
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
    expect(handle.pending?).to be false
     expect(report).not_to be_nil
     expect(report.partition).to eq 1
@@ -82,14 +89,15 @@ describe Rdkafka::Producer do
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
     expect(message.partition).to eq 1
     expect(message.payload).to eq "payload"
     expect(message.key).to eq "key"
     # Since api.version.request is on by default we will get
     # the message creation timestamp if it's not set.
-    expect(message.timestamp).to be_within(5).of(Time.now)
+    expect(message.timestamp).to be_within(10).of(Time.now)
   end
 
   it "should produce a message with a specified partition" do
@@ -100,29 +108,65 @@ describe Rdkafka::Producer do
       key: "key partition",
       partition: 1
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
     expect(message.partition).to eq 1
     expect(message.key).to eq "key partition"
   end
 
+  it "should produce a message to the same partition with a similar partition key" do
+    # Avoid partitioner collisions.
+    while true
+      key = ('a'..'z').to_a.shuffle.take(10).join('')
+      partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
+      partition_count = producer.partition_count('partitioner_test_topic')
+      break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
+    end
+
+    # Produce a message with key, partition_key and key + partition_key
+    messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).not_to eq(messages[2].partition)
+    expect(messages[1].partition).to eq(messages[2].partition)
+    expect(messages[0].key).to eq key
+    expect(messages[1].key).to be_nil
+    expect(messages[2].key).to eq key
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
       payload: "Τη γλώσσα μου έδωσαν ελληνική",
       key: "key utf8"
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
    )
 
     expect(message.partition).to eq 1
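
The new spec above exercises the partition_key: option on Producer#produce and the Producer#partition_count helper: messages that share a partition key are routed to the same partition. A short sketch of that behaviour (the topic name is a placeholder and must already exist on the broker):

    count = producer.partition_count("partitioner_test_topic")
    puts "partitioner_test_topic has #{count} partitions"

    handles = 2.times.map do |i|
      producer.produce(
        topic: "partitioner_test_topic",
        payload: "payload #{i}",
        partition_key: "user-42" # same partition key for both messages
      )
    end

    reports = handles.map { |h| h.wait(max_wait_timeout: 5) }

    # The spec's collision check (Zlib.crc32 % partition_count) relies on the
    # partition key being hashed consistently, so both reports name the same partition.
    puts reports.map(&:partition).uniq.length # => 1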
@@ -149,12 +193,13 @@ describe Rdkafka::Producer do
       key: "key timestamp",
       timestamp: 1505069646252
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.partition).to eq 2
@@ -169,12 +214,13 @@ describe Rdkafka::Producer do
       key: "key timestamp",
       timestamp: Time.at(1505069646, 353_000)
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.partition).to eq 2
@@ -188,12 +234,13 @@ describe Rdkafka::Producer do
       topic: "produce_test_topic",
       payload: "payload no key"
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.key).to be_nil
@@ -205,12 +252,13 @@ describe Rdkafka::Producer do
       topic: "produce_test_topic",
       key: "key no payload"
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.key).to eq "key no payload"
@@ -224,12 +272,13 @@ describe Rdkafka::Producer do
       key: "key headers",
       headers: { foo: :bar, baz: :foobar }
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.payload).to eq "payload headers"
@@ -246,12 +295,13 @@ describe Rdkafka::Producer do
       key: "key headers",
       headers: {}
     )
-    report = handle.wait(5)
+    report = handle.wait(max_wait_timeout: 5)
 
     # Consume message and verify it's content
     message = wait_for_message(
       topic: "produce_test_topic",
-      delivery_report: report
+      delivery_report: report,
+      consumer: consumer
     )
 
     expect(message.payload).to eq "payload headers"
@@ -280,55 +330,58 @@ describe Rdkafka::Producer do
     end
   end
 
-  # TODO this spec crashes if you create and use the producer before
-  # forking like so:
-  #
-  #   @producer = producer
-  #
-  # This will be added as part of https://github.com/appsignal/rdkafka-ruby/issues/19
-  #it "should produce a message in a forked process" do
-  #  # Fork, produce a message, send the report of a pipe and
-  #  # wait for it in the main process.
-
-  #  reader, writer = IO.pipe
-
-  #  fork do
-  #    reader.close
-
-  #    handle = producer.produce(
-  #      topic: "produce_test_topic",
-  #      payload: "payload",
-  #      key: "key"
-  #    )
-
-  #    report = handle.wait(5)
-  #    producer.close
-
-  #    report_json = JSON.generate(
-  #      "partition" => report.partition,
-  #      "offset" => report.offset
-  #    )
-
-  #    writer.write(report_json)
-  #  end
-
-  #  writer.close
-
-  #  report_hash = JSON.parse(reader.read)
-  #  report = Rdkafka::Producer::DeliveryReport.new(
-  #    report_hash["partition"],
-  #    report_hash["offset"]
-  #  )
-
-  #  # Consume message and verify it's content
-  #  message = wait_for_message(
-  #    topic: "produce_test_topic",
-  #    delivery_report: report
-  #  )
-  #  expect(message.partition).to eq 1
-  #  expect(message.payload).to eq "payload"
-  #  expect(message.key).to eq "key"
-  #end
+  it "should produce a message in a forked process" do
+    # Fork, produce a message, send the report over a pipe and
+    # wait for and check the message in the main process.
+
+    # Kernel#fork is not available in JRuby
+    skip if defined?(JRUBY_VERSION)
+
+    reader, writer = IO.pipe
+
+    fork do
+      reader.close
+
+      # Avoids sharing the socket between processes.
+      producer = rdkafka_config.producer
+
+      handle = producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload-forked",
+        key: "key-forked"
+      )
+
+      report = handle.wait(max_wait_timeout: 5)
+
+      report_json = JSON.generate(
+        "partition" => report.partition,
+        "offset" => report.offset
+      )
+
+      writer.write(report_json)
+      writer.close
+      producer.close
+    end
+
+    writer.close
+    report_hash = JSON.parse(reader.read)
+    report = Rdkafka::Producer::DeliveryReport.new(
+      report_hash["partition"],
+      report_hash["offset"]
+    )
+
+    reader.close
+
+    # Consume message and verify it's content
+    message = wait_for_message(
+      topic: "produce_test_topic",
+      delivery_report: report,
+      consumer: consumer
+    )
+    expect(message.partition).to eq 0
+    expect(message.payload).to eq "payload-forked"
+    expect(message.key).to eq "key-forked"
+  end
 
   it "should raise an error when producing fails" do
     expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)
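
The previously commented-out fork spec is re-enabled above. The pattern it encodes for application code: build the producer inside the child process rather than sharing one across fork, so the underlying librdkafka socket and threads are not shared. A condensed sketch outside the test suite (the Config construction and broker address are assumptions; the spec itself uses the rdkafka_config helper):

    pid = fork do
      # Create the producer after forking to avoid sharing client state.
      producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
      handle = producer.produce(topic: "produce_test_topic", payload: "from child", key: "fork")
      handle.wait(max_wait_timeout: 5)
      producer.close
    end
    Process.wait(pid)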
@@ -348,10 +401,32 @@ describe Rdkafka::Producer do
       key: "key timeout"
     )
     expect {
-      handle.wait(0)
+      handle.wait(max_wait_timeout: 0)
     }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
 
     # Waiting a second time should work
-    handle.wait(5)
+    handle.wait(max_wait_timeout: 5)
+  end
+
+  context "methods that should not be called after a producer has been closed" do
+    before do
+      producer.close
+    end
+
+    # Affected methods and a non-invalid set of parameters for the method
+    {
+      :produce => { topic: nil },
+      :partition_count => nil,
+    }.each do |method, args|
+      it "raises an exception if #{method} is called" do
+        expect {
+          if args.is_a?(Hash)
+            producer.public_send(method, **args)
+          else
+            producer.public_send(method, args)
+          end
+        }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
+      end
+    end
   end
 end
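
The new context above documents Rdkafka::ClosedProducerError: once a producer has been closed, produce and partition_count raise it, with the offending method named in the message. A small sketch (broker address and topic name are placeholders):

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
    producer.close

    begin
      producer.produce(topic: "example_topic", payload: "too late")
    rescue Rdkafka::ClosedProducerError => e
      puts e.message # the message names the method, here "produce"
    end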