karafka-rdkafka 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
@@ -0,0 +1,525 @@
1
+ require "spec_helper"
2
+ require "zlib"
3
+
4
+ describe Rdkafka::Producer do
5
+ let(:producer) { rdkafka_producer_config.producer }
6
+ let(:consumer) { rdkafka_consumer_config.consumer }
7
+
8
+ after do
9
+ # Registry should always end up being empty
10
+ expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
11
+ producer.close
12
+ consumer.close
13
+ end
14
+
15
+ context "delivery callback" do
16
+ context "with a proc/lambda" do
17
+ it "should set the callback" do
18
+ expect {
19
+ producer.delivery_callback = lambda do |delivery_handle|
20
+ puts delivery_handle
21
+ end
22
+ }.not_to raise_error
23
+ expect(producer.delivery_callback).to respond_to :call
24
+ end
25
+
26
+ it "should call the callback when a message is delivered" do
27
+ @callback_called = false
28
+
29
+ producer.delivery_callback = lambda do |report|
30
+ expect(report).not_to be_nil
31
+ expect(report.partition).to eq 1
32
+ expect(report.offset).to be >= 0
33
+ @callback_called = true
34
+ end
35
+
36
+ # Produce a message
37
+ handle = producer.produce(
38
+ topic: "produce_test_topic",
39
+ payload: "payload",
40
+ key: "key"
41
+ )
42
+
43
+ # Wait for it to be delivered
44
+ handle.wait(max_wait_timeout: 15)
45
+
46
+ # Join the producer thread.
47
+ producer.close
48
+
49
+ # Callback should have been called
50
+ expect(@callback_called).to be true
51
+ end
52
+
53
+ it "should provide handle" do
54
+ @callback_handle = nil
55
+
56
+ producer.delivery_callback = lambda { |_, handle| @callback_handle = handle }
57
+
58
+ # Produce a message
59
+ handle = producer.produce(
60
+ topic: "produce_test_topic",
61
+ payload: "payload",
62
+ key: "key"
63
+ )
64
+
65
+ # Wait for it to be delivered
66
+ handle.wait(max_wait_timeout: 15)
67
+
68
+ # Join the producer thread.
69
+ producer.close
70
+
71
+ expect(handle).to be @callback_handle
72
+ end
73
+ end
74
+
75
+ context "with a callable object" do
76
+ it "should set the callback" do
77
+ callback = Class.new do
78
+ def call(stats); end
79
+ end
80
+ expect {
81
+ producer.delivery_callback = callback.new
82
+ }.not_to raise_error
83
+ expect(producer.delivery_callback).to respond_to :call
84
+ end
85
+
86
+ it "should call the callback when a message is delivered" do
87
+ called_report = []
88
+ callback = Class.new do
89
+ def initialize(called_report)
90
+ @called_report = called_report
91
+ end
92
+
93
+ def call(report)
94
+ @called_report << report
95
+ end
96
+ end
97
+ producer.delivery_callback = callback.new(called_report)
98
+
99
+ # Produce a message
100
+ handle = producer.produce(
101
+ topic: "produce_test_topic",
102
+ payload: "payload",
103
+ key: "key"
104
+ )
105
+
106
+ # Wait for it to be delivered
107
+ handle.wait(max_wait_timeout: 15)
108
+
109
+ # Join the producer thread.
110
+ producer.close
111
+
112
+ # Callback should have been called
113
+ expect(called_report.first).not_to be_nil
114
+ expect(called_report.first.partition).to eq 1
115
+ expect(called_report.first.offset).to be >= 0
116
+ end
117
+
118
+ it "should provide handle" do
119
+ callback_handles = []
120
+ callback = Class.new do
121
+ def initialize(callback_handles)
122
+ @callback_handles = callback_handles
123
+ end
124
+
125
+ def call(_, handle)
126
+ @callback_handles << handle
127
+ end
128
+ end
129
+ producer.delivery_callback = callback.new(callback_handles)
130
+
131
+ # Produce a message
132
+ handle = producer.produce(
133
+ topic: "produce_test_topic",
134
+ payload: "payload",
135
+ key: "key"
136
+ )
137
+
138
+ # Wait for it to be delivered
139
+ handle.wait(max_wait_timeout: 15)
140
+
141
+ # Join the producer thread.
142
+ producer.close
143
+
144
+ # Callback should have been called
145
+ expect(handle).to be callback_handles.first
146
+ end
147
+ end
148
+
149
+ it "should not accept a callback that's not callable" do
150
+ expect {
151
+ producer.delivery_callback = 'a string'
152
+ }.to raise_error(TypeError)
153
+ end
154
+ end
155
+
156
+ it "should require a topic" do
157
+ expect {
158
+ producer.produce(
159
+ payload: "payload",
160
+ key: "key"
161
+ )
162
+ }.to raise_error ArgumentError, /missing keyword: [\:]?topic/
163
+ end
164
+
165
+ it "should produce a message" do
166
+ # Produce a message
167
+ handle = producer.produce(
168
+ topic: "produce_test_topic",
169
+ payload: "payload",
170
+ key: "key"
171
+ )
172
+
173
+ # Should be pending at first
174
+ expect(handle.pending?).to be true
175
+
176
+ # Check delivery handle and report
177
+ report = handle.wait(max_wait_timeout: 5)
178
+ expect(handle.pending?).to be false
179
+ expect(report).not_to be_nil
180
+ expect(report.partition).to eq 1
181
+ expect(report.offset).to be >= 0
182
+
183
+ # Close producer
184
+ producer.close
185
+
186
+ # Consume message and verify its content
187
+ message = wait_for_message(
188
+ topic: "produce_test_topic",
189
+ delivery_report: report,
190
+ consumer: consumer
191
+ )
192
+ expect(message.partition).to eq 1
193
+ expect(message.payload).to eq "payload"
194
+ expect(message.key).to eq "key"
195
+ # Since api.version.request is on by default we will get
196
+ # the message creation timestamp if it's not set.
197
+ expect(message.timestamp).to be_within(10).of(Time.now)
198
+ end
199
+
200
+ it "should produce a message with a specified partition" do
201
+ # Produce a message
202
+ handle = producer.produce(
203
+ topic: "produce_test_topic",
204
+ payload: "payload partition",
205
+ key: "key partition",
206
+ partition: 1
207
+ )
208
+ report = handle.wait(max_wait_timeout: 5)
209
+
210
+ # Consume message and verify its content
211
+ message = wait_for_message(
212
+ topic: "produce_test_topic",
213
+ delivery_report: report,
214
+ consumer: consumer
215
+ )
216
+ expect(message.partition).to eq 1
217
+ expect(message.key).to eq "key partition"
218
+ end
219
+
220
+ it "should produce a message to the same partition with a similar partition key" do
221
+ # Avoid partitioner collisions.
222
+ while true
223
+ key = ('a'..'z').to_a.shuffle.take(10).join('')
224
+ partition_key = ('a'..'z').to_a.shuffle.take(10).join('')
225
+ partition_count = producer.partition_count('partitioner_test_topic')
226
+ break if (Zlib.crc32(key) % partition_count) != (Zlib.crc32(partition_key) % partition_count)
227
+ end
228
+
229
+ # Produce a message with key, partition_key and key + partition_key
230
+ messages = [{key: key}, {partition_key: partition_key}, {key: key, partition_key: partition_key}]
231
+
232
+ messages = messages.map do |m|
233
+ handle = producer.produce(
234
+ topic: "partitioner_test_topic",
235
+ payload: "payload partition",
236
+ key: m[:key],
237
+ partition_key: m[:partition_key]
238
+ )
239
+ report = handle.wait(max_wait_timeout: 5)
240
+
241
+ wait_for_message(
242
+ topic: "partitioner_test_topic",
243
+ delivery_report: report,
244
+ )
245
+ end
246
+
247
+ expect(messages[0].partition).not_to eq(messages[2].partition)
248
+ expect(messages[1].partition).to eq(messages[2].partition)
249
+ expect(messages[0].key).to eq key
250
+ expect(messages[1].key).to be_nil
251
+ expect(messages[2].key).to eq key
252
+ end
253
+
254
+ it "should produce a message with utf-8 encoding" do
255
+ handle = producer.produce(
256
+ topic: "produce_test_topic",
257
+ payload: "Τη γλώσσα μου έδωσαν ελληνική",
258
+ key: "key utf8"
259
+ )
260
+ report = handle.wait(max_wait_timeout: 5)
261
+
262
+ # Consume message and verify its content
263
+ message = wait_for_message(
264
+ topic: "produce_test_topic",
265
+ delivery_report: report,
266
+ consumer: consumer
267
+ )
268
+
269
+ expect(message.partition).to eq 1
270
+ expect(message.payload.force_encoding("utf-8")).to eq "Τη γλώσσα μου έδωσαν ελληνική"
271
+ expect(message.key).to eq "key utf8"
272
+ end
273
+
274
+ context "timestamp" do
275
+ it "should raise a type error if not nil, integer or time" do
276
+ expect {
277
+ producer.produce(
278
+ topic: "produce_test_topic",
279
+ payload: "payload timestamp",
280
+ key: "key timestamp",
281
+ timestamp: "10101010"
282
+ )
283
+ }.to raise_error TypeError
284
+ end
285
+
286
+ it "should produce a message with an integer timestamp" do
287
+ handle = producer.produce(
288
+ topic: "produce_test_topic",
289
+ payload: "payload timestamp",
290
+ key: "key timestamp",
291
+ timestamp: 1505069646252
292
+ )
293
+ report = handle.wait(max_wait_timeout: 5)
294
+
295
+ # Consume message and verify its content
296
+ message = wait_for_message(
297
+ topic: "produce_test_topic",
298
+ delivery_report: report,
299
+ consumer: consumer
300
+ )
301
+
302
+ expect(message.partition).to eq 2
303
+ expect(message.key).to eq "key timestamp"
304
+ expect(message.timestamp).to eq Time.at(1505069646, 252_000)
305
+ end
306
+
307
+ it "should produce a message with a time timestamp" do
308
+ handle = producer.produce(
309
+ topic: "produce_test_topic",
310
+ payload: "payload timestamp",
311
+ key: "key timestamp",
312
+ timestamp: Time.at(1505069646, 353_000)
313
+ )
314
+ report = handle.wait(max_wait_timeout: 5)
315
+
316
+ # Consume message and verify its content
317
+ message = wait_for_message(
318
+ topic: "produce_test_topic",
319
+ delivery_report: report,
320
+ consumer: consumer
321
+ )
322
+
323
+ expect(message.partition).to eq 2
324
+ expect(message.key).to eq "key timestamp"
325
+ expect(message.timestamp).to eq Time.at(1505069646, 353_000)
326
+ end
327
+ end
328
+
329
+ it "should produce a message with nil key" do
330
+ handle = producer.produce(
331
+ topic: "produce_test_topic",
332
+ payload: "payload no key"
333
+ )
334
+ report = handle.wait(max_wait_timeout: 5)
335
+
336
+ # Consume message and verify its content
337
+ message = wait_for_message(
338
+ topic: "produce_test_topic",
339
+ delivery_report: report,
340
+ consumer: consumer
341
+ )
342
+
343
+ expect(message.key).to be_nil
344
+ expect(message.payload).to eq "payload no key"
345
+ end
346
+
347
+ it "should produce a message with nil payload" do
348
+ handle = producer.produce(
349
+ topic: "produce_test_topic",
350
+ key: "key no payload"
351
+ )
352
+ report = handle.wait(max_wait_timeout: 5)
353
+
354
+ # Consume message and verify its content
355
+ message = wait_for_message(
356
+ topic: "produce_test_topic",
357
+ delivery_report: report,
358
+ consumer: consumer
359
+ )
360
+
361
+ expect(message.key).to eq "key no payload"
362
+ expect(message.payload).to be_nil
363
+ end
364
+
365
+ it "should produce a message with headers" do
366
+ handle = producer.produce(
367
+ topic: "produce_test_topic",
368
+ payload: "payload headers",
369
+ key: "key headers",
370
+ headers: { foo: :bar, baz: :foobar }
371
+ )
372
+ report = handle.wait(max_wait_timeout: 5)
373
+
374
+ # Consume message and verify its content
375
+ message = wait_for_message(
376
+ topic: "produce_test_topic",
377
+ delivery_report: report,
378
+ consumer: consumer
379
+ )
380
+
381
+ expect(message.payload).to eq "payload headers"
382
+ expect(message.key).to eq "key headers"
383
+ expect(message.headers[:foo]).to eq "bar"
384
+ expect(message.headers[:baz]).to eq "foobar"
385
+ expect(message.headers[:foobar]).to be_nil
386
+ end
387
+
388
+ it "should produce a message with empty headers" do
389
+ handle = producer.produce(
390
+ topic: "produce_test_topic",
391
+ payload: "payload headers",
392
+ key: "key headers",
393
+ headers: {}
394
+ )
395
+ report = handle.wait(max_wait_timeout: 5)
396
+
397
+ # Consume message and verify its content
398
+ message = wait_for_message(
399
+ topic: "produce_test_topic",
400
+ delivery_report: report,
401
+ consumer: consumer
402
+ )
403
+
404
+ expect(message.payload).to eq "payload headers"
405
+ expect(message.key).to eq "key headers"
406
+ expect(message.headers).to be_empty
407
+ end
408
+
409
+ it "should produce message that aren't waited for and not crash" do
410
+ 5.times do
411
+ 200.times do
412
+ producer.produce(
413
+ topic: "produce_test_topic",
414
+ payload: "payload not waiting",
415
+ key: "key not waiting"
416
+ )
417
+ end
418
+
419
+ # Allow some time for a GC run
420
+ sleep 1
421
+ end
422
+
423
+ # Wait for the delivery notifications
424
+ 10.times do
425
+ break if Rdkafka::Producer::DeliveryHandle::REGISTRY.empty?
426
+ sleep 1
427
+ end
428
+ end
429
+
430
+ it "should produce a message in a forked process", skip: defined?(JRUBY_VERSION) && "Kernel#fork is not available" do
431
+ # Fork, produce a message, send the report over a pipe and
432
+ # wait for and check the message in the main process.
433
+ reader, writer = IO.pipe
434
+
435
+ fork do
436
+ reader.close
437
+
438
+ # Avoids sharing the socket between processes.
439
+ producer = rdkafka_producer_config.producer
440
+
441
+ handle = producer.produce(
442
+ topic: "produce_test_topic",
443
+ payload: "payload-forked",
444
+ key: "key-forked"
445
+ )
446
+
447
+ report = handle.wait(max_wait_timeout: 5)
448
+
449
+ report_json = JSON.generate(
450
+ "partition" => report.partition,
451
+ "offset" => report.offset
452
+ )
453
+
454
+ writer.write(report_json)
455
+ writer.close
456
+ producer.close
457
+ end
458
+
459
+ writer.close
460
+ report_hash = JSON.parse(reader.read)
461
+ report = Rdkafka::Producer::DeliveryReport.new(
462
+ report_hash["partition"],
463
+ report_hash["offset"]
464
+ )
465
+
466
+ reader.close
467
+
468
+ # Consume message and verify its content
469
+ message = wait_for_message(
470
+ topic: "produce_test_topic",
471
+ delivery_report: report,
472
+ consumer: consumer
473
+ )
474
+ expect(message.partition).to eq 0
475
+ expect(message.payload).to eq "payload-forked"
476
+ expect(message.key).to eq "key-forked"
477
+ end
478
+
479
+ it "should raise an error when producing fails" do
480
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_producev).and_return(20)
481
+
482
+ expect {
483
+ producer.produce(
484
+ topic: "produce_test_topic",
485
+ key: "key error"
486
+ )
487
+ }.to raise_error Rdkafka::RdkafkaError
488
+ end
489
+
490
+ it "should raise a timeout error when waiting too long" do
491
+ handle = producer.produce(
492
+ topic: "produce_test_topic",
493
+ payload: "payload timeout",
494
+ key: "key timeout"
495
+ )
496
+ expect {
497
+ handle.wait(max_wait_timeout: 0)
498
+ }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
499
+
500
+ # Waiting a second time should work
501
+ handle.wait(max_wait_timeout: 5)
502
+ end
503
+
504
+ context "methods that should not be called after a producer has been closed" do
505
+ before do
506
+ producer.close
507
+ end
508
+
509
+ # Affected methods and a non-invalid set of parameters for the method
510
+ {
511
+ :produce => { topic: nil },
512
+ :partition_count => nil,
513
+ }.each do |method, args|
514
+ it "raises an exception if #{method} is called" do
515
+ expect {
516
+ if args.is_a?(Hash)
517
+ producer.public_send(method, **args)
518
+ else
519
+ producer.public_send(method, args)
520
+ end
521
+ }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
522
+ end
523
+ end
524
+ end
525
+ end
@@ -0,0 +1,139 @@
1
+ unless ENV["CI"] == "true"
2
+ require "simplecov"
3
+ SimpleCov.start do
4
+ add_filter "/spec/"
5
+ end
6
+ end
7
+
8
+ require "pry"
9
+ require "rspec"
10
+ require "rdkafka"
11
+ require "timeout"
12
+
13
+ def rdkafka_base_config
14
+ {
15
+ :"api.version.request" => false,
16
+ :"broker.version.fallback" => "1.0",
17
+ :"bootstrap.servers" => "localhost:9092",
18
+ }
19
+ end
20
+
21
+ def rdkafka_config(config_overrides={})
22
+ # Generate the base config
23
+ config = rdkafka_base_config
24
+ # Merge overrides
25
+ config.merge!(config_overrides)
26
+ # Return it
27
+ Rdkafka::Config.new(config)
28
+ end
29
+
30
+ def rdkafka_consumer_config(config_overrides={})
31
+ # Generate the base config
32
+ config = rdkafka_base_config
33
+ # Add consumer specific fields to it
34
+ config[:"auto.offset.reset"] = "earliest"
35
+ config[:"enable.partition.eof"] = false
36
+ config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
37
+ # Enable debug mode if required
38
+ if ENV["DEBUG_CONSUMER"]
39
+ config[:debug] = "cgrp,topic,fetch"
40
+ end
41
+ # Merge overrides
42
+ config.merge!(config_overrides)
43
+ # Return it
44
+ Rdkafka::Config.new(config)
45
+ end
46
+
47
+ def rdkafka_producer_config(config_overrides={})
48
+ # Generate the base config
49
+ config = rdkafka_base_config
50
+ # Enable debug mode if required
51
+ if ENV["DEBUG_PRODUCER"]
52
+ config[:debug] = "broker,topic,msg"
53
+ end
54
+ # Merge overrides
55
+ config.merge!(config_overrides)
56
+ # Return it
57
+ Rdkafka::Config.new(config)
58
+ end
59
+
60
+ def new_native_client
61
+ config = rdkafka_consumer_config
62
+ config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
63
+ end
64
+
65
+ def new_native_topic(topic_name="topic_name", native_client: )
66
+ Rdkafka::Bindings.rd_kafka_topic_new(
67
+ native_client,
68
+ topic_name,
69
+ nil
70
+ )
71
+ end
72
+
73
+ def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
74
+ new_consumer = !!consumer
75
+ consumer ||= rdkafka_consumer_config.consumer
76
+ consumer.subscribe(topic)
77
+ timeout = Time.now.to_i + timeout_in_seconds
78
+ loop do
79
+ if timeout <= Time.now.to_i
80
+ raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
81
+ end
82
+ message = consumer.poll(100)
83
+ if message &&
84
+ message.partition == delivery_report.partition &&
85
+ message.offset == delivery_report.offset
86
+ return message
87
+ end
88
+ end
89
+ ensure
90
+ consumer.close if new_consumer
91
+ end
92
+
93
+ def wait_for_assignment(consumer)
94
+ 10.times do
95
+ break if !consumer.assignment.empty?
96
+ sleep 1
97
+ end
98
+ end
99
+
100
+ def wait_for_unassignment(consumer)
101
+ 10.times do
102
+ break if consumer.assignment.empty?
103
+ sleep 1
104
+ end
105
+ end
106
+
107
+ RSpec.configure do |config|
108
+ config.filter_run focus: true
109
+ config.run_all_when_everything_filtered = true
110
+
111
+ config.before(:suite) do
112
+ admin = rdkafka_config.admin
113
+ {
114
+ consume_test_topic: 3,
115
+ empty_test_topic: 3,
116
+ load_test_topic: 3,
117
+ produce_test_topic: 3,
118
+ rake_test_topic: 3,
119
+ watermarks_test_topic: 3,
120
+ partitioner_test_topic: 25,
121
+ }.each do |topic, partitions|
122
+ create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
123
+ begin
124
+ create_topic_handle.wait(max_wait_timeout: 15)
125
+ rescue Rdkafka::RdkafkaError => ex
126
+ raise unless ex.message.match?(/topic_already_exists/)
127
+ end
128
+ end
129
+ admin.close
130
+ end
131
+
132
+ config.around(:each) do |example|
133
+ # Timeout specs after a minute. If they take longer
134
+ # they are probably stuck
135
+ Timeout::timeout(60) do
136
+ example.run
137
+ end
138
+ end
139
+ end
data.tar.gz.sig ADDED
Binary file