rdkafka 0.12.0 → 0.15.1

This diff shows the changes between package versions as published to their public registries, and is provided for informational purposes only.
Files changed (86)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci.yml +57 -0
  5. data/.gitignore +4 -0
  6. data/.rspec +1 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +155 -93
  10. data/Gemfile +2 -0
  11. data/{LICENSE → MIT-LICENSE} +2 -1
  12. data/README.md +76 -29
  13. data/Rakefile +2 -0
  14. data/certs/cert_chain.pem +26 -0
  15. data/docker-compose.yml +18 -15
  16. data/ext/README.md +1 -1
  17. data/ext/Rakefile +46 -27
  18. data/lib/rdkafka/abstract_handle.rb +41 -25
  19. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  20. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  21. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  22. data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
  23. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  24. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  25. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  26. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  27. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  28. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  29. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  30. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  31. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  32. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  33. data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
  34. data/lib/rdkafka/admin.rb +494 -35
  35. data/lib/rdkafka/bindings.rb +180 -41
  36. data/lib/rdkafka/callbacks.rb +202 -1
  37. data/lib/rdkafka/config.rb +62 -25
  38. data/lib/rdkafka/consumer/headers.rb +24 -9
  39. data/lib/rdkafka/consumer/message.rb +3 -1
  40. data/lib/rdkafka/consumer/partition.rb +2 -0
  41. data/lib/rdkafka/consumer/topic_partition_list.rb +13 -8
  42. data/lib/rdkafka/consumer.rb +243 -111
  43. data/lib/rdkafka/error.rb +15 -0
  44. data/lib/rdkafka/helpers/time.rb +14 -0
  45. data/lib/rdkafka/metadata.rb +25 -2
  46. data/lib/rdkafka/native_kafka.rb +120 -0
  47. data/lib/rdkafka/producer/delivery_handle.rb +16 -2
  48. data/lib/rdkafka/producer/delivery_report.rb +22 -2
  49. data/lib/rdkafka/producer.rb +151 -21
  50. data/lib/rdkafka/version.rb +5 -3
  51. data/lib/rdkafka.rb +24 -2
  52. data/rdkafka.gemspec +21 -5
  53. data/renovate.json +6 -0
  54. data/spec/rdkafka/abstract_handle_spec.rb +1 -1
  55. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  56. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  57. data/spec/rdkafka/admin/create_topic_handle_spec.rb +1 -1
  58. data/spec/rdkafka/admin/create_topic_report_spec.rb +1 -1
  59. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  60. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  61. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +1 -1
  62. data/spec/rdkafka/admin/delete_topic_report_spec.rb +1 -1
  63. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  64. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  65. data/spec/rdkafka/admin_spec.rb +209 -5
  66. data/spec/rdkafka/bindings_spec.rb +2 -1
  67. data/spec/rdkafka/callbacks_spec.rb +1 -1
  68. data/spec/rdkafka/config_spec.rb +24 -3
  69. data/spec/rdkafka/consumer/headers_spec.rb +60 -0
  70. data/spec/rdkafka/consumer/message_spec.rb +1 -1
  71. data/spec/rdkafka/consumer/partition_spec.rb +1 -1
  72. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +20 -1
  73. data/spec/rdkafka/consumer_spec.rb +352 -61
  74. data/spec/rdkafka/error_spec.rb +1 -1
  75. data/spec/rdkafka/metadata_spec.rb +4 -3
  76. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -35
  77. data/spec/rdkafka/producer/delivery_handle_spec.rb +4 -1
  78. data/spec/rdkafka/producer/delivery_report_spec.rb +11 -3
  79. data/spec/rdkafka/producer_spec.rb +234 -22
  80. data/spec/spec_helper.rb +20 -2
  81. data.tar.gz.sig +0 -0
  82. metadata +81 -17
  83. metadata.gz.sig +0 -0
  84. data/.semaphore/semaphore.yml +0 -23
  85. data/bin/console +0 -11
  86. data/lib/rdkafka/producer/client.rb +0 -47
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,4 +1,4 @@
-require "spec_helper"
+# frozen_string_literal: true
 
 describe Rdkafka::RdkafkaError do
   it "should raise a type error for a nil response" do
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,4 +1,5 @@
-require "spec_helper"
+# frozen_string_literal: true
+
 require "securerandom"
 
 describe Rdkafka::Metadata do
@@ -29,7 +30,7 @@ describe Rdkafka::Metadata do
   it "#brokers returns our single broker" do
     expect(subject.brokers.length).to eq(1)
     expect(subject.brokers[0][:broker_id]).to eq(1)
-    expect(subject.brokers[0][:broker_name]).to eq("localhost")
+    expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
     expect(subject.brokers[0][:broker_port]).to eq(9092)
   end
 
@@ -52,7 +53,7 @@ describe Rdkafka::Metadata do
   it "#brokers returns our single broker" do
     expect(subject.brokers.length).to eq(1)
     expect(subject.brokers[0][:broker_id]).to eq(1)
-    expect(subject.brokers[0][:broker_name]).to eq("localhost")
+    expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
    expect(subject.brokers[0][:broker_port]).to eq(9092)
   end
 
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} RENAMED
@@ -1,17 +1,15 @@
-require "spec_helper"
+# frozen_string_literal: true
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
   let(:thread) { double(Thread) }
+  let(:opaque) { Rdkafka::Opaque.new }
 
-  subject(:client) { described_class.new(native) }
+  subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }
 
   before do
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
     allow(Thread).to receive(:new).and_return(thread)
 
     allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +17,8 @@ describe Rdkafka::Producer::Client do
     allow(thread).to receive(:abort_on_exception=).with(anything)
   end
 
+  after { client.close }
+
   context "defaults" do
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +39,12 @@ describe Rdkafka::Producer::Client do
 
       client
     end
-
-    it "polls the native with default 250ms timeout" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
-      end
-    end
-
-    it "check the out queue of native client" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
-      end
-    end
-  end
-
-  def polling_loop_expects(&block)
-    Thread.current[:closing] = true # this forces the loop break with line #12
-
-    allow(Thread).to receive(:new).and_yield do |_|
-      block.call
-    end.and_return(thread)
-
-    client
   end
 
-  it "exposes `native` client" do
-    expect(client.native).to eq(native)
+  it "exposes the inner client" do
+    client.with_inner do |inner|
+      expect(inner).to eq(native)
+    end
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -74,7 +54,7 @@ describe Rdkafka::Producer::Client do
 
     context "and attempt to close" do
       it "calls the `destroy` binding" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
 
         client.close
       end
@@ -94,7 +74,6 @@ describe Rdkafka::Producer::Client do
     it "closes and unassign the native client" do
       client.close
 
-      expect(client.native).to eq(nil)
       expect(client.closed?).to eq(true)
     end
   end
@@ -109,7 +88,7 @@ describe Rdkafka::Producer::Client do
 
     context "and attempt to close again" do
      it "does not call the `destroy` binding" do
-        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
 
        client.close
      end
@@ -129,13 +108,12 @@ describe Rdkafka::Producer::Client do
     it "does not close and unassign the native client again" do
       client.close
 
-      expect(client.native).to eq(nil)
       expect(client.closed?).to eq(true)
     end
   end
 
-  it "provide a finalizer Proc that closes the `native` client" do
+  it "provides a finalizer that closes the native kafka client" do
     expect(client.closed?).to eq(false)
 
     client.finalizer.call("some-ignored-object-id")
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,4 +1,4 @@
-require "spec_helper"
+# frozen_string_literal: true
 
 describe Rdkafka::Producer::DeliveryHandle do
   let(:response) { 0 }
@@ -9,6 +9,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +30,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
       expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
 
     it "should wait without a timeout" do
@@ -36,6 +38,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
       expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
   end
 end
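
The new handle[:topic_name] assignment above suggests the delivery handle struct now stores the topic as a C string, which the report reads back into Ruby. A hedged illustration of that round trip using plain ruby-ffi (variable names are mine, not the gem's):

    require "ffi"

    topic_ptr = FFI::MemoryPointer.from_string("produce_test_topic")
    topic_name = topic_ptr.null? ? nil : topic_ptr.read_string
    topic_name # => "produce_test_topic"
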
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,7 @@
-require "spec_helper"
+# frozen_string_literal: true
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,7 +11,15 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
+  it "should get the same topic name under topic alias" do
+    expect(subject.topic).to eq "topic"
+  end
+
   it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
   end
 end
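
Judging purely from this spec, the DeliveryReport constructor's positional arguments are now (partition, offset, topic_name, error), with #topic as an alias for #topic_name. A usage sketch under that assumption:

    report = Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1)

    report.partition  # => 2
    report.offset     # => 100
    report.topic_name # => "topic"
    report.topic      # => "topic" (alias)
    report.error      # => -1 (a librdkafka error code)
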
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,4 +1,5 @@
-require "spec_helper"
+# frozen_string_literal: true
+
 require "zlib"
 
 describe Rdkafka::Producer do
@@ -7,11 +8,16 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+    expect(registry).to be_empty, registry.inspect
     producer.close
     consumer.close
   end
 
+  describe '#name' do
+    it { expect(producer.name).to include('rdkafka#producer-') }
+  end
+
   context "delivery callback" do
     context "with a proc/lambda" do
       it "should set the callback" do
@@ -28,8 +34,10 @@ describe Rdkafka::Producer do
 
         producer.delivery_callback = lambda do |report|
           expect(report).not_to be_nil
+          expect(report.label).to eq "label"
           expect(report.partition).to eq 1
           expect(report.offset).to be >= 0
+          expect(report.topic_name).to eq "produce_test_topic"
           @callback_called = true
         end
 
@@ -37,9 +45,12 @@ describe Rdkafka::Producer do
         handle = producer.produce(
          topic: "produce_test_topic",
          payload: "payload",
-          key: "key"
+          key: "key",
+          label: "label"
        )
 
+        expect(handle.label).to eq "label"
+
        # Wait for it to be delivered
        handle.wait(max_wait_timeout: 15)
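
The hunks above introduce a label: argument on #produce that travels with the delivery handle and comes back on the delivery report, which makes it easy to correlate an async callback with the produce call that triggered it. A sketch of the flow as the spec exercises it:

    producer.delivery_callback = lambda do |report|
      # The label set at produce time is echoed on the report.
      puts "delivered #{report.label} to #{report.topic_name}/#{report.partition}"
    end

    handle = producer.produce(
      topic: "produce_test_topic",
      payload: "payload",
      key: "key",
      label: "order-42" # any correlation value; "order-42" is illustrative
    )
    handle.label # => "order-42", readable even before delivery completes
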
@@ -113,6 +124,7 @@ describe Rdkafka::Producer do
         expect(called_report.first).not_to be_nil
         expect(called_report.first.partition).to eq 1
         expect(called_report.first.offset).to be >= 0
+        expect(called_report.first.topic_name).to eq "produce_test_topic"
       end
 
       it "should provide handle" do
@@ -167,11 +179,13 @@ describe Rdkafka::Producer do
     handle = producer.produce(
       topic: "produce_test_topic",
       payload: "payload",
-      key: "key"
+      key: "key",
+      label: "label"
     )
 
     # Should be pending at first
     expect(handle.pending?).to be true
+    expect(handle.label).to eq "label"
 
     # Check delivery handle and report
     report = handle.wait(max_wait_timeout: 5)
@@ -179,11 +193,13 @@ describe Rdkafka::Producer do
     expect(report).not_to be_nil
     expect(report.partition).to eq 1
     expect(report.offset).to be >= 0
+    expect(report.label).to eq "label"
 
-    # Close producer
+    # Flush and close producer
+    producer.flush
     producer.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -207,7 +223,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -251,6 +267,28 @@ describe Rdkafka::Producer do
     expect(messages[2].key).to eq key
   end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
@@ -259,7 +297,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -292,7 +330,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -313,7 +351,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -333,7 +371,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -351,7 +389,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -371,7 +409,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -380,9 +418,9 @@ describe Rdkafka::Producer do
 
     expect(message.payload).to eq "payload headers"
     expect(message.key).to eq "key headers"
-    expect(message.headers[:foo]).to eq "bar"
-    expect(message.headers[:baz]).to eq "foobar"
-    expect(message.headers[:foobar]).to be_nil
+    expect(message.headers["foo"]).to eq "bar"
+    expect(message.headers["baz"]).to eq "foobar"
+    expect(message.headers["foobar"]).to be_nil
   end
 
   it "should produce a message with empty headers" do
@@ -394,7 +432,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -432,10 +470,10 @@ describe Rdkafka::Producer do
     # wait for and check the message in the main process.
     reader, writer = IO.pipe
 
-    fork do
+    pid = fork do
       reader.close
 
-      # Avoids sharing the socket between processes.
+      # Avoid sharing the client between processes.
       producer = rdkafka_producer_config.producer
 
       handle = producer.produce(
@@ -448,24 +486,28 @@ describe Rdkafka::Producer do
       report_json = JSON.generate(
         "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
       )
 
       writer.write(report_json)
       writer.close
+      producer.flush
       producer.close
     end
+    Process.wait(pid)
 
     writer.close
     report_hash = JSON.parse(reader.read)
     report = Rdkafka::Producer::DeliveryReport.new(
       report_hash["partition"],
-      report_hash["offset"]
+      report_hash["offset"],
+      report_hash["topic_name"]
     )
 
     reader.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
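
Two fixes in this fork test are easy to miss: the child process now builds its own producer (a librdkafka client must not be shared across fork), and the parent reaps the child with Process.wait(pid) so the example cannot leak a zombie process. The pattern in miniature:

    reader, writer = IO.pipe

    pid = fork do
      reader.close
      # Create the client inside the child; never reuse the parent's.
      producer = rdkafka_producer_config.producer
      # ... produce, then serialize the delivery report over the pipe ...
      writer.close
      producer.close
    end

    Process.wait(pid) # reap the child before asserting in the parent
    writer.close
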
@@ -522,4 +564,174 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  context "when not being able to deliver the message" do
+    let(:producer) do
+      rdkafka_producer_config(
+        "bootstrap.servers": "localhost:9093",
+        "message.timeout.ms": 100
+      ).producer
+    end
+
+    it "should contain the error in the response when not deliverable" do
+      handler = producer.produce(topic: 'produce_test_topic', payload: nil, label: 'na')
+      # Wait for the async callbacks and delivery registry to update
+      sleep(2)
+      expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+      expect(handler.create_result.label).to eq('na')
+    end
+  end
+
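
This context documents the failure path: once message.timeout.ms elapses against an unreachable broker (port 9093 here), the handle's create_result carries an Rdkafka::RdkafkaError plus the original label. Checking it looks roughly like this, with the sleep standing in for proper synchronization:

    handler = producer.produce(topic: "produce_test_topic", payload: nil, label: "na")
    sleep(2) # crude: give the background delivery callback time to fire

    result = handler.create_result
    raise result.error if result.error # Rdkafka::RdkafkaError when undeliverable
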
+  describe '#partition_count' do
+    it { expect(producer.partition_count('consume_test_topic')).to eq(3) }
+
+    context 'when the partition count value is already cached' do
+      before do
+        producer.partition_count('consume_test_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('consume_test_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached but time expired' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
+        producer.partition_count('consume_test_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect to query it again' do
+        producer.partition_count('consume_test_topic')
+        expect(::Rdkafka::Metadata).to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached and time did not expire' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 29.001)
+        producer.partition_count('consume_test_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('consume_test_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+  end
+
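
The three contexts above pin down a roughly 30-second TTL on the partition-count cache: 29.001s after the first fetch still hits the cache, while 30.02s triggers a fresh Rdkafka::Metadata query. A minimal sketch of such a monotonic-clock TTL cache, assuming a 30s TTL (class and method names are illustrative, not the gem's):

    class PartitionCountCache
      TTL = 30 # seconds, inferred from the 29.001 vs 30.02 stubs above

      def initialize
        @entries = {} # topic => [fetched_at, count]
      end

      def fetch(topic)
        now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        fetched_at, count = @entries[topic]
        return count if fetched_at && now - fetched_at < TTL

        count = yield # e.g. query partition metadata
        @entries[topic] = [now, count]
        count
      end
    end
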
+  describe '#flush' do
+    it "should return true when it can flush all outstanding messages or when there are no messages" do
+      producer.produce(
+        topic: "produce_test_topic",
+        payload: "payload headers",
+        key: "key headers",
+        headers: {}
+      )
+
+      expect(producer.flush(5_000)).to eq(true)
+    end
+
+    context 'when it cannot flush due to a timeout' do
+      let(:producer) do
+        rdkafka_producer_config(
+          "bootstrap.servers": "localhost:9093",
+          "message.timeout.ms": 2_000
+        ).producer
+      end
+
+      after do
+        # Allow rdkafka to evict the message, preventing a memory leak
+        sleep(2)
+      end
+
+      it "should return false on flush when it cannot deliver within the timeout" do
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers",
+          key: "key headers",
+          headers: {}
+        )
+
+        expect(producer.flush(1_000)).to eq(false)
+      end
+    end
+
+    context 'when there is a different error' do
+      before { allow(Rdkafka::Bindings).to receive(:rd_kafka_flush).and_return(-199) }
+
+      it 'should raise it' do
+        expect { producer.flush }.to raise_error(Rdkafka::RdkafkaError)
+      end
+    end
+  end
+
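
Read together, these examples define #flush's contract: true when the queue drains within the given timeout (milliseconds), false when a timeout is the only obstacle, and a raised Rdkafka::RdkafkaError for any other failure (the stubbed -199 return). Calling it defensively:

    begin
      unless producer.flush(5_000) # timeout in ms
        # Timed out with messages still queued: retry, or purge and move on.
      end
    rescue Rdkafka::RdkafkaError
      # A non-timeout librdkafka failure surfaced as an exception.
      raise
    end
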
+  describe '#purge' do
+    context 'when no outgoing messages' do
+      it { expect(producer.purge).to eq(true) }
+    end
+
+    context 'when librdkafka purge returns an error' do
+      before { expect(Rdkafka::Bindings).to receive(:rd_kafka_purge).and_return(-153) }
+
+      it 'expect to raise an error' do
+        expect { producer.purge }.to raise_error(Rdkafka::RdkafkaError, /retry/)
+      end
+    end
+
+    context 'when there are outgoing things in the queue' do
+      let(:producer) do
+        rdkafka_producer_config(
+          "bootstrap.servers": "localhost:9093",
+          "message.timeout.ms": 2_000
+        ).producer
+      end
+
+      it "should purge and move forward" do
+        producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers"
+        )
+
+        expect(producer.purge).to eq(true)
+        expect(producer.flush(1_000)).to eq(true)
+      end
+
+      it "should materialize the delivery handles" do
+        handle = producer.produce(
+          topic: "produce_test_topic",
+          payload: "payload headers"
+        )
+
+        expect(producer.purge).to eq(true)
+
+        expect { handle.wait }.to raise_error(Rdkafka::RdkafkaError, /purge_queue/)
+      end
+
+      context "when using delivery_callback" do
+        let(:delivery_reports) { [] }
+
+        let(:delivery_callback) do
+          ->(delivery_report) { delivery_reports << delivery_report }
+        end
+
+        before { producer.delivery_callback = delivery_callback }
+
+        it "should run the callback" do
+          handle = producer.produce(
+            topic: "produce_test_topic",
+            payload: "payload headers"
+          )
+
+          expect(producer.purge).to eq(true)
+          # queue purge
+          expect(delivery_reports[0].error).to eq(-152)
+        end
+      end
+    end
+  end
 end
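
And #purge's contract per the new examples: it drops queued messages, fails their pending delivery handles with a purge_queue error, and feeds the same error (code -152) to any delivery callback, so nothing waiting on a handle is left hanging. The failure path in short:

    handle = producer.produce(topic: "produce_test_topic", payload: "payload")

    producer.purge # => true; the queued message is discarded

    begin
      handle.wait
    rescue Rdkafka::RdkafkaError => e
      e.to_s # matches /purge_queue/ per the spec above
    end
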
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -9,6 +11,7 @@ require "pry"
 require "rspec"
 require "rdkafka"
 require "timeout"
+require "securerandom"
 
 def rdkafka_base_config
   {
@@ -33,7 +36,7 @@ def rdkafka_consumer_config(config_overrides={})
   # Add consumer specific fields to it
   config[:"auto.offset.reset"] = "earliest"
   config[:"enable.partition.eof"] = false
-  config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
+  config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
   # Enable debug mode if required
   if ENV["DEBUG_CONSUMER"]
     config[:debug] = "cgrp,topic,fetch"
@@ -71,7 +74,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  new_consumer = !!consumer
+  new_consumer = consumer.nil?
   consumer ||= rdkafka_consumer_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
@@ -104,6 +107,20 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
@@ -118,6 +135,7 @@ RSpec.configure do |config|
     rake_test_topic: 3,
     watermarks_test_topic: 3,
     partitioner_test_topic: 25,
+    example_topic: 1
   }.each do |topic, partitions|
     create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
     begin
data.tar.gz.sig ADDED
Binary file