karafka-rdkafka 0.12.2 → 0.13.0.beta1

Files changed (57)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/CHANGELOG.md +26 -0
  5. data/Gemfile +2 -0
  6. data/README.md +26 -0
  7. data/Rakefile +2 -0
  8. data/ext/Rakefile +2 -0
  9. data/karafka-rdkafka.gemspec +2 -0
  10. data/lib/rdkafka/abstract_handle.rb +2 -0
  11. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  12. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  13. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  14. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  15. data/lib/rdkafka/admin.rb +95 -73
  16. data/lib/rdkafka/bindings.rb +53 -37
  17. data/lib/rdkafka/callbacks.rb +7 -1
  18. data/lib/rdkafka/config.rb +13 -10
  19. data/lib/rdkafka/consumer/headers.rb +24 -7
  20. data/lib/rdkafka/consumer/message.rb +3 -1
  21. data/lib/rdkafka/consumer/partition.rb +2 -0
  22. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  23. data/lib/rdkafka/consumer.rb +100 -44
  24. data/lib/rdkafka/error.rb +9 -0
  25. data/lib/rdkafka/metadata.rb +25 -2
  26. data/lib/rdkafka/native_kafka.rb +83 -0
  27. data/lib/rdkafka/producer/delivery_handle.rb +5 -2
  28. data/lib/rdkafka/producer/delivery_report.rb +9 -2
  29. data/lib/rdkafka/producer.rb +75 -12
  30. data/lib/rdkafka/version.rb +3 -1
  31. data/lib/rdkafka.rb +3 -1
  32. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  33. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  34. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  35. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin_spec.rb +4 -3
  38. data/spec/rdkafka/bindings_spec.rb +2 -0
  39. data/spec/rdkafka/callbacks_spec.rb +2 -0
  40. data/spec/rdkafka/config_spec.rb +17 -2
  41. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  42. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  43. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  44. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  45. data/spec/rdkafka/consumer_spec.rb +124 -22
  46. data/spec/rdkafka/error_spec.rb +2 -0
  47. data/spec/rdkafka/metadata_spec.rb +2 -0
  48. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  49. data/spec/rdkafka/producer/delivery_handle_spec.rb +5 -0
  50. data/spec/rdkafka/producer/delivery_report_spec.rb +8 -2
  51. data/spec/rdkafka/producer_spec.rb +124 -19
  52. data/spec/spec_helper.rb +17 -1
  53. data.tar.gz.sig +0 -0
  54. metadata +10 -10
  55. metadata.gz.sig +0 -0
  56. data/bin/console +0 -11
  57. data/lib/rdkafka/producer/client.rb +0 -47
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} RENAMED
@@ -1,17 +1,16 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
   let(:thread) { double(Thread) }
 
-  subject(:client) { described_class.new(native) }
+  subject(:client) { described_class.new(native, run_polling_thread: true) }
 
   before do
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
     allow(Thread).to receive(:new).and_return(thread)
 
     allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +18,8 @@ describe Rdkafka::Producer::Client do
     allow(thread).to receive(:abort_on_exception=).with(anything)
   end
 
+  after { client.close }
+
   context "defaults" do
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +40,12 @@ describe Rdkafka::Producer::Client do
 
       client
     end
-
-    it "polls the native with default 250ms timeout" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
-      end
-    end
-
-    it "check the out queue of native client" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
-      end
-    end
-  end
-
-  def polling_loop_expects(&block)
-    Thread.current[:closing] = true # this forces the loop break with line #12
-
-    allow(Thread).to receive(:new).and_yield do |_|
-      block.call
-    end.and_return(thread)
-
-    client
   end
 
-  it "exposes `native` client" do
-    expect(client.native).to eq(native)
+  it "exposes the inner client" do
+    client.with_inner do |inner|
+      expect(inner).to eq(native)
+    end
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -74,7 +55,7 @@ describe Rdkafka::Producer::Client do
 
     context "and attempt to close" do
       it "calls the `destroy` binding" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
 
         client.close
       end
@@ -94,7 +75,6 @@ describe Rdkafka::Producer::Client do
       it "closes and unassign the native client" do
        client.close
 
-        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
@@ -109,7 +89,7 @@ describe Rdkafka::Producer::Client do
 
    context "and attempt to close again" do
      it "does not call the `destroy` binding" do
-        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
 
        client.close
      end
@@ -129,13 +109,12 @@ describe Rdkafka::Producer::Client do
      it "does not close and unassign the native client again" do
        client.close
 
-        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
  end
 
-  it "provide a finalizer Proc that closes the `native` client" do
+  it "provides a finalizer that closes the native kafka client" do
    expect(client.closed?).to eq(false)
 
    client.finalizer.call("some-ignored-object-id")
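The rename is more than cosmetic: `Producer::Client#native`, which handed out the raw handle, is gone, and `Rdkafka::NativeKafka` yields the handle through a block instead. A minimal sketch of the new surface, using only the constructor arguments and methods exercised by the spec above (building a `NativeKafka` by hand is purely illustrative; it is normally created internally):

    # `native` stands for the raw handle built in the spec's let block.
    native_kafka = Rdkafka::NativeKafka.new(native, run_polling_thread: true)

    # 0.12: pointer = client.native
    # 0.13: the handle is only reachable inside a block:
    native_kafka.with_inner do |inner|
      puts inner # an FFI::Pointer to the librdkafka client
    end

    native_kafka.close # stops the polling thread and destroys the handle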
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
@@ -9,6 +11,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +32,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
         expect(report.partition).to eq(2)
         expect(report.offset).to eq(100)
+        expect(report.topic_name).to eq("produce_test_topic")
       end
 
       it "should wait without a timeout" do
@@ -36,6 +40,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
         expect(report.partition).to eq(2)
         expect(report.offset).to eq(100)
+        expect(report.topic_name).to eq("produce_test_topic")
       end
     end
   end
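The handle's native struct gains a topic-name member (stored as a C string, hence the `FFI::MemoryPointer` in the setup), and the report built by `wait` now exposes it. Expected behaviour per the spec above:

    report = handle.wait(max_wait_timeout: 5)

    report.partition  # => 2
    report.offset     # => 100
    report.topic_name # => "produce_test_topic" (new in 0.13)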
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,7 +13,11 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
   it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
   end
 end
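Note the breaking constructor change pinned down here: the third positional argument is now the topic name, and the error moves to fourth position as a numeric rdkafka code instead of a string. Callers building reports by hand (as the fork-safety spec below does) must pass arguments in the new order:

    # 0.12: DeliveryReport.new(partition, offset, error)
    # 0.13: DeliveryReport.new(partition, offset, topic_name, error)
    report = Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1)

    report.topic_name # => "topic"
    report.error      # => -1 (numeric code; previously a string)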
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "zlib"
 
@@ -7,11 +9,16 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+    expect(registry).to be_empty, registry.inspect
     producer.close
     consumer.close
   end
 
+  describe '#name' do
+    it { expect(producer.name).to include('rdkafka#producer-') }
+  end
+
   context "delivery callback" do
     context "with a proc/lambda" do
       it "should set the callback" do
@@ -30,6 +37,7 @@ describe Rdkafka::Producer do
           expect(report).not_to be_nil
           expect(report.partition).to eq 1
           expect(report.offset).to be >= 0
+          expect(report.topic_name).to eq "produce_test_topic"
           @callback_called = true
         end
 
@@ -113,6 +121,7 @@ describe Rdkafka::Producer do
       expect(called_report.first).not_to be_nil
       expect(called_report.first.partition).to eq 1
       expect(called_report.first.offset).to be >= 0
+      expect(called_report.first.topic_name).to eq "produce_test_topic"
     end
 
     it "should provide handle" do
@@ -180,10 +189,11 @@ describe Rdkafka::Producer do
     expect(report.partition).to eq 1
     expect(report.offset).to be >= 0
 
-    # Close producer
+    # Flush and close producer
+    producer.flush
     producer.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
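The specs now flush explicitly before closing the producer, making the drain step visible instead of relying on `close` alone. The pattern, using the same calls as the hunk above:

    handle = producer.produce(
      topic: "produce_test_topic",
      payload: "payload",
      key: "key"
    )
    report = handle.wait(max_wait_timeout: 5)

    producer.flush  # wait for outstanding deliveries to drain
    producer.close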
@@ -207,7 +217,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -251,6 +261,28 @@ describe Rdkafka::Producer do
     expect(messages[2].key).to eq key
   end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
@@ -259,7 +291,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -292,7 +324,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -313,7 +345,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -333,7 +365,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -351,7 +383,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -371,7 +403,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -380,9 +412,9 @@ describe Rdkafka::Producer do
 
     expect(message.payload).to eq "payload headers"
     expect(message.key).to eq "key headers"
-    expect(message.headers[:foo]).to eq "bar"
-    expect(message.headers[:baz]).to eq "foobar"
-    expect(message.headers[:foobar]).to be_nil
+    expect(message.headers["foo"]).to eq "bar"
+    expect(message.headers["baz"]).to eq "foobar"
+    expect(message.headers["foobar"]).to be_nil
   end
 
   it "should produce a message with empty headers" do
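Consumed headers switch from symbol to string keys in 0.13, as the flipped expectations show. Presumably symbol lookups now simply miss (an inference from the spec change above, not a documented guarantee):

    message.headers["foo"] # => "bar"  (0.13: String keys)
    message.headers[:foo]  # => nil    (symbol access no longer matches)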
@@ -394,7 +426,7 @@ describe Rdkafka::Producer do
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -432,10 +464,10 @@ describe Rdkafka::Producer do
     # wait for and check the message in the main process.
     reader, writer = IO.pipe
 
-    fork do
+    pid = fork do
       reader.close
 
-      # Avoids sharing the socket between processes.
+      # Avoid sharing the client between processes.
       producer = rdkafka_producer_config.producer
 
       handle = producer.produce(
@@ -448,24 +480,28 @@ describe Rdkafka::Producer do
 
       report_json = JSON.generate(
         "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
       )
 
       writer.write(report_json)
       writer.close
+      producer.flush
       producer.close
     end
+    Process.wait(pid)
 
     writer.close
     report_hash = JSON.parse(reader.read)
     report = Rdkafka::Producer::DeliveryReport.new(
       report_hash["partition"],
-      report_hash["offset"]
+      report_hash["offset"],
+      report_hash["topic_name"]
     )
 
     reader.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -522,4 +558,73 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  describe '#partition_count' do
+    it { expect(producer.partition_count('example_topic')).to eq(1) }
+
+    context 'when the partition count value is already cached' do
+      before do
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached but time expired' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached and time did not expire' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 29.001)
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+  end
+
+  describe 'metadata fetch request recovery' do
+    subject(:partition_count) { producer.partition_count('example_topic') }
+
+    describe 'metadata initialization recovery' do
+      context 'when all good' do
+        it { expect(partition_count).to eq(1) }
+      end
+
+      context 'when we fail for the first time with handled error' do
+        before do
+          raised = false
+
+          allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_wrap_original do |m, *args|
+            if raised
+              m.call(*args)
+            else
+              raised = true
+              -185
+            end
+          end
+        end
+
+        it { expect(partition_count).to eq(1) }
+      end
+    end
+  end
 end
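Two behaviours are pinned down by the new describes. First, `#partition_count` caches per topic on a clock: a second call at a stubbed 29.001s skips `Rdkafka::Metadata.new`, while one at 30.02s queries again, implying a TTL of roughly 30 seconds (the `it 'expect not to query it again'` wording in the expired context is a copy-paste leftover; its assertion checks the opposite). In usage terms:

    producer.partition_count('example_topic') # first call: fetches metadata
    producer.partition_count('example_topic') # within ~30s: served from cache

    sleep(31)                                 # allow the cache entry to expire
    producer.partition_count('example_topic') # queries the cluster again

Second, the recovery spec stubs `rd_kafka_metadata` to return -185 (librdkafka's local "timed out" code) once before succeeding, showing that a single handled metadata failure is retried transparently and the caller still gets the partition count.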
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -71,7 +73,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  new_consumer = !!consumer
+  new_consumer = consumer.nil?
   consumer ||= rdkafka_consumer_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
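The `wait_for_message` tweak is a real bug fix: `!!consumer` was true exactly when a caller passed its own consumer, so the helper closed caller-owned consumers and leaked the ones it created; `consumer.nil?` restores the intended ownership. A sketch of the helper's shape, abridged to the lines the diff touches:

    def wait_for_message(topic:, delivery_report:, consumer: nil)
      new_consumer = consumer.nil?   # fixed: was `!!consumer`
      consumer ||= rdkafka_consumer_config.consumer
      # ... subscribe and poll until the reported message shows up ...
    ensure
      consumer.close if new_consumer # close only helper-created consumers
    end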
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.12.2
+  version: 0.13.0.beta1
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
   MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
   -----END CERTIFICATE-----
-date: 2023-05-24 00:00:00.000000000 Z
+date: 2023-06-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -166,8 +166,7 @@ dependencies:
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - thijs@appsignal.com
-executables:
-- console
+executables: []
 extensions:
 - ext/Rakefile
 extra_rdoc_files: []
@@ -182,7 +181,6 @@ files:
 - LICENSE
 - README.md
 - Rakefile
-- bin/console
 - certs/cert_chain.pem
 - certs/karafka-pro.pem
 - docker-compose.yml
@@ -208,8 +206,8 @@ files:
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
+- lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
-- lib/rdkafka/producer/client.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
@@ -222,13 +220,14 @@ files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -255,9 +254,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
 rubygems_version: 3.1.2
 signing_key:
@@ -275,13 +274,14 @@ test_files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
metadata.gz.sig CHANGED
Binary file
data/bin/console DELETED
@@ -1,11 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
-
-require "bundler/setup"
-require "rdkafka"
-
-require "irb"
-IRB.start(__FILE__)
data/lib/rdkafka/producer/client.rb DELETED
@@ -1,47 +0,0 @@
-module Rdkafka
-  class Producer
-    class Client
-      def initialize(native)
-        @native = native
-
-        # Start thread to poll client for delivery callbacks
-        @polling_thread = Thread.new do
-          loop do
-            Rdkafka::Bindings.rd_kafka_poll(native, 250)
-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
-              break
-            end
-          end
-        end
-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
-      end
-
-      def native
-        @native
-      end
-
-      def finalizer
-        ->(_) { close }
-      end
-
-      def closed?
-        @native.nil?
-      end
-
-      def close(object_id=nil)
-        return unless @native
-
-        # Indicate to polling thread that we're closing
-        @polling_thread[:closing] = true
-        # Wait for the polling thread to finish up
-        @polling_thread.join
-
-        Rdkafka::Bindings.rd_kafka_destroy(@native)
-
-        @native = nil
-      end
-    end
-  end
-end
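The deleted polling loop is not lost: per the file list, it moves into the new shared lib/rdkafka/native_kafka.rb (+83 lines), where polling becomes opt-in via the `run_polling_thread:` flag seen in the renamed spec. A hedged approximation of the relocated loop (the real implementation adds synchronization around the handle and close coordination):

    @polling_thread = Thread.new do
      loop do
        Rdkafka::Bindings.rd_kafka_poll(inner, 250)
        # Exit once close has been requested and the delivery queue has drained
        break if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner).zero?
      end
    end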