karafka-rdkafka 0.12.3 → 0.13.0.beta1

Files changed (57)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/CHANGELOG.md +22 -0
  5. data/Gemfile +2 -0
  6. data/README.md +26 -0
  7. data/Rakefile +2 -0
  8. data/ext/Rakefile +2 -0
  9. data/karafka-rdkafka.gemspec +2 -0
  10. data/lib/rdkafka/abstract_handle.rb +2 -0
  11. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  12. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  13. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  14. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  15. data/lib/rdkafka/admin.rb +95 -73
  16. data/lib/rdkafka/bindings.rb +52 -37
  17. data/lib/rdkafka/callbacks.rb +2 -0
  18. data/lib/rdkafka/config.rb +13 -10
  19. data/lib/rdkafka/consumer/headers.rb +24 -7
  20. data/lib/rdkafka/consumer/message.rb +3 -1
  21. data/lib/rdkafka/consumer/partition.rb +2 -0
  22. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  23. data/lib/rdkafka/consumer.rb +100 -44
  24. data/lib/rdkafka/error.rb +9 -0
  25. data/lib/rdkafka/metadata.rb +25 -2
  26. data/lib/rdkafka/native_kafka.rb +83 -0
  27. data/lib/rdkafka/producer/delivery_handle.rb +2 -0
  28. data/lib/rdkafka/producer/delivery_report.rb +3 -1
  29. data/lib/rdkafka/producer.rb +75 -12
  30. data/lib/rdkafka/version.rb +3 -1
  31. data/lib/rdkafka.rb +3 -1
  32. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  33. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  34. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  35. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin_spec.rb +4 -3
  38. data/spec/rdkafka/bindings_spec.rb +2 -0
  39. data/spec/rdkafka/callbacks_spec.rb +2 -0
  40. data/spec/rdkafka/config_spec.rb +17 -2
  41. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  42. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  43. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  44. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  45. data/spec/rdkafka/consumer_spec.rb +124 -22
  46. data/spec/rdkafka/error_spec.rb +2 -0
  47. data/spec/rdkafka/metadata_spec.rb +2 -0
  48. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  49. data/spec/rdkafka/producer/delivery_handle_spec.rb +2 -0
  50. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -2
  51. data/spec/rdkafka/producer_spec.rb +118 -17
  52. data/spec/spec_helper.rb +17 -1
  53. data.tar.gz.sig +0 -0
  54. metadata +10 -10
  55. metadata.gz.sig +0 -0
  56. data/bin/console +0 -11
  57. data/lib/rdkafka/producer/client.rb +0 -47
data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} RENAMED
@@ -1,17 +1,16 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
-describe Rdkafka::Producer::Client do
+describe Rdkafka::NativeKafka do
   let(:config) { rdkafka_producer_config }
   let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
   let(:closing) { false }
   let(:thread) { double(Thread) }
 
-  subject(:client) { described_class.new(native) }
+  subject(:client) { described_class.new(native, run_polling_thread: true) }
 
   before do
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
-    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
     allow(Thread).to receive(:new).and_return(thread)
 
     allow(thread).to receive(:[]=).with(:closing, anything)
@@ -19,6 +18,8 @@ describe Rdkafka::Producer::Client do
     allow(thread).to receive(:abort_on_exception=).with(anything)
   end
 
+  after { client.close }
+
   context "defaults" do
     it "sets the thread to abort on exception" do
       expect(thread).to receive(:abort_on_exception=).with(true)
@@ -39,32 +40,12 @@
 
       client
     end
-
-    it "polls the native with default 250ms timeout" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
-      end
-    end
-
-    it "check the out queue of native client" do
-      polling_loop_expects do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
-      end
-    end
-  end
-
-  def polling_loop_expects(&block)
-    Thread.current[:closing] = true # this forces the loop break with line #12
-
-    allow(Thread).to receive(:new).and_yield do |_|
-      block.call
-    end.and_return(thread)
-
-    client
   end
 
-  it "exposes `native` client" do
-    expect(client.native).to eq(native)
+  it "exposes the inner client" do
+    client.with_inner do |inner|
+      expect(inner).to eq(native)
+    end
   end
 
   context "when client was not yet closed (`nil`)" do
@@ -74,7 +55,7 @@
 
     context "and attempt to close" do
       it "calls the `destroy` binding" do
-        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)
+        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native).and_call_original
 
         client.close
       end
@@ -94,7 +75,6 @@
       it "closes and unassign the native client" do
         client.close
 
-        expect(client.native).to eq(nil)
         expect(client.closed?).to eq(true)
       end
     end
@@ -109,7 +89,7 @@
 
     context "and attempt to close again" do
       it "does not call the `destroy` binding" do
-        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)
+        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy_flags)
 
         client.close
       end
@@ -129,13 +109,12 @@
       it "does not close and unassign the native client again" do
        client.close
 
-        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
  end
 
-  it "provide a finalizer Proc that closes the `native` client" do
+  it "provides a finalizer that closes the native kafka client" do
    expect(client.closed?).to eq(false)
 
    client.finalizer.call("some-ignored-object-id")
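
Worth noting for upgraders: the rename from `Rdkafka::Producer::Client` to `Rdkafka::NativeKafka` also replaces the bare `#native` reader with a block-scoped `#with_inner` accessor, as the spec changes above show. A minimal sketch of the difference (setup omitted; `client` and `native` as in the spec):

```ruby
# 0.12.x: the raw librdkafka handle leaked out of the wrapper,
# so callers could keep using it after close.
pointer = client.native
Rdkafka::Bindings.rd_kafka_outq_len(pointer)

# 0.13.x: the handle is only yielded inside a block, letting the
# wrapper guard access against its own lifecycle.
client.with_inner do |inner|
  Rdkafka::Bindings.rd_kafka_outq_len(inner)
end
```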
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryHandle do
data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -16,6 +18,6 @@ describe Rdkafka::Producer::DeliveryReport do
   end
 
   it "should get the error" do
-    expect(subject.error).to eq "error"
+    expect(subject.error).to eq -1
   end
 end
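
The delivery report's `error` field switches here from a pre-formatted string to the raw librdkafka response code (`-1` in the spec above). A hedged sketch of mapping the code back to a rich error, assuming the `RdkafkaError` wrapper from `lib/rdkafka/error.rb` accepts a numeric response code:

```ruby
report = handle.wait(max_wait_timeout: 5)

# error is now an Integer response code rather than a String;
# 0 (or nil) means the delivery succeeded.
if report.error && !report.error.zero?
  raise Rdkafka::RdkafkaError.new(report.error)
end
```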
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "spec_helper"
 require "zlib"
 
@@ -7,11 +9,16 @@ describe Rdkafka::Producer do
 
   after do
     # Registry should always end up being empty
-    expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
+    registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+    expect(registry).to be_empty, registry.inspect
     producer.close
     consumer.close
   end
 
+  describe '#name' do
+    it { expect(producer.name).to include('rdkafka#producer-') }
+  end
+
   context "delivery callback" do
     context "with a proc/lambda" do
       it "should set the callback" do
@@ -182,10 +189,11 @@
     expect(report.partition).to eq 1
     expect(report.offset).to be >= 0
 
-    # Close producer
+    # Flush and close producer
+    producer.flush
     producer.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
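
The spec now flushes explicitly before closing. The pattern this encodes: `#flush` (new in this release, see `lib/rdkafka/producer.rb` in the file list) blocks until the delivery queue drains, so `#close` no longer races in-flight delivery callbacks. Sketched:

```ruby
handle = producer.produce(topic: "produce_test_topic", payload: "payload")
handle.wait(max_wait_timeout: 5)

producer.flush # wait for outstanding deliveries to complete
producer.close # then tear the handle down
```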
@@ -209,7 +217,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -253,6 +261,28 @@
     expect(messages[2].key).to eq key
   end
 
+  it "should produce a message with empty string without crashing" do
+    messages = [{key: 'a', partition_key: ''}]
+
+    messages = messages.map do |m|
+      handle = producer.produce(
+        topic: "partitioner_test_topic",
+        payload: "payload partition",
+        key: m[:key],
+        partition_key: m[:partition_key]
+      )
+      report = handle.wait(max_wait_timeout: 5)
+
+      wait_for_message(
+        topic: "partitioner_test_topic",
+        delivery_report: report,
+      )
+    end
+
+    expect(messages[0].partition).to eq 0
+    expect(messages[0].key).to eq 'a'
+  end
+
   it "should produce a message with utf-8 encoding" do
     handle = producer.produce(
       topic: "produce_test_topic",
@@ -261,7 +291,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -294,7 +324,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -315,7 +345,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -335,7 +365,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -353,7 +383,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -373,7 +403,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -382,9 +412,9 @@
 
     expect(message.payload).to eq "payload headers"
     expect(message.key).to eq "key headers"
-    expect(message.headers[:foo]).to eq "bar"
-    expect(message.headers[:baz]).to eq "foobar"
-    expect(message.headers[:foobar]).to be_nil
+    expect(message.headers["foo"]).to eq "bar"
+    expect(message.headers["baz"]).to eq "foobar"
+    expect(message.headers["foobar"]).to be_nil
   end
 
   it "should produce a message with empty headers" do
@@ -396,7 +426,7 @@
     )
     report = handle.wait(max_wait_timeout: 5)
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -434,10 +464,10 @@
     # wait for and check the message in the main process.
     reader, writer = IO.pipe
 
-    fork do
+    pid = fork do
       reader.close
 
-      # Avoids sharing the socket between processes.
+      # Avoid sharing the client between processes.
       producer = rdkafka_producer_config.producer
 
       handle = producer.produce(
@@ -456,8 +486,10 @@
 
       writer.write(report_json)
       writer.close
+      producer.flush
       producer.close
     end
+    Process.wait(pid)
 
     writer.close
     report_hash = JSON.parse(reader.read)
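
The fork test now captures the child pid, flushes before closing inside the child, and reaps it with `Process.wait`, so the parent cannot read the pipe before the child has actually delivered. Condensed from the spec above:

```ruby
require "json"

reader, writer = IO.pipe

pid = fork do
  reader.close
  # A fresh client per process: librdkafka handles must not be
  # shared across fork boundaries.
  producer = rdkafka_producer_config.producer
  report = producer.produce(topic: "produce_test_topic", payload: "Payload from fork")
                   .wait(max_wait_timeout: 5)
  writer.write(JSON.generate(partition: report.partition, offset: report.offset))
  writer.close
  producer.flush
  producer.close
end

Process.wait(pid) # reap the child before asserting in the parent
writer.close
report_hash = JSON.parse(reader.read)
```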
@@ -469,7 +501,7 @@
 
     reader.close
 
-    # Consume message and verify it's content
+    # Consume message and verify its content
     message = wait_for_message(
       topic: "produce_test_topic",
       delivery_report: report,
@@ -526,4 +558,73 @@
       end
     end
   end
+
+  describe '#partition_count' do
+    it { expect(producer.partition_count('example_topic')).to eq(1) }
+
+    context 'when the partition count value is already cached' do
+      before do
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached but time expired' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 30.02)
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).to have_received(:new)
+      end
+    end
+
+    context 'when the partition count value was cached and time did not expire' do
+      before do
+        allow(::Process).to receive(:clock_gettime).and_return(0, 29.001)
+        producer.partition_count('example_topic')
+        allow(::Rdkafka::Metadata).to receive(:new).and_call_original
+      end
+
+      it 'expect not to query it again' do
+        producer.partition_count('example_topic')
+        expect(::Rdkafka::Metadata).not_to have_received(:new)
+      end
+    end
+  end
+
+  describe 'metadata fetch request recovery' do
+    subject(:partition_count) { producer.partition_count('example_topic') }
+
+    describe 'metadata initialization recovery' do
+      context 'when all good' do
+        it { expect(partition_count).to eq(1) }
+      end
+
+      context 'when we fail for the first time with handled error' do
+        before do
+          raised = false
+
+          allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_wrap_original do |m, *args|
+            if raised
+              m.call(*args)
+            else
+              raised = true
+              -185
+            end
+          end
+        end
+
+        it { expect(partition_count).to eq(1) }
+      end
+    end
+  end
 end
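
The clock stubs in the caching contexts imply a TTL of roughly 30 seconds (29.001 s still hits the cache, 30.02 s misses), and the recovery spec retries once after a transient local error (-185 appears to be librdkafka's local "timed out" code). From a caller's perspective, a hedged sketch:

```ruby
# First call issues a metadata request against the cluster.
producer.partition_count("example_topic") # => 1

# Calls within the ~30 s TTL are served from the cache, so hot
# produce paths do not hammer the brokers with metadata requests.
producer.partition_count("example_topic")
```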
data/spec/spec_helper.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 unless ENV["CI"] == "true"
   require "simplecov"
   SimpleCov.start do
@@ -71,7 +73,7 @@ def new_native_topic(topic_name="topic_name", native_client: )
 end
 
 def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-  new_consumer = !!consumer
+  new_consumer = consumer.nil?
   consumer ||= rdkafka_consumer_config.consumer
   consumer.subscribe(topic)
   timeout = Time.now.to_i + timeout_in_seconds
@@ -104,6 +106,20 @@ def wait_for_unassignment(consumer)
   end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
   config.filter_run focus: true
   config.run_all_when_everything_filtered = true
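
The new `notify_listener` helper drives a full subscribe → poll → unsubscribe cycle so rebalance-listener specs can assert that both callbacks fire. It relies on a `consumer` defined in the including spec, and the listener itself is assumed to be wired in via config beforehand; a hedged usage sketch (the listener shape follows rdkafka's rebalance-listener convention of `on_partitions_assigned`/`on_partitions_revoked`):

```ruby
events = []
listener = Object.new
listener.define_singleton_method(:on_partitions_assigned) { |_tpl| events << :assigned }
listener.define_singleton_method(:on_partitions_revoked)  { |_tpl| events << :revoked }

# notify_listener walks the consumer through assignment, the yielded
# block, and revocation, so the listener sees both callbacks.
notify_listener(listener) { events << :in_between }
# events == [:assigned, :in_between, :revoked]
```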
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.12.3
+  version: 0.13.0.beta1
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
   MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
   -----END CERTIFICATE-----
-date: 2023-05-26 00:00:00.000000000 Z
+date: 2023-06-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -166,8 +166,7 @@ dependencies:
 description: Modern Kafka client library for Ruby based on librdkafka
 email:
 - thijs@appsignal.com
-executables:
-- console
+executables: []
 extensions:
 - ext/Rakefile
 extra_rdoc_files: []
@@ -182,7 +181,6 @@ files:
 - LICENSE
 - README.md
 - Rakefile
-- bin/console
 - certs/cert_chain.pem
 - certs/karafka-pro.pem
 - docker-compose.yml
@@ -208,8 +206,8 @@ files:
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
 - lib/rdkafka/metadata.rb
+- lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
-- lib/rdkafka/producer/client.rb
 - lib/rdkafka/producer/delivery_handle.rb
 - lib/rdkafka/producer/delivery_report.rb
 - lib/rdkafka/version.rb
@@ -222,13 +220,14 @@ files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
@@ -255,9 +254,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
 rubygems_version: 3.1.2
 signing_key:
@@ -275,13 +274,14 @@ test_files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
 - spec/rdkafka/consumer_spec.rb
 - spec/rdkafka/error_spec.rb
 - spec/rdkafka/metadata_spec.rb
-- spec/rdkafka/producer/client_spec.rb
+- spec/rdkafka/native_kafka_spec.rb
 - spec/rdkafka/producer/delivery_handle_spec.rb
 - spec/rdkafka/producer/delivery_report_spec.rb
 - spec/rdkafka/producer_spec.rb
metadata.gz.sig CHANGED
Binary file
data/bin/console DELETED
@@ -1,11 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-ENV["IRBRC"] = File.join(File.dirname(__FILE__), ".irbrc")
-
-require "bundler/setup"
-require "rdkafka"
-
-require "irb"
-IRB.start(__FILE__)
data/lib/rdkafka/producer/client.rb DELETED
@@ -1,47 +0,0 @@
-module Rdkafka
-  class Producer
-    class Client
-      def initialize(native)
-        @native = native
-
-        # Start thread to poll client for delivery callbacks
-        @polling_thread = Thread.new do
-          loop do
-            Rdkafka::Bindings.rd_kafka_poll(native, 250)
-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(native) == 0
-              break
-            end
-          end
-        end
-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
-      end
-
-      def native
-        @native
-      end
-
-      def finalizer
-        ->(_) { close }
-      end
-
-      def closed?
-        @native.nil?
-      end
-
-      def close(object_id=nil)
-        return unless @native
-
-        # Indicate to polling thread that we're closing
-        @polling_thread[:closing] = true
-        # Wait for the polling thread to finish up
-        @polling_thread.join
-
-        Rdkafka::Bindings.rd_kafka_destroy(@native)
-
-        @native = nil
-      end
-    end
-  end
-end
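
For context on this deletion: the producer-only `Client` wrapper is superseded by `Rdkafka::NativeKafka` (`lib/rdkafka/native_kafka.rb`, +83 lines in the file list), which generalizes the polling-thread wrapper so producers, consumers, and admin clients can share one lifecycle. A hedged sketch of the equivalent flow under the new class, using the constructor keyword seen in the spec earlier in this diff:

```ruby
native = config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)

# run_polling_thread: true reproduces the old Producer::Client
# behaviour: a background thread polls until the out-queue drains.
client = Rdkafka::NativeKafka.new(native, run_polling_thread: true)

client.with_inner do |inner|
  Rdkafka::Bindings.rd_kafka_outq_len(inner)
end

client.close   # stop the poller, then destroy the native handle
client.closed? # => true
```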