rdkafka 0.13.0 → 0.14.0.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +1 -0
  3. data/.github/workflows/ci.yml +58 -0
  4. data/.gitignore +4 -0
  5. data/.rspec +1 -0
  6. data/.ruby-gemset +1 -0
  7. data/.ruby-version +1 -0
  8. data/CHANGELOG.md +39 -21
  9. data/{LICENSE → MIT-LICENSE} +2 -1
  10. data/README.md +19 -20
  11. data/certs/cert_chain.pem +26 -0
  12. data/docker-compose.yml +16 -15
  13. data/ext/README.md +1 -1
  14. data/ext/Rakefile +1 -1
  15. data/lib/rdkafka/abstract_handle.rb +37 -24
  16. data/lib/rdkafka/admin.rb +6 -7
  17. data/lib/rdkafka/bindings.rb +8 -5
  18. data/lib/rdkafka/config.rb +30 -17
  19. data/lib/rdkafka/consumer/headers.rb +2 -4
  20. data/lib/rdkafka/consumer/topic_partition_list.rb +3 -1
  21. data/lib/rdkafka/consumer.rb +92 -53
  22. data/lib/rdkafka/helpers/time.rb +14 -0
  23. data/lib/rdkafka/metadata.rb +22 -1
  24. data/lib/rdkafka/native_kafka.rb +6 -1
  25. data/lib/rdkafka/producer.rb +85 -7
  26. data/lib/rdkafka/version.rb +3 -3
  27. data/lib/rdkafka.rb +10 -1
  28. data/rdkafka.gemspec +17 -3
  29. data/renovate.json +6 -0
  30. data/spec/rdkafka/abstract_handle_spec.rb +0 -2
  31. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
  32. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
  33. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
  34. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
  35. data/spec/rdkafka/admin_spec.rb +1 -2
  36. data/spec/rdkafka/bindings_spec.rb +0 -1
  37. data/spec/rdkafka/callbacks_spec.rb +0 -2
  38. data/spec/rdkafka/config_spec.rb +0 -2
  39. data/spec/rdkafka/consumer/headers_spec.rb +0 -2
  40. data/spec/rdkafka/consumer/message_spec.rb +0 -2
  41. data/spec/rdkafka/consumer/partition_spec.rb +0 -2
  42. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +19 -2
  43. data/spec/rdkafka/consumer_spec.rb +143 -39
  44. data/spec/rdkafka/error_spec.rb +0 -2
  45. data/spec/rdkafka/metadata_spec.rb +2 -3
  46. data/spec/rdkafka/native_kafka_spec.rb +2 -3
  47. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
  48. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
  49. data/spec/rdkafka/producer_spec.rb +157 -1
  50. data.tar.gz.sig +0 -0
  51. metadata +54 -15
  52. metadata.gz.sig +0 -0
  53. data/.semaphore/semaphore.yml +0 -27
data/lib/rdkafka.rb CHANGED
@@ -1,7 +1,12 @@
  # frozen_string_literal: true

- require "rdkafka/version"
+ require "logger"
+ require "objspace"
+ require "ffi"
+ require "json"

+ require "rdkafka/version"
+ require "rdkafka/helpers/time"
  require "rdkafka/abstract_handle"
  require "rdkafka/admin"
  require "rdkafka/admin/create_topic_handle"
@@ -22,3 +27,7 @@ require "rdkafka/native_kafka"
  require "rdkafka/producer"
  require "rdkafka/producer/delivery_handle"
  require "rdkafka/producer/delivery_report"
+
+ # Main Rdkafka namespace of this gem
+ module Rdkafka
+ end
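
With the requires consolidated and the top-level module documented above, loading the gem no longer depends on load order elsewhere. A minimal usage sketch under that assumption (the broker address and the Config call are illustrative, following the gem's README rather than this diff):

```ruby
# Minimal sketch: "logger", "objspace", "ffi" and "json" are now required by the
# gem itself, so a script only needs the single require below.
require "rdkafka"

puts Rdkafka::VERSION

# Illustrative only; "localhost:9092" is a placeholder broker address.
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
producer.close
```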
data/rdkafka.gemspec CHANGED
@@ -4,11 +4,10 @@ require File.expand_path('lib/rdkafka/version', __dir__)

  Gem::Specification.new do |gem|
  gem.authors = ['Thijs Cadier']
- gem.email = ["thijs@appsignal.com"]
+ gem.email = ["contact@karafka.io"]
  gem.description = "Modern Kafka client library for Ruby based on librdkafka"
  gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.4+."
  gem.license = 'MIT'
- gem.homepage = 'https://github.com/thijsc/rdkafka-ruby'

  gem.files = `git ls-files`.split($\)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
@@ -16,8 +15,13 @@ Gem::Specification.new do |gem|
  gem.name = 'rdkafka'
  gem.require_paths = ['lib']
  gem.version = Rdkafka::VERSION
- gem.required_ruby_version = '>= 2.6'
+ gem.required_ruby_version = '>= 2.7'
  gem.extensions = %w(ext/Rakefile)
+ gem.cert_chain = %w[certs/cert_chain.pem]
+
+ if $PROGRAM_NAME.end_with?('gem')
+ gem.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
+ end

  gem.add_dependency 'ffi', '~> 1.15'
  gem.add_dependency 'mini_portile2', '~> 2.6'
@@ -29,4 +33,14 @@ Gem::Specification.new do |gem|
  gem.add_development_dependency 'simplecov'
  gem.add_development_dependency 'guard'
  gem.add_development_dependency 'guard-rspec'
+
+ gem.metadata = {
+ 'funding_uri' => 'https://karafka.io/#become-pro',
+ 'homepage_uri' => 'https://karafka.io',
+ 'changelog_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/CHANGELOG.md',
+ 'bug_tracker_uri' => 'https://github.com/karafka/rdkafka-ruby/issues',
+ 'source_code_uri' => 'https://github.com/karafka/rdkafka-ruby',
+ 'documentation_uri' => 'https://github.com/karafka/rdkafka-ruby/blob/main/README.md',
+ 'rubygems_mfa_required' => 'true'
+ }
  end
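
The metadata block added above is published with the gem and can be inspected through RubyGems at runtime; a small sketch, assuming the gem is installed locally:

```ruby
require "rubygems"

# Reads the metadata declared in the gemspec above from the installed gem.
spec = Gem::Specification.find_by_name("rdkafka")
puts spec.metadata["source_code_uri"]       # => "https://github.com/karafka/rdkafka-ruby"
puts spec.metadata["rubygems_mfa_required"] # => "true"
```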
data/renovate.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": [
+ "config:base"
+ ]
+ }
data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::AbstractHandle do
  let(:response) { 0 }
  let(:result) { -1 }
data/spec/rdkafka/admin/create_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Admin::CreateTopicHandle do
  let(:response) { 0 }

data/spec/rdkafka/admin/create_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Admin::CreateTopicReport do
  subject { Rdkafka::Admin::CreateTopicReport.new(
  FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin/delete_topic_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Admin::DeleteTopicHandle do
  let(:response) { 0 }

data/spec/rdkafka/admin/delete_topic_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Admin::DeleteTopicReport do
  subject { Rdkafka::Admin::DeleteTopicReport.new(
  FFI::MemoryPointer.from_string("error string"),
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -1,6 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
  require "ostruct"

  describe Rdkafka::Admin do
@@ -33,7 +32,7 @@ describe Rdkafka::Admin do
  }.to raise_exception { |ex|
  expect(ex).to be_a(Rdkafka::RdkafkaError)
  expect(ex.message).to match(/Broker: Invalid topic \(topic_exception\)/)
- expect(ex.broker_message).to match(/Topic name.*is illegal, it contains a character other than ASCII alphanumerics/)
+ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or more characters other than ASCII alphanumerics, '.', '_' and '-'/)
  }
  end
  end
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -1,6 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
  require 'zlib'

  describe Rdkafka::Bindings do
data/spec/rdkafka/callbacks_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Callbacks do

  # The code in the call back functions is 100% covered by other specs. Due to
data/spec/rdkafka/config_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Config do
  context "logger" do
  it "should have a default logger" do
data/spec/rdkafka/consumer/headers_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Consumer::Headers do
  let(:headers) do
  { # Note String keys!
data/spec/rdkafka/consumer/message_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Consumer::Message do
  let(:native_client) { new_native_client }
  let(:native_topic) { new_native_topic(native_client: native_client) }
data/spec/rdkafka/consumer/partition_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Consumer::Partition do
  let(:offset) { 100 }
  let(:err) { 0 }
data/spec/rdkafka/consumer/topic_partition_list_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Consumer::TopicPartitionList do
  it "should create a new list and add unassigned topics" do
  list = Rdkafka::Consumer::TopicPartitionList.new
@@ -221,5 +219,24 @@ describe Rdkafka::Consumer::TopicPartitionList do

  expect(list).to eq other
  end
+
+ it "should create a native list with timestamp offsets if offsets are Time" do
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic_and_partitions_with_offsets("topic", 0 => Time.at(1505069646, 250_000))
+ end
+
+ tpl = list.to_native_tpl
+
+ compare_list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic_and_partitions_with_offsets(
+ "topic",
+ 0 => (Time.at(1505069646, 250_000).to_f * 1000).floor
+ )
+ end
+
+ native_list = Rdkafka::Consumer::TopicPartitionList.from_native_tpl(tpl)
+
+ expect(native_list).to eq compare_list
+ end
  end
  end
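
The new spec above covers Time-based offsets: when an offset in a topic partition list is a Time, it is converted to a millisecond Unix timestamp (floored) before being handed to librdkafka. A minimal sketch using the same values as the spec:

```ruby
require "rdkafka"

time = Time.at(1505069646, 250_000)

# A Time offset is stored as milliseconds since the epoch, matching what
# librdkafka expects for timestamp-based entries.
list = Rdkafka::Consumer::TopicPartitionList.new
list.add_topic_and_partitions_with_offsets("topic", 0 => time)

expected_ms = (time.to_f * 1000).floor # => 1505069646250
```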
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -1,6 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
  require "ostruct"
  require 'securerandom'

@@ -11,6 +10,10 @@ describe Rdkafka::Consumer do
  after { consumer.close }
  after { producer.close }

+ describe '#name' do
+ it { expect(consumer.name).to include('rdkafka#consumer-') }
+ end
+
  describe "#subscribe, #unsubscribe and #subscription" do
  it "should subscribe, unsubscribe and return the subscription" do
  expect(consumer.subscription).to be_empty
@@ -311,8 +314,9 @@
  end
  end

- describe "#commit, #committed and #store_offset" do
- # Make sure there's a stored offset
+
+ describe "#position, #commit, #committed and #store_offset" do
+ # Make sure there are messages to work with
  let!(:report) do
  producer.produce(
  topic: "consume_test_topic",
@@ -330,29 +334,33 @@
  )
  end

- it "should only accept a topic partition list in committed" do
- expect {
- consumer.committed("list")
- }.to raise_error TypeError
+ describe "#position" do
+ it "should only accept a topic partition list in position if not nil" do
+ expect {
+ consumer.position("list")
+ }.to raise_error TypeError
+ end
  end

- it "should commit in sync mode" do
- expect {
- consumer.commit(nil, true)
- }.not_to raise_error
- end
+ describe "#committed" do
+ it "should only accept a topic partition list in commit if not nil" do
+ expect {
+ consumer.commit("list")
+ }.to raise_error TypeError
+ end

- it "should only accept a topic partition list in commit if not nil" do
- expect {
- consumer.commit("list")
- }.to raise_error TypeError
+ it "should commit in sync mode" do
+ expect {
+ consumer.commit(nil, true)
+ }.not_to raise_error
+ end
  end

  context "with a committed consumer" do
  before :all do
  # Make sure there are some messages.
  handles = []
- producer = rdkafka_producer_config.producer
+ producer = rdkafka_config.producer
  10.times do
  (0..2).each do |i|
  handles << producer.produce(
@@ -396,31 +404,33 @@
  }.to raise_error(Rdkafka::RdkafkaError)
  end

- it "should fetch the committed offsets for the current assignment" do
- partitions = consumer.committed.to_h["consume_test_topic"]
- expect(partitions).not_to be_nil
- expect(partitions[0].offset).to eq 1
- end
+ describe "#committed" do
+ it "should fetch the committed offsets for the current assignment" do
+ partitions = consumer.committed.to_h["consume_test_topic"]
+ expect(partitions).not_to be_nil
+ expect(partitions[0].offset).to eq 1
+ end

- it "should fetch the committed offsets for a specified topic partition list" do
- list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
- list.add_topic("consume_test_topic", [0, 1, 2])
+ it "should fetch the committed offsets for a specified topic partition list" do
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic("consume_test_topic", [0, 1, 2])
+ end
+ partitions = consumer.committed(list).to_h["consume_test_topic"]
+ expect(partitions).not_to be_nil
+ expect(partitions[0].offset).to eq 1
+ expect(partitions[1].offset).to eq 1
+ expect(partitions[2].offset).to eq 1
  end
- partitions = consumer.committed(list).to_h["consume_test_topic"]
- expect(partitions).not_to be_nil
- expect(partitions[0].offset).to eq 1
- expect(partitions[1].offset).to eq 1
- expect(partitions[2].offset).to eq 1
- end

- it "should raise an error when getting committed fails" do
- expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
- list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
- list.add_topic("consume_test_topic", [0, 1, 2])
+ it "should raise an error when getting committed fails" do
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_committed).and_return(20)
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic("consume_test_topic", [0, 1, 2])
+ end
+ expect {
+ consumer.committed(list)
+ }.to raise_error Rdkafka::RdkafkaError
  end
- expect {
- consumer.committed(list)
- }.to raise_error Rdkafka::RdkafkaError
  end

  describe "#store_offset" do
@@ -441,6 +451,8 @@
  @new_consumer.store_offset(message)
  @new_consumer.commit

+ # TODO use position here, should be at offset
+
  list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
  list.add_topic("consume_test_topic", [0, 1, 2])
  end
@@ -455,6 +467,35 @@
  @new_consumer.store_offset(message)
  }.to raise_error Rdkafka::RdkafkaError
  end
+
+ describe "#position" do
+ it "should fetch the positions for the current assignment" do
+ consumer.store_offset(message)
+
+ partitions = consumer.position.to_h["consume_test_topic"]
+ expect(partitions).not_to be_nil
+ expect(partitions[0].offset).to eq message.offset + 1
+ end
+
+ it "should fetch the positions for a specified assignment" do
+ consumer.store_offset(message)
+
+ list = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic_and_partitions_with_offsets("consume_test_topic", 0 => nil, 1 => nil, 2 => nil)
+ end
+ partitions = consumer.position(list).to_h["consume_test_topic"]
+ expect(partitions).not_to be_nil
+ expect(partitions[0].offset).to eq message.offset + 1
+ end
+
+ it "should raise an error when getting the position fails" do
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_position).and_return(20)
+
+ expect {
+ consumer.position
+ }.to raise_error(Rdkafka::RdkafkaError)
+ end
+ end
  end
  end
  end
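
The new #position examples above introduce a public Consumer#position call that reports the next offset the consumer will read per partition, either for the whole current assignment or for an explicit topic partition list. A usage sketch, assuming consumer is an assigned Rdkafka::Consumer and message was just polled from "consume_test_topic":

```ruby
# After storing an offset, position points one past the stored message.
consumer.store_offset(message)

# Position for the full current assignment.
partitions = consumer.position.to_h["consume_test_topic"]
puts partitions[0].offset # => message.offset + 1

# Position restricted to an explicit topic partition list.
list = Rdkafka::Consumer::TopicPartitionList.new
list.add_topic_and_partitions_with_offsets("consume_test_topic", 0 => nil, 1 => nil, 2 => nil)
puts consumer.position(list).to_h["consume_test_topic"][0].offset
```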
@@ -950,6 +991,69 @@
  end
  end

+ describe "#offsets_for_times" do
+ it "should raise when not TopicPartitionList" do
+ expect { consumer.offsets_for_times([]) }.to raise_error(TypeError)
+ end
+
+ it "should raise an error when offsets_for_times fails" do
+ tpl = Rdkafka::Consumer::TopicPartitionList.new
+
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_offsets_for_times).and_return(7)
+
+ expect { consumer.offsets_for_times(tpl) }.to raise_error(Rdkafka::RdkafkaError)
+ end
+
+ context "when subscribed" do
+ let(:timeout) { 1000 }
+
+ before do
+ consumer.subscribe("consume_test_topic")
+
+ # 1. partitions are assigned
+ wait_for_assignment(consumer)
+ expect(consumer.assignment).not_to be_empty
+
+ # 2. eat unrelated messages
+ while(consumer.poll(timeout)) do; end
+ end
+
+ after { consumer.unsubscribe }
+
+ def send_one_message(val)
+ producer.produce(
+ topic: "consume_test_topic",
+ payload: "payload #{val}",
+ key: "key 0",
+ partition: 0
+ ).wait
+ end
+
+ it "returns a TopicPartitionList with updated offsets" do
+ send_one_message("a")
+ send_one_message("b")
+ send_one_message("c")
+
+ consumer.poll(timeout)
+ message = consumer.poll(timeout)
+ consumer.poll(timeout)
+
+ tpl = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+ list.add_topic_and_partitions_with_offsets(
+ "consume_test_topic",
+ [
+ [0, message.timestamp]
+ ]
+ )
+ end
+
+ tpl_response = consumer.offsets_for_times(tpl)
+
+ expect(tpl_response.to_h["consume_test_topic"][0].offset).to eq message.offset
+ end
+ end
+ end
+
  describe "a rebalance listener" do
  let(:consumer) do
  config = rdkafka_consumer_config
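
The #offsets_for_times spec above adds a timestamp lookup: given a topic partition list whose offsets are timestamps, it returns the earliest offset at or after each timestamp. A usage sketch, assuming a subscribed consumer and a message already polled from partition 0 of "consume_test_topic":

```ruby
tpl = Rdkafka::Consumer::TopicPartitionList.new
tpl.add_topic_and_partitions_with_offsets(
  "consume_test_topic",
  [[0, message.timestamp]] # partition 0, looked up by the message's timestamp
)

response = consumer.offsets_for_times(tpl)
puts response.to_h["consume_test_topic"][0].offset # => that message's offset
```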
@@ -1024,7 +1128,7 @@
  :assign => [ nil ],
  :assignment => nil,
  :committed => [],
- :query_watermark_offsets => [ nil, nil ],
+ :query_watermark_offsets => [ nil, nil ]
  }.each do |method, args|
  it "raises an exception if #{method} is called" do
  expect {
data/spec/rdkafka/error_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::RdkafkaError do
  it "should raise a type error for a nil response" do
  expect {
data/spec/rdkafka/metadata_spec.rb CHANGED
@@ -1,6 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
  require "securerandom"

  describe Rdkafka::Metadata do
@@ -31,7 +30,7 @@
  it "#brokers returns our single broker" do
  expect(subject.brokers.length).to eq(1)
  expect(subject.brokers[0][:broker_id]).to eq(1)
- expect(subject.brokers[0][:broker_name]).to eq("localhost")
+ expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
  expect(subject.brokers[0][:broker_port]).to eq(9092)
  end

@@ -54,7 +53,7 @@
  it "#brokers returns our single broker" do
  expect(subject.brokers.length).to eq(1)
  expect(subject.brokers[0][:broker_id]).to eq(1)
- expect(subject.brokers[0][:broker_name]).to eq("localhost")
+ expect(subject.brokers[0][:broker_name]).to eq("127.0.0.1")
  expect(subject.brokers[0][:broker_port]).to eq(9092)
  end

data/spec/rdkafka/native_kafka_spec.rb CHANGED
@@ -1,14 +1,13 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::NativeKafka do
  let(:config) { rdkafka_producer_config }
  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
  let(:closing) { false }
  let(:thread) { double(Thread) }
+ let(:opaque) { Rdkafka::Opaque.new }

- subject(:client) { described_class.new(native, run_polling_thread: true) }
+ subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }

  before do
  allow(Thread).to receive(:new).and_return(thread)
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Producer::DeliveryHandle do
  let(:response) { 0 }

data/spec/rdkafka/producer/delivery_report_spec.rb CHANGED
@@ -1,7 +1,5 @@
  # frozen_string_literal: true

- require "spec_helper"
-
  describe Rdkafka::Producer::DeliveryReport do
  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", -1) }