rdkafka 0.8.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. data/.rspec +1 -0
  3. data/.semaphore/semaphore.yml +23 -0
  4. data/CHANGELOG.md +24 -1
  5. data/Guardfile +19 -0
  6. data/README.md +8 -3
  7. data/bin/console +11 -0
  8. data/docker-compose.yml +5 -3
  9. data/ext/README.md +8 -1
  10. data/ext/Rakefile +5 -20
  11. data/lib/rdkafka/abstract_handle.rb +82 -0
  12. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  14. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  15. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  16. data/lib/rdkafka/admin.rb +155 -0
  17. data/lib/rdkafka/bindings.rb +57 -18
  18. data/lib/rdkafka/callbacks.rb +106 -0
  19. data/lib/rdkafka/config.rb +59 -3
  20. data/lib/rdkafka/consumer.rb +125 -5
  21. data/lib/rdkafka/error.rb +29 -3
  22. data/lib/rdkafka/metadata.rb +6 -5
  23. data/lib/rdkafka/producer/delivery_handle.rb +7 -53
  24. data/lib/rdkafka/producer/delivery_report.rb +1 -1
  25. data/lib/rdkafka/producer.rb +27 -12
  26. data/lib/rdkafka/version.rb +3 -3
  27. data/lib/rdkafka.rb +7 -0
  28. data/rdkafka.gemspec +9 -7
  29. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  30. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  31. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  32. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  33. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  34. data/spec/rdkafka/admin_spec.rb +203 -0
  35. data/spec/rdkafka/bindings_spec.rb +32 -8
  36. data/spec/rdkafka/callbacks_spec.rb +20 -0
  37. data/spec/rdkafka/config_spec.rb +78 -9
  38. data/spec/rdkafka/consumer_spec.rb +326 -42
  39. data/spec/rdkafka/error_spec.rb +4 -0
  40. data/spec/rdkafka/metadata_spec.rb +78 -0
  41. data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -41
  42. data/spec/rdkafka/producer_spec.rb +102 -34
  43. data/spec/spec_helper.rb +78 -20
  44. metadata +84 -29
  45. data/.travis.yml +0 -48
data/spec/rdkafka/metadata_spec.rb ADDED
@@ -0,0 +1,78 @@
+ require "spec_helper"
+ require "securerandom"
+
+ describe Rdkafka::Metadata do
+   let(:config) { rdkafka_consumer_config }
+   let(:native_config) { config.send(:native_config) }
+   let(:native_kafka) { config.send(:native_kafka, native_config, :rd_kafka_consumer) }
+
+   after do
+     Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
+     Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
+   end
+
+   context "passing in a topic name" do
+     context "that is non-existent topic" do
+       let(:topic_name) { SecureRandom.uuid.to_s }
+
+       it "raises an appropriate exception" do
+         expect {
+           described_class.new(native_kafka, topic_name)
+         }.to raise_exception(Rdkafka::RdkafkaError, "Broker: Unknown topic or partition (unknown_topic_or_part)")
+       end
+     end
+
+     context "that is one of our test topics" do
+       subject { described_class.new(native_kafka, topic_name) }
+       let(:topic_name) { "partitioner_test_topic" }
+
+       it "#brokers returns our single broker" do
+         expect(subject.brokers.length).to eq(1)
+         expect(subject.brokers[0][:broker_id]).to eq(1)
+         expect(subject.brokers[0][:broker_name]).to eq("localhost")
+         expect(subject.brokers[0][:broker_port]).to eq(9092)
+       end
+
+       it "#topics returns data on our test topic" do
+         expect(subject.topics.length).to eq(1)
+         expect(subject.topics[0][:partition_count]).to eq(25)
+         expect(subject.topics[0][:partitions].length).to eq(25)
+         expect(subject.topics[0][:topic_name]).to eq(topic_name)
+       end
+     end
+   end
+
+   context "not passing in a topic name" do
+     subject { described_class.new(native_kafka, topic_name) }
+     let(:topic_name) { nil }
+     let(:test_topics) {
+       %w(consume_test_topic empty_test_topic load_test_topic produce_test_topic rake_test_topic watermarks_test_topic partitioner_test_topic)
+     } # Test topics created in spec_helper.rb
+
+     it "#brokers returns our single broker" do
+       expect(subject.brokers.length).to eq(1)
+       expect(subject.brokers[0][:broker_id]).to eq(1)
+       expect(subject.brokers[0][:broker_name]).to eq("localhost")
+       expect(subject.brokers[0][:broker_port]).to eq(9092)
+     end
+
+     it "#topics returns data about all of our test topics" do
+       result = subject.topics.map { |topic| topic[:topic_name] }
+       expect(result).to include(*test_topics)
+     end
+   end
+
+   context "when a non-zero error code is returned" do
+     let(:topic_name) { SecureRandom.uuid.to_s }
+
+     before do
+       allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_return(-165)
+     end
+
+     it "creating the instance raises an exception" do
+       expect {
+         described_class.new(native_kafka, topic_name)
+       }.to raise_error(Rdkafka::RdkafkaError, /Local: Required feature not supported by broker \(unsupported_feature\)/)
+     end
+   end
+ end
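The new metadata spec above drives Rdkafka::Metadata directly against a native consumer handle. A minimal sketch of the same flow outside RSpec follows; the broker address and topic name are placeholders, and the `native_config`/`native_kafka` helpers are private API, mirrored here only because the spec itself reaches them via `send`:

```ruby
require "rdkafka"

# Build a native consumer handle the same way the spec does (private API).
config        = Rdkafka::Config.new("bootstrap.servers": "localhost:9092")
native_config = config.send(:native_config)
native_kafka  = config.send(:native_kafka, native_config, :rd_kafka_consumer)

begin
  # Request metadata for a single topic; the spec also passes nil to fetch all topics.
  metadata = Rdkafka::Metadata.new(native_kafka, "produce_test_topic")

  metadata.brokers.each do |broker|
    puts "broker #{broker[:broker_id]}: #{broker[:broker_name]}:#{broker[:broker_port]}"
  end
  metadata.topics.each do |topic|
    puts "#{topic[:topic_name]}: #{topic[:partition_count]} partitions"
  end
ensure
  # Clean up the native handle, as the spec's after block does.
  Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
  Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
end
```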
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -12,42 +12,13 @@ describe Rdkafka::Producer::DeliveryHandle do
      end
    end

-   describe ".register and .remove" do
-     let(:pending_handle) { true }
-
-     it "should register and remove a delivery handle" do
-       Rdkafka::Producer::DeliveryHandle.register(subject.to_ptr.address, subject)
-       removed = Rdkafka::Producer::DeliveryHandle.remove(subject.to_ptr.address)
-       expect(removed).to eq subject
-       expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
-     end
-   end
-
-   describe "#pending?" do
-     context "when true" do
-       let(:pending_handle) { true }
-
-       it "should be true" do
-         expect(subject.pending?).to be true
-       end
-     end
-
-     context "when not true" do
-       let(:pending_handle) { false }
-
-       it "should be false" do
-         expect(subject.pending?).to be false
-       end
-     end
-   end
-
    describe "#wait" do
      let(:pending_handle) { true }

      it "should wait until the timeout and then raise an error" do
        expect {
          subject.wait(max_wait_timeout: 0.1)
-       }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
+       }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError, /delivery/
      end

      context "when not pending anymore and no error" do
@@ -67,16 +38,5 @@ describe Rdkafka::Producer::DeliveryHandle do
          expect(report.offset).to eq(100)
        end
      end
-
-   context "when not pending anymore and there was an error" do
-     let(:pending_handle) { false }
-     let(:response) { 20 }
-
-     it "should raise an rdkafka error" do
-       expect {
-         subject.wait
-       }.to raise_error Rdkafka::RdkafkaError
-     end
-   end
    end
  end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1,8 +1,9 @@
  require "spec_helper"
+ require "zlib"

  describe Rdkafka::Producer do
-   let(:producer) { rdkafka_config.producer }
-   let(:consumer) { rdkafka_config.consumer }
+   let(:producer) { rdkafka_producer_config.producer }
+   let(:consumer) { rdkafka_consumer_config.consumer }

    after do
      # Registry should always end up being empty
@@ -12,47 +13,92 @@ describe Rdkafka::Producer do
    end

    context "delivery callback" do
-     it "should set the callback" do
-       expect {
-         producer.delivery_callback = lambda do |delivery_handle|
-           puts stats
+     context "with a proc/lambda" do
+       it "should set the callback" do
+         expect {
+           producer.delivery_callback = lambda do |delivery_handle|
+             puts delivery_handle
+           end
+         }.not_to raise_error
+         expect(producer.delivery_callback).to respond_to :call
+       end
+
+       it "should call the callback when a message is delivered" do
+         @callback_called = false
+
+         producer.delivery_callback = lambda do |report|
+           expect(report).not_to be_nil
+           expect(report.partition).to eq 1
+           expect(report.offset).to be >= 0
+           @callback_called = true
          end
-       }.not_to raise_error
-       expect(producer.delivery_callback).to be_a Proc
-     end

-     it "should not accept a callback that's not a proc" do
-       expect {
-         producer.delivery_callback = 'a string'
-       }.to raise_error(TypeError)
-     end
+         # Produce a message
+         handle = producer.produce(
+           topic: "produce_test_topic",
+           payload: "payload",
+           key: "key"
+         )

-     it "should call the callback when a message is delivered" do
-       @callback_called = false
+         # Wait for it to be delivered
+         handle.wait(max_wait_timeout: 15)

+         # Join the producer thread.
+         producer.close

-       producer.delivery_callback = lambda do |report|
-         expect(report).not_to be_nil
-         expect(report.partition).to eq 1
-         expect(report.offset).to be >= 0
-         @callback_called = true
+         # Callback should have been called
+         expect(@callback_called).to be true
        end
+     end

-       # Produce a message
-       handle = producer.produce(
-         topic: "produce_test_topic",
-         payload: "payload",
-         key: "key"
-       )
+     context "with a callable object" do
+       it "should set the callback" do
+         callback = Class.new do
+           def call(stats); end
+         end
+         expect {
+           producer.delivery_callback = callback.new
+         }.not_to raise_error
+         expect(producer.delivery_callback).to respond_to :call
+       end

-       # Wait for it to be delivered
-       handle.wait(max_wait_timeout: 15)
+       it "should call the callback when a message is delivered" do
+         called_report = []
+         callback = Class.new do
+           def initialize(called_report)
+             @called_report = called_report
+           end

-       # Join the producer thread.
-       producer.close
+           def call(report)
+             @called_report << report
+           end
+         end
+         producer.delivery_callback = callback.new(called_report)
+
+         # Produce a message
+         handle = producer.produce(
+           topic: "produce_test_topic",
+           payload: "payload",
+           key: "key"
+         )
+
+         # Wait for it to be delivered
+         handle.wait(max_wait_timeout: 15)

-       # Callback should have been called
-       expect(@callback_called).to be true
+         # Join the producer thread.
+         producer.close
+
+         # Callback should have been called
+         expect(called_report.first).not_to be_nil
+         expect(called_report.first.partition).to eq 1
+         expect(called_report.first.offset).to be >= 0
+       end
+     end
+
+     it "should not accept a callback that's not callable" do
+       expect {
+         producer.delivery_callback = 'a string'
+       }.to raise_error(TypeError)
      end
    end

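As the rewritten callback specs above show, `delivery_callback` now accepts any object that responds to `call`, not just a proc. A minimal sketch of that usage, assuming a local broker (the address and topic name are placeholders):

```ruby
require "rdkafka"

# Any object responding to #call can act as the delivery callback.
class DeliveryLogger
  def call(report)
    puts "Delivered to partition #{report.partition} at offset #{report.offset}"
  end
end

producer = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").producer
producer.delivery_callback = DeliveryLogger.new

handle = producer.produce(topic: "produce_test_topic", payload: "payload", key: "key")
handle.wait(max_wait_timeout: 15) # raises WaitTimeoutError if delivery takes too long
producer.close                    # joins the producer thread, flushing the callback
```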
@@ -343,7 +389,7 @@ describe Rdkafka::Producer do
          reader.close

          # Avoids sharing the socket between processes.
-         producer = rdkafka_config.producer
+         producer = rdkafka_producer_config.producer

          handle = producer.produce(
            topic: "produce_test_topic",
@@ -407,4 +453,26 @@ describe Rdkafka::Producer do
      # Waiting a second time should work
      handle.wait(max_wait_timeout: 5)
    end
+
+   context "methods that should not be called after a producer has been closed" do
+     before do
+       producer.close
+     end
+
+     # Affected methods and a non-invalid set of parameters for the method
+     {
+       :produce => { topic: nil },
+       :partition_count => nil,
+     }.each do |method, args|
+       it "raises an exception if #{method} is called" do
+         expect {
+           if args.is_a?(Hash)
+             producer.public_send(method, **args)
+           else
+             producer.public_send(method, args)
+           end
+         }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
+       end
+     end
+   end
  end
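The new context at the end of this spec covers the closed-producer guard: once `#close` has been called, `#produce` and `#partition_count` raise `Rdkafka::ClosedProducerError`. A minimal sketch of that behaviour (broker address and topic name are placeholders):

```ruby
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").producer
producer.close

begin
  # Any produce call after close is rejected before anything is sent.
  producer.produce(topic: "produce_test_topic", payload: "payload")
rescue Rdkafka::ClosedProducerError => e
  puts "Producer is closed: #{e.message}"
end
```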
data/spec/spec_helper.rb CHANGED
@@ -1,40 +1,64 @@
- require "simplecov"
- SimpleCov.start do
-   add_filter "/spec/"
+ unless ENV["CI"] == "true"
+   require "simplecov"
+   SimpleCov.start do
+     add_filter "/spec/"
+   end
  end

  require "pry"
  require "rspec"
  require "rdkafka"
+ require "timeout"

- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic consume_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic empty_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic load_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic produce_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic rake_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic watermarks_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 25 --if-not-exists --topic partitioner_test_topic`
-
- def rdkafka_config(config_overrides={})
-   config = {
+ def rdkafka_base_config
+   {
      :"api.version.request" => false,
      :"broker.version.fallback" => "1.0",
      :"bootstrap.servers" => "localhost:9092",
-     :"group.id" => "ruby-test-#{Random.new.rand(0..1_000_000)}",
-     :"auto.offset.reset" => "earliest",
-     :"enable.partition.eof" => false
    }
+ end
+
+ def rdkafka_config(config_overrides={})
+   # Generate the base config
+   config = rdkafka_base_config
+   # Merge overrides
+   config.merge!(config_overrides)
+   # Return it
+   Rdkafka::Config.new(config)
+ end
+
+ def rdkafka_consumer_config(config_overrides={})
+   # Generate the base config
+   config = rdkafka_base_config
+   # Add consumer specific fields to it
+   config[:"auto.offset.reset"] = "earliest"
+   config[:"enable.partition.eof"] = false
+   config[:"group.id"] = "ruby-test-#{Random.new.rand(0..1_000_000)}"
+   # Enable debug mode if required
+   if ENV["DEBUG_CONSUMER"]
+     config[:debug] = "cgrp,topic,fetch"
+   end
+   # Merge overrides
+   config.merge!(config_overrides)
+   # Return it
+   Rdkafka::Config.new(config)
+ end
+
+ def rdkafka_producer_config(config_overrides={})
+   # Generate the base config
+   config = rdkafka_base_config
+   # Enable debug mode if required
    if ENV["DEBUG_PRODUCER"]
      config[:debug] = "broker,topic,msg"
-   elsif ENV["DEBUG_CONSUMER"]
-     config[:debug] = "cgrp,topic,fetch"
    end
+   # Merge overrides
    config.merge!(config_overrides)
+   # Return it
    Rdkafka::Config.new(config)
  end

  def new_native_client
-   config = rdkafka_config
+   config = rdkafka_consumer_config
    config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
  end

@@ -48,7 +72,7 @@ end

  def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
    new_consumer = !!consumer
-   consumer ||= rdkafka_config.consumer
+   consumer ||= rdkafka_consumer_config.consumer
    consumer.subscribe(topic)
    timeout = Time.now.to_i + timeout_in_seconds
    loop do
@@ -79,3 +103,37 @@ def wait_for_unassignment(consumer)
      sleep 1
    end
  end
+
+ RSpec.configure do |config|
+   config.filter_run focus: true
+   config.run_all_when_everything_filtered = true
+
+   config.before(:suite) do
+     admin = rdkafka_config.admin
+     {
+       consume_test_topic: 3,
+       empty_test_topic: 3,
+       load_test_topic: 3,
+       produce_test_topic: 3,
+       rake_test_topic: 3,
+       watermarks_test_topic: 3,
+       partitioner_test_topic: 25,
+     }.each do |topic, partitions|
+       create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
+       begin
+         create_topic_handle.wait(max_wait_timeout: 15)
+       rescue Rdkafka::RdkafkaError => ex
+         raise unless ex.message.match?(/topic_already_exists/)
+       end
+     end
+     admin.close
+   end
+
+   config.around(:each) do |example|
+     # Timeout specs after a minute. If they take longer
+     # they are probably stuck
+     Timeout::timeout(60) do
+       example.run
+     end
+   end
+ end
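The spec_helper now provisions its test topics through the new Admin API in a `before(:suite)` hook instead of shelling out to `docker-compose`. A minimal sketch of the same pattern outside RSpec (broker address, topic name, and topic settings are placeholders):

```ruby
require "rdkafka"

admin = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").admin

# create_topic(name, partitions, replication_factor) returns a handle to wait on.
handle = admin.create_topic("example_topic", 3, 1)
begin
  handle.wait(max_wait_timeout: 15)
rescue Rdkafka::RdkafkaError => e
  # Ignore the error if the topic already exists, as the suite hook does.
  raise unless e.message.match?(/topic_already_exists/)
end

admin.close
```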
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.8.0
+   version: 0.11.0
  platform: ruby
  authors:
  - Thijs Cadier
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-06-02 00:00:00.000000000 Z
+ date: 2021-11-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
@@ -16,56 +16,56 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.9'
+         version: '1.15'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.9'
+         version: '1.15'
  - !ruby/object:Gem::Dependency
    name: mini_portile2
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '2.1'
+         version: '2.7'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '2.1'
+         version: '2.7'
  - !ruby/object:Gem::Dependency
    name: rake
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - ">"
        - !ruby/object:Gem::Version
-         version: '12.3'
+         version: '12'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - ">"
        - !ruby/object:Gem::Version
-         version: '12.3'
+         version: '12'
  - !ruby/object:Gem::Dependency
    name: pry
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.10'
+         version: '0'
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.10'
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: rspec
    requirement: !ruby/object:Gem::Requirement
@@ -84,51 +84,90 @@ dependencies:
    name: rake
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '12.0'
+         version: '0'
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '12.0'
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: simplecov
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.15'
+         version: '0'
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: guard
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: guard-rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.15'
+         version: '0'
  description: Modern Kafka client library for Ruby based on librdkafka
  email:
  - thijs@appsignal.com
- executables: []
+ executables:
+ - console
  extensions:
  - ext/Rakefile
  extra_rdoc_files: []
  files:
  - ".gitignore"
- - ".travis.yml"
+ - ".rspec"
+ - ".semaphore/semaphore.yml"
  - ".yardopts"
  - CHANGELOG.md
  - Gemfile
+ - Guardfile
  - LICENSE
  - README.md
  - Rakefile
+ - bin/console
  - docker-compose.yml
  - ext/README.md
  - ext/Rakefile
  - lib/rdkafka.rb
+ - lib/rdkafka/abstract_handle.rb
+ - lib/rdkafka/admin.rb
+ - lib/rdkafka/admin/create_topic_handle.rb
+ - lib/rdkafka/admin/create_topic_report.rb
+ - lib/rdkafka/admin/delete_topic_handle.rb
+ - lib/rdkafka/admin/delete_topic_report.rb
  - lib/rdkafka/bindings.rb
+ - lib/rdkafka/callbacks.rb
  - lib/rdkafka/config.rb
  - lib/rdkafka/consumer.rb
  - lib/rdkafka/consumer/headers.rb
@@ -142,13 +181,21 @@ files:
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
  - rdkafka.gemspec
+ - spec/rdkafka/abstract_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_report_spec.rb
+ - spec/rdkafka/admin/delete_topic_handle_spec.rb
+ - spec/rdkafka/admin/delete_topic_report_spec.rb
+ - spec/rdkafka/admin_spec.rb
  - spec/rdkafka/bindings_spec.rb
+ - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/metadata_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
@@ -157,7 +204,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -165,27 +212,35 @@ required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: '2.4'
+       version: '2.6'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.1.2
- signing_key:
+ rubygems_version: 3.1.4
+ signing_key:
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
    It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
    and Ruby 2.4+.
  test_files:
+ - spec/rdkafka/abstract_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_report_spec.rb
+ - spec/rdkafka/admin/delete_topic_handle_spec.rb
+ - spec/rdkafka/admin/delete_topic_report_spec.rb
+ - spec/rdkafka/admin_spec.rb
  - spec/rdkafka/bindings_spec.rb
+ - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/metadata_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb