rdkafka 0.8.0 → 0.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. checksums.yaml +4 -4
  2. data/.semaphore/semaphore.yml +23 -0
  3. data/CHANGELOG.md +6 -0
  4. data/README.md +5 -2
  5. data/docker-compose.yml +2 -0
  6. data/ext/Rakefile +1 -1
  7. data/lib/rdkafka.rb +7 -0
  8. data/lib/rdkafka/abstract_handle.rb +82 -0
  9. data/lib/rdkafka/admin.rb +144 -0
  10. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  11. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  12. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  13. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  14. data/lib/rdkafka/bindings.rb +44 -17
  15. data/lib/rdkafka/callbacks.rb +106 -0
  16. data/lib/rdkafka/config.rb +14 -1
  17. data/lib/rdkafka/consumer.rb +35 -5
  18. data/lib/rdkafka/error.rb +29 -3
  19. data/lib/rdkafka/metadata.rb +6 -5
  20. data/lib/rdkafka/producer.rb +13 -2
  21. data/lib/rdkafka/producer/delivery_handle.rb +7 -53
  22. data/lib/rdkafka/version.rb +1 -1
  23. data/spec/rdkafka/abstract_handle_spec.rb +114 -0
  24. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  25. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  26. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  27. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  28. data/spec/rdkafka/admin_spec.rb +192 -0
  29. data/spec/rdkafka/callbacks_spec.rb +20 -0
  30. data/spec/rdkafka/config_spec.rb +11 -0
  31. data/spec/rdkafka/consumer_spec.rb +34 -2
  32. data/spec/rdkafka/error_spec.rb +4 -0
  33. data/spec/rdkafka/metadata_spec.rb +78 -0
  34. data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -41
  35. data/spec/rdkafka/producer_spec.rb +22 -0
  36. data/spec/spec_helper.rb +28 -11
  37. metadata +26 -3
  38. data/.travis.yml +0 -48
data/spec/rdkafka/callbacks_spec.rb ADDED
@@ -0,0 +1,20 @@
+ require "spec_helper"
+
+ describe Rdkafka::Callbacks do
+
+   # The code in the callback functions is 100% covered by other specs. Due to
+   # the large number of collaborators, and the fact that FFI does not play
+   # nicely with doubles, it was very difficult to construct tests that were
+   # not over-mocked.
+
+   # For debugging purposes, if you suspect that you are running into trouble in
+   # one of the callback functions, it may be helpful to surround the inner body
+   # of the method with something like:
+   #
+   #   begin
+   #     <method body>
+   #   rescue => ex; puts ex.inspect; puts ex.backtrace; end;
+   #
+   # This will output to STDOUT any exceptions that are being raised in the callback.
+
+ end
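
The begin/rescue wrapper described in the comment above is a general debugging pattern, not an API of the gem. A minimal sketch of it applied to a hypothetical callback body (the method name and parameters are illustrative, not from the diff):

  # Hypothetical FFI callback; the wrapper prints exceptions that would
  # otherwise be swallowed silently at the native/Ruby boundary.
  def call(client_ptr, delivery_report_ptr, opaque_ptr)
    begin
      # <original method body>
    rescue => ex
      puts ex.inspect
      puts ex.backtrace
    end
  end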
data/spec/rdkafka/config_spec.rb CHANGED
@@ -90,6 +90,17 @@ describe Rdkafka::Config do
      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
    end

+   it "should allow configuring zstd compression" do
+     config = Rdkafka::Config.new('compression.codec' => 'zstd')
+     begin
+       expect(config.producer).to be_a Rdkafka::Producer
+       config.producer.close
+     rescue Rdkafka::Config::ConfigError => ex
+       pending "Zstd compression not supported on this machine"
+       raise ex
+     end
+   end
+
    it "should raise an error when client creation fails for a consumer" do
      config = Rdkafka::Config.new(
        "security.protocol" => "SSL",
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -272,7 +272,9 @@ describe Rdkafka::Consumer do
    it "should close a consumer" do
      consumer.subscribe("consume_test_topic")
      consumer.close
-     expect(consumer.poll(100)).to be_nil
+     expect {
+       consumer.poll(100)
+     }.to raise_error(Rdkafka::ClosedConsumerError, /poll/)
    end
  end

@@ -662,8 +664,9 @@ describe Rdkafka::Consumer do
      # should break the each loop.
      consumer.each_with_index do |message, i|
        expect(message).to be_a Rdkafka::Consumer::Message
-       consumer.close if i == 10
+       break if i == 10
      end
+     consumer.close
    end
  end

@@ -723,4 +726,33 @@ describe Rdkafka::Consumer do
      consumer.close
    end
  end
+
+ context "methods that should not be called after a consumer has been closed" do
+   before do
+     consumer.close
+   end
+
+   # Affected methods and a non-invalid set of parameters for the method
+   {
+     :subscribe => [ nil ],
+     :unsubscribe => nil,
+     :pause => [ nil ],
+     :resume => [ nil ],
+     :subscription => nil,
+     :assign => [ nil ],
+     :assignment => nil,
+     :committed => [],
+     :query_watermark_offsets => [ nil, nil ],
+   }.each do |method, args|
+     it "raises an exception if #{method} is called" do
+       expect {
+         if args.nil?
+           consumer.public_send(method)
+         else
+           consumer.public_send(method, *args)
+         end
+       }.to raise_exception(Rdkafka::ClosedConsumerError, /#{method.to_s}/)
+     end
+   end
+ end
  end
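
Together these hunks pin down the new contract: using a closed consumer now raises Rdkafka::ClosedConsumerError naming the offending method, instead of silently returning nil. A short sketch of what callers can rely on, assuming a broker at localhost:9092:

  require "rdkafka"

  consumer = Rdkafka::Config.new(
    "bootstrap.servers" => "localhost:9092",  # placeholder broker
    "group.id"          => "example-group"
  ).consumer

  consumer.subscribe("consume_test_topic")
  consumer.close

  begin
    consumer.poll(100)  # returned nil in 0.8.0; raises in 0.8.1
  rescue Rdkafka::ClosedConsumerError => ex
    puts "consumer already closed: #{ex.message}"
  end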
data/spec/rdkafka/error_spec.rb CHANGED
@@ -11,6 +11,10 @@ describe Rdkafka::RdkafkaError do
      expect(Rdkafka::RdkafkaError.new(10, "message prefix").message_prefix).to eq "message prefix"
    end

+   it "should create an error with a broker message" do
+     expect(Rdkafka::RdkafkaError.new(10, broker_message: "broker message").broker_message).to eq "broker message"
+   end
+
    describe "#code" do
      it "should handle an invalid response" do
        expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
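
The new example documents that RdkafkaError now accepts the broker-supplied message as a keyword argument. A minimal sketch mirroring the spec:

  # broker_message is the new keyword argument; the positional argument
  # after the code remains the message prefix.
  error = Rdkafka::RdkafkaError.new(10, broker_message: "broker message")
  error.broker_message  # => "broker message"
  error.code            # => symbolic name derived from error code 10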
data/spec/rdkafka/metadata_spec.rb ADDED
@@ -0,0 +1,78 @@
+ require "spec_helper"
+ require "securerandom"
+
+ describe Rdkafka::Metadata do
+   let(:config)        { rdkafka_config }
+   let(:native_config) { config.send(:native_config) }
+   let(:native_kafka)  { config.send(:native_kafka, native_config, :rd_kafka_consumer) }
+
+   after do
+     Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
+     Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
+   end
+
+   context "passing in a topic name" do
+     context "that is a non-existent topic" do
+       let(:topic_name) { SecureRandom.uuid.to_s }
+
+       it "raises an appropriate exception" do
+         expect {
+           described_class.new(native_kafka, topic_name)
+         }.to raise_exception(Rdkafka::RdkafkaError, "Broker: Leader not available (leader_not_available)")
+       end
+     end
+
+     context "that is one of our test topics" do
+       subject { described_class.new(native_kafka, topic_name) }
+       let(:topic_name) { "partitioner_test_topic" }
+
+       it "#brokers returns our single broker" do
+         expect(subject.brokers.length).to eq(1)
+         expect(subject.brokers[0][:broker_id]).to eq(1)
+         expect(subject.brokers[0][:broker_name]).to eq("localhost")
+         expect(subject.brokers[0][:broker_port]).to eq(9092)
+       end
+
+       it "#topics returns data on our test topic" do
+         expect(subject.topics.length).to eq(1)
+         expect(subject.topics[0][:partition_count]).to eq(25)
+         expect(subject.topics[0][:partitions].length).to eq(25)
+         expect(subject.topics[0][:topic_name]).to eq(topic_name)
+       end
+     end
+   end
+
+   context "not passing in a topic name" do
+     subject { described_class.new(native_kafka, topic_name) }
+     let(:topic_name) { nil }
+     let(:test_topics) {
+       %w(consume_test_topic empty_test_topic load_test_topic produce_test_topic rake_test_topic watermarks_test_topic partitioner_test_topic)
+     } # Test topics created in spec_helper.rb
+
+     it "#brokers returns our single broker" do
+       expect(subject.brokers.length).to eq(1)
+       expect(subject.brokers[0][:broker_id]).to eq(1)
+       expect(subject.brokers[0][:broker_name]).to eq("localhost")
+       expect(subject.brokers[0][:broker_port]).to eq(9092)
+     end
+
+     it "#topics returns data about all of our test topics" do
+       result = subject.topics.map { |topic| topic[:topic_name] }
+       expect(result).to include(*test_topics)
+     end
+   end
+
+   context "when a non-zero error code is returned" do
+     let(:topic_name) { SecureRandom.uuid.to_s }
+
+     before do
+       allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_return(-165)
+     end
+
+     it "creating the instance raises an exception" do
+       expect {
+         described_class.new(native_kafka, topic_name)
+       }.to raise_error(Rdkafka::RdkafkaError, /Local: Required feature not supported by broker \(unsupported_feature\)/)
+     end
+   end
+ end
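
Rdkafka::Metadata is constructed from a raw native client handle, which the spec obtains through Config's private helpers; this release has no higher-level public entry point for it. A sketch of the same approach outside the suite (illustrative only, since it leans on private methods exactly as the spec does; the broker address is an assumption):

  require "rdkafka"

  config        = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")  # placeholder
  native_config = config.send(:native_config)
  native_kafka  = config.send(:native_kafka, native_config, :rd_kafka_consumer)

  metadata = Rdkafka::Metadata.new(native_kafka, "consume_test_topic")
  metadata.brokers.each { |b| puts "#{b[:broker_id]} #{b[:broker_name]}:#{b[:broker_port]}" }
  metadata.topics.each  { |t| puts "#{t[:topic_name]}: #{t[:partition_count]} partitions" }

  # Clean up the native handle, as the spec's after block does.
  Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
  Rdkafka::Bindings.rd_kafka_destroy(native_kafka)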
data/spec/rdkafka/producer/delivery_handle_spec.rb CHANGED
@@ -12,42 +12,13 @@ describe Rdkafka::Producer::DeliveryHandle do
      end
    end

-   describe ".register and .remove" do
-     let(:pending_handle) { true }
-
-     it "should register and remove a delivery handle" do
-       Rdkafka::Producer::DeliveryHandle.register(subject.to_ptr.address, subject)
-       removed = Rdkafka::Producer::DeliveryHandle.remove(subject.to_ptr.address)
-       expect(removed).to eq subject
-       expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to be_empty
-     end
-   end
-
-   describe "#pending?" do
-     context "when true" do
-       let(:pending_handle) { true }
-
-       it "should be true" do
-         expect(subject.pending?).to be true
-       end
-     end
-
-     context "when not true" do
-       let(:pending_handle) { false }
-
-       it "should be false" do
-         expect(subject.pending?).to be false
-       end
-     end
-   end
-
    describe "#wait" do
      let(:pending_handle) { true }

      it "should wait until the timeout and then raise an error" do
        expect {
          subject.wait(max_wait_timeout: 0.1)
-       }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError
+       }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError, /delivery/
      end

      context "when not pending anymore and no error" do
@@ -67,16 +38,5 @@ describe Rdkafka::Producer::DeliveryHandle do
        expect(report.offset).to eq(100)
      end
    end
-
-   context "when not pending anymore and there was an error" do
-     let(:pending_handle) { false }
-     let(:response) { 20 }
-
-     it "should raise an rdkafka error" do
-       expect {
-         subject.wait
-       }.to raise_error Rdkafka::RdkafkaError
-     end
-   end
  end
  end
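
With the registry and #pending? plumbing moved into the shared AbstractHandle (covered by the new abstract_handle_spec.rb), the public wait contract is unchanged except that the timeout message now identifies the delivery. Typical producer-side use, assuming a broker at localhost:9092:

  require "rdkafka"

  producer = Rdkafka::Config.new(
    "bootstrap.servers" => "localhost:9092"  # placeholder broker
  ).producer

  handle = producer.produce(topic: "produce_test_topic", payload: "hello")

  begin
    report = handle.wait(max_wait_timeout: 5)
    puts "delivered to partition #{report.partition} at offset #{report.offset}"
  rescue Rdkafka::Producer::DeliveryHandle::WaitTimeoutError => ex
    puts ex.message  # now mentions "delivery", per the updated spec
  ensure
    producer.close
  end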
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -407,4 +407,26 @@ describe Rdkafka::Producer do
      # Waiting a second time should work
      handle.wait(max_wait_timeout: 5)
    end
+
+   context "methods that should not be called after a producer has been closed" do
+     before do
+       producer.close
+     end
+
+     # Affected methods and a non-invalid set of parameters for the method
+     {
+       :produce => { topic: nil },
+       :partition_count => nil,
+     }.each do |method, args|
+       it "raises an exception if #{method} is called" do
+         expect {
+           if args.is_a?(Hash)
+             producer.public_send(method, **args)
+           else
+             producer.public_send(method, args)
+           end
+         }.to raise_exception(Rdkafka::ClosedProducerError, /#{method.to_s}/)
+       end
+     end
+   end
  end
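
The producer gets the guard matching the consumer's: after close, produce and partition_count raise Rdkafka::ClosedProducerError. A compact sketch (broker address assumed):

  producer = Rdkafka::Config.new(
    "bootstrap.servers" => "localhost:9092"  # placeholder broker
  ).producer
  producer.close

  begin
    producer.produce(topic: "produce_test_topic", payload: "too late")
  rescue Rdkafka::ClosedProducerError => ex
    puts "producer already closed: #{ex.message}"
  end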
data/spec/spec_helper.rb CHANGED
@@ -1,20 +1,14 @@
- require "simplecov"
- SimpleCov.start do
-   add_filter "/spec/"
+ unless ENV["CI"] == "true"
+   require "simplecov"
+   SimpleCov.start do
+     add_filter "/spec/"
+   end
  end

  require "pry"
  require "rspec"
  require "rdkafka"

- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic consume_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic empty_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic load_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic produce_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic rake_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 3 --if-not-exists --topic watermarks_test_topic`
- `docker-compose exec kafka kafka-topics --create --zookeeper zookeeper:2181 --replication-factor 1 --partitions 25 --if-not-exists --topic partitioner_test_topic`
-
  def rdkafka_config(config_overrides={})
    config = {
      :"api.version.request" => false,
@@ -79,3 +73,26 @@ def wait_for_unassignment(consumer)
      sleep 1
    end
  end
+
+ RSpec.configure do |config|
+   config.before(:suite) do
+     admin = rdkafka_config.admin
+     {
+       consume_test_topic: 3,
+       empty_test_topic: 3,
+       load_test_topic: 3,
+       produce_test_topic: 3,
+       rake_test_topic: 3,
+       watermarks_test_topic: 3,
+       partitioner_test_topic: 25,
+     }.each do |topic, partitions|
+       create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
+       begin
+         create_topic_handle.wait(max_wait_timeout: 15)
+       rescue Rdkafka::RdkafkaError => ex
+         raise unless ex.message.match?(/topic_already_exists/)
+       end
+     end
+     admin.close
+   end
+ end
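
The shelled-out docker-compose topic creation is replaced by the gem's own new admin API, run once before the suite. The same calls work outside the helper; a sketch assuming a broker at localhost:9092 (create_topic takes the topic name, partition count, and replication factor):

  require "rdkafka"

  admin = Rdkafka::Config.new(
    "bootstrap.servers" => "localhost:9092"  # placeholder broker
  ).admin

  handle = admin.create_topic("example_topic", 3, 1)
  begin
    handle.wait(max_wait_timeout: 15)
  rescue Rdkafka::RdkafkaError => ex
    # Idempotent setup: ignore "already exists", re-raise anything else.
    raise unless ex.message.match?(/topic_already_exists/)
  end

  admin.close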
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.8.0
+   version: 0.8.1
  platform: ruby
  authors:
  - Thijs Cadier
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-06-02 00:00:00.000000000 Z
+ date: 2020-12-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
@@ -117,7 +117,7 @@ extensions:
  extra_rdoc_files: []
  files:
  - ".gitignore"
- - ".travis.yml"
+ - ".semaphore/semaphore.yml"
  - ".yardopts"
  - CHANGELOG.md
  - Gemfile
@@ -128,7 +128,14 @@ files:
  - ext/README.md
  - ext/Rakefile
  - lib/rdkafka.rb
+ - lib/rdkafka/abstract_handle.rb
+ - lib/rdkafka/admin.rb
+ - lib/rdkafka/admin/create_topic_handle.rb
+ - lib/rdkafka/admin/create_topic_report.rb
+ - lib/rdkafka/admin/delete_topic_handle.rb
+ - lib/rdkafka/admin/delete_topic_report.rb
  - lib/rdkafka/bindings.rb
+ - lib/rdkafka/callbacks.rb
  - lib/rdkafka/config.rb
  - lib/rdkafka/consumer.rb
  - lib/rdkafka/consumer/headers.rb
@@ -142,13 +149,21 @@ files:
  - lib/rdkafka/producer/delivery_report.rb
  - lib/rdkafka/version.rb
  - rdkafka.gemspec
+ - spec/rdkafka/abstract_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_report_spec.rb
+ - spec/rdkafka/admin/delete_topic_handle_spec.rb
+ - spec/rdkafka/admin/delete_topic_report_spec.rb
+ - spec/rdkafka/admin_spec.rb
  - spec/rdkafka/bindings_spec.rb
+ - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/metadata_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
@@ -179,13 +194,21 @@ summary: The rdkafka gem is a modern Kafka client library for Ruby based on libr
    It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
    and Ruby 2.4+.
  test_files:
+ - spec/rdkafka/abstract_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_handle_spec.rb
+ - spec/rdkafka/admin/create_topic_report_spec.rb
+ - spec/rdkafka/admin/delete_topic_handle_spec.rb
+ - spec/rdkafka/admin/delete_topic_report_spec.rb
+ - spec/rdkafka/admin_spec.rb
  - spec/rdkafka/bindings_spec.rb
+ - spec/rdkafka/callbacks_spec.rb
  - spec/rdkafka/config_spec.rb
  - spec/rdkafka/consumer/message_spec.rb
  - spec/rdkafka/consumer/partition_spec.rb
  - spec/rdkafka/consumer/topic_partition_list_spec.rb
  - spec/rdkafka/consumer_spec.rb
  - spec/rdkafka/error_spec.rb
+ - spec/rdkafka/metadata_spec.rb
  - spec/rdkafka/producer/delivery_handle_spec.rb
  - spec/rdkafka/producer/delivery_report_spec.rb
  - spec/rdkafka/producer_spec.rb
data/.travis.yml DELETED
@@ -1,48 +0,0 @@
- language: ruby
-
- sudo: false
-
- services:
-   - docker
-
- env:
-   global:
-     - CC_TEST_REPORTER_ID=9f7f740ac1b6e264e1189fa07a6687a87bcdb9f3c0f4199d4344ab3b538e187e
-     - KAFKA_HEAP_OPTS="-Xmx512m -Xms512m"
-
- rvm:
-   - 2.4
-   - 2.5
-   - 2.6
-   - 2.7.1
-   - jruby-9.2.9.0
-   - jruby-9.2.10.0
-   - jruby-9.2.11.1
-
- before_install:
-   - gem update --system
-   - |
-     r_eng="$(ruby -e 'STDOUT.write RUBY_ENGINE')";
-     if [ "$r_eng" == "jruby" ]; then
-       sudo apt-get update && \
-       sudo apt-get install -y git && \
-       sudo apt-get install -y libpthread-stubs0-dev && \
-       sudo apt-get install -y build-essential && \
-       sudo apt-get install -y zlib1g-dev && \
-       sudo apt-get install -y libssl-dev && \
-       sudo apt-get install -y libsasl2-dev
-     fi
-
- before_script:
-   - docker-compose up -d
-   - cd ext && bundle exec rake && cd ..
-   - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
-   - chmod +x ./cc-test-reporter
-   - ./cc-test-reporter before-build
-
- script:
-   - bundle exec rspec --format documentation
-
- after_script:
-   - docker-compose stop
-   - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT