karafka-rdkafka 0.12.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (62)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
# frozen_string_literal: true

# spec/rdkafka/error_spec.rb
#
# Specs for Rdkafka::RdkafkaError: construction, error-code symbol mapping,
# human-readable messages, partition-EOF detection, and equality.
# (Reconstructed from a diff rendering: stripped leading "+" markers and
# interleaved diff line numbers.)

require "spec_helper"

describe Rdkafka::RdkafkaError do
  it "should raise a type error for a nil response" do
    expect {
      Rdkafka::RdkafkaError.new(nil)
    }.to raise_error TypeError
  end

  it "should create an error with a message prefix" do
    expect(Rdkafka::RdkafkaError.new(10, "message prefix").message_prefix).to eq "message prefix"
  end

  it "should create an error with a broker message" do
    expect(Rdkafka::RdkafkaError.new(10, broker_message: "broker message").broker_message).to eq "broker message"
  end

  describe "#code" do
    it "should handle an invalid response" do
      # Unknown rdkafka codes are turned into a synthetic :err_<code>? symbol.
      expect(Rdkafka::RdkafkaError.new(933975).code).to eq :err_933975?
    end

    it "should return error messages from rdkafka" do
      expect(Rdkafka::RdkafkaError.new(10).code).to eq :msg_size_too_large
    end

    it "should strip a leading underscore" do
      # -191 is RD_KAFKA_RESP_ERR__PARTITION_EOF; the "_" prefix is dropped.
      expect(Rdkafka::RdkafkaError.new(-191).code).to eq :partition_eof
    end
  end

  describe "#to_s" do
    it "should handle an invalid response" do
      expect(Rdkafka::RdkafkaError.new(933975).to_s).to eq "Err-933975? (err_933975?)"
    end

    it "should return error messages from rdkafka" do
      expect(Rdkafka::RdkafkaError.new(10).to_s).to eq "Broker: Message size too large (msg_size_too_large)"
    end

    it "should add the message prefix if present" do
      expect(Rdkafka::RdkafkaError.new(10, "Error explanation").to_s).to eq "Error explanation - Broker: Message size too large (msg_size_too_large)"
    end
  end

  describe "#message" do
    it "should handle an invalid response" do
      expect(Rdkafka::RdkafkaError.new(933975).message).to eq "Err-933975? (err_933975?)"
    end

    it "should return error messages from rdkafka" do
      expect(Rdkafka::RdkafkaError.new(10).message).to eq "Broker: Message size too large (msg_size_too_large)"
    end

    it "should add the message prefix if present" do
      expect(Rdkafka::RdkafkaError.new(10, "Error explanation").message).to eq "Error explanation - Broker: Message size too large (msg_size_too_large)"
    end
  end

  describe "#is_partition_eof?" do
    it "should be false when not partition eof" do
      expect(Rdkafka::RdkafkaError.new(933975).is_partition_eof?).to be false
    end

    it "should be true when partition eof" do
      expect(Rdkafka::RdkafkaError.new(-191).is_partition_eof?).to be true
    end
  end

  describe "#==" do
    subject { Rdkafka::RdkafkaError.new(10, "Error explanation") }

    it "should equal another error with the same content" do
      expect(subject).to eq Rdkafka::RdkafkaError.new(10, "Error explanation")
    end

    it "should not equal another error with a different error code" do
      expect(subject).not_to eq Rdkafka::RdkafkaError.new(20, "Error explanation")
    end

    it "should not equal another error with a different message" do
      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10, "Different error explanation")
    end

    it "should not equal another error with no message" do
      expect(subject).not_to eq Rdkafka::RdkafkaError.new(10)
    end
  end
end
# frozen_string_literal: true

# spec/rdkafka/metadata_spec.rb
#
# Specs for Rdkafka::Metadata against a live local broker (localhost:9092):
# broker/topic lookups for a named topic, cluster-wide lookups when no topic
# is given, and error translation for a failing rd_kafka_metadata call.
# (Reconstructed from a diff rendering: stripped leading "+" markers and
# interleaved diff line numbers; fixed "crated" typo in comment.)

require "spec_helper"
require "securerandom"

describe Rdkafka::Metadata do
  let(:config) { rdkafka_consumer_config }
  let(:native_config) { config.send(:native_config) }
  let(:native_kafka) { config.send(:native_kafka, native_config, :rd_kafka_consumer) }

  after do
    # Release the native handle created for each example.
    Rdkafka::Bindings.rd_kafka_consumer_close(native_kafka)
    Rdkafka::Bindings.rd_kafka_destroy(native_kafka)
  end

  context "passing in a topic name" do
    context "that is non-existent topic" do
      let(:topic_name) { SecureRandom.uuid.to_s }

      it "raises an appropriate exception" do
        expect {
          described_class.new(native_kafka, topic_name)
        }.to raise_exception(Rdkafka::RdkafkaError, "Broker: Unknown topic or partition (unknown_topic_or_part)")
      end
    end

    context "that is one of our test topics" do
      subject { described_class.new(native_kafka, topic_name) }
      let(:topic_name) { "partitioner_test_topic" }

      it "#brokers returns our single broker" do
        expect(subject.brokers.length).to eq(1)
        expect(subject.brokers[0][:broker_id]).to eq(1)
        expect(subject.brokers[0][:broker_name]).to eq("localhost")
        expect(subject.brokers[0][:broker_port]).to eq(9092)
      end

      it "#topics returns data on our test topic" do
        expect(subject.topics.length).to eq(1)
        expect(subject.topics[0][:partition_count]).to eq(25)
        expect(subject.topics[0][:partitions].length).to eq(25)
        expect(subject.topics[0][:topic_name]).to eq(topic_name)
      end
    end
  end

  context "not passing in a topic name" do
    subject { described_class.new(native_kafka, topic_name) }
    let(:topic_name) { nil }
    let(:test_topics) {
      %w(consume_test_topic empty_test_topic load_test_topic produce_test_topic rake_test_topic watermarks_test_topic partitioner_test_topic)
    } # Test topics created in spec_helper.rb

    it "#brokers returns our single broker" do
      expect(subject.brokers.length).to eq(1)
      expect(subject.brokers[0][:broker_id]).to eq(1)
      expect(subject.brokers[0][:broker_name]).to eq("localhost")
      expect(subject.brokers[0][:broker_port]).to eq(9092)
    end

    it "#topics returns data about all of our test topics" do
      result = subject.topics.map { |topic| topic[:topic_name] }
      expect(result).to include(*test_topics)
    end
  end

  context "when a non-zero error code is returned" do
    let(:topic_name) { SecureRandom.uuid.to_s }

    before do
      # -165 is RD_KAFKA_RESP_ERR__UNSUPPORTED_FEATURE.
      allow(Rdkafka::Bindings).to receive(:rd_kafka_metadata).and_return(-165)
    end

    it "creating the instance raises an exception" do
      expect {
        described_class.new(native_kafka, topic_name)
      }.to raise_error(Rdkafka::RdkafkaError, /Local: Required feature not supported by broker \(unsupported_feature\)/)
    end
  end
end
# frozen_string_literal: true

# spec/rdkafka/producer/client_spec.rb
#
# Specs for Rdkafka::Producer::Client: polling-thread setup, close semantics
# (destroy binding, `:closing` flag, thread join), idempotent re-close, and
# the finalizer Proc. The polling thread is stubbed with a Thread double.
# (Reconstructed from a diff rendering: stripped leading "+" markers and
# interleaved diff line numbers.)

require "spec_helper"

describe Rdkafka::Producer::Client do
  let(:config) { rdkafka_producer_config }
  let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
  let(:closing) { false }
  let(:thread) { double(Thread) }

  subject(:client) { described_class.new(native) }

  before do
    allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
    allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
    allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
    allow(Thread).to receive(:new).and_return(thread)

    allow(thread).to receive(:[]=).with(:closing, anything)
    allow(thread).to receive(:join)
    allow(thread).to receive(:abort_on_exception=).with(anything)
  end

  context "defaults" do
    it "sets the thread to abort on exception" do
      expect(thread).to receive(:abort_on_exception=).with(true)

      client
    end

    it "sets the thread `closing` flag to false" do
      expect(thread).to receive(:[]=).with(:closing, false)

      client
    end
  end

  context "the polling thread" do
    it "is created" do
      expect(Thread).to receive(:new)

      client
    end

    it "polls the native with default 250ms timeout" do
      polling_loop_expects do
        expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
      end
    end

    it "check the out queue of native client" do
      polling_loop_expects do
        expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
      end
    end
  end

  # Runs `block` inside a stubbed polling loop that executes exactly once:
  # the `:closing` flag on the current thread forces the loop to break.
  def polling_loop_expects(&block)
    Thread.current[:closing] = true # this forces the loop break with line #12

    allow(Thread).to receive(:new).and_yield do |_|
      block.call
    end.and_return(thread)

    client
  end

  it "exposes `native` client" do
    expect(client.native).to eq(native)
  end

  context "when client was not yet closed (`nil`)" do
    it "is not closed" do
      expect(client.closed?).to eq(false)
    end

    context "and attempt to close" do
      it "calls the `destroy` binding" do
        expect(Rdkafka::Bindings).to receive(:rd_kafka_destroy).with(native)

        client.close
      end

      it "indicates to the polling thread that it is closing" do
        expect(thread).to receive(:[]=).with(:closing, true)

        client.close
      end

      it "joins the polling thread" do
        expect(thread).to receive(:join)

        client.close
      end

      it "closes and unassign the native client" do
        client.close

        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
  end

  context "when client was already closed" do
    before { client.close }

    it "is closed" do
      expect(client.closed?).to eq(true)
    end

    context "and attempt to close again" do
      it "does not call the `destroy` binding" do
        expect(Rdkafka::Bindings).not_to receive(:rd_kafka_destroy)

        client.close
      end

      it "does not indicate to the polling thread that it is closing" do
        expect(thread).not_to receive(:[]=).with(:closing, true)

        client.close
      end

      it "does not join the polling thread" do
        expect(thread).not_to receive(:join)

        client.close
      end

      it "does not close and unassign the native client again" do
        client.close

        expect(client.native).to eq(nil)
        expect(client.closed?).to eq(true)
      end
    end
  end

  it "provide a finalizer Proc that closes the `native` client" do
    expect(client.closed?).to eq(false)

    client.finalizer.call("some-ignored-object-id")

    expect(client.closed?).to eq(true)
  end
end
# frozen_string_literal: true

# spec/rdkafka/producer/delivery_handle_spec.rb
#
# Specs for Rdkafka::Producer::DeliveryHandle#wait: timeout while pending,
# and the returned delivery report once the handle is no longer pending.
# (Reconstructed from a diff rendering: stripped leading "+" markers and
# interleaved diff line numbers.)

require "spec_helper"

describe Rdkafka::Producer::DeliveryHandle do
  let(:response) { 0 }

  subject do
    Rdkafka::Producer::DeliveryHandle.new.tap do |handle|
      handle[:pending] = pending_handle
      handle[:response] = response
      handle[:partition] = 2
      handle[:offset] = 100
    end
  end

  describe "#wait" do
    let(:pending_handle) { true }

    it "should wait until the timeout and then raise an error" do
      expect {
        subject.wait(max_wait_timeout: 0.1)
      }.to raise_error Rdkafka::Producer::DeliveryHandle::WaitTimeoutError, /delivery/
    end

    context "when not pending anymore and no error" do
      let(:pending_handle) { false }

      it "should return a delivery report" do
        report = subject.wait

        expect(report.partition).to eq(2)
        expect(report.offset).to eq(100)
      end

      it "should wait without a timeout" do
        report = subject.wait(max_wait_timeout: nil)

        expect(report.partition).to eq(2)
        expect(report.offset).to eq(100)
      end
    end
  end
end
# frozen_string_literal: true

# spec/rdkafka/producer/delivery_report_spec.rb
#
# Specs for Rdkafka::Producer::DeliveryReport attribute readers.
# (Reconstructed from a diff rendering: stripped leading "+" markers and
# interleaved diff line numbers.)

require "spec_helper"

describe Rdkafka::Producer::DeliveryReport do
  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }

  it "should get the partition" do
    expect(subject.partition).to eq 2
  end

  it "should get the offset" do
    expect(subject.offset).to eq 100
  end

  it "should get the error" do
    expect(subject.error).to eq "error"
  end
end