karafka-rdkafka 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
@@ -0,0 +1,203 @@
1
+ require "spec_helper"
2
+ require "ostruct"
3
+
4
+ describe Rdkafka::Admin do
5
+ let(:config) { rdkafka_config }
6
+ let(:admin) { config.admin }
7
+
8
+ after do
9
+ # Registry should always end up being empty
10
+ expect(Rdkafka::Admin::CreateTopicHandle::REGISTRY).to be_empty
11
+ admin.close
12
+ end
13
+
14
+ let(:topic_name) { "test-topic-#{Random.new.rand(0..1_000_000)}" }
15
+ let(:topic_partition_count) { 3 }
16
+ let(:topic_replication_factor) { 1 }
17
+ let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
18
+ let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
19
+
20
+ describe "#create_topic" do
21
+ describe "called with invalid input" do
22
+ describe "with an invalid topic name" do
23
+ # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
24
+ # public static final String LEGAL_CHARS = "[a-zA-Z0-9._-]";
25
+ let(:topic_name) { "[!@#]" }
26
+
27
+ it "raises an exception" do
28
+ create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
29
+ expect {
30
+ create_topic_handle.wait(max_wait_timeout: 15.0)
31
+ }.to raise_exception { |ex|
32
+ expect(ex).to be_a(Rdkafka::RdkafkaError)
33
+ expect(ex.message).to match(/Broker: Invalid topic \(topic_exception\)/)
34
+ expect(ex.broker_message).to match(/Topic name.*is illegal, it contains a character other than ASCII alphanumerics/)
35
+ }
36
+ end
37
+ end
38
+
39
+ describe "with the name of a topic that already exists" do
40
+ let(:topic_name) { "empty_test_topic" } # created in spec_helper.rb
41
+
42
+ it "raises an exception" do
43
+ create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
44
+ expect {
45
+ create_topic_handle.wait(max_wait_timeout: 15.0)
46
+ }.to raise_exception { |ex|
47
+ expect(ex).to be_a(Rdkafka::RdkafkaError)
48
+ expect(ex.message).to match(/Broker: Topic already exists \(topic_already_exists\)/)
49
+ expect(ex.broker_message).to match(/Topic 'empty_test_topic' already exists/)
50
+ }
51
+ end
52
+ end
53
+
54
+ describe "with an invalid partition count" do
55
+ let(:topic_partition_count) { -999 }
56
+
57
+ it "raises an exception" do
58
+ expect {
59
+ admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
60
+ }.to raise_error Rdkafka::Config::ConfigError, /num_partitions out of expected range/
61
+ end
62
+ end
63
+
64
+ describe "with an invalid replication factor" do
65
+ let(:topic_replication_factor) { -2 }
66
+
67
+ it "raises an exception" do
68
+ expect {
69
+ admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
70
+ }.to raise_error Rdkafka::Config::ConfigError, /replication_factor out of expected range/
71
+ end
72
+ end
73
+
74
+ describe "with an invalid topic configuration" do
75
+ it "doesn't create the topic" do
76
+ create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor, invalid_topic_config)
77
+ expect {
78
+ create_topic_handle.wait(max_wait_timeout: 15.0)
79
+ }.to raise_error Rdkafka::RdkafkaError, /Broker: Configuration is invalid \(invalid_config\)/
80
+ end
81
+ end
82
+ end
83
+
84
+ context "edge case" do
85
+ context "where we are unable to get the background queue" do
86
+ before do
87
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_queue_get_background).and_return(FFI::Pointer::NULL)
88
+ end
89
+
90
+ it "raises an exception" do
91
+ expect {
92
+ admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
93
+ }.to raise_error Rdkafka::Config::ConfigError, /rd_kafka_queue_get_background was NULL/
94
+ end
95
+ end
96
+
97
+ context "where rd_kafka_CreateTopics raises an exception" do
98
+ before do
99
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_CreateTopics).and_raise(RuntimeError.new("oops"))
100
+ end
101
+
102
+ it "raises an exception" do
103
+ expect {
104
+ admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
105
+ }.to raise_error RuntimeError, /oops/
106
+ end
107
+ end
108
+ end
109
+
110
+ it "creates a topic" do
111
+ create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor, topic_config)
112
+ create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
113
+ expect(create_topic_report.error_string).to be_nil
114
+ expect(create_topic_report.result_name).to eq(topic_name)
115
+ end
116
+ end
117
+
118
+ describe "#delete_topic" do
119
+ describe "called with invalid input" do
120
+ # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
121
+ # public static final String LEGAL_CHARS = "[a-zA-Z0-9._-]";
122
+ describe "with an invalid topic name" do
123
+ let(:topic_name) { "[!@#]" }
124
+
125
+ it "raises an exception" do
126
+ delete_topic_handle = admin.delete_topic(topic_name)
127
+ expect {
128
+ delete_topic_handle.wait(max_wait_timeout: 15.0)
129
+ }.to raise_exception { |ex|
130
+ expect(ex).to be_a(Rdkafka::RdkafkaError)
131
+ expect(ex.message).to match(/Broker: Unknown topic or partition \(unknown_topic_or_part\)/)
132
+ expect(ex.broker_message).to match(/Broker: Unknown topic or partition/)
133
+ }
134
+ end
135
+ end
136
+
137
+ describe "with the name of a topic that does not exist" do
138
+ it "raises an exception" do
139
+ delete_topic_handle = admin.delete_topic(topic_name)
140
+ expect {
141
+ delete_topic_handle.wait(max_wait_timeout: 15.0)
142
+ }.to raise_exception { |ex|
143
+ expect(ex).to be_a(Rdkafka::RdkafkaError)
144
+ expect(ex.message).to match(/Broker: Unknown topic or partition \(unknown_topic_or_part\)/)
145
+ expect(ex.broker_message).to match(/Broker: Unknown topic or partition/)
146
+ }
147
+ end
148
+ end
149
+ end
150
+
151
+ context "edge case" do
152
+ context "where we are unable to get the background queue" do
153
+ before do
154
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_queue_get_background).and_return(FFI::Pointer::NULL)
155
+ end
156
+
157
+ it "raises an exception" do
158
+ expect {
159
+ admin.delete_topic(topic_name)
160
+ }.to raise_error Rdkafka::Config::ConfigError, /rd_kafka_queue_get_background was NULL/
161
+ end
162
+ end
163
+
164
+ context "where rd_kafka_DeleteTopics raises an exception" do
165
+ before do
166
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_DeleteTopics).and_raise(RuntimeError.new("oops"))
167
+ end
168
+
169
+ it "raises an exception" do
170
+ expect {
171
+ admin.delete_topic(topic_name)
172
+ }.to raise_error RuntimeError, /oops/
173
+ end
174
+ end
175
+ end
176
+
177
+
178
+ it "deletes a topic that was newly created" do
179
+ create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
180
+ create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
181
+ expect(create_topic_report.error_string).to be_nil
182
+ expect(create_topic_report.result_name).to eq(topic_name)
183
+
184
+ # Retry topic deletion a few times. On CI, Kafka does not always
185
+ # seem to be ready for it immediately
186
+ delete_topic_report = nil
187
+ 10.times do |i|
188
+ begin
189
+ delete_topic_handle = admin.delete_topic(topic_name)
190
+ delete_topic_report = delete_topic_handle.wait(max_wait_timeout: 15.0)
191
+ break
192
+ rescue Rdkafka::RdkafkaError => ex
193
+ if i > 3
194
+ raise ex
195
+ end
196
+ end
197
+ end
198
+
199
+ expect(delete_topic_report.error_string).to be_nil
200
+ expect(delete_topic_report.result_name).to eq(topic_name)
201
+ end
202
+ end
203
+ end
@@ -0,0 +1,134 @@
1
+ require "spec_helper"
2
+ require 'zlib'
3
+
4
+ describe Rdkafka::Bindings do
5
+ it "should load librdkafka" do
6
+ expect(Rdkafka::Bindings.ffi_libraries.map(&:name).first).to include "librdkafka"
7
+ end
8
+
9
+ describe ".lib_extension" do
10
+ it "should know the lib extension for darwin" do
11
+ stub_const('RbConfig::CONFIG', 'host_os' =>'darwin')
12
+ expect(Rdkafka::Bindings.lib_extension).to eq "dylib"
13
+ end
14
+
15
+ it "should know the lib extension for linux" do
16
+ stub_const('RbConfig::CONFIG', 'host_os' =>'linux')
17
+ expect(Rdkafka::Bindings.lib_extension).to eq "so"
18
+ end
19
+ end
20
+
21
+ it "should successfully call librdkafka" do
22
+ expect {
23
+ Rdkafka::Bindings.rd_kafka_conf_new
24
+ }.not_to raise_error
25
+ end
26
+
27
+ describe "log callback" do
28
+ let(:log_queue) { Rdkafka::Config.log_queue }
29
+ before do
30
+ allow(log_queue).to receive(:<<)
31
+ end
32
+
33
+ it "should log fatal messages" do
34
+ Rdkafka::Bindings::LogCallback.call(nil, 0, nil, "log line")
35
+ expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
36
+ end
37
+
38
+ it "should log error messages" do
39
+ Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
40
+ expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
41
+ end
42
+
43
+ it "should log warning messages" do
44
+ Rdkafka::Bindings::LogCallback.call(nil, 4, nil, "log line")
45
+ expect(log_queue).to have_received(:<<).with([Logger::WARN, "rdkafka: log line"])
46
+ end
47
+
48
+ it "should log info messages" do
49
+ Rdkafka::Bindings::LogCallback.call(nil, 5, nil, "log line")
50
+ expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
51
+ end
52
+
53
+ it "should log debug messages" do
54
+ Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
55
+ expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
56
+ end
57
+
58
+ it "should log unknown messages" do
59
+ Rdkafka::Bindings::LogCallback.call(nil, 100, nil, "log line")
60
+ expect(log_queue).to have_received(:<<).with([Logger::UNKNOWN, "rdkafka: log line"])
61
+ end
62
+ end
63
+
64
+ describe "partitioner" do
65
+ let(:partition_key) { ('a'..'z').to_a.shuffle.take(15).join('') }
66
+ let(:partition_count) { rand(50) + 1 }
67
+
68
+ it "should return the same partition for a similar string and the same partition count" do
69
+ result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
70
+ result_2 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
71
+ expect(result_1).to eq(result_2)
72
+ end
73
+
74
+ it "should match the old partitioner" do
75
+ result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
76
+ result_2 = (Zlib.crc32(partition_key) % partition_count)
77
+ expect(result_1).to eq(result_2)
78
+ end
79
+
80
+ it "should return the partition calculated by the specified partitioner" do
81
+ result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count, "murmur2")
82
+ ptr = FFI::MemoryPointer.from_string(partition_key)
83
+ result_2 = Rdkafka::Bindings.rd_kafka_msg_partitioner_murmur2(nil, ptr, partition_key.size, partition_count, nil, nil)
84
+ expect(result_1).to eq(result_2)
85
+ end
86
+ end
87
+
88
+ describe "stats callback" do
89
+ context "without a stats callback" do
90
+ it "should do nothing" do
91
+ expect {
92
+ Rdkafka::Bindings::StatsCallback.call(nil, "{}", 2, nil)
93
+ }.not_to raise_error
94
+ end
95
+ end
96
+
97
+ context "with a stats callback" do
98
+ before do
99
+ Rdkafka::Config.statistics_callback = lambda do |stats|
100
+ $received_stats = stats
101
+ end
102
+ end
103
+
104
+ it "should call the stats callback with a stats hash" do
105
+ Rdkafka::Bindings::StatsCallback.call(nil, "{\"received\":1}", 13, nil)
106
+ expect($received_stats).to eq({'received' => 1})
107
+ end
108
+ end
109
+ end
110
+
111
+ describe "error callback" do
112
+ context "without an error callback" do
113
+ it "should do nothing" do
114
+ expect {
115
+ Rdkafka::Bindings::ErrorCallback.call(nil, 1, "error", nil)
116
+ }.not_to raise_error
117
+ end
118
+ end
119
+
120
+ context "with an error callback" do
121
+ before do
122
+ Rdkafka::Config.error_callback = lambda do |error|
123
+ $received_error = error
124
+ end
125
+ end
126
+
127
+ it "should call the error callback with an Rdkafka::Error" do
128
+ Rdkafka::Bindings::ErrorCallback.call(nil, 8, "Broker not available", nil)
129
+ expect($received_error.code).to eq(:broker_not_available)
130
+ expect($received_error.broker_message).to eq("Broker not available")
131
+ end
132
+ end
133
+ end
134
+ end
@@ -0,0 +1,20 @@
1
+ require "spec_helper"
2
+
3
+ describe Rdkafka::Callbacks do
4
+
5
+ # The code in the call back functions is 100% covered by other specs. Due to
6
+ # the large number of collaborators, and the fact that FFI does not play
7
+ # nicely with doubles, it was very difficult to construct tests that were
8
+ # not over-mocked.
9
+
10
+ # For debugging purposes, if you suspect that you are running into trouble in
11
+ # one of the callback functions, it may be helpful to surround the inner body
12
+ # of the method with something like:
13
+ #
14
+ # begin
15
+ # <method body>
16
+ # rescue => ex; puts ex.inspect; puts ex.backtrace; end;
17
+ #
18
+ # This will output to STDOUT any exceptions that are being raised in the callback.
19
+
20
+ end
@@ -0,0 +1,182 @@
1
+ require "spec_helper"
2
+
3
+ describe Rdkafka::Config do
4
+ context "logger" do
5
+ it "should have a default logger" do
6
+ expect(Rdkafka::Config.logger).to be_a Logger
7
+ end
8
+
9
+ it "should set the logger" do
10
+ logger = Logger.new(STDOUT)
11
+ expect(Rdkafka::Config.logger).not_to eq logger
12
+ Rdkafka::Config.logger = logger
13
+ expect(Rdkafka::Config.logger).to eq logger
14
+ end
15
+
16
+ it "should not accept a nil logger" do
17
+ expect {
18
+ Rdkafka::Config.logger = nil
19
+ }.to raise_error(Rdkafka::Config::NoLoggerError)
20
+ end
21
+
22
+ it "supports logging queue" do
23
+ log = StringIO.new
24
+ Rdkafka::Config.logger = Logger.new(log)
25
+
26
+ Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
27
+ 20.times do
28
+ break if log.string != ""
29
+ sleep 0.05
30
+ end
31
+
32
+ expect(log.string).to include "FATAL -- : I love testing"
33
+ end
34
+ end
35
+
36
+ context "statistics callback" do
37
+ context "with a proc/lambda" do
38
+ it "should set the callback" do
39
+ expect {
40
+ Rdkafka::Config.statistics_callback = lambda do |stats|
41
+ puts stats
42
+ end
43
+ }.not_to raise_error
44
+ expect(Rdkafka::Config.statistics_callback).to respond_to :call
45
+ end
46
+ end
47
+
48
+ context "with a callable object" do
49
+ it "should set the callback" do
50
+ callback = Class.new do
51
+ def call(stats); end
52
+ end
53
+ expect {
54
+ Rdkafka::Config.statistics_callback = callback.new
55
+ }.not_to raise_error
56
+ expect(Rdkafka::Config.statistics_callback).to respond_to :call
57
+ end
58
+ end
59
+
60
+ it "should not accept a callback that's not callable" do
61
+ expect {
62
+ Rdkafka::Config.statistics_callback = 'a string'
63
+ }.to raise_error(TypeError)
64
+ end
65
+ end
66
+
67
+ context "error callback" do
68
+ context "with a proc/lambda" do
69
+ it "should set the callback" do
70
+ expect {
71
+ Rdkafka::Config.error_callback = lambda do |error|
72
+ puts error
73
+ end
74
+ }.not_to raise_error
75
+ expect(Rdkafka::Config.error_callback).to respond_to :call
76
+ end
77
+ end
78
+
79
+ context "with a callable object" do
80
+ it "should set the callback" do
81
+ callback = Class.new do
82
+ def call(stats); end
83
+ end
84
+ expect {
85
+ Rdkafka::Config.error_callback = callback.new
86
+ }.not_to raise_error
87
+ expect(Rdkafka::Config.error_callback).to respond_to :call
88
+ end
89
+ end
90
+
91
+ it "should not accept a callback that's not callable" do
92
+ expect {
93
+ Rdkafka::Config.error_callback = 'a string'
94
+ }.to raise_error(TypeError)
95
+ end
96
+ end
97
+
98
+ context "configuration" do
99
+ it "should store configuration" do
100
+ config = Rdkafka::Config.new
101
+ config[:"key"] = 'value'
102
+ expect(config[:"key"]).to eq 'value'
103
+ end
104
+
105
+ it "should use default configuration" do
106
+ config = Rdkafka::Config.new
107
+ expect(config[:"api.version.request"]).to eq true
108
+ end
109
+
110
+ it "should create a consumer with valid config" do
111
+ consumer = rdkafka_consumer_config.consumer
112
+ expect(consumer).to be_a Rdkafka::Consumer
113
+ consumer.close
114
+ end
115
+
116
+ it "should raise an error when creating a consumer with invalid config" do
117
+ config = Rdkafka::Config.new('invalid.key' => 'value')
118
+ expect {
119
+ config.consumer
120
+ }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
121
+ end
122
+
123
+ it "should raise an error when creating a consumer with a nil key in the config" do
124
+ config = Rdkafka::Config.new(nil => 'value')
125
+ expect {
126
+ config.consumer
127
+ }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"\"")
128
+ end
129
+
130
+ it "should treat a nil value as blank" do
131
+ config = Rdkafka::Config.new('security.protocol' => nil)
132
+ expect {
133
+ config.consumer
134
+ config.producer
135
+ }.to raise_error(Rdkafka::Config::ConfigError, "Configuration property \"security.protocol\" cannot be set to empty value")
136
+ end
137
+
138
+ it "should create a producer with valid config" do
139
+ producer = rdkafka_consumer_config.producer
140
+ expect(producer).to be_a Rdkafka::Producer
141
+ producer.close
142
+ end
143
+
144
+ it "should raise an error when creating a producer with invalid config" do
145
+ config = Rdkafka::Config.new('invalid.key' => 'value')
146
+ expect {
147
+ config.producer
148
+ }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
149
+ end
150
+
151
+ it "should allow configuring zstd compression" do
152
+ config = Rdkafka::Config.new('compression.codec' => 'zstd')
153
+ begin
154
+ expect(config.producer).to be_a Rdkafka::Producer
155
+ config.producer.close
156
+ rescue Rdkafka::Config::ConfigError => ex
157
+ pending "Zstd compression not supported on this machine"
158
+ raise ex
159
+ end
160
+ end
161
+
162
+ it "should raise an error when client creation fails for a consumer" do
163
+ config = Rdkafka::Config.new(
164
+ "security.protocol" => "SSL",
165
+ "ssl.ca.location" => "/nonsense"
166
+ )
167
+ expect {
168
+ config.consumer
169
+ }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
170
+ end
171
+
172
+ it "should raise an error when client creation fails for a producer" do
173
+ config = Rdkafka::Config.new(
174
+ "security.protocol" => "SSL",
175
+ "ssl.ca.location" => "/nonsense"
176
+ )
177
+ expect {
178
+ config.producer
179
+ }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
180
+ end
181
+ end
182
+ end
@@ -0,0 +1,139 @@
1
+ require "spec_helper"
2
+
3
+ describe Rdkafka::Consumer::Message do
4
+ let(:native_client) { new_native_client }
5
+ let(:native_topic) { new_native_topic(native_client: native_client) }
6
+ let(:payload) { nil }
7
+ let(:key) { nil }
8
+ let(:native_message) do
9
+ Rdkafka::Bindings::Message.new.tap do |message|
10
+ message[:rkt] = native_topic
11
+ message[:partition] = 3
12
+ message[:offset] = 100
13
+ if payload
14
+ ptr = FFI::MemoryPointer.new(:char, payload.bytesize)
15
+ ptr.put_bytes(0, payload)
16
+ message[:payload] = ptr
17
+ message[:len] = payload.bytesize
18
+ end
19
+ if key
20
+ ptr = FFI::MemoryPointer.new(:char, key.bytesize)
21
+ ptr.put_bytes(0, key)
22
+ message[:key] = ptr
23
+ message[:key_len] = key.bytesize
24
+ end
25
+ end
26
+ end
27
+
28
+ after(:each) do
29
+ Rdkafka::Bindings.rd_kafka_destroy(native_client)
30
+ end
31
+
32
+ subject { Rdkafka::Consumer::Message.new(native_message) }
33
+
34
+ before do
35
+ # mock headers, because it produces 'segmentation fault' while setting or reading headers for
36
+ # a message which is created from scratch
37
+ #
38
+ # Code dump example:
39
+ #
40
+ # ```
41
+ # frame #7: 0x000000010dacf5ab librdkafka.dylib`rd_list_destroy + 11
42
+ # frame #8: 0x000000010dae5a7e librdkafka.dylib`rd_kafka_headers_destroy + 14
43
+ # frame #9: 0x000000010da9ab40 librdkafka.dylib`rd_kafka_message_set_headers + 32
44
+ # ```
45
+ expect( Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(any_args) do
46
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
47
+ end
48
+ end
49
+
50
+ it "should have a topic" do
51
+ expect(subject.topic).to eq "topic_name"
52
+ end
53
+
54
+ it "should have a partition" do
55
+ expect(subject.partition).to eq 3
56
+ end
57
+
58
+ context "payload" do
59
+ it "should have a nil payload when none is present" do
60
+ expect(subject.payload).to be_nil
61
+ end
62
+
63
+ context "present payload" do
64
+ let(:payload) { "payload content" }
65
+
66
+ it "should have a payload" do
67
+ expect(subject.payload).to eq "payload content"
68
+ end
69
+ end
70
+ end
71
+
72
+ context "key" do
73
+ it "should have a nil key when none is present" do
74
+ expect(subject.key).to be_nil
75
+ end
76
+
77
+ context "present key" do
78
+ let(:key) { "key content" }
79
+
80
+ it "should have a key" do
81
+ expect(subject.key).to eq "key content"
82
+ end
83
+ end
84
+ end
85
+
86
+ it "should have an offset" do
87
+ expect(subject.offset).to eq 100
88
+ end
89
+
90
+ describe "#timestamp" do
91
+ context "without a timestamp" do
92
+ before do
93
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_message_timestamp).and_return(-1)
94
+ end
95
+
96
+ it "should have a nil timestamp if not present" do
97
+ expect(subject.timestamp).to be_nil
98
+ end
99
+ end
100
+
101
+ context "with a timestamp" do
102
+ before do
103
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_message_timestamp).and_return(1505069646250)
104
+ end
105
+
106
+ it "should have timestamp if present" do
107
+ expect(subject.timestamp).to eq Time.at(1505069646, 250_000)
108
+ end
109
+ end
110
+ end
111
+
112
+ describe "#to_s" do
113
+ before do
114
+ allow(subject).to receive(:timestamp).and_return(1000)
115
+ end
116
+
117
+ it "should have a human readable representation" do
118
+ expect(subject.to_s).to eq "<Message in 'topic_name' with key '', payload '', partition 3, offset 100, timestamp 1000>"
119
+ end
120
+
121
+ context "with key and payload" do
122
+ let(:key) { "key" }
123
+ let(:payload) { "payload" }
124
+
125
+ it "should have a human readable representation" do
126
+ expect(subject.to_s).to eq "<Message in 'topic_name' with key 'key', payload 'payload', partition 3, offset 100, timestamp 1000>"
127
+ end
128
+ end
129
+
130
+ context "with a very long key and payload" do
131
+ let(:key) { "k" * 100_000 }
132
+ let(:payload) { "p" * 100_000 }
133
+
134
+ it "should have a human readable representation" do
135
+ expect(subject.to_s).to eq "<Message in 'topic_name' with key 'kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk...', payload 'pppppppppppppppppppppppppppppppppppppppp...', partition 3, offset 100, timestamp 1000>"
136
+ end
137
+ end
138
+ end
139
+ end