karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +3 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
- data/.github/workflows/ci_macos_arm64.yml +301 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
- data/.github/workflows/push_ruby.yml +37 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +323 -0
- data/Gemfile +5 -0
- data/MIT-LICENSE +22 -0
- data/README.md +177 -0
- data/Rakefile +96 -0
- data/docker-compose.yml +25 -0
- data/ext/README.md +19 -0
- data/ext/Rakefile +131 -0
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/ext/librdkafka.so +0 -0
- data/karafka-rdkafka.gemspec +61 -0
- data/lib/rdkafka/abstract_handle.rb +116 -0
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/create_topic_report.rb +24 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +832 -0
- data/lib/rdkafka/bindings.rb +582 -0
- data/lib/rdkafka/callbacks.rb +415 -0
- data/lib/rdkafka/config.rb +398 -0
- data/lib/rdkafka/consumer/headers.rb +79 -0
- data/lib/rdkafka/consumer/message.rb +86 -0
- data/lib/rdkafka/consumer/partition.rb +57 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
- data/lib/rdkafka/consumer.rb +663 -0
- data/lib/rdkafka/error.rb +201 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +115 -0
- data/lib/rdkafka/native_kafka.rb +139 -0
- data/lib/rdkafka/producer/delivery_handle.rb +48 -0
- data/lib/rdkafka/producer/delivery_report.rb +45 -0
- data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
- data/lib/rdkafka/producer.rb +492 -0
- data/lib/rdkafka/version.rb +7 -0
- data/lib/rdkafka.rb +54 -0
- data/renovate.json +92 -0
- data/spec/rdkafka/abstract_handle_spec.rb +117 -0
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +769 -0
- data/spec/rdkafka/bindings_spec.rb +222 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +258 -0
- data/spec/rdkafka/consumer/headers_spec.rb +73 -0
- data/spec/rdkafka/consumer/message_spec.rb +139 -0
- data/spec/rdkafka/consumer/partition_spec.rb +57 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
- data/spec/rdkafka/consumer_spec.rb +1299 -0
- data/spec/rdkafka/error_spec.rb +95 -0
- data/spec/rdkafka/metadata_spec.rb +79 -0
- data/spec/rdkafka/native_kafka_spec.rb +130 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
- data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
- data/spec/rdkafka/producer_spec.rb +1234 -0
- data/spec/spec_helper.rb +181 -0
- metadata +244 -0
--- /dev/null
+++ b/data/spec/rdkafka/bindings_spec.rb
@@ -0,0 +1,222 @@
+# frozen_string_literal: true
+
+require 'zlib'
+
+describe Rdkafka::Bindings do
+  it "should load librdkafka" do
+    expect(Rdkafka::Bindings.ffi_libraries.map(&:name).first).to include "librdkafka"
+  end
+
+  describe ".lib_extension" do
+    it "should know the lib extension for darwin" do
+      stub_const('RbConfig::CONFIG', 'host_os' => 'darwin')
+      expect(Rdkafka::Bindings.lib_extension).to eq "dylib"
+    end
+
+    it "should know the lib extension for linux" do
+      stub_const('RbConfig::CONFIG', 'host_os' => 'linux')
+      expect(Rdkafka::Bindings.lib_extension).to eq "so"
+    end
+  end
+
+  it "should successfully call librdkafka" do
+    expect {
+      Rdkafka::Bindings.rd_kafka_conf_new
+    }.not_to raise_error
+  end
+
+  describe "log callback" do
+    let(:log_queue) { Rdkafka::Config.log_queue }
+    before do
+      allow(log_queue).to receive(:<<)
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 0, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log fatal messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
+    end
+
+    it "should log error messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
+    end
+
+    it "should log warning messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 4, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::WARN, "rdkafka: log line"])
+    end
+
+    it "should log info messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 5, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+    end
+
+    it "should log info messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
+    end
+
+    it "should log debug messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
+    end
+
+    it "should log unknown messages" do
+      Rdkafka::Bindings::LogCallback.call(nil, 100, nil, "log line")
+      expect(log_queue).to have_received(:<<).with([Logger::UNKNOWN, "rdkafka: log line"])
+    end
+  end
+
+  describe "partitioner" do
+    let(:partition_key) { ('a'..'z').to_a.shuffle.take(15).join('') }
+    let(:partition_count) { rand(50) + 1 }
+
+    it "should return the same partition for a similar string and the same partition count" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      expect(result_1).to eq(result_2)
+    end
+
+    it "should match the old partitioner" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count)
+      result_2 = (Zlib.crc32(partition_key) % partition_count)
+      expect(result_1).to eq(result_2)
+    end
+
+    it "should return the partition calculated by the specified partitioner" do
+      result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count, "murmur2")
+      ptr = FFI::MemoryPointer.from_string(partition_key)
+      result_2 = Rdkafka::Bindings.rd_kafka_msg_partitioner_murmur2(nil, ptr, partition_key.size, partition_count, nil, nil)
+      expect(result_1).to eq(result_2)
+    end
+  end
+
+  describe "stats callback" do
+    context "without a stats callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::StatsCallback.call(nil, "{}", 2, nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with a stats callback" do
+      before do
+        Rdkafka::Config.statistics_callback = lambda do |stats|
+          $received_stats = stats
+        end
+      end
+
+      it "should call the stats callback with a stats hash" do
+        Rdkafka::Bindings::StatsCallback.call(nil, "{\"received\":1}", 13, nil)
+        expect($received_stats).to eq({'received' => 1})
+      end
+    end
+  end
+
+  describe "error callback" do
+    context "without an error callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::ErrorCallback.call(nil, 1, "error", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an error callback" do
+      before do
+        Rdkafka::Config.error_callback = lambda do |error|
+          $received_error = error
+        end
+      end
+
+      it "should call the error callback with an Rdkafka::Error" do
+        Rdkafka::Bindings::ErrorCallback.call(nil, 8, "Broker not available", nil)
+        expect($received_error.code).to eq(:broker_not_available)
+        expect($received_error.broker_message).to eq("Broker not available")
+      end
+    end
+  end
+
+  describe "oauthbearer set token" do
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i * 1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+
+    context "without args" do
+
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
+end
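The log-callback specs above pin down how librdkafka's syslog-style severities (0 through 7, with anything out of range treated as unknown) map onto Ruby `Logger` levels. As a reading aid, here is a minimal standalone sketch of that mapping; the method name is invented for illustration and is not part of the gem:

```ruby
require 'logger'

# Sketch of the severity mapping the specs above assert: librdkafka levels
# 0..2 collapse to FATAL, 5..6 to INFO, and out-of-range values to UNKNOWN.
def logger_severity_for(syslog_level)
  case syslog_level
  when 0..2 then Logger::FATAL # emerg / alert / crit
  when 3    then Logger::ERROR
  when 4    then Logger::WARN
  when 5..6 then Logger::INFO  # notice / info
  when 7    then Logger::DEBUG
  else           Logger::UNKNOWN
  end
end

logger_severity_for(4)   # => Logger::WARN
logger_severity_for(100) # => Logger::UNKNOWN
```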
--- /dev/null
+++ b/data/spec/rdkafka/callbacks_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+describe Rdkafka::Callbacks do
+
+  # The code in the call back functions is 100% covered by other specs. Due to
+  # the large number of collaborators, and the fact that FFI does not play
+  # nicely with doubles, it was very difficult to construct tests that were
+  # not over-mocked.
+
+  # For debugging purposes, if you suspect that you are running into trouble in
+  # one of the callback functions, it may be helpful to surround the inner body
+  # of the method with something like:
+  #
+  #   begin
+  #     <method body>
+  #   rescue => ex; puts ex.inspect; puts ex.backtrace; end;
+  #
+  # This will output to STDOUT any exceptions that are being raised in the callback.
+
+end
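The comment block in `callbacks_spec.rb` above describes a temporary debugging wrapper for callback bodies. Spelled out as runnable Ruby, with `callback_body` standing in for the real inner body of a callback (a placeholder, not a method from the gem):

```ruby
# Exceptions raised inside an FFI callback can vanish silently, so the
# comment above suggests printing them before they are lost.
def callback_body
  raise "boom" # placeholder for the real callback logic
end

begin
  callback_body
rescue => ex
  puts ex.inspect
  puts ex.backtrace
end
```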
--- /dev/null
+++ b/data/spec/rdkafka/config_spec.rb
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+describe Rdkafka::Config do
+  context "logger" do
+    it "should have a default logger" do
+      expect(Rdkafka::Config.logger).to be_a Logger
+    end
+
+    it "should set the logger" do
+      logger = Logger.new(STDOUT)
+      expect(Rdkafka::Config.logger).not_to eq logger
+      Rdkafka::Config.logger = logger
+      expect(Rdkafka::Config.logger).to eq logger
+    end
+
+    it "should not accept a nil logger" do
+      expect {
+        Rdkafka::Config.logger = nil
+      }.to raise_error(Rdkafka::Config::NoLoggerError)
+    end
+
+    it "supports logging queue" do
+      log = StringIO.new
+      Rdkafka::Config.logger = Logger.new(log)
+      Rdkafka::Config.ensure_log_thread
+
+      Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
+      20.times do
+        break if log.string != ""
+        sleep 0.05
+      end
+
+      expect(log.string).to include "FATAL -- : I love testing"
+    end
+
+    unless RUBY_PLATFORM == 'java'
+      it "expect to start new logger thread after fork and work" do
+        reader, writer = IO.pipe
+
+        pid = fork do
+          $stdout.reopen(writer)
+          Rdkafka::Config.logger = Logger.new($stdout)
+          reader.close
+          producer = rdkafka_producer_config(debug: 'all').producer
+          producer.close
+          writer.close
+          sleep(1)
+        end
+
+        writer.close
+        Process.wait(pid)
+        output = reader.read
+        expect(output.split("\n").size).to be >= 20
+      end
+    end
+  end
+
+  context "statistics callback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.statistics_callback = lambda do |stats|
+            puts stats
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.statistics_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(stats); end
+        end
+        expect {
+          Rdkafka::Config.statistics_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.statistics_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.statistics_callback = 'a string'
+      }.to raise_error(TypeError)
+    end
+  end
+
+  context "error callback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.error_callback = lambda do |error|
+            puts error
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.error_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(stats); end
+        end
+        expect {
+          Rdkafka::Config.error_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.error_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.error_callback = 'a string'
+      }.to raise_error(TypeError)
+    end
+  end
+
+  context "oauthbearer callback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
+  context "configuration" do
+    it "should store configuration" do
+      config = Rdkafka::Config.new
+      config[:"key"] = 'value'
+      expect(config[:"key"]).to eq 'value'
+    end
+
+    it "should use default configuration" do
+      config = Rdkafka::Config.new
+      expect(config[:"api.version.request"]).to eq true
+    end
+
+    it "should create a consumer with valid config" do
+      consumer = rdkafka_consumer_config.consumer
+      expect(consumer).to be_a Rdkafka::Consumer
+      consumer.close
+    end
+
+    it "should create a consumer with consumer_poll_set set to false" do
+      config = rdkafka_consumer_config
+      config.consumer_poll_set = false
+      consumer = config.consumer
+      expect(consumer).to be_a Rdkafka::Consumer
+      consumer.close
+    end
+
+    it "should raise an error when creating a consumer with invalid config" do
+      config = Rdkafka::Config.new('invalid.key' => 'value')
+      expect {
+        config.consumer
+      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+    end
+
+    it "should raise an error when creating a consumer with a nil key in the config" do
+      config = Rdkafka::Config.new(nil => 'value')
+      expect {
+        config.consumer
+      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"\"")
+    end
+
+    it "should treat a nil value as blank" do
+      config = Rdkafka::Config.new('security.protocol' => nil)
+      expect {
+        config.consumer
+        config.producer
+      }.to raise_error(Rdkafka::Config::ConfigError, "Configuration property \"security.protocol\" cannot be set to empty value")
+    end
+
+    it "should create a producer with valid config" do
+      producer = rdkafka_consumer_config.producer
+      expect(producer).to be_a Rdkafka::Producer
+      producer.close
+    end
+
+    it "should raise an error when creating a producer with invalid config" do
+      config = Rdkafka::Config.new('invalid.key' => 'value')
+      expect {
+        config.producer
+      }.to raise_error(Rdkafka::Config::ConfigError, "No such configuration property: \"invalid.key\"")
+    end
+
+    it "allows string partitioner key" do
+      expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+      config = Rdkafka::Config.new("partitioner" => "murmur2")
+      config.producer.close
+    end
+
+    it "allows symbol partitioner key" do
+      expect(Rdkafka::Producer).to receive(:new).with(kind_of(Rdkafka::NativeKafka), "murmur2").and_call_original
+      config = Rdkafka::Config.new(:partitioner => "murmur2")
+      config.producer.close
+    end
+
+    it "should allow configuring zstd compression" do
+      config = Rdkafka::Config.new('compression.codec' => 'zstd')
+      begin
+        producer = config.producer
+        expect(producer).to be_a Rdkafka::Producer
+        producer.close
+      rescue Rdkafka::Config::ConfigError => ex
+        pending "Zstd compression not supported on this machine"
+        raise ex
+      end
+    end
+
+    it "should raise an error when client creation fails for a consumer" do
+      config = Rdkafka::Config.new(
+        "security.protocol" => "SSL",
+        "ssl.ca.location" => "/nonsense"
+      )
+      expect {
+        config.consumer
+      }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
+    end
+
+    it "should raise an error when client creation fails for a producer" do
+      config = Rdkafka::Config.new(
+        "security.protocol" => "SSL",
+        "ssl.ca.location" => "/nonsense"
+      )
+      expect {
+        config.producer
+      }.to raise_error(Rdkafka::Config::ClientCreationError, /ssl.ca.location failed(.*)/)
+    end
+  end
+end
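Taken together, these specs document the `Rdkafka::Config` surface: hash-style construction, a `partitioner` setting accepted under either a String or a Symbol key, and `ConfigError` or `ClientCreationError` on bad input. A hedged usage sketch follows; the broker address is an assumption for illustration, not something the specs configure:

```ruby
require 'rdkafka'

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092", # assumed local broker
  "partitioner"       => "murmur2"         # String and Symbol keys both pass, per the specs
)

begin
  producer = config.producer # may raise ClientCreationError if the client cannot be built
  producer.close
rescue Rdkafka::Config::ConfigError => e
  puts "Bad configuration: #{e.message}" # e.g. 'No such configuration property: ...'
end
```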
--- /dev/null
+++ b/data/spec/rdkafka/consumer/headers_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+describe Rdkafka::Consumer::Headers do
+  let(:headers) do
+    { # Note String keys!
+      "version" => ["2.1.3", "2.1.4"],
+      "type" => "String"
+    }
+  end
+  let(:native_message) { double('native message') }
+  let(:headers_ptr) { double('headers pointer') }
+
+  describe '.from_native' do
+    before do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
+        expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      # First version header
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+            expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: "version"))
+            expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][0].size)
+            expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers["version"][0]))
+            Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+          end
+
+      # Second version header
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+            expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: "version"))
+            expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][1].size)
+            expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers["version"][1]))
+            Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+          end
+
+      # Single type header
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 2, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+            expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 2", read_string_to_null: "type"))
+            expect(size_ptr).to receive(:[]).with(:value).and_return(headers["type"].size)
+            expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 2", read_string: headers["type"]))
+            Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+          end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 3, anything, anything, anything)
+          .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
+    end
+
+    subject { described_class.from_native(native_message) }
+
+    it { is_expected.to eq(headers) }
+    it { is_expected.to be_frozen }
+
+    it 'returns array for duplicate headers' do
+      expect(subject['version']).to eq(["2.1.3", "2.1.4"])
+    end
+
+    it 'returns string for single headers' do
+      expect(subject['type']).to eq("String")
+    end
+
+    it 'does not support symbols mappings' do
+      expect(subject.key?(:version)).to eq(false)
+    end
+  end
+end
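These specs fix the shape that `Rdkafka::Consumer::Headers.from_native` returns: a frozen Hash keyed by String, where a header name seen once maps to a plain String and a repeated name maps to an Array of values. Sketched as a plain literal (illustrative data, not gem code):

```ruby
headers = {
  "version" => ["2.1.3", "2.1.4"], # repeated header name => Array of values
  "type"    => "String"            # single occurrence => plain String
}.freeze

headers["version"]     # => ["2.1.3", "2.1.4"]
headers["type"]        # => "String"
headers.key?(:version) # => false; String keys only, no Symbol access
```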