karafka-rdkafka 0.20.0.rc3-arm64-darwin
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +3 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
- data/.github/workflows/ci_macos_arm64.yml +301 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
- data/.github/workflows/push_macos_arm64.yml +50 -0
- data/.github/workflows/push_ruby.yml +37 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +323 -0
- data/Gemfile +5 -0
- data/MIT-LICENSE +22 -0
- data/README.md +177 -0
- data/Rakefile +96 -0
- data/docker-compose.yml +25 -0
- data/ext/README.md +19 -0
- data/ext/Rakefile +131 -0
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/ext/librdkafka.dylib +0 -0
- data/karafka-rdkafka.gemspec +63 -0
- data/lib/rdkafka/abstract_handle.rb +116 -0
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/create_topic_report.rb +24 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +832 -0
- data/lib/rdkafka/bindings.rb +582 -0
- data/lib/rdkafka/callbacks.rb +415 -0
- data/lib/rdkafka/config.rb +398 -0
- data/lib/rdkafka/consumer/headers.rb +79 -0
- data/lib/rdkafka/consumer/message.rb +86 -0
- data/lib/rdkafka/consumer/partition.rb +57 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
- data/lib/rdkafka/consumer.rb +663 -0
- data/lib/rdkafka/error.rb +201 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +115 -0
- data/lib/rdkafka/native_kafka.rb +139 -0
- data/lib/rdkafka/producer/delivery_handle.rb +48 -0
- data/lib/rdkafka/producer/delivery_report.rb +45 -0
- data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
- data/lib/rdkafka/producer.rb +492 -0
- data/lib/rdkafka/version.rb +7 -0
- data/lib/rdkafka.rb +54 -0
- data/renovate.json +92 -0
- data/spec/rdkafka/abstract_handle_spec.rb +117 -0
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +769 -0
- data/spec/rdkafka/bindings_spec.rb +222 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +258 -0
- data/spec/rdkafka/consumer/headers_spec.rb +73 -0
- data/spec/rdkafka/consumer/message_spec.rb +139 -0
- data/spec/rdkafka/consumer/partition_spec.rb +57 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
- data/spec/rdkafka/consumer_spec.rb +1299 -0
- data/spec/rdkafka/error_spec.rb +95 -0
- data/spec/rdkafka/metadata_spec.rb +79 -0
- data/spec/rdkafka/native_kafka_spec.rb +130 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
- data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
- data/spec/rdkafka/producer_spec.rb +1234 -0
- data/spec/spec_helper.rb +181 -0
- metadata +273 -0
data/spec/rdkafka/admin_spec.rb
@@ -0,0 +1,769 @@
# frozen_string_literal: true

require "ostruct"

describe Rdkafka::Admin do
  let(:config) { rdkafka_config }
  let(:admin) { config.admin }

  after do
    # Registry should always end up being empty
    expect(Rdkafka::Admin::CreateTopicHandle::REGISTRY).to be_empty
    expect(Rdkafka::Admin::CreatePartitionsHandle::REGISTRY).to be_empty
    expect(Rdkafka::Admin::DescribeAclHandle::REGISTRY).to be_empty
    expect(Rdkafka::Admin::CreateAclHandle::REGISTRY).to be_empty
    expect(Rdkafka::Admin::DeleteAclHandle::REGISTRY).to be_empty
    admin.close
  end

  let(:topic_name) { "test-topic-#{SecureRandom.uuid}" }
  let(:topic_partition_count) { 3 }
  let(:topic_replication_factor) { 1 }
  let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
  let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
  let(:group_name) { "test-group-#{SecureRandom.uuid}" }

  let(:resource_name) {"acl-test-topic"}
  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
  let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
  let(:principal) {"User:anonymous"}
  let(:host) {"*"}
  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}

  describe '#describe_errors' do
    let(:errors) { admin.class.describe_errors }

    it { expect(errors.size).to eq(170) }
    it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
    it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
  end

  describe 'admin without auto-start' do
    let(:admin) { config.admin(native_kafka_auto_start: false) }

    it 'expect to be able to start it later and close' do
      admin.start
      admin.close
    end

    it 'expect to be able to close it without starting' do
      admin.close
    end
  end

  describe "#create_topic" do
    describe "called with invalid input" do
      describe "with an invalid topic name" do
        # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
        # public static final String LEGAL_CHARS = "[a-zA-Z0-9._-]";
        let(:topic_name) { "[!@#]" }

        it "raises an exception" do
          create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          expect {
            create_topic_handle.wait(max_wait_timeout: 15.0)
          }.to raise_exception { |ex|
            expect(ex).to be_a(Rdkafka::RdkafkaError)
            expect(ex.message).to match(/Broker: Invalid topic \(topic_exception\)/)
            expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or more characters other than ASCII alphanumerics, '.', '_' and '-'/)
          }
        end
      end

      describe "with the name of a topic that already exists" do
        let(:topic_name) { "empty_test_topic" } # created in spec_helper.rb

        it "raises an exception" do
          create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          expect {
            create_topic_handle.wait(max_wait_timeout: 15.0)
          }.to raise_exception { |ex|
            expect(ex).to be_a(Rdkafka::RdkafkaError)
            expect(ex.message).to match(/Broker: Topic already exists \(topic_already_exists\)/)
            expect(ex.broker_message).to match(/Topic 'empty_test_topic' already exists/)
          }
        end
      end

      describe "with an invalid partition count" do
        let(:topic_partition_count) { -999 }

        it "raises an exception" do
          expect {
            admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          }.to raise_error Rdkafka::Config::ConfigError, /num_partitions out of expected range/
        end
      end

      describe "with an invalid replication factor" do
        let(:topic_replication_factor) { -2 }

        it "raises an exception" do
          expect {
            admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          }.to raise_error Rdkafka::Config::ConfigError, /replication_factor out of expected range/
        end
      end

      describe "with an invalid topic configuration" do
        it "doesn't create the topic" do
          create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor, invalid_topic_config)
          expect {
            create_topic_handle.wait(max_wait_timeout: 15.0)
          }.to raise_error Rdkafka::RdkafkaError, /Broker: Configuration is invalid \(invalid_config\)/
        end
      end
    end

    context "edge case" do
      context "where we are unable to get the background queue" do
        before do
          allow(Rdkafka::Bindings).to receive(:rd_kafka_queue_get_background).and_return(FFI::Pointer::NULL)
        end

        it "raises an exception" do
          expect {
            admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          }.to raise_error Rdkafka::Config::ConfigError, /rd_kafka_queue_get_background was NULL/
        end
      end

      context "where rd_kafka_CreateTopics raises an exception" do
        before do
          allow(Rdkafka::Bindings).to receive(:rd_kafka_CreateTopics).and_raise(RuntimeError.new("oops"))
        end

        it "raises an exception" do
          expect {
            admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
          }.to raise_error RuntimeError, /oops/
        end
      end
    end

    it "creates a topic" do
      create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor, topic_config)
      create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
      expect(create_topic_report.error_string).to be_nil
      expect(create_topic_report.result_name).to eq(topic_name)
    end
  end

  describe "describe_configs" do
    subject(:resources_results) { admin.describe_configs(resources).wait.resources }

    before do
      admin.create_topic(topic_name, 2, 1).wait
      sleep(1)
    end

    context 'when describing config of an existing topic' do
      let(:resources) { [{ resource_type: 2, resource_name: topic_name }] }

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq(topic_name)
        expect(resources_results.first.configs.size).to be > 25
        expect(resources_results.first.configs.first.name).to eq('compression.type')
        expect(resources_results.first.configs.first.value).to eq('producer')
        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
      end
    end

    context 'when describing config of a non-existing topic' do
      let(:resources) { [{ resource_type: 2, resource_name: SecureRandom.uuid }] }

      it 'expect to raise error' do
        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
      end
    end

    context 'when describing both existing and non-existing topics' do
      let(:resources) do
        [
          { resource_type: 2, resource_name: topic_name },
          { resource_type: 2, resource_name: SecureRandom.uuid }
        ]
      end

      it 'expect to raise error' do
        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
      end
    end

    context 'when describing multiple existing topics' do
      let(:resources) do
        [
          { resource_type: 2, resource_name: 'example_topic' },
          { resource_type: 2, resource_name: topic_name }
        ]
      end

      it do
        expect(resources_results.size).to eq(2)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq('example_topic')
        expect(resources_results.last.type).to eq(2)
        expect(resources_results.last.name).to eq(topic_name)
      end
    end

    context 'when trying to describe invalid resource type' do
      let(:resources) { [{ resource_type: 0, resource_name: SecureRandom.uuid }] }

      it 'expect to raise error' do
        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_request/)
      end
    end

    context 'when trying to describe invalid broker' do
      let(:resources) { [{ resource_type: 4, resource_name: 'non-existing' }] }

      it 'expect to raise error' do
        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
      end
    end

    context 'when trying to describe valid broker' do
      let(:resources) { [{ resource_type: 4, resource_name: '1' }] }

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(4)
        expect(resources_results.first.name).to eq('1')
        expect(resources_results.first.configs.size).to be > 230
        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
        expect(resources_results.first.configs.first.value).to eq('0')
        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
      end
    end

    context 'when describing valid broker with topics in one request' do
      let(:resources) do
        [
          { resource_type: 4, resource_name: '1' },
          { resource_type: 2, resource_name: topic_name }
        ]
      end

      it do
        expect(resources_results.size).to eq(2)
        expect(resources_results.first.type).to eq(4)
        expect(resources_results.first.name).to eq('1')
        expect(resources_results.first.configs.size).to be > 230
        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
        expect(resources_results.first.configs.first.value).to eq('0')
        expect(resources_results.last.type).to eq(2)
        expect(resources_results.last.name).to eq(topic_name)
        expect(resources_results.last.configs.size).to be > 25
        expect(resources_results.last.configs.first.name).to eq('compression.type')
        expect(resources_results.last.configs.first.value).to eq('producer')
      end
    end
  end

  describe "incremental_alter_configs" do
    subject(:resources_results) { admin.incremental_alter_configs(resources_with_configs).wait.resources }

    before do
      admin.create_topic(topic_name, 2, 1).wait
      sleep(1)
    end

    context 'when altering one topic with one valid config via set' do
      let(:target_retention) { (86400002 + rand(10_000)).to_s }
      let(:resources_with_configs) do
        [
          {
            resource_type: 2,
            resource_name: topic_name,
            configs: [
              {
                name: 'delete.retention.ms',
                value: target_retention,
                op_type: 0
              }
            ]
          }
        ]
      end

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq(topic_name)

        sleep(1)

        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
          config.name == 'delete.retention.ms'
        end

        expect(ret_config.value).to eq(target_retention)
      end
    end

    context 'when altering one topic with one valid config via delete' do
      let(:target_retention) { (8640002 + rand(10_000)).to_s }
      let(:resources_with_configs) do
        [
          {
            resource_type: 2,
            resource_name: topic_name,
            configs: [
              {
                name: 'delete.retention.ms',
                value: target_retention,
                op_type: 1
              }
            ]
          }
        ]
      end

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq(topic_name)

        sleep(1)

        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
          config.name == 'delete.retention.ms'
        end

        expect(ret_config.value).to eq('86400000')
      end
    end

    context 'when altering one topic with one valid config via append' do
      let(:target_policy) { 'compact' }
      let(:resources_with_configs) do
        [
          {
            resource_type: 2,
            resource_name: topic_name,
            configs: [
              {
                name: 'cleanup.policy',
                value: target_policy,
                op_type: 2
              }
            ]
          }
        ]
      end

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq(topic_name)

        sleep(1)

        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
          config.name == 'cleanup.policy'
        end

        expect(ret_config.value).to eq("delete,#{target_policy}")
      end
    end

    context 'when altering one topic with one valid config via subtrack' do
      let(:target_policy) { 'delete' }
      let(:resources_with_configs) do
        [
          {
            resource_type: 2,
            resource_name: topic_name,
            configs: [
              {
                name: 'cleanup.policy',
                value: target_policy,
                op_type: 3
              }
            ]
          }
        ]
      end

      it do
        expect(resources_results.size).to eq(1)
        expect(resources_results.first.type).to eq(2)
        expect(resources_results.first.name).to eq(topic_name)

        sleep(1)

        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
          config.name == 'cleanup.policy'
        end

        expect(ret_config.value).to eq('')
      end
    end

    context 'when altering one topic with invalid config' do
      let(:target_retention) { '-10' }
      let(:resources_with_configs) do
        [
          {
            resource_type: 2,
            resource_name: topic_name,
            configs: [
              {
                name: 'delete.retention.ms',
                value: target_retention,
                op_type: 0
              }
            ]
          }
        ]
      end

      it 'expect to raise error' do
        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_config/)
      end
    end
  end

  describe "#delete_topic" do
    describe "called with invalid input" do
      # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
      # public static final String LEGAL_CHARS = "[a-zA-Z0-9._-]";
      describe "with an invalid topic name" do
        let(:topic_name) { "[!@#]" }

        it "raises an exception" do
          delete_topic_handle = admin.delete_topic(topic_name)
          expect {
            delete_topic_handle.wait(max_wait_timeout: 15.0)
          }.to raise_exception { |ex|
            expect(ex).to be_a(Rdkafka::RdkafkaError)
            expect(ex.message).to match(/Broker: Unknown topic or partition \(unknown_topic_or_part\)/)
            expect(ex.broker_message).to match(/Broker: Unknown topic or partition/)
          }
        end
      end

      describe "with the name of a topic that does not exist" do
        it "raises an exception" do
          delete_topic_handle = admin.delete_topic(topic_name)
          expect {
            delete_topic_handle.wait(max_wait_timeout: 15.0)
          }.to raise_exception { |ex|
            expect(ex).to be_a(Rdkafka::RdkafkaError)
            expect(ex.message).to match(/Broker: Unknown topic or partition \(unknown_topic_or_part\)/)
            expect(ex.broker_message).to match(/Broker: Unknown topic or partition/)
          }
        end
      end
    end

    context "edge case" do
      context "where we are unable to get the background queue" do
        before do
          allow(Rdkafka::Bindings).to receive(:rd_kafka_queue_get_background).and_return(FFI::Pointer::NULL)
        end

        it "raises an exception" do
          expect {
            admin.delete_topic(topic_name)
          }.to raise_error Rdkafka::Config::ConfigError, /rd_kafka_queue_get_background was NULL/
        end
      end

      context "where rd_kafka_DeleteTopics raises an exception" do
        before do
          allow(Rdkafka::Bindings).to receive(:rd_kafka_DeleteTopics).and_raise(RuntimeError.new("oops"))
        end

        it "raises an exception" do
          expect {
            admin.delete_topic(topic_name)
          }.to raise_error RuntimeError, /oops/
        end
      end
    end

    it "deletes a topic that was newly created" do
      create_topic_handle = admin.create_topic(topic_name, topic_partition_count, topic_replication_factor)
      create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
      expect(create_topic_report.error_string).to be_nil
      expect(create_topic_report.result_name).to eq(topic_name)

      # Retry topic deletion a few times. On CI Kafka seems to not
      # always be ready for it immediately
      delete_topic_report = nil
      10.times do |i|
        begin
          delete_topic_handle = admin.delete_topic(topic_name)
          delete_topic_report = delete_topic_handle.wait(max_wait_timeout: 15.0)
          break
        rescue Rdkafka::RdkafkaError => ex
          if i > 3
            raise ex
          end
        end
      end

      expect(delete_topic_report.error_string).to be_nil
      expect(delete_topic_report.result_name).to eq(topic_name)
    end
  end

  describe "#ACL tests" do
    let(:non_existing_resource_name) {"non-existing-topic"}
    before do
      #create topic for testing acl
      create_topic_handle = admin.create_topic(resource_name, topic_partition_count, topic_replication_factor)
      create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
    end

    after do
      #delete acl
      delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
      delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)

      #delete topic that was created for testing acl
      delete_topic_handle = admin.delete_topic(resource_name)
      delete_topic_report = delete_topic_handle.wait(max_wait_timeout: 15.0)
    end

    describe "#create_acl" do
      it "create acl for a topic that does not exist" do
        # acl creation for resources that does not exist will still get created successfully.
        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")

        # delete the acl that was created for a non existing topic"
        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
        expect(delete_acl_handle[:response]).to eq(0)
        expect(delete_acl_report.deleted_acls.size).to eq(1)
      end

      it "creates a acl for topic that was newly created" do
        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")
      end
    end

    describe "#describe_acl" do
      it "describe acl of a topic that does not exist" do
        describe_acl_handle = admin.describe_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
        expect(describe_acl_handle[:response]).to eq(0)
        expect(describe_acl_report.acls.size).to eq(0)
      end

      it "create acls and describe the newly created acls" do
        #create_acl
        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_1", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")

        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_2", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")

        # Since we create and immediately check, this is slow on loaded CIs, hence we wait
        sleep(2)

        #describe_acl
        describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
        describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
        expect(describe_acl_handle[:response]).to eq(0)
        expect(describe_acl_report.acls.length).to eq(2)
      end
    end

    describe "#delete_acl" do
      it "delete acl of a topic that does not exist" do
        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
        expect(delete_acl_handle[:response]).to eq(0)
        expect(delete_acl_report.deleted_acls.size).to eq(0)
      end

      it "create an acl and delete the newly created acl" do
        #create_acl
        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_1", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")

        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_2", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
        expect(create_acl_report.rdkafka_response).to eq(0)
        expect(create_acl_report.rdkafka_response_string).to eq("")

        #delete_acl - resource_name nil - to delete all acls with any resource name and matching all other filters.
        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: nil, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
        expect(delete_acl_handle[:response]).to eq(0)
        expect(delete_acl_report.deleted_acls.length).to eq(2)

      end
    end
  end

  describe('Group tests') do
    describe "#delete_group" do
      describe("with an existing group") do
        let(:consumer_config) { rdkafka_consumer_config('group.id': group_name) }
        let(:producer_config) { rdkafka_producer_config }
        let(:producer) { producer_config.producer }
        let(:consumer) { consumer_config.consumer }

        before do
          # Create a topic, post a message to it, consume it and commit offsets, this will create a group that we can then delete.
          admin.create_topic(topic_name, topic_partition_count, topic_replication_factor).wait(max_wait_timeout: 15.0)

          producer.produce(topic: topic_name, payload: "test", key: "test").wait(max_wait_timeout: 15.0)

          consumer.subscribe(topic_name)
          wait_for_assignment(consumer)
          message = nil

          10.times do
            message ||= consumer.poll(100)
          end

          expect(message).to_not be_nil

          consumer.commit
          consumer.close
        end

        after do
          producer.close
          consumer.close
        end

        it "deletes the group" do
          delete_group_handle = admin.delete_group(group_name)
          report = delete_group_handle.wait(max_wait_timeout: 15.0)

          expect(report.result_name).to eql(group_name)
        end
      end

      describe "called with invalid input" do
        describe "with the name of a group that does not exist" do
          it "raises an exception" do
            delete_group_handle = admin.delete_group(group_name)

            expect {
              delete_group_handle.wait(max_wait_timeout: 15.0)
            }.to raise_exception { |ex|
              expect(ex).to be_a(Rdkafka::RdkafkaError)
              expect(ex.message).to match(/Broker: The group id does not exist \(group_id_not_found\)/)
            }
          end
        end
      end

    end
  end

  describe '#create_partitions' do
    let(:metadata) { admin.metadata(topic_name).topics.first }

    context 'when topic does not exist' do
      it 'expect to fail due to unknown partition' do
        expect { admin.create_partitions(topic_name, 10).wait }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
      end
    end

    context 'when topic already has the desired number of partitions' do
      before { admin.create_topic(topic_name, 2, 1).wait }

      it 'expect not to change number of partitions' do
        expect { admin.create_partitions(topic_name, 2).wait }.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
        expect(metadata[:partition_count]).to eq(2)
      end
    end

    context 'when topic has more than the requested number of partitions' do
      before { admin.create_topic(topic_name, 5, 1).wait }

      it 'expect not to change number of partitions' do
        expect { admin.create_partitions(topic_name, 2).wait }.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
        expect(metadata[:partition_count]).to eq(5)
      end
    end

    context 'when topic has less then desired number of partitions' do
      before do
        admin.create_topic(topic_name, 1, 1).wait
        sleep(1)
      end

      it 'expect to change number of partitions' do
        admin.create_partitions(topic_name, 10).wait
        expect(metadata[:partition_count]).to eq(10)
      end
    end
  end

  describe '#oauthbearer_set_token' do
    context 'when sasl not configured' do
      it 'should return RD_KAFKA_RESP_ERR__STATE' do
        response = admin.oauthbearer_set_token(
          token: "foo",
          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
          principal_name: "kafka-cluster"
        )
        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
      end
    end

    context 'when sasl configured' do
      before do
        config_sasl = rdkafka_config(
          "security.protocol": "sasl_ssl",
          "sasl.mechanisms": 'OAUTHBEARER'
        )
        $admin_sasl = config_sasl.admin
      end

      after do
        $admin_sasl.close
      end

      it 'should succeed' do

        response = $admin_sasl.oauthbearer_set_token(
          token: "foo",
          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
          principal_name: "kafka-cluster"
        )
        expect(response).to eq(0)
      end
    end
  end

  unless RUBY_PLATFORM == 'java'
    context "when operating from a fork" do
      # @see https://github.com/ffi/ffi/issues/1114
      it 'expect to be able to create topics and run other admin operations without hanging' do
        # If the FFI issue is not mitigated, this will hang forever
        pid = fork do
          admin
            .create_topic(topic_name, topic_partition_count, topic_replication_factor)
            .wait
        end

        Process.wait(pid)
      end
    end
  end
end