karafka-rdkafka 0.15.0 → 0.15.1.rc1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/CHANGELOG.md +5 -6
- data/lib/rdkafka/admin.rb +44 -0
- data/lib/rdkafka/bindings.rb +9 -0
- data/lib/rdkafka/producer.rb +94 -4
- data/lib/rdkafka/version.rb +1 -1
- data/spec/rdkafka/admin_spec.rb +8 -0
- data/spec/rdkafka/producer_spec.rb +42 -0
- data.tar.gz.sig +0 -0
- metadata +2 -2
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 6c9fbe378b3d1bdf4f72d9bdb1339757bb52145c5865aa54326c0b93b113d3cd
|
4
|
+
data.tar.gz: f5db0b05352eac209bcf6c4bac756200bdaf822866c96bc1c0a7bd66eebb326e
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 98cd5043f6a9573aeecb92123ed9c4b522a946aeea6b3d7fe71974092f1f036ffe983af27d1b67234436071e88698f45f0d15b6204189d15dfe1edb6dae34094
|
7
|
+
data.tar.gz: 60306bfb7d9d367ea0bb0804d801e0b34b1e8b887033355d22ecb56170319a418aed4d258ab53b406f28eb26b8f0905f1458bb99ee35b7e48ac32e99f08b95ad
|
checksums.yaml.gz.sig
CHANGED
Binary file
|
data/CHANGELOG.md
CHANGED
@@ -1,15 +1,14 @@
|
|
1
1
|
# Rdkafka Changelog
|
2
2
|
|
3
|
+
## 0.15.1 (Unreleased)
|
4
|
+
- **[Feature]** Provide ability to use topic config on a producer for custom behaviors per dispatch.
|
5
|
+
- [Enhancement] Use a topic config reference cache for message production to prevent topic object allocation with each message.
|
6
|
+
- [Enhancement] Provide `Rdkafka::Admin#describe_errors` to get error descriptions (mensfeld)
|
7
|
+
|
3
8
|
## 0.15.0 (2024-04-26)
|
4
9
|
- **[Feature]** Oauthbearer token refresh callback (bruce-szalwinski-he)
|
5
10
|
- **[Feature]** Support incremental config describe + alter API (mensfeld)
|
6
11
|
- [Enhancement] name polling Thread as `rdkafka.native_kafka#<name>` (nijikon)
|
7
|
-
- [Enhancement] Replace time poll based wait engine with an event based to improve response times on blocking operations and wait (nijikon + mensfeld)
|
8
|
-
- [Enhancement] Allow for usage of the second regex engine of librdkafka by setting `RDKAFKA_DISABLE_REGEX_EXT` during build (mensfeld)
|
9
|
-
- [Enhancement] name polling Thread as `rdkafka.native_kafka#<name>` (nijikon)
|
10
|
-
- [Change] Allow for native kafka thread operations deferring and manual start for consumer, producer and admin.
|
11
|
-
- [Change] The `wait_timeout` argument in `AbstractHandle.wait` method is deprecated and will be removed in future versions without replacement. We don't rely on it's value anymore (nijikon)
|
12
|
-
- [Fix] Fix bogus case/when syntax. Levels 1, 2, and 6 previously defaulted to UNKNOWN (jjowdy)
|
13
12
|
|
14
13
|
## 0.14.10 (2024-02-08)
|
15
14
|
- [Fix] Background logger stops working after forking causing memory leaks (mensfeld).
|
data/lib/rdkafka/admin.rb
CHANGED
@@ -4,6 +4,50 @@ module Rdkafka
|
|
4
4
|
class Admin
|
5
5
|
include Helpers::OAuth
|
6
6
|
|
7
|
+
class << self
|
8
|
+
# Allows us to retrieve librdkafka errors with descriptions
|
9
|
+
# Useful for debugging and building UIs, etc.
|
10
|
+
#
|
11
|
+
# @return [Hash<Integer, Hash>] hash with errors mapped by code
|
12
|
+
def describe_errors
|
13
|
+
# Memory pointers for the array of structures and count
|
14
|
+
p_error_descs = FFI::MemoryPointer.new(:pointer)
|
15
|
+
p_count = FFI::MemoryPointer.new(:size_t)
|
16
|
+
|
17
|
+
# Call the attached function
|
18
|
+
Bindings.rd_kafka_get_err_descs(p_error_descs, p_count)
|
19
|
+
|
20
|
+
# Retrieve the number of items in the array
|
21
|
+
count = p_count.read_uint
|
22
|
+
|
23
|
+
# Get the pointer to the array of error descriptions
|
24
|
+
array_of_errors = FFI::Pointer.new(Bindings::NativeErrorDesc, p_error_descs.read_pointer)
|
25
|
+
|
26
|
+
errors = {}
|
27
|
+
|
28
|
+
count.times do |i|
|
29
|
+
# Get the pointer to each struct
|
30
|
+
error_ptr = array_of_errors[i]
|
31
|
+
|
32
|
+
# Create a new instance of NativeErrorDesc for each item
|
33
|
+
error_desc = Bindings::NativeErrorDesc.new(error_ptr)
|
34
|
+
|
35
|
+
# Read values from the struct
|
36
|
+
code = error_desc[:code]
|
37
|
+
|
38
|
+
name = ''
|
39
|
+
desc = ''
|
40
|
+
|
41
|
+
name = error_desc[:name].read_string unless error_desc[:name].null?
|
42
|
+
desc = error_desc[:desc].read_string unless error_desc[:desc].null?
|
43
|
+
|
44
|
+
errors[code] = { code: code, name: name, description: desc }
|
45
|
+
end
|
46
|
+
|
47
|
+
errors
|
48
|
+
end
|
49
|
+
end
|
50
|
+
|
7
51
|
# @private
|
8
52
|
def initialize(native_kafka)
|
9
53
|
@native_kafka = native_kafka
|
data/lib/rdkafka/bindings.rb
CHANGED
@@ -141,6 +141,11 @@ module Rdkafka
|
|
141
141
|
RD_KAFKA_ALTER_CONFIG_OP_TYPE_SUBTRACT = 3
|
142
142
|
|
143
143
|
# Errors
|
144
|
+
class NativeErrorDesc < FFI::Struct
|
145
|
+
layout :code, :int,
|
146
|
+
:name, :pointer,
|
147
|
+
:desc, :pointer
|
148
|
+
end
|
144
149
|
|
145
150
|
attach_function :rd_kafka_err2name, [:int], :string
|
146
151
|
attach_function :rd_kafka_err2str, [:int], :string
|
@@ -149,6 +154,7 @@ module Rdkafka
|
|
149
154
|
attach_function :rd_kafka_error_txn_requires_abort, [:pointer], :int
|
150
155
|
attach_function :rd_kafka_error_destroy, [:pointer], :void
|
151
156
|
attach_function :rd_kafka_error_code, [:pointer], :int
|
157
|
+
attach_function :rd_kafka_get_err_descs, [:pointer, :pointer], :void
|
152
158
|
|
153
159
|
# Configuration
|
154
160
|
|
@@ -175,6 +181,9 @@ module Rdkafka
|
|
175
181
|
# Log queue
|
176
182
|
attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
|
177
183
|
attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
|
184
|
+
# Per topic configs
|
185
|
+
attach_function :rd_kafka_topic_conf_new, [], :pointer
|
186
|
+
attach_function :rd_kafka_topic_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response
|
178
187
|
|
179
188
|
LogCallback = FFI::Function.new(
|
180
189
|
:void, [:pointer, :int, :string, :string]
|
data/lib/rdkafka/producer.rb
CHANGED
@@ -9,7 +9,15 @@ module Rdkafka
|
|
9
9
|
# Cache partitions count for 30 seconds
|
10
10
|
PARTITIONS_COUNT_TTL = 30
|
11
11
|
|
12
|
-
|
12
|
+
# Empty hash used as a default
|
13
|
+
EMPTY_HASH = {}.freeze
|
14
|
+
|
15
|
+
private_constant :PARTITIONS_COUNT_TTL, :EMPTY_HASH
|
16
|
+
|
17
|
+
# Raised when there was a critical issue when invoking rd_kafka_topic_new
|
18
|
+
# This is a temporary solution until https://github.com/karafka/rdkafka-ruby/issues/451 is
|
19
|
+
# resolved and this is normalized in all the places
|
20
|
+
class TopicHandleCreationError < RuntimeError; end
|
13
21
|
|
14
22
|
# @private
|
15
23
|
# Returns the current delivery callback, by default this is nil.
|
@@ -28,6 +36,8 @@ module Rdkafka
|
|
28
36
|
# @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
|
29
37
|
# the "consistent_random" default
|
30
38
|
def initialize(native_kafka, partitioner_name)
|
39
|
+
@topics_refs_map = {}
|
40
|
+
@topics_configs = {}
|
31
41
|
@native_kafka = native_kafka
|
32
42
|
@partitioner_name = partitioner_name || "consistent_random"
|
33
43
|
|
@@ -54,6 +64,52 @@ module Rdkafka
|
|
54
64
|
end
|
55
65
|
end
|
56
66
|
|
67
|
+
# Sets alternative set of configuration details that can be set per topic
|
68
|
+
# @note It is not allowed to re-set the same topic config twice because of the underlying
|
69
|
+
# librdkafka caching
|
70
|
+
# @param topic [String] The topic name
|
71
|
+
# @param config [Hash] config we want to use per topic basis
|
72
|
+
# @param config_hash [Integer] hash of the config. We expect it here instead of computing it,
|
73
|
+
# because it is already computed during the retrieval attempt in the `#produce` flow.
|
74
|
+
def set_topic_config(topic, config, config_hash)
|
75
|
+
# Ensure lock on topic reference just in case
|
76
|
+
@native_kafka.with_inner do |inner|
|
77
|
+
@topics_refs_map[topic] ||= {}
|
78
|
+
@topics_configs[topic] ||= {}
|
79
|
+
|
80
|
+
return if @topics_configs[topic].key?(config_hash)
|
81
|
+
|
82
|
+
# If config is empty, we create an empty reference that will be used with defaults
|
83
|
+
rd_topic_config = if config.empty?
|
84
|
+
nil
|
85
|
+
else
|
86
|
+
Rdkafka::Bindings.rd_kafka_topic_conf_new.tap do |topic_config|
|
87
|
+
config.each do |key, value|
|
88
|
+
error_buffer = FFI::MemoryPointer.new(:char, 256)
|
89
|
+
result = Rdkafka::Bindings.rd_kafka_topic_conf_set(
|
90
|
+
topic_config,
|
91
|
+
key.to_s,
|
92
|
+
value.to_s,
|
93
|
+
error_buffer,
|
94
|
+
256
|
95
|
+
)
|
96
|
+
|
97
|
+
unless result == :config_ok
|
98
|
+
raise Config::ConfigError.new(error_buffer.read_string)
|
99
|
+
end
|
100
|
+
end
|
101
|
+
end
|
102
|
+
end
|
103
|
+
|
104
|
+
topic_handle = Bindings.rd_kafka_topic_new(inner, topic, rd_topic_config)
|
105
|
+
|
106
|
+
raise TopicHandleCreationError.new("Error creating topic handle for topic #{topic}") if topic_handle.null?
|
107
|
+
|
108
|
+
@topics_configs[topic][config_hash] = config
|
109
|
+
@topics_refs_map[topic][config_hash] = topic_handle
|
110
|
+
end
|
111
|
+
end
|
112
|
+
|
57
113
|
# Starts the native Kafka polling thread and kicks off the init polling
|
58
114
|
# @note Not needed to run unless explicit start was disabled
|
59
115
|
def start
|
@@ -151,7 +207,18 @@ module Rdkafka
|
|
151
207
|
def close
|
152
208
|
return if closed?
|
153
209
|
ObjectSpace.undefine_finalizer(self)
|
154
|
-
|
210
|
+
|
211
|
+
@native_kafka.close do
|
212
|
+
# We need to remove the topics references objects before we destroy the producer,
|
213
|
+
# otherwise they would leak out
|
214
|
+
@topics_refs_map.each_value do |refs|
|
215
|
+
refs.each_value do |ref|
|
216
|
+
Rdkafka::Bindings.rd_kafka_topic_destroy(ref)
|
217
|
+
end
|
218
|
+
end
|
219
|
+
end
|
220
|
+
|
221
|
+
@topics_refs_map.clear
|
155
222
|
end
|
156
223
|
|
157
224
|
# Whether this producer has closed
|
@@ -244,11 +311,22 @@ module Rdkafka
|
|
244
311
|
# @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
|
245
312
|
# @param headers [Hash<String,String>] Optional message headers
|
246
313
|
# @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
|
314
|
+
# @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
|
247
315
|
#
|
248
316
|
# @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
|
249
317
|
#
|
250
318
|
# @raise [RdkafkaError] When adding the message to rdkafka's queue failed
|
251
|
-
def produce(
|
319
|
+
def produce(
|
320
|
+
topic:,
|
321
|
+
payload: nil,
|
322
|
+
key: nil,
|
323
|
+
partition: nil,
|
324
|
+
partition_key: nil,
|
325
|
+
timestamp: nil,
|
326
|
+
headers: nil,
|
327
|
+
label: nil,
|
328
|
+
topic_config: EMPTY_HASH
|
329
|
+
)
|
252
330
|
closed_producer_check(__method__)
|
253
331
|
|
254
332
|
# Start by checking and converting the input
|
@@ -267,8 +345,20 @@ module Rdkafka
|
|
267
345
|
key.bytesize
|
268
346
|
end
|
269
347
|
|
348
|
+
topic_config_hash = topic_config.hash
|
349
|
+
|
350
|
+
# Checks if we have the rdkafka topic reference object ready. It saves us on object
|
351
|
+
# allocation and allows to use custom config on demand.
|
352
|
+
set_topic_config(topic, topic_config, topic_config_hash) unless @topics_refs_map.dig(topic, topic_config_hash)
|
353
|
+
topic_ref = @topics_refs_map.dig(topic, topic_config_hash)
|
354
|
+
|
270
355
|
if partition_key
|
271
356
|
partition_count = partition_count(topic)
|
357
|
+
|
358
|
+
# Check if there are no overrides for the partitioner and use the default one only when
|
359
|
+
# no per-topic is present.
|
360
|
+
partitioner_name = @topics_configs.dig(topic, topic_config_hash, :partitioner) || @partitioner_name
|
361
|
+
|
272
362
|
# If the topic is not present, set to -1
|
273
363
|
partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
|
274
364
|
end
|
@@ -298,7 +388,7 @@ module Rdkafka
|
|
298
388
|
DeliveryHandle.register(delivery_handle)
|
299
389
|
|
300
390
|
args = [
|
301
|
-
:int, Rdkafka::Bindings::
|
391
|
+
:int, Rdkafka::Bindings::RD_KAFKA_VTYPE_RKT, :pointer, topic_ref,
|
302
392
|
:int, Rdkafka::Bindings::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::Bindings::RD_KAFKA_MSG_F_COPY,
|
303
393
|
:int, Rdkafka::Bindings::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
|
304
394
|
:int, Rdkafka::Bindings::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
|
data/lib/rdkafka/version.rb
CHANGED
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -31,6 +31,14 @@ describe Rdkafka::Admin do
|
|
31
31
|
let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
|
32
32
|
let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
|
33
33
|
|
34
|
+
describe '#describe_errors' do
|
35
|
+
let(:errors) { admin.class.describe_errors }
|
36
|
+
|
37
|
+
it { expect(errors.size).to eq(162) }
|
38
|
+
it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
|
39
|
+
it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
|
40
|
+
end
|
41
|
+
|
34
42
|
describe 'admin without auto-start' do
|
35
43
|
let(:admin) { config.admin(native_kafka_auto_start: false) }
|
36
44
|
|
@@ -31,6 +31,48 @@ describe Rdkafka::Producer do
|
|
31
31
|
it { expect(producer.name).to include('rdkafka#producer-') }
|
32
32
|
end
|
33
33
|
|
34
|
+
describe '#produce with topic config alterations' do
|
35
|
+
context 'when config is not valid' do
|
36
|
+
it 'expect to raise error' do
|
37
|
+
expect do
|
38
|
+
producer.produce(topic: 'test', payload: '', topic_config: { 'invalid': 'invalid' })
|
39
|
+
end.to raise_error(Rdkafka::Config::ConfigError)
|
40
|
+
end
|
41
|
+
end
|
42
|
+
|
43
|
+
context 'when config is valid' do
|
44
|
+
it 'expect to raise error' do
|
45
|
+
expect do
|
46
|
+
producer.produce(topic: 'test', payload: '', topic_config: { 'acks': 1 }).wait
|
47
|
+
end.not_to raise_error
|
48
|
+
end
|
49
|
+
|
50
|
+
context 'when alteration should change behavior' do
|
51
|
+
# This is set incorrectly for a reason
|
52
|
+
# If alteration would not work, this will hang the spec suite
|
53
|
+
let(:producer) do
|
54
|
+
rdkafka_producer_config(
|
55
|
+
'message.timeout.ms': 1_000_000,
|
56
|
+
:"bootstrap.servers" => "localhost:9094",
|
57
|
+
).producer
|
58
|
+
end
|
59
|
+
|
60
|
+
it 'expect to give up on delivery fast based on alteration config' do
|
61
|
+
expect do
|
62
|
+
producer.produce(
|
63
|
+
topic: 'produce_config_test',
|
64
|
+
payload: 'test',
|
65
|
+
topic_config: {
|
66
|
+
'compression.type': 'gzip',
|
67
|
+
'message.timeout.ms': 1
|
68
|
+
}
|
69
|
+
).wait
|
70
|
+
end.to raise_error(Rdkafka::RdkafkaError, /msg_timed_out/)
|
71
|
+
end
|
72
|
+
end
|
73
|
+
end
|
74
|
+
end
|
75
|
+
|
34
76
|
context "delivery callback" do
|
35
77
|
context "with a proc/lambda" do
|
36
78
|
it "should set the callback" do
|
data.tar.gz.sig
CHANGED
Binary file
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: karafka-rdkafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.15.
|
4
|
+
version: 0.15.1.rc1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Thijs Cadier
|
@@ -36,7 +36,7 @@ cert_chain:
|
|
36
36
|
AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
|
37
37
|
msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
|
38
38
|
-----END CERTIFICATE-----
|
39
|
-
date: 2024-
|
39
|
+
date: 2024-05-08 00:00:00.000000000 Z
|
40
40
|
dependencies:
|
41
41
|
- !ruby/object:Gem::Dependency
|
42
42
|
name: ffi
|
metadata.gz.sig
CHANGED
Binary file
|