karafka-rdkafka 0.14.7 → 0.14.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +0 -1
- data/CHANGELOG.md +8 -0
- data/README.md +12 -0
- data/ext/Rakefile +43 -26
- data/lib/rdkafka/consumer.rb +0 -7
- data/lib/rdkafka/producer.rb +37 -9
- data/lib/rdkafka/version.rb +1 -1
- data/spec/rdkafka/consumer_spec.rb +6 -4
- data.tar.gz.sig +0 -0
- metadata +2 -2
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 8a3805cc154b0e1ca0d71fe06a852013c3e8d8dfd8148d4de8902aed66a2774d
|
4
|
+
data.tar.gz: 908236ad1ab0be52549f01ddf90ed37355a3b6e9e3373b867eeb83fa05fdf39c
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 6460881076e6c45ae5feb38a3cfd4da3160f13a092f6a7acdbbe268e297b10e27ad792dab32a8c193824a5ef44fc9c2f40c5685ee5c489370056f1c408297f83
|
7
|
+
data.tar.gz: c2f42212e4df244d36f2312506dd69e52e8064c3485dc1fd0692d223ac9f2512a8b8102b7f473766386eb997dc7bb490fc4fc3c4a2815c528d6c1a8cddee7fcd
|
checksums.yaml.gz.sig
CHANGED
Binary file
|
data/.github/workflows/ci.yml
CHANGED
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,13 @@
|
|
1
1
|
# Rdkafka Changelog
|
2
2
|
|
3
|
+
## 0.14.9 (2024-01-29)
|
4
|
+
- [Fix] Partition cache caches invalid `nil` result for `PARTITIONS_COUNT_TTL`.
|
5
|
+
- [Enhancement] Report `-1` instead of `nil` in case `partition_count` failure.
|
6
|
+
|
7
|
+
## 0.14.8 (2024-01-24)
|
8
|
+
- [Enhancement] Provide support for Nix OS (alexandriainfantino)
|
9
|
+
- [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld)
|
10
|
+
|
3
11
|
## 0.14.7 (2023-12-29)
|
4
12
|
- [Fix] Recognize that Karafka uses a custom partition object (fixed in 2.3.0) and ensure it is recognized.
|
5
13
|
|
data/README.md
CHANGED
@@ -32,6 +32,7 @@ The most important pieces of a Kafka client are implemented, and we aim to provi
|
|
32
32
|
* [Message Publishing Libraries](#message-publishing-libraries)
|
33
33
|
- [Development](#development)
|
34
34
|
- [Example](#example)
|
35
|
+
- [Versions](#versions)
|
35
36
|
|
36
37
|
## Project Scope
|
37
38
|
|
@@ -147,3 +148,14 @@ To see everything working, run these in separate tabs:
|
|
147
148
|
bundle exec rake consume_messages
|
148
149
|
bundle exec rake produce_messages
|
149
150
|
```
|
151
|
+
|
152
|
+
## Versions
|
153
|
+
|
154
|
+
| rdkafka-ruby | librdkafka |
|
155
|
+
|-|-|
|
156
|
+
| 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
|
157
|
+
| 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
|
158
|
+
| 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
|
159
|
+
| 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) |
|
160
|
+
| 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) |
|
161
|
+
| 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) |
|
data/ext/Rakefile
CHANGED
@@ -1,40 +1,57 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
require File.expand_path('../../lib/rdkafka/version', __FILE__)
|
4
|
-
require "mini_portile2"
|
5
4
|
require "fileutils"
|
6
5
|
require "open-uri"
|
7
6
|
|
8
7
|
task :default => :clean do
|
9
|
-
#
|
10
|
-
|
8
|
+
# For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
|
9
|
+
# managed by nix itself, so using the normal file paths doesn't work for nix users.
|
10
|
+
#
|
11
|
+
# Mini_portile causes an issue because its dependencies are downloaded on the fly and therefore don't exist/aren't
|
12
|
+
# accessible in the nix environment
|
13
|
+
if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
|
14
|
+
# Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
|
15
|
+
require "mini_portile2"
|
16
|
+
recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
|
11
17
|
|
12
|
-
|
13
|
-
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
+
# Use default homebrew openssl if we're on mac and the directory exists
|
19
|
+
# and each of flags is not empty
|
20
|
+
if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
|
21
|
+
ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
|
22
|
+
ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
|
23
|
+
end
|
18
24
|
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
25
|
+
recipe.files << {
|
26
|
+
:url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
|
27
|
+
:sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
|
28
|
+
}
|
29
|
+
recipe.configure_options = ["--host=#{recipe.host}"]
|
30
|
+
recipe.cook
|
31
|
+
# Move dynamic library we're interested in
|
32
|
+
if recipe.host.include?('darwin')
|
33
|
+
from_extension = '1.dylib'
|
34
|
+
to_extension = 'dylib'
|
35
|
+
else
|
36
|
+
from_extension = 'so.1'
|
37
|
+
to_extension = 'so'
|
38
|
+
end
|
39
|
+
lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
|
40
|
+
FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
|
41
|
+
# Cleanup files created by miniportile we don't need in the gem
|
42
|
+
FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
|
43
|
+
FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
|
29
44
|
else
|
30
|
-
|
31
|
-
|
45
|
+
# Otherwise, copy existing libraries to ./ext
|
46
|
+
if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
|
47
|
+
raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
|
48
|
+
end
|
49
|
+
files = [
|
50
|
+
File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
|
51
|
+
File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
|
52
|
+
]
|
53
|
+
files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
|
32
54
|
end
|
33
|
-
lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
|
34
|
-
FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
|
35
|
-
# Cleanup files created by miniportile we don't need in the gem
|
36
|
-
FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
|
37
|
-
FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
|
38
55
|
end
|
39
56
|
|
40
57
|
task :clean do
|
data/lib/rdkafka/consumer.rb
CHANGED
@@ -30,13 +30,6 @@ module Rdkafka
|
|
30
30
|
->(_) { close }
|
31
31
|
end
|
32
32
|
|
33
|
-
# @return [String] consumer name
|
34
|
-
def name
|
35
|
-
@name ||= @native_kafka.with_inner do |inner|
|
36
|
-
::Rdkafka::Bindings.rd_kafka_name(inner)
|
37
|
-
end
|
38
|
-
end
|
39
|
-
|
40
33
|
# Close this consumer
|
41
34
|
# @return [nil]
|
42
35
|
def close
|
data/lib/rdkafka/producer.rb
CHANGED
@@ -23,6 +23,9 @@ module Rdkafka
|
|
23
23
|
attr_reader :delivery_callback_arity
|
24
24
|
|
25
25
|
# @private
|
26
|
+
# @param native_kafka [NativeKafka]
|
27
|
+
# @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
|
28
|
+
# the "consistent_random" default
|
26
29
|
def initialize(native_kafka, partitioner_name)
|
27
30
|
@native_kafka = native_kafka
|
28
31
|
@partitioner_name = partitioner_name || "consistent_random"
|
@@ -37,10 +40,16 @@ module Rdkafka
|
|
37
40
|
topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
|
38
41
|
end
|
39
42
|
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
43
|
+
partition_count = topic_metadata ? topic_metadata[:partition_count] : -1
|
44
|
+
|
45
|
+
# This approach caches the failure to fetch only for 1 second. This will make sure, that
|
46
|
+
# we do not cache the failure for too long but also "buys" us a bit of time in case there
|
47
|
+
# would be issues in the cluster so we won't overload it with consecutive requests
|
48
|
+
cache[topic] = if partition_count.positive?
|
49
|
+
[monotonic_now, partition_count]
|
50
|
+
else
|
51
|
+
[monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
|
52
|
+
end
|
44
53
|
end
|
45
54
|
end
|
46
55
|
|
@@ -196,14 +205,15 @@ module Rdkafka
|
|
196
205
|
# Partition count for a given topic.
|
197
206
|
#
|
198
207
|
# @param topic [String] The topic name.
|
199
|
-
# @return [Integer] partition count for a given topic
|
208
|
+
# @return [Integer] partition count for a given topic or `-1` if it could not be obtained.
|
200
209
|
#
|
201
210
|
# @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
|
202
211
|
# auto-created after returning nil.
|
203
212
|
#
|
204
213
|
# @note We cache the partition count for a given topic for given time.
|
205
214
|
# This prevents us in case someone uses `partition_key` from querying for the count with
|
206
|
-
# each message. Instead we query once every 30 seconds at most
|
215
|
+
# each message. Instead we query once every 30 seconds at most if we have a valid partition
|
216
|
+
# count or every 5 seconds in case we were not able to obtain number of partitions
|
207
217
|
def partition_count(topic)
|
208
218
|
closed_producer_check(__method__)
|
209
219
|
|
@@ -253,7 +263,7 @@ module Rdkafka
|
|
253
263
|
if partition_key
|
254
264
|
partition_count = partition_count(topic)
|
255
265
|
# If the topic is not present, set to -1
|
256
|
-
partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
|
266
|
+
partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
|
257
267
|
end
|
258
268
|
|
259
269
|
# If partition is nil, use -1 to let librdafka set the partition randomly or
|
@@ -320,13 +330,27 @@ module Rdkafka
|
|
320
330
|
delivery_handle
|
321
331
|
end
|
322
332
|
|
333
|
+
# Calls (if registered) the delivery callback
|
334
|
+
#
|
335
|
+
# @param delivery_report [Producer::DeliveryReport]
|
336
|
+
# @param delivery_handle [Producer::DeliveryHandle]
|
323
337
|
def call_delivery_callback(delivery_report, delivery_handle)
|
324
338
|
return unless @delivery_callback
|
325
339
|
|
326
|
-
|
327
|
-
|
340
|
+
case @delivery_callback_arity
|
341
|
+
when 0
|
342
|
+
@delivery_callback.call
|
343
|
+
when 1
|
344
|
+
@delivery_callback.call(delivery_report)
|
345
|
+
else
|
346
|
+
@delivery_callback.call(delivery_report, delivery_handle)
|
347
|
+
end
|
328
348
|
end
|
329
349
|
|
350
|
+
# Figures out the arity of a given block/method
|
351
|
+
#
|
352
|
+
# @param callback [#call, Proc]
|
353
|
+
# @return [Integer] arity of the provided block/method
|
330
354
|
def arity(callback)
|
331
355
|
return callback.arity if callback.respond_to?(:arity)
|
332
356
|
|
@@ -335,6 +359,10 @@ module Rdkafka
|
|
335
359
|
|
336
360
|
private
|
337
361
|
|
362
|
+
# Ensures, no operations can happen on a closed producer
|
363
|
+
#
|
364
|
+
# @param method [Symbol] name of the method that invoked producer
|
365
|
+
# @raise [Rdkafka::ClosedProducerError]
|
338
366
|
def closed_producer_check(method)
|
339
367
|
raise Rdkafka::ClosedProducerError.new(method) if closed?
|
340
368
|
end
|
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -1161,10 +1161,12 @@ describe Rdkafka::Consumer do
|
|
1161
1161
|
end
|
1162
1162
|
|
1163
1163
|
describe '#consumer_group_metadata_pointer' do
|
1164
|
-
|
1165
|
-
|
1166
|
-
|
1167
|
-
|
1164
|
+
let(:pointer) { consumer.consumer_group_metadata_pointer }
|
1165
|
+
|
1166
|
+
after { Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer) }
|
1167
|
+
|
1168
|
+
it 'expect to return a pointer' do
|
1169
|
+
expect(pointer).to be_a(FFI::Pointer)
|
1168
1170
|
end
|
1169
1171
|
end
|
1170
1172
|
|
data.tar.gz.sig
CHANGED
Binary file
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: karafka-rdkafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.14.7
|
4
|
+
version: 0.14.9
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Thijs Cadier
|
@@ -35,7 +35,7 @@ cert_chain:
|
|
35
35
|
AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
|
36
36
|
msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
|
37
37
|
-----END CERTIFICATE-----
|
38
|
-
date:
|
38
|
+
date: 2024-01-29 00:00:00.000000000 Z
|
39
39
|
dependencies:
|
40
40
|
- !ruby/object:Gem::Dependency
|
41
41
|
name: ffi
|
metadata.gz.sig
CHANGED
Binary file
|