karafka-rdkafka 0.14.7 → 0.14.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 90a3f576c4973a7ee361afd645b426d011234f844ce8cb12228ee9ae6a7a9c2d
- data.tar.gz: 1d3945367f88877b27eb038281cf88436f289d228824adce48bea6e6a8bd9c3d
+ metadata.gz: 8a3805cc154b0e1ca0d71fe06a852013c3e8d8dfd8148d4de8902aed66a2774d
+ data.tar.gz: 908236ad1ab0be52549f01ddf90ed37355a3b6e9e3373b867eeb83fa05fdf39c
  SHA512:
- metadata.gz: 260c1e17e41aa25175af3d2ea1acd093872704602179ddf5238903b644316c9631fbbeafbdc062ee03953f6d5d1fc19a4e73643a01630eae48d3d179d70cc72b
- data.tar.gz: fa8193f334c6bed1084218842c2762586fdb4cb16c15ddc095ae0215e1d51f9b3ccbaa40d92b4379fef6003cccc94396e68db906cff546c3ef8e4ac4f8a7c360
+ metadata.gz: 6460881076e6c45ae5feb38a3cfd4da3160f13a092f6a7acdbbe268e297b10e27ad792dab32a8c193824a5ef44fc9c2f40c5685ee5c489370056f1c408297f83
+ data.tar.gz: c2f42212e4df244d36f2312506dd69e52e8064c3485dc1fd0692d223ac9f2512a8b8102b7f473766386eb997dc7bb490fc4fc3c4a2815c528d6c1a8cddee7fcd
checksums.yaml.gz.sig CHANGED
Binary file
@@ -29,7 +29,6 @@ jobs:
  - '3.0'
  - '3.0.0'
  - '2.7'
- - '2.7.0'
  include:
    - ruby: '3.3'
      coverage: 'true'
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # Rdkafka Changelog

+ ## 0.14.9 (2024-01-29)
+ - [Fix] Partition cache caches invalid `nil` result for `PARTITIONS_COUNT_TTL`.
+ - [Enhancement] Report `-1` instead of `nil` in case of `partition_count` failure.
+
+ ## 0.14.8 (2024-01-24)
+ - [Enhancement] Provide support for NixOS (alexandriainfantino)
+ - [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld)
+
  ## 0.14.7 (2023-12-29)
  - [Fix] Recognize that Karafka uses a custom partition object (fixed in 2.3.0) and ensure it is recognized.

data/README.md CHANGED
@@ -32,6 +32,7 @@ The most important pieces of a Kafka client are implemented, and we aim to provi
    * [Message Publishing Libraries](#message-publishing-libraries)
  - [Development](#development)
  - [Example](#example)
+ - [Versions](#versions)

  ## Project Scope

@@ -147,3 +148,14 @@ To see everything working, run these in separate tabs:
  bundle exec rake consume_messages
  bundle exec rake produce_messages
  ```
+
+ ## Versions
+
+ | rdkafka-ruby | librdkafka |
+ |-|-|
+ | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
+ | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
+ | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
+ | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) |
+ | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) |
+ | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) |
data/ext/Rakefile CHANGED
@@ -1,40 +1,57 @@
  # frozen_string_literal: true

  require File.expand_path('../../lib/rdkafka/version', __FILE__)
- require "mini_portile2"
  require "fileutils"
  require "open-uri"

  task :default => :clean do
-   # Download and compile librdkafka
-   recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+   # For Nix users, Nix can't locate the file paths because the packages it requires aren't
+   # managed by the system but by Nix itself, so the normal file paths don't work for Nix users.
+   #
+   # MiniPortile causes an issue because its dependencies are downloaded on the fly and
+   # therefore don't exist/aren't accessible in the Nix environment.
+   if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
+     # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
+     require "mini_portile2"
+     recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

-   # Use default homebrew openssl if we're on mac and the directory exists
-   # and each of flags is not empty
-   if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
-     ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
-     ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
-   end
+     # Use the default Homebrew openssl if we're on mac, the directory exists,
+     # and each of the flags is not already set
+     if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+       ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+       ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+     end

-   recipe.files << {
-     :url => "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
-     :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
-   }
-   recipe.configure_options = ["--host=#{recipe.host}"]
-   recipe.cook
-   # Move dynamic library we're interested in
-   if recipe.host.include?('darwin')
-     from_extension = '1.dylib'
-     to_extension = 'dylib'
+     recipe.files << {
+       :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+       :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+     }
+     recipe.configure_options = ["--host=#{recipe.host}"]
+     recipe.cook
+     # Move the dynamic library we're interested in
+     if recipe.host.include?('darwin')
+       from_extension = '1.dylib'
+       to_extension = 'dylib'
+     else
+       from_extension = 'so.1'
+       to_extension = 'so'
+     end
+     lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+     FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+     # Clean up files created by MiniPortile that we don't need in the gem
+     FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+     FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
    else
-     from_extension = 'so.1'
-     to_extension = 'so'
+     # Otherwise, copy existing libraries to ./ext
+     if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
+       raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
+     end
+     files = [
+       File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
+       File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
+     ]
+     files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
    end
-   lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
-   FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
-   # Cleanup files created by miniportile we don't need in the gem
-   FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
-   FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  end

  task :clean do
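
When `RDKAFKA_EXT_PATH` points at a prebuilt librdkafka (as it would under Nix), the task above skips mini_portile2 entirely and copies the shared objects instead. A hedged usage sketch; the store path below is purely illustrative and would normally be exported by your Nix shell or derivation rather than set in Ruby:

```ruby
# Hypothetical Nix setup: RDKAFKA_EXT_PATH must contain lib/librdkafka.so
# (or lib/librdkafka.dylib on macOS). The store path here is made up.
ENV["RDKAFKA_EXT_PATH"] = "/nix/store/abc123-librdkafka-2.3.0"

# With the variable set, the default task copies the prebuilt library
# instead of downloading and compiling one:
#   bundle exec rake --rakefile ext/Rakefile
```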
@@ -30,13 +30,6 @@ module Rdkafka
      ->(_) { close }
    end

-   # @return [String] consumer name
-   def name
-     @name ||= @native_kafka.with_inner do |inner|
-       ::Rdkafka::Bindings.rd_kafka_name(inner)
-     end
-   end
-
    # Close this consumer
    # @return [nil]
    def close
@@ -23,6 +23,9 @@ module Rdkafka
    attr_reader :delivery_callback_arity

    # @private
+   # @param native_kafka [NativeKafka]
+   # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
+   #   the "consistent_random" default
    def initialize(native_kafka, partitioner_name)
      @native_kafka = native_kafka
      @partitioner_name = partitioner_name || "consistent_random"
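
For context, `partitioner_name` normally arrives via the client configuration rather than being passed by hand. A sketch, assuming the `partitioner` config key and `Rdkafka::Config#producer` behave as in prior releases:

```ruby
require "rdkafka"

# "murmur2" is one of librdkafka's built-in partitioners; omitting the key
# falls back to the "consistent_random" default documented above.
config = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",
  partitioner: "murmur2"
)
producer = config.producer
```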
@@ -37,10 +40,16 @@ module Rdkafka
        topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
      end

-     cache[topic] = [
-       monotonic_now,
-       topic_metadata ? topic_metadata[:partition_count] : nil
-     ]
+     partition_count = topic_metadata ? topic_metadata[:partition_count] : -1
+
+     # This approach caches a fetch failure for only 5 seconds. This makes sure that we do
+     # not cache the failure for too long, but also "buys" us a bit of time in case there
+     # are issues in the cluster, so we won't overload it with consecutive requests.
+     cache[topic] = if partition_count.positive?
+       [monotonic_now, partition_count]
+     else
+       [monotonic_now - PARTITIONS_COUNT_TTL + 5, partition_count]
+     end
    end
  end

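The failure branch above backdates the cached timestamp instead of introducing a second TTL constant, so a single expiry check covers both cases. A self-contained sketch of the same trick (names are illustrative, not the gem's internals):

```ruby
TTL = 30         # seconds a successful lookup stays cached
FAILURE_TTL = 5  # seconds a failed lookup (-1) stays cached

def monotonic_now
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
end

cache = {}

store = lambda do |topic, count|
  cache[topic] =
    if count.positive?
      [monotonic_now, count]
    else
      # Backdate so the entry expires FAILURE_TTL seconds from now
      [monotonic_now - TTL + FAILURE_TTL, count]
    end
end

fetch = lambda do |topic|
  ts, count = cache[topic]
  count if ts && monotonic_now - ts < TTL
end

store.call("events", -1)
fetch.call("events") # => -1 for ~5 seconds, then nil (forcing a refetch)
```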
@@ -196,14 +205,15 @@ module Rdkafka
    # Partition count for a given topic.
    #
    # @param topic [String] The topic name.
-   # @return [Integer] partition count for a given topic
+   # @return [Integer] partition count for a given topic or `-1` if it could not be obtained.
    #
    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
    #   auto-created after returning nil.
    #
    # @note We cache the partition count for a given topic for given time.
    #   This prevents us in case someone uses `partition_key` from querying for the count with
-   #   each message. Instead we query once every 30 seconds at most
+   #   each message. Instead we query at most once every 30 seconds if we have a valid partition
+   #   count, or every 5 seconds in case we were not able to obtain the number of partitions.
    def partition_count(topic)
      closed_producer_check(__method__)

@@ -253,7 +263,7 @@ module Rdkafka
      if partition_key
        partition_count = partition_count(topic)
        # If the topic is not present, set to -1
-       partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
+       partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
      end

      # If partition is nil, use -1 to let librdkafka set the partition randomly or
@@ -320,13 +330,27 @@ module Rdkafka
      delivery_handle
    end

+   # Calls the delivery callback (if one is registered)
+   #
+   # @param delivery_report [Producer::DeliveryReport]
+   # @param delivery_handle [Producer::DeliveryHandle]
    def call_delivery_callback(delivery_report, delivery_handle)
      return unless @delivery_callback

-     args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
-     @delivery_callback.call(*args)
+     case @delivery_callback_arity
+     when 0
+       @delivery_callback.call
+     when 1
+       @delivery_callback.call(delivery_report)
+     else
+       @delivery_callback.call(delivery_report, delivery_handle)
+     end
    end

+   # Figures out the arity of a given block/method
+   #
+   # @param callback [#call, Proc]
+   # @return [Integer] arity of the provided block/method
    def arity(callback)
      return callback.arity if callback.respond_to?(:arity)

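The `case` dispatch replaces a per-message array allocation and splat (the 0.14.8 changelog entry above). From the caller's side, all three arities keep working; a sketch with placeholder callbacks:

```ruby
# Arity 0: fire-and-forget notification
producer.delivery_callback = -> { puts "delivered" }

# Arity 1: the common case; only the report is needed
producer.delivery_callback = ->(report) { puts "#{report.topic_name}/#{report.partition}" }

# Arity 2: also receive the delivery handle returned by #produce
producer.delivery_callback = lambda do |report, handle|
  puts "offset #{report.offset} (handle: #{handle})"
end
```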
@@ -335,6 +359,10 @@ module Rdkafka

    private

+   # Ensures no operations can happen on a closed producer
+   #
+   # @param method [Symbol] name of the method that invoked the producer
+   # @raise [Rdkafka::ClosedProducerError]
    def closed_producer_check(method)
      raise Rdkafka::ClosedProducerError.new(method) if closed?
    end
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

  module Rdkafka
-   VERSION = "0.14.7"
+   VERSION = "0.14.9"
    LIBRDKAFKA_VERSION = "2.3.0"
    LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
  end
@@ -1161,10 +1161,12 @@ describe Rdkafka::Consumer do
    end

    describe '#consumer_group_metadata_pointer' do
-     it 'expect to yield pointer' do
-       consumer.consumer_group_metadata_pointer do |pointer|
-         expect(pointer).to be_a(FFI::Pointer)
-       end
+     let(:pointer) { consumer.consumer_group_metadata_pointer }
+
+     after { Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer) }
+
+     it 'expect to return a pointer' do
+       expect(pointer).to be_a(FFI::Pointer)
      end
    end

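The rewritten spec obtains the pointer directly and releases it explicitly in an `after` hook, rather than relying on the old block form. The same pairing in plain form (a sketch; `consumer` is a placeholder):

```ruby
pointer = consumer.consumer_group_metadata_pointer

begin
  # use the pointer, e.g. with transactional offset APIs
ensure
  Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer)
end
```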
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-rdkafka
  version: !ruby/object:Gem::Version
-   version: 0.14.7
+   version: 0.14.9
  platform: ruby
  authors:
  - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
    AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
    msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
  -----END CERTIFICATE-----
- date: 2023-12-29 00:00:00.000000000 Z
+ date: 2024-01-29 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ffi
metadata.gz.sig CHANGED
Binary file