karafka-rdkafka 0.14.6 → 0.14.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c7376c9030672ba0205c3973317a626b7ececd3b100c312e5ef310d08de397cc
-  data.tar.gz: b843498f05cdadfd034c9c20338fd698b37df792c0bd54919c19632ae6ac8d29
+  metadata.gz: 610db3521dfe3b63d906f2d8814756c04882cc68603a218842cc41b487124133
+  data.tar.gz: d870f56e2d7fbc1ae8d14cda5757a02d01d121740da7656deab08341904f4677
 SHA512:
-  metadata.gz: ca8944dae92ea927a09748524a04df8079e9a1bef1c84034cdf2d5c372ca122e652a1b728c5a497ac26cefa3ae6b2eeb52da3df988332f06ef1a153ce8773028
-  data.tar.gz: 5772911202fae3ccd51d54300a0dd74159188d17395e4ee6895132abcc30a916a9adcf9643f1626dc9884f8a17c88e6d3d546bb0191218a4e263a4cbb37b3009
+  metadata.gz: 17bca0d972de4d24d2de1c1b39ac38bab36eff5f856dd65448215fb53a88fcc8bc0b61e856cf18802ad6649938f52b2d25fdc76b0055396821866841c8067e3a
+  data.tar.gz: 87742fba542fc3df5af16e41baa36d5a7d0c33304f37e54d6ffc50121a4dcf7173a717a0e37353b77cac8828ec707aab54056e3eba53ecfd2780dd87ef9b21ec
checksums.yaml.gz.sig CHANGED
Binary file
@@ -29,7 +29,6 @@ jobs:
         - '3.0'
         - '3.0.0'
         - '2.7'
-        - '2.7.0'
       include:
         - ruby: '3.3'
           coverage: 'true'
data/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
 # Rdkafka Changelog
 
+## 0.14.8 (2024-01-24)
+- [Enhancement] Provide support for NixOS (alexandriainfantino)
+- [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld)
+
+## 0.14.7 (2023-12-29)
+- [Fix] Recognize Karafka's custom partition objects and guard `#metadata` access accordingly (workaround until Karafka 2.3.0 is released).
+
 ## 0.14.6 (2023-12-29)
 - **[Feature]** Support storing metadata alongside offsets via `rd_kafka_offsets_store` in `#store_offset` (mensfeld)
 - [Enhancement] Increase the `#committed` default timeout from 1_200ms to 2_000ms. This compensates for network glitches and remote cluster operations, and aligns with the metadata query timeout.
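As a usage illustration of the `#store_offset` metadata feature referenced above — a minimal sketch, assuming the metadata string travels as an optional second argument (the changelog names the feature but not the exact signature):

```ruby
# Hypothetical usage sketch; the optional second argument is an assumption
# based on the changelog entry, not a verified method signature.
consumer.subscribe("events")

consumer.each do |message|
  handle_event(message) # hypothetical application-level method
  # Persist the offset together with application metadata; librdkafka stores
  # it via rd_kafka_offsets_store under the hood.
  consumer.store_offset(message, "worker-7")
end
```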
data/README.md CHANGED
@@ -32,6 +32,7 @@ The most important pieces of a Kafka client are implemented, and we aim to provi
   * [Message Publishing Libraries](#message-publishing-libraries)
 - [Development](#development)
 - [Example](#example)
+- [Versions](#versions)
 
 ## Project Scope
 
@@ -147,3 +148,14 @@ To see everything working, run these in separate tabs:
 bundle exec rake consume_messages
 bundle exec rake produce_messages
 ```
+
+## Versions
+
+| rdkafka-ruby | librdkafka |
+|-|-|
+| 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
+| 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
+| 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
+| 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) |
+| 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) |
+| 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) |
data/ext/Rakefile CHANGED
@@ -1,40 +1,57 @@
 # frozen_string_literal: true
 
 require File.expand_path('../../lib/rdkafka/version', __FILE__)
-require "mini_portile2"
 require "fileutils"
 require "open-uri"
 
 task :default => :clean do
-  # Download and compile librdkafka
-  recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
+  # For Nix users: Nix can't locate the file paths because the packages it requires aren't
+  # managed by the system but by Nix itself, so the normal file paths don't work under Nix.
+  #
+  # mini_portile causes an issue because its dependencies are downloaded on the fly and
+  # therefore don't exist/aren't accessible in the Nix environment
+  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
+    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
+    require "mini_portile2"
+    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)
 
-  # Use default homebrew openssl if we're on mac and the directory exists
-  # and each of flags is not empty
-  if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
-    ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
-    ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
-  end
+    # Use default homebrew openssl if we're on mac, the directory exists,
+    # and each of the flags is not empty
+    if recipe.host&.include?("darwin") && system("which brew &> /dev/null") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}")
+      ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV["CPPFLAGS"]
+      ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
+    end
 
-  recipe.files << {
-    :url => "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
-    :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
-  }
-  recipe.configure_options = ["--host=#{recipe.host}"]
-  recipe.cook
-  # Move dynamic library we're interested in
-  if recipe.host.include?('darwin')
-    from_extension = '1.dylib'
-    to_extension = 'dylib'
+    recipe.files << {
+      :url => "https://codeload.github.com/edenhill/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
+      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+    }
+    recipe.configure_options = ["--host=#{recipe.host}"]
+    recipe.cook
+    # Move the dynamic library we're interested in
+    if recipe.host.include?('darwin')
+      from_extension = '1.dylib'
+      to_extension = 'dylib'
+    else
+      from_extension = 'so.1'
+      to_extension = 'so'
+    end
+    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
+    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+    # Clean up files created by mini_portile that we don't need in the gem
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
+    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
   else
-    from_extension = 'so.1'
-    to_extension = 'so'
+    # Otherwise, copy the existing libraries to ./ext
+    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
+      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
+    end
+    files = [
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
+      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
+    ]
+    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
   end
-  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
-  FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
-  # Cleanup files created by miniportile we don't need in the gem
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
-  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
 end
 
 task :clean do
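The net effect of the change above is an env-gated build: if `RDKAFKA_EXT_PATH` is set (as under Nix), prebuilt libraries are copied into `./ext`; otherwise mini_portile2 downloads and compiles librdkafka as before. A standalone sketch of that gate, with hypothetical paths and output:

```ruby
require "fileutils"

# Sketch of the env-gated build decision made by the Rakefile above.
ext_dir = __dir__
ext_path = ENV.fetch("RDKAFKA_EXT_PATH", "")

if ext_path.empty?
  # Default path: mini_portile2 would download and compile librdkafka here.
  puts "building librdkafka from source"
else
  # Nix path: copy whichever prebuilt library exists under the given prefix.
  %w[librdkafka.dylib librdkafka.so].each do |lib|
    candidate = File.join(ext_path, "lib", lib)
    FileUtils.cp(candidate, ext_dir) if File.exist?(candidate)
  end
end
```

How `RDKAFKA_EXT_PATH` gets pointed at a Nix store path providing librdkafka is left to the user's Nix configuration, as the raised error message suggests.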
@@ -152,7 +152,8 @@ module Rdkafka
           p.partition
         )
 
-        if p.metadata
+        # Remove the respond_to? check after Karafka 2.3.0 is released
+        if p.respond_to?(:metadata) && p.metadata
           part = Rdkafka::Bindings::TopicPartition.new(ref)
           str_ptr = FFI::MemoryPointer.from_string(p.metadata)
           # released here:
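To see why the guard matters: a partition-like object that lacks `#metadata` (as Karafka's custom partition objects reportedly did before 2.3.0) would raise `NoMethodError` on the old `if p.metadata` check. A minimal sketch with a hypothetical struct:

```ruby
# Hypothetical partition-like object without a #metadata accessor.
PlainPartition = Struct.new(:partition, :offset)

p = PlainPartition.new(0, 42)

# Old check: p.metadata would raise NoMethodError here.
# The guarded check degrades gracefully instead:
if p.respond_to?(:metadata) && p.metadata
  puts "metadata present: #{p.metadata}"
else
  puts "no metadata, skipping" # this branch runs
end
```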
@@ -30,13 +30,6 @@ module Rdkafka
       ->(_) { close }
     end
 
-    # @return [String] consumer name
-    def name
-      @name ||= @native_kafka.with_inner do |inner|
-        ::Rdkafka::Bindings.rd_kafka_name(inner)
-      end
-    end
-
     # Close this consumer
     # @return [nil]
     def close
@@ -23,6 +23,9 @@ module Rdkafka
     attr_reader :delivery_callback_arity
 
     # @private
+    # @param native_kafka [NativeKafka]
+    # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
+    #   the "consistent_random" default
     def initialize(native_kafka, partitioner_name)
       @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
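For context on where `partitioner_name` comes from: it is passed down from the client configuration. A minimal sketch, assuming the `partitioner` config key that rdkafka-ruby reads when building the producer (worth verifying against your gem version):

```ruby
require "rdkafka"

# The :partitioner key is an assumption here; when absent, the producer
# falls back to "consistent_random" as shown in the diff above.
config = Rdkafka::Config.new(
  :"bootstrap.servers" => "localhost:9092",
  :partitioner => "murmur2_random" # one of librdkafka's partitioners
)
producer = config.producer
```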
@@ -320,13 +323,27 @@ module Rdkafka
       delivery_handle
     end
 
+    # Calls (if registered) the delivery callback
+    #
+    # @param delivery_report [Producer::DeliveryReport]
+    # @param delivery_handle [Producer::DeliveryHandle]
     def call_delivery_callback(delivery_report, delivery_handle)
       return unless @delivery_callback
 
-      args = [delivery_report, delivery_handle].take(@delivery_callback_arity)
-      @delivery_callback.call(*args)
+      case @delivery_callback_arity
+      when 0
+        @delivery_callback.call
+      when 1
+        @delivery_callback.call(delivery_report)
+      else
+        @delivery_callback.call(delivery_report, delivery_handle)
+      end
     end
 
+    # Figures out the arity of a given block/method
+    #
+    # @param callback [#call, Proc]
+    # @return [Integer] arity of the provided block/method
     def arity(callback)
       return callback.arity if callback.respond_to?(:arity)
 
@@ -335,6 +352,10 @@ module Rdkafka
 
     private
 
+    # Ensures no operations can happen on a closed producer
+    #
+    # @param method [Symbol] name of the method that was invoked on the producer
+    # @raise [Rdkafka::ClosedProducerError]
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
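The arity-aware dispatch above keeps all existing callback shapes working while skipping the per-message array allocation noted in the changelog. A sketch of the supported shapes (attribute names on the report are illustrative of rdkafka-ruby's `DeliveryReport`):

```ruby
# Zero-argument callback: just a notification.
producer.delivery_callback = -> { puts "message delivered" }

# One-argument callback: receives the delivery report.
producer.delivery_callback = ->(report) do
  puts "delivered to partition #{report.partition} at offset #{report.offset}"
end

# Two-argument callback: additionally receives the delivery handle.
producer.delivery_callback = lambda do |report, handle|
  puts "report=#{report.inspect} handle=#{handle.inspect}"
end
```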
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.14.6"
+  VERSION = "0.14.8"
   LIBRDKAFKA_VERSION = "2.3.0"
   LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
@@ -1161,10 +1161,12 @@ describe Rdkafka::Consumer do
   end
 
   describe '#consumer_group_metadata_pointer' do
-    it 'expect to yield pointer' do
-      consumer.consumer_group_metadata_pointer do |pointer|
-        expect(pointer).to be_a(FFI::Pointer)
-      end
+    let(:pointer) { consumer.consumer_group_metadata_pointer }
+
+    after { Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer) }
+
+    it 'expect to return a pointer' do
+      expect(pointer).to be_a(FFI::Pointer)
     end
   end
 
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.14.6
+  version: 0.14.8
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
 -----END CERTIFICATE-----
-date: 2023-12-29 00:00:00.000000000 Z
+date: 2024-01-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
metadata.gz.sig CHANGED
Binary file