rdkafka 0.12.0.beta.1 → 0.12.0.beta.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b7009092099e6d6f23e2e4c3ed22799e8e8473ac4265a7c72a81b55d7f4d3df6
- data.tar.gz: 847c47119591237cca59f178e7a0c438f098ed26039bf96de04d01df7d03362c
+ metadata.gz: a52f754380304d2a9bef30ecd2cbf0f33347cdb02254758f5452d9b9880a5bae
+ data.tar.gz: ed3f56449ad95f679bb5e686117ff6590533bd70965679cedbf952a3d30e0cfb
  SHA512:
- metadata.gz: 49b59362a354331726158d9d4f3c34b882ac87495d688861089a316873d7a25d4cdea16f37d347cd59db04f2b79410b4885d15edf336d75c4b8b9a4675326af6
- data.tar.gz: f85edc6bc9937a65169314ce9c068caac55dbd9ddd9d6b2e25c31f97323ac3b2584e9ff8f2d65342cd296278cb524802814e06c806c88b01b940bc13153f5418
+ metadata.gz: 8e5d72b357d60352cabcac36e0257fc72d1ffcfd5f13bcca5652add92f3cac421941af3b2ae19015575490021287218a06755268727f3e07de5826c30d7e9f98
+ data.tar.gz: 84a475f9167d598ad8499412a44c16019beaf77ec297f6d9c0879f27d0bcae079b0006bc880334c7e9d7310d9110250db69bdface85937cb1ec0abd5b56c4b2b
data/ext/Rakefile CHANGED
@@ -28,16 +28,25 @@ task :default => :clean do
  from_extension = 'so.1'
  to_extension = 'so'
  end
+
+ # Get path to the compiled library
  lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
- FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
+
+ # Get target dir
+ target_dir = ENV["RUBYARCHDIR"] || File.dirname(__FILE__)
+
+ # Move the compliled library there
+ FileUtils.mv(lib_path, File.join(target_dir, "librdkafka.#{to_extension}"))
+
  # Cleanup files created by miniportile we don't need in the gem
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
- FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
+ FileUtils.rm_rf File.join(target_dir, "tmp")
+ FileUtils.rm_rf File.join(target_dir, "ports")
  end

  task :clean do
- FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.dylib")
- FileUtils.rm_f File.join(File.dirname(__FILE__), "librdkafka.so")
+ target_dir = ENV["RUBYARCHDIR"] || File.dirname(__FILE__)
+ FileUtils.rm_f File.join(target_dir, "librdkafka.dylib")
+ FileUtils.rm_f File.join(target_dir, "librdkafka.so")
  FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
  end
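Context for the Rakefile change above: the default and clean tasks now resolve their output directory from the RUBYARCHDIR environment variable (which rubygems sets when it builds the extension) and fall back to the Rakefile's own directory, so librdkafka.so can land directly on the gem's load path. A minimal Ruby sketch of that fallback, for illustration only (paths are assumptions, not part of the gem):

    require "fileutils"

    # Prefer the directory rubygems asks for, as the new Rakefile does;
    # otherwise keep the library next to this file.
    target_dir = ENV["RUBYARCHDIR"] || __dir__
    FileUtils.mkdir_p(target_dir)
    puts "librdkafka.so would be moved to #{target_dir}"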
data/lib/rdkafka/bindings.rb CHANGED
@@ -246,14 +246,21 @@ module Rdkafka
  attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void

  # Partitioner
- attach_function :rd_kafka_msg_partitioner_consistent_random, [:pointer, :pointer, :size_t, :int32, :pointer, :pointer], :int32
+ PARTITIONERS = %w(random consistent consistent_random murmur2 murmur2_random fnv1a fnv1a_random).each_with_object({}) do |name, hsh|
+ method_name = "rd_kafka_msg_partitioner_#{name}".to_sym
+ attach_function method_name, [:pointer, :pointer, :size_t, :int32, :pointer, :pointer], :int32
+ hsh[name] = method_name
+ end

- def self.partitioner(str, partition_count)
+ def self.partitioner(str, partition_count, partitioner_name = "consistent_random")
  # Return RD_KAFKA_PARTITION_UA(unassigned partition) when partition count is nil/zero.
  return -1 unless partition_count&.nonzero?

  str_ptr = FFI::MemoryPointer.from_string(str)
- rd_kafka_msg_partitioner_consistent_random(nil, str_ptr, str.size, partition_count, nil, nil)
+ method_name = PARTITIONERS.fetch(partitioner_name) do
+ raise Rdkafka::Config::ConfigError.new("Unknown partitioner: #{partitioner_name}")
+ end
+ public_send(method_name, nil, str_ptr, str.size, partition_count, nil, nil)
  end

  # Create Topics
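The PARTITIONERS hash introduced above attaches one FFI binding per librdkafka built-in partitioner and lets Bindings.partitioner dispatch by name, raising Rdkafka::Config::ConfigError for unknown names. A hedged usage sketch (key and partition count are arbitrary; assumes the gem is installed and loadable):

    require "rdkafka"

    # Same key and partitioner name always map to the same partition.
    p Rdkafka::Bindings.partitioner("user-42", 16, "murmur2")
    p Rdkafka::Bindings.partitioner("user-42", 16)   # default: "consistent_random"

    begin
      Rdkafka::Bindings.partitioner("user-42", 16, "not_a_partitioner")
    rescue Rdkafka::Config::ConfigError => e
      puts e.message   # "Unknown partitioner: not_a_partitioner"
    end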
data/lib/rdkafka/config.rb CHANGED
@@ -179,7 +179,7 @@ module Rdkafka
  # Set callback to receive delivery reports on config
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
  # Return producer with Kafka client
- Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer))).tap do |producer|
+ Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
  opaque.producer = producer
  end
  end
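Because the producer factory above now forwards self[:partitioner], the partitioner can be chosen per Rdkafka::Config. A hedged sketch, assuming the usual config-hash API (the broker address and partitioner choice are placeholders):

    require "rdkafka"

    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092",
      :partitioner        => "murmur2_random"   # any name from PARTITIONERS; omit to keep the old default
    )
    producer = config.producer   # the partitioner name travels into Producer#initialize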
data/lib/rdkafka/producer.rb CHANGED
@@ -10,8 +10,9 @@ module Rdkafka
  attr_reader :delivery_callback

  # @private
- def initialize(client)
+ def initialize(client, partitioner_name)
  @client = client
+ @partitioner_name = partitioner_name || "consistent_random"

  # Makes sure, that the producer gets closed before it gets GCed by Ruby
  ObjectSpace.define_finalizer(self, client.finalizer)
@@ -85,7 +86,7 @@ module Rdkafka
  if partition_key
  partition_count = partition_count(topic)
  # If the topic is not present, set to -1
- partition = Rdkafka::Bindings.partitioner(partition_key, partition_count) if partition_count
+ partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
  end

  # If partition is nil, use -1 to let librdafka set the partition randomly or
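Net effect of the two producer hunks: a message produced with a partition_key and no explicit partition is now routed through the configured partitioner instead of always consistent_random. A hedged end-to-end sketch (topic and payload are placeholders; `producer` is the one from the config sketch above):

    handle = producer.produce(
      topic:         "user-events",
      payload:       "signed-in",
      partition_key: "user-42"    # hashed with the configured @partitioner_name
    )
    handle.wait                   # delivery report carries the chosen partition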
data/lib/rdkafka/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Rdkafka
- VERSION = "0.12.0.beta.1"
- LIBRDKAFKA_VERSION = "1.9.0-RC1"
- LIBRDKAFKA_SOURCE_SHA256 = "c6aefe22ee14e8d036304bd61a5e5a61e100f0554df911fe107af1375c9f01f5"
+ VERSION = "0.12.0.beta.2"
+ LIBRDKAFKA_VERSION = "1.9.0-RC2"
+ LIBRDKAFKA_SOURCE_SHA256 = "f38c7007d01c489c7b743d200a6760678f43b171b36886c4dda39d4a8d5aab59"
  end
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -76,6 +76,13 @@ describe Rdkafka::Bindings do
  result_2 = (Zlib.crc32(partition_key) % partition_count)
  expect(result_1).to eq(result_2)
  end
+
+ it "should return the partition calculated by the specified partitioner" do
+ result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count, "murmur2")
+ ptr = FFI::MemoryPointer.from_string(partition_key)
+ result_2 = Rdkafka::Bindings.rd_kafka_msg_partitioner_murmur2(nil, ptr, partition_key.size, partition_count, nil, nil)
+ expect(result_1).to eq(result_2)
+ end
  end

  describe "stats callback" do
data/spec/rdkafka/producer/client_spec.rb CHANGED
@@ -42,13 +42,13 @@ describe Rdkafka::Producer::Client do

  it "polls the native with default 250ms timeout" do
  polling_loop_expects do
- expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250)
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
  end
  end

  it "check the out queue of native client" do
  polling_loop_expects do
- expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native)
+ expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
  end
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rdkafka
  version: !ruby/object:Gem::Version
- version: 0.12.0.beta.1
+ version: 0.12.0.beta.2
  platform: ruby
  authors:
  - Thijs Cadier
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-03-24 00:00:00.000000000 Z
+ date: 2022-04-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi