rdkafka 0.12.0.beta.0 → 0.12.0.beta.3
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/rdkafka/admin.rb +2 -2
- data/lib/rdkafka/bindings.rb +10 -3
- data/lib/rdkafka/config.rb +1 -1
- data/lib/rdkafka/producer.rb +3 -2
- data/lib/rdkafka/version.rb +3 -3
- data/spec/rdkafka/bindings_spec.rb +7 -0
- data/spec/rdkafka/consumer_spec.rb +3 -5
- data/spec/rdkafka/producer/client_spec.rb +6 -5
- data/spec/rdkafka/producer_spec.rb +1 -5
- metadata +6 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 1a6f8dfde957dbdc31ad2f229a6da70ef3023705bfc68e4414e896118b7df3c6
|
4
|
+
data.tar.gz: ed07422acb0268bb6483bda6eb0a93a8d75194c06589918e2d8a1cde5bf7ea5b
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 8e51b05b0aa38d8d31ee0e8394f4d5a637ca8ef591fa2f0c76699ced276101902816f89c1c200ab45f02a13f68ed2fe35c961070c851c840e46986526ac1422a
|
7
|
+
data.tar.gz: f7414245c4ce729abe326a7fac4be7617a68615c3d4339888e22424bb0a49faa5077c2caf725453ab361f1f88afff7d1a7ff4928afcb433792983f2851484c12
|
data/lib/rdkafka/admin.rb
CHANGED
@@ -90,7 +90,7 @@ module Rdkafka
|
|
90
90
|
admin_options_ptr,
|
91
91
|
queue_ptr
|
92
92
|
)
|
93
|
-
rescue Exception
|
93
|
+
rescue Exception
|
94
94
|
CreateTopicHandle.remove(create_topic_handle.to_ptr.address)
|
95
95
|
raise
|
96
96
|
ensure
|
@@ -140,7 +140,7 @@ module Rdkafka
|
|
140
140
|
admin_options_ptr,
|
141
141
|
queue_ptr
|
142
142
|
)
|
143
|
-
rescue Exception
|
143
|
+
rescue Exception
|
144
144
|
DeleteTopicHandle.remove(delete_topic_handle.to_ptr.address)
|
145
145
|
raise
|
146
146
|
ensure
|
data/lib/rdkafka/bindings.rb
CHANGED
@@ -246,14 +246,21 @@ module Rdkafka
|
|
246
246
|
attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
|
247
247
|
|
248
248
|
# Partitioner
|
249
|
-
|
249
|
+
PARTITIONERS = %w(random consistent consistent_random murmur2 murmur2_random fnv1a fnv1a_random).each_with_object({}) do |name, hsh|
|
250
|
+
method_name = "rd_kafka_msg_partitioner_#{name}".to_sym
|
251
|
+
attach_function method_name, [:pointer, :pointer, :size_t, :int32, :pointer, :pointer], :int32
|
252
|
+
hsh[name] = method_name
|
253
|
+
end
|
250
254
|
|
251
|
-
def self.partitioner(str, partition_count)
|
255
|
+
def self.partitioner(str, partition_count, partitioner_name = "consistent_random")
|
252
256
|
# Return RD_KAFKA_PARTITION_UA(unassigned partition) when partition count is nil/zero.
|
253
257
|
return -1 unless partition_count&.nonzero?
|
254
258
|
|
255
259
|
str_ptr = FFI::MemoryPointer.from_string(str)
|
256
|
-
|
260
|
+
method_name = PARTITIONERS.fetch(partitioner_name) do
|
261
|
+
raise Rdkafka::Config::ConfigError.new("Unknown partitioner: #{partitioner_name}")
|
262
|
+
end
|
263
|
+
public_send(method_name, nil, str_ptr, str.size, partition_count, nil, nil)
|
257
264
|
end
|
258
265
|
|
259
266
|
# Create Topics
|
data/lib/rdkafka/config.rb
CHANGED
@@ -179,7 +179,7 @@ module Rdkafka
|
|
179
179
|
# Set callback to receive delivery reports on config
|
180
180
|
Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
|
181
181
|
# Return producer with Kafka client
|
182
|
-
Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer))).tap do |producer|
|
182
|
+
Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
|
183
183
|
opaque.producer = producer
|
184
184
|
end
|
185
185
|
end
|
data/lib/rdkafka/producer.rb
CHANGED
@@ -10,8 +10,9 @@ module Rdkafka
|
|
10
10
|
attr_reader :delivery_callback
|
11
11
|
|
12
12
|
# @private
|
13
|
-
def initialize(client)
|
13
|
+
def initialize(client, partitioner_name)
|
14
14
|
@client = client
|
15
|
+
@partitioner_name = partitioner_name || "consistent_random"
|
15
16
|
|
16
17
|
# Makes sure, that the producer gets closed before it gets GCed by Ruby
|
17
18
|
ObjectSpace.define_finalizer(self, client.finalizer)
|
@@ -85,7 +86,7 @@ module Rdkafka
|
|
85
86
|
if partition_key
|
86
87
|
partition_count = partition_count(topic)
|
87
88
|
# If the topic is not present, set to -1
|
88
|
-
partition = Rdkafka::Bindings.partitioner(partition_key, partition_count) if partition_count
|
89
|
+
partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count
|
89
90
|
end
|
90
91
|
|
91
92
|
# If partition is nil, use -1 to let librdafka set the partition randomly or
|
data/lib/rdkafka/version.rb
CHANGED
@@ -1,5 +1,5 @@
|
|
1
1
|
module Rdkafka
|
2
|
-
VERSION = "0.12.0.beta.0"
|
3
|
-
LIBRDKAFKA_VERSION = "1.
|
4
|
-
LIBRDKAFKA_SOURCE_SHA256 = "
|
2
|
+
VERSION = "0.12.0.beta.3"
|
3
|
+
LIBRDKAFKA_VERSION = "1.9.0-RC2"
|
4
|
+
LIBRDKAFKA_SOURCE_SHA256 = "f38c7007d01c489c7b743d200a6760678f43b171b36886c4dda39d4a8d5aab59"
|
5
5
|
end
|
@@ -76,6 +76,13 @@ describe Rdkafka::Bindings do
|
|
76
76
|
result_2 = (Zlib.crc32(partition_key) % partition_count)
|
77
77
|
expect(result_1).to eq(result_2)
|
78
78
|
end
|
79
|
+
|
80
|
+
it "should return the partition calculated by the specified partitioner" do
|
81
|
+
result_1 = Rdkafka::Bindings.partitioner(partition_key, partition_count, "murmur2")
|
82
|
+
ptr = FFI::MemoryPointer.from_string(partition_key)
|
83
|
+
result_2 = Rdkafka::Bindings.rd_kafka_msg_partitioner_murmur2(nil, ptr, partition_key.size, partition_count, nil, nil)
|
84
|
+
expect(result_1).to eq(result_2)
|
85
|
+
end
|
79
86
|
end
|
80
87
|
|
81
88
|
describe "stats callback" do
|
@@ -241,7 +241,7 @@ describe Rdkafka::Consumer do
|
|
241
241
|
|
242
242
|
it "should return the assignment when subscribed" do
|
243
243
|
# Make sure there's a message
|
244
|
-
|
244
|
+
producer.produce(
|
245
245
|
topic: "consume_test_topic",
|
246
246
|
payload: "payload 1",
|
247
247
|
key: "key 1",
|
@@ -272,7 +272,7 @@ describe Rdkafka::Consumer do
|
|
272
272
|
it "should close a consumer" do
|
273
273
|
consumer.subscribe("consume_test_topic")
|
274
274
|
100.times do |i|
|
275
|
-
|
275
|
+
producer.produce(
|
276
276
|
topic: "consume_test_topic",
|
277
277
|
payload: "payload #{i}",
|
278
278
|
key: "key #{i}",
|
@@ -289,7 +289,7 @@ describe Rdkafka::Consumer do
|
|
289
289
|
describe "#commit, #committed and #store_offset" do
|
290
290
|
# Make sure there's a stored offset
|
291
291
|
let!(:report) do
|
292
|
-
|
292
|
+
producer.produce(
|
293
293
|
topic: "consume_test_topic",
|
294
294
|
payload: "payload 1",
|
295
295
|
key: "key 1",
|
@@ -831,7 +831,6 @@ describe Rdkafka::Consumer do
|
|
831
831
|
)
|
832
832
|
consumer = config.consumer
|
833
833
|
consumer.subscribe(topic_name)
|
834
|
-
loop_count = 0
|
835
834
|
batches_yielded = []
|
836
835
|
exceptions_yielded = []
|
837
836
|
each_batch_iterations = 0
|
@@ -875,7 +874,6 @@ describe Rdkafka::Consumer do
|
|
875
874
|
)
|
876
875
|
consumer = config.consumer
|
877
876
|
consumer.subscribe(topic_name)
|
878
|
-
loop_count = 0
|
879
877
|
batches_yielded = []
|
880
878
|
exceptions_yielded = []
|
881
879
|
each_batch_iterations = 0
|
@@ -1,15 +1,16 @@
|
|
1
1
|
require "spec_helper"
|
2
2
|
|
3
3
|
describe Rdkafka::Producer::Client do
|
4
|
-
let(:
|
4
|
+
let(:config) { rdkafka_producer_config }
|
5
|
+
let(:native) { config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer) }
|
5
6
|
let(:closing) { false }
|
6
7
|
let(:thread) { double(Thread) }
|
7
8
|
|
8
9
|
subject(:client) { described_class.new(native) }
|
9
10
|
|
10
11
|
before do
|
11
|
-
allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(
|
12
|
-
allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(
|
12
|
+
allow(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).and_call_original
|
13
|
+
allow(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(instance_of(FFI::Pointer)).and_return(0).and_call_original
|
13
14
|
allow(Rdkafka::Bindings).to receive(:rd_kafka_destroy)
|
14
15
|
allow(Thread).to receive(:new).and_return(thread)
|
15
16
|
|
@@ -41,13 +42,13 @@ describe Rdkafka::Producer::Client do
|
|
41
42
|
|
42
43
|
it "polls the native with default 250ms timeout" do
|
43
44
|
polling_loop_expects do
|
44
|
-
expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(
|
45
|
+
expect(Rdkafka::Bindings).to receive(:rd_kafka_poll).with(instance_of(FFI::Pointer), 250).at_least(:once)
|
45
46
|
end
|
46
47
|
end
|
47
48
|
|
48
49
|
it "check the out queue of native client" do
|
49
50
|
polling_loop_expects do
|
50
|
-
expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native)
|
51
|
+
expect(Rdkafka::Bindings).to receive(:rd_kafka_outq_len).with(native).at_least(:once)
|
51
52
|
end
|
52
53
|
end
|
53
54
|
end
|
@@ -376,13 +376,9 @@ describe Rdkafka::Producer do
|
|
376
376
|
end
|
377
377
|
end
|
378
378
|
|
379
|
-
it "should produce a message in a forked process" do
|
379
|
+
it "should produce a message in a forked process", skip: defined?(JRUBY_VERSION) && "Kernel#fork is not available" do
|
380
380
|
# Fork, produce a message, send the report over a pipe and
|
381
381
|
# wait for and check the message in the main process.
|
382
|
-
|
383
|
-
# Kernel#fork is not available in JRuby
|
384
|
-
skip if defined?(JRUBY_VERSION)
|
385
|
-
|
386
382
|
reader, writer = IO.pipe
|
387
383
|
|
388
384
|
fork do
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: rdkafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.12.0.beta.0
|
4
|
+
version: 0.12.0.beta.3
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Thijs Cadier
|
8
|
-
autorequire:
|
8
|
+
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2022-
|
11
|
+
date: 2022-04-12 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: ffi
|
@@ -206,7 +206,7 @@ homepage: https://github.com/thijsc/rdkafka-ruby
|
|
206
206
|
licenses:
|
207
207
|
- MIT
|
208
208
|
metadata: {}
|
209
|
-
post_install_message:
|
209
|
+
post_install_message:
|
210
210
|
rdoc_options: []
|
211
211
|
require_paths:
|
212
212
|
- lib
|
@@ -221,8 +221,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
221
221
|
- !ruby/object:Gem::Version
|
222
222
|
version: 1.3.1
|
223
223
|
requirements: []
|
224
|
-
rubygems_version: 3.
|
225
|
-
signing_key:
|
224
|
+
rubygems_version: 3.0.3
|
225
|
+
signing_key:
|
226
226
|
specification_version: 4
|
227
227
|
summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
|
228
228
|
It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
|