rdkafka 0.13.0.beta.1 → 0.13.0.beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c3c5b37efae2485950c8a7627e09dfca50bd7c1264e18ae811b85c3f9ae7d09a
-  data.tar.gz: fb106016aae053f18f53885ae176cdb9e07078c80139146b165231cdd7e490ab
+  metadata.gz: eac016dc712d56e178c77a62f8af4e422319a783e80acbd7528453bfca6ca260
+  data.tar.gz: 8a4ed8df1339e0e74c96b259431012465c499ae9e9c8c6db8ff751497e1e5c43
 SHA512:
-  metadata.gz: 2aaaf70e222ad813ec88ce49078f0e643c1860ff8ce93289134d52ac8a7104681c3f66fad48083c3d78223f7f3157dd7019bd41355933141316078bb1c5fd3aa
-  data.tar.gz: a1503635d8e51589db14db327176cbae4f8be9454938e63c96f2bfdcbd258cef1f392479955797e742463cd44ab1951f7596d23adb2b5c06ebf6ff6ab1963442
+  metadata.gz: e3167de231aaabdfa512cabef13a4ffaabcc94757a2b7d0f360ed27a9005a40da5bbb262dffc49bda03d11557eef63dcf6b9c010fb944cf41fcb06bb7531c3ee
+  data.tar.gz: 8db04d46686505a9b1fef79c8544f3c27056ae5ff9e9eede9343d4f5cef4c63c844b9798011a5339ebbdf2cab883d1c3fd5df61842ea90c4862fa0d03dd88610
data/CHANGELOG.md CHANGED
@@ -1,4 +1,7 @@
 # 0.13.0
+* Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
+* Support both string and symbol header keys (ColinDKelley)
+* Handle tombstone messages properly (kgalieva)
 * Add topic name to delivery report (maeve)
 * Allow string partitioner config (mollyegibson)
 * Fix documented type for DeliveryReport#error (jimmydo)
@@ -6,7 +9,7 @@
 * Use finalizers to cleanly exit producer and admin (thijsc)
 
 # 0.12.0
-* Bump librdkafka to 1.9.0
+* Bumps librdkafka to 1.9.0
 * Fix crash on empty partition key (mensfeld)
 * Pass the delivery handle to the callback (gvisokinskas)
 
data/lib/rdkafka/bindings.rb CHANGED
@@ -17,7 +17,7 @@ module Rdkafka
       end
     end
 
-    ffi_lib File.join(File.dirname(__FILE__), "../../ext/librdkafka.#{lib_extension}")
+    ffi_lib File.join(__dir__, "../../ext/librdkafka.#{lib_extension}")
 
     RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
     RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
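A note on the `ffi_lib` hunk above: `__dir__` is Ruby shorthand for the (symlink-resolved) directory of the current source file, so the new expression loads the same `ext/librdkafka` library as the old one. A minimal illustration of the equivalence:

    # __dir__ returns the canonicalized directory of the current file,
    # i.e. the same thing the longer idiom spells out:
    File.dirname(File.realpath(__FILE__)) == __dir__  # => true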
@@ -113,6 +113,7 @@ module Rdkafka
     attach_function :rd_kafka_conf_set_stats_cb, [:pointer, :stats_cb], :void
     callback :error_cb, [:pointer, :int, :string, :pointer], :void
     attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
+    attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
 
     # Log queue
     attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
@@ -176,6 +177,8 @@ module Rdkafka
     attach_function :rd_kafka_unsubscribe, [:pointer], :int
     attach_function :rd_kafka_subscription, [:pointer, :pointer], :int
     attach_function :rd_kafka_assign, [:pointer, :pointer], :int
+    attach_function :rd_kafka_incremental_assign, [:pointer, :pointer], :int
+    attach_function :rd_kafka_incremental_unassign, [:pointer, :pointer], :int
     attach_function :rd_kafka_assignment, [:pointer, :pointer], :int
     attach_function :rd_kafka_committed, [:pointer, :pointer, :int], :int
     attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int, blocking: true
@@ -201,9 +204,17 @@
     ) do |client_ptr, code, partitions_ptr, opaque_ptr|
       case code
       when RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
-        Rdkafka::Bindings.rd_kafka_assign(client_ptr, partitions_ptr)
+        if Rdkafka::Bindings.rd_kafka_rebalance_protocol(client_ptr) == "COOPERATIVE"
+          Rdkafka::Bindings.rd_kafka_incremental_assign(client_ptr, partitions_ptr)
+        else
+          Rdkafka::Bindings.rd_kafka_assign(client_ptr, partitions_ptr)
+        end
       else # RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS or errors
-        Rdkafka::Bindings.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+        if Rdkafka::Bindings.rd_kafka_rebalance_protocol(client_ptr) == "COOPERATIVE"
+          Rdkafka::Bindings.rd_kafka_incremental_unassign(client_ptr, partitions_ptr)
+        else
+          Rdkafka::Bindings.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+        end
       end
 
       opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
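Taken together, these bindings changes make the default rebalance callback protocol-aware: when librdkafka reports the `COOPERATIVE` protocol, partitions are added and removed incrementally instead of wholesale. Applications opt in purely through configuration, as the specs later in this diff do. A minimal sketch, assuming the broker address, group id, and topic name are illustrative:

    require "rdkafka"

    # Cooperative rebalancing is a config change only; the rebalance callback
    # above picks the matching assign/unassign calls by itself.
    config = Rdkafka::Config.new(
      "bootstrap.servers" => "localhost:9092",                 # illustrative
      "group.id" => "example-group",                           # illustrative
      "partition.assignment.strategy" => "cooperative-sticky"
    )

    consumer = config.consumer
    consumer.subscribe("example_topic")                        # illustrative
    consumer.each { |message| puts message.payload }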
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -2,11 +2,25 @@
 
 module Rdkafka
   class Consumer
-    # A message headers
-    class Headers
-      # Reads a native kafka's message header into ruby's hash
+    # Interface to return headers for a consumer message
+    module Headers
+      class HashWithSymbolKeysTreatedLikeStrings < Hash
+        def [](key)
+          if key.is_a?(Symbol)
+            Kernel.warn("rdkafka deprecation warning: header access with Symbol key #{key.inspect} treated as a String. " \
+              "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
+            super(key.to_s)
+          else
+            super
+          end
+        end
+      end
+
+      # Reads a librdkafka native message's headers and returns them as a Ruby Hash
+      #
+      # @param [librdkakfa message] native_message
       #
-      # @return [Hash<String, String>] a message headers
+      # @return [Hash<String, String>] headers Hash for the native_message
       #
       # @raise [Rdkafka::RdkafkaError] when fail to read headers
       #
@@ -26,7 +40,8 @@ module Rdkafka
         name_ptrptr = FFI::MemoryPointer.new(:pointer)
         value_ptrptr = FFI::MemoryPointer.new(:pointer)
         size_ptr = Rdkafka::Bindings::SizePtr.new
-        headers = {}
+
+        headers = HashWithSymbolKeysTreatedLikeStrings.new
 
         idx = 0
         loop do
@@ -53,12 +68,12 @@ module Rdkafka
 
           value = value_ptr.read_string(size)
 
-          headers[name.to_sym] = value
+          headers[name] = value
 
           idx += 1
         end
 
-        headers
+        headers.freeze
       end
     end
   end
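The practical effect of this Headers rework: header names are now read as String keys, the returned hash is frozen, and Symbol access keeps working for one deprecation cycle. A sketch of what a consumer now sees, with illustrative header names:

    # Assuming `message` is an Rdkafka::Consumer::Message:
    message.headers["version"]   # => e.g. "2.1.3" (canonical String-key access)
    message.headers[:version]    # => same value, but emits a deprecation
                                 #    warning via Kernel.warn; Symbol keys
                                 #    are slated to break in version 1
    message.headers.frozen?      # => true (callers can no longer mutate it)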
data/lib/rdkafka/consumer/message.rb CHANGED
@@ -20,7 +20,7 @@ module Rdkafka
       # @return [String, nil]
       attr_reader :key
 
-      # This message's offset in it's partition
+      # This message's offset in its partition
       # @return [Integer]
       attr_reader :offset
 
data/lib/rdkafka/consumer.rb CHANGED
@@ -550,7 +550,7 @@ module Rdkafka
         end
         if message
           slice << message
-          bytes += message.payload.bytesize
+          bytes += message.payload.bytesize if message.payload
         end
         if slice.size == max_items || bytes >= bytes_threshold || monotonic_now >= end_time - 0.001
           yield slice.dup, nil
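This one-line guard is the tombstone fix from the changelog: a tombstone is a record with a key but a nil payload (Kafka's deletion marker for compacted topics), and the previous byte accounting in `each_batch` crashed on `nil.bytesize`. Batch consumers should still handle nil payloads themselves; a sketch, where `delete_record` and `process` are hypothetical application helpers:

    # Assuming `consumer` is subscribed to a compacted topic:
    consumer.each_batch(max_items: 100) do |messages|
      messages.each do |message|
        if message.payload.nil?
          delete_record(message.key)   # tombstone: nil payload marks a deletion
        else
          process(message.payload)
        end
      end
    end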
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.13.0.beta.1"
+  VERSION = "0.13.0.beta.2"
   LIBRDKAFKA_VERSION = "1.9.2"
   LIBRDKAFKA_SOURCE_SHA256 = "3fba157a9f80a0889c982acdd44608be8a46142270a389008b22d921be1198ad"
 end
data/spec/rdkafka/consumer/headers_spec.rb ADDED
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+describe Rdkafka::Consumer::Headers do
+  let(:headers) do
+    { # Note String keys!
+      "version" => "2.1.3",
+      "type" => "String"
+    }
+  end
+  let(:native_message) { double('native message') }
+  let(:headers_ptr) { double('headers pointer') }
+
+  describe '.from_native' do
+    before do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_message_headers).with(native_message, anything) do |_, headers_ptrptr|
+        expect(headers_ptrptr).to receive(:read_pointer).and_return(headers_ptr)
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+        expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
+        expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
+        expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+        expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
+        expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
+        expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
+        Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+      end
+
+      expect(Rdkafka::Bindings).to \
+        receive(:rd_kafka_header_get_all)
+          .with(headers_ptr, 2, anything, anything, anything)
+          .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
+    end
+
+    subject { described_class.from_native(native_message) }
+
+    it { is_expected.to eq(headers) }
+    it { is_expected.to be_frozen }
+
+    it 'allows String key' do
+      expect(subject['version']).to eq("2.1.3")
+    end
+
+    it 'allows Symbol key, but warns' do
+      expect(Kernel).to \
+        receive(:warn).with("rdkafka deprecation warning: header access with Symbol key :version treated as a String. " \
+          "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
+      expect(subject[:version]).to eq("2.1.3")
+    end
+  end
+end
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -595,7 +595,7 @@ describe Rdkafka::Consumer do
   end
 
   describe "#poll with headers" do
-    it "should return message with headers" do
+    it "should return message with headers using string keys (when produced with symbol keys)" do
      report = producer.produce(
        topic: "consume_test_topic",
        key: "key headers",
@@ -605,7 +605,20 @@ describe Rdkafka::Consumer do
      message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
      expect(message).to be
      expect(message.key).to eq('key headers')
-     expect(message.headers).to include(foo: 'bar')
+     expect(message.headers).to include('foo' => 'bar')
+    end
+
+    it "should return message with headers using string keys (when produced with string keys)" do
+     report = producer.produce(
+       topic: "consume_test_topic",
+       key: "key headers",
+       headers: { 'foo' => 'bar' }
+     ).wait
+
+     message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
+     expect(message).to be
+     expect(message.key).to eq('key headers')
+     expect(message.headers).to include('foo' => 'bar')
    end
 
    it "should return message with no headers" do
@@ -700,7 +713,7 @@ describe Rdkafka::Consumer do
      n.times do |i|
        handles << producer.produce(
          topic: topic_name,
-         payload: Time.new.to_f.to_s,
+         payload: i % 10 == 0 ? nil : Time.new.to_f.to_s,
          key: i.to_s,
          partition: 0
        )
@@ -964,18 +977,6 @@ describe Rdkafka::Consumer do
      expect(listener.queue).to eq([:assigned, :revoked])
    end
  end
-
- def notify_listener(listener)
-   # 1. subscribe and poll
-   consumer.subscribe("consume_test_topic")
-   wait_for_assignment(consumer)
-   consumer.poll(100)
-
-   # 2. unsubscribe
-   consumer.unsubscribe
-   wait_for_unassignment(consumer)
-   consumer.close
- end
 
 
  context "methods that should not be called after a consumer has been closed" do
@@ -1015,4 +1016,62 @@ describe Rdkafka::Consumer do
 
    expect(consumer.closed?).to eq(true)
  end
+
+ context "when the rebalance protocol is cooperative" do
+   let(:consumer) do
+     config = rdkafka_consumer_config(
+       {
+         :"partition.assignment.strategy" => "cooperative-sticky",
+         :"debug" => "consumer",
+       }
+     )
+     config.consumer_rebalance_listener = listener
+     config.consumer
+   end
+
+   let(:listener) do
+     Struct.new(:queue) do
+       def on_partitions_assigned(consumer, list)
+         collect(:assign, list)
+       end
+
+       def on_partitions_revoked(consumer, list)
+         collect(:revoke, list)
+       end
+
+       def collect(name, list)
+         partitions = list.to_h.map { |key, values| [key, values.map(&:partition)] }.flatten
+         queue << ([name] + partitions)
+       end
+     end.new([])
+   end
+
+   it "should be able to assign and unassign partitions using the cooperative partition assignment APIs" do
+     notify_listener(listener) do
+       handles = []
+       10.times do
+         handles << producer.produce(
+           topic: "consume_test_topic",
+           payload: "payload 1",
+           key: "key 1",
+           partition: 0
+         )
+       end
+       handles.each(&:wait)
+
+       consumer.subscribe("consume_test_topic")
+       # Check the first 10 messages. Then close the consumer, which
+       # should break the each loop.
+       consumer.each_with_index do |message, i|
+         expect(message).to be_a Rdkafka::Consumer::Message
+         break if i == 10
+       end
+     end
+
+     expect(listener.queue).to eq([
+       [:assign, "consume_test_topic", 0, 1, 2],
+       [:revoke, "consume_test_topic", 0, 1, 2]
+     ])
+   end
+ end
 end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -9,7 +9,8 @@ describe Rdkafka::Producer do
 
  after do
    # Registry should always end up being empty
-   expect(Rdkafka::Producer::DeliveryHandle::REGISTRY).to eq({})
+   registry = Rdkafka::Producer::DeliveryHandle::REGISTRY
+   expect(registry).to be_empty, registry.inspect
    producer.close
    consumer.close
  end
@@ -187,7 +188,7 @@ describe Rdkafka::Producer do
    # Close producer
    producer.close
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -211,7 +212,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -285,7 +286,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -318,7 +319,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -339,7 +340,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -359,7 +360,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -377,7 +378,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -397,7 +398,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -406,9 +407,9 @@ describe Rdkafka::Producer do
 
    expect(message.payload).to eq "payload headers"
    expect(message.key).to eq "key headers"
-   expect(message.headers[:foo]).to eq "bar"
-   expect(message.headers[:baz]).to eq "foobar"
-   expect(message.headers[:foobar]).to be_nil
+   expect(message.headers["foo"]).to eq "bar"
+   expect(message.headers["baz"]).to eq "foobar"
+   expect(message.headers["foobar"]).to be_nil
  end
 
  it "should produce a message with empty headers" do
@@ -420,7 +421,7 @@ describe Rdkafka::Producer do
    )
    report = handle.wait(max_wait_timeout: 5)
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
@@ -493,7 +494,7 @@ describe Rdkafka::Producer do
 
    reader.close
 
-   # Consume message and verify it's content
+   # Consume message and verify its content
    message = wait_for_message(
      topic: "produce_test_topic",
      delivery_report: report,
data/spec/spec_helper.rb CHANGED
@@ -106,6 +106,20 @@ def wait_for_unassignment(consumer)
  end
 end
 
+def notify_listener(listener, &block)
+  # 1. subscribe and poll
+  consumer.subscribe("consume_test_topic")
+  wait_for_assignment(consumer)
+  consumer.poll(100)
+
+  block.call if block
+
+  # 2. unsubscribe
+  consumer.unsubscribe
+  wait_for_unassignment(consumer)
+  consumer.close
+end
+
 RSpec.configure do |config|
  config.filter_run focus: true
  config.run_all_when_everything_filtered = true
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.13.0.beta.1
+  version: 0.13.0.beta.2
 platform: ruby
 authors:
 - Thijs Cadier
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-10-11 00:00:00.000000000 Z
+date: 2022-10-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -191,6 +191,7 @@ files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb
@@ -221,7 +222,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: 1.3.1
 requirements: []
-rubygems_version: 3.3.7
+rubygems_version: 3.3.13
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
@@ -237,6 +238,7 @@ test_files:
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
 - spec/rdkafka/config_spec.rb
+- spec/rdkafka/consumer/headers_spec.rb
 - spec/rdkafka/consumer/message_spec.rb
 - spec/rdkafka/consumer/partition_spec.rb
 - spec/rdkafka/consumer/topic_partition_list_spec.rb