karafka-rdkafka 0.14.4 → 0.14.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d22e33d93f77f138063f03111b25386c08e1667012b6ab4c8449fef61754ad44
- data.tar.gz: b8cc0557cbb945e5d7b7e025198a292a7cf601ce9decef97cb5102fd25af742b
+ metadata.gz: c7376c9030672ba0205c3973317a626b7ececd3b100c312e5ef310d08de397cc
+ data.tar.gz: b843498f05cdadfd034c9c20338fd698b37df792c0bd54919c19632ae6ac8d29
  SHA512:
- metadata.gz: cfc9af0e70030f513c4c34b00f9b94b6b3b9c828add2ac6cd742ebe2eb308a1e1a5489bf4c4280c34a98d2f0118d741608918314fa8f267b7cf98b22b3d8782d
- data.tar.gz: 315b552a77691b62e5a30fd5eaec1bf89efd8ec5789f9f0deeadf3997eeca9dc83bb6466ee8365e40e53d2e09707d3d68a57278b3637b170ad027b5080838519
+ metadata.gz: ca8944dae92ea927a09748524a04df8079e9a1bef1c84034cdf2d5c372ca122e652a1b728c5a497ac26cefa3ae6b2eeb52da3df988332f06ef1a153ce8773028
+ data.tar.gz: 5772911202fae3ccd51d54300a0dd74159188d17395e4ee6895132abcc30a916a9adcf9643f1626dc9884f8a17c88e6d3d546bb0191218a4e263a4cbb37b3009
checksums.yaml.gz.sig CHANGED
Binary file
@@ -22,7 +22,7 @@ jobs:
  fail-fast: false
  matrix:
  ruby:
- - '3.3.0-rc1'
+ - '3.3'
  - '3.2'
  - '3.1'
  - '3.1.0'
@@ -31,7 +31,7 @@ jobs:
  - '2.7'
  - '2.7.0'
  include:
- - ruby: '3.2'
+ - ruby: '3.3'
  coverage: 'true'
  steps:
  - uses: actions/checkout@v4
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.2.2
+ 3.3.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
  # Rdkafka Changelog

+ ## 0.14.6 (2023-12-29)
+ - **[Feature]** Support storing metadata alongside offsets via `rd_kafka_offsets_store` in `#store_offset` (mensfeld)
+ - [Enhancement] Increase the `#committed` default timeout from 1_200ms to 2_000ms. This compensates for network glitches and remote cluster operations and aligns with the metadata query timeout.
+
+ ## 0.14.5 (2023-12-20)
+ - [Enhancement] Provide a `label` reference on the producer delivery handle and delivery report for improved traceability.
+
  ## 0.14.4 (2023-12-19)
  - [Enhancement] Add ability to store offsets in a transaction (mensfeld)

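For context on the 0.14.6 feature above, a minimal usage sketch of metadata-aware offset storage, assuming a reachable broker; the topic, group, and metadata values here are illustrative, not from the gem:

    require 'rdkafka'

    consumer = Rdkafka::Config.new(
      'bootstrap.servers': 'localhost:9092',
      'group.id': 'example-group',
      'enable.auto.offset.store': false,
      'enable.auto.commit': false
    ).consumer

    consumer.subscribe('example_topic')

    if (message = consumer.poll(1_000))
      # New in 0.14.6: an optional metadata string is stored (and later
      # committed) alongside the offset
      consumer.store_offset(message, 'processed-by-worker-7')
      consumer.commit
    end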
data/docker-compose.yml CHANGED
@@ -3,7 +3,7 @@ version: '2'
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:7.5.2
+ image: confluentinc/cp-kafka:7.5.3

  ports:
  - 9092:9092
@@ -92,7 +92,7 @@ module Rdkafka
  end

  attach_function :rd_kafka_topic_partition_list_new, [:int32], :pointer
- attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :void
+ attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :pointer
  attach_function :rd_kafka_topic_partition_list_set_offset, [:pointer, :string, :int32, :int64], :void
  attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
  attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
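The return-type change above (`:void` to `:pointer`) is what enables the metadata feature: librdkafka's `rd_kafka_topic_partition_list_add` returns a pointer to the partition element it appends, and the `TopicPartitionList#to_native_tpl` hunk further below wraps that pointer to attach per-partition commit metadata. Condensed from that hunk, with comments added; this is not new API surface:

    ref = Rdkafka::Bindings.rd_kafka_topic_partition_list_add(tpl, topic, p.partition)

    if p.metadata
      part = Rdkafka::Bindings::TopicPartition.new(ref) # wrap the appended element
      str_ptr = FFI::MemoryPointer.from_string(p.metadata)
      # Ownership of the string passes to librdkafka, which releases it when
      # the list is destroyed (see the link in the hunk below)
      str_ptr.autorelease = false
      part[:metadata] = str_ptr
      part[:metadata_size] = p.metadata.bytesize
    end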
@@ -288,7 +288,16 @@ module Rdkafka

  # Call delivery callback on opaque
  if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
- opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], topic_name, message[:err]), delivery_handle)
+ opaque.call_delivery_callback(
+ Rdkafka::Producer::DeliveryReport.new(
+ message[:partition],
+ message[:offset],
+ topic_name,
+ message[:err],
+ delivery_handle.label
+ ),
+ delivery_handle
+ )
  end
  end
  end
@@ -16,11 +16,16 @@ module Rdkafka
  # @return [Integer]
  attr_reader :err

+ # Partition metadata in the context of a consumer
+ # @return [String, nil]
+ attr_reader :metadata
+
  # @private
- def initialize(partition, offset, err = 0)
+ def initialize(partition, offset, err = 0, metadata = nil)
  @partition = partition
  @offset = offset
  @err = err
+ @metadata = metadata
  end

  # Human readable representation of this partition.
@@ -29,6 +34,7 @@
  message = "<Partition #{partition}"
  message += " offset=#{offset}" if offset
  message += " err=#{err}" if err != 0
+ message += " metadata=#{metadata}" if metadata != nil
  message += ">"
  message
  end
@@ -66,10 +66,14 @@ module Rdkafka
  #
  # @param topic [String] The topic's name
  # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
+ # @param partitions_with_offsets [Array<Consumer::Partition>] The topic's partitions with offsets
+ # and metadata (if any)
  #
  # @return [nil]
  def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
- @data[topic.to_s] = partitions_with_offsets.map { |p, o| Partition.new(p, o) }
+ @data[topic.to_s] = partitions_with_offsets.map do |p, o|
+ p.is_a?(Partition) ? p : Partition.new(p, o)
+ end
  end

  # Return a `Hash` with the topics as keys and an array of partition information as the value if present.
@@ -114,7 +118,13 @@ module Rdkafka
  else
  elem[:offset]
  end
- partition = Partition.new(elem[:partition], offset, elem[:err])
+
+ partition = Partition.new(
+ elem[:partition],
+ offset,
+ elem[:err],
+ elem[:metadata].null? ? nil : elem[:metadata].read_string(elem[:metadata_size])
+ )
  partitions.push(partition)
  data[elem[:topic]] = partitions
  end
@@ -136,12 +146,22 @@ module Rdkafka
  @data.each do |topic, partitions|
  if partitions
  partitions.each do |p|
- Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+ ref = Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
  tpl,
  topic,
  p.partition
  )

+ if p.metadata
+ part = Rdkafka::Bindings::TopicPartition.new(ref)
+ str_ptr = FFI::MemoryPointer.from_string(p.metadata)
+ # released here:
+ # https://github.com/confluentinc/librdkafka/blob/e03d3bb91ed92a38f38d9806b8d8deffe78a1de5/src/rdkafka_partition.c#L2682C18-L2682C18
+ str_ptr.autorelease = false
+ part[:metadata] = str_ptr
+ part[:metadata_size] = p.metadata.bytesize
+ end
+
  if p.offset
  offset = p.offset.is_a?(Time) ? p.offset.to_f * 1_000 : p.offset

@@ -243,7 +243,7 @@ module Rdkafka
  # @param timeout_ms [Integer] The timeout for fetching this information.
  # @return [TopicPartitionList]
  # @raise [RdkafkaError] When getting the committed positions fails.
- def committed(list=nil, timeout_ms=1200)
+ def committed(list=nil, timeout_ms=2_000)
  closed_consumer_check(__method__)

  if list.nil?
@@ -383,16 +383,34 @@ module Rdkafka
  # When using this `enable.auto.offset.store` should be set to `false` in the config.
  #
  # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
+ # @param metadata [String, nil] commit metadata string or nil if none
  # @return [nil]
  # @raise [RdkafkaError] When storing the offset fails
- def store_offset(message)
+ def store_offset(message, metadata = nil)
  closed_consumer_check(__method__)

  list = TopicPartitionList.new
- list.add_topic_and_partitions_with_offsets(
- message.topic,
- message.partition => message.offset + 1
- )
+
+ # For metadata-aware commits we build the partition reference directly to save on
+ # object allocations
+ if metadata
+ list.add_topic_and_partitions_with_offsets(
+ message.topic,
+ [
+ Consumer::Partition.new(
+ message.partition,
+ message.offset + 1,
+ 0,
+ metadata
+ )
+ ]
+ )
+ else
+ list.add_topic_and_partitions_with_offsets(
+ message.topic,
+ message.partition => message.offset + 1
+ )
+ end

  tpl = list.to_native_tpl

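Stored metadata travels with the committed offset and can be read back through `#committed`, as the consumer spec further below exercises. A short sketch of the round trip, with illustrative values:

    consumer.store_offset(message, 'batch-42')
    consumer.commit

    # Any consumer in the same group can read the metadata back
    partition = consumer.committed.to_h[message.topic][message.partition]
    partition.offset   #=> message.offset + 1
    partition.metadata #=> "batch-42"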
@@ -11,6 +11,9 @@ module Rdkafka
  :offset, :int64,
  :topic_name, :pointer

+ # @return [Object, nil] label set during message production or nil by default
+ attr_accessor :label
+
  # @return [String] the name of the operation (e.g. "delivery")
  def operation_name
  "delivery"
@@ -22,7 +25,9 @@
  DeliveryReport.new(
  self[:partition],
  self[:offset],
- self[:topic_name].read_string
+ self[:topic_name].read_string,
+ nil,
+ label
  )
  else
  DeliveryReport.new(
@@ -31,7 +36,8 @@
  # For some errors we will not get a topic name reference and in cases like this
  # we should not return it
  self[:topic_name].null? ? nil : self[:topic_name].read_string,
- Rdkafka::RdkafkaError.build(self[:response])
+ Rdkafka::RdkafkaError.build(self[:response]),
+ label
  )
  end
  end
@@ -21,6 +21,9 @@ module Rdkafka
  # @return [Integer]
  attr_reader :error

+ # @return [Object, nil] label set during message production or nil by default
+ attr_reader :label
+
  # We alias `#topic_name` under `#topic` to make this consistent with `Consumer::Message`
  # where the topic name is under the `#topic` method. That way we have a consistent name that
  # is present in both places
@@ -30,11 +33,12 @@

  private

- def initialize(partition, offset, topic_name = nil, error = nil)
+ def initialize(partition, offset, topic_name = nil, error = nil, label = nil)
  @partition = partition
  @offset = offset
  @topic_name = topic_name
  @error = error
+ @label = label
  end
  end
  end
@@ -226,11 +226,12 @@ module Rdkafka
  # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
  # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
  # @param headers [Hash<String,String>] Optional message headers
+ # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
  #
  # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
  #
  # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
- def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
+ def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
  closed_producer_check(__method__)

  # Start by checking and converting the input
@@ -272,6 +273,7 @@
  end

  delivery_handle = DeliveryHandle.new
+ delivery_handle.label = label
  delivery_handle[:pending] = true
  delivery_handle[:response] = -1
  delivery_handle[:partition] = -1
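The label set at produce time is available on the handle immediately and on the delivery report once delivery resolves (or fails), mirroring the producer spec changes below. A sketch assuming an already configured producer; topic and label values are illustrative:

    producer.delivery_callback = lambda do |report|
      # The label round-trips through the delivery report
      puts "delivered #{report.topic_name}/#{report.partition} label=#{report.label}"
    end

    handle = producer.produce(
      topic: 'example_topic',
      payload: 'payload',
      label: { request_id: 'abc-123' } # any Object can serve as a label
    )

    handle.label #=> { request_id: "abc-123" }
    handle.wait(max_wait_timeout: 15)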
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

  module Rdkafka
- VERSION = "0.14.4"
+ VERSION = "0.14.6"
  LIBRDKAFKA_VERSION = "2.3.0"
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
  end
@@ -480,12 +480,18 @@ describe Rdkafka::Consumer do

  describe "#store_offset" do
  let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': false).consumer }
+ let(:metadata) { SecureRandom.uuid }
+ let(:group_id) { SecureRandom.uuid }
+ let(:base_config) do
+ {
+ 'group.id': group_id,
+ 'enable.auto.offset.store': false,
+ 'enable.auto.commit': false
+ }
+ end

  before do
- config = {}
- config[:'enable.auto.offset.store'] = false
- config[:'enable.auto.commit'] = false
- @new_consumer = rdkafka_consumer_config(config).consumer
+ @new_consumer = rdkafka_consumer_config(base_config).consumer
  @new_consumer.subscribe("consume_test_topic")
  wait_for_assignment(@new_consumer)
  end
@@ -508,6 +514,19 @@
  expect(partitions[message.partition].offset).to eq(message.offset + 1)
  end

+ it "should store the offset for a message with metadata" do
+ @new_consumer.store_offset(message, metadata)
+ @new_consumer.commit
+ @new_consumer.close
+
+ meta_consumer = rdkafka_consumer_config(base_config).consumer
+ meta_consumer.subscribe("consume_test_topic")
+ wait_for_assignment(meta_consumer)
+ meta_consumer.poll(1_000)
+ expect(meta_consumer.committed.to_h[message.topic][message.partition].metadata).to eq(metadata)
+ meta_consumer.close
+ end
+
  it "should raise an error with invalid input" do
  allow(message).to receive(:partition).and_return(9999)
  expect {
@@ -34,6 +34,7 @@ describe Rdkafka::Producer do

  producer.delivery_callback = lambda do |report|
  expect(report).not_to be_nil
+ expect(report.label).to eq "label"
  expect(report.partition).to eq 1
  expect(report.offset).to be >= 0
  expect(report.topic_name).to eq "produce_test_topic"
@@ -44,9 +45,12 @@
  handle = producer.produce(
  topic: "produce_test_topic",
  payload: "payload",
- key: "key"
+ key: "key",
+ label: "label"
  )

+ expect(handle.label).to eq "label"
+
  # Wait for it to be delivered
  handle.wait(max_wait_timeout: 15)

@@ -175,11 +179,13 @@
  handle = producer.produce(
  topic: "produce_test_topic",
  payload: "payload",
- key: "key"
+ key: "key",
+ label: "label"
  )

  # Should be pending at first
  expect(handle.pending?).to be true
+ expect(handle.label).to eq "label"

  # Check delivery handle and report
  report = handle.wait(max_wait_timeout: 5)
@@ -187,6 +193,7 @@
  expect(report).not_to be_nil
  expect(report.partition).to eq 1
  expect(report.offset).to be >= 0
+ expect(report.label).to eq "label"

  # Flush and close producer
  producer.flush
@@ -567,10 +574,11 @@
  end

  it "should contain the error in the response when not deliverable" do
- handler = producer.produce(topic: 'produce_test_topic', payload: nil)
+ handler = producer.produce(topic: 'produce_test_topic', payload: nil, label: 'na')
  # Wait for the async callbacks and delivery registry to update
  sleep(2)
  expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+ expect(handler.create_result.label).to eq('na')
  end
  end

data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-rdkafka
  version: !ruby/object:Gem::Version
- version: 0.14.4
+ version: 0.14.6
  platform: ruby
  authors:
  - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
  -----END CERTIFICATE-----
- date: 2023-12-19 00:00:00.000000000 Z
+ date: 2023-12-29 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi
@@ -277,7 +277,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.19
+ rubygems_version: 3.5.3
  signing_key:
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig CHANGED
Binary file