karafka-rdkafka 0.14.5 → 0.14.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5ed56a133dd27e17055cf63b7e68945d9cb97b42690afa6c7eb230f56273754c
- data.tar.gz: bcb63a1c2c77d599ab403bfdca1df0badd28f1c1cc72eed16169e6d4044fe778
+ metadata.gz: c7376c9030672ba0205c3973317a626b7ececd3b100c312e5ef310d08de397cc
+ data.tar.gz: b843498f05cdadfd034c9c20338fd698b37df792c0bd54919c19632ae6ac8d29
  SHA512:
- metadata.gz: '07185f868e86c3aaa917f8be42a14047cfb9dd9901001351f416f14338bf1dcc942b4d4649749832efed5c10cb3c7e9ea3543706fd7e11bb79fc332206569898'
- data.tar.gz: c7bafd507d2b86c9ae61daab5ed32b142ec6d9e7dbd6d4cf26a704d432b257c00e84fb8872daab978c685941ab71e3201d7fe05635fa0fc8526eac016faf48d9
+ metadata.gz: ca8944dae92ea927a09748524a04df8079e9a1bef1c84034cdf2d5c372ca122e652a1b728c5a497ac26cefa3ae6b2eeb52da3df988332f06ef1a153ce8773028
+ data.tar.gz: 5772911202fae3ccd51d54300a0dd74159188d17395e4ee6895132abcc30a916a9adcf9643f1626dc9884f8a17c88e6d3d546bb0191218a4e263a4cbb37b3009
checksums.yaml.gz.sig CHANGED
Binary file
@@ -22,7 +22,7 @@ jobs:
  fail-fast: false
  matrix:
  ruby:
- - '3.3.0-rc1'
+ - '3.3'
  - '3.2'
  - '3.1'
  - '3.1.0'
@@ -31,7 +31,7 @@ jobs:
  - '2.7'
  - '2.7.0'
  include:
- - ruby: '3.2'
+ - ruby: '3.3'
  coverage: 'true'
  steps:
  - uses: actions/checkout@v4
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.2.2
+ 3.3.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
  # Rdkafka Changelog
 
+ ## 0.14.6 (2023-12-29)
+ - **[Feature]** Support storing metadata alongside offsets via `rd_kafka_offsets_store` in `#store_offset` (mensfeld)
+ - [Enhancement] Increase the `#committed` default timeout from 1_200ms to 2_000ms. This compensates for network glitches and operations against remote clusters, and aligns with the metadata query timeout.
+
  ## 0.14.5 (2023-12-20)
  - [Enhancement] Provide `label` producer handler and report reference for improved traceability.
 
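For consumers, the headline change looks like this; a minimal, hedged sketch of the new call (broker address, topic, and group names are illustrative, and `process` stands in for hypothetical application logic):

```ruby
require "rdkafka"

# Hypothetical usage of the 0.14.6 metadata-aware offset store.
consumer = Rdkafka::Config.new(
  'bootstrap.servers': 'localhost:9092',
  'group.id': 'example-group',
  'enable.auto.offset.store': false # required for manual offset stores
).consumer

consumer.subscribe('example_topic')

while (message = consumer.poll(1_000))
  process(message) # hypothetical application logic
  # Store the offset together with an application-defined metadata string;
  # it is committed alongside the offset via rd_kafka_offsets_store.
  consumer.store_offset(message, 'processed-by=worker-1')
end
```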
data/docker-compose.yml CHANGED
@@ -3,7 +3,7 @@ version: '2'
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:7.5.2
+ image: confluentinc/cp-kafka:7.5.3
 
  ports:
  - 9092:9092
@@ -92,7 +92,7 @@ module Rdkafka
  end
 
  attach_function :rd_kafka_topic_partition_list_new, [:int32], :pointer
- attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :void
+ attach_function :rd_kafka_topic_partition_list_add, [:pointer, :string, :int32], :pointer
  attach_function :rd_kafka_topic_partition_list_set_offset, [:pointer, :string, :int32, :int64], :void
  attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
  attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
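The `:void` to `:pointer` change is what enables the metadata support later in this diff: librdkafka's `rd_kafka_topic_partition_list_add` returns a pointer to the `rd_kafka_topic_partition_t` it just appended, and the gem now wraps that pointer to reach per-partition struct fields. A minimal sketch of the pattern, assuming `rdkafka` is loaded:

```ruby
require "rdkafka"

# Build a native list, append one entry, and wrap the returned pointer
# as a struct to access the fields of the appended entry.
tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(1)
ref = Rdkafka::Bindings.rd_kafka_topic_partition_list_add(tpl, "events", 0)

part = Rdkafka::Bindings::TopicPartition.new(ref)
part[:partition] # => 0

Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(tpl)
```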
@@ -16,11 +16,16 @@ module Rdkafka
  # @return [Integer]
  attr_reader :err
 
+ # Partition metadata in the context of a consumer
+ # @return [String, nil]
+ attr_reader :metadata
+
  # @private
- def initialize(partition, offset, err = 0)
+ def initialize(partition, offset, err = 0, metadata = nil)
  @partition = partition
  @offset = offset
  @err = err
+ @metadata = metadata
  end
 
  # Human readable representation of this partition.
@@ -29,6 +34,7 @@
  message = "<Partition #{partition}"
  message += " offset=#{offset}" if offset
  message += " err=#{err}" if err != 0
+ message += " metadata=#{metadata}" if metadata != nil
  message += ">"
  message
  end
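With the extra constructor argument, a metadata-carrying partition can be built and inspected directly; a small illustration with arbitrary values:

```ruby
require "rdkafka"

partition = Rdkafka::Consumer::Partition.new(0, 42, 0, "checkpoint-v2")

partition.metadata # => "checkpoint-v2"
partition.to_s     # => "<Partition 0 offset=42 metadata=checkpoint-v2>"
```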
@@ -66,10 +66,14 @@ module Rdkafka
  #
  # @param topic [String] The topic's name
  # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
+ # @param partitions_with_offsets [Array<Consumer::Partition>] The topic's partitions with offsets
+ # and metadata (if any)
  #
  # @return [nil]
  def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
- @data[topic.to_s] = partitions_with_offsets.map { |p, o| Partition.new(p, o) }
+ @data[topic.to_s] = partitions_with_offsets.map do |p, o|
+ p.is_a?(Partition) ? p : Partition.new(p, o)
+ end
  end
 
  # Return a `Hash` with the topics as keys and an array of partition information as the value if present.
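Both call styles now coexist: the Hash form from earlier releases and the new Array form that passes prebuilt `Partition` objects through unchanged. A hedged illustration (topic names and offsets arbitrary):

```ruby
require "rdkafka"

list = Rdkafka::Consumer::TopicPartitionList.new

# Hash form, unchanged from earlier releases: partition => offset.
list.add_topic_and_partitions_with_offsets("events", 0 => 101, 1 => 57)

# New Array form: Partition objects that may carry metadata.
list.add_topic_and_partitions_with_offsets(
  "audit",
  [Rdkafka::Consumer::Partition.new(0, 7, 0, "worker-1")]
)
```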
@@ -114,7 +118,13 @@ module Rdkafka
  else
  elem[:offset]
  end
- partition = Partition.new(elem[:partition], offset, elem[:err])
+
+ partition = Partition.new(
+ elem[:partition],
+ offset,
+ elem[:err],
+ elem[:metadata].null? ? nil : elem[:metadata].read_string(elem[:metadata_size])
+ )
  partitions.push(partition)
  data[elem[:topic]] = partitions
  end
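Reading the metadata with `read_string(elem[:metadata_size])` rather than relying on a NUL terminator is deliberate: in librdkafka's `rd_kafka_topic_partition_t` the metadata is an opaque pointer-plus-size pair of binary-safe bytes, so the explicit length is the only reliable boundary.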
@@ -136,12 +146,22 @@ module Rdkafka
  @data.each do |topic, partitions|
  if partitions
  partitions.each do |p|
- Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+ ref = Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
  tpl,
  topic,
  p.partition
  )
 
+ if p.metadata
+ part = Rdkafka::Bindings::TopicPartition.new(ref)
+ str_ptr = FFI::MemoryPointer.from_string(p.metadata)
+ # released here:
+ # https://github.com/confluentinc/librdkafka/blob/e03d3bb91ed92a38f38d9806b8d8deffe78a1de5/src/rdkafka_partition.c#L2682C18-L2682C18
+ str_ptr.autorelease = false
+ part[:metadata] = str_ptr
+ part[:metadata_size] = p.metadata.bytesize
+ end
+
  if p.offset
  offset = p.offset.is_a?(Time) ? p.offset.to_f * 1_000 : p.offset
 
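The `str_ptr.autorelease = false` line is the key detail here: once the pointer is assigned to the struct's `metadata` field, librdkafka owns that memory and frees it when the partition list is destroyed (per the `rdkafka_partition.c` call site linked in the comment), so leaving FFI's finalizer enabled would eventually cause a double free.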
@@ -243,7 +243,7 @@ module Rdkafka
  # @param timeout_ms [Integer] The timeout for fetching this information.
  # @return [TopicPartitionList]
  # @raise [RdkafkaError] When getting the committed positions fails.
- def committed(list=nil, timeout_ms=1200)
+ def committed(list=nil, timeout_ms=2_000)
  closed_consumer_check(__method__)
 
  if list.nil?
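Only the default changed; an explicit argument still takes precedence, as in this hedged snippet where `consumer` is assumed to be a connected `Rdkafka::Consumer`:

```ruby
consumer.committed             # waits up to the new 2_000ms default
consumer.committed(nil, 5_000) # explicit timeout for slow or remote clusters
```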
@@ -383,16 +383,34 @@
  # When using this, `enable.auto.offset.store` should be set to `false` in the config.
  #
  # @param message [Rdkafka::Consumer::Message] The message whose offset will be stored
+ # @param metadata [String, nil] commit metadata string or nil if none
  # @return [nil]
  # @raise [RdkafkaError] When storing the offset fails
- def store_offset(message)
+ def store_offset(message, metadata = nil)
  closed_consumer_check(__method__)
 
  list = TopicPartitionList.new
- list.add_topic_and_partitions_with_offsets(
- message.topic,
- message.partition => message.offset + 1
- )
+
+ # For metadata-aware commits we build the partition reference directly to save on
+ # object allocations
+ if metadata
+ list.add_topic_and_partitions_with_offsets(
+ message.topic,
+ [
+ Consumer::Partition.new(
+ message.partition,
+ message.offset + 1,
+ 0,
+ metadata
+ )
+ ]
+ )
+ else
+ list.add_topic_and_partitions_with_offsets(
+ message.topic,
+ message.partition => message.offset + 1
+ )
+ end
 
  tpl = list.to_native_tpl
 
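End to end, the stored metadata survives the commit and can be read back through `#committed`; a hedged round-trip sketch that mirrors the spec change below (`consumer` and `message` assumed to exist, metadata string arbitrary):

```ruby
consumer.store_offset(message, "batch-2023-12-29")
consumer.commit

partition = consumer.committed.to_h[message.topic][message.partition]

partition.offset   # => message.offset + 1
partition.metadata # => "batch-2023-12-29"
```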
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
  module Rdkafka
- VERSION = "0.14.5"
+ VERSION = "0.14.6"
  LIBRDKAFKA_VERSION = "2.3.0"
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
  end
@@ -480,12 +480,18 @@ describe Rdkafka::Consumer do
 
  describe "#store_offset" do
  let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': false).consumer }
+ let(:metadata) { SecureRandom.uuid }
+ let(:group_id) { SecureRandom.uuid }
+ let(:base_config) do
+ {
+ 'group.id': group_id,
+ 'enable.auto.offset.store': false,
+ 'enable.auto.commit': false
+ }
+ end
 
  before do
- config = {}
- config[:'enable.auto.offset.store'] = false
- config[:'enable.auto.commit'] = false
- @new_consumer = rdkafka_consumer_config(config).consumer
+ @new_consumer = rdkafka_consumer_config(base_config).consumer
  @new_consumer.subscribe("consume_test_topic")
  wait_for_assignment(@new_consumer)
  end
@@ -508,6 +514,19 @@ describe Rdkafka::Consumer do
  expect(partitions[message.partition].offset).to eq(message.offset + 1)
  end
 
+ it "should store the offset for a message with metadata" do
+ @new_consumer.store_offset(message, metadata)
+ @new_consumer.commit
+ @new_consumer.close
+
+ meta_consumer = rdkafka_consumer_config(base_config).consumer
+ meta_consumer.subscribe("consume_test_topic")
+ wait_for_assignment(meta_consumer)
+ meta_consumer.poll(1_000)
+ expect(meta_consumer.committed.to_h[message.topic][message.partition].metadata).to eq(metadata)
+ meta_consumer.close
+ end
+
  it "should raise an error with invalid input" do
  allow(message).to receive(:partition).and_return(9999)
  expect {
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-rdkafka
  version: !ruby/object:Gem::Version
- version: 0.14.5
+ version: 0.14.6
  platform: ruby
  authors:
  - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
  -----END CERTIFICATE-----
- date: 2023-12-20 00:00:00.000000000 Z
+ date: 2023-12-29 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi
@@ -277,7 +277,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.19
+ rubygems_version: 3.5.3
  signing_key:
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig CHANGED
Binary file