karafka-rdkafka 0.19.0 → 0.19.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 5a1e9fa0ca2b5dd14aed77c653fc4d154bb566113cac11c04d51cccc4e1d9fb7
4
- data.tar.gz: c513b0b82bdae4d9a16251a0abcd4b73a79ba21d2833814bccddcd8f6446151a
3
+ metadata.gz: 390b5be16a78ebe2b6f994429cfe32e51bc11d88735ef28bfa57b9b8ba34d73e
4
+ data.tar.gz: 66003597faaddea33ab464aa3ec9dd906f3676f85e12e1e7c7bc2379e35feef4
5
5
  SHA512:
6
- metadata.gz: 53bee0b1c513f6947ca657ca3836df05e6de31ba441aa6d85d71f523c28cad7b996ec14fae798b50bbaafb09eb00367bbc4298da5a926e3fae61cc94cb5179bb
7
- data.tar.gz: 51e903bb75f34fa7f49a8ebd6cdff193b2ee916fd0f4823145aae6ee219bf45e51ce538f3c9c3e4de9ba58c659fa4ecaf494768df1b83f998c9d2509cda58074
6
+ metadata.gz: b7856bee34f2d3d4abe28013d3a8905d4166067f5ce4a1cddc4dc6572d0ebfb03f412a3d4a53ac515a3523cbc8088774699da8fa2fad8d88272db4d08e2ec81e
7
+ data.tar.gz: 0d1fbbfce6be2bb41cfc7db66f53030e907f4c9c7f8e2251aabdeb7ca9786d120e22e971eebf426dbdd57dcb60c198506204cb2b4443333744c65339d9956746
checksums.yaml.gz.sig CHANGED
Binary file
@@ -0,0 +1,3 @@
1
+ /.github @mensfeld
2
+ /.github/workflows/ @mensfeld
3
+ /.github/actions/ @mensfeld
@@ -6,9 +6,14 @@ concurrency:
6
6
 
7
7
  on:
8
8
  pull_request:
9
+ branches: [ main, master ]
9
10
  push:
11
+ branches: [ main, master ]
10
12
  schedule:
11
- - cron: '0 1 * * *'
13
+ - cron: '0 1 * * *'
14
+
15
+ permissions:
16
+ contents: read
12
17
 
13
18
  env:
14
19
  BUNDLE_RETRY: 6
@@ -30,16 +35,19 @@ jobs:
30
35
  - ruby: '3.4'
31
36
  coverage: 'true'
32
37
  steps:
33
- - uses: actions/checkout@v4
38
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
39
+ with:
40
+ fetch-depth: 0
41
+
34
42
  - name: Install package dependencies
35
43
  run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
36
44
 
37
- - name: Start Kafka with docker compose
45
+ - name: Start Kafka with Docker Compose
38
46
  run: |
39
47
  docker compose up -d || (sleep 5 && docker compose up -d)
40
48
 
41
49
  - name: Set up Ruby
42
- uses: ruby/setup-ruby@v1
50
+ uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0
43
51
  with:
44
52
  ruby-version: ${{matrix.ruby}}
45
53
  bundler-cache: true
@@ -47,15 +55,13 @@ jobs:
47
55
  - name: Run all specs
48
56
  env:
49
57
  GITHUB_COVERAGE: ${{matrix.coverage}}
50
-
51
58
  run: |
52
59
  set -e
53
- bundle install --path vendor/bundle
60
+ bundle install --jobs 4 --retry 3
54
61
  cd ext && bundle exec rake
55
62
  cd ..
56
63
  bundle exec rspec
57
64
 
58
-
59
65
  macos_build:
60
66
  timeout-minutes: 30
61
67
  runs-on: macos-latest
@@ -68,10 +74,10 @@ jobs:
68
74
  - '3.2'
69
75
  - '3.1'
70
76
  steps:
71
- - uses: actions/checkout@v4
77
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
72
78
 
73
79
  - name: Set up Ruby
74
- uses: ruby/setup-ruby@v1
80
+ uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0
75
81
  with:
76
82
  ruby-version: ${{matrix.ruby}}
77
83
  bundler-cache: false
@@ -79,5 +85,5 @@ jobs:
79
85
  - name: Build rdkafka-ruby
80
86
  run: |
81
87
  set -e
82
- bundle install --path vendor/bundle
88
+ bundle install --jobs 4 --retry 3
83
89
  cd ext && bundle exec rake
@@ -0,0 +1,16 @@
1
+ name: Verify Action Pins
2
+ on:
3
+ pull_request:
4
+ paths:
5
+ - '.github/workflows/**'
6
+ jobs:
7
+ verify:
8
+ runs-on: ubuntu-latest
9
+ steps:
10
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
11
+ - name: Check SHA pins
12
+ run: |
13
+ if grep -E -r "uses: .*/.*@(v[0-9]+|main|master)($|[[:space:]]|$)" --include="*.yml" --include="*.yaml" .github/workflows/ | grep -v "#"; then
14
+ echo "::error::Actions should use SHA pins, not tags or branch names"
15
+ exit 1
16
+ fi
data/.ruby-version CHANGED
@@ -1 +1 @@
1
- 3.4.1
1
+ 3.4.2
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
1
1
  # Rdkafka Changelog
2
2
 
3
+ ## 0.19.1 (2025-04-07)
4
+ - [Enhancement] Support producing and consuming of headers with multiple values (KIP-82).
5
+ - [Enhancement] Allow customization of the native Kafka poll time.
6
+
3
7
  ## 0.19.0 (2025-01-20)
4
8
  - **[Breaking]** Deprecate and remove `#each_batch` due to data consistency concerns.
5
9
  - [Enhancement] Bump librdkafka to 2.8.0
data/README.md CHANGED
@@ -163,6 +163,7 @@ bundle exec rake produce_messages
163
163
 
164
164
  | rdkafka-ruby | librdkafka | patches |
165
165
  |-|-|-|
166
+ | 0.19.1 (2025-04-07) | 2.8.0 (2025-01-07) | yes |
166
167
  | 0.19.0 (2025-01-20) | 2.8.0 (2025-01-07) | yes |
167
168
  | 0.18.0 (2024-11-26) | 2.6.1 (2024-11-18) | yes |
168
169
  | 0.17.4 (2024-09-02) | 2.5.3 (2024-09-02) | yes |
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:7.8.0
4
+ image: confluentinc/cp-kafka:7.9.0
5
5
 
6
6
  ports:
7
7
  - 9092:9092
@@ -233,11 +233,12 @@ module Rdkafka
233
233
  #
234
234
  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
235
235
  # automatically. Defaults to true. Set to false only when doing complex initialization.
236
+ # @param native_kafka_poll_timeout_ms [Integer] ms poll time of the native Kafka
236
237
  # @return [Producer] The created producer
237
238
  #
238
239
  # @raise [ConfigError] When the configuration contains invalid options
239
240
  # @raise [ClientCreationError] When the native client cannot be created
240
- def producer(native_kafka_auto_start: true)
241
+ def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
241
242
  # Create opaque
242
243
  opaque = Opaque.new
243
244
  # Create Kafka config
@@ -254,7 +255,8 @@ module Rdkafka
254
255
  kafka,
255
256
  run_polling_thread: true,
256
257
  opaque: opaque,
257
- auto_start: native_kafka_auto_start
258
+ auto_start: native_kafka_auto_start,
259
+ timeout_ms: native_kafka_poll_timeout_ms
258
260
  ),
259
261
  partitioner_name
260
262
  ).tap do |producer|
@@ -266,11 +268,12 @@ module Rdkafka
266
268
  #
267
269
  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
268
270
  # automatically. Defaults to true. Set to false only when doing complex initialization.
271
+ # @param native_kafka_poll_timeout_ms [Integer] ms poll time of the native Kafka
269
272
  # @return [Admin] The created admin instance
270
273
  #
271
274
  # @raise [ConfigError] When the configuration contains invalid options
272
275
  # @raise [ClientCreationError] When the native client cannot be created
273
- def admin(native_kafka_auto_start: true)
276
+ def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
274
277
  opaque = Opaque.new
275
278
  config = native_config(opaque)
276
279
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
@@ -282,7 +285,8 @@ module Rdkafka
282
285
  kafka,
283
286
  run_polling_thread: true,
284
287
  opaque: opaque,
285
- auto_start: native_kafka_auto_start
288
+ auto_start: native_kafka_auto_start,
289
+ timeout_ms: native_kafka_poll_timeout_ms
286
290
  )
287
291
  )
288
292
  end
@@ -7,11 +7,13 @@ module Rdkafka
7
7
  EMPTY_HEADERS = {}.freeze
8
8
 
9
9
  # Reads a librdkafka native message's headers and returns them as a Ruby Hash
10
+ # where each key maps to either a String (single value) or Array<String> (multiple values)
11
+ # to support duplicate headers per KIP-82
10
12
  #
11
13
  # @private
12
14
  #
13
15
  # @param [Rdkafka::Bindings::Message] native_message
14
- # @return [Hash<String, String>] headers Hash for the native_message
16
+ # @return [Hash<String, String|Array<String>>] headers Hash for the native_message
15
17
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
16
18
  def self.from_native(native_message)
17
19
  headers_ptrptr = FFI::MemoryPointer.new(:pointer)
@@ -53,10 +55,19 @@ module Rdkafka
53
55
  size = size_ptr[:value]
54
56
 
55
57
  value_ptr = value_ptrptr.read_pointer
56
-
57
58
  value = value_ptr.read_string(size)
58
59
 
59
- headers[name] = value
60
+ if headers.key?(name)
61
+ # If we've seen this header before, convert to array if needed and append
62
+ if headers[name].is_a?(Array)
63
+ headers[name] << value
64
+ else
65
+ headers[name] = [headers[name], value]
66
+ end
67
+ else
68
+ # First occurrence - store as single value
69
+ headers[name] = value
70
+ end
60
71
 
61
72
  idx += 1
62
73
  end
@@ -4,7 +4,7 @@ module Rdkafka
4
4
  # @private
5
5
  # A wrapper around a native kafka that polls and cleanly exits
6
6
  class NativeKafka
7
- def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
7
+ def initialize(inner, run_polling_thread:, opaque:, auto_start: true, timeout_ms: 100)
8
8
  @inner = inner
9
9
  @opaque = opaque
10
10
  # Lock around external access
@@ -30,6 +30,8 @@ module Rdkafka
30
30
 
31
31
  @run_polling_thread = run_polling_thread
32
32
 
33
+ @timeout_ms = timeout_ms
34
+
33
35
  start if auto_start
34
36
 
35
37
  @closing = false
@@ -50,7 +52,7 @@ module Rdkafka
50
52
  @polling_thread = Thread.new do
51
53
  loop do
52
54
  @poll_mutex.synchronize do
53
- Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
55
+ Rdkafka::Bindings.rd_kafka_poll(@inner, @timeout_ms)
54
56
  end
55
57
 
56
58
  # Exit thread if closing and the poll queue is empty
@@ -309,7 +309,7 @@ module Rdkafka
309
309
  # @param partition [Integer,nil] Optional partition to produce to
310
310
  # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
311
311
  # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
312
- # @param headers [Hash<String,String>] Optional message headers
312
+ # @param headers [Hash<String,String|Array<String>>] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
313
313
  # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
314
314
  # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
315
315
  #
@@ -401,11 +401,23 @@ module Rdkafka
401
401
  if headers
402
402
  headers.each do |key0, value0|
403
403
  key = key0.to_s
404
- value = value0.to_s
405
- args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
406
- args << :string << key
407
- args << :pointer << value
408
- args << :size_t << value.bytesize
404
+ if value0.is_a?(Array)
405
+ # Handle array of values per KIP-82
406
+ value0.each do |value|
407
+ value = value.to_s
408
+ args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
409
+ args << :string << key
410
+ args << :pointer << value
411
+ args << :size_t << value.bytesize
412
+ end
413
+ else
414
+ # Handle single value
415
+ value = value0.to_s
416
+ args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
417
+ args << :string << key
418
+ args << :pointer << value
419
+ args << :size_t << value.bytesize
420
+ end
409
421
  end
410
422
  end
411
423
 
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
- VERSION = "0.19.0"
4
+ VERSION = "0.19.1"
5
5
  LIBRDKAFKA_VERSION = "2.8.0"
6
6
  LIBRDKAFKA_SOURCE_SHA256 = "5bd1c46f63265f31c6bfcedcde78703f77d28238eadf23821c2b43fc30be3e25"
7
7
  end
data/renovate.json CHANGED
@@ -1,6 +1,18 @@
1
1
  {
2
2
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3
3
  "extends": [
4
- "config:base"
4
+ "config:recommended"
5
+ ],
6
+ "github-actions": {
7
+ "enabled": true,
8
+ "pinDigests": true
9
+ },
10
+ "packageRules": [
11
+ {
12
+ "matchManagers": [
13
+ "github-actions"
14
+ ],
15
+ "minimumReleaseAge": "7 days"
16
+ }
5
17
  ]
6
18
  }
@@ -3,7 +3,7 @@
3
3
  describe Rdkafka::Consumer::Headers do
4
4
  let(:headers) do
5
5
  { # Note String keys!
6
- "version" => "2.1.3",
6
+ "version" => ["2.1.3", "2.1.4"],
7
7
  "type" => "String"
8
8
  }
9
9
  end
@@ -17,27 +17,39 @@ describe Rdkafka::Consumer::Headers do
17
17
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
18
18
  end
19
19
 
20
+ # First version header
20
21
  expect(Rdkafka::Bindings).to \
21
22
  receive(:rd_kafka_header_get_all)
22
23
  .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
23
- expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
24
- expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
25
- expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
24
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: "version"))
25
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][0].size)
26
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers["version"][0]))
26
27
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
27
28
  end
28
29
 
30
+ # Second version header
29
31
  expect(Rdkafka::Bindings).to \
30
32
  receive(:rd_kafka_header_get_all)
31
33
  .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
32
- expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
33
- expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
34
- expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
34
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: "version"))
35
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][1].size)
36
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers["version"][1]))
35
37
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
36
38
  end
37
39
 
40
+ # Single type header
38
41
  expect(Rdkafka::Bindings).to \
39
42
  receive(:rd_kafka_header_get_all)
40
- .with(headers_ptr, 2, anything, anything, anything)
43
+ .with(headers_ptr, 2, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
44
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 2", read_string_to_null: "type"))
45
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["type"].size)
46
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 2", read_string: headers["type"]))
47
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
48
+ end
49
+
50
+ expect(Rdkafka::Bindings).to \
51
+ receive(:rd_kafka_header_get_all)
52
+ .with(headers_ptr, 3, anything, anything, anything)
41
53
  .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
42
54
  end
43
55
 
@@ -46,8 +58,12 @@ describe Rdkafka::Consumer::Headers do
46
58
  it { is_expected.to eq(headers) }
47
59
  it { is_expected.to be_frozen }
48
60
 
49
- it 'allows String key' do
50
- expect(subject['version']).to eq("2.1.3")
61
+ it 'returns array for duplicate headers' do
62
+ expect(subject['version']).to eq(["2.1.3", "2.1.4"])
63
+ end
64
+
65
+ it 'returns string for single headers' do
66
+ expect(subject['type']).to eq("String")
51
67
  end
52
68
 
53
69
  it 'does not support symbols mappings' do
@@ -1002,4 +1002,44 @@ describe Rdkafka::Producer do
1002
1002
  end
1003
1003
  end
1004
1004
  end
1005
+
1006
+ describe "#produce with headers" do
1007
+ it "should produce a message with array headers" do
1008
+ headers = {
1009
+ "version" => ["2.1.3", "2.1.4"],
1010
+ "type" => "String"
1011
+ }
1012
+
1013
+ report = producer.produce(
1014
+ topic: "consume_test_topic",
1015
+ key: "key headers",
1016
+ headers: headers
1017
+ ).wait
1018
+
1019
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
1020
+ expect(message).to be
1021
+ expect(message.key).to eq('key headers')
1022
+ expect(message.headers['type']).to eq('String')
1023
+ expect(message.headers['version']).to eq(["2.1.3", "2.1.4"])
1024
+ end
1025
+
1026
+ it "should produce a message with single value headers" do
1027
+ headers = {
1028
+ "version" => "2.1.3",
1029
+ "type" => "String"
1030
+ }
1031
+
1032
+ report = producer.produce(
1033
+ topic: "consume_test_topic",
1034
+ key: "key headers",
1035
+ headers: headers
1036
+ ).wait
1037
+
1038
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
1039
+ expect(message).to be
1040
+ expect(message.key).to eq('key headers')
1041
+ expect(message.headers['type']).to eq('String')
1042
+ expect(message.headers['version']).to eq('2.1.3')
1043
+ end
1044
+ end
1005
1045
  end
data.tar.gz.sig CHANGED
@@ -1,2 +1,3 @@
1
- <����T��HT9wKo�s<��
2
- ��Y@�J�w�vr���|���%W��6^[G����q�3�D���ڟ0�:d6����#�ă�C*�Ƌ{����Lw�~��Am�;6Ey��$0�D��@����`�-5@� �V�%'��׏󸤆L��'$��s����X|�!b*�[�,�����6�$Zli�Ek��¹g�}M��fi�0:���?����UЖJ��ki>i�$u*�����R&�܏�Q�P"š՚|$�cyJ��U"Ф�]QB�L4��|�������R�vq�ۃW'F�17(��ǻX���@]J�~�F�"��q�Ŝ���OJ�09�D�1Pq��>Q�/��4 �d��O��oa/$/C��'h��Lr�(��\
1
+ ��E�d/�;��u'q�������ˏn)��}-^ �V�l-���^@M��%b���s��{i�l�ý��-��?�L���1�l�s��c��Z$xɪW�Ƚ��l(��D�|[����)�5V̚��k%�X~�Ÿ��%
2
+ ���1}kڃ��29���ؔ�1!T�ޯ[��8�� ���^�Ԥ�gk 3�k P��o�&)�=`]c|u(�$
3
+ !%������,Ȓ �������`��V�m+Y.�бt�M�������z�hc,���
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka-rdkafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.19.0
4
+ version: 0.19.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
35
35
  i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
36
36
  ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
37
37
  -----END CERTIFICATE-----
38
- date: 2025-01-20 00:00:00.000000000 Z
38
+ date: 2025-04-07 00:00:00.000000000 Z
39
39
  dependencies:
40
40
  - !ruby/object:Gem::Dependency
41
41
  name: ffi
@@ -171,8 +171,10 @@ extensions:
171
171
  - ext/Rakefile
172
172
  extra_rdoc_files: []
173
173
  files:
174
+ - ".github/CODEOWNERS"
174
175
  - ".github/FUNDING.yml"
175
176
  - ".github/workflows/ci.yml"
177
+ - ".github/workflows/verify-action-pins.yml"
176
178
  - ".gitignore"
177
179
  - ".rspec"
178
180
  - ".ruby-gemset"
metadata.gz.sig CHANGED
Binary file