karafka-rdkafka 0.19.0 → 0.20.0.rc1

This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5a1e9fa0ca2b5dd14aed77c653fc4d154bb566113cac11c04d51cccc4e1d9fb7
- data.tar.gz: c513b0b82bdae4d9a16251a0abcd4b73a79ba21d2833814bccddcd8f6446151a
+ metadata.gz: c5b81585e7271750f4b6c5110df05e25765fd6f86c991ce7f27f3a4714ec94ae
+ data.tar.gz: 387f5d228af380f6be3e4df9e1ea0d8842f4e65450f0d2dfc60ca68df9613a41
  SHA512:
- metadata.gz: 53bee0b1c513f6947ca657ca3836df05e6de31ba441aa6d85d71f523c28cad7b996ec14fae798b50bbaafb09eb00367bbc4298da5a926e3fae61cc94cb5179bb
- data.tar.gz: 51e903bb75f34fa7f49a8ebd6cdff193b2ee916fd0f4823145aae6ee219bf45e51ce538f3c9c3e4de9ba58c659fa4ecaf494768df1b83f998c9d2509cda58074
+ metadata.gz: '0190399ef8742b240a04d4fe55de8263d6b7ff015556ca094227bbf7838ce1d47c7f152e0224bbc9a4ed288651a6a2a4d2ec161b1f8219570f9affdfaa93e1a3'
+ data.tar.gz: 4d2ab1b51e28b07e852de063b3ad163e90a5e7191c70b3b0c7d88591b063fbd3b15f2bee452b6cb8de9a6e1032e633a64f013fd0ff10a0ff5f5f58e3ce1b3c18
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/CODEOWNERS ADDED
@@ -0,0 +1,3 @@
+ /.github @mensfeld
+ /.github/workflows/ @mensfeld
+ /.github/actions/ @mensfeld
data/.github/workflows/ci.yml CHANGED
@@ -6,9 +6,14 @@ concurrency:
 
  on:
  pull_request:
+ branches: [ main, master ]
  push:
+ branches: [ main, master ]
  schedule:
- - cron: '0 1 * * *'
+ - cron: '0 1 * * *'
+
+ permissions:
+ contents: read
 
  env:
  BUNDLE_RETRY: 6
@@ -26,20 +31,27 @@ jobs:
  - '3.3'
  - '3.2'
  - '3.1'
+ - 'jruby-10.0'
  include:
  - ruby: '3.4'
  coverage: 'true'
+ - ruby: 'jruby-10.0'
+ continue-on-error: true
+
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ with:
+ fetch-depth: 0
+
  - name: Install package dependencies
  run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
 
- - name: Start Kafka with docker compose
+ - name: Start Kafka with Docker Compose
  run: |
  docker compose up -d || (sleep 5 && docker compose up -d)
 
  - name: Set up Ruby
- uses: ruby/setup-ruby@v1
+ uses: ruby/setup-ruby@ca041f971d66735f3e5ff1e21cc13e2d51e7e535 # v1.233.0
  with:
  ruby-version: ${{matrix.ruby}}
  bundler-cache: true
@@ -47,15 +59,14 @@ jobs:
  - name: Run all specs
  env:
  GITHUB_COVERAGE: ${{matrix.coverage}}
-
+ continue-on-error: ${{ matrix.continue-on-error || false }} # Use the matrix value if present
  run: |
  set -e
- bundle install --path vendor/bundle
+ bundle install --jobs 4 --retry 3
  cd ext && bundle exec rake
  cd ..
  bundle exec rspec
 
-
  macos_build:
  timeout-minutes: 30
  runs-on: macos-latest
@@ -67,17 +78,22 @@ jobs:
  - '3.3'
  - '3.2'
  - '3.1'
+ - 'jruby-9.4'
+ include:
+ - ruby: 'jruby-10.0'
+ continue-on-error: true
  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
  - name: Set up Ruby
- uses: ruby/setup-ruby@v1
+ uses: ruby/setup-ruby@ca041f971d66735f3e5ff1e21cc13e2d51e7e535 # v1.233.0
  with:
  ruby-version: ${{matrix.ruby}}
  bundler-cache: false
 
  - name: Build rdkafka-ruby
+ continue-on-error: ${{ matrix.continue-on-error || false }}
  run: |
  set -e
- bundle install --path vendor/bundle
+ bundle install --jobs 4 --retry 3
  cd ext && bundle exec rake
data/.github/workflows/verify-action-pins.yml ADDED
@@ -0,0 +1,16 @@
+ name: Verify Action Pins
+ on:
+ pull_request:
+ paths:
+ - '.github/workflows/**'
+ jobs:
+ verify:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ - name: Check SHA pins
+ run: |
+ if grep -E -r "uses: .*/.*@(v[0-9]+|main|master)($|[[:space:]]|$)" --include="*.yml" --include="*.yaml" .github/workflows/ | grep -v "#"; then
+ echo "::error::Actions should use SHA pins, not tags or branch names"
+ exit 1
+ fi
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.4.1
+ 3.4.3
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # Rdkafka Changelog
 
+ ## 0.20.0 (Unreleased)
+ - [Enhancement] Bump librdkafka to 2.10.0
+ - [Enhancement] Roll out experimental JRuby support.
+
+ ## 0.19.1 (2025-04-07)
+ - [Enhancement] Support producing and consuming of headers with multiple values (KIP-82).
+ - [Enhancement] Allow customization of the native Kafka poll time.
+
  ## 0.19.0 (2025-01-20)
  - **[Breaking]** Deprecate and remove `#each_batch` due to data consistency concerns.
  - [Enhancement] Bump librdkafka to 2.8.0
data/README.md CHANGED
@@ -163,14 +163,16 @@ bundle exec rake produce_messages
 
  | rdkafka-ruby | librdkafka | patches |
  |-|-|-|
- | 0.19.0 (2025-01-20) | 2.8.0 (2025-01-07) | yes |
- | 0.18.0 (2024-11-26) | 2.6.1 (2024-11-18) | yes |
- | 0.17.4 (2024-09-02) | 2.5.3 (2024-09-02) | yes |
- | 0.17.0 (2024-08-01) | 2.5.0 (2024-07-10) | yes |
- | 0.16.0 (2024-06-13) | 2.4.0 (2024-05-07) | no |
- | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) | no |
- | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) | no |
- | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) | no |
- | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) | no |
- | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) | no |
- | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) | no |
+ | 0.20.0 (Unreleased) | 2.10.0 (2025-04-18) | yes |
+ | 0.19.1 (2025-04-07) | 2.8.0 (2025-01-07) | yes |
+ | 0.19.0 (2025-01-20) | 2.8.0 (2025-01-07) | yes |
+ | 0.18.0 (2024-11-26) | 2.6.1 (2024-11-18) | yes |
+ | 0.17.4 (2024-09-02) | 2.5.3 (2024-09-02) | yes |
+ | 0.17.0 (2024-08-01) | 2.5.0 (2024-07-10) | yes |
+ | 0.16.0 (2024-06-13) | 2.4.0 (2024-05-07) | no |
+ | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) | no |
+ | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) | no |
+ | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) | no |
+ | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) | no |
+ | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) | no |
+ | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) | no |
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:7.8.0
+ image: confluentinc/cp-kafka:7.9.0
 
  ports:
  - 9092:9092
data/lib/rdkafka/config.rb CHANGED
@@ -233,11 +233,12 @@ module Rdkafka
  #
  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
  # automatically. Defaults to true. Set to false only when doing complex initialization.
+ # @param native_kafka_poll_timeout_ms [Integer] ms poll time of the native Kafka
  # @return [Producer] The created producer
  #
  # @raise [ConfigError] When the configuration contains invalid options
  # @raise [ClientCreationError] When the native client cannot be created
- def producer(native_kafka_auto_start: true)
+ def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
  # Create opaque
  opaque = Opaque.new
  # Create Kafka config
@@ -254,7 +255,8 @@ module Rdkafka
  kafka,
  run_polling_thread: true,
  opaque: opaque,
- auto_start: native_kafka_auto_start
+ auto_start: native_kafka_auto_start,
+ timeout_ms: native_kafka_poll_timeout_ms
  ),
  partitioner_name
  ).tap do |producer|
@@ -266,11 +268,12 @@ module Rdkafka
  #
  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
  # automatically. Defaults to true. Set to false only when doing complex initialization.
+ # @param native_kafka_poll_timeout_ms [Integer] ms poll time of the native Kafka
  # @return [Admin] The created admin instance
  #
  # @raise [ConfigError] When the configuration contains invalid options
  # @raise [ClientCreationError] When the native client cannot be created
- def admin(native_kafka_auto_start: true)
+ def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
  opaque = Opaque.new
  config = native_config(opaque)
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
@@ -282,7 +285,8 @@ module Rdkafka
  kafka,
  run_polling_thread: true,
  opaque: opaque,
- auto_start: native_kafka_auto_start
+ auto_start: native_kafka_auto_start,
+ timeout_ms: native_kafka_poll_timeout_ms
  )
  )
  end
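
For orientation, a minimal usage sketch of the new keyword argument; the broker address and the 250 ms value are illustrative assumptions, not defaults:

    require "rdkafka"

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

    # Poll the native client every 250 ms instead of the default 100 ms.
    # A longer interval means fewer idle wakeups; a shorter one makes
    # delivery reports and error callbacks slightly more responsive.
    producer = config.producer(native_kafka_poll_timeout_ms: 250)
    admin    = config.admin(native_kafka_poll_timeout_ms: 250)

    producer.produce(topic: "example", payload: "hello").wait
    producer.close
    admin.close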
data/lib/rdkafka/consumer/headers.rb CHANGED
@@ -7,11 +7,13 @@ module Rdkafka
  EMPTY_HEADERS = {}.freeze
 
  # Reads a librdkafka native message's headers and returns them as a Ruby Hash
+ # where each key maps to either a String (single value) or Array<String> (multiple values)
+ # to support duplicate headers per KIP-82
  #
  # @private
  #
  # @param [Rdkafka::Bindings::Message] native_message
- # @return [Hash<String, String>] headers Hash for the native_message
+ # @return [Hash<String, String|Array<String>>] headers Hash for the native_message
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
  def self.from_native(native_message)
  headers_ptrptr = FFI::MemoryPointer.new(:pointer)
@@ -53,10 +55,19 @@ module Rdkafka
  size = size_ptr[:value]
 
  value_ptr = value_ptrptr.read_pointer
-
  value = value_ptr.read_string(size)
 
- headers[name] = value
+ if headers.key?(name)
+ # If we've seen this header before, convert to array if needed and append
+ if headers[name].is_a?(Array)
+ headers[name] << value
+ else
+ headers[name] = [headers[name], value]
+ end
+ else
+ # First occurrence - store as single value
+ headers[name] = value
+ end
 
  idx += 1
  end
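
The consumer-side effect, as a hedged illustration (header names and values are made up): a key that librdkafka reports once still maps to a String, while a repeated key now maps to an Array instead of keeping only the last value.

    # message is an Rdkafka::Consumer::Message returned by consumer.poll / #each
    message.headers
    # => {
    #      "type"    => "String",            # single occurrence -> String
    #      "version" => ["2.1.3", "2.1.4"]   # repeated key      -> Array of values
    #    }

    # Code written against the old String-only shape can normalize defensively:
    Array(message.headers["version"]) # => ["2.1.3", "2.1.4"] or ["2.1.3"]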
data/lib/rdkafka/native_kafka.rb CHANGED
@@ -4,7 +4,7 @@ module Rdkafka
  # @private
  # A wrapper around a native kafka that polls and cleanly exits
  class NativeKafka
- def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
+ def initialize(inner, run_polling_thread:, opaque:, auto_start: true, timeout_ms: 100)
  @inner = inner
  @opaque = opaque
  # Lock around external access
@@ -30,6 +30,8 @@ module Rdkafka
 
  @run_polling_thread = run_polling_thread
 
+ @timeout_ms = timeout_ms
+
  start if auto_start
 
  @closing = false
@@ -50,7 +52,7 @@ module Rdkafka
  @polling_thread = Thread.new do
  loop do
  @poll_mutex.synchronize do
- Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
+ Rdkafka::Bindings.rd_kafka_poll(@inner, @timeout_ms)
  end
 
  # Exit thread if closing and the poll queue is empty
data/lib/rdkafka/producer.rb CHANGED
@@ -309,7 +309,7 @@ module Rdkafka
  # @param partition [Integer,nil] Optional partition to produce to
  # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
  # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
- # @param headers [Hash<String,String>] Optional message headers
+ # @param headers [Hash<String,String|Array<String>>] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
  # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
  # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
  #
@@ -401,11 +401,23 @@ module Rdkafka
  if headers
  headers.each do |key0, value0|
  key = key0.to_s
- value = value0.to_s
- args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
- args << :string << key
- args << :pointer << value
- args << :size_t << value.bytesize
+ if value0.is_a?(Array)
+ # Handle array of values per KIP-82
+ value0.each do |value|
+ value = value.to_s
+ args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
+ args << :string << key
+ args << :pointer << value
+ args << :size_t << value.bytesize
+ end
+ else
+ # Handle single value
+ value = value0.to_s
+ args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
+ args << :string << key
+ args << :pointer << value
+ args << :size_t << value.bytesize
+ end
  end
  end
 
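On the produce side the same shapes apply; a hedged sketch (topic name and header contents are illustrative):

    producer.produce(
      topic: "events",
      payload: "body",
      headers: {
        "trace-id" => "abc123",          # single value, emitted once
        "tag"      => ["alpha", "beta"]  # emitted as two headers sharing the key "tag"
      }
    ).wait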
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
  module Rdkafka
- VERSION = "0.19.0"
- LIBRDKAFKA_VERSION = "2.8.0"
- LIBRDKAFKA_SOURCE_SHA256 = "5bd1c46f63265f31c6bfcedcde78703f77d28238eadf23821c2b43fc30be3e25"
+ VERSION = "0.20.0.rc1"
+ LIBRDKAFKA_VERSION = "2.10.0"
+ LIBRDKAFKA_SOURCE_SHA256 = "004b1cc2685d1d6d416b90b426a0a9d27327a214c6b807df6f9ea5887346ba3a"
  end
data/renovate.json CHANGED
@@ -1,6 +1,18 @@
  {
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
- "config:base"
+ "config:recommended"
+ ],
+ "github-actions": {
+ "enabled": true,
+ "pinDigests": true
+ },
+ "packageRules": [
+ {
+ "matchManagers": [
+ "github-actions"
+ ],
+ "minimumReleaseAge": "7 days"
+ }
  ]
  }
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -34,7 +34,7 @@ describe Rdkafka::Admin do
  describe '#describe_errors' do
  let(:errors) { admin.class.describe_errors }
 
- it { expect(errors.size).to eq(170) }
+ it { expect(errors.size).to eq(172) }
  it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
  it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
  end
@@ -738,17 +738,19 @@ describe Rdkafka::Admin do
  end
  end
 
- context "when operating from a fork" do
- # @see https://github.com/ffi/ffi/issues/1114
- it 'expect to be able to create topics and run other admin operations without hanging' do
- # If the FFI issue is not mitigated, this will hang forever
- pid = fork do
- admin
- .create_topic(topic_name, topic_partition_count, topic_replication_factor)
- .wait
- end
+ unless RUBY_PLATFORM == 'java'
+ context "when operating from a fork" do
+ # @see https://github.com/ffi/ffi/issues/1114
+ it 'expect to be able to create topics and run other admin operations without hanging' do
+ # If the FFI issue is not mitigated, this will hang forever
+ pid = fork do
+ admin
+ .create_topic(topic_name, topic_partition_count, topic_replication_factor)
+ .wait
+ end
 
- Process.wait(pid)
+ Process.wait(pid)
+ end
  end
  end
  end
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -149,15 +149,6 @@ describe Rdkafka::Bindings do
  end
 
  describe "oauthbearer set token" do
-
- context "without args" do
- it "should raise argument error" do
- expect {
- Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
- }.to raise_error(ArgumentError)
- end
- end
-
  context "with args" do
  before do
  DEFAULT_TOKEN_EXPIRY_SECONDS = 900
data/spec/rdkafka/config_spec.rb CHANGED
@@ -33,23 +33,25 @@ describe Rdkafka::Config do
  expect(log.string).to include "FATAL -- : I love testing"
  end
 
- it "expect to start new logger thread after fork and work" do
- reader, writer = IO.pipe
-
- pid = fork do
- $stdout.reopen(writer)
- Rdkafka::Config.logger = Logger.new($stdout)
- reader.close
- producer = rdkafka_producer_config(debug: 'all').producer
- producer.close
+ unless RUBY_PLATFORM == 'java'
+ it "expect to start new logger thread after fork and work" do
+ reader, writer = IO.pipe
+
+ pid = fork do
+ $stdout.reopen(writer)
+ Rdkafka::Config.logger = Logger.new($stdout)
+ reader.close
+ producer = rdkafka_producer_config(debug: 'all').producer
+ producer.close
+ writer.close
+ sleep(1)
+ end
+
  writer.close
- sleep(1)
+ Process.wait(pid)
+ output = reader.read
+ expect(output.split("\n").size).to be >= 20
  end
-
- writer.close
- Process.wait(pid)
- output = reader.read
- expect(output.split("\n").size).to be >= 20
  end
  end
 
data/spec/rdkafka/consumer/headers_spec.rb CHANGED
@@ -3,7 +3,7 @@
  describe Rdkafka::Consumer::Headers do
  let(:headers) do
  { # Note String keys!
- "version" => "2.1.3",
+ "version" => ["2.1.3", "2.1.4"],
  "type" => "String"
  }
  end
@@ -17,27 +17,39 @@ describe Rdkafka::Consumer::Headers do
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
  end
 
+ # First version header
  expect(Rdkafka::Bindings).to \
  receive(:rd_kafka_header_get_all)
  .with(headers_ptr, 0, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
- expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: headers.keys[0]))
- expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[0].size)
- expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers.values[0]))
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 0", read_string_to_null: "version"))
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][0].size)
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 0", read_string: headers["version"][0]))
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
  end
 
+ # Second version header
  expect(Rdkafka::Bindings).to \
  receive(:rd_kafka_header_get_all)
  .with(headers_ptr, 1, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
- expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: headers.keys[1]))
- expect(size_ptr).to receive(:[]).with(:value).and_return(headers.keys[1].size)
- expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers.values[1]))
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 1", read_string_to_null: "version"))
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["version"][1].size)
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 1", read_string: headers["version"][1]))
  Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
  end
 
+ # Single type header
  expect(Rdkafka::Bindings).to \
  receive(:rd_kafka_header_get_all)
- .with(headers_ptr, 2, anything, anything, anything)
+ .with(headers_ptr, 2, anything, anything, anything) do |_, _, name_ptrptr, value_ptrptr, size_ptr|
+ expect(name_ptrptr).to receive(:read_pointer).and_return(double("pointer 2", read_string_to_null: "type"))
+ expect(size_ptr).to receive(:[]).with(:value).and_return(headers["type"].size)
+ expect(value_ptrptr).to receive(:read_pointer).and_return(double("value pointer 2", read_string: headers["type"]))
+ Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+ end
+
+ expect(Rdkafka::Bindings).to \
+ receive(:rd_kafka_header_get_all)
+ .with(headers_ptr, 3, anything, anything, anything)
  .and_return(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT)
  end
 
@@ -46,8 +58,12 @@ describe Rdkafka::Consumer::Headers do
  it { is_expected.to eq(headers) }
  it { is_expected.to be_frozen }
 
- it 'allows String key' do
- expect(subject['version']).to eq("2.1.3")
+ it 'returns array for duplicate headers' do
+ expect(subject['version']).to eq(["2.1.3", "2.1.4"])
+ end
+
+ it 'returns string for single headers' do
+ expect(subject['type']).to eq("String")
  end
 
  it 'does not support symbols mappings' do
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -842,9 +842,9 @@ describe Rdkafka::Consumer do
  missing_topic = SecureRandom.uuid
  consumer.subscribe(missing_topic)
 
- expect {
- consumer.poll(1_000)
- }.to raise_error Rdkafka::RdkafkaError, /Subscribed topic not available: #{missing_topic}/
+ # @note it used to raise "Subscribed topic not available" in previous librdkafka versions
+ # but this behaviour has been changed
+ expect { consumer.poll(1_000) }.not_to raise_error
  end
  end
 
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -1002,4 +1002,44 @@ describe Rdkafka::Producer do
  end
  end
  end
+
+ describe "#produce with headers" do
+ it "should produce a message with array headers" do
+ headers = {
+ "version" => ["2.1.3", "2.1.4"],
+ "type" => "String"
+ }
+
+ report = producer.produce(
+ topic: "consume_test_topic",
+ key: "key headers",
+ headers: headers
+ ).wait
+
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
+ expect(message).to be
+ expect(message.key).to eq('key headers')
+ expect(message.headers['type']).to eq('String')
+ expect(message.headers['version']).to eq(["2.1.3", "2.1.4"])
+ end
+
+ it "should produce a message with single value headers" do
+ headers = {
+ "version" => "2.1.3",
+ "type" => "String"
+ }
+
+ report = producer.produce(
+ topic: "consume_test_topic",
+ key: "key headers",
+ headers: headers
+ ).wait
+
+ message = wait_for_message(topic: "consume_test_topic", consumer: consumer, delivery_report: report)
+ expect(message).to be
+ expect(message.key).to eq('key headers')
+ expect(message.headers['type']).to eq('String')
+ expect(message.headers['version']).to eq('2.1.3')
+ end
+ end
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-rdkafka
  version: !ruby/object:Gem::Version
- version: 0.19.0
+ version: 0.20.0.rc1
  platform: ruby
  authors:
  - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
  i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
  ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
  -----END CERTIFICATE-----
- date: 2025-01-20 00:00:00.000000000 Z
+ date: 1980-01-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi
@@ -171,8 +171,10 @@ extensions:
  - ext/Rakefile
  extra_rdoc_files: []
  files:
+ - ".github/CODEOWNERS"
  - ".github/FUNDING.yml"
  - ".github/workflows/ci.yml"
+ - ".github/workflows/verify-action-pins.yml"
  - ".gitignore"
  - ".rspec"
  - ".ruby-gemset"
@@ -185,7 +187,7 @@ files:
  - README.md
  - Rakefile
  - certs/cert.pem
- - dist/librdkafka-2.8.0.tar.gz
+ - dist/librdkafka-2.10.0.tar.gz
  - dist/patches/rdkafka_global_init.patch
  - docker-compose.yml
  - ext/README.md
@@ -284,7 +286,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.6.2
+ rubygems_version: 3.6.7
  specification_version: 4
  summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
  It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
metadata.gz.sig CHANGED
Binary file