karafka-rdkafka 0.16.0 → 0.17.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +19 -0
- data/README.md +2 -1
- data/dist/librdkafka_2.5.0.tar.gz +0 -0
- data/docker-compose.yml +1 -1
- data/ext/README.md +5 -4
- data/ext/Rakefile +41 -2
- data/lib/rdkafka/consumer.rb +16 -3
- data/lib/rdkafka/version.rb +3 -3
- data/spec/rdkafka/admin_spec.rb +1 -1
- data/spec/rdkafka/consumer_spec.rb +89 -0
- data.tar.gz.sig +0 -0
- metadata +4 -3
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f0a60c074ecbf580c271442ac5e86a2152acf2542fc88c841f3adf8de99a8326
+  data.tar.gz: ab81215e4a2d4d641521efbd8f0e679a169bea6920378fab237f8401b0b8f3ac
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9595e54c497e0018da1df9611d1b2dca8b4e03fab8ca5ef26e3e20da93fb2ccdc9c0f568ddf7c371fe4f59873187e653744f087afea1dc6ebfc57a151b572dfc
+  data.tar.gz: 6ce0c6d3baee8e939dcfe1867771d2ca41b8d8c9d6018a5d00e63e5c9d79c499beebfdca3f75d8624862351191e36d4ec20fb86fad3eaeb8f169dca2802e83c2
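These are the digests RubyGems records for the two archives inside the `.gem` package and recomputes on install. As a minimal verification sketch (not the RubyGems implementation; it assumes the `.gem` tar has already been unpacked into the current directory):

```ruby
# Minimal sketch, not the RubyGems implementation. A .gem file is a plain
# tar archive containing metadata.gz, data.tar.gz and checksums.yaml.gz;
# assume it was unpacked first, e.g. `tar -xf karafka-rdkafka-0.17.0.gem`.
require "digest"
require "yaml"
require "zlib"

checksums = YAML.safe_load(Zlib::GzipReader.open("checksums.yaml.gz", &:read))

%w[metadata.gz data.tar.gz].each do |file|
  expected = checksums.dig("SHA256", file)
  actual   = Digest::SHA256.hexdigest(File.binread(file))
  raise "SHA256 mismatch for #{file}" unless expected == actual
end
```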
checksums.yaml.gz.sig
CHANGED
Binary file
data/.ruby-version
CHANGED
@@ -1 +1 @@
-3.3.
+3.3.4
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 # Rdkafka Changelog
 
+## 0.17.0 (2024-07-21)
+- [Enhancement] Bump librdkafka to 2.5.0
+
+## 0.16.1 (2024-07-10)
+- [Feature] Add `#seek_by` to be able to seek for a message by topic, partition and offset (zinahia)
+- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
 ## 0.16.0 (2024-06-17)
 - **[Breaking]** Messages without headers returned by `#poll` contain frozen empty hash.
 - **[Breaking]** `HashWithSymbolKeysTreatedLikeStrings` has been removed so headers are regular hashes with string keys.
@@ -7,6 +14,9 @@
 - [Enhancement] Save two objects on message produced and lower CPU usage on message produced with small improvements.
 - [Fix] Remove support for Ruby 2.7. Supporting it was a bug since rest of the karafka ecosystem no longer supports it.
 
+## 0.15.2 (2024-07-10)
+- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
 ## 0.15.1 (2024-05-09)
 - **[Feature]** Provide ability to use topic config on a producer for custom behaviors per dispatch.
 - [Enhancement] Use topic config reference cache for messages production to prevent topic objects allocation with each message.
@@ -17,6 +27,9 @@
 - **[Feature]** Support incremental config describe + alter API (mensfeld)
 - [Enhancement] name polling Thread as `rdkafka.native_kafka#<name>` (nijikon)
 
+## 0.14.11 (2024-07-10)
+- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
 ## 0.14.10 (2024-02-08)
 - [Fix] Background logger stops working after forking causing memory leaks (mensfeld).
 
@@ -63,6 +76,9 @@
 - [Enhancement] Bump librdkafka to 2.3.0
 - [Enhancement] Increase the `#lag` and `#query_watermark_offsets` default timeouts from 100ms to 1000ms. This will compensate for network glitches and remote clusters operations.
 
+## 0.13.10 (2024-07-10)
+- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
 ## 0.13.9 (2023-11-07)
 - [Enhancement] Expose alternative way of managing consumer events via a separate queue.
 - [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem.
@@ -117,6 +133,9 @@
 - Retry metadata fetches on certain errors with a backoff (mensfeld)
 - Do not lock access to underlying native kafka client and rely on Karafka granular locking (mensfeld)
 
+## 0.12.4 (2024-07-10)
+- [Fix] Switch to local release of librdkafka to mitigate its unavailability.
+
 ## 0.12.3
 - Include backtrace in non-raised binded errors.
 - Include topic name in the delivery reports
data/README.md
CHANGED
@@ -163,7 +163,8 @@ bundle exec rake produce_messages
 
 | rdkafka-ruby | librdkafka |
 |-|-|
-| 0.
+| 0.17.0 (Unreleased) | 2.5.0 (2024-07-10) |
+| 0.16.0 (2024-06-13) | 2.4.0 (2024-05-07) |
 | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
 | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
 | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
data/dist/librdkafka_2.5.0.tar.gz
CHANGED
Binary file
data/docker-compose.yml
CHANGED
data/ext/README.md
CHANGED
@@ -1,7 +1,7 @@
 # Ext
 
-This gem depends on the `librdkafka` C library. It is downloaded
-
+This gem depends on the `librdkafka` C library. It is downloaded, stored in
+`dist/` directory, and checked into source control.
 
 To update the `librdkafka` version follow the following steps:
 
@@ -9,8 +9,9 @@ To update the `librdkafka` version follow the following steps:
   version number and asset checksum for `tar.gz`.
 * Change the version in `lib/rdkafka/version.rb`
 * Change the `sha256` in `lib/rdkafka/version.rb`
-* Run `bundle exec rake` in the `ext` directory to download
-  the
+* Run `bundle exec rake dist:download` in the `ext` directory to download the
+  new release and place it in the `dist/` for you
+* Run `bundle exec rake` in the `ext` directory to build the new version
 * Run `docker-compose pull` in the main gem directory to ensure the docker
   images used by the tests and run `docker-compose up`
 * Finally, run `bundle exec rspec` in the main gem directory to execute
data/ext/Rakefile
CHANGED
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 require File.expand_path('../../lib/rdkafka/version', __FILE__)
+require "digest"
 require "fileutils"
 require "open-uri"
 
@@ -22,8 +23,10 @@ task :default => :clean do
     ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV["LDFLAGS"]
   end
 
+  releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
+
   recipe.files << {
-    :url => "
+    :url => "file://#{releases}/librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
     :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
   }
   recipe.configure_options = ["--host=#{recipe.host}"]
@@ -69,6 +72,42 @@ task :clean do
   FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
 end
 
+namespace :dist do
+  task :dir do
+    ENV["RDKAFKA_DIST_PATH"] ||= File.expand_path(File.join(File.dirname(__FILE__), '..', 'dist'))
+  end
+
+  task :file => "dist:dir" do
+    ENV["RDKAFKA_DIST_FILE"] ||= File.join(ENV["RDKAFKA_DIST_PATH"], "librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz")
+  end
+
+  task :clean => "dist:file" do
+    Dir.glob(File.join("#{ENV['RDKAFKA_DIST_PATH']}", "*")).each do |filename|
+      next if filename.include? ENV["RDKAFKA_DIST_FILE"]
+
+      FileUtils.rm_rf filename
+    end
+  end
+
+  task :download => "dist:file" do
+    version = Rdkafka::LIBRDKAFKA_VERSION
+    librdkafka_download = "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{version}"
+
+    URI.open(librdkafka_download) do |file|
+      filename = ENV["RDKAFKA_DIST_FILE"]
+      data = file.read
+
+      if Digest::SHA256.hexdigest(data) != Rdkafka::LIBRDKAFKA_SOURCE_SHA256
+        raise "SHA256 does not match downloaded file"
+      end
+
+      File.write(filename, data)
+    end
+  end
+
+  task :update => %w[dist:download dist:clean]
+end
+
 namespace :build do
   desc "Build librdkafka at the given git sha or tag"
   task :git, [:ref] do |task, args|
@@ -76,7 +115,7 @@ namespace :build do
     version = "git-#{ref}"
 
     recipe = MiniPortile.new("librdkafka", version)
-    recipe.files << "https://github.com/
+    recipe.files << "https://github.com/confluentinc/librdkafka/archive/#{ref}.tar.gz"
     recipe.configure_options = ["--host=#{recipe.host}","--enable-static", "--enable-zstd"]
     recipe.cook
 
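The intended entry point for these tasks is the CLI (`bundle exec rake dist:update` from `ext/`): `dist:update` chains `dist:download` (fetch and SHA256-verify the tarball) with `dist:clean` (prune everything else from `dist/`). A hedged sketch of driving the same tasks from Ruby instead, assuming it runs inside `ext/`:

```ruby
# Hedged sketch: invoke the dist tasks programmatically instead of via the
# rake CLI. Assumes the working directory is ext/ so this Rakefile is found.
require "rake"

app = Rake.application
app.init("rake", []) # empty argv, so caller arguments don't leak into rake
app.load_rakefile    # loads ext/Rakefile from the current directory

# dist:update == dist:download (fetch + checksum-verify the librdkafka
# tarball into dist/) followed by dist:clean (remove other files in dist/).
app["dist:update"].invoke
```

Note the `||=` in `dist:dir` and `dist:file`: both honor pre-set `RDKAFKA_DIST_PATH` / `RDKAFKA_DIST_FILE` environment variables, so the download target can be overridden from the caller's environment.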
data/lib/rdkafka/consumer.rb
CHANGED
@@ -435,6 +435,19 @@ module Rdkafka
     # @return [nil]
     # @raise [RdkafkaError] When seeking fails
     def seek(message)
+      seek_by(message.topic, message.partition, message.offset)
+    end
+
+    # Seek to a particular message by providing the topic, partition and offset.
+    # The next poll on the topic/partition will return the
+    # message at the given offset.
+    #
+    # @param topic [String] The topic in which to seek
+    # @param partition [Integer] The partition number to seek
+    # @param offset [Integer] The partition offset to seek
+    # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
+    def seek_by(topic, partition, offset)
       closed_consumer_check(__method__)
 
       # rd_kafka_offset_store is one of the few calls that does not support
@@ -442,14 +455,14 @@ module Rdkafka
       native_topic = @native_kafka.with_inner do |inner|
         Rdkafka::Bindings.rd_kafka_topic_new(
           inner,
-          message.topic,
+          topic,
           nil
         )
       end
       response = Rdkafka::Bindings.rd_kafka_seek(
         native_topic,
-        message.partition,
-        message.offset,
+        partition,
+        offset,
         0 # timeout
       )
       Rdkafka::RdkafkaError.validate!(response)
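With this refactor `#seek` becomes a thin wrapper over the new `#seek_by`, so callers can reposition a consumer without holding a full message object. A minimal usage sketch; the broker address, group id and topic are placeholders, and the seek assumes the partition is already assigned and being fetched:

```ruby
# Minimal usage sketch for #seek_by. Broker/group/topic values are
# placeholders; seeking only affects partitions the consumer is fetching.
require "rdkafka"

consumer = Rdkafka::Config.new(
  "bootstrap.servers": "localhost:9092",
  "group.id": "seek-by-example"
).consumer
consumer.subscribe("example_topic")

# Before 0.16.1 you needed a previously polled message object:
#   consumer.seek(message)
# With #seek_by, the coordinates are enough, e.g. replay from offset 42:
consumer.seek_by("example_topic", 0, 42)
```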
data/lib/rdkafka/version.rb
CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.16.0"
-  LIBRDKAFKA_VERSION = "2.4.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "
+  VERSION = "0.17.0"
+  LIBRDKAFKA_VERSION = "2.5.0"
+  LIBRDKAFKA_SOURCE_SHA256 = "3dc62de731fd516dfb1032861d9a580d4d0b5b0856beb0f185d06df8e6c26259"
 end
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -34,7 +34,7 @@ describe Rdkafka::Admin do
   describe '#describe_errors' do
     let(:errors) { admin.class.describe_errors }
 
-    it { expect(errors.size).to eq(
+    it { expect(errors.size).to eq(170) }
     it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
     it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
   end
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -258,6 +258,95 @@ describe Rdkafka::Consumer do
     end
   end
 
+  describe "#seek_by" do
+    let(:topic) { "consume_test_topic" }
+    let(:partition) { 0 }
+    let(:offset) { 0 }
+
+    it "should raise an error when seeking fails" do
+      expect(Rdkafka::Bindings).to receive(:rd_kafka_seek).and_return(20)
+      expect {
+        consumer.seek_by(topic, partition, offset)
+      }.to raise_error Rdkafka::RdkafkaError
+    end
+
+    context "subscription" do
+      let(:timeout) { 1000 }
+
+      before do
+        consumer.subscribe(topic)
+
+        # 1. partitions are assigned
+        wait_for_assignment(consumer)
+        expect(consumer.assignment).not_to be_empty
+
+        # 2. eat unrelated messages
+        while(consumer.poll(timeout)) do; end
+      end
+      after { consumer.unsubscribe }
+
+      def send_one_message(val)
+        producer.produce(
+          topic: topic,
+          payload: "payload #{val}",
+          key: "key 1",
+          partition: 0
+        ).wait
+      end
+
+      it "works when a partition is paused" do
+        # 3. get reference message
+        send_one_message(:a)
+        message1 = consumer.poll(timeout)
+        expect(message1&.payload).to eq "payload a"
+
+        # 4. pause the subscription
+        tpl = Rdkafka::Consumer::TopicPartitionList.new
+        tpl.add_topic(topic, 1)
+        consumer.pause(tpl)
+
+        # 5. seek by the previous message fields
+        consumer.seek_by(message1.topic, message1.partition, message1.offset)
+
+        # 6. resume the subscription
+        tpl = Rdkafka::Consumer::TopicPartitionList.new
+        tpl.add_topic(topic, 1)
+        consumer.resume(tpl)
+
+        # 7. ensure same message is read again
+        message2 = consumer.poll(timeout)
+
+        # This is needed because `enable.auto.offset.store` is true but when running in CI that
+        # is overloaded, offset store lags
+        sleep(2)
+
+        consumer.commit
+        expect(message1.offset).to eq message2.offset
+        expect(message1.payload).to eq message2.payload
+      end
+
+      it "allows skipping messages" do
+        # 3. send messages
+        send_one_message(:a)
+        send_one_message(:b)
+        send_one_message(:c)
+
+        # 4. get reference message
+        message = consumer.poll(timeout)
+        expect(message&.payload).to eq "payload a"
+
+        # 5. seek over one message
+        consumer.seek_by(message.topic, message.partition, message.offset + 2)
+
+        # 6. ensure that only one message is available
+        records = consumer.poll(timeout)
+        expect(records&.payload).to eq "payload c"
+        records = consumer.poll(timeout)
+        expect(records).to be_nil
+      end
+    end
+  end
+
   describe "#assign and #assignment" do
     it "should return an empty assignment if nothing is assigned" do
       expect(consumer.assignment).to be_empty
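The first example above stubs `rd_kafka_seek` to return the raw error code 20, which `RdkafkaError.validate!` turns into an exception. Outside of specs the same failure surfaces as a rescuable `Rdkafka::RdkafkaError`; a hedged sketch, reusing the `consumer` from the earlier usage example (which error you actually get depends on broker and assignment state):

```ruby
# Hedged sketch of the failure path the spec above exercises. The rescue is
# the point here; the triggering partition number is just illustrative.
begin
  consumer.seek_by("example_topic", 999, 0) # a partition we never consumed
rescue Rdkafka::RdkafkaError => e
  warn "seek failed: #{e.code}" # downcased, symbolized librdkafka error name
end
```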
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.16.0
+  version: 0.17.0
 platform: ruby
 authors:
 - Thijs Cadier
@@ -36,7 +36,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-
+date: 2024-07-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -186,6 +186,7 @@ files:
 - README.md
 - Rakefile
 - certs/cert_chain.pem
+- dist/librdkafka_2.5.0.tar.gz
 - docker-compose.yml
 - ext/README.md
 - ext/Rakefile
@@ -285,7 +286,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.9
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig
CHANGED
Binary file