karafka-rdkafka 0.23.1.rc2-aarch64-linux-gnu → 0.24.0-aarch64-linux-gnu
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -1
- data/Gemfile +8 -0
- data/Gemfile.lint +14 -0
- data/Gemfile.lint.lock +123 -0
- data/README.md +1 -0
- data/Rakefile +21 -21
- data/docker-compose-ssl.yml +1 -1
- data/docker-compose.yml +1 -1
- data/ext/librdkafka.so +0 -0
- data/karafka-rdkafka.gemspec +39 -47
- data/lib/rdkafka/abstract_handle.rb +23 -5
- data/lib/rdkafka/admin/acl_binding_result.rb +5 -5
- data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +7 -4
- data/lib/rdkafka/admin/create_acl_report.rb +3 -2
- data/lib/rdkafka/admin/create_partitions_handle.rb +6 -3
- data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +6 -3
- data/lib/rdkafka/admin/create_topic_report.rb +3 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +9 -6
- data/lib/rdkafka/admin/delete_acl_report.rb +5 -3
- data/lib/rdkafka/admin/delete_groups_handle.rb +10 -5
- data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +6 -3
- data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +9 -6
- data/lib/rdkafka/admin/describe_acl_report.rb +5 -3
- data/lib/rdkafka/admin/describe_configs_handle.rb +7 -4
- data/lib/rdkafka/admin/describe_configs_report.rb +7 -1
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +7 -4
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +7 -1
- data/lib/rdkafka/admin.rb +185 -123
- data/lib/rdkafka/bindings.rb +121 -106
- data/lib/rdkafka/callbacks.rb +72 -12
- data/lib/rdkafka/config.rb +36 -24
- data/lib/rdkafka/consumer/headers.rb +3 -2
- data/lib/rdkafka/consumer/message.rb +12 -11
- data/lib/rdkafka/consumer/partition.rb +9 -4
- data/lib/rdkafka/consumer/topic_partition_list.rb +15 -15
- data/lib/rdkafka/consumer.rb +231 -26
- data/lib/rdkafka/defaults.rb +84 -0
- data/lib/rdkafka/error.rb +66 -21
- data/lib/rdkafka/helpers/oauth.rb +11 -6
- data/lib/rdkafka/helpers/time.rb +5 -0
- data/lib/rdkafka/metadata.rb +45 -21
- data/lib/rdkafka/native_kafka.rb +89 -4
- data/lib/rdkafka/producer/delivery_handle.rb +4 -4
- data/lib/rdkafka/producer/delivery_report.rb +8 -4
- data/lib/rdkafka/producer/partitions_count_cache.rb +29 -19
- data/lib/rdkafka/producer/testing.rb +3 -3
- data/lib/rdkafka/producer.rb +163 -57
- data/lib/rdkafka/version.rb +6 -3
- data/lib/rdkafka.rb +1 -0
- data/package-lock.json +331 -0
- data/package.json +9 -0
- data/renovate.json +22 -8
- metadata +7 -86
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: a7058149bcea613963a7df4058b47b47a06992c5e864fff26f9fe3c15bfff12a
|
|
4
|
+
data.tar.gz: e4a861f2d1601e014b40ea380a5483863db3e13c0d89def2cdf6d1bb5627942e
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 0e4ec74b36320789abab70a0a2fa1283a7dec08949552c4ab5b1a96a40ede4a8b40a4ac1d44dc9089d306aba33483b16e8070ee008fc6638c5e907ca63ade985
|
|
7
|
+
data.tar.gz: '089aa71eccc39df14e639d8d10822050444e2475b2c13eaa5d0486d0722f13e837eaace7c73dfbe26aad303bdab3446b7728f38cb75ce904575523dd7c91f1e7'
|
data/CHANGELOG.md
CHANGED
|
@@ -1,6 +1,26 @@
|
|
|
1
1
|
# Rdkafka Changelog
|
|
2
2
|
|
|
3
|
-
## 0.
|
|
3
|
+
## 0.24.0 (2026-02-25)
|
|
4
|
+
- **[Feature]** Add `Producer#queue_size` (and `#queue_length` alias) to report the number of messages waiting in the librdkafka output queue. Useful for monitoring producer backpressure, implementing custom flow control, debugging message delivery issues, and graceful shutdown logic.
|
|
5
|
+
- **[Feature]** Add fiber scheduler API for integration with Ruby fiber schedulers (Falcon, Async) and custom event loops (from upstream). Expose `enable_queue_io_events` and `enable_background_queue_io_events` methods on `Consumer`, `Producer`, and `Admin`.
|
|
6
|
+
- **[Deprecation]** `AbstractHandle#wait` parameter `max_wait_timeout` (seconds) is deprecated in favor of `max_wait_timeout_ms` (milliseconds). The old parameter still works for backwards compatibility but will be removed in v1.0.0.
|
|
7
|
+
- **[Deprecation]** `PartitionsCountCache` constructor parameter `ttl` (seconds) is deprecated in favor of `ttl_ms` (milliseconds). The old parameter still works for backwards compatibility but will be removed in v1.0.0.
|
|
8
|
+
- [Enhancement] Add Ruby 4.0 support.
|
|
9
|
+
- [Enhancement] Add `Rdkafka::Defaults` module with centralized timeout constants (aligning with upstream refactor).
|
|
10
|
+
- [Enhancement] Add `run_polling_thread` parameter to `Config#producer` and `Config#admin` for fiber scheduler integration (from upstream).
|
|
11
|
+
- [Enhancement] Extract all hardcoded timeout values to named constants for better maintainability and discoverability.
|
|
12
|
+
- [Enhancement] Add `timeout_ms` parameter to `Consumer#each` for configurable poll timeout (from upstream).
|
|
13
|
+
- [Enhancement] Extract non-time configuration values (`METADATA_MAX_RETRIES`, `PARTITIONS_COUNT_CACHE_TTL_MS`) to `Rdkafka::Defaults` module (from upstream).
|
|
14
|
+
- [Enhancement] Add descriptive error messages for glibc compatibility issues with instructions for resolution (from upstream).
|
|
15
|
+
- [Enhancement] Use native ARM64 runners instead of QEMU emulation for Alpine musl aarch64 builds, improving build performance and reliability (from upstream).
|
|
16
|
+
- [Enhancement] Enable parallel compilation (`make -j$(nproc)`) for ARM64 Alpine musl builds (from upstream).
|
|
17
|
+
- [Enhancement] Bump librdkafka to 2.13.0.
|
|
18
|
+
- [Enhancement] Add non-blocking poll methods (`poll_nb`, `events_poll_nb`) that skip GVL release for efficient fiber scheduler integration when using `poll(0)` (from upstream).
|
|
19
|
+
- [Enhancement] Add `events_poll_nb_each` method on `Producer`, `Consumer`, and `Admin` for polling events in a single GVL/mutex session. Yields count after each iteration, caller returns `:stop` to break (from upstream).
|
|
20
|
+
- [Enhancement] Add `poll_nb_each` method on `Consumer` for non-blocking message polling with proper resource cleanup, yielding each message and supporting early termination via `:stop` return value (from upstream).
|
|
21
|
+
- [Fix] Fix Kerberos build on Alpine 3.23+ (GCC 15/C23) by forcing C17 semantics to maintain compatibility with old-style K&R declarations in MIT Kerberos and Cyrus SASL dependencies.
|
|
22
|
+
|
|
23
|
+
## 0.23.1 (2025-11-14)
|
|
4
24
|
- **[Feature]** Add integrated fatal error handling in `RdkafkaError.validate!` - automatically detects and handles fatal errors (-150) with single entrypoint API.
|
|
5
25
|
- [Enhancement] Add optional `client_ptr` parameter to `validate!` for automatic fatal error remapping to actual underlying error codes.
|
|
6
26
|
- [Enhancement] Update all Producer and Consumer `validate!` calls to provide `client_ptr` for comprehensive fatal error handling.
|
data/Gemfile
CHANGED
data/Gemfile.lint
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
source "https://rubygems.org"
|
|
4
|
+
|
|
5
|
+
# Documentation linting
|
|
6
|
+
gem "yard-lint"
|
|
7
|
+
|
|
8
|
+
# Code style (StandardRB via RuboCop)
|
|
9
|
+
gem "standard"
|
|
10
|
+
gem "standard-performance"
|
|
11
|
+
gem "rubocop-performance"
|
|
12
|
+
gem "rubocop-rspec"
|
|
13
|
+
gem "standard-rspec"
|
|
14
|
+
gem "rubocop-thread_safety"
|
data/Gemfile.lint.lock
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
GEM
|
|
2
|
+
remote: https://rubygems.org/
|
|
3
|
+
specs:
|
|
4
|
+
ast (2.4.3)
|
|
5
|
+
json (2.18.0)
|
|
6
|
+
language_server-protocol (3.17.0.5)
|
|
7
|
+
lint_roller (1.1.0)
|
|
8
|
+
parallel (1.27.0)
|
|
9
|
+
parser (3.3.10.1)
|
|
10
|
+
ast (~> 2.4.1)
|
|
11
|
+
racc
|
|
12
|
+
prism (1.8.0)
|
|
13
|
+
racc (1.8.1)
|
|
14
|
+
rainbow (3.1.1)
|
|
15
|
+
regexp_parser (2.11.3)
|
|
16
|
+
rubocop (1.82.1)
|
|
17
|
+
json (~> 2.3)
|
|
18
|
+
language_server-protocol (~> 3.17.0.2)
|
|
19
|
+
lint_roller (~> 1.1.0)
|
|
20
|
+
parallel (~> 1.10)
|
|
21
|
+
parser (>= 3.3.0.2)
|
|
22
|
+
rainbow (>= 2.2.2, < 4.0)
|
|
23
|
+
regexp_parser (>= 2.9.3, < 3.0)
|
|
24
|
+
rubocop-ast (>= 1.48.0, < 2.0)
|
|
25
|
+
ruby-progressbar (~> 1.7)
|
|
26
|
+
unicode-display_width (>= 2.4.0, < 4.0)
|
|
27
|
+
rubocop-ast (1.49.0)
|
|
28
|
+
parser (>= 3.3.7.2)
|
|
29
|
+
prism (~> 1.7)
|
|
30
|
+
rubocop-capybara (2.22.1)
|
|
31
|
+
lint_roller (~> 1.1)
|
|
32
|
+
rubocop (~> 1.72, >= 1.72.1)
|
|
33
|
+
rubocop-factory_bot (2.28.0)
|
|
34
|
+
lint_roller (~> 1.1)
|
|
35
|
+
rubocop (~> 1.72, >= 1.72.1)
|
|
36
|
+
rubocop-performance (1.26.1)
|
|
37
|
+
lint_roller (~> 1.1)
|
|
38
|
+
rubocop (>= 1.75.0, < 2.0)
|
|
39
|
+
rubocop-ast (>= 1.47.1, < 2.0)
|
|
40
|
+
rubocop-rspec (3.9.0)
|
|
41
|
+
lint_roller (~> 1.1)
|
|
42
|
+
rubocop (~> 1.81)
|
|
43
|
+
rubocop-rspec_rails (2.32.0)
|
|
44
|
+
lint_roller (~> 1.1)
|
|
45
|
+
rubocop (~> 1.72, >= 1.72.1)
|
|
46
|
+
rubocop-rspec (~> 3.5)
|
|
47
|
+
rubocop-thread_safety (0.7.3)
|
|
48
|
+
lint_roller (~> 1.1)
|
|
49
|
+
rubocop (~> 1.72, >= 1.72.1)
|
|
50
|
+
rubocop-ast (>= 1.44.0, < 2.0)
|
|
51
|
+
ruby-progressbar (1.13.0)
|
|
52
|
+
standard (1.53.0)
|
|
53
|
+
language_server-protocol (~> 3.17.0.2)
|
|
54
|
+
lint_roller (~> 1.0)
|
|
55
|
+
rubocop (~> 1.82.0)
|
|
56
|
+
standard-custom (~> 1.0.0)
|
|
57
|
+
standard-performance (~> 1.8)
|
|
58
|
+
standard-custom (1.0.2)
|
|
59
|
+
lint_roller (~> 1.0)
|
|
60
|
+
rubocop (~> 1.50)
|
|
61
|
+
standard-performance (1.9.0)
|
|
62
|
+
lint_roller (~> 1.1)
|
|
63
|
+
rubocop-performance (~> 1.26.0)
|
|
64
|
+
standard-rspec (0.4.0)
|
|
65
|
+
lint_roller (>= 1.0)
|
|
66
|
+
rubocop-capybara (~> 2.22)
|
|
67
|
+
rubocop-factory_bot (~> 2.27)
|
|
68
|
+
rubocop-rspec (~> 3.9)
|
|
69
|
+
rubocop-rspec_rails (~> 2.31)
|
|
70
|
+
unicode-display_width (3.2.0)
|
|
71
|
+
unicode-emoji (~> 4.1)
|
|
72
|
+
unicode-emoji (4.2.0)
|
|
73
|
+
yard (0.9.38)
|
|
74
|
+
yard-lint (1.4.0)
|
|
75
|
+
yard (~> 0.9)
|
|
76
|
+
zeitwerk (~> 2.6)
|
|
77
|
+
zeitwerk (2.7.4)
|
|
78
|
+
|
|
79
|
+
PLATFORMS
|
|
80
|
+
ruby
|
|
81
|
+
x86_64-linux
|
|
82
|
+
|
|
83
|
+
DEPENDENCIES
|
|
84
|
+
rubocop-performance
|
|
85
|
+
rubocop-rspec
|
|
86
|
+
rubocop-thread_safety
|
|
87
|
+
standard
|
|
88
|
+
standard-performance
|
|
89
|
+
standard-rspec
|
|
90
|
+
yard-lint
|
|
91
|
+
|
|
92
|
+
CHECKSUMS
|
|
93
|
+
ast (2.4.3) sha256=954615157c1d6a382bc27d690d973195e79db7f55e9765ac7c481c60bdb4d383
|
|
94
|
+
json (2.18.0) sha256=b10506aee4183f5cf49e0efc48073d7b75843ce3782c68dbeb763351c08fd505
|
|
95
|
+
language_server-protocol (3.17.0.5) sha256=fd1e39a51a28bf3eec959379985a72e296e9f9acfce46f6a79d31ca8760803cc
|
|
96
|
+
lint_roller (1.1.0) sha256=2c0c845b632a7d172cb849cc90c1bce937a28c5c8ccccb50dfd46a485003cc87
|
|
97
|
+
parallel (1.27.0) sha256=4ac151e1806b755fb4e2dc2332cbf0e54f2e24ba821ff2d3dcf86bf6dc4ae130
|
|
98
|
+
parser (3.3.10.1) sha256=06f6a725d2cd91e5e7f2b7c32ba143631e1f7c8ae2fb918fc4cebec187e6a688
|
|
99
|
+
prism (1.8.0) sha256=84453a16ef5530ea62c5f03ec16b52a459575ad4e7b9c2b360fd8ce2c39c1254
|
|
100
|
+
racc (1.8.1) sha256=4a7f6929691dbec8b5209a0b373bc2614882b55fc5d2e447a21aaa691303d62f
|
|
101
|
+
rainbow (3.1.1) sha256=039491aa3a89f42efa1d6dec2fc4e62ede96eb6acd95e52f1ad581182b79bc6a
|
|
102
|
+
regexp_parser (2.11.3) sha256=ca13f381a173b7a93450e53459075c9b76a10433caadcb2f1180f2c741fc55a4
|
|
103
|
+
rubocop (1.82.1) sha256=09f1a6a654a960eda767aebea33e47603080f8e9c9a3f019bf9b94c9cab5e273
|
|
104
|
+
rubocop-ast (1.49.0) sha256=49c3676d3123a0923d333e20c6c2dbaaae2d2287b475273fddee0c61da9f71fd
|
|
105
|
+
rubocop-capybara (2.22.1) sha256=ced88caef23efea53f46e098ff352f8fc1068c649606ca75cb74650970f51c0c
|
|
106
|
+
rubocop-factory_bot (2.28.0) sha256=4b17fc02124444173317e131759d195b0d762844a71a29fe8139c1105d92f0cb
|
|
107
|
+
rubocop-performance (1.26.1) sha256=cd19b936ff196df85829d264b522fd4f98b6c89ad271fa52744a8c11b8f71834
|
|
108
|
+
rubocop-rspec (3.9.0) sha256=8fa70a3619408237d789aeecfb9beef40576acc855173e60939d63332fdb55e2
|
|
109
|
+
rubocop-rspec_rails (2.32.0) sha256=4a0d641c72f6ebb957534f539d9d0a62c47abd8ce0d0aeee1ef4701e892a9100
|
|
110
|
+
rubocop-thread_safety (0.7.3) sha256=067cdd52fbf5deffc18995437e45b5194236eaff4f71de3375a1f6052e48f431
|
|
111
|
+
ruby-progressbar (1.13.0) sha256=80fc9c47a9b640d6834e0dc7b3c94c9df37f08cb072b7761e4a71e22cff29b33
|
|
112
|
+
standard (1.53.0) sha256=f3c9493385db7079d0abce6f7582f553122156997b81258cd361d3480eeacf9c
|
|
113
|
+
standard-custom (1.0.2) sha256=424adc84179a074f1a2a309bb9cf7cd6bfdb2b6541f20c6bf9436c0ba22a652b
|
|
114
|
+
standard-performance (1.9.0) sha256=49483d31be448292951d80e5e67cdcb576c2502103c7b40aec6f1b6e9c88e3f2
|
|
115
|
+
standard-rspec (0.4.0) sha256=0fdf64c887cd6404f1c3a1435b14ba6fde2e9e80c0f4dafe4b04a67f673db262
|
|
116
|
+
unicode-display_width (3.2.0) sha256=0cdd96b5681a5949cdbc2c55e7b420facae74c4aaf9a9815eee1087cb1853c42
|
|
117
|
+
unicode-emoji (4.2.0) sha256=519e69150f75652e40bf736106cfbc8f0f73aa3fb6a65afe62fefa7f80b0f80f
|
|
118
|
+
yard (0.9.38) sha256=721fb82afb10532aa49860655f6cc2eaa7130889df291b052e1e6b268283010f
|
|
119
|
+
yard-lint (1.4.0) sha256=7dd88fbb08fd77cb840bea899d58812817b36d92291b5693dd0eeb3af9f91f0f
|
|
120
|
+
zeitwerk (2.7.4) sha256=2bef90f356bdafe9a6c2bd32bcd804f83a4f9b8bc27f3600fff051eb3edcec8b
|
|
121
|
+
|
|
122
|
+
BUNDLED WITH
|
|
123
|
+
4.0.3
|
data/README.md
CHANGED
|
@@ -63,6 +63,7 @@ Contributions should generally be made to the upstream [rdkafka-ruby repository]
|
|
|
63
63
|
|
|
64
64
|
| rdkafka-ruby | librdkafka | patches |
|
|
65
65
|
|-|-|-|
|
|
66
|
+
| 0.24.x (2026-02-25) | 2.13.0 (2026-01-05) | yes |
|
|
66
67
|
| 0.23.x (2025-11-01) | 2.12.1 (2025-10-16) | yes |
|
|
67
68
|
| 0.22.x (2025-09-26) | 2.11.1 (2025-08-18) | yes |
|
|
68
69
|
| 0.21.x (2025-08-18) | 2.11.0 (2025-07-03) | yes |
|
data/Rakefile
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
# frozen_string_literal: true
|
|
2
2
|
|
|
3
|
-
require
|
|
3
|
+
require "bundler/gem_tasks"
|
|
4
4
|
require "./lib/rdkafka"
|
|
5
5
|
|
|
6
|
-
desc
|
|
6
|
+
desc "Generate some message traffic"
|
|
7
7
|
task :produce_messages do
|
|
8
|
-
config = {
|
|
8
|
+
config = { "bootstrap.servers": "localhost:9092" }
|
|
9
9
|
if ENV["DEBUG"]
|
|
10
10
|
config[:debug] = "broker,topic,msg"
|
|
11
11
|
end
|
|
@@ -15,24 +15,24 @@ task :produce_messages do
|
|
|
15
15
|
100.times do |i|
|
|
16
16
|
puts "Producing message #{i}"
|
|
17
17
|
delivery_handles << producer.produce(
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
18
|
+
topic: "rake_test_topic",
|
|
19
|
+
payload: "Payload #{i} from Rake",
|
|
20
|
+
key: "Key #{i} from Rake"
|
|
21
21
|
)
|
|
22
22
|
end
|
|
23
|
-
puts
|
|
23
|
+
puts "Waiting for delivery"
|
|
24
24
|
delivery_handles.each(&:wait)
|
|
25
|
-
puts
|
|
25
|
+
puts "Done"
|
|
26
26
|
end
|
|
27
27
|
|
|
28
|
-
desc
|
|
28
|
+
desc "Consume some messages"
|
|
29
29
|
task :consume_messages do
|
|
30
30
|
config = {
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
31
|
+
"bootstrap.servers": "localhost:9092",
|
|
32
|
+
"group.id": "rake_test",
|
|
33
|
+
"enable.partition.eof": false,
|
|
34
|
+
"auto.offset.reset": "earliest",
|
|
35
|
+
"statistics.interval.ms": 10_000
|
|
36
36
|
}
|
|
37
37
|
if ENV["DEBUG"]
|
|
38
38
|
config[:debug] = "cgrp,topic,fetch"
|
|
@@ -40,7 +40,7 @@ task :consume_messages do
|
|
|
40
40
|
Rdkafka::Config.statistics_callback = lambda do |stats|
|
|
41
41
|
puts stats
|
|
42
42
|
end
|
|
43
|
-
|
|
43
|
+
Rdkafka::Config.new(config).consumer
|
|
44
44
|
consumer = Rdkafka::Config.new(config).consumer
|
|
45
45
|
consumer.subscribe("rake_test_topic")
|
|
46
46
|
consumer.each do |message|
|
|
@@ -48,14 +48,14 @@ task :consume_messages do
|
|
|
48
48
|
end
|
|
49
49
|
end
|
|
50
50
|
|
|
51
|
-
desc
|
|
51
|
+
desc "Hammer down"
|
|
52
52
|
task :load_test do
|
|
53
53
|
puts "Starting load test"
|
|
54
54
|
|
|
55
55
|
config = Rdkafka::Config.new(
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
56
|
+
"bootstrap.servers": "localhost:9092",
|
|
57
|
+
"group.id": "load-test",
|
|
58
|
+
"enable.partition.eof": false
|
|
59
59
|
)
|
|
60
60
|
|
|
61
61
|
# Create a producer in a thread
|
|
@@ -65,9 +65,9 @@ task :load_test do
|
|
|
65
65
|
handles = []
|
|
66
66
|
1000.times do |i|
|
|
67
67
|
handles.push(producer.produce(
|
|
68
|
-
topic:
|
|
68
|
+
topic: "load_test_topic",
|
|
69
69
|
payload: "Payload #{i}",
|
|
70
|
-
key:
|
|
70
|
+
key: "Key #{i}"
|
|
71
71
|
))
|
|
72
72
|
end
|
|
73
73
|
handles.each(&:wait)
|
data/docker-compose-ssl.yml
CHANGED
data/docker-compose.yml
CHANGED
data/ext/librdkafka.so
CHANGED
|
Binary file
|
data/karafka-rdkafka.gemspec
CHANGED
|
@@ -1,52 +1,51 @@
|
|
|
1
1
|
# frozen_string_literal: true
|
|
2
2
|
|
|
3
|
-
require File.expand_path(
|
|
3
|
+
require File.expand_path("lib/rdkafka/version", __dir__)
|
|
4
4
|
|
|
5
5
|
Gem::Specification.new do |gem|
|
|
6
|
-
gem.authors = [
|
|
6
|
+
gem.authors = ["Thijs Cadier", "Maciej Mensfeld"]
|
|
7
7
|
gem.email = ["contact@karafka.io"]
|
|
8
8
|
gem.description = "Modern Kafka client library for Ruby based on librdkafka"
|
|
9
9
|
gem.summary = "The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka. It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+ and Ruby 2.7+."
|
|
10
|
-
gem.license =
|
|
10
|
+
gem.license = "MIT"
|
|
11
11
|
|
|
12
|
-
gem.
|
|
13
|
-
gem.
|
|
14
|
-
gem.require_paths = ['lib']
|
|
12
|
+
gem.name = "karafka-rdkafka"
|
|
13
|
+
gem.require_paths = ["lib"]
|
|
15
14
|
gem.version = Rdkafka::VERSION
|
|
16
|
-
gem.required_ruby_version =
|
|
15
|
+
gem.required_ruby_version = ">= 3.2"
|
|
17
16
|
|
|
18
17
|
files = `git ls-files`.split($\)
|
|
19
18
|
files = files.reject do |file|
|
|
20
|
-
next true if file.start_with?(
|
|
21
|
-
next true if file.start_with?(
|
|
22
|
-
next true if file.start_with?(
|
|
19
|
+
next true if file.start_with?(".")
|
|
20
|
+
next true if file.start_with?("spec/")
|
|
21
|
+
next true if file.start_with?("ext/README.md")
|
|
23
22
|
|
|
24
23
|
false
|
|
25
24
|
end
|
|
26
25
|
|
|
27
|
-
if ENV[
|
|
28
|
-
gem.platform = ENV[
|
|
26
|
+
if ENV["RUBY_PLATFORM"]
|
|
27
|
+
gem.platform = ENV["RUBY_PLATFORM"]
|
|
29
28
|
|
|
30
29
|
# Do not include the source code for librdkafka as it should be precompiled already per
|
|
31
30
|
# platform. Same applies to any possible patches.
|
|
32
31
|
# Do not include github actions details in RubyGems releases
|
|
33
32
|
gem.files = files.reject do |file|
|
|
34
|
-
next true if file.start_with?(
|
|
35
|
-
next true if file.start_with?(
|
|
36
|
-
next true if file.start_with?(
|
|
37
|
-
next true if file.start_with?(
|
|
38
|
-
next true if file.start_with?(
|
|
33
|
+
next true if file.start_with?("dist/")
|
|
34
|
+
next true if file.start_with?("ext/build_")
|
|
35
|
+
next true if file.start_with?("ext/ci_")
|
|
36
|
+
next true if file.start_with?("ext/Rakefile")
|
|
37
|
+
next true if file.start_with?("ext/generate-")
|
|
39
38
|
|
|
40
39
|
false
|
|
41
40
|
end
|
|
42
41
|
|
|
43
42
|
# Add the compiled extensions that exist (not in git)
|
|
44
|
-
if File.exist?(
|
|
45
|
-
gem.files <<
|
|
43
|
+
if File.exist?("ext/librdkafka.so")
|
|
44
|
+
gem.files << "ext/librdkafka.so"
|
|
46
45
|
end
|
|
47
46
|
|
|
48
|
-
if File.exist?(
|
|
49
|
-
gem.files <<
|
|
47
|
+
if File.exist?("ext/librdkafka.dylib")
|
|
48
|
+
gem.files << "ext/librdkafka.dylib"
|
|
50
49
|
end
|
|
51
50
|
else
|
|
52
51
|
gem.platform = Gem::Platform::RUBY
|
|
@@ -54,39 +53,32 @@ Gem::Specification.new do |gem|
|
|
|
54
53
|
# Do not include code used for building native extensions
|
|
55
54
|
# Do not include github actions details in RubyGems releases
|
|
56
55
|
gem.files = files.reject do |file|
|
|
57
|
-
next true if file.start_with?(
|
|
58
|
-
next true if file.start_with?(
|
|
59
|
-
next true if file.start_with?(
|
|
60
|
-
next false unless file.start_with?(
|
|
61
|
-
next false if file.start_with?(
|
|
62
|
-
next false if file.start_with?(
|
|
56
|
+
next true if file.start_with?("ext/build_")
|
|
57
|
+
next true if file.start_with?("ext/ci_")
|
|
58
|
+
next true if file.start_with?("ext/generate-")
|
|
59
|
+
next false unless file.start_with?("dist/")
|
|
60
|
+
next false if file.start_with?("dist/patches")
|
|
61
|
+
next false if file.start_with?("dist/librdkafka-")
|
|
63
62
|
|
|
64
63
|
true
|
|
65
64
|
end
|
|
66
65
|
|
|
67
|
-
gem.extensions = %w
|
|
66
|
+
gem.extensions = %w[ext/Rakefile]
|
|
68
67
|
end
|
|
69
68
|
|
|
70
|
-
gem.add_dependency
|
|
71
|
-
gem.add_dependency
|
|
72
|
-
gem.add_dependency
|
|
73
|
-
gem.add_dependency
|
|
74
|
-
gem.add_dependency
|
|
75
|
-
|
|
76
|
-
gem.add_development_dependency 'ostruct'
|
|
77
|
-
gem.add_development_dependency 'pry'
|
|
78
|
-
gem.add_development_dependency 'rspec', '~> 3.5'
|
|
79
|
-
gem.add_development_dependency 'rake'
|
|
80
|
-
gem.add_development_dependency 'simplecov'
|
|
81
|
-
gem.add_development_dependency 'warning'
|
|
69
|
+
gem.add_dependency "ffi", "~> 1.17.1"
|
|
70
|
+
gem.add_dependency "json", "> 2.0"
|
|
71
|
+
gem.add_dependency "logger"
|
|
72
|
+
gem.add_dependency "mini_portile2", "~> 2.6"
|
|
73
|
+
gem.add_dependency "rake", "> 12"
|
|
82
74
|
|
|
83
75
|
gem.metadata = {
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
76
|
+
"funding_uri" => "https://karafka.io/#become-pro",
|
|
77
|
+
"homepage_uri" => "https://karafka.io",
|
|
78
|
+
"changelog_uri" => "https://karafka.io/docs/Changelog-Karafka-Rdkafka/",
|
|
79
|
+
"bug_tracker_uri" => "https://github.com/karafka/karafka-rdkafka/issues",
|
|
80
|
+
"source_code_uri" => "https://github.com/karafka/karafka-rdkafka",
|
|
81
|
+
"documentation_uri" => "https://karafka.io/docs",
|
|
82
|
+
"rubygems_mfa_required" => "true"
|
|
91
83
|
}
|
|
92
84
|
end
|
|
@@ -54,16 +54,34 @@ module Rdkafka
|
|
|
54
54
|
# If there is a timeout this does not mean the operation failed, rdkafka might still be working
|
|
55
55
|
# on the operation. In this case it is possible to call wait again.
|
|
56
56
|
#
|
|
57
|
-
# @param max_wait_timeout [Numeric, nil]
|
|
58
|
-
#
|
|
57
|
+
# @param max_wait_timeout [Numeric, nil] DEPRECATED: Use max_wait_timeout_ms instead.
|
|
58
|
+
# Amount of time in seconds to wait before timing out. Will be removed in v1.0.0.
|
|
59
|
+
# @param max_wait_timeout_ms [Numeric, nil] Amount of time in milliseconds to wait before
|
|
60
|
+
# timing out. If this is nil we will wait forever. Defaults to 60,000ms (60 seconds).
|
|
59
61
|
# @param raise_response_error [Boolean] should we raise error when waiting finishes
|
|
60
62
|
#
|
|
61
63
|
# @return [Object] Operation-specific result
|
|
62
64
|
#
|
|
63
65
|
# @raise [RdkafkaError] When the operation failed
|
|
64
66
|
# @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
|
|
65
|
-
def wait(max_wait_timeout:
|
|
66
|
-
|
|
67
|
+
def wait(max_wait_timeout: :not_provided, max_wait_timeout_ms: :not_provided, raise_response_error: true)
|
|
68
|
+
# Determine which timeout value to use
|
|
69
|
+
if max_wait_timeout != :not_provided && max_wait_timeout_ms != :not_provided
|
|
70
|
+
warn "DEPRECATION WARNING: Both max_wait_timeout and max_wait_timeout_ms were provided. " \
|
|
71
|
+
"Using max_wait_timeout_ms. The max_wait_timeout parameter is deprecated and will be removed in v1.0.0."
|
|
72
|
+
timeout_ms = max_wait_timeout_ms
|
|
73
|
+
elsif max_wait_timeout != :not_provided
|
|
74
|
+
warn "DEPRECATION WARNING: max_wait_timeout (seconds) is deprecated. " \
|
|
75
|
+
"Use max_wait_timeout_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
|
|
76
|
+
timeout_ms = max_wait_timeout ? (max_wait_timeout * 1000).to_i : nil
|
|
77
|
+
elsif max_wait_timeout_ms == :not_provided
|
|
78
|
+
timeout_ms = Defaults::HANDLE_WAIT_TIMEOUT_MS
|
|
79
|
+
else
|
|
80
|
+
timeout_ms = max_wait_timeout_ms
|
|
81
|
+
end
|
|
82
|
+
|
|
83
|
+
timeout_s = timeout_ms ? timeout_ms / 1000.0 : nil
|
|
84
|
+
timeout = timeout_s ? monotonic_now + timeout_s : MAX_WAIT_TIMEOUT_FOREVER
|
|
67
85
|
|
|
68
86
|
@mutex.synchronize do
|
|
69
87
|
loop do
|
|
@@ -74,7 +92,7 @@ module Rdkafka
|
|
|
74
92
|
@resource.wait(@mutex, to_wait)
|
|
75
93
|
else
|
|
76
94
|
raise WaitTimeoutError.new(
|
|
77
|
-
"Waiting for #{operation_name} timed out after #{
|
|
95
|
+
"Waiting for #{operation_name} timed out after #{timeout_ms} ms"
|
|
78
96
|
)
|
|
79
97
|
end
|
|
80
98
|
elsif self[:response] != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR && raise_response_error
|
|
@@ -3,17 +3,17 @@
|
|
|
3
3
|
module Rdkafka
|
|
4
4
|
class Admin
|
|
5
5
|
# Extracts attributes of rd_kafka_AclBinding_t
|
|
6
|
-
#
|
|
7
6
|
class AclBindingResult
|
|
8
7
|
attr_reader :result_error, :error_string, :matching_acl_resource_type,
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
8
|
+
:matching_acl_resource_name, :matching_acl_resource_pattern_type,
|
|
9
|
+
:matching_acl_principal, :matching_acl_host, :matching_acl_operation,
|
|
10
|
+
:matching_acl_permission_type
|
|
12
11
|
|
|
13
12
|
# This attribute was initially released under the name that is now an alias
|
|
14
13
|
# We keep it for backwards compatibility but it was changed for the consistency
|
|
15
|
-
|
|
14
|
+
alias_method :matching_acl_pattern_type, :matching_acl_resource_pattern_type
|
|
16
15
|
|
|
16
|
+
# @param matching_acl [FFI::Pointer] pointer to the ACL binding struct
|
|
17
17
|
def initialize(matching_acl)
|
|
18
18
|
rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_AclBinding_error(matching_acl)
|
|
19
19
|
@result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
|
|
@@ -6,6 +6,7 @@ module Rdkafka
|
|
|
6
6
|
class ConfigResourceBindingResult
|
|
7
7
|
attr_reader :name, :type, :configs, :configs_count
|
|
8
8
|
|
|
9
|
+
# @param config_resource_ptr [FFI::Pointer] pointer to the config resource struct
|
|
9
10
|
def initialize(config_resource_ptr)
|
|
10
11
|
ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
|
|
11
12
|
|
|
@@ -2,10 +2,11 @@
|
|
|
2
2
|
|
|
3
3
|
module Rdkafka
|
|
4
4
|
class Admin
|
|
5
|
+
# Handle for create ACL operation
|
|
5
6
|
class CreateAclHandle < AbstractHandle
|
|
6
7
|
layout :pending, :bool,
|
|
7
|
-
|
|
8
|
-
|
|
8
|
+
:response, :int,
|
|
9
|
+
:response_string, :pointer
|
|
9
10
|
|
|
10
11
|
# @return [String] the name of the operation
|
|
11
12
|
def operation_name
|
|
@@ -17,10 +18,12 @@ module Rdkafka
|
|
|
17
18
|
CreateAclReport.new(rdkafka_response: self[:response], rdkafka_response_string: self[:response_string])
|
|
18
19
|
end
|
|
19
20
|
|
|
21
|
+
# Raises an error if the operation failed
|
|
22
|
+
# @raise [RdkafkaError]
|
|
20
23
|
def raise_error
|
|
21
24
|
raise RdkafkaError.new(
|
|
22
|
-
|
|
23
|
-
|
|
25
|
+
self[:response],
|
|
26
|
+
broker_message: self[:response_string].read_string
|
|
24
27
|
)
|
|
25
28
|
end
|
|
26
29
|
end
|
|
@@ -2,17 +2,18 @@
|
|
|
2
2
|
|
|
3
3
|
module Rdkafka
|
|
4
4
|
class Admin
|
|
5
|
+
# Report for create ACL operation result
|
|
5
6
|
class CreateAclReport
|
|
6
|
-
|
|
7
7
|
# Upon successful creation of Acl RD_KAFKA_RESP_ERR_NO_ERROR - 0 is returned as rdkafka_response
|
|
8
8
|
# @return [Integer]
|
|
9
9
|
attr_reader :rdkafka_response
|
|
10
10
|
|
|
11
|
-
|
|
12
11
|
# Upon successful creation of Acl empty string will be returned as rdkafka_response_string
|
|
13
12
|
# @return [String]
|
|
14
13
|
attr_reader :rdkafka_response_string
|
|
15
14
|
|
|
15
|
+
# @param rdkafka_response [Integer] response code from librdkafka
|
|
16
|
+
# @param rdkafka_response_string [FFI::Pointer] pointer to response string
|
|
16
17
|
def initialize(rdkafka_response:, rdkafka_response_string:)
|
|
17
18
|
@rdkafka_response = rdkafka_response
|
|
18
19
|
if rdkafka_response_string != FFI::Pointer::NULL
|
|
@@ -1,10 +1,11 @@
|
|
|
1
1
|
module Rdkafka
|
|
2
2
|
class Admin
|
|
3
|
+
# Handle for create partitions operation
|
|
3
4
|
class CreatePartitionsHandle < AbstractHandle
|
|
4
5
|
layout :pending, :bool,
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
6
|
+
:response, :int,
|
|
7
|
+
:error_string, :pointer,
|
|
8
|
+
:result_name, :pointer
|
|
8
9
|
|
|
9
10
|
# @return [String] the name of the operation
|
|
10
11
|
def operation_name
|
|
@@ -16,6 +17,8 @@ module Rdkafka
|
|
|
16
17
|
CreatePartitionsReport.new(self[:error_string], self[:result_name])
|
|
17
18
|
end
|
|
18
19
|
|
|
20
|
+
# Raises an error if the operation failed
|
|
21
|
+
# @raise [RdkafkaError]
|
|
19
22
|
def raise_error
|
|
20
23
|
RdkafkaError.validate!(
|
|
21
24
|
self[:response],
|
|
@@ -2,11 +2,12 @@
|
|
|
2
2
|
|
|
3
3
|
module Rdkafka
|
|
4
4
|
class Admin
|
|
5
|
+
# Handle for create topic operation
|
|
5
6
|
class CreateTopicHandle < AbstractHandle
|
|
6
7
|
layout :pending, :bool,
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
8
|
+
:response, :int,
|
|
9
|
+
:error_string, :pointer,
|
|
10
|
+
:result_name, :pointer
|
|
10
11
|
|
|
11
12
|
# @return [String] the name of the operation
|
|
12
13
|
def operation_name
|
|
@@ -18,6 +19,8 @@ module Rdkafka
|
|
|
18
19
|
CreateTopicReport.new(self[:error_string], self[:result_name])
|
|
19
20
|
end
|
|
20
21
|
|
|
22
|
+
# Raises an error if the operation failed
|
|
23
|
+
# @raise [RdkafkaError]
|
|
21
24
|
def raise_error
|
|
22
25
|
RdkafkaError.validate!(
|
|
23
26
|
self[:response],
|