karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +3 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
- data/.github/workflows/ci_macos_arm64.yml +301 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
- data/.github/workflows/push_ruby.yml +37 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +323 -0
- data/Gemfile +5 -0
- data/MIT-LICENSE +22 -0
- data/README.md +177 -0
- data/Rakefile +96 -0
- data/docker-compose.yml +25 -0
- data/ext/README.md +19 -0
- data/ext/Rakefile +131 -0
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/ext/librdkafka.so +0 -0
- data/karafka-rdkafka.gemspec +61 -0
- data/lib/rdkafka/abstract_handle.rb +116 -0
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/create_topic_report.rb +24 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +832 -0
- data/lib/rdkafka/bindings.rb +582 -0
- data/lib/rdkafka/callbacks.rb +415 -0
- data/lib/rdkafka/config.rb +398 -0
- data/lib/rdkafka/consumer/headers.rb +79 -0
- data/lib/rdkafka/consumer/message.rb +86 -0
- data/lib/rdkafka/consumer/partition.rb +57 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
- data/lib/rdkafka/consumer.rb +663 -0
- data/lib/rdkafka/error.rb +201 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +115 -0
- data/lib/rdkafka/native_kafka.rb +139 -0
- data/lib/rdkafka/producer/delivery_handle.rb +48 -0
- data/lib/rdkafka/producer/delivery_report.rb +45 -0
- data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
- data/lib/rdkafka/producer.rb +492 -0
- data/lib/rdkafka/version.rb +7 -0
- data/lib/rdkafka.rb +54 -0
- data/renovate.json +92 -0
- data/spec/rdkafka/abstract_handle_spec.rb +117 -0
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +769 -0
- data/spec/rdkafka/bindings_spec.rb +222 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +258 -0
- data/spec/rdkafka/consumer/headers_spec.rb +73 -0
- data/spec/rdkafka/consumer/message_spec.rb +139 -0
- data/spec/rdkafka/consumer/partition_spec.rb +57 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
- data/spec/rdkafka/consumer_spec.rb +1299 -0
- data/spec/rdkafka/error_spec.rb +95 -0
- data/spec/rdkafka/metadata_spec.rb +79 -0
- data/spec/rdkafka/native_kafka_spec.rb +130 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
- data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
- data/spec/rdkafka/producer_spec.rb +1234 -0
- data/spec/spec_helper.rb +181 -0
- metadata +244 -0
data/.ruby-gemset
ADDED
@@ -0,0 +1 @@
rdkafka-ruby

data/.ruby-version
ADDED
@@ -0,0 +1 @@
3.4.4

data/.yardopts
ADDED
data/CHANGELOG.md
ADDED
@@ -0,0 +1,323 @@
# Rdkafka Changelog

## 0.20.0 (Unreleased)
- **[Feature]** Add precompiled `x86_64-linux-gnu` setup.
- **[Feature]** Add precompiled `macos_arm64` setup.

## 0.19.5 (2025-05-30)
- [Enhancement] Allow for producing to non-existing topics with `key` and `partition_key` present.

## 0.19.4 (2025-05-23)
- [Change] Move to trusted-publishers and remove signing since no longer needed.

## 0.19.3 (2025-05-23)
- [Enhancement] Include broker message in the error full message if provided.

## 0.19.2 (2025-05-20)
- [Enhancement] Replace TTL-based partition count cache with a global cache that reuses `librdkafka` statistics data when possible.
- [Enhancement] Roll out experimental JRuby support.
- [Fix] Fix issue where C topic refs of an already closed producer would not be cleaned.
- [Fix] Fiber causes Segmentation Fault.
- [Change] Move to trusted-publishers and remove signing since no longer needed.

## 0.19.1 (2025-04-07)
- [Enhancement] Support producing and consuming of headers with multiple values (KIP-82).
- [Enhancement] Allow customization of the native Kafka poll time.

## 0.19.0 (2025-01-20)
- **[Breaking]** Deprecate and remove `#each_batch` due to data consistency concerns.
- [Enhancement] Bump librdkafka to 2.8.0
- [Fix] Restore `Rdkafka::Bindings.rd_kafka_global_init` as it was not the source of the original issue.

## 0.18.1 (2024-12-04)
- [Fix] Do not run `Rdkafka::Bindings.rd_kafka_global_init` on require to prevent some macOS versions from hanging on Puma fork.

## 0.18.0 (2024-11-26)
- **[Breaking]** Drop Ruby 3.0 support
- [Enhancement] Bump librdkafka to 2.6.1
- [Enhancement] Use default oauth callback if none is passed (bachmanity1)
- [Enhancement] Expose `rd_kafka_global_init` to mitigate macOS forking issues.
- [Patch] Retire no longer needed cooperative-sticky patch.

## 0.17.6 (2024-09-03)
- [Fix] Fix incorrectly behaving CI on failures.
- [Fix] Fix invalid references to librdkafka patches.

## 0.17.5 (2024-09-03)
- [Patch] Patch with "Add forward declaration to fix compilation without ssl" fix

## 0.17.4 (2024-09-02)
- [Enhancement] Bump librdkafka to 2.5.3
- [Enhancement] Do not release GVL on `rd_kafka_name` (ferrous26)
- [Fix] Fix unused variable reference in producer (lucasmvnascimento)

## 0.17.3 (2024-08-09)
- [Fix] Mitigate a case where FFI would not restart the background events callback dispatcher in forks.

## 0.17.2 (2024-08-07)
- [Enhancement] Support returning `#details` for errors that do have topic/partition related extra info.

## 0.17.1 (2024-08-01)
- [Enhancement] Support ability to release patches to librdkafka.
- [Patch] Patch cooperative-sticky assignments in librdkafka.

## 0.17.0 (2024-07-21)
- [Enhancement] Bump librdkafka to 2.5.0

## 0.16.1 (2024-07-10)
- [Feature] Add `#seek_by` to be able to seek for a message by topic, partition and offset (zinahia)
- [Change] Remove old producer timeout API warnings.
- [Fix] Switch to local release of librdkafka to mitigate its unavailability.

## 0.16.0 (2024-06-17)
- **[Breaking]** Messages without headers returned by `#poll` contain a frozen empty hash.
- **[Breaking]** `HashWithSymbolKeysTreatedLikeStrings` has been removed so headers are regular hashes with string keys.
- [Enhancement] Bump librdkafka to 2.4.0
- [Enhancement] Save two object allocations per produced message and lower the CPU usage of message production with small improvements.
- [Fix] Remove support for Ruby 2.7. Supporting it was a bug since the rest of the karafka ecosystem no longer supports it.

## 0.15.2 (2024-07-10)
- [Fix] Switch to local release of librdkafka to mitigate its unavailability.

## 0.15.1 (2024-05-09)
- **[Feature]** Provide ability to use topic config on a producer for custom behaviors per dispatch.
- [Enhancement] Use a topic config reference cache for message production to prevent topic object allocation with each message.
- [Enhancement] Provide `Rdkafka::Admin#describe_errors` to get error descriptions (mensfeld)

## 0.15.0 (2024-04-26)
- **[Feature]** Oauthbearer token refresh callback (bruce-szalwinski-he)
- **[Feature]** Support incremental config describe + alter API (mensfeld)
- [Enhancement] Name the polling Thread as `rdkafka.native_kafka#<name>` (nijikon)
- [Enhancement] Replace the time-poll-based wait engine with an event-based one to improve response times on blocking operations and waits (nijikon + mensfeld)
- [Enhancement] Allow for usage of the second regex engine of librdkafka by setting `RDKAFKA_DISABLE_REGEX_EXT` during build (mensfeld)
- [Change] Allow for native kafka thread operations deferring and manual start for consumer, producer and admin.
- [Change] The `wait_timeout` argument in the `AbstractHandle.wait` method is deprecated and will be removed in future versions without replacement. We don't rely on its value anymore (nijikon)
- [Fix] Fix bogus case/when syntax. Levels 1, 2, and 6 previously defaulted to UNKNOWN (jjowdy)

## 0.14.11 (2024-07-10)
- [Fix] Switch to local release of librdkafka to mitigate its unavailability.

## 0.14.10 (2024-02-08)
- [Fix] Background logger stops working after forking causing memory leaks (mensfeld).

## 0.14.9 (2024-01-29)
- [Fix] Partition cache caches invalid `nil` result for `PARTITIONS_COUNT_TTL`.
- [Enhancement] Report `-1` instead of `nil` in case of `partition_count` failure.

## 0.14.8 (2024-01-24)
- [Enhancement] Provide support for Nix OS (alexandriainfantino)
- [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld)

## 0.14.7 (2023-12-29)
- [Fix] Recognize the custom partition object used by Karafka (fixed in 2.3.0).

## 0.14.6 (2023-12-29)
- **[Feature]** Support storing metadata alongside offsets via `rd_kafka_offsets_store` in `#store_offset` (mensfeld)
- [Enhancement] Increase the `#committed` default timeout from 1_200ms to 2000ms. This will compensate for network glitches and remote cluster operations and will align with the metadata query timeout.

## 0.14.5 (2023-12-20)
- [Enhancement] Provide `label` producer handler and report reference for improved traceability.

## 0.14.4 (2023-12-19)
- [Enhancement] Add ability to store offsets in a transaction (mensfeld)

## 0.14.3 (2023-12-17)
- [Enhancement] Replace `rd_kafka_offset_store` with `rd_kafka_offsets_store` (mensfeld)
- [Fix] Missing ACL `RD_KAFKA_RESOURCE_BROKER` constant reference (mensfeld)
- [Change] Rename `matching_acl_pattern_type` to `matching_acl_resource_pattern_type` to align the whole API (mensfeld)

## 0.14.2 (2023-12-11)
- [Enhancement] Alias `topic_name` as `topic` in the delivery report (mensfeld)
- [Fix] Fix return type on `#rd_kafka_poll` (mensfeld)
- [Fix] `uint8_t` does not exist on Apple Silicon (mensfeld)

## 0.14.1 (2023-12-02)
- **[Feature]** Add `Admin#metadata` (mensfeld)
- **[Feature]** Add `Admin#create_partitions` (mensfeld)
- **[Feature]** Add `Admin#delete_group` utility (piotaixr)
- **[Feature]** Add Create and Delete ACL Feature To Admin Functions (vgnanasekaran)
- **[Enhancement]** Improve error reporting on `unknown_topic_or_part` and include missing topic (mensfeld)
- **[Enhancement]** Improve error reporting on consumer polling errors (mensfeld)

## 0.14.0 (2023-11-17)
- [Enhancement] Bump librdkafka to 2.3.0
- [Enhancement] Increase the `#lag` and `#query_watermark_offsets` default timeouts from 100ms to 1000ms. This will compensate for network glitches and remote cluster operations.

## 0.13.10 (2024-07-10)
- [Fix] Switch to local release of librdkafka to mitigate its unavailability.

## 0.13.9 (2023-11-07)
- [Enhancement] Expose an alternative way of managing consumer events via a separate queue.
- [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem.

## 0.13.8 (2023-10-31)
- [Enhancement] Get consumer position (thijsc & mensfeld)

## 0.13.7 (2023-10-31)
- [Change] Drop support for Ruby 2.6 due to incompatibilities in usage of `ObjectSpace::WeakMap`
- [Fix] Fix dangling Opaque references.

## 0.13.6 (2023-10-17)
- **[Feature]** Support transactions API in the producer
- [Enhancement] Add `raise_response_error` flag to the `Rdkafka::AbstractHandle`.
- [Enhancement] Provide `#purge` to remove any outstanding requests from the producer.
- [Enhancement] Fix `#flush` not handling timeout errors by making it return true if everything was flushed or false if it failed. We do **not** raise an exception here to keep it backwards compatible.

## 0.13.5
- Fix DeliveryReport `create_result#error` being nil despite an error being associated with it

## 0.13.4
- Always call initial poll on librdkafka to make sure the oauth bearer callback is handled before operations.

## 0.13.3
- Bump librdkafka to 2.2.0

## 0.13.2
- Ensure operations counter decrement is fully thread-safe
- Bump librdkafka to 2.1.1

## 0.13.1
- Add offsets_for_times method on consumer (timflapper)

## 0.13.0 (2023-07-24)
- Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
- Support both string and symbol header keys (ColinDKelley)
- Handle tombstone messages properly (kgalieva)
- Add topic name to delivery report (maeve)
- Allow string partitioner config (mollyegibson)
- Fix documented type for DeliveryReport#error (jimmydo)
- Bump librdkafka to 2.0.2 (lmaia)
- Use finalizers to cleanly exit producer and admin (thijsc)
- Lock access to the native kafka client (thijsc)
- Fix potential race condition in multi-threaded producer (mensfeld)
- Fix leaking FFI resources in specs (mensfeld)
- Improve specs stability (mensfeld)
- Make metadata request timeout configurable (mensfeld)
- call_on_partitions_assigned and call_on_partitions_revoked only get a tpl passed in (thijsc)
- Support `#assignment_lost?` on a consumer to check for involuntary assignment revocation (mensfeld)
- Expose `#name` on the consumer and producer (mensfeld)
- Introduce producer partitions count metadata cache (mensfeld)
- Retry metadata fetches on certain errors with a backoff (mensfeld)
- Do not lock access to underlying native kafka client and rely on Karafka granular locking (mensfeld)

## 0.12.4 (2024-07-10)
- [Fix] Switch to local release of librdkafka to mitigate its unavailability.

## 0.12.3
- Include backtrace in non-raised bound errors.
- Include topic name in the delivery reports

## 0.12.2
- Increase the metadata default timeout from 250ms to 2 seconds. This should allow for working with remote clusters.

## 0.12.1
- Bumps librdkafka to 2.0.2 (lmaia)
- Add support for adding more partitions via Admin API

## 0.12.0 (2022-06-17)
- Bumps librdkafka to 1.9.0
- Fix crash on empty partition key (mensfeld)
- Pass the delivery handle to the callback (gvisokinskas)

## 0.11.0 (2021-11-17)
- Upgrade librdkafka to 1.8.2
- Bump supported minimum Ruby version to 2.6
- Better homebrew path detection

## 0.10.0 (2021-09-07)
- Upgrade librdkafka to 1.5.0
- Add error callback config

## 0.9.0 (2021-06-23)
- Fixes for Ruby 3.0
- Allow any callable object for callbacks (gremerritt)
- Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
- Use queue as log callback to avoid unsafe calls from trap context (breunigs)
- Allow passing in topic configuration on create_topic (dezka)
- Add each_batch method to consumer (mgrosso)

## 0.8.1 (2020-12-07)
- Fix topic_flag behaviour and add tests for Metadata (geoff2k)
- Add topic admin interface (geoff2k)
- Raise an exception if @native_kafka is nil (geoff2k)
- Option to use zstd compression (jasonmartens)

## 0.8.0 (2020-06-02)
- Upgrade librdkafka to 1.4.0
- Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
- Ruby 2.7 compatibility fix (by Geoff Thé)
- Add error to delivery report (by Alex Stanovsky)
- Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
- Allow use of Rake 13.x and up (by Tomasz Pajor)

## 0.7.0 (2019-09-21)
- Bump librdkafka to 1.2.0 (by rob-as)
- Allow customizing the wait time for delivery report availability (by mensfeld)

## 0.6.0 (2019-07-23)
- Bump librdkafka to 1.1.0 (by Chris Gaffney)
- Implement seek (by breunigs)

## 0.5.0 (2019-04-11)
- Bump librdkafka to 1.0.0 (by breunigs)
- Add cluster and member information (by dmexe)
- Support message headers for consumer & producer (by dmexe)
- Add consumer rebalance listener (by dmexe)
- Implement pause/resume partitions (by dmexe)

## 0.4.2 (2019-01-12)
- Delivery callback for producer
- Document list param of commit method
- Use default Homebrew openssl location if present
- Consumer lag handles empty topics
- End iteration in consumer when it is closed
- Add support for storing message offsets
- Add missing runtime dependency to rake

## 0.4.1 (2018-10-19)
- Bump librdkafka to 0.11.6

## 0.4.0 (2018-09-24)
- Improvements in librdkafka archive download
- Add global statistics callback
- Use Time for timestamps, a potentially breaking change if you rely on the previous behavior where an integer with the number of milliseconds was returned.
- Bump librdkafka to 0.11.5
- Implement TopicPartitionList in Ruby so we don't have to keep track of native objects.
- Support committing a topic partition list
- Add consumer assignment method

## 0.3.5 (2018-01-17)
- Fix crash when not waiting for delivery handles
- Run specs on Ruby 2.5

## 0.3.4 (2017-12-05)
- Bump librdkafka to 0.11.3

## 0.3.3 (2017-10-27)
- Fix bug that prevented display of `RdkafkaError` message

## 0.3.2 (2017-10-25)
- `add_topic` now supports using a partition count
- Add way to make errors clearer with an extra message
- Show topics in subscribe error message
- Show partition and topic in query watermark offsets error message

## 0.3.1 (2017-10-23)
- Bump librdkafka to 0.11.1
- Officially support ranges in `add_topic` for topic partition list.
- Add consumer lag calculator

## 0.3.0 (2017-10-17)
- Move both add topic methods to one `add_topic` in `TopicPartitionList`
- Add committed offsets to consumer
- Add query watermark offset to consumer

## 0.2.0 (2017-10-13)
- Some refactoring and add inline documentation

## 0.1.x (2017-09-10)
- Initial working version including producing and consuming
data/Gemfile
ADDED

data/MIT-LICENSE
ADDED
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2017-2023 Thijs Cadier
              2023, Maciej Mensfeld

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,177 @@
# Rdkafka

[Build Status](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci.yml)
[Gem Version](https://badge.fury.io/rb/rdkafka)
[Join the Karafka Slack](https://slack.karafka.io)

> [!NOTE]
> The `rdkafka-ruby` gem was created and developed by [AppSignal](https://www.appsignal.com/). Their impactful contributions have significantly shaped the Ruby Kafka and Karafka ecosystems. For robust monitoring, we highly recommend AppSignal.

---

The `rdkafka` gem is a modern Kafka client library for Ruby based on
[librdkafka](https://github.com/confluentinc/librdkafka/).
It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
gem and targets Kafka 1.0+ and Ruby versions under security or
active maintenance. We remove a Ruby version from our CI builds when it
becomes EOL.

`rdkafka` was written because of the need at [AppSignal](https://appsignal.com) for a reliable Ruby client that supports modern Kafka. AppSignal runs it in production on very high-traffic systems.

The most essential pieces of a Kafka client are implemented, and we aim to provide all relevant consumer, producer, and admin APIs.

## Table of Contents

- [Project Scope](#project-scope)
- [Installation](#installation)
- [Usage](#usage)
  * [Consuming Messages](#consuming-messages)
  * [Producing Messages](#producing-messages)
- [Higher Level Libraries](#higher-level-libraries)
  * [Message Processing Frameworks](#message-processing-frameworks)
  * [Message Publishing Libraries](#message-publishing-libraries)
- [Forking](#forking)
- [Development](#development)
- [Example](#example)
- [Versions](#versions)

## Project Scope

While rdkafka-ruby aims to simplify the use of librdkafka in Ruby applications, it's important to understand the limitations of this library:

- **No Complex Producers/Consumers**: This library does not intend to offer complex producers or consumers. The aim is to stick closely to the functionalities provided by librdkafka itself.

- **Focus on librdkafka Capabilities**: Features that can be achieved directly in Ruby, without specific needs from librdkafka, are outside the scope of this library.

- **Existing High-Level Functionalities**: Certain high-level functionalities like the producer metadata cache and simple consumer are already part of the library. Although they fall slightly outside the primary goal, they will remain part of the contract, given their existing usage.

## Installation

When installed, this gem downloads and compiles librdkafka. If you have any problems installing the gem, please open an issue.
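
For a standard Bundler setup, adding the gem to your `Gemfile` is enough. A minimal sketch; the gem name here is taken from the `karafka-rdkafka.gemspec` shipped in this package (the upstream gem is published as plain `rdkafka`):

```ruby
# Gemfile - minimal sketch; gem name follows karafka-rdkafka.gemspec
source "https://rubygems.org"

gem "karafka-rdkafka"
```

Running `bundle install` then triggers the librdkafka download and compilation described above, or uses the precompiled `librdkafka.so` shipped for platforms covered by the precompiled setup.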

## Usage

Please see the [documentation](https://karafka.io/docs/code/rdkafka-ruby/) for full details on how to use this gem. Below are two quick examples.

Unless you are seeking specific low-level capabilities, we **strongly** recommend using [Karafka](https://github.com/karafka/karafka) and [WaterDrop](https://github.com/karafka/waterdrop) when working with Kafka. These are higher-level libraries also maintained by us and based on rdkafka-ruby.

### Consuming Messages

Subscribe to a topic and get messages. Kafka will automatically spread
the available partitions over consumers with the same group id.

```ruby
config = {
  :"bootstrap.servers" => "localhost:9092",
  :"group.id" => "ruby-test"
}
consumer = Rdkafka::Config.new(config).consumer
consumer.subscribe("ruby-test-topic")

consumer.each do |message|
  puts "Message received: #{message}"
end
```

### Producing Messages

Produce several messages, put the delivery handles in an array, and
wait for them before exiting. This way the messages will be batched and
efficiently sent to Kafka.

```ruby
config = {:"bootstrap.servers" => "localhost:9092"}
producer = Rdkafka::Config.new(config).producer
delivery_handles = []

100.times do |i|
  puts "Producing message #{i}"
  delivery_handles << producer.produce(
    topic: "ruby-test-topic",
    payload: "Payload #{i}",
    key: "Key #{i}"
  )
end

delivery_handles.each(&:wait)
```

Note that creating a producer consumes some resources that will not be released until `#close` is explicitly called, so be sure to call `Config#producer` only as necessary.
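
Since the native client is only released on `#close`, a common pattern is to reuse a single producer and close it when you are done. A minimal sketch, reusing the broker address and topic from the examples above:

```ruby
# Minimal sketch: reuse a single producer and always release it via #close.
config = {:"bootstrap.servers" => "localhost:9092"}
producer = Rdkafka::Config.new(config).producer

begin
  handle = producer.produce(topic: "ruby-test-topic", payload: "last message")
  handle.wait
ensure
  # Frees the underlying librdkafka client and its native resources.
  producer.close
end
```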

## Higher Level Libraries

Currently, there are two actively developed frameworks based on `rdkafka-ruby` that provide a higher-level API for working with Kafka messages, and one library for publishing messages.

### Message Processing Frameworks

* [Karafka](https://github.com/karafka/karafka) - Ruby and Rails efficient Kafka processing framework.
* [Racecar](https://github.com/zendesk/racecar) - A simple framework for Kafka consumers in Ruby

### Message Publishing Libraries

* [WaterDrop](https://github.com/karafka/waterdrop) – Standalone Karafka library for producing Kafka messages.

## Forking

When working with `rdkafka-ruby`, it's essential to know that the underlying `librdkafka` library does not support fork-safe operations, even though it is thread-safe. Forking a process after initializing librdkafka clients can lead to unpredictable behavior due to inherited file descriptors and memory states. This limitation requires careful handling, especially in Ruby applications that rely on forking.

To address this, it's highly recommended to (see the sketch after this list):

- Never initialize any `rdkafka-ruby` producers or consumers before forking to avoid state corruption.
- Before forking, always close any open producers or consumers if you've opened any.
- Use high-level libraries like [WaterDrop](https://github.com/karafka/waterdrop) and [Karafka](https://github.com/karafka/karafka/), which provide abstractions for handling librdkafka's intricacies.
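
A minimal sketch of that close-before-fork pattern, assuming the broker address used in the earlier examples:

```ruby
# Sketch only: close librdkafka clients before forking, then rebuild them
# inside the child process.
config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")

producer = config.producer
producer.produce(topic: "ruby-test-topic", payload: "before fork").wait
producer.close # release the native client before forking

pid = fork do
  # Clients created after the fork are safe to use in the child.
  child_producer = config.producer
  child_producer.produce(topic: "ruby-test-topic", payload: "after fork").wait
  child_producer.close
end

Process.wait(pid)
```

The key point is that no librdkafka client created before `fork` is reused afterwards; the parent and the child each build their own clients.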

## Development

Contributors are encouraged to focus on enhancements that align with the core goal of the library. We appreciate contributions but will likely not accept pull requests for features that:

- Implement functionalities that can be achieved using standard Ruby capabilities without changes to the underlying rdkafka-ruby bindings.
- Deviate significantly from the primary aim of providing librdkafka bindings with Ruby-friendly interfaces.

A Docker Compose file is included to run Kafka. To run that:

```
docker-compose up
```

Run `bundle` and `cd ext && bundle exec rake && cd ..` to download and compile `librdkafka`.

You can then run `bundle exec rspec` to run the tests. To see rdkafka debug output:

```
DEBUG_PRODUCER=true bundle exec rspec
DEBUG_CONSUMER=true bundle exec rspec
```

After running the tests, you can bring the cluster down to start with a clean slate:

```
docker-compose down
```

## Example

To see everything working, run these in separate tabs:

```
bundle exec rake consume_messages
bundle exec rake produce_messages
```

## Versions

| rdkafka-ruby | librdkafka | patches |
|-|-|-|
| 0.20.x (Unreleased) | 2.8.0 (2025-01-07) | yes |
| 0.19.x (2025-01-20) | 2.8.0 (2025-01-07) | yes |
| 0.18.0 (2024-11-26) | 2.6.1 (2024-11-18) | yes |
| 0.17.4 (2024-09-02) | 2.5.3 (2024-09-02) | yes |
| 0.17.0 (2024-08-01) | 2.5.0 (2024-07-10) | yes |
| 0.16.0 (2024-06-13) | 2.4.0 (2024-05-07) | no |
| 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) | no |
| 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) | no |
| 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) | no |
| 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) | no |
| 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) | no |
| 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) | no |
data/Rakefile
ADDED
@@ -0,0 +1,96 @@
# frozen_string_literal: true

require 'bundler/gem_tasks'
require "./lib/rdkafka"

desc 'Generate some message traffic'
task :produce_messages do
  config = {:"bootstrap.servers" => "localhost:9092"}
  if ENV["DEBUG"]
    config[:debug] = "broker,topic,msg"
  end
  producer = Rdkafka::Config.new(config).producer

  delivery_handles = []
  100.times do |i|
    puts "Producing message #{i}"
    delivery_handles << producer.produce(
      topic: "rake_test_topic",
      payload: "Payload #{i} from Rake",
      key: "Key #{i} from Rake"
    )
  end
  puts 'Waiting for delivery'
  delivery_handles.each(&:wait)
  puts 'Done'
end

desc 'Consume some messages'
task :consume_messages do
  config = {
    :"bootstrap.servers" => "localhost:9092",
    :"group.id" => "rake_test",
    :"enable.partition.eof" => false,
    :"auto.offset.reset" => "earliest",
    :"statistics.interval.ms" => 10_000
  }
  if ENV["DEBUG"]
    config[:debug] = "cgrp,topic,fetch"
  end
  Rdkafka::Config.statistics_callback = lambda do |stats|
    puts stats
  end
  consumer = Rdkafka::Config.new(config).consumer
  consumer.subscribe("rake_test_topic")
  consumer.each do |message|
    puts "Message received: #{message}"
  end
end

desc 'Hammer down'
task :load_test do
  puts "Starting load test"

  config = Rdkafka::Config.new(
    :"bootstrap.servers" => "localhost:9092",
    :"group.id" => "load-test",
    :"enable.partition.eof" => false
  )

  # Create a producer in a thread
  Thread.new do
    producer = config.producer
    loop do
      handles = []
      1000.times do |i|
        handles.push(producer.produce(
          topic: "load_test_topic",
          payload: "Payload #{i}",
          key: "Key #{i}"
        ))
      end
      handles.each(&:wait)
      puts "Produced 1000 messages"
    end
  end.abort_on_exception = true

  # Create three consumers in threads
  3.times do |i|
    Thread.new do
      count = 0
      consumer = config.consumer
      consumer.subscribe("load_test_topic")
      consumer.each do |message|
        count += 1
        if count % 1000 == 0
          puts "Received 1000 messages in thread #{i}"
        end
      end
    end.abort_on_exception = true
  end

  loop do
    sleep 1
  end
end
data/docker-compose.yml
ADDED
@@ -0,0 +1,25 @@
services:
  kafka:
    container_name: kafka
    image: confluentinc/cp-kafka:8.0.0

    ports:
      - 9092:9092

    environment:
      CLUSTER_ID: kafka-docker-cluster-1
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_PROCESS_ROLES: broker,controller
      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
      KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
      KAFKA_BROKER_ID: 1
      KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093
      ALLOW_PLAINTEXT_LISTENER: 'yes'
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
      KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
data/ext/README.md
ADDED
@@ -0,0 +1,19 @@
# Ext

This gem depends on the `librdkafka` C library. It is downloaded, stored in
the `dist/` directory, and checked into source control.

To update the `librdkafka` version, follow these steps (a sketch of the
`version.rb` edit follows this list):

* Go to https://github.com/confluentinc/librdkafka/releases to get the new
  version number and asset checksum for the `tar.gz`.
* Change the version in `lib/rdkafka/version.rb`
* Change the `sha256` in `lib/rdkafka/version.rb`
* Run `bundle exec rake dist:download` in the `ext` directory to download the
  new release and place it in `dist/` for you
* Run `bundle exec rake` in the `ext` directory to build the new version
* Run `docker-compose pull` in the main gem directory to ensure the docker
  images used by the tests are current, and run `docker-compose up`
* Finally, run `bundle exec rspec` in the main gem directory to execute
  the test suite to detect any regressions that may have been introduced
  by the update
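
For orientation, a hedged sketch of what the `lib/rdkafka/version.rb` edit typically looks like; the constant names below are assumptions and should be verified against the actual file, and the checksum placeholder must be replaced with the real value from the release page:

```ruby
# lib/rdkafka/version.rb - sketch only; constant names are assumptions and the
# checksum placeholder must be replaced with the real sha256 of the tar.gz.
module Rdkafka
  VERSION = "0.20.0.rc3"
  LIBRDKAFKA_VERSION = "2.8.0"
  LIBRDKAFKA_SOURCE_SHA256 = "<sha256 of the librdkafka release asset>"
end
```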