rdkafka 0.12.0 → 0.15.1

Files changed (86)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci.yml +57 -0
  5. data/.gitignore +4 -0
  6. data/.rspec +1 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +155 -93
  10. data/Gemfile +2 -0
  11. data/{LICENSE → MIT-LICENSE} +2 -1
  12. data/README.md +76 -29
  13. data/Rakefile +2 -0
  14. data/certs/cert_chain.pem +26 -0
  15. data/docker-compose.yml +18 -15
  16. data/ext/README.md +1 -1
  17. data/ext/Rakefile +46 -27
  18. data/lib/rdkafka/abstract_handle.rb +41 -25
  19. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  20. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  21. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  22. data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
  23. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  24. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  25. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  26. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  27. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  28. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  29. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  30. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  31. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  32. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  33. data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
  34. data/lib/rdkafka/admin.rb +494 -35
  35. data/lib/rdkafka/bindings.rb +180 -41
  36. data/lib/rdkafka/callbacks.rb +202 -1
  37. data/lib/rdkafka/config.rb +62 -25
  38. data/lib/rdkafka/consumer/headers.rb +24 -9
  39. data/lib/rdkafka/consumer/message.rb +3 -1
  40. data/lib/rdkafka/consumer/partition.rb +2 -0
  41. data/lib/rdkafka/consumer/topic_partition_list.rb +13 -8
  42. data/lib/rdkafka/consumer.rb +243 -111
  43. data/lib/rdkafka/error.rb +15 -0
  44. data/lib/rdkafka/helpers/time.rb +14 -0
  45. data/lib/rdkafka/metadata.rb +25 -2
  46. data/lib/rdkafka/native_kafka.rb +120 -0
  47. data/lib/rdkafka/producer/delivery_handle.rb +16 -2
  48. data/lib/rdkafka/producer/delivery_report.rb +22 -2
  49. data/lib/rdkafka/producer.rb +151 -21
  50. data/lib/rdkafka/version.rb +5 -3
  51. data/lib/rdkafka.rb +24 -2
  52. data/rdkafka.gemspec +21 -5
  53. data/renovate.json +6 -0
  54. data/spec/rdkafka/abstract_handle_spec.rb +1 -1
  55. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  56. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  57. data/spec/rdkafka/admin/create_topic_handle_spec.rb +1 -1
  58. data/spec/rdkafka/admin/create_topic_report_spec.rb +1 -1
  59. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  60. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  61. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +1 -1
  62. data/spec/rdkafka/admin/delete_topic_report_spec.rb +1 -1
  63. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  64. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  65. data/spec/rdkafka/admin_spec.rb +209 -5
  66. data/spec/rdkafka/bindings_spec.rb +2 -1
  67. data/spec/rdkafka/callbacks_spec.rb +1 -1
  68. data/spec/rdkafka/config_spec.rb +24 -3
  69. data/spec/rdkafka/consumer/headers_spec.rb +60 -0
  70. data/spec/rdkafka/consumer/message_spec.rb +1 -1
  71. data/spec/rdkafka/consumer/partition_spec.rb +1 -1
  72. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +20 -1
  73. data/spec/rdkafka/consumer_spec.rb +352 -61
  74. data/spec/rdkafka/error_spec.rb +1 -1
  75. data/spec/rdkafka/metadata_spec.rb +4 -3
  76. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -35
  77. data/spec/rdkafka/producer/delivery_handle_spec.rb +4 -1
  78. data/spec/rdkafka/producer/delivery_report_spec.rb +11 -3
  79. data/spec/rdkafka/producer_spec.rb +234 -22
  80. data/spec/spec_helper.rb +20 -2
  81. data.tar.gz.sig +0 -0
  82. metadata +81 -17
  83. metadata.gz.sig +0 -0
  84. data/.semaphore/semaphore.yml +0 -23
  85. data/bin/console +0 -11
  86. data/lib/rdkafka/producer/client.rb +0 -47
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 821523c304fc7a1fbb2c7be2b58d98d56600b645b89fdb4093f976418650035d
- data.tar.gz: '039b8e345fd8be5f295a293d64466071dbefd77d81b01460abb0fcf343a6bed3'
+ metadata.gz: 8636c80e1798cf24b34cf25a20ca24f35e2951fb179843a3a85a94fe0274ca76
+ data.tar.gz: f115aa7fff4961d42280a7ad6fd78fed40568b936139d588ae2362a2f0f45c25
  SHA512:
- metadata.gz: 2c7ac2199a63aacd3b1420890981ed5d953ae5cdadb874886cc4e396fa1fd8f69333633319beef35a05a002d75d22335a526a126e518cc3fbbb877a1c11ef2f7
- data.tar.gz: 5d23c6beec3759877013b040018111453e05c41238014b07a27c1a9d8b96e8af3bc037aacd1ebe89f856435cf0afb8e34a9f89443f87cf1a3682736efb79b4bd
+ metadata.gz: e5b5368a732e42b1c57aff93a7172c95b0bad93ac646dcba495c0509c5b6c29cf8753601d1f04f028c98579846e5db7e7119ae9a6a4cca2f41441316b60b5c9c
+ data.tar.gz: 8596d6944d5151df3ad875d93dbd7cf2aee00c1ded85e053e96880b8c5420ca6ba18a72f57cc867a7bfedf38060be3c9ea4334d79d7a21b2503a078bce1d266a
checksums.yaml.gz.sig ADDED
Binary file
data/.github/FUNDING.yml ADDED
@@ -0,0 +1 @@
+ custom: ['https://karafka.io/#become-pro']
data/.github/workflows/ci.yml ADDED
@@ -0,0 +1,57 @@
+ name: ci
+
+ concurrency:
+   group: ${{ github.workflow }}-${{ github.ref }}
+   cancel-in-progress: true
+
+ on:
+   pull_request:
+   push:
+   schedule:
+     - cron: '0 1 * * *'
+
+ env:
+   BUNDLE_RETRY: 6
+   BUNDLE_JOBS: 4
+
+ jobs:
+   specs:
+     timeout-minutes: 30
+     runs-on: ubuntu-latest
+     strategy:
+       fail-fast: false
+       matrix:
+         ruby:
+           - '3.3'
+           - '3.2'
+           - '3.1'
+           - '3.1.0'
+           - '3.0'
+           - '3.0.0'
+           - '2.7'
+         include:
+           - ruby: '3.3'
+             coverage: 'true'
+     steps:
+       - uses: actions/checkout@v4
+       - name: Install package dependencies
+         run: "[ -e $APT_DEPS ] || sudo apt-get install -y --no-install-recommends $APT_DEPS"
+
+       - name: Start Kafka with docker-compose
+         run: |
+           docker-compose up -d || (sleep 5 && docker-compose up -d)
+
+       - name: Set up Ruby
+         uses: ruby/setup-ruby@v1
+         with:
+           ruby-version: ${{matrix.ruby}}
+           bundler-cache: true
+
+       - name: Run all specs
+         env:
+           GITHUB_COVERAGE: ${{matrix.coverage}}
+
+         run: |
+           bundle install --path vendor/bundle
+           cd ext && bundle exec rake && cd ..
+           bundle exec rspec
data/.gitignore CHANGED
@@ -1,3 +1,6 @@
+ # Ignore bundler config.
+ /.bundle
+
  Gemfile.lock
  ext/ports
  ext/tmp
@@ -6,3 +9,4 @@ ext/librdkafka.*
  .yardoc
  doc
  coverage
+ vendor
data/.rspec CHANGED
@@ -1 +1,2 @@
+ --require spec_helper
  --format documentation
data/.ruby-gemset ADDED
@@ -0,0 +1 @@
+ rdkafka-ruby
data/.ruby-version ADDED
@@ -0,0 +1 @@
+ 3.3.0
data/CHANGELOG.md CHANGED
@@ -1,104 +1,166 @@
- # 0.12.0
- * Bumps librdkafka to 1.9.0
-
- # 0.11.0
- * Upgrade librdkafka to 1.8.2
- * Bump supported minimum Ruby version to 2.6
- * Better homebrew path detection
-
- # 0.10.0
- * Upgrade librdkafka to 1.5.0
- * Add error callback config
-
- # 0.9.0
- * Fixes for Ruby 3.0
- * Allow any callable object for callbacks (gremerritt)
- * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
- * Use queue as log callback to avoid unsafe calls from trap context (breunigs)
- * Allow passing in topic configuration on create_topic (dezka)
- * Add each_batch method to consumer (mgrosso)
-
- # 0.8.1
- * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
- * Add topic admin interface (geoff2k)
- * Raise an exception if @native_kafka is nil (geoff2k)
- * Option to use zstd compression (jasonmartens)
-
- # 0.8.0
- * Upgrade librdkafka to 1.4.0
- * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
- * Ruby 2.7 compatibility fix (by Geoff Thé)A
- * Add error to delivery report (by Alex Stanovsky)
- * Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
- * Allow use of Rake 13.x and up (by Tomasz Pajor)
-
- # 0.7.0
- * Bump librdkafka to 1.2.0 (by rob-as)
- * Allow customizing the wait time for delivery report availability (by mensfeld)
-
- # 0.6.0
- * Bump librdkafka to 1.1.0 (by Chris Gaffney)
- * Implement seek (by breunigs)
-
- # 0.5.0
- * Bump librdkafka to 1.0.0 (by breunigs)
- * Add cluster and member information (by dmexe)
- * Support message headers for consumer & producer (by dmexe)
- * Add consumer rebalance listener (by dmexe)
- * Implement pause/resume partitions (by dmexe)
-
- # 0.4.2
- * Delivery callback for producer
- * Document list param of commit method
- * Use default Homebrew openssl location if present
- * Consumer lag handles empty topics
- * End iteration in consumer when it is closed
- * Add support for storing message offsets
- * Add missing runtime dependency to rake
-
- # 0.4.1
- * Bump librdkafka to 0.11.6
-
- # 0.4.0
- * Improvements in librdkafka archive download
- * Add global statistics callback
- * Use Time for timestamps, potentially breaking change if you
+ # Rdkafka Changelog
+
+ ## 0.15.1 (2024-01-30)
+ - [Enhancement] Provide support for Nix OS (alexandriainfantino)
+ - [Enhancement] Replace `rd_kafka_offset_store` with `rd_kafka_offsets_store` (mensfeld)
+ - [Enhancement] Alias `topic_name` as `topic` in the delivery report (mensfeld)
+ - [Enhancement] Provide `label` producer handler and report reference for improved traceability (mensfeld)
+ - [Enhancement] Include the error when invoking `create_result` on producer handle (mensfeld)
+ - [Enhancement] Skip intermediate array creation on delivery report callback execution (one per message) (mensfeld).
+ - [Enhancement] Report `-1` instead of `nil` in case `partition_count` failure (mensfeld).
+ - [Fix] Fix return type on `#rd_kafka_poll` (mensfeld)
+ - [Fix] `uint8_t` does not exist on Apple Silicon (mensfeld)
+ - [Fix] Missing ACL `RD_KAFKA_RESOURCE_BROKER` constant reference (mensfeld)
+ - [Fix] Partition cache caches invalid nil result for `PARTITIONS_COUNT_TTL` (mensfeld)
+ - [Change] Rename `matching_acl_pattern_type` to `matching_acl_resource_pattern_type` to align the whole API (mensfeld)
+
+ ## 0.15.0 (2023-12-03)
+ - **[Feature]** Add `Admin#metadata` (mensfeld)
+ - **[Feature]** Add `Admin#create_partitions` (mensfeld)
+ - **[Feature]** Add `Admin#delete_group` utility (piotaixr)
+ - **[Feature]** Add Create and Delete ACL Feature To Admin Functions (vgnanasekaran)
+ - **[Feature]** Support `#assignment_lost?` on a consumer to check for involuntary assignment revocation (mensfeld)
+ - [Enhancement] Expose alternative way of managing consumer events via a separate queue (mensfeld)
+ - [Enhancement] **Bump** librdkafka to 2.3.0 (mensfeld)
+ - [Enhancement] Increase the `#lag` and `#query_watermark_offsets` default timeouts from 100ms to 1000ms. This will compensate for network glitches and remote clusters operations (mensfeld)
+ - [Change] Use `SecureRandom.uuid` instead of `random` for test consumer groups (mensfeld)
+
+ ## 0.14.0 (2023-11-21)
+ - [Enhancement] Add `raise_response_error` flag to the `Rdkafka::AbstractHandle`.
+ - [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem (mensfeld)
+ - [Enhancement] Get consumer position (thijsc & mensfeld)
+ - [Enhancement] Provide `#purge` to remove any outstanding requests from the producer (mensfeld)
+ - [Enhancement] Update `librdkafka` to `2.2.0` (mensfeld)
+ - [Enhancement] Introduce producer partitions count metadata cache (mensfeld)
+ - [Enhancement] Increase metadata timeout request from `250 ms` to `2000 ms` default to allow for remote cluster operations via `rdkafka-ruby` (mensfeld)
+ - [Enhancement] Introduce `#name` for producers and consumers (mensfeld)
+ - [Enhancement] Include backtrace in non-raised binded errors (mensfeld)
+ - [Fix] Reference to Opaque is not released when Admin, Consumer or Producer is closed (mensfeld)
+ - [Fix] Trigger `#poll` on native kafka creation to handle oauthbearer cb (mensfeld)
+ - [Fix] `#flush` does not handle the timeouts errors by making it return `true` if all flushed or `false` if failed. We do **not** raise an exception here to keep it backwards compatible (mensfeld)
+ - [Change] Remove support for Ruby 2.6 due to it being EOL and WeakMap incompatibilities (mensfeld)
+ - [Change] Update Kafka Docker with Confluent KRaft (mensfeld)
+ - [Change] Update librdkafka repo reference from edenhill to confluentinc (mensfeld)
+
+ ## 0.13.0 (2023-07-24)
+ - Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
+ - Support both string and symbol header keys (ColinDKelley)
+ - Handle tombstone messages properly (kgalieva)
+ - Add topic name to delivery report (maeve)
+ - Allow string partitioner config (mollyegibson)
+ - Fix documented type for DeliveryReport#error (jimmydo)
+ - Bump librdkafka to 2.0.2 (lmaia)
+ - Use finalizers to cleanly exit producer and admin (thijsc)
+ - Lock access to the native kafka client (thijsc)
+ - Fix potential race condition in multi-threaded producer (mensfeld)
+ - Fix leaking FFI resources in specs (mensfeld)
+ - Improve specs stability (mensfeld)
+ - Make metadata request timeout configurable (mensfeld)
+ - call_on_partitions_assigned and call_on_partitions_revoked only get a tpl passed in (thijsc)
+
+ ## 0.12.0 (2022-06-17)
+ - Bumps librdkafka to 1.9.0
+ - Fix crash on empty partition key (mensfeld)
+ - Pass the delivery handle to the callback (gvisokinskas)
+
+ ## 0.11.0 (2021-11-17)
+ - Upgrade librdkafka to 1.8.2
+ - Bump supported minimum Ruby version to 2.6
+ - Better homebrew path detection
+
+ ## 0.10.0 (2021-09-07)
+ - Upgrade librdkafka to 1.5.0
+ - Add error callback config
+
+ ## 0.9.0 (2021-06-23)
+ - Fixes for Ruby 3.0
+ - Allow any callable object for callbacks (gremerritt)
+ - Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
+ - Use queue as log callback to avoid unsafe calls from trap context (breunigs)
+ - Allow passing in topic configuration on create_topic (dezka)
+ - Add each_batch method to consumer (mgrosso)
+
+ ## 0.8.1 (2020-12-07)
+ - Fix topic_flag behaviour and add tests for Metadata (geoff2k)
+ - Add topic admin interface (geoff2k)
+ - Raise an exception if @native_kafka is nil (geoff2k)
+ - Option to use zstd compression (jasonmartens)
+
+ ## 0.8.0 (2020-06-02)
+ - Upgrade librdkafka to 1.4.0
+ - Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
+ - Ruby 2.7 compatibility fix (by Geoff Thé)A
+ - Add error to delivery report (by Alex Stanovsky)
+ - Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
+ - Allow use of Rake 13.x and up (by Tomasz Pajor)
+
+ ## 0.7.0 (2019-09-21)
+ - Bump librdkafka to 1.2.0 (by rob-as)
+ - Allow customizing the wait time for delivery report availability (by mensfeld)
+
+ ## 0.6.0 (2019-07-23)
+ - Bump librdkafka to 1.1.0 (by Chris Gaffney)
+ - Implement seek (by breunigs)
+
+ ## 0.5.0 (2019-04-11)
+ - Bump librdkafka to 1.0.0 (by breunigs)
+ - Add cluster and member information (by dmexe)
+ - Support message headers for consumer & producer (by dmexe)
+ - Add consumer rebalance listener (by dmexe)
+ - Implement pause/resume partitions (by dmexe)
+
+ ## 0.4.2 (2019-01-12)
+ - Delivery callback for producer
+ - Document list param of commit method
+ - Use default Homebrew openssl location if present
+ - Consumer lag handles empty topics
+ - End iteration in consumer when it is closed
+ - Add support for storing message offsets
+ - Add missing runtime dependency to rake
+
+ ## 0.4.1 (2018-10-19)
+ - Bump librdkafka to 0.11.6
+
+ ## 0.4.0 (2018-09-24)
+ - Improvements in librdkafka archive download
+ - Add global statistics callback
+ - Use Time for timestamps, potentially breaking change if you
  rely on the previous behavior where it returns an integer with
  the number of milliseconds.
- * Bump librdkafka to 0.11.5
- * Implement TopicPartitionList in Ruby so we don't have to keep
+ - Bump librdkafka to 0.11.5
+ - Implement TopicPartitionList in Ruby so we don't have to keep
  track of native objects.
- * Support committing a topic partition list
- * Add consumer assignment method
+ - Support committing a topic partition list
+ - Add consumer assignment method

- # 0.3.5
- * Fix crash when not waiting for delivery handles
- * Run specs on Ruby 2.5
+ ## 0.3.5 (2018-01-17)
+ - Fix crash when not waiting for delivery handles
+ - Run specs on Ruby 2.5

- # 0.3.4
- * Bump librdkafka to 0.11.3
+ ## 0.3.4 (2017-12-05)
+ - Bump librdkafka to 0.11.3

- # 0.3.3
- * Fix bug that prevent display of `RdkafkaError` message
+ ## 0.3.3 (2017-10-27)
+ - Fix bug that prevent display of `RdkafkaError` message

- # 0.3.2
- * `add_topic` now supports using a partition count
- * Add way to make errors clearer with an extra message
- * Show topics in subscribe error message
- * Show partition and topic in query watermark offsets error message
+ ## 0.3.2 (2017-10-25)
+ - `add_topic` now supports using a partition count
+ - Add way to make errors clearer with an extra message
+ - Show topics in subscribe error message
+ - Show partition and topic in query watermark offsets error message

- # 0.3.1
- * Bump librdkafka to 0.11.1
- * Officially support ranges in `add_topic` for topic partition list.
- * Add consumer lag calculator
+ ## 0.3.1 (2017-10-23)
+ - Bump librdkafka to 0.11.1
+ - Officially support ranges in `add_topic` for topic partition list.
+ - Add consumer lag calculator

- # 0.3.0
- * Move both add topic methods to one `add_topic` in `TopicPartitionList`
- * Add committed offsets to consumer
- * Add query watermark offset to consumer
+ ## 0.3.0 (2017-10-17)
+ - Move both add topic methods to one `add_topic` in `TopicPartitionList`
+ - Add committed offsets to consumer
+ - Add query watermark offset to consumer

- # 0.2.0
- * Some refactoring and add inline documentation
+ ## 0.2.0 (2017-10-13)
+ - Some refactoring and add inline documentation

- # 0.1.x
- * Initial working version including producing and consuming
+ ## 0.1.x (2017-09-10)
+ - Initial working version including producing and consuming
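
To make the 0.14.x/0.15.x entries above more concrete, here is a minimal producer-side sketch. It assumes the `label:` keyword on `#produce`, the `#label` reader on the handle, the `#topic` alias on the delivery report, and the boolean `#flush` described in the changelog; exact signatures may differ between releases, so treat it as illustrative rather than canonical.

```ruby
require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").producer

# 0.15.1: attach a label so the delivery handle/report can be correlated
# with application context (keyword name assumed from the changelog entry).
handle = producer.produce(
  topic: "example_topic",
  payload: "some payload",
  key: "some key",
  label: "order-42"
)

report = handle.wait(max_wait_timeout: 5)
puts report.topic # 0.15.1: alias of #topic_name on the delivery report
puts handle.label # the label travels with the handle and its report

# 0.14.0: #flush returns true/false instead of raising on timeout,
# and #purge drops any requests still sitting in the producer queue.
warn "not everything flushed" unless producer.flush(5_000)
producer.purge
producer.close
```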
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  source "https://rubygems.org"

  gemspec
data/{LICENSE → MIT-LICENSE} RENAMED
@@ -1,6 +1,7 @@
  The MIT License (MIT)

- Copyright (c) 2017 Thijs Cadier
+ Copyright (c) 2017-2023 Thijs Cadier
+ 2023, Maciej Mensfeld

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
data/README.md CHANGED
@@ -1,38 +1,60 @@
  # Rdkafka

- [![Build Status](https://appsignal.semaphoreci.com/badges/rdkafka-ruby/branches/master.svg?style=shields)](https://appsignal.semaphoreci.com/projects/rdkafka-ruby)
+ [![Build Status](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/rdkafka-ruby/actions/workflows/ci.yml)
  [![Gem Version](https://badge.fury.io/rb/rdkafka.svg)](https://badge.fury.io/rb/rdkafka)
- [![Maintainability](https://api.codeclimate.com/v1/badges/ecb1765f81571cccdb0e/maintainability)](https://codeclimate.com/github/appsignal/rdkafka-ruby/maintainability)
+ [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
+
+ > [!NOTE]
+ > The `rdkafka-ruby` gem was created and developed by [AppSignal](https://www.appsignal.com/). Their impactful contributions have significantly shaped the Ruby Kafka and Karafka ecosystems. For robust monitoring, we highly recommend AppSignal.
+
+ ---

  The `rdkafka` gem is a modern Kafka client library for Ruby based on
- [librdkafka](https://github.com/edenhill/librdkafka/).
+ [librdkafka](https://github.com/confluentinc/librdkafka/).
  It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
- gem and targets Kafka 1.0+ and Ruby versions that are under security or
- active maintenance. We remove Ruby version from our CI builds if they
+ gem and targets Kafka 1.0+ and Ruby versions under security or
+ active maintenance. We remove a Ruby version from our CI builds when they
  become EOL.

- `rdkafka` was written because we needed a reliable Ruby client for
- Kafka that supports modern Kafka at [AppSignal](https://appsignal.com).
- We run it in production on very high traffic systems.
+ `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.
+
+ The most important pieces of a Kafka client are implemented, and we aim to provide all relevant consumer, producer, and admin APIs.
+
+ ## Table of content
+
+ - [Project Scope](#project-scope)
+ - [Installation](#installation)
+ - [Usage](#usage)
+   * [Consuming Messages](#consuming-messages)
+   * [Producing Messages](#producing-messages)
+ - [Higher Level Libraries](#higher-level-libraries)
+   * [Message Processing Frameworks](#message-processing-frameworks)
+   * [Message Publishing Libraries](#message-publishing-libraries)
+ - [Development](#development)
+ - [Example](#example)
+ - [Versions](#versions)

- This gem only provides a high-level Kafka consumer. If you are running
- an older version of Kafka and/or need the legacy simple consumer we
- suggest using the [Hermann](https://github.com/reiseburo/hermann) gem.
+ ## Project Scope
+
+ While rdkafka-ruby aims to simplify the use of librdkafka in Ruby applications, it's important to understand the limitations of this library:
+
+ - **No Complex Producers/Consumers**: This library does not intend to offer complex producers or consumers. The aim is to stick closely to the functionalities provided by librdkafka itself.
+
+ - **Focus on librdkafka Capabilities**: Features that can be achieved directly in Ruby, without specific needs from librdkafka, are outside the scope of this library.
+
+ - **Existing High-Level Functionalities**: Certain high-level functionalities like producer metadata cache and simple consumer are already part of the library. Although they fall slightly outside the primary goal, they will remain part of the contract, given their existing usage.

- The most important pieces of a Kafka client are implemented. We're
- working towards feature completeness, you can track that here:
- https://github.com/appsignal/rdkafka-ruby/milestone/1

  ## Installation

  This gem downloads and compiles librdkafka when it is installed. If you
- have any problems installing the gem please open an issue.
+ If you have any problems installing the gem, please open an issue.

  ## Usage

- See the [documentation](https://www.rubydoc.info/github/appsignal/rdkafka-ruby) for full details on how to use this gem. Two quick examples:
+ See the [documentation](https://karafka.io/docs/code/rdkafka-ruby/) for full details on how to use this gem. Two quick examples:

- ### Consuming messages
+ ### Consuming Messages

  Subscribe to a topic and get messages. Kafka will automatically spread
  the available partitions over consumers with the same group id.
@@ -50,11 +72,11 @@ consumer.each do |message|
  end
  ```

- ### Producing messages
+ ### Producing Messages

- Produce a number of messages, put the delivery handles in an array and
+ Produce a number of messages, put the delivery handles in an array, and
  wait for them before exiting. This way the messages will be batched and
- sent to Kafka in an efficient way.
+ efficiently sent to Kafka.

  ```ruby
  config = {:"bootstrap.servers" => "localhost:9092"}
@@ -77,28 +99,42 @@ Note that creating a producer consumes some resources that will not be
  released until it `#close` is explicitly called, so be sure to call
  `Config#producer` only as necessary.

+ ## Higher Level Libraries
+
+ Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.
+
+ ### Message Processing Frameworks
+
+ * [Karafka](https://github.com/karafka/karafka) - Ruby and Rails efficient Kafka processing framework.
+ * [Racecar](https://github.com/zendesk/racecar) - A simple framework for Kafka consumers in Ruby
+
+ ### Message Publishing Libraries
+
+ * [WaterDrop](https://github.com/karafka/waterdrop) – Standalone Karafka library for producing Kafka messages.
+
  ## Development

- A Docker Compose file is included to run Kafka and Zookeeper. To run
- that:
+ Contributors are encouraged to focus on enhancements that align with the core goal of the library. We appreciate contributions but will likely not accept pull requests for features that:
+
+ - Implement functionalities that can achieved using standard Ruby capabilities without changes to the underlying rdkafka-ruby bindings.
+ - Deviate significantly from the primary aim of providing librdkafka bindings with Ruby-friendly interfaces.
+
+ A Docker Compose file is included to run Kafka. To run that:

  ```
  docker-compose up
  ```

- Run `bundle` and `cd ext && bundle exec rake && cd ..` to download and
- compile `librdkafka`.
+ Run `bundle` and `cd ext && bundle exec rake && cd ..` to download and compile `librdkafka`.

- You can then run `bundle exec rspec` to run the tests. To see rdkafka
- debug output:
+ You can then run `bundle exec rspec` to run the tests. To see rdkafka debug output:

  ```
  DEBUG_PRODUCER=true bundle exec rspec
  DEBUG_CONSUMER=true bundle exec rspec
  ```

- After running the tests you can bring the cluster down to start with a
- clean slate:
+ After running the tests, you can bring the cluster down to start with a clean slate:

  ```
  docker-compose down
@@ -106,9 +142,20 @@ docker-compose down

  ## Example

- To see everything working run these in separate tabs:
+ To see everything working, run these in separate tabs:

  ```
  bundle exec rake consume_messages
  bundle exec rake produce_messages
  ```
+
+ ## Versions
+
+ | rdkafka-ruby | librdkafka |
+ |-|-|
+ | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
+ | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
+ | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
+ | 0.12.0 (2022-06-17) | 1.9.0 (2022-06-16) |
+ | 0.11.0 (2021-11-17) | 1.8.2 (2021-10-18) |
+ | 0.10.0 (2021-09-07) | 1.5.0 (2020-07-20) |
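
The hunks above show only fragments of the README's two quick-start examples. For reference, a condensed version of the flow they describe looks roughly like this (topic and group names are placeholders; the consumer loop blocks until the process is stopped):

```ruby
require "rdkafka"

config = {
  "bootstrap.servers": "localhost:9092",
  "group.id": "ruby-test" # placeholder group id
}

# Producing: collect the delivery handles and wait for them before
# exiting so messages are batched and sent to Kafka efficiently.
producer = Rdkafka::Config.new("bootstrap.servers": "localhost:9092").producer
delivery_handles = 10.times.map do |i|
  producer.produce(
    topic: "ruby_test_topic",
    payload: "Payload #{i}",
    key: "Key #{i}"
  )
end
delivery_handles.each(&:wait)
producer.close

# Consuming: subscribe and iterate; Kafka spreads the available
# partitions over consumers sharing the same group id.
consumer = Rdkafka::Config.new(config).consumer
consumer.subscribe("ruby_test_topic")
consumer.each do |message|
  puts "Message received: #{message}"
end
```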
data/Rakefile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # Rakefile

  require 'bundler/gem_tasks'
data/certs/cert_chain.pem ADDED
@@ -0,0 +1,26 @@
+ -----BEGIN CERTIFICATE-----
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
+ MB4XDTIzMDgyMTA3MjU1NFoXDTI0MDgyMDA3MjU1NFowPzEQMA4GA1UEAwwHY29u
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAOuZpyQKEwsTG9plLat7
+ 8bUaNuNBEnouTsNMr6X+XTgvyrAxTuocdsyP1sNCjdS1B8RiiDH1/Nt9qpvlBWon
+ sdJ1SYhaWNVfqiYStTDnCx3PRMmHRdD4KqUWKpN6VpZ1O/Zu+9Mw0COmvXgZuuO9
+ wMSJkXRo6dTCfMedLAIxjMeBIxtoLR2e6Jm6MR8+8WYYVWrO9kSOOt5eKQLBY7aK
+ b/Dc40EcJKPg3Z30Pia1M9ZyRlb6SOj6SKpHRqc7vbVQxjEw6Jjal1lZ49m3YZMd
+ ArMAs9lQZNdSw5/UX6HWWURLowg6k10RnhTUtYyzO9BFev0JFJftHnmuk8vtb+SD
+ 5VPmjFXg2VOcw0B7FtG75Vackk8QKfgVe3nSPhVpew2CSPlbJzH80wChbr19+e3+
+ YGr1tOiaJrL6c+PNmb0F31NXMKpj/r+n15HwlTMRxQrzFcgjBlxf2XFGnPQXHhBm
+ kp1OFnEq4GG9sON4glRldkwzi/f/fGcZmo5fm3d+0ZdNgwIDAQABo3cwdTAJBgNV
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUPVH5+dLA80A1kJ2Uz5iGwfOa
+ 1+swHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAnpa0jcN7JzREHMTQ
+ bfZ+xcvlrzuROMY6A3zIZmQgbnoZZNuX4cMRrT1p1HuwXpxdpHPw7dDjYqWw3+1h
+ 3mXLeMuk7amjQpYoSWU/OIZMhIsARra22UN8qkkUlUj3AwTaChVKN/bPJOM2DzfU
+ kz9vUgLeYYFfQbZqeI6SsM7ltilRV4W8D9yNUQQvOxCFxtLOetJ00fC/E7zMUzbK
+ IBwYFQYsbI6XQzgAIPW6nGSYKgRhkfpmquXSNKZRIQ4V6bFrufa+DzD0bt2ZA3ah
+ fMmJguyb5L2Gf1zpDXzFSPMG7YQFLzwYz1zZZvOU7/UCpQsHpID/YxqDp4+Dgb+Y
+ qma0whX8UG/gXFV2pYWpYOfpatvahwi+A1TwPQsuZwkkhi1OyF1At3RY+hjSXyav
+ AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
+ msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
+ -----END CERTIFICATE-----
data/docker-compose.yml CHANGED
@@ -1,24 +1,27 @@
- ---
-
  version: '2'

  services:
-   zookeeper:
-     image: confluentinc/cp-zookeeper:5.2.6
-     environment:
-       ZOOKEEPER_CLIENT_PORT: 2181
-       ZOOKEEPER_TICK_TIME: 2000
-
    kafka:
-     image: confluentinc/cp-kafka:5.2.5-10
-     depends_on:
-       - zookeeper
+     container_name: kafka
+     image: confluentinc/cp-kafka:7.5.3
+
      ports:
        - 9092:9092
+
      environment:
-       KAFKA_BROKER_ID: 1
-       KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29092,PLAINTEXT_HOST://localhost:9092
-       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+       CLUSTER_ID: kafka-docker-cluster-1
        KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
        KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+       KAFKA_PROCESS_ROLES: broker,controller
+       KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
+       KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
+       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
+       KAFKA_BROKER_ID: 1
+       KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093
+       ALLOW_PLAINTEXT_LISTENER: 'yes'
+       KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
+       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+       KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
+       KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
data/ext/README.md CHANGED
@@ -5,7 +5,7 @@ this gem is installed.

  To update the `librdkafka` version follow the following steps:

- * Go to https://github.com/edenhill/librdkafka/releases to get the new
+ * Go to https://github.com/confluentinc/librdkafka/releases to get the new
    version number and asset checksum for `tar.gz`.
  * Change the version in `lib/rdkafka/version.rb`
  * Change the `sha256` in `lib/rdkafka/version.rb`
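
The update steps above point at `lib/rdkafka/version.rb`. As a rough illustration (constant names assumed from recent gem releases, values are placeholders), the bump amounts to editing constants along these lines:

```ruby
# lib/rdkafka/version.rb -- sketch only; constant names assumed,
# values are placeholders for the new librdkafka release.
module Rdkafka
  VERSION = "0.15.1"
  LIBRDKAFKA_VERSION = "2.3.0"
  LIBRDKAFKA_SOURCE_SHA256 = "<sha256 of the librdkafka tar.gz release asset>"
end
```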