karafka-rdkafka 0.13.8 → 0.14.0.beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.gitignore +4 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +42 -32
- data/{LICENSE → MIT-LICENSE} +2 -1
- data/README.md +11 -11
- data/docker-compose.yml +1 -1
- data/ext/README.md +1 -1
- data/ext/Rakefile +1 -1
- data/lib/rdkafka/abstract_handle.rb +37 -24
- data/lib/rdkafka/admin.rb +6 -7
- data/lib/rdkafka/bindings.rb +0 -4
- data/lib/rdkafka/config.rb +30 -15
- data/lib/rdkafka/consumer/headers.rb +2 -4
- data/lib/rdkafka/consumer.rb +52 -55
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/producer.rb +8 -15
- data/lib/rdkafka/version.rb +3 -3
- data/lib/rdkafka.rb +10 -1
- data/spec/rdkafka/abstract_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin_spec.rb +0 -1
- data/spec/rdkafka/bindings_spec.rb +0 -1
- data/spec/rdkafka/callbacks_spec.rb +0 -2
- data/spec/rdkafka/config_spec.rb +8 -2
- data/spec/rdkafka/consumer/headers_spec.rb +0 -2
- data/spec/rdkafka/consumer/message_spec.rb +0 -2
- data/spec/rdkafka/consumer/partition_spec.rb +0 -2
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
- data/spec/rdkafka/consumer_spec.rb +47 -1
- data/spec/rdkafka/error_spec.rb +0 -2
- data/spec/rdkafka/metadata_spec.rb +0 -1
- data/spec/rdkafka/native_kafka_spec.rb +0 -2
- data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
- data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
- data/spec/rdkafka/producer_spec.rb +0 -1
- data.tar.gz.sig +3 -2
- metadata +9 -6
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5e180bea275e4b1e2b0f43ba8ec7215b0b96cb926b708ee38a6a4cb73116be53
+  data.tar.gz: 6558fb60a50c96bd11054400e5effb89f42c9b2a35ea23f02cf3b56f96c7142a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7449b5221257909fbf806e5a78446566659ffb71ed40ee2ced5ef51624a2ad9b8c710edf4a34dd024c96e03cd698698cd235c1b58cf3e9d407e051e95be80fa9
+  data.tar.gz: 3ad4a979ce55647485d75f2a45da3dd444ced6231b555557b3d888ef6641365b4fb675a7e29f019a2df7ff960cc2561d0141291d8a344be5bd87b413ee64a6e3
checksums.yaml.gz.sig
CHANGED
Binary file
data/.gitignore
CHANGED
data/.rspec
CHANGED
data/.ruby-gemset
ADDED
@@ -0,0 +1 @@
+rdkafka-ruby
data/.ruby-version
ADDED
@@ -0,0 +1 @@
+3.2.2
data/CHANGELOG.md
CHANGED
@@ -1,33 +1,43 @@
-#
+# Rdkafka Changelog
+
+## 0.14.0 (Unreleased)
+- [Enhancement] Bump librdkafka to 2.3.0
+- [Enhancement] Increase the `#lag` and `#query_watermark_offsets` default timeouts from 100ms to 1000ms. This will compensate for network glitches and remote clusters operations.
+
+## 0.13.9 (2023-11-07)
+- [Enhancement] Expose alternative way of managing consumer events via a separate queue.
+- [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem.
+
+## 0.13.8 (2023-10-31)
 - [Enhancement] Get consumer position (thijsc & mensfeld)
 
-
+## 0.13.7 (2023-10-31)
 - [Change] Drop support for Ruby 2.6 due to incompatibilities in usage of `ObjectSpace::WeakMap`
 - [Fix] Fix dangling Opaque references.
 
-
+## 0.13.6 (2023-10-17)
 * **[Feature]** Support transactions API in the producer
 * [Enhancement] Add `raise_response_error` flag to the `Rdkafka::AbstractHandle`.
 * [Enhancement] Provide `#purge` to remove any outstanding requests from the producer.
 * [Enhancement] Fix `#flush` does not handle the timeouts errors by making it return true if all flushed or false if failed. We do **not** raise an exception here to keep it backwards compatible.
 
-
+## 0.13.5
 * Fix DeliveryReport `create_result#error` being nil despite an error being associated with it
 
-
+## 0.13.4
 * Always call initial poll on librdkafka to make sure oauth bearer cb is handled pre-operations.
 
-
+## 0.13.3
 * Bump librdkafka to 2.2.0
 
-
+## 0.13.2
 * Ensure operations counter decrement is fully thread-safe
 * Bump librdkafka to 2.1.1
 
-
+## 0.13.1
 * Add offsets_for_times method on consumer (timflapper)
 
-
+## 0.13.0 (2023-07-24)
 * Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
 * Support both string and symbol header keys (ColinDKelley)
 * Handle tombstone messages properly (kgalieva)
@@ -48,32 +58,32 @@
 * Retry metadta fetches on certain errors with a backoff (mensfeld)
 * Do not lock access to underlying native kafka client and rely on Karafka granular locking (mensfeld)
 
-
+## 0.12.3
 - Include backtrace in non-raised binded errors.
 - Include topic name in the delivery reports
 
-
+## 0.12.2
 * Increase the metadata default timeout from 250ms to 2 seconds. This should allow for working with remote clusters.
 
-
+## 0.12.1
 * Bumps librdkafka to 2.0.2 (lmaia)
 * Add support for adding more partitions via Admin API
 
-
+## 0.12.0 (2022-06-17)
 * Bumps librdkafka to 1.9.0
 * Fix crash on empty partition key (mensfeld)
 * Pass the delivery handle to the callback (gvisokinskas)
 
-
+## 0.11.0 (2021-11-17)
 * Upgrade librdkafka to 1.8.2
 * Bump supported minimum Ruby version to 2.6
 * Better homebrew path detection
 
-
+## 0.10.0 (2021-09-07)
 * Upgrade librdkafka to 1.5.0
 * Add error callback config
 
-
+## 0.9.0 (2021-06-23)
 * Fixes for Ruby 3.0
 * Allow any callable object for callbacks (gremerritt)
 * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
@@ -81,13 +91,13 @@
 * Allow passing in topic configuration on create_topic (dezka)
 * Add each_batch method to consumer (mgrosso)
 
-
+## 0.8.1 (2020-12-07)
 * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
 * Add topic admin interface (geoff2k)
 * Raise an exception if @native_kafka is nil (geoff2k)
 * Option to use zstd compression (jasonmartens)
 
-
+## 0.8.0 (2020-06-02)
 * Upgrade librdkafka to 1.4.0
 * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
 * Ruby 2.7 compatibility fix (by Geoff Thé)
@@ -95,22 +105,22 @@
 * Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
 * Allow use of Rake 13.x and up (by Tomasz Pajor)
 
-
+## 0.7.0 (2019-09-21)
 * Bump librdkafka to 1.2.0 (by rob-as)
 * Allow customizing the wait time for delivery report availability (by mensfeld)
 
-
+## 0.6.0 (2019-07-23)
 * Bump librdkafka to 1.1.0 (by Chris Gaffney)
 * Implement seek (by breunigs)
 
-
+## 0.5.0 (2019-04-11)
 * Bump librdkafka to 1.0.0 (by breunigs)
 * Add cluster and member information (by dmexe)
 * Support message headers for consumer & producer (by dmexe)
 * Add consumer rebalance listener (by dmexe)
 * Implement pause/resume partitions (by dmexe)
 
-
+## 0.4.2 (2019-01-12)
 * Delivery callback for producer
 * Document list param of commit method
 * Use default Homebrew openssl location if present
@@ -119,10 +129,10 @@
 * Add support for storing message offsets
 * Add missing runtime dependency to rake
 
-
+## 0.4.1 (2018-10-19)
 * Bump librdkafka to 0.11.6
 
-
+## 0.4.0 (2018-09-24)
 * Improvements in librdkafka archive download
 * Add global statistics callback
 * Use Time for timestamps, potentially breaking change if you
@@ -134,34 +144,34 @@
 * Support committing a topic partition list
 * Add consumer assignment method
 
-
+## 0.3.5 (2018-01-17)
 * Fix crash when not waiting for delivery handles
 * Run specs on Ruby 2.5
 
-
+## 0.3.4 (2017-12-05)
 * Bump librdkafka to 0.11.3
 
-
+## 0.3.3 (2017-10-27)
 * Fix bug that prevent display of `RdkafkaError` message
 
-
+## 0.3.2 (2017-10-25)
 * `add_topic` now supports using a partition count
 * Add way to make errors clearer with an extra message
 * Show topics in subscribe error message
 * Show partition and topic in query watermark offsets error message
 
-
+## 0.3.1 (2017-10-23)
 * Bump librdkafka to 0.11.1
 * Officially support ranges in `add_topic` for topic partition list.
 * Add consumer lag calculator
 
-
+## 0.3.0 (2017-10-17)
 * Move both add topic methods to one `add_topic` in `TopicPartitionList`
 * Add committed offsets to consumer
 * Add query watermark offset to consumer
 
-
+## 0.2.0 (2017-10-13)
 * Some refactoring and add inline documentation
 
-
+## 0.1.x (2017-09-10)
 * Initial working version including producing and consuming
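A note on the 0.13.6 entries above: `#flush` returning a boolean instead of raising on timeout pairs naturally with `#purge`. A minimal sketch of that flow, assuming a local broker; the topic name and timeout value are illustrative:

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").producer
producer.produce(topic: "example-topic", payload: "hello")

# Since 0.13.6, #flush returns true when everything was delivered within
# the timeout and false otherwise; it does not raise on timeout.
unless producer.flush(5_000)
  # #purge (also added in 0.13.6) drops any requests still sitting in the queue
  producer.purge
end

producer.close
```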
data/{LICENSE → MIT-LICENSE}
RENAMED
@@ -1,6 +1,7 @@
 The MIT License (MIT)
 
-Copyright (c) 2017 Thijs Cadier
+Copyright (c) 2017-2023 Thijs Cadier
+2023, Maciej Mensfeld
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
data/README.md
CHANGED
@@ -10,16 +10,16 @@
 ---
 
 The `rdkafka` gem is a modern Kafka client library for Ruby based on
-[librdkafka](https://github.com/
+[librdkafka](https://github.com/confluentinc/librdkafka/).
 It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
-gem and targets Kafka 1.0+ and Ruby versions
-active maintenance. We remove Ruby version from our CI builds
+gem and targets Kafka 1.0+ and Ruby versions under security or
+active maintenance. We remove a Ruby version from our CI builds when they
 become EOL.
 
 `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.
 
 The most important pieces of a Kafka client are implemented. We're
-working towards feature completeness
+working towards feature completeness. You can track that here:
 https://github.com/appsignal/rdkafka-ruby/milestone/1
 
 ## Table of content
@@ -38,7 +38,7 @@ https://github.com/appsignal/rdkafka-ruby/milestone/1
 ## Installation
 
 This gem downloads and compiles librdkafka when it is installed. If you
-have any problems installing the gem please open an issue.
+If you have any problems installing the gem, please open an issue.
 
 ## Usage
 
@@ -64,9 +64,9 @@ end
 
 ### Producing messages
 
-Produce a number of messages, put the delivery handles in an array and
+Produce a number of messages, put the delivery handles in an array, and
 wait for them before exiting. This way the messages will be batched and
-sent to Kafka
+efficiently sent to Kafka.
 
 ```ruby
 config = {:"bootstrap.servers" => "localhost:9092"}
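The README's producing example is cut off in this diff. For context, a sketch of the batching pattern it describes (topic and payloads are illustrative):

```ruby
config = {:"bootstrap.servers" => "localhost:9092"}
producer = Rdkafka::Config.new(config).producer

delivery_handles = 10.times.map do |i|
  producer.produce(
    topic:   "ruby-test-topic",
    payload: "Payload #{i}",
    key:     "Key #{i}"
  )
end

# Waiting only at the end lets librdkafka batch the messages into fewer,
# larger requests instead of blocking after every single produce call.
delivery_handles.each(&:wait)
```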
@@ -91,7 +91,7 @@ released until it `#close` is explicitly called, so be sure to call
 
 ## Higher level libraries
 
-Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher
+Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.
 
 ### Message processing frameworks
 
@@ -104,7 +104,7 @@ Currently, there are two actively developed frameworks based on rdkafka-ruby, th
 
 ## Development
 
-A Docker Compose file is included to run Kafka
+A Docker Compose file is included to run Kafka. To run
 that:
 
 ```
@@ -122,7 +122,7 @@ DEBUG_PRODUCER=true bundle exec rspec
 DEBUG_CONSUMER=true bundle exec rspec
 ```
 
-After running the tests you can bring the cluster down to start with a
+After running the tests, you can bring the cluster down to start with a
 clean slate:
 
 ```
@@ -131,7 +131,7 @@ docker-compose down
 
 ## Example
 
-To see everything working run these in separate tabs:
+To see everything working, run these in separate tabs:
 
 ```
 bundle exec rake consume_messages
data/docker-compose.yml
CHANGED
data/ext/README.md
CHANGED
@@ -5,7 +5,7 @@ this gem is installed.
 
 To update the `librdkafka` version follow the following steps:
 
-* Go to https://github.com/
+* Go to https://github.com/confluentinc/librdkafka/releases to get the new
   version number and asset checksum for `tar.gz`.
 * Change the version in `lib/rdkafka/version.rb`
 * Change the `sha256` in `lib/rdkafka/version.rb`
data/ext/Rakefile
CHANGED
@@ -17,7 +17,7 @@ task :default => :clean do
 end
 
 recipe.files << {
-  :url => "https://codeload.github.com/
+  :url => "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
   :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
 }
 recipe.configure_options = ["--host=#{recipe.host}"]
data/lib/rdkafka/abstract_handle.rb
CHANGED
@@ -1,28 +1,37 @@
 # frozen_string_literal: true
 
-require "ffi"
-
 module Rdkafka
+  # This class serves as an abstract base class to represent handles within the Rdkafka module.
+  # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle
+  # classes to inherit from, ensuring they adhere to a particular structure and behavior.
+  #
+  # Subclasses must define their own layout, and the layout must start with:
+  #
+  # layout :pending, :bool,
+  #        :response, :int
   class AbstractHandle < FFI::Struct
-
-    #
-    # layout :pending, :bool,
-    #        :response, :int
+    include Helpers::Time
 
+    # Registry for registering all the handles.
     REGISTRY = {}
 
-
-
-
+    class << self
+      # Adds handle to the register
+      #
+      # @param handle [AbstractHandle] any handle we want to register
+      def register(handle)
+        address = handle.to_ptr.address
+        REGISTRY[address] = handle
+      end
 
-
-
-
+      # Removes handle from the register based on the handle address
+      #
+      # @param address [Integer] address of the registered handle we want to remove
+      def remove(address)
+        REGISTRY.delete(address)
+      end
     end
 
-    def self.remove(address)
-      REGISTRY.delete(address)
-    end
 
     # Whether the handle is still pending.
     #
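`include Helpers::Time` supplies the `monotonic_now` used in `#wait` below. The new `data/lib/rdkafka/helpers/time.rb` (+14 lines in the file list) is not shown in this diff; a plausible minimal sketch of such a helper, based on how monotonic clocks are usually read in Ruby, would be:

```ruby
module Rdkafka
  module Helpers
    # Sketch only: the real file is not included in this diff.
    module Time
      # @return [Float] current monotonic time in seconds
      def monotonic_now
        # A monotonic clock never jumps backwards, which makes it safe
        # for computing wait deadlines, unlike Time.now.
        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
      end
    end
  end
end
```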
@@ -32,27 +41,31 @@ module Rdkafka
     end
 
     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
-    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
-    # In this case it is possible to call wait again.
+    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
+    # on the operation. In this case it is possible to call wait again.
     #
-    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
-    #
+    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
+    #   If this is nil it does not time out.
+    # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the
+    #   operation has completed
     # @param raise_response_error [Boolean] should we raise error when waiting finishes
     #
+    # @return [Object] Operation-specific result
+    #
     # @raise [RdkafkaError] When the operation failed
     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
-    #
-    # @return [Object] Operation-specific result
     def wait(max_wait_timeout: 60, wait_timeout: 0.1, raise_response_error: true)
       timeout = if max_wait_timeout
-
+        monotonic_now + max_wait_timeout
       else
         nil
       end
       loop do
         if pending?
-          if timeout && timeout <=
-            raise WaitTimeoutError.new(
+          if timeout && timeout <= monotonic_now
+            raise WaitTimeoutError.new(
+              "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+            )
           end
           sleep wait_timeout
         elsif self[:response] != 0 && raise_response_error
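Concrete subclasses such as the producer's delivery handle expose this `#wait` API directly. A brief usage sketch (producer setup omitted):

```ruby
handle = producer.produce(topic: "example-topic", payload: "hello")

# Raises Rdkafka::AbstractHandle::WaitTimeoutError if still pending after
# 10 seconds; since rdkafka may still complete the operation, calling
# #wait again afterwards is safe.
report = handle.wait(max_wait_timeout: 10)

# With raise_response_error: false a failed operation does not raise
# RdkafkaError; the operation-specific result is returned instead.
handle.wait(max_wait_timeout: 10, raise_response_error: false)
```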
data/lib/rdkafka/admin.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   class Admin
     # @private
@@ -30,11 +28,12 @@ module Rdkafka
 
     # Create a topic with the given partition count and replication factor
     #
+    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of
+    #   creating the topic
+    #
     # @raise [ConfigError] When the partition count or replication factor are out of valid range
     # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
     # @raise [RdkafkaError] When the topic configuration is invalid
-    #
-    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
     def create_topic(topic_name, partition_count, replication_factor, topic_config={})
       closed_admin_check(__method__)
 
@@ -107,11 +106,11 @@ module Rdkafka
       create_topic_handle
     end
 
-    #
+    # Deletes the named topic
     #
+    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of
+    #   deleting the topic
     # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
-    #
-    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
     def delete_topic(topic_name)
       closed_admin_check(__method__)
 
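Both admin handles combine with `AbstractHandle#wait` from above; a short usage sketch (broker address and topic settings are illustrative):

```ruby
admin = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").admin

# 3 partitions, replication factor 1; #wait surfaces RdkafkaError if the
# topic already exists or its configuration is invalid.
admin.create_topic("example-topic", 3, 1).wait

# Same wait semantics for deletion.
admin.delete_topic("example-topic").wait

admin.close
```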
data/lib/rdkafka/bindings.rb
CHANGED
data/lib/rdkafka/config.rb
CHANGED
@@ -1,11 +1,9 @@
 # frozen_string_literal: true
 
-require "logger"
-
 module Rdkafka
   # Configuration for a Kafka consumer or producer. You can create an instance and use
   # the consumer and producer methods to create a client. Documentation of the available
-  # configuration options is available on https://github.com/
+  # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
   class Config
     # @private
     @@logger = Logger.new(STDOUT)
@@ -53,13 +51,13 @@ module Rdkafka
 
     # Set a callback that will be called every time the underlying client emits statistics.
     # You can configure if and how often this happens using `statistics.interval.ms`.
-    # The callback is called with a hash that's documented here: https://github.com/
+    # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
     #
     # @param callback [Proc, #call] The callback
     #
     # @return [nil]
     def self.statistics_callback=(callback)
-      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
       @@statistics_callback = callback
     end
 
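The relaxed guard above is what implements the 0.13.9 changelog entry about resetting the callback. A short sketch:

```ruby
# Any callable is accepted, e.g. a proc inspecting the emitted hash
# (keys follow librdkafka's STATISTICS.md, such as "msg_cnt"):
Rdkafka::Config.statistics_callback = ->(stats) do
  puts "messages waiting in queues: #{stats['msg_cnt']}"
end

# Since 0.13.9 nil is also accepted, clearing a callback that another
# gem may have pre-configured:
Rdkafka::Config.statistics_callback = nil
```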
@@ -114,6 +112,7 @@
     def initialize(config_hash = {})
       @config_hash = DEFAULT_CONFIG.merge(config_hash)
       @consumer_rebalance_listener = nil
+      @consumer_poll_set = true
     end
 
     # Set a config option.
@@ -142,12 +141,28 @@
       @consumer_rebalance_listener = listener
     end
 
-    #
+    # Should we use a single queue for the underlying consumer and events.
     #
-    #
-    #
+    # This is an advanced API that allows for more granular control of the polling process.
+    # When this value is set to `false` (`true` by defualt), there will be two queues that need to
+    # be polled:
+    # - main librdkafka queue for events
+    # - consumer queue with messages and rebalances
+    #
+    # It is recommended to use the defaults and only set it to `false` in advance multi-threaded
+    # and complex cases where granular events handling control is needed.
+    #
+    # @param poll_set [Boolean]
+    def consumer_poll_set=(poll_set)
+      @consumer_poll_set = poll_set
+    end
+
+    # Creates a consumer with this configuration.
     #
     # @return [Consumer] The created consumer
+    #
+    # @raise [ConfigError] When the configuration contains invalid options
+    # @raise [ClientCreationError] When the native client cannot be created
     def consumer
       opaque = Opaque.new
       config = native_config(opaque)
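A sketch of the two-queue mode enabled by `consumer_poll_set = false`. The method used below to drain the events queue (`#events_poll`) comes from the 0.13.9 changelog entry about a separate events queue; its exact name and signature here are an assumption:

```ruby
config = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092")

# Advanced mode: do not redirect librdkafka's main queue into the
# consumer queue, so events and messages are polled separately.
config.consumer_poll_set = false

consumer = config.consumer
consumer.subscribe("example-topic")

loop do
  # Assumed API: drain main-queue events (statistics, errors, etc.)
  consumer.events_poll(100)

  # Regular message/rebalance polling on the consumer queue
  message = consumer.poll(250)
  puts message.payload if message
end
```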
@@ -160,8 +175,8 @@
       # Create native client
       kafka = native_kafka(config, :rd_kafka_consumer)
 
-      # Redirect the main queue to the consumer
-      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
+      # Redirect the main queue to the consumer queue
+      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set
 
       # Return consumer with Kafka client
       Rdkafka::Consumer.new(
@@ -175,10 +190,10 @@
 
     # Create a producer with this configuration.
     #
+    # @return [Producer] The created producer
+    #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Producer] The created producer
     def producer
       # Create opaque
       opaque = Opaque.new
@@ -200,12 +215,12 @@
     end
     end
 
-    #
+    # Creates an admin instance with this configuration.
+    #
+    # @return [Admin] The created admin instance
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Admin] The created admin instance
     def admin
       opaque = Opaque.new
       config = native_config(opaque)
data/lib/rdkafka/consumer/headers.rb
CHANGED
@@ -18,13 +18,11 @@ module Rdkafka
 
     # Reads a librdkafka native message's headers and returns them as a Ruby Hash
     #
-    # @
+    # @private
     #
+    # @param [librdkakfa message] native_message
     # @return [Hash<String, String>] headers Hash for the native_message
-    #
     # @raise [Rdkafka::RdkafkaError] when fail to read headers
-    #
-    # @private
     def self.from_native(native_message)
       headers_ptrptr = FFI::MemoryPointer.new(:pointer)
       err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
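`Headers.from_native` is internal (`@private`); applications reach it through the message object. A brief consumer-side sketch (topic and header key are illustrative):

```ruby
consumer.subscribe("example-topic")

message = consumer.poll(1_000)

if message
  # message.headers is the Hash built by Headers.from_native; per the
  # 0.13.0 changelog, keys can be looked up as strings or symbols.
  puts message.headers["source"]
end
```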