karafka-rdkafka 0.13.8 → 0.13.9
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.gitignore +4 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +38 -32
- data/{LICENSE → MIT-LICENSE} +2 -1
- data/README.md +11 -11
- data/ext/README.md +1 -1
- data/ext/Rakefile +1 -1
- data/lib/rdkafka/abstract_handle.rb +37 -24
- data/lib/rdkafka/admin.rb +6 -7
- data/lib/rdkafka/bindings.rb +0 -4
- data/lib/rdkafka/config.rb +30 -15
- data/lib/rdkafka/consumer/headers.rb +2 -4
- data/lib/rdkafka/consumer.rb +50 -53
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/producer.rb +8 -15
- data/lib/rdkafka/version.rb +1 -1
- data/lib/rdkafka.rb +10 -1
- data/spec/rdkafka/abstract_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -2
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -2
- data/spec/rdkafka/admin_spec.rb +0 -1
- data/spec/rdkafka/bindings_spec.rb +0 -1
- data/spec/rdkafka/callbacks_spec.rb +0 -2
- data/spec/rdkafka/config_spec.rb +8 -2
- data/spec/rdkafka/consumer/headers_spec.rb +0 -2
- data/spec/rdkafka/consumer/message_spec.rb +0 -2
- data/spec/rdkafka/consumer/partition_spec.rb +0 -2
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -2
- data/spec/rdkafka/consumer_spec.rb +47 -1
- data/spec/rdkafka/error_spec.rb +0 -2
- data/spec/rdkafka/metadata_spec.rb +0 -1
- data/spec/rdkafka/native_kafka_spec.rb +0 -2
- data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -2
- data/spec/rdkafka/producer/delivery_report_spec.rb +0 -2
- data/spec/rdkafka/producer_spec.rb +0 -1
- data.tar.gz.sig +0 -0
- metadata +7 -4
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c954a06b4461885d7648e8081d0426ebb895a82f1a27607f2224fc9e60843574
+  data.tar.gz: a25bf01b430920c7fd2cf22b0dd23ea1de9dd15d1b45f614025e97a3b725f8f3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: afe480fedbbe5dc6f055709aaf3601acb17aaaa890f1c6e7423b90263eb8fa893de0e8b9bb571e6abf1c5fc0b8df2a6be2e5de4af1de1b68414204275981246e
+  data.tar.gz: 7bb81ff6dcbc95b018ede8bd986eda701f1f5f3bb3985684d436a38a2173f8753b01e1642780eb9ad52a65ad02e2b3b7d656c4259758c8571ef2858822384875
checksums.yaml.gz.sig
CHANGED
Binary file
data/.gitignore
CHANGED
data/.rspec
CHANGED
data/.ruby-gemset
ADDED
@@ -0,0 +1 @@
+rdkafka-ruby
data/.ruby-version
ADDED
@@ -0,0 +1 @@
+3.2.2
data/CHANGELOG.md
CHANGED
@@ -1,33 +1,39 @@
-#
+# Rdkafka Changelog
+
+## 0.13.9 (2023-11-07)
+- [Enhancement] Expose alternative way of managing consumer events via a separate queue.
+- [Enhancement] Allow for setting `statistics_callback` as nil to reset predefined settings configured by a different gem.
+
+## 0.13.8 (2023-10-31)
 - [Enhancement] Get consumer position (thijsc & mensfeld)
 
-
+## 0.13.7 (2023-10-31)
 - [Change] Drop support for Ruby 2.6 due to incompatibilities in usage of `ObjectSpace::WeakMap`
 - [Fix] Fix dangling Opaque references.
 
-
+## 0.13.6 (2023-10-17)
 * **[Feature]** Support transactions API in the producer
 * [Enhancement] Add `raise_response_error` flag to the `Rdkafka::AbstractHandle`.
 * [Enhancement] Provide `#purge` to remove any outstanding requests from the producer.
 * [Enhancement] Fix `#flush` does not handle the timeouts errors by making it return true if all flushed or false if failed. We do **not** raise an exception here to keep it backwards compatible.
 
-
+## 0.13.5
 * Fix DeliveryReport `create_result#error` being nil despite an error being associated with it
 
-
+## 0.13.4
 * Always call initial poll on librdkafka to make sure oauth bearer cb is handled pre-operations.
 
-
+## 0.13.3
 * Bump librdkafka to 2.2.0
 
-
+## 0.13.2
 * Ensure operations counter decrement is fully thread-safe
 * Bump librdkafka to 2.1.1
 
-
+## 0.13.1
 * Add offsets_for_times method on consumer (timflapper)
 
-
+## 0.13.0 (2023-07-24)
 * Support cooperative sticky partition assignment in the rebalance callback (methodmissing)
 * Support both string and symbol header keys (ColinDKelley)
 * Handle tombstone messages properly (kgalieva)
@@ -48,32 +54,32 @@
 * Retry metadta fetches on certain errors with a backoff (mensfeld)
 * Do not lock access to underlying native kafka client and rely on Karafka granular locking (mensfeld)
 
-
+## 0.12.3
 - Include backtrace in non-raised binded errors.
 - Include topic name in the delivery reports
 
-
+## 0.12.2
 * Increase the metadata default timeout from 250ms to 2 seconds. This should allow for working with remote clusters.
 
-
+## 0.12.1
 * Bumps librdkafka to 2.0.2 (lmaia)
 * Add support for adding more partitions via Admin API
 
-
+## 0.12.0 (2022-06-17)
 * Bumps librdkafka to 1.9.0
 * Fix crash on empty partition key (mensfeld)
 * Pass the delivery handle to the callback (gvisokinskas)
 
-
+## 0.11.0 (2021-11-17)
 * Upgrade librdkafka to 1.8.2
 * Bump supported minimum Ruby version to 2.6
 * Better homebrew path detection
 
-
+## 0.10.0 (2021-09-07)
 * Upgrade librdkafka to 1.5.0
 * Add error callback config
 
-
+## 0.9.0 (2021-06-23)
 * Fixes for Ruby 3.0
 * Allow any callable object for callbacks (gremerritt)
 * Reduce memory allocations in Rdkafka::Producer#produce (jturkel)
@@ -81,13 +87,13 @@
 * Allow passing in topic configuration on create_topic (dezka)
 * Add each_batch method to consumer (mgrosso)
 
-
+## 0.8.1 (2020-12-07)
 * Fix topic_flag behaviour and add tests for Metadata (geoff2k)
 * Add topic admin interface (geoff2k)
 * Raise an exception if @native_kafka is nil (geoff2k)
 * Option to use zstd compression (jasonmartens)
 
-
+## 0.8.0 (2020-06-02)
 * Upgrade librdkafka to 1.4.0
 * Integrate librdkafka metadata API and add partition_key (by Adithya-copart)
 * Ruby 2.7 compatibility fix (by Geoff Thé)
@@ -95,22 +101,22 @@
 * Don't override CPPFLAGS and LDFLAGS if already set on Mac (by Hiroshi Hatake)
 * Allow use of Rake 13.x and up (by Tomasz Pajor)
 
-
+## 0.7.0 (2019-09-21)
 * Bump librdkafka to 1.2.0 (by rob-as)
 * Allow customizing the wait time for delivery report availability (by mensfeld)
 
-
+## 0.6.0 (2019-07-23)
 * Bump librdkafka to 1.1.0 (by Chris Gaffney)
 * Implement seek (by breunigs)
 
-
+## 0.5.0 (2019-04-11)
 * Bump librdkafka to 1.0.0 (by breunigs)
 * Add cluster and member information (by dmexe)
 * Support message headers for consumer & producer (by dmexe)
 * Add consumer rebalance listener (by dmexe)
 * Implement pause/resume partitions (by dmexe)
 
-
+## 0.4.2 (2019-01-12)
 * Delivery callback for producer
 * Document list param of commit method
 * Use default Homebrew openssl location if present
@@ -119,10 +125,10 @@
 * Add support for storing message offsets
 * Add missing runtime dependency to rake
 
-
+## 0.4.1 (2018-10-19)
 * Bump librdkafka to 0.11.6
 
-
+## 0.4.0 (2018-09-24)
 * Improvements in librdkafka archive download
 * Add global statistics callback
 * Use Time for timestamps, potentially breaking change if you
@@ -134,34 +140,34 @@
 * Support committing a topic partition list
 * Add consumer assignment method
 
-
+## 0.3.5 (2018-01-17)
 * Fix crash when not waiting for delivery handles
 * Run specs on Ruby 2.5
 
-
+## 0.3.4 (2017-12-05)
 * Bump librdkafka to 0.11.3
 
-
+## 0.3.3 (2017-10-27)
 * Fix bug that prevent display of `RdkafkaError` message
 
-
+## 0.3.2 (2017-10-25)
 * `add_topic` now supports using a partition count
 * Add way to make errors clearer with an extra message
 * Show topics in subscribe error message
 * Show partition and topic in query watermark offsets error message
 
-
+## 0.3.1 (2017-10-23)
 * Bump librdkafka to 0.11.1
 * Officially support ranges in `add_topic` for topic partition list.
 * Add consumer lag calculator
 
-
+## 0.3.0 (2017-10-17)
 * Move both add topic methods to one `add_topic` in `TopicPartitionList`
 * Add committed offsets to consumer
 * Add query watermark offset to consumer
 
-
+## 0.2.0 (2017-10-13)
 * Some refactoring and add inline documentation
 
-
+## 0.1.x (2017-09-10)
 * Initial working version including producing and consuming
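For context on the two 0.13.9 entries above, here is a minimal usage sketch of the new separate events queue. It is based only on the `consumer_poll_set=` and `#events_poll` additions shown in the `config.rb` and `consumer.rb` diffs further down; the broker address, group id, and topic name are placeholders.

```ruby
config = Rdkafka::Config.new(
  :"bootstrap.servers" => "localhost:9092", # placeholder broker
  :"group.id" => "example-group"
)
# Keep librdkafka's main event queue separate from the consumer queue
config.consumer_poll_set = false
consumer = config.consumer

consumer.subscribe("example_topic")

loop do
  # Serve stats/error callbacks queued on the main queue (0 = non-blocking)
  consumer.events_poll(0)
  # Fetch the next message from the consumer queue
  message = consumer.poll(250)
  puts message.payload if message
end
```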
data/{LICENSE → MIT-LICENSE}
RENAMED
@@ -1,6 +1,7 @@
 The MIT License (MIT)
 
-Copyright (c) 2017 Thijs Cadier
+Copyright (c) 2017-2023 Thijs Cadier
+              2023, Maciej Mensfeld
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
data/README.md
CHANGED
@@ -10,16 +10,16 @@
 ---
 
 The `rdkafka` gem is a modern Kafka client library for Ruby based on
-[librdkafka](https://github.com/
+[librdkafka](https://github.com/confluentinc/librdkafka/).
 It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
-gem and targets Kafka 1.0+ and Ruby versions
-active maintenance. We remove Ruby version from our CI builds
+gem and targets Kafka 1.0+ and Ruby versions under security or
+active maintenance. We remove a Ruby version from our CI builds when they
 become EOL.
 
 `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.
 
 The most important pieces of a Kafka client are implemented. We're
-working towards feature completeness
+working towards feature completeness. You can track that here:
 https://github.com/appsignal/rdkafka-ruby/milestone/1
 
 ## Table of content
@@ -38,7 +38,7 @@ https://github.com/appsignal/rdkafka-ruby/milestone/1
 ## Installation
 
 This gem downloads and compiles librdkafka when it is installed. If you
-have any problems installing the gem please open an issue.
+If you have any problems installing the gem, please open an issue.
 
 ## Usage
 
@@ -64,9 +64,9 @@ end
 
 ### Producing messages
 
-Produce a number of messages, put the delivery handles in an array and
+Produce a number of messages, put the delivery handles in an array, and
 wait for them before exiting. This way the messages will be batched and
-sent to Kafka
+efficiently sent to Kafka.
 
 ```ruby
 config = {:"bootstrap.servers" => "localhost:9092"}
@@ -91,7 +91,7 @@ released until it `#close` is explicitly called, so be sure to call
 
 ## Higher level libraries
 
-Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher
+Currently, there are two actively developed frameworks based on rdkafka-ruby, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.
 
 ### Message processing frameworks
 
@@ -104,7 +104,7 @@ Currently, there are two actively developed frameworks based on rdkafka-ruby, th
 
 ## Development
 
-A Docker Compose file is included to run Kafka
+A Docker Compose file is included to run Kafka. To run
 that:
 
 ```
@@ -122,7 +122,7 @@ DEBUG_PRODUCER=true bundle exec rspec
 DEBUG_CONSUMER=true bundle exec rspec
 ```
 
-After running the tests you can bring the cluster down to start with a
+After running the tests, you can bring the cluster down to start with a
 clean slate:
 
 ```
@@ -131,7 +131,7 @@ docker-compose down
 
 ## Example
 
-To see everything working run these in separate tabs:
+To see everything working, run these in separate tabs:
 
 ```
 bundle exec rake consume_messages
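The "Producing messages" paragraph touched above batches delivery handles and waits on them before exiting; a minimal sketch of that flow, assuming the gem's usual `produce(topic:, payload:, key:)` API (topic name and payloads are placeholders):

```ruby
config = {:"bootstrap.servers" => "localhost:9092"}
producer = Rdkafka::Config.new(config).producer

handles = 10.times.map do |i|
  producer.produce(topic: "ruby-test-topic", payload: "Payload #{i}", key: "Key #{i}")
end

# Waiting on all handles before closing lets librdkafka batch the messages efficiently
handles.each(&:wait)
producer.close
```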
data/ext/README.md
CHANGED
@@ -5,7 +5,7 @@ this gem is installed.
 
 To update the `librdkafka` version follow the following steps:
 
-* Go to https://github.com/
+* Go to https://github.com/confluentinc/librdkafka/releases to get the new
   version number and asset checksum for `tar.gz`.
 * Change the version in `lib/rdkafka/version.rb`
 * Change the `sha256` in `lib/rdkafka/version.rb`
data/ext/Rakefile
CHANGED
@@ -17,7 +17,7 @@ task :default => :clean do
   end
 
   recipe.files << {
-    :url => "https://codeload.github.com/
+    :url => "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}",
     :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
   }
   recipe.configure_options = ["--host=#{recipe.host}"]
data/lib/rdkafka/abstract_handle.rb
CHANGED
@@ -1,28 +1,37 @@
 # frozen_string_literal: true
 
-require "ffi"
-
 module Rdkafka
+  # This class serves as an abstract base class to represent handles within the Rdkafka module.
+  # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle
+  # classes to inherit from, ensuring they adhere to a particular structure and behavior.
+  #
+  # Subclasses must define their own layout, and the layout must start with:
+  #
+  # layout :pending, :bool,
+  #        :response, :int
   class AbstractHandle < FFI::Struct
-
-    #
-    # layout :pending, :bool,
-    #        :response, :int
+    include Helpers::Time
 
+    # Registry for registering all the handles.
     REGISTRY = {}
 
-
-
-
+    class << self
+      # Adds handle to the register
+      #
+      # @param handle [AbstractHandle] any handle we want to register
+      def register(handle)
+        address = handle.to_ptr.address
+        REGISTRY[address] = handle
+      end
 
-
-
-
+      # Removes handle from the register based on the handle address
+      #
+      # @param address [Integer] address of the registered handle we want to remove
+      def remove(address)
+        REGISTRY.delete(address)
+      end
     end
 
-    def self.remove(address)
-      REGISTRY.delete(address)
-    end
 
     # Whether the handle is still pending.
     #
@@ -32,27 +41,31 @@ module Rdkafka
     end
 
     # Wait for the operation to complete or raise an error if this takes longer than the timeout.
-    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
-    # In this case it is possible to call wait again.
+    # If there is a timeout this does not mean the operation failed, rdkafka might still be working
+    # on the operation. In this case it is possible to call wait again.
     #
-    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
-    #
+    # @param max_wait_timeout [Numeric, nil] Amount of time to wait before timing out.
+    #   If this is nil it does not time out.
+    # @param wait_timeout [Numeric] Amount of time we should wait before we recheck if the
+    #   operation has completed
    # @param raise_response_error [Boolean] should we raise error when waiting finishes
     #
+    # @return [Object] Operation-specific result
+    #
     # @raise [RdkafkaError] When the operation failed
     # @raise [WaitTimeoutError] When the timeout has been reached and the handle is still pending
-    #
-    # @return [Object] Operation-specific result
     def wait(max_wait_timeout: 60, wait_timeout: 0.1, raise_response_error: true)
       timeout = if max_wait_timeout
-
+        monotonic_now + max_wait_timeout
       else
         nil
       end
       loop do
         if pending?
-          if timeout && timeout <=
-            raise WaitTimeoutError.new(
+          if timeout && timeout <= monotonic_now
+            raise WaitTimeoutError.new(
+              "Waiting for #{operation_name} timed out after #{max_wait_timeout} seconds"
+            )
           end
           sleep wait_timeout
         elsif self[:response] != 0 && raise_response_error
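The expanded `#wait` documentation in this hunk spells out all three keyword arguments; a small sketch of waiting on a handle (for example one returned by a producer or admin call) with an explicit deadline, rescuing the `WaitTimeoutError` raised above:

```ruby
begin
  # Re-check every 50 ms, give up after 10 seconds
  result = handle.wait(max_wait_timeout: 10, wait_timeout: 0.05)
rescue Rdkafka::AbstractHandle::WaitTimeoutError
  # A timeout does not mean failure; per the docs above it is safe to call #wait again
  retry
end
```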
data/lib/rdkafka/admin.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   class Admin
     # @private
@@ -30,11 +28,12 @@ module Rdkafka
 
     # Create a topic with the given partition count and replication factor
     #
+    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of
+    #   creating the topic
+    #
     # @raise [ConfigError] When the partition count or replication factor are out of valid range
     # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
     # @raise [RdkafkaError] When the topic configuration is invalid
-    #
-    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
     def create_topic(topic_name, partition_count, replication_factor, topic_config={})
       closed_admin_check(__method__)
 
@@ -107,11 +106,11 @@ module Rdkafka
       create_topic_handle
     end
 
-    #
+    # Deletes the named topic
     #
+    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of
+    #   deleting the topic
     # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
-    #
-    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
     def delete_topic(topic_name)
       closed_admin_check(__method__)
 
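A short sketch of how the handle returned by `create_topic` (documented above) is typically waited on; the topic name, partition count, and replication factor are placeholders:

```ruby
admin = Rdkafka::Config.new(:"bootstrap.servers" => "localhost:9092").admin

# Returns a CreateTopicHandle; wait for the broker to confirm creation
handle = admin.create_topic("example_topic", 3, 1)
handle.wait(max_wait_timeout: 15)

admin.close
```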
data/lib/rdkafka/bindings.rb
CHANGED
data/lib/rdkafka/config.rb
CHANGED
@@ -1,11 +1,9 @@
 # frozen_string_literal: true
 
-require "logger"
-
 module Rdkafka
   # Configuration for a Kafka consumer or producer. You can create an instance and use
   # the consumer and producer methods to create a client. Documentation of the available
-  # configuration options is available on https://github.com/
+  # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
   class Config
     # @private
     @@logger = Logger.new(STDOUT)
@@ -53,13 +51,13 @@ module Rdkafka
 
     # Set a callback that will be called every time the underlying client emits statistics.
     # You can configure if and how often this happens using `statistics.interval.ms`.
-    # The callback is called with a hash that's documented here: https://github.com/
+    # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
     #
     # @param callback [Proc, #call] The callback
     #
     # @return [nil]
     def self.statistics_callback=(callback)
-      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
       @@statistics_callback = callback
     end
 
@@ -114,6 +112,7 @@ module Rdkafka
     def initialize(config_hash = {})
       @config_hash = DEFAULT_CONFIG.merge(config_hash)
       @consumer_rebalance_listener = nil
+      @consumer_poll_set = true
     end
 
     # Set a config option.
@@ -142,12 +141,28 @@ module Rdkafka
       @consumer_rebalance_listener = listener
     end
 
-    #
+    # Should we use a single queue for the underlying consumer and events.
     #
-    #
-    #
+    # This is an advanced API that allows for more granular control of the polling process.
+    # When this value is set to `false` (`true` by defualt), there will be two queues that need to
+    #   be polled:
+    #   - main librdkafka queue for events
+    #   - consumer queue with messages and rebalances
+    #
+    # It is recommended to use the defaults and only set it to `false` in advance multi-threaded
+    #   and complex cases where granular events handling control is needed.
+    #
+    # @param poll_set [Boolean]
+    def consumer_poll_set=(poll_set)
+      @consumer_poll_set = poll_set
+    end
+
+    # Creates a consumer with this configuration.
     #
     # @return [Consumer] The created consumer
+    #
+    # @raise [ConfigError] When the configuration contains invalid options
+    # @raise [ClientCreationError] When the native client cannot be created
     def consumer
       opaque = Opaque.new
       config = native_config(opaque)
@@ -160,8 +175,8 @@ module Rdkafka
       # Create native client
       kafka = native_kafka(config, :rd_kafka_consumer)
 
-      # Redirect the main queue to the consumer
-      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
+      # Redirect the main queue to the consumer queue
+      Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set
 
       # Return consumer with Kafka client
       Rdkafka::Consumer.new(
@@ -175,10 +190,10 @@ module Rdkafka
 
     # Create a producer with this configuration.
     #
+    # @return [Producer] The created producer
+    #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Producer] The created producer
     def producer
       # Create opaque
       opaque = Opaque.new
@@ -200,12 +215,12 @@ module Rdkafka
       end
     end
 
-    #
+    # Creates an admin instance with this configuration.
+    #
+    # @return [Admin] The created admin instance
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Admin] The created admin instance
     def admin
       opaque = Opaque.new
       config = native_config(opaque)
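The `statistics_callback=` hunk above now also accepts `nil`; a minimal sketch of installing and later clearing a process-wide statistics callback (the callback body is illustrative, and statistics only flow when `statistics.interval.ms` is configured):

```ruby
# Install a callback invoked with librdkafka's statistics hash
Rdkafka::Config.statistics_callback = ->(stats) { puts stats["name"] }

# Since 0.13.9, assigning nil resets a callback another gem may have configured
Rdkafka::Config.statistics_callback = nil
```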
data/lib/rdkafka/consumer/headers.rb
CHANGED
@@ -18,13 +18,11 @@ module Rdkafka
 
     # Reads a librdkafka native message's headers and returns them as a Ruby Hash
     #
-    # @
+    # @private
     #
+    # @param [librdkakfa message] native_message
     # @return [Hash<String, String>] headers Hash for the native_message
-    #
     # @raise [Rdkafka::RdkafkaError] when fail to read headers
-    #
-    # @private
     def self.from_native(native_message)
       headers_ptrptr = FFI::MemoryPointer.new(:pointer)
       err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
data/lib/rdkafka/consumer.rb
CHANGED
@@ -12,6 +12,7 @@ module Rdkafka
   # `each_slice` to consume batches of messages.
   class Consumer
     include Enumerable
+    include Helpers::Time
 
     # @private
     def initialize(native_kafka)
@@ -54,13 +55,11 @@ module Rdkafka
       @native_kafka.closed?
     end
 
-    #
+    # Subscribes to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
-    #
-    # @raise [RdkafkaError] When subscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When subscribing fails
     def subscribe(*topics)
       closed_consumer_check(__method__)
 
@@ -83,9 +82,8 @@ module Rdkafka
 
     # Unsubscribe from all subscribed topics.
     #
-    # @raise [RdkafkaError] When unsubscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When unsubscribing fails
     def unsubscribe
       closed_consumer_check(__method__)
 
@@ -101,10 +99,8 @@ module Rdkafka
     # Pause producing or consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
     def pause(list)
       closed_consumer_check(__method__)
 
@@ -128,13 +124,11 @@ module Rdkafka
       end
     end
 
-    #
+    # Resumes producing consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaError] When resume subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When resume subscription fails.
     def resume(list)
       closed_consumer_check(__method__)
 
@@ -157,11 +151,10 @@ module Rdkafka
       end
     end
 
-    #
-    #
-    # @raise [RdkafkaError] When getting the subscription fails.
+    # Returns the current subscription to topics and partitions
     #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the subscription fails.
     def subscription
       closed_consumer_check(__method__)
 
@@ -184,7 +177,6 @@ module Rdkafka
     # Atomic assignment of partitions to consume
     #
     # @param list [TopicPartitionList] The topic with partitions to assign
-    #
     # @raise [RdkafkaError] When assigning fails
     def assign(list)
       closed_consumer_check(__method__)
@@ -208,9 +200,8 @@ module Rdkafka
 
     # Returns the current partition assignment.
     #
-    # @raise [RdkafkaError] When getting the assignment fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the assignment fails.
     def assignment
       closed_consumer_check(__method__)
 
@@ -244,14 +235,14 @@ module Rdkafka
     end
 
     # Return the current committed offset per partition for this consumer group.
-    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # in case there was no stored offset for that partition.
     #
-    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    #   to use the current subscription.
     # @param timeout_ms [Integer] The timeout for fetching this information.
-    #
-    # @raise [RdkafkaError] When getting the committed positions fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the committed positions fails.
     def committed(list=nil, timeout_ms=1200)
       closed_consumer_check(__method__)
 
@@ -307,10 +298,8 @@ module Rdkafka
     # @param topic [String] The topic to query
     # @param partition [Integer] The partition to query
     # @param timeout_ms [Integer] The timeout for querying the broker
-    #
-    # @raise [RdkafkaError] When querying the broker fails.
-    #
     # @return [Integer] The low and high watermark
+    # @raise [RdkafkaError] When querying the broker fails.
     def query_watermark_offsets(topic, partition, timeout_ms=200)
       closed_consumer_check(__method__)
 
@@ -343,10 +332,9 @@ module Rdkafka
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
     # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    #
+    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    #
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag per partition
     def lag(topic_partition_list, watermark_timeout_ms=100)
       out = {}
 
@@ -395,10 +383,8 @@ module Rdkafka
     # When using this `enable.auto.offset.store` should be set to `false` in the config.
     #
     # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
-    #
-    # @raise [RdkafkaError] When storing the offset fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When storing the offset fails
     def store_offset(message)
       closed_consumer_check(__method__)
 
@@ -430,10 +416,8 @@ module Rdkafka
     # message at the given offset.
     #
     # @param message [Rdkafka::Consumer::Message] The message to which to seek
-    #
-    # @raise [RdkafkaError] When seeking fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
     def seek(message)
       closed_consumer_check(__method__)
 
@@ -503,10 +487,8 @@ module Rdkafka
     #
     # @param list [TopicPartitionList,nil] The topic with partitions to commit
     # @param async [Boolean] Whether to commit async or wait for the commit to finish
-    #
-    # @raise [RdkafkaError] When committing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When committing fails
     def commit(list=nil, async=false)
       closed_consumer_check(__method__)
 
@@ -532,10 +514,8 @@ module Rdkafka
     # Poll for the next message on one of the subscribed topics
     #
     # @param timeout_ms [Integer] Timeout of this poll
-    #
-    # @raise [RdkafkaError] When polling fails
-    #
     # @return [Message, nil] A message or nil if there was no new message within the timeout
+    # @raise [RdkafkaError] When polling fails
     def poll(timeout_ms)
       closed_consumer_check(__method__)
 
@@ -561,17 +541,40 @@ module Rdkafka
       end
     end
 
+    # Polls the main rdkafka queue (not the consumer one). Do **NOT** use it if `consumer_poll_set`
+    #   was set to `true`.
+    #
+    # Events will cause application-provided callbacks to be called.
+    #
+    # Events (in the context of the consumer):
+    #   - error callbacks
+    #   - stats callbacks
+    #   - any other callbacks supported by librdkafka that are not part of the consumer_poll, that
+    #     would have a callback configured and activated.
+    #
+    # This method needs to be called at regular intervals to serve any queued callbacks waiting to
+    #   be called. When in use, does **NOT** replace `#poll` but needs to run complementary with it.
+    #
+    # @param timeout_ms [Integer] poll timeout. If set to 0 will run async, when set to -1 will
+    #   block until any events available.
+    #
+    # @note This method technically should be called `#poll` and the current `#poll` should be
+    #   called `#consumer_poll` though we keep the current naming convention to make it backward
+    #   compatible.
+    def events_poll(timeout_ms = 0)
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_poll(inner, timeout_ms)
+      end
+    end
+
     # Poll for new messages and yield for each received one. Iteration
     # will end when the consumer is closed.
     #
-    # If `enable.partition.eof` is turned on in the config this will raise an
-    #
-    # using this method of iteration.
+    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
+    #   reached, so you probably want to disable that when using this method of iteration.
     #
     # @raise [RdkafkaError] When polling fails
-    #
     # @yieldparam message [Message] Received message
-    #
     # @return [nil]
     def each
       loop do
@@ -624,9 +627,7 @@ module Rdkafka
     #   that you may or may not see again.
     #
     # @param max_items [Integer] Maximum size of the yielded array of messages
-    #
     # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
-    #
     # @param timeout_ms [Integer] max time to wait for up to max_items
     #
     # @raise [RdkafkaError] When polling fails
@@ -673,10 +674,6 @@ module Rdkafka
     end
 
     private
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
 
     def closed_consumer_check(method)
       raise Rdkafka::ClosedConsumerError.new(method) if closed?
data/lib/rdkafka/helpers/time.rb
ADDED
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # Namespace for some small utilities used in multiple components
+  module Helpers
+    # Time related methods used across Karafka
+    module Time
+      # @return [Float] current monotonic time in seconds with microsecond precision
+      def monotonic_now
+        ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+      end
+    end
+  end
+end
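This new helper replaces the private `monotonic_now` methods removed from `Consumer` and `Producer`; a monotonic clock is used for deadlines because, unlike `Time.now`, it cannot jump backwards. A tiny sketch of the deadline pattern `AbstractHandle#wait` builds on (the class name is hypothetical):

```ruby
class DeadlineExample
  include Rdkafka::Helpers::Time

  # Returns whether the block finished before the given number of seconds elapsed
  def finished_within?(seconds)
    deadline = monotonic_now + seconds
    yield
    monotonic_now <= deadline
  end
end
```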
data/lib/rdkafka/producer.rb
CHANGED
@@ -1,10 +1,10 @@
 # frozen_string_literal: true
 
-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
+    include Helpers::Time
+
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
 
@@ -167,18 +167,16 @@ module Rdkafka
     end
 
     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic
     #
-    # @
-    #
-    # We cache the partition count for a given topic for given time
-    # This prevents us in case someone uses `partition_key` from querying for the count with
-    # each message. Instead we query once every 30 seconds at most
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @
-    #
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most
     def partition_count(topic)
       closed_producer_check(__method__)
 
@@ -308,11 +306,6 @@ module Rdkafka
 
     private
 
-    def monotonic_now
-      # needed because Time.now can go backwards
-      Process.clock_gettime(Process::CLOCK_MONOTONIC)
-    end
-
     def closed_producer_check(method)
       raise Rdkafka::ClosedProducerError.new(method) if closed?
     end
data/lib/rdkafka/version.rb
CHANGED
data/lib/rdkafka.rb
CHANGED
@@ -1,7 +1,12 @@
 # frozen_string_literal: true
 
-require "
+require "logger"
+require "objspace"
+require "ffi"
+require "json"
 
+require "rdkafka/version"
+require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
@@ -24,3 +29,7 @@ require "rdkafka/native_kafka"
 require "rdkafka/producer"
 require "rdkafka/producer/delivery_handle"
 require "rdkafka/producer/delivery_report"
+
+# Main Rdkafka namespace of this gem
+module Rdkafka
+end
data/spec/rdkafka/admin_spec.rb
CHANGED
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
-
 describe Rdkafka::Config do
   context "logger" do
     it "should have a default logger" do
@@ -115,6 +113,14 @@ describe Rdkafka::Config do
       consumer.close
     end
 
+    it "should create a consumer with consumer_poll_set set to false" do
+      config = rdkafka_consumer_config
+      config.consumer_poll_set = false
+      consumer = config.consumer
+      expect(consumer).to be_a Rdkafka::Consumer
+      consumer.close
+    end
+
     it "should raise an error when creating a consumer with invalid config" do
       config = Rdkafka::Config.new('invalid.key' => 'value')
       expect {
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -1,6 +1,5 @@
 # frozen_string_literal: true
 
-require "spec_helper"
 require "ostruct"
 require 'securerandom'
 
@@ -55,6 +54,30 @@ describe Rdkafka::Consumer do
        consumer.subscription
       }.to raise_error(Rdkafka::RdkafkaError)
     end
+
+    context "when using consumer without the poll set" do
+      let(:consumer) do
+        config = rdkafka_consumer_config
+        config.consumer_poll_set = false
+        config.consumer
+      end
+
+      it "should subscribe, unsubscribe and return the subscription" do
+        expect(consumer.subscription).to be_empty
+
+        consumer.subscribe("consume_test_topic")
+
+        expect(consumer.subscription).not_to be_empty
+        expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+          list.add_topic("consume_test_topic")
+        end
+        expect(consumer.subscription).to eq expected_subscription
+
+        consumer.unsubscribe
+
+        expect(consumer.subscription).to be_empty
+      end
+    end
   end
 
   describe "#pause and #resume" do
@@ -1077,6 +1100,29 @@ describe Rdkafka::Consumer do
     end
   end
 
+  # Only relevant in case of a consumer with separate queues
+  describe '#events_poll' do
+    let(:stats) { [] }
+
+    before { Rdkafka::Config.statistics_callback = ->(published) { stats << published } }
+
+    after { Rdkafka::Config.statistics_callback = nil }
+
+    let(:consumer) do
+      config = rdkafka_consumer_config('statistics.interval.ms': 100)
+      config.consumer_poll_set = false
+      config.consumer
+    end
+
+    it "expect to run events_poll, operate and propagate stats on events_poll and not poll" do
+      consumer.subscribe("consume_test_topic")
+      consumer.poll(1_000)
+      expect(stats).to be_empty
+      consumer.events_poll(-1)
+      expect(stats).not_to be_empty
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
data/spec/rdkafka/error_spec.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.13.8
+  version: 0.13.9
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2023-
+date: 2023-11-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -174,11 +174,13 @@ files:
 - ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
+- ".ruby-gemset"
+- ".ruby-version"
 - ".yardopts"
 - CHANGELOG.md
 - Gemfile
 - Guardfile
-- LICENSE
+- MIT-LICENSE
 - README.md
 - Rakefile
 - certs/cert_chain.pem
@@ -204,6 +206,7 @@ files:
 - lib/rdkafka/consumer/partition.rb
 - lib/rdkafka/consumer/topic_partition_list.rb
 - lib/rdkafka/error.rb
+- lib/rdkafka/helpers/time.rb
 - lib/rdkafka/metadata.rb
 - lib/rdkafka/native_kafka.rb
 - lib/rdkafka/producer.rb
@@ -258,7 +261,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.4.19
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig
CHANGED
Binary file