ruby-kafka 0.7.6.beta1 → 0.7.6.beta2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +2 -0
- data/README.md +20 -0
- data/lib/kafka.rb +4 -0
- data/lib/kafka/async_producer.rb +23 -7
- data/lib/kafka/client.rb +8 -2
- data/lib/kafka/protocol/sasl_handshake_request.rb +1 -1
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl_authenticator.rb +9 -2
- data/lib/kafka/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b14f3fd396d495fc5c240cd5451b7806154b0fda6dbae72764cfa8087a4b778d
+  data.tar.gz: e283a412d4bcdfd7b6ac8f8c0ba7d6ef9b1bbc94163796ca23cbe6d884710076
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1bbe81c129b203d3a4f7f64853cdd21ab99b602844fc426726a74878b40bf9266a9ed14a77b05bab5e500118aa35ea8a0e39bdbf466018d746e744580a525c66
+  data.tar.gz: 359e818723d15663dc6d1de2b83b70aad4f27bd293b0bb95426be16a440f9c9c48cd58d53a754d8e62d541b8043ef8109ab7549173e2232732ebe94673acf301
data/CHANGELOG.md
CHANGED
@@ -8,6 +8,8 @@ Changes and additions to the library will be listed here.
 - Introduce regex matching in `Consumer#subscribe` (#700)
 - Only rejoin group on error if we're not in shutdown mode (#711)
 - Use `maxTimestamp` for `logAppendTime` timestamps (#706)
+- Async producer limit number of retries (#708)
+- Support SASL OAuthBearer Authentication (#710)
 
 ## 0.7.5
 - Distribute partitions across consumer groups when there are few partitions per topic (#681)
data/README.md
CHANGED
@@ -988,6 +988,26 @@ kafka = Kafka.new(
 )
 ```
 
+##### OAUTHBEARER
+This mechanism is supported in kafka >= 2.0.0 as of [KIP-255](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=75968876)
+
+In order to authenticate using OAUTHBEARER, you must set the client with an instance of a class that implements a `token` method (the interface is described in [Kafka::Sasl::OAuth](lib/kafka/sasl/oauth.rb)) which returns an ID/Access token.
+
+Optionally, the client may implement an `extensions` method that returns a map of key-value pairs. These can be sent with the SASL/OAUTHBEARER initial client response. This is only supported in kafka >= 2.1.0.
+
+```ruby
+class TokenProvider
+  def token
+    "some_id_token"
+  end
+end
+# ...
+client = Kafka.new(
+  ["kafka1:9092"],
+  sasl_oauth_token_provider: TokenProvider.new
+)
+```
+
 ### Topic management
 
 In addition to producing and consuming messages, ruby-kafka supports managing Kafka topics and their configurations. See [the Kafka documentation](https://kafka.apache.org/documentation/#topicconfigs) for a full list of topic configuration keys.
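The README example above shows the minimal `token`-only provider. As a sketch of the optional `extensions` hook combined with the token reuse the interface docs ask for: the `CachingTokenProvider` class, its `ttl`, the fetcher lambda, and the extension key below are all illustrative, not part of ruby-kafka; only the `token` and `extensions` method names come from the library.

```ruby
require "kafka"

# Hypothetical provider: caches the token until it nears expiry and also
# supplies OAUTHBEARER extensions (the latter needs Kafka >= 2.1.0).
class CachingTokenProvider
  def initialize(fetcher, ttl: 300)
    @fetcher = fetcher           # any callable returning a fresh token string
    @ttl = ttl
    @token = nil
    @expires_at = Time.at(0)
  end

  # Reuses the cached token so multiple connects don't mint multiple tokens.
  def token
    if Time.now >= @expires_at
      @token = @fetcher.call
      @expires_at = Time.now + @ttl
    end
    @token
  end

  # Optional: key-value pairs sent with the initial client response.
  # "logicalCluster" is just an example key, not required by anything.
  def extensions
    { "logicalCluster" => "lkc-example" }
  end
end

client = Kafka.new(
  ["kafka1:9092"],
  sasl_oauth_token_provider: CachingTokenProvider.new(-> { "some_id_token" })
)
```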
data/lib/kafka.rb
CHANGED
@@ -351,6 +351,10 @@ module Kafka
   class FailedScramAuthentication < SaslScramError
   end
 
+  # The Token Provider object used for SASL OAuthBearer does not implement the method `token`
+  class TokenMethodNotImplementedError < Error
+  end
+
   # Initializes a new Kafka client.
   #
   # @see Client#initialize
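The new error class surfaces a misconfigured provider once the client actually authenticates. A hedged sketch of guarding against it, assuming the first networked call (here `client.topics`, a real client method) triggers SASL authentication:

```ruby
require "kafka"

begin
  client = Kafka.new(
    ["kafka1:9092"],
    sasl_oauth_token_provider: Object.new # deliberately lacks a `token` method
  )
  client.topics # first connection authenticates and raises
rescue Kafka::TokenMethodNotImplementedError => e
  warn "OAuth token provider misconfigured: #{e.message}"
end
```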
data/lib/kafka/async_producer.rb
CHANGED
@@ -72,7 +72,7 @@ module Kafka
   # @param delivery_interval [Integer] if greater than zero, the number of
   #   seconds between automatic message deliveries.
   #
-  def initialize(sync_producer:, max_queue_size: 1000, delivery_threshold: 0, delivery_interval: 0, instrumenter:, logger:)
+  def initialize(sync_producer:, max_queue_size: 1000, delivery_threshold: 0, delivery_interval: 0, max_retries: -1, retry_backoff: 0, instrumenter:, logger:)
     raise ArgumentError unless max_queue_size > 0
     raise ArgumentError unless delivery_threshold >= 0
     raise ArgumentError unless delivery_interval >= 0
@@ -86,8 +86,10 @@ module Kafka
       queue: @queue,
       producer: sync_producer,
       delivery_threshold: delivery_threshold,
+      max_retries: max_retries,
+      retry_backoff: retry_backoff,
       instrumenter: instrumenter,
-      logger: logger
+      logger: logger
     )
 
     # The timer will no-op if the delivery interval is zero.
@@ -184,10 +186,12 @@ module Kafka
     end
 
     class Worker
-      def initialize(queue:, producer:, delivery_threshold:, instrumenter:, logger:)
+      def initialize(queue:, producer:, delivery_threshold:, max_retries: -1, retry_backoff: 0, instrumenter:, logger:)
         @queue = queue
         @producer = producer
         @delivery_threshold = delivery_threshold
+        @max_retries = max_retries
+        @retry_backoff = retry_backoff
         @instrumenter = instrumenter
         @logger = TaggedLogger.new(logger)
       end
@@ -240,10 +244,22 @@ module Kafka
     private
 
     def produce(*args)
-      @producer.produce(*args)
-    rescue BufferOverflow
-      deliver_messages
-      retry
+      retries = 0
+      begin
+        @producer.produce(*args)
+      rescue BufferOverflow => e
+        deliver_messages
+        if @max_retries == -1
+          retry
+        elsif retries < @max_retries
+          retries += 1
+          sleep @retry_backoff**retries
+          retry
+        else
+          @logger.error("Failed to asynchronously produce messages due to BufferOverflow")
+          @instrumenter.instrument("error.async_producer", { error: e })
+        end
+      end
     end
 
     def deliver_messages
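With the default `max_retries: -1` the worker retries on `BufferOverflow` indefinitely and without sleeping; with a positive limit it sleeps `@retry_backoff**retries` between attempts, so the backoff grows exponentially. A plain-Ruby sketch of the resulting schedule (the parameter values are illustrative):

```ruby
# Mirrors the `sleep @retry_backoff**retries` call in Worker#produce above.
retry_backoff = 2
max_retries = 4

(1..max_retries).each do |attempt|
  puts "retry #{attempt}: sleep #{retry_backoff**attempt}s"
end
# retry 1: sleep 2s
# retry 2: sleep 4s
# retry 3: sleep 8s
# retry 4: sleep 16s
```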
data/lib/kafka/client.rb
CHANGED
@@ -62,13 +62,16 @@ module Kafka
   #
   # @param sasl_over_ssl [Boolean] whether to enforce SSL with SASL
   #
+  # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
+  #   implements method token. See {Sasl::OAuth#initialize}
+  #
   # @return [Client]
   def initialize(seed_brokers:, client_id: "ruby-kafka", logger: nil, connect_timeout: nil, socket_timeout: nil,
                  ssl_ca_cert_file_path: nil, ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
                  ssl_client_cert_key_password: nil, ssl_client_cert_chain: nil, sasl_gssapi_principal: nil,
                  sasl_gssapi_keytab: nil, sasl_plain_authzid: '', sasl_plain_username: nil, sasl_plain_password: nil,
                  sasl_scram_username: nil, sasl_scram_password: nil, sasl_scram_mechanism: nil,
-                 sasl_over_ssl: true, ssl_ca_certs_from_system: false)
+                 sasl_over_ssl: true, ssl_ca_certs_from_system: false, sasl_oauth_token_provider: nil)
     @logger = TaggedLogger.new(logger)
     @instrumenter = Instrumenter.new(client_id: client_id)
     @seed_brokers = normalize_seed_brokers(seed_brokers)
@@ -92,6 +95,7 @@ module Kafka
       sasl_scram_username: sasl_scram_username,
       sasl_scram_password: sasl_scram_password,
       sasl_scram_mechanism: sasl_scram_mechanism,
+      sasl_oauth_token_provider: sasl_oauth_token_provider,
       logger: @logger
     )
 
@@ -296,7 +300,7 @@ module Kafka
   #
   # @see AsyncProducer
   # @return [AsyncProducer]
-  def async_producer(delivery_interval: 0, delivery_threshold: 0, max_queue_size: 1000, **options)
+  def async_producer(delivery_interval: 0, delivery_threshold: 0, max_queue_size: 1000, max_retries: -1, retry_backoff: 0, **options)
     sync_producer = producer(**options)
 
     AsyncProducer.new(
@@ -304,6 +308,8 @@ module Kafka
       delivery_interval: delivery_interval,
       delivery_threshold: delivery_threshold,
       max_queue_size: max_queue_size,
+      max_retries: max_retries,
+      retry_backoff: retry_backoff,
       instrumenter: @instrumenter,
       logger: @logger,
     )
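Taken together, a hedged usage sketch of the two new `async_producer` options; the broker address, client id, and topic are placeholders:

```ruby
require "kafka"

kafka = Kafka.new(["kafka1:9092"], client_id: "my-app")

producer = kafka.async_producer(
  delivery_interval: 10,
  max_retries: 5,    # stop after 5 BufferOverflow retries (default -1 = unlimited)
  retry_backoff: 2   # sleep 2**n seconds before retry n
)

producer.produce("hello", topic: "greetings")
producer.shutdown
```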
data/lib/kafka/protocol/sasl_handshake_request.rb
CHANGED
@@ -8,7 +8,7 @@ module Kafka
 
   class SaslHandshakeRequest
 
-    SUPPORTED_MECHANISMS = %w(GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512)
+    SUPPORTED_MECHANISMS = %w(GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512 OAUTHBEARER)
 
     def initialize(mechanism)
       unless SUPPORTED_MECHANISMS.include?(mechanism)
data/lib/kafka/sasl/oauth.rb
ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class OAuth
+      OAUTH_IDENT = "OAUTHBEARER"
+
+      # token_provider: THE FOLLOWING INTERFACE MUST BE FULFILLED:
+      #
+      # [REQUIRED] TokenProvider#token - Returns an ID/Access Token to be sent to the Kafka client.
+      #   The implementation should ensure token reuse so that multiple calls at connect time do not
+      #   create multiple tokens. The implementation should also periodically refresh the token in
+      #   order to guarantee that each call returns an unexpired token. A timeout error should
+      #   be returned after a short period of inactivity so that the broker can log debugging
+      #   info and retry.
+      #
+      # [OPTIONAL] TokenProvider#extensions - Returns a map of key-value pairs that can be sent with the
+      #   SASL/OAUTHBEARER initial client response. If not provided, the values are ignored. This feature
+      #   is only available in Kafka >= 2.1.0.
+      #
+      def initialize(logger:, token_provider:)
+        @logger = TaggedLogger.new(logger)
+        @token_provider = token_provider
+      end
+
+      def ident
+        OAUTH_IDENT
+      end
+
+      def configured?
+        @token_provider
+      end
+
+      def authenticate!(host, encoder, decoder)
+        # Send SASLOauthBearerClientResponse with token
+        @logger.debug "Authenticating to #{host} with SASL #{OAUTH_IDENT}"
+
+        encoder.write_bytes(initial_client_response)
+
+        begin
+          # receive SASL OAuthBearer Server Response
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{OAUTH_IDENT} authentication successful."
+      end
+
+      private
+
+      def initial_client_response
+        raise Kafka::TokenMethodNotImplementedError, "Token provider doesn't define 'token'" unless @token_provider.respond_to? :token
+        "n,,\x01auth=Bearer #{@token_provider.token}#{token_extensions}\x01\x01"
+      end
+
+      def token_extensions
+        return nil unless @token_provider.respond_to? :extensions
+        "\x01#{@token_provider.extensions.map {|e| e.join("=")}.join("\x01")}"
+      end
+    end
+  end
+end
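`initial_client_response` frames the OAUTHBEARER client-first message with `\x01` separators, in the style of RFC 7628. An illustrative reconstruction of the string it builds, for a hypothetical token and extension pair (the values are made up):

```ruby
# Builds the same string Sasl::OAuth#initial_client_response produces for a
# provider returning "tok123" and one extensions entry.
token = "tok123"
extensions = { "traceId" => "abc" }

ext_part = "\x01" + extensions.map { |e| e.join("=") }.join("\x01")
message = "n,,\x01auth=Bearer #{token}#{ext_part}\x01\x01"

p message
# => "n,,\u0001auth=Bearer tok123\u0001traceId=abc\u0001\u0001"
```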
data/lib/kafka/sasl_authenticator.rb
CHANGED
@@ -3,12 +3,14 @@
 require 'kafka/sasl/plain'
 require 'kafka/sasl/gssapi'
 require 'kafka/sasl/scram'
+require 'kafka/sasl/oauth'
 
 module Kafka
   class SaslAuthenticator
     def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:,
                    sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:,
-                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:)
+                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:,
+                   sasl_oauth_token_provider:)
       @logger = TaggedLogger.new(logger)
 
       @plain = Sasl::Plain.new(
@@ -31,7 +33,12 @@ module Kafka
         logger: @logger,
       )
 
-      @mechanism = [@gssapi, @plain, @scram].find(&:configured?)
+      @oauth = Sasl::OAuth.new(
+        token_provider: sasl_oauth_token_provider,
+        logger: @logger,
+      )
+
+      @mechanism = [@gssapi, @plain, @scram, @oauth].find(&:configured?)
     end
 
     def enabled?
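Note the selection order: `find(&:configured?)` returns the first configured mechanism, so OAuth is only used when GSSAPI, PLAIN, and SCRAM are all unset. A minimal illustration of that behaviour, using stand-in objects rather than the real mechanism classes:

```ruby
# Stand-ins for the real mechanism objects; only OAuth reports configured.
Mechanism = Struct.new(:name, :configured) do
  def configured?
    configured
  end
end

gssapi = Mechanism.new("GSSAPI", false)
plain  = Mechanism.new("PLAIN", false)
scram  = Mechanism.new("SCRAM", false)
oauth  = Mechanism.new("OAUTHBEARER", true)

chosen = [gssapi, plain, scram, oauth].find(&:configured?)
puts chosen.name # => OAUTHBEARER
```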
data/lib/kafka/version.rb
CHANGED
-  VERSION = "0.7.6.beta1"
+  VERSION = "0.7.6.beta2"
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.7.6.beta1
+  version: 0.7.6.beta2
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2019-02-
+date: 2019-02-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: digest-crc
@@ -418,6 +418,7 @@ files:
 - lib/kafka/protocol/sync_group_response.rb
 - lib/kafka/round_robin_assignment_strategy.rb
 - lib/kafka/sasl/gssapi.rb
+- lib/kafka/sasl/oauth.rb
 - lib/kafka/sasl/plain.rb
 - lib/kafka/sasl/scram.rb
 - lib/kafka/sasl_authenticator.rb