ruby-kafka 0.6.0.beta1 → 0.6.0.beta2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +2 -0
- data/lib/kafka.rb +1 -1
- data/lib/kafka/client.rb +11 -6
- data/lib/kafka/consumer.rb +1 -1
- data/lib/kafka/datadog.rb +5 -0
- data/lib/kafka/fetch_operation.rb +1 -0
- data/lib/kafka/offset_manager.rb +4 -1
- data/lib/kafka/sasl_authenticator.rb +9 -5
- data/lib/kafka/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a8de7c8abb93903beb344e75576f60227f42e03b4fc05cfd535b69ed71443c20
|
4
|
+
data.tar.gz: e23592d1e5c00e145cba041729ba69ba80a6ed7452170d6fc0a04fbb472b42cd
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: f3c7af12d2699a189d10e12117ecf06b315118fa373dc9365c50578dea3d5b1bc55f06401bd2bb044c778cf036b99daba9c4bc1a6cc6770eefa7c266ecbd0521
|
7
|
+
data.tar.gz: bfe265ae1b2af3024ffb10de3dc627cfda81b75916f7ade3d1537cc5ccae00dd6c134af70f5f6871be1a788e85ad2b4952db779588b59d17bd1bd1d9963b9dd4
|
data/README.md
CHANGED
@@ -924,6 +924,8 @@ Typically, Kafka certificates come in the JKS format, which isn't supported by r
|
|
924
924
|
|
925
925
|
Kafka has support for using SASL to authenticate clients. Currently GSSAPI, SCRAM and PLAIN mechanisms are supported by ruby-kafka.
|
926
926
|
|
927
|
+
**NOTE:** In order to use SASL for authentication, you need to configure SSL encryption by passing `ssl_ca_cert` or enabling `ssl_ca_certs_from_system`.
|
928
|
+
|
927
929
|
##### GSSAPI
|
928
930
|
In order to authenticate using GSSAPI, set your principal and optionally your keytab when initializing the Kafka client:
|
929
931
|
|
data/lib/kafka.rb
CHANGED
data/lib/kafka/client.rb
CHANGED
@@ -82,6 +82,10 @@ module Kafka
|
|
82
82
|
logger: @logger
|
83
83
|
)
|
84
84
|
|
85
|
+
if sasl_authenticator.enabled? && ssl_context.nil?
|
86
|
+
raise ArgumentError, "SASL authentication requires that SSL is configured"
|
87
|
+
end
|
88
|
+
|
85
89
|
@connection_builder = ConnectionBuilder.new(
|
86
90
|
client_id: client_id,
|
87
91
|
connect_timeout: connect_timeout,
|
@@ -291,9 +295,16 @@ module Kafka
|
|
291
295
|
instrumenter: instrumenter,
|
292
296
|
)
|
293
297
|
|
298
|
+
fetcher = Fetcher.new(
|
299
|
+
cluster: initialize_cluster,
|
300
|
+
logger: @logger,
|
301
|
+
instrumenter: instrumenter,
|
302
|
+
)
|
303
|
+
|
294
304
|
offset_manager = OffsetManager.new(
|
295
305
|
cluster: cluster,
|
296
306
|
group: group,
|
307
|
+
fetcher: fetcher,
|
297
308
|
logger: @logger,
|
298
309
|
commit_interval: offset_commit_interval,
|
299
310
|
commit_threshold: offset_commit_threshold,
|
@@ -305,12 +316,6 @@ module Kafka
|
|
305
316
|
interval: heartbeat_interval,
|
306
317
|
)
|
307
318
|
|
308
|
-
fetcher = Fetcher.new(
|
309
|
-
cluster: initialize_cluster,
|
310
|
-
logger: @logger,
|
311
|
-
instrumenter: instrumenter,
|
312
|
-
)
|
313
|
-
|
314
319
|
Consumer.new(
|
315
320
|
cluster: cluster,
|
316
321
|
logger: @logger,
|
data/lib/kafka/consumer.rb
CHANGED
@@ -484,7 +484,7 @@ module Kafka
|
|
484
484
|
end
|
485
485
|
end
|
486
486
|
rescue OffsetOutOfRange => e
|
487
|
-
@logger.error "Invalid offset for #{e.topic}/#{e.partition}, resetting to default offset"
|
487
|
+
@logger.error "Invalid offset #{e.offset} for #{e.topic}/#{e.partition}, resetting to default offset"
|
488
488
|
|
489
489
|
@offset_manager.seek_to_default(e.topic, e.partition)
|
490
490
|
|
data/lib/kafka/datadog.rb
CHANGED
data/lib/kafka/offset_manager.rb
CHANGED
@@ -7,9 +7,10 @@ module Kafka
|
|
7
7
|
# The default broker setting for offsets.retention.minutes is 1440.
|
8
8
|
DEFAULT_RETENTION_TIME = 1440 * 60
|
9
9
|
|
10
|
-
def initialize(cluster:, group:, logger:, commit_interval:, commit_threshold:, offset_retention_time:)
|
10
|
+
def initialize(cluster:, group:, fetcher:, logger:, commit_interval:, commit_threshold:, offset_retention_time:)
|
11
11
|
@cluster = cluster
|
12
12
|
@group = group
|
13
|
+
@fetcher = fetcher
|
13
14
|
@logger = logger
|
14
15
|
@commit_interval = commit_interval
|
15
16
|
@commit_threshold = commit_threshold
|
@@ -80,6 +81,8 @@ module Kafka
|
|
80
81
|
def seek_to(topic, partition, offset)
|
81
82
|
@processed_offsets[topic] ||= {}
|
82
83
|
@processed_offsets[topic][partition] = offset
|
84
|
+
|
85
|
+
@fetcher.seek(topic, partition, offset)
|
83
86
|
end
|
84
87
|
|
85
88
|
# Return the next offset that should be fetched for the specified partition.
|
@@ -28,21 +28,25 @@ module Kafka
|
|
28
28
|
mechanism: sasl_scram_mechanism,
|
29
29
|
logger: @logger,
|
30
30
|
)
|
31
|
+
|
32
|
+
@mechanism = [@gssapi, @plain, @scram].find(&:configured?)
|
31
33
|
end
|
32
34
|
|
33
|
-
def
|
34
|
-
mechanism
|
35
|
+
def enabled?
|
36
|
+
!@mechanism.nil?
|
37
|
+
end
|
35
38
|
|
36
|
-
|
39
|
+
def authenticate!(connection)
|
40
|
+
return unless enabled?
|
37
41
|
|
38
|
-
ident = mechanism.ident
|
42
|
+
ident = @mechanism.ident
|
39
43
|
response = connection.send_request(Kafka::Protocol::SaslHandshakeRequest.new(ident))
|
40
44
|
|
41
45
|
unless response.error_code == 0 && response.enabled_mechanisms.include?(ident)
|
42
46
|
raise Kafka::Error, "#{ident} is not supported."
|
43
47
|
end
|
44
48
|
|
45
|
-
mechanism.authenticate!(connection.to_s, connection.encoder, connection.decoder)
|
49
|
+
@mechanism.authenticate!(connection.to_s, connection.encoder, connection.decoder)
|
46
50
|
end
|
47
51
|
end
|
48
52
|
end
|
data/lib/kafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ruby-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.6.0.beta1
|
4
|
+
version: 0.6.0.beta2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Daniel Schierbeck
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2018-04-
|
11
|
+
date: 2018-04-17 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|