ruby-kafka-custom 0.7.7.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/kafka/async_producer.rb +279 -0
- data/lib/kafka/broker.rb +205 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +754 -0
- data/lib/kafka/cluster.rb +455 -0
- data/lib/kafka/compression.rb +43 -0
- data/lib/kafka/compressor.rb +85 -0
- data/lib/kafka/connection.rb +220 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +592 -0
- data/lib/kafka/consumer_group.rb +208 -0
- data/lib/kafka/datadog.rb +413 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +54 -0
- data/lib/kafka/fetched_batch_generator.rb +117 -0
- data/lib/kafka/fetched_message.rb +47 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +221 -0
- data/lib/kafka/gzip_codec.rb +30 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/lz4_codec.rb +23 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/offset_manager.rb +248 -0
- data/lib/kafka/partitioner.rb +35 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +504 -0
- data/lib/kafka/protocol.rb +217 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +41 -0
- data/lib/kafka/protocol/join_group_response.rb +33 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +36 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +92 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +222 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +23 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +54 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +177 -0
- data/lib/kafka/sasl_authenticator.rb +61 -0
- data/lib/kafka/snappy_codec.rb +25 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +187 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +72 -0
- data/lib/kafka/transaction_manager.rb +261 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- metadata +461 -0
data/lib/kafka/protocol/request_message.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class RequestMessage
+      API_VERSION = 0
+
+      def initialize(api_key:, api_version: API_VERSION, correlation_id:, client_id:, request:)
+        @api_key = api_key
+        @api_version = api_version
+        @correlation_id = correlation_id
+        @client_id = client_id
+        @request = request
+      end
+
+      def encode(encoder)
+        encoder.write_int16(@api_key)
+        encoder.write_int16(@api_version)
+        encoder.write_int32(@correlation_id)
+        encoder.write_string(@client_id)
+
+        @request.encode(encoder)
+      end
+    end
+  end
+end
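
RequestMessage wraps an API-specific request in the common Kafka request header (API key, API version, correlation id, client id) before it is written to the wire. A minimal sketch of how it might be wired up follows; the correlation id and client id are placeholder values, `require "kafka"` is assumed to be the gem's usual entry point, and Encoder.encode_with is the helper that also appears in sync_group_request.rb further down.

require "kafka"  # assumed entry point for the gem

# Frame a SaslHandshakeRequest (defined in the next hunk) with the common header
# and serialize the whole message to a string of bytes.
request = Kafka::Protocol::SaslHandshakeRequest.new("SCRAM-SHA-256")

message = Kafka::Protocol::RequestMessage.new(
  api_key: request.api_key,      # SASL_HANDSHAKE_API constant from protocol.rb
  correlation_id: 1,             # placeholder; a real connection increments this per request
  client_id: "example-client",   # placeholder
  request: request
)

raw_bytes = Kafka::Protocol::Encoder.encode_with(message)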
data/lib/kafka/protocol/sasl_handshake_request.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    # SaslHandshake Request (Version: 0) => mechanism
+    #   mechanism => string
+
+    class SaslHandshakeRequest
+
+      SUPPORTED_MECHANISMS = %w(GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512 OAUTHBEARER)
+
+      def initialize(mechanism)
+        unless SUPPORTED_MECHANISMS.include?(mechanism)
+          raise Kafka::Error, "Unsupported SASL mechanism #{mechanism}. Supported are #{SUPPORTED_MECHANISMS.join(', ')}"
+        end
+        @mechanism = mechanism
+      end
+
+      def api_key
+        SASL_HANDSHAKE_API
+      end
+
+      def response_class
+        SaslHandshakeResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@mechanism)
+      end
+    end
+  end
+end
data/lib/kafka/protocol/sasl_handshake_response.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    # SaslHandshake Response (Version: 0) => error_code [enabled_mechanisms]
+    #   error_code => int16
+    #   enabled_mechanisms => array of strings
+
+    class SaslHandshakeResponse
+      attr_reader :error_code
+
+      attr_reader :enabled_mechanisms
+
+      def initialize(error_code:, enabled_mechanisms:)
+        @error_code = error_code
+        @enabled_mechanisms = enabled_mechanisms
+      end
+
+      def self.decode(decoder)
+        new(
+          error_code: decoder.int16,
+          enabled_mechanisms: decoder.array { decoder.string }
+        )
+      end
+    end
+  end
+end
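
Taken together, the two handshake classes give the round trip a clear shape: the client proposes a mechanism, the broker answers with an error code and its enabled mechanisms. A small illustrative sketch, assuming the gem is loaded as in the earlier example; the decoder would come from a live broker connection, which is assumed rather than shown.

# Unsupported mechanisms are rejected client-side before anything is sent:
begin
  Kafka::Protocol::SaslHandshakeRequest.new("DIGEST-MD5")
rescue Kafka::Error => e
  puts e.message  # => "Unsupported SASL mechanism DIGEST-MD5. Supported are GSSAPI, PLAIN, ..."
end

handshake = Kafka::Protocol::SaslHandshakeRequest.new("PLAIN")
handshake.api_key         # => SASL_HANDSHAKE_API (defined in protocol.rb)
handshake.response_class  # => Kafka::Protocol::SaslHandshakeResponse

# Given `decoder`, a Kafka::Protocol::Decoder wrapping the broker's reply (assumed here):
# response = Kafka::Protocol::SaslHandshakeResponse.decode(decoder)
# response.error_code          # => 0 on success
# response.enabled_mechanisms  # => e.g. ["PLAIN", "SCRAM-SHA-256"]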
data/lib/kafka/protocol/sync_group_request.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class SyncGroupRequest
+      def initialize(group_id:, generation_id:, member_id:, group_assignment: {})
+        @group_id = group_id
+        @generation_id = generation_id
+        @member_id = member_id
+        @group_assignment = group_assignment
+      end
+
+      def api_key
+        SYNC_GROUP_API
+      end
+
+      def response_class
+        SyncGroupResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_int32(@generation_id)
+        encoder.write_string(@member_id)
+
+        encoder.write_array(@group_assignment) do |member_id, member_assignment|
+          encoder.write_string(member_id)
+          encoder.write_bytes(Encoder.encode_with(member_assignment))
+        end
+      end
+    end
+  end
+end
data/lib/kafka/protocol/sync_group_response.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require "kafka/protocol/member_assignment"
+
+module Kafka
+  module Protocol
+    class SyncGroupResponse
+      attr_reader :error_code, :member_assignment
+
+      def initialize(error_code:, member_assignment:)
+        @error_code = error_code
+        @member_assignment = member_assignment
+      end
+
+      def self.decode(decoder)
+        new(
+          error_code: decoder.int16,
+          member_assignment: MemberAssignment.decode(Decoder.from_string(decoder.bytes)),
+        )
+      end
+    end
+  end
+end
data/lib/kafka/round_robin_assignment_strategy.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require "kafka/protocol/member_assignment"
+
+module Kafka
+
+  # A consumer group partition assignment strategy that assigns partitions to
+  # consumers in a round-robin fashion.
+  class RoundRobinAssignmentStrategy
+    def initialize(cluster:)
+      @cluster = cluster
+    end
+
+    # Assign the topic partitions to the group members.
+    #
+    # @param members [Array<String>] member ids
+    # @param topics [Array<String>] topics
+    # @return [Hash<String, Protocol::MemberAssignment>] a hash mapping member
+    #   ids to assignments.
+    def assign(members:, topics:)
+      group_assignment = {}
+
+      members.each do |member_id|
+        group_assignment[member_id] = Protocol::MemberAssignment.new
+      end
+
+      topic_partitions = topics.flat_map do |topic|
+        begin
+          partitions = @cluster.partitions_for(topic).map(&:partition_id)
+        rescue UnknownTopicOrPartition
+          raise UnknownTopicOrPartition, "unknown topic #{topic}"
+        end
+        Array.new(partitions.count) { topic }.zip(partitions)
+      end
+
+      partitions_per_member = topic_partitions.group_by.with_index do |_, index|
+        index % members.count
+      end.values
+
+      members.zip(partitions_per_member).each do |member_id, member_partitions|
+        unless member_partitions.nil?
+          member_partitions.each do |topic, partition|
+            group_assignment[member_id].assign(topic, [partition])
+          end
+        end
+      end
+
+      group_assignment
+    rescue Kafka::LeaderNotAvailable
+      sleep 1
+      retry
+    end
+  end
+end
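
Because the strategy only needs the cluster for partition lookups, it can be exercised in isolation. A sketch with a purely hypothetical stand-in for the cluster object (the real Kafka::Cluster provides partitions_for):

# Illustrative stub standing in for Kafka::Cluster; only partitions_for(topic)
# is needed, returning objects that respond to #partition_id.
FakePartition = Struct.new(:partition_id)

stub_cluster = Object.new
def stub_cluster.partitions_for(_topic)
  (0..5).map { |i| FakePartition.new(i) }
end

strategy = Kafka::RoundRobinAssignmentStrategy.new(cluster: stub_cluster)

assignment = strategy.assign(
  members: ["consumer-1", "consumer-2"],
  topics: ["greetings"]
)
# assignment maps each member id to a Protocol::MemberAssignment:
# "consumer-1" receives partitions 0, 2, 4 and "consumer-2" receives 1, 3, 5.
# This hash is what SyncGroupRequest's group_assignment (above) expects.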
data/lib/kafka/sasl/gssapi.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class Gssapi
+      GSSAPI_IDENT = "GSSAPI"
+      GSSAPI_CONFIDENTIALITY = false
+
+      def initialize(logger:, principal:, keytab:)
+        @logger = TaggedLogger.new(logger)
+        @principal = principal
+        @keytab = keytab
+      end
+
+      def configured?
+        @principal && !@principal.empty?
+      end
+
+      def ident
+        GSSAPI_IDENT
+      end
+
+      def authenticate!(host, encoder, decoder)
+        load_gssapi
+        initialize_gssapi_context(host)
+
+        @encoder = encoder
+        @decoder = decoder
+
+        # send gssapi token and receive token to verify
+        token_to_verify = send_and_receive_sasl_token
+
+        # verify incoming token
+        unless @gssapi_ctx.init_context(token_to_verify)
+          raise Kafka::Error, "GSSAPI context verification failed."
+        end
+
+        # we can continue, so send OK
+        @encoder.write([0, 2].pack('l>c'))
+
+        # read wrapped message and return it back with principal
+        handshake_messages
+      end
+
+      def handshake_messages
+        msg = @decoder.bytes
+        raise Kafka::Error, "GSSAPI negotiation failed." unless msg
+        # unwrap with integrity only
+        msg_unwrapped = @gssapi_ctx.unwrap_message(msg, GSSAPI_CONFIDENTIALITY)
+        msg_wrapped = @gssapi_ctx.wrap_message(msg_unwrapped + @principal, GSSAPI_CONFIDENTIALITY)
+        @encoder.write_bytes(msg_wrapped)
+      end
+
+      def send_and_receive_sasl_token
+        @encoder.write_bytes(@gssapi_token)
+        @decoder.bytes
+      end
+
+      def load_gssapi
+        begin
+          require "gssapi"
+        rescue LoadError
+          @logger.error "In order to use GSSAPI authentication you need to install the `gssapi` gem."
+          raise
+        end
+      end
+
+      def initialize_gssapi_context(host)
+        @logger.debug "GSSAPI: Initializing context with #{host}, principal #{@principal}"
+
+        @gssapi_ctx = GSSAPI::Simple.new(host, @principal, @keytab)
+        @gssapi_token = @gssapi_ctx.init_context(nil)
+      end
+    end
+  end
+end
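
The GSSAPI mechanism is considered configured as soon as a non-empty principal is present, and the gssapi gem is only loaded when authentication actually runs (see load_gssapi above). A construction sketch with placeholder principal and keytab values:

require "logger"

gssapi = Kafka::Sasl::Gssapi.new(
  logger: Logger.new($stdout),
  principal: "kafka/broker1.example.com@EXAMPLE.COM",  # placeholder principal
  keytab: "/etc/security/kafka.keytab"                  # placeholder keytab path
)

gssapi.configured?  # => true (principal is present and non-empty)
gssapi.ident        # => "GSSAPI"

# gssapi.authenticate!(host, encoder, decoder) would then drive the token exchange
# over an established broker connection; it requires the `gssapi` gem.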
data/lib/kafka/sasl/oauth.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class OAuth
+      OAUTH_IDENT = "OAUTHBEARER"
+
+      # token_provider: THE FOLLOWING INTERFACE MUST BE FULFILLED:
+      #
+      # [REQUIRED] TokenProvider#token - Returns an ID/Access Token to be sent to the Kafka client.
+      #   The implementation should ensure token reuse so that multiple calls at connect time do not
+      #   create multiple tokens. The implementation should also periodically refresh the token in
+      #   order to guarantee that each call returns an unexpired token. A timeout error should
+      #   be returned after a short period of inactivity so that the broker can log debugging
+      #   info and retry.
+      #
+      # [OPTIONAL] TokenProvider#extensions - Returns a map of key-value pairs that can be sent with the
+      #   SASL/OAUTHBEARER initial client response. If not provided, the values are ignored. This feature
+      #   is only available in Kafka >= 2.1.0.
+      #
+      def initialize(logger:, token_provider:)
+        @logger = TaggedLogger.new(logger)
+        @token_provider = token_provider
+      end
+
+      def ident
+        OAUTH_IDENT
+      end
+
+      def configured?
+        @token_provider
+      end
+
+      def authenticate!(host, encoder, decoder)
+        # Send SASLOauthBearerClientResponse with token
+        @logger.debug "Authenticating to #{host} with SASL #{OAUTH_IDENT}"
+
+        encoder.write_bytes(initial_client_response)
+
+        begin
+          # receive SASL OAuthBearer Server Response
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{OAUTH_IDENT} authentication successful."
+      end
+
+      private
+
+      def initial_client_response
+        raise Kafka::TokenMethodNotImplementedError, "Token provider doesn't define 'token'" unless @token_provider.respond_to? :token
+        "n,,\x01auth=Bearer #{@token_provider.token}#{token_extensions}\x01\x01"
+      end
+
+      def token_extensions
+        return nil unless @token_provider.respond_to? :extensions
+        "\x01#{@token_provider.extensions.map {|e| e.join("=")}.join("\x01")}"
+      end
+    end
+  end
+end
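
The comment above spells out the token provider contract. A minimal provider might look like the following sketch; the class name and token value are made up, and #extensions is optional:

require "logger"

# Hypothetical token provider satisfying the interface documented above.
class ExampleTokenProvider
  # Required: return a current, unexpired bearer token. A real implementation
  # should cache and refresh it rather than mint a new token per call.
  def token
    "example-opaque-bearer-token"
  end

  # Optional: extra key/value pairs for the SASL/OAUTHBEARER initial response
  # (only honoured by Kafka >= 2.1.0).
  def extensions
    { "traceId" => "abc123" }
  end
end

oauth = Kafka::Sasl::OAuth.new(
  logger: Logger.new($stdout),
  token_provider: ExampleTokenProvider.new
)

oauth.configured?  # => truthy (a token provider is set)
oauth.ident        # => "OAUTHBEARER"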
data/lib/kafka/sasl/plain.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class Plain
+      PLAIN_IDENT = "PLAIN"
+
+      def initialize(logger:, authzid:, username:, password:)
+        @logger = TaggedLogger.new(logger)
+        @authzid = authzid
+        @username = username
+        @password = password
+      end
+
+      def ident
+        PLAIN_IDENT
+      end
+
+      def configured?
+        @authzid && @username && @password
+      end
+
+      def authenticate!(host, encoder, decoder)
+        msg = [@authzid, @username, @password].join("\000").force_encoding("utf-8")
+
+        encoder.write_bytes(msg)
+
+        begin
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL PLAIN authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL PLAIN authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL PLAIN authentication successful."
+      end
+    end
+  end
+end
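
PLAIN sends the NUL-delimited authzid/username/password triple and waits for the broker's reply; all three fields must be set for the mechanism to count as configured. A construction sketch with placeholder credentials:

require "logger"

plain = Kafka::Sasl::Plain.new(
  logger: Logger.new($stdout),
  authzid: "",                # usually left blank; placeholder
  username: "example-user",   # placeholder credentials
  password: "example-secret"
)

plain.ident        # => "PLAIN"
plain.configured?  # => truthy (all three fields are set; an empty authzid is still truthy)

# plain.authenticate!(host, encoder, decoder) writes "\0example-user\0example-secret"
# to the broker and raises Kafka::Error if the reply is missing.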
data/lib/kafka/sasl/scram.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+require 'base64'
+
+module Kafka
+  module Sasl
+    class Scram
+      MECHANISMS = {
+        "sha256" => "SCRAM-SHA-256",
+        "sha512" => "SCRAM-SHA-512",
+      }.freeze
+
+      def initialize(username:, password:, mechanism: 'sha256', logger:)
+        @username = username
+        @password = password
+        @logger = TaggedLogger.new(logger)
+
+        if mechanism
+          @mechanism = MECHANISMS.fetch(mechanism) do
+            raise Kafka::SaslScramError, "SCRAM mechanism #{mechanism} is not supported."
+          end
+        end
+      end
+
+      def ident
+        @mechanism
+      end
+
+      def configured?
+        @username && @password && @mechanism
+      end
+
+      def authenticate!(host, encoder, decoder)
+        @logger.debug "Authenticating #{@username} with SASL #{@mechanism}"
+
+        begin
+          msg = first_message
+          @logger.debug "Sending first client SASL SCRAM message: #{msg}"
+          encoder.write_bytes(msg)
+
+          @server_first_message = decoder.bytes
+          @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"
+
+          msg = final_message
+          @logger.debug "Sending final client SASL SCRAM message: #{msg}"
+          encoder.write_bytes(msg)
+
+          response = parse_response(decoder.bytes)
+          @logger.debug "Received last server SASL SCRAM message: #{response}"
+
+          raise FailedScramAuthentication, response['e'] if response['e']
+          raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
+        rescue EOFError => e
+          raise FailedScramAuthentication, e.message
+        end
+
+        @logger.debug "SASL SCRAM authentication successful"
+      end
+
+      private
+
+      def first_message
+        "n,,#{first_message_bare}"
+      end
+
+      def first_message_bare
+        "n=#{encoded_username},r=#{nonce}"
+      end
+
+      def final_message_without_proof
+        "c=biws,r=#{rnonce}"
+      end
+
+      def final_message
+        "#{final_message_without_proof},p=#{client_proof}"
+      end
+
+      def server_data
+        parse_response(@server_first_message)
+      end
+
+      def rnonce
+        server_data['r']
+      end
+
+      def salt
+        Base64.strict_decode64(server_data['s'])
+      end
+
+      def iterations
+        server_data['i'].to_i
+      end
+
+      def auth_message
+        [first_message_bare, @server_first_message, final_message_without_proof].join(',')
+      end
+
+      def salted_password
+        hi(@password, salt, iterations)
+      end
+
+      def client_key
+        hmac(salted_password, 'Client Key')
+      end
+
+      def stored_key
+        h(client_key)
+      end
+
+      def server_key
+        hmac(salted_password, 'Server Key')
+      end
+
+      def client_signature
+        hmac(stored_key, auth_message)
+      end
+
+      def server_signature
+        Base64.strict_encode64(hmac(server_key, auth_message))
+      end
+
+      def client_proof
+        Base64.strict_encode64(xor(client_key, client_signature))
+      end
+
+      def h(str)
+        digest.digest(str)
+      end
+
+      def hi(str, salt, iterations)
+        OpenSSL::PKCS5.pbkdf2_hmac(
+          str,
+          salt,
+          iterations,
+          digest.size,
+          digest
+        )
+      end
+
+      def hmac(data, key)
+        OpenSSL::HMAC.digest(digest, data, key)
+      end
+
+      def xor(first, second)
+        first.bytes.zip(second.bytes).map { |(a, b)| (a ^ b).chr }.join('')
+      end
+
+      def parse_response(data)
+        data.split(',').map { |s| s.split('=', 2) }.to_h
+      end
+
+      def encoded_username
+        safe_str(@username.encode(Encoding::UTF_8))
+      end
+
+      def nonce
+        @nonce ||= SecureRandom.urlsafe_base64(32)
+      end
+
+      def digest
+        @digest ||= case @mechanism
+                    when 'SCRAM-SHA-256'
+                      OpenSSL::Digest::SHA256.new
+                    when 'SCRAM-SHA-512'
+                      OpenSSL::Digest::SHA512.new
+                    else
+                      raise ArgumentError, "Unknown SASL mechanism '#{@mechanism}'"
+                    end
+      end
+
+      def safe_str(val)
+        val.gsub('=', '=3D').gsub(',', '=2C')
+      end
+    end
+  end
+end
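
Scram maps the short mechanism names ("sha256", "sha512") to the SASL wire names and rejects anything else up front; the private helpers then implement the RFC 5802 challenge/response (client proof, server signature) over the encoder/decoder pair. A construction sketch with placeholder credentials:

require "logger"

scram = Kafka::Sasl::Scram.new(
  username: "example-user",    # placeholder credentials
  password: "example-secret",
  mechanism: "sha512",
  logger: Logger.new($stdout)
)

scram.ident        # => "SCRAM-SHA-512"
scram.configured?  # => truthy

# Unknown mechanisms fail fast at construction time:
# Kafka::Sasl::Scram.new(username: "u", password: "p", mechanism: "md5", logger: Logger.new($stdout))
#   # raises Kafka::SaslScramError, "SCRAM mechanism md5 is not supported."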