ruby-kafka-aws-iam 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +393 -0
- data/.github/workflows/stale.yml +19 -0
- data/.gitignore +13 -0
- data/.readygo +1 -0
- data/.rspec +3 -0
- data/.rubocop.yml +44 -0
- data/.ruby-version +1 -0
- data/.yardopts +3 -0
- data/CHANGELOG.md +314 -0
- data/Gemfile +5 -0
- data/ISSUE_TEMPLATE.md +23 -0
- data/LICENSE.txt +176 -0
- data/Procfile +2 -0
- data/README.md +1356 -0
- data/Rakefile +8 -0
- data/benchmarks/message_encoding.rb +23 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/docker-compose.yml +39 -0
- data/examples/consumer-group.rb +35 -0
- data/examples/firehose-consumer.rb +64 -0
- data/examples/firehose-producer.rb +54 -0
- data/examples/simple-consumer.rb +34 -0
- data/examples/simple-producer.rb +42 -0
- data/examples/ssl-producer.rb +44 -0
- data/lib/kafka/async_producer.rb +297 -0
- data/lib/kafka/broker.rb +217 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +838 -0
- data/lib/kafka/cluster.rb +513 -0
- data/lib/kafka/compression.rb +45 -0
- data/lib/kafka/compressor.rb +86 -0
- data/lib/kafka/connection.rb +228 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +642 -0
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/consumer_group.rb +231 -0
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +420 -0
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +58 -0
- data/lib/kafka/fetched_batch_generator.rb +120 -0
- data/lib/kafka/fetched_message.rb +48 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +224 -0
- data/lib/kafka/gzip_codec.rb +34 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/lz4_codec.rb +27 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +259 -0
- data/lib/kafka/partitioner.rb +40 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +528 -0
- data/lib/kafka/prometheus.rb +316 -0
- data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +47 -0
- data/lib/kafka/protocol/join_group_response.rb +41 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +94 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +223 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +26 -0
- data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
- data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
- data/lib/kafka/protocol.rb +225 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
- data/lib/kafka/sasl/awsmskiam.rb +128 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +180 -0
- data/lib/kafka/sasl_authenticator.rb +73 -0
- data/lib/kafka/snappy_codec.rb +29 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +192 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +77 -0
- data/lib/kafka/transaction_manager.rb +306 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- data/lib/kafka/zstd_codec.rb +27 -0
- data/lib/kafka.rb +373 -0
- data/lib/ruby-kafka.rb +5 -0
- data/ruby-kafka.gemspec +54 -0
- metadata +520 -0
data/lib/kafka/round_robin_assignment_strategy.rb
@@ -0,0 +1,52 @@
+module Kafka
+
+  # A round robin assignment strategy inspired by the
+  # original Java client round robin assignor. It's capable
+  # of handling identical as well as different topic subscriptions
+  # across the same consumer group.
+  class RoundRobinAssignmentStrategy
+    def protocol_name
+      "roundrobin"
+    end
+
+    # Assign the topic partitions to the group members.
+    #
+    # @param cluster [Kafka::Cluster]
+    # @param members [Hash<String, Kafka::Protocol::JoinGroupResponse::Metadata>] a hash
+    #   mapping member ids to metadata
+    # @param partitions [Array<Kafka::ConsumerGroup::Assignor::Partition>] a list of
+    #   partitions the consumer group processes
+    # @return [Hash<String, Array<Kafka::ConsumerGroup::Assignor::Partition>>] a hash
+    #   mapping member ids to partitions.
+    def call(cluster:, members:, partitions:)
+      partitions_per_member = Hash.new {|h, k| h[k] = [] }
+      relevant_partitions = valid_sorted_partitions(members, partitions)
+      members_ids = members.keys
+      iterator = (0...members.size).cycle
+      idx = iterator.next
+
+      relevant_partitions.each do |partition|
+        topic = partition.topic
+
+        while !members[members_ids[idx]].topics.include?(topic)
+          idx = iterator.next
+        end
+
+        partitions_per_member[members_ids[idx]] << partition
+        idx = iterator.next
+      end
+
+      partitions_per_member
+    end
+
+    def valid_sorted_partitions(members, partitions)
+      subscribed_topics = members.map do |id, metadata|
+        metadata && metadata.topics
+      end.flatten.compact
+
+      partitions
+        .select { |partition| subscribed_topics.include?(partition.topic) }
+        .sort_by { |partition| partition.topic }
+    end
+  end
+end
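To make the assignment behavior concrete, here is a hedged, self-contained sketch of calling the strategy directly. The `Metadata` and `Partition` structs are hypothetical stand-ins for `Kafka::Protocol::JoinGroupResponse::Metadata` and `Kafka::ConsumerGroup::Assignor::Partition`; only the `#topics` and `#topic` readers the strategy actually touches are modeled.

require "kafka"

# Hypothetical stand-ins; the strategy only calls #topics on members
# and #topic on partitions.
Metadata  = Struct.new(:topics)
Partition = Struct.new(:topic, :partition_id)

members = {
  "member-a" => Metadata.new(["orders"]),
  "member-b" => Metadata.new(["orders", "payments"]),
}

partitions = [
  Partition.new("orders", 0),
  Partition.new("orders", 1),
  Partition.new("payments", 0),
]

strategy = Kafka::RoundRobinAssignmentStrategy.new

# `cluster` is accepted but unused by #call, so nil is fine here.
strategy.call(cluster: nil, members: members, partitions: partitions)
# => e.g. { "member-a" => [orders/0], "member-b" => [orders/1, payments/0] }
# "member-a" is skipped for payments/0 because it doesn't subscribe to that topic.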
data/lib/kafka/sasl/awsmskiam.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+require 'base64'
+require 'json'
+
+module Kafka
+  module Sasl
+    class AwsMskIam
+      AWS_MSK_IAM = "AWS_MSK_IAM"
+
+      def initialize(aws_region:, access_key_id:, secret_key_id:, logger:)
+        @semaphore = Mutex.new
+
+        @aws_region = aws_region
+        @access_key_id = access_key_id
+        @secret_key_id = secret_key_id
+        @logger = TaggedLogger.new(logger)
+      end
+
+      def ident
+        AWS_MSK_IAM
+      end
+
+      def configured?
+        @aws_region && @access_key_id && @secret_key_id
+      end
+
+      def authenticate!(host, encoder, decoder)
+        @logger.debug "Authenticating #{@access_key_id} with SASL #{AWS_MSK_IAM}"
+
+        host_without_port = host.split(':', -1).first
+
+        time_now = Time.now.utc
+
+        msg = authentication_payload(host: host_without_port, time_now: time_now)
+        @logger.debug "Sending first client SASL AWS_MSK_IAM message:"
+        @logger.debug msg
+        encoder.write_bytes(msg)
+
+        begin
+          @server_first_message = decoder.bytes
+          @logger.debug "Received first server SASL AWS_MSK_IAM message: #{@server_first_message}"
+
+          raise Kafka::Error, "SASL AWS_MSK_IAM authentication failed: unknown error" unless @server_first_message
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL AWS_MSK_IAM authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{AWS_MSK_IAM} authentication successful"
+      end
+
+      private
+
+      def bin_to_hex(s)
+        s.each_byte.map { |b| b.to_s(16).rjust(2, '0') }.join
+      end
+
+      def digest
+        @digest ||= OpenSSL::Digest::SHA256.new
+      end
+
+      def authentication_payload(host:, time_now:)
+        {
+          'version': "2020_10_22",
+          'host': host,
+          'user-agent': "ruby-kafka",
+          'action': "kafka-cluster:Connect",
+          'x-amz-algorithm': "AWS4-HMAC-SHA256",
+          'x-amz-credential': @access_key_id + "/" + time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request",
+          'x-amz-date': time_now.strftime("%Y%m%dT%H%M%SZ"),
+          'x-amz-signedheaders': "host",
+          'x-amz-expires': "900",
+          'x-amz-signature': signature(host: host, time_now: time_now)
+        }.to_json
+      end
+
+      def canonical_request(host:, time_now:)
+        "GET\n" +
+          "/\n" +
+          canonical_query_string(time_now: time_now) + "\n" +
+          canonical_headers(host: host) + "\n" +
+          signed_headers + "\n" +
+          hashed_payload
+      end
+
+      def canonical_query_string(time_now:)
+        URI.encode_www_form(
+          "Action" => "kafka-cluster:Connect",
+          "X-Amz-Algorithm" => "AWS4-HMAC-SHA256",
+          "X-Amz-Credential" => @access_key_id + "/" + time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request",
+          "X-Amz-Date" => time_now.strftime("%Y%m%dT%H%M%SZ"),
+          "X-Amz-Expires" => "900",
+          "X-Amz-SignedHeaders" => "host"
+        )
+      end
+
+      def canonical_headers(host:)
+        "host" + ":" + host + "\n"
+      end
+
+      def signed_headers
+        "host"
+      end
+
+      def hashed_payload
+        bin_to_hex(digest.digest(""))
+      end
+
+      def string_to_sign(host:, time_now:)
+        "AWS4-HMAC-SHA256" + "\n" +
+          time_now.strftime("%Y%m%dT%H%M%SZ") + "\n" +
+          time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request" + "\n" +
+          bin_to_hex(digest.digest(canonical_request(host: host, time_now: time_now)))
+      end
+
+      def signature(host:, time_now:)
+        date_key = OpenSSL::HMAC.digest("SHA256", "AWS4" + @secret_key_id, time_now.strftime("%Y%m%d"))
+        date_region_key = OpenSSL::HMAC.digest("SHA256", date_key, @aws_region)
+        date_region_service_key = OpenSSL::HMAC.digest("SHA256", date_region_key, "kafka-cluster")
+        signing_key = OpenSSL::HMAC.digest("SHA256", date_region_service_key, "aws4_request")
+        signature = bin_to_hex(OpenSSL::HMAC.digest("SHA256", signing_key, string_to_sign(host: host, time_now: time_now)))
+
+        signature
+      end
+    end
+  end
+end
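The payload above is an AWS Signature Version 4 presigned `kafka-cluster:Connect` request, serialized as JSON and sent as the first SASL frame. A hedged usage sketch follows, assuming `Kafka.new` forwards the `sasl_aws_msk_iam_*` options to the `SaslAuthenticator` shown later in this diff; the broker address and credentials are placeholders, and AWS_MSK_IAM runs over TLS (MSK's IAM listener is typically port 9098).

require "kafka"

kafka = Kafka.new(
  ["b-1.mycluster.abc123.kafka.us-east-1.amazonaws.com:9098"], # placeholder broker
  ssl_ca_certs_from_system: true,
  sasl_aws_msk_iam_access_key_id: ENV.fetch("AWS_ACCESS_KEY_ID"),
  sasl_aws_msk_iam_secret_key_id: ENV.fetch("AWS_SECRET_ACCESS_KEY"),
  sasl_aws_msk_iam_aws_region: "us-east-1",
)

kafka.deliver_message("hello", topic: "greetings")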
data/lib/kafka/sasl/gssapi.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class Gssapi
+      GSSAPI_IDENT = "GSSAPI"
+      GSSAPI_CONFIDENTIALITY = false
+
+      def initialize(logger:, principal:, keytab:)
+        @logger = TaggedLogger.new(logger)
+        @principal = principal
+        @keytab = keytab
+      end
+
+      def configured?
+        @principal && !@principal.empty?
+      end
+
+      def ident
+        GSSAPI_IDENT
+      end
+
+      def authenticate!(host, encoder, decoder)
+        load_gssapi
+        initialize_gssapi_context(host)
+
+        @encoder = encoder
+        @decoder = decoder
+
+        # send gssapi token and receive token to verify
+        token_to_verify = send_and_receive_sasl_token
+
+        # verify incoming token
+        unless @gssapi_ctx.init_context(token_to_verify)
+          raise Kafka::Error, "GSSAPI context verification failed."
+        end
+
+        # we can continue, so send OK
+        @encoder.write([0, 2].pack('l>c'))
+
+        # read wrapped message and return it back with principal
+        handshake_messages
+      end
+
+      def handshake_messages
+        msg = @decoder.bytes
+        raise Kafka::Error, "GSSAPI negotiation failed." unless msg
+        # unwrap with integrity only
+        msg_unwrapped = @gssapi_ctx.unwrap_message(msg, GSSAPI_CONFIDENTIALITY)
+        msg_wrapped = @gssapi_ctx.wrap_message(msg_unwrapped + @principal, GSSAPI_CONFIDENTIALITY)
+        @encoder.write_bytes(msg_wrapped)
+      end
+
+      def send_and_receive_sasl_token
+        @encoder.write_bytes(@gssapi_token)
+        @decoder.bytes
+      end
+
+      def load_gssapi
+        begin
+          require "gssapi"
+        rescue LoadError
+          @logger.error "In order to use GSSAPI authentication you need to install the `gssapi` gem."
+          raise
+        end
+      end
+
+      def initialize_gssapi_context(host)
+        @logger.debug "GSSAPI: Initializing context with #{host}, principal #{@principal}"
+
+        @gssapi_ctx = GSSAPI::Simple.new(host, @principal, @keytab)
+        @gssapi_token = @gssapi_ctx.init_context(nil)
+      end
+    end
+  end
+end
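Kerberos authentication needs the separately-installed `gssapi` gem (loaded lazily in `load_gssapi` above) plus a principal and keytab. A minimal, hedged configuration sketch using ruby-kafka's client options; the broker, realm, and keytab path are placeholders:

require "kafka"

kafka = Kafka.new(
  ["kafka1.example.com:9092"],                              # hypothetical broker
  sasl_gssapi_principal: "kafka/kafka1.example.com@EXAMPLE.COM",
  sasl_gssapi_keytab: "/etc/security/keytabs/kafka.keytab", # hypothetical path
)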
data/lib/kafka/sasl/oauth.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class OAuth
+      OAUTH_IDENT = "OAUTHBEARER"
+
+      # token_provider: THE FOLLOWING INTERFACE MUST BE FULFILLED:
+      #
+      # [REQUIRED] TokenProvider#token - Returns an ID/Access Token to be sent to the Kafka client.
+      #   The implementation should ensure token reuse so that multiple calls at connect time do not
+      #   create multiple tokens. The implementation should also periodically refresh the token in
+      #   order to guarantee that each call returns an unexpired token. A timeout error should
+      #   be returned after a short period of inactivity so that the broker can log debugging
+      #   info and retry.
+      #
+      # [OPTIONAL] TokenProvider#extensions - Returns a map of key-value pairs that can be sent with the
+      #   SASL/OAUTHBEARER initial client response. If not provided, the values are ignored. This feature
+      #   is only available in Kafka >= 2.1.0.
+      #
+      def initialize(logger:, token_provider:)
+        @logger = TaggedLogger.new(logger)
+        @token_provider = token_provider
+      end
+
+      def ident
+        OAUTH_IDENT
+      end
+
+      def configured?
+        @token_provider
+      end
+
+      def authenticate!(host, encoder, decoder)
+        # Send SASLOauthBearerClientResponse with token
+        @logger.debug "Authenticating to #{host} with SASL #{OAUTH_IDENT}"
+
+        encoder.write_bytes(initial_client_response)
+
+        begin
+          # receive SASL OAuthBearer Server Response
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{OAUTH_IDENT} authentication successful."
+      end
+
+      private
+
+      def initial_client_response
+        raise Kafka::TokenMethodNotImplementedError, "Token provider doesn't define 'token'" unless @token_provider.respond_to? :token
+        "n,,\x01auth=Bearer #{@token_provider.token}#{token_extensions}\x01\x01"
+      end
+
+      def token_extensions
+        return nil unless @token_provider.respond_to? :extensions
+        "\x01#{@token_provider.extensions.map {|e| e.join("=")}.join("\x01")}"
+      end
+    end
+  end
+end
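The token provider contract documented above is small enough to sketch. Below is a hypothetical provider: `#token` is required and should cache and refresh the token, `#extensions` is optional (Kafka >= 2.1.0 only); the identity-provider call and extension values are placeholders.

require "kafka"

class MyTokenProvider
  # [REQUIRED] Return an unexpired ID/access token, reusing it across calls.
  def token
    @token = fetch_token if @token.nil? || expired?
    @token
  end

  # [OPTIONAL] Key-value pairs appended to the initial client response.
  def extensions
    { "traceId" => "abc123" } # hypothetical values
  end

  private

  def expired?
    false # placeholder; a real provider would check the token's expiry claim
  end

  def fetch_token
    "opaque-token-from-your-identity-provider" # placeholder
  end
end

kafka = Kafka.new(
  ["kafka1.example.com:9092"], # hypothetical broker
  sasl_oauth_token_provider: MyTokenProvider.new,
)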
data/lib/kafka/sasl/plain.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class Plain
+      PLAIN_IDENT = "PLAIN"
+
+      def initialize(logger:, authzid:, username:, password:)
+        @logger = TaggedLogger.new(logger)
+        @authzid = authzid
+        @username = username
+        @password = password
+      end
+
+      def ident
+        PLAIN_IDENT
+      end
+
+      def configured?
+        @authzid && @username && @password
+      end
+
+      def authenticate!(host, encoder, decoder)
+        msg = [@authzid, @username, @password].join("\000").force_encoding("utf-8")
+
+        encoder.write_bytes(msg)
+
+        begin
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL PLAIN authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL PLAIN authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL PLAIN authentication successful."
+      end
+    end
+  end
+end
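PLAIN sends `authzid NUL username NUL password` as a single frame (RFC 4616), so it should only be used together with TLS. A configuration sketch; the broker and credentials are placeholders, and `sasl_plain_authzid` is usually left empty:

require "kafka"

kafka = Kafka.new(
  ["kafka1.example.com:9092"],           # hypothetical broker
  ssl_ca_cert: File.read("ca-cert.pem"), # TLS, so credentials aren't sent in the clear
  sasl_plain_authzid: "",
  sasl_plain_username: "alice",
  sasl_plain_password: "s3cret",
)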
data/lib/kafka/sasl/scram.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+require 'base64'
+
+module Kafka
+  module Sasl
+    class Scram
+      MECHANISMS = {
+        "sha256" => "SCRAM-SHA-256",
+        "sha512" => "SCRAM-SHA-512",
+      }.freeze
+
+      def initialize(username:, password:, mechanism: 'sha256', logger:)
+        @semaphore = Mutex.new
+        @username = username
+        @password = password
+        @logger = TaggedLogger.new(logger)
+
+        if mechanism
+          @mechanism = MECHANISMS.fetch(mechanism) do
+            raise Kafka::SaslScramError, "SCRAM mechanism #{mechanism} is not supported."
+          end
+        end
+      end
+
+      def ident
+        @mechanism
+      end
+
+      def configured?
+        @username && @password && @mechanism
+      end
+
+      def authenticate!(host, encoder, decoder)
+        @logger.debug "Authenticating #{@username} with SASL #{@mechanism}"
+
+        begin
+          @semaphore.synchronize do
+            msg = first_message
+            @logger.debug "Sending first client SASL SCRAM message: #{msg}"
+            encoder.write_bytes(msg)
+
+            @server_first_message = decoder.bytes
+            @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"
+
+            msg = final_message
+            @logger.debug "Sending final client SASL SCRAM message: #{msg}"
+            encoder.write_bytes(msg)
+
+            response = parse_response(decoder.bytes)
+            @logger.debug "Received last server SASL SCRAM message: #{response}"
+
+            raise FailedScramAuthentication, response['e'] if response['e']
+            raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
+          end
+        rescue EOFError => e
+          raise FailedScramAuthentication, e.message
+        end
+
+        @logger.debug "SASL SCRAM authentication successful"
+      end
+
+      private
+
+      def first_message
+        "n,,#{first_message_bare}"
+      end
+
+      def first_message_bare
+        "n=#{encoded_username},r=#{nonce}"
+      end
+
+      def final_message_without_proof
+        "c=biws,r=#{rnonce}"
+      end
+
+      def final_message
+        "#{final_message_without_proof},p=#{client_proof}"
+      end
+
+      def server_data
+        parse_response(@server_first_message)
+      end
+
+      def rnonce
+        server_data['r']
+      end
+
+      def salt
+        Base64.strict_decode64(server_data['s'])
+      end
+
+      def iterations
+        server_data['i'].to_i
+      end
+
+      def auth_message
+        [first_message_bare, @server_first_message, final_message_without_proof].join(',')
+      end
+
+      def salted_password
+        hi(@password, salt, iterations)
+      end
+
+      def client_key
+        hmac(salted_password, 'Client Key')
+      end
+
+      def stored_key
+        h(client_key)
+      end
+
+      def server_key
+        hmac(salted_password, 'Server Key')
+      end
+
+      def client_signature
+        hmac(stored_key, auth_message)
+      end
+
+      def server_signature
+        Base64.strict_encode64(hmac(server_key, auth_message))
+      end
+
+      def client_proof
+        Base64.strict_encode64(xor(client_key, client_signature))
+      end
+
+      def h(str)
+        digest.digest(str)
+      end
+
+      def hi(str, salt, iterations)
+        OpenSSL::PKCS5.pbkdf2_hmac(
+          str,
+          salt,
+          iterations,
+          digest.size,
+          digest
+        )
+      end
+
+      def hmac(data, key)
+        OpenSSL::HMAC.digest(digest, data, key)
+      end
+
+      def xor(first, second)
+        first.bytes.zip(second.bytes).map { |(a, b)| (a ^ b).chr }.join('')
+      end
+
+      def parse_response(data)
+        data.split(',').map { |s| s.split('=', 2) }.to_h
+      end
+
+      def encoded_username
+        safe_str(@username.encode(Encoding::UTF_8))
+      end
+
+      def nonce
+        @nonce ||= SecureRandom.urlsafe_base64(32)
+      end
+
+      def digest
+        @digest ||= case @mechanism
+                    when 'SCRAM-SHA-256'
+                      OpenSSL::Digest::SHA256.new
+                    when 'SCRAM-SHA-512'
+                      OpenSSL::Digest::SHA512.new
+                    else
+                      raise ArgumentError, "Unknown SASL mechanism '#{@mechanism}'"
+                    end
+      end
+
+      def safe_str(val)
+        val.gsub('=', '=3D').gsub(',', '=2C')
+      end
+    end
+  end
+end
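The private helpers implement the RFC 5802 proof chain: SaltedPassword via PBKDF2, then ClientKey, StoredKey, ClientSignature, and finally ClientProof = ClientKey XOR ClientSignature. A standalone sketch with made-up salt, iteration count, and auth message, purely to show the derivation order (in the real exchange the salt and iterations come from the server's first message, and the auth message is assembled from both sides' SCRAM messages):

require "openssl"
require "base64"

password     = "pencil"
salt         = OpenSSL::Random.random_bytes(16)              # placeholder salt
iterations   = 4096                                          # placeholder count
auth_message = "client-first-bare,server-first,client-final-without-proof" # placeholder
digest       = OpenSSL::Digest::SHA256.new

salted_password  = OpenSSL::PKCS5.pbkdf2_hmac(password, salt, iterations, digest.size, digest)
client_key       = OpenSSL::HMAC.digest(digest, salted_password, "Client Key")
stored_key       = digest.digest(client_key)
client_signature = OpenSSL::HMAC.digest(digest, stored_key, auth_message)

# ClientProof = ClientKey XOR ClientSignature. The server recovers ClientKey
# from the proof and verifies H(ClientKey) against its StoredKey.
client_proof = client_key.bytes.zip(client_signature.bytes).map { |a, b| (a ^ b).chr }.join
puts Base64.strict_encode64(client_proof)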
data/lib/kafka/sasl_authenticator.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'kafka/sasl/plain'
+require 'kafka/sasl/gssapi'
+require 'kafka/sasl/scram'
+require 'kafka/sasl/oauth'
+require 'kafka/sasl/awsmskiam'
+
+module Kafka
+  class SaslAuthenticator
+    def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:,
+                   sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:,
+                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:,
+                   sasl_oauth_token_provider:,
+                   sasl_aws_msk_iam_access_key_id:,
+                   sasl_aws_msk_iam_secret_key_id:,
+                   sasl_aws_msk_iam_aws_region:
+    )
+      @logger = TaggedLogger.new(logger)
+
+      @plain = Sasl::Plain.new(
+        authzid: sasl_plain_authzid,
+        username: sasl_plain_username,
+        password: sasl_plain_password,
+        logger: @logger,
+      )
+
+      @gssapi = Sasl::Gssapi.new(
+        principal: sasl_gssapi_principal,
+        keytab: sasl_gssapi_keytab,
+        logger: @logger,
+      )
+
+      @scram = Sasl::Scram.new(
+        username: sasl_scram_username,
+        password: sasl_scram_password,
+        mechanism: sasl_scram_mechanism,
+        logger: @logger,
+      )
+
+      @aws_msk_iam = Sasl::AwsMskIam.new(
+        access_key_id: sasl_aws_msk_iam_access_key_id,
+        secret_key_id: sasl_aws_msk_iam_secret_key_id,
+        aws_region: sasl_aws_msk_iam_aws_region,
+        logger: @logger,
+      )
+
+      @oauth = Sasl::OAuth.new(
+        token_provider: sasl_oauth_token_provider,
+        logger: @logger,
+      )
+
+      @mechanism = [@gssapi, @plain, @scram, @oauth, @aws_msk_iam].find(&:configured?)
+    end
+
+    def enabled?
+      !@mechanism.nil?
+    end
+
+    def authenticate!(connection)
+      return unless enabled?
+
+      ident = @mechanism.ident
+      response = connection.send_request(Kafka::Protocol::SaslHandshakeRequest.new(ident))
+
+      unless response.error_code == 0 && response.enabled_mechanisms.include?(ident)
+        raise Kafka::Error, "#{ident} is not supported."
+      end
+
+      @mechanism.authenticate!(connection.to_s, connection.encoder, connection.decoder)
+    end
+  end
+end
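Exactly one mechanism is active per client: `find(&:configured?)` takes the first configured entry in the fixed order GSSAPI, PLAIN, SCRAM, OAUTHBEARER, AWS_MSK_IAM, so supplying credentials for two mechanisms silently uses the earlier one. A hedged sketch of constructing the authenticator directly (all values are placeholders; ruby-kafka normally builds this object internally from `Kafka.new` options):

require "kafka"
require "logger"

# Only SCRAM credentials are supplied, so @scram is the first mechanism
# whose #configured? returns a truthy value.
authenticator = Kafka::SaslAuthenticator.new(
  logger: Logger.new($stderr),
  sasl_gssapi_principal: nil,
  sasl_gssapi_keytab: nil,
  sasl_plain_authzid: "",
  sasl_plain_username: nil,
  sasl_plain_password: nil,
  sasl_scram_username: "alice",
  sasl_scram_password: "s3cret",
  sasl_scram_mechanism: "sha256",
  sasl_oauth_token_provider: nil,
  sasl_aws_msk_iam_access_key_id: nil,
  sasl_aws_msk_iam_secret_key_id: nil,
  sasl_aws_msk_iam_aws_region: nil,
)

authenticator.enabled? # => true (SCRAM-SHA-256 will be used)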
data/lib/kafka/snappy_codec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Kafka
+  class SnappyCodec
+    def codec_id
+      2
+    end
+
+    def produce_api_min_version
+      0
+    end
+
+    def load
+      require "snappy"
+    rescue LoadError
+      raise LoadError,
+        "Using snappy compression requires adding a dependency on the `snappy` gem to your Gemfile."
+    end
+
+    def compress(data)
+      Snappy.deflate(data)
+    end
+
+    def decompress(data)
+      buffer = StringIO.new(data)
+      Snappy::Reader.new(buffer).read
+    end
+  end
+end
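The codec declares wire id 2 (snappy in the message-set attributes) and loads the `snappy` gem lazily, so the dependency stays opt-in. A hedged producer sketch using ruby-kafka's documented `compression_codec` option; the broker address and topic are placeholders:

# Gemfile: gem "snappy"   # loaded lazily by SnappyCodec#load
require "kafka"

kafka = Kafka.new(["kafka1.example.com:9092"]) # hypothetical broker

producer = kafka.producer(compression_codec: :snappy)
producer.produce("hello", topic: "greetings") # messages are snappy-compressed on the wire
producer.deliver_messages
producer.shutdown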