ruby-kafka 0.5.0 → 0.5.1.beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.circleci/config.yml +33 -0
- data/CHANGELOG.md +8 -0
- data/README.md +29 -7
- data/docker-compose.yml +39 -0
- data/lib/kafka.rb +14 -0
- data/lib/kafka/broker.rb +48 -19
- data/lib/kafka/broker_pool.rb +3 -1
- data/lib/kafka/client.rb +65 -7
- data/lib/kafka/cluster.rb +68 -4
- data/lib/kafka/connection.rb +8 -18
- data/lib/kafka/connection_builder.rb +2 -1
- data/lib/kafka/consumer.rb +16 -4
- data/lib/kafka/fetch_operation.rb +3 -1
- data/lib/kafka/protocol.rb +8 -0
- data/lib/kafka/protocol/api_versions_request.rb +19 -0
- data/lib/kafka/protocol/api_versions_response.rb +47 -0
- data/lib/kafka/protocol/create_topics_request.rb +40 -0
- data/lib/kafka/protocol/create_topics_response.rb +24 -0
- data/lib/kafka/protocol/decoder.rb +9 -0
- data/lib/kafka/protocol/encoder.rb +8 -0
- data/lib/kafka/protocol/fetch_request.rb +4 -2
- data/lib/kafka/protocol/message.rb +13 -1
- data/lib/kafka/protocol/message_set.rb +7 -1
- data/lib/kafka/protocol/metadata_response.rb +10 -2
- data/lib/kafka/protocol/sasl_handshake_request.rb +1 -1
- data/lib/kafka/protocol/topic_metadata_request.rb +4 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +5 -1
- data/lib/kafka/sasl/gssapi.rb +74 -0
- data/lib/kafka/sasl/plain.rb +37 -0
- data/lib/kafka/sasl/scram.rb +175 -0
- data/lib/kafka/sasl_authenticator.rb +31 -39
- data/lib/kafka/version.rb +1 -1
- metadata +14 -8
- data/circle.yml +0 -23
- data/lib/kafka/sasl_gssapi_authenticator.rb +0 -77
- data/lib/kafka/sasl_plain_authenticator.rb +0 -37
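The headline additions in this release are a topic-creation API (the new CreateTopics request/response classes plus the client.rb and cluster.rb changes) and SASL SCRAM support. As a rough orientation before the hunks below, here is a hedged sketch of how topic creation is likely exposed at the client level; the create_topic method name and its num_partitions:/replication_factor: keywords are assumptions, not shown in this diff:

require "kafka"

kafka = Kafka.new(seed_brokers: ["kafka1:9092"])

# Ask the cluster controller to create a replicated, three-partition topic.
kafka.create_topic("page-views", num_partitions: 3, replication_factor: 2)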
data/lib/kafka/protocol/create_topics_response.rb
@@ -0,0 +1,24 @@
+module Kafka
+  module Protocol
+
+    class CreateTopicsResponse
+      attr_reader :errors
+
+      def initialize(errors:)
+        @errors = errors
+      end
+
+      def self.decode(decoder)
+        errors = decoder.array do
+          topic = decoder.string
+          error_code = decoder.int16
+
+          [topic, error_code]
+        end
+
+        new(errors: errors)
+      end
+    end
+
+  end
+end
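To make the decode path above concrete, here is a small sketch that feeds a hand-built payload through it, assuming the standard Kafka wire encoding used elsewhere in this gem (int32 array count, int16-length-prefixed strings, int16 error codes); the bytes are illustrative, not captured from a broker:

require "kafka"

payload  = [1].pack("l>")                  # array with a single entry
payload << [9].pack("s>") << "new-topic"   # topic name, length-prefixed
payload << [0].pack("s>")                  # error code 0 (no error)

decoder  = Kafka::Protocol::Decoder.from_string(payload)
response = Kafka::Protocol::CreateTopicsResponse.decode(decoder)
response.errors #=> [["new-topic", 0]]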
data/lib/kafka/protocol/decoder.rb
@@ -20,6 +20,13 @@ module Kafka
         @io.eof?
       end

+      # Decodes an 8-bit boolean from the IO object.
+      #
+      # @return [Boolean]
+      def boolean
+        read(1) == 0x1
+      end
+
       # Decodes an 8-bit integer from the IO object.
       #
       # @return [Integer]
@@ -92,6 +99,8 @@ module Kafka
       #
       # @return [String]
       def read(number_of_bytes)
+        return "" if number_of_bytes == 0
+
         data = @io.read(number_of_bytes) or raise EOFError

         # If the `read` call returned less data than expected we should not
data/lib/kafka/protocol/encoder.rb
@@ -25,6 +25,14 @@ module Kafka
         nil
       end

+      # Writes an 8-bit boolean to the IO object.
+      #
+      # @param boolean [Boolean]
+      # @return [nil]
+      def write_boolean(boolean)
+        write(boolean ? 0x1 : 0x0)
+      end
+
       # Writes an 8-bit integer to the IO object.
       #
       # @param int [Integer]
data/lib/kafka/protocol/fetch_request.rb
@@ -19,10 +19,11 @@ module Kafka
       # @param max_wait_time [Integer]
       # @param min_bytes [Integer]
       # @param topics [Hash]
-      def initialize(max_wait_time:, min_bytes:, topics:)
+      def initialize(max_wait_time:, min_bytes:, max_bytes:, topics:)
         @replica_id = REPLICA_ID
         @max_wait_time = max_wait_time
         @min_bytes = min_bytes
+        @max_bytes = max_bytes
         @topics = topics
       end

@@ -31,7 +32,7 @@ module Kafka
       end

       def api_version
-        2
+        3
       end

       def response_class
@@ -42,6 +43,7 @@ module Kafka
         encoder.write_int32(@replica_id)
         encoder.write_int32(@max_wait_time)
         encoder.write_int32(@min_bytes)
+        encoder.write_int32(@max_bytes)

         encoder.write_array(@topics) do |topic, partitions|
           encoder.write_string(topic)
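The new max_bytes field brings the request up to fetch API version 3, which adds a request-level cap on the total response size on top of the existing per-partition limit. A hedged sketch of how this is likely surfaced by the client-side changes listed above (client.rb, fetch_operation.rb); the max_bytes: option name on fetch_messages is an assumption:

kafka = Kafka.new(seed_brokers: ["kafka1:9092"])

# Cap the amount of data returned by a single fetch at 5 MB.
messages = kafka.fetch_messages(
  topic: "page-views",
  partition: 0,
  offset: :earliest,
  max_bytes: 5 * 1024 * 1024,
)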
data/lib/kafka/protocol/message.rb
@@ -56,8 +56,20 @@ module Kafka
         # For some weird reason we need to cut out the first 20 bytes.
         data = codec.decompress(value)
         message_set_decoder = Decoder.from_string(data)
+        message_set = MessageSet.decode(message_set_decoder)
+
+        # The contained messages need to have their offset corrected.
+        messages = message_set.messages.each_with_index.map do |message, i|
+          Message.new(
+            offset: offset + i,
+            value: message.value,
+            key: message.key,
+            create_time: message.create_time,
+            codec_id: message.codec_id
+          )
+        end

-        MessageSet.decode(message_set_decoder)
+        MessageSet.new(messages: messages)
       end

       def self.decode(decoder)
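The new block re-numbers the messages nested inside a compressed wrapper message: each inner message gets the wrapper's offset plus its index in the set. A plain-Ruby illustration of that arithmetic, independent of the gem's classes:

wrapper_offset = 100
inner_values   = ["a", "b", "c"]

inner_values.each_with_index.map { |value, i| [wrapper_offset + i, value] }
#=> [[100, "a"], [101, "b"], [102, "c"]]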
data/lib/kafka/protocol/message_set.rb
@@ -37,7 +37,13 @@ module Kafka
               fetched_messages << message
             end
           rescue EOFError
-            # We tried to decode a partial message at the end of the set; just skip it.
+            if fetched_messages.empty?
+              # If the first message in the set is truncated, it's likely because the
+              # message is larger than the maximum size that we have asked for.
+              raise MessageTooLargeToRead
+            else
+              # We tried to decode a partial message at the end of the set; just skip it.
+            end
           end
         end

data/lib/kafka/protocol/metadata_response.rb
@@ -81,8 +81,12 @@ module Kafka
       # @return [Array<TopicMetadata>] the list of topics in the cluster.
       attr_reader :topics

-      def initialize(brokers:, topics:)
+      # @return [Integer] The broker id of the controller broker.
+      attr_reader :controller_id
+
+      def initialize(brokers:, controller_id:, topics:)
         @brokers = brokers
+        @controller_id = controller_id
         @topics = topics
       end

@@ -149,6 +153,7 @@ module Kafka
           node_id = decoder.int32
           host = decoder.string
           port = decoder.int32
+          rack = decoder.string

           BrokerInfo.new(
             node_id: node_id,
@@ -157,9 +162,12 @@ module Kafka
           )
         end

+        controller_id = decoder.int32
+
        topics = decoder.array do
           topic_error_code = decoder.int16
           topic_name = decoder.string
+          is_internal = decoder.boolean

           partitions = decoder.array do
             PartitionMetadata.new(
@@ -178,7 +186,7 @@ module Kafka
           )
         end

-        new(brokers: brokers, topics: topics)
+        new(brokers: brokers, controller_id: controller_id, topics: topics)
       end
     end
   end
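With controller_id now decoded, the controller broker (the one that topic-creation requests have to be sent to) can be found in the broker list. A minimal sketch, assuming a decoded MetadataResponse in metadata:

controller = metadata.brokers.find { |broker| broker.node_id == metadata.controller_id }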
data/lib/kafka/round_robin_assignment_strategy.rb
@@ -23,7 +23,11 @@ module Kafka
       end

       topics.each do |topic|
-        partitions = @cluster.partitions_for(topic).map(&:partition_id)
+        begin
+          partitions = @cluster.partitions_for(topic).map(&:partition_id)
+        rescue UnknownTopicOrPartition
+          raise UnknownTopicOrPartition, "unknown topic #{topic}"
+        end

         partitions_per_member = partitions.group_by {|partition_id|
           partition_id % members.count
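The group_by expression at the end of the hunk is what actually spreads partitions across the group members. A standalone illustration with five partitions and two members:

partitions    = [0, 1, 2, 3, 4]
members_count = 2

partitions.group_by { |partition_id| partition_id % members_count }
#=> {0=>[0, 2, 4], 1=>[1, 3]}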
data/lib/kafka/sasl/gssapi.rb
@@ -0,0 +1,74 @@
+module Kafka
+  module Sasl
+    class Gssapi
+      GSSAPI_IDENT = "GSSAPI"
+      GSSAPI_CONFIDENTIALITY = false
+
+      def initialize(logger:, principal:, keytab:)
+        @logger = logger
+        @principal = principal
+        @keytab = keytab
+      end
+
+      def configured?
+        @principal && !@principal.empty?
+      end
+
+      def ident
+        GSSAPI_IDENT
+      end
+
+      def authenticate!(host, encoder, decoder)
+        load_gssapi
+        initialize_gssapi_context(host)
+
+        @encoder = encoder
+        @decoder = decoder
+
+        # send gssapi token and receive token to verify
+        token_to_verify = send_and_receive_sasl_token
+
+        # verify incoming token
+        unless @gssapi_ctx.init_context(token_to_verify)
+          raise Kafka::Error, "GSSAPI context verification failed."
+        end
+
+        # we can continue, so send OK
+        @encoder.write([0, 2].pack('l>c'))
+
+        # read wrapped message and return it back with principal
+        handshake_messages
+      end
+
+      def handshake_messages
+        msg = @decoder.bytes
+        raise Kafka::Error, "GSSAPI negotiation failed." unless msg
+        # unwrap with integrity only
+        msg_unwrapped = @gssapi_ctx.unwrap_message(msg, GSSAPI_CONFIDENTIALITY)
+        msg_wrapped = @gssapi_ctx.wrap_message(msg_unwrapped + @principal, GSSAPI_CONFIDENTIALITY)
+        @encoder.write_bytes(msg_wrapped)
+      end
+
+      def send_and_receive_sasl_token
+        @encoder.write_bytes(@gssapi_token)
+        @decoder.bytes
+      end
+
+      def load_gssapi
+        begin
+          require "gssapi"
+        rescue LoadError
+          @logger.error "In order to use GSSAPI authentication you need to install the `gssapi` gem."
+          raise
+        end
+      end
+
+      def initialize_gssapi_context(host)
+        @logger.debug "GSSAPI: Initializing context with #{host}, principal #{@principal}"
+
+        @gssapi_ctx = GSSAPI::Simple.new(host, @principal, @keytab)
+        @gssapi_token = @gssapi_ctx.init_context(nil)
+      end
+    end
+  end
+end
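This class is a refactor of the removed sasl_gssapi_authenticator.rb (see the file list above) into the new Kafka::Sasl namespace. A hedged configuration sketch using the pre-existing client options sasl_gssapi_principal and sasl_gssapi_keytab; treat the option names and values as assumptions, since they are not shown in this diff:

kafka = Kafka.new(
  seed_brokers: ["kafka1:9092"],
  sasl_gssapi_principal: "kafka-client@EXAMPLE.COM",
  sasl_gssapi_keytab: "/etc/security/keytabs/kafka-client.keytab",
)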
data/lib/kafka/sasl/plain.rb
@@ -0,0 +1,37 @@
+module Kafka
+  module Sasl
+    class Plain
+      PLAIN_IDENT = "PLAIN"
+
+      def initialize(logger:, authzid:, username:, password:)
+        @logger = logger
+        @authzid = authzid
+        @username = username
+        @password = password
+      end
+
+      def ident
+        PLAIN_IDENT
+      end
+
+      def configured?
+        @authzid && @username && @password
+      end
+
+      def authenticate!(host, encoder, decoder)
+        msg = [@authzid, @username, @password].join("\000").force_encoding("utf-8")
+
+        encoder.write_bytes(msg)
+
+        begin
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL PLAIN authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL PLAIN authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL PLAIN authentication successful."
+      end
+    end
+  end
+end
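For reference, the message written by authenticate! above follows the standard SASL PLAIN format: authorization id, username and password joined by NUL bytes. A tiny illustration with dummy credentials:

["", "alice", "secret"].join("\000")
#=> "\u0000alice\u0000secret"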
data/lib/kafka/sasl/scram.rb
@@ -0,0 +1,175 @@
+require 'securerandom'
+require 'base64'
+
+module Kafka
+  module Sasl
+    class Scram
+      MECHANISMS = {
+        "sha256" => "SCRAM-SHA-256",
+        "sha512" => "SCRAM-SHA-512",
+      }.freeze
+
+      def initialize(username:, password:, mechanism: 'sha256', logger:)
+        @username = username
+        @password = password
+        @logger = logger
+
+        if mechanism
+          @mechanism = MECHANISMS.fetch(mechanism) do
+            raise Kafka::SaslScramError, "SCRAM mechanism #{mechanism} is not supported."
+          end
+        end
+      end
+
+      def ident
+        @mechanism
+      end
+
+      def configured?
+        @username && @password && @mechanism
+      end
+
+      def authenticate!(host, encoder, decoder)
+        @logger.debug "Authenticating #{@username} with SASL #{@mechanism}"
+
+        begin
+          msg = first_message
+          @logger.debug "Sending first client SASL SCRAM message: #{msg}"
+          encoder.write_bytes(msg)
+
+          @server_first_message = decoder.bytes
+          @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"
+
+          msg = final_message
+          @logger.debug "Sending final client SASL SCRAM message: #{msg}"
+          encoder.write_bytes(msg)
+
+          response = parse_response(decoder.bytes)
+          @logger.debug "Received last server SASL SCRAM message: #{response}"
+
+          raise FailedScramAuthentication, response['e'] if response['e']
+          raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
+        rescue EOFError => e
+          raise FailedScramAuthentication, e.message
+        end
+
+        @logger.debug "SASL SCRAM authentication successful"
+      end
+
+      private
+
+      def first_message
+        "n,,#{first_message_bare}"
+      end
+
+      def first_message_bare
+        "n=#{encoded_username},r=#{nonce}"
+      end
+
+      def final_message_without_proof
+        "c=biws,r=#{rnonce}"
+      end
+
+      def final_message
+        "#{final_message_without_proof},p=#{client_proof}"
+      end
+
+      def server_data
+        parse_response(@server_first_message)
+      end
+
+      def rnonce
+        server_data['r']
+      end
+
+      def salt
+        Base64.strict_decode64(server_data['s'])
+      end
+
+      def iterations
+        server_data['i'].to_i
+      end
+
+      def auth_message
+        [first_message_bare, @server_first_message, final_message_without_proof].join(',')
+      end
+
+      def salted_password
+        hi(@password, salt, iterations)
+      end
+
+      def client_key
+        hmac(salted_password, 'Client Key')
+      end
+
+      def stored_key
+        h(client_key)
+      end
+
+      def server_key
+        hmac(salted_password, 'Server Key')
+      end
+
+      def client_signature
+        hmac(stored_key, auth_message)
+      end
+
+      def server_signature
+        Base64.strict_encode64(hmac(server_key, auth_message))
+      end
+
+      def client_proof
+        Base64.strict_encode64(xor(client_key, client_signature))
+      end
+
+      def h(str)
+        digest.digest(str)
+      end
+
+      def hi(str, salt, iterations)
+        OpenSSL::PKCS5.pbkdf2_hmac(
+          str,
+          salt,
+          iterations,
+          digest.size,
+          digest
+        )
+      end
+
+      def hmac(data, key)
+        OpenSSL::HMAC.digest(digest, data, key)
+      end
+
+      def xor(first, second)
+        first.bytes.zip(second.bytes).map { |(a, b)| (a ^ b).chr }.join('')
+      end
+
+      def parse_response(data)
+        data.split(',').map { |s| s.split('=', 2) }.to_h
+      end
+
+      def encoded_username
+        safe_str(@username.encode(Encoding::UTF_8))
+      end
+
+      def nonce
+        @nonce ||= SecureRandom.urlsafe_base64(32)
+      end
+
+      def digest
+        @digest ||= case @mechanism
+                    when 'SCRAM-SHA-256'
+                      OpenSSL::Digest::SHA256.new
+                    when 'SCRAM-SHA-512'
+                      OpenSSL::Digest::SHA512.new
+                    else
+                      raise ArgumentError, "Unknown SASL mechanism '#{@mechanism}'"
+                    end
+      end
+
+      def safe_str(val)
+        val.gsub('=', '=3D').gsub(',', '=2C')
+      end
+    end
+  end
+end
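The class above implements the client side of RFC 5802 SCRAM (SHA-256 or SHA-512). A hedged sketch of the corresponding client configuration documented by this release's README changes; the option names sasl_scram_username, sasl_scram_password and sasl_scram_mechanism are assumptions not visible in the hunks:

kafka = Kafka.new(
  seed_brokers: ["kafka1:9092"],
  sasl_scram_username: "alice",
  sasl_scram_password: "secret",
  sasl_scram_mechanism: "sha256",   # or "sha512", per the MECHANISMS map above
)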