ruby-kafka-temp-fork 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.circleci/config.yml +393 -0
- data/.github/workflows/stale.yml +19 -0
- data/.gitignore +13 -0
- data/.readygo +1 -0
- data/.rspec +3 -0
- data/.rubocop.yml +44 -0
- data/.ruby-version +1 -0
- data/.yardopts +3 -0
- data/CHANGELOG.md +310 -0
- data/Gemfile +5 -0
- data/ISSUE_TEMPLATE.md +23 -0
- data/LICENSE.txt +176 -0
- data/Procfile +2 -0
- data/README.md +1342 -0
- data/Rakefile +8 -0
- data/benchmarks/message_encoding.rb +23 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/docker-compose.yml +39 -0
- data/examples/consumer-group.rb +35 -0
- data/examples/firehose-consumer.rb +64 -0
- data/examples/firehose-producer.rb +54 -0
- data/examples/simple-consumer.rb +34 -0
- data/examples/simple-producer.rb +42 -0
- data/examples/ssl-producer.rb +44 -0
- data/lib/kafka.rb +373 -0
- data/lib/kafka/async_producer.rb +291 -0
- data/lib/kafka/broker.rb +217 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +833 -0
- data/lib/kafka/cluster.rb +513 -0
- data/lib/kafka/compression.rb +45 -0
- data/lib/kafka/compressor.rb +86 -0
- data/lib/kafka/connection.rb +223 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +642 -0
- data/lib/kafka/consumer_group.rb +231 -0
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +420 -0
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +58 -0
- data/lib/kafka/fetched_batch_generator.rb +120 -0
- data/lib/kafka/fetched_message.rb +48 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +224 -0
- data/lib/kafka/gzip_codec.rb +34 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/lz4_codec.rb +27 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +259 -0
- data/lib/kafka/partitioner.rb +40 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +528 -0
- data/lib/kafka/prometheus.rb +316 -0
- data/lib/kafka/protocol.rb +225 -0
- data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +47 -0
- data/lib/kafka/protocol/join_group_response.rb +41 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +94 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +223 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +26 -0
- data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
- data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +180 -0
- data/lib/kafka/sasl_authenticator.rb +61 -0
- data/lib/kafka/snappy_codec.rb +29 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +188 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +77 -0
- data/lib/kafka/transaction_manager.rb +306 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- data/lib/kafka/zstd_codec.rb +27 -0
- data/lib/ruby-kafka-temp-fork.rb +5 -0
- data/ruby-kafka-temp-fork.gemspec +54 -0
- metadata +520 -0
@@ -0,0 +1,39 @@
|
|
1
|
+
# frozen_string_literal: true

module Kafka
  module Sasl
    # SASL/PLAIN mechanism: transmits the authorization id, username and
    # password as a single NUL-separated token and then waits for the
    # broker's confirmation bytes.
    class Plain
      PLAIN_IDENT = "PLAIN"

      # @param logger [Logger] wrapped in a TaggedLogger for contextual output.
      # @param authzid [String] authorization identity (usually the empty string).
      # @param username [String]
      # @param password [String]
      def initialize(logger:, authzid:, username:, password:)
        @logger = TaggedLogger.new(logger)
        @authzid = authzid
        @username = username
        @password = password
      end

      # @return [String] the mechanism name announced during the SASL handshake.
      def ident
        PLAIN_IDENT
      end

      # @return [Object] truthy only when all three credentials are present.
      def configured?
        @authzid && @username && @password
      end

      # Performs the PLAIN exchange over the given encoder/decoder pair.
      #
      # @param host [String] broker host (unused by this mechanism).
      # @raise [Kafka::Error] if the broker rejects the credentials, the
      #   connection times out, or the stream is closed mid-exchange.
      # @return [nil]
      def authenticate!(host, encoder, decoder)
        # RFC 4616 format: authzid NUL authcid NUL passwd.
        token = [@authzid, @username, @password].join("\000").force_encoding("utf-8")
        encoder.write_bytes(token)

        begin
          reply = decoder.bytes
          raise Kafka::Error, "SASL PLAIN authentication failed: unknown error" unless reply
        rescue Errno::ETIMEDOUT, EOFError => e
          raise Kafka::Error, "SASL PLAIN authentication failed: #{e.message}"
        end

        @logger.debug "SASL PLAIN authentication successful."
      end
    end
  end
end
|
@@ -0,0 +1,180 @@
|
|
1
|
+
# frozen_string_literal: true

require 'securerandom'
require 'base64'

module Kafka
  module Sasl
    # SASL/SCRAM mechanism (RFC 5802): a salted challenge-response handshake
    # in which client and server mutually prove knowledge of the password
    # without sending it over the wire.
    #
    # NOTE(review): this class uses OpenSSL but does not require 'openssl'
    # itself — presumably it is loaded elsewhere in the gem; verify.
    class Scram
      # Maps the user-facing configuration value to the SASL mechanism name.
      MECHANISMS = {
        "sha256" => "SCRAM-SHA-256",
        "sha512" => "SCRAM-SHA-512",
      }.freeze

      # @param username [String]
      # @param password [String]
      # @param mechanism [String] "sha256" or "sha512".
      # @param logger [Logger] wrapped in a TaggedLogger.
      # @raise [Kafka::SaslScramError] if the mechanism name is not supported.
      def initialize(username:, password:, mechanism: 'sha256', logger:)
        # The handshake mutates per-connection state (@server_first_message,
        # @nonce, @digest), so a mutex serializes concurrent authentications.
        @semaphore = Mutex.new
        @username = username
        @password = password
        @logger = TaggedLogger.new(logger)

        if mechanism
          @mechanism = MECHANISMS.fetch(mechanism) do
            raise Kafka::SaslScramError, "SCRAM mechanism #{mechanism} is not supported."
          end
        end
      end

      # @return [String, nil] the SASL mechanism name, e.g. "SCRAM-SHA-256".
      def ident
        @mechanism
      end

      # @return [Object] truthy only when credentials and mechanism are all set.
      def configured?
        @username && @password && @mechanism
      end

      # Runs the full SCRAM exchange: client-first, server-first, client-final,
      # server-final, then verifies the server's signature.
      #
      # @param host [String] broker host (unused by this mechanism).
      # @raise [FailedScramAuthentication] on server error, bad server
      #   signature, or a closed connection.
      # @return [nil]
      def authenticate!(host, encoder, decoder)
        @logger.debug "Authenticating #{@username} with SASL #{@mechanism}"

        begin
          @semaphore.synchronize do
            msg = first_message
            @logger.debug "Sending first client SASL SCRAM message: #{msg}"
            encoder.write_bytes(msg)

            # Server-first message carries the combined nonce (r), salt (s)
            # and iteration count (i) consumed by the helpers below.
            @server_first_message = decoder.bytes
            @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"

            msg = final_message
            @logger.debug "Sending final client SASL SCRAM message: #{msg}"
            encoder.write_bytes(msg)

            response = parse_response(decoder.bytes)
            @logger.debug "Received last server SASL SCRAM message: #{response}"

            # 'e' is a server-reported error; 'v' must match our own
            # computation of the server signature (mutual authentication).
            raise FailedScramAuthentication, response['e'] if response['e']
            raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
          end
        rescue EOFError => e
          raise FailedScramAuthentication, e.message
        end

        @logger.debug "SASL SCRAM authentication successful"
      end

      private

      # Client-first message; "n,," is the GS2 header (no channel binding).
      def first_message
        "n,,#{first_message_bare}"
      end

      # Client-first message without the GS2 header; reused in auth_message.
      def first_message_bare
        "n=#{encoded_username},r=#{nonce}"
      end

      # "c=biws" is Base64("n,,") — the GS2 header echoed per RFC 5802.
      def final_message_without_proof
        "c=biws,r=#{rnonce}"
      end

      def final_message
        "#{final_message_without_proof},p=#{client_proof}"
      end

      # Parsed key/value pairs from the server-first message.
      def server_data
        parse_response(@server_first_message)
      end

      # Combined client+server nonce chosen by the server.
      def rnonce
        server_data['r']
      end

      def salt
        Base64.strict_decode64(server_data['s'])
      end

      def iterations
        server_data['i'].to_i
      end

      # The exact byte sequence both sides sign (RFC 5802 AuthMessage).
      def auth_message
        [first_message_bare, @server_first_message, final_message_without_proof].join(',')
      end

      def salted_password
        hi(@password, salt, iterations)
      end

      def client_key
        hmac(salted_password, 'Client Key')
      end

      def stored_key
        h(client_key)
      end

      def server_key
        hmac(salted_password, 'Server Key')
      end

      def client_signature
        hmac(stored_key, auth_message)
      end

      # What the server's 'v' attribute must equal for mutual authentication.
      def server_signature
        Base64.strict_encode64(hmac(server_key, auth_message))
      end

      # ClientProof = ClientKey XOR ClientSignature, Base64-encoded.
      def client_proof
        Base64.strict_encode64(xor(client_key, client_signature))
      end

      def h(str)
        digest.digest(str)
      end

      # Hi() from RFC 5802 — PBKDF2-HMAC with the negotiated hash.
      def hi(str, salt, iterations)
        OpenSSL::PKCS5.pbkdf2_hmac(
          str,
          salt,
          iterations,
          digest.size,
          digest
        )
      end

      # NOTE: first argument is the HMAC *key*, second the message —
      # OpenSSL::HMAC.digest(digest, key, data).
      def hmac(data, key)
        OpenSSL::HMAC.digest(digest, data, key)
      end

      # Byte-wise XOR of two equal-length binary strings.
      def xor(first, second)
        first.bytes.zip(second.bytes).map { |(a, b)| (a ^ b).chr }.join('')
      end

      # Splits "k1=v1,k2=v2" into a Hash; values may themselves contain '='.
      def parse_response(data)
        data.split(',').map { |s| s.split('=', 2) }.to_h
      end

      def encoded_username
        safe_str(@username.encode(Encoding::UTF_8))
      end

      # Client nonce, generated once per instance and memoized.
      def nonce
        @nonce ||= SecureRandom.urlsafe_base64(32)
      end

      def digest
        @digest ||= case @mechanism
                    when 'SCRAM-SHA-256'
                      OpenSSL::Digest::SHA256.new
                    when 'SCRAM-SHA-512'
                      OpenSSL::Digest::SHA512.new
                    else
                      raise ArgumentError, "Unknown SASL mechanism '#{@mechanism}'"
                    end
      end

      # Escapes '=' and ',' per the RFC 5802 saslname encoding.
      def safe_str(val)
        val.gsub('=', '=3D').gsub(',', '=2C')
      end
    end
  end
end
|
@@ -0,0 +1,61 @@
|
|
1
|
+
# frozen_string_literal: true

require 'kafka/sasl/plain'
require 'kafka/sasl/gssapi'
require 'kafka/sasl/scram'
require 'kafka/sasl/oauth'

module Kafka
  # Builds all supported SASL mechanisms from client configuration, selects
  # the first one that is fully configured (GSSAPI, then PLAIN, then SCRAM,
  # then OAUTHBEARER), and drives the handshake on a connection.
  class SaslAuthenticator
    # @param logger [Logger] wrapped in a TaggedLogger and shared with every
    #   mechanism.
    # All remaining keyword arguments are the raw credential settings for the
    # individual mechanisms; a mechanism is eligible only when its own
    # `configured?` returns truthy.
    def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:,
                   sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:,
                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:,
                   sasl_oauth_token_provider:)
      @logger = TaggedLogger.new(logger)

      @gssapi = Sasl::Gssapi.new(
        logger: @logger,
        principal: sasl_gssapi_principal,
        keytab: sasl_gssapi_keytab,
      )

      @plain = Sasl::Plain.new(
        logger: @logger,
        authzid: sasl_plain_authzid,
        username: sasl_plain_username,
        password: sasl_plain_password,
      )

      @scram = Sasl::Scram.new(
        logger: @logger,
        username: sasl_scram_username,
        password: sasl_scram_password,
        mechanism: sasl_scram_mechanism,
      )

      @oauth = Sasl::OAuth.new(
        logger: @logger,
        token_provider: sasl_oauth_token_provider,
      )

      # Precedence is fixed by the order of this list.
      @mechanism = [@gssapi, @plain, @scram, @oauth].find { |m| m.configured? }
    end

    # @return [Boolean] whether any SASL mechanism has credentials configured.
    def enabled?
      !@mechanism.nil?
    end

    # Performs the SASL handshake with the broker and then delegates to the
    # selected mechanism. No-op when SASL is not configured.
    #
    # @param connection [Kafka::Connection]
    # @raise [Kafka::Error] if the broker does not support the chosen mechanism.
    # @return [nil]
    def authenticate!(connection)
      return unless enabled?

      name = @mechanism.ident
      handshake = Kafka::Protocol::SaslHandshakeRequest.new(name)
      response = connection.send_request(handshake)

      supported = response.error_code == 0 && response.enabled_mechanisms.include?(name)
      raise Kafka::Error, "#{name} is not supported." unless supported

      @mechanism.authenticate!(connection.to_s, connection.encoder, connection.decoder)
    end
  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
# frozen_string_literal: true

module Kafka
  # Compression codec backed by the optional `snappy` gem.
  class SnappyCodec
    # @return [Integer] the Kafka protocol identifier for Snappy compression.
    def codec_id
      2
    end

    # @return [Integer] the minimum Produce API version this codec requires.
    def produce_api_min_version
      0
    end

    # Loads the `snappy` gem on demand, translating a missing gem into an
    # actionable error message.
    #
    # @raise [LoadError] when the `snappy` gem is not installed.
    def load
      require "snappy"
    rescue LoadError
      raise LoadError,
        "Using snappy compression requires adding a dependency on the `snappy` gem to your Gemfile."
    end

    # @param data [String] raw bytes to compress.
    # @return [String] the Snappy-compressed bytes.
    def compress(data)
      Snappy.deflate(data)
    end

    # @param data [String] compressed bytes.
    # @return [String] the decompressed payload, read via the streaming reader.
    def decompress(data)
      io = StringIO.new(data)
      Snappy::Reader.new(io).read
    end
  end
end
|
@@ -0,0 +1,96 @@
|
|
1
|
+
# frozen_string_literal: true

require "socket"

module Kafka

  # Opens sockets in a non-blocking fashion, ensuring that we're not stalling
  # for long periods of time.
  #
  # It's possible to set timeouts for connecting to the server, for reading data,
  # and for writing data. Whenever a timeout is exceeded, Errno::ETIMEDOUT is
  # raised.
  #
  class SocketWithTimeout

    # Opens a socket.
    #
    # @param host [String]
    # @param port [Integer]
    # @param connect_timeout [Integer] the connection timeout, in seconds.
    # @param timeout [Integer] the read and write timeout, in seconds.
    # @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
    def initialize(host, port, connect_timeout: nil, timeout: nil)
      # Resolve the host and build a sockaddr from the first returned address.
      addr = Socket.getaddrinfo(host, nil)
      sockaddr = Socket.pack_sockaddr_in(port, addr[0][3])

      @timeout = timeout

      # addr[0][0] is the address family name (e.g. "AF_INET"), looked up as
      # a Socket constant.
      @socket = Socket.new(Socket.const_get(addr[0][0]), Socket::SOCK_STREAM, 0)
      # Disable Nagle's algorithm so small requests are flushed immediately.
      @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1)

      begin
        # Initiate the socket connection in the background. If it doesn't fail
        # immediately it will raise an IO::WaitWritable (Errno::EINPROGRESS)
        # indicating the connection is in progress.
        @socket.connect_nonblock(sockaddr)
      rescue IO::WaitWritable
        # IO.select will block until the socket is writable or the timeout
        # is exceeded, whichever comes first.
        unless IO.select(nil, [@socket], nil, connect_timeout)
          # IO.select returns nil when the socket is not ready before timeout
          # seconds have elapsed
          @socket.close
          raise Errno::ETIMEDOUT
        end

        begin
          # Verify there is now a good connection.
          @socket.connect_nonblock(sockaddr)
        rescue Errno::EISCONN
          # The socket is connected, we're good!
        end
      end
    end

    # Reads bytes from the socket, possible with a timeout.
    #
    # NOTE(review): IO.select only guarantees that *some* data is readable;
    # Socket#read(num_bytes) may still block until the full num_bytes arrive —
    # so the effective timeout only covers the wait for the first byte. Confirm
    # whether callers rely on this.
    #
    # @param num_bytes [Integer] the number of bytes to read.
    # @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
    # @return [String] the data that was read from the socket.
    def read(num_bytes)
      unless IO.select([@socket], nil, nil, @timeout)
        raise Errno::ETIMEDOUT
      end

      @socket.read(num_bytes)
    rescue IO::EAGAINWaitReadable
      # A spurious wakeup — select said readable but the read would block;
      # wait again from the top.
      retry
    end

    # Writes bytes to the socket, possible with a timeout.
    #
    # @param bytes [String] the data that should be written to the socket.
    # @raise [Errno::ETIMEDOUT] if the timeout is exceeded.
    # @return [Integer] the number of bytes written.
    def write(bytes)
      unless IO.select(nil, [@socket], nil, @timeout)
        raise Errno::ETIMEDOUT
      end

      @socket.write(bytes)
    end

    # Closes the underlying socket.
    def close
      @socket.close
    end

    # @return [Boolean] whether the underlying socket has been closed.
    def closed?
      @socket.closed?
    end

    # Sets the external encoding used when reading from the socket.
    def set_encoding(encoding)
      @socket.set_encoding(encoding)
    end
  end
end
|
@@ -0,0 +1,66 @@
|
|
1
|
+
# frozen_string_literal: true

require "openssl"

module Kafka
  # Builds an OpenSSL::SSL::SSLContext from the client's SSL-related
  # configuration, or returns nil when no SSL option is set at all.
  module SslContext
    CLIENT_CERT_DELIMITER = "\n-----END CERTIFICATE-----\n"

    # @param ca_cert_file_path [String, Array<String>, nil] path(s) to CA cert files.
    # @param ca_cert [String, Array<String>, nil] PEM-encoded CA certificate(s).
    # @param client_cert [String, nil] PEM-encoded client certificate.
    # @param client_cert_key [String, nil] PEM-encoded client key.
    # @param client_cert_key_password [String, nil] passphrase for the client key.
    # @param client_cert_chain [String, nil] PEM bundle of intermediate certs.
    # @param ca_certs_from_system [Boolean, nil] also trust the system CA store.
    # @param verify_hostname [Boolean] verify the broker's hostname (Ruby >= 2.4).
    # @raise [ArgumentError] when cert/key/chain options are inconsistently combined.
    # @return [OpenSSL::SSL::SSLContext, nil] nil when SSL is not configured.
    def self.build(ca_cert_file_path: nil, ca_cert: nil, client_cert: nil, client_cert_key: nil, client_cert_key_password: nil, client_cert_chain: nil, ca_certs_from_system: nil, verify_hostname: true)
      ssl_options = [ca_cert_file_path, ca_cert, client_cert, client_cert_key,
                     client_cert_key_password, client_cert_chain, ca_certs_from_system]
      # SSL is opt-in: bail out unless at least one option was provided.
      return nil if ssl_options.none?

      context = OpenSSL::SSL::SSLContext.new

      if client_cert && client_cert_key
        # Read the key, with the passphrase only when one was given.
        cert_key =
          if client_cert_key_password
            OpenSSL::PKey.read(client_cert_key, client_cert_key_password)
          else
            OpenSSL::PKey.read(client_cert_key)
          end

        params = {
          cert: OpenSSL::X509::Certificate.new(client_cert),
          key: cert_key,
        }

        if client_cert_chain
          # Splitting on the END marker strips it; re-append so each piece is
          # a parseable PEM certificate again.
          params[:extra_chain_cert] = client_cert_chain.split(CLIENT_CERT_DELIMITER).map do |pem|
            OpenSSL::X509::Certificate.new(pem + CLIENT_CERT_DELIMITER)
          end
        end

        context.set_params(params)
      elsif client_cert && !client_cert_key
        raise ArgumentError, "Kafka client initialized with `ssl_client_cert` but no `ssl_client_cert_key`. Please provide both."
      elsif !client_cert && client_cert_key
        raise ArgumentError, "Kafka client initialized with `ssl_client_cert_key`, but no `ssl_client_cert`. Please provide both."
      elsif client_cert_chain && !client_cert
        raise ArgumentError, "Kafka client initialized with `ssl_client_cert_chain`, but no `ssl_client_cert`. Please provide cert, key and chain."
      elsif client_cert_chain && !client_cert_key
        raise ArgumentError, "Kafka client initialized with `ssl_client_cert_chain`, but no `ssl_client_cert_key`. Please provide cert, key and chain."
      elsif client_cert_key_password && !client_cert_key
        raise ArgumentError, "Kafka client initialized with `ssl_client_cert_key_password`, but no `ssl_client_cert_key`. Please provide both."
      end

      # Assemble the trust store from explicit certs, cert files, and/or the
      # system defaults.
      if ca_cert || ca_cert_file_path || ca_certs_from_system
        store = OpenSSL::X509::Store.new
        Array(ca_cert).each do |pem|
          store.add_cert(OpenSSL::X509::Certificate.new(pem))
        end
        Array(ca_cert_file_path).each do |path|
          store.add_file(path)
        end
        store.set_default_paths if ca_certs_from_system
        context.cert_store = store
      end

      context.verify_mode = OpenSSL::SSL::VERIFY_PEER
      # Verify certificate hostname if supported (ruby >= 2.4.0)
      context.verify_hostname = verify_hostname if context.respond_to?(:verify_hostname=)

      context
    end
  end
end
|