ruby-kafka 0.7.10 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.circleci/config.yml +179 -0
- data/.github/workflows/stale.yml +19 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +40 -0
- data/README.md +167 -0
- data/lib/kafka/async_producer.rb +60 -42
- data/lib/kafka/client.rb +92 -6
- data/lib/kafka/cluster.rb +82 -24
- data/lib/kafka/connection.rb +3 -0
- data/lib/kafka/consumer.rb +61 -11
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/consumer_group.rb +29 -6
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +20 -13
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetcher.rb +5 -2
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +12 -1
- data/lib/kafka/partitioner.rb +8 -3
- data/lib/kafka/producer.rb +13 -5
- data/lib/kafka/prometheus.rb +78 -79
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +2 -0
- data/lib/kafka/protocol/encoder.rb +1 -1
- data/lib/kafka/protocol/join_group_request.rb +8 -2
- data/lib/kafka/protocol/join_group_response.rb +9 -1
- data/lib/kafka/protocol/metadata_response.rb +1 -1
- data/lib/kafka/protocol/offset_fetch_request.rb +3 -1
- data/lib/kafka/protocol/record_batch.rb +2 -2
- data/lib/kafka/protocol/sasl_handshake_request.rb +1 -1
- data/lib/kafka/protocol/sync_group_response.rb +5 -2
- data/lib/kafka/protocol/txn_offset_commit_response.rb +34 -5
- data/lib/kafka/round_robin_assignment_strategy.rb +37 -39
- data/lib/kafka/sasl/awsmskiam.rb +133 -0
- data/lib/kafka/sasl_authenticator.rb +15 -2
- data/lib/kafka/ssl_context.rb +6 -5
- data/lib/kafka/tagged_logger.rb +1 -0
- data/lib/kafka/transaction_manager.rb +30 -10
- data/lib/kafka/version.rb +1 -1
- data/ruby-kafka.gemspec +5 -4
- metadata +39 -13
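
The partitioner.rb, digest.rb, crc32_hash.rb and murmur2_hash.rb entries above introduce a selectable hash function for partitioning, with murmur2 matching the Java client's default partitioner. A minimal sketch of opting in, assuming a hash_function: keyword on Kafka::Partitioner and a partitioner: option on Kafka.new (neither file is expanded in this section) and the digest-murmurhash gem installed alongside ruby-kafka:

    require "kafka"

    # Assumed API: murmur2 hashing instead of the default crc32.
    partitioner = Kafka::Partitioner.new(hash_function: :murmur2)
    kafka = Kafka.new(["kafka1:9092"], client_id: "my-app", partitioner: partitioner)

    # Keyed messages should now land on the same partitions the Java producer would pick.
    kafka.deliver_message("hello", key: "user-42", topic: "greetings")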
data/lib/kafka/sasl/awsmskiam.rb
ADDED
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+require 'base64'
+require 'json'
+
+module Kafka
+  module Sasl
+    class AwsMskIam
+      AWS_MSK_IAM = "AWS_MSK_IAM"
+
+      def initialize(aws_region:, access_key_id:, secret_key_id:, session_token: nil, logger:)
+        @semaphore = Mutex.new
+
+        @aws_region = aws_region
+        @access_key_id = access_key_id
+        @secret_key_id = secret_key_id
+        @session_token = session_token
+        @logger = TaggedLogger.new(logger)
+      end
+
+      def ident
+        AWS_MSK_IAM
+      end
+
+      def configured?
+        @aws_region && @access_key_id && @secret_key_id
+      end
+
+      def authenticate!(host, encoder, decoder)
+        @logger.debug "Authenticating #{@access_key_id} with SASL #{AWS_MSK_IAM}"
+
+        host_without_port = host.split(':', -1).first
+
+        time_now = Time.now.utc
+
+        msg = authentication_payload(host: host_without_port, time_now: time_now)
+        @logger.debug "Sending first client SASL AWS_MSK_IAM message:"
+        @logger.debug msg
+        encoder.write_bytes(msg)
+
+        begin
+          @server_first_message = decoder.bytes
+          @logger.debug "Received first server SASL AWS_MSK_IAM message: #{@server_first_message}"
+
+          raise Kafka::Error, "SASL AWS_MSK_IAM authentication failed: unknown error" unless @server_first_message
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL AWS_MSK_IAM authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{AWS_MSK_IAM} authentication successful"
+      end
+
+      private
+
+      def bin_to_hex(s)
+        s.each_byte.map { |b| b.to_s(16).rjust(2, '0') }.join
+      end
+
+      def digest
+        @digest ||= OpenSSL::Digest::SHA256.new
+      end
+
+      def authentication_payload(host:, time_now:)
+        {
+          'version' => "2020_10_22",
+          'host' => host,
+          'user-agent' => "ruby-kafka",
+          'action' => "kafka-cluster:Connect",
+          'x-amz-algorithm' => "AWS4-HMAC-SHA256",
+          'x-amz-credential' => @access_key_id + "/" + time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request",
+          'x-amz-date' => time_now.strftime("%Y%m%dT%H%M%SZ"),
+          'x-amz-signedheaders' => "host",
+          'x-amz-expires' => "900",
+          'x-amz-security-token' => @session_token,
+          'x-amz-signature' => signature(host: host, time_now: time_now)
+        }.delete_if { |_, v| v.nil? }.to_json
+      end
+
+      def canonical_request(host:, time_now:)
+        "GET\n" +
+        "/\n" +
+        canonical_query_string(time_now: time_now) + "\n" +
+        canonical_headers(host: host) + "\n" +
+        signed_headers + "\n" +
+        hashed_payload
+      end
+
+      def canonical_query_string(time_now:)
+        params = {
+          "Action" => "kafka-cluster:Connect",
+          "X-Amz-Algorithm" => "AWS4-HMAC-SHA256",
+          "X-Amz-Credential" => @access_key_id + "/" + time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request",
+          "X-Amz-Date" => time_now.strftime("%Y%m%dT%H%M%SZ"),
+          "X-Amz-Expires" => "900",
+          "X-Amz-Security-Token" => @session_token,
+          "X-Amz-SignedHeaders" => "host"
+        }.delete_if { |_, v| v.nil? }
+
+        URI.encode_www_form(params)
+      end
+
+      def canonical_headers(host:)
+        "host" + ":" + host + "\n"
+      end
+
+      def signed_headers
+        "host"
+      end
+
+      def hashed_payload
+        bin_to_hex(digest.digest(""))
+      end
+
+      def string_to_sign(host:, time_now:)
+        "AWS4-HMAC-SHA256" + "\n" +
+        time_now.strftime("%Y%m%dT%H%M%SZ") + "\n" +
+        time_now.strftime("%Y%m%d") + "/" + @aws_region + "/kafka-cluster/aws4_request" + "\n" +
+        bin_to_hex(digest.digest(canonical_request(host: host, time_now: time_now)))
+      end
+
+      def signature(host:, time_now:)
+        date_key = OpenSSL::HMAC.digest("SHA256", "AWS4" + @secret_key_id, time_now.strftime("%Y%m%d"))
+        date_region_key = OpenSSL::HMAC.digest("SHA256", date_key, @aws_region)
+        date_region_service_key = OpenSSL::HMAC.digest("SHA256", date_region_key, "kafka-cluster")
+        signing_key = OpenSSL::HMAC.digest("SHA256", date_region_service_key, "aws4_request")
+        signature = bin_to_hex(OpenSSL::HMAC.digest("SHA256", signing_key, string_to_sign(host: host, time_now: time_now)))
+
+        signature
+      end
+    end
+  end
+end
data/lib/kafka/sasl_authenticator.rb
CHANGED
@@ -4,13 +4,18 @@ require 'kafka/sasl/plain'
 require 'kafka/sasl/gssapi'
 require 'kafka/sasl/scram'
 require 'kafka/sasl/oauth'
+require 'kafka/sasl/awsmskiam'
 
 module Kafka
   class SaslAuthenticator
     def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:,
                    sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:,
                    sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:,
-                   sasl_oauth_token_provider:)
+                   sasl_oauth_token_provider:,
+                   sasl_aws_msk_iam_access_key_id:,
+                   sasl_aws_msk_iam_secret_key_id:,
+                   sasl_aws_msk_iam_aws_region:,
+                   sasl_aws_msk_iam_session_token: nil)
       @logger = TaggedLogger.new(logger)
 
       @plain = Sasl::Plain.new(
@@ -33,12 +38,20 @@ module Kafka
         logger: @logger,
       )
 
+      @aws_msk_iam = Sasl::AwsMskIam.new(
+        access_key_id: sasl_aws_msk_iam_access_key_id,
+        secret_key_id: sasl_aws_msk_iam_secret_key_id,
+        aws_region: sasl_aws_msk_iam_aws_region,
+        session_token: sasl_aws_msk_iam_session_token,
+        logger: @logger,
+      )
+
       @oauth = Sasl::OAuth.new(
        token_provider: sasl_oauth_token_provider,
        logger: @logger,
      )
 
-      @mechanism = [@gssapi, @plain, @scram, @oauth].find(&:configured?)
+      @mechanism = [@gssapi, @plain, @scram, @oauth, @aws_msk_iam].find(&:configured?)
     end
 
     def enabled?
data/lib/kafka/ssl_context.rb
CHANGED
@@ -47,18 +47,19 @@ module Kafka
         Array(ca_cert).each do |cert|
           store.add_cert(OpenSSL::X509::Certificate.new(cert))
         end
-        if ca_cert_file_path
-          store.add_file(ca_cert_file_path)
+        Array(ca_cert_file_path).each do |cert_file_path|
+          store.add_file(cert_file_path)
         end
         if ca_certs_from_system
           store.set_default_paths
         end
         ssl_context.cert_store = store
-        ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
-        # Verify certificate hostname if supported (ruby >= 2.4.0)
-        ssl_context.verify_hostname = verify_hostname if ssl_context.respond_to?(:verify_hostname=)
       end
 
+      ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
+      # Verify certificate hostname if supported (ruby >= 2.4.0)
+      ssl_context.verify_hostname = verify_hostname if ssl_context.respond_to?(:verify_hostname=)
+
       ssl_context
     end
   end
data/lib/kafka/tagged_logger.rb
CHANGED
data/lib/kafka/transaction_manager.rb
CHANGED
@@ -95,7 +95,7 @@ module Kafka
       force_transactional!
 
       if @transaction_state.uninitialized?
-        raise 'Transaction is uninitialized'
+        raise Kafka::InvalidTxnStateError, 'Transaction is uninitialized'
       end
 
       # Extract newly created partitions
@@ -138,8 +138,8 @@ module Kafka
 
     def begin_transaction
       force_transactional!
-      raise 'Transaction has already started' if @transaction_state.in_transaction?
-      raise 'Transaction is not ready' unless @transaction_state.ready?
+      raise Kafka::InvalidTxnStateError, 'Transaction has already started' if @transaction_state.in_transaction?
+      raise Kafka::InvalidTxnStateError, 'Transaction is not ready' unless @transaction_state.ready?
       @transaction_state.transition_to!(TransactionStateMachine::IN_TRANSACTION)
 
       @logger.info "Begin transaction #{@transactional_id}, Producer ID: #{@producer_id} (Epoch #{@producer_epoch})"
@@ -159,7 +159,7 @@ module Kafka
       end
 
       unless @transaction_state.in_transaction?
-        raise 'Transaction is not valid to commit'
+        raise Kafka::InvalidTxnStateError, 'Transaction is not valid to commit'
       end
 
       @transaction_state.transition_to!(TransactionStateMachine::COMMITTING_TRANSACTION)
@@ -192,7 +192,8 @@ module Kafka
       end
 
       unless @transaction_state.in_transaction?
-        raise 'Transaction is not valid to abort'
+        @logger.warn('Aborting transaction that was never opened on brokers')
+        return
       end
 
       @transaction_state.transition_to!(TransactionStateMachine::ABORTING_TRANSACTION)
@@ -221,7 +222,7 @@ module Kafka
       force_transactional!
 
       unless @transaction_state.in_transaction?
-        raise 'Transaction is not valid to send offsets'
+        raise Kafka::InvalidTxnStateError, 'Transaction is not valid to send offsets'
       end
 
       add_response = transaction_coordinator.add_offsets_to_txn(
@@ -232,14 +233,23 @@ module Kafka
       )
       Protocol.handle_error(add_response.error_code)
 
-      send_response = transaction_coordinator.txn_offset_commit(
+      send_response = group_coordinator(group_id: group_id).txn_offset_commit(
         transactional_id: @transactional_id,
         group_id: group_id,
         producer_id: @producer_id,
         producer_epoch: @producer_epoch,
         offsets: offsets
       )
-      Protocol.handle_error(send_response.error_code)
+      send_response.errors.each do |tp|
+        tp.partitions.each do |partition|
+          Protocol.handle_error(partition.error_code)
+        end
+      end
+
+      nil
+    rescue
+      @transaction_state.transition_to!(TransactionStateMachine::ERROR)
+      raise
     end
 
     def in_transaction?
@@ -250,6 +260,10 @@ module Kafka
       @transaction_state.error?
     end
 
+    def ready?
+      @transaction_state.ready?
+    end
+
     def close
       if in_transaction?
         @logger.warn("Aborting pending transaction ...")
@@ -264,11 +278,11 @@ module Kafka
 
     def force_transactional!
       unless transactional?
-        raise 'Please turn on transactional mode to use transaction'
+        raise Kafka::InvalidTxnStateError, 'Please turn on transactional mode to use transaction'
       end
 
       if @transactional_id.nil? || @transactional_id.empty?
-        raise 'Please provide a transaction_id to use transactional mode'
+        raise Kafka::InvalidTxnStateError, 'Please provide a transaction_id to use transactional mode'
       end
     end
 
@@ -278,6 +292,12 @@ module Kafka
       )
     end
 
+    def group_coordinator(group_id:)
+      @cluster.get_group_coordinator(
+        group_id: group_id
+      )
+    end
+
     def complete_transaction
       @transaction_state.transition_to!(TransactionStateMachine::READY)
       @transaction_partitions = {}
data/lib/kafka/version.rb
CHANGED
data/ruby-kafka.gemspec
CHANGED
@@ -18,7 +18,7 @@ Gem::Specification.new do |spec|
   DESC
 
   spec.homepage = "https://github.com/zendesk/ruby-kafka"
-  spec.license = "Apache License Version 2.0"
+  spec.license = "Apache-2.0"
 
   spec.required_ruby_version = '>= 2.1.0'
 
@@ -33,18 +33,19 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "rake", "~> 10.0"
   spec.add_development_dependency "rspec"
   spec.add_development_dependency "pry"
+  spec.add_development_dependency "digest-murmurhash"
   spec.add_development_dependency "dotenv"
   spec.add_development_dependency "docker-api"
   spec.add_development_dependency "rspec-benchmark"
-  spec.add_development_dependency "activesupport"
+  spec.add_development_dependency "activesupport", ">= 4.0", "< 6.1"
   spec.add_development_dependency "snappy"
   spec.add_development_dependency "extlz4"
   spec.add_development_dependency "zstd-ruby"
   spec.add_development_dependency "colored"
   spec.add_development_dependency "rspec_junit_formatter", "0.2.2"
-  spec.add_development_dependency "dogstatsd-ruby", ">= 3.0.0", "< 5.0.0"
+  spec.add_development_dependency "dogstatsd-ruby", ">= 4.0.0", "< 5.0.0"
   spec.add_development_dependency "statsd-ruby"
-  spec.add_development_dependency "prometheus-client"
+  spec.add_development_dependency "prometheus-client", "~> 0.10.0"
   spec.add_development_dependency "ruby-prof"
   spec.add_development_dependency "timecop"
   spec.add_development_dependency "rubocop", "~> 0.49.1"
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-kafka
 version: !ruby/object:Gem::Version
-  version: 0.7.10
+  version: 1.5.0
 platform: ruby
 authors:
 - Daniel Schierbeck
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2022-05-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: digest-crc
@@ -80,6 +80,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: digest-murmurhash
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: dotenv
   requirement: !ruby/object:Gem::Requirement
@@ -128,14 +142,20 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '4.0'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '6.1'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '4.0'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '6.1'
 - !ruby/object:Gem::Dependency
   name: snappy
   requirement: !ruby/object:Gem::Requirement
@@ -212,7 +232,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 3.0.0
+        version: 4.0.0
     - - "<"
       - !ruby/object:Gem::Version
         version: 5.0.0
@@ -222,7 +242,7 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 3.0.0
+        version: 4.0.0
     - - "<"
      - !ruby/object:Gem::Version
       version: 5.0.0
@@ -244,16 +264,16 @@ dependencies:
   name: prometheus-client
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - "~>"
      - !ruby/object:Gem::Version
-        version: '0'
+        version: 0.10.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - "~>"
      - !ruby/object:Gem::Version
-        version: '0'
+        version: 0.10.0
 - !ruby/object:Gem::Dependency
   name: ruby-prof
   requirement: !ruby/object:Gem::Requirement
@@ -332,6 +352,7 @@ extensions: []
 extra_rdoc_files: []
 files:
 - ".circleci/config.yml"
+- ".github/workflows/stale.yml"
 - ".gitignore"
 - ".readygo"
 - ".rspec"
@@ -369,7 +390,10 @@ files:
 - lib/kafka/connection_builder.rb
 - lib/kafka/consumer.rb
 - lib/kafka/consumer_group.rb
+- lib/kafka/consumer_group/assignor.rb
+- lib/kafka/crc32_hash.rb
 - lib/kafka/datadog.rb
+- lib/kafka/digest.rb
 - lib/kafka/fetch_operation.rb
 - lib/kafka/fetched_batch.rb
 - lib/kafka/fetched_batch_generator.rb
@@ -379,8 +403,10 @@ files:
 - lib/kafka/gzip_codec.rb
 - lib/kafka/heartbeat.rb
 - lib/kafka/instrumenter.rb
+- lib/kafka/interceptors.rb
 - lib/kafka/lz4_codec.rb
 - lib/kafka/message_buffer.rb
+- lib/kafka/murmur2_hash.rb
 - lib/kafka/offset_manager.rb
 - lib/kafka/partitioner.rb
 - lib/kafka/pause.rb
@@ -450,6 +476,7 @@ files:
 - lib/kafka/protocol/txn_offset_commit_request.rb
 - lib/kafka/protocol/txn_offset_commit_response.rb
 - lib/kafka/round_robin_assignment_strategy.rb
+- lib/kafka/sasl/awsmskiam.rb
 - lib/kafka/sasl/gssapi.rb
 - lib/kafka/sasl/oauth.rb
 - lib/kafka/sasl/plain.rb
@@ -469,7 +496,7 @@ files:
 - ruby-kafka.gemspec
 homepage: https://github.com/zendesk/ruby-kafka
 licenses:
-- Apache License Version 2.0
+- Apache-2.0
 metadata: {}
 post_install_message:
 rdoc_options: []
@@ -486,8 +513,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.7.6
+rubygems_version: 3.1.2
 signing_key:
 specification_version: 4
 summary: A client library for the Kafka distributed commit log.