ruby-kafka 0.7.4 → 1.1.0

Files changed (59)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +168 -3
  3. data/.github/workflows/stale.yml +19 -0
  4. data/CHANGELOG.md +48 -0
  5. data/README.md +59 -0
  6. data/lib/kafka/async_producer.rb +30 -9
  7. data/lib/kafka/broker.rb +13 -1
  8. data/lib/kafka/broker_pool.rb +1 -1
  9. data/lib/kafka/client.rb +63 -6
  10. data/lib/kafka/cluster.rb +53 -1
  11. data/lib/kafka/compression.rb +13 -11
  12. data/lib/kafka/compressor.rb +1 -0
  13. data/lib/kafka/connection.rb +7 -1
  14. data/lib/kafka/connection_builder.rb +1 -1
  15. data/lib/kafka/consumer.rb +98 -17
  16. data/lib/kafka/consumer_group.rb +20 -2
  17. data/lib/kafka/datadog.rb +32 -12
  18. data/lib/kafka/fetch_operation.rb +1 -1
  19. data/lib/kafka/fetched_batch.rb +5 -1
  20. data/lib/kafka/fetched_batch_generator.rb +5 -2
  21. data/lib/kafka/fetched_message.rb +1 -0
  22. data/lib/kafka/fetched_offset_resolver.rb +1 -1
  23. data/lib/kafka/fetcher.rb +13 -6
  24. data/lib/kafka/gzip_codec.rb +4 -0
  25. data/lib/kafka/heartbeat.rb +8 -3
  26. data/lib/kafka/lz4_codec.rb +4 -0
  27. data/lib/kafka/offset_manager.rb +13 -2
  28. data/lib/kafka/produce_operation.rb +1 -1
  29. data/lib/kafka/producer.rb +33 -8
  30. data/lib/kafka/prometheus.rb +316 -0
  31. data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
  32. data/lib/kafka/protocol/add_offsets_to_txn_response.rb +19 -0
  33. data/lib/kafka/protocol/join_group_request.rb +8 -2
  34. data/lib/kafka/protocol/metadata_response.rb +1 -1
  35. data/lib/kafka/protocol/offset_fetch_request.rb +3 -1
  36. data/lib/kafka/protocol/produce_request.rb +3 -1
  37. data/lib/kafka/protocol/record_batch.rb +7 -4
  38. data/lib/kafka/protocol/sasl_handshake_request.rb +1 -1
  39. data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
  40. data/lib/kafka/protocol/txn_offset_commit_response.rb +18 -0
  41. data/lib/kafka/protocol.rb +8 -0
  42. data/lib/kafka/round_robin_assignment_strategy.rb +10 -7
  43. data/lib/kafka/sasl/gssapi.rb +1 -1
  44. data/lib/kafka/sasl/oauth.rb +64 -0
  45. data/lib/kafka/sasl/plain.rb +1 -1
  46. data/lib/kafka/sasl/scram.rb +16 -13
  47. data/lib/kafka/sasl_authenticator.rb +10 -3
  48. data/lib/kafka/snappy_codec.rb +4 -0
  49. data/lib/kafka/ssl_context.rb +5 -1
  50. data/lib/kafka/ssl_socket_with_timeout.rb +1 -0
  51. data/lib/kafka/statsd.rb +10 -1
  52. data/lib/kafka/tagged_logger.rb +77 -0
  53. data/lib/kafka/transaction_manager.rb +26 -1
  54. data/lib/kafka/transaction_state_machine.rb +1 -1
  55. data/lib/kafka/version.rb +1 -1
  56. data/lib/kafka/zstd_codec.rb +27 -0
  57. data/lib/kafka.rb +4 -0
  58. data/ruby-kafka.gemspec +5 -3
  59. metadata +50 -7
data/lib/kafka/protocol/offset_fetch_request.rb CHANGED
@@ -12,8 +12,10 @@ module Kafka
       OFFSET_FETCH_API
     end
 
+    # setting topics to nil fetches all offsets for a consumer group
+    # and that feature is only available in API version 2+
     def api_version
-      1
+      @topics.nil? ? 2 : 1
     end
 
     def response_class
data/lib/kafka/protocol/produce_request.rb CHANGED
@@ -27,6 +27,8 @@ module Kafka
     # Value => bytes
     #
     class ProduceRequest
+      API_MIN_VERSION = 3
+
       attr_reader :transactional_id, :required_acks, :timeout, :messages_for_topics, :compressor
 
       # @param required_acks [Integer]
@@ -45,7 +47,7 @@ module Kafka
       end
 
       def api_version
-        3
+        compressor.codec.nil? ? API_MIN_VERSION : [compressor.codec.produce_api_min_version, API_MIN_VERSION].max
       end
 
       def response_class
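
Note on the api_version change above: the Produce request now advertises the highest API version demanded by the configured compression codec, so a codec that needs a newer broker (zstd reports produce_api_min_version 7 elsewhere in this release) raises the request version, while uncompressed and gzip/snappy/lz4 batches stay at version 3. A minimal sketch of that selection logic, using a throwaway stand-in for the codec objects:

    # Stand-in codec exposing only the method ProduceRequest consults.
    FakeCodec = Struct.new(:produce_api_min_version)

    API_MIN_VERSION = 3

    def produce_api_version(codec)
      codec.nil? ? API_MIN_VERSION : [codec.produce_api_min_version, API_MIN_VERSION].max
    end

    produce_api_version(nil)              # => 3 (no compression)
    produce_api_version(FakeCodec.new(0)) # => 3 (gzip/snappy/lz4)
    produce_api_version(FakeCodec.new(7)) # => 7 (zstd, Kafka 2.1+ brokers)
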
data/lib/kafka/protocol/record_batch.rb CHANGED
@@ -1,3 +1,4 @@
+require 'bigdecimal'
 require 'digest/crc32'
 require 'kafka/protocol/record'
 
@@ -11,6 +12,7 @@ module Kafka
       CODEC_ID_MASK = 0b00000111
       IN_TRANSACTION_MASK = 0b00010000
       IS_CONTROL_BATCH_MASK = 0b00100000
+      TIMESTAMP_TYPE_MASK = 0b001000
 
       attr_reader :records, :first_offset, :first_timestamp, :partition_leader_epoch, :in_transaction, :is_control_batch, :last_offset_delta, :max_timestamp, :producer_id, :producer_epoch, :first_sequence
 
@@ -130,7 +132,7 @@ module Kafka
 
         records.each_with_index do |record, index|
           record.offset_delta = index
-          record.timestamp_delta = (record.create_time - first_timestamp).to_i
+          record.timestamp_delta = ((record.create_time - first_timestamp) * 1000).to_i
         end
         @last_offset_delta = records.length - 1
       end
@@ -163,10 +165,11 @@ module Kafka
         codec_id = attributes & CODEC_ID_MASK
         in_transaction = (attributes & IN_TRANSACTION_MASK) > 0
         is_control_batch = (attributes & IS_CONTROL_BATCH_MASK) > 0
+        log_append_time = (attributes & TIMESTAMP_TYPE_MASK) != 0
 
         last_offset_delta = record_batch_decoder.int32
-        first_timestamp = Time.at(record_batch_decoder.int64 / 1000)
-        max_timestamp = Time.at(record_batch_decoder.int64 / 1000)
+        first_timestamp = Time.at(record_batch_decoder.int64 / BigDecimal(1000))
+        max_timestamp = Time.at(record_batch_decoder.int64 / BigDecimal(1000))
 
         producer_id = record_batch_decoder.int64
         producer_epoch = record_batch_decoder.int16
@@ -186,7 +189,7 @@ module Kafka
         until records_array_decoder.eof?
           record = Record.decode(records_array_decoder)
           record.offset = first_offset + record.offset_delta
-          record.create_time = log_append_time && max_timestamp ? max_timestamp : first_timestamp + record.timestamp_delta
+          record.create_time = log_append_time && max_timestamp ? max_timestamp : first_timestamp + record.timestamp_delta / BigDecimal(1000)
           records_array << record
         end
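
The timestamp fixes above address a unit mismatch: the record batch wire format carries int64 millisecond timestamps, while create_time is a Ruby Time measured in seconds. The encoder now converts the delta to whole milliseconds, and the decoder divides by BigDecimal(1000), so fractional seconds survive instead of being truncated by integer division. A small worked example of the round trip:

    require "bigdecimal"

    first_timestamp = Time.at(1_546_300_800)   # batch base timestamp
    create_time     = first_timestamp + 0.5    # record produced 500 ms later

    # Encoding: the delta goes on the wire as whole milliseconds.
    timestamp_delta = ((create_time - first_timestamp) * 1000).to_i   # => 500
    # The old `(create_time - first_timestamp).to_i` truncated this to 0.

    # Decoding: dividing by BigDecimal(1000) keeps the fraction that the
    # old integer division (`int64 / 1000`) threw away.
    first_timestamp + timestamp_delta / BigDecimal(1000)              # => first_timestamp + 0.5
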
data/lib/kafka/protocol/sasl_handshake_request.rb CHANGED
@@ -8,7 +8,7 @@ module Kafka
 
     class SaslHandshakeRequest
 
-      SUPPORTED_MECHANISMS = %w(GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512)
+      SUPPORTED_MECHANISMS = %w(GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512 OAUTHBEARER)
 
       def initialize(mechanism)
         unless SUPPORTED_MECHANISMS.include?(mechanism)
data/lib/kafka/protocol/txn_offset_commit_request.rb ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class TxnOffsetCommitRequest
+
+      def api_key
+        TXN_OFFSET_COMMIT_API
+      end
+
+      def api_version
+        2
+      end
+
+      def response_class
+        TxnOffsetCommitResponse
+      end
+
+      def initialize(transactional_id:, group_id:, producer_id:, producer_epoch:, offsets:)
+        @transactional_id = transactional_id
+        @producer_id = producer_id
+        @producer_epoch = producer_epoch
+        @group_id = group_id
+        @offsets = offsets
+      end
+
+      def encode(encoder)
+        encoder.write_string(@transactional_id.to_s)
+        encoder.write_string(@group_id)
+        encoder.write_int64(@producer_id)
+        encoder.write_int16(@producer_epoch)
+
+        encoder.write_array(@offsets) do |topic, partitions|
+          encoder.write_string(topic)
+          encoder.write_array(partitions) do |partition, offset|
+            encoder.write_int32(partition)
+            encoder.write_int64(offset[:offset])
+            encoder.write_string(nil) # metadata
+            encoder.write_int32(offset[:leader_epoch])
+          end
+        end
+      end
+
+    end
+  end
+end
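
TxnOffsetCommitRequest#encode walks a nested structure: topics map to partitions, and each partition maps to a hash with :offset and :leader_epoch, exactly the keys read in the inner write_array block. A hypothetical payload shaped that way (all names and numbers below are illustrative only):

    offsets = {
      "orders" => {
        0 => { offset: 41, leader_epoch: 7 },
        1 => { offset: 18, leader_epoch: 7 },
      },
    }

    request = Kafka::Protocol::TxnOffsetCommitRequest.new(
      transactional_id: "order-processor-1",
      group_id: "order-consumers",
      producer_id: 4002,
      producer_epoch: 0,
      offsets: offsets
    )
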
data/lib/kafka/protocol/txn_offset_commit_response.rb ADDED
@@ -0,0 +1,18 @@
+module Kafka
+  module Protocol
+    class TxnOffsetCommitResponse
+
+      attr_reader :error_code
+
+      def initialize(error_code:)
+        @error_code = error_code
+      end
+
+      def self.decode(decoder)
+        _throttle_time_ms = decoder.int32
+        error_code = decoder.int16
+        new(error_code: error_code)
+      end
+    end
+  end
+end
data/lib/kafka/protocol.rb CHANGED
@@ -33,7 +33,9 @@ module Kafka
     DELETE_TOPICS_API = 20
     INIT_PRODUCER_ID_API = 22
     ADD_PARTITIONS_TO_TXN_API = 24
+    ADD_OFFSETS_TO_TXN_API = 25
     END_TXN_API = 26
+    TXN_OFFSET_COMMIT_API = 28
     DESCRIBE_CONFIGS_API = 32
     ALTER_CONFIGS_API = 33
     CREATE_PARTITIONS_API = 37
@@ -57,7 +59,9 @@ module Kafka
       DELETE_TOPICS_API => :delete_topics,
       INIT_PRODUCER_ID_API => :init_producer_id_api,
       ADD_PARTITIONS_TO_TXN_API => :add_partitions_to_txn_api,
+      ADD_OFFSETS_TO_TXN_API => :add_offsets_to_txn_api,
       END_TXN_API => :end_txn_api,
+      TXN_OFFSET_COMMIT_API => :txn_offset_commit_api,
       DESCRIBE_CONFIGS_API => :describe_configs_api,
       CREATE_PARTITIONS_API => :create_partitions
     }
@@ -177,6 +181,10 @@ require "kafka/protocol/fetch_request"
 require "kafka/protocol/fetch_response"
 require "kafka/protocol/list_offset_request"
 require "kafka/protocol/list_offset_response"
+require "kafka/protocol/add_offsets_to_txn_request"
+require "kafka/protocol/add_offsets_to_txn_response"
+require "kafka/protocol/txn_offset_commit_request"
+require "kafka/protocol/txn_offset_commit_response"
 require "kafka/protocol/find_coordinator_request"
 require "kafka/protocol/find_coordinator_response"
 require "kafka/protocol/join_group_request"
data/lib/kafka/round_robin_assignment_strategy.rb CHANGED
@@ -24,20 +24,23 @@ module Kafka
         group_assignment[member_id] = Protocol::MemberAssignment.new
       end
 
-      topics.each do |topic|
+      topic_partitions = topics.flat_map do |topic|
         begin
           partitions = @cluster.partitions_for(topic).map(&:partition_id)
         rescue UnknownTopicOrPartition
           raise UnknownTopicOrPartition, "unknown topic #{topic}"
         end
+        Array.new(partitions.count) { topic }.zip(partitions)
+      end
 
-        partitions_per_member = partitions.group_by {|partition_id|
-          partition_id % members.count
-        }.values
+      partitions_per_member = topic_partitions.group_by.with_index do |_, index|
+        index % members.count
+      end.values
 
-        members.zip(partitions_per_member).each do |member_id, member_partitions|
-          unless member_partitions.nil?
-            group_assignment[member_id].assign(topic, member_partitions)
+      members.zip(partitions_per_member).each do |member_id, member_partitions|
+        unless member_partitions.nil?
+          member_partitions.each do |topic, partition|
+            group_assignment[member_id].assign(topic, [partition])
           end
         end
       end
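
The rewritten strategy first flattens every topic into [topic, partition] pairs and then deals the pairs out by index, so partitions of different topics no longer pile onto the same member just because they share a partition id, as the old `partition_id % members.count` grouping allowed. A standalone sketch of the same distribution, with the cluster lookup replaced by a plain hash:

    # Stand-in for @cluster.partitions_for(topic).map(&:partition_id)
    partitions_by_topic = { "greetings" => [0, 1, 2], "farewells" => [0, 1] }
    members = ["consumer-a", "consumer-b"]

    topic_partitions = partitions_by_topic.flat_map do |topic, partitions|
      Array.new(partitions.count) { topic }.zip(partitions)
    end
    # => [["greetings", 0], ["greetings", 1], ["greetings", 2], ["farewells", 0], ["farewells", 1]]

    partitions_per_member = topic_partitions.group_by.with_index do |_, index|
      index % members.count
    end.values

    members.zip(partitions_per_member).to_h
    # => {"consumer-a"=>[["greetings", 0], ["greetings", 2], ["farewells", 1]],
    #     "consumer-b"=>[["greetings", 1], ["farewells", 0]]}
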
data/lib/kafka/sasl/gssapi.rb CHANGED
@@ -7,7 +7,7 @@ module Kafka
       GSSAPI_CONFIDENTIALITY = false
 
       def initialize(logger:, principal:, keytab:)
-        @logger = logger
+        @logger = TaggedLogger.new(logger)
         @principal = principal
         @keytab = keytab
       end
data/lib/kafka/sasl/oauth.rb ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Sasl
+    class OAuth
+      OAUTH_IDENT = "OAUTHBEARER"
+
+      # token_provider: THE FOLLOWING INTERFACE MUST BE FULFILLED:
+      #
+      # [REQUIRED] TokenProvider#token - Returns an ID/Access Token to be sent to the Kafka client.
+      # The implementation should ensure token reuse so that multiple calls at connect time do not
+      # create multiple tokens. The implementation should also periodically refresh the token in
+      # order to guarantee that each call returns an unexpired token. A timeout error should
+      # be returned after a short period of inactivity so that the broker can log debugging
+      # info and retry.
+      #
+      # [OPTIONAL] TokenProvider#extensions - Returns a map of key-value pairs that can be sent with the
+      # SASL/OAUTHBEARER initial client response. If not provided, the values are ignored. This feature
+      # is only available in Kafka >= 2.1.0.
+      #
+      def initialize(logger:, token_provider:)
+        @logger = TaggedLogger.new(logger)
+        @token_provider = token_provider
+      end
+
+      def ident
+        OAUTH_IDENT
+      end
+
+      def configured?
+        @token_provider
+      end
+
+      def authenticate!(host, encoder, decoder)
+        # Send SASLOauthBearerClientResponse with token
+        @logger.debug "Authenticating to #{host} with SASL #{OAUTH_IDENT}"
+
+        encoder.write_bytes(initial_client_response)
+
+        begin
+          # receive SASL OAuthBearer Server Response
+          msg = decoder.bytes
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: unknown error" unless msg
+        rescue Errno::ETIMEDOUT, EOFError => e
+          raise Kafka::Error, "SASL #{OAUTH_IDENT} authentication failed: #{e.message}"
+        end
+
+        @logger.debug "SASL #{OAUTH_IDENT} authentication successful."
+      end
+
+      private
+
+      def initial_client_response
+        raise Kafka::TokenMethodNotImplementedError, "Token provider doesn't define 'token'" unless @token_provider.respond_to? :token
+        "n,,\x01auth=Bearer #{@token_provider.token}#{token_extensions}\x01\x01"
+      end
+
+      def token_extensions
+        return nil unless @token_provider.respond_to? :extensions
+        "\x01#{@token_provider.extensions.map {|e| e.join("=")}.join("\x01")}"
+      end
+    end
+  end
+end
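
Any object responding to #token (and optionally #extensions) can serve as the provider; it reaches this class through the new sasl_oauth_token_provider option wired into SaslAuthenticator below. A minimal sketch of a caching provider, where fetch_token_from_idp is a hypothetical call to your identity provider:

    class OAuthTokenProvider
      TOKEN_TTL = 300 # seconds; refresh well before the broker sees an expired token

      def token
        # Reuse the cached token so several connections opened at once
        # don't each mint a new one.
        if @token.nil? || Time.now - @fetched_at > TOKEN_TTL
          @token = fetch_token_from_idp   # hypothetical helper
          @fetched_at = Time.now
        end
        @token
      end

      # Optional; only honoured by Kafka >= 2.1.0 brokers.
      def extensions
        { "traceId" => "abc123" }
      end
    end

    kafka = Kafka.new(["kafka1:9093"], sasl_oauth_token_provider: OAuthTokenProvider.new)
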
data/lib/kafka/sasl/plain.rb CHANGED
@@ -6,7 +6,7 @@ module Kafka
       PLAIN_IDENT = "PLAIN"
 
       def initialize(logger:, authzid:, username:, password:)
-        @logger = logger
+        @logger = TaggedLogger.new(logger)
         @authzid = authzid
         @username = username
         @password = password
data/lib/kafka/sasl/scram.rb CHANGED
@@ -12,9 +12,10 @@ module Kafka
       }.freeze
 
       def initialize(username:, password:, mechanism: 'sha256', logger:)
+        @semaphore = Mutex.new
         @username = username
         @password = password
-        @logger = logger
+        @logger = TaggedLogger.new(logger)
 
         if mechanism
           @mechanism = MECHANISMS.fetch(mechanism) do
@@ -35,22 +36,24 @@ module Kafka
         @logger.debug "Authenticating #{@username} with SASL #{@mechanism}"
 
         begin
-          msg = first_message
-          @logger.debug "Sending first client SASL SCRAM message: #{msg}"
-          encoder.write_bytes(msg)
+          @semaphore.synchronize do
+            msg = first_message
+            @logger.debug "Sending first client SASL SCRAM message: #{msg}"
+            encoder.write_bytes(msg)
 
-          @server_first_message = decoder.bytes
-          @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"
+            @server_first_message = decoder.bytes
+            @logger.debug "Received first server SASL SCRAM message: #{@server_first_message}"
 
-          msg = final_message
-          @logger.debug "Sending final client SASL SCRAM message: #{msg}"
-          encoder.write_bytes(msg)
+            msg = final_message
+            @logger.debug "Sending final client SASL SCRAM message: #{msg}"
+            encoder.write_bytes(msg)
 
-          response = parse_response(decoder.bytes)
-          @logger.debug "Received last server SASL SCRAM message: #{response}"
+            response = parse_response(decoder.bytes)
+            @logger.debug "Received last server SASL SCRAM message: #{response}"
 
-          raise FailedScramAuthentication, response['e'] if response['e']
-          raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
+            raise FailedScramAuthentication, response['e'] if response['e']
+            raise FailedScramAuthentication, "Invalid server signature" if response['v'] != server_signature
+          end
         rescue EOFError => e
           raise FailedScramAuthentication, e.message
         end
data/lib/kafka/sasl_authenticator.rb CHANGED
@@ -3,13 +3,15 @@
 require 'kafka/sasl/plain'
 require 'kafka/sasl/gssapi'
 require 'kafka/sasl/scram'
+require 'kafka/sasl/oauth'
 
 module Kafka
   class SaslAuthenticator
     def initialize(logger:, sasl_gssapi_principal:, sasl_gssapi_keytab:,
                    sasl_plain_authzid:, sasl_plain_username:, sasl_plain_password:,
-                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:)
-      @logger = logger
+                   sasl_scram_username:, sasl_scram_password:, sasl_scram_mechanism:,
+                   sasl_oauth_token_provider:)
+      @logger = TaggedLogger.new(logger)
 
       @plain = Sasl::Plain.new(
         authzid: sasl_plain_authzid,
@@ -31,7 +33,12 @@ module Kafka
         logger: @logger,
       )
 
-      @mechanism = [@gssapi, @plain, @scram].find(&:configured?)
+      @oauth = Sasl::OAuth.new(
+        token_provider: sasl_oauth_token_provider,
+        logger: @logger,
+      )
+
+      @mechanism = [@gssapi, @plain, @scram, @oauth].find(&:configured?)
     end
 
     def enabled?
data/lib/kafka/snappy_codec.rb CHANGED
@@ -6,6 +6,10 @@ module Kafka
       2
     end
 
+    def produce_api_min_version
+      0
+    end
+
     def load
       require "snappy"
     rescue LoadError
data/lib/kafka/ssl_context.rb CHANGED
@@ -6,7 +6,7 @@ module Kafka
   module SslContext
     CLIENT_CERT_DELIMITER = "\n-----END CERTIFICATE-----\n"
 
-    def self.build(ca_cert_file_path: nil, ca_cert: nil, client_cert: nil, client_cert_key: nil, client_cert_key_password: nil, client_cert_chain: nil, ca_certs_from_system: nil)
+    def self.build(ca_cert_file_path: nil, ca_cert: nil, client_cert: nil, client_cert_key: nil, client_cert_key_password: nil, client_cert_chain: nil, ca_certs_from_system: nil, verify_hostname: true)
       return nil unless ca_cert_file_path || ca_cert || client_cert || client_cert_key || client_cert_key_password || client_cert_chain || ca_certs_from_system
 
       ssl_context = OpenSSL::SSL::SSLContext.new
@@ -56,6 +56,10 @@ module Kafka
         ssl_context.cert_store = store
       end
 
+      ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER
+      # Verify certificate hostname if supported (ruby >= 2.4.0)
+      ssl_context.verify_hostname = verify_hostname if ssl_context.respond_to?(:verify_hostname=)
+
       ssl_context
     end
   end
data/lib/kafka/ssl_socket_with_timeout.rb CHANGED
@@ -57,6 +57,7 @@ module Kafka
 
       # once that's connected, we can start initiating the ssl socket
       @ssl_socket = OpenSSL::SSL::SSLSocket.new(@tcp_socket, ssl_context)
+      @ssl_socket.hostname = host
 
       begin
         # Initiate the socket connection in the background. If it doesn't fail
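
Taken together, these two changes make TLS connections verify the broker: SslContext.build switches on VERIFY_PEER and, where the Ruby OpenSSL bindings support it, hostname verification, while the socket now sets its hostname so SNI and the hostname check use the name the client dialed. A short sketch of the same OpenSSL calls in isolation, assuming a reachable TLS listener on a hypothetical broker:

    require "openssl"
    require "socket"

    host = "kafka1.example.com"   # hypothetical broker
    ctx = OpenSSL::SSL::SSLContext.new
    ctx.cert_store = OpenSSL::X509::Store.new.tap(&:set_default_paths)   # trust the system CAs
    ctx.verify_mode = OpenSSL::SSL::VERIFY_PEER
    ctx.verify_hostname = true if ctx.respond_to?(:verify_hostname=)     # Ruby >= 2.4

    tcp = TCPSocket.new(host, 9093)
    ssl = OpenSSL::SSL::SSLSocket.new(tcp, ctx)
    ssl.hostname = host   # sends SNI and feeds the hostname check
    ssl.connect           # raises if the certificate chain or hostname doesn't verify
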
data/lib/kafka/statsd.rb CHANGED
@@ -107,7 +107,6 @@ module Kafka
     end
 
     def process_batch(event)
-      lag = event.payload.fetch(:offset_lag)
       messages = event.payload.fetch(:message_count)
       client = event.payload.fetch(:client_id)
       group_id = event.payload.fetch(:group_id)
@@ -120,7 +119,17 @@ module Kafka
         timing("consumer.#{client}.#{group_id}.#{topic}.#{partition}.process_batch.latency", event.duration)
         count("consumer.#{client}.#{group_id}.#{topic}.#{partition}.messages", messages)
       end
+    end
+
+    def fetch_batch(event)
+      lag = event.payload.fetch(:offset_lag)
+      batch_size = event.payload.fetch(:message_count)
+      client = event.payload.fetch(:client_id)
+      group_id = event.payload.fetch(:group_id)
+      topic = event.payload.fetch(:topic)
+      partition = event.payload.fetch(:partition)
 
+      count("consumer.#{client}.#{group_id}.#{topic}.#{partition}.batch_size", batch_size)
       gauge("consumer.#{client}.#{group_id}.#{topic}.#{partition}.lag", lag)
     end
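
Offset lag is now reported from a new fetch_batch subscriber (together with a batch_size counter) rather than from process_batch, so lag is emitted when a batch arrives instead of only after it has been processed. With an illustrative payload, the method above would emit metrics like these (client id, group, topic, and values are made up):

    # event.payload => { client_id: "myapp", group_id: "mygroup", topic: "orders",
    #                    partition: 0, offset_lag: 42, message_count: 100 }
    #
    # fetch_batch(event) then reports:
    #   count "consumer.myapp.mygroup.orders.0.batch_size", 100
    #   gauge "consumer.myapp.mygroup.orders.0.lag", 42
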
data/lib/kafka/tagged_logger.rb ADDED
@@ -0,0 +1,77 @@
+# Basic implementation of a tagged logger that matches the API of
+# ActiveSupport::TaggedLogging.
+
+require 'delegate'
+require 'logger'
+
+module Kafka
+  class TaggedLogger < SimpleDelegator
+
+    %i(debug info warn error).each do |method|
+      define_method method do |msg_or_progname, &block|
+        if block_given?
+          super(msg_or_progname, &block)
+        else
+          super("#{tags_text}#{msg_or_progname}")
+        end
+      end
+    end
+
+    def tagged(*tags)
+      new_tags = push_tags(*tags)
+      yield self
+    ensure
+      pop_tags(new_tags.size)
+    end
+
+    def push_tags(*tags)
+      tags.flatten.reject { |t| t.nil? || t.empty? }.tap do |new_tags|
+        current_tags.concat new_tags
+      end
+    end
+
+    def pop_tags(size = 1)
+      current_tags.pop size
+    end
+
+    def clear_tags!
+      current_tags.clear
+    end
+
+    def current_tags
+      # We use our object ID here to avoid conflicting with other instances
+      thread_key = @thread_key ||= "kafka_tagged_logging_tags:#{object_id}".freeze
+      Thread.current[thread_key] ||= []
+    end
+
+    def tags_text
+      tags = current_tags
+      if tags.any?
+        tags.collect { |tag| "[#{tag}] " }.join
+      end
+    end
+
+    def self.new(logger_or_stream = nil)
+      # don't keep wrapping the same logger over and over again
+      return logger_or_stream if logger_or_stream.is_a?(TaggedLogger)
+      super
+    end
+
+    def initialize(logger_or_stream = nil)
+      logger = if %w(info debug warn error).all? { |s| logger_or_stream.respond_to?(s) }
+        logger_or_stream
+      elsif logger_or_stream
+        ::Logger.new(logger_or_stream)
+      else
+        ::Logger.new(nil)
+      end
+      super(logger)
+    end
+
+    def flush
+      clear_tags!
+      super if defined?(super)
+    end
+  end
+
+end
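
TaggedLogger wraps any Logger-compatible object (or an IO stream) and prefixes each message with the current tag stack, mirroring the ActiveSupport::TaggedLogging API; the rest of this release funnels its loggers through it, as the repeated `TaggedLogger.new(logger)` changes show. A small usage sketch:

    logger = Kafka::TaggedLogger.new($stdout)

    logger.tagged("broker 1", "sasl") do |l|
      l.info "authentication successful"
      # logged as "[broker 1] [sasl] authentication successful"
    end

    logger.info "no tags outside the block"
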
data/lib/kafka/transaction_manager.rb CHANGED
@@ -19,7 +19,7 @@ module Kafka
       transactional_timeout: DEFAULT_TRANSACTION_TIMEOUT
     )
       @cluster = cluster
-      @logger = logger
+      @logger = TaggedLogger.new(logger)
 
       @transactional = transactional
       @transactional_id = transactional_id
@@ -217,6 +217,31 @@ module Kafka
       raise
     end
 
+    def send_offsets_to_txn(offsets:, group_id:)
+      force_transactional!
+
+      unless @transaction_state.in_transaction?
+        raise 'Transaction is not valid to send offsets'
+      end
+
+      add_response = transaction_coordinator.add_offsets_to_txn(
+        transactional_id: @transactional_id,
+        producer_id: @producer_id,
+        producer_epoch: @producer_epoch,
+        group_id: group_id
+      )
+      Protocol.handle_error(add_response.error_code)
+
+      send_response = transaction_coordinator.txn_offset_commit(
+        transactional_id: @transactional_id,
+        group_id: group_id,
+        producer_id: @producer_id,
+        producer_epoch: @producer_epoch,
+        offsets: offsets
+      )
+      Protocol.handle_error(send_response.error_code)
+    end
+
     def in_transaction?
       @transaction_state.in_transaction?
     end
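
send_offsets_to_txn is the low-level half of transactional consume-transform-produce: within an open transaction it first registers the consumer group with the transaction coordinator (AddOffsetsToTxn) and then commits the offsets via TxnOffsetCommit, so they only become visible if the transaction itself commits. A hedged sketch of how a caller might drive it, reusing the offsets shape from TxnOffsetCommitRequest above and assuming a transactional Kafka::TransactionManager with a transaction already begun:

    offsets = {
      "orders" => { 0 => { offset: 41, leader_epoch: 7 } }
    }

    transaction_manager.send_offsets_to_txn(
      offsets: offsets,
      group_id: "order-consumers"
    )

    # The offsets count as committed for "order-consumers" only if the
    # surrounding transaction is committed; aborting it discards them.
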
data/lib/kafka/transaction_state_machine.rb CHANGED
@@ -27,7 +27,7 @@ module Kafka
     def initialize(logger:)
       @state = UNINITIALIZED
       @mutex = Mutex.new
-      @logger = logger
+      @logger = TaggedLogger.new(logger)
     end
 
     def transition_to!(next_state)
data/lib/kafka/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Kafka
-  VERSION = "0.7.4"
+  VERSION = "1.1.0"
 end
data/lib/kafka/zstd_codec.rb ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Kafka
+  class ZstdCodec
+    def codec_id
+      4
+    end
+
+    def produce_api_min_version
+      7
+    end
+
+    def load
+      require "zstd-ruby"
+    rescue LoadError
+      raise LoadError, "using zstd compression requires adding a dependency on the `zstd-ruby` gem to your Gemfile."
+    end
+
+    def compress(data)
+      Zstd.compress(data)
+    end
+
+    def decompress(data)
+      Zstd.decompress(data)
+    end
+  end
+end
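
The codec loads the optional zstd-ruby gem lazily and reports produce_api_min_version 7, which is what makes ProduceRequest above switch to Produce API v7 (zstd needs Kafka 2.1+ brokers). A hedged usage sketch, assuming the codec is registered under the :zstd name alongside the existing :gzip / :snappy / :lz4 codecs:

    # Gemfile: the dependency is optional, add it explicitly
    #   gem "zstd-ruby"

    kafka = Kafka.new(["kafka1:9092"], client_id: "my-app")

    producer = kafka.producer(
      compression_codec: :zstd,    # assumed codec name, mirroring :gzip / :snappy / :lz4
      compression_threshold: 10    # only compress batches of 10 or more messages
    )

    producer.produce("hello", topic: "greetings")
    producer.deliver_messages
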
data/lib/kafka.rb CHANGED
@@ -351,6 +351,10 @@ module Kafka
   class FailedScramAuthentication < SaslScramError
   end
 
+  # The Token Provider object used for SASL OAuthBearer does not implement the method `token`
+  class TokenMethodNotImplementedError < Error
+  end
+
   # Initializes a new Kafka client.
   #
   # @see Client#initialize
data/ruby-kafka.gemspec CHANGED
@@ -18,7 +18,7 @@ Gem::Specification.new do |spec|
   DESC
 
   spec.homepage = "https://github.com/zendesk/ruby-kafka"
-  spec.license = "Apache License Version 2.0"
+  spec.license = "Apache-2.0"
 
   spec.required_ruby_version = '>= 2.1.0'
 
@@ -36,13 +36,15 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "dotenv"
   spec.add_development_dependency "docker-api"
   spec.add_development_dependency "rspec-benchmark"
-  spec.add_development_dependency "activesupport"
+  spec.add_development_dependency "activesupport", ">= 4.0", "< 6.1"
   spec.add_development_dependency "snappy"
   spec.add_development_dependency "extlz4"
+  spec.add_development_dependency "zstd-ruby"
   spec.add_development_dependency "colored"
   spec.add_development_dependency "rspec_junit_formatter", "0.2.2"
-  spec.add_development_dependency "dogstatsd-ruby", ">= 3.0.0", "< 5.0.0"
+  spec.add_development_dependency "dogstatsd-ruby", ">= 4.0.0", "< 5.0.0"
   spec.add_development_dependency "statsd-ruby"
+  spec.add_development_dependency "prometheus-client", "~> 0.10.0"
   spec.add_development_dependency "ruby-prof"
   spec.add_development_dependency "timecop"
   spec.add_development_dependency "rubocop", "~> 0.49.1"