ruby-kafka-aws-iam 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +393 -0
- data/.github/workflows/stale.yml +19 -0
- data/.gitignore +13 -0
- data/.readygo +1 -0
- data/.rspec +3 -0
- data/.rubocop.yml +44 -0
- data/.ruby-version +1 -0
- data/.yardopts +3 -0
- data/CHANGELOG.md +314 -0
- data/Gemfile +5 -0
- data/ISSUE_TEMPLATE.md +23 -0
- data/LICENSE.txt +176 -0
- data/Procfile +2 -0
- data/README.md +1356 -0
- data/Rakefile +8 -0
- data/benchmarks/message_encoding.rb +23 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/docker-compose.yml +39 -0
- data/examples/consumer-group.rb +35 -0
- data/examples/firehose-consumer.rb +64 -0
- data/examples/firehose-producer.rb +54 -0
- data/examples/simple-consumer.rb +34 -0
- data/examples/simple-producer.rb +42 -0
- data/examples/ssl-producer.rb +44 -0
- data/lib/kafka/async_producer.rb +297 -0
- data/lib/kafka/broker.rb +217 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +838 -0
- data/lib/kafka/cluster.rb +513 -0
- data/lib/kafka/compression.rb +45 -0
- data/lib/kafka/compressor.rb +86 -0
- data/lib/kafka/connection.rb +228 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +642 -0
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/consumer_group.rb +231 -0
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +420 -0
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +58 -0
- data/lib/kafka/fetched_batch_generator.rb +120 -0
- data/lib/kafka/fetched_message.rb +48 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +224 -0
- data/lib/kafka/gzip_codec.rb +34 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/lz4_codec.rb +27 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +259 -0
- data/lib/kafka/partitioner.rb +40 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +528 -0
- data/lib/kafka/prometheus.rb +316 -0
- data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +47 -0
- data/lib/kafka/protocol/join_group_response.rb +41 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +94 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +223 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +26 -0
- data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
- data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
- data/lib/kafka/protocol.rb +225 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
- data/lib/kafka/sasl/awsmskiam.rb +128 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +180 -0
- data/lib/kafka/sasl_authenticator.rb +73 -0
- data/lib/kafka/snappy_codec.rb +29 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +192 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +77 -0
- data/lib/kafka/transaction_manager.rb +306 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- data/lib/kafka/zstd_codec.rb +27 -0
- data/lib/kafka.rb +373 -0
- data/lib/ruby-kafka.rb +5 -0
- data/ruby-kafka.gemspec +54 -0
- metadata +520 -0
@@ -0,0 +1,184 @@ data/lib/kafka/protocol/encoder.rb
# frozen_string_literal: true

require "stringio"

module Kafka
  module Protocol
    # An encoder wraps an IO object, making it easy to write specific data types
    # to it.
    class Encoder
      # Initializes a new encoder.
      #
      # @param io [IO] an object that acts as an IO.
      def initialize(io)
        @io = io
        @io.set_encoding(Encoding::BINARY)
      end

      # Writes bytes directly to the IO object.
      #
      # @param bytes [String]
      # @return [nil]
      def write(bytes)
        @io.write(bytes)

        nil
      end

      # Writes an 8-bit boolean to the IO object.
      #
      # @param boolean [Boolean]
      # @return [nil]
      def write_boolean(boolean)
        boolean ? write_int8(1) : write_int8(0)
      end

      # Writes an 8-bit integer to the IO object.
      #
      # @param int [Integer]
      # @return [nil]
      def write_int8(int)
        write([int].pack("C"))
      end

      # Writes a 16-bit integer to the IO object.
      #
      # @param int [Integer]
      # @return [nil]
      def write_int16(int)
        write([int].pack("s>"))
      end

      # Writes a 32-bit integer to the IO object.
      #
      # @param int [Integer]
      # @return [nil]
      def write_int32(int)
        write([int].pack("l>"))
      end

      # Writes a 64-bit integer to the IO object.
      #
      # @param int [Integer]
      # @return [nil]
      def write_int64(int)
        write([int].pack("q>"))
      end

      # Writes an array to the IO object.
      #
      # Each item in the specified array will be yielded to the provided block;
      # it's the responsibility of the block to write those items using the
      # encoder.
      #
      # @param array [Array]
      # @return [nil]
      def write_array(array, &block)
        if array.nil?
          # An array can be null, which is different from it being empty.
          write_int32(-1)
        else
          write_int32(array.size)
          array.each(&block)
        end
      end

      # Writes an array to the IO object.
      # Just like #write_array, except the size is encoded as a varint.
      #
      # @param array [Array]
      # @return [nil]
      def write_varint_array(array, &block)
        if array.nil?
          write_varint(-1)
        else
          write_varint(array.size)
          array.each(&block)
        end
      end

      # Writes a string to the IO object.
      #
      # @param string [String]
      # @return [nil]
      def write_string(string)
        if string.nil?
          write_int16(-1)
        else
          write_int16(string.bytesize)
          write(string)
        end
      end

      # Writes a string to the IO object, with the size encoded as a varint.
      #
      # @param string [String]
      # @return [nil]
      def write_varint_string(string)
        if string.nil?
          write_varint(-1)
        else
          write_varint(string.bytesize)
          write(string)
        end
      end

      # Writes an integer to the IO object using varint (zig-zag) serialization.
      # https://developers.google.com/protocol-buffers/docs/encoding#varints
      #
      # @param int [Integer]
      # @return [nil]
      def write_varint(int)
        int = int << 1
        int = ~int | 1 if int < 0

        chunks = []
        while int >> 7 != 0
          chunks << (int & 0x7f | 0x80)
          int >>= 7
        end
        chunks << int
        write(chunks.pack("C*"))
      end

      # Writes a byte string to the IO object.
      #
      # @param bytes [String]
      # @return [nil]
      def write_bytes(bytes)
        if bytes.nil?
          write_int32(-1)
        else
          write_int32(bytes.bytesize)
          write(bytes)
        end
      end

      # Writes a byte string to the IO object, with the size encoded as a varint.
      #
      # @param bytes [String]
      # @return [nil]
      def write_varint_bytes(bytes)
        if bytes.nil?
          write_varint(-1)
        else
          write_varint(bytes.bytesize)
          write(bytes)
        end
      end

      # Encodes an object into a new buffer.
      #
      # @param object [#encode] the object that will encode itself.
      # @return [String] the encoded data.
      def self.encode_with(object)
        buffer = StringIO.new
        encoder = new(buffer)

        object.encode(encoder)

        buffer.string
      end
    end
  end
end

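Worth noting in write_varint: the `int << 1` / `~int | 1` prologue is zig-zag encoding, so small negative numbers stay small on the wire. A minimal sketch of exercising the encoder against an in-memory buffer (the byte values follow directly from the pack formats above; the require path matches this gem's file layout):

require "stringio"
require "kafka/protocol/encoder"

buffer = StringIO.new
encoder = Kafka::Protocol::Encoder.new(buffer)

encoder.write_int16(1000)     # big-endian: "\x03\xE8"
encoder.write_string("kafka") # int16 length prefix, then the bytes
encoder.write_varint(-1)      # zig-zag maps -1 to 1, emitted as "\x01"

buffer.string.bytesize # => 10 (2 + 7 + 1)
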
@@ -0,0 +1,29 @@ data/lib/kafka/protocol/end_txn_request.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class EndTxnRequest
      def initialize(transactional_id:, producer_id:, producer_epoch:, transaction_result:)
        @transactional_id = transactional_id
        @producer_id = producer_id
        @producer_epoch = producer_epoch
        @transaction_result = transaction_result
      end

      def api_key
        END_TXN_API
      end

      def response_class
        EndTxnResponse
      end

      def encode(encoder)
        encoder.write_string(@transactional_id)
        encoder.write_int64(@producer_id)
        encoder.write_int16(@producer_epoch)
        encoder.write_boolean(@transaction_result)
      end
    end
  end
end

@@ -0,0 +1,19 @@ data/lib/kafka/protocol/end_txn_response.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class EndTxnResponse
      attr_reader :error_code

      def initialize(error_code:)
        @error_code = error_code
      end

      def self.decode(decoder)
        _throttle_time_ms = decoder.int32
        error_code = decoder.int16
        new(error_code: error_code)
      end
    end
  end
end

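A hedged round-trip sketch for the pair of classes above, using only Encoder.encode_with and Decoder.from_string as they appear elsewhere in this diff; the field values are illustrative:

request = Kafka::Protocol::EndTxnRequest.new(
  transactional_id: "txn-1",
  producer_id: 42,
  producer_epoch: 0,
  transaction_result: true # true = commit, false = abort
)
payload = Kafka::Protocol::Encoder.encode_with(request)

# The response body is int32 throttle_time_ms followed by int16 error_code.
raw = [0, 0].pack("l>s>")
response = Kafka::Protocol::EndTxnResponse.decode(
  Kafka::Protocol::Decoder.from_string(raw)
)
response.error_code # => 0
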
@@ -0,0 +1,70 @@ data/lib/kafka/protocol/fetch_request.rb
# frozen_string_literal: true

module Kafka
  module Protocol

    # A request to fetch messages from a given partition.
    #
    # ## API Specification
    #
    #     FetchRequest => ReplicaId MaxWaitTime MinBytes MaxBytes IsolationLevel [TopicName [Partition FetchOffset MaxBytes]]
    #       ReplicaId => int32
    #       MaxWaitTime => int32
    #       MinBytes => int32
    #       MaxBytes => int32
    #       IsolationLevel => int8
    #       TopicName => string
    #       Partition => int32
    #       FetchOffset => int64
    #       MaxBytes => int32
    #
    class FetchRequest
      ISOLATION_READ_UNCOMMITTED = 0
      ISOLATION_READ_COMMITTED = 1

      # @param max_wait_time [Integer]
      # @param min_bytes [Integer]
      # @param max_bytes [Integer]
      # @param topics [Hash]
      def initialize(max_wait_time:, min_bytes:, max_bytes:, topics:)
        @replica_id = REPLICA_ID
        @max_wait_time = max_wait_time
        @min_bytes = min_bytes
        @max_bytes = max_bytes
        @topics = topics
      end

      def api_key
        FETCH_API
      end

      def api_version
        4
      end

      def response_class
        Protocol::FetchResponse
      end

      def encode(encoder)
        encoder.write_int32(@replica_id)
        encoder.write_int32(@max_wait_time)
        encoder.write_int32(@min_bytes)
        encoder.write_int32(@max_bytes)
        encoder.write_int8(ISOLATION_READ_COMMITTED)

        encoder.write_array(@topics) do |topic, partitions|
          encoder.write_string(topic)

          encoder.write_array(partitions) do |partition, config|
            fetch_offset = config.fetch(:fetch_offset)
            max_bytes = config.fetch(:max_bytes)

            encoder.write_int32(partition)
            encoder.write_int64(fetch_offset)
            encoder.write_int32(max_bytes)
          end
        end
      end
    end
  end
end

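The topics argument is a nested hash, as implied by the two write_array blocks and the config.fetch calls: topic name => { partition => { fetch_offset:, max_bytes: } }. An illustrative sketch (the topic name and sizes are made up):

topics = {
  "greetings" => {
    0 => { fetch_offset: 0,   max_bytes: 1 << 20 },
    1 => { fetch_offset: 512, max_bytes: 1 << 20 },
  },
}

request = Kafka::Protocol::FetchRequest.new(
  max_wait_time: 5_000, # ms
  min_bytes: 1,
  max_bytes: 10 * (1 << 20),
  topics: topics
)
data = Kafka::Protocol::Encoder.encode_with(request)
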
@@ -0,0 +1,136 @@ data/lib/kafka/protocol/fetch_response.rb
# frozen_string_literal: true

require "kafka/protocol/message_set"
require "kafka/protocol/record_batch"

module Kafka
  module Protocol

    # A response to a fetch request.
    #
    # ## API Specification
    #
    #     FetchResponse => ThrottleTimeMS [TopicName [Partition ErrorCode HighwaterMarkOffset LastStableOffset [AbortedTransaction] Records]]
    #       ThrottleTimeMS => int32
    #       TopicName => string
    #       Partition => int32
    #       ErrorCode => int16
    #       HighwaterMarkOffset => int64
    #       LastStableOffset => int64
    #       MessageSetSize => int32
    #       AbortedTransaction => [
    #         ProducerId => int64
    #         FirstOffset => int64
    #       ]
    #
    class FetchResponse
      MAGIC_BYTE_OFFSET = 16
      MAGIC_BYTE_LENGTH = 1

      class FetchedPartition
        attr_reader :partition, :error_code
        attr_reader :highwater_mark_offset, :last_stable_offset, :aborted_transactions, :messages

        def initialize(partition:, error_code:, highwater_mark_offset:, last_stable_offset:, aborted_transactions:, messages:)
          @partition = partition
          @error_code = error_code
          @highwater_mark_offset = highwater_mark_offset
          @messages = messages
          @last_stable_offset = last_stable_offset
          @aborted_transactions = aborted_transactions
        end
      end

      class FetchedTopic
        attr_reader :name, :partitions

        def initialize(name:, partitions:)
          @name = name
          @partitions = partitions
        end
      end

      class AbortedTransaction
        attr_reader :producer_id, :first_offset

        def initialize(producer_id:, first_offset:)
          @producer_id = producer_id
          @first_offset = first_offset
        end
      end

      attr_reader :topics

      def initialize(topics: [], throttle_time_ms: 0)
        @topics = topics
        @throttle_time_ms = throttle_time_ms
      end

      def self.decode(decoder)
        throttle_time_ms = decoder.int32

        topics = decoder.array do
          topic_name = decoder.string

          partitions = decoder.array do
            partition = decoder.int32
            error_code = decoder.int16
            highwater_mark_offset = decoder.int64
            last_stable_offset = decoder.int64

            aborted_transactions = decoder.array do
              producer_id = decoder.int64
              first_offset = decoder.int64
              AbortedTransaction.new(
                producer_id: producer_id,
                first_offset: first_offset
              )
            end

            messages_raw = decoder.bytes
            messages = []

            if !messages_raw.nil? && !messages_raw.empty?
              messages_decoder = Decoder.from_string(messages_raw)

              magic_byte = messages_decoder.peek(MAGIC_BYTE_OFFSET, MAGIC_BYTE_LENGTH)[0].to_i
              if magic_byte == RecordBatch::MAGIC_BYTE
                until messages_decoder.eof?
                  begin
                    record_batch = RecordBatch.decode(messages_decoder)
                    messages << record_batch
                  rescue InsufficientDataMessage
                    if messages.length > 0
                      break
                    else
                      raise
                    end
                  end
                end
              else
                message_set = MessageSet.decode(messages_decoder)
                messages << message_set
              end
            end

            FetchedPartition.new(
              partition: partition,
              error_code: error_code,
              highwater_mark_offset: highwater_mark_offset,
              last_stable_offset: last_stable_offset,
              aborted_transactions: aborted_transactions,
              messages: messages
            )
          end

          FetchedTopic.new(
            name: topic_name,
            partitions: partitions,
          )
        end

        new(topics: topics, throttle_time_ms: throttle_time_ms)
      end
    end
  end
end

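The decode path sniffs byte 16 of the raw record data to pick a format: v2 record batches carry their magic byte at that offset, anything else falls back to the legacy MessageSet. A sketch of that dispatch in isolation (error handling omitted; `raw` stands in for the bytes of one fetched partition):

decoder = Kafka::Protocol::Decoder.from_string(raw)

magic = decoder.peek(16, 1)[0].to_i

if magic == Kafka::Protocol::RecordBatch::MAGIC_BYTE
  batches = []
  batches << Kafka::Protocol::RecordBatch.decode(decoder) until decoder.eof?
else
  message_set = Kafka::Protocol::MessageSet.decode(decoder)
end
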
@@ -0,0 +1,29 @@ data/lib/kafka/protocol/find_coordinator_request.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class FindCoordinatorRequest
      def initialize(coordinator_key:, coordinator_type:)
        @coordinator_key = coordinator_key
        @coordinator_type = coordinator_type
      end

      def api_key
        FIND_COORDINATOR_API
      end

      def api_version
        1
      end

      def encode(encoder)
        encoder.write_string(@coordinator_key)
        encoder.write_int8(@coordinator_type)
      end

      def response_class
        FindCoordinatorResponse
      end
    end
  end
end

@@ -0,0 +1,29 @@ data/lib/kafka/protocol/find_coordinator_response.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class FindCoordinatorResponse
      attr_reader :error_code, :error_message

      attr_reader :coordinator_id, :coordinator_host, :coordinator_port

      def initialize(error_code:, error_message:, coordinator_id:, coordinator_host:, coordinator_port:)
        @error_code = error_code
        @error_message = error_message
        @coordinator_id = coordinator_id
        @coordinator_host = coordinator_host
        @coordinator_port = coordinator_port
      end

      def self.decode(decoder)
        _throttle_time_ms = decoder.int32
        new(
          error_code: decoder.int16,
          error_message: decoder.string,
          coordinator_id: decoder.int32,
          coordinator_host: decoder.string,
          coordinator_port: decoder.int32,
        )
      end
    end
  end
end

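In the Kafka protocol, coordinator_type 0 asks for a group coordinator and 1 for a transaction coordinator. A hedged example of building the lookup for a consumer group (the group name is illustrative):

request = Kafka::Protocol::FindCoordinatorRequest.new(
  coordinator_key: "my-consumer-group",
  coordinator_type: 0 # 0 = group coordinator, 1 = transaction coordinator
)
data = Kafka::Protocol::Encoder.encode_with(request)
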
@@ -0,0 +1,27 @@ data/lib/kafka/protocol/heartbeat_request.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class HeartbeatRequest
      def initialize(group_id:, generation_id:, member_id:)
        @group_id = group_id
        @generation_id = generation_id
        @member_id = member_id
      end

      def api_key
        HEARTBEAT_API
      end

      def response_class
        HeartbeatResponse
      end

      def encode(encoder)
        encoder.write_string(@group_id)
        encoder.write_int32(@generation_id)
        encoder.write_string(@member_id)
      end
    end
  end
end

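A heartbeat carries the (group, generation, member) triple the broker checks for liveness; a stale generation id is how a consumer learns that a rebalance has happened. Illustrative values:

request = Kafka::Protocol::HeartbeatRequest.new(
  group_id: "my-consumer-group",
  generation_id: 7,              # current group generation
  member_id: "consumer-1-abc123" # assigned by the broker on join
)
Kafka::Protocol::Encoder.encode_with(request)
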
@@ -0,0 +1,17 @@ data/lib/kafka/protocol/heartbeat_response.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class HeartbeatResponse
      attr_reader :error_code

      def initialize(error_code:)
        @error_code = error_code
      end

      def self.decode(decoder)
        new(error_code: decoder.int16)
      end
    end
  end
end

@@ -0,0 +1,26 @@ data/lib/kafka/protocol/init_producer_id_request.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class InitProducerIDRequest
      def initialize(transactional_id: nil, transactional_timeout:)
        @transactional_id = transactional_id
        @transactional_timeout = transactional_timeout
      end

      def api_key
        INIT_PRODUCER_ID_API
      end

      def response_class
        InitProducerIDResponse
      end

      def encode(encoder)
        encoder.write_string(@transactional_id)
        # The broker expects the timeout in milliseconds; it is given in seconds.
        encoder.write_int32(@transactional_timeout * 1000)
      end
    end
  end
end

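Note the unit conversion in encode: the constructor takes the transactional timeout in seconds and multiplies by 1000 for the wire. Illustrative usage:

request = Kafka::Protocol::InitProducerIDRequest.new(
  transactional_id: "txn-1", # nil for an idempotent, non-transactional producer
  transactional_timeout: 60  # seconds; encoded as 60_000 ms
)
Kafka::Protocol::Encoder.encode_with(request)
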
@@ -0,0 +1,27 @@ data/lib/kafka/protocol/init_producer_id_response.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class InitProducerIDResponse
      attr_reader :error_code, :producer_id, :producer_epoch

      def initialize(error_code:, producer_id:, producer_epoch:)
        @error_code = error_code
        @producer_id = producer_id
        @producer_epoch = producer_epoch
      end

      def self.decode(decoder)
        _throttle_time_ms = decoder.int32
        error_code = decoder.int16
        producer_id = decoder.int64
        producer_epoch = decoder.int16
        new(
          error_code: error_code,
          producer_id: producer_id,
          producer_epoch: producer_epoch
        )
      end
    end
  end
end

@@ -0,0 +1,47 @@ data/lib/kafka/protocol/join_group_request.rb
# frozen_string_literal: true

require "kafka/protocol/consumer_group_protocol"

module Kafka
  module Protocol
    class JoinGroupRequest
      PROTOCOL_TYPE = "consumer"

      def initialize(group_id:, session_timeout:, rebalance_timeout:, member_id:, topics: [], protocol_name:, user_data: nil)
        @group_id = group_id
        @session_timeout = session_timeout * 1000 # Kafka wants ms.
        @rebalance_timeout = rebalance_timeout * 1000 # Kafka wants ms.
        @member_id = member_id || ""
        @protocol_type = PROTOCOL_TYPE
        @group_protocols = {
          protocol_name => ConsumerGroupProtocol.new(topics: topics, user_data: user_data),
        }
      end

      def api_key
        JOIN_GROUP_API
      end

      def api_version
        1
      end

      def response_class
        JoinGroupResponse
      end

      def encode(encoder)
        encoder.write_string(@group_id)
        encoder.write_int32(@session_timeout)
        encoder.write_int32(@rebalance_timeout)
        encoder.write_string(@member_id)
        encoder.write_string(@protocol_type)

        encoder.write_array(@group_protocols) do |name, metadata|
          encoder.write_string(name)
          encoder.write_bytes(Encoder.encode_with(metadata))
        end
      end
    end
  end
end

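Session and rebalance timeouts follow the same seconds-to-milliseconds convention, and a blank member_id signals a first join (the broker assigns one). A hedged sketch; the protocol name is whatever the configured assignment strategy reports, so "roundrobin" here is illustrative:

request = Kafka::Protocol::JoinGroupRequest.new(
  group_id: "my-consumer-group",
  session_timeout: 30,   # seconds; encoded as ms
  rebalance_timeout: 60, # seconds; encoded as ms
  member_id: nil,        # first join; falls back to ""
  topics: ["greetings"],
  protocol_name: "roundrobin"
)
Kafka::Protocol::Encoder.encode_with(request)
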
@@ -0,0 +1,41 @@ data/lib/kafka/protocol/join_group_response.rb
# frozen_string_literal: true

module Kafka
  module Protocol
    class JoinGroupResponse
      Metadata = Struct.new(:version, :topics, :user_data)

      attr_reader :error_code

      attr_reader :generation_id, :group_protocol

      attr_reader :leader_id, :member_id, :members

      def initialize(error_code:, generation_id:, group_protocol:, leader_id:, member_id:, members:)
        @error_code = error_code
        @generation_id = generation_id
        @group_protocol = group_protocol
        @leader_id = leader_id
        @member_id = member_id
        @members = members
      end

      def self.decode(decoder)
        new(
          error_code: decoder.int16,
          generation_id: decoder.int32,
          group_protocol: decoder.string,
          leader_id: decoder.string,
          member_id: decoder.string,
          members: Hash[
            decoder.array do
              member_id = decoder.string
              d = Decoder.from_string(decoder.bytes)
              [member_id, Metadata.new(d.int16, d.array { d.string }, d.bytes)]
            end
          ],
        )
      end
    end
  end
end