ruby-kafka-custom 0.7.7.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/kafka/async_producer.rb +279 -0
- data/lib/kafka/broker.rb +205 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +754 -0
- data/lib/kafka/cluster.rb +455 -0
- data/lib/kafka/compression.rb +43 -0
- data/lib/kafka/compressor.rb +85 -0
- data/lib/kafka/connection.rb +220 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +592 -0
- data/lib/kafka/consumer_group.rb +208 -0
- data/lib/kafka/datadog.rb +413 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +54 -0
- data/lib/kafka/fetched_batch_generator.rb +117 -0
- data/lib/kafka/fetched_message.rb +47 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +221 -0
- data/lib/kafka/gzip_codec.rb +30 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/lz4_codec.rb +23 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/offset_manager.rb +248 -0
- data/lib/kafka/partitioner.rb +35 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +504 -0
- data/lib/kafka/protocol.rb +217 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +41 -0
- data/lib/kafka/protocol/join_group_response.rb +33 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +36 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +92 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +222 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +23 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +54 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +177 -0
- data/lib/kafka/sasl_authenticator.rb +61 -0
- data/lib/kafka/snappy_codec.rb +25 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +187 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +72 -0
- data/lib/kafka/transaction_manager.rb +261 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- metadata +461 -0

data/lib/kafka/protocol/find_coordinator_request.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class FindCoordinatorRequest
+      def initialize(coordinator_key:, coordinator_type:)
+        @coordinator_key = coordinator_key
+        @coordinator_type = coordinator_type
+      end
+
+      def api_key
+        FIND_COORDINATOR_API
+      end
+
+      def api_version
+        1
+      end
+
+      def encode(encoder)
+        encoder.write_string(@coordinator_key)
+        encoder.write_int8(@coordinator_type)
+      end
+
+      def response_class
+        FindCoordinatorResponse
+      end
+    end
+  end
+end

data/lib/kafka/protocol/find_coordinator_response.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class FindCoordinatorResponse
+      attr_reader :error_code, :error_message
+
+      attr_reader :coordinator_id, :coordinator_host, :coordinator_port
+
+      def initialize(error_code:, error_message:, coordinator_id:, coordinator_host:, coordinator_port:)
+        @error_code = error_code
+        @coordinator_id = coordinator_id
+        @coordinator_host = coordinator_host
+        @coordinator_port = coordinator_port
+      end
+
+      def self.decode(decoder)
+        _throttle_time_ms = decoder.int32
+        new(
+          error_code: decoder.int16,
+          error_message: decoder.string,
+          coordinator_id: decoder.int32,
+          coordinator_host: decoder.string,
+          coordinator_port: decoder.int32,
+        )
+      end
+    end
+  end
+end
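
Usage sketch (not part of the package diff): the request/response pair above follows the gem's protocol-object pattern, where a request knows its api_key, api_version, wire encoding, and paired response class. The snippet below is a hedged illustration of that pattern using the Encoder and Decoder classes listed earlier in this diff; the group name and reply bytes are hypothetical.

    require "kafka"
    require "stringio"

    # Build a request for the coordinator of a (hypothetical) consumer group.
    # In the Kafka protocol, coordinator_type 0 means group coordinator and
    # 1 means transaction coordinator.
    request = Kafka::Protocol::FindCoordinatorRequest.new(
      coordinator_key: "some-group",
      coordinator_type: 0
    )

    # Encoder.encode_with serializes the request body to a binary string.
    request_bytes = Kafka::Protocol::Encoder.encode_with(request)

    # The connection layer frames and sends the bytes; decoding the broker's
    # reply body would look roughly like this:
    #   decoder  = Kafka::Protocol::Decoder.new(StringIO.new(reply_bytes))
    #   response = request.response_class.decode(decoder)
    #   response.coordinator_host  # => host of the broker to talk to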

data/lib/kafka/protocol/heartbeat_request.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class HeartbeatRequest
+      def initialize(group_id:, generation_id:, member_id:)
+        @group_id = group_id
+        @generation_id = generation_id
+        @member_id = member_id
+      end
+
+      def api_key
+        HEARTBEAT_API
+      end
+
+      def response_class
+        HeartbeatResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_int32(@generation_id)
+        encoder.write_string(@member_id)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/heartbeat_response.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class HeartbeatResponse
+      attr_reader :error_code
+
+      def initialize(error_code:)
+        @error_code = error_code
+      end
+
+      def self.decode(decoder)
+        new(error_code: decoder.int16)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/init_producer_id_request.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class InitProducerIDRequest
+      def initialize(transactional_id: nil, transactional_timeout:)
+        @transactional_id = transactional_id
+        @transactional_timeout = transactional_timeout
+      end
+
+      def api_key
+        INIT_PRODUCER_ID_API
+      end
+
+      def response_class
+        InitProducerIDResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@transactional_id)
+        # Timeout is in ms unit
+        encoder.write_int32(@transactional_timeout * 1000)
+      end
+    end
+  end
+end
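
Usage note (not part of the diff): the transactional timeout is accepted in seconds and multiplied by 1000 during encoding, since the wire format expects milliseconds. A hedged example with hypothetical values:

    request = Kafka::Protocol::InitProducerIDRequest.new(
      transactional_id: "my-transactional-id",  # may be nil for idempotent-only producers
      transactional_timeout: 60                 # seconds; written as 60_000 ms on the wire
    )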

data/lib/kafka/protocol/init_producer_id_response.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class InitProducerIDResponse
+      attr_reader :error_code, :producer_id, :producer_epoch
+
+      def initialize(error_code:, producer_id:, producer_epoch:)
+        @error_code = error_code
+        @producer_id = producer_id
+        @producer_epoch = producer_epoch
+      end
+
+      def self.decode(decoder)
+        _throttle_time_ms = decoder.int32
+        error_code = decoder.int16
+        producer_id = decoder.int64
+        producer_epoch = decoder.int16
+        new(
+          error_code: error_code,
+          producer_id: producer_id,
+          producer_epoch: producer_epoch
+        )
+      end
+    end
+  end
+end

data/lib/kafka/protocol/join_group_request.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require "kafka/protocol/consumer_group_protocol"
+
+module Kafka
+  module Protocol
+    class JoinGroupRequest
+      PROTOCOL_TYPE = "consumer"
+
+      def initialize(group_id:, session_timeout:, member_id:, topics: [])
+        @group_id = group_id
+        @session_timeout = session_timeout * 1000 # Kafka wants ms.
+        @member_id = member_id || ""
+        @protocol_type = PROTOCOL_TYPE
+        @group_protocols = {
+          "standard" => ConsumerGroupProtocol.new(topics: ["test-messages"]),
+        }
+      end
+
+      def api_key
+        JOIN_GROUP_API
+      end
+
+      def response_class
+        JoinGroupResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_int32(@session_timeout)
+        encoder.write_string(@member_id)
+        encoder.write_string(@protocol_type)
+
+        encoder.write_array(@group_protocols) do |name, metadata|
+          encoder.write_string(name)
+          encoder.write_bytes(Encoder.encode_with(metadata))
+        end
+      end
+    end
+  end
+end
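
Usage sketch (not part of the diff): session_timeout is passed in seconds and converted to milliseconds in the constructor, and a first-time member joins with an empty member id. The values below are hypothetical; note that the constructor shown above does not reference its topics argument, since the group protocol metadata is hardcoded.

    request = Kafka::Protocol::JoinGroupRequest.new(
      group_id: "greeters",   # hypothetical consumer group
      session_timeout: 30,    # seconds; encoded as 30_000 ms
      member_id: nil,         # first join: becomes "" via `member_id || ""`
      topics: ["greetings"]
    )
    request_bytes = Kafka::Protocol::Encoder.encode_with(request)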

data/lib/kafka/protocol/join_group_response.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class JoinGroupResponse
+      attr_reader :error_code
+
+      attr_reader :generation_id, :group_protocol
+
+      attr_reader :leader_id, :member_id, :members
+
+      def initialize(error_code:, generation_id:, group_protocol:, leader_id:, member_id:, members:)
+        @error_code = error_code
+        @generation_id = generation_id
+        @group_protocol = group_protocol
+        @leader_id = leader_id
+        @member_id = member_id
+        @members = members
+      end
+
+      def self.decode(decoder)
+        new(
+          error_code: decoder.int16,
+          generation_id: decoder.int32,
+          group_protocol: decoder.string,
+          leader_id: decoder.string,
+          member_id: decoder.string,
+          members: Hash[decoder.array { [decoder.string, decoder.bytes] }],
+        )
+      end
+    end
+  end
+end

data/lib/kafka/protocol/leave_group_request.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class LeaveGroupRequest
+      def initialize(group_id:, member_id:)
+        @group_id = group_id
+        @member_id = member_id
+      end
+
+      def api_key
+        LEAVE_GROUP_API
+      end
+
+      def response_class
+        LeaveGroupResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_string(@member_id)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/leave_group_response.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class LeaveGroupResponse
+      attr_reader :error_code
+
+      def initialize(error_code:)
+        @error_code = error_code
+      end
+
+      def self.decode(decoder)
+        new(error_code: decoder.int16)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/list_groups_request.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class ListGroupsRequest
+      def api_key
+        LIST_GROUPS_API
+      end
+
+      def api_version
+        0
+      end
+
+      def response_class
+        Protocol::ListGroupsResponse
+      end
+
+      def encode(encoder)
+        # noop
+      end
+    end
+  end
+end

data/lib/kafka/protocol/list_groups_response.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    class ListGroupsResponse
+      class GroupEntry
+        attr_reader :group_id, :protocol_type
+
+        def initialize(group_id:, protocol_type:)
+          @group_id = group_id
+          @protocol_type = protocol_type
+        end
+      end
+
+      attr_reader :error_code, :groups
+
+      def initialize(error_code:, groups:)
+        @error_code = error_code
+        @groups = groups
+      end
+
+      def self.decode(decoder)
+        error_code = decoder.int16
+        groups = decoder.array do
+          GroupEntry.new(
+            group_id: decoder.string,
+            protocol_type: decoder.string
+          )
+        end
+
+        new(error_code: error_code, groups: groups)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/list_offset_request.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+    # A request to list the available offsets for a set of topics/partitions.
+    #
+    # ## API Specification
+    #
+    #     OffsetRequest => ReplicaId [TopicName [Partition Time MaxNumberOfOffsets]]
+    #       ReplicaId => int32
+    #       IsolationLevel => int8
+    #       TopicName => string
+    #       Partition => int32
+    #       Time => int64
+    #
+    class ListOffsetRequest
+      ISOLATION_READ_UNCOMMITTED = 0
+      ISOLATION_READ_COMMITTED = 1
+
+      # @param topics [Hash]
+      def initialize(topics:)
+        @replica_id = REPLICA_ID
+        @topics = topics
+      end
+
+      def api_version
+        2
+      end
+
+      def api_key
+        LIST_OFFSET_API
+      end
+
+      def response_class
+        Protocol::ListOffsetResponse
+      end
+
+      def encode(encoder)
+        encoder.write_int32(@replica_id)
+        encoder.write_int8(ISOLATION_READ_COMMITTED)
+
+        encoder.write_array(@topics) do |topic, partitions|
+          encoder.write_string(topic)
+
+          encoder.write_array(partitions) do |partition|
+            encoder.write_int32(partition.fetch(:partition))
+            encoder.write_int64(partition.fetch(:time))
+          end
+        end
+      end
+    end
+  end
+end
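
Construction sketch (not part of the diff): the encoder above iterates a hash of topic name to an array of partition hashes with :partition and :time keys. In the Kafka protocol, a time of -1 requests the latest offset and -2 the earliest. Topic and partition values below are hypothetical.

    request = Kafka::Protocol::ListOffsetRequest.new(
      topics: {
        "greetings" => [
          { partition: 0, time: -1 },  # latest offset for partition 0
          { partition: 1, time: -2 },  # earliest offset for partition 1
        ],
      }
    )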

data/lib/kafka/protocol/list_offset_response.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Kafka
+  module Protocol
+
+    # A response to a list offset request.
+    #
+    # ## API Specification
+    #
+    #     OffsetResponse => [TopicName [PartitionOffsets]]
+    #       ThrottleTimeMS => int32
+    #       PartitionOffsets => Partition ErrorCode Timestamp Offset
+    #       Partition => int32
+    #       ErrorCode => int16
+    #       Timestamp => int64
+    #       Offset => int64
+    #
+    class ListOffsetResponse
+      class TopicOffsetInfo
+        attr_reader :name, :partition_offsets
+
+        def initialize(name:, partition_offsets:)
+          @name = name
+          @partition_offsets = partition_offsets
+        end
+      end
+
+      class PartitionOffsetInfo
+        attr_reader :partition, :error_code, :timestamp, :offset
+
+        def initialize(partition:, error_code:, timestamp:, offset:)
+          @partition = partition
+          @error_code = error_code
+          @timestamp = timestamp
+          @offset = offset
+        end
+      end
+
+      attr_reader :topics
+
+      def initialize(topics:)
+        @topics = topics
+      end
+
+      def offset_for(topic, partition)
+        topic_info = @topics.find {|t| t.name == topic }
+
+        if topic_info.nil?
+          raise UnknownTopicOrPartition, "Unknown topic #{topic}"
+        end
+
+        partition_info = topic_info
+          .partition_offsets
+          .find {|p| p.partition == partition }
+
+        if partition_info.nil?
+          raise UnknownTopicOrPartition, "Unknown partition #{topic}/#{partition}"
+        end
+
+        Protocol.handle_error(partition_info.error_code)
+
+        partition_info.offset
+      end
+
+      def self.decode(decoder)
+        _throttle_time_ms = decoder.int32
+        topics = decoder.array do
+          name = decoder.string
+
+          partition_offsets = decoder.array do
+            PartitionOffsetInfo.new(
+              partition: decoder.int32,
+              error_code: decoder.int16,
+              timestamp: decoder.int64,
+              offset: decoder.int64
+            )
+          end
+
+          TopicOffsetInfo.new(
+            name: name,
+            partition_offsets: partition_offsets
+          )
+        end
+
+        new(topics: topics)
+      end
+    end
+  end
+end
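
Usage sketch (not part of the diff): offset_for looks up a topic/partition pair, raises Kafka::UnknownTopicOrPartition if either is missing, passes the partition's error code through Protocol.handle_error, and returns the offset. The response below is built by hand with hypothetical values; in the client it comes from ListOffsetResponse.decode on a broker reply.

    partition_info = Kafka::Protocol::ListOffsetResponse::PartitionOffsetInfo.new(
      partition: 0,
      error_code: 0,   # no error, so Protocol.handle_error does not raise
      timestamp: -1,
      offset: 42
    )

    response = Kafka::Protocol::ListOffsetResponse.new(
      topics: [
        Kafka::Protocol::ListOffsetResponse::TopicOffsetInfo.new(
          name: "greetings",                 # hypothetical topic
          partition_offsets: [partition_info]
        ),
      ]
    )

    response.offset_for("greetings", 0)  # => 42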