ruby-kafka-temp-fork 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +393 -0
- data/.github/workflows/stale.yml +19 -0
- data/.gitignore +13 -0
- data/.readygo +1 -0
- data/.rspec +3 -0
- data/.rubocop.yml +44 -0
- data/.ruby-version +1 -0
- data/.yardopts +3 -0
- data/CHANGELOG.md +310 -0
- data/Gemfile +5 -0
- data/ISSUE_TEMPLATE.md +23 -0
- data/LICENSE.txt +176 -0
- data/Procfile +2 -0
- data/README.md +1342 -0
- data/Rakefile +8 -0
- data/benchmarks/message_encoding.rb +23 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/docker-compose.yml +39 -0
- data/examples/consumer-group.rb +35 -0
- data/examples/firehose-consumer.rb +64 -0
- data/examples/firehose-producer.rb +54 -0
- data/examples/simple-consumer.rb +34 -0
- data/examples/simple-producer.rb +42 -0
- data/examples/ssl-producer.rb +44 -0
- data/lib/kafka.rb +373 -0
- data/lib/kafka/async_producer.rb +291 -0
- data/lib/kafka/broker.rb +217 -0
- data/lib/kafka/broker_info.rb +16 -0
- data/lib/kafka/broker_pool.rb +41 -0
- data/lib/kafka/broker_uri.rb +43 -0
- data/lib/kafka/client.rb +833 -0
- data/lib/kafka/cluster.rb +513 -0
- data/lib/kafka/compression.rb +45 -0
- data/lib/kafka/compressor.rb +86 -0
- data/lib/kafka/connection.rb +223 -0
- data/lib/kafka/connection_builder.rb +33 -0
- data/lib/kafka/consumer.rb +642 -0
- data/lib/kafka/consumer_group.rb +231 -0
- data/lib/kafka/consumer_group/assignor.rb +63 -0
- data/lib/kafka/crc32_hash.rb +15 -0
- data/lib/kafka/datadog.rb +420 -0
- data/lib/kafka/digest.rb +22 -0
- data/lib/kafka/fetch_operation.rb +115 -0
- data/lib/kafka/fetched_batch.rb +58 -0
- data/lib/kafka/fetched_batch_generator.rb +120 -0
- data/lib/kafka/fetched_message.rb +48 -0
- data/lib/kafka/fetched_offset_resolver.rb +48 -0
- data/lib/kafka/fetcher.rb +224 -0
- data/lib/kafka/gzip_codec.rb +34 -0
- data/lib/kafka/heartbeat.rb +25 -0
- data/lib/kafka/instrumenter.rb +38 -0
- data/lib/kafka/interceptors.rb +33 -0
- data/lib/kafka/lz4_codec.rb +27 -0
- data/lib/kafka/message_buffer.rb +87 -0
- data/lib/kafka/murmur2_hash.rb +17 -0
- data/lib/kafka/offset_manager.rb +259 -0
- data/lib/kafka/partitioner.rb +40 -0
- data/lib/kafka/pause.rb +92 -0
- data/lib/kafka/pending_message.rb +29 -0
- data/lib/kafka/pending_message_queue.rb +41 -0
- data/lib/kafka/produce_operation.rb +205 -0
- data/lib/kafka/producer.rb +528 -0
- data/lib/kafka/prometheus.rb +316 -0
- data/lib/kafka/protocol.rb +225 -0
- data/lib/kafka/protocol/add_offsets_to_txn_request.rb +29 -0
- data/lib/kafka/protocol/add_offsets_to_txn_response.rb +21 -0
- data/lib/kafka/protocol/add_partitions_to_txn_request.rb +34 -0
- data/lib/kafka/protocol/add_partitions_to_txn_response.rb +47 -0
- data/lib/kafka/protocol/alter_configs_request.rb +44 -0
- data/lib/kafka/protocol/alter_configs_response.rb +49 -0
- data/lib/kafka/protocol/api_versions_request.rb +21 -0
- data/lib/kafka/protocol/api_versions_response.rb +53 -0
- data/lib/kafka/protocol/consumer_group_protocol.rb +19 -0
- data/lib/kafka/protocol/create_partitions_request.rb +42 -0
- data/lib/kafka/protocol/create_partitions_response.rb +28 -0
- data/lib/kafka/protocol/create_topics_request.rb +45 -0
- data/lib/kafka/protocol/create_topics_response.rb +26 -0
- data/lib/kafka/protocol/decoder.rb +175 -0
- data/lib/kafka/protocol/delete_topics_request.rb +33 -0
- data/lib/kafka/protocol/delete_topics_response.rb +26 -0
- data/lib/kafka/protocol/describe_configs_request.rb +35 -0
- data/lib/kafka/protocol/describe_configs_response.rb +73 -0
- data/lib/kafka/protocol/describe_groups_request.rb +27 -0
- data/lib/kafka/protocol/describe_groups_response.rb +73 -0
- data/lib/kafka/protocol/encoder.rb +184 -0
- data/lib/kafka/protocol/end_txn_request.rb +29 -0
- data/lib/kafka/protocol/end_txn_response.rb +19 -0
- data/lib/kafka/protocol/fetch_request.rb +70 -0
- data/lib/kafka/protocol/fetch_response.rb +136 -0
- data/lib/kafka/protocol/find_coordinator_request.rb +29 -0
- data/lib/kafka/protocol/find_coordinator_response.rb +29 -0
- data/lib/kafka/protocol/heartbeat_request.rb +27 -0
- data/lib/kafka/protocol/heartbeat_response.rb +17 -0
- data/lib/kafka/protocol/init_producer_id_request.rb +26 -0
- data/lib/kafka/protocol/init_producer_id_response.rb +27 -0
- data/lib/kafka/protocol/join_group_request.rb +47 -0
- data/lib/kafka/protocol/join_group_response.rb +41 -0
- data/lib/kafka/protocol/leave_group_request.rb +25 -0
- data/lib/kafka/protocol/leave_group_response.rb +17 -0
- data/lib/kafka/protocol/list_groups_request.rb +23 -0
- data/lib/kafka/protocol/list_groups_response.rb +35 -0
- data/lib/kafka/protocol/list_offset_request.rb +53 -0
- data/lib/kafka/protocol/list_offset_response.rb +89 -0
- data/lib/kafka/protocol/member_assignment.rb +42 -0
- data/lib/kafka/protocol/message.rb +172 -0
- data/lib/kafka/protocol/message_set.rb +55 -0
- data/lib/kafka/protocol/metadata_request.rb +31 -0
- data/lib/kafka/protocol/metadata_response.rb +185 -0
- data/lib/kafka/protocol/offset_commit_request.rb +47 -0
- data/lib/kafka/protocol/offset_commit_response.rb +29 -0
- data/lib/kafka/protocol/offset_fetch_request.rb +38 -0
- data/lib/kafka/protocol/offset_fetch_response.rb +56 -0
- data/lib/kafka/protocol/produce_request.rb +94 -0
- data/lib/kafka/protocol/produce_response.rb +63 -0
- data/lib/kafka/protocol/record.rb +88 -0
- data/lib/kafka/protocol/record_batch.rb +223 -0
- data/lib/kafka/protocol/request_message.rb +26 -0
- data/lib/kafka/protocol/sasl_handshake_request.rb +33 -0
- data/lib/kafka/protocol/sasl_handshake_response.rb +28 -0
- data/lib/kafka/protocol/sync_group_request.rb +33 -0
- data/lib/kafka/protocol/sync_group_response.rb +26 -0
- data/lib/kafka/protocol/txn_offset_commit_request.rb +46 -0
- data/lib/kafka/protocol/txn_offset_commit_response.rb +47 -0
- data/lib/kafka/round_robin_assignment_strategy.rb +52 -0
- data/lib/kafka/sasl/gssapi.rb +76 -0
- data/lib/kafka/sasl/oauth.rb +64 -0
- data/lib/kafka/sasl/plain.rb +39 -0
- data/lib/kafka/sasl/scram.rb +180 -0
- data/lib/kafka/sasl_authenticator.rb +61 -0
- data/lib/kafka/snappy_codec.rb +29 -0
- data/lib/kafka/socket_with_timeout.rb +96 -0
- data/lib/kafka/ssl_context.rb +66 -0
- data/lib/kafka/ssl_socket_with_timeout.rb +188 -0
- data/lib/kafka/statsd.rb +296 -0
- data/lib/kafka/tagged_logger.rb +77 -0
- data/lib/kafka/transaction_manager.rb +306 -0
- data/lib/kafka/transaction_state_machine.rb +72 -0
- data/lib/kafka/version.rb +5 -0
- data/lib/kafka/zstd_codec.rb +27 -0
- data/lib/ruby-kafka-temp-fork.rb +5 -0
- data/ruby-kafka-temp-fork.gemspec +54 -0
- metadata +520 -0
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request that ties a consumer group's offset commits to an ongoing
    # transaction, so the committed offsets become part of the transaction's
    # atomic scope.
    class AddOffsetsToTxnRequest
      # @param transactional_id [String, nil] the producer's transactional id
      # @param producer_id [Integer] producer id assigned by the coordinator
      # @param producer_epoch [Integer] epoch used to fence stale producers
      # @param group_id [String] consumer group whose offsets join the txn
      def initialize(transactional_id: nil, producer_id:, producer_epoch:, group_id:)
        @transactional_id, @producer_id, @producer_epoch, @group_id =
          transactional_id, producer_id, producer_epoch, group_id
      end

      def api_key
        ADD_OFFSETS_TO_TXN_API
      end

      def response_class
        AddOffsetsToTxnResponse
      end

      # Serializes the request fields in wire order.
      def encode(encoder)
        # A nil transactional id is written as the empty string.
        encoder.write_string(@transactional_id.to_s)
        encoder.write_int64(@producer_id)
        encoder.write_int16(@producer_epoch)
        encoder.write_string(@group_id)
      end
    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Response to an AddOffsetsToTxn request; carries only an error code.
    class AddOffsetsToTxnResponse

      attr_reader :error_code

      def initialize(error_code:)
        @error_code = error_code
      end

      # Reads the wire format: throttle time (discarded) then the error code.
      def self.decode(decoder)
        decoder.int32 # throttle_time_ms — not surfaced
        new(error_code: decoder.int16)
      end

    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request that registers topic partitions as participants in an ongoing
    # transaction before transactional data is produced to them.
    class AddPartitionsToTxnRequest
      # @param transactional_id [String, nil] the producer's transactional id
      # @param producer_id [Integer] producer id assigned by the coordinator
      # @param producer_epoch [Integer] epoch used to fence stale producers
      # @param topics [Hash{String => Array<Integer>}] topic => partition ids
      def initialize(transactional_id: nil, producer_id:, producer_epoch:, topics:)
        @transactional_id, @producer_id, @producer_epoch, @topics =
          transactional_id, producer_id, producer_epoch, topics
      end

      def api_key
        ADD_PARTITIONS_TO_TXN_API
      end

      def response_class
        AddPartitionsToTxnResponse
      end

      # Serializes the request fields in wire order.
      def encode(encoder)
        # A nil transactional id is written as the empty string.
        encoder.write_string(@transactional_id.to_s)
        encoder.write_int64(@producer_id)
        encoder.write_int16(@producer_epoch)
        encoder.write_array(@topics.to_a) do |topic, partitions|
          encoder.write_string(topic)
          encoder.write_array(partitions) { |partition| encoder.write_int32(partition) }
        end
      end
    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol
    # Response to AddPartitionsToTxn: per-topic, per-partition error codes.
    class AddPartitionsToTxnResponse
      # Error code for a single partition.
      class PartitionError
        attr_reader :partition, :error_code

        def initialize(partition:, error_code:)
          @partition = partition
          @error_code = error_code
        end
      end

      # Groups the partition errors that belong to one topic.
      class TopicPartitionsError
        attr_reader :topic, :partitions

        def initialize(topic:, partitions:)
          @topic = topic
          @partitions = partitions
        end
      end

      attr_reader :errors

      def initialize(errors:)
        @errors = errors
      end

      # Reads the wire format: throttle time (discarded), then an array of
      # topics, each with an array of [partition, error_code] pairs.
      def self.decode(decoder)
        decoder.int32 # throttle_time_ms — not surfaced
        errors = decoder.array do
          topic = decoder.string
          partitions = decoder.array do
            PartitionError.new(partition: decoder.int32, error_code: decoder.int16)
          end
          TopicPartitionsError.new(topic: topic, partitions: partitions)
        end
        new(errors: errors)
      end
    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request to alter configuration entries of brokers or topics.
    class AlterConfigsRequest
      # @param resources [Array<Array(Integer, String, Hash)>] triples of
      #   resource type, resource name, and a config name => value mapping.
      def initialize(resources:)
        @resources = resources
      end

      def api_key
        ALTER_CONFIGS_API
      end

      def api_version
        0
      end

      def response_class
        Protocol::AlterConfigsResponse
      end

      # Serializes the resources, then a hard-coded validate_only=false flag.
      def encode(encoder)
        encoder.write_array(@resources) do |type, name, configs|
          encoder.write_int8(type)
          encoder.write_string(name)

          encoder.write_array(configs.to_a) do |config_name, config_value|
            encoder.write_string(config_name)
            # Config value is nullable; non-nil values must be stringified.
            encoder.write_string(config_value.nil? ? config_value : config_value.to_s)
          end
        end
        # validate_only is not supported by this client — always false.
        encoder.write_boolean(false)
      end
    end

  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol
    # Response to AlterConfigs: one result description per altered resource.
    class AlterConfigsResponse
      # Outcome of altering a single resource.
      class ResourceDescription
        attr_reader :name, :type, :error_code, :error_message

        def initialize(name:, type:, error_code:, error_message:)
          @name = name
          @type = type
          @error_code = error_code
          @error_message = error_message
        end
      end

      attr_reader :resources

      def initialize(throttle_time_ms:, resources:)
        @throttle_time_ms = throttle_time_ms
        @resources = resources
      end

      # Reads throttle time, then for each resource: error code, error
      # message, resource type and resource name — in that wire order.
      def self.decode(decoder)
        throttle_time_ms = decoder.int32
        resources = decoder.array do
          error_code = decoder.int16
          error_message = decoder.string

          resource_type = decoder.int8
          type = Kafka::Protocol::RESOURCE_TYPES[resource_type]
          # Fail loudly on unknown resource types rather than yielding nil.
          raise Kafka::ProtocolError, "Resource type not supported: #{resource_type}" if type.nil?

          ResourceDescription.new(
            type: type,
            name: decoder.string,
            error_code: error_code,
            error_message: error_message
          )
        end

        new(throttle_time_ms: throttle_time_ms, resources: resources)
      end
    end

  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request asking the broker which API versions it supports.
    # The request body is empty.
    class ApiVersionsRequest
      def api_key
        API_VERSIONS_API
      end

      # No payload to serialize.
      def encode(_encoder)
      end

      def response_class
        Protocol::ApiVersionsResponse
      end
    end

  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Response listing, per API key, the version range the broker supports.
    class ApiVersionsResponse
      # Supported version span for a single API.
      class ApiInfo
        attr_reader :api_key, :min_version, :max_version

        def initialize(api_key:, min_version:, max_version:)
          @api_key, @min_version, @max_version = api_key, min_version, max_version
        end

        # Human-readable name of the API, resolved via the protocol tables.
        def api_name
          Protocol.api_name(api_key)
        end

        # @return [Boolean] true if `version` lies inside the supported range.
        def version_supported?(version)
          # Range#cover? compares against the endpoints instead of enumerating
          # the range; for integer endpoints the result is identical to #include?.
          (min_version..max_version).cover?(version)
        end

        def to_s
          "#{api_name}=#{min_version}..#{max_version}"
        end

        def inspect
          # Interpolation calls #to_s implicitly; an explicit `.to_s` is redundant.
          "#<Kafka api version #{self}>"
        end
      end

      attr_reader :error_code, :apis

      def initialize(error_code:, apis:)
        @error_code = error_code
        @apis = apis
      end

      # Reads the error code, then one (api_key, min, max) triple per API.
      def self.decode(decoder)
        error_code = decoder.int16

        apis = decoder.array do
          ApiInfo.new(
            api_key: decoder.int16,
            min_version: decoder.int16,
            max_version: decoder.int16
          )
        end

        new(error_code: error_code, apis: apis)
      end
    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol
    # Protocol metadata a consumer sends when joining a group: a protocol
    # version, the topics it subscribes to, and opaque user data.
    class ConsumerGroupProtocol
      def initialize(version: 0, topics:, user_data: nil)
        @version = version
        @topics = topics
        @user_data = user_data
      end

      # Serializes version, topic list, then the (nullable) user data bytes.
      def encode(encoder)
        encoder.write_int16(@version)
        encoder.write_array(@topics) do |topic|
          encoder.write_string(topic)
        end
        encoder.write_bytes(@user_data)
      end
    end
  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request to grow the partition count of one or more topics.
    class CreatePartitionsRequest
      # @param topics [Array<Array(String, Integer, Array)>] triples of topic
      #   name, new total partition count and replica assignments
      # @param timeout [Integer] timeout in seconds (sent as ms on the wire)
      def initialize(topics:, timeout:)
        @topics = topics
        @timeout = timeout
      end

      def api_key
        CREATE_PARTITIONS_API
      end

      def api_version
        0
      end

      def response_class
        Protocol::CreatePartitionsResponse
      end

      def encode(encoder)
        encoder.write_array(@topics) do |topic, count, assignments|
          encoder.write_string(topic)
          encoder.write_int32(count)
          encoder.write_array(assignments) do |assignment|
            encoder.write_array(assignment) { |broker| encoder.write_int32(broker) }
          end
        end
        # The wire format expects milliseconds; @timeout is in seconds.
        encoder.write_int32(@timeout * 1000)
        # validate_only is not exposed by this client — always false.
        encoder.write_boolean(false)
      end
    end

  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Response to CreatePartitions: one [topic, error_code, error_message]
    # triple per requested topic.
    class CreatePartitionsResponse
      attr_reader :errors

      def initialize(throttle_time_ms:, errors:)
        @throttle_time_ms = throttle_time_ms
        @errors = errors
      end

      # Reads throttle time, then the per-topic error triples in wire order.
      def self.decode(decoder)
        throttle_time_ms = decoder.int32
        errors = decoder.array do
          [decoder.string, decoder.int16, decoder.string]
        end

        new(throttle_time_ms: throttle_time_ms, errors: errors)
      end
    end

  end
end
|
|
# frozen_string_literal: true

module Kafka
  module Protocol

    # Request to create topics with a partition count, replication factor
    # and per-topic configuration entries.
    class CreateTopicsRequest
      # @param topics [Hash{String => Hash}] topic name =>
      #   { num_partitions:, replication_factor:, config: }
      # @param timeout [Integer] timeout in seconds (sent as ms on the wire)
      def initialize(topics:, timeout:)
        @topics = topics
        @timeout = timeout
      end

      def api_key
        CREATE_TOPICS_API
      end

      def api_version
        0
      end

      def response_class
        Protocol::CreateTopicsResponse
      end

      def encode(encoder)
        encoder.write_array(@topics) do |topic, config|
          encoder.write_string(topic)
          encoder.write_int32(config.fetch(:num_partitions))
          encoder.write_int16(config.fetch(:replication_factor))

          # Manual replica assignments are not supported — send an empty list.
          encoder.write_array([])

          encoder.write_array(config.fetch(:config)) do |config_name, config_value|
            encoder.write_string(config_name)
            # Config value is nullable; non-nil values must be stringified.
            encoder.write_string(config_value.nil? ? config_value : config_value.to_s)
          end
        end

        # The wire format expects milliseconds; @timeout is in seconds.
        encoder.write_int32(@timeout * 1000)
      end
    end

  end
end
|