kafka_syrup 0.9.0

@@ -0,0 +1,22 @@
+ %w[
+   errors
+   base
+   request
+   response
+   message
+   message_set
+   metadata_request
+   metadata_response
+   produce_request
+   produce_response
+   fetch_request
+   fetch_response
+   offset_request
+   offset_response
+ ].each{ |file| require "kafka_syrup/protocol/#{file}" }
+
+ module KafkaSyrup
+   module Protocol
+     REPLICA_ID = -1
+   end
+ end
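
For orientation, a minimal usage sketch: this file only wires up the protocol namespace, so the one thing to reference directly is the shared replica id constant. The require path is an assumption; normally the gem's own entry point loads it.

    require 'kafka_syrup'             # assumed gem entry point that pulls in kafka_syrup/protocol

    KafkaSyrup::Protocol::REPLICA_ID  # => -1, the replica id sent by FetchRequest#encode below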
@@ -0,0 +1,41 @@
+ module KafkaSyrup
+   module Protocol
+     class Base
+       include Utils
+
+       def initialize(*args, &block)
+         if (io = args.first).respond_to?(:read)
+           decode(io, &block)
+         else
+           load_args(defaults)
+           load_args(*args)
+         end
+       end
+
+       def defaults
+         {}
+       end
+
+       def config
+         @config ||= ::KafkaSyrup.config
+       end
+
+       def encode(&block)
+         encoded = block_given? ? yield : ""
+
+         [
+           E.write_int32(encoded.length),
+           encoded
+         ].join
+       end
+
+       def decode(io)
+         E.read_int32(io) # Total length (ignored)
+       end
+
+       def ==(obj)
+         obj.encode.bytes == encode.bytes
+       end
+     end
+   end
+ end
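
A short sketch of how the framing in Base behaves, using a hypothetical subclass that is not part of the gem: Base#encode prefixes whatever the block yields with its int32 length, and Base#decode consumes that prefix before the subclass reads its own fields.

    module KafkaSyrup
      module Protocol
        # Hypothetical subclass for illustration only.
        class EchoExample < Base
          def encode
            super { E.write_string('ping') }  # block result gets the int32 length prefix from Base#encode
          end

          def decode(io)
            super                             # Base#decode reads (and ignores) the total length
            @payload = E.read_string(io)
          end
        end
      end
    end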
@@ -0,0 +1,32 @@
+ module KafkaSyrup
+   class KafkaResponseError < Error; end
+
+   module KafkaResponseErrors
+     ERRORS = {
+       -1 => 'Unknown',
+       1 => 'OffsetOutOfRange',
+       2 => 'InvalidMessage',
+       3 => 'UnknownTopicOrPartition',
+       4 => 'InvalidMessageSize',
+       5 => 'LeaderNotAvailable',
+       6 => 'NotLeaderForPartition',
+       7 => 'RequestTimedOut',
+       8 => 'BrokerNotAvailable',
+       9 => 'ReplicaNotAvailable',
+       10 => 'MessageSizeTooLarge',
+       11 => 'StaleControllerEpochCode',
+       12 => 'OffsetMetadataTooLargeCode',
+       14 => 'OffsetsLoadInProgressCode',
+       15 => 'ConsumerCoordinatorNotAvailableCode',
+       16 => 'NotCoordinatorForConsumerCode'
+     }
+
+     ERRORS.values.each{ |val| const_set val, Class.new(KafkaResponseError) }
+
+     class << self
+       def raise_from_code(code, msg = nil)
+         raise const_get(ERRORS[code]).new(msg) if ERRORS.keys.include?(code)
+       end
+     end
+   end
+ end
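
For example, raising from a broker error code maps the numeric code to one of the generated exception classes, while codes absent from the table (including 0, success) are ignored:

    KafkaSyrup::KafkaResponseErrors.raise_from_code(0)   # no-op: 0 is not in ERRORS

    begin
      KafkaSyrup::KafkaResponseErrors.raise_from_code(6, 'partition moved')
    rescue KafkaSyrup::KafkaResponseErrors::NotLeaderForPartition => e
      e.message  # => "partition moved"
    end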
@@ -0,0 +1,60 @@
+ module KafkaSyrup
+   module Protocol
+     class FetchRequest < Request
+       self.api_key = 1
+
+       attr_accessor :max_wait_time, :min_bytes, :max_bytes, :topics
+
+       def defaults
+         {
+           max_wait_time: config.consume_max_wait_time,
+           min_bytes: config.consume_min_bytes,
+           max_bytes: config.consume_max_bytes,
+           topics: []
+         }
+       end
+
+       def encode
+         super do
+           [
+             E.write_int32(REPLICA_ID),
+             E.write_int32(max_wait_time),
+             E.write_int32(min_bytes),
+             E.write_array(topics)
+           ].join
+         end
+       end
+
+       def add_topic(name)
+         topic = Topic.new(name, [], max_bytes)
+         topics << topic
+         topic
+       end
+
+       Topic = Struct.new(:name, :partitions, :max_bytes) do
+         def add_partition(id, offset)
+           partition = Partition.new(id, offset, max_bytes)
+           partitions << partition
+           partition
+         end
+
+         def encode
+           [
+             E.write_string(name),
+             E.write_array(partitions)
+           ].join
+         end
+       end
+
+       Partition = Struct.new(:id, :offset, :max_bytes) do
+         def encode
+           [
+             E.write_int32(id),
+             E.write_int64(offset),
+             E.write_int32(max_bytes)
+           ].join
+         end
+       end
+     end
+   end
+ end
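
A usage sketch for building a fetch request. It assumes KafkaSyrup.config provides the consume_* defaults referenced above and that Request (not shown in this diff) keeps Base's constructor and framing behavior:

    req = KafkaSyrup::Protocol::FetchRequest.new(max_wait_time: 100, min_bytes: 1)
    topic = req.add_topic('events')   # Topic struct carrying this request's max_bytes
    topic.add_partition(0, 42)        # fetch partition 0 starting at offset 42
    bytes = req.encode                # body: REPLICA_ID, max_wait_time, min_bytes, then the topic array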
@@ -0,0 +1,84 @@
+ module KafkaSyrup
+   module Protocol
+     class FetchResponse < Response
+       attr_accessor :topics
+
+       def defaults
+         { topics: [] }
+       end
+
+       def add_topic(name)
+         topic = Topic.new(name, [])
+         topics << topic
+         topic
+       end
+
+       def encode
+         super do
+           E.write_array(topics)
+         end
+       end
+
+       def decode(io, &block)
+         super
+         self.topics = E.read_array(io){ |input| Topic.decode(input, &block) }
+         topics.flat_map(&:partitions).map(&:code).each(&KafkaResponseErrors.method(:raise_from_code))
+       end
+
+       Topic = Struct.new(:name, :partitions) do
+         def add_partition(id, code, highwater_offset)
+           partition = Partition.new(id, code, highwater_offset, MessageSet.new)
+           partitions << partition
+           partition
+         end
+
+         def encode
+           [
+             E.write_string(name),
+             E.write_array(partitions)
+           ].join
+         end
+
+         def self.decode(io, &block)
+           new(
+             E.read_string(io), # Name
+             E.read_array(io){ |input| Partition.decode(input, &block) } # Partitions
+           )
+         end
+       end
+
+       Partition = Struct.new(:id, :code, :highwater_offset, :message_set) do
+         def messages
+           message_set.messages
+         end
+
+         def add_message(value = nil, opts = {})
+           m = Message.new(opts.merge(value: value))
+           message_set.messages << m
+           m
+         end
+
+         def encode
+           encoded = message_set.encode
+           [
+             E.write_int32(id),
+             E.write_int16(code),
+             E.write_int64(highwater_offset),
+             E.write_int32(encoded.length),
+             encoded
+           ].join
+         end
+
+         def self.decode(io, &block)
+           partition = new
+           partition.id = E.read_int32(io)
+           partition.code = E.read_int16(io)
+           partition.highwater_offset = E.read_int64(io)
+           length = E.read_int32(io)
+           partition.message_set = MessageSet.new(io, length, &block)
+           partition
+         end
+       end
+     end
+   end
+ end
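
A round-trip sketch (handy in tests), assuming Response (not shown in this diff) mirrors Base's construct/encode/decode symmetry:

    require 'stringio'

    resp  = KafkaSyrup::Protocol::FetchResponse.new
    topic = resp.add_topic('events')
    part  = topic.add_partition(0, 0, 100)   # partition id, error code, highwater offset
    part.add_message('hello')                # appends a Message to the partition's MessageSet

    decoded = KafkaSyrup::Protocol::FetchResponse.new(StringIO.new(resp.encode))
    decoded.topics.first.partitions.first.messages.map(&:value)  # => ["hello"]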
@@ -0,0 +1,58 @@
+ require 'zlib'
+
+ module KafkaSyrup
+   module Protocol
+     class Message
+       include Utils
+
+       MAGIC_BYTE = 0
+
+       attr_accessor :key, :value, :offset
+
+       def initialize(*args)
+         if (io = args.first).respond_to?(:read)
+           opts = { check_crc: false }
+           opts.merge!(args.last) if args.last.is_a?(Hash)
+           load_args(opts)
+
+           crc = E.read_int32(io)
+           E.read_int8(io) # Magic Byte (ignored)
+           E.read_int8(io) # Compression (ignored)
+           self.key = E.read_bytes(io)
+           self.value = E.read_bytes(io)
+           # TODO Verify CRC
+         end
+
+         load_args(*args)
+       end
+
+       def encode
+         [
+           E.write_int32(crc),
+           encoded
+         ].join
+       end
+
+       def encoded
+         @encoded ||= [
+           E.write_int8(MAGIC_BYTE),
+           E.write_int8(0), # Currently don't support compression
+           E.write_bytes(key),
+           E.write_bytes(value)
+         ].join
+       end
+
+       def crc
+         @crc ||= Zlib.crc32(encoded)
+       end
+
+       def key
+         @key.to_s.empty? ? nil : @key
+       end
+
+       def ==(obj)
+         obj.encode == encode
+       end
+     end
+   end
+ end
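
Two small points worth illustrating: the CRC is computed over the encoded payload (magic byte, compression byte, key, value) and prepended by #encode, and empty keys read back as nil:

    require 'zlib'

    msg = KafkaSyrup::Protocol::Message.new(value: 'hello', key: 'k1')
    msg.crc == Zlib.crc32(msg.encoded)   # => true
    wire = msg.encode                    # 4-byte CRC followed by the encoded payload

    KafkaSyrup::Protocol::Message.new(value: 'x').key  # => nil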
@@ -0,0 +1,53 @@
+ module KafkaSyrup
+   module Protocol
+     class MessageSet
+       include Utils
+
+       attr_accessor :messages
+
+       def initialize(*args)
+         io, total_length, *_ = args
+         if io.respond_to?(:read)
+           total_length = 0 unless total_length.is_a?(Fixnum)
+
+           read_length = 0
+
+           self.messages = []
+
+           while read_length < total_length && !io.eof?
+             offset = E.read_int64(io)
+             read_length += 8
+             msg_length = E.read_int32(io)
+             read_length += 4
+             msg = Message.new(io, offset: offset, length: msg_length) rescue nil
+             messages << msg
+             read_length += msg_length
+             yield(msg) if block_given? && msg
+           end
+         else
+           load_args(defaults)
+           load_args(*args)
+         end
+       end
+
+       def defaults
+         { messages: [] }
+       end
+
+       def encode
+         messages.map{ |msg|
+           encoded = msg.encode
+           [
+             E.write_int64(msg.offset.to_i),
+             E.write_int32(encoded.length),
+             encoded
+           ].join
+         }.join
+       end
+
+       def ==(obj)
+         obj.encode == encode
+       end
+     end
+   end
+ end
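
A sketch of the set's wire format and re-parsing, assuming a Ruby version where Fixnum is still defined (the parser checks for it): each message is framed as [int64 offset][int32 length][message bytes], and decoding stops once the advertised byte count is consumed.

    require 'stringio'

    set = KafkaSyrup::Protocol::MessageSet.new(messages: [
      KafkaSyrup::Protocol::Message.new(value: 'a', offset: 0),
      KafkaSyrup::Protocol::Message.new(value: 'b', offset: 1)
    ])
    bytes = set.encode

    parsed = KafkaSyrup::Protocol::MessageSet.new(StringIO.new(bytes), bytes.length) { |m| puts m.value }
    parsed.messages.map(&:offset)  # => [0, 1]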
@@ -0,0 +1,23 @@
+ module KafkaSyrup
+   module Protocol
+     class MetadataRequest < Request
+       self.api_key = 3
+
+       attr_accessor :topics
+
+       def initialize(*args)
+         opts = args.last.is_a?(Hash) ? args.pop : {}
+
+         load_args(opts)
+
+         self.topics = args
+       end
+
+       def encode
+         super do
+           E.write_array(topics, &E.method(:write_string))
+         end
+       end
+     end
+   end
+ end
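
Topic names are passed positionally, with an optional trailing options hash; a sketch (again assuming Request, not shown here, supplies the outer framing):

    req = KafkaSyrup::Protocol::MetadataRequest.new('events', 'logs')
    req.topics          # => ["events", "logs"]
    bytes = req.encode  # topic names written as an array of Kafka strings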
@@ -0,0 +1,105 @@
+ module KafkaSyrup
+   module Protocol
+     class MetadataResponse < Response
+       attr_accessor :brokers, :topics
+
+       def defaults
+         { brokers: [], topics: [] }
+       end
+
+       def add_broker(node, host, port)
+         broker = Broker.new(node, host, port)
+         brokers << broker
+         broker
+       end
+
+       def add_topic(code, name)
+         topic = Topic.new(code, name, [])
+         topics << topic
+         topic
+       end
+
+       def encode
+         super do
+           [
+             E.write_array(brokers),
+             E.write_array(topics)
+           ].join
+         end
+       end
+
+       def decode(io)
+         super
+         self.brokers = E.read_array(io, &Broker.method(:decode))
+         self.topics = E.read_array(io, &Topic.method(:decode))
+
+         topics.map(&:code).each(&KafkaResponseErrors.method(:raise_from_code))
+         topics.flat_map(&:partitions).map(&:code).each(&KafkaResponseErrors.method(:raise_from_code))
+       end
+
+       Broker = Struct.new(:node, :host, :port) do
+         def encode
+           [
+             E.write_int32(node),
+             E.write_string(host),
+             E.write_int32(port)
+           ].join
+         end
+
+         def self.decode(io)
+           new(
+             E.read_int32(io), # Node
+             E.read_string(io), # Host
+             E.read_int32(io) # Port
+           )
+         end
+       end
+
+       Topic = Struct.new(:code, :name, :partitions) do
+         def add_partition(p_code, id, leader, replicas, isr)
+           partition = Partition.new(p_code, id, leader, replicas, isr)
+           partitions << partition
+           partition
+         end
+
+         def encode
+           [
+             E.write_int16(code),
+             E.write_string(name),
+             E.write_array(partitions)
+           ].join
+         end
+
+         def self.decode(io)
+           new(
+             E.read_int16(io), # Error Code
+             E.read_string(io), # Name
+             E.read_array(io, &Partition.method(:decode)) # Partitions
+           )
+         end
+       end
+
+       Partition = Struct.new(:code, :id, :leader, :replicas, :isr) do
+         def encode
+           [
+             E.write_int16(code),
+             E.write_int32(id),
+             E.write_int32(leader),
+             E.write_array(replicas, &E.method(:write_int32)),
+             E.write_array(isr, &E.method(:write_int32))
+           ].join
+         end
+
+         def self.decode(io)
+           new(
+             E.read_int16(io), # Error Code
+             E.read_int32(io), # ID
+             E.read_int32(io), # Leader
+             E.read_array(io, &E.method(:read_int32)), # Replicas
+             E.read_array(io, &E.method(:read_int32)) # ISR
+           )
+         end
+       end
+     end
+   end
+ end
+ end