ruby-kafka 0.1.7 → 0.2.0

Files changed (36)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/README.md +12 -1
  4. data/lib/kafka.rb +18 -0
  5. data/lib/kafka/broker.rb +42 -0
  6. data/lib/kafka/client.rb +35 -5
  7. data/lib/kafka/cluster.rb +30 -0
  8. data/lib/kafka/compressor.rb +59 -0
  9. data/lib/kafka/connection.rb +1 -0
  10. data/lib/kafka/consumer.rb +211 -0
  11. data/lib/kafka/consumer_group.rb +172 -0
  12. data/lib/kafka/fetch_operation.rb +2 -2
  13. data/lib/kafka/produce_operation.rb +4 -8
  14. data/lib/kafka/producer.rb +7 -5
  15. data/lib/kafka/protocol.rb +27 -0
  16. data/lib/kafka/protocol/consumer_group_protocol.rb +17 -0
  17. data/lib/kafka/protocol/group_coordinator_request.rb +21 -0
  18. data/lib/kafka/protocol/group_coordinator_response.rb +25 -0
  19. data/lib/kafka/protocol/heartbeat_request.rb +25 -0
  20. data/lib/kafka/protocol/heartbeat_response.rb +15 -0
  21. data/lib/kafka/protocol/join_group_request.rb +39 -0
  22. data/lib/kafka/protocol/join_group_response.rb +31 -0
  23. data/lib/kafka/protocol/leave_group_request.rb +23 -0
  24. data/lib/kafka/protocol/leave_group_response.rb +15 -0
  25. data/lib/kafka/protocol/member_assignment.rb +40 -0
  26. data/lib/kafka/protocol/message_set.rb +5 -37
  27. data/lib/kafka/protocol/metadata_response.rb +5 -1
  28. data/lib/kafka/protocol/offset_commit_request.rb +42 -0
  29. data/lib/kafka/protocol/offset_commit_response.rb +27 -0
  30. data/lib/kafka/protocol/offset_fetch_request.rb +34 -0
  31. data/lib/kafka/protocol/offset_fetch_response.rb +51 -0
  32. data/lib/kafka/protocol/sync_group_request.rb +31 -0
  33. data/lib/kafka/protocol/sync_group_response.rb +21 -0
  34. data/lib/kafka/round_robin_assignment_strategy.rb +40 -0
  35. data/lib/kafka/version.rb +1 -1
  36. metadata +23 -2

data/lib/kafka/protocol/join_group_response.rb
@@ -0,0 +1,31 @@
+module Kafka
+  module Protocol
+    class JoinGroupResponse
+      attr_reader :error_code
+
+      attr_reader :generation_id, :group_protocol
+
+      attr_reader :leader_id, :member_id, :members
+
+      def initialize(error_code:, generation_id:, group_protocol:, leader_id:, member_id:, members:)
+        @error_code = error_code
+        @generation_id = generation_id
+        @group_protocol = group_protocol
+        @leader_id = leader_id
+        @member_id = member_id
+        @members = members
+      end
+
+      def self.decode(decoder)
+        new(
+          error_code: decoder.int16,
+          generation_id: decoder.int32,
+          group_protocol: decoder.string,
+          leader_id: decoder.string,
+          member_id: decoder.string,
+          members: Hash[decoder.array { [decoder.string, decoder.bytes] }],
+        )
+      end
+    end
+  end
+end
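
The decoded response tells a joining member whether it was elected group
leader. A hedged sketch of how calling code might inspect it; `response` is
assumed to come from the client's connection layer and is not part of this
diff:

    # Raise if the broker rejected the join (Protocol.handle_error is the
    # same helper OffsetFetchResponse uses further down in this diff).
    Kafka::Protocol.handle_error(response.error_code)

    if response.member_id == response.leader_id
      # Only the leader receives the full member list; it is responsible
      # for computing the partition assignment for the whole group.
      response.members.each do |member_id, metadata|
        # `metadata` holds the raw bytes each member sent when joining.
      end
    end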

data/lib/kafka/protocol/leave_group_request.rb
@@ -0,0 +1,23 @@
+module Kafka
+  module Protocol
+    class LeaveGroupRequest
+      def initialize(group_id:, member_id:)
+        @group_id = group_id
+        @member_id = member_id
+      end
+
+      def api_key
+        13
+      end
+
+      def response_class
+        LeaveGroupResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_string(@member_id)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/leave_group_response.rb
@@ -0,0 +1,15 @@
+module Kafka
+  module Protocol
+    class LeaveGroupResponse
+      attr_reader :error_code
+
+      def initialize(error_code:)
+        @error_code = error_code
+      end
+
+      def self.decode(decoder)
+        new(error_code: decoder.int16)
+      end
+    end
+  end
+end

data/lib/kafka/protocol/member_assignment.rb
@@ -0,0 +1,40 @@
+module Kafka
+  module Protocol
+    class MemberAssignment
+      attr_reader :topics
+
+      def initialize(version: 0, topics: {}, user_data: nil)
+        @version = version
+        @topics = topics
+        @user_data = user_data
+      end
+
+      def assign(topic, partitions)
+        @topics[topic] ||= []
+        @topics[topic].concat(partitions)
+      end
+
+      def encode(encoder)
+        encoder.write_int16(@version)
+
+        encoder.write_array(@topics) do |topic, partitions|
+          encoder.write_string(topic)
+
+          encoder.write_array(partitions) do |partition|
+            encoder.write_int32(partition)
+          end
+        end
+
+        encoder.write_bytes(@user_data)
+      end
+
+      def self.decode(decoder)
+        new(
+          version: decoder.int16,
+          topics: Hash[decoder.array { [decoder.string, decoder.array { decoder.int32 }] }],
+          user_data: decoder.bytes,
+        )
+      end
+    end
+  end
+end
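
MemberAssignment is the payload a group leader hands back through SyncGroup.
A hedged round-trip sketch, using the Encoder.encode_with and
Decoder.from_string helpers that the SyncGroup request and response below
rely on:

    assignment = Kafka::Protocol::MemberAssignment.new
    assignment.assign("greetings", [0, 1, 2])
    assignment.assign("greetings", [3])

    assignment.topics # => { "greetings" => [0, 1, 2, 3] }

    # Serialize and decode again, as the SyncGroup round trip would:
    data = Kafka::Protocol::Encoder.encode_with(assignment)
    decoder = Kafka::Protocol::Decoder.from_string(data)
    copy = Kafka::Protocol::MemberAssignment.decode(decoder)
    copy.topics # => { "greetings" => [0, 1, 2, 3] }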

data/lib/kafka/protocol/message_set.rb
@@ -3,10 +3,8 @@ module Kafka
     class MessageSet
       attr_reader :messages
 
-      def initialize(messages: [], compression_codec: nil, compression_threshold: 1)
+      def initialize(messages: [])
         @messages = messages
-        @compression_codec = compression_codec
-        @compression_threshold = compression_threshold
       end
 
       def size

@@ -18,10 +16,10 @@ module Kafka
       end
 
       def encode(encoder)
-        if compress?
-          encode_with_compression(encoder)
-        else
-          encode_without_compression(encoder)
+        # Messages in a message set are *not* encoded as an array. Rather,
+        # they are written in sequence.
+        @messages.each do |message|
+          message.encode(encoder)
         end
       end
 

@@ -41,36 +39,6 @@ module Kafka
 
         new(messages: fetched_messages)
       end
-
-      private
-
-      def compress?
-        !@compression_codec.nil? && size >= @compression_threshold
-      end
-
-      def encode_with_compression(encoder)
-        codec = @compression_codec
-
-        buffer = StringIO.new
-        encode_without_compression(Encoder.new(buffer))
-        data = codec.compress(buffer.string)
-
-        wrapper_message = Protocol::Message.new(
-          value: data,
-          attributes: codec.codec_id,
-        )
-
-        message_set = MessageSet.new(messages: [wrapper_message])
-        message_set.encode(encoder)
-      end
-
-      def encode_without_compression(encoder)
-        # Messages in a message set are *not* encoded as an array. Rather,
-        # they are written in sequence.
-        @messages.each do |message|
-          message.encode(encoder)
-        end
-      end
     end
   end
 end
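
Compression has not been dropped, just relocated: per the file list above, it
moves out of MessageSet and into the new data/lib/kafka/compressor.rb. What
remains here is the bare sequential wire format. A small sketch of what
MessageSet#encode now produces, reusing the Encoder-over-StringIO pattern
from the removed code (message1 and message2 stand in for Protocol::Message
instances):

    require "stringio"

    buffer = StringIO.new
    encoder = Kafka::Protocol::Encoder.new(buffer)

    message_set = Kafka::Protocol::MessageSet.new(messages: [message1, message2])
    message_set.encode(encoder)

    # buffer.string now holds message1's bytes immediately followed by
    # message2's: no length prefix, unlike other protocol-level lists.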

data/lib/kafka/protocol/metadata_response.rb
@@ -121,7 +121,11 @@ module Kafka
       # @param node_id [Integer] the node id of the broker.
       # @return [BrokerInfo] information about the broker.
       def find_broker(node_id)
-        @brokers.find {|broker| broker.node_id == node_id }
+        broker = @brokers.find {|broker| broker.node_id == node_id }
+
+        raise Kafka::Error, "No broker with id #{node_id}" if broker.nil?
+
+        broker
       end
 
       def partitions_for(topic_name)
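
This turns a silent nil into a fast failure: previously an unknown or stale
node id would surface later as a NoMethodError on nil somewhere downstream.
Now:

    metadata.find_broker(42)
    # => raises Kafka::Error, "No broker with id 42" unless the cluster
    #    metadata actually contains a broker with node_id 42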

data/lib/kafka/protocol/offset_commit_request.rb
@@ -0,0 +1,42 @@
+module Kafka
+  module Protocol
+    class OffsetCommitRequest
+      def api_key
+        8
+      end
+
+      def api_version
+        2
+      end
+
+      def response_class
+        OffsetCommitResponse
+      end
+
+      def initialize(group_id:, generation_id:, member_id:, retention_time: 0, offsets:)
+        @group_id = group_id
+        @generation_id = generation_id
+        @member_id = member_id
+        @retention_time = retention_time
+        @offsets = offsets
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_int32(@generation_id)
+        encoder.write_string(@member_id)
+        encoder.write_int64(@retention_time)
+
+        encoder.write_array(@offsets) do |topic, partitions|
+          encoder.write_string(topic)
+
+          encoder.write_array(partitions) do |partition, offset|
+            encoder.write_int32(partition)
+            encoder.write_int64(offset)
+            encoder.write_string(nil) # metadata
+          end
+        end
+      end
+    end
+  end
+end
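
The encode method implies the shape that offsets: must have: a hash mapping
topic names to hashes of partition ids to offsets. A hedged construction
sketch (the group and member ids are placeholders):

    request = Kafka::Protocol::OffsetCommitRequest.new(
      group_id: "greeters",
      generation_id: 1,
      member_id: "greeter-abc123",
      offsets: {
        "greetings" => { 0 => 41, 1 => 52 },
      },
    )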

data/lib/kafka/protocol/offset_commit_response.rb
@@ -0,0 +1,27 @@
+module Kafka
+  module Protocol
+    class OffsetCommitResponse
+      attr_reader :topics
+
+      def initialize(topics:)
+        @topics = topics
+      end
+
+      def self.decode(decoder)
+        topics = decoder.array {
+          topic = decoder.string
+          partitions = decoder.array {
+            partition = decoder.int32
+            error_code = decoder.int16
+
+            [partition, error_code]
+          }
+
+          [topic, Hash[partitions]]
+        }
+
+        new(topics: Hash[topics])
+      end
+    end
+  end
+end

data/lib/kafka/protocol/offset_fetch_request.rb
@@ -0,0 +1,34 @@
+module Kafka
+  module Protocol
+    class OffsetFetchRequest
+      def initialize(group_id:, topics:)
+        @group_id = group_id
+        @topics = topics
+      end
+
+      def api_key
+        9
+      end
+
+      def api_version
+        1
+      end
+
+      def response_class
+        OffsetFetchResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+
+        encoder.write_array(@topics) do |topic, partitions|
+          encoder.write_string(topic)
+
+          encoder.write_array(partitions) do |partition|
+            encoder.write_int32(partition)
+          end
+        end
+      end
+    end
+  end
+end

data/lib/kafka/protocol/offset_fetch_response.rb
@@ -0,0 +1,51 @@
+module Kafka
+  module Protocol
+    class OffsetFetchResponse
+      class PartitionOffsetInfo
+        attr_reader :offset, :metadata, :error_code
+
+        def initialize(offset:, metadata:, error_code:)
+          @offset = offset
+          @metadata = metadata
+          @error_code = error_code
+        end
+      end
+
+      attr_reader :topics
+
+      def initialize(topics:)
+        @topics = topics
+      end
+
+      def offset_for(topic, partition)
+        offset_info = topics.fetch(topic).fetch(partition)
+
+        Protocol.handle_error(offset_info.error_code)
+
+        offset_info.offset
+      end
+
+      def self.decode(decoder)
+        topics = decoder.array {
+          topic = decoder.string
+
+          partitions = decoder.array {
+            partition = decoder.int32
+
+            info = PartitionOffsetInfo.new(
+              offset: decoder.int64,
+              metadata: decoder.string,
+              error_code: decoder.int16,
+            )
+
+            [partition, info]
+          }
+
+          [topic, Hash[partitions]]
+        }
+
+        new(topics: Hash[topics])
+      end
+    end
+  end
+end
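
offset_for looks up and error-checks in a single call. A short usage sketch:

    response.offset_for("greetings", 0)
    # => the committed offset for partition 0 of "greetings"; raises via
    #    Protocol.handle_error if the broker returned an error code for
    #    that partition, and KeyError for an unknown topic or partition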

data/lib/kafka/protocol/sync_group_request.rb
@@ -0,0 +1,31 @@
+module Kafka
+  module Protocol
+    class SyncGroupRequest
+      def initialize(group_id:, generation_id:, member_id:, group_assignment: {})
+        @group_id = group_id
+        @generation_id = generation_id
+        @member_id = member_id
+        @group_assignment = group_assignment
+      end
+
+      def api_key
+        14
+      end
+
+      def response_class
+        SyncGroupResponse
+      end
+
+      def encode(encoder)
+        encoder.write_string(@group_id)
+        encoder.write_int32(@generation_id)
+        encoder.write_string(@member_id)
+
+        encoder.write_array(@group_assignment) do |member_id, member_assignment|
+          encoder.write_string(member_id)
+          encoder.write_bytes(Encoder.encode_with(member_assignment))
+        end
+      end
+    end
+  end
+end

data/lib/kafka/protocol/sync_group_response.rb
@@ -0,0 +1,21 @@
+require "kafka/protocol/member_assignment"
+
+module Kafka
+  module Protocol
+    class SyncGroupResponse
+      attr_reader :error_code, :member_assignment
+
+      def initialize(error_code:, member_assignment:)
+        @error_code = error_code
+        @member_assignment = member_assignment
+      end
+
+      def self.decode(decoder)
+        new(
+          error_code: decoder.int16,
+          member_assignment: MemberAssignment.decode(Decoder.from_string(decoder.bytes)),
+        )
+      end
+    end
+  end
+end

data/lib/kafka/round_robin_assignment_strategy.rb
@@ -0,0 +1,40 @@
+require "kafka/protocol/member_assignment"
+
+module Kafka
+
+  # A consumer group partition assignment strategy that assigns partitions to
+  # consumers in a round-robin fashion.
+  class RoundRobinAssignmentStrategy
+    def initialize(cluster:)
+      @cluster = cluster
+    end
+
+    # Assign the topic partitions to the group members.
+    #
+    # @param members [Array<String>] member ids
+    # @param topics [Array<String>] topics
+    # @return [Hash<String, Protocol::MemberAssignment>] a hash mapping member
+    #   ids to assignments.
+    def assign(members:, topics:)
+      group_assignment = {}
+
+      members.each do |member_id|
+        group_assignment[member_id] = Protocol::MemberAssignment.new
+      end
+
+      topics.each do |topic|
+        partitions = @cluster.partitions_for(topic).map(&:partition_id)
+
+        partitions_per_member = partitions.group_by {|partition_id|
+          partition_id % members.count
+        }.values
+
+        members.zip(partitions_per_member).each do |member_id, member_partitions|
+          group_assignment[member_id].assign(topic, member_partitions)
+        end
+      end
+
+      group_assignment
+    end
+  end
+end
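
A hedged end-to-end sketch with a stubbed cluster; the strategy only needs
#partitions_for to return objects responding to #partition_id, so a Struct
stands in for the real partition metadata:

    Partition = Struct.new(:partition_id)

    cluster = Object.new
    def cluster.partitions_for(topic)
      (0..5).map {|id| Partition.new(id) }
    end

    strategy = Kafka::RoundRobinAssignmentStrategy.new(cluster: cluster)

    assignments = strategy.assign(
      members: ["consumer-1", "consumer-2"],
      topics: ["greetings"],
    )

    # Partitions are dealt out modulo the member count:
    assignments["consumer-1"].topics # => { "greetings" => [0, 2, 4] }
    assignments["consumer-2"].topics # => { "greetings" => [1, 3, 5] }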