wooga-kafka-rb 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,68 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ module Kafka
+   class Producer
+
+     include Kafka::IO
+
+     PRODUCE_REQUEST_ID = Kafka::RequestType::PRODUCE
+
+     attr_accessor :topic, :partition
+
+     def initialize(options = {})
+       self.topic = options[:topic] || "test"
+       self.partition = options[:partition] || 0
+       self.host = options[:host] || "localhost"
+       self.port = options[:port] || 9092
+       self.connect(self.host, self.port)
+     end
+
+     def encode(message)
+       if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
+         ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
+         [message.magic].pack("C") + [message.calculate_checksum].pack("N") + ic.iconv(message.payload.to_s)
+       else
+         [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload.to_s.force_encoding(Encoding::ASCII_8BIT)
+       end
+     end
+
+     def encode_request(topic, partition, messages)
+       message_set = Array(messages).collect { |message|
+         encoded_message = self.encode(message)
+         [encoded_message.length].pack("N") + encoded_message
+       }.join("")
+
+       request = [PRODUCE_REQUEST_ID].pack("n")
+       topic = [topic.length].pack("n") + topic
+       partition = [partition].pack("N")
+       messages = [message_set.length].pack("N") + message_set
+
+       data = request + topic + partition + messages
+
+       return [data.length].pack("N") + data
+     end
+
+     def send(messages)
+       self.write(self.encode_request(self.topic, self.partition, messages))
+     end
+
+     def batch(&block)
+       batch = Kafka::Batch.new
+       block.call( batch )
+       self.send(batch.messages)
+       batch.messages.clear
+     end
+   end
+ end
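
For reference, the wire format that Producer#encode_request builds above is length-prefixed packed binary: a 4-byte big-endian size for the whole request, a 2-byte request id, a 2-byte topic length plus the topic, a 4-byte partition, then a 4-byte message-set size followed by the messages, each of which is itself a 4-byte size, a 1-byte magic value, a 4-byte checksum and the payload. The sketch below reproduces that layout with only Ruby's standard library; it is illustrative only and assumes the message checksum is a plain CRC32 of the payload (the value the specs later in this diff expect for "ale").

    # Illustrative sketch only (not part of the gem): rebuild the produce
    # request byte layout that Producer#encode_request emits, assuming the
    # checksum is Zlib.crc32 of the payload.
    require 'zlib'

    payload  = "ale"
    magic    = 0
    checksum = Zlib.crc32(payload)              # the specs in this diff expect 1120192889 for "ale"

    message     = [magic].pack("C") + [checksum].pack("N") + payload
    message_set = [message.length].pack("N") + message

    body  = [0].pack("n")                       # request id: Kafka::RequestType::PRODUCE
    body += ["test".length].pack("n") + "test"  # topic length + topic
    body += [3].pack("N")                       # partition
    body += [message_set.length].pack("N") + message_set

    request = [body.length].pack("N") + body
    request.length  # => 32, the overall size producer_spec.rb asserts for this case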
@@ -0,0 +1,23 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ module Kafka
+   module RequestType
+     PRODUCE = 0
+     FETCH = 1
+     MULTIFETCH = 2
+     MULTIPRODUCE = 3
+     OFFSETS = 4
+   end
+ end
data/lib/kafka.rb ADDED
@@ -0,0 +1,33 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require 'socket'
+ require 'zlib'
+ if RUBY_VERSION[0,3] == "1.8"
+   require 'iconv'
+ end
+
+ require File.join(File.dirname(__FILE__), "kafka", "io")
+ require File.join(File.dirname(__FILE__), "kafka", "request_type")
+ require File.join(File.dirname(__FILE__), "kafka", "error_codes")
+ require File.join(File.dirname(__FILE__), "kafka", "batch")
+ require File.join(File.dirname(__FILE__), "kafka", "message")
+ require File.join(File.dirname(__FILE__), "kafka", "producer")
+ require File.join(File.dirname(__FILE__), "kafka", "consumer")
+
+ module Kafka
+
+   class SocketError < RuntimeError; end
+
+ end
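
kafka.rb above is the gem's entry point: it pulls in the IO mixin, the request types, and the message, batch, producer and consumer classes. A minimal usage sketch inferred from those classes and their specs in this diff follows; it assumes a Kafka broker is actually listening on the default localhost:9092, since Producer.new and Consumer.new connect immediately.

    # Usage sketch inferred from this diff; not an official example from the gem.
    require 'kafka'

    producer = Kafka::Producer.new(:host => "localhost", :port => 9092,
                                   :topic => "test", :partition => 0)

    # Send one message; Producer#send also accepts an array of messages.
    producer.send(Kafka::Message.new("hello"))

    # Queue several messages and send them as a single request.
    producer.batch do |messages|
      messages << Kafka::Message.new("one")
      messages << Kafka::Message.new("two")
    end

    # Consumer#consume returns an Array of Kafka::Message (see consumer_spec.rb);
    # Consumer#loop(&block) polls repeatedly, every `polling` seconds (2 by default).
    consumer = Kafka::Consumer.new(:topic => "test", :partition => 0, :offset => 0)
    consumer.consume.each { |message| puts message.payload }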
@@ -0,0 +1,35 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Batch do
+
+   before(:each) do
+     @batch = Batch.new
+   end
+
+   describe "batch messages" do
+     it "holds all messages to be sent" do
+       @batch.should respond_to(:messages)
+       @batch.messages.class.should eql(Array)
+     end
+
+     it "supports queueing/adding messages to be sent" do
+       @batch.messages << mock(Kafka::Message.new("one"))
+       @batch.messages << mock(Kafka::Message.new("two"))
+       @batch.messages.length.should eql(2)
+     end
+   end
+ end
@@ -0,0 +1,179 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Consumer do
+
+   before(:each) do
+     @mocked_socket = mock(TCPSocket)
+     TCPSocket.stub!(:new).and_return(@mocked_socket) # don't use a real socket
+     @consumer = Consumer.new(:offset => 0)
+   end
+
+   describe "Kafka Consumer" do
+
+     it "should have a Kafka::RequestType::FETCH" do
+       Consumer::Kafka::RequestType::FETCH.should eql(1)
+       @consumer.should respond_to(:request_type)
+     end
+
+     it "should have a topic and a partition" do
+       @consumer.should respond_to(:topic)
+       @consumer.should respond_to(:partition)
+     end
+
+     it "should have a polling option, and a default value" do
+       Consumer::DEFAULT_POLLING_INTERVAL.should eql(2)
+       @consumer.should respond_to(:polling)
+       @consumer.polling.should eql(2)
+     end
+
+     it "should set a topic and partition on initialize" do
+       @consumer = Consumer.new({ :host => "localhost", :port => 9092, :topic => "testing" })
+       @consumer.topic.should eql("testing")
+       @consumer.partition.should eql(0)
+       @consumer = Consumer.new({ :topic => "testing", :partition => 3 })
+       @consumer.partition.should eql(3)
+     end
+
+     it "should set default host and port if none is specified" do
+       @consumer = Consumer.new
+       @consumer.host.should eql("localhost")
+       @consumer.port.should eql(9092)
+     end
+
+     it "should not have a default offset but be able to set it" do
+       @consumer = Consumer.new
+       @consumer.offset.should be_nil
+       @consumer = Consumer.new({ :offset => 1111 })
+       @consumer.offset.should eql(1111)
+     end
+
+     it "should have a max size" do
+       Consumer::MAX_SIZE.should eql(1048576)
+       @consumer.max_size.should eql(1048576)
+     end
+
+     it "should return the size of the request" do
+       @consumer.topic = "someothertopicname"
+       @consumer.encoded_request_size.should eql([38].pack("N"))
+     end
+
+     it "should encode a request to consume" do
+       bytes = [Kafka::RequestType::FETCH].pack("n") + ["test".length].pack("n") + "test" + [0].pack("N") + [0].pack("q").reverse + [Kafka::Consumer::MAX_SIZE].pack("N")
+       @consumer.encode_request(Kafka::RequestType::FETCH, "test", 0, 0, Kafka::Consumer::MAX_SIZE).should eql(bytes)
+     end
+
+     it "should read the response data" do
+       bytes = [0].pack("n") + [1120192889].pack("N") + "ale"
+       @mocked_socket.should_receive(:read).and_return([9].pack("N"))
+       @mocked_socket.should_receive(:read).with(9).and_return(bytes)
+       @consumer.read_data_response.should eql(bytes[2,7])
+     end
+
+     it "should send a consumer request" do
+       @consumer.stub!(:encoded_request_size).and_return(666)
+       @consumer.stub!(:encode_request).and_return("someencodedrequest")
+       @consumer.should_receive(:write).with("someencodedrequest").exactly(:once).and_return(true)
+       @consumer.should_receive(:write).with(666).exactly(:once).and_return(true)
+       @consumer.send_consume_request.should eql(true)
+     end
+
+     it "should parse a message set from bytes" do
+       bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
+       message = @consumer.parse_message_set_from(bytes).first
+       message.payload.should eql("ale")
+       message.checksum.should eql(1120192889)
+       message.magic.should eql(0)
+       message.valid?.should eql(true)
+     end
+
+     it "should skip an incomplete message at the end of the response" do
+       bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
+       # incomplete message
+       bytes += [8].pack("N")
+       messages = @consumer.parse_message_set_from(bytes)
+       messages.size.should eql(1)
+     end
+
+     it "should skip an incomplete message at the end of the response which has the same length as an empty message" do
+       bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
+       # incomplete message because payload is missing
+       bytes += [8].pack("N") + [0].pack("C") + [1120192889].pack("N")
+       messages = @consumer.parse_message_set_from(bytes)
+       messages.size.should eql(1)
+     end
+
+     it "should read empty messages correctly" do
+       # empty message
+       bytes = [5].pack("N") + [0].pack("C") + [0].pack("N") + ""
+       messages = @consumer.parse_message_set_from(bytes)
+       messages.size.should eql(1)
+       messages.first.payload.should eql("")
+     end
+
+     it "should consume messages" do
+       @consumer.should_receive(:send_consume_request).and_return(true)
+       @consumer.should_receive(:read_data_response).and_return("")
+       @consumer.consume.should eql([])
+     end
+
+     it "should loop and execute a block with the consumed messages" do
+       @consumer.stub!(:consume).and_return([mock(Kafka::Message)])
+       messages = []
+       messages.should_receive(:<<).exactly(:once).and_return([])
+       @consumer.loop do |message|
+         messages << message
+         break # we don't wanna loop forever on the test
+       end
+     end
+
+     it "should loop (every N seconds, configurable on polling attribute), and execute a block with the consumed messages" do
+       @consumer = Consumer.new({ :polling => 1 })
+       @consumer.stub!(:consume).and_return([mock(Kafka::Message)])
+       messages = []
+       messages.should_receive(:<<).exactly(:twice).and_return([])
+       executed_times = 0
+       @consumer.loop do |message|
+         messages << message
+         executed_times += 1
+         break if executed_times >= 2 # we don't wanna loop forever on the test, only 2 seconds
+       end
+
+       executed_times.should eql(2)
+     end
+
+     it "should fetch initial offset if no offset is given" do
+       @consumer = Consumer.new
+       @consumer.should_receive(:fetch_earliest_offset).exactly(:once).and_return(1000)
+       @consumer.should_receive(:send_consume_request).and_return(true)
+       @consumer.should_receive(:read_data_response).and_return("")
+       @consumer.consume
+       @consumer.offset.should eql(1000)
+     end
+
+     it "should encode an offset request" do
+       bytes = [Kafka::RequestType::OFFSETS].pack("n") + ["test".length].pack("n") + "test" + [0].pack("N") + [-2].pack("q").reverse + [Kafka::Consumer::MAX_OFFSETS].pack("N")
+       @consumer.encode_request(Kafka::RequestType::OFFSETS, "test", 0, -2, Kafka::Consumer::MAX_OFFSETS).should eql(bytes)
+     end
+
+     it "should parse an offsets response" do
+       bytes = [0].pack("n") + [1].pack('N') + [21346].pack('q').reverse
+       @mocked_socket.should_receive(:read).and_return([14].pack("N"))
+       @mocked_socket.should_receive(:read).and_return(bytes)
+       @consumer.read_offsets_response.should eql(21346)
+     end
+   end
+ end
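
The parse_message_set_from examples above pin down the fetch-response framing: each message is a 4-byte big-endian size, then a 1-byte magic value, a 4-byte CRC32 checksum and the payload, and an incomplete trailing message is skipped rather than parsed. The following is a hypothetical, self-contained illustration of that parsing loop, written only to make the framing explicit; it is not the gem's implementation.

    # Hypothetical illustration of the message-set framing exercised by the
    # specs above; not the gem's code.
    require 'zlib'

    SketchMessage = Struct.new(:magic, :checksum, :payload) do
      def valid?
        checksum == Zlib.crc32(payload)
      end
    end

    def parse_message_set(bytes)
      messages = []
      offset = 0
      while offset + 4 <= bytes.length
        size = bytes[offset, 4].unpack("N").first
        break if offset + 4 + size > bytes.length  # incomplete trailing message: skip it
        frame = bytes[offset + 4, size]
        messages << SketchMessage.new(frame[0, 1].unpack("C").first,  # magic
                                      frame[1, 4].unpack("N").first,  # checksum
                                      frame[5..-1].to_s)              # payload (may be empty)
        offset += 4 + size
      end
      messages
    end

    bytes = [8].pack("N") + [0].pack("C") + [Zlib.crc32("ale")].pack("N") + "ale"
    parse_message_set(bytes).map(&:payload)  # => ["ale"]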
data/spec/io_spec.rb ADDED
@@ -0,0 +1,88 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ class IOTest
+   include Kafka::IO
+ end
+
+ describe IO do
+
+   before(:each) do
+     @mocked_socket = mock(TCPSocket)
+     TCPSocket.stub!(:new).and_return(@mocked_socket) # don't use a real socket
+     @io = IOTest.new
+     @io.connect("somehost", 9093)
+   end
+
+   describe "default methods" do
+     it "has a socket, a host and a port" do
+       [:socket, :host, :port].each do |m|
+         @io.should respond_to(m.to_sym)
+       end
+     end
+
+     it "raises an exception if no host and port is specified" do
+       lambda {
+         io = IOTest.new
+         io.connect
+       }.should raise_error(ArgumentError)
+     end
+
+     it "should remember the port and host on connect" do
+       @io.connect("somehost", 9093)
+       @io.host.should eql("somehost")
+       @io.port.should eql(9093)
+     end
+
+     it "should write to a socket" do
+       data = "some data"
+       @mocked_socket.should_receive(:write).with(data).and_return(9)
+       @io.write(data).should eql(9)
+     end
+
+     it "should read from a socket" do
+       length = 200
+       @mocked_socket.should_receive(:read).with(length).and_return("foo")
+       @io.read(length)
+     end
+
+     it "should disconnect on a timeout when reading from a socket (to avoid protocol desync state)" do
+       length = 200
+       @mocked_socket.should_receive(:read).with(length).and_raise(Errno::EAGAIN)
+       @io.should_receive(:disconnect)
+       lambda { @io.read(length) }.should raise_error(Kafka::SocketError)
+     end
+
+     it "should disconnect" do
+       @io.should respond_to(:disconnect)
+       @mocked_socket.should_receive(:close).and_return(nil)
+       @io.disconnect
+     end
+
+     it "should reconnect" do
+       TCPSocket.should_receive(:new)
+       @io.reconnect
+     end
+
+     it "should disconnect on a broken pipe error" do
+       [Errno::ECONNABORTED, Errno::EPIPE, Errno::ECONNRESET].each do |error|
+         @mocked_socket.should_receive(:write).exactly(:once).and_raise(error)
+         @mocked_socket.should_receive(:close).exactly(:once).and_return(nil)
+         lambda { @io.write("some data to send") }.should raise_error(Kafka::SocketError)
+       end
+     end
+   end
+ end
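
io_spec.rb describes the contract of the Kafka::IO mixin rather than its implementation (lib/kafka/io.rb itself is not shown in this diff): reads and writes go straight to a TCPSocket, and a read timeout or a broken pipe closes the socket and resurfaces as Kafka::SocketError, so callers never keep using a desynchronized connection. A hypothetical sketch of a mixin satisfying that contract:

    # Hypothetical sketch of an IO mixin satisfying io_spec.rb; the gem's
    # actual lib/kafka/io.rb may differ in details.
    require 'socket'

    module SketchIO
      class SocketError < RuntimeError; end

      attr_accessor :socket, :host, :port

      def connect(host, port)             # both arguments required, as the spec expects
        self.host = host
        self.port = port
        self.socket = TCPSocket.new(host, port)
      end

      def reconnect
        self.socket = TCPSocket.new(host, port)
      end

      def disconnect
        socket.close rescue nil
      end

      def write(data)
        socket.write(data)
      rescue Errno::ECONNABORTED, Errno::EPIPE, Errno::ECONNRESET
        disconnect                        # drop the broken connection
        raise SocketError, "connection broken while writing"
      end

      def read(length)
        socket.read(length)
      rescue Errno::EAGAIN
        disconnect                        # avoid a desynchronized protocol state
        raise SocketError, "timeout while reading #{length} bytes"
      end
    end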
@@ -0,0 +1,21 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Kafka do
+
+   before(:each) do
+   end
+ end
@@ -0,0 +1,69 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Message do
+
+   before(:each) do
+     @message = Message.new
+   end
+
+   describe "Kafka Message" do
+     it "should have a default magic number" do
+       Message::MAGIC_IDENTIFIER_DEFAULT.should eql(0)
+     end
+
+     it "should have a magic field, a checksum and a payload" do
+       [:magic, :checksum, :payload].each do |field|
+         @message.should respond_to(field.to_sym)
+       end
+     end
+
+     it "should set a default value of zero" do
+       @message.magic.should eql(Kafka::Message::MAGIC_IDENTIFIER_DEFAULT)
+     end
+
+     it "should allow setting a custom magic number" do
+       @message = Message.new("ale", 1)
+       @message.magic.should eql(1)
+     end
+
+     it "should calculate the checksum (crc32 of a given message)" do
+       @message.payload = "ale"
+       @message.calculate_checksum.should eql(1120192889)
+       @message.payload = "alejandro"
+       @message.calculate_checksum.should eql(2865078607)
+     end
+
+     it "should say if the message is valid using the crc32 signature" do
+       @message.payload = "alejandro"
+       @message.checksum = 2865078607
+       @message.valid?.should eql(true)
+       @message.checksum = 0
+       @message.valid?.should eql(false)
+       @message = Message.new("alejandro", 0, 66666666) # 66666666 is a funny checksum
+       @message.valid?.should eql(false)
+     end
+
+     it "should parse a message from bytes" do
+       bytes = [12].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
+       message = Kafka::Message.parse_from(bytes)
+       message.valid?.should eql(true)
+       message.magic.should eql(0)
+       message.checksum.should eql(1120192889)
+       message.payload.should eql("ale")
+     end
+   end
+ end
@@ -0,0 +1,123 @@
+ # encoding: utf-8
+
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Producer do
+
+   before(:each) do
+     @mocked_socket = mock(TCPSocket)
+     TCPSocket.stub!(:new).and_return(@mocked_socket) # don't use a real socket
+     @producer = Producer.new
+   end
+
+   describe "Kafka Producer" do
+     it "should have a PRODUCE_REQUEST_ID" do
+       Producer::PRODUCE_REQUEST_ID.should eql(0)
+     end
+
+     it "should have a topic and a partition" do
+       @producer.should respond_to(:topic)
+       @producer.should respond_to(:partition)
+     end
+
+     it "should set a topic and partition on initialize" do
+       @producer = Producer.new({ :host => "localhost", :port => 9092, :topic => "testing" })
+       @producer.topic.should eql("testing")
+       @producer.partition.should eql(0)
+       @producer = Producer.new({ :topic => "testing", :partition => 3 })
+       @producer.partition.should eql(3)
+     end
+
+     it "should set default host and port if none is specified" do
+       @producer = Producer.new
+       @producer.host.should eql("localhost")
+       @producer.port.should eql(9092)
+     end
+
+     describe "Message Encoding" do
+       it "should encode a message" do
+         message = Kafka::Message.new("alejandro")
+         full_message = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload
+         @producer.encode(message).should eql(full_message)
+       end
+
+       it "should encode an empty message" do
+         message = Kafka::Message.new()
+         full_message = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload.to_s
+         @producer.encode(message).should eql(full_message)
+       end
+
+       it "should encode strings containing non-ASCII characters" do
+         message = Kafka::Message.new("ümlaut")
+         encoded = @producer.encode(message)
+         data = [encoded.size].pack("N") + encoded
+         if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
+           ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
+           ic.iconv(Kafka::Message.parse_from(data).payload).should eql("ümlaut")
+         else
+           Kafka::Message.parse_from(data).payload.force_encoding(Encoding::UTF_8).should eql("ümlaut")
+         end
+       end
+     end
+
+     describe "Request Encoding" do
+       it "should binary encode an empty request" do
+         bytes = @producer.encode_request("test", 0, [])
+         bytes.length.should eql(20)
+         bytes.should eql("\000\000\000\020\000\000\000\004test\000\000\000\000\000\000\000\000")
+       end
+
+       it "should binary encode a request with a message, using a specific wire format" do
+         message = Kafka::Message.new("ale")
+         bytes = @producer.encode_request("test", 3, message)
+         data_size = bytes[0, 4].unpack("N").shift
+         request_id = bytes[4, 2].unpack("n").shift
+         topic_length = bytes[6, 2].unpack("n").shift
+         topic = bytes[8, 4]
+         partition = bytes[12, 4].unpack("N").shift
+         messages_length = bytes[16, 4].unpack("N").shift
+         messages = bytes[20, messages_length]
+
+         bytes.length.should eql(32)
+         data_size.should eql(28)
+         request_id.should eql(0)
+         topic_length.should eql(4)
+         topic.should eql("test")
+         partition.should eql(3)
+         messages_length.should eql(12)
+       end
+     end
+   end
+
+   it "should send messages" do
+     @producer.should_receive(:write).and_return(32)
+     message = Kafka::Message.new("ale")
+     @producer.send(message).should eql(32)
+   end
+
+   describe "Message Batching" do
+     it "should batch messages and send them at once" do
+       message1 = Kafka::Message.new("one")
+       message2 = Kafka::Message.new("two")
+       @producer.should_receive(:send).with([message1, message2]).exactly(:once).and_return(nil)
+       @producer.batch do |messages|
+         messages << message1
+         messages << message2
+       end
+     end
+   end
+ end
@@ -0,0 +1,18 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require 'rubygems'
+ require 'kafka'
+
+ include Kafka