leffen-kafka-rb 0.0.15

This diff shows the content of publicly available package versions released to a supported registry, exactly as it appears in that registry. It is provided for informational purposes only.
@@ -0,0 +1,35 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ module Kafka
+   class MultiProducer
+     include Kafka::IO
+
+     def initialize(options={})
+       self.host = options[:host] || HOST
+       self.port = options[:port] || PORT
+       self.compression = options[:compression] || Message::NO_COMPRESSION
+       self.connect(self.host, self.port)
+     end
+
+     def push(topic, messages, options={})
+       partition = options[:partition] || 0
+       self.write(Encoder.produce(topic, partition, messages, compression))
+     end
+
+     def multi_push(producer_requests)
+       self.write(Encoder.multiproduce(producer_requests, compression))
+     end
+   end
+ end
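
A minimal usage sketch of the MultiProducer API above, assuming the gem is loaded via require 'kafka' and that Kafka::Message wraps a string payload, as the specs later in this diff suggest; the topic names are hypothetical, and host/port defaults come from Kafka::IO:

    require 'kafka'  # assumed entry point; the lib shim in this diff requires "kafka"

    # Defaults (HOST/PORT and Message::NO_COMPRESSION) apply when options are omitted.
    producer = Kafka::MultiProducer.new(:host => "localhost", :port => 9092)

    # Single topic/partition produce request:
    producer.push("logs", [Kafka::Message.new("hello")], :partition => 0)

    # Several topics in one request, via ProducerRequest (defined further down):
    producer.multi_push([
      Kafka::ProducerRequest.new("logs",    [Kafka::Message.new("line 1")]),
      Kafka::ProducerRequest.new("metrics", [Kafka::Message.new("cpu 0.7")], :partition => 1)
    ])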
@@ -0,0 +1,42 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ module Kafka
+   class Producer
+
+     include Kafka::IO
+
+     attr_accessor :topic, :partition
+
+     def initialize(options = {})
+       self.topic = options[:topic] || "test"
+       self.partition = options[:partition] || 0
+       self.host = options[:host] || HOST
+       self.port = options[:port] || PORT
+       self.compression = options[:compression] || Message::NO_COMPRESSION
+       self.connect(self.host, self.port)
+     end
+
+     def push(messages)
+       self.write(Encoder.produce(self.topic, self.partition, messages, compression))
+     end
+
+     def batch(&block)
+       batch = Kafka::Batch.new
+       block.call( batch )
+       push(batch.messages)
+       batch.messages.clear
+     end
+   end
+ end
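
The single-topic Producer above follows the same pattern; a sketch, again assuming require 'kafka' and string-payload messages (topic name hypothetical):

    require 'kafka'

    producer = Kafka::Producer.new(:topic => "test", :partition => 0)

    # Push an array of messages directly...
    producer.push([Kafka::Message.new("one"), Kafka::Message.new("two")])

    # ...or collect them in a Kafka::Batch; the collected messages are pushed
    # and the batch is cleared when the block returns.
    producer.batch do |b|
      b.messages << Kafka::Message.new("three")
      b.messages << Kafka::Message.new("four")
    end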
@@ -0,0 +1,26 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Kafka
+   class ProducerRequest
+     attr_accessor :topic, :messages, :partition
+
+     def initialize(topic, messages, options={})
+       self.topic = topic
+       self.partition = options[:partition] || 0
+       self.messages = Array(messages)
+     end
+   end
+ end
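
ProducerRequest is a small value object consumed by MultiProducer#multi_push; because of Array(messages), a single message can be passed without wrapping it (topic name hypothetical):

    req = Kafka::ProducerRequest.new("logs", Kafka::Message.new("single"))
    req.messages.length  # => 1, Array(messages) wraps the lone message
    req.partition        # => 0, the default when :partition is not given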
@@ -0,0 +1,23 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ module Kafka
+   module RequestType
+     PRODUCE = 0
+     FETCH = 1
+     MULTIFETCH = 2
+     MULTIPRODUCE = 3
+     OFFSETS = 4
+   end
+ end
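
These constants tag each request on the wire; the consumer spec near the end of this diff packs them as a 2-byte big-endian integer at the start of every encoded request, for example:

    [Kafka::RequestType::FETCH].pack("n")    # => "\x00\x01"
    [Kafka::RequestType::OFFSETS].pack("n")  # => "\x00\x04"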
@@ -0,0 +1,16 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ require File.join(File.dirname(__FILE__), "kafka")
@@ -0,0 +1,35 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Batch do
+
+   before(:each) do
+     @batch = Batch.new
+   end
+
+   describe "batch messages" do
+     it "holds all messages to be sent" do
+       @batch.should respond_to(:messages)
+       @batch.messages.class.should eql(Array)
+     end
+
+     it "supports queueing/adding messages to be send" do
+       @batch.messages << mock(Kafka::Message.new("one"))
+       @batch.messages << mock(Kafka::Message.new("two"))
+       @batch.messages.length.should eql(2)
+     end
+   end
+ end
@@ -0,0 +1,133 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+ require 'kafka/cli'
+
+ describe CLI do
+
+   before(:each) do
+     CLI.instance_variable_set("@config", {})
+     CLI.stub(:puts)
+   end
+
+   describe "should read from env" do
+     describe "kafka host" do
+       it "should read KAFKA_HOST from env" do
+         CLI.read_env("KAFKA_HOST" => "google.com")
+         CLI.config[:host].should == "google.com"
+       end
+
+       it "kafka port" do
+         CLI.read_env("KAFKA_PORT" => "1234")
+         CLI.config[:port].should == 1234
+       end
+
+       it "kafka topic" do
+         CLI.read_env("KAFKA_TOPIC" => "news")
+         CLI.config[:topic].should == "news"
+       end
+
+       it "kafka compression" do
+         CLI.read_env("KAFKA_COMPRESSION" => "no")
+         CLI.config[:compression].should == Message::NO_COMPRESSION
+
+         CLI.read_env("KAFKA_COMPRESSION" => "gzip")
+         CLI.config[:compression].should == Message::GZIP_COMPRESSION
+
+         CLI.read_env("KAFKA_COMPRESSION" => "snappy")
+         CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
+       end
+     end
+   end
+
+   describe "should read from command line" do
+     it "kafka host" do
+       CLI.parse_args(%w(--host google.com))
+       CLI.config[:host].should == "google.com"
+
+       CLI.parse_args(%w(-h google.com))
+       CLI.config[:host].should == "google.com"
+     end
+
+     it "kafka port" do
+       CLI.parse_args(%w(--port 1234))
+       CLI.config[:port].should == 1234
+
+       CLI.parse_args(%w(-p 1234))
+       CLI.config[:port].should == 1234
+     end
+
+     it "kafka topic" do
+       CLI.parse_args(%w(--topic news))
+       CLI.config[:topic].should == "news"
+
+       CLI.parse_args(%w(-t news))
+       CLI.config[:topic].should == "news"
+     end
+
+     it "kafka compression" do
+       CLI.stub(:publish? => true)
+
+       CLI.parse_args(%w(--compression no))
+       CLI.config[:compression].should == Message::NO_COMPRESSION
+       CLI.parse_args(%w(-c no))
+       CLI.config[:compression].should == Message::NO_COMPRESSION
+
+       CLI.parse_args(%w(--compression gzip))
+       CLI.config[:compression].should == Message::GZIP_COMPRESSION
+       CLI.parse_args(%w(-c gzip))
+       CLI.config[:compression].should == Message::GZIP_COMPRESSION
+
+       CLI.parse_args(%w(--compression snappy))
+       CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
+       CLI.parse_args(%w(-c snappy))
+       CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
+     end
+
+     it "message" do
+       CLI.stub(:publish? => true)
+       CLI.parse_args(%w(--message YEAH))
+       CLI.config[:message].should == "YEAH"
+
+       CLI.parse_args(%w(-m YEAH))
+       CLI.config[:message].should == "YEAH"
+     end
+
+   end
+
+   describe "config validation" do
+     it "should assign a default port" do
+       CLI.stub(:exit)
+       CLI.stub(:puts)
+       CLI.validate_config
+       CLI.config[:port].should == Kafka::IO::PORT
+     end
+   end
+
+   it "should assign a default host" do
+     CLI.stub(:exit)
+     CLI.validate_config
+     CLI.config[:host].should == Kafka::IO::HOST
+   end
+
+
+   it "read compression method" do
+     CLI.string_to_compression("no").should == Message::NO_COMPRESSION
+     CLI.string_to_compression("gzip").should == Message::GZIP_COMPRESSION
+     CLI.string_to_compression("snappy").should == Message::SNAPPY_COMPRESSION
+     lambda { CLI.push(:string_to_compression,nil) }.should raise_error
+   end
+
+ end
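
The spec above pins down the CLI's two configuration sources; a sketch of how they are exercised, assuming CLI is namespaced under Kafka as the kafka/cli require path suggests (the host name is hypothetical):

    require 'kafka/cli'

    # From the environment:
    Kafka::CLI.read_env("KAFKA_HOST" => "broker.example", "KAFKA_COMPRESSION" => "gzip")

    # Or from command-line flags (-h/--host, -p/--port, -t/--topic,
    # -c/--compression, -m/--message):
    Kafka::CLI.parse_args(%w(--host broker.example --port 9092 --topic news))
    Kafka::CLI.config[:topic]  # => "news"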
@@ -0,0 +1,146 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require File.dirname(__FILE__) + '/spec_helper'
+
+ describe Consumer do
+
+   before(:each) do
+     @mocked_socket = mock(TCPSocket)
+     TCPSocket.stub!(:new).and_return(@mocked_socket) # don't use a real socket
+     @consumer = Consumer.new(:offset => 0)
+   end
+
+   describe "Kafka Consumer" do
+
+     it "should have a Kafka::RequestType::FETCH" do
+       Kafka::RequestType::FETCH.should eql(1)
+       @consumer.should respond_to(:request_type)
+     end
+
+     it "should have a topic and a partition" do
+       @consumer.should respond_to(:topic)
+       @consumer.should respond_to(:partition)
+     end
+
+     it "should have a polling option, and a default value" do
+       Consumer::DEFAULT_POLLING_INTERVAL.should eql(2)
+       @consumer.should respond_to(:polling)
+       @consumer.polling.should eql(2)
+     end
+
+     it "should set a topic and partition on initialize" do
+       @consumer = Consumer.new({ :host => "localhost", :port => 9092, :topic => "testing" })
+       @consumer.topic.should eql("testing")
+       @consumer.partition.should eql(0)
+       @consumer = Consumer.new({ :topic => "testing", :partition => 3 })
+       @consumer.partition.should eql(3)
+     end
+
+     it "should set default host and port if none is specified" do
+       @consumer = Consumer.new
+       @consumer.host.should eql("localhost")
+       @consumer.port.should eql(9092)
+     end
+
+     it "should not have a default offset but be able to set it" do
+       @consumer = Consumer.new
+       @consumer.offset.should be_nil
+       @consumer = Consumer.new({ :offset => 1111 })
+       @consumer.offset.should eql(1111)
+     end
+
+     it "should have a max size" do
+       Consumer::MAX_SIZE.should eql(1048576)
+       @consumer.max_size.should eql(1048576)
+     end
+
+     it "should return the size of the request" do
+       @consumer.topic = "someothertopicname"
+       @consumer.encoded_request_size.should eql([38].pack("N"))
+     end
+
+     it "should encode a request to consume" do
+       bytes = [Kafka::RequestType::FETCH].pack("n") + ["test".length].pack("n") + "test" + [0].pack("N") + [0].pack("q").reverse + [Kafka::Consumer::MAX_SIZE].pack("N")
+       @consumer.encode_request(Kafka::RequestType::FETCH, "test", 0, 0, Kafka::Consumer::MAX_SIZE).should eql(bytes)
+     end
+
+     it "should read the response data" do
+       bytes = [0].pack("n") + [1120192889].pack("N") + "ale"
+       @mocked_socket.should_receive(:read).and_return([9].pack("N"))
+       @mocked_socket.should_receive(:read).with(9).and_return(bytes)
+       @consumer.read_data_response.should eql(bytes[2,7])
+     end
+
+     it "should send a consumer request" do
+       @consumer.stub!(:encoded_request_size).and_return(666)
+       @consumer.stub!(:encode_request).and_return("someencodedrequest")
+       @consumer.should_receive(:write).with("someencodedrequest").exactly(:once).and_return(true)
+       @consumer.should_receive(:write).with(666).exactly(:once).and_return(true)
+       @consumer.send_consume_request.should eql(true)
+     end
+
+     it "should consume messages" do
+       @consumer.should_receive(:send_consume_request).and_return(true)
+       @consumer.should_receive(:read_data_response).and_return("")
+       @consumer.consume.should eql([])
+     end
+
+     it "should loop and execute a block with the consumed messages" do
+       @consumer.stub!(:consume).and_return([mock(Kafka::Message)])
+       messages = []
+       messages.should_receive(:<<).exactly(:once).and_return([])
+       @consumer.loop do |message|
+         messages << message
+         break # we don't wanna loop forever on the test
+       end
+     end
+
+     it "should loop (every N seconds, configurable on polling attribute), and execute a block with the consumed messages" do
+       @consumer = Consumer.new({ :polling => 1 })
+       @consumer.stub!(:consume).and_return([mock(Kafka::Message)])
+       messages = []
+       messages.should_receive(:<<).exactly(:twice).and_return([])
+       executed_times = 0
+       @consumer.loop do |message|
+         messages << message
+         executed_times += 1
+         break if executed_times >= 2 # we don't wanna loop forever on the test, only 2 seconds
+       end
+
+       executed_times.should eql(2)
+     end
+
+     it "should fetch initial offset if no offset is given" do
+       @consumer = Consumer.new
+       @consumer.should_receive(:fetch_latest_offset).exactly(:once).and_return(1000)
+       @consumer.should_receive(:send_consume_request).and_return(true)
+       @consumer.should_receive(:read_data_response).and_return("")
+       @consumer.consume
+       @consumer.offset.should eql(1000)
+     end
+
+     it "should encode an offset request" do
+       bytes = [Kafka::RequestType::OFFSETS].pack("n") + ["test".length].pack("n") + "test" + [0].pack("N") + [-1].pack("q").reverse + [Kafka::Consumer::MAX_OFFSETS].pack("N")
+       @consumer.encode_request(Kafka::RequestType::OFFSETS, "test", 0, -1, Kafka::Consumer::MAX_OFFSETS).should eql(bytes)
+     end
+
+     it "should parse an offsets response" do
+       bytes = [0].pack("n") + [1].pack('N') + [21346].pack('q').reverse
+       @mocked_socket.should_receive(:read).and_return([14].pack("N"))
+       @mocked_socket.should_receive(:read).and_return(bytes)
+       @consumer.read_offsets_response.should eql(21346)
+     end
+   end
+ end
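
Taken together, these examples describe the Consumer's public surface: localhost:9092 defaults, partition 0, a 2-second polling interval, and the latest offset fetched from the broker when none is given. A usage sketch under those assumptions; the exact shape yielded by #loop and the Message#payload accessor are inferred from the specs rather than shown in this diff:

    require 'kafka'

    consumer = Kafka::Consumer.new(:topic => "testing", :polling => 1)

    # consume returns the currently available messages as an Array:
    messages = consumer.consume

    # loop re-polls every :polling seconds and yields each consumed batch:
    consumer.loop do |batch|
      batch.each { |message| puts message.payload }  # payload accessor assumed
    end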