em-kafka 0.0.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml ADDED
@@ -0,0 +1,15 @@
1
+ ---
2
+ !binary "U0hBMQ==":
3
+ metadata.gz: !binary |-
4
+ NzM5ZGZjMjg0YzI4MTg4NGZiYzk2NTM2Y2Q3ODI5ZTFmYzQwMmMzMg==
5
+ data.tar.gz: !binary |-
6
+ ZjA2NDFhYWU1NDY0NTIzNmQ2MWY2YjJmZDI2YmViODAyZjNiYjBmMg==
7
+ !binary "U0hBNTEy":
8
+ metadata.gz: !binary |-
9
+ MWI4N2FiODA4ZjZhZDQ2YmJhZWUzZDY2NjA1YjY0NTRjN2Y2YjBiZGFiNGRk
10
+ ZDFmZDU2NzdlY2U0YzFmNWEzYjlhYjI0MTVkODE2YWQzOWIxOGZjN2U3NDFj
11
+ ZmM5ODMxZDE4OTU4ZmZmODBkODU1ZjhmNWI0ZDFjZmQ5YTgzMmU=
12
+ data.tar.gz: !binary |-
13
+ ZjQ1ZDlmNmI5MDljNTUwNGI5M2JhYzJiNmIzMjJkMGMyNTg1ODNiMDZjMjBj
14
+ MmQ1M2M3NjM1MjNlYmNhODU3NzhmMjc2YTUzNTg0NGU4NjA0ZjQ2ZWJmYTI3
15
+ NjhhNGM2OThjOGYxZWFhMDg5NWNmMGI4NzZiNjUwZWI2ODY2ZmE=
data/.gitignore ADDED
@@ -0,0 +1 @@
1
+ .rvmrc
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --color
data/Gemfile ADDED
@@ -0,0 +1,2 @@
1
source "http://rubygems.org"
# NOTE(review): consider https://rubygems.org — the http source is redirected
# and unverified; confirm before changing since Gemfile.lock pins the source.

# Runtime and development dependencies are declared in em-kafka.gemspec.
gemspec
data/Gemfile.lock ADDED
@@ -0,0 +1,28 @@
1
+ PATH
2
+ remote: .
3
+ specs:
4
+ em-kafka (0.0.1)
5
+ eventmachine (>= 1.0.0.beta.4)
6
+ yajl-ruby (>= 0.8.2)
7
+
8
+ GEM
9
+ remote: http://rubygems.org/
10
+ specs:
11
+ diff-lcs (1.1.3)
12
+ eventmachine (1.0.0.beta.4)
13
+ rspec (2.6.0)
14
+ rspec-core (~> 2.6.0)
15
+ rspec-expectations (~> 2.6.0)
16
+ rspec-mocks (~> 2.6.0)
17
+ rspec-core (2.6.4)
18
+ rspec-expectations (2.6.0)
19
+ diff-lcs (~> 1.1.2)
20
+ rspec-mocks (2.6.0)
21
+ yajl-ruby (1.1.0)
22
+
23
+ PLATFORMS
24
+ ruby
25
+
26
+ DEPENDENCIES
27
+ em-kafka!
28
+ rspec (~> 2.6.0)
data/LICENSE ADDED
@@ -0,0 +1,22 @@
1
+ (The MIT License)
2
+
3
+ Copyright (c) 2011 GroupMe
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining
6
+ a copy of this software and associated documentation files (the
7
+ "Software"), to deal in the Software without restriction, including
8
+ without limitation the rights to use, copy, modify, merge, publish,
9
+ distribute, sublicense, and/or sell copies of the Software, and to
10
+ permit persons to whom the Software is furnished to do so, subject to
11
+ the following conditions:
12
+
13
+ The above copyright notice and this permission notice shall be
14
+ included in all copies or substantial portions of the Software.
15
+
16
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,42 @@
1
+ # EM-Kafka
2
+
3
+ EventMachine driver for [Kafka](http://incubator.apache.org/kafka/index.html).
4
+
5
+ ## Producer
6
+
7
+ When using Ruby objects, the payload is encoded to JSON
8
+
9
+ producer = EM::Kafka::Producer.new("kafka://topic@localhost:9092/0")
10
+ producer.deliver(:foo => "bar") # payload is {foo:"bar"}
11
+
12
+ ## Consumer
13
+
14
+ consumer = EM::Kafka::Consumer.new("kafka://topic@localhost:9092/0")
15
+ consumer.consume do |message|
16
+ puts message.payload
17
+ end
18
+
19
+ ## Messages
20
+
21
+ Messages are composed of:
22
+
23
+ * a payload
24
+ * a magic id (defaults to 0)
25
+
26
+ Change the magic id when the payload format changes:
27
+
28
+ EM::Kafka::Message.new("payload", 2)
29
+
30
+ Pass messages when you want to be specific:
31
+
32
+ message_1 = EM::Kafka::Message.new("payload_1", 2)
33
+ message_2 = EM::Kafka::Message.new("payload_2", 2)
34
+ producer.deliver([message_1, message_2])
35
+
36
+
37
+ ## Credits
38
+
39
+ Heavily influenced by / borrowed from:
40
+
41
+ * kafka-rb (Alejandro Crosa)
42
+ * em-hiredis (Martyn Loughran)
data/Rakefile ADDED
@@ -0,0 +1,4 @@
1
# Rake tasks for the em-kafka gem: bundler's build/install/release tasks
# plus the RSpec suite.
require 'bundler/gem_tasks'
require "rspec/core/rake_task"

# `rake core` runs the RSpec suite; it is also the default task.
RSpec::Core::RakeTask.new(:core)
task :default => :core
data/bin/consume ADDED
@@ -0,0 +1,14 @@
1
+ #!/usr/bin/env ruby
2
+ require_relative "../lib/em-kafka"
3
+
4
+ topic = ARGV[0] || "test"
5
+
6
+ EM.run do
7
+ trap("TERM") { EM.stop; exit; }
8
+ consumer = EM::Kafka::Consumer.new(:topic => topic)
9
+ puts "consuming topic #{consumer.topic}"
10
+ consumer.consume do |message|
11
+ puts "payload: #{message.payload}"
12
+ end
13
+ end
14
+
data/bin/produce ADDED
@@ -0,0 +1,17 @@
1
+ #!/usr/bin/env ruby
2
+ require_relative "../lib/em-kafka"
3
+
4
+ topic = ARGV[0] || "test"
5
+ puts "Producing topic '#{topic}'"
6
+
7
+ EM.run do
8
+ trap("TERM") { EM.stop; exit; }
9
+ producer = EM::Kafka::Producer.new(:topic => topic)
10
+ puts "topic is #{producer.topic}"
11
+
12
+ EM.add_periodic_timer(0.25) {
13
+ message = EM::Kafka::Message.new("hello-#{Time.now.to_i}")
14
+ producer.deliver(message)
15
+ puts "Sending #{message.payload}"
16
+ }
17
+ end
data/em-kafka.gemspec ADDED
@@ -0,0 +1,21 @@
1
# -*- mode: ruby; encoding: utf-8 -*-
# Gem specification for em-kafka. The version constant lives in
# lib/em-kafka/version.rb so it can be required without loading the gem.
$:.push File.expand_path("../lib", __FILE__)
require "em-kafka/version"

Gem::Specification.new do |s|
  s.name        = "em-kafka"
  s.version     = EventMachine::Kafka::VERSION
  s.authors     = ["Brandon Keene"]
  s.email       = ["bkeene@gmail.com"]
  s.homepage    = ""
  s.summary     = %q{EventMachine Kafka driver}

  # File lists come straight from git, so only tracked files ship.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

  # NOTE(review): Gemfile.lock still lists `eventmachine (>= 1.0.0.beta.4)`
  # while this declares eventmachine-le — the lockfile looks stale; confirm.
  s.add_dependency "eventmachine-le", ">= 1.1.5"
  s.add_dependency "yajl-ruby", ">= 0.8.2"
  s.add_development_dependency "rspec", "~> 2.6.0"
end
@@ -0,0 +1,32 @@
1
module EventMachine
  module Kafka
    # Thin wrapper around an EM::Kafka::Connection that lazily (re)connects
    # and forwards inbound raw data to a user-supplied callback.
    class Client
      def initialize(host, port)
        @host = host || 'localhost'
        @port = port || 9092
        @callback = nil
      end

      # Write raw bytes to the broker, (re)establishing the connection
      # first if it was never opened or has since dropped.
      def send_data(data)
        connect if @connection.nil? || @connection.disconnected?
        @connection.send_data(data)
      end

      # Register the block invoked with each chunk of received data.
      def on_data(&block)
        @callback = block
      end

      # Open a fresh connection and wire its :message events to the
      # registered callback. Returns the new connection.
      def connect
        @connection = EM.connect(@host, @port, EM::Kafka::Connection)
        @connection.on(:message) do |message|
          @callback.call(message) if @callback
        end
        @connection
      end

      # Flush pending writes, then close. Guarded so calling this before
      # any connection exists no longer raises NoMethodError on nil.
      def close_connection
        @connection.close_connection_after_writing if @connection
      end
    end
  end
end
@@ -0,0 +1,29 @@
1
module EventMachine::Kafka
  # EventMachine connection handler for the Kafka broker socket.
  # Emits :connected, :message (raw bytes) and :closed events through the
  # EventEmitter mixin; framing/parsing is left to subscribers.
  class Connection < EM::Connection
    include EventMachine::Kafka::EventEmitter

    def initialize(*args)
      super
      @disconnected = false
    end

    # True once the underlying socket has been unbound.
    def disconnected?
      @disconnected
    end

    def connection_completed
      EventMachine::Kafka.logger.info("Connected to Kafka")
      emit(:connected)
    end

    # Raw TCP chunk — may contain a partial or multiple Kafka frames.
    def receive_data(data)
      emit(:message, data)
    end

    def unbind
      @disconnected = true
      EventMachine::Kafka.logger.info("Disconnected from Kafka")
      emit(:closed)
    end
  end
end
@@ -0,0 +1,49 @@
1
module EventMachine
  module Kafka
    # Polls a Kafka broker on a timer and yields decoded messages for a
    # single topic/partition.
    class Consumer
      require_relative "consumer_request"
      require_relative "parser"

      attr_accessor :topic, :partition, :offset, :max_size,
                    :request_type, :polling, :client, :host, :port

      # uri     - "kafka://topic@host:port/partition"
      # options - optional :offset, :max_size, :request_type, :polling.
      def initialize(uri, options = {})
        parsed = URI(uri)
        self.host      = parsed.host
        self.port      = parsed.port
        self.topic     = parsed.user
        self.partition = parsed.path[1..-1].to_i

        self.offset       = options[:offset]       || 0
        self.max_size     = options[:max_size]     || EM::Kafka::MESSAGE_MAX_SIZE
        self.request_type = options[:request_type] || EM::Kafka::REQUEST_FETCH
        self.polling      = options[:polling]      || EM::Kafka::CONSUMER_POLLING_INTERVAL

        self.client = EM::Kafka::Client.new(host, port)
        client.connect
      end

      # Start polling; every decoded message is passed to the block.
      def consume(&block)
        raise ArgumentError.new("block required") unless block_given?
        parser = EM::Kafka::Parser.new(offset, &block)
        parser.on_offset_update { |new_offset| self.offset = new_offset }
        client.on_data { |bytes| parser.on_data(bytes) }
        EM.add_periodic_timer(polling) { request_consume }
      end

      private

      # Issue one FETCH request at the current offset.
      def request_consume
        fetch = EM::Kafka::ConsumerRequest.new(
          request_type, topic, partition, offset, max_size
        )
        client.send_data(fetch.encode_size)
        client.send_data(fetch.encode)
      end
    end
  end
end
@@ -0,0 +1,23 @@
1
module EventMachine
  module Kafka
    # Binary FETCH request in Kafka's 0.7 wire format:
    #   2 bytes request type, 2 bytes topic length, topic bytes,
    #   4 bytes partition, 8 bytes offset, 4 bytes max fetch size.
    class ConsumerRequest
      def initialize(type, topic, partition, offset, max_size)
        @type, @topic, @partition, @offset, @max_size =
          type, topic, partition, offset, max_size
      end

      # 4-byte big-endian length of the encoded request body.
      def encode_size
        [2 + 2 + @topic.length + 4 + 8 + 4].pack("N")
      end

      # Encode the request body. All integers are big-endian.
      def encode
        [@type].pack("n") +
        [@topic.length].pack("n") +
        @topic +
        [@partition].pack("N") +
        [@offset].pack("Q>") + # 64-bit big-endian; the old pack("Q").reverse trick was only correct on little-endian hosts
        [@max_size].pack("N")
      end
    end
  end
end
@@ -0,0 +1,29 @@
1
module EventMachine
  module Kafka
    # Minimal synchronous pub/sub mixin: register listeners with #on and
    # fire them with #emit. Listener arrays are created lazily per event.
    #
    # Explicit module nesting replaces the compact `module EventMachine::Kafka`
    # form, which raised NameError if this file loaded before EventMachine
    # was defined (load-order fragility).
    module EventEmitter
      # Register a listener block for an event.
      def on(event, &listener)
        _listeners[event] << listener
      end

      # Invoke every listener registered for the event with args.
      def emit(event, *args)
        _listeners[event].each { |listener| listener.call(*args) }
      end

      # Remove one listener (matched by proc identity).
      def remove_listener(event, &listener)
        _listeners[event].delete(listener)
      end

      # Drop every listener for the event.
      def remove_all_listeners(event)
        _listeners.delete(event)
      end

      # The listener array for an event (empty array if none registered).
      def listeners(event)
        _listeners[event]
      end

      private

      # Lazily-built event => [listeners] map; the block gives each key its
      # own fresh array (Hash.new([]) would share one).
      def _listeners
        @_listeners ||= Hash.new { |hash, key| hash[key] = [] }
      end
    end
  end
end
@@ -0,0 +1,34 @@
1
module EventMachine
  module Kafka
    # A single Kafka message:
    #   1 byte "magic" identifier to allow format changes
    #   4 byte CRC32 of the payload
    #   N - 5 byte payload
    class Message
      require "zlib"

      attr_accessor :magic, :checksum, :payload, :size

      # checksum defaults to CRC32(payload); size is stored when given
      # (previously the argument was accepted but silently dropped, leaving
      # the :size accessor always nil).
      def initialize(payload, magic = 0, checksum = nil, size = nil)
        self.payload = payload
        self.magic = magic
        self.checksum = checksum || Zlib.crc32(payload)
        self.size = size
      end

      # True when the stored checksum matches the payload's CRC32.
      def valid?
        checksum == Zlib.crc32(payload)
      end

      # Wire format: magic byte, big-endian CRC32, then raw payload bytes.
      def encode
        [magic, checksum].pack("CN") +
          payload.to_s.force_encoding(Encoding::ASCII_8BIT)
      end

      # Decode from `binary`, which still carries its 4-byte length prefix:
      # size(4) | magic(1) | checksum(4) | payload.
      def self.decode(size, binary)
        return unless binary
        magic = binary[4, 1].unpack("C").first
        checksum = binary[5, 4].unpack("N").first # exactly 4 bytes (was a sloppy 5-byte slice)
        payload = binary[9..-1]
        new(payload, magic, checksum, size)
      end
    end
  end
end
@@ -0,0 +1,66 @@
1
module EventMachine
  module Kafka
    # Stateful parser for Kafka fetch responses. Raw TCP chunks are fed in
    # via #on_data; once a whole frame has arrived, each contained message
    # is decoded and yielded to the block given at construction.
    class Parser
      # Running byte offset into the topic/partition log; advanced after
      # each successfully parsed frame.
      attr_accessor :offset

      def initialize(offset = 0, &block)
        self.offset = offset
        @block = block # called with each decoded EM::Kafka::Message
        reset
      end

      # Accept one chunk of raw bytes. The first chunk of a frame carries a
      # 4-byte big-endian size prefix; chunks are buffered until the whole
      # frame is present, then parsed.
      def on_data(binary)
        if @complete
          parsed_size = binary[0, 4].unpack("N").shift

          if (parsed_size - 2) > 0
            @size = parsed_size
          else
            # empty response (frame holds only the 2-byte header field)
            return
          end
        end

        @buffer << binary

        # NOTE(review): `binary` was already appended to @buffer above, so
        # this sum counts the newest chunk twice; confirm whether the early
        # parse this can trigger on split frames is intentional.
        received_data = @buffer.size + binary.size
        if received_data >= @size
          parse(@buffer[6, @size]) # account for 4 byte size and 2 byte junk
        else
          @complete = false
        end
      end

      # Register a callback invoked with the updated offset after each
      # parsed frame.
      def on_offset_update(&callback)
        @on_offset_update_callback = callback
      end

      private

      # Walk the message set: each entry is a 4-byte size followed by an
      # encoded message. Yields each decoded message, then advances the
      # offset by the number of bytes consumed and resets for a new frame.
      def parse(frame)
        i = 0
        while i <= frame.length do
          # unpack returns nil past the end of the frame — stop there.
          break unless message_size = frame[i, 4].unpack("N").first
          message_data = frame[i, message_size + 4]
          message = Kafka::Message.decode(message_size, message_data)
          i += message_size + 4
          @block.call(message)
        end

        advance_offset(i)
        reset
      end

      # Clear buffered state so the next chunk starts a new frame.
      def reset
        @size = 0
        @complete = true
        @buffer = ""
      end

      # Bump the log offset and notify the registered callback, if any.
      def advance_offset(i)
        self.offset += i
        @on_offset_update_callback.call(offset) if @on_offset_update_callback
      end
    end
  end
end
@@ -0,0 +1,26 @@
1
module EventMachine
  module Kafka
    # Publishes messages to one Kafka topic/partition over a Client.
    class Producer
      require_relative "producer_request"

      attr_accessor :host, :port, :topic, :partition, :client

      # uri - "kafka://topic@host:port/partition". Raises ArgumentError when
      # the URI carries no topic (userinfo part).
      def initialize(uri)
        uri = URI(uri)
        self.host = uri.host
        self.port = uri.port
        self.topic = uri.user
        self.partition = uri.path[1..-1].to_i

        # Was `raise ArgumentError("topic required")`, which calls a
        # nonexistent ArgumentError() method and raised NoMethodError
        # instead of the intended ArgumentError.
        raise ArgumentError, "topic required" unless topic

        self.client = EM::Kafka::Client.new(host, port)
        client.connect
      end

      # Encode and send a ProducerRequest. `message` may be a Message, an
      # array of Messages, or any object Yajl can serialize to JSON.
      def deliver(message)
        request = EM::Kafka::ProducerRequest.new(topic, partition, message)
        client.send_data(request.encode)
      end
    end
  end
end
@@ -0,0 +1,38 @@
1
module EventMachine
  module Kafka
    # Binary PRODUCE request for one topic/partition: a 4-byte length
    # prefix, a 2-byte request id (0), the topic, the partition, and a
    # length-prefixed message set.
    class ProducerRequest
      def initialize(topic, partition, messages)
        @topic = topic
        @partition = partition
        @messages = messages
      end

      # Encode the full request with its leading 4-byte length prefix.
      def encode
        body = [
          "\x00\x00",
          [@topic.length].pack("n"),
          @topic,
          [@partition].pack("N"),
          encode_messages(@messages)
        ].join

        [body.length].pack("N") + body
      end

      private

      # Normalize the input to an array of Messages (JSON-encoding plain
      # Ruby objects), then emit each as a length-prefixed message and wrap
      # the whole set in its own length prefix.
      def encode_messages(raw)
        normalized = [raw].flatten.map do |entry|
          if entry.is_a?(EM::Kafka::Message)
            entry
          else
            EM::Kafka::Message.new(Yajl::Encoder.encode(entry))
          end
        end

        framed = normalized.map do |message|
          encoded = message.encode
          [encoded.length].pack("N") + encoded
        end.join

        [framed.length].pack("N") + framed
      end
    end
  end
end
@@ -0,0 +1,5 @@
1
module EventMachine
  module Kafka
    # Gem version, read by em-kafka.gemspec. Frozen so the constant cannot
    # be mutated in place.
    VERSION = "0.0.1".freeze
  end
end
data/lib/em-kafka.rb ADDED
@@ -0,0 +1,48 @@
1
+ require "eventmachine"
2
+ require "logger"
3
+ require "uri"
4
+ require "yajl"
5
+
6
+ require_relative "em-kafka/event_emitter"
7
+ require_relative "em-kafka/connection"
8
+ require_relative "em-kafka/client"
9
+ require_relative "em-kafka/message"
10
+ require_relative "em-kafka/producer"
11
+ require_relative "em-kafka/consumer"
12
+
13
module EventMachine
  module Kafka
    MESSAGE_MAX_SIZE = 1048576 # 1 MB
    CONSUMER_POLLING_INTERVAL = 2 # 2 seconds

    # Kafka 0.7 wire-protocol request types.
    REQUEST_PRODUCE      = 0
    REQUEST_FETCH        = 1
    REQUEST_MULTIFETCH   = 2
    REQUEST_MULTIPRODUCE = 3
    REQUEST_OFFSETS      = 4

    # Broker error codes.
    ERROR_NO_ERROR = 0
    ERROR_OFFSET_OUT_OF_RANGE = 1
    ERROR_INVALID_MESSAGE_CODE = 2
    ERROR_WRONG_PARTITION_CODE = 3
    ERROR_INVALID_RETCH_SIZE_CODE = 4 # historical misspelling, kept for compatibility
    ERROR_INVALID_FETCH_SIZE_CODE = ERROR_INVALID_RETCH_SIZE_CODE

    # Human-readable description per error code. The original hash keyed
    # "Offset out of range" under ERROR_INVALID_MESSAGE_CODE (a duplicate
    # key), so code 1 had no description and code 2's entry was clobbered.
    ERROR_DESCRIPTIONS = {
      ERROR_NO_ERROR             => 'No error',
      ERROR_OFFSET_OUT_OF_RANGE  => 'Offset out of range',
      ERROR_INVALID_MESSAGE_CODE => 'Invalid message code',
      ERROR_WRONG_PARTITION_CODE => 'Wrong partition code',
      ERROR_INVALID_FETCH_SIZE_CODE => 'Invalid fetch size code'
    }.freeze

    class << self
      # Shared logger; defaults to STDOUT. Overridable via .logger=.
      def logger
        @logger ||= Logger.new(STDOUT)
      end

      def logger=(new_logger)
        @logger = new_logger
      end
    end
  end
end
@@ -0,0 +1,32 @@
1
require 'spec_helper'

# Pins the FETCH request wire format byte-for-byte.
describe EM::Kafka::ConsumerRequest do
  before do
    @request = EM::Kafka::ConsumerRequest.new(
      EM::Kafka::REQUEST_FETCH,
      "topic",
      0,
      100,
      EM::Kafka::MESSAGE_MAX_SIZE
    )
  end

  describe "#encode" do
    it "returns binary" do
      data = [EM::Kafka::REQUEST_FETCH].pack("n") +
        ["topic".length].pack('n') +
        "topic" +
        [0].pack("N") +
        [100].pack("Q").reverse + # DIY 64bit big endian integer
        [EM::Kafka::MESSAGE_MAX_SIZE].pack("N")

      @request.encode.should == data
    end
  end

  describe "#encode_size" do
    it "returns packed 2 + 2 + @topic.length + 4 + 8 + 4" do
      # 2 + 2 + 5 + 4 + 8 + 4 = 25 = 0x19
      @request.encode_size.should == "\x00\x00\x00\x19"
    end
  end
end
@@ -0,0 +1,24 @@
1
require 'spec_helper'

describe EM::Kafka::Consumer do
  before do
    # Stub the client so initialize never opens a real EM connection.
    @client = mock("Client", :connect => true)
    EM::Kafka::Client.should_receive(:new).and_return(@client)
  end

  it "should set a topic and partition on initialize" do
    consumer = EM::Kafka::Consumer.new("kafka://testing@localhost:9092/3")
    consumer.host.should == "localhost"
    consumer.port.should == 9092
    consumer.topic.should == "testing"
    consumer.partition.should == 3
  end

  it "should set default partition to 0" do
    # URI with no path component: partition falls back to 0.
    consumer = EM::Kafka::Consumer.new("kafka://testing@localhost:9092")
    consumer.host.should == "localhost"
    consumer.port.should == 9092
    consumer.topic.should == "testing"
    consumer.partition.should == 0
  end
end
@@ -0,0 +1,30 @@
1
require 'spec_helper'

describe EM::Kafka::Message do
  describe "#encode" do
    it "turns Message into data" do
      message = EM::Kafka::Message.new("ale")
      message.payload.should == "ale"
      message.checksum.should == 1120192889 # CRC32 of "ale"
      message.magic.should == 0

      # Wire format: magic byte, 4-byte CRC32, raw payload bytes.
      message.encode.should == [0].pack("C") +
        [1120192889].pack("N") +
        "ale".force_encoding(Encoding::ASCII_8BIT)
    end
  end

  describe ".decode" do
    it "turns data into a Message" do
      # Input still carries its 4-byte length prefix ahead of the message.
      data = [12].pack("N") +
        [0].pack("C") +
        [1120192889].pack("N") + "ale"

      message = EM::Kafka::Message.decode(data.size, data)
      message.should be_valid
      message.payload.should == "ale"
      message.checksum.should == 1120192889
      message.magic.should == 0
    end
  end
end
@@ -0,0 +1,86 @@
1
require 'spec_helper'

describe EM::Kafka::Parser do
  describe "parse" do
    it "parses messages from newline boundaries across packets" do
      messages = []
      parser = EM::Kafka::Parser.new do |message|
        messages << message
      end

      message_1 = EM::Kafka::Message.new("foo").encode
      message_2 = EM::Kafka::Message.new("barizzle").encode
      message_3 = EM::Kafka::Message.new("langlang").encode

      # One fetch-response frame: 4-byte size, 2 junk bytes, then a
      # message set of size-prefixed encoded messages.
      binary = [51].pack("N") +
        [0, 0].pack("CC") + # 2 byte offset
        [message_1.size].pack("N") +
        message_1 +
        [message_2.size].pack("N") +
        message_2 +
        [message_3.size].pack("N") +
        message_3

      # Split the frame mid-stream to simulate TCP fragmentation.
      frame_1 = binary[0..11]
      frame_2 = binary[12..-1]

      parser.on_data(frame_1)
      parser.on_data(frame_2)

      messages[0].payload.should == "foo"
      messages[0].should be_valid
      messages[1].payload.should == "barizzle"
      messages[1].should be_valid
      messages[2].payload.should == "langlang"
      messages[2].should be_valid

      # A size-2 frame is an empty response and must be ignored.
      empty_frame = [2, 0, 0].pack("NCC")
      parser.on_data(empty_frame)

      message_4 = EM::Kafka::Message.new("after empty").encode

      binary = [26].pack("N") +
        [0, 0].pack("CC") + # 2 byte offset
        [message_4.size].pack("N") +
        message_4

      frame_3 = binary
      parser.on_data(frame_3)

      messages[3].payload.should == "after empty"
      messages[3].should be_valid
    end
  end

  describe "on_offset_update" do
    it "returns the proper offset" do
      offset = 0
      messages = []
      parser = EM::Kafka::Parser.new do |message|
        messages << message
      end
      parser.on_offset_update {|new_offset| offset = new_offset }

      message_1 = EM::Kafka::Message.new("foo").encode
      message_2 = EM::Kafka::Message.new("barizzle").encode
      message_3 = EM::Kafka::Message.new("langlang").encode

      binary = [51].pack("N") +
        [0, 0].pack("CC") + # 2 byte offset
        [message_1.size].pack("N") +
        message_1 +
        [message_2.size].pack("N") +
        message_2 +
        [message_3.size].pack("N") +
        message_3

      frame_1 = binary[0..11]
      frame_2 = binary[12..-1]

      parser.on_data(frame_1)
      parser.on_data(frame_2)

      # Offset advances by message bytes plus each 4-byte size prefix.
      offset.should == message_1.size + message_2.size + message_3.size + 4 + 4 + 4
    end
  end
end
@@ -0,0 +1,41 @@
1
require 'spec_helper'

describe EM::Kafka::ProducerRequest do
  describe "#encode" do
    it "binary encodes an empty request" do
      bytes = EM::Kafka::ProducerRequest.new("test", 0, []).encode
      bytes.length.should eql(20)
      bytes.should eql("\000\000\000\020\000\000\000\004test\000\000\000\000\000\000\000\000")
    end

    it "should binary encode a request with a message, using a specific wire format" do
      request = EM::Kafka::ProducerRequest.new("test", 3, EM::Kafka::Message.new("ale"))
      bytes = request.encode

      # Unpack the frame field-by-field to pin the layout.
      data_size = bytes[0, 4].unpack("N").shift
      request_id = bytes[4, 2].unpack("n").shift
      topic_length = bytes[6, 2].unpack("n").shift
      topic = bytes[8, 4]
      partition = bytes[12, 4].unpack("N").shift
      messages_length = bytes[16, 4].unpack("N").shift
      messages = bytes[20, messages_length]

      bytes.length.should eql(32)
      data_size.should eql(28)
      request_id.should eql(0)
      topic_length.should eql(4)
      topic.should eql("test")
      partition.should eql(3)
      messages_length.should eql(12)
    end

    it "encodes ruby objects to JSON and inflates message" do
      # Pre-encoded JSON Message and a bare hash must produce equal sizes.
      message = EM::Kafka::Message.new(Yajl::Encoder.encode(key: "value"))
      request_with_message = EM::Kafka::ProducerRequest.new("test", 3, message)
      request_with_message.encode.size.should == 44

      request = EM::Kafka::ProducerRequest.new("test", 3, key: "value")
      request.encode.size.should == 44
    end
  end
end
@@ -0,0 +1,34 @@
1
require "spec_helper"

describe EM::Kafka::Producer do
  before do
    # Stub the client so initialize never opens a real EM connection.
    @client = mock("Client", :connect => true)
    EM::Kafka::Client.should_receive(:new).and_return(@client)
  end

  it "should set a topic and partition on initialize" do
    producer = EM::Kafka::Producer.new("kafka://testing@localhost:9092/3")
    producer.host.should == "localhost"
    producer.port.should == 9092
    producer.topic.should == "testing"
    producer.partition.should == 3
  end

  it "should set default partition to 0" do
    producer = EM::Kafka::Producer.new("kafka://testing@localhost:9092")
    producer.host.should == "localhost"
    producer.port.should == 9092
    producer.topic.should == "testing"
    producer.partition.should == 0
  end

  it "should send messages" do
    producer = EM::Kafka::Producer.new("kafka://testing@localhost:9092/3")
    message = EM::Kafka::Message.new("hello world")
    request = EM::Kafka::ProducerRequest.new("testing", 3, message)

    # deliver must write the encoded ProducerRequest to the client.
    @client.should_receive(:send_data).with(request.encode)

    producer.deliver(message)
  end
end
@@ -0,0 +1,11 @@
1
require "spec_helper"

describe EventMachine::Kafka do
  describe ".logger" do
    it "sets logger" do
      # The module-level logger is replaceable via the writer.
      new_logger = Logger.new(STDOUT)
      EM::Kafka.logger = new_logger
      EM::Kafka.logger.should == new_logger
    end
  end
end
@@ -0,0 +1,10 @@
1
# Shared RSpec bootstrap: loads the bundle and the gem under test.
require "bundler/setup"
Bundler.require :default, :development

require 'em-kafka'

RSpec.configure do |config|
  config.before(:each) do
    # Silence connection logging during specs.
    EM::Kafka.logger = Logger.new("/dev/null")
  end
end
metadata ADDED
@@ -0,0 +1,117 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: em-kafka
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ platform: ruby
6
+ authors:
7
+ - Brandon Keene
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2013-08-08 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: eventmachine-le
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ! '>='
18
+ - !ruby/object:Gem::Version
19
+ version: 1.1.5
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ! '>='
25
+ - !ruby/object:Gem::Version
26
+ version: 1.1.5
27
+ - !ruby/object:Gem::Dependency
28
+ name: yajl-ruby
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ! '>='
32
+ - !ruby/object:Gem::Version
33
+ version: 0.8.2
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ! '>='
39
+ - !ruby/object:Gem::Version
40
+ version: 0.8.2
41
+ - !ruby/object:Gem::Dependency
42
+ name: rspec
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ~>
46
+ - !ruby/object:Gem::Version
47
+ version: 2.6.0
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - ~>
53
+ - !ruby/object:Gem::Version
54
+ version: 2.6.0
55
+ description:
56
+ email:
57
+ - bkeene@gmail.com
58
+ executables:
59
+ - consume
60
+ - produce
61
+ extensions: []
62
+ extra_rdoc_files: []
63
+ files:
64
+ - .gitignore
65
+ - .rspec
66
+ - Gemfile
67
+ - Gemfile.lock
68
+ - LICENSE
69
+ - README.md
70
+ - Rakefile
71
+ - bin/consume
72
+ - bin/produce
73
+ - em-kafka.gemspec
74
+ - lib/em-kafka.rb
75
+ - lib/em-kafka/client.rb
76
+ - lib/em-kafka/connection.rb
77
+ - lib/em-kafka/consumer.rb
78
+ - lib/em-kafka/consumer_request.rb
79
+ - lib/em-kafka/event_emitter.rb
80
+ - lib/em-kafka/message.rb
81
+ - lib/em-kafka/parser.rb
82
+ - lib/em-kafka/producer.rb
83
+ - lib/em-kafka/producer_request.rb
84
+ - lib/em-kafka/version.rb
85
+ - spec/em-kafka/consumer_request_spec.rb
86
+ - spec/em-kafka/consumer_spec.rb
87
+ - spec/em-kafka/message_spec.rb
88
+ - spec/em-kafka/parser_spec.rb
89
+ - spec/em-kafka/producer_request_spec.rb
90
+ - spec/em-kafka/producer_spec.rb
91
+ - spec/em-kafka_spec.rb
92
+ - spec/spec_helper.rb
93
+ homepage: ''
94
+ licenses: []
95
+ metadata: {}
96
+ post_install_message:
97
+ rdoc_options: []
98
+ require_paths:
99
+ - lib
100
+ required_ruby_version: !ruby/object:Gem::Requirement
101
+ requirements:
102
+ - - ! '>='
103
+ - !ruby/object:Gem::Version
104
+ version: '0'
105
+ required_rubygems_version: !ruby/object:Gem::Requirement
106
+ requirements:
107
+ - - ! '>='
108
+ - !ruby/object:Gem::Version
109
+ version: '0'
110
+ requirements: []
111
+ rubyforge_project:
112
+ rubygems_version: 2.0.3
113
+ signing_key:
114
+ specification_version: 4
115
+ summary: EventMachine Kafka driver
116
+ test_files: []
117
+ has_rdoc: