kafka-rb 0.0.11 → 0.0.12
- data/README.md +5 -0
- data/Rakefile +1 -35
- data/lib/kafka.rb +6 -2
- data/lib/kafka/encoder.rb +16 -18
- data/lib/kafka/io.rb +1 -1
- data/lib/kafka/message.rb +107 -15
- data/lib/kafka/multi_producer.rb +3 -2
- data/lib/kafka/producer.rb +6 -5
- data/spec/encoder_spec.rb +81 -3
- data/spec/kafka_spec.rb +0 -1
- data/spec/message_spec.rb +79 -3
- data/spec/multi_producer_spec.rb +25 -1
- data/spec/producer_request_spec.rb +1 -1
- data/spec/producer_spec.rb +6 -0
- metadata +49 -50
data/README.md
CHANGED
@@ -7,6 +7,11 @@ and is used in production at wooga.
 You need to have access to your Kafka instance and be able to connect through TCP.
 You can obtain a copy and instructions on how to setup kafka at http://incubator.apache.org/kafka/
 
+To make Snappy compression available, add
+
+    gem "snappy", "0.0.4", :git => "git://github.com/watersofoblivion/snappy.git", :branch => "snappy-streaming"
+
+to your Gemfile.
 
 ## Installation
 
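A minimal sketch of how the new option is consumed (the broker address, topic, and payloads here are placeholders; the :compression option and codec constants come from the 0.0.12 changes below):

    require 'kafka'

    # :compression is new in 0.0.12. SNAPPY_COMPRESSION needs the snappy gem from
    # the Gemfile line above; GZIP_COMPRESSION works out of the box via zlib.
    producer = Kafka::Producer.new(
      :host        => "localhost",   # placeholder broker address
      :port        => 9092,
      :topic       => "test",
      :compression => Kafka::Message::SNAPPY_COMPRESSION
    )
    producer.send([Kafka::Message.new("hello"), Kafka::Message.new("world")])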
data/Rakefile
CHANGED
@@ -19,35 +19,7 @@ require 'rubygems/specification'
 require 'date'
 require 'rspec/core/rake_task'
 
-spec = Gem::Specification.new do |s|
-  s.name = %q{kafka-rb}
-  s.version = "0.0.11"
-
-  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
-  s.authors = ["Alejandro Crosa", "Stefan Mees", "Tim Lossen", "Liam Stewart"]
-  s.autorequire = %q{kafka-rb}
-  s.date = Time.now.strftime("%Y-%m-%d")
-  s.description = %q{kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.}
-  s.extra_rdoc_files = ["LICENSE"]
-  s.files = ["LICENSE", "README.md", "Rakefile"] + Dir.glob("lib/**/*.rb")
-  s.test_files = Dir.glob("spec/**/*.rb")
-  s.homepage = %q{http://github.com/acrosa/kafka-rb}
-  s.require_paths = ["lib"]
-  s.summary = %q{A Ruby client for the Kafka distributed publish/subscribe messaging service}
-
-  if s.respond_to? :specification_version then
-    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
-    s.specification_version = 3
-
-    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
-      s.add_development_dependency(%q<rspec>, [">= 0"])
-    else
-      s.add_dependency(%q<rspec>, [">= 0"])
-    end
-  else
-    s.add_dependency(%q<rspec>, [">= 0"])
-  end
-end
+spec = eval(File.open("kafka-rb.gemspec", "r").read)
 
 Gem::PackageTask.new(spec) do |pkg|
   pkg.gem_spec = spec
@@ -58,12 +30,6 @@ task :install => [:package] do
   sh %{sudo gem install pkg/#{GEM}-#{GEM_VERSION}}
 end
 
-desc "Run all examples with RCov"
-RSpec::Core::RakeTask.new(:rcov) do |t|
-  t.pattern = FileList['spec/**/*_spec.rb']
-  t.rcov = true
-end
-
 desc "Run specs"
 RSpec::Core::RakeTask.new do |t|
   t.pattern = FileList['spec/**/*_spec.rb']
data/lib/kafka.rb
CHANGED
@@ -14,8 +14,12 @@
 # limitations under the License.
 require 'socket'
 require 'zlib'
-if RUBY_VERSION[0,3] == "1.8"
-  require 'iconv'
+require "stringio"
+
+begin
+  require 'snappy'
+rescue LoadError
+  nil
 end
 
 require File.join(File.dirname(__FILE__), "kafka", "io")
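The guarded require makes snappy a soft dependency: the library loads without it, and only actual use of the Snappy codec fails, via the ensure_snappy! guard added in message.rb below. A sketch of the behavior when the gem is absent:

    message = Kafka::Message.new("payload")

    message.encode(Kafka::Message::GZIP_COMPRESSION)
    # => length-prefixed frame; gzip needs only the bundled zlib

    message.encode(Kafka::Message::SNAPPY_COMPRESSION)
    # => raises RuntimeError, "Snappy not available!"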
data/lib/kafka/encoder.rb
CHANGED
@@ -15,22 +15,12 @@
 
 module Kafka
   module Encoder
-    def self.message(message)
-      payload = \
-        if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
-          Iconv.new('UTF-8//IGNORE', 'UTF-8').iconv(message.payload.to_s)
-        else
-          message.payload.to_s.force_encoding(Encoding::ASCII_8BIT)
-        end
-      data = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + payload
-
-      [data.length].pack("N") + data
+    def self.message(message, compression = Message::NO_COMPRESSION)
+      message.encode(compression)
     end
 
-    def self.message_block(topic, partition, messages)
-      message_set = Array(messages).collect { |message|
-        self.message(message)
-      }.join("")
+    def self.message_block(topic, partition, messages, compression)
+      message_set = message_set(messages, compression)
 
       topic = [topic.length].pack("n") + topic
       partition = [partition].pack("N")
@@ -39,16 +29,24 @@ module Kafka
       return topic + partition + messages
     end
 
-    def self.produce(topic, partition, messages)
+    def self.message_set(messages, compression)
+      message_set = Array(messages).collect { |message|
+        self.message(message)
+      }.join("")
+      message_set = self.message(Message.new(message_set), compression) unless compression == Message::NO_COMPRESSION
+      message_set
+    end
+
+    def self.produce(topic, partition, messages, compression = Message::NO_COMPRESSION)
       request = [RequestType::PRODUCE].pack("n")
-      data = request + self.message_block(topic, partition, messages)
+      data = request + self.message_block(topic, partition, messages, compression)
 
       return [data.length].pack("N") + data
     end
 
-    def self.multiproduce(producer_requests)
+    def self.multiproduce(producer_requests, compression = Message::NO_COMPRESSION)
       part_set = Array(producer_requests).map { |req|
-        self.message_block(req.topic, req.partition, req.messages)
+        self.message_block(req.topic, req.partition, req.messages, compression)
       }
 
       request = [RequestType::MULTIPRODUCE].pack("n")
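The new Encoder.message_set is where compression nests messages: each message is encoded individually, the concatenation is compressed, and the result is wrapped in a single version-1 message. A round trip mirroring the message_set spec below:

    messages = [Kafka::Message.new("foo"), Kafka::Message.new("bar")]
    bytes = Kafka::Encoder.message_set(messages, Kafka::Message::GZIP_COMPRESSION)

    set = Kafka::Message.parse_from(bytes)  # gunzips and parses recursively
    set.messages.map(&:payload)             # => ["foo", "bar"]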
data/lib/kafka/io.rb
CHANGED
data/lib/kafka/message.rb
CHANGED
@@ -33,6 +33,10 @@ module Kafka
   class Message
 
     MAGIC_IDENTIFIER_DEFAULT = 0
+    MAGIC_IDENTIFIER_COMPRESSION = 1
+    NO_COMPRESSION = 0
+    GZIP_COMPRESSION = 1
+    SNAPPY_COMPRESSION = 2
     BASIC_MESSAGE_HEADER = 'NC'.freeze
     VERSION_0_HEADER = 'N'.freeze
     VERSION_1_HEADER = 'CN'.freeze
@@ -41,9 +45,10 @@ module Kafka
     attr_accessor :magic, :checksum, :payload
 
     def initialize(payload = nil, magic = MAGIC_IDENTIFIER_DEFAULT, checksum = nil)
-      self.magic    = magic
-      self.payload  = payload
-      self.checksum = checksum || self.calculate_checksum
+      self.magic = magic
+      self.payload = payload || ""
+      self.checksum = checksum || self.calculate_checksum
+      @compression = NO_COMPRESSION
     end
 
     def calculate_checksum
@@ -66,7 +71,7 @@ module Kafka
         break if bytes_processed + message_size + 4 > data.length # message is truncated
 
         case magic
-        when 0
+        when MAGIC_IDENTIFIER_DEFAULT
           # | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 ...
           # |                       |     |                       |
           # |      message_size     |magic|        checksum       | payload ...
@@ -75,7 +80,7 @@ module Kafka
           payload = data[bytes_processed + 9, payload_size]
           messages << Kafka::Message.new(payload, magic, checksum)
 
-        when 1
+        when MAGIC_IDENTIFIER_COMPRESSION
           # | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 ...
           # |                       |     |     |                       |
           # |      message_size     |magic|attrs|        checksum       | payload ...
@@ -84,18 +89,22 @@ module Kafka
           payload = data[bytes_processed + 10, payload_size]
 
           case attributes & COMPRESSION_CODEC_MASK
-          when 0 # a single uncompressed message
+          when NO_COMPRESSION # a single uncompressed message
            messages << Kafka::Message.new(payload, magic, checksum)
-          when 1 # a gzip-compressed message set -- parse recursively
+          when GZIP_COMPRESSION # a gzip-compressed message set -- parse recursively
            uncompressed = Zlib::GzipReader.new(StringIO.new(payload)).read
            message_set = parse_from(uncompressed)
            raise 'malformed compressed message' if message_set.size != uncompressed.size
            messages.concat(message_set.messages)
+          when SNAPPY_COMPRESSION # a snappy-compresses message set -- parse recursively
+            ensure_snappy! do
+              uncompressed = Snappy::Reader.new(StringIO.new(payload)).read
+              message_set = parse_from(uncompressed)
+              raise 'malformed compressed message' if message_set.size != uncompressed.size
+              messages.concat(message_set.messages)
+            end
           else
            # https://cwiki.apache.org/confluence/display/KAFKA/Compression
-           # claims that 2 is for Snappy compression, but Kafka's Scala client
-           # implementation doesn't seem to support it yet, so I don't have
-           # a reference implementation to test against.
            raise "Unsupported Kafka compression codec: #{attributes & COMPRESSION_CODEC_MASK}"
           end
 
@@ -108,10 +117,93 @@ module Kafka
 
       MessageSet.new(bytes_processed, messages)
     end
-  end
 
-  # Encapsulates a list of Kafka messages (as Kafka::Message objects in the
-  # +messages+ attribute) and their total serialized size in bytes (the +size+
-  # attribute).
-  class MessageSet < Struct.new(:size, :messages); end
+    def encode(compression = NO_COMPRESSION)
+      @compression = compression
+
+      self.payload = asciify_payload
+      self.payload = compress_payload if compression?
+
+      data = magic_and_compression + [calculate_checksum].pack("N") + payload
+      [data.length].pack("N") + data
+    end
+
+
+    # Encapsulates a list of Kafka messages (as Kafka::Message objects in the
+    # +messages+ attribute) and their total serialized size in bytes (the +size+
+    # attribute).
+    class MessageSet < Struct.new(:size, :messages); end
+
+    def self.ensure_snappy!
+      if Object.const_defined? "Snappy"
+        yield
+      else
+        fail "Snappy not available!"
+      end
+    end
+
+    def ensure_snappy! &block
+      self.class.ensure_snappy! &block
+    end
+
+    private
+
+    attr_reader :compression
+
+    def compression?
+      compression != NO_COMPRESSION
+    end
+
+    def magic_and_compression
+      if compression?
+        [MAGIC_IDENTIFIER_COMPRESSION, compression].pack("CC")
+      else
+        [MAGIC_IDENTIFIER_DEFAULT].pack("C")
+      end
+    end
+
+    def asciify_payload
+      if RUBY_VERSION[0, 3] == "1.8"
+        payload
+      else
+        payload.to_s.force_encoding(Encoding::ASCII_8BIT)
+      end
+    end
+
+    def compress_payload
+      case compression
+      when GZIP_COMPRESSION
+        gzip
+      when SNAPPY_COMPRESSION
+        snappy
+      end
+    end
+
+    def gzip
+      with_buffer do |buffer|
+        gz = Zlib::GzipWriter.new buffer, nil, nil
+        gz.write payload
+        gz.close
+      end
+    end
+
+    def snappy
+      ensure_snappy! do
+        with_buffer do |buffer|
+          Snappy::Writer.new buffer do |w|
+            w << payload
+          end
+        end
+      end
+    end
+
+    def with_buffer
+      buffer = StringIO.new
+      buffer.set_encoding Encoding::ASCII_8BIT unless RUBY_VERSION =~ /^1\.8/
+      yield buffer if block_given?
+      buffer.rewind
+      buffer.string
+    end
+  end
 end
+
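The version-1 frame that encode emits and parse_from consumes can be built by hand; this mirrors the spec's pack_v1_message helper below (assumes the snappy gem is installed for the attrs = 2 case):

    require 'zlib'

    # | 4-byte size | magic | attrs | 4-byte crc32 | payload |
    def pack_v1_message(bytes, attributes)
      [6 + bytes.length, 1, attributes, Zlib.crc32(bytes), bytes].pack "NCCNa*"
    end

    inner = pack_v1_message("abracadabra", 0)          # plain inner message
    outer = pack_v1_message(Snappy.deflate(inner), 2)  # attrs = 2 marks Snappy
    Kafka::Message.parse_from(outer).messages.first.payload  # => "abracadabra"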
data/lib/kafka/multi_producer.rb
CHANGED
@@ -19,16 +19,17 @@ module Kafka
     def initialize(options={})
       self.host = options[:host] || HOST
       self.port = options[:port] || PORT
+      self.compression = options[:compression] || Message::NO_COMPRESSION
       self.connect(self.host, self.port)
     end
 
     def send(topic, messages, options={})
       partition = options[:partition] || 0
-      self.write(Encoder.produce(topic, partition, messages))
+      self.write(Encoder.produce(topic, partition, messages, compression))
     end
 
     def multi_send(producer_requests)
-      self.write(Encoder.multiproduce(producer_requests))
+      self.write(Encoder.multiproduce(producer_requests, compression))
     end
   end
 end
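The codec is fixed when the MultiProducer is constructed and threads through both send paths; a small sketch (topics and payloads are placeholders):

    producer = Kafka::MultiProducer.new(:compression => Kafka::Message::GZIP_COMPRESSION)

    producer.multi_send([
      Kafka::ProducerRequest.new("ales",   Kafka::Message.new("pale")),
      Kafka::ProducerRequest.new("stouts", Kafka::Message.new("dry"))
    ])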
data/lib/kafka/producer.rb
CHANGED
@@ -20,15 +20,16 @@ module Kafka
     attr_accessor :topic, :partition
 
     def initialize(options = {})
-      self.topic      = options[:topic]      || "test"
-      self.partition  = options[:partition]  || 0
-      self.host       = options[:host]       || HOST
-      self.port       = options[:port]       || PORT
+      self.topic = options[:topic] || "test"
+      self.partition = options[:partition] || 0
+      self.host = options[:host] || HOST
+      self.port = options[:port] || PORT
+      self.compression = options[:compression] || Message::NO_COMPRESSION
       self.connect(self.host, self.port)
     end
 
     def send(messages)
-      self.write(Encoder.produce(self.topic, self.partition, messages))
+      self.write(Encoder.produce(self.topic, self.partition, messages, compression))
     end
 
     def batch(&block)
data/spec/encoder_spec.rb
CHANGED
@@ -39,12 +39,73 @@ describe Encoder do
       encoded = described_class.message(message)
       message = Kafka::Message.parse_from(encoded).messages.first
       if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
-        ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
-        ic.iconv(message.payload).should eql("ümlaut")
+        #ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
+        #ic.iconv(message.payload).should eql("ümlaut")
+        message.payload.should eql("ümlaut")
       else
         message.payload.force_encoding(Encoding::UTF_8).should eql("ümlaut")
       end
     end
+
+    it "should encode strings containing non-ASCII characters" do
+      message = Kafka::Message.new("\214")
+      encoded = described_class.message(message)
+      message = Kafka::Message.parse_from(encoded).messages.first
+      if RUBY_VERSION[0,3] == "1.8"
+        message.payload.should eql("\214")
+      else
+        message.payload.force_encoding(Encoding::UTF_8).should eql("\214")
+      end
+    end
+  end
+
+  describe :compression do
+    before do
+      @message = Kafka::Message.new "foo"
+    end
+
+    it "should default to no compression" do
+      msg = "foo"
+      checksum = Zlib.crc32 msg
+      magic = 0
+      msg_size = 5 + msg.size
+      raw = [msg_size, magic, checksum, msg].pack "NCNa#{msg.size}"
+
+      Encoder.message(@message).should == raw
+    end
+
+    it "should support GZip compression" do
+      buffer = StringIO.new
+      gz = Zlib::GzipWriter.new buffer, nil, nil
+      gz.write "foo"
+      gz.close
+      buffer.rewind
+      msg = buffer.string
+      checksum = Zlib.crc32 msg
+      magic = 1
+      attrs = 1
+      msg_size = 6 + msg.size
+      raw = [msg_size, magic, attrs, checksum, msg].pack "NCCNa#{msg.size}"
+      Encoder.message(@message, 1).should == raw
+    end
+
+    if Object.const_defined? "Snappy"
+      it "should support Snappy compression" do
+        buffer = StringIO.new
+        Snappy::Writer.new buffer do |w|
+          w << "foo"
+        end
+        buffer.rewind
+        msg = buffer.string
+        checksum = Zlib.crc32 msg
+        magic = 1
+        attrs = 2
+        msg_size = 6 + msg.size
+        raw = [msg_size, magic, attrs, checksum, msg].pack "NCCNa#{msg.size}"
+
+        Encoder.message(@message, 2).should == raw
+      end
+    end
   end
 
   describe "produce" do
@@ -75,6 +136,23 @@ describe Encoder do
     end
   end
 
+  describe "message_set" do
+    it "should compress messages into a message set" do
+      message_one = Kafka::Message.new "foo"
+      message_two = Kafka::Message.new "bar"
+      bytes = described_class.message_set [message_one, message_two], Kafka::Message::GZIP_COMPRESSION
+
+      messages = Kafka::Message.parse_from bytes
+      messages.should be_a Kafka::Message::MessageSet
+      messages.messages.size.should == 2
+
+      messages.messages[0].should be_a Kafka::Message
+      messages.messages[0].payload.should == "foo"
+      messages.messages[1].should be_a Kafka::Message
+      messages.messages[1].payload.should == "bar"
+    end
+  end
+
   describe "multiproduce" do
     it "encodes an empty request" do
       bytes = described_class.multiproduce([])
@@ -135,7 +213,7 @@ describe Encoder do
     messages = [Kafka::Message.new("ale"), Kafka::Message.new("beer")]
     bytes = described_class.multiproduce([
       Kafka::ProducerRequest.new("test", messages[0]),
-      Kafka::ProducerRequest.new("topic", messages[1], partition: 1),
+      Kafka::ProducerRequest.new("topic", messages[1], :partition => 1),
     ])
 
     req_length = bytes[0, 4].unpack("N").shift
data/spec/kafka_spec.rb
CHANGED
data/spec/message_spec.rb
CHANGED
@@ -16,6 +16,10 @@ require File.dirname(__FILE__) + '/spec_helper'
 
 describe Message do
 
+  def pack_v1_message bytes, attributes
+    [6 + bytes.length, 1, attributes, Zlib.crc32(bytes), bytes].pack "NCCNa*"
+  end
+
   before(:each) do
     @message = Message.new
   end
@@ -120,7 +124,36 @@
       message.payload.should == 'abracadabra'
     end
 
-    it "should recursively parse nested compressed messages" do
+    if Object.const_defined? "Snappy"
+      it "should parse a snappy-compressed message" do
+        cleartext = "abracadabra"
+        bytes = pack_v1_message cleartext, 0
+        compressed = Snappy.deflate(bytes)
+        bytes = pack_v1_message compressed, 2
+        message = Message.parse_from(bytes).messages.first
+        message.should be_valid
+        message.payload.should == cleartext
+      end
+
+      it "should recursively parse nested snappy compressed messages" do
+        uncompressed = pack_v1_message('abracadabra', 0)
+        uncompressed << pack_v1_message('foobar', 0)
+        compressed = pack_v1_message(Snappy.deflate(uncompressed), 2)
+        messages = Message.parse_from(compressed).messages
+        messages.map(&:payload).should == ['abracadabra', 'foobar']
+        messages.map(&:valid?).should == [true, true]
+      end
+
+      it "should support a mixture of snappy compressed and uncompressed messages" do
+        bytes = pack_v1_message(Snappy.deflate(pack_v1_message("compressed", 0)), 2)
+        bytes << pack_v1_message('uncompressed', 0)
+        messages = Message.parse_from(bytes).messages
+        messages.map(&:payload).should == ["compressed", "uncompressed"]
+        messages.map(&:valid?).should == [true, true]
+      end
+    end
+
+    it "should recursively parse nested gzip compressed messages" do
       uncompressed = [17, 1, 0, 401275319, 'abracadabra'].pack('NCCNa*')
       uncompressed << [12, 1, 0, 2666930069, 'foobar'].pack('NCCNa*')
       compressed_io = StringIO.new('')
@@ -132,7 +165,7 @@
       messages.map(&:valid?).should == [true, true]
     end
 
-    it "should support a mixture of compressed and uncompressed messages" do
+    it "should support a mixture of gzip compressed and uncompressed messages" do
       compressed = 'H4sIAG0LI1AAA2NgYBBkZBB/9XN7YlJRYnJiCogCAH9lueQVAAAA'.unpack('m*').shift
       bytes = [45, 1, 1, 1303540914, compressed].pack('NCCNa*')
       bytes << [11, 1, 0, 907060870, 'hello'].pack('NCCNa*')
@@ -142,10 +175,53 @@
     end
 
     it "should raise an error if the compression codec is not supported" do
-      bytes = [6, 1, 2, 0, ''].pack('NCCNa*')
+      bytes = [6, 1, 3, 0, ''].pack('NCCNa*') # 3 = some unknown future compression codec
      lambda {
        Kafka::Message.parse_from(bytes)
      }.should raise_error(RuntimeError, /Unsupported Kafka compression codec/)
     end
   end
+
+  describe "#ensure_snappy!" do
+    let(:message) { Kafka::Message.new }
+    before { Kafka::Message.instance_variable_set :@snappy, nil }
+
+    subject { message.ensure_snappy! { 42 } }
+
+    if Object.const_defined? "Snappy"
+      context "when snappy is available" do
+        before { Object.stub! :const_defined? => true }
+        it { should == 42 }
+      end
+    end
+
+    context "when snappy is not available" do
+      before { Object.stub! :const_defined? => false }
+
+      it "raises an error" do
+        expect { message.ensure_snappy! { 42 } }.to raise_error
+      end
+    end
+  end
+
+  describe ".ensure_snappy!" do
+    before { Kafka::Message.instance_variable_set :@snappy, nil }
+
+    subject { Kafka::Message.ensure_snappy! { 42 } }
+
+    if Object.const_defined? "Snappy"
+      context "when snappy is available" do
+        before { Object.stub! :const_defined? => true }
+        it { should == 42 }
+      end
+    end
+
+    context "when snappy is not available" do
+      before { Object.stub! :const_defined? => false }
+
+      it "raises an error" do
+        expect { Kafka::Message.ensure_snappy! { 42 } }.to raise_error
+      end
+    end
+  end
 end
data/spec/multi_producer_spec.rb
CHANGED
@@ -27,12 +27,18 @@ describe MultiProducer do
     subject.port.should eql(9092)
   end
 
+  it "should have compression" do
+    subject.should respond_to :compression
+    described_class.new(:compression => Kafka::Message::SNAPPY_COMPRESSION).compression.should == Kafka::Message::SNAPPY_COMPRESSION
+    described_class.new.compression.should == Kafka::Message::NO_COMPRESSION
+  end
+
   it "sends single messages" do
     message = Kafka::Message.new("ale")
     encoded = Kafka::Encoder.produce("test", 0, message)
 
     subject.should_receive(:write).with(encoded).and_return(encoded.length)
-    subject.send("test", message, partition: 0).should == encoded.length
+    subject.send("test", message, :partition => 0).should == encoded.length
   end
 
   it "sends multiple messages" do
@@ -46,5 +52,23 @@
     subject.should_receive(:write).with(encoded).and_return(encoded.length)
     subject.multi_send(reqs).should == encoded.length
   end
+
+  it "should compress messages" do
+    subject.compression = Kafka::Message::SNAPPY_COMPRESSION
+    @mocked_socket.stub! :write => 0
+    messages = [Kafka::Message.new("ale"), Kafka::Message.new("beer")]
+
+    encoded = Encoder.produce("test", 0, messages[0])
+    Encoder.should_receive(:produce).with("test", 0, messages[0], subject.compression).and_return encoded
+    subject.send("test", messages[0], :partition => 0)
+
+    reqs = [
+      Kafka::ProducerRequest.new("topic", messages[0]),
+      Kafka::ProducerRequest.new("topic", messages[1]),
+    ]
+    encoded = Encoder.multiproduce(reqs)
+    Encoder.should_receive(:multiproduce).with(reqs, subject.compression)
+    subject.multi_send(reqs)
+  end
 end
data/spec/producer_spec.rb
CHANGED
@@ -30,6 +30,12 @@ describe Producer do
     @producer.should respond_to(:partition)
   end
 
+  it "should have compression" do
+    @producer.should respond_to :compression
+    Producer.new(:compression => 1).compression.should == 1
+    Producer.new.compression.should == 0
+  end
+
   it "should set a topic and partition on initialize" do
     @producer = Producer.new({ :host => "localhost", :port => 9092, :topic => "testing" })
     @producer.topic.should eql("testing")
metadata
CHANGED
@@ -1,10 +1,14 @@
---- !ruby/object:Gem::Specification
+--- !ruby/object:Gem::Specification 
 name: kafka-rb
-version: !ruby/object:Gem::Version
-  version: 0.0.11
-  prerelease: 
+version: !ruby/object:Gem::Version 
+  prerelease: false
+  segments: 
+  - 0
+  - 0
+  - 12
+  version: 0.0.12
 platform: ruby
-authors:
+authors: 
 - Alejandro Crosa
 - Stefan Mees
 - Tim Lossen
@@ -12,32 +16,31 @@ authors:
 autorequire: kafka-rb
 bindir: bin
 cert_chain: []
-date: 
-dependencies: 
-- !ruby/object:Gem::Dependency
+
+date: 2012-12-19 00:00:00 -08:00
+default_executable: 
+dependencies: 
+- !ruby/object:Gem::Dependency 
   name: rspec
-  requirement: !ruby/object:Gem::Requirement
-    none: false
-    requirements:
-    - - ! '>='
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
   prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    none: false
-    requirements:
-    - - ! '>='
-      - !ruby/object:Gem::Version
-        version: '0'
-description: kafka-rb allows you to produce and consume messages using the
-  Kafka distributed publish/subscribe messaging service.
+  requirement: &id001 !ruby/object:Gem::Requirement 
+    requirements: 
+    - - ">="
+      - !ruby/object:Gem::Version 
+        segments: 
+        - 0
+        version: "0"
+  type: :development
+  version_requirements: *id001
+description: kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.
 email: 
 executables: []
+
 extensions: []
-extra_rdoc_files:
+
+extra_rdoc_files: 
 - LICENSE
-files:
+files: 
 - LICENSE
 - README.md
 - Rakefile
@@ -52,41 +55,37 @@ files:
 - lib/kafka/producer_request.rb
 - lib/kafka/request_type.rb
 - lib/kafka.rb
-- spec/batch_spec.rb
-- spec/consumer_spec.rb
-- spec/encoder_spec.rb
-- spec/io_spec.rb
-- spec/kafka_spec.rb
-- spec/message_spec.rb
-- spec/multi_producer_spec.rb
-- spec/producer_request_spec.rb
-- spec/producer_spec.rb
-- spec/spec_helper.rb
+has_rdoc: true
 homepage: http://github.com/acrosa/kafka-rb
 licenses: []
+
 post_install_message: 
 rdoc_options: []
-require_paths:
+
+require_paths: 
 - lib
-required_ruby_version: !ruby/object:Gem::Requirement
-  none: false
-  requirements:
-  - - ! '>='
-    - !ruby/object:Gem::Version
-      version: '0'
-required_rubygems_version: !ruby/object:Gem::Requirement
-  none: false
-  requirements:
-  - - ! '>='
-    - !ruby/object:Gem::Version
-      version: '0'
+required_ruby_version: !ruby/object:Gem::Requirement 
+  requirements: 
+  - - ">="
+    - !ruby/object:Gem::Version 
+      segments: 
+      - 0
+      version: "0"
+required_rubygems_version: !ruby/object:Gem::Requirement 
+  requirements: 
+  - - ">="
+    - !ruby/object:Gem::Version 
+      segments: 
+      - 0
+      version: "0"
 requirements: []
+
 rubyforge_project: 
-rubygems_version: 1.
+rubygems_version: 1.3.6
 signing_key: 
 specification_version: 3
 summary: A Ruby client for the Kafka distributed publish/subscribe messaging service
-test_files:
+test_files: 
 - spec/batch_spec.rb
 - spec/consumer_spec.rb
 - spec/encoder_spec.rb