kafka-rb 0.0.8 → 0.0.9

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
data/README.md CHANGED
@@ -10,7 +10,7 @@ You can obtain a copy and instructions on how to setup kafka at http://incubator
 
 ## Installation
 
-    sudo gem install wooga-kafka-rb
+    sudo gem install kafka-rb
 
 (should work fine with JRuby, Ruby 1.8 and 1.9)
 
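For orientation before the code diffs, here is a minimal usage sketch of the client this gem provides. It is an illustration only, assembled from the initializers and methods shown in the diffs below (the option names, the new HOST/PORT defaults, and the new return value of consume):

    require 'kafka'

    # The producer connects in its initializer; :host and :port default to the
    # Kafka::IO::HOST ("localhost") and Kafka::IO::PORT (9092) constants
    # introduced in this release.
    producer = Kafka::Producer.new(:topic => "test")
    producer.send(Kafka::Message.new("hello"))

    # Consumer#consume now returns an array of Kafka::Message and advances the
    # offset by the number of bytes consumed (see consumer.rb below).
    consumer = Kafka::Consumer.new(:topic => "test")
    consumer.consume.each { |message| puts message.payload }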
data/Rakefile CHANGED
@@ -21,7 +21,7 @@ require 'rspec/core/rake_task'
 
 spec = Gem::Specification.new do |s|
   s.name = %q{kafka-rb}
-  s.version = "0.0.8"
+  s.version = "0.0.9"
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
   s.authors = ["Alejandro Crosa", "Stefan Mees", "Tim Lossen"]
@@ -30,7 +30,7 @@ spec = Gem::Specification.new do |s|
   s.description = %q{kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.}
   s.extra_rdoc_files = ["LICENSE"]
   s.files = ["LICENSE", "README.md", "Rakefile", "lib/kafka", "lib/kafka/batch.rb", "lib/kafka/consumer.rb", "lib/kafka/io.rb", "lib/kafka/message.rb", "lib/kafka/producer.rb", "lib/kafka/request_type.rb", "lib/kafka/error_codes.rb", "lib/kafka.rb", "spec/batch_spec.rb", "spec/consumer_spec.rb", "spec/io_spec.rb", "spec/kafka_spec.rb", "spec/message_spec.rb", "spec/producer_spec.rb", "spec/spec_helper.rb"]
-  s.homepage = %q{http://github.com/wooga/kafka-rb}
+  s.homepage = %q{http://github.com/acrosa/kafka-rb}
   s.require_paths = ["lib"]
   s.rubygems_version = %q{1.3.7}
   s.summary = %q{A Ruby client for the Kafka distributed publish/subscribe messaging service}
@@ -49,7 +49,7 @@ spec = Gem::Specification.new do |s|
   end
 end
 
-Rake::GemPackageTask.new(spec) do |pkg|
+Gem::PackageTask.new(spec) do |pkg|
   pkg.gem_spec = spec
 end
 
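The packaging-task swap above tracks an upstream rename: Rake::GemPackageTask was deprecated and then removed from rake, and Gem::PackageTask (shipped with RubyGems itself) is its drop-in replacement. A sketch of the usual setup, assuming the Rakefile requires it explicitly:

    require 'rubygems/package_task'  # defines Gem::PackageTask, the successor to Rake::GemPackageTask

    Gem::PackageTask.new(spec) do |pkg|
      pkg.gem_spec = spec
    end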
data/lib/kafka/consumer.rb CHANGED
@@ -29,8 +29,8 @@ module Kafka
     def initialize(options = {})
       self.topic = options[:topic] || "test"
       self.partition = options[:partition] || 0
-      self.host = options[:host] || "localhost"
-      self.port = options[:port] || 9092
+      self.host = options[:host] || HOST
+      self.port = options[:port] || PORT
       self.offset = options[:offset]
       self.max_size = options[:max_size] || MAX_SIZE
       self.polling = options[:polling] || DEFAULT_POLLING_INTERVAL
@@ -49,8 +49,9 @@ module Kafka
     def consume
       self.offset ||= fetch_latest_offset
       send_consume_request
-      data = read_data_response
-      parse_message_set_from(data)
+      message_set = Kafka::Message.parse_from(read_data_response)
+      self.offset += message_set.size
+      message_set.messages
     rescue SocketError
       nil
     end
@@ -94,21 +95,5 @@ module Kafka
       max_size = [max_size].pack("N")
       request_type + topic + partition + offset + max_size
     end
-
-    def parse_message_set_from(data)
-      messages = []
-      processed = 0
-      length = data.length - 4
-      while (processed <= length) do
-        message_size = data[processed, 4].unpack("N").shift + 4
-        message_data = data[processed, message_size]
-        break unless message_data.size == message_size
-        messages << Kafka::Message.parse_from(message_data)
-        processed += message_size
-      end
-      self.offset += processed
-      messages
-    end
-
   end
 end
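The behavioural point of the consume change above: the offset now advances by the number of wire bytes parsed (MessageSet#size), not by a counter kept inside the parser. A small worked example, reusing the 12-byte "ale" fixture from the specs further down:

    require 'kafka'

    # One version-0 message: 4 (length) + 1 (magic) + 4 (checksum) + 3 ("ale") = 12 bytes.
    response = [8, 0, 1120192889, 'ale'].pack('NCNa*')

    message_set = Kafka::Message.parse_from(response)
    message_set.messages.map(&:payload)  #=> ["ale"]
    message_set.size                     #=> 12, the amount consume adds to its offset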
data/lib/kafka/io.rb CHANGED
@@ -16,6 +16,9 @@ module Kafka
   module IO
     attr_accessor :socket, :host, :port
 
+    HOST = "localhost"
+    PORT = 9092
+
     def connect(host, port)
       raise ArgumentError, "No host or port specified" unless host && port
       self.host = host
data/lib/kafka/message.rb CHANGED
@@ -14,19 +14,35 @@
 # limitations under the License.
 module Kafka
 
-  # A message. The format of an N byte message is the following:
-  # 1 byte "magic" identifier to allow format changes
-  # 4 byte CRC32 of the payload
-  # N - 5 byte payload
+  # A message. The format of a message is as follows:
+  #
+  # 4 byte big-endian int: length of message in bytes (including the rest of
+  #                        the header, but excluding the length field itself)
+  # 1 byte: "magic" identifier (format version number)
+  #
+  # If the magic byte == 0, there is one more header field:
+  #
+  # 4 byte big-endian int: CRC32 checksum of the payload
+  #
+  # If the magic byte == 1, there are two more header fields:
+  #
+  # 1 byte: "attributes" (flags for compression, codec etc)
+  # 4 byte big-endian int: CRC32 checksum of the payload
+  #
+  # All following bytes are the payload.
   class Message
 
     MAGIC_IDENTIFIER_DEFAULT = 0
+    BASIC_MESSAGE_HEADER = 'NC'.freeze
+    VERSION_0_HEADER = 'N'.freeze
+    VERSION_1_HEADER = 'CN'.freeze
+    COMPRESSION_CODEC_MASK = 0x03
 
     attr_accessor :magic, :checksum, :payload
 
     def initialize(payload = nil, magic = MAGIC_IDENTIFIER_DEFAULT, checksum = nil)
       self.magic = magic
-      self.payload = payload
+      self.payload = payload || ""
       self.checksum = checksum || self.calculate_checksum
     end
 
@@ -35,15 +51,67 @@ module Kafka
     end
 
     def valid?
-      self.checksum == Zlib.crc32(self.payload)
+      self.checksum == calculate_checksum
     end
 
-    def self.parse_from(binary)
-      size = binary[0, 4].unpack("N").shift.to_i
-      magic = binary[4, 1].unpack("C").shift
-      checksum = binary[5, 4].unpack("N").shift
-      payload = binary[9, size] # 5 = 1 + 4 is Magic + Checksum
-      return Kafka::Message.new(payload, magic, checksum)
+    # Takes a byte string containing one or more messages; returns a MessageSet
+    # with the messages parsed from the string, and the number of bytes
+    # consumed from the string.
+    def self.parse_from(data)
+      messages = []
+      bytes_processed = 0
+
+      while bytes_processed <= data.length - 5 # 5 = size of BASIC_MESSAGE_HEADER
+        message_size, magic = data[bytes_processed, 5].unpack(BASIC_MESSAGE_HEADER)
+        break if bytes_processed + message_size + 4 > data.length # message is truncated
+
+        case magic
+        when 0
+          # |  0  |  1  |  2  |  3  |  4  |  5  |  6  |  7  |  8  |  9 ...
+          # |                       |     |                       |
+          # |      message_size     |magic|        checksum       | payload ...
+          payload_size = message_size - 5 # 5 = sizeof(magic) + sizeof(checksum)
+          checksum = data[bytes_processed + 5, 4].unpack(VERSION_0_HEADER).shift
+          payload = data[bytes_processed + 9, payload_size]
+          messages << Kafka::Message.new(payload, magic, checksum)
+
+        when 1
+          # |  0  |  1  |  2  |  3  |  4  |  5  |  6  |  7  |  8  |  9  | 10 ...
+          # |                       |     |     |                       |
+          # |         size          |magic|attrs|        checksum       | payload ...
+          payload_size = message_size - 6 # 6 = sizeof(magic) + sizeof(attrs) + sizeof(checksum)
+          attributes, checksum = data[bytes_processed + 5, 5].unpack(VERSION_1_HEADER)
+          payload = data[bytes_processed + 10, payload_size]
+
+          case attributes & COMPRESSION_CODEC_MASK
+          when 0 # a single uncompressed message
+            messages << Kafka::Message.new(payload, magic, checksum)
+          when 1 # a gzip-compressed message set -- parse recursively
+            uncompressed = Zlib::GzipReader.new(StringIO.new(payload)).read
+            message_set = parse_from(uncompressed)
+            raise 'malformed compressed message' if message_set.size != uncompressed.size
+            messages.concat(message_set.messages)
+          else
+            # https://cwiki.apache.org/confluence/display/KAFKA/Compression
+            # claims that 2 is for Snappy compression, but Kafka's Scala client
+            # implementation doesn't seem to support it yet, so I don't have
+            # a reference implementation to test against.
+            raise "Unsupported Kafka compression codec: #{attributes & COMPRESSION_CODEC_MASK}"
+          end
+
+        else
+          raise "Unsupported Kafka message version: magic number #{magic}"
+        end
+
+        bytes_processed += message_size + 4 # 4 = sizeof(message_size)
+      end
+
+      MessageSet.new(bytes_processed, messages)
     end
   end
+
+  # Encapsulates a list of Kafka messages (as Kafka::Message objects in the
+  # +messages+ attribute) and their total serialized size in bytes (the +size+
+  # attribute).
+  class MessageSet < Struct.new(:size, :messages); end
 end
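As a worked example of the version-1 wire format documented above, a frame can be built with Array#pack and round-tripped through the new parser; the payload and checksum below match the 'martin' fixture used in the message specs later in this diff:

    require 'zlib'
    require 'kafka'

    payload = 'martin'
    # The length field counts the 6 remaining header bytes (magic + attributes
    # + checksum) plus the payload; attributes = 0 means no compression.
    frame = [payload.size + 6, 1, 0, Zlib.crc32(payload), payload].pack('NCCNa*')

    message = Kafka::Message.parse_from(frame).messages.first
    message.magic    #=> 1
    message.valid?   #=> true
    message.payload  #=> "martin"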
data/lib/kafka/producer.rb CHANGED
@@ -17,45 +17,18 @@ module Kafka
 
     include Kafka::IO
 
-    PRODUCE_REQUEST_ID = Kafka::RequestType::PRODUCE
-
     attr_accessor :topic, :partition
 
     def initialize(options = {})
       self.topic = options[:topic] || "test"
       self.partition = options[:partition] || 0
-      self.host = options[:host] || "localhost"
-      self.port = options[:port] || 9092
+      self.host = options[:host] || HOST
+      self.port = options[:port] || PORT
       self.connect(self.host, self.port)
     end
 
-    def encode(message)
-      if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
-        ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
-        [message.magic].pack("C") + [message.calculate_checksum].pack("N") + ic.iconv(message.payload.to_s)
-      else
-        [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload.to_s.force_encoding(Encoding::ASCII_8BIT)
-      end
-    end
-
-    def encode_request(topic, partition, messages)
-      message_set = Array(messages).collect { |message|
-        encoded_message = self.encode(message)
-        [encoded_message.length].pack("N") + encoded_message
-      }.join("")
-
-      request = [PRODUCE_REQUEST_ID].pack("n")
-      topic = [topic.length].pack("n") + topic
-      partition = [partition].pack("N")
-      messages = [message_set.length].pack("N") + message_set
-
-      data = request + topic + partition + messages
-
-      return [data.length].pack("N") + data
-    end
-
     def send(messages)
-      self.write(self.encode_request(self.topic, self.partition, messages))
+      self.write(Encoder.produce(self.topic, self.partition, messages))
     end
 
     def batch(&block)
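Wire encoding has moved out of the producer into a Kafka::Encoder module (lib/kafka/encoder.rb, required by lib/kafka.rb below but not shown in this diff). Going only by the removed encode_request above, a produce request is framed as sketched here; treat this as a hypothetical re-creation of the old behaviour, which Encoder.produce presumably preserves:

    # Hypothetical helper, reconstructed from the removed encode_request.
    # encoded_messages is an array of already-encoded message byte strings.
    def encode_produce_request(topic, partition, encoded_messages)
      message_set = encoded_messages.map { |m| [m.length].pack("N") + m }.join("")
      data = [Kafka::RequestType::PRODUCE].pack("n") +  # 2-byte request type (0 = produce)
             [topic.length].pack("n") + topic +         # 2-byte topic length, then topic
             [partition].pack("N") +                    # 4-byte partition
             [message_set.length].pack("N") + message_set
      [data.length].pack("N") + data                    # whole request is length-prefixed
    end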
data/lib/kafka.rb CHANGED
@@ -20,10 +20,13 @@ end
 
 require File.join(File.dirname(__FILE__), "kafka", "io")
 require File.join(File.dirname(__FILE__), "kafka", "request_type")
+require File.join(File.dirname(__FILE__), "kafka", "encoder")
 require File.join(File.dirname(__FILE__), "kafka", "error_codes")
 require File.join(File.dirname(__FILE__), "kafka", "batch")
 require File.join(File.dirname(__FILE__), "kafka", "message")
+require File.join(File.dirname(__FILE__), "kafka", "multi_producer")
 require File.join(File.dirname(__FILE__), "kafka", "producer")
+require File.join(File.dirname(__FILE__), "kafka", "producer_request")
 require File.join(File.dirname(__FILE__), "kafka", "consumer")
 
 module Kafka
data/spec/consumer_spec.rb CHANGED
@@ -25,7 +25,7 @@ describe Consumer do
   describe "Kafka Consumer" do
 
     it "should have a Kafka::RequestType::FETCH" do
-      Consumer::Kafka::RequestType::FETCH.should eql(1)
+      Kafka::RequestType::FETCH.should eql(1)
       @consumer.should respond_to(:request_type)
     end
 
@@ -91,39 +91,6 @@ describe Consumer do
       @consumer.send_consume_request.should eql(true)
    end
 
-    it "should parse a message set from bytes" do
-      bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
-      message = @consumer.parse_message_set_from(bytes).first
-      message.payload.should eql("ale")
-      message.checksum.should eql(1120192889)
-      message.magic.should eql(0)
-      message.valid?.should eql(true)
-    end
-
-    it "should skip an incomplete message at the end of the response" do
-      bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
-      # incomplete message
-      bytes += [8].pack("N")
-      messages = @consumer.parse_message_set_from(bytes)
-      messages.size.should eql(1)
-    end
-
-    it "should skip an incomplete message at the end of the response which has the same length as an empty message" do
-      bytes = [8].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
-      # incomplete message because payload is missing
-      bytes += [8].pack("N") + [0].pack("C") + [1120192889].pack("N")
-      messages = @consumer.parse_message_set_from(bytes)
-      messages.size.should eql(1)
-    end
-
-    it "should read empty messages correctly" do
-      # empty message
-      bytes = [5].pack("N") + [0].pack("C") + [0].pack("N") + ""
-      messages = @consumer.parse_message_set_from(bytes)
-      messages.size.should eql(1)
-      messages.first.payload.should eql("")
-    end
-
    it "should consume messages" do
      @consumer.should_receive(:send_consume_request).and_return(true)
      @consumer.should_receive(:read_data_response).and_return("")
data/spec/message_spec.rb CHANGED
@@ -40,6 +40,10 @@ describe Message do
       @message.magic.should eql(1)
     end
 
+    it "should have an empty payload by default" do
+      @message.payload.should == ""
+    end
+
     it "should calculate the checksum (crc32 of a given message)" do
       @message.payload = "ale"
       @message.calculate_checksum.should eql(1120192889)
@@ -56,14 +60,92 @@ describe Message do
       @message = Message.new("alejandro", 0, 66666666) # 66666666 is a funny checksum
       @message.valid?.should eql(false)
     end
+  end
 
-    it "should parse a message from bytes" do
-      bytes = [12].pack("N") + [0].pack("C") + [1120192889].pack("N") + "ale"
-      message = Kafka::Message.parse_from(bytes)
+  describe "parsing" do
+    it "should parse a version-0 message from bytes" do
+      bytes = [8, 0, 1120192889, 'ale'].pack('NCNa*')
+      message = Kafka::Message.parse_from(bytes).messages.first
       message.valid?.should eql(true)
       message.magic.should eql(0)
       message.checksum.should eql(1120192889)
       message.payload.should eql("ale")
     end
+
+    it "should parse a version-1 message from bytes" do
+      bytes = [12, 1, 0, 755095536, 'martin'].pack('NCCNa*')
+      message = Kafka::Message.parse_from(bytes).messages.first
+      message.should be_valid
+      message.magic.should == 1
+      message.checksum.should == 755095536
+      message.payload.should == 'martin'
+    end
+
+    it "should raise an error if the magic number is not recognised" do
+      bytes = [12, 2, 0, 755095536, 'martin'].pack('NCCNa*') # 2 = some future format that's not yet invented
+      lambda {
+        Kafka::Message.parse_from(bytes)
+      }.should raise_error(RuntimeError, /Unsupported Kafka message version/)
+    end
+
+    it "should skip an incomplete message at the end of the response" do
+      bytes = [8, 0, 1120192889, 'ale'].pack('NCNa*')
+      bytes += [8].pack('N') # incomplete message (only length, rest is truncated)
+      message_set = Message.parse_from(bytes)
+      message_set.messages.size.should == 1
+      message_set.size.should == 12 # bytes consumed
+    end
+
+    it "should skip an incomplete message at the end of the response which has the same length as an empty message" do
+      bytes = [8, 0, 1120192889, 'ale'].pack('NCNa*')
+      bytes += [8, 0, 1120192889].pack('NCN') # incomplete message (payload is missing)
+      message_set = Message.parse_from(bytes)
+      message_set.messages.size.should == 1
+      message_set.size.should == 12 # bytes consumed
+    end
+
+    it "should read empty messages correctly" do
+      # empty message
+      bytes = [5, 0, 0, ''].pack('NCNa*')
+      messages = Message.parse_from(bytes).messages
+      messages.size.should == 1
+      messages.first.payload.should == ''
+    end
+
+    it "should parse a gzip-compressed message" do
+      compressed = 'H4sIAG0LI1AAA2NgYBBkZBB/9XN7YlJRYnJiCogCAH9lueQVAAAA'.unpack('m*').shift
+      bytes = [45, 1, 1, 1303540914, compressed].pack('NCCNa*')
+      message = Message.parse_from(bytes).messages.first
+      message.should be_valid
+      message.payload.should == 'abracadabra'
+    end
+
+    it "should recursively parse nested compressed messages" do
+      uncompressed = [17, 1, 0, 401275319, 'abracadabra'].pack('NCCNa*')
+      uncompressed << [12, 1, 0, 2666930069, 'foobar'].pack('NCCNa*')
+      compressed_io = StringIO.new('')
+      Zlib::GzipWriter.new(compressed_io).tap{|gzip| gzip << uncompressed; gzip.close }
+      compressed = compressed_io.string
+      bytes = [compressed.size + 6, 1, 1, Zlib.crc32(compressed), compressed].pack('NCCNa*')
+      messages = Message.parse_from(bytes).messages
+      messages.map(&:payload).should == ['abracadabra', 'foobar']
+      messages.map(&:valid?).should == [true, true]
+    end
+
+    it "should support a mixture of compressed and uncompressed messages" do
+      compressed = 'H4sIAG0LI1AAA2NgYBBkZBB/9XN7YlJRYnJiCogCAH9lueQVAAAA'.unpack('m*').shift
+      bytes = [45, 1, 1, 1303540914, compressed].pack('NCCNa*')
+      bytes << [11, 1, 0, 907060870, 'hello'].pack('NCCNa*')
+      messages = Message.parse_from(bytes).messages
+      messages.map(&:payload).should == ['abracadabra', 'hello']
+      messages.map(&:valid?).should == [true, true]
+    end
+
+    it "should raise an error if the compression codec is not supported" do
+      bytes = [6, 1, 2, 0, ''].pack('NCCNa*') # 2 = Snappy codec
+      lambda {
+        Kafka::Message.parse_from(bytes)
+      }.should raise_error(RuntimeError, /Unsupported Kafka compression codec/)
+    end
   end
 end
data/spec/producer_spec.rb CHANGED
@@ -25,10 +25,6 @@ describe Producer do
   end
 
   describe "Kafka Producer" do
-    it "should have a PRODUCE_REQUEST_ID" do
-      Producer::PRODUCE_REQUEST_ID.should eql(0)
-    end
-
     it "should have a topic and a partition" do
       @producer.should respond_to(:topic)
       @producer.should respond_to(:partition)
@@ -47,60 +43,6 @@ describe Producer do
       @producer.host.should eql("localhost")
       @producer.port.should eql(9092)
     end
-
-    describe "Message Encoding" do
-      it "should encode a message" do
-        message = Kafka::Message.new("alejandro")
-        full_message = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload
-        @producer.encode(message).should eql(full_message)
-      end
-
-      it "should encode an empty message" do
-        message = Kafka::Message.new()
-        full_message = [message.magic].pack("C") + [message.calculate_checksum].pack("N") + message.payload.to_s
-        @producer.encode(message).should eql(full_message)
-      end
-
-      it "should encode strings containing non-ASCII characters" do
-        message = Kafka::Message.new("ümlaut")
-        encoded = @producer.encode(message)
-        data = [encoded.size].pack("N") + encoded
-        if RUBY_VERSION[0,3] == "1.8" # Use old iconv on Ruby 1.8 for encoding
-          ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
-          ic.iconv(Kafka::Message.parse_from(data).payload).should eql("ümlaut")
-        else
-          Kafka::Message.parse_from(data).payload.force_encoding(Encoding::UTF_8).should eql("ümlaut")
-        end
-      end
-    end
-
-    describe "Request Encoding" do
-      it "should binary encode an empty request" do
-        bytes = @producer.encode_request("test", 0, [])
-        bytes.length.should eql(20)
-        bytes.should eql("\000\000\000\020\000\000\000\004test\000\000\000\000\000\000\000\000")
-      end
-
-      it "should binary encode a request with a message, using a specific wire format" do
-        message = Kafka::Message.new("ale")
-        bytes = @producer.encode_request("test", 3, message)
-        data_size = bytes[0, 4].unpack("N").shift
-        request_id = bytes[4, 2].unpack("n").shift
-        topic_length = bytes[6, 2].unpack("n").shift
-        topic = bytes[8, 4]
-        partition = bytes[12, 4].unpack("N").shift
-        messages_length = bytes[16, 4].unpack("N").shift
-        messages = bytes[20, messages_length]
-
-        bytes.length.should eql(32)
-        data_size.should eql(28)
-        request_id.should eql(0)
-        topic_length.should eql(4)
-        topic.should eql("test")
-        partition.should eql(3)
-        messages_length.should eql(12)
-      end
-    end
   end
 
   it "should send messages" do
@@ -120,4 +62,4 @@ describe Producer do
     end
   end
 end
-  end
+end
metadata CHANGED
@@ -1,47 +1,42 @@
---- !ruby/object:Gem::Specification 
+--- !ruby/object:Gem::Specification
 name: kafka-rb
-version: !ruby/object:Gem::Version 
-  hash: 15
+version: !ruby/object:Gem::Version
+  version: 0.0.9
   prerelease: 
-  segments: 
-  - 0
-  - 0
-  - 8
-  version: 0.0.8
 platform: ruby
-authors: 
+authors:
 - Alejandro Crosa
 - Stefan Mees
 - Tim Lossen
 autorequire: kafka-rb
 bindir: bin
 cert_chain: []
-
-date: 2012-01-14 00:00:00 Z
-dependencies: 
-- !ruby/object:Gem::Dependency 
+date: 2012-09-11 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
   name: rspec
-  prerelease: false
-  requirement: &id001 !ruby/object:Gem::Requirement 
+  requirement: !ruby/object:Gem::Requirement
     none: false
-    requirements: 
-    - - ">="
-      - !ruby/object:Gem::Version
-        hash: 3
-        segments: 
-        - 0
-        version: "0"
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
   type: :development
-  version_requirements: *id001
-description: kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+description: kafka-rb allows you to produce and consume messages using the Kafka distributed
+  publish/subscribe messaging service.
 email:
 executables: []
-
 extensions: []
-
-extra_rdoc_files: 
+extra_rdoc_files:
 - LICENSE
-files: 
+files:
 - LICENSE
 - README.md
 - Rakefile
@@ -60,38 +55,28 @@ files:
 - spec/message_spec.rb
 - spec/producer_spec.rb
 - spec/spec_helper.rb
-homepage: http://github.com/wooga/kafka-rb
+homepage: http://github.com/acrosa/kafka-rb
 licenses: []
-
 post_install_message: 
 rdoc_options: []
-
-require_paths: 
+require_paths:
 - lib
-required_ruby_version: !ruby/object:Gem::Requirement 
+required_ruby_version: !ruby/object:Gem::Requirement
   none: false
-  requirements: 
-  - - ">="
-    - !ruby/object:Gem::Version
-      hash: 3
-      segments: 
-      - 0
-      version: "0"
-required_rubygems_version: !ruby/object:Gem::Requirement 
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
-  requirements: 
-  - - ">="
-    - !ruby/object:Gem::Version
-      hash: 3
-      segments: 
-      - 0
-      version: "0"
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
 requirements: []
-
 rubyforge_project: 
-rubygems_version: 1.8.11
+rubygems_version: 1.8.24
 signing_key: 
 specification_version: 3
 summary: A Ruby client for the Kafka distributed publish/subscribe messaging service
 test_files: []
-