kafka-rb 0.0.12 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/README.md CHANGED
@@ -66,6 +66,56 @@ to your Gemfile.
66
66
  end
67
67
 
68
68
 
69
+ ### Using the cli
70
+
71
+ There are two CLI programs to communicate with Kafka from the command line
72
+ interface mainly intended for debug. `kafka-publish` and `kafka-consumer`. You
73
+ can configure the commands by command line arguments or by setting the
74
+ environment variables: *KAFKA_HOST*, *KAFKA_PORT*, *KAFKA_TOPIC*,
75
+ *KAFKA_COMPRESSION*.
76
+
77
+
78
+
79
+ #### kafka-publish
80
+
81
+ ```
82
+ $ kafka-publish --help
83
+ Usage: kafka-publish [options]
84
+
85
+ -h, --host HOST Set the kafka hostname
86
+ -p, --port PORT Set the kafka port
87
+ -t, --topic TOPIC Set the kafka topic
88
+ -c, --compression no|gzip|snappy Set the compression method
89
+ -m, --message MESSAGE Message to send
90
+ ```
91
+
92
+ If _message_ is omitted, `kafka-publish` will read from *STDIN*, until EOF or
93
+ SIG-INT.
94
+
95
+ NOTE: kafka-publish doesn't batch messages for the moment.
96
+
97
+ This can be quite handy for piping directly to Kafka:
98
+
99
+ ```
100
+ $ tail -f /var/log/syslog | kafka-publish -t syslog
101
+ ```
102
+
103
+ #### kafka-consumer
104
+
105
+ ```
106
+ $ kafka-consumer --help
107
+ Usage: kafka-consumer [options]
108
+
109
+ -h, --host HOST Set the kafka hostname
110
+ -p, --port PORT Set the kafka port
111
+ -t, --topic TOPIC Set the kafka topic
112
+ ```
113
+
114
+ Kafka consumer will loop and wait for messages until it is interrupted.
115
+
116
+ This can be useful, for example, to take a sample of the messages.
117
+
118
+
69
119
  ## Questions?
70
120
  alejandrocrosa at(@) gmail.com
71
121
  http://twitter.com/alejandrocrosa
data/lib/kafka/cli.rb ADDED
@@ -0,0 +1,170 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one or more
2
+ # contributor license agreements. See the NOTICE file distributed with
3
+ # this work for additional information regarding copyright ownership.
4
+ # The ASF licenses this file to You under the Apache License, Version 2.0
5
+ # (the "License"); you may not use this file except in compliance with
6
+ # the License. You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ require 'optparse'
17
+
18
+ module Kafka
19
+ module CLI #:nodoc: all
20
+ extend self
21
+
22
+ def publish!
23
+ read_env
24
+ parse_args
25
+ validate_config
26
+ if config[:message]
27
+ send(config, config.delete(:message))
28
+ else
29
+ publish(config)
30
+ end
31
+ end
32
+
33
+
34
+ def subscribe!
35
+ read_env
36
+ parse_args
37
+ validate_config
38
+ subscribe(config)
39
+ end
40
+
41
+ def validate_config
42
+ if config[:help]
43
+ puts help
44
+ exit
45
+ end
46
+ config[:host] ||= IO::HOST
47
+ config[:port] ||= IO::PORT
48
+ config[:topic].is_a?(String) or raise "Missing topic"
49
+
50
+ rescue RuntimeError => e
51
+ puts e.message
52
+ puts help
53
+ exit
54
+ end
55
+
56
+ def parse_args(args = ARGV)
57
+ option_parser.parse(args)
58
+ end
59
+
60
+ def read_env(env = ENV)
61
+ config[:host] = env["KAFKA_HOST"] if env["KAFKA_HOST"]
62
+ config[:port] = env["KAFKA_PORT"].to_i if env["KAFKA_PORT"]
63
+ config[:topic] = env["KAFKA_TOPIC"] if env["KAFKA_TOPIC"]
64
+ config[:compression] = string_to_compression(env["KAFKA_COMPRESSION"]) if env["KAFKA_COMPRESSION"]
65
+ end
66
+
67
+ def config
68
+ @config ||= {:compression => string_to_compression("no")}
69
+ end
70
+
71
+ def help
72
+ option_parser.to_s
73
+ end
74
+
75
+ def option_parser
76
+ OptionParser.new do |opts|
77
+ opts.banner = "Usage: #{program_name} [options]"
78
+ opts.separator ""
79
+
80
+ opts.on("-h","--host HOST", "Set the kafka hostname") do |h|
81
+ config[:host] = h
82
+ end
83
+
84
+ opts.on("-p", "--port PORT", "Set the kafka port") do |port|
85
+ config[:port] = port.to_i
86
+ end
87
+
88
+ opts.on("-t", "--topic TOPIC", "Set the kafka topic") do |topic|
89
+ config[:topic] = topic
90
+ end
91
+
92
+ opts.on("-c", "--compression no|gzip|snappy", "Set the compression method") do |meth|
93
+ config[:compression] = string_to_compression(meth)
94
+ end if publish?
95
+
96
+ opts.on("-m","--message MESSAGE", "Message to send") do |msg|
97
+ config[:message] = msg
98
+ end if publish?
99
+
100
+ opts.separator ""
101
+
102
+ opts.on("--help", "show the help") do
103
+ config[:help] = true
104
+ end
105
+
106
+ opts.separator ""
107
+ opts.separator "You can set the host, port, topic and compression from the environment variables: KAFKA_HOST, KAFKA_PORT, KAFKA_TOPIC AND KAFKA_COMPRESSION"
108
+ end
109
+ end
110
+
111
+ def publish?
112
+ program_name == "kafka-publish"
113
+ end
114
+
115
+ def subscribe?
116
+ program_name == "kafka-subscribe"
117
+ end
118
+
119
+ def program_name(pn = $0)
120
+ File.basename(pn)
121
+ end
122
+
123
+ def string_to_compression(meth)
124
+ case meth
125
+ when "no" then Message::NO_COMPRESSION
126
+ when "gzip" then Message::GZIP_COMPRESSION
127
+ when "snappy" then Message::SNAPPY_COMPRESSION
128
+ else raise "No supported compression"
129
+ end
130
+ end
131
+
132
+ def send(options, message)
133
+ Producer.new(options).send(Message.new(message))
134
+ end
135
+
136
+ def publish(options)
137
+ trap(:INT){ exit }
138
+ producer = Producer.new(options)
139
+ loop do
140
+ publish_loop(producer)
141
+ end
142
+ end
143
+
144
+ def publish_loop(producer)
145
+ message = read_input
146
+ producer.send(Message.new(message))
147
+ end
148
+
149
+ def read_input
150
+ input = $stdin.gets
151
+ if input
152
+ input.strip
153
+ else
154
+ exit # gets returns nil at EOF
155
+ end
156
+
157
+ end
158
+
159
+ def subscribe(options)
160
+ trap(:INT){ exit }
161
+ consumer = Consumer.new(options)
162
+ consumer.loop do |messages|
163
+ messages.each do |message|
164
+ puts message.payload
165
+ end
166
+ end
167
+ end
168
+
169
+ end
170
+ end
@@ -23,12 +23,12 @@ module Kafka
23
23
  self.connect(self.host, self.port)
24
24
  end
25
25
 
26
- def send(topic, messages, options={})
26
+ def push(topic, messages, options={})
27
27
  partition = options[:partition] || 0
28
28
  self.write(Encoder.produce(topic, partition, messages, compression))
29
29
  end
30
30
 
31
- def multi_send(producer_requests)
31
+ def multi_push(producer_requests)
32
32
  self.write(Encoder.multiproduce(producer_requests, compression))
33
33
  end
34
34
  end
@@ -28,14 +28,14 @@ module Kafka
28
28
  self.connect(self.host, self.port)
29
29
  end
30
30
 
31
- def send(messages)
31
+ def push(messages)
32
32
  self.write(Encoder.produce(self.topic, self.partition, messages, compression))
33
33
  end
34
34
 
35
35
  def batch(&block)
36
36
  batch = Kafka::Batch.new
37
37
  block.call( batch )
38
- self.send(batch.messages)
38
+ push(batch.messages)
39
39
  batch.messages.clear
40
40
  end
41
41
  end
data/spec/cli_spec.rb ADDED
@@ -0,0 +1,133 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one or more
2
+ # contributor license agreements. See the NOTICE file distributed with
3
+ # this work for additional information regarding copyright ownership.
4
+ # The ASF licenses this file to You under the Apache License, Version 2.0
5
+ # (the "License"); you may not use this file except in compliance with
6
+ # the License. You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ require File.dirname(__FILE__) + '/spec_helper'
16
+ require 'kafka/cli'
17
+
18
+ describe CLI do
19
+
20
+ before(:each) do
21
+ CLI.instance_variable_set("@config", {})
22
+ CLI.stub(:puts)
23
+ end
24
+
25
+ describe "should read from env" do
26
+ describe "kafka host" do
27
+ it "should read KAFKA_HOST from env" do
28
+ CLI.read_env("KAFKA_HOST" => "google.com")
29
+ CLI.config[:host].should == "google.com"
30
+ end
31
+
32
+ it "kafka port" do
33
+ CLI.read_env("KAFKA_PORT" => "1234")
34
+ CLI.config[:port].should == 1234
35
+ end
36
+
37
+ it "kafka topic" do
38
+ CLI.read_env("KAFKA_TOPIC" => "news")
39
+ CLI.config[:topic].should == "news"
40
+ end
41
+
42
+ it "kafka compression" do
43
+ CLI.read_env("KAFKA_COMPRESSION" => "no")
44
+ CLI.config[:compression].should == Message::NO_COMPRESSION
45
+
46
+ CLI.read_env("KAFKA_COMPRESSION" => "gzip")
47
+ CLI.config[:compression].should == Message::GZIP_COMPRESSION
48
+
49
+ CLI.read_env("KAFKA_COMPRESSION" => "snappy")
50
+ CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
51
+ end
52
+ end
53
+ end
54
+
55
+ describe "should read from command line" do
56
+ it "kafka host" do
57
+ CLI.parse_args(%w(--host google.com))
58
+ CLI.config[:host].should == "google.com"
59
+
60
+ CLI.parse_args(%w(-h google.com))
61
+ CLI.config[:host].should == "google.com"
62
+ end
63
+
64
+ it "kafka port" do
65
+ CLI.parse_args(%w(--port 1234))
66
+ CLI.config[:port].should == 1234
67
+
68
+ CLI.parse_args(%w(-p 1234))
69
+ CLI.config[:port].should == 1234
70
+ end
71
+
72
+ it "kafka topic" do
73
+ CLI.parse_args(%w(--topic news))
74
+ CLI.config[:topic].should == "news"
75
+
76
+ CLI.parse_args(%w(-t news))
77
+ CLI.config[:topic].should == "news"
78
+ end
79
+
80
+ it "kafka compression" do
81
+ CLI.stub(:publish? => true)
82
+
83
+ CLI.parse_args(%w(--compression no))
84
+ CLI.config[:compression].should == Message::NO_COMPRESSION
85
+ CLI.parse_args(%w(-c no))
86
+ CLI.config[:compression].should == Message::NO_COMPRESSION
87
+
88
+ CLI.parse_args(%w(--compression gzip))
89
+ CLI.config[:compression].should == Message::GZIP_COMPRESSION
90
+ CLI.parse_args(%w(-c gzip))
91
+ CLI.config[:compression].should == Message::GZIP_COMPRESSION
92
+
93
+ CLI.parse_args(%w(--compression snappy))
94
+ CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
95
+ CLI.parse_args(%w(-c snappy))
96
+ CLI.config[:compression].should == Message::SNAPPY_COMPRESSION
97
+ end
98
+
99
+ it "message" do
100
+ CLI.stub(:publish? => true)
101
+ CLI.parse_args(%w(--message YEAH))
102
+ CLI.config[:message].should == "YEAH"
103
+
104
+ CLI.parse_args(%w(-m YEAH))
105
+ CLI.config[:message].should == "YEAH"
106
+ end
107
+
108
+ end
109
+
110
+ describe "config validation" do
111
+ it "should assign a default port" do
112
+ CLI.stub(:exit)
113
+ CLI.stub(:puts)
114
+ CLI.validate_config
115
+ CLI.config[:port].should == Kafka::IO::PORT
116
+ end
117
+ end
118
+
119
+ it "should assign a default host" do
120
+ CLI.stub(:exit)
121
+ CLI.validate_config
122
+ CLI.config[:host].should == Kafka::IO::HOST
123
+ end
124
+
125
+
126
+ it "read compression method" do
127
+ CLI.string_to_compression("no").should == Message::NO_COMPRESSION
128
+ CLI.string_to_compression("gzip").should == Message::GZIP_COMPRESSION
129
+ CLI.string_to_compression("snappy").should == Message::SNAPPY_COMPRESSION
130
+ lambda { CLI.send(:string_to_compression,nil) }.should raise_error
131
+ end
132
+
133
+ end
@@ -38,7 +38,7 @@ describe MultiProducer do
38
38
  encoded = Kafka::Encoder.produce("test", 0, message)
39
39
 
40
40
  subject.should_receive(:write).with(encoded).and_return(encoded.length)
41
- subject.send("test", message, :partition => 0).should == encoded.length
41
+ subject.push("test", message, :partition => 0).should == encoded.length
42
42
  end
43
43
 
44
44
  it "sends multiple messages" do
@@ -50,7 +50,7 @@ describe MultiProducer do
50
50
  encoded = Encoder.multiproduce(reqs)
51
51
 
52
52
  subject.should_receive(:write).with(encoded).and_return(encoded.length)
53
- subject.multi_send(reqs).should == encoded.length
53
+ subject.multi_push(reqs).should == encoded.length
54
54
  end
55
55
 
56
56
  it "should compress messages" do
@@ -60,7 +60,7 @@ describe MultiProducer do
60
60
 
61
61
  encoded = Encoder.produce("test", 0, messages[0])
62
62
  Encoder.should_receive(:produce).with("test", 0, messages[0], subject.compression).and_return encoded
63
- subject.send("test", messages[0], :partition => 0)
63
+ subject.push("test", messages[0], :partition => 0)
64
64
 
65
65
  reqs = [
66
66
  Kafka::ProducerRequest.new("topic", messages[0]),
@@ -68,7 +68,7 @@ describe MultiProducer do
68
68
  ]
69
69
  encoded = Encoder.multiproduce(reqs)
70
70
  Encoder.should_receive(:multiproduce).with(reqs, subject.compression)
71
- subject.multi_send(reqs)
71
+ subject.multi_push(reqs)
72
72
  end
73
73
  end
74
74
  end
@@ -54,14 +54,14 @@ describe Producer do
54
54
  it "should send messages" do
55
55
  @producer.should_receive(:write).and_return(32)
56
56
  message = Kafka::Message.new("ale")
57
- @producer.send(message).should eql(32)
57
+ @producer.push(message).should eql(32)
58
58
  end
59
59
 
60
60
  describe "Message Batching" do
61
61
  it "should batch messages and send them at once" do
62
62
  message1 = Kafka::Message.new("one")
63
63
  message2 = Kafka::Message.new("two")
64
- @producer.should_receive(:send).with([message1, message2]).exactly(:once).and_return(nil)
64
+ @producer.should_receive(:push).with([message1, message2]).exactly(:once).and_return(nil)
65
65
  @producer.batch do |messages|
66
66
  messages << message1
67
67
  messages << message2
metadata CHANGED
@@ -1,14 +1,10 @@
1
- --- !ruby/object:Gem::Specification
1
+ --- !ruby/object:Gem::Specification
2
2
  name: kafka-rb
3
- version: !ruby/object:Gem::Version
4
- prerelease: false
5
- segments:
6
- - 0
7
- - 0
8
- - 12
9
- version: 0.0.12
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.13
5
+ prerelease:
10
6
  platform: ruby
11
- authors:
7
+ authors:
12
8
  - Alejandro Crosa
13
9
  - Stefan Mees
14
10
  - Tim Lossen
@@ -16,35 +12,37 @@ authors:
16
12
  autorequire: kafka-rb
17
13
  bindir: bin
18
14
  cert_chain: []
19
-
20
- date: 2012-12-19 00:00:00 -08:00
21
- default_executable:
22
- dependencies:
23
- - !ruby/object:Gem::Dependency
15
+ date: 2013-03-30 00:00:00.000000000 Z
16
+ dependencies:
17
+ - !ruby/object:Gem::Dependency
24
18
  name: rspec
25
- prerelease: false
26
- requirement: &id001 !ruby/object:Gem::Requirement
27
- requirements:
28
- - - ">="
29
- - !ruby/object:Gem::Version
30
- segments:
31
- - 0
32
- version: "0"
19
+ requirement: !ruby/object:Gem::Requirement
20
+ none: false
21
+ requirements:
22
+ - - ! '>='
23
+ - !ruby/object:Gem::Version
24
+ version: '0'
33
25
  type: :development
34
- version_requirements: *id001
35
- description: kafka-rb allows you to produce and consume messages using the Kafka distributed publish/subscribe messaging service.
26
+ prerelease: false
27
+ version_requirements: !ruby/object:Gem::Requirement
28
+ none: false
29
+ requirements:
30
+ - - ! '>='
31
+ - !ruby/object:Gem::Version
32
+ version: '0'
33
+ description: kafka-rb allows you to produce and consume messages using the Kafka distributed
34
+ publish/subscribe messaging service.
36
35
  email:
37
36
  executables: []
38
-
39
37
  extensions: []
40
-
41
- extra_rdoc_files:
38
+ extra_rdoc_files:
42
39
  - LICENSE
43
- files:
40
+ files:
44
41
  - LICENSE
45
42
  - README.md
46
43
  - Rakefile
47
44
  - lib/kafka/batch.rb
45
+ - lib/kafka/cli.rb
48
46
  - lib/kafka/consumer.rb
49
47
  - lib/kafka/encoder.rb
50
48
  - lib/kafka/error_codes.rb
@@ -55,38 +53,47 @@ files:
55
53
  - lib/kafka/producer_request.rb
56
54
  - lib/kafka/request_type.rb
57
55
  - lib/kafka.rb
58
- has_rdoc: true
56
+ - spec/batch_spec.rb
57
+ - spec/cli_spec.rb
58
+ - spec/consumer_spec.rb
59
+ - spec/encoder_spec.rb
60
+ - spec/io_spec.rb
61
+ - spec/kafka_spec.rb
62
+ - spec/message_spec.rb
63
+ - spec/multi_producer_spec.rb
64
+ - spec/producer_request_spec.rb
65
+ - spec/producer_spec.rb
66
+ - spec/spec_helper.rb
59
67
  homepage: http://github.com/acrosa/kafka-rb
60
68
  licenses: []
61
-
62
69
  post_install_message:
63
70
  rdoc_options: []
64
-
65
- require_paths:
71
+ require_paths:
66
72
  - lib
67
- required_ruby_version: !ruby/object:Gem::Requirement
68
- requirements:
69
- - - ">="
70
- - !ruby/object:Gem::Version
71
- segments:
72
- - 0
73
- version: "0"
74
- required_rubygems_version: !ruby/object:Gem::Requirement
75
- requirements:
76
- - - ">="
77
- - !ruby/object:Gem::Version
78
- segments:
73
+ required_ruby_version: !ruby/object:Gem::Requirement
74
+ none: false
75
+ requirements:
76
+ - - ! '>='
77
+ - !ruby/object:Gem::Version
78
+ version: '0'
79
+ segments:
79
80
  - 0
80
- version: "0"
81
+ hash: 3316747562611299804
82
+ required_rubygems_version: !ruby/object:Gem::Requirement
83
+ none: false
84
+ requirements:
85
+ - - ! '>='
86
+ - !ruby/object:Gem::Version
87
+ version: '0'
81
88
  requirements: []
82
-
83
89
  rubyforge_project:
84
- rubygems_version: 1.3.6
90
+ rubygems_version: 1.8.24
85
91
  signing_key:
86
92
  specification_version: 3
87
93
  summary: A Ruby client for the Kafka distributed publish/subscribe messaging service
88
- test_files:
94
+ test_files:
89
95
  - spec/batch_spec.rb
96
+ - spec/cli_spec.rb
90
97
  - spec/consumer_spec.rb
91
98
  - spec/encoder_spec.rb
92
99
  - spec/io_spec.rb