test_kafka 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/.gitignore ADDED
@@ -0,0 +1,17 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
data/.rspec ADDED
@@ -0,0 +1,2 @@
+ --color
+ --format progress
data/CHANGELOG.md ADDED
@@ -0,0 +1,3 @@
+ # 0.1.0
+
+ * Initial release
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in test_kafka.gemspec
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2013 Brian Schroeder
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,35 @@
+ # TestKafka
+
+ A minimal Kafka 0.8 runner suitable for integration testing.
+
+ Adapted from the integration tests of the excellent [poseidon](https://github.com/bpot/poseidon) gem.
+
+ ## Installation
+
+ Add TestKafka to your application's Gemfile:
+
+ ```ruby
+ gem 'test_kafka'
+ ```
+
+ and bundle:
+
+     $ bundle
+
+ ## Usage
+
+ ```ruby
+ require 'test_kafka'
+
+ cluster = TestKafka.start('/usr/local/kafka')
+ # or specify a custom temp directory and Kafka/ZK ports:
+ # cluster = TestKafka.start('/usr/local/kafka', '/tmp', 9092, 2181)
+
+ # ... interact with Kafka/ZK ...
+
+ cluster.stop
+ ```
+
+ ## Requirements
+
+ * Kafka 0.8 or higher
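The cluster returned by `TestKafka.start` also exposes `with_interruption`, which stops the broker, yields to the block, and restarts the broker afterwards (see `lib/test_kafka/cluster.rb` below). A minimal sketch of using it to exercise a client's failure handling:

```ruby
require 'test_kafka'

cluster = TestKafka.start('/usr/local/kafka')

# The broker is stopped for the duration of the block and restarted
# (inside an ensure) when the block exits, even if the block raises.
cluster.with_interruption do
  # ... verify that your client copes with a broker outage ...
end

cluster.stop
```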
data/Rakefile ADDED
@@ -0,0 +1 @@
+ require "bundler/gem_tasks"
data/lib/test_kafka/broker.rb ADDED
@@ -0,0 +1,64 @@
+ require 'test_kafka/java_runner'
+
+ module TestKafka
+   class Broker
+     DEFAULT_PROPERTIES = {
+       "broker.id" => 0,
+       "port" => 9092,
+       "num.network.threads" => 2,
+       "num.io.threads" => 2,
+       "socket.send.buffer.bytes" => 1048576,
+       "socket.receive.buffer.bytes" => 1048576,
+       "socket.request.max.bytes" => 104857600,
+       "log.dir" => "/tmp/kafka-logs",
+       "num.partitions" => 1,
+       "log.flush.interval.messages" => 10000,
+       "log.flush.interval.ms" => 1000,
+       "log.retention.hours" => 168,
+       "log.segment.bytes" => 536870912,
+       "log.cleanup.interval.mins" => 1,
+       "zookeeper.connect" => "localhost:2181",
+       "zookeeper.connection.timeout.ms" => 1000000,
+       "kafka.metrics.polling.interval.secs" => 5,
+       "kafka.metrics.reporters" => "kafka.metrics.KafkaCSVMetricsReporter",
+       "kafka.csv.metrics.dir" => "/tmp/kafka_metrics",
+       "kafka.csv.metrics.reporter.enabled" => "false",
+     }.freeze
+
+     def initialize(kafka_path, tmp_dir, port, zk_port, broker_id=0, partition_count=1)
+       @broker_id = broker_id
+       @port = port
+       @jr = JavaRunner.new("broker_#{broker_id}",
+                            tmp_dir,
+                            "kafka.Kafka",
+                            port,
+                            kafka_path,
+                            DEFAULT_PROPERTIES.merge(
+                              "broker.id" => broker_id,
+                              "port" => port,
+                              "log.dir" => "#{tmp_dir}/kafka-logs_#{broker_id}",
+                              "kafka.csv.metrics.dir" => "#{tmp_dir}/kafka_metrics",
+                              "num.partitions" => partition_count,
+                              "zookeeper.connect" => "localhost:#{zk_port}"
+                            ))
+     end
+
+     attr_reader :broker_id, :port
+
+     def start
+       @jr.start
+     end
+
+     def stop
+       @jr.stop
+     end
+
+     def pid
+       @jr.pid
+     end
+
+     def with_interruption(&block)
+       @jr.with_interruption(&block)
+     end
+   end
+ end
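`Broker#initialize` layers the instance-specific settings over `DEFAULT_PROPERTIES`, so a second broker only needs distinct ports and a distinct `broker.id`. A hypothetical sketch (ports, paths, and counts are illustrative):

```ruby
require 'test_kafka/broker'

# Positional args: kafka_path, tmp_dir, broker port, ZK port,
# then the optional broker.id (default 0) and partition count (default 1).
broker = TestKafka::Broker.new('/usr/local/kafka', '/tmp/test_kafka',
                               9093, 2181, 1, 3)
broker.start
# "broker.id" => 1, "port" => 9093, and "num.partitions" => 3 are merged
# over DEFAULT_PROPERTIES before the properties file is written.
broker.stop
```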
data/lib/test_kafka/cluster.rb ADDED
@@ -0,0 +1,31 @@
+ require 'test_kafka/zookeeper'
+ require 'test_kafka/broker'
+
+ module TestKafka
+   class Cluster
+     def initialize(kafka_path, tmp_dir, kafka_port, zk_port)
+       @zookeeper = Zookeeper.new(kafka_path, tmp_dir, zk_port)
+       @broker = Broker.new(kafka_path, tmp_dir, kafka_port, zk_port)
+     end
+
+     attr_reader :broker, :zookeeper
+
+     def start
+       @zookeeper.start
+       @broker.start
+
+       self
+     end
+
+     def stop
+       @zookeeper.stop
+       @broker.stop
+
+       self
+     end
+
+     def with_interruption(&block)
+       @broker.with_interruption(&block)
+     end
+   end
+ end
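Since `start` returns `self`, construction and startup chain into one expression; `lib/test_kafka.rb` (below) relies on exactly this. For illustration, assuming a Kafka install at `/usr/local/kafka`:

```ruby
require 'test_kafka/cluster'

# Equivalent to TestKafka.start, minus the temp-directory cleanup.
cluster = TestKafka::Cluster.new('/usr/local/kafka', '/tmp/test_kafka',
                                 9092, 2181).start
cluster.stop
```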
data/lib/test_kafka/java_runner.rb ADDED
@@ -0,0 +1,105 @@
+ require 'fileutils'
+ require 'socket' # daemon_controller needs this but currently doesn't require it
+ require 'daemon_controller'
+
+ module TestKafka
+   class JavaRunner
+     MAX_HEAP_SIZE = 512
+     JAR_PATTERN = "core/target/scala-*/*.jar"
+
+     def initialize(id, tmp_dir, java_class, port, kafka_path, properties={})
+       @id = id
+       @tmp_dir = tmp_dir
+       @java_class = java_class
+       @port = port
+       @kafka_path = kafka_path
+       @properties = properties
+     end
+
+     attr_reader :tmp_dir, :java_class, :kafka_path
+
+     def start
+       write_properties
+       run
+     end
+
+     def stop
+       daemon_controller.stop
+     end
+
+     def with_interruption
+       stop
+       begin
+         yield
+       ensure
+         start
+       end
+     end
+
+     def pid
+       data = File.read(pid_path)
+
+       data.to_i if data
+     end
+
+     private
+
+     def classpath
+       Dir.glob(kafka_path + "/" + JAR_PATTERN).join(":")
+     end
+
+     def java_command
+       "exec java -Xmx#{MAX_HEAP_SIZE}M -server -cp #{classpath} #{java_class} #{config_path}"
+     end
+
+     def daemon_controller
+       @dc ||= DaemonController.new(
+         :identifier => @id,
+         :start_command => "#{java_command} >>#{log_path} 2>&1 & echo $! > #{pid_path}",
+         :ping_command => [:tcp, '127.0.0.1', @port],
+         :pid_file => pid_path,
+         :log_file => log_path,
+         :start_timeout => 25
+       )
+     end
+
+     def run
+       FileUtils.mkdir_p(log_dir)
+       FileUtils.mkdir_p(pid_dir)
+       daemon_controller.start
+     end
+
+     def write_properties
+       FileUtils.mkdir_p(config_dir)
+       File.open(config_path, "w+") do |f|
+         @properties.each do |k, v|
+           f.puts "#{k}=#{v}"
+         end
+       end
+     end
+
+     def pid_path
+       "#{pid_dir}/#{@id}.pid"
+     end
+
+     def pid_dir
+       "#{tmp_dir}/pid"
+     end
+
+     def log_path
+       "#{log_dir}/#{@id}.log"
+     end
+
+     def log_dir
+       "#{tmp_dir}/log"
+     end
+
+     def config_path
+       "#{config_dir}/#{@id}.properties"
+     end
+
+     def config_dir
+       "#{tmp_dir}/config"
+     end
+   end
+ end
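`write_properties` serializes the configuration hash as flat `key=value` lines, the properties-file format that both `kafka.Kafka` and `QuorumPeerMain` accept as their config argument. A standalone sketch of the same serialization, with a hypothetical output path:

```ruby
# Mirrors JavaRunner#write_properties for two sample keys.
props = { "broker.id" => 0, "port" => 9093 }

File.open("/tmp/demo.properties", "w+") do |f|
  props.each { |k, v| f.puts "#{k}=#{v}" }
end

puts File.read("/tmp/demo.properties")
# broker.id=0
# port=9093
```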
data/lib/test_kafka/version.rb ADDED
@@ -0,0 +1,3 @@
+ module TestKafka
+   VERSION = "0.1.0"
+ end
data/lib/test_kafka/zookeeper.rb ADDED
@@ -0,0 +1,35 @@
+ require 'test_kafka/java_runner'
+
+ module TestKafka
+   class Zookeeper
+     def initialize(kafka_path, tmp_dir, port)
+       @port = port
+       @jr = JavaRunner.new("zookeeper",
+                            tmp_dir,
+                            "org.apache.zookeeper.server.quorum.QuorumPeerMain",
+                            port,
+                            kafka_path,
+                            "dataDir" => "#{tmp_dir}/zookeeper",
+                            "clientPort" => port,
+                            "maxClientCnxns" => 0)
+     end
+
+     attr_reader :port
+
+     def start
+       @jr.start
+     end
+
+     def stop
+       @jr.stop
+     end
+
+     def pid
+       @jr.pid
+     end
+
+     def with_interruption(&block)
+       @jr.with_interruption(&block)
+     end
+   end
+ end
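The embedded server speaks the standard ZooKeeper protocol, so any client library works against it; the specs below use the zk gem. A minimal sketch, assuming the server is started on port 2182:

```ruby
require 'test_kafka/zookeeper'
require 'zk'

server = TestKafka::Zookeeper.new('/usr/local/kafka', '/tmp/zk-test', 2182)
server.start

client = ZK.new('localhost:2182')
client.create('/path', 'foo')
client.get('/path').first  # => "foo"

client.close!
server.stop
```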
data/lib/test_kafka.rb ADDED
@@ -0,0 +1,11 @@
+ require 'fileutils'
+ require 'test_kafka/cluster'
+
+ module TestKafka
+   def self.start(kafka_path, tmp_root='/tmp', kafka_port=9092, zk_port=2181)
+     tmp_dir = tmp_root + "/test_kafka"
+     FileUtils.rm_rf(tmp_dir)
+
+     TestKafka::Cluster.new(kafka_path, tmp_dir, kafka_port, zk_port).start
+   end
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,45 @@
+ RSpec.configure do |config|
+   config.treat_symbols_as_metadata_keys_with_true_values = true
+   # config.run_all_when_everything_filtered = true
+   # config.filter_run :focus
+   config.order = 'random'
+ end
+
+ DEFAULT_KAFKA_PATH = "/usr/local/kafka"
+ KAFKA_PATH = ENV["KAFKA_PATH"] || DEFAULT_KAFKA_PATH
+
+ require 'test_kafka/java_runner'
+
+ if Dir.glob(KAFKA_PATH + "/" + TestKafka::JavaRunner::JAR_PATTERN).empty?
+   fail "Could not find Kafka. Set the environment variable KAFKA_PATH or install Kafka to /usr/local/kafka."
+ end
+
+ def running?(pid)
+   Process.kill(0, pid)
+   true
+ rescue Errno::ESRCH
+   false
+ end
+
+ require 'poseidon'
+
+ def write_messages(port, messages)
+   producer = Poseidon::Producer.new(["localhost:#{port}"],
+                                     'test_producer')
+   producer.send_messages(messages.map { |m|
+     Poseidon::MessageToSend.new('topic1', m)
+   })
+ end
+
+ def read_messages(port)
+   consumer = Poseidon::PartitionConsumer.new(
+     'test_consumer',
+     'localhost',
+     port,
+     'topic1',
+     0,
+     :earliest_offset
+   )
+
+   consumer.fetch.map(&:value)
+ end
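Given the guard above, the suite can be pointed at any Kafka checkout via the `KAFKA_PATH` environment variable; a typical invocation (the path is illustrative) would be:

    $ KAFKA_PATH=/opt/kafka-0.8 bundle exec rspec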
data/spec/test_kafka/broker_spec.rb ADDED
@@ -0,0 +1,87 @@
+ require 'spec_helper'
+ require 'test_kafka/broker'
+ require 'test_kafka/zookeeper'
+
+ describe TestKafka::Broker do
+   let(:broker_port) { 9093 }
+   let(:zk_port) { 2182 }
+   let(:broker_tmp_dir) { "/tmp/kafka-test" }
+   let(:broker) { TestKafka::Broker.new(KAFKA_PATH,
+                                        broker_tmp_dir,
+                                        broker_port,
+                                        zk_port) }
+
+   before(:all) do
+     zk_tmp_dir = "/tmp/zk-test"
+     FileUtils.rm_rf(zk_tmp_dir)
+     @zk = TestKafka::Zookeeper.new(KAFKA_PATH, zk_tmp_dir, zk_port)
+     @zk.start
+   end
+
+   after(:all) do
+     @zk.stop
+   end
+
+   before do
+     FileUtils.rm_rf(broker_tmp_dir)
+   end
+
+   describe '#start' do
+     it 'starts a Kafka broker' do
+       broker.start
+       messages = ['value1', 'value2']
+       write_messages(broker_port, messages)
+
+       read_messages(broker_port).should eql messages
+
+       broker.stop
+     end
+   end
+
+   describe '#pid' do
+     it 'is the PID of the broker process' do
+       broker.start
+       ps_output = `ps aux |
+                    grep "kafka\\.Kafka" |
+                    grep #{Regexp.escape(broker_tmp_dir)} |
+                    grep -v grep`
+       ps_pid = ps_output[/(\d+)/, 1].to_i
+
+       broker.pid.should eql ps_pid
+
+       broker.stop
+     end
+   end
+
+   describe '#stop' do
+     it 'stops a running Kafka broker' do
+       broker.start
+       pid = broker.pid
+       broker.stop
+
+       running?(pid).should be_false
+     end
+   end
+
+   describe '#port' do
+     it 'is the provided port' do
+       broker.port.should eql broker_port
+     end
+   end
+
+   describe '#with_interruption' do
+     it 'temporarily drops the broker' do
+       broker.start
+       old_pid = broker.pid
+
+       broker.with_interruption do
+         running?(old_pid).should be_false
+       end
+       new_pid = broker.pid
+
+       running?(new_pid).should be_true
+
+       broker.stop
+     end
+   end
+ end
data/spec/test_kafka/cluster_spec.rb ADDED
@@ -0,0 +1,56 @@
+ require 'spec_helper'
+ require 'test_kafka/cluster'
+ require 'poseidon'
+
+ describe TestKafka::Cluster do
+   let(:tmp_dir) { "/tmp/cluster-test" }
+   let(:broker_port) { 9093 }
+   let(:zk_port) { 2182 }
+   let(:cluster) { TestKafka::Cluster.new(KAFKA_PATH, tmp_dir, broker_port, zk_port) }
+
+   before do
+     FileUtils.rm_rf(tmp_dir)
+   end
+
+   describe '#start' do
+     it 'starts the cluster' do
+       cluster.start
+       messages = ['value1', 'value2']
+       write_messages(broker_port, messages)
+
+       read_messages(broker_port).should eql messages
+
+       cluster.stop
+     end
+   end
+
+   describe '#stop' do
+     it 'stops a running cluster' do
+       cluster.start
+       broker_pid = cluster.broker.pid
+       zk_pid = cluster.zookeeper.pid
+       cluster.stop
+
+       running?(broker_pid).should be_false
+       running?(zk_pid).should be_false
+     end
+   end
+
+   describe '#with_interruption' do
+     it 'temporarily stops the kafka broker' do
+       cluster.start
+       old_broker_pid = cluster.broker.pid
+       old_zk_pid = cluster.zookeeper.pid
+
+       cluster.with_interruption do
+         running?(old_broker_pid).should be_false
+       end
+       new_broker_pid = cluster.broker.pid
+
+       running?(new_broker_pid).should be_true
+       running?(old_zk_pid).should be_true
+
+       cluster.stop
+     end
+   end
+ end
data/spec/test_kafka/zookeeper_spec.rb ADDED
@@ -0,0 +1,78 @@
+ require 'spec_helper'
+ require 'test_kafka/zookeeper'
+ require 'zk'
+ require 'fileutils'
+
+ describe TestKafka::Zookeeper do
+   let(:port) { 2182 }
+   let(:tmp_dir) { "/tmp/zk-test" }
+   let(:server) { TestKafka::Zookeeper.new(KAFKA_PATH, tmp_dir, port) }
+
+   before do
+     FileUtils.rm_rf(tmp_dir)
+   end
+
+   describe '#start' do
+     it 'starts a ZK server' do
+       server.start
+       client = ZK.new("localhost:#{port}")
+       begin
+         client.create("/path", "foo")
+       rescue ZK::Exceptions::OperationTimeOut
+         sleep 0.1
+         retry
+       end
+
+       client.get("/path").first.should eql "foo"
+
+       server.stop
+     end
+   end
+
+   describe '#pid' do
+     it 'is the PID of the ZK process' do
+       server.start
+       ps_output = `ps aux |
+                    grep zookeeper |
+                    grep #{Regexp.escape(tmp_dir)} |
+                    grep -v grep`
+       ps_pid = ps_output[/(\d+)/, 1].to_i
+
+       server.pid.should eql ps_pid
+
+       server.stop
+     end
+   end
+
+   describe '#stop' do
+     it 'stops a running ZK server' do
+       server.start
+       pid = server.pid
+       server.stop
+
+       running?(pid).should be_false
+     end
+   end
+
+   describe '#port' do
+     it 'is the provided port' do
+       server.port.should eql port
+     end
+   end
+
+   describe '#with_interruption' do
+     it 'temporarily stops the server' do
+       server.start
+       old_pid = server.pid
+
+       server.with_interruption do
+         running?(old_pid).should be_false
+       end
+       new_pid = server.pid
+
+       running?(new_pid).should be_true
+
+       server.stop
+     end
+   end
+ end
data/spec/test_kafka_spec.rb ADDED
@@ -0,0 +1,54 @@
+ require 'spec_helper'
+ require 'test_kafka'
+ require 'fileutils'
+
+ describe TestKafka do
+   describe '.start' do
+     context 'with custom args' do
+       let(:tmp_root) { '/tmp/test_kafka-test' }
+       let(:broker_port) { 9093 }
+       let(:zk_port) { 2182 }
+
+       it 'initializes and starts a single-node test cluster' do
+         cluster = TestKafka.start(KAFKA_PATH, tmp_root, broker_port, zk_port)
+
+         messages = ['value1', 'value2']
+         write_messages(broker_port, messages)
+
+         read_messages(broker_port).should eql messages
+
+         cluster.stop
+       end
+
+       it 'deletes an existing test_kafka temp directory if one exists' do
+         tmp_dir = tmp_root + "/test_kafka"
+         FileUtils.mkdir_p(tmp_dir)
+         canary_path = tmp_dir + "/canary"
+         File.write(canary_path, "chirp")
+
+         cluster = TestKafka.start(KAFKA_PATH, tmp_root, broker_port, zk_port)
+
+         File.exist?(canary_path).should be_false
+
+         cluster.stop
+       end
+     end
+
+     context 'with default args' do
+       before(:all) { @cluster = TestKafka.start(KAFKA_PATH) }
+       after(:all) { @cluster.stop }
+
+       it 'uses /tmp as a temp root' do
+         Dir.exist?('/tmp/test_kafka').should be_true
+       end
+
+       it 'uses kafka port 9092' do
+         @cluster.broker.port.should eql 9092
+       end
+
+       it 'uses ZK port 2181' do
+         @cluster.zookeeper.port.should eql 2181
+       end
+     end
+   end
+ end
data/test_kafka.gemspec ADDED
@@ -0,0 +1,28 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'test_kafka/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "test_kafka"
+   spec.version       = TestKafka::VERSION
+   spec.authors       = ["Brian Schroeder"]
+   spec.email         = ["bts@gmail.com"]
+   spec.description   = %q{Minimal Kafka runner suitable for integration testing}
+   spec.summary       = spec.description
+   spec.homepage      = "http://github.com/bts/test_kafka"
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files`.split($/)
+   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_development_dependency "bundler", "~> 1.3"
+   spec.add_development_dependency "rake", "~> 10.1.0"
+   spec.add_development_dependency "rspec", "~> 2.12.0"
+   spec.add_development_dependency "zk", "~> 1.9.2"
+   spec.add_development_dependency "poseidon", "~> 0.0.4"
+
+   spec.add_dependency "daemon_controller", "~> 1.0.0"
+ end
metadata ADDED
@@ -0,0 +1,166 @@
+ --- !ruby/object:Gem::Specification
+ name: test_kafka
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+   prerelease:
+ platform: ruby
+ authors:
+ - Brian Schroeder
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-11-11 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.3'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.3'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 10.1.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 10.1.0
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 2.12.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 2.12.0
+ - !ruby/object:Gem::Dependency
+   name: zk
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.9.2
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.9.2
+ - !ruby/object:Gem::Dependency
+   name: poseidon
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.0.4
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.0.4
+ - !ruby/object:Gem::Dependency
+   name: daemon_controller
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.0.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 1.0.0
+ description: Minimal Kafka runner suitable for integration testing
+ email:
+ - bts@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - .rspec
+ - CHANGELOG.md
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - lib/test_kafka.rb
+ - lib/test_kafka/broker.rb
+ - lib/test_kafka/cluster.rb
+ - lib/test_kafka/java_runner.rb
+ - lib/test_kafka/version.rb
+ - lib/test_kafka/zookeeper.rb
+ - spec/spec_helper.rb
+ - spec/test_kafka/broker_spec.rb
+ - spec/test_kafka/cluster_spec.rb
+ - spec/test_kafka/zookeeper_spec.rb
+ - spec/test_kafka_spec.rb
+ - test_kafka.gemspec
+ homepage: http://github.com/bts/test_kafka
+ licenses:
+ - MIT
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.23
+ signing_key:
+ specification_version: 3
+ summary: Minimal Kafka runner suitable for integration testing
+ test_files:
+ - spec/spec_helper.rb
+ - spec/test_kafka/broker_spec.rb
+ - spec/test_kafka/cluster_spec.rb
+ - spec/test_kafka/zookeeper_spec.rb
+ - spec/test_kafka_spec.rb