poseidon 0.0.4 → 0.0.5.pre1

Files changed (42)
  1. checksums.yaml +4 -4
  2. data/.gitignore +2 -0
  3. data/.travis.yml +2 -0
  4. data/CHANGES.md +4 -0
  5. data/README.md +4 -9
  6. data/Rakefile +3 -0
  7. data/lib/poseidon.rb +41 -24
  8. data/lib/poseidon/broker_pool.rb +7 -3
  9. data/lib/poseidon/cluster_metadata.rb +17 -1
  10. data/lib/poseidon/connection.rb +33 -11
  11. data/lib/poseidon/message_conductor.rb +2 -2
  12. data/lib/poseidon/messages_for_broker.rb +17 -0
  13. data/lib/poseidon/messages_to_send.rb +4 -4
  14. data/lib/poseidon/partition_consumer.rb +67 -24
  15. data/lib/poseidon/producer.rb +4 -1
  16. data/lib/poseidon/protocol/request_buffer.rb +12 -4
  17. data/lib/poseidon/sync_producer.rb +55 -22
  18. data/lib/poseidon/topic_metadata.rb +23 -8
  19. data/lib/poseidon/version.rb +1 -1
  20. data/log/.gitkeep +0 -0
  21. data/poseidon.gemspec +2 -2
  22. data/spec/integration/multiple_brokers/consumer_spec.rb +1 -1
  23. data/spec/integration/multiple_brokers/metadata_failures_spec.rb +35 -0
  24. data/spec/integration/multiple_brokers/rebalance_spec.rb +67 -0
  25. data/spec/integration/multiple_brokers/round_robin_spec.rb +4 -4
  26. data/spec/integration/multiple_brokers/spec_helper.rb +29 -7
  27. data/spec/integration/simple/compression_spec.rb +1 -0
  28. data/spec/integration/simple/connection_spec.rb +1 -1
  29. data/spec/integration/simple/simple_producer_and_consumer_spec.rb +25 -2
  30. data/spec/integration/simple/spec_helper.rb +2 -2
  31. data/spec/integration/simple/truncated_messages_spec.rb +1 -1
  32. data/spec/integration/simple/unavailable_broker_spec.rb +9 -16
  33. data/spec/spec_helper.rb +3 -0
  34. data/spec/test_cluster.rb +51 -48
  35. data/spec/unit/broker_pool_spec.rb +28 -7
  36. data/spec/unit/cluster_metadata_spec.rb +3 -3
  37. data/spec/unit/message_conductor_spec.rb +27 -14
  38. data/spec/unit/messages_to_send_spec.rb +3 -3
  39. data/spec/unit/partition_consumer_spec.rb +28 -10
  40. data/spec/unit/sync_producer_spec.rb +16 -12
  41. metadata +24 -35
  42. data/spec/bin/kafka-run-class.sh +0 -65

data/spec/unit/messages_to_send_spec.rb CHANGED
@@ -38,12 +38,12 @@ describe MessagesToSend do
   context "is successful" do
     before(:each) do
       @mts.messages_for_brokers(nil).each do |mfb|
-        @mts.successfully_sent(mfb)
+        @mts.successfully_sent(mfb.messages)
       end
     end
 
     it "successfully sends all" do
-      expect(@mts.all_sent?).to eq(true)
+      expect(@mts.pending_messages?).to eq(false)
     end
   end
 
@@ -56,7 +56,7 @@ describe MessagesToSend do
     it "does not send all" do
       @mts.messages_for_brokers(nil).each do |mfb|
       end
-      expect(@mts.all_sent?).to eq(false)
+      expect(@mts.pending_messages?).to eq(true)
     end
   end
 end
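
The rename from all_sent? to pending_messages? (with the boolean sense inverted) and the switch to passing mfb.messages into successfully_sent suggest a small change to the producer's internal send loop. A hedged sketch of how calling code might use the renamed API, based only on the spec changes above; message_conductor and retry_later are placeholders:

    # Hypothetical sketch, not taken from the gem's source. Assumes
    # pending_messages? is the inverse of the old all_sent? and that
    # successfully_sent takes an array of messages.
    messages_to_send.messages_for_brokers(message_conductor).each do |batch|
      # ... deliver the batch to its broker here ...
      messages_to_send.successfully_sent(batch.messages)
    end

    retry_later if messages_to_send.pending_messages?
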

data/spec/unit/partition_consumer_spec.rb CHANGED
@@ -14,13 +14,13 @@ describe PartitionConsumer do
   describe "creation" do
     context "when passed unknown options" do
       it "raises an ArgumentError" do
-        expect { PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0,-2, :unknown => true) }.to raise_error(ArgumentError)
+        expect { PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, :earliest_offset, :unknown => true) }.to raise_error(ArgumentError)
       end
     end
 
     context "when passed an unknown offset" do
       it "raises an ArgumentError" do
-        expect { PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0,:coolest_offset) }.to raise_error(ArgumentError)
+        expect { PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, :coolest_offset) }.to raise_error(ArgumentError)
       end
     end
   end
@@ -30,14 +30,14 @@ describe PartitionConsumer do
     it "resolves offset if it's not set" do
       @connection.should_receive(:offset).and_return(@offset_response)
       pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic",
-                                 0, -2)
+                                 0, :earliest_offset)
 
       pc.next_offset
     end
 
     it "returns resolved offset" do
       pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic",
-                                 0, -2)
+                                 0, :earliest_offset)
       expect(pc.next_offset).to eq(100)
     end
   end
@@ -54,7 +54,7 @@ describe PartitionConsumer do
     it "is raised" do
       @offset_response.first.partition_offsets.first.stub!(:error).and_return(2)
       pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic",
-                                 0, -2)
+                                 0, :earliest_offset)
 
       expect { pc.next_offset }.to raise_error(Errors::InvalidMessage)
     end
@@ -63,12 +63,20 @@ describe PartitionConsumer do
     context "when no offset exists" do
       it "sets offset to 0" do
         pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic",
-                                   0, -2)
+                                   0, :earliest_offset)
 
         @offset_response.first.partition_offsets.first.stub!(:offsets).and_return([])
         expect(pc.next_offset).to eq(0)
       end
     end
+
+    context "when offset negative" do
+      it "resolves offset to one " do
+        pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic",
+                                   0, -10)
+        expect(pc.next_offset).to eq(90)
+      end
+    end
   end
 
   describe "fetching messages" do
@@ -81,7 +89,7 @@ describe PartitionConsumer do
       @response = Protocol::FetchResponse.new(stub('common'), [topic_fetch_response])
 
       @connection.stub(:fetch).and_return(@response)
-      @pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, -2)
+      @pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, :earliest_offset)
     end
 
     it "returns FetchedMessage objects" do
@@ -89,14 +97,24 @@ describe PartitionConsumer do
     end
 
     it "uses object defaults" do
-      @connection.should_receive(:fetch).with(10_000, 0, anything)
+      @connection.should_receive(:fetch).with(10_000, 1, anything)
       @pc.fetch
     end
 
     context "when options are passed" do
       it "overrides object defaults" do
-        @connection.should_receive(:fetch).with(20_000, 0, anything)
-        @pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, -2, :max_wait_ms => 20_000)
+        @connection.should_receive(:fetch).with(20_000, 1, anything)
+        @pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, :earliest_offset, :max_wait_ms => 20_000)
+
+        @pc.fetch
+      end
+    end
+
+    context "when negative offset beyond beginning of partition is passed" do
+      it "starts from the earliest offset" do
+        @pc = PartitionConsumer.new("test_client", "localhost", 9092, "test_topic", 0, -10000)
+        pfr = @response.topic_fetch_responses.first.partition_fetch_responses.first
+        pfr.stub!(:error).and_return(1, 1, 0)
 
         @pc.fetch
       end
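
These changes replace the raw Kafka sentinel -2 with the symbol :earliest_offset and add handling for negative offsets. A hedged usage sketch under those assumptions; client id, host, port, and topic are placeholders:

    require 'poseidon'

    # Start from the oldest retained message (previously expressed
    # as the raw Kafka sentinel -2).
    consumer = Poseidon::PartitionConsumer.new(
      "my_client", "localhost", 9092, "my_topic", 0, :earliest_offset)
    messages = consumer.fetch

    # Based on the new spec expecting next_offset == 90 when the stubbed
    # offset response is 100, a negative value appears to mean "this many
    # messages before the latest offset" -- an assumption, not documented here.
    recent = Poseidon::PartitionConsumer.new(
      "my_client", "localhost", 9092, "my_topic", 0, -10)
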

data/spec/unit/sync_producer_spec.rb CHANGED
@@ -10,6 +10,7 @@ describe SyncProducer do
     expect(sp.metadata_refresh_interval_ms).to eq(600_000)
     expect(sp.required_acks).to eq(0)
     expect(sp.max_send_retries).to eq(3)
+    expect(sp.socket_timeout_ms).to eq(10_000)
   end
 
   it "raises ArgumentError on unknown options" do
@@ -40,8 +41,9 @@ describe SyncProducer do
 
   it "fetches metadata" do
     @broker_pool.should_recieve(:fetch_metadata)
+
     @sp = SyncProducer.new("test_client", [])
-    @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+    @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue Errors::UnableToFetchMetadata
   end
 end
 
@@ -54,51 +56,53 @@ describe SyncProducer do
       @broker_pool.should_recieve(:execute_api_call, :producer, anything, anything, anything)
 
       @sp = SyncProducer.new("test_client", [])
-      @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue StandardError
     end
   end
 
   context "always fails" do
     before(:each) do
-      @mbts.stub!(:all_sent?).and_return(false)
+      @mbts.stub!(:pending_messages?).and_return(true)
       @sp = SyncProducer.new("test_client", [])
     end
 
     it "retries the correct number of times" do
      @mbts.should_receive(:messages_for_brokers).exactly(4).times
-      @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue StandardError
     end
 
     it "sleeps the correct amount between retries" do
       Kernel.should_receive(:sleep).with(0.1).exactly(4).times
-      @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue StandardError
     end
 
     it "refreshes metadata between retries" do
       @cluster_metadata.should_receive(:update).exactly(4).times
-      @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue StandardError
     end
 
-    it "returns false" do
-      expect(@sp.send_messages([Message.new(:topic => "topic", :value => "value")])).to eq(false)
+    it "raises an exception" do
+      expect {
+        @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      }.to raise_error
     end
   end
 
   context "no retries" do
     before(:each) do
-      @mbts.stub!(:all_sent?).and_return(false)
+      @mbts.stub!(:pending_messages?).and_return(true)
       @sp = SyncProducer.new("test_client", [], max_send_retries: 0)
     end
 
     it "does not call sleep" do
       Kernel.should_receive(:sleep).exactly(0).times
-      @sp.send_messages([Message.new(:topic => "topic", :value => "value")])
+      @sp.send_messages([Message.new(:topic => "topic", :value => "value")]) rescue Errors::UnableToFetchMetadata
     end
   end
 
   context "succeeds on first attempt" do
     before(:each) do
-      @mbts.stub!(:all_sent?).and_return(true)
+      @mbts.stub!(:pending_messages?).and_return(false)
       @sp = SyncProducer.new("test_client", [])
     end
 
@@ -119,7 +123,7 @@ describe SyncProducer do
 
   context "succeeds on second attempt" do
     before(:each) do
-      @mbts.stub!(:all_sent?).and_return(false, true)
+      @mbts.stub!(:pending_messages?).and_return(true, false)
       @sp = SyncProducer.new("test_client", [])
     end
 
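
The trailing rescue clauses and the replacement of the "returns false" example with "raises an exception" indicate that SyncProducer#send_messages now signals failure by raising rather than by returning false. A sketch of how a caller might adapt, assuming that behavior; the broker address and topic are placeholders:

    require 'poseidon'

    producer = Poseidon::SyncProducer.new("my_client", ["localhost:9092"])
    messages = [Poseidon::Message.new(:topic => "my_topic", :value => "hello")]

    begin
      producer.send_messages(messages)
    rescue Poseidon::Errors::UnableToFetchMetadata
      # Raised in the specs above when no broker metadata can be fetched.
      warn "could not fetch cluster metadata"
    rescue StandardError => e
      # The "always fails" spec only asserts that some error is raised,
      # so rescuing broadly here is an assumption.
      warn "send failed after retries: #{e.class}"
    end
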
metadata CHANGED
@@ -1,69 +1,55 @@
 --- !ruby/object:Gem::Specification
 name: poseidon
 version: !ruby/object:Gem::Version
-  version: 0.0.4
+  version: 0.0.5.pre1
 platform: ruby
 authors:
 - Bob Potter
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-09-26 00:00:00.000000000 Z
+date: 2014-10-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '0'
+        version: 2.13.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '0'
+        version: 2.13.0
 - !ruby/object:Gem::Dependency
   name: yard
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: simplecov
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - '>='
-      - !ruby/object:Gem::Version
-        version: '0'
-- !ruby/object:Gem::Dependency
-  name: daemon_controller
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - '>='
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - ">="
       - !ruby/object:Gem::Version
         version: '0'
 description: A Kafka (http://kafka.apache.org/) producer and consumer
@@ -73,10 +59,10 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- .gitignore
-- .rspec
-- .travis.yml
-- .yardopts
+- ".gitignore"
+- ".rspec"
+- ".travis.yml"
+- ".yardopts"
 - CHANGES.md
 - Gemfile
 - LICENSE.txt
@@ -111,9 +97,11 @@ files:
 - lib/poseidon/sync_producer.rb
 - lib/poseidon/topic_metadata.rb
 - lib/poseidon/version.rb
+- log/.gitkeep
 - poseidon.gemspec
-- spec/bin/kafka-run-class.sh
 - spec/integration/multiple_brokers/consumer_spec.rb
+- spec/integration/multiple_brokers/metadata_failures_spec.rb
+- spec/integration/multiple_brokers/rebalance_spec.rb
 - spec/integration/multiple_brokers/round_robin_spec.rb
 - spec/integration/multiple_brokers/spec_helper.rb
 - spec/integration/simple/compression_spec.rb
@@ -153,23 +141,24 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - '>='
+  - - ">="
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.9.3
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - '>='
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
 rubyforge_project:
-rubygems_version: 2.0.2
+rubygems_version: 2.2.2
 signing_key:
 specification_version: 4
 summary: Poseidon is a producer and consumer implementation for Kafka >= 0.8
 test_files:
-- spec/bin/kafka-run-class.sh
 - spec/integration/multiple_brokers/consumer_spec.rb
+- spec/integration/multiple_brokers/metadata_failures_spec.rb
+- spec/integration/multiple_brokers/rebalance_spec.rb
 - spec/integration/multiple_brokers/round_robin_spec.rb
 - spec/integration/multiple_brokers/spec_helper.rb
 - spec/integration/simple/compression_spec.rb
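
Beyond the version bump, the metadata now pins rspec to ~> 2.13.0, drops the daemon_controller development dependency, requires Ruby >= 1.9.3, and carries required_rubygems_version "> 1.3.1", which is how RubyGems marks prerelease versions. Because 0.0.5.pre1 is a prerelease, a bare dependency will not pick it up; it has to be requested explicitly, for example:

    # Gemfile: prerelease versions must be named explicitly
    # (or installed with `gem install poseidon --pre`).
    gem "poseidon", "0.0.5.pre1"
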
data/spec/bin/kafka-run-class.sh DELETED
@@ -1,65 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [ $# -lt 1 ];
-then
-  echo "USAGE: $0 classname [opts]"
-  exit 1
-fi
-
-SCALA_VERSION=2.8.0
-
-# assume all dependencies have been packaged into one jar with sbt-assembly's task "assembly-package-dependency"
-for file in $KAFKA_PATH/core/target/scala-2.8.0/*.jar;
-do
-  CLASSPATH=$CLASSPATH:$file
-done
-
-for file in $KAFKA_PATH/perf/target/scala-${SCALA_VERSION}/kafka*.jar;
-do
-  CLASSPATH=$CLASSPATH:$file
-done
-
-# classpath addition for release
-for file in $KAFKA_PATH/libs/*.jar;
-do
-  CLASSPATH=$CLASSPATH:$file
-done
-
-for file in $KAFKA_PATH/kafka*.jar;
-do
-  CLASSPATH=$CLASSPATH:$file
-done
-
-if [ -z "$KAFKA_JMX_OPTS" ]; then
-  KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
-fi
-
-if [ -z "$KAFKA_OPTS" ]; then
-  KAFKA_OPTS="-Xmx512M -server -Dlog4j.configuration=file:$KAFKA_PATH/config/log4j.properties"
-fi
-
-if [ $JMX_PORT ]; then
-  KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT "
-fi
-
-if [ -z "$JAVA_HOME" ]; then
-  JAVA="java"
-else
-  JAVA="$JAVA_HOME/bin/java"
-fi
-
-exec $JAVA $KAFKA_OPTS $KAFKA_JMX_OPTS -cp $CLASSPATH "$@"