heller 0.0.3-java → 0.2.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,178 @@
1
# encoding: utf-8

require 'spec_helper'

module Heller
  # Specs for ProducerConfiguration: verifies default settings, merging of
  # user-supplied options (Symbol or String keys), and conversion to the
  # underlying Kafka::Producer::ProducerConfig Java object.
  describe ProducerConfiguration do
    it 'has sane defaults for running locally' do
      configuration = described_class.new

      expect(configuration[:brokers]).to eq('localhost:9092')
      expect(configuration[:serializer]).to eq('kafka.serializer.StringEncoder')
      expect(configuration[:ack]).to eq(-1)
      expect(configuration[:type]).to eq(:sync)
    end

    context 'given hash with user-defined options' do
      context 'with Symbol keys' do
        it 'merges them with the defaults' do
          configuration = described_class.new(
            brokers: 'localhost:9092,localhost:9093',
            serializer: 'kafka.serializer.DefaultEncoder',
            batch_size: 1500
          )

          expect(configuration[:brokers]).to eq('localhost:9092,localhost:9093')
          expect(configuration[:serializer]).to eq('kafka.serializer.DefaultEncoder')
          expect(configuration[:batch_size]).to eq(1500)
        end
      end

      context 'with String keys' do
        it 'merges them with the defaults' do
          configuration = described_class.new(
            'brokers' => 'localhost:9092,localhost:9093',
            'serializer' => 'kafka.serializer.DefaultEncoder',
            'batch_size' => 1500
          )

          expect(configuration[:brokers]).to eq('localhost:9092,localhost:9093')
          expect(configuration[:serializer]).to eq('kafka.serializer.DefaultEncoder')
          expect(configuration[:batch_size]).to eq(1500)
        end
      end
    end

    context '#to_java' do
      # Shared examples run once with Symbol-keyed options and once with
      # String-keyed options; both must map onto the same Java config values.
      shared_examples_for 'a ProducerConfiguration' do
        let :configuration do
          described_class.new(options).to_java
        end

        it 'returns an instance of Kafka::Producer::ProducerConfig' do
          expect(configuration).to be_a(Kafka::Producer::ProducerConfig)
        end

        it 'sets #broker_list' do
          expect(configuration.broker_list).to eq('localhost:9092,localhost:9093')
        end

        it 'sets #request_required_acks' do
          expect(configuration.request_required_acks).to eq(-1)
        end

        it 'sets #producer_type' do
          expect(configuration.producer_type).to eq('async')
        end

        # NOTE: description normalized to '#serializer_class' for consistency
        # with the other examples in this shared group.
        it 'sets #serializer_class' do
          expect(configuration.serializer_class).to eq('kafka.serializer.StringEncoder')
        end

        it 'sets #key_serializer_class' do
          expect(configuration.key_serializer_class).to eq('kafka.serializer.DefaultEncoder')
        end

        it 'sets #partitioner_class' do
          expect(configuration.partitioner_class).to eq('kafka.producer.DefaultPartitioner')
        end

        it 'sets #compression_codec' do
          expect(configuration.compression_codec.name).to eq('gzip')
        end

        it 'sets #message_send_max_retries' do
          expect(configuration.message_send_max_retries).to eq(5)
        end

        it 'sets #retry_backoff_ms' do
          expect(configuration.retry_backoff_ms).to eq(1500)
        end

        it 'sets #topic_metadata_refresh_interval_ms' do
          expect(configuration.topic_metadata_refresh_interval_ms).to eq(5000)
        end

        it 'sets #queue_buffering_max_ms' do
          expect(configuration.queue_buffering_max_ms).to eq(1000 * 100)
        end

        it 'sets #queue_buffering_max_messages' do
          expect(configuration.queue_buffering_max_messages).to eq(10000)
        end

        it 'sets #queue_enqueue_timeout_ms' do
          expect(configuration.queue_enqueue_timeout_ms).to eq(1000)
        end

        it 'sets #batch_num_messages' do
          expect(configuration.batch_num_messages).to eq(2000)
        end

        it 'sets #send_buffer_bytes' do
          expect(configuration.send_buffer_bytes).to eq(1024 * 1000)
        end

        it 'sets #client_id' do
          expect(configuration.client_id).to eq('spec-client')
        end

        it 'sets #request_timeout_ms' do
          expect(configuration.request_timeout_ms).to eq(10000)
        end
      end

      context 'with Symbol keys' do
        let :options do
          {
            brokers: 'localhost:9092,localhost:9093',
            type: :async,
            serializer: 'kafka.serializer.StringEncoder',
            key_serializer: 'kafka.serializer.DefaultEncoder',
            partitioner: 'kafka.producer.DefaultPartitioner',
            compression: :gzip,
            num_retries: 5,
            retry_backoff: 1500,
            metadata_refresh_interval: 5000,
            batch_size: 2000,
            client_id: 'spec-client',
            request_timeout: 10000,
            buffer_limit: 100 * 100,
            buffer_timeout: 1000 * 100,
            enqueue_timeout: 1000,
            socket_buffer: 1024 * 1000,
            ack: -1
          }
        end

        it_behaves_like 'a ProducerConfiguration'
      end

      context 'with String keys' do
        let :options do
          {
            'brokers' => 'localhost:9092,localhost:9093',
            'type' => :async,
            'serializer' => 'kafka.serializer.StringEncoder',
            'key_serializer' => 'kafka.serializer.DefaultEncoder',
            'partitioner' => 'kafka.producer.DefaultPartitioner',
            'compression' => :gzip,
            'num_retries' => 5,
            'retry_backoff' => 1500,
            'metadata_refresh_interval' => 5000,
            'batch_size' => 2000,
            'client_id' => 'spec-client',
            'request_timeout' => 10000,
            'buffer_limit' => 100 * 100,
            'buffer_timeout' => 1000 * 100,
            'enqueue_timeout' => 1000,
            'socket_buffer' => 1024 * 1000,
            'ack' => -1
          }
        end

        it_behaves_like 'a ProducerConfiguration'
      end
    end
  end
end
@@ -0,0 +1,65 @@
1
# encoding: utf-8

require 'spec_helper'

module Heller
  # Specs for Producer: construction, pushing messages, and disconnecting.
  # The Java producer implementation is replaced by a spy double throughout.
  describe Producer do
    let(:producer)      { described_class.new('localhost:9092', producer_impl: producer_impl) }
    let(:producer_impl) { double(:producer_impl, new: producer_spy) }
    let(:producer_spy)  { double(:producer, send: nil, close: nil) }

    describe '#new' do
      it 'converts options to a Kafka::Producer::ProducerConfig object' do
        described_class.new('localhost:9092,localhost:9093', type: :async, producer_impl: producer_impl)

        expect(producer_impl).to have_received(:new).with(instance_of(Kafka::Producer::ProducerConfig))
      end
    end

    describe '#push' do
      it 'wraps messages in a java.util.ArrayList' do
        messages = [
          Heller::Message.new('topic', 'actual message', 'key!'),
          Heller::Message.new('topic2', 'payload')
        ]

        # Inspect the argument the underlying producer receives.
        expect(producer_spy).to receive(:send) do |msgs|
          expect(msgs).to be_a(java.util.ArrayList)
          expect(msgs.to_a).to eq(messages)
        end

        producer.push(messages)
      end

      it 'allows sending a single message' do
        message = Heller::Message.new('topic', 'actual message')

        expect(producer_spy).to receive(:send) do |msgs|
          expect(msgs.size).to eq(1)
          expect(msgs.first).to eq(message)
        end

        producer.push(message)
      end
    end

    describe '#disconnect' do
      it 'calls #close on the underlying producer' do
        producer.disconnect

        expect(producer_spy).to have_received(:close)
      end

      it 'is aliased to #close' do
        producer.close

        expect(producer_spy).to have_received(:close)
      end
    end
  end
end
@@ -0,0 +1,122 @@
1
# encoding: utf-8

require 'spec_helper'

module Heller
  # Specs for TopicMetadataResponse: iteration over topic/partition metadata,
  # delegation to the underlying Java response, and leader/ISR lookups
  # (including caching and missing-combination errors).
  describe TopicMetadataResponse do
    let(:response)             { described_class.new(underlying) }
    let(:underlying)           { double(:topic_metadata_response, topics_metadata: [fake_topics_metadata]) }
    let(:fake_topics_metadata) { double(:topics_metadata) }

    describe '#each' do
      let(:fake_partition_metadata) { double(:partition_metadata) }

      before do
        allow(fake_topics_metadata).to receive(:topic).and_return('spec')
        allow(fake_topics_metadata).to receive(:partitions_metadata).and_return([fake_partition_metadata])
      end

      it 'yields topic and partition_metadata' do
        yielded = []
        response.each { |topic, meta| yielded << [topic, meta] }
        expect(yielded.flatten).to eq(['spec', fake_partition_metadata])
      end
    end

    describe '#metadata' do
      before do
        allow(underlying).to receive(:topics_metadata)
      end

      it 'returns #topics_metadata' do
        response.metadata

        expect(underlying).to have_received(:topics_metadata).once
      end
    end

    describe '#leader_for' do
      context 'given a topic-partition combination that exists' do
        let(:fake_partition_metadata) { double(:partition_metadata) }

        before do
          allow(fake_topics_metadata).to receive(:topic).and_return('spec')
          allow(fake_topics_metadata).to receive(:partitions_metadata).and_return([fake_partition_metadata])
          allow(fake_partition_metadata).to receive(:partition_id).and_return(0)
          allow(fake_partition_metadata).to receive(:leader).and_return('a non-nil value')
        end

        it 'returns the leader' do
          expect(response.leader_for('spec', 0)).not_to be_nil

          expect(fake_partition_metadata).to have_received(:leader)
        end

        it 'caches the result' do
          # A second lookup must not hit the underlying metadata again.
          2.times { expect(response.leader_for('spec', 0)).not_to be_nil }

          expect(fake_partition_metadata).to have_received(:leader).once
        end
      end

      context 'given a topic-partition combination that does not exist' do
        before do
          allow(fake_topics_metadata).to receive(:topic).and_return('not-spec')
        end

        it 'raises NoSuchTopicPartitionCombinationError' do
          expect { response.leader_for('non-existent', 1) }.to raise_error(NoSuchTopicPartitionCombinationError)
        end
      end
    end

    describe '#isr_for' do
      context 'given a topic-partition combination that exists' do
        let(:fake_partition_metadata) { double(:partition_metadata) }

        before do
          allow(fake_topics_metadata).to receive(:topic).and_return('spec')
          allow(fake_topics_metadata).to receive(:partitions_metadata).and_return([fake_partition_metadata])
          allow(fake_partition_metadata).to receive(:partition_id).and_return(0)
          allow(fake_partition_metadata).to receive(:isr).and_return(['a non-nil value'])
        end

        it 'returns in sync replicas' do
          expect(response.isr_for('spec', 0)).not_to be_nil

          expect(fake_partition_metadata).to have_received(:isr)
        end

        it 'caches the result' do
          # A second lookup must not hit the underlying metadata again.
          2.times { expect(response.isr_for('spec', 0)).not_to be_nil }

          expect(fake_partition_metadata).to have_received(:isr).once
        end
      end

      context 'given a topic-partition combination that does not exist' do
        before do
          allow(fake_topics_metadata).to receive(:topic).and_return('not-spec')
        end

        it 'raises NoSuchTopicPartitionCombinationError' do
          expect { response.isr_for('non-existent', 1) }.to raise_error(NoSuchTopicPartitionCombinationError)
        end
      end
    end
  end
end
@@ -0,0 +1,166 @@
1
# encoding: utf-8

require 'spec_helper'


module Heller
  # Specs for ZookeeperConsumer: connector creation, stream creation (plain
  # and filtered, with optional key/value decoders), offset commits, and
  # shutdown. The JavaConsumerConnector is replaced by a double throughout.
  describe ZookeeperConsumer do
    let(:consumer)      { described_class.new('localhost:2181', options, consumer_impl) }
    let(:options)       { { group_id: 'test' } }
    let(:consumer_impl) { double(:consumer_impl) }

    # Collects the per-topic stream counts passed to #create_message_streams,
    # so examples can verify Ruby integers were converted to java.lang.Integer.
    let(:values) { [] }

    before do
      allow(consumer_impl).to receive(:createJavaConsumerConnector).and_return(consumer_impl)
      allow(consumer_impl).to receive(:create_message_streams) do |topic_counts, *|
        values.concat(topic_counts.values)
      end
      allow(consumer_impl).to receive(:create_message_streams_by_filter)
    end

    describe '#initialize' do
      it 'creates a JavaConsumerConnector' do
        described_class.new('localhost:2181', options, consumer_impl)

        expect(consumer_impl).to have_received(:createJavaConsumerConnector)
      end

      it 'includes ZooKeeper hosts in configuration' do
        allow(consumer_impl).to receive(:createJavaConsumerConnector) do |config|
          expect(config.zk_connect).to eq('localhost:2181')
        end

        described_class.new('localhost:2181', options, consumer_impl)
      end
    end

    describe '#create_streams' do
      context 'when given :key_decoder and :value_decoder' do
        let(:key_decoder)   { double(:key_decoder) }
        let(:value_decoder) { double(:value_decoder) }

        before do
          consumer.create_streams({'topic1' => 2}, key_decoder: key_decoder, value_decoder: value_decoder)
        end

        it 'creates message streams with given key decoder' do
          expect(consumer_impl).to have_received(:create_message_streams).with(anything, key_decoder, anything)
        end

        it 'creates message streams with given value decoder' do
          expect(consumer_impl).to have_received(:create_message_streams).with(anything, anything, value_decoder)
        end

        it 'converts longs to integers' do
          values.each do |value|
            expect(value).to be_a(java.lang.Integer)
          end
        end
      end

      context 'when not given any options' do
        before do
          consumer.create_streams({'topic1' => 2})
        end

        it 'creates message streams' do
          expect(consumer_impl).to have_received(:create_message_streams)
        end

        it 'converts longs to integers' do
          values.each do |value|
            expect(value).to be_a(java.lang.Integer)
          end
        end
      end
    end

    describe '#create_streams_by_filter' do
      # Outer contexts pass explicit nil decoders; the nested context below
      # overrides these with doubles.
      let(:key_decoder)   { nil }
      let(:value_decoder) { nil }

      before do
        consumer.create_streams_by_filter('hello-world', 1, key_decoder: key_decoder, value_decoder: value_decoder)
      end

      it 'creates message streams' do
        expect(consumer_impl).to have_received(:create_message_streams_by_filter)
      end

      it 'creates a Whitelist object from given filter' do
        expect(consumer_impl).to have_received(:create_message_streams_by_filter).with(instance_of(Kafka::Consumer::Whitelist), 1)
      end

      context 'when given :key_decoder and :value_decoder' do
        let(:key_decoder)   { double(:key_decoder) }
        let(:value_decoder) { double(:value_decoder) }

        it 'creates message streams with given key decoder' do
          expect(consumer_impl).to have_received(:create_message_streams_by_filter).with(anything, 1, key_decoder, anything)
        end

        it 'creates message streams with given value decoder' do
          expect(consumer_impl).to have_received(:create_message_streams_by_filter).with(anything, 1, anything, value_decoder)
        end
      end
    end

    describe '#commit' do
      before do
        allow(consumer_impl).to receive(:commit_offsets)
      end

      it 'calls #commit_offsets' do
        consumer.commit

        expect(consumer_impl).to have_received(:commit_offsets)
      end
    end

    describe '#close' do
      before do
        allow(consumer_impl).to receive(:shutdown)
      end

      it 'calls #shutdown' do
        consumer.close

        expect(consumer_impl).to have_received(:shutdown)
      end

      it 'is aliased as #shutdown' do
        consumer.shutdown

        expect(consumer_impl).to have_received(:shutdown)
      end
    end
  end
end