heller 0.0.3-java → 0.2.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +193 -0
- data/lib/heller/configuration.rb +41 -0
- data/lib/heller/consumer.rb +93 -57
- data/lib/heller/consumer_configuration.rb +38 -0
- data/lib/heller/errors.rb +9 -0
- data/lib/heller/fetch_request.rb +11 -0
- data/lib/heller/fetch_response.rb +33 -0
- data/lib/heller/message.rb +9 -0
- data/lib/heller/message_set_enumerator.rb +35 -0
- data/lib/heller/offset_request.rb +23 -0
- data/lib/heller/offset_response.rb +29 -0
- data/lib/heller/producer.rb +24 -38
- data/lib/heller/producer_configuration.rb +44 -0
- data/lib/heller/topic_metadata_response.rb +66 -0
- data/lib/heller/version.rb +5 -0
- data/lib/heller/zookeeper_consumer.rb +47 -0
- data/lib/heller.rb +21 -2
- data/lib/kafka.rb +50 -27
- data/spec/heller/consumer_configuration_spec.rb +196 -0
- data/spec/heller/consumer_spec.rb +376 -0
- data/spec/heller/fetch_response_spec.rb +93 -0
- data/spec/heller/message_set_enumerator_spec.rb +54 -0
- data/spec/heller/offset_response_spec.rb +68 -0
- data/spec/heller/producer_configuration_spec.rb +178 -0
- data/spec/heller/producer_spec.rb +65 -0
- data/spec/heller/topic_metadata_response_spec.rb +122 -0
- data/spec/heller/zookeeper_consumer_spec.rb +166 -0
- data/spec/integration/end_to_end_communication_spec.rb +316 -0
- data/spec/integration/zookeeper_consumer_spec.rb +85 -0
- data/spec/spec_helper.rb +41 -0
- data/spec/support/fakers.rb +45 -0
- metadata +62 -27
data/spec/heller/consumer_spec.rb
@@ -0,0 +1,376 @@
+# encoding: utf-8
+
+require 'spec_helper'
+
+module Heller
+  describe Consumer do
+    let(:consumer) do
+      described_class.new('localhost:9092', consumer_impl: consumer_impl, client_id: 'spec-consumer')
+    end
+
+    let :consumer_impl do
+      double(:consumer_impl)
+    end
+
+    let :consumer_spy do
+      double(:consumer)
+    end
+
+    before do
+      allow(consumer_impl).to receive(:new) do |*args|
+        allow(consumer_spy).to receive(:client_id).and_return(args.last)
+        consumer_spy
+      end
+      allow(consumer_spy).to receive(:fetch)
+    end
+
+    describe '#initialize' do
+      it 'takes a connect string' do
+        consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl)
+        expect(consumer_impl).to have_received(:new).with('localhost', 9092, anything, anything, anything)
+      end
+
+      it 'proxies arguments when creating the internal consumer' do
+        consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl)
+        expect(consumer_impl).to have_received(:new).with('localhost', 9092, anything, anything, anything)
+      end
+
+      context 'when not given any options' do
+        it 'fills in sane defaults for missing options' do
+          consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl)
+          expect(consumer_impl).to have_received(:new).with('localhost', 9092, 30000, 65536, anything)
+        end
+
+        context 'client_id' do
+          it 'makes some kind of attempt to generate a unique client id' do
+            consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl)
+            expect(consumer.client_id).to match /heller\-consumer\-[a-f0-9]{8}\-[a-f0-9]{4}\-[a-f0-9]{4}\-[a-f0-9]{4}\-[a-f0-9]{12}/
+          end
+        end
+      end
+
+      context 'when given options' do
+        it 'merges options with the defaults' do
+          consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl, timeout: 10, buffer_size: 11, client_id: 'hi')
+          expect(consumer_impl).to have_received(:new).with('localhost', 9092, 10, 11, 'hi')
+        end
+      end
+    end
+
+    describe '#fetch' do
+      let :topic do
+        'spec'
+      end
+
+      let :partition do
+        0
+      end
+
+      let :offset do
+        0
+      end
+
+      context 'when given a single Heller::FetchRequest' do
+        let :request do
+          Heller::FetchRequest.new(topic, partition, offset)
+        end
+
+        it 'converts it to a Kafka::Api::FetchRequest' do
+          expect(consumer_spy).to receive(:fetch).with(instance_of(Kafka::Api::FetchRequest))
+
+          consumer.fetch(request)
+        end
+
+        it 'includes parameters from given Heller::FetchRequest' do
+          expect(consumer_spy).to receive(:fetch) do |request|
+            request_info = request.request_info
+            expect(request_info.size).to eq(1)
+
+            tuple = request_info.first
+            expect(tuple._1.topic).to eq('spec')
+            expect(tuple._1.partition).to eq(0)
+            expect(tuple._2.offset).to eq(0)
+          end
+
+          consumer.fetch(request)
+        end
+
+        it 'returns a Heller::FetchResponse object' do
+          expect(consumer.fetch(request)).to be_a(Heller::FetchResponse)
+        end
+      end
+
+      context 'when given an array of Heller::FetchRequests' do
+        it 'converts them to a Kafka::Api::FetchRequest' do
+          expect(consumer_spy).to receive(:fetch) do |request|
+            expect(request).to be_a(Kafka::Api::FetchRequest)
+            expect(request.request_info.size).to eq(3)
+          end
+
+          requests = 3.times.map { |i| Heller::FetchRequest.new(topic, partition + i, offset) }
+          consumer.fetch(requests)
+        end
+      end
+
+      context 'fetch options' do
+        let :fetch_request do
+          Heller::FetchRequest.new(topic, partition, offset)
+        end
+
+        it 'sets a default fetch size' do
+          expect(consumer_spy).to receive(:fetch) do |request|
+            tuple = request.request_info.first
+            expect(tuple._2.fetch_size).to eq(1024 * 1024)
+          end
+
+          consumer.fetch(fetch_request)
+        end
+
+        it 'allows fetch size to be overridden' do
+          expect(consumer_spy).to receive(:fetch) do |request|
+            tuple = request.request_info.first
+            expect(tuple._2.fetch_size).to eq(1024)
+          end
+
+          consumer.fetch(fetch_request, 1024)
+        end
+
+        it 'includes the client_id' do
+          expect(consumer_spy).to receive(:fetch) do |request|
+            expect(request.client_id).to eq('spec-consumer')
+          end
+
+          consumer.fetch(fetch_request)
+        end
+
+        it 'includes max_wait if given when the consumer was created' do
+          consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl, client_id: 'spec-consumer', max_wait: 1)
+
+          expect(consumer_spy).to receive(:fetch) do |request|
+            expect(request.max_wait).to eq(1)
+          end
+
+          consumer.fetch(fetch_request)
+        end
+
+        it 'includes min_bytes if given when the consumer was created' do
+          consumer = described_class.new('localhost:9092', consumer_impl: consumer_impl, client_id: 'spec-consumer', min_bytes: 1024)
+
+          expect(consumer_spy).to receive(:fetch) do |request|
+            expect(request.min_bytes).to eq(1024)
+          end
+
+          consumer.fetch(fetch_request)
+        end
+      end
+    end
+
+    describe '#offsets_before' do
+      before do
+        allow(consumer_spy).to receive(:get_offsets_before)
+      end
+
+      let :topic do
+        'spec'
+      end
+
+      let :partition do
+        0
+      end
+
+      let :time do
+        Time.utc(2013, 7, 20)
+      end
+
+      let :offset_request do
+        Heller::OffsetRequest.new(topic, partition, time)
+      end
+
+      it 'sends an OffsetRequest using #get_offsets_before' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          expect(request).to be_a(Kafka::JavaApi::OffsetRequest)
+        end
+
+        consumer.offsets_before(offset_request)
+      end
+
+      it 'returns a Heller::OffsetResponse' do
+        expect(consumer.offsets_before(offset_request)).to be_a(Heller::OffsetResponse)
+      end
+
+      it 'includes client_id' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          expect(request.underlying.client_id).not_to be_nil
+        end
+
+        consumer.offsets_before(offset_request)
+      end
+
+      it 'accepts ints instead of Time objects' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          request_info = request.underlying.request_info
+          expect(request.underlying.request_info.values.first.time).to eq(0)
+          expect(request_info.values.first.time).to eq(0)
+        end
+
+        offset_request = Heller::OffsetRequest.new(topic, partition, 0)
+        consumer.offsets_before(offset_request)
+      end
+
+      context 'maximum number of offsets to fetch' do
+        it 'defaults to 1' do
+          expect(consumer_spy).to receive(:get_offsets_before) do |request|
+            request_info = request.underlying.request_info
+            expect(request_info.values.first.max_num_offsets).to eq(1)
+          end
+
+          consumer.offsets_before(Heller::OffsetRequest.new('spec', 0, 0))
+        end
+
+        it 'is overridable' do
+          expect(consumer_spy).to receive(:get_offsets_before) do |request|
+            request_info = request.underlying.request_info
+            expect(request_info.values.first.max_num_offsets).to eq(10)
+          end
+
+          consumer.offsets_before(Heller::OffsetRequest.new('spec', 0, 0, 10))
+        end
+      end
+    end
+
+    describe '#earliest_offset' do
+      let :fake_offset_response do
+        double(:offset_response)
+      end
+
+      before do
+        allow(consumer_spy).to receive(:get_offsets_before).and_return(fake_offset_response)
+        allow(fake_offset_response).to receive(:offsets).with('spec', 0).and_return([0, 1, 2])
+      end
+
+      it 'sends an OffsetRequest with the magic value for \'earliest\' offset' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          request_info = request.underlying.request_info
+          expect(request_info.values.first.time).to eq(-2)
+
+          fake_offset_response
+        end
+
+        consumer.earliest_offset('spec', 0)
+      end
+
+      it 'returns a single offset' do
+        expect(consumer.earliest_offset('spec', 0)).to eq(0)
+      end
+
+      it 'fetches only one offset' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          request_info = request.underlying.request_info
+          expect(request_info.values.first.max_num_offsets).to eq(1)
+
+          fake_offset_response
+        end
+
+        consumer.earliest_offset('spec', 0)
+      end
+    end
+
+    describe '#latest_offset' do
+      let :fake_offset_response do
+        double(:offset_response)
+      end
+
+      before do
+        allow(fake_offset_response).to receive(:offsets).with('spec', 0).and_return([0, 1, 2])
+        allow(consumer_spy).to receive(:get_offsets_before).and_return(fake_offset_response)
+      end
+
+      it 'sends an OffsetRequest with the magic value for \'latest\' offset' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          request_info = request.underlying.request_info
+          expect(request_info.values.first.time).to eq(-1)
+
+          fake_offset_response
+        end
+
+        consumer.latest_offset('spec', 0)
+      end
+
+      it 'returns a single offset' do
+        expect(consumer.latest_offset('spec', 0)).to eq(2)
+      end
+
+      it 'fetches only one offset' do
+        expect(consumer_spy).to receive(:get_offsets_before) do |request|
+          request_info = request.underlying.request_info
+          expect(request_info.values.first.max_num_offsets).to eq(1)
+
+          fake_offset_response
+        end
+
+        consumer.latest_offset('spec', 0)
+      end
+    end
+
+    describe '#metadata' do
+      before do
+        allow(consumer_spy).to receive(:send)
+      end
+
+      context 'given a list of topics' do
+        it 'sends a TopicMetadataRequest' do
+          consumer.metadata(['topic1', 'topic2'])
+          expect(consumer_spy).to have_received(:send) do |request|
+            expect(request).to be_a(Kafka::JavaApi::TopicMetadataRequest)
+            expect(request.topics.to_a).to eql(['topic1', 'topic2'])
+          end
+        end
+
+        it 'returns a Heller::TopicMetadataResponse' do
+          expect(consumer.metadata(['topic1', 'topic2'])).to be_a(Heller::TopicMetadataResponse)
+        end
+      end
+
+      context 'given an empty list' do
+        it 'sends a TopicMetadataRequest' do
+          consumer.metadata([])
+          expect(consumer_spy).to have_received(:send) do |request|
+            expect(request.topics.to_a).to eq([])
+          end
+        end
+      end
+
+      context 'given no arguments' do
+        it 'sends a TopicMetadataRequest with an empty list of topics' do
+          consumer.metadata
+          expect(consumer_spy).to have_received(:send) do |request|
+            expect(request.topics.to_a).to eq([])
+          end
+        end
+      end
+
+      it 'is aliased as #topic_metadata' do
+        consumer.topic_metadata
+        expect(consumer_spy).to have_received(:send).with(an_instance_of(Kafka::JavaApi::TopicMetadataRequest))
+      end
+    end
+
+    context '#disconnect' do
+      before do
+        allow(consumer_spy).to receive(:close)
+      end
+
+      it 'calls #close on the underlying consumer' do
+        consumer.disconnect
+
+        expect(consumer_spy).to have_received(:close)
+      end
+
+      it 'is aliased to #close' do
+        consumer.close
+
+        expect(consumer_spy).to have_received(:close)
+      end
+    end
+  end
+end
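The new consumer_spec.rb pins down the public surface of `Heller::Consumer`: it wraps an underlying consumer implementation (`consumer_impl`), fills in defaults for timeout and buffer size, and exposes `#fetch`, `#offsets_before`, `#earliest_offset`/`#latest_offset`, `#metadata` (aliased `#topic_metadata`), and `#disconnect`. A minimal usage sketch based only on the behaviour exercised in that spec; a JRuby environment with the Kafka client jars available is assumed, and the broker address, topic, and client id below are illustrative:

```ruby
# Sketch of using Heller::Consumer, following the behaviour described in
# consumer_spec.rb above. Names and addresses are illustrative.
require 'heller'

consumer = Heller::Consumer.new('localhost:9092', client_id: 'example-consumer')

# Find the earliest available offset for partition 0, then fetch from it.
offset   = consumer.earliest_offset('example-topic', 0)
request  = Heller::FetchRequest.new('example-topic', 0, offset)
response = consumer.fetch(request)               # => Heller::FetchResponse

# Topic metadata and shutdown, as exercised by the spec.
metadata = consumer.metadata(['example-topic'])  # => Heller::TopicMetadataResponse
consumer.disconnect                              # aliased as #close
```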
data/spec/heller/fetch_response_spec.rb
@@ -0,0 +1,93 @@
+# encoding: utf-8
+
+require 'spec_helper'
+
+module Heller
+  describe FetchResponse do
+    let :fetch_response do
+      described_class.new(underlying, decoder)
+    end
+
+    let :underlying do
+      double(:fetch_response)
+    end
+
+    let :decoder do
+      double(:decoder)
+    end
+
+    let :message_set do
+      double(:message_set, iterator: nil)
+    end
+
+    before do
+      allow(underlying).to receive(:has_error?)
+      allow(underlying).to receive(:error)
+      allow(underlying).to receive(:high_watermark)
+    end
+
+    describe '#error?' do
+      it 'proxies underlying FetchResponse#has_error?' do
+        fetch_response.error?
+
+        expect(underlying).to have_received(:has_error?)
+      end
+    end
+
+    describe '#error' do
+      context 'given a topic and partition combination that does exist' do
+        it 'returns whatever the underlying FetchResponse returns' do
+          expect(underlying).to receive(:error_code).with('spec', 0).and_return('error stuff')
+
+          expect(fetch_response.error('spec', 0)).to eq('error stuff')
+        end
+      end
+
+      context 'given a topic and partition combination that does not exist' do
+        it 'raises NoSuchTopicPartitionCombinationError' do
+          expect(underlying).to receive(:error_code).with('non-existent', 1024).and_raise(IllegalArgumentException.new)
+
+          expect { fetch_response.error('non-existent', 1024) }.to raise_error(NoSuchTopicPartitionCombinationError)
+        end
+      end
+    end
+
+    describe '#messages' do
+      context 'given a topic and partition combination that does exist' do
+        it 'returns a MessageSetEnumerator' do
+          expect(underlying).to receive(:message_set).with('spec', 0).and_return(message_set)
+
+          enumerator = fetch_response.messages('spec', 0)
+
+          expect(enumerator).to be_a(MessageSetEnumerator)
+        end
+      end
+
+      context 'given a topic and partition combination that does not exist' do
+        it 'raises NoSuchTopicPartitionCombinationError' do
+          expect(underlying).to receive(:message_set).with('non-existent', 1024).and_raise(IllegalArgumentException.new)
+
+          expect { fetch_response.messages('non-existent', 1024) }.to raise_error(NoSuchTopicPartitionCombinationError)
+        end
+      end
+    end
+
+    describe '#high_watermark' do
+      context 'given a topic and partition combination that does exist' do
+        it 'proxies the method call to the underlying FetchResponse' do
+          fetch_response.high_watermark('spec', 0)
+
+          expect(underlying).to have_received(:high_watermark).with('spec', 0)
+        end
+      end
+
+      context 'given a topic and partition combination that does not exist' do
+        it 'raises NoSuchTopicPartitionCombinationError' do
+          expect(underlying).to receive(:high_watermark).with('non-existent', 1024).and_raise(IllegalArgumentException.new)
+
+          expect { fetch_response.high_watermark('non-existent', 1024) }.to raise_error(NoSuchTopicPartitionCombinationError)
+        end
+      end
+    end
+  end
+end
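fetch_response_spec.rb documents how results come back: `Heller::FetchResponse` proxies `#error?`, `#error` and `#high_watermark` to the underlying Java response, converts an unknown topic/partition pair into `NoSuchTopicPartitionCombinationError`, and hands back messages as a `MessageSetEnumerator`. A sketch of the consuming side, assuming `consumer` is a connected `Heller::Consumer`; topic and partition are illustrative:

```ruby
# Sketch of reading a Heller::FetchResponse, following the spec above.
response = consumer.fetch(Heller::FetchRequest.new('example-topic', 0, 0))

unless response.error?
  # #messages returns a MessageSetEnumerator yielding [offset, message] pairs.
  response.messages('example-topic', 0).each do |offset, message|
    puts "#{offset}: #{message}"
  end
  puts "high watermark: #{response.high_watermark('example-topic', 0)}"
end
```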
data/spec/heller/message_set_enumerator_spec.rb
@@ -0,0 +1,54 @@
+# encoding: utf-8
+
+require 'spec_helper'
+
+module Heller
+  describe MessageSetEnumerator do
+    let :message_set do
+      create_fake_message_set('first', 'second', 'third', 'fourth')
+    end
+
+    let :decoder do
+      Kafka::Serializer::StringDecoder.new(nil)
+    end
+
+    let :enumerator do
+      MessageSetEnumerator.new(message_set, decoder)
+    end
+
+    describe '#next' do
+      it 'returns the first offset and decoded message pair' do
+        offset, message = enumerator.next
+        expect(offset).to eq(0)
+        expect(message).to eq('first')
+      end
+
+      it 'returns the second offset and decoded message pair' do
+        enumerator.next
+        offset, message = enumerator.next
+        expect(offset).to eq(1)
+        expect(message).to eq('second')
+      end
+
+      it 'returns each offset and decoded message pair in order' do
+        result = []
+        4.times { result << enumerator.next }
+        expect(result).to eq([[0, 'first'], [1, 'second'], [2, 'third'], [3, 'fourth']])
+      end
+
+      it 'raises StopIteration when all pairs have been returned' do
+        4.times { enumerator.next }
+        expect { enumerator.next }.to raise_error(StopIteration)
+        expect { enumerator.next }.to raise_error(StopIteration)
+      end
+    end
+
+    describe '#each' do
+      it 'returns each offset and decoded message pair' do
+        result = []
+        enumerator.each { |i| result << i }
+        expect(result).to eq([[0, 'first'], [1, 'second'], [2, 'third'], [3, 'fourth']])
+      end
+    end
+  end
+end
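message_set_enumerator_spec.rb shows that the enumerator follows Ruby's external-iteration protocol: `#next` returns `[offset, decoded_message]` pairs in order and raises `StopIteration` once the set is exhausted, while `#each` yields the same pairs. That means a plain `Kernel#loop` drains it cleanly, as in this sketch (`enumerator` stands in for the return value of `FetchResponse#messages`):

```ruby
# External iteration over a MessageSetEnumerator, per the spec above.
# StopIteration from #next terminates Kernel#loop without an explicit break.
loop do
  offset, message = enumerator.next
  puts "#{offset}: #{message}"
end
```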
data/spec/heller/offset_response_spec.rb
@@ -0,0 +1,68 @@
+# encoding: utf-8
+
+require 'spec_helper'
+
+module Heller
+  describe OffsetResponse do
+    let :response do
+      described_class.new(underlying)
+    end
+
+    let :underlying do
+      double(:offset_response)
+    end
+
+    before do
+      allow(underlying).to receive(:has_error?)
+    end
+
+    describe '#error?' do
+      it 'proxies the underlying FetchResponse#has_error?' do
+        response.error?
+
+        expect(underlying).to have_received(:has_error?)
+      end
+    end
+
+    describe '#error' do
+      context 'given a topic and partition combination that does exist' do
+        it 'returns whatever the underlying FetchResponse returns' do
+          expect(underlying).to receive(:error_code).with('spec', 0).and_return('error stuff')
+
+          expect(response.error('spec', 0)).to eq('error stuff')
+        end
+      end
+
+      context 'given a topic and partition combination that does not exist' do
+        it 'raises NoSuchTopicPartitionCombinationError' do
+          expect(underlying).to receive(:error_code).with('non-existent', 1024).and_raise(NoSuchElementException.new)
+
+          expect { response.error('non-existent', 1024) }.to raise_error(NoSuchTopicPartitionCombinationError)
+        end
+      end
+    end
+
+    describe '#offsets' do
+      context 'given a topic and partition combination that does exist' do
+        let :fake_long_array do
+          double(:long_array, to_a: [])
+        end
+
+        it 'returns an array' do
+          expect(underlying).to receive(:offsets).with('spec', 0).and_return(fake_long_array)
+
+          expect(response.offsets('spec', 0)).to eq([])
+          expect(fake_long_array).to have_received(:to_a)
+        end
+      end
+
+      context 'given a topic and partition combination that does not exist' do
+        it 'raises NoSuchTopicPartitionCombinationError' do
+          expect(underlying).to receive(:offsets).with('non-existent', 1024).and_raise(NoSuchElementException.new)
+
+          expect { response.offsets('non-existent', 1024) }.to raise_error(NoSuchTopicPartitionCombinationError)
+        end
+      end
+    end
+  end
+end
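offset_response_spec.rb rounds out the offset API: `Heller::OffsetResponse#offsets` returns a plain Ruby `Array` (the underlying long array is converted via `to_a`), and an unknown topic/partition pair again raises `NoSuchTopicPartitionCombinationError`. A sketch tying this back to `Consumer#offsets_before`; the consumer, topic, partition and timestamp below are illustrative:

```ruby
# Sketch of requesting offsets older than a timestamp, based on the specs above.
# Heller::OffsetRequest.new(topic, partition, time, max_offsets) mirrors the
# argument shapes seen in consumer_spec.rb; all values here are illustrative.
request  = Heller::OffsetRequest.new('example-topic', 0, Time.utc(2013, 7, 20), 10)
response = consumer.offsets_before(request)     # => Heller::OffsetResponse

if response.error?
  warn "offset request failed: #{response.error('example-topic', 0)}"
else
  p response.offsets('example-topic', 0)        # => plain Ruby Array of offsets
end
```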