logstash-output-cloudwatchlogs 0.9.0

@@ -0,0 +1,29 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-output-cloudwatchlogs'
+   s.version = '0.9.0'
+   s.licenses = ['Amazon Software License']
+   s.summary = "This output lets you send logs to AWS CloudWatch Logs service"
+   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
+   s.authors = ["AWS"]
+   s.email = 'cloudwatch-logs-feedback@amazon.com'
+   s.homepage = "http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/WhatIsCloudWatchLogs.html"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = `git ls-files`.split($\) + ::Dir.glob('vendor/*')
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'logstash-core', '>= 1.5.0', '< 2.0.0'
+   s.add_runtime_dependency 'logstash-codec-plain'
+   s.add_runtime_dependency 'logstash-mixin-aws', '>= 1.0.0'
+   s.add_runtime_dependency 'aws-sdk', ['~> 2']
+
+   s.add_development_dependency 'logstash-devutils'
+ end
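
For orientation, here is a minimal usage sketch. It mirrors the plugin lookup used throughout the specs in the next hunk; the "lg" and "ls" values are placeholders, not defaults shipped with the plugin, and the pipeline-config line in the comment is an assumption based on standard Logstash output configuration rather than something stated in this changeset.

    # Hypothetical smoke test, assuming Logstash 1.5.x and this gem are installed
    # (e.g. via $LS_HOME/bin/plugin install logstash-output-cloudwatchlogs).
    # The rough pipeline-config equivalent would be:
    #   output { cloudwatchlogs { log_group_name => "lg" log_stream_name => "ls" } }
    require "logstash/plugin"
    require "logstash/outputs/cloudwatchlogs"

    output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
      "log_group_name" => "lg",     # required: destination log group
      "log_stream_name" => "ls")    # required: destination log stream
    output.register                 # validates settings, as exercised in the specs below
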
@@ -0,0 +1,624 @@
+ # encoding: utf-8
+
+ #
+ # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ #
+ # Licensed under the Amazon Software License (the "License").
+ # You may not use this file except in compliance with the License.
+ # A copy of the License is located at
+ #
+ #   http://aws.amazon.com/asl/
+ #
+ # or in the "license" file accompanying this file. This file is distributed
+ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ # express or implied. See the License for the specific language governing
+ # permissions and limitations under the License.
+
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/plugin"
+ require "logstash/json"
+ require "logstash/timestamp"
+ require "logstash/outputs/cloudwatchlogs"
+
+ require "aws-sdk"
+
+ describe "outputs/cloudwatchlogs" do
+
+   describe "#new" do
+     it "should raise error when log stream name is not configured" do
+       expect {
+         LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+           "log_group_name" => "lg")
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+
+     it "should raise error when log group name is not configured" do
+       expect {
+         LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+           "log_stream_name" => "ls")
+       }.to raise_error(LogStash::ConfigurationError)
+     end
+
+     it "should create the output with log group/stream name" do
+       expect {
+         LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+           "log_group_name" => "lg", "log_stream_name" => "ls")
+       }.to_not raise_error
+     end
+   end
+
+   describe "#register" do
+     it "should set the batch_count to MAX_BATCH_COUNT if a larger value is configured" do
+       output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+         "log_group_name" => "lg", "log_stream_name" => "ls",
+         "batch_count" => LogStash::Outputs::CloudWatchLogs::MAX_BATCH_COUNT + 1)
+       expect {output.register}.to_not raise_error
+       output.batch_count.should eql(LogStash::Outputs::CloudWatchLogs::MAX_BATCH_COUNT)
+     end
+
+     it "should set the batch_size to MAX_BATCH_SIZE if a larger value is configured" do
+       output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+         "log_group_name" => "lg", "log_stream_name" => "ls",
+         "batch_size" => LogStash::Outputs::CloudWatchLogs::MAX_BATCH_SIZE + 1)
+       expect {output.register}.to_not raise_error
+       output.batch_size.should eql(LogStash::Outputs::CloudWatchLogs::MAX_BATCH_SIZE)
+     end
+
+     it "should set the buffer_duration to MIN_BUFFER_DURATION if a smaller value is configured" do
+       output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+         "log_group_name" => "lg", "log_stream_name" => "ls",
+         "buffer_duration" => LogStash::Outputs::CloudWatchLogs::MIN_BUFFER_DURATION - 1)
+       expect {output.register}.to_not raise_error
+       output.buffer_duration.should eql(LogStash::Outputs::CloudWatchLogs::MIN_BUFFER_DURATION)
+     end
+   end
+
+   describe "#receive" do
+     before :each do
+       @output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+         "log_group_name" => "lg", "log_stream_name" => "ls")
+       @output.register
+     end
+
+     context "when event is invalid" do
+       before :each do
+         @event = LogStash::Event.new
+         @event.timestamp = LogStash::Timestamp.coerce("2015-02-13T01:19:08Z")
+         @event["message"] = "test"
+         expect(@output.buffer).not_to receive(:enq)
+       end
+       context "when event doesn't have @timestamp" do
+         it "should not save the event" do
+           @event.remove("@timestamp")
+           expect { @output.receive(@event) }.to_not raise_error
+         end
+       end
+       context "when event doesn't have message" do
+         it "should not save the event" do
+           @event.remove("message")
+           expect { @output.receive(@event) }.to_not raise_error
+         end
+       end
+     end
+
+     context "when event is valid" do
+       context "when first event is received" do
+         it "should save the event to buffer" do
+           expect(@output.buffer).to receive(:enq) { {:timestamp => 1423786748000.0, :message => "test"} }
+           event = LogStash::Event.new
+           event.timestamp = LogStash::Timestamp.coerce("2015-02-13T01:19:08Z")
+           event["message"] = "test"
+           expect { @output.receive(event) }.to_not raise_error
+         end
+       end
+
+     end
+   end
+
+   describe "#flush" do
+     before :each do
+       @cwl = Aws::CloudWatchLogs::Client.new(:region => "us-east-1")
+       @output = LogStash::Plugin.lookup("output", "cloudwatchlogs").new(
+         "log_group_name" => "lg", "log_stream_name" => "ls")
+       @output.register
+       @output.cwl = @cwl
+     end
+
+     context "when received zero events" do
+       it "should not make the service call" do
+         expect(@cwl).not_to receive(:put_log_events)
+         @output.flush([])
+       end
+     end
+
+     context "when received some events" do
+       before :each do
+         @output.sequence_token = 'token'
+         @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+         allow(@cwl).to receive(:put_log_events) { @response }
+       end
+       context "when events are not sorted" do
+         it "should sort the events before sending them to service" do
+           expect(@cwl).to receive(:put_log_events).with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'},
+               {:timestamp => 124, :message => 'zzz'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'token'
+           ) { @response }
+           @output.flush([
+             {:timestamp => 124, :message => 'zzz'},
+             {:timestamp => 123, :message => 'abc'}])
+         end
+       end
+       context "when events are sorted" do
+         it "should send the events as is" do
+           expect(@cwl).to receive(:put_log_events).with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'},
+               {:timestamp => 124, :message => 'zzz'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'token'
+           ) { @response }
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'},
+             {:timestamp => 124, :message => 'zzz'}])
+         end
+       end
+       context "when log events span more than 24 hours" do
+         it "should break log events into multiple batches and no batch spans more than 24 hours" do
+           twenty_four_hours_in_mills = 24 * 60 * 60 * 1000
+           expect(@cwl).to receive(:put_log_events).once.with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'token'
+           ) { @response }
+           expect(@cwl).to receive(:put_log_events).once.with(
+             :log_events => [
+               {:timestamp => 123 + twenty_four_hours_in_mills + 1, :message => 'zzz'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'ntoken'
+           )
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'},
+             {:timestamp => 123 + twenty_four_hours_in_mills + 1, :message => 'zzz'}])
+         end
+       end
+       context "when log events span exactly 24 hours" do
+         it "should not break log events into multiple batches" do
+           twenty_four_hours_in_mills = 24 * 60 * 60 * 1000
+           expect(@cwl).to receive(:put_log_events).once.with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'},
+               {:timestamp => 123 + twenty_four_hours_in_mills, :message => 'zzz'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'token'
+           ) { @response }
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'},
+             {:timestamp => 123 + twenty_four_hours_in_mills, :message => 'zzz'}])
+         end
+       end
+     end
+
+     describe "error handling" do
+       context "when sending the first batch" do
+         it "should use null sequence token" do
+           @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+           allow(@cwl).to receive(:put_log_events) { @response }
+           expect(@cwl).to receive(:put_log_events).with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => nil
+           ) { @response }
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'}])
+         end
+         context "when the log stream doesn't exist" do
+           it "should create log group and log stream" do
+             @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+             # raise an exception on the 1st call and succeed on the following calls
+             allow(@cwl).to receive(:put_log_events) do
+               allow(@cwl).to receive(:put_log_events) { @response }
+               raise Aws::CloudWatchLogs::Errors::ResourceNotFoundException.new(nil, nil)
+             end
+             expect(@cwl).to receive(:put_log_events).exactly(2).times.with(
+               :log_events => [
+                 {:timestamp => 123, :message => 'abc'}],
+               :log_group_name => 'lg',
+               :log_stream_name => 'ls',
+               :sequence_token => nil
+             )
+             expect(@cwl).to receive(:create_log_group).with(:log_group_name => 'lg')
+             expect(@cwl).to receive(:create_log_stream).with(:log_group_name => 'lg', :log_stream_name => 'ls')
+             @output.flush([
+               {:timestamp => 123, :message => 'abc'}])
+           end
+         end
+         context "when the log stream exists" do
+           context "log stream has no log event" do
+             it "should succeed" do
+               @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+               allow(@cwl).to receive(:put_log_events) { @response }
+               expect(@cwl).to receive(:put_log_events).with(
+                 :log_events => [
+                   {:timestamp => 123, :message => 'abc'}],
+                 :log_group_name => 'lg',
+                 :log_stream_name => 'ls',
+                 :sequence_token => nil
+               )
+               @output.flush([
+                 {:timestamp => 123, :message => 'abc'}])
+             end
+           end
+
+           context "log stream has some log events" do
+             context "when log stream only accepted one batch in the past" do
+               context "when the sending batch is the same as accepted batch" do
+                 it "should not retry upon DataAlreadyAcceptedException" do
+                   @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+                   @ex = Aws::CloudWatchLogs::Errors::DataAlreadyAcceptedException.new(nil, "The next batch can be sent with sequenceToken: 456")
+                   # raise an exception on the 1st call and succeed on the following calls
+                   allow(@cwl).to receive(:put_log_events) do
+                     allow(@cwl).to receive(:put_log_events) { @response }
+                     raise @ex
+                   end
+                   expect(@cwl).to receive(:put_log_events).once.with(
+                     :log_events => [
+                       {:timestamp => 123, :message => 'abc'}],
+                     :log_group_name => 'lg',
+                     :log_stream_name => 'ls',
+                     :sequence_token => nil
+                   )
+                   @output.flush([
+                     {:timestamp => 123, :message => 'abc'}])
+                 end
+               end
+               context "when the sending batch is different than accepted batch" do
+                 it "should retry upon InvalidSequenceTokenException" do
+                   @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+                   @ex = Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException.new(nil, "The next expected sequenceToken is: 456")
+                   # raise an exception on the 1st call and succeed on the following calls
+                   allow(@cwl).to receive(:put_log_events) do
+                     allow(@cwl).to receive(:put_log_events) { @response }
+                     raise @ex
+                   end
+                   expect(@cwl).to receive(:put_log_events).once.with(
+                     :log_events => [
+                       {:timestamp => 123, :message => 'abc'}],
+                     :log_group_name => 'lg',
+                     :log_stream_name => 'ls',
+                     :sequence_token => nil
+                   )
+                   expect(@cwl).to receive(:put_log_events).once.with(
+                     :log_events => [
+                       {:timestamp => 123, :message => 'abc'}],
+                     :log_group_name => 'lg',
+                     :log_stream_name => 'ls',
+                     :sequence_token => '456'
+                   ) { @response }
+                   @output.flush([
+                     {:timestamp => 123, :message => 'abc'}])
+                 end
+               end
+             end
+             context "when log stream already accepted more than one batch" do
+               it "should retry upon InvalidSequenceTokenException" do
+                 @output.sequence_token = "lasttoken"
+                 @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+                 @ex = Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException.new(nil, "The next expected sequenceToken is: 456")
+                 # raise an exception on the 1st call and succeed on the following calls
+                 allow(@cwl).to receive(:put_log_events) do
+                   allow(@cwl).to receive(:put_log_events) { @response }
+                   raise @ex
+                 end
+                 expect(@cwl).to receive(:put_log_events).once.with(
+                   :log_events => [
+                     {:timestamp => 123, :message => 'abc'}],
+                   :log_group_name => 'lg',
+                   :log_stream_name => 'ls',
+                   :sequence_token => 'lasttoken'
+                 )
+                 expect(@cwl).to receive(:put_log_events).once.with(
+                   :log_events => [
+                     {:timestamp => 123, :message => 'abc'}],
+                   :log_group_name => 'lg',
+                   :log_stream_name => 'ls',
+                   :sequence_token => '456'
+                 ) { @response }
+                 @output.flush([
+                   {:timestamp => 123, :message => 'abc'}])
+               end
+             end
+           end
+         end
+       end
+       context "when sending batch after first batch" do
+         before :each do
+           @output.sequence_token = 'lasttoken'
+         end
+         it "should use the previous token" do
+           @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+           allow(@cwl).to receive(:put_log_events) { @response }
+           expect(@cwl).to receive(:put_log_events).once.with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => 'lasttoken'
+           ) { @response }
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'}])
+         end
+       end
+       context "when sending invalid request" do
+         it "should not retry" do
+           @output.log_group_name = nil
+           @output.log_stream_name = nil
+           @output.sequence_token = nil
+           @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+           @ex = Aws::CloudWatchLogs::Errors::InvalidParameterException.new(nil, nil)
+           allow(@cwl).to receive(:put_log_events) { raise @ex }
+           expect(@cwl).to receive(:put_log_events).once.with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'}],
+             :log_group_name => nil,
+             :log_stream_name => nil,
+             :sequence_token => nil
+           )
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'}])
+         end
+       end
+       context "when receiving unknown exception" do
+         it "should retry forever until getting back a non-unknown exception" do
+           @output.sequence_token = nil
+           allow(@output).to receive(:sleep)
+           @response = double(Aws::PageableResponse, :next_sequence_token => 'ntoken')
+           @ex = Aws::CloudWatchLogs::Errors::ServiceUnavailableException.new(nil, nil)
+           # Raise 7 exceptions and then return normally
+           allow(@cwl).to receive(:put_log_events) do
+             allow(@cwl).to receive(:put_log_events) do
+               allow(@cwl).to receive(:put_log_events) do
+                 allow(@cwl).to receive(:put_log_events) do
+                   allow(@cwl).to receive(:put_log_events) do
+                     allow(@cwl).to receive(:put_log_events) do
+                       allow(@cwl).to receive(:put_log_events) do
+                         allow(@cwl).to receive(:put_log_events) { @response }
+                         raise @ex
+                       end
+                       raise @ex
+                     end
+                     raise @ex
+                   end
+                   raise @ex
+                 end
+                 raise @ex
+               end
+               raise @ex
+             end
+             raise @ex
+           end
+           expect(@cwl).to receive(:put_log_events).exactly(8).times.with(
+             :log_events => [
+               {:timestamp => 123, :message => 'abc'}],
+             :log_group_name => 'lg',
+             :log_stream_name => 'ls',
+             :sequence_token => nil
+           )
+           [2, 4, 8, 16, 32].each do |i|
+             expect(@output).to receive(:sleep).once.with(i)
+           end
+           # Should sleep at most 64 seconds for each retry
+           expect(@output).to receive(:sleep).twice.with(64)
+           @output.flush([
+             {:timestamp => 123, :message => 'abc'}])
+         end
+       end
+     end
+
+   end
+
+ end
+
+
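
Taken together, the error-handling examples above pin down the retry policy: an unknown/service error is retried with sleeps of 2, 4, 8, 16, 32 seconds and then capped at 64 seconds; ResourceNotFoundException triggers creation of the log group and stream and a resend; InvalidSequenceTokenException is retried once with the token parsed from the error message; DataAlreadyAcceptedException and InvalidParameterException are not retried. Below is a minimal sketch of just the backoff arithmetic those specs assert; it is an illustration written for this review, not the plugin's actual implementation, and the constant and method names are invented.

    require "aws-sdk"

    # Assumed constants, chosen to match the sleeps the spec expects: 2, 4, 8, 16, 32, 64, 64, ...
    MIN_DELAY_SECONDS = 2
    MAX_DELAY_SECONDS = 64

    def with_backoff
      delay = MIN_DELAY_SECONDS
      begin
        yield                                        # e.g. a put_log_events call
      rescue Aws::CloudWatchLogs::Errors::ServiceUnavailableException
        sleep(delay)                                 # wait, then double the delay up to the cap
        delay = [delay * 2, MAX_DELAY_SECONDS].min
        retry
      end
    end
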
+ describe "outputs/cloudwatchlogs/buffer" do
+
+   describe "#initialize" do
+     it "should create a buffer" do
+       buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+         max_batch_count: 5, max_batch_size: 10, buffer_duration: 5000,
+         out_queue_size: 5,
+         size_of_item_proc: Proc.new {|item| item.bytesize})
+     end
+   end
+
+   describe "#enq" do
+     context "when batch is closed based on item count/size" do
+       before :each do
+         @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+           max_batch_count: 5, max_batch_size: 10, buffer_duration: 0,
+           out_queue_size: 5,
+           size_of_item_proc: Proc.new {|item| item.bytesize})
+       end
+
+       context "when the number of items is less than max batch count" do
+         it "should accept an item" do
+           @buffer.enq("ab")
+           @buffer.in_batch.should eql(["ab"])
+           @buffer.in_size.should == 2
+           @buffer.in_count.should == 1
+         end
+       end
+
+       context "when the number of items is equal to the max batch count" do
+         it "should batch items to the out queue and leave the current batch empty" do
+           5.times do |i| @buffer.enq("#{i}") end
+           @buffer.in_batch.should eql([])
+           @buffer.out_queue.deq(true).should eql(["0", "1", "2", "3", "4"])
+           @buffer.in_size.should == 0
+           @buffer.in_count.should == 0
+         end
+       end
+
+       context "when the number of items is greater than the max batch count" do
+         it "should batch items to the out queue" do
+           6.times do |i| @buffer.enq("#{i}") end
+           @buffer.in_batch.should eql(["5"])
+           @buffer.out_queue.deq(true).should eql(["0", "1", "2", "3", "4"])
+           @buffer.in_size.should eql(1)
+           @buffer.in_count.should eql(1)
+         end
+       end
+
+       context "when the size of items is equal to the max batch size" do
+         it "should batch items to the out queue and leave the current batch empty" do
+           2.times do |i| @buffer.enq("abcd#{i}") end
+           @buffer.in_batch.should eql([])
+           @buffer.out_queue.deq(true).should eql(["abcd0", "abcd1"])
+           @buffer.in_size.should == 0
+           @buffer.in_count.should == 0
+         end
+       end
+
+       context "when the size of items is greater than max batch size" do
+         it "should batch items to the out queue" do
+           3.times do |i| @buffer.enq("abc#{i}") end
+           @buffer.in_batch.should eql(["abc2"])
+           @buffer.out_queue.deq(true).should eql(["abc0", "abc1"])
+           @buffer.in_size.should == 4
+           @buffer.in_count.should == 1
+         end
+       end
+     end
+
+     context "when the batch is closed after buffer_duration milliseconds have passed" do
+       before :each do
+         @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+           max_batch_count: 5, max_batch_size: 10, buffer_duration: 1000,
+           out_queue_size: 5,
+           size_of_item_proc: Proc.new {|item| item.bytesize})
+       end
+
+       context "when the number of items is less than the max batch count and the size is less than the max batch size" do
+         it "should batch items to the out queue" do
+           @buffer.enq("ab")
+           sleep(2)
+           @buffer.in_batch.should eql([])
+           @buffer.out_queue.deq(true).should eql(["ab"])
+           @buffer.in_size.should == 0
+           @buffer.in_count.should == 0
+         end
+       end
+     end
+
+     context "when batching is determined by size/count/duration" do
+       context "when enough items are added within buffer_duration milliseconds" do
+         it "should not add a batch whose size or count is below the threshold to the out queue, except the first and last batch" do
+           @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+             max_batch_count: 5, max_batch_size: 100, buffer_duration: 1000,
+             out_queue_size: 5,
+             size_of_item_proc: Proc.new {|item| item.bytesize})
+           20.times do |i|
+             @buffer.enq("#{i}")
+             # sleep less than buffer_duration; 5 items take 0.5 seconds to fill the buffer
+             sleep(0.1)
+           end
+           sleep(2)
+           # There will be 4 to 5 batches
+           batches = []
+           while !@buffer.out_queue.empty? do
+             batches << @buffer.out_queue.deq(true)
+           end
+           batches.size.should >= 4
+           batches.size.should <= 5
+           batches.shift
+           batches.pop
+           batches.each do |batch|
+             batch.size.should == 5
+           end
+         end
+       end
+     end
+   end
+
+   describe "#close" do
+     it "should add last batch to out queue" do
+       @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+         max_batch_count: 5, max_batch_size: 100, buffer_duration: 1000,
+         out_queue_size: 5,
+         size_of_item_proc: Proc.new {|item| item.bytesize})
+       consumer = Thread.new do
+         @buffer.deq {}
+       end
+       33.times do |i|
+         @buffer.enq("#{i}")
+         sleep(0.01)
+       end
+       @buffer.close
+       @buffer.in_count.should == 0
+       consumer.join
+     end
+   end
+
+   describe "#deq" do
+     it "should keep processing items until the buffer is closed" do
+       @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+         max_batch_count: 5, max_batch_size: 100, buffer_duration: 1000,
+         out_queue_size: 5,
+         size_of_item_proc: Proc.new {|item| item.bytesize})
+       item_count = 0
+       consumer = Thread.new do
+         @buffer.deq do |batch|
+           item_count += batch.size
+         end
+       end
+       33.times do |i|
+         @buffer.enq("#{i}")
+         sleep(0.01)
+       end
+       @buffer.close
+       consumer.join
+       item_count.should == 33
+     end
+   end
+
+   it "should not miss any item" do
+     @buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
+       max_batch_count: 137, max_batch_size: 1000, buffer_duration: 5000,
+       out_queue_size: 50,
+       size_of_item_proc: Proc.new {|item| item.bytesize})
+     item_count = 0
+     consumer = Thread.new do
+       @buffer.deq do |batch|
+         item_count += batch.size
+       end
+     end
+     threads = []
+     num_of_threads = 100
+     num_of_items = 10000
+     # 100 threads plus one scheduled batcher thread
+     num_of_threads.times do |m|
+       threads << Thread.new do
+         num_of_items.times do |i|
+           @buffer.enq("#{i}")
+         end
+       end
+     end
+     # let producers complete the writes
+     threads.map(&:join)
+     # move all items to the out queue
+     @buffer.close
+     # let consumer complete the read
+     consumer.join
+     item_count.should == num_of_items * num_of_threads
+   end
+ end
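
Finally, the buffer specs collectively describe a small batching contract. Below is a usage sketch assembled from the calls those specs make; the parameter values are taken from the examples above, and the inline comments are interpretations of the asserted behaviour rather than documentation from the plugin itself.

    require "logstash/outputs/cloudwatchlogs"

    buffer = LogStash::Outputs::CloudWatchLogs::Buffer.new(
      max_batch_count: 5,       # close the in-progress batch at 5 items...
      max_batch_size: 100,      # ...or when adding an item would pass 100 bytes...
      buffer_duration: 1000,    # ...or after roughly 1000 ms, whichever comes first
      out_queue_size: 5,        # bounded hand-off queue between batcher and consumer
      size_of_item_proc: Proc.new { |item| item.bytesize })

    consumer = Thread.new do
      # deq blocks and yields each closed batch until the buffer is closed
      buffer.deq { |batch| puts "would send #{batch.size} events" }
    end

    10.times { |i| buffer.enq("event #{i}") }
    buffer.close     # pushes the last partial batch to the out queue and stops deq
    consumer.join
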