fluent-plugin-firehose 0.0.1 → 0.0.2

Sign up to get free protection for your applications and to get access to all the features.
data/test/helper.rb ADDED
@@ -0,0 +1,31 @@
1
+ # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License"). You
4
+ # may not use this file except in compliance with the License. A copy of
5
+ # the License is located at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # or in the "license" file accompanying this file. This file is
10
+ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11
+ # ANY KIND, either express or implied. See the License for the specific
12
+ # language governing permissions and limitations under the License.
13
+
14
+ require 'rubygems'
15
+ require 'bundler'
16
+ require 'stringio'
17
+ begin
18
+ Bundler.setup(:default, :development)
19
+ rescue Bundler::BundlerError => e
20
+ $stderr.puts e.message
21
+ $stderr.puts "Run `bundle install` to install missing gems"
22
+ exit e.status_code
23
+ end
24
+
25
+ require 'test/unit'
26
+ require 'test/unit/rr'
27
+
28
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
29
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
30
+ require 'fluent/test'
31
+ require 'fluent/plugin/out_firehose'
@@ -0,0 +1,642 @@
1
+ # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
+ # Copyright 2015 Ji Oh Yoo
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License"). You
5
+ # may not use this file except in compliance with the License. A copy of
6
+ # the License is located at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # or in the "license" file accompanying this file. This file is
11
+ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
12
+ # ANY KIND, either express or implied. See the License for the specific
13
+ # language governing permissions and limitations under the License.
14
+
15
+ require 'helper'
16
+
17
+ class FirehoseOutputTest < Test::Unit::TestCase
18
+ def setup
19
+ Fluent::Test.setup
20
+ end
21
+
22
+ CONFIG = %[
23
+ aws_key_id test_key_id
24
+ aws_sec_key test_sec_key
25
+ stream_name test_stream
26
+ region us-east-1
27
+ partition_key test_partition_key
28
+ ]
29
+
30
+ CONFIG_YAJL= CONFIG + %[
31
+ use_yajl true
32
+ ]
33
+
34
+ CONFIG_WITH_COMPRESSION = CONFIG + %[
35
+ zlib_compression true
36
+ ]
37
+
38
+ CONFIG_YAJL_WITH_COMPRESSION = CONFIG_YAJL + %[
39
+ zlib_compression true
40
+ ]
41
+
42
+ def create_driver(conf = CONFIG, tag='test')
43
+ Fluent::Test::BufferedOutputTestDriver
44
+ .new(FluentPluginFirehose::FirehoseBufferedOutput, tag).configure(conf)
45
+ end
46
+
47
+ def create_mock_client
48
+ client = mock(Object.new)
49
+ mock(Aws::Firehose::Client).new({}) { client }
50
+ return client
51
+ end
52
+
53
+ def test_configure
54
+ d = create_driver
55
+ assert_equal 'test_key_id', d.instance.aws_key_id
56
+ assert_equal 'test_sec_key', d.instance.aws_sec_key
57
+ assert_equal 'test_stream', d.instance.stream_name
58
+ assert_equal 'us-east-1', d.instance.region
59
+ assert_equal 'test_partition_key', d.instance.partition_key
60
+ end
61
+
62
+ def test_configure_with_credentials
63
+ d = create_driver(<<-EOS)
64
+ profile default
65
+ credentials_path /home/scott/.aws/credentials
66
+ stream_name test_stream
67
+ region us-east-1
68
+ partition_key test_partition_key
69
+ EOS
70
+
71
+ assert_equal 'default', d.instance.profile
72
+ assert_equal '/home/scott/.aws/credentials', d.instance.credentials_path
73
+ assert_equal 'test_stream', d.instance.stream_name
74
+ assert_equal 'us-east-1', d.instance.region
75
+ assert_equal 'test_partition_key', d.instance.partition_key
76
+ end
77
+
78
+ def test_load_client
79
+ client = stub(Object.new)
80
+ client.describe_stream
81
+ client.put_records { {} }
82
+
83
+ stub(Aws::Firehose::Client).new do |options|
84
+ assert_equal("test_key_id", options[:access_key_id])
85
+ assert_equal("test_sec_key", options[:secret_access_key])
86
+ assert_equal("us-east-1", options[:region])
87
+ client
88
+ end
89
+
90
+ d = create_driver
91
+ d.run
92
+ end
93
+
94
+ def test_load_client_with_credentials
95
+ client = stub(Object.new)
96
+ client.describe_stream
97
+ client.put_records { {} }
98
+
99
+ stub(Aws::Firehose::Client).new do |options|
100
+ assert_equal(nil, options[:access_key_id])
101
+ assert_equal(nil, options[:secret_access_key])
102
+ assert_equal("us-east-1", options[:region])
103
+
104
+ credentials = options[:credentials]
105
+ assert_equal("default", credentials.profile_name)
106
+ assert_equal("/home/scott/.aws/credentials", credentials.path)
107
+
108
+ client
109
+ end
110
+
111
+ d = create_driver(<<-EOS)
112
+ profile default
113
+ credentials_path /home/scott/.aws/credentials
114
+ stream_name test_stream
115
+ region us-east-1
116
+ partition_key test_partition_key
117
+ EOS
118
+
119
+ d.run
120
+ end
121
+
122
+ def test_load_client_with_role_arn
123
+ client = stub(Object.new)
124
+ client.describe_stream
125
+ client.put_records { {} }
126
+
127
+ stub(Aws::AssumeRoleCredentials).new do |options|
128
+ assert_equal("arn:aws:iam::001234567890:role/my-role", options[:role_arn])
129
+ assert_equal("aws-fluent-plugin-kinesis", options[:role_session_name])
130
+ assert_equal("my_external_id", options[:external_id])
131
+ assert_equal(3600, options[:duration_seconds])
132
+ "sts_credentials"
133
+ end
134
+
135
+ stub(Aws::Firehose::Client).new do |options|
136
+ assert_equal("sts_credentials", options[:credentials])
137
+ client
138
+ end
139
+
140
+ d = create_driver(<<-EOS)
141
+ role_arn arn:aws:iam::001234567890:role/my-role
142
+ external_id my_external_id
143
+ stream_name test_stream
144
+ region us-east-1
145
+ partition_key test_partition_key
146
+ EOS
147
+ d.run
148
+ end
149
+
150
+ def test_configure_with_more_options
151
+
152
+ conf = %[
153
+ stream_name test_stream
154
+ region us-east-1
155
+ ensure_stream_connection false
156
+ http_proxy http://proxy:3333/
157
+ partition_key test_partition_key
158
+ partition_key_expr record
159
+ explicit_hash_key test_hash_key
160
+ explicit_hash_key_expr record
161
+ order_events true
162
+ use_yajl true
163
+ ]
164
+ d = create_driver(conf)
165
+ assert_equal 'test_stream', d.instance.stream_name
166
+ assert_equal 'us-east-1', d.instance.region
167
+ assert_equal false, d.instance.ensure_stream_connection
168
+ assert_equal 'http://proxy:3333/', d.instance.http_proxy
169
+ assert_equal 'test_partition_key', d.instance.partition_key
170
+ assert_equal 'Proc',
171
+ d.instance.instance_variable_get(:@partition_key_proc).class.to_s
172
+ assert_equal 'test_hash_key', d.instance.explicit_hash_key
173
+ assert_equal 'Proc',
174
+ d.instance.instance_variable_get(:@explicit_hash_key_proc).class.to_s
175
+ assert_equal 'a',
176
+ d.instance.instance_variable_get(:@partition_key_proc).call('a')
177
+ assert_equal 'a',
178
+ d.instance.instance_variable_get(:@explicit_hash_key_proc).call('a')
179
+ assert_equal true, d.instance.order_events
180
+ assert_equal nil, d.instance.instance_variable_get(:@sequence_number_for_ordering)
181
+ assert_equal true, d.instance.use_yajl
182
+ end
183
+
184
+ def test_mode_configuration
185
+
186
+ conf = %[
187
+ stream_name test_stream
188
+ region us-east-1
189
+ partition_key test_partition_key
190
+ ]
191
+ d = create_driver(conf)
192
+ assert_equal(false, d.instance.order_events)
193
+ assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
194
+
195
+ conf = %[
196
+ stream_name test_stream
197
+ region us-east-1
198
+ partition_key test_partition_key
199
+ order_events true
200
+ ]
201
+ d = create_driver(conf)
202
+ assert_equal(true, d.instance.order_events)
203
+ assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
204
+
205
+ conf = %[
206
+ stream_name test_stream
207
+ region us-east-1
208
+ partition_key test_partition_key
209
+ num_threads 1
210
+ ]
211
+ d = create_driver(conf)
212
+ assert_equal(false, d.instance.order_events)
213
+ assert_equal(false, d.instance.instance_variable_get(:@parallel_mode))
214
+
215
+ conf = %[
216
+ stream_name test_stream
217
+ region us-east-1
218
+ partition_key test_partition_key
219
+ num_threads 2
220
+ ]
221
+ d = create_driver(conf)
222
+ assert_equal(false, d.instance.order_events)
223
+ assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
224
+
225
+ conf = %[
226
+ stream_name test_stream
227
+ region us-east-1
228
+ partition_key test_partition_key
229
+ detach_process 1
230
+ ]
231
+ d = create_driver(conf)
232
+ assert_equal(false, d.instance.order_events)
233
+ assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
234
+
235
+ conf = %[
236
+ stream_name test_stream
237
+ region us-east-1
238
+ partition_key test_partition_key
239
+ order_events true
240
+ detach_process 1
241
+ num_threads 2
242
+ ]
243
+ d = create_driver(conf)
244
+ assert_equal(false, d.instance.order_events)
245
+ assert_equal(true, d.instance.instance_variable_get(:@parallel_mode))
246
+
247
+ end
248
+
249
+
250
+ data("json"=>CONFIG, "yajl"=>CONFIG_YAJL)
251
+ def test_format_without_compression(config)
252
+
253
+ d = create_driver(config)
254
+
255
+ data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
256
+ data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
257
+
258
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
259
+ d.emit(data1, time)
260
+ d.emit(data2, time)
261
+
262
+ d.expect_format({
263
+ 'data' => data1.to_json,
264
+ 'partition_key' => 'key1' }.to_msgpack
265
+ )
266
+ d.expect_format({
267
+ 'data' => data2.to_json,
268
+ 'partition_key' => 'key2' }.to_msgpack
269
+ )
270
+
271
+ client = create_mock_client
272
+ client.describe_stream(delivery_stream_name: 'test_stream')
273
+ client.put_records(
274
+ delivery_stream_name: 'test_stream',
275
+ records: [
276
+ {
277
+ data: data1.to_json,
278
+ partition_key: 'key1'
279
+ },
280
+ {
281
+ data: data2.to_json,
282
+ partition_key: 'key2'
283
+ }
284
+ ]
285
+ ) { {} }
286
+
287
+ d.run
288
+ end
289
+
290
+ data("json"=>CONFIG_WITH_COMPRESSION, "yajl"=>CONFIG_YAJL_WITH_COMPRESSION)
291
+ def test_format_with_compression(config)
292
+
293
+ d = create_driver(config)
294
+
295
+ data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
296
+ data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
297
+
298
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
299
+ d.emit(data1, time)
300
+ d.emit(data2, time)
301
+
302
+ d.expect_format({
303
+ 'data' => data1.to_json,
304
+ 'partition_key' => 'key1' }.to_msgpack
305
+ )
306
+ d.expect_format({
307
+ 'data' => data2.to_json,
308
+ 'partition_key' => 'key2' }.to_msgpack
309
+ )
310
+
311
+ client = create_mock_client
312
+ client.describe_stream(delivery_stream_name: 'test_stream')
313
+ client.put_records(
314
+ delivery_stream_name: 'test_stream',
315
+ records: [
316
+ {
317
+ data: Zlib::Deflate.deflate(data1.to_json),
318
+ partition_key: 'key1'
319
+ },
320
+ {
321
+ data: Zlib::Deflate.deflate(data2.to_json),
322
+ partition_key: 'key2'
323
+ }
324
+ ]
325
+ ) { {} }
326
+
327
+ d.run
328
+ end
329
+
330
+ def test_order_events
331
+
332
+ d = create_driver(CONFIG + "\norder_events true")
333
+
334
+ data1 = {"test_partition_key"=>"key1","a"=>1,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
335
+ data2 = {"test_partition_key"=>"key2","a"=>2,"time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
336
+
337
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
338
+ d.emit(data1, time)
339
+ d.emit(data2, time)
340
+
341
+ d.expect_format({
342
+ 'data' => data1.to_json,
343
+ 'partition_key' => 'key1' }.to_msgpack
344
+ )
345
+ d.expect_format({
346
+ 'data' => data2.to_json,
347
+ 'partition_key' => 'key2' }.to_msgpack
348
+ )
349
+
350
+ client = create_mock_client
351
+ client.describe_stream(delivery_stream_name: 'test_stream')
352
+ client.put_record(
353
+ data: data1.to_json,
354
+ partition_key: 'key1',
355
+ delivery_stream_name: 'test_stream'
356
+ ) { {sequence_number: 1} }
357
+ client.put_record(
358
+ data: data2.to_json,
359
+ partition_key: 'key2',
360
+ sequence_number_for_ordering: 1,
361
+ delivery_stream_name: 'test_stream'
362
+ ) { {} }
363
+
364
+ d.run
365
+ end
366
+
367
+ def test_format_at_lowlevel
368
+ d = create_driver
369
+ data = {"test_partition_key"=>"key1","a"=>1}
370
+ assert_equal(
371
+ MessagePack.pack({
372
+ "data" => data.to_json,
373
+ "partition_key" => "key1"
374
+ }),
375
+ d.instance.format('test','test',data)
376
+ )
377
+ end
378
+
379
+ def test_format_at_lowlevel_with_more_options
380
+
381
+ conf = %[
382
+ stream_name test_stream
383
+ region us-east-1
384
+ partition_key test_partition_key
385
+ partition_key_expr record
386
+ explicit_hash_key test_hash_key
387
+ explicit_hash_key_expr record
388
+ order_events true
389
+ ]
390
+
391
+ d = create_driver(conf)
392
+ data = {"test_partition_key"=>"key1","test_hash_key"=>"hash1","a"=>1}
393
+ assert_equal(
394
+ MessagePack.pack({
395
+ "data" => data.to_json,
396
+ "partition_key" => "key1",
397
+ "explicit_hash_key" => "hash1"
398
+ }),
399
+ d.instance.format('test','test',data)
400
+ )
401
+ end
402
+
403
+ def test_multibyte_with_yajl_without_compression
404
+
405
+ d = create_driver(CONFIG_YAJL)
406
+
407
+ data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
408
+ json = Yajl.dump(data1)
409
+ data1["a"].force_encoding("ASCII-8BIT")
410
+
411
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
412
+ d.emit(data1, time)
413
+
414
+ d.expect_format({
415
+ 'data' => json,
416
+ 'partition_key' => 'key1' }.to_msgpack
417
+ )
418
+
419
+ client = create_mock_client
420
+ client.describe_stream(delivery_stream_name: 'test_stream')
421
+ client.put_records(
422
+ delivery_stream_name: 'test_stream',
423
+ records: [
424
+ {
425
+ data: json,
426
+ partition_key: 'key1'
427
+ }
428
+ ]
429
+ ) { {} }
430
+
431
+ d.run
432
+ end
433
+
434
+ def test_multibyte_with_yajl_with_compression
435
+
436
+ d = create_driver(CONFIG_YAJL_WITH_COMPRESSION)
437
+
438
+ data1 = {"test_partition_key"=>"key1","a"=>"\xE3\x82\xA4\xE3\x83\xB3\xE3\x82\xB9\xE3\x83\x88\xE3\x83\xBC\xE3\x83\xAB","time"=>"2011-01-02T13:14:15Z","tag"=>"test"}
439
+ json = Yajl.dump(data1)
440
+ data1["a"].force_encoding("ASCII-8BIT")
441
+
442
+ time = Time.parse("2011-01-02 13:14:15 UTC").to_i
443
+ d.emit(data1, time)
444
+
445
+ d.expect_format({
446
+ 'data' => json,
447
+ 'partition_key' => 'key1' }.to_msgpack
448
+ )
449
+
450
+ client = create_mock_client
451
+ client.describe_stream(delivery_stream_name: 'test_stream')
452
+ client.put_records(
453
+ delivery_stream_name: 'test_stream',
454
+ records: [
455
+ {
456
+ data: Zlib::Deflate.deflate(json),
457
+ partition_key: 'key1'
458
+ }
459
+ ]
460
+ ) { {} }
461
+
462
+ d.run
463
+ end
464
+
465
+ def test_get_key
466
+ d = create_driver
467
+ assert_equal(
468
+ "1",
469
+ d.instance.send(:get_key, "partition_key", {"test_partition_key" => 1})
470
+ )
471
+
472
+ assert_equal(
473
+ "abc",
474
+ d.instance.send(:get_key, "partition_key", {"test_partition_key" => "abc"})
475
+ )
476
+
477
+ d = create_driver(%[
478
+ random_partition_key true
479
+ stream_name test_stream
480
+ region us-east-1
481
+ ])
482
+
483
+ assert_match(
484
+ /\A[\da-f-]{36}\z/,
485
+ d.instance.send(:get_key, 'foo', 'bar')
486
+ )
487
+
488
+ d = create_driver(%[
489
+ random_partition_key true
490
+ partition_key test_key
491
+ stream_name test_stream
492
+ region us-east-1
493
+ ])
494
+
495
+ assert_match(
496
+ /\A[\da-f-]{36}\z/,
497
+ d.instance.send(
498
+ :get_key,
499
+ 'partition_key',
500
+ {"test_key" => 'key1'}
501
+ )
502
+ )
503
+
504
+ d = create_driver(%[
505
+ random_partition_key true
506
+ partition_key test_key
507
+ explicit_hash_key explicit_key
508
+ stream_name test_stream
509
+ region us-east-1
510
+ ])
511
+
512
+ assert_match(
513
+ /\A[\da-f-]{36}\z/,
514
+ d.instance.send(
515
+ :get_key,
516
+ 'partition_key',
517
+ {"test_key" => 'key1', "explicit_key" => 'key2'}
518
+ )
519
+ )
520
+ end
521
+
522
+ def test_record_exceeds_max_size
523
+ d = create_driver
524
+
525
+ # PUT_RECORD_MAX_DATA_SIZE = 1024*1024 is way too big, try something smaller (10), just to verify the logic not the actual value
526
+ original_put_record_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORD_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORD_MAX_DATA_SIZE)
527
+ d.instance.class.const_set(:PUT_RECORD_MAX_DATA_SIZE, 10)
528
+
529
+ string = ''
530
+ (1..5).each{ string = string + '1' }
531
+ assert_equal(
532
+ false,
533
+ d.instance.send(:record_exceeds_max_size?,string)
534
+ )
535
+
536
+ string = ''
537
+ (1..10).each{ string = string + '1' }
538
+ assert_equal(
539
+ false,
540
+ d.instance.send(:record_exceeds_max_size?,string)
541
+ )
542
+
543
+ string = ''
544
+ (1..11).each{ string = string + '1' }
545
+ assert_equal(
546
+ true,
547
+ d.instance.send(:record_exceeds_max_size?,string)
548
+ )
549
+
550
+ # reset the constant
551
+ d.instance.class.const_set(:PUT_RECORD_MAX_DATA_SIZE, original_put_record_max_data_size)
552
+ end
553
+
554
+ def test_build_records_array_to_put
555
+ d = create_driver
556
+
557
+ # PUT_RECORDS_MAX_DATA_SIZE = 1024*1024*5 is way too big, try something smaller (100), just to verify the logic not the actual value
558
+ original_put_records_max_data_size = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_DATA_SIZE) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_DATA_SIZE)
559
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_DATA_SIZE, 100)
560
+
561
+ # PUT_RECORDS_MAX_COUNT = 500 is way too big, try something smaller (10), just to verify the logic not the actual value
562
+ original_put_records_max_count = d.instance.class.send(:remove_const, :PUT_RECORD_BATCH_MAX_COUNT) if d.instance.class.const_defined?(:PUT_RECORD_BATCH_MAX_COUNT)
563
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_COUNT, 10)
564
+
565
+ data_list = []
566
+ (0..10).each do |n|
567
+ data_list.push({data: '1', partition_key: '0'})
568
+ end
569
+ result = d.instance.send(:build_records_array_to_put,data_list)
570
+ assert_equal(2,result.length)
571
+ assert_equal(10,result[0].length)
572
+ assert_equal(1,result[1].length)
573
+
574
+ data_list = []
575
+ (0..24).each do
576
+ data_list.push({data: '1', partition_key: '0'})
577
+ end
578
+ result = d.instance.send(:build_records_array_to_put,data_list)
579
+ assert_equal(3,result.length)
580
+ assert_equal(10,result[0].length)
581
+ assert_equal(10,result[1].length)
582
+ assert_equal(5,result[2].length)
583
+
584
+ data_list = []
585
+ (0..20).each do
586
+ data_list.push({data: '0123456789', partition_key: '1'})
587
+ end
588
+ # Should return 3 lists: 9*11 + 9*11 + 3*11
589
+ result = d.instance.send(:build_records_array_to_put,data_list)
590
+ assert_equal(3,result.length)
591
+ assert_equal(9,result[0].length)
592
+ assert_operator(
593
+ 100, :>,
594
+ result[0].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
595
+ )
596
+ assert_equal(9,result[1].length)
597
+ assert_operator(
598
+ 100, :>,
599
+ result[1].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
600
+ )
601
+ assert_equal(3,result[2].length)
602
+ assert_operator(
603
+ 100, :>,
604
+ result[2].reduce(0){|sum,i| sum + i[:data].length + i[:partition_key].length}
605
+ )
606
+
607
+ # reset the constants
608
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_DATA_SIZE, original_put_records_max_data_size)
609
+ d.instance.class.const_set(:PUT_RECORD_BATCH_MAX_COUNT, original_put_records_max_count)
610
+ end
611
+
612
+ def test_build_empty_array_to_put
613
+ d = create_driver
614
+ data_list = []
615
+ result = d.instance.send(:build_records_array_to_put,data_list)
616
+ assert_equal(0, result.length, 'Should return empty array if there is no record')
617
+ end
618
+
619
+ def test_build_data_to_put
620
+ d = create_driver
621
+ assert_equal(
622
+ {key: 1},
623
+ d.instance.send(:build_data_to_put,{"key"=>1})
624
+ )
625
+ end
626
+
627
+ def test_calculate_sleep_duration
628
+ d = create_driver
629
+ assert_operator(
630
+ 1, :>,
631
+ d.instance.send(:calculate_sleep_duration,0)
632
+ )
633
+ assert_operator(
634
+ 2, :>,
635
+ d.instance.send(:calculate_sleep_duration,1)
636
+ )
637
+ assert_operator(
638
+ 4, :>,
639
+ d.instance.send(:calculate_sleep_duration,2)
640
+ )
641
+ end
642
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-firehose
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.0.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ji Oh Yoo
@@ -126,7 +126,19 @@ email:
126
126
  executables: []
127
127
  extensions: []
128
128
  extra_rdoc_files: []
129
- files: []
129
+ files:
130
+ - .gitignore
131
+ - CHANGELOG.md
132
+ - Gemfile
133
+ - Gemfile.lock
134
+ - LICENSE.txt
135
+ - README.md
136
+ - Rakefile
137
+ - fluent-plugin-firehose.gemspec
138
+ - lib/fluent/plugin/out_firehose.rb
139
+ - lib/fluent/plugin/version.rb
140
+ - test/helper.rb
141
+ - test/plugin/test_out_kinesis.rb
130
142
  homepage:
131
143
  licenses:
132
144
  - Apache License, Version 2.0
@@ -151,4 +163,6 @@ rubygems_version: 2.0.14
151
163
  signing_key:
152
164
  specification_version: 4
153
165
  summary: Fluentd output plugin that sends events to Amazon Kinesis Firehose.
154
- test_files: []
166
+ test_files:
167
+ - test/helper.rb
168
+ - test/plugin/test_out_kinesis.rb