fluent-plugin-s3-hubspot 1.0.0.rc5

@@ -0,0 +1,640 @@
+ require 'fluent/test'
+ require 'fluent/test/helpers'
+ require 'fluent/test/log'
+ require 'fluent/test/driver/output'
+ require 'aws-sdk-resources'
+ require 'fluent/plugin/out_s3'
+
+ require 'test/unit/rr'
+ require 'zlib'
+ require 'fileutils'
+ require 'timecop'
+ require 'uuidtools'
+
+ include Fluent::Test::Helpers
+
+ class S3OutputTest < Test::Unit::TestCase
+   def setup
+     # Fluent::Test.setup
+   end
+
+   def teardown
+     Dir.glob('test/tmp/*').each {|file| FileUtils.rm_f(file) }
+   end
+
+   CONFIG = %[
+     aws_key_id test_key_id
+     aws_sec_key test_sec_key
+     s3_bucket test_bucket
+     path log
+     utc
+     buffer_type memory
+     time_slice_format %Y%m%d-%H
+   ]
+
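+   # create_driver wraps a subclass of Fluent::Plugin::S3Output whose write simply
+   # returns the chunk contents and whose ensure_bucket/check_apikeys are no-ops,
+   # so the configure and format tests below never contact S3.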
+   def create_driver(conf = CONFIG)
+     Fluent::Test::Driver::Output.new(Fluent::Plugin::S3Output) do
+       def format(tag, time, record)
+         super
+       end
+
+       def write(chunk)
+         chunk.read
+       end
+
+       private
+
+       def ensure_bucket
+       end
+
+       def check_apikeys
+       end
+     end.configure(conf)
+   end
+
+   def test_configure
+     d = create_driver
+     assert_equal 'test_key_id', d.instance.aws_key_id
+     assert_equal 'test_sec_key', d.instance.aws_sec_key
+     assert_equal 'test_bucket', d.instance.s3_bucket
+     assert_equal 'log', d.instance.path
+     assert_equal 'gz', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/x-gzip', d.instance.instance_variable_get(:@compressor).content_type
+     assert_equal false, d.instance.force_path_style
+     assert_equal nil, d.instance.compute_checksums
+     assert_equal nil, d.instance.signature_version
+     assert_equal true, d.instance.check_bucket
+     assert_equal true, d.instance.check_object
+   end
+
+   def test_s3_endpoint_with_valid_endpoint
+     d = create_driver(CONFIG + 's3_endpoint riak-cs.example.com')
+     assert_equal 'riak-cs.example.com', d.instance.s3_endpoint
+   end
+
+   data('US West (Oregon)' => 's3-us-west-2.amazonaws.com',
+        'EU (Frankfurt)' => 's3.eu-central-1.amazonaws.com',
+        'Asia Pacific (Tokyo)' => 's3-ap-northeast-1.amazonaws.com')
+   def test_s3_endpoint_with_invalid_endpoint(endpoint)
+     assert_raise(Fluent::ConfigError, "s3_endpoint parameter is not supported, use s3_region instead. This parameter is for S3 compatible services") {
+       create_driver(CONFIG + "s3_endpoint #{endpoint}")
+     }
+   end
+
+   def test_configure_with_mime_type_json
+     conf = CONFIG.clone
+     conf << "\nstore_as json\n"
+     d = create_driver(conf)
+     assert_equal 'json', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/json', d.instance.instance_variable_get(:@compressor).content_type
+   end
+
+   def test_configure_with_mime_type_text
+     conf = CONFIG.clone
+     conf << "\nstore_as text\n"
+     d = create_driver(conf)
+     assert_equal 'txt', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'text/plain', d.instance.instance_variable_get(:@compressor).content_type
+   end
+
+   def test_configure_with_mime_type_lzo
+     conf = CONFIG.clone
+     conf << "\nstore_as lzo\n"
+     d = create_driver(conf)
+     assert_equal 'lzo', d.instance.instance_variable_get(:@compressor).ext
+     assert_equal 'application/x-lzop', d.instance.instance_variable_get(:@compressor).content_type
+   rescue => e
+     # TODO: replace code with disable lzop command
+     assert(e.is_a?(Fluent::ConfigError))
+   end
+
+   def test_configure_with_path_style
+     conf = CONFIG.clone
+     conf << "\nforce_path_style true\n"
+     d = create_driver(conf)
+     assert d.instance.force_path_style
+   end
+
+   def test_configure_with_compute_checksums
+     conf = CONFIG.clone
+     conf << "\ncompute_checksums false\n"
+     d = create_driver(conf)
+     assert_equal false, d.instance.compute_checksums
+   end
+
+   def test_configure_with_hex_random_length
+     conf = CONFIG.clone
+     assert_raise Fluent::ConfigError do
+       create_driver(conf + "\nhex_random_length 17\n")
+     end
+     assert_nothing_raised do
+       create_driver(conf + "\nhex_random_length 16\n")
+     end
+   end
+
+   def test_configure_with_no_check_on_s3
+     conf = CONFIG.clone
+     conf << "\ncheck_bucket false\ncheck_object false\n"
+     d = create_driver(conf)
+     assert_equal false, d.instance.check_bucket
+     assert_equal false, d.instance.check_object
+   end
+
+   def test_format
+     d = create_driver
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, { "a" => 1 })
+       d.feed(time, { "a" => 2 })
+     end
+     expected = [
+       %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n],
+       %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_included_tag_and_time
+     config = [CONFIG, 'include_tag_key true', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, { "a" => 1 })
+       d.feed(time, { "a" => 2 })
+     end
+     expected = [
+       %[2011-01-02T13:14:15Z\ttest\t{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n],
+       %[2011-01-02T13:14:15Z\ttest\t{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_with_format_ltsv
+     config = [CONFIG, 'format ltsv'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1, "b"=>1})
+       d.feed(time, {"a"=>2, "b"=>2})
+     end
+     expected = [
+       %[a:1\tb:1\n],
+       %[a:2\tb:2\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_with_format_json
+     config = [CONFIG, 'format json'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+     expected = [
+       %[{"a":1}\n],
+       %[{"a":2}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_with_format_json_included_tag
+     config = [CONFIG, 'format json', 'include_tag_key true'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+     expected = [
+       %[{"a":1,"tag":"test"}\n],
+       %[{"a":2,"tag":"test"}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_with_format_json_included_time
+     config = [CONFIG, 'format json', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+     expected = [
+       %[{"a":1,"time":"2011-01-02T13:14:15Z"}\n],
+       %[{"a":2,"time":"2011-01-02T13:14:15Z"}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   def test_format_with_format_json_included_tag_and_time
+     config = [CONFIG, 'format json', 'include_tag_key true', 'include_time_key true'].join("\n")
+     d = create_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+     expected = [
+       %[{"a":1,"tag":"test","time":"2011-01-02T13:14:15Z"}\n],
+       %[{"a":2,"tag":"test","time":"2011-01-02T13:14:15Z"}\n]
+     ]
+     assert_equal(expected, d.formatted)
+   end
+
+   CONFIG_TIME_SLICE = <<EOC
+     aws_key_id test_key_id
+     aws_sec_key test_sec_key
+     s3_bucket test_bucket
+     s3_object_key_format %{path}/events/ts=%{time_slice}/events_%{index}-%{hostname}.%{file_extension}
+     time_slice_format %Y%m%d-%H
+     path log
+     utc
+     buffer_type memory
+     @log_level debug
+     check_bucket true
+     check_object true
+ EOC
+
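+   # Unlike create_driver above, write calls super here, so the real upload path runs
+   # against the mocked S3 objects set up further below; only check_apikeys is stubbed.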
+   def create_time_sliced_driver(conf = CONFIG_TIME_SLICE)
+     Fluent::Test::Driver::Output.new(Fluent::Plugin::S3Output) do
+       def format(tag, time, record)
+         super
+       end
+
+       def write(chunk)
+         super
+       end
+
+       private
+
+       def check_apikeys
+       end
+     end.configure(conf)
+   end
+
+   def test_write_with_hardened_s3_policy
+     # Partially mock the S3 bucket so that no actual connection to Amazon S3 is made
+     setup_mocks_hardened_policy
+     s3_local_file_path = "/tmp/s3-test.txt"
+     # @s3_object_key_format will be hard-coded with a timestamp only, because in this
+     # case the plugin checks neither object existence nor bucket existence;
+     # both the check_bucket and check_object config parameters should be false.
+     # @s3_object_key_format = "%{path}/%{time_slice}_%{hms_slice}.%{file_extension}"
+     setup_s3_object_mocks_hardened_policy()
+
+     # We must use TimeSlicedOutputTestDriver instead of BufferedOutputTestDriver,
+     # to make assertions on chunks' keys
+     config = CONFIG_TIME_SLICE.gsub(/check_object true/, "check_object false\n")
+     config = config.gsub(/check_bucket true/, "check_bucket false\n")
+     d = create_time_sliced_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+
+     Zlib::GzipReader.open(s3_local_file_path) do |gz|
+       data = gz.read
+       assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
+                    %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
+                    data
+     end
+     FileUtils.rm_f(s3_local_file_path)
+   end
+
+   def test_write_with_custom_s3_object_key_format
+     # Partially mock the S3 bucket so that no actual connection to Amazon S3 is made
+     setup_mocks(true)
+     s3_local_file_path = "/tmp/s3-test.txt"
+     setup_s3_object_mocks(s3_local_file_path: s3_local_file_path)
+
+     d = create_time_sliced_driver
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+
+     Zlib::GzipReader.open(s3_local_file_path) do |gz|
+       data = gz.read
+       assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
+                    %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
+                    data
+     end
+     FileUtils.rm_f(s3_local_file_path)
+   end
+
+   def test_write_with_custom_s3_object_key_format_containing_uuid_flush_placeholder
+     begin
+       require 'uuidtools'
+     rescue LoadError
+       pend("uuidtools not found. skip this test")
+     end
+
+     # Partially mock the S3 bucket so that no actual connection to Amazon S3 is made
+     setup_mocks(true)
+
+     uuid = "5755e23f-9b54-42d8-8818-2ea38c6f279e"
+     stub(::UUIDTools::UUID).random_create{ uuid }
+
+     s3_local_file_path = "/tmp/s3-test.txt"
+     s3path = "log/events/ts=20110102-13/events_0-#{uuid}.gz"
+     setup_s3_object_mocks(s3_local_file_path: s3_local_file_path, s3path: s3path)
+
+     config = CONFIG_TIME_SLICE.gsub(/%{hostname}/,"%{uuid_flush}")
+     d = create_time_sliced_driver(config)
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+
+     Zlib::GzipReader.open(s3_local_file_path) do |gz|
+       data = gz.read
+       assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
+                    %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
+                    data
+     end
+     FileUtils.rm_f(s3_local_file_path)
+     Dir.glob('tmp/*').each {|file| FileUtils.rm_f(file) }
+   end
+
+   # TODO: test that hex_random does not change on retry; this is difficult with the
+   # current fluentd test helper because it does not provide a way to rerun with the same chunks
+   def test_write_with_custom_s3_object_key_format_containing_hex_random_placeholder
+     unique_hex = "5226c3c4fb3d49b15226c3c4fb3d49b1"
+     hex_random = unique_hex.reverse[0...5]
+
+     config = CONFIG_TIME_SLICE.gsub(/%{hostname}/,"%{hex_random}") << "\nhex_random_length #{hex_random.length}"
+     config = config.gsub(/buffer_type memory/, "buffer_type file\nbuffer_path test/tmp/buf")
+
+     # Partially mock the S3 bucket so that no actual connection to Amazon S3 is made
+     setup_mocks(true)
+
+     s3path = "log/events/ts=20110102-13/events_0-#{hex_random}.gz"
+     s3_local_file_path = "/tmp/s3-test.txt"
+     setup_s3_object_mocks(s3_local_file_path: s3_local_file_path, s3path: s3path)
+
+     d = create_time_sliced_driver(config)
+     stub(Fluent::UniqueId).hex(anything) { unique_hex }
+
+     time = event_time("2011-01-02 13:14:15 UTC")
+     d.run(default_tag: "test") do
+       d.feed(time, {"a"=>1})
+       d.feed(time, {"a"=>2})
+     end
+
+     Zlib::GzipReader.open(s3_local_file_path) do |gz|
+       data = gz.read
+       assert_equal %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] +
+                    %[2011-01-02T13:14:15Z\ttest\t{"a":2}\n],
+                    data
+     end
+     FileUtils.rm_f(s3_local_file_path)
+   end
+
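+   # setup_mocks swaps the Aws::S3 client/resource/bucket/object for rr stubs and mocks
+   # so the plugin never opens a network connection; exists_return controls whether the
+   # mocked bucket reports itself as existing.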
+   def setup_mocks(exists_return = false)
+     @s3_client = stub(Aws::S3::Client.new(stub_responses: true))
+     mock(Aws::S3::Client).new(anything).at_least(0) { @s3_client }
+     @s3_resource = mock(Aws::S3::Resource.new(client: @s3_client))
+     mock(Aws::S3::Resource).new(client: @s3_client) { @s3_resource }
+     @s3_bucket = mock(Aws::S3::Bucket.new(name: "test",
+                                           client: @s3_client))
+     @s3_bucket.exists? { exists_return }
+     @s3_object = mock(Aws::S3::Object.new(bucket_name: "test_bucket",
+                                           key: "test",
+                                           client: @s3_client))
+     @s3_object.exists?.at_least(0) { false }
+     @s3_bucket.object(anything).at_least(0) { @s3_object }
+     @s3_resource.bucket(anything) { @s3_bucket }
+   end
+
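+   # setup_s3_object_mocks redirects the plugin's Tempfile to a known local path and
+   # stubs the S3 object for the expected key, so tests can read the gzipped chunk back
+   # from disk and assert its contents.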
+   def setup_s3_object_mocks(params = {})
+     s3path = params[:s3path] || "log/events/ts=20110102-13/events_0-#{Socket.gethostname}.gz"
+     s3_local_file_path = params[:s3_local_file_path] || "/tmp/s3-test.txt"
+
+     # Assert content of event logs which are being sent to S3
+     s3obj = stub(Aws::S3::Object.new(bucket_name: "test_bucket",
+                                      key: "test",
+                                      client: @s3_client))
+     s3obj.exists? { false }
+
+     tempfile = File.new(s3_local_file_path, "w")
+     stub(Tempfile).new("s3-") { tempfile }
+     s3obj.put(body: tempfile,
+               content_type: "application/x-gzip",
+               storage_class: "STANDARD")
+
+     @s3_bucket.object(s3path) { s3obj }
+   end
+
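+   # Variant of setup_mocks for the hardened-policy test: bucket and object existence
+   # are never stubbed because that test runs with check_bucket and check_object disabled.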
+   def setup_mocks_hardened_policy()
+     @s3_client = stub(Aws::S3::Client.new(:stub_responses => true))
+     mock(Aws::S3::Client).new(anything).at_least(0) { @s3_client }
+     @s3_resource = mock(Aws::S3::Resource.new(:client => @s3_client))
+     mock(Aws::S3::Resource).new(:client => @s3_client) { @s3_resource }
+     @s3_bucket = mock(Aws::S3::Bucket.new(:name => "test",
+                                           :client => @s3_client))
+     @s3_object = mock(Aws::S3::Object.new(:bucket_name => "test_bucket",
+                                           :key => "test",
+                                           :client => @s3_client))
+     @s3_bucket.object(anything).at_least(0) { @s3_object }
+     @s3_resource.bucket(anything) { @s3_bucket }
+   end
+
+   def setup_s3_object_mocks_hardened_policy(params = {})
+     s3_local_file_path = params[:s3_local_file_path] || "/tmp/s3-test.txt"
+
+     # Assert content of event logs which are being sent to S3
+     s3obj = stub(Aws::S3::Object.new(:bucket_name => "test_bucket",
+                                      :key => "test",
+                                      :client => @s3_client))
+
+     tempfile = File.new(s3_local_file_path, "w")
+     stub(Tempfile).new("s3-") { tempfile }
+     s3obj.put(:body => tempfile,
+               :content_type => "application/x-gzip",
+               :storage_class => "STANDARD")
+   end
+
+   def test_auto_create_bucket_false_with_non_existence_bucket
+     setup_mocks
+
+     config = CONFIG_TIME_SLICE + 'auto_create_bucket false'
+     d = create_time_sliced_driver(config)
+     assert_raise(RuntimeError, "The specified bucket does not exist: bucket = test_bucket") {
+       d.run {}
+     }
+   end
+
+   def test_auto_create_bucket_true_with_non_existence_bucket
+     setup_mocks
+     @s3_resource.create_bucket(bucket: "test_bucket")
+
+     config = CONFIG_TIME_SLICE + 'auto_create_bucket true'
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+   end
+
+   def test_credentials
+     d = create_time_sliced_driver
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_instance_of(Aws::Credentials, credentials)
+   end
+
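+   # The credential-provider tests below strip the aws_key_id/aws_sec_key lines from
+   # CONFIG_TIME_SLICE so that the credentials the client ends up with come solely from
+   # the credentials section (or parameter) under test.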
+   def test_assume_role_credentials
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     mock(Aws::AssumeRoleCredentials).new(role_arn: "test_arn",
+                                          role_session_name: "test_session",
+                                          client: anything){
+       expected_credentials
+     }
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       <assume_role_credentials>
+         role_arn test_arn
+         role_session_name test_session
+       </assume_role_credentials>
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+   end
+
+   def test_assume_role_credentials_with_region
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     sts_client = Aws::STS::Client.new(region: 'ap-northeast-1')
+     mock(Aws::STS::Client).new(region: 'ap-northeast-1'){ sts_client }
+     mock(Aws::AssumeRoleCredentials).new(role_arn: "test_arn",
+                                          role_session_name: "test_session",
+                                          client: sts_client){
+       expected_credentials
+     }
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       s3_region ap-northeast-1
+       <assume_role_credentials>
+         role_arn test_arn
+         role_session_name test_session
+       </assume_role_credentials>
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+   end
+
+   def test_instance_profile_credentials
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     mock(Aws::InstanceProfileCredentials).new({}).returns(expected_credentials)
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       <instance_profile_credentials>
+       </instance_profile_credentials>
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+   end
+
+   def test_ecs_credentials
+     ENV["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] = "/credential_provider_version/credentials?id=task_UUID"
+
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     mock(Aws::ECSCredentials).new({}).returns(expected_credentials)
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       <instance_profile_credentials>
+       </instance_profile_credentials>
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+
+     ENV["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] = nil
+   end
+
+   def test_instance_profile_credentials_aws_iam_retries
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     mock(Aws::InstanceProfileCredentials).new({ retries: 10 }).returns(expected_credentials)
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       aws_iam_retries 10
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+   end
+
+   def test_shared_credentials
+     expected_credentials = Aws::Credentials.new("test_key", "test_secret")
+     mock(Aws::SharedCredentials).new({}).returns(expected_credentials)
+     config = CONFIG_TIME_SLICE.split("\n").reject{|x| x =~ /.+aws_.+/}.join("\n")
+     config += %[
+       <shared_credentials>
+       </shared_credentials>
+     ]
+     d = create_time_sliced_driver(config)
+     assert_nothing_raised { d.run {} }
+     client = d.instance.instance_variable_get(:@s3).client
+     credentials = client.config.credentials
+     assert_equal(expected_credentials, credentials)
+   end
+
+   def test_signature_version
+     config = [CONFIG, 'signature_version s3'].join("\n")
+     d = create_driver(config)
+
+     signature_version = d.instance.instance_variable_get(:@signature_version)
+     assert_equal("s3", signature_version)
+   end
+
+   def test_warn_for_delay
+     setup_mocks(true)
+     s3_local_file_path = "/tmp/s3-test.txt"
+     setup_s3_object_mocks(s3_local_file_path: s3_local_file_path)
+
+     config = CONFIG_TIME_SLICE + 'warn_for_delay 1d'
+     d = create_time_sliced_driver(config)
+
+     delayed_time = event_time("2011-01-02 13:14:15 UTC")
+     now = delayed_time.to_i + 86000 + 1
+     d.instance.log.out.flush_logs = false
+     Timecop.freeze(Time.at(now)) do
+       d.run(default_tag: "test") do
+         d.feed(delayed_time, {"a"=>1})
+         d.feed(delayed_time, {"a"=>2})
+       end
+     end
+     logs = d.instance.log.out.logs
+     assert_true logs.any? {|log| log.include?('out_s3: delayed events were put') }
+     d.instance.log.out.flush_logs = true
+     d.instance.log.out.reset
+     FileUtils.rm_f(s3_local_file_path)
+   end
+ end