aws 2.8.0 → 2.9.1

@@ -0,0 +1,23 @@
+ class S3TestBase < Test::Unit::TestCase
+
+   RIGHT_OBJECT_TEXT = 'Right test message'
+
+   def setup
+     TestCredentials.get_credentials
+     @s3 = Aws::S3Interface.new(TestCredentials.aws_access_key_id, TestCredentials.aws_secret_access_key)
+     @bucket = TestCredentials.config['amazon']['my_prefix'] + '_awesome_test_bucket_000A1'
+     @bucket2 = TestCredentials.config['amazon']['my_prefix'] + '_awesome_test_bucket_000A2'
+     @key1 = 'test/woohoo1/'
+     @key2 = 'test1/key/woohoo2'
+     @key3 = 'test2/A%B@C_D&E?F+G=H"I'
+     @key1_copy = 'test/woohoo1_2'
+     @key1_new_name = 'test/woohoo1_3'
+     @key2_new_name = 'test1/key/woohoo2_new'
+     @s = Aws::S3.new(TestCredentials.aws_access_key_id, TestCredentials.aws_secret_access_key)
+   end
+
+   def teardown
+
+   end
+
+ end
@@ -0,0 +1,3 @@
+ require 'test/unit'
+ require File.dirname(__FILE__) + '/../../lib/aws'
+
@@ -0,0 +1,180 @@
+ require File.dirname(__FILE__) + '/test_helper.rb'
+ require_relative 's3_test_base'
+ require File.dirname(__FILE__) + '/../test_credentials.rb'
+
+ class TestS3 < S3TestBase
+
+   #---------------------------
+   # Aws::S3Interface
+   #---------------------------
+
+   def test_01_create_bucket
+     assert @s3.create_bucket(@bucket), 'Create_bucket fail'
+   end
+
+   def test_02_list_all_my_buckets
+     assert @s3.list_all_my_buckets.map { |bucket| bucket[:name] }.include?(@bucket), "#{@bucket} must exist in bucket list"
+   end
+
+   def test_03_list_empty_bucket
+     assert_equal 0, @s3.list_bucket(@bucket).size, "#{@bucket} isn't empty, arrgh!"
+   end
+
+   def test_04_put
+     assert @s3.put(@bucket, @key1, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo1!'), 'Put bucket fail'
+     assert @s3.put(@bucket, @key2, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo2!'), 'Put bucket fail'
+     assert @s3.put(@bucket, @key3, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo3!'), 'Put bucket fail'
+   end
+
+   def test_04a_put
+     super_big_string = ""
+     1000000.times { super_big_string << "abcde" }
+     assert @s3.put(@bucket, "super_big", super_big_string), 'Put bucket fail'
+   end
+
+   def test_05_get_and_get_object
+     assert_raise(Aws::AwsError) { @s3.get(@bucket, 'undefined/key') }
+     data1 = @s3.get(@bucket, @key1)
+     assert_equal RIGHT_OBJECT_TEXT, data1[:object], "Object text must be equal to '#{RIGHT_OBJECT_TEXT}'"
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key1), "Get_object text must return '#{RIGHT_OBJECT_TEXT}'"
+     assert_equal 'Woohoo1!', data1[:headers]['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key3), "Get_object text must return '#{RIGHT_OBJECT_TEXT}'"
+   end
+
+   def test_06_head
+     assert_equal 'Woohoo1!', @s3.head(@bucket, @key1)['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+   end
+
+
+   def test_07_streaming_get
+     resp = String.new
+     assert_raise(Aws::AwsError) do
+       @s3.get(@bucket, 'undefined/key') do |chunk|
+         resp += chunk
+       end
+     end
+
+     resp = String.new
+     data1 = @s3.get(@bucket, @key1) do |chunk|
+       resp += chunk
+     end
+     assert_equal RIGHT_OBJECT_TEXT, resp, "Object text must be equal to '#{RIGHT_OBJECT_TEXT}'"
+     assert_equal @s3.get_object(@bucket, @key1), resp, "Streaming iface must return same as non-streaming"
+     assert_equal 'Woohoo1!', data1[:headers]['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+   end
+
+   def test_08_keys
+     keys = @s3.list_bucket(@bucket).map { |b| b[:key] }
+     assert_equal 3, keys.size, "There should be 3 keys"
+     assert(keys.include?(@key1))
+     assert(keys.include?(@key2))
+     assert(keys.include?(@key3))
+   end
+
+   def test_09_copy_key
+     #--- test COPY
+     # copy a key
+     assert @s3.copy(@bucket, @key1, @bucket, @key1_copy)
+     # check it was copied well
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key1_copy), "copied object must have the same data"
+     # check meta-headers were copied
+     headers = @s3.head(@bucket, @key1_copy)
+     assert_equal 'Woohoo1!', headers['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+     #--- test REPLACE
+     assert @s3.copy(@bucket, @key1, @bucket, @key1_copy, :replace, 'x-amz-meta-family' => 'oooops!')
+     # check it was copied well
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key1_copy), "copied object must have the same data"
+     # check meta-headers were overwritten
+     headers = @s3.head(@bucket, @key1_copy)
+     assert_equal 'oooops!', headers['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'oooops!'"
+   end
+
+   def test_10_move_key
+     # move a key
+     assert @s3.move(@bucket, @key1, @bucket, @key1_new_name)
+     # check its data was moved correctly
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key1_new_name), "moved object must have the same data"
+     # check meta-headers were moved
+     headers = @s3.head(@bucket, @key1_new_name)
+     assert_equal 'Woohoo1!', headers['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+     # check the original key no longer exists
+     keys = @s3.list_bucket(@bucket).map { |b| b[:key] }
+     assert(!keys.include?(@key1))
+   end
+
+   def test_11_rename_key
+     # rename a key
+     assert @s3.rename(@bucket, @key2, @key2_new_name)
+     # check the new key data
+     assert_equal RIGHT_OBJECT_TEXT, @s3.get_object(@bucket, @key2_new_name), "moved object must have the same data"
+     # check meta-headers
+     headers = @s3.head(@bucket, @key2_new_name)
+     assert_equal 'Woohoo2!', headers['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo2!'"
+     # check the original key no longer exists
+     keys = @s3.list_bucket(@bucket).map { |b| b[:key] }
+     assert(!keys.include?(@key2))
+   end
+
+   def test_12_retrieve_object
+     assert_raise(Aws::AwsError) { @s3.retrieve_object(:bucket => @bucket, :key => 'undefined/key') }
+     data1 = @s3.retrieve_object(:bucket => @bucket, :key => @key1_new_name)
+     assert_equal RIGHT_OBJECT_TEXT, data1[:object], "Object text must be equal to '#{RIGHT_OBJECT_TEXT}'"
+     assert_equal 'Woohoo1!', data1[:headers]['x-amz-meta-family'], "x-amz-meta-family header must be equal to 'Woohoo1!'"
+   end
+
+   def test_13_delete_folder
+     assert_equal 1, @s3.delete_folder(@bucket, 'test').size, "Only one key(#{@key1}) must be deleted!"
+   end
+
+   def test_14_multipart_upload
+     segmented_object = TestCredentials.config['amazon']['my_prefix']+"segmented"
+     uploadId = @s3.initiate_multipart(@bucket, segmented_object)
+     assert(uploadId.instance_of?(String))
+     part1_etag = @s3.upload_part(@bucket, segmented_object, uploadId, "1", File.open(TestCredentials.config['amazon']['multipart_segment1']))
+     assert(part1_etag.instance_of?(String))
+     part2_etag = @s3.upload_part(@bucket, segmented_object, uploadId, "2", File.open(TestCredentials.config['amazon']['multipart_segment2']))
+     assert(part2_etag.instance_of?(String))
+     parts = @s3.list_parts(@bucket, segmented_object, uploadId)
+     part_etags = parts[:parts].collect{|part| part[:etag].gsub!("\"", "")}
+     assert(part_etags.include?(part1_etag))
+     assert(part_etags.include?(part2_etag))
+     assert(@s3.complete_multipart(@bucket, segmented_object, uploadId, {"1"=>part1_etag, "2"=>part2_etag}))
+     object_data = @s3.head(@bucket, segmented_object)
+     combined_size = File.size(TestCredentials.config['amazon']['multipart_segment1']) + File.size(TestCredentials.config['amazon']['multipart_segment2'])
+     assert_equal object_data["content-length"].to_i, combined_size
+   end
+
+   # idle timeout is 20 seconds
+   # https://forums.aws.amazon.com/thread.jspa?threadID=58038
+   def test_15_idle_timeout
+     @s3 = Aws::S3Interface.new(TestCredentials.aws_access_key_id, TestCredentials.aws_secret_access_key,
+                                :connection_mode=>:single)
+     # Disable connection retrying
+     Aws::AWSErrorHandler.close_on_error = false
+     assert @s3.put(@bucket, @key1, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo1!'), 'Put bucket fail'
+     sleep 300
+     assert_raises Aws::AwsError do
+       @s3.put(@bucket, @key2, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo1!')
+     end
+
+     # now try again with retry mode
+     @s3 = Aws::S3Interface.new(TestCredentials.aws_access_key_id, TestCredentials.aws_secret_access_key,
+                                :connection_mode=>:single)
+     Aws::AWSErrorHandler.close_on_error = true
+     assert @s3.put(@bucket, @key1, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo1!'), 'Put bucket fail'
+     sleep 30
+     assert @s3.put(@bucket, @key2, RIGHT_OBJECT_TEXT, 'x-amz-meta-family'=>'Woohoo1!'), 'Put bucket fail'
+
+
+   end
+
+   def test_99_delete_bucket
+     assert_raise(Aws::AwsError) { @s3.delete_bucket(@bucket) }
+     assert @s3.clear_bucket(@bucket), 'Clear_bucket fail'
+     assert_equal 0, @s3.list_bucket(@bucket).size, 'Bucket must be empty'
+     assert @s3.delete_bucket(@bucket)
+     assert !@s3.list_all_my_buckets.map { |bucket| bucket[:name] }.include?(@bucket), "#{@bucket} must not exist"
+   end
+
+
+ end
@@ -0,0 +1,201 @@
+ # encoding: utf-8
+ require File.dirname(__FILE__) + '/test_helper.rb'
+ require_relative 's3_test_base'
+ require File.dirname(__FILE__) + '/../test_credentials.rb'
+
+ class TestS3Class < S3TestBase
+
+   #---------------------------
+   # Aws::S3 classes
+   #---------------------------
+
+   def test_20_s3
+     # create bucket
+     bucket = @s.bucket(@bucket, true)
+     assert bucket
+     # check that the bucket exists
+     assert @s.buckets.map { |b| b.name }.include?(@bucket)
+     # delete bucket
+     assert bucket.clear
+     assert bucket.delete
+   end
+
+   def test_21_bucket_create_put_get_key
+     bucket = Aws::S3::Bucket.create(@s, @bucket, true)
+     # check that the bucket exists
+     assert @s.buckets.map { |b| b.name }.include?(@bucket)
+     assert bucket.keys.empty?, "keys are not empty: " + bucket.keys.inspect
+     # put data
+     assert bucket.put(@key3, RIGHT_OBJECT_TEXT, {'family'=>'123456'})
+     # get data and compare
+     assert_equal RIGHT_OBJECT_TEXT, bucket.get(@key3)
+     # get key object
+     key = bucket.key(@key3, true)
+     assert_equal Aws::S3::Key, key.class
+     assert key.exists?
+     assert_equal '123456', key.meta_headers['family']
+   end
+
+   def test_22_bucket_put_big_with_multibyte_chars
+     bucket = Aws::S3::Bucket.create(@s, @bucket, true)
+     super_big_string = ""
+     10000.times { super_big_string << "abcde Café" }
+     # this string has multibyte values just to mess things up a bit.
+     puts 'String made, putting...'
+     puts "#{super_big_string.size} - #{super_big_string.bytesize}"
+     assert bucket.put("super_big", super_big_string), 'Put bucket fail'
+
+     got = bucket.get("super_big")
+     puts 'got.class=' + got.class.name
+     assert_equal(super_big_string, got, "not the same yo")
+   end
+
+   def test_23_put_strange_things
+     bucket = Aws::S3::Bucket.create(@s, @bucket, true)
+
+     # this is kinda bad, you put a nil, but get an empty string back
+     assert bucket.put("strange", nil), 'Put bucket fail'
+     got = bucket.get("strange")
+     assert_equal("", got)
+
+     x = "\xE2\x80\x99s Café"
+     puts "#{x.size} - #{x.bytesize}"
+     assert bucket.put("multibye", x)
+
+
+
+   end
+
+   def test_30_keys
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # create first key
+     key3 = Aws::S3::Key.create(bucket, @key3)
+     key3.refresh
+     assert key3.exists?
+     assert_equal '123456', key3.meta_headers['family']
+     # create second key
+     key2 = Aws::S3::Key.create(bucket, @key2)
+     assert !key2.refresh
+     assert !key2.exists?
+     assert_raise(Aws::AwsError) { key2.head }
+     # store key
+     key2.meta_headers = {'family'=>'111222333'}
+     assert key2.put(RIGHT_OBJECT_TEXT)
+     # make sure that the key exists
+     assert key2.refresh
+     assert key2.exists?
+     assert key2.head
+     # get its data
+     assert_equal RIGHT_OBJECT_TEXT, key2.get
+     # drop key
+     assert key2.delete
+     assert !key2.exists?
+   end
+
+   def test_31_rename_key
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # -- 1 -- (key based rename)
+     # create a key
+     key = bucket.key('test/copy/1')
+     key.put(RIGHT_OBJECT_TEXT)
+     original_key = key.clone
+     assert key.exists?, "'test/copy/1' should exist"
+     # rename it
+     key.rename('test/copy/2')
+     assert_equal 'test/copy/2', key.name
+     assert key.exists?, "'test/copy/2' should exist"
+     # the original key should not exist
+     assert !original_key.exists?, "'test/copy/1' should not exist"
+     # -- 2 -- (bucket based rename)
+     bucket.rename_key('test/copy/2', 'test/copy/3')
+     assert bucket.key('test/copy/3').exists?, "'test/copy/3' should exist"
+     assert !bucket.key('test/copy/2').exists?, "'test/copy/2' should not exist"
+   end
+
+   def test_32_copy_key
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # -- 1 -- (key based copy)
+     # create a key
+     key = bucket.key('test/copy/10')
+     key.put(RIGHT_OBJECT_TEXT)
+     # make copy
+     new_key = key.copy('test/copy/11')
+     # make sure both keys exist and hold the correct data
+     assert key.exists?, "'test/copy/10' should exist"
+     assert new_key.exists?, "'test/copy/11' should exist"
+     assert_equal RIGHT_OBJECT_TEXT, key.get
+     assert_equal RIGHT_OBJECT_TEXT, new_key.get
+     # -- 2 -- (bucket based copy)
+     bucket.copy_key('test/copy/11', 'test/copy/12')
+     assert bucket.key('test/copy/11').exists?, "'test/copy/11' should exist"
+     assert bucket.key('test/copy/12').exists?, "'test/copy/12' should exist"
+     assert_equal RIGHT_OBJECT_TEXT, bucket.key('test/copy/11').get
+     assert_equal RIGHT_OBJECT_TEXT, bucket.key('test/copy/12').get
+   end
+
+   def test_33_move_key
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # -- 1 -- (key based move)
+     # create a key
+     key = bucket.key('test/copy/20')
+     key.put(RIGHT_OBJECT_TEXT)
+     # move
+     new_key = key.move('test/copy/21')
+     # make sure the old key is gone and the new key holds the correct data
+     assert !key.exists?, "'test/copy/20' should not exist"
+     assert new_key.exists?, "'test/copy/21' should exist"
+     assert_equal RIGHT_OBJECT_TEXT, new_key.get
+     # -- 2 -- (bucket based copy)
+     bucket.copy_key('test/copy/21', 'test/copy/22')
+     assert bucket.key('test/copy/21').exists?, "'test/copy/21' should exist"
+     assert bucket.key('test/copy/22').exists?, "'test/copy/22' should exist"
+     assert_equal RIGHT_OBJECT_TEXT, bucket.key('test/copy/22').get
+   end
+
+   def test_40_save_meta
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # create a key
+     key = bucket.key('test/copy/30')
+     key.put(RIGHT_OBJECT_TEXT)
+     assert key.meta_headers.empty?
+     # store some meta keys
+     meta = {'family' => 'oops', 'race' => 'troll'}
+     assert_equal meta, key.save_meta(meta)
+     # reload meta
+     assert_equal meta, key.reload_meta
+   end
+
+   def test_60_clear_delete
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # add another key
+     bucket.put(@key2, RIGHT_OBJECT_TEXT)
+     # delete 'folder'
+     assert_equal 1, bucket.delete_folder(@key1).size
+     # delete
+     assert_raise(Aws::AwsError) { bucket.delete }
+     assert bucket.delete(true)
+   end
+
+   # Non-streaming get is covered by
+   # test_21_bucket_create_put_get_key
+   def test_61_get_bucket_key_via_streaming
+     bucket = Aws::S3::Bucket.create(@s, @bucket, true)
+     # check that the bucket exists
+     assert @s.buckets.map { |b| b.name }.include?(@bucket)
+     # put data
+     assert bucket.put(@key2, RIGHT_OBJECT_TEXT, {'family'=>'123456_61'})
+     # get data and compare via streaming
+     # stream data from S3
+     data = ""
+     bucket.get(@key2) do |chunk|
+       data += chunk
+     end
+     assert_equal RIGHT_OBJECT_TEXT, data
+     # get key object
+     key = bucket.key(@key2, true)
+     assert_equal Aws::S3::Key, key.class
+     assert key.exists?
+     assert_equal '123456_61', key.meta_headers['family']
+   end
+
+ end
@@ -0,0 +1,139 @@
+ require File.dirname(__FILE__) + '/test_helper.rb'
+ require_relative 's3_test_base'
+ require File.dirname(__FILE__) + '/../test_credentials.rb'
+
+ class TestS3Rights < S3TestBase
+   # Grantees
+
+   def test_30_create_bucket
+     bucket = @s.bucket(@bucket, true, 'public-read')
+     assert bucket
+   end
+
+   def test_31_list_grantees
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # get grantees list
+     grantees = bucket.grantees
+     # check that the grantee count equals 2 (root, AllUsers)
+     assert_equal 2, grantees.size
+   end
+
+   def test_32_grant_revoke_drop
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # Take the 'AllUsers' grantee
+     grantee = Aws::S3::Grantee.new(bucket, 'http://acs.amazonaws.com/groups/global/AllUsers')
+     # Check exists?
+     assert grantee.exists?
+     # Add grant as String
+     assert grantee.grant('WRITE')
+     # Add grants as Array
+     assert grantee.grant(['READ_ACP', 'WRITE_ACP'])
+     # Check perms count
+     assert_equal 4, grantee.perms.size
+     # revoke 'WRITE_ACP'
+     assert grantee.revoke('WRITE_ACP')
+     # Check manual perm removal method
+     grantee.perms -= ['READ_ACP']
+     grantee.apply
+     assert_equal 2, grantee.perms.size
+     # Check grantee removal if it has no permissions
+     assert grantee.perms = []
+     assert grantee.apply
+     assert !grantee.exists?
+     # Check multiple perms assignment
+     assert grantee.grant('FULL_CONTROL', 'READ', 'WRITE')
+     assert_equal ['FULL_CONTROL', 'READ', 'WRITE'].sort, grantee.perms.sort
+     # Check multiple perms removal
+     assert grantee.revoke('FULL_CONTROL', 'WRITE')
+     assert_equal ['READ'], grantee.perms
+     # check 'Drop' method
+     assert grantee.drop
+     assert !grantee.exists?
+     assert_equal 1, bucket.grantees.size
+     # Delete bucket
+     bucket.delete(true)
+   end
+
+   def test_33_key_grantees
+     # Create bucket
+     bucket = @s.bucket(@bucket, true)
+     # Create key
+     key = bucket.key(@key1)
+     assert key.put(RIGHT_OBJECT_TEXT, 'public-read')
+     # Get grantees list (must be == 2)
+     grantees = key.grantees
+     assert grantees
+     assert_equal 2, grantees.size
+     # Take one of the grantees and give it 'WRITE' perms
+     grantee = grantees[0]
+     assert grantee.grant('WRITE')
+     # Drop grantee
+     assert grantee.drop
+     # Drop bucket
+     bucket.delete(true)
+   end
+
+   def test_34_bucket_create_put_with_perms
+     bucket = Aws::S3::Bucket.create(@s, @bucket, true)
+     # check that the bucket exists
+     assert @s.buckets.map { |b| b.name }.include?(@bucket)
+     assert bucket.keys.empty?
+     # put data (with canned ACL)
+     assert bucket.put(@key1, RIGHT_OBJECT_TEXT, {'family'=>'123456'}, "public-read")
+     # get data and compare
+     assert_equal RIGHT_OBJECT_TEXT, bucket.get(@key1)
+     # get key object
+     key = bucket.key(@key1, true)
+     assert_equal Aws::S3::Key, key.class
+     assert key.exists?
+     assert_equal '123456', key.meta_headers['family']
+   end
+
+   def test_35_key_put_with_perms
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     # create first key
+     key1 = Aws::S3::Key.create(bucket, @key1)
+     key1.refresh
+     assert key1.exists?
+     assert key1.put(RIGHT_OBJECT_TEXT, "public-read")
+     # get its data
+     assert_equal RIGHT_OBJECT_TEXT, key1.get
+     # drop key
+     assert key1.delete
+     assert !key1.exists?
+   end
+
+   def test_36_set_amazon_problems
+     original_problems = Aws::S3Interface.amazon_problems
+     assert(original_problems.length > 0)
+     Aws::S3Interface.amazon_problems = original_problems << "A New Problem"
+     new_problems = Aws::S3Interface.amazon_problems
+     assert_equal(new_problems, original_problems)
+
+     Aws::S3Interface.amazon_problems = nil
+     assert_nil(Aws::S3Interface.amazon_problems)
+   end
+
+   def test_37_access_logging
+     bucket = Aws::S3::Bucket.create(@s, @bucket, false)
+     targetbucket = Aws::S3::Bucket.create(@s, @bucket2, true)
+     # Take the 'LogDelivery' grantee
+     grantee = Aws::S3::Grantee.new(targetbucket, 'http://acs.amazonaws.com/groups/s3/LogDelivery')
+
+     assert grantee.grant(['READ_ACP', 'WRITE'])
+
+     assert bucket.enable_logging(:targetbucket => targetbucket, :targetprefix => "loggylogs/")
+
+     assert_equal(bucket.logging_info, {:enabled => true, :targetbucket => @bucket2, :targetprefix => "loggylogs/"})
+
+     assert bucket.disable_logging
+
+     # check 'Drop' method
+     assert grantee.drop
+
+     # Delete bucket
+     bucket.delete(true)
+     targetbucket.delete(true)
+   end
+
+ end