plntr-fakes3 1.0.0.pre.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +16 -0
- data/CONTRIBUTING.md +50 -0
- data/DEPLOY_README.md +18 -0
- data/Dockerfile +13 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +76 -0
- data/ISSUE_TEMPLATE.md +9 -0
- data/Makefile +7 -0
- data/PULL_REQUEST_TEMPLATE.md +9 -0
- data/README.md +42 -0
- data/Rakefile +18 -0
- data/bin/fakes3 +6 -0
- data/fakes3.gemspec +34 -0
- data/lib/fakes3/bucket.rb +65 -0
- data/lib/fakes3/bucket_query.rb +11 -0
- data/lib/fakes3/cli.rb +71 -0
- data/lib/fakes3/errors.rb +46 -0
- data/lib/fakes3/file_store.rb +286 -0
- data/lib/fakes3/rate_limitable_file.rb +21 -0
- data/lib/fakes3/s3_object.rb +19 -0
- data/lib/fakes3/server.rb +560 -0
- data/lib/fakes3/sorted_object_list.rb +137 -0
- data/lib/fakes3/unsupported_operation.rb +4 -0
- data/lib/fakes3/util.rb +8 -0
- data/lib/fakes3/version.rb +3 -0
- data/lib/fakes3/xml_adapter.rb +222 -0
- data/lib/fakes3.rb +3 -0
- data/static/button.svg +4 -0
- data/static/logo.png +0 -0
- data/test/aws_sdk_commands_test.rb +58 -0
- data/test/aws_sdk_v2_commands_test.rb +65 -0
- data/test/boto_test.rb +25 -0
- data/test/botocmd.py +87 -0
- data/test/cli_test.rb +18 -0
- data/test/local_s3_cfg +34 -0
- data/test/minitest_helper.rb +46 -0
- data/test/post_test.rb +58 -0
- data/test/right_aws_commands_test.rb +209 -0
- data/test/s3_commands_test.rb +209 -0
- data/test/s3cmd_test.rb +52 -0
- data/test/test_helper.rb +6 -0
- metadata +255 -0
@@ -0,0 +1,209 @@
|
|
1
|
+
require 'test/test_helper'
|
2
|
+
require 'fileutils'
|
3
|
+
#require 'fakes3/server'
|
4
|
+
require 'right_aws'
|
5
|
+
require 'time'
|
6
|
+
|
7
|
+
# Integration tests that exercise a running FakeS3 server (localhost:10453)
# through the RightAws gem's low-level S3Interface client.
class RightAWSCommandsTest < Test::Unit::TestCase

  def setup
    # Credentials are dummies; FakeS3 does not authenticate requests.
    @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
      {:multi_thread => false, :server => 'localhost',
       :port => 10453, :protocol => 'http', :logger => Logger.new("/dev/null"),
       :no_subdomains => true })
  end

  def teardown
  end

  def test_create_bucket
    bucket = @s3.create_bucket("s3media")
    assert_not_nil bucket
  end

  def test_store
    @s3.put("s3media","helloworld", "Hello World Man!")
    obj = @s3.get("s3media", "helloworld")
    assert_equal "Hello World Man!", obj[:object]

    obj = @s3.get("s3media", "helloworld")
  end

  def test_store_not_found
    begin
      obj = @s3.get("s3media", "helloworldnotexist")
      # FIX: previously the test passed silently when the GET unexpectedly
      # succeeded; fail explicitly, mirroring test_destroy_bucket below.
      fail("Shouldn't succeed here")
    rescue RightAws::AwsError
      assert $!.message.include?('NoSuchKey')
    rescue
      fail 'Should have caught NoSuchKey Exception'
    end
  end

  def test_large_store
    @s3.put("s3media", "helloworld", "Hello World Man!")
    buffer = ""
    500000.times do
      buffer << "#{(rand * 100).to_i}"
    end

    buf_len = buffer.length
    @s3.put("s3media", "big", buffer)

    output = ""
    @s3.get("s3media","big") do |chunk|
      output << chunk
    end
    assert_equal buf_len, output.size
  end

  # Test that GET requests with a delimiter return a list of
  # common prefixes rather than the individual keys beneath them.
  def test_list_by_delimiter
    @s3.create_bucket("s3media")

    @s3.put("s3media", "delimited/item", "item")

    expected_prefixes = []
    (1..50).each do |i|
      key_prefix = "delimited/%02d/" % i
      @s3.put("s3media", key_prefix + "foo", "foo")
      @s3.put("s3media", key_prefix + "fie", "fie")
      expected_prefixes << key_prefix
    end

    key_names = []
    common_prefixes = []
    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited", :delimiter => '/'}) do |currentResponse|
      common_prefixes += currentResponse[:common_prefixes]
    end
    assert_equal ["delimited/"], common_prefixes

    common_prefixes = []
    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited/", :delimiter => '/', "max-keys" => 100}) do |currentResponse|
      key_names += currentResponse[:contents].map do |key|
        key[:key]
      end
      common_prefixes += currentResponse[:common_prefixes]
    end
    assert_equal expected_prefixes, common_prefixes
    assert_equal ["delimited/item"], key_names
  end

  def test_multi_directory
    @s3.put("s3media", "dir/right/123.txt", "recursive")
    output = ""
    obj = @s3.get("s3media", "dir/right/123.txt") do |chunk|
      output << chunk
    end
    assert_equal "recursive", output
  end

  def test_intra_bucket_copy
    @s3.put("s3media", "original.txt", "Hello World")
    @s3.copy("s3media", "original.txt", "s3media", "copy.txt")
    obj = @s3.get("s3media", "copy.txt")
    assert_equal "Hello World", obj[:object]
  end

  def test_copy_in_place
    @s3.put("s3media", "copy-in-place", "Hello World")
    @s3.copy("s3media", "copy-in-place", "s3media","copy-in-place")
    obj = @s3.get("s3media", "copy-in-place")
    assert_equal "Hello World", obj[:object]
  end

  def test_content_encoding
    foo_compressed = Zlib::Deflate.deflate("foo")
    @s3.put("s3media", "foo", foo_compressed, {"content-encoding" => "gzip"})
    obj = @s3.get("s3media", "foo")
    # assert_equal "gzip", obj[:headers]["content-encoding"] # TODO why doesn't checking content-encoding work?
    assert_equal "gzip", obj[:headers]["x-content-encoding"] # TODO why doesn't checking content-encoding work?
  end

  # def test_content_encoding_data
  #   foo_compressed = Zlib::Deflate.deflate("foo-two")
  #   @s3.put("s3media", "foo-two", foo_compressed, {"content-encoding" => "gzip"})
  #   obj = @s3.get("s3media", "foo-two")
  #   puts "*** GOT HERE 1 #{ obj[:object] }"
  #   assert_equal "foo-two", Zlib::Inflate::inflate(obj[:object])
  # end

  def test_copy_replace_metadata
    @s3.put("s3media", "copy_replace", "Hello World", {"content-type" => "application/octet-stream"})
    obj = @s3.get("s3media", "copy_replace")
    assert_equal "Hello World", obj[:object]
    assert_equal "application/octet-stream", obj[:headers]["content-type"]
    @s3.copy("s3media", "copy_replace", "s3media", "copy_replace", :replace, {"content-type"=>"text/plain"})
    obj = @s3.get("s3media", "copy_replace")
    assert_equal "Hello World", obj[:object]
    assert_equal "text/plain", obj[:headers]["content-type"]
  end

  def test_larger_lists
    @s3.create_bucket('right_aws_many')
    (0..50).each do |i|
      ('a'..'z').each do |letter|
        name = "#{letter}#{i}"
        @s3.put('right_aws_many', name, 'asdf')
      end
    end

    # 51 * 26 = 1326 keys were stored, but a single list page is capped at 1000.
    keys = @s3.list_bucket('right_aws_many')
    assert_equal(1000, keys.size)
    assert_equal('a0', keys.first[:key])
  end

  def test_destroy_bucket
    @s3.create_bucket('deletebucket')
    @s3.delete_bucket('deletebucket')

    begin
      bucket = @s3.list_bucket('deletebucket')
      fail("Shouldn't succeed here")
    rescue RightAws::AwsError
      assert $!.message.include?('NoSuchBucket')
    rescue
      fail 'Should have caught NoSuchBucket Exception'
    end

  end

  def test_if_none_match
    @s3.put("s3media", "if_none_match_test", "Hello World 1!")
    obj = @s3.get("s3media", "if_none_match_test")
    tag = obj[:headers]["etag"]
    begin
      @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
    rescue URI::InvalidURIError
      # expected error for 304
    else
      fail 'Should have encountered an error due to the server not returning a response due to caching'
    end
    @s3.put("s3media", "if_none_match_test", "Hello World 2!")
    obj = @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
    assert_equal "Hello World 2!", obj[:object]
  end

  def test_if_modified_since
    @s3.put("s3media", "if_modified_since_test", "Hello World 1!")
    obj = @s3.get("s3media", "if_modified_since_test")
    modified = obj[:headers]["last-modified"]
    begin
      @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => modified})
    rescue URI::InvalidURIError
      # expected error for 304
    else
      fail 'Should have encountered an error due to the server not returning a response due to caching'
    end
    # Granularity of an HTTP Date is 1 second which isn't enough for the test
    # so manually rewind the clock by a second
    time_in_the_past = Time.httpdate(modified) - 1
    begin
      obj = @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => time_in_the_past.httpdate})
    rescue
      fail 'Should have been downloaded since the date is in the past now'
    else
      #expected scenario
    end
  end

end
|
@@ -0,0 +1,209 @@
|
|
1
|
+
require 'test/test_helper'
|
2
|
+
require 'fileutils'
|
3
|
+
#require 'fakes3/server'
|
4
|
+
require 'aws/s3'
|
5
|
+
|
6
|
+
# Integration tests that exercise a running FakeS3 server (localhost:10453)
# through the classic aws-s3 gem (AWS::S3 Bucket / S3Object API).
class S3CommandsTest < Test::Unit::TestCase
  include AWS::S3

  def setup
    # Credentials are dummies; FakeS3 does not authenticate requests.
    AWS::S3::Base.establish_connection!(:access_key_id => "123",
                                        :secret_access_key => "abc",
                                        :server => "localhost",
                                        :port => "10453" )
  end

  def teardown
    AWS::S3::Base.disconnect!
  end

  def test_create_bucket
    bucket = Bucket.create("ruby_aws_s3")
    assert_not_nil bucket

    bucket_names = []
    Service.buckets.each do |bucket|
      bucket_names << bucket.name
    end
    assert(bucket_names.index("ruby_aws_s3") >= 0)
  end

  def test_destroy_bucket
    Bucket.create("deletebucket")
    Bucket.delete("deletebucket")

    # FIX: the original used the nonexistent `assert_fail` inside a bare
    # begin/rescue, so the NoMethodError was swallowed and the test could
    # never fail. Use assert_raise, consistent with the other tests here.
    assert_raise AWS::S3::NoSuchBucket do
      Bucket.find("deletebucket")
    end
  end

  def test_store
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("hello","world","ruby_aws_s3")

    output = ""
    obj = S3Object.stream("hello","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal "world", output
  end

  def test_large_store
    bucket = Bucket.create("ruby_aws_s3")
    buffer = ""
    500000.times do
      buffer << "#{(rand * 100).to_i}"
    end

    buf_len = buffer.length
    S3Object.store("big",buffer,"ruby_aws_s3")

    output = ""
    S3Object.stream("big","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal buf_len,output.size
  end

  def test_metadata_store
    assert_equal true, Bucket.create("ruby_aws_s3")
    bucket = Bucket.find("ruby_aws_s3")

    # Note well: we can't seem to access obj.metadata until we've stored
    # the object and found it again. Thus the store, find, store
    # runaround below.
    obj = bucket.new_object(:value => "foo")
    obj.key = "key_with_metadata"
    obj.store
    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
    obj.metadata[:param1] = "one"
    obj.metadata[:param2] = "two, three"
    obj.store
    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")

    assert_equal "one", obj.metadata[:param1]
    assert_equal "two, three", obj.metadata[:param2]
  end

  def test_metadata_copy
    assert_equal true, Bucket.create("ruby_aws_s3")
    bucket = Bucket.find("ruby_aws_s3")

    # Note well: we can't seem to access obj.metadata until we've stored
    # the object and found it again. Thus the store, find, store
    # runaround below.
    obj = bucket.new_object(:value => "foo")
    obj.key = "key_with_metadata"
    obj.store
    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
    obj.metadata[:param1] = "one"
    obj.metadata[:param2] = "two, three"
    obj.store

    S3Object.copy("key_with_metadata", "key_with_metadata2", "ruby_aws_s3")
    obj = S3Object.find("key_with_metadata2", "ruby_aws_s3")

    assert_equal "one", obj.metadata[:param1]
    assert_equal "two, three", obj.metadata[:param2]
  end


  def test_multi_directory
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("dir/myfile/123.txt","recursive","ruby_aws_s3")

    output = ""
    obj = S3Object.stream("dir/myfile/123.txt","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal "recursive", output
  end

  def test_find_nil_bucket
    # FIX: the original called the nonexistent `assert_fail` and then
    # asserted the exception class in a bare rescue; assert_raise does
    # both checks correctly.
    assert_raise AWS::S3::NoSuchBucket do
      Bucket.find("unknown")
    end
  end

  def test_find_object
    bucket = Bucket.create('find_bucket')
    obj_name = 'short'
    S3Object.store(obj_name,'short_text','find_bucket')
    short = S3Object.find(obj_name,"find_bucket")
    assert_not_nil(short)
    assert_equal(short.value,'short_text')
  end

  def test_find_non_existent_object
    bucket = Bucket.create('find_bucket')
    obj_name = 'doesnotexist'
    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find(obj_name,"find_bucket")
    end

    # Try something higher in the alphabet
    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("zzz","find_bucket")
    end
  end

  def test_exists?
    bucket = Bucket.create('ruby_aws_s3')
    obj_name = 'dir/myfile/exists.txt'
    S3Object.store(obj_name,'exists','ruby_aws_s3')
    assert S3Object.exists?(obj_name, 'ruby_aws_s3')
    assert !S3Object.exists?('dir/myfile/doesnotexist.txt','ruby_aws_s3')
  end

  def test_delete
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("something_to_delete","asdf","ruby_aws_s3")
    something = S3Object.find("something_to_delete","ruby_aws_s3")
    S3Object.delete("something_to_delete","ruby_aws_s3")

    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("something_to_delete","ruby_aws_s3")
    end
  end

  def test_rename
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("something_to_rename","asdf","ruby_aws_s3")
    S3Object.rename("something_to_rename","renamed","ruby_aws_s3")

    renamed = S3Object.find("renamed","ruby_aws_s3")
    assert_not_nil(renamed)
    assert_equal(renamed.value,'asdf')

    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("something_to_rename","ruby_aws_s3")
    end
  end

  def test_larger_lists
    Bucket.create("ruby_aws_s3_many")
    (0..50).each do |i|
      ('a'..'z').each do |letter|
        name = "#{letter}#{i}"
        S3Object.store(name,"asdf","ruby_aws_s3_many")
      end
    end

    # 51 * 26 = 1326 keys were stored, but a single list page is capped at 1000.
    bucket = Bucket.find("ruby_aws_s3_many")
    assert_equal(bucket.size,1000)
    assert_equal(bucket.objects.first.key,"a0")
  end


  # Copying an object
  #S3Object.copy 'headshot.jpg', 'headshot2.jpg', 'photos'

  # Renaming an object
  #S3Object.rename 'headshot.jpg', 'portrait.jpg', 'photos'

end
|
data/test/s3cmd_test.rb
ADDED
@@ -0,0 +1,52 @@
|
|
1
|
+
require 'test/test_helper'
|
2
|
+
require 'fileutils'
|
3
|
+
|
4
|
+
# Smoke tests driven through the external `s3cmd` command-line client,
# pointed at a FakeS3 server via the checked-in local_s3_cfg file.
class S3CmdTest < Test::Unit::TestCase
  def setup
    cfg_path = File.expand_path(File.join(File.dirname(__FILE__),'local_s3_cfg'))
    raise "Please install s3cmd" if `which s3cmd`.empty?
    @s3cmd = "s3cmd --config #{cfg_path}"
  end

  def teardown
  end

  def test_create_bucket
    `#{@s3cmd} mb s3://s3cmd_bucket`
    listing = `#{@s3cmd} ls`
    assert_match(/s3cmd_bucket/, listing)
  end

  def test_store
    copy_self_to("/tmp/fakes3_upload")
    result = `#{@s3cmd} put /tmp/fakes3_upload s3://s3cmd_bucket/upload`
    assert_match(/upload/, result)

    FileUtils.rm("/tmp/fakes3_upload")
  end

  def test_acl
    copy_self_to("/tmp/fakes3_acl_upload")
    result = `#{@s3cmd} put /tmp/fakes3_acl_upload s3://s3cmd_bucket/acl_upload`
    assert_match(/upload/, result)

    result = `#{@s3cmd} --force setacl -P s3://s3cmd_bucket/acl_upload`
  end

  def test_large_store
  end

  def test_multi_directory
  end

  def test_intra_bucket_copy
  end

  private

  # Copy this test file's own bytes to +dest+ so there is a known payload
  # available on disk for s3cmd to upload.
  def copy_self_to(dest)
    File.open(__FILE__,'rb') do |input|
      File.open(dest,'wb') do |output|
        output << input.read
      end
    end
  end
end
|