rack_fake_s3 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +5 -0
- data/Gemfile +2 -0
- data/Gemfile.lock +37 -0
- data/MIT-LICENSE +20 -0
- data/README.md +39 -0
- data/Rakefile +12 -0
- data/config.ru +4 -0
- data/lib/rack_fake_s3/bucket.rb +64 -0
- data/lib/rack_fake_s3/bucket_query.rb +11 -0
- data/lib/rack_fake_s3/errors.rb +46 -0
- data/lib/rack_fake_s3/file_store.rb +211 -0
- data/lib/rack_fake_s3/rate_limitable_file.rb +21 -0
- data/lib/rack_fake_s3/s3_object.rb +19 -0
- data/lib/rack_fake_s3/server.rb +442 -0
- data/lib/rack_fake_s3/sorted_object_list.rb +100 -0
- data/lib/rack_fake_s3/version.rb +3 -0
- data/lib/rack_fake_s3/xml_adapter.rb +179 -0
- data/lib/rack_fake_s3.rb +7 -0
- data/rack_fake_s3.gemspec +28 -0
- data/test/local_s3_cfg +34 -0
- data/test/right_aws_commands_test.rb +65 -0
- data/test/s3_commands_test.rb +166 -0
- data/test/s3cmd_test.rb +52 -0
- data/test/test_helper.rb +4 -0
- metadata +204 -0
@@ -0,0 +1,179 @@
|
|
1
|
+
require 'builder'
require 'time'

module RackFakeS3
  # Renders the XML response bodies the fake S3 server returns.
  # Every method is a class-level helper that builds a document with
  # Builder::XmlMarkup into a string and returns it.
  class XmlAdapter
    # ListAllMyBucketsResult document for GET Service.
    # Each element of +bucket_objects+ must respond to #name and
    # #creation_date (a Time-like object).
    def self.buckets(bucket_objects)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.ListAllMyBucketsResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lam|
        lam.Owner { |owner|
          owner.ID("123")
          owner.DisplayName("FakeS3")
        }
        lam.Buckets { |buckets|
          bucket_objects.each do |bucket|
            buckets.Bucket do |b|
              b.Name(bucket.name)
              b.CreationDate(bucket.creation_date.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
            end
          end
        }
      }
      output
    end

    # Generic <Error> document. +error+ must respond to #code, #message
    # and #resource.
    def self.error(error)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.Error { |err|
        err.Code(error.code)
        err.Message(error.message)
        err.Resource(error.resource)
        err.RequestId(1)
      }
      output
    end

    # <?xml version="1.0" encoding="UTF-8"?>
    #<Error>
    #  <Code>NoSuchKey</Code>
    #  <Message>The resource you requested does not exist</Message>
    #  <Resource>/mybucket/myfoto.jpg</Resource>
    #  <RequestId>4442587FB7D0A2F9</RequestId>
    #</Error>
    #
    def self.error_no_such_bucket(name)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.Error { |err|
        err.Code("NoSuchBucket")
        err.Message("The resource you requested does not exist")
        err.Resource(name)
        err.RequestId(1)
      }
      output
    end

    # Error document returned when DELETE Bucket hits a non-empty bucket.
    def self.error_bucket_not_empty(name)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.Error { |err|
        err.Code("BucketNotEmpty")
        err.Message("The bucket you tried to delete is not empty.")
        err.Resource(name)
        err.RequestId(1)
      }
      output
    end

    # Error document returned when GET Object misses.
    def self.error_no_such_key(name)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.Error { |err|
        err.Code("NoSuchKey")
        err.Message("The specified key does not exist")
        err.Key(name)
        err.RequestId(1)
        err.HostId(2)
      }
      output
    end

    # Empty ListBucketResult for a bucket with no query (no contents).
    def self.bucket(bucket)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.ListBucketResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lbr|
        lbr.Name(bucket.name)
        lbr.Prefix
        lbr.Marker
        lbr.MaxKeys("1000")
        lbr.IsTruncated("false")
      }
      output
    end

    # A bucket query gives back the bucket along with contents
    # <Contents>
    #  <Key>Nelson</Key>
    #  <LastModified>2006-01-01T12:00:00.000Z</LastModified>
    #  <ETag>"828ef3fdfa96f00ad9f27c383fc9ac7f"</ETag>
    #  <Size>5</Size>
    #  <StorageClass>STANDARD</StorageClass>
    #  <Owner>
    #    <ID>bcaf161ca5fb16fd081034f</ID>
    #    <DisplayName>webfile</DisplayName>
    #  </Owner>
    # </Contents>

    # Appends one <Contents> entry per object to an open
    # ListBucketResult builder node. No-op for nil/empty lists.
    def self.append_objects_to_list_bucket_result(lbr, objects)
      return if objects.nil? || objects.empty?

      objects.each do |s3_object|
        lbr.Contents { |contents|
          contents.Key(s3_object.name)
          # BUGFIX: was LastModifed, which emitted a misspelled element
          # that S3 clients do not recognize.
          contents.LastModified(s3_object.creation_date)
          contents.ETag("\"#{s3_object.md5}\"")
          contents.Size(s3_object.size)
          contents.StorageClass("STANDARD")

          contents.Owner { |owner|
            owner.ID("abc")
            owner.DisplayName("You")
          }
        }
      end
    end

    # Full ListBucketResult for GET Bucket. +bucket_query+ must respond
    # to #bucket, #prefix, #marker, #max_keys, #is_truncated? and #matches.
    def self.bucket_query(bucket_query)
      output = ""
      bucket = bucket_query.bucket
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.ListBucketResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lbr|
        lbr.Name(bucket.name)
        lbr.Prefix(bucket_query.prefix)
        lbr.Marker(bucket_query.marker)
        lbr.MaxKeys(bucket_query.max_keys)
        lbr.IsTruncated(bucket_query.is_truncated?)
        append_objects_to_list_bucket_result(lbr, bucket_query.matches)
      }
      output
    end

    # ACL xml — a canned FULL_CONTROL policy for a single canonical user.
    # The +object+ argument is currently ignored.
    def self.acl(object = nil)
      output = ""
      xml = Builder::XmlMarkup.new(:target => output)
      xml.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
      xml.AccessControlPolicy(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |acp|
        acp.Owner do |owner|
          owner.ID("abc")
          owner.DisplayName("You")
        end
        acp.AccessControlList do |acl|
          acl.Grant do |grant|
            grant.Grantee("xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance", "xsi:type" => "CanonicalUser") do |grantee|
              grantee.ID("abc")
              grantee.DisplayName("You")
            end
            grant.Permission("FULL_CONTROL")
          end
        end
      }
      output
    end
  end
end
|
data/rack_fake_s3.gemspec
ADDED
@@ -0,0 +1,28 @@
|
|
1
|
+
# -*- encoding: utf-8 -*-
# Gemspec for rack_fake_s3. Loads the version constant from the lib
# tree, then declares metadata, dependencies and the file manifest
# (derived from `git ls-files`, so only tracked files are packaged).
require File.join(File.dirname(__FILE__), 'lib', 'rack_fake_s3', 'version')

Gem::Specification.new do |s|
  s.name        = "rack_fake_s3"
  s.version     = RackFakeS3::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ['Mario Visic']
  s.email       = ['mario.visic@envato.com']
  s.homepage    = "https://github.com/envato/rack_fake_s3"
  s.summary     = %q{Rack Fake S3 is a dummy rack app that simulates S3 for local integration testing. Originally created by Curtis Spencer, modified by Mario Visic at Envato.}
  s.description = %q{Test S3 integration locally. Originally created by Curtis Spencer, modified by Mario Visic at Envato.}

  # Development-only dependencies: test clients used by the test suite.
  s.add_development_dependency "bundler", ">= 1.0.0"
  s.add_development_dependency "aws-s3"
  s.add_development_dependency "right_aws"
  s.add_development_dependency "rake"
  s.add_development_dependency "rest-client"

  # Runtime dependencies.
  s.add_dependency "thor"
  s.add_dependency "builder"
  s.add_dependency "rack"

  # Manifest comes from git, so this gemspec must be evaluated inside a
  # git checkout when packaging.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
|
data/test/local_s3_cfg
ADDED
@@ -0,0 +1,34 @@
|
|
1
|
+
[default]
|
2
|
+
access_key = abc
|
3
|
+
acl_public = False
|
4
|
+
bucket_location = US
|
5
|
+
cloudfront_host = cloudfront.amazonaws.com
|
6
|
+
cloudfront_resource = /2008-06-30/distribution
|
7
|
+
default_mime_type = binary/octet-stream
|
8
|
+
delete_removed = False
|
9
|
+
dry_run = False
|
10
|
+
encoding = UTF-8
|
11
|
+
encrypt = False
|
12
|
+
force = False
|
13
|
+
get_continue = False
|
14
|
+
gpg_command = None
|
15
|
+
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
|
16
|
+
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
|
17
|
+
gpg_passphrase =
|
18
|
+
guess_mime_type = True
|
19
|
+
host_base = localhost:10453
|
20
|
+
host_bucket = %(bucket)s.localhost:10453
|
21
|
+
human_readable_sizes = False
|
22
|
+
list_md5 = False
|
23
|
+
preserve_attrs = True
|
24
|
+
progress_meter = True
|
25
|
+
proxy_host =
|
26
|
+
proxy_port = 0
|
27
|
+
recursive = False
|
28
|
+
recv_chunk = 4096
|
29
|
+
secret_key = def
|
30
|
+
send_chunk = 4096
|
31
|
+
simpledb_host = sdb.amazonaws.com
|
32
|
+
skip_existing = False
|
33
|
+
use_https = False
|
34
|
+
verbosity = WARNING
|
@@ -0,0 +1,65 @@
|
|
1
|
+
require 'test/test_helper'
require 'fileutils'
#require 'fakes3/server'
require 'right_aws'

# Integration tests that drive the fake S3 server through the right_aws
# client. NOTE(review): these assume a server is already listening on
# localhost:10453, and later tests appear to reuse the "s3media" bucket
# created by test_create_bucket — test ordering matters. TODO confirm.
class RightAWSCommandsTest < Test::Unit::TestCase

  def setup
    # Credentials are arbitrary; the fake server does not validate them.
    # :no_subdomains forces path-style addressing (bucket in the path,
    # not the hostname), which a localhost server needs.
    @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
                                    {:multi_thread => false, :server => 'localhost',
                                     :port => 10453, :protocol => 'http',:logger => Logger.new("/dev/null"),:no_subdomains => true })
  end

  def teardown
  end

  def test_create_bucket
    bucket = @s3.create_bucket("s3media")
    assert_not_nil bucket
  end

  # Round-trips a small object: put then get it back verbatim.
  def test_store
    @s3.put("s3media","helloworld","Hello World Man!")
    obj = @s3.get("s3media","helloworld")
    assert_equal "Hello World Man!",obj[:object]

    obj = @s3.get("s3media","helloworld")
  end

  # Streams a ~1MB object back in chunks and checks the total size.
  def test_large_store
    @s3.put("s3media","helloworld","Hello World Man!")
    buffer = ""
    500000.times do
      buffer << "#{(rand * 100).to_i}"
    end

    buf_len = buffer.length
    @s3.put("s3media","big",buffer)

    output = ""
    @s3.get("s3media","big") do |chunk|
      output << chunk
    end
    assert_equal buf_len,output.size
  end

  # Keys containing slashes must work like nested paths.
  def test_multi_directory
    @s3.put("s3media","dir/right/123.txt","recursive")
    output = ""
    obj = @s3.get("s3media","dir/right/123.txt") do |chunk|
      output << chunk
    end

    assert_equal "recursive", output
  end

  # Server-side copy within one bucket preserves the object body.
  def test_intra_bucket_copy
    @s3.put("s3media","original.txt","Hello World")
    @s3.copy("s3media","original.txt","s3media","copy.txt")
    obj = @s3.get("s3media","copy.txt")

    assert_equal "Hello World",obj[:object]
  end

end
|
@@ -0,0 +1,166 @@
|
|
1
|
+
require 'test/test_helper'
require 'fileutils'
#require 'fakes3/server'
require 'aws/s3'

# Integration tests that drive the fake S3 server through the aws-s3
# client. NOTE(review): assumes a server is already listening on
# localhost:10453 — confirm before running.
class S3CommandsTest < Test::Unit::TestCase
  include AWS::S3

  def setup
    # Credentials are arbitrary; the fake server does not validate them.
    AWS::S3::Base.establish_connection!(:access_key_id => "123",
                                        :secret_access_key => "abc",
                                        :server => "localhost",
                                        :port => "10453" )
  end

  def teardown
    AWS::S3::Base.disconnect!
  end

  def test_create_bucket
    bucket = Bucket.create("ruby_aws_s3")
    assert_not_nil bucket

    bucket_names = []
    # Block param renamed from `bucket` — it shadowed the outer local.
    Service.buckets.each do |b|
      bucket_names << b.name
    end
    assert(bucket_names.index("ruby_aws_s3") >= 0)
  end

  def test_destroy_bucket
    Bucket.create("deletebucket")
    Bucket.delete("deletebucket")

    # BUGFIX: the original used begin/rescue with `assert_fail`, which is
    # not a Test::Unit assertion — it raised NoMethodError, which the bare
    # rescue then swallowed, so the test could never fail. Use
    # assert_raise, consistent with the rest of this file.
    assert_raise AWS::S3::NoSuchBucket do
      Bucket.find("deletebucket")
    end
  end

  # Round-trips a small object via the streaming API.
  def test_store
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("hello","world","ruby_aws_s3")

    output = ""
    obj = S3Object.stream("hello","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal "world", output
  end

  # Streams a ~1MB object back in chunks and checks the total size.
  def test_large_store
    bucket = Bucket.create("ruby_aws_s3")
    buffer = ""
    500000.times do
      buffer << "#{(rand * 100).to_i}"
    end

    buf_len = buffer.length
    S3Object.store("big",buffer,"ruby_aws_s3")

    output = ""
    S3Object.stream("big","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal buf_len,output.size
  end

  # Keys containing slashes must work like nested paths.
  def test_multi_directory
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("dir/myfile/123.txt","recursive","ruby_aws_s3")

    output = ""
    obj = S3Object.stream("dir/myfile/123.txt","ruby_aws_s3") do |chunk|
      output << chunk
    end
    assert_equal "recursive", output
  end

  def test_find_nil_bucket
    # BUGFIX: same `assert_fail` problem as test_destroy_bucket —
    # replaced with assert_raise, which also checks the exception class.
    assert_raise AWS::S3::NoSuchBucket do
      Bucket.find("unknown")
    end
  end

  def test_find_object
    bucket = Bucket.create('find_bucket')
    obj_name = 'short'
    S3Object.store(obj_name,'short_text','find_bucket')
    short = S3Object.find(obj_name,"find_bucket")
    assert_not_nil(short)
    assert_equal(short.value,'short_text')
  end

  def test_find_non_existent_object
    bucket = Bucket.create('find_bucket')
    obj_name = 'doesnotexist'
    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find(obj_name,"find_bucket")
    end

    # Try something higher in the alphabet
    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("zzz","find_bucket")
    end
  end

  def test_exists?
    bucket = Bucket.create('ruby_aws_s3')
    obj_name = 'dir/myfile/exists.txt'
    S3Object.store(obj_name,'exists','ruby_aws_s3')
    assert S3Object.exists?(obj_name, 'ruby_aws_s3')
    assert !S3Object.exists?('dir/myfile/doesnotexist.txt','ruby_aws_s3')
  end

  def test_delete
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("something_to_delete","asdf","ruby_aws_s3")
    something = S3Object.find("something_to_delete","ruby_aws_s3")
    S3Object.delete("something_to_delete","ruby_aws_s3")

    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("something_to_delete","ruby_aws_s3")
    end
  end

  def test_rename
    bucket = Bucket.create("ruby_aws_s3")
    S3Object.store("something_to_rename","asdf","ruby_aws_s3")
    S3Object.rename("something_to_rename","renamed","ruby_aws_s3")

    renamed = S3Object.find("renamed","ruby_aws_s3")
    assert_not_nil(renamed)
    assert_equal(renamed.value,'asdf')

    assert_raise AWS::S3::NoSuchKey do
      should_throw = S3Object.find("something_to_rename","ruby_aws_s3")
    end
  end

  # 26 letters * 51 suffixes = 1326 keys stored; the listing below
  # expects 1000, which is the S3 default max-keys page size.
  def test_larger_lists
    Bucket.create("ruby_aws_s3_many")
    (0..50).each do |i|
      ('a'..'z').each do |letter|
        name = "#{letter}#{i}"
        S3Object.store(name,"asdf","ruby_aws_s3_many")
      end
    end

    bucket = Bucket.find("ruby_aws_s3_many")
    assert_equal(bucket.size,1000)
    assert_equal(bucket.objects.first.key,"a0")
  end


  # Copying an object
  #S3Object.copy 'headshot.jpg', 'headshot2.jpg', 'photos'

  # Renaming an object
  #S3Object.rename 'headshot.jpg', 'portrait.jpg', 'photos'

end
|
data/test/s3cmd_test.rb
ADDED
@@ -0,0 +1,52 @@
|
|
1
|
+
require 'test/test_helper'
require 'fileutils'

# Integration tests that drive the fake S3 server through the external
# `s3cmd` command-line client, which must be installed and on the PATH.
# NOTE(review): assumes a server is listening on the host/port named in
# test/local_s3_cfg (localhost:10453) — confirm before running.
class S3CmdTest < Test::Unit::TestCase
  def setup
    config = File.expand_path(File.join(File.dirname(__FILE__),'local_s3_cfg'))
    raise "Please install s3cmd" if `which s3cmd`.empty?
    @s3cmd = "s3cmd --config #{config}"
  end

  def teardown
  end

  def test_create_bucket
    `#{@s3cmd} mb s3://s3cmd_bucket`
    output = `#{@s3cmd} ls`
    assert_match(/s3cmd_bucket/,output)
  end

  def test_store
    # FileUtils.cp replaces the manual open/read/write copy loop.
    FileUtils.cp(__FILE__, "/tmp/fakes3_upload")
    output = `#{@s3cmd} put /tmp/fakes3_upload s3://s3cmd_bucket/upload`
    assert_match(/stored/,output)
  ensure
    # BUGFIX: was only removed after a passing assertion, leaking the
    # temp file on failure; ensure it is removed either way.
    FileUtils.rm_f("/tmp/fakes3_upload")
  end

  def test_acl
    FileUtils.cp(__FILE__, "/tmp/fakes3_acl_upload")
    output = `#{@s3cmd} put /tmp/fakes3_acl_upload s3://s3cmd_bucket/acl_upload`
    assert_match(/stored/,output)

    output = `#{@s3cmd} --force setacl -P s3://s3cmd_bucket/acl_upload`
  ensure
    # BUGFIX: this temp file was never cleaned up at all.
    FileUtils.rm_f("/tmp/fakes3_acl_upload")
  end

  def test_large_store
  end

  def test_multi_directory
  end

  def test_intra_bucket_copy
  end
end
|
data/test/test_helper.rb
ADDED