fakes3-ruby18 0.2.1

@@ -0,0 +1,34 @@
+ [default]
+ access_key = abc
+ acl_public = False
+ bucket_location = US
+ cloudfront_host = cloudfront.amazonaws.com
+ cloudfront_resource = /2008-06-30/distribution
+ default_mime_type = binary/octet-stream
+ delete_removed = False
+ dry_run = False
+ encoding = UTF-8
+ encrypt = False
+ force = False
+ get_continue = False
+ gpg_command = None
+ gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+ gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+ gpg_passphrase =
+ guess_mime_type = True
+ host_base = localhost:10453
+ host_bucket = %(bucket)s.localhost:10453
+ human_readable_sizes = False
+ list_md5 = False
+ preserve_attrs = True
+ progress_meter = True
+ proxy_host =
+ proxy_port = 0
+ recursive = False
+ recv_chunk = 4096
+ secret_key = def
+ send_chunk = 4096
+ simpledb_host = sdb.amazonaws.com
+ skip_existing = False
+ use_https = False
+ verbosity = WARNING
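
The hunk above is an s3cmd configuration aimed at a FakeS3 instance on localhost:10453 (see host_base and host_bucket); access_key and secret_key are throwaway values, which FakeS3 does not validate. As a rough usage sketch, assuming this fork keeps the upstream fakes3 CLI flags (-r for the storage root, -p for the port) and a stock s3cmd install, the paths and bucket name below are only placeholders:

    fakes3 -r /tmp/fakes3_root -p 10453 &
    s3cmd -c path/to/this/config mb s3://s3media
    s3cmd -c path/to/this/config put somefile.txt s3://s3media/
    s3cmd -c path/to/this/config ls s3://s3media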
@@ -0,0 +1,54 @@
+ require 'test/test_helper'
+ require 'rest-client'
+
+ class PostTest < Test::Unit::TestCase
+   # Make sure you have a posttest.localhost entry in your /etc/hosts
+   def setup
+     @url='http://posttest.localhost:10453/'
+   end
+
+   def teardown
+   end
+
+   def test_options
+     res = RestClient.options(@url) { |response|
+       assert_equal(response.headers[:access_control_allow_origin],"*")
+     }
+   end
+
+   def test_redirect
+     res = RestClient.post(
+       @url,
+       'key'=>'uploads/12345/${filename}',
+       'success_action_redirect'=>'http://somewhere.else.com/',
+       'file'=>File.new(__FILE__,"rb")
+     ) { |response|
+       assert_equal(response.code, 307)
+       assert_equal(response.headers[:location], 'http://somewhere.else.com/')
+     }
+   end
+
+   def test_status_200
+     res = RestClient.post(
+       @url,
+       'key'=>'uploads/12345/${filename}',
+       'success_action_status'=>'200',
+       'file'=>File.new(__FILE__,"rb")
+     ) { |response|
+       assert_equal(response.code, 200)
+     }
+   end
+
+   def test_status_201
+     res = RestClient.post(
+       @url,
+       'key'=>'uploads/12345/${filename}',
+       'success_action_status'=>'201',
+       'file'=>File.new(__FILE__,"rb")
+     ) { |response|
+       assert_equal(response.code, 201)
+       assert_match(%r{^\<\?xml.*uploads/12345/post_test\.rb}m, response.body)
+     }
+   end
+
+ end
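
As the comment in setup notes, this POST test expects posttest.localhost to resolve to the machine running FakeS3, presumably so the server can take the bucket name from the Host header (virtual-host-style addressing). A typical /etc/hosts entry for a local run would be:

    127.0.0.1   posttest.localhost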
@@ -0,0 +1,192 @@
+ require 'test/test_helper'
+ require 'fileutils'
+ #require 'fakes3/server'
+ require 'right_aws'
+ require 'time'
+
+ class RightAWSCommandsTest < Test::Unit::TestCase
+
+   def setup
+     @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
+       {:multi_thread => false, :server => 'localhost',
+        :port => 10453, :protocol => 'http',:logger => Logger.new("/dev/null"),:no_subdomains => true })
+   end
+
+   def teardown
+   end
+
+   def test_create_bucket
+     bucket = @s3.create_bucket("s3media")
+     assert_not_nil bucket
+   end
+
+   def test_store
+     @s3.put("s3media","helloworld","Hello World Man!")
+     obj = @s3.get("s3media","helloworld")
+     assert_equal "Hello World Man!",obj[:object]
+
+     obj = @s3.get("s3media","helloworld")
+   end
+
+   def test_store_not_found
+     begin
+       obj = @s3.get("s3media","helloworldnotexist")
+     rescue RightAws::AwsError
+       assert $!.message.include?('NoSuchKey')
+     rescue
+       fail 'Should have caught NoSuchKey Exception'
+     end
+   end
+
+   def test_large_store
+     @s3.put("s3media","helloworld","Hello World Man!")
+     buffer = ""
+     500000.times do
+       buffer << "#{(rand * 100).to_i}"
+     end
+
+     buf_len = buffer.length
+     @s3.put("s3media","big",buffer)
+
+     output = ""
+     @s3.get("s3media","big") do |chunk|
+       output << chunk
+     end
+     assert_equal buf_len,output.size
+   end
+
+   # Test that GET requests with a delimiter return a list of common prefixes
+   def test_list_by_delimiter
+     @s3.create_bucket("s3media")
+
+     @s3.put("s3media", "delimited/item", "item")
+
+     expected_prefixes = []
+     (1..50).each do |i|
+       key_prefix = "delimited/%02d/" % i
+       @s3.put("s3media", key_prefix + "foo", "foo")
+       @s3.put("s3media", key_prefix + "fie", "fie")
+       expected_prefixes << key_prefix
+     end
+
+     key_names = []
+     common_prefixes = []
+     @s3.incrementally_list_bucket("s3media", {:prefix => "delimited", :delimiter => '/'}) do |currentResponse|
+       common_prefixes += currentResponse[:common_prefixes]
+     end
+     assert_equal ["delimited/"], common_prefixes
+
+     common_prefixes = []
+     @s3.incrementally_list_bucket("s3media", {:prefix => "delimited/", :delimiter => '/', "max-keys" => 5}) do |currentResponse|
+       key_names += currentResponse[:contents].map do |key|
+         key[:key]
+       end
+       common_prefixes += currentResponse[:common_prefixes]
+     end
+     assert_equal expected_prefixes, common_prefixes
+     assert_equal ["delimited/item"], key_names
+   end
+
+   def test_multi_directory
+     @s3.put("s3media","dir/right/123.txt","recursive")
+     output = ""
+     obj = @s3.get("s3media","dir/right/123.txt") do |chunk|
+       output << chunk
+     end
+     assert_equal "recursive", output
+   end
+
+   def test_intra_bucket_copy
+     @s3.put("s3media","original.txt","Hello World")
+     @s3.copy("s3media","original.txt","s3media","copy.txt")
+     obj = @s3.get("s3media","copy.txt")
+     assert_equal "Hello World",obj[:object]
+   end
+
+   def test_copy_in_place
+     @s3.put("s3media","foo","Hello World")
+     @s3.copy("s3media","foo","s3media","foo")
+     obj = @s3.get("s3media","foo")
+     assert_equal "Hello World",obj[:object]
+   end
+
+   def test_copy_replace_metadata
+     @s3.put("s3media","foo","Hello World",{"content-type"=>"application/octet-stream"})
+     obj = @s3.get("s3media","foo")
+     assert_equal "Hello World",obj[:object]
+     assert_equal "application/octet-stream",obj[:headers]["content-type"]
+     @s3.copy("s3media","foo","s3media","foo",:replace,{"content-type"=>"text/plain"})
+     obj = @s3.get("s3media","foo")
+     assert_equal "Hello World",obj[:object]
+     assert_equal "text/plain",obj[:headers]["content-type"]
+   end
+
+   def test_larger_lists
+     @s3.create_bucket('right_aws_many')
+     (0..50).each do |i|
+       ('a'..'z').each do |letter|
+         name = "#{letter}#{i}"
+         @s3.put('right_aws_many', name, 'asdf')
+       end
+     end
+
+     keys = @s3.list_bucket('right_aws_many')
+     assert_equal(1000, keys.size)
+     assert_equal('a0', keys.first[:key])
+   end
+
+   def test_destroy_bucket
+     @s3.create_bucket('deletebucket')
+     @s3.delete_bucket('deletebucket')
+
+     begin
+       bucket = @s3.list_bucket('deletebucket')
+       fail("Shouldn't succeed here")
+     rescue RightAws::AwsError
+       assert $!.message.include?('NoSuchBucket')
+     rescue
+       fail 'Should have caught NoSuchBucket Exception'
+     end
+
+   end
+
+   def test_if_none_match
+     @s3.put("s3media","if_none_match_test","Hello World 1!")
+     obj = @s3.get("s3media","if_none_match_test")
+     tag = obj[:headers]["etag"]
+     begin
+       @s3.get("s3media", "if_none_match_test", {"If-None-Match"=>tag})
+     rescue URI::InvalidURIError
+       # expected error for 304
+     else
+       fail 'Should have encountered an error due to the server not returning a response due to caching'
+     end
+     @s3.put("s3media","if_none_match_test","Hello World 2!")
+     obj = @s3.get("s3media", "if_none_match_test", {"If-None-Match"=>tag})
+     assert_equal "Hello World 2!",obj[:object]
+   end
+
+   def test_if_modified_since
+     @s3.put("s3media","if_modified_since_test","Hello World 1!")
+     obj = @s3.get("s3media","if_modified_since_test")
+     modified = obj[:headers]["last-modified"]
+     begin
+       @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since"=>modified})
+     rescue URI::InvalidURIError
+       # expected error for 304
+     else
+       fail 'Should have encountered an error due to the server not returning a response due to caching'
+     end
+     # Granularity of an HTTP Date is 1 second which isn't enough for the test
+     # so manually rewind the clock by a second
+     timeInThePast = Time.httpdate(modified) - 1
+     begin
+       obj = @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since"=>timeInThePast.httpdate()})
+     rescue
+       fail 'Should have been downloaded since the date is in the past now'
+     else
+       #expected scenario
+     end
+   end
+
+ end
@@ -0,0 +1,209 @@
+ require 'test/test_helper'
+ require 'fileutils'
+ #require 'fakes3/server'
+ require 'aws/s3'
+
+ class S3CommandsTest < Test::Unit::TestCase
+   include AWS::S3
+
+   def setup
+     AWS::S3::Base.establish_connection!(:access_key_id => "123",
+       :secret_access_key => "abc",
+       :server => "localhost",
+       :port => "10453" )
+   end
+
+   def teardown
+     AWS::S3::Base.disconnect!
+   end
+
+   def test_create_bucket
+     bucket = Bucket.create("ruby_aws_s3")
+     assert_not_nil bucket
+
+     bucket_names = []
+     Service.buckets.each do |bucket|
+       bucket_names << bucket.name
+     end
+     assert(bucket_names.index("ruby_aws_s3") >= 0)
+   end
+
+   def test_destroy_bucket
+     Bucket.create("deletebucket")
+     Bucket.delete("deletebucket")
+
+     begin
+       bucket = Bucket.find("deletebucket")
+ assert_fail("Shouldn't succeed here")
+     rescue
+     end
+   end
+
+   def test_store
+     bucket = Bucket.create("ruby_aws_s3")
+     S3Object.store("hello","world","ruby_aws_s3")
+
+     output = ""
+     obj = S3Object.stream("hello","ruby_aws_s3") do |chunk|
+       output << chunk
+     end
+     assert_equal "world", output
+   end
+
+   def test_large_store
+     bucket = Bucket.create("ruby_aws_s3")
+     buffer = ""
+     500000.times do
+       buffer << "#{(rand * 100).to_i}"
+     end
+
+     buf_len = buffer.length
+     S3Object.store("big",buffer,"ruby_aws_s3")
+
+     output = ""
+     S3Object.stream("big","ruby_aws_s3") do |chunk|
+       output << chunk
+     end
+     assert_equal buf_len,output.size
+   end
+
+   def test_metadata_store
+     assert_equal true, Bucket.create("ruby_aws_s3")
+     bucket = Bucket.find("ruby_aws_s3")
+
+     # Note well: we can't seem to access obj.metadata until we've stored
+     # the object and found it again. Thus the store, find, store
+     # runaround below.
+     obj = bucket.new_object(:value => "foo")
+     obj.key = "key_with_metadata"
+     obj.store
+     obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+     obj.metadata[:param1] = "one"
+     obj.metadata[:param2] = "two, three"
+     obj.store
+     obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+
+     assert_equal "one", obj.metadata[:param1]
+     assert_equal "two, three", obj.metadata[:param2]
+   end
+
+   def test_metadata_copy
+     assert_equal true, Bucket.create("ruby_aws_s3")
+     bucket = Bucket.find("ruby_aws_s3")
+
+     # Note well: we can't seem to access obj.metadata until we've stored
+     # the object and found it again. Thus the store, find, store
+     # runaround below.
+     obj = bucket.new_object(:value => "foo")
+     obj.key = "key_with_metadata"
+     obj.store
+     obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+     obj.metadata[:param1] = "one"
+     obj.metadata[:param2] = "two, three"
+     obj.store
+
+     S3Object.copy("key_with_metadata", "key_with_metadata2", "ruby_aws_s3")
+     obj = S3Object.find("key_with_metadata2", "ruby_aws_s3")
+
+     assert_equal "one", obj.metadata[:param1]
+     assert_equal "two, three", obj.metadata[:param2]
+   end
+
+
+   def test_multi_directory
+     bucket = Bucket.create("ruby_aws_s3")
+     S3Object.store("dir/myfile/123.txt","recursive","ruby_aws_s3")
+
+     output = ""
+     obj = S3Object.stream("dir/myfile/123.txt","ruby_aws_s3") do |chunk|
+       output << chunk
+     end
+     assert_equal "recursive", output
+   end
+
+   def test_find_nil_bucket
+     begin
+       bucket = Bucket.find("unknown")
+ assert_fail "Bucket.find didn't throw an exception"
+     rescue
+       assert_equal AWS::S3::NoSuchBucket,$!.class
+     end
+   end
+
+   def test_find_object
+     bucket = Bucket.create('find_bucket')
+     obj_name = 'short'
+     S3Object.store(obj_name,'short_text','find_bucket')
+     short = S3Object.find(obj_name,"find_bucket")
+     assert_not_nil(short)
+     assert_equal(short.value,'short_text')
+   end
+
+   def test_find_non_existent_object
+     bucket = Bucket.create('find_bucket')
+     obj_name = 'doesnotexist'
+     assert_raise AWS::S3::NoSuchKey do
+       should_throw = S3Object.find(obj_name,"find_bucket")
+     end
+
+     # Try something higher in the alphabet
+     assert_raise AWS::S3::NoSuchKey do
+       should_throw = S3Object.find("zzz","find_bucket")
+     end
+   end
+
+   def test_exists?
+     bucket = Bucket.create('ruby_aws_s3')
+     obj_name = 'dir/myfile/exists.txt'
+     S3Object.store(obj_name,'exists','ruby_aws_s3')
+     assert S3Object.exists?(obj_name, 'ruby_aws_s3')
+     assert !S3Object.exists?('dir/myfile/doesnotexist.txt','ruby_aws_s3')
+   end
+
+   def test_delete
+     bucket = Bucket.create("ruby_aws_s3")
+     S3Object.store("something_to_delete","asdf","ruby_aws_s3")
+     something = S3Object.find("something_to_delete","ruby_aws_s3")
+     S3Object.delete("something_to_delete","ruby_aws_s3")
+
+     assert_raise AWS::S3::NoSuchKey do
+       should_throw = S3Object.find("something_to_delete","ruby_aws_s3")
+     end
+   end
+
+   def test_rename
+     bucket = Bucket.create("ruby_aws_s3")
+     S3Object.store("something_to_rename","asdf","ruby_aws_s3")
+     S3Object.rename("something_to_rename","renamed","ruby_aws_s3")
+
+     renamed = S3Object.find("renamed","ruby_aws_s3")
+     assert_not_nil(renamed)
+     assert_equal(renamed.value,'asdf')
+
+     assert_raise AWS::S3::NoSuchKey do
+       should_throw = S3Object.find("something_to_rename","ruby_aws_s3")
+     end
+   end
+
+   def test_larger_lists
+     Bucket.create("ruby_aws_s3_many")
+     (0..50).each do |i|
+       ('a'..'z').each do |letter|
+         name = "#{letter}#{i}"
+         S3Object.store(name,"asdf","ruby_aws_s3_many")
+       end
+     end
+
+     bucket = Bucket.find("ruby_aws_s3_many")
+     assert_equal(bucket.size,1000)
+     assert_equal(bucket.objects.first.key,"a0")
+   end
+
+
+   # Copying an object
+   #S3Object.copy 'headshot.jpg', 'headshot2.jpg', 'photos'
+
+   # Renaming an object
+   #S3Object.rename 'headshot.jpg', 'portrait.jpg', 'photos'
+
+ end