fakes3test10 1.2.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,65 @@
1
+ require 'test/test_helper'
2
+ require 'aws-sdk'
3
+
4
# Exercises FakeS3 through the aws-sdk v2 client/resource API against a
# local server on port 10453.
class AwsSdkV2CommandsTest < Test::Unit::TestCase
  def setup
    # FakeS3 does not validate credentials; any values work.
    @creds = Aws::Credentials.new('123', 'abc')
    @s3 = Aws::S3::Client.new(credentials: @creds, region: 'us-east-1', endpoint: 'http://localhost:10453/')
    @resource = Aws::S3::Resource.new(client: @s3)
    @bucket = @resource.create_bucket(bucket: 'v2_bucket')

    # Delete all objects to avoid sharing state between tests
    @bucket.objects.each(&:delete)
  end

  def test_create_bucket
    bucket = @resource.create_bucket(bucket: 'v2_create_bucket')
    assert_not_nil bucket

    bucket_names = @resource.buckets.map(&:name)
    # include? fails cleanly; the original `index(...) >= 0` raised
    # NoMethodError on nil when the bucket was missing.
    assert(bucket_names.include?("v2_create_bucket"))
  end

  def test_destroy_bucket
    @bucket.delete

    # head_bucket on a deleted bucket must raise. The original bare
    # begin/rescue also swallowed its own failure assertion, so the
    # test could never fail; assert_raise pins the expected outcome.
    # NOTE(review): aws-sdk v2 maps a bodiless 404 from HEAD to
    # Aws::S3::Errors::NotFound — confirm against the SDK version in use.
    assert_raise Aws::S3::Errors::NotFound do
      @s3.head_bucket(bucket: 'v2_bucket')
    end
  end

  def test_create_object
    object = @bucket.object('key')
    object.put(body: 'test')

    assert_equal 'test', object.get.body.string
  end

  def test_delete_object
    object = @bucket.object('exists')
    object.put(body: 'test')

    assert_equal 'test', object.get.body.string

    object.delete

    assert_raise Aws::S3::Errors::NoSuchKey do
      object.get
    end
  end

  # TODO - get this test working
  #
  #def test_copy_object
  #  object = @bucket.object("key_one")
  #  object.put(body: 'asdf')

  #  # TODO: explore why 'key1' won't work but 'key_one' will
  #  object2 = @bucket.object('key_two')
  #  object2.copy_from(copy_source: 'testing_copy/key_one')

  #  assert_equal 2, @bucket.objects.count
  #end
end
@@ -0,0 +1,25 @@
1
+ require 'test/test_helper'
2
+ require 'fileutils'
3
+
4
# Drives the botocmd.py helper script (boto-based CLI) against a local
# FakeS3 server and checks its stdout.
class BotoTest < Test::Unit::TestCase
  def setup
    cmdpath = File.expand_path(File.join(File.dirname(__FILE__), 'botocmd.py'))
    @botocmd = "python #{cmdpath} -t localhost -p 10453"
  end

  def teardown
  end

  def test_store
    upload = "/tmp/fakes3_upload"
    # Copy this test file to a scratch path for the CLI to upload
    # (replaces the original's manual binary read/write loop).
    FileUtils.cp(__FILE__, upload)
    output = `#{@botocmd} put #{upload} s3://s3cmd_bucket/upload`
    assert_match(/stored/, output)
  ensure
    # Always remove the scratch file — the original leaked it whenever
    # the assertion failed, because cleanup ran after the assert.
    FileUtils.rm_f(upload)
  end

end
@@ -0,0 +1,87 @@
1
+ #!/usr/bin/python
2
+ # -*- coding: utf-8 -*-
3
+ # fakes3cmd.py -- an s3cmd-like script that accepts a custom host and portname
4
+ import re
5
+ import os
6
+ from optparse import OptionParser
7
+
8
+ try:
9
+ from boto.s3.connection import S3Connection, OrdinaryCallingFormat
10
+ from boto.s3.key import Key
11
+ except ImportError:
12
+ raise Exception('You must install the boto package for python')
13
+
14
+
15
class FakeS3Cmd(object):
    """Minimal s3cmd-like client backed by boto, pointed at a custom
    host/port (intended for a local FakeS3 server)."""

    COMMANDS = ['mb', 'rb', 'put', ]

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.conn = None
        self._connect()

    def _connect(self):
        # print() call form is valid on both Python 2 and 3; the
        # original used py2-only print statements.
        print('Connecting: %s:%s' % (self.host, self.port))
        self.conn = S3Connection(is_secure=False,
                                 calling_format=OrdinaryCallingFormat(),
                                 aws_access_key_id='',
                                 aws_secret_access_key='',
                                 port=self.port, host=self.host)

    @staticmethod
    def _parse_uri(path):
        """Split an s3://bucket[/key] URI into (bucket, key).

        key is None when the URI has no key part. Raises ValueError for
        non-s3 URIs (the original crashed with AttributeError on None).
        """
        match = re.match(r's3://([^/]+)(?:/(.*))?', path, re.I)
        if match is None:
            raise ValueError('not an s3:// uri: %s' % path)
        return match.groups()

    def mb(self, path, *args):
        """Make a bucket named by the given s3:// URI."""
        if not self.conn:
            self._connect()

        bucket, _ = self._parse_uri(path)
        self.conn.create_bucket(bucket)
        print('made bucket: [%s]' % bucket)

    def rb(self, path, *args):
        """Remove the bucket named by the given s3:// URI."""
        if not self.conn:
            self._connect()

        bucket, _ = self._parse_uri(path)
        self.conn.delete_bucket(bucket)
        print('removed bucket: [%s]' % bucket)

    def put(self, *args):
        """Upload files: put <file> [<file> ...] s3://bucket[/prefix]"""
        if not self.conn:
            self._connect()

        args = list(args)
        path = args.pop()
        bucket_name, prefix = self._parse_uri(path)
        bucket = self.conn.create_bucket(bucket_name)
        for src_file in args:
            key = Key(bucket)
            # prefix is None when the URI has no key part; guard it so
            # os.path.join does not raise TypeError.
            key.key = os.path.join(prefix or '', os.path.basename(src_file))
            key.set_contents_from_filename(src_file)
            print('stored: [%s]' % key.key)
67
+
68
+
69
if __name__ == "__main__":
    # check for options. TODO: This requires a more verbose help message
    # to explain how the positional arguments work.
    parser = OptionParser()
    parser.add_option("-t", "--host", type="string", default='localhost')
    parser.add_option("-p", "--port", type='int', default=80)
    opts, positional = parser.parse_args()

    if len(positional) < 2:
        raise ValueError('you must minimally supply a desired command and s3 uri')

    command = positional.pop(0)

    if command not in FakeS3Cmd.COMMANDS:
        raise ValueError('%s is not a valid command' % command)

    client = FakeS3Cmd(opts.host, opts.port)
    # Dispatch the remaining positional arguments to the chosen command.
    getattr(client, command)(*positional)
@@ -0,0 +1,18 @@
1
+ require 'test/test_helper'
2
+ require 'test/minitest_helper'
3
+ require 'fakes3/cli'
4
+
5
+
6
# Verifies the FakeS3 command-line interface without actually serving.
class CLITest < Test::Unit::TestCase
  def setup
    super
    # Stub the serve loop so invoking the server command returns
    # immediately instead of blocking on a real socket.
    FakeS3::Server.any_instance.stubs(:serve)
  end

  # With :quiet enabled, starting the server should emit no output.
  def test_quiet_mode
    cli = FakeS3::CLI.new([], :root => '.', :port => 4567, :quiet => true)
    assert_output('') do
      cli.invoke(:server)
    end
  end
end
@@ -0,0 +1,34 @@
1
+ [default]
2
+ access_key = abc
3
+ acl_public = False
4
+ bucket_location = US
5
+ cloudfront_host = cloudfront.amazonaws.com
6
+ cloudfront_resource = /2008-06-30/distribution
7
+ default_mime_type = binary/octet-stream
8
+ delete_removed = False
9
+ dry_run = False
10
+ encoding = UTF-8
11
+ encrypt = False
12
+ force = False
13
+ get_continue = False
14
+ gpg_command = None
15
+ gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
16
+ gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
17
+ gpg_passphrase =
18
+ guess_mime_type = True
19
+ host_base = localhost:10453
20
+ host_bucket = %(bucket)s.localhost:10453
21
+ human_readable_sizes = False
22
+ list_md5 = False
23
+ preserve_attrs = True
24
+ progress_meter = True
25
+ proxy_host =
26
+ proxy_port = 0
27
+ recursive = False
28
+ recv_chunk = 4096
29
+ secret_key = def
30
+ send_chunk = 4096
31
+ simpledb_host = sdb.amazonaws.com
32
+ skip_existing = False
33
+ use_https = False
34
+ verbosity = WARNING
@@ -0,0 +1,46 @@
1
+ # LICENSE:
2
+ #
3
+ # (The MIT License)
4
+ #
5
+ # Copyright © Ryan Davis, seattle.rb
6
+ #
7
+ # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
8
+ #
9
+ # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
10
+ #
11
+ # THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
12
+
13
+ # The following is from minitest:
14
+ # TODO - decide whether to switch to minitest or what to do about these:
15
+
16
# Runs the given block with $stdout and $stderr swapped for StringIO
# buffers, restoring the real streams afterwards (even on error).
# Returns the pair of captured strings: [stdout_text, stderr_text].
def capture_io
  require 'stringio'

  out_buffer = StringIO.new
  err_buffer = StringIO.new

  saved_out = $stdout
  saved_err = $stderr
  $stdout = out_buffer
  $stderr = err_buffer

  begin
    yield
  ensure
    # Restore the real streams no matter how the block exits.
    $stdout = saved_out
    $stderr = saved_err
  end

  return out_buffer.string, err_buffer.string
end
33
+
34
# Asserts on the block's captured output. Each of +stdout+/+stderr+ may
# be a String (checked with assert_equal) or a Regexp (checked with
# assert_match); pass nil to skip that stream. Returns a truthy value
# when every requested check ran (failed checks raise via the
# underlying assertion).
def assert_output stdout = nil, stderr = nil
  out, err = capture_io { yield }

  # Pick the assertion method per stream based on the matcher's type.
  err_check = (Regexp === stderr ? :assert_match : :assert_equal) if stderr
  out_check = (Regexp === stdout ? :assert_match : :assert_equal) if stdout

  err_ok = send err_check, stderr, err, "In stderr" if err_check
  out_ok = send out_check, stdout, out, "In stdout" if out_check

  (!stdout || out_ok) && (!stderr || err_ok)
end
@@ -0,0 +1,58 @@
1
+ require 'test/test_helper'
2
+ require 'rest-client'
3
+
4
# HTML-form POST upload tests against a local FakeS3 server.
class PostTest < Test::Unit::TestCase
  # Make sure you have a posttest.localhost in your /etc/hosts/
  def setup
    @url = 'http://posttest.localhost:10453/'
  end

  def teardown
  end

  def test_options
    RestClient.options(@url) do |response|
      # assert_equal takes (expected, actual); the original had every
      # call reversed, producing misleading failure messages.
      assert_equal(200, response.code)
      assert_equal("*", response.headers[:access_control_allow_origin])
      assert_equal("PUT, POST, HEAD, GET, OPTIONS", response.headers[:access_control_allow_methods])
      assert_equal("Accept, Content-Type, Authorization, Content-Length, ETag, X-CSRF-Token, Content-Disposition", response.headers[:access_control_allow_headers])
      assert_equal("ETag", response.headers[:access_control_expose_headers])
    end
  end

  def test_redirect
    RestClient.post(
      @url,
      'key' => 'uploads/12345/$(unknown)',
      'success_action_redirect' => 'http://somewhere.else.com/?foo=bar',
      'file' => File.new(__FILE__, "rb")
    ) { |response|
      assert_equal(303, response.code)
      assert_equal('http://somewhere.else.com/?foo=bar&bucket=posttest&key=uploads%2F12345%2Fpost_test.rb', response.headers[:location])
    }
  end

  def test_status_200
    RestClient.post(
      @url,
      'key' => 'uploads/12345/$(unknown)',
      'success_action_status' => '200',
      'file' => File.new(__FILE__, "rb")
    ) { |response|
      assert_equal(200, response.code)
    }
  end

  def test_status_201
    RestClient.post(
      @url,
      'key' => 'uploads/12345/$(unknown)',
      'success_action_status' => '201',
      'file' => File.new(__FILE__, "rb")
    ) { |response|
      assert_equal(201, response.code)
      assert_match(%r{^\<\?xml.*uploads/12345/post_test\.rb}m, response.body)
    }
  end

end
@@ -0,0 +1,219 @@
1
+ require 'test/test_helper'
2
+ require 'fileutils'
3
+ require 'right_aws'
4
+ require 'time'
5
+
6
# Exercises FakeS3 through the right_aws S3Interface client against a
# local server on port 10453.
class RightAWSCommandsTest < Test::Unit::TestCase

  def setup
    # Dummy credentials; FakeS3 does not validate them.
    @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX', 'hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
                                    {:multi_thread => false, :server => 'localhost',
                                     :port => 10453, :protocol => 'http', :logger => Logger.new("/dev/null"),
                                     :no_subdomains => true })
  end

  def teardown
  end

  def test_create_bucket
    bucket = @s3.create_bucket("s3media")
    assert_not_nil bucket
  end

  def test_store
    @s3.put("s3media", "helloworld", "Hello World Man!")
    obj = @s3.get("s3media", "helloworld")
    assert_equal "Hello World Man!", obj[:object]
  end

  # TODO - get Chinese to work
  #def test_store_chinese
  #  ni_hao = "你好"
  #  great_wall = "中国的长城"
  #
  #  @s3.put("s3media", ni_hao, great_wall)
  #  obj = @s3.get("s3media", ni_hao)
  #  assert_equal(great_wall, obj[:object])
  #end

  def test_store_not_found
    # The original begin/rescue passed silently when no exception was
    # raised at all; assert_raise makes a missing error a test failure.
    e = assert_raise RightAws::AwsError do
      @s3.get("s3media", "helloworldnotexist")
    end
    assert e.message.include?('NoSuchKey')
  end

  def test_large_store
    @s3.put("s3media", "helloworld", "Hello World Man!")
    buffer = ""
    500000.times do
      buffer << "#{(rand * 100).to_i}"
    end

    buf_len = buffer.length
    time_before = Time.now
    @s3.put("s3media", "big", buffer)

    output = ""
    @s3.get("s3media", "big") do |chunk|
      output << chunk
    end
    time_after = Time.now

    # NOTE(review): wall-clock bound is machine-dependent and can flake
    # on slow or loaded hosts.
    assert(time_after - time_before < 2) # Should run in under 2 seconds on normal machines
    assert_equal(buf_len, output.size)
  end

  # Test that GET requests with a delimiter return a list of
  def test_list_by_delimiter
    @s3.create_bucket("s3media")

    @s3.put("s3media", "delimited/item", "item")

    expected_prefixes = []
    (1..50).each do |i|
      key_prefix = "delimited/%02d/" % i
      @s3.put("s3media", key_prefix + "foo", "foo")
      @s3.put("s3media", key_prefix + "fie", "fie")
      expected_prefixes << key_prefix
    end

    key_names = []
    common_prefixes = []
    # Listing with prefix "delimited" (no trailing slash) should roll
    # everything up into the single common prefix "delimited/".
    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited", :delimiter => '/'}) do |currentResponse|
      common_prefixes += currentResponse[:common_prefixes]
    end
    assert_equal ["delimited/"], common_prefixes

    # With the trailing slash, each numbered sub-prefix is reported and
    # only the direct child key remains in :contents.
    common_prefixes = []
    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited/", :delimiter => '/', "max-keys" => 100}) do |currentResponse|
      key_names += currentResponse[:contents].map do |key|
        key[:key]
      end
      common_prefixes += currentResponse[:common_prefixes]
    end
    assert_equal expected_prefixes, common_prefixes
    assert_equal ["delimited/item"], key_names
  end

  def test_multi_directory
    @s3.put("s3media", "dir/right/123.txt", "recursive")
    output = ""
    obj = @s3.get("s3media", "dir/right/123.txt") do |chunk|
      output << chunk
    end
    assert_equal "recursive", output
  end

  def test_intra_bucket_copy
    @s3.put("s3media", "original.txt", "Hello World")
    @s3.copy("s3media", "original.txt", "s3media", "copy.txt")
    obj = @s3.get("s3media", "copy.txt")
    assert_equal "Hello World", obj[:object]
  end

  def test_copy_in_place
    @s3.put("s3media", "copy-in-place", "Hello World")
    @s3.copy("s3media", "copy-in-place", "s3media", "copy-in-place")
    obj = @s3.get("s3media", "copy-in-place")
    assert_equal "Hello World", obj[:object]
  end

  def test_content_encoding
    foo_compressed = Zlib::Deflate.deflate("foo")
    @s3.put("s3media", "foo", foo_compressed, {"content-encoding" => "gzip"})
    obj = @s3.get("s3media", "foo")
    # assert_equal "gzip", obj[:headers]["content-encoding"] # TODO why doesn't checking content-encoding work?
    assert_equal "gzip", obj[:headers]["x-content-encoding"] # TODO why doesn't checking content-encoding work?
  end

  # def test_content_encoding_data
  #   foo_compressed = Zlib::Deflate.deflate("foo-two")
  #   @s3.put("s3media", "foo-two", foo_compressed, {"content-encoding" => "gzip"})
  #   obj = @s3.get("s3media", "foo-two")
  #   puts "*** GOT HERE 1 #{ obj[:object] }"
  #   assert_equal "foo-two", Zlib::Inflate::inflate(obj[:object])
  # end

  def test_copy_replace_metadata
    @s3.put("s3media", "copy_replace", "Hello World", {"content-type" => "application/octet-stream"})
    obj = @s3.get("s3media", "copy_replace")
    assert_equal "Hello World", obj[:object]
    assert_equal "application/octet-stream", obj[:headers]["content-type"]
    # Copy onto itself with :replace to swap the stored content-type.
    @s3.copy("s3media", "copy_replace", "s3media", "copy_replace", :replace, {"content-type" => "text/plain"})
    obj = @s3.get("s3media", "copy_replace")
    assert_equal "Hello World", obj[:object]
    assert_equal "text/plain", obj[:headers]["content-type"]
  end

  def test_larger_lists
    @s3.create_bucket('right_aws_many')
    (0..50).each do |i|
      ('a'..'z').each do |letter|
        name = "#{letter}#{i}"
        @s3.put('right_aws_many', name, 'asdf')
      end
    end

    # 51 * 26 = 1326 keys stored; a plain list is capped at 1000.
    keys = @s3.list_bucket('right_aws_many')
    assert_equal(1000, keys.size)
    assert_equal('a0', keys.first[:key])
  end

  def test_destroy_bucket
    @s3.create_bucket('deletebucket')
    @s3.delete_bucket('deletebucket')

    # Listing a deleted bucket must raise NoSuchBucket; assert_raise
    # replaces the original fragile begin/fail/rescue chain.
    e = assert_raise RightAws::AwsError do
      @s3.list_bucket('deletebucket')
    end
    assert e.message.include?('NoSuchBucket')
  end

  def test_if_none_match
    @s3.put("s3media", "if_none_match_test", "Hello World 1!")
    obj = @s3.get("s3media", "if_none_match_test")
    tag = obj[:headers]["etag"]
    begin
      @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
    rescue URI::InvalidURIError
      # expected error for 304
    else
      fail 'Should have encountered an error due to the server not returning a response due to caching'
    end
    @s3.put("s3media", "if_none_match_test", "Hello World 2!")
    obj = @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
    assert_equal "Hello World 2!", obj[:object]
  end

  def test_if_modified_since
    @s3.put("s3media", "if_modified_since_test", "Hello World 1!")
    obj = @s3.get("s3media", "if_modified_since_test")
    modified = obj[:headers]["last-modified"]
    begin
      @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => modified})
    rescue URI::InvalidURIError
      # expected error for 304
    else
      fail 'Should have encountered an error due to the server not returning a response due to caching'
    end
    # Granularity of an HTTP Date is 1 second which isn't enough for the test
    # so manually rewind the clock by a second
    time_in_the_past = Time.httpdate(modified) - 1
    begin
      obj = @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => time_in_the_past.httpdate})
    rescue
      fail 'Should have been downloaded since the date is in the past now'
    else
      #expected scenario
    end
  end

end