fakes3 1.2.0 → 1.2.1
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/Gemfile +1 -1
- data/Rakefile +4 -0
- data/fakes3.gemspec +1 -0
- data/lib/fakes3/file_store.rb +25 -0
- data/lib/fakes3/s3_object.rb +1 -1
- data/lib/fakes3/server.rb +30 -7
- data/lib/fakes3/version.rb +1 -1
- data/lib/fakes3/xml_parser.rb +16 -0
- data/test/botocmd.py +10 -8
- metadata +18 -5
- data/test/right_aws_commands_test.rb +0 -219
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: db9520921cf6e3133db80395102de95622fde8c1
+  data.tar.gz: 3627b3c4a37f71be5f9a3b6af00443efd5c4cdc2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 34d0d8a995420525d0faf38ebae7d7bcba6b96cd38ec2297ed35a5a1702ab3912184cfee15b61caa9db6ed5041f8bec1ea7e425c485d0a52b88f1d40b49b138a
+  data.tar.gz: 43de1b4446acb27e2e99c0624a496e3c8b4efc7c65a3eda0342e8102351a6e1fe3876b966cc2d32c643bd804e62712de2a6562b05accc4e9c5dfafe40f1d6b37
data/.gitignore
CHANGED
data/Gemfile
CHANGED
data/Rakefile
CHANGED
@@ -8,6 +8,10 @@ Rake::TestTask.new(:test) do |t|
   t.libs << "."
   t.test_files =
     FileList['test/*_test.rb'].exclude('test/s3_commands_test.rb')
+
+  # A lot of the gems like right aws and amazon sdk have a bunch of warnings, so
+  # this suppresses them for the test runs
+  t.warning = false
 end
 
 desc "Run the test_server"
data/fakes3.gemspec
CHANGED
@@ -26,6 +26,7 @@ Gem::Specification.new do |s|
   #s.add_development_dependency "debugger"
   s.add_dependency "thor"
   s.add_dependency "builder"
+  s.add_dependency "xml-simple"
 
   s.files = `git ls-files`.split("\n")
   s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
data/lib/fakes3/file_store.rb
CHANGED
@@ -96,6 +96,7 @@ module FakeS3
       real_obj.modified_date = metadata.fetch(:modified_date) do
         File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
       end
+      real_obj.cache_control = metadata[:cache_control]
       real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
       return real_obj
     rescue
@@ -158,6 +159,7 @@ module FakeS3
       obj.content_encoding = src_metadata[:content_encoding] # if src_metadata[:content_encoding]
       obj.size = src_metadata[:size]
       obj.modified_date = src_metadata[:modified_date]
+      obj.cache_control = src_metadata[:cache_control]
 
       src_bucket.find(src_name)
       dst_bucket.add(obj)
@@ -214,6 +216,7 @@ module FakeS3
       obj.content_encoding = metadata_struct[:content_encoding] # if metadata_struct[:content_encoding]
       obj.size = metadata_struct[:size]
       obj.modified_date = metadata_struct[:modified_date]
+      obj.cache_control = metadata_struct[:cache_control]
 
       bucket.add(obj)
       return obj
@@ -269,6 +272,23 @@ module FakeS3
       end
     end
 
+    def delete_objects(bucket, objects, request)
+      begin
+        filenames = []
+        objects.each do |object_name|
+          filenames << File.join(@root,bucket.name,object_name)
+          object = bucket.find(object_name)
+          bucket.remove(object)
+        end
+
+        FileUtils.rm_rf(filenames)
+      rescue
+        puts $!
+        $!.backtrace.each { |line| puts line }
+        return nil
+      end
+    end
+
     # TODO: abstract getting meta data from request.
     def create_metadata(content, request)
       metadata = {}
@@ -277,6 +297,11 @@ module FakeS3
       if request.header['content-disposition']
         metadata[:content_disposition] = request.header['content-disposition'].first
       end
+
+      if request.header['cache-control']
+        metadata[:cache_control] = request.header['cache-control'].first
+      end
+
       content_encoding = request.header["content-encoding"].first
       metadata[:content_encoding] = content_encoding
       #if content_encoding
data/lib/fakes3/s3_object.rb
CHANGED
@@ -1,7 +1,7 @@
 module FakeS3
   class S3Object
     include Comparable
-    attr_accessor :name,:size,:creation_date,:modified_date,:md5,:io,:content_type,:content_disposition,:content_encoding,:custom_metadata
+    attr_accessor :name,:size,:creation_date,:modified_date,:md5,:io,:content_type,:content_disposition,:content_encoding,:custom_metadata,:cache_control
 
     def hash
       @name.hash
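Together with the file_store.rb and server.rb hunks, the new :cache_control accessor lets a Cache-Control header supplied on PUT be stored with the object and echoed back on GET. A minimal round-trip sketch against a local FakeS3 server (the host, port, bucket, and key here are invented for illustration, not taken from the diff):

    require 'net/http'

    uri = URI('http://localhost:10453/s3media/page.html')

    # PUT an object with a Cache-Control header; create_metadata now captures it.
    put = Net::HTTP::Put.new(uri)
    put['Cache-Control'] = 'max-age=3600'
    put.body = '<html></html>'
    Net::HTTP.start(uri.host, uri.port) { |http| http.request(put) }

    # GET it back; the server replays the stored value on the response.
    res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(Net::HTTP::Get.new(uri)) }
    puts res['Cache-Control']  # expected: "max-age=3600"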
data/lib/fakes3/server.rb
CHANGED
@@ -4,9 +4,11 @@ require 'webrick/https'
 require 'openssl'
 require 'securerandom'
 require 'cgi'
+require 'uri'
 require 'fakes3/util'
 require 'fakes3/file_store'
 require 'fakes3/xml_adapter'
+require 'fakes3/xml_parser'
 require 'fakes3/bucket_query'
 require 'fakes3/unsupported_operation'
 require 'fakes3/errors'
@@ -26,6 +28,7 @@ module FakeS3
     MOVE = "MOVE"
     DELETE_OBJECT = "DELETE_OBJECT"
     DELETE_BUCKET = "DELETE_BUCKET"
+    DELETE_OBJECTS = "DELETE_OBJECTS"
 
     attr_accessor :bucket, :object, :type, :src_bucket,
                   :src_object, :method, :webrick_request,
@@ -98,6 +101,7 @@ module FakeS3
         response.status = 404
         response.body = XmlAdapter.error_no_such_key(s_req.object)
         response['Content-Type'] = "application/xml"
+        response['Access-Control-Allow-Origin'] = '*'
         return
       end
 
@@ -124,8 +128,7 @@ module FakeS3
         response.header['Content-Encoding'] = real_obj.content_encoding
       end
 
-      response['Content-Disposition'] = real_obj.content_disposition
-      stat = File::Stat.new(real_obj.io.path)
+      response['Content-Disposition'] = real_obj.content_disposition ? real_obj.content_disposition : 'attachment'
 
       response['Last-Modified'] = Time.iso8601(real_obj.modified_date).httpdate
       response.header['ETag'] = "\"#{real_obj.md5}\""
@@ -137,6 +140,7 @@ module FakeS3
         response.header['x-amz-meta-' + header] = value
       end
 
+      stat = File::Stat.new(real_obj.io.path)
       content_length = stat.size
 
       # Added Range Query support
@@ -168,6 +172,10 @@ module FakeS3
       else
         response.body = real_obj.io
       end
+
+      if real_obj.cache_control
+        response['Cache-Control'] = real_obj.cache_control
+      end
     end
   end
 
@@ -240,6 +248,10 @@ module FakeS3
     end
 
     def do_POST(request,response)
+      if request.query_string === 'delete'
+        return do_DELETE(request, response)
+      end
+
       s_req = normalize_request(request)
       key = request.query['key']
       query = CGI::parse(request.request_uri.query || "")
@@ -319,6 +331,10 @@ module FakeS3
       s_req = normalize_request(request)
 
       case s_req.type
+      when Request::DELETE_OBJECTS
+        bucket_obj = @store.get_bucket(s_req.bucket)
+        keys = XmlParser.delete_objects(s_req.webrick_request)
+        @store.delete_objects(bucket_obj,keys,s_req.webrick_request)
       when Request::DELETE_OBJECT
         bucket_obj = @store.get_bucket(s_req.bucket)
        @store.delete_object(bucket_obj,s_req.object,s_req.webrick_request)
@@ -332,7 +348,6 @@ module FakeS3
 
     def do_OPTIONS(request, response)
       super
-
       response['Access-Control-Allow-Origin'] = '*'
       response['Access-Control-Allow-Methods'] = 'PUT, POST, HEAD, GET, OPTIONS'
       response['Access-Control-Allow-Headers'] = 'Accept, Content-Type, Authorization, Content-Length, ETag, X-CSRF-Token, Content-Disposition'
@@ -356,10 +371,13 @@ module FakeS3
       end
 
       if elems.size == 0
-
+        s_req.type = Request::DELETE_OBJECTS
+        s_req.query = query
+        s_req.webrick_request = webrick_req
       elsif elems.size == 1
-        s_req.type = Request::DELETE_BUCKET
+        s_req.type = webrick_req.query_string == 'delete' ? Request::DELETE_OBJECTS : Request::DELETE_BUCKET
         s_req.query = query
+        s_req.webrick_request = webrick_req
       else
         s_req.type = Request::DELETE_OBJECT
         object = elems[1,elems.size].join('/')
@@ -432,7 +450,8 @@ module FakeS3
       # for multipart copy
       copy_source = webrick_req.header["x-amz-copy-source"]
       if copy_source and copy_source.size == 1
-
+        copy_source = URI.unescape copy_source.first
+        src_elems = copy_source.split("/")
         root_offset = src_elems[0] == "" ? 1 : 0
         s_req.src_bucket = src_elems[root_offset]
         s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
@@ -482,7 +501,11 @@ module FakeS3
       when 'DELETE'
         normalize_delete(webrick_req,s_req)
       when 'POST'
-
+        if webrick_req.query_string != 'delete'
+          normalize_post(webrick_req,s_req)
+        else
+          normalize_delete(webrick_req,s_req)
+        end
       else
         raise "Unknown Request"
       end
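The DELETE_OBJECTS path above mirrors S3's multi-object delete API: the client POSTs to the bucket with a ?delete query string and an XML body listing the keys, and the server re-routes that POST through do_DELETE. A hedged sketch of such a request (host, port, bucket, and keys are illustrative only):

    require 'net/http'

    # S3 multi-object delete payload listing the keys to remove.
    body = <<~XML
      <Delete>
        <Object><Key>file1.txt</Key></Object>
        <Object><Key>file2.txt</Key></Object>
      </Delete>
    XML

    uri = URI('http://localhost:10453/s3media?delete')
    res = Net::HTTP.post(uri, body, 'Content-Type' => 'application/xml')
    puts res.code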
data/lib/fakes3/version.rb
CHANGED
data/lib/fakes3/xml_parser.rb
ADDED
@@ -0,0 +1,16 @@
+require 'xmlsimple'
+
+module FakeS3
+  class XmlParser
+    def self.delete_objects(request)
+      keys = []
+
+      objects = XmlSimple.xml_in(request.body, {'NoAttr' => true})['Object']
+      objects.each do |key|
+        keys << key['Key'][0]
+      end
+
+      keys
+    end
+  end
+end
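For reference, XmlSimple.xml_in with 'NoAttr' => true parses the Delete payload into nested hashes and arrays, which is why each key comes out as key['Key'][0]. A quick illustration with a made-up payload:

    require 'xmlsimple'

    xml = '<Delete><Object><Key>a.txt</Key></Object><Object><Key>b.txt</Key></Object></Delete>'
    doc = XmlSimple.xml_in(xml, {'NoAttr' => true})
    # doc => {"Object"=>[{"Key"=>["a.txt"]}, {"Key"=>["b.txt"]}]}
    doc['Object'].map { |o| o['Key'][0] }  # => ["a.txt", "b.txt"]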
data/test/botocmd.py
CHANGED
@@ -1,6 +1,8 @@
 #!/usr/bin/python
+
 # -*- coding: utf-8 -*-
 # fakes3cmd.py -- an s3cmd-like script that accepts a custom host and portname
+from __future__ import print_function
 import re
 import os
 from optparse import OptionParser
@@ -21,12 +23,12 @@ class FakeS3Cmd(object):
         self._connect()
 
     def _connect(self):
-        print
+        print('Connecting: %s:%s' % (self.host, self.port))
         self.conn = S3Connection(is_secure=False,
-
-
-
-
+                                 calling_format=OrdinaryCallingFormat(),
+                                 aws_access_key_id='',
+                                 aws_secret_access_key='',
+                                 port=self.port, host=self.host)
 
 
     @staticmethod
@@ -41,7 +43,7 @@ class FakeS3Cmd(object):
 
         bucket, _ = self._parse_uri(path)
         self.conn.create_bucket(bucket)
-        print
+        print('made bucket: [%s]' % bucket)
 
     def rb(self, path, *args):
         if not self.conn:
@@ -49,7 +51,7 @@ class FakeS3Cmd(object):
 
         bucket, _ = self._parse_uri(path)
         self.conn.delete_bucket(bucket)
-        print
+        print('removed bucket: [%s]' % bucket)
 
     def put(self, *args):
         if not self.conn:
@@ -63,7 +65,7 @@ class FakeS3Cmd(object):
         key = Key(bucket)
         key.key = os.path.join(prefix, os.path.basename(src_file))
         key.set_contents_from_filename(src_file)
-        print
+        print('stored: [%s]' % key.key)
 
 
 if __name__ == "__main__":
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fakes3
 version: !ruby/object:Gem::Version
-  version: 1.2.0
+  version: 1.2.1
 platform: ruby
 authors:
 - Curtis Spencer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-12-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -164,6 +164,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: xml-simple
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Use Fake S3 to test basic Amazon S3 functionality without actually connecting
   to AWS
 email:
@@ -200,6 +214,7 @@ files:
 - lib/fakes3/util.rb
 - lib/fakes3/version.rb
 - lib/fakes3/xml_adapter.rb
+- lib/fakes3/xml_parser.rb
 - static/button.svg
 - static/logo.png
 - test/aws_sdk_commands_test.rb
@@ -210,7 +225,6 @@ files:
 - test/local_s3_cfg
 - test/minitest_helper.rb
 - test/post_test.rb
-- test/right_aws_commands_test.rb
 - test/s3_commands_test.rb
 - test/s3cmd_test.rb
 - test/test_helper.rb
@@ -235,7 +249,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.6.
+rubygems_version: 2.6.13
 signing_key:
 specification_version: 4
 summary: Fake S3 is a server that simulates Amazon S3 commands so you can test your
@@ -249,7 +263,6 @@ test_files:
 - test/local_s3_cfg
 - test/minitest_helper.rb
 - test/post_test.rb
-- test/right_aws_commands_test.rb
 - test/s3_commands_test.rb
 - test/s3cmd_test.rb
 - test/test_helper.rb
data/test/right_aws_commands_test.rb
DELETED
@@ -1,219 +0,0 @@
-require 'test/test_helper'
-require 'fileutils'
-require 'right_aws'
-require 'time'
-
-class RightAWSCommandsTest < Test::Unit::TestCase
-
-  def setup
-    @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
-                                    {:multi_thread => false, :server => 'localhost',
-                                     :port => 10453, :protocol => 'http', :logger => Logger.new("/dev/null"),
-                                     :no_subdomains => true })
-  end
-
-  def teardown
-  end
-
-  def test_create_bucket
-    bucket = @s3.create_bucket("s3media")
-    assert_not_nil bucket
-  end
-
-  def test_store
-    @s3.put("s3media", "helloworld", "Hello World Man!")
-    obj = @s3.get("s3media", "helloworld")
-    assert_equal "Hello World Man!", obj[:object]
-  end
-
-  # TODO - get Chinese to work
-  #def test_store_chinese
-  #  ni_hao = "你好"
-  #  great_wall = "中国的长城"
-  #
-  #  @s3.put("s3media", ni_hao, great_wall)
-  #  obj = @s3.get("s3media", ni_hao)
-  #  assert_equal(great_wall, obj[:object])
-  #end
-
-  def test_store_not_found
-    begin
-      obj = @s3.get("s3media", "helloworldnotexist")
-    rescue RightAws::AwsError
-      assert $!.message.include?('NoSuchKey')
-    rescue
-      fail 'Should have caught NoSuchKey Exception'
-    end
-  end
-
-  def test_large_store
-    @s3.put("s3media", "helloworld", "Hello World Man!")
-    buffer = ""
-    500000.times do
-      buffer << "#{(rand * 100).to_i}"
-    end
-
-    buf_len = buffer.length
-    time_before = Time.now
-    @s3.put("s3media", "big", buffer)
-
-    output = ""
-    @s3.get("s3media","big") do |chunk|
-      output << chunk
-    end
-    time_after = Time.now
-
-    assert(time_after - time_before < 2) # Should run in under 2 seconds on normal machines
-    assert_equal(buf_len, output.size)
-  end
-
-  # Test that GET requests with a delimiter return a list of
-  def test_list_by_delimiter
-    @s3.create_bucket("s3media")
-
-    @s3.put("s3media", "delimited/item", "item")
-
-    expected_prefixes = []
-    (1..50).each do |i|
-      key_prefix = "delimited/%02d/" % i
-      @s3.put("s3media", key_prefix + "foo", "foo")
-      @s3.put("s3media", key_prefix + "fie", "fie")
-      expected_prefixes << key_prefix
-    end
-
-    key_names = []
-    common_prefixes = []
-    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited", :delimiter => '/'}) do |currentResponse|
-      common_prefixes += currentResponse[:common_prefixes]
-    end
-    assert_equal ["delimited/"], common_prefixes
-
-    common_prefixes = []
-    @s3.incrementally_list_bucket("s3media", {:prefix => "delimited/", :delimiter => '/', "max-keys" => 100}) do |currentResponse|
-      key_names += currentResponse[:contents].map do |key|
-        key[:key]
-      end
-      common_prefixes += currentResponse[:common_prefixes]
-    end
-    assert_equal expected_prefixes, common_prefixes
-    assert_equal ["delimited/item"], key_names
-  end
-
-  def test_multi_directory
-    @s3.put("s3media", "dir/right/123.txt", "recursive")
-    output = ""
-    obj = @s3.get("s3media", "dir/right/123.txt") do |chunk|
-      output << chunk
-    end
-    assert_equal "recursive", output
-  end
-
-  def test_intra_bucket_copy
-    @s3.put("s3media", "original.txt", "Hello World")
-    @s3.copy("s3media", "original.txt", "s3media", "copy.txt")
-    obj = @s3.get("s3media", "copy.txt")
-    assert_equal "Hello World", obj[:object]
-  end
-
-  def test_copy_in_place
-    @s3.put("s3media", "copy-in-place", "Hello World")
-    @s3.copy("s3media", "copy-in-place", "s3media","copy-in-place")
-    obj = @s3.get("s3media", "copy-in-place")
-    assert_equal "Hello World", obj[:object]
-  end
-
-  def test_content_encoding
-    foo_compressed = Zlib::Deflate.deflate("foo")
-    @s3.put("s3media", "foo", foo_compressed, {"content-encoding" => "gzip"})
-    obj = @s3.get("s3media", "foo")
-    # assert_equal "gzip", obj[:headers]["content-encoding"] # TODO why doesn't checking content-encoding work?
-    assert_equal "gzip", obj[:headers]["x-content-encoding"] # TODO why doesn't checking content-encoding work?
-  end
-
-  # def test_content_encoding_data
-  #  foo_compressed = Zlib::Deflate.deflate("foo-two")
-  #  @s3.put("s3media", "foo-two", foo_compressed, {"content-encoding" => "gzip"})
-  #  obj = @s3.get("s3media", "foo-two")
-  #  puts "*** GOT HERE 1 #{ obj[:object] }"
-  #  assert_equal "foo-two", Zlib::Inflate::inflate(obj[:object])
-  # end
-
-  def test_copy_replace_metadata
-    @s3.put("s3media", "copy_replace", "Hello World", {"content-type" => "application/octet-stream"})
-    obj = @s3.get("s3media", "copy_replace")
-    assert_equal "Hello World", obj[:object]
-    assert_equal "application/octet-stream", obj[:headers]["content-type"]
-    @s3.copy("s3media", "copy_replace", "s3media", "copy_replace", :replace, {"content-type"=>"text/plain"})
-    obj = @s3.get("s3media", "copy_replace")
-    assert_equal "Hello World", obj[:object]
-    assert_equal "text/plain", obj[:headers]["content-type"]
-  end
-
-  def test_larger_lists
-    @s3.create_bucket('right_aws_many')
-    (0..50).each do |i|
-      ('a'..'z').each do |letter|
-        name = "#{letter}#{i}"
-        @s3.put('right_aws_many', name, 'asdf')
-      end
-    end
-
-    keys = @s3.list_bucket('right_aws_many')
-    assert_equal(1000, keys.size)
-    assert_equal('a0', keys.first[:key])
-  end
-
-  def test_destroy_bucket
-    @s3.create_bucket('deletebucket')
-    @s3.delete_bucket('deletebucket')
-
-    begin
-      bucket = @s3.list_bucket('deletebucket')
-      fail("Shouldn't succeed here")
-    rescue RightAws::AwsError
-      assert $!.message.include?('NoSuchBucket')
-    rescue
-      fail 'Should have caught NoSuchBucket Exception'
-    end
-  end
-
-  def test_if_none_match
-    @s3.put("s3media", "if_none_match_test", "Hello World 1!")
-    obj = @s3.get("s3media", "if_none_match_test")
-    tag = obj[:headers]["etag"]
-    begin
-      @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
-    rescue URI::InvalidURIError
-      # expected error for 304
-    else
-      fail 'Should have encountered an error due to the server not returning a response due to caching'
-    end
-    @s3.put("s3media", "if_none_match_test", "Hello World 2!")
-    obj = @s3.get("s3media", "if_none_match_test", {"If-None-Match" => tag})
-    assert_equal "Hello World 2!", obj[:object]
-  end
-
-  def test_if_modified_since
-    @s3.put("s3media", "if_modified_since_test", "Hello World 1!")
-    obj = @s3.get("s3media", "if_modified_since_test")
-    modified = obj[:headers]["last-modified"]
-    begin
-      @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => modified})
-    rescue URI::InvalidURIError
-      # expected error for 304
-    else
-      fail 'Should have encountered an error due to the server not returning a response due to caching'
-    end
-    # Granularity of an HTTP Date is 1 second which isn't enough for the test
-    # so manually rewind the clock by a second
-    time_in_the_past = Time.httpdate(modified) - 1
-    begin
-      obj = @s3.get("s3media", "if_modified_since_test", {"If-Modified-Since" => time_in_the_past.httpdate})
-    rescue
-      fail 'Should have been downloaded since the date is in the past now'
-    else
-      #expected scenario
-    end
-  end
-
-end