fake_aws 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +17 -0
- data/.rspec +2 -0
- data/.travis.yml +3 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +43 -0
- data/Rakefile +6 -0
- data/fake_aws.gemspec +26 -0
- data/lib/fake_aws/s3/object_store.rb +66 -0
- data/lib/fake_aws/s3/operations/get_object.rb +53 -0
- data/lib/fake_aws/s3/operations/put_object.rb +67 -0
- data/lib/fake_aws/s3/rack_app.rb +25 -0
- data/lib/fake_aws/s3/xml_error_response.rb +29 -0
- data/lib/fake_aws/version.rb +3 -0
- data/lib/fake_aws.rb +6 -0
- data/spec/s3/object_store_spec.rb +75 -0
- data/spec/s3/rack_app_spec.rb +147 -0
- data/spec/s3/xml_error_response_spec.rb +9 -0
- data/spec/spec_helper.rb +2 -0
- metadata +140 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 52ba91fe2a0a18c2caad357a8ee0274630c6972e
+  data.tar.gz: 707bac260246499571d40d34877a7a2d17524129
+SHA512:
+  metadata.gz: 5c065a73d8850ae6109ef0ce90ad70bde721b426aa1ee130f4c916cd19a70d47ea961cae2d5814241c0f17f6a43404b77722a231f2b2dfb3d675a7ebed793fe2
+  data.tar.gz: 1765e4c189c2b74a1ce88d9d499fb76a2ceebf3242574806771678ea29005e82f2b8f2a47e33ddaece740bfbb515939ac36c9d7c926d253f27818bdadd0f5452
data/.gitignore
ADDED
data/.rspec
ADDED
data/.travis.yml
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
+Copyright (c) 2013 Envato, Pete Yandell & Luke Arndt
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,43 @@
+# FakeAWS
+
+A minimal implementation of AWS as a Rack app, for testing and development.
+
+This is still in the very early stages of development.
+
+So far there's only a tiny bit of S3 implemented, but it's well tested and
+designed to be easy to extend. Pull requests for more features are welcome.
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'fake_aws'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install fake_aws
+
+## Usage
+
+TODO: Write usage instructions here
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Add some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create new Pull Request
+
+## To Do
+
+- Split up the rack app tests into separate files for the different operations
+- Handle bucket names in the host as well as the path
+- Spit out a properly formatted response on a successful PUT object operation
+- Complete the missing fields in XML error responses
+- Check signing of requests
+- Handle PUT Object copy requests
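Since the README's Usage section is still a TODO, here is a minimal sketch of how the Rack app defined later in this diff could be served locally. It assumes that `require 'fake_aws'` loads the S3 classes (lib/fake_aws.rb is not expanded in this diff), and the storage directory name is purely illustrative:

    # config.ru -- hypothetical wiring for a local fake S3 endpoint.
    require 'fake_aws'

    # Objects end up as plain files under this directory (name is illustrative).
    run FakeAWS::S3::RackApp.new("tmp/fake_s3")

Running `rackup` against this file should then accept path-style PUT and GET object requests such as /mah-bucket/mah-file.txt, which is exactly what the specs further down exercise.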
data/Rakefile
ADDED
data/fake_aws.gemspec
ADDED
@@ -0,0 +1,26 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'fake_aws/version'
+
+Gem::Specification.new do |spec|
+  spec.name = "fake_aws"
+  spec.version = FakeAWS::VERSION
+  spec.authors = ["Pete Yandell", "Luke Arndt"]
+  spec.email = ["pete@notahat.com", "luke@arndt.io"]
+  spec.description = %q{A subset of AWS as a Rack app, for dev and testing}
+  spec.summary = %q{A subset of AWS (so far just a small proportion of S3) implemented as a Rack app, useful for development and testing.}
+  spec.homepage = "https://github.com/envato/fake_aws"
+  spec.license = "MIT"
+
+  spec.files = `git ls-files`.split($/)
+  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+  spec.require_paths = ["lib"]
+
+  spec.add_development_dependency "bundler", "~> 1.3"
+  spec.add_development_dependency "rake"
+  spec.add_development_dependency "rspec"
+  spec.add_development_dependency "faraday"
+  spec.add_development_dependency "rack-test"
+end
data/lib/fake_aws/s3/object_store.rb
ADDED
@@ -0,0 +1,66 @@
+require 'json'
+
+module FakeAWS
+  module S3
+
+    # Read and write S3 objects and metadata about them in the filesystem.
+    class ObjectStore
+
+      def initialize(root_directory, path_info)
+        @root_directory = root_directory
+        @path_info = path_info
+
+        path_components = @path_info.split("/")
+        _, @bucket, *@directories, @file_name = path_components
+      end
+
+      attr_reader :bucket
+
+      def bucket_exists?
+        Dir.exists?(bucket_path)
+      end
+
+      def object_exists?
+        File.exists?(file_path)
+      end
+
+      def write_object(content, metadata)
+        FileUtils.mkdir_p(directory_path)
+        File.write(file_path, content)
+        File.write(metadata_file_path, metadata.to_json)
+      end
+
+      def read_object
+        File.new(file_path)
+      end
+
+      def read_metadata
+        if File.exists?(metadata_file_path)
+          JSON.parse(File.read(metadata_file_path))
+        else
+          {}
+        end
+      end
+
+      private
+
+      def bucket_path
+        @bucket_path ||= File.join(@root_directory, @bucket)
+      end
+
+      def file_path
+        @file_path ||= File.join(@root_directory, @path_info)
+      end
+
+      def metadata_file_path
+        "#{file_path}.metadata.json"
+      end
+
+      def directory_path
+        @directory_path ||= File.join(@root_directory, @bucket, *@directories)
+      end
+
+    end
+
+  end
+end
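A quick sketch of using the store on its own, with the same path-style key the specs use; `require 'fake_aws'` is assumed to pull in the class, and all values are illustrative:

    require 'fileutils'
    require 'fake_aws'

    # PATH_INFO-style key: leading slash, then bucket, then the object key.
    store = FakeAWS::S3::ObjectStore.new("tmp", "/mah-bucket/mah-file.txt")

    store.write_object("Hello, world!", "Content-Type" => "text/plain")
    store.read_object.read  # => "Hello, world!"
    store.read_metadata     # => {"Content-Type"=>"text/plain"}
    store.bucket_exists?    # => true -- write_object created tmp/mah-bucket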
data/lib/fake_aws/s3/operations/get_object.rb
ADDED
@@ -0,0 +1,53 @@
+module FakeAWS
+  module S3
+    module Operations
+
+      class GetObject
+        def initialize(root_directory, env)
+          @root_directory = root_directory
+          @env = env
+        end
+
+        def call
+          if object_store.object_exists?
+            success_response
+          else
+            no_such_key_response
+          end
+        end
+
+        private
+
+        def success_response
+          [
+            200,
+            { "Content-Type" => content_type },
+            object_store.read_object
+          ]
+        end
+
+        def no_such_key_response
+          [
+            404,
+            { "Content-Type" => "application/xml" },
+            # TODO: need to figure out what the resource should be here.
+            XMLErrorResponse.new("NoSuchKey", "The specified key does not exist.", "")
+          ]
+        end
+
+        def content_type
+          metadata["Content-Type"] || "application/octet-stream"
+        end
+
+        def metadata
+          @metadata ||= object_store.read_metadata
+        end
+
+        def object_store
+          @object_store ||= ObjectStore.new(@root_directory, @env["PATH_INFO"])
+        end
+      end
+
+    end
+  end
+end
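GetObject only reads PATH_INFO from the Rack env, so it can be exercised directly with a bare hash; the return value is an ordinary Rack status/headers/body triple (the commented values assume the object from the previous sketch exists):

    operation = FakeAWS::S3::Operations::GetObject.new("tmp", "PATH_INFO" => "/mah-bucket/mah-file.txt")
    status, headers, body = operation.call
    # status  => 200
    # headers => { "Content-Type" => "text/plain" }
    # body    => a File handle on tmp/mah-bucket/mah-file.txt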
data/lib/fake_aws/s3/operations/put_object.rb
ADDED
@@ -0,0 +1,67 @@
+module FakeAWS
+  module S3
+    module Operations
+
+      class PutObject
+        def initialize(root_directory, env)
+          @root_directory = root_directory
+          @env = env
+        end
+
+        def call
+          # TODO: Bit of a tell-don't-ask violation here. Can it be fixed?
+          if object_store.bucket_exists?
+            object_store.write_object(content, metadata)
+            success_response
+          else
+            no_such_bucket_response
+          end
+        end
+
+        private
+
+        def success_response
+          [
+            200,
+            { "Content-Type" => "application/xml" },
+            ["hello world"] # TODO: Uh huh.
+          ]
+        end
+
+        def no_such_bucket_response
+          [
+            404,
+            { "Content-Type" => "application/xml" },
+            XMLErrorResponse.new(
+              "NoSuchBucket",
+              "The specified bucket does not exist.",
+              "/#{object_store.bucket}"
+            )
+          ]
+        end
+
+        def content
+          @env["rack.input"].read
+        end
+
+        def metadata
+          @metadata ||= {}.tap do |metadata|
+            metadata["Content-Type"] = @env['CONTENT_TYPE']
+
+            user_metadata_env_keys = @env.keys.select {|key| key =~ /^HTTP_X_AMZ_META_/ }
+            user_metadata_env_keys.each do |env_key|
+              metadata_key = env_key.sub(/^HTTP_/, "").gsub("_", "-").downcase
+              metadata[metadata_key] = @env[env_key]
+            end
+          end
+        end
+
+        def object_store
+          @object_store ||= ObjectStore.new(@root_directory, @env["PATH_INFO"])
+        end
+
+      end
+
+    end
+  end
+end
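The metadata method above maps Rack's upcased env keys back to S3-style metadata keys. A worked example of that substitution chain, using a hypothetical header name:

    env_key      = "HTTP_X_AMZ_META_EXAMPLE"  # how Rack exposes the x-amz-meta-example request header
    metadata_key = env_key.sub(/^HTTP_/, "").gsub("_", "-").downcase
    metadata_key  # => "x-amz-meta-example"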
data/lib/fake_aws/s3/rack_app.rb
ADDED
@@ -0,0 +1,25 @@
+module FakeAWS
+  module S3
+
+    class RackApp
+      def initialize(directory)
+        @directory = directory
+      end
+
+      def call(env)
+        operation_class = case env["REQUEST_METHOD"]
+        when "PUT"
+          Operations::PutObject
+        when "GET"
+          Operations::GetObject
+        else
+          raise "Unhandled request method" # TODO: Make a proper exception for this.
+        end
+
+        operation = operation_class.new(@directory, env)
+        operation.call
+      end
+    end
+
+  end
+end
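The specs further down drive this app through Faraday's :rack adapter; the same wiring outside RSpec might look like the sketch below. Directory, bucket, and key are illustrative, and the bucket directory has to exist before the PUT because PutObject checks bucket_exists?:

    require 'fileutils'
    require 'faraday'
    require 'fake_aws'

    app        = FakeAWS::S3::RackApp.new("tmp")
    connection = Faraday.new { |faraday| faraday.adapter :rack, app }

    FileUtils.mkdir_p("tmp/mah-bucket")

    connection.put("/mah-bucket/mah-file.txt") do |request|
      request.headers["Content-Type"] = "text/plain"
      request.body = "Hello, world!"
    end

    connection.get("/mah-bucket/mah-file.txt").body  # => "Hello, world!"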
data/lib/fake_aws/s3/xml_error_response.rb
ADDED
@@ -0,0 +1,29 @@
+module FakeAWS
+  module S3
+
+    class XMLErrorResponse
+      def initialize(code, message, resource)
+        @code = code
+        @message = message
+        @resource = resource
+      end
+
+      def to_str
+        # TODO: Fill out the bits of the XML response that we haven't yet.
+        "".tap do |xml|
+          xml << %q{<?xml version="1.0" encoding="UTF-8"?>\n}
+          xml << %q{<Error>}
+
+          xml << " <Code>#{@code}</Code>"
+          xml << " <Message>#{@message}</Message>"
+          xml << " <Resource>#{@resource}</Resource>"
+          xml << " <RequestId></RequestId>"
+
+          xml << %q{</Error>}
+        end
+      end
+
+    end
+
+  end
+end
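A quick sketch of building one of these error bodies; the resource value is illustrative. Note that the first appended line uses a %q literal, so the \n in it is written out as two literal characters rather than a newline:

    error = FakeAWS::S3::XMLErrorResponse.new(
      "NoSuchKey",
      "The specified key does not exist.",
      "/mah-bucket/mah-file.txt"
    )
    error.to_str  # => one string containing the <Error>, <Code>, <Message>, <Resource>, and <RequestId> elements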
data/lib/fake_aws.rb
ADDED
data/spec/s3/object_store_spec.rb
ADDED
@@ -0,0 +1,75 @@
+require 'spec_helper'
+
+describe FakeAWS::S3::ObjectStore do
+  let(:root_directory) { "tmp" }
+  let(:path_info) { "/mah-bucket/mah-file.txt" }
+
+  subject { described_class.new(root_directory, path_info) }
+
+  let(:bucket_path) { "tmp/mah-bucket" }
+  let(:object_file_path) { "tmp/mah-bucket/mah-file.txt" }
+  let(:metadata_file_path) { "tmp/mah-bucket/mah-file.txt.metadata.json" }
+
+  before do
+    FileUtils.rm_r(root_directory) rescue Errno::ENOENT
+    FileUtils.mkdir_p(bucket_path)
+  end
+
+  describe "#bucket_exists?" do
+    it "returns true if the bucket directory exists" do
+      expect(subject.bucket_exists?).to be_true
+    end
+
+    it "returns false if the bucket directory doesn't exist" do
+      FileUtils.rmdir(bucket_path)
+      expect(subject.bucket_exists?).to be_false
+    end
+  end
+
+  describe "#object_exists?" do
+    it "returns true if the object file exists" do
+      File.write(object_file_path, "Hello, world!")
+      expect(subject.object_exists?).to be_true
+    end
+
+    it "returns file if the object file doesn't exist" do
+      expect(subject.object_exists?).to be_false
+    end
+  end
+
+  describe "#write_object" do
+    it "writes the content to the object file" do
+      subject.write_object("Hello, world!", { "bunnies" => "scary" })
+
+      expect(File.read(object_file_path)).to eq("Hello, world!")
+    end
+
+    it "writes the metadata to the metadata file as JSON" do
+      subject.write_object("Hello, world!", { "bunnies" => "scary" })
+
+      expect(File.read(metadata_file_path)).to eq('{"bunnies":"scary"}')
+    end
+  end
+
+  describe "#read_object" do
+    it "reads the contents of the object file" do
+      File.write(object_file_path, "Hello, world!")
+
+      expect(subject.read_object.read).to eq("Hello, world!")
+    end
+  end
+
+  describe "#read_metadata" do
+    it "returns an empty hash if there's no metadata file" do
+      expect(subject.read_metadata).to eq({})
+    end
+
+    it "returns the JSON from the metadata file converted to a hash" do
+      File.write(metadata_file_path, '{"bunnies":"scary"}')
+
+      expect(subject.read_metadata).to eq("bunnies" => "scary")
+    end
+  end
+
+end
+
data/spec/s3/rack_app_spec.rb
ADDED
@@ -0,0 +1,147 @@
+require 'spec_helper'
+require 'faraday'
+require 'rack/test'
+require 'json'
+
+describe FakeAWS::S3::RackApp do
+  let(:s3_path) { "tmp" }
+  let(:bucket) { "mah-bucket" }
+  let(:file_name) { "mah-file.txt"}
+  let(:file_contents) { "Hello, world!" }
+  subject { described_class.new(s3_path) }
+
+  let(:connection) do
+    Faraday.new do |connection|
+      connection.adapter :rack, subject
+    end
+  end
+
+  before do
+    FileUtils.rm_r(s3_path) rescue Errno::ENOENT
+    FileUtils.mkdir(s3_path)
+  end
+
+  context "PUT object" do
+    def put_example_file(key)
+      connection.put do |request|
+        request.url(File.join(bucket, key))
+        request.headers["Content-Type"] = "text/plain"
+        request.headers["x-amz-meta-example"] = "example metadata"
+        request.body = file_contents
+      end
+    end
+
+    def read_example_metadata(key)
+      metadata_file_path = File.join(s3_path, "/#{bucket}/#{key}.metadata.json")
+      JSON.parse(File.read(metadata_file_path))
+    end
+
+    context "with an existing bucket" do
+      before do
+        Dir.mkdir(File.join(s3_path, bucket))
+      end
+
+      it "returns a 200" do
+        response = put_example_file(file_name)
+        expect(response.status).to eq(200)
+      end
+
+      it "returns a correctly constructed response"
+
+      it "creates a file" do
+        put_example_file(file_name)
+        expect(File.read(File.join(s3_path, "/#{bucket}/#{file_name}"))).to eq(file_contents)
+      end
+
+      it "stores the content-type" do
+        put_example_file(file_name)
+
+        metadata = read_example_metadata(file_name)
+        expect(metadata["Content-Type"]).to eq("text/plain")
+      end
+
+      it "stores user-defined metadata" do
+        put_example_file(file_name)
+
+        metadata = read_example_metadata(file_name)
+        expect(metadata["x-amz-meta-example"]).to eq("example metadata")
+      end
+
+      it "creates sub-directories for paths that contain them" do
+        put_example_file("foo/bar/#{file_name}")
+        expect(File.read(File.join(s3_path, "/#{bucket}/foo/bar/#{file_name}"))).to eq(file_contents)
+      end
+
+      it "handles sub-directories that already exist" do
+        FileUtils.mkdir_p(File.join(s3_path, "#{bucket}/foo/bar"))
+        put_example_file("foo/bar/#{file_name}")
+        expect(File.read(File.join(s3_path, "/#{bucket}/foo/bar/#{file_name}"))).to eq(file_contents)
+      end
+    end
+
+    context "without an existing bucket" do
+      it "returns a 404" do
+        response = put_example_file(file_name)
+        expect(response.status).to eq(404)
+      end
+
+      it "returns the correct XML response"
+    end
+  end
+
+  context "GET object" do
+    def get_example_file(key)
+      connection.get(File.join(bucket, key))
+    end
+
+    context "with a file that exists" do
+      before do
+        FileUtils.mkdir(File.join(s3_path, bucket))
+        File.write(File.join(s3_path, bucket, file_name), file_contents)
+      end
+
+      it "returns a 200" do
+        response = get_example_file(file_name)
+        expect(response.status).to eq(200)
+      end
+
+      it "returns a correctly constructed response"
+
+      it "returns the contents of the file" do
+        response = get_example_file(file_name)
+        expect(response.body).to eq(file_contents)
+      end
+
+      it "returns the right content type" do
+        file_metadata = {
+          "Content-Type" => "text/plain"
+        }.to_json
+        File.write(File.join(s3_path, bucket, "#{file_name}.metadata.json"), file_metadata)
+
+        response = get_example_file(file_name)
+        expect(response.headers["Content-Type"]).to eq("text/plain")
+      end
+    end
+
+    context "with a file that doesn't exist" do
+      before do
+        FileUtils.mkdir(File.join(s3_path, bucket))
+      end
+
+      it "returns a 404" do
+        response = get_example_file(file_name)
+        expect(response.status).to eq(404)
+      end
+
+      it "returns the correct XML response"
+    end
+
+    context "with a bucket that doesn't exist" do
+      it "returns the right sort of error" do
+        pending "Need to figure out what error AWS actually returns for this case"
+      end
+    end
+  end
+
+end
+
data/spec/spec_helper.rb
ADDED
metadata
ADDED
@@ -0,0 +1,140 @@
+--- !ruby/object:Gem::Specification
+name: fake_aws
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+platform: ruby
+authors:
+- Pete Yandell
+- Luke Arndt
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2013-11-05 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: bundler
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.3'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.3'
+- !ruby/object:Gem::Dependency
+  name: rake
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: rspec
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: faraday
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: rack-test
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+description: A subset of AWS as a Rack app, for dev and testing
+email:
+- pete@notahat.com
+- luke@arndt.io
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- .gitignore
+- .rspec
+- .travis.yml
+- Gemfile
+- LICENSE.txt
+- README.md
+- Rakefile
+- fake_aws.gemspec
+- lib/fake_aws.rb
+- lib/fake_aws/s3/object_store.rb
+- lib/fake_aws/s3/operations/get_object.rb
+- lib/fake_aws/s3/operations/put_object.rb
+- lib/fake_aws/s3/rack_app.rb
+- lib/fake_aws/s3/xml_error_response.rb
+- lib/fake_aws/version.rb
+- spec/s3/object_store_spec.rb
+- spec/s3/rack_app_spec.rb
+- spec/s3/xml_error_response_spec.rb
+- spec/spec_helper.rb
+homepage: https://github.com/envato/fake_aws
+licenses:
+- MIT
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.0.3
+signing_key:
+specification_version: 4
+summary: A subset of AWS (so far just a small proportion of S3) implemented as a Rack
+  app, useful for development and testing.
+test_files:
+- spec/s3/object_store_spec.rb
+- spec/s3/rack_app_spec.rb
+- spec/s3/xml_error_response_spec.rb
+- spec/spec_helper.rb