fakes3 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ pkg/*
+ *.gem
+ .bundle
+ tmp
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source "http://rubygems.org"
+
+ # Specify your gem's dependencies in fakes3.gemspec
+ gemspec
data/Gemfile.lock ADDED
@@ -0,0 +1,32 @@
+ PATH
+   remote: .
+   specs:
+     fakes3 (0.1.0)
+       builder
+       thor
+
+ GEM
+   remote: http://rubygems.org/
+   specs:
+     aws-s3 (0.6.2)
+       builder
+       mime-types
+       xml-simple
+     builder (2.1.2)
+     mime-types (1.16)
+     right_aws (2.0.0)
+       right_http_connection (>= 1.2.1)
+     right_http_connection (1.2.4)
+     thor (0.14.4)
+     xml-simple (1.0.12)
+
+ PLATFORMS
+   ruby
+
+ DEPENDENCIES
+   aws-s3
+   builder
+   bundler (>= 1.0.0)
+   fakes3!
+   right_aws
+   thor
data/README.md ADDED
@@ -0,0 +1,39 @@
+ ## Introduction
+ FakeS3 is a lightweight server that responds to the same calls Amazon S3 responds to.
+ It is extremely useful for testing S3 in a sandbox environment without actually
+ making calls to Amazon, which not only requires network access but also costs you precious dollars.
+
+ For now there is a basic file store backend.
+
+ FakeS3 doesn't support the full S3 command set, but the basic operations (put, get,
+ list, copy, and make bucket) are supported. More are coming soon.
+
+ ## Installation
+     gem install fakes3
+
+ ## Running
+ To run a fakes3 server, you just specify a root directory and a port.
+
+     fakes3 -r /mnt/fakes3_root -p 4567
+
+ ## Connecting to FakeS3
+
+ Take a look at the test cases for example client usage. For now, FakeS3 is
+ mainly tested with s3cmd, the aws-s3 gem, and right_aws. There are plenty more
+ client libraries out there; please do mention any others you try.
+
+ ## Running Tests
+ In order to run the tests, add the following line to your /etc/hosts:
+
+     127.0.0.1 s3.localhost
+
+ Then start the test server using
+
+     rake test_server
+
+
+ Then in another terminal window run
+
+     rake test
+
+ Getting this down to a single command is a TODO.
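As the README notes, the test cases double as client examples. For illustration, a minimal aws-s3 sketch against a local FakeS3; the credentials are arbitrary placeholders (the server performs no authentication), and the server/port values mirror test/s3_commands_test.rb below:

    require 'aws/s3'

    # Point the aws-s3 gem at the local FakeS3 server instead of Amazon.
    AWS::S3::Base.establish_connection!(
      :access_key_id     => '123',
      :secret_access_key => 'abc',
      :server            => 'localhost',
      :port              => '10453'
    )

    AWS::S3::Bucket.create('mybucket')                     # make bucket
    AWS::S3::S3Object.store('hello', 'world', 'mybucket')  # put
    puts AWS::S3::S3Object.value('hello', 'mybucket')      # get => "world"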
data/Rakefile ADDED
@@ -0,0 +1,14 @@
+ require 'bundler'
+ require 'rake/testtask'
+ include Rake::DSL
+ Bundler::GemHelper.install_tasks
+
+ Rake::TestTask.new(:test) do |t|
+   t.test_files = FileList['test/*_test.rb']
+   t.ruby_opts = ['-rubygems'] if defined? Gem
+   t.ruby_opts << '-I.'
+ end
+
+ task :test_server do |t|
+   system("bundle exec bin/fakes3 --port 10453 --root test_root")
+ end
data/bin/fakes3 ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env ruby
+ require 'fakes3/cli'
+ FakeS3::CLI.start
data/fakes3.gemspec ADDED
@@ -0,0 +1,29 @@
+ # -*- encoding: utf-8 -*-
+ $:.push File.expand_path("../lib", __FILE__)
+ require "fakes3/version"
+
+ Gem::Specification.new do |s|
+   s.name        = "fakes3"
+   s.version     = FakeS3::VERSION
+   s.platform    = Gem::Platform::RUBY
+   s.authors     = ["Curtis Spencer"]
+   s.email       = ["thorin@gmail.com"]
+   s.homepage    = ""
+   s.summary     = %q{FakeS3 is a server that simulates S3 commands so you can test your S3 functionality in your projects}
+   s.description = %q{Use FakeS3 to test basic S3 functionality without actually connecting to S3}
+
+   s.rubyforge_project = "fakes3"
+
+   s.add_development_dependency "bundler", ">= 1.0.0"
+   s.add_development_dependency "aws-s3"
+   s.add_development_dependency "right_aws"
+   #s.add_development_dependency "aws-sdk"
+   #s.add_development_dependency "ruby-debug19"
+   s.add_dependency "thor"
+   s.add_dependency "builder"
+
+   s.files         = `git ls-files`.split("\n")
+   s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
+   s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
+   s.require_paths = ["lib"]
+ end
data/lib/fakes3.rb ADDED
@@ -0,0 +1,3 @@
+ require 'fakes3/version'
+ require 'fakes3/file_store'
+ require 'fakes3/server'
data/lib/fakes3/bucket.rb ADDED
@@ -0,0 +1,18 @@
+ require 'builder'
+ require 'fakes3/s3_object'
+
+ module FakeS3
+   class Bucket
+     attr_accessor :name,:creation_date,:objects
+
+     def initialize(name,creation_date,objects)
+       @name = name
+       @creation_date = creation_date
+       @objects = []
+       objects.each do |obj|
+         @objects << obj
+       end
+     end
+
+   end
+ end
data/lib/fakes3/cli.rb ADDED
@@ -0,0 +1,59 @@
+ require 'thor'
+ require 'fakes3/server'
+ require 'fakes3/version'
+
+ module FakeS3
+   class CLI < Thor
+     default_task("server")
+
+     desc "server", "Run a server on a particular hostname"
+     method_option :root, :type => :string, :aliases => '-r', :required => true
+     method_option :port, :type => :numeric, :aliases => '-p', :required => true
+     method_option :hostname, :type => :string, :aliases => '-h', :desc => "The root name of the host. Defaults to s3.amazonaws.com."
+     method_option :limit, :aliases => '-l', :type => :string, :desc => 'Rate limit for serving (ie. 50K, 1.0M)'
+     def server
+       store = nil
+       if options[:root]
+         root = File.expand_path(options[:root])
+         store = FileStore.new(root)
+       end
+
+       if store.nil?
+         puts "You must specify a root to use a file store (the current default)"
+         exit(-1)
+       end
+
+       hostname = 's3.amazonaws.com'
+       if options[:hostname]
+         hostname = options[:hostname]
+         # In case the user has put a port on the hostname
+         if hostname =~ /:(\d+)/
+           hostname = hostname.split(":")[0]
+         end
+       end
+
+       if options[:limit]
+         begin
+           store.rate_limit = options[:limit]
+         rescue
+           puts $!.message
+           exit(-1)
+         end
+       end
+
+       puts "Loading FakeS3 with #{root} on port #{options[:port]} with hostname #{hostname}"
+       server = FakeS3::Server.new(options[:port],store,hostname)
+       server.serve
+     end
+
+     desc "version", "Report the current fakes3 version"
+     def version
+       puts <<"EOF"
+ ======================
+ FakeS3 #{FakeS3::VERSION}
+
+ Copyright 2012, Curtis Spencer (@jubos)
+ EOF
+     end
+   end
+ end
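Given the Thor options above, invocations look like the following (paths and hostname are illustrative; "server" is the default task, so it may be omitted):

    fakes3 -r /mnt/fakes3_root -p 4567
    fakes3 server -r /mnt/fakes3_root -p 4567 --limit 50K           # throttle serving to ~50 KB/s
    fakes3 server -r /mnt/fakes3_root -p 4567 --hostname s3.my.dev  # custom root hostname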
data/lib/fakes3/file_store.rb ADDED
@@ -0,0 +1,153 @@
+ require 'fileutils'
+ require 'time'
+ require 'fakes3/s3_object'
+ require 'fakes3/bucket'
+ require 'fakes3/rate_limitable_file'
+ require 'digest/md5'
+ require 'yaml'
+
+ module FakeS3
+   class FileStore
+     SHUCK_METADATA_DIR = ".fakes3_metadataFFF"
+
+     def initialize(root)
+       @root = root
+       @buckets = []
+       @bucket_hash = {}
+       Dir[File.join(root,"*")].each do |bucket|
+         bucket_name = File.basename(bucket)
+         bucket_obj = Bucket.new(bucket_name,Time.now,[])
+         @buckets << bucket_obj
+         @bucket_hash[bucket_name] = bucket_obj
+       end
+     end
+
+     # Pass a rate limit in bytes per second
+     def rate_limit=(rate_limit)
+       if rate_limit.is_a?(String)
+         if rate_limit =~ /^(\d+)$/
+           RateLimitableFile.rate_limit = rate_limit.to_i
+         elsif rate_limit =~ /^(.*)K$/
+           RateLimitableFile.rate_limit = $1.to_f * 1000
+         elsif rate_limit =~ /^(.*)M$/
+           RateLimitableFile.rate_limit = $1.to_f * 1000000
+         elsif rate_limit =~ /^(.*)G$/
+           RateLimitableFile.rate_limit = $1.to_f * 1000000000
+         else
+           raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
+         end
+       else
+         RateLimitableFile.rate_limit = nil
+       end
+     end
+
+     def buckets
+       @buckets
+     end
+
+     def get_bucket(bucket)
+       @bucket_hash[bucket]
+     end
+
+     def create_bucket(bucket)
+       FileUtils.mkdir_p(File.join(@root,bucket))
+       bucket_obj = Bucket.new(bucket,Time.now,[])
+       if !@bucket_hash[bucket]
+         @buckets << bucket_obj
+         @bucket_hash[bucket] = bucket_obj
+       end
+     end
+
+     def get_object(bucket,object, request)
+       begin
+         real_obj = S3Object.new
+         obj_root = File.join(@root,bucket,object,SHUCK_METADATA_DIR)
+         metadata = YAML.parse(File.open(File.join(obj_root,"metadata"),'rb').read)
+         real_obj.name = object
+         real_obj.md5 = metadata[:md5].value
+         real_obj.content_type = metadata[:content_type] ? metadata[:content_type].value : "application/octet-stream"
+         #real_obj.io = File.open(File.join(obj_root,"content"),'rb')
+         real_obj.io = RateLimitableFile.open(File.join(obj_root,"content"),'rb')
+         return real_obj
+       rescue
+         puts $!
+         return nil
+       end
+     end
+
+     def object_metadata(bucket,object)
+     end
+
+     def copy_object(src_bucket,src_object,dst_bucket,dst_object)
+       src_root = File.join(@root,src_bucket,src_object,SHUCK_METADATA_DIR)
+       src_obj = S3Object.new
+       src_metadata_filename = File.join(src_root,"metadata")
+       src_metadata = YAML.parse(File.open(src_metadata_filename,'rb').read)
+       src_content_filename = File.join(src_root,"content")
+
+       dst_filename = File.join(@root,dst_bucket,dst_object)
+       FileUtils.mkdir_p(dst_filename)
+
+       metadata_dir = File.join(dst_filename,SHUCK_METADATA_DIR)
+       FileUtils.mkdir_p(metadata_dir)
+
+       content = File.join(metadata_dir,"content")
+       metadata = File.join(metadata_dir,"metadata")
+
+       File.open(content,'wb') do |f|
+         File.open(src_content_filename,'rb') do |input|
+           f << input.read
+         end
+       end
+
+       File.open(metadata,'w') do |f|
+         File.open(src_metadata_filename,'r') do |input|
+           f << input.read
+         end
+       end
+
+       obj = S3Object.new
+       obj.md5 = src_metadata[:md5]
+       obj.content_type = src_metadata[:content_type]
+       return obj
+     end
+
+     def store_object(bucket,object,request)
+       begin
+         filename = File.join(@root,bucket,object)
+         FileUtils.mkdir_p(filename)
+
+         metadata_dir = File.join(filename,SHUCK_METADATA_DIR)
+         FileUtils.mkdir_p(metadata_dir)
+
+         content = File.join(filename,SHUCK_METADATA_DIR,"content")
+         metadata = File.join(filename,SHUCK_METADATA_DIR,"metadata")
+
+         md5 = Digest::MD5.new
+
+         File.open(content,'wb') do |f|
+           request.body do |chunk|
+             f << chunk
+             md5 << chunk
+           end
+         end
+
+         metadata_struct = {}
+         metadata_struct[:md5] = md5.hexdigest
+         metadata_struct[:content_type] = request.header["content-type"].first
+
+         File.open(metadata,'w') do |f|
+           f << YAML::dump(metadata_struct)
+         end
+         obj = S3Object.new
+         obj.md5 = metadata_struct[:md5]
+         obj.content_type = metadata_struct[:content_type]
+         return obj
+       rescue
+         puts $!
+         $!.backtrace.each { |line| puts line }
+         return nil
+       end
+     end
+   end
+ end
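A sketch of the on-disk layout produced by create_bucket and store_object above (bucket and key names are made up): each bucket and object key maps to nested directories under the root, with the payload and its YAML metadata shucked into SHUCK_METADATA_DIR:

    /mnt/fakes3_root/
      mybucket/                    # one directory per bucket
        some/key/                  # each object key becomes a directory path
          .fakes3_metadataFFF/     # SHUCK_METADATA_DIR
            content                # raw object bytes
            metadata               # YAML hash with :md5 and :content_type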
data/lib/fakes3/rate_limitable_file.rb ADDED
@@ -0,0 +1,21 @@
+ module FakeS3
+   class RateLimitableFile < File
+     @@rate_limit = nil
+     # Specify a rate limit in bytes per second
+     def self.rate_limit
+       @@rate_limit
+     end
+
+     def self.rate_limit=(rate_limit)
+       @@rate_limit = rate_limit
+     end
+
+     def read(args)
+       if @@rate_limit
+         time_to_sleep = args / @@rate_limit
+         sleep(time_to_sleep)
+       end
+       return super(args)
+     end
+   end
+ end
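The throttling arithmetic is simply bytes requested divided by the rate. A hedged sketch of the numbers (the 50K figure is illustrative and mirrors the CLI's --limit parsing in file_store.rb, which multiplies by 1000 and yields a Float):

    FakeS3::RateLimitableFile.rate_limit = 50.0 * 1000   # what `--limit 50K` sets
    # Each read(n) now sleeps n / 50_000.0 seconds before reading, so a
    # 4096-byte chunk waits ~0.08s, capping throughput near 50 KB/s.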
data/lib/fakes3/s3_object.rb ADDED
@@ -0,0 +1,5 @@
+ module FakeS3
+   class S3Object
+     attr_accessor :name,:size,:creation_date,:md5,:io,:content_type
+   end
+ end
data/lib/fakes3/server.rb ADDED
@@ -0,0 +1,264 @@
+ require 'webrick'
+ require 'fakes3/file_store'
+ require 'fakes3/xml_adapter'
+
+ module FakeS3
+   class Request
+     CREATE_BUCKET = "CREATE_BUCKET"
+     LIST_BUCKETS = "LIST_BUCKETS"
+     LS_BUCKET = "LS_BUCKET"
+     STORE = "STORE"
+     COPY = "COPY"
+     GET = "GET"
+     GET_ACL = "GET_ACL"
+     SET_ACL = "SET_ACL"
+     MOVE = "MOVE"
+     DELETE = "DELETE"
+
+     attr_accessor :bucket,:object,:type,:src_bucket,:src_object,:method,:webrick_request,:path,:is_path_style
+
+     def inspect
+       puts "-----Inspect FakeS3 Request"
+       puts "Type: #{@type}"
+       puts "Is Path Style: #{@is_path_style}"
+       puts "Request Method: #{@method}"
+       puts "Bucket: #{@bucket}"
+       puts "Object: #{@object}"
+       puts "Src Bucket: #{@src_bucket}"
+       puts "Src Object: #{@src_object}"
+       puts "-----Done"
+     end
+   end
+
+   class Servlet < WEBrick::HTTPServlet::AbstractServlet
+     def initialize(server,store,hostname)
+       super(server)
+       @store = store
+       @hostname = hostname
+       @root_hostnames = [hostname,'localhost','s3.amazonaws.com','s3.localhost']
+     end
+
+     def do_GET(request, response)
+       s_req = normalize_request(request)
+
+       case s_req.type
+       when 'LIST_BUCKETS'
+         response.status = 200
+         response['Content-Type'] = 'application/xml'
+         buckets = @store.buckets
+         response.body = XmlAdapter.buckets(buckets)
+       when 'LS_BUCKET'
+         bucket_obj = @store.get_bucket(s_req.bucket)
+         if bucket_obj
+           response.status = 200
+           response.body = XmlAdapter.bucket(bucket_obj)
+           response['Content-Type'] = "application/xml"
+         else
+           response.status = 404
+           response.body = XmlAdapter.error_no_such_bucket(s_req.bucket)
+           response['Content-Type'] = "application/xml"
+         end
+       when 'GET_ACL'
+         response.status = 200
+         response.body = XmlAdapter.acl()
+         response['Content-Type'] = 'application/xml'
+       when 'GET'
+         real_obj = @store.get_object(s_req.bucket,s_req.object,request)
+         if !real_obj
+           response.status = 404
+           response.body = ""
+           return
+         end
+
+         response.status = 200
+         response['Content-Type'] = real_obj.content_type
+         content_length = File::Stat.new(real_obj.io.path).size
+         response['Etag'] = real_obj.md5
+         response['Accept-Ranges'] = "bytes"
+
+         # Added Range Query support
+         if range = request.header["range"].first
+           response.status = 206
+           if range =~ /bytes=(\d*)-(\d*)/
+             start = $1.to_i
+             finish = $2.to_i
+             finish_str = ""
+             if finish == 0
+               finish = content_length - 1
+               finish_str = "#{finish}"
+             else
+               finish_str = finish.to_s
+             end
+
+             bytes_to_read = finish - start + 1
+             response['Content-Range'] = "bytes #{start}-#{finish_str}/#{content_length}"
+             real_obj.io.pos = start
+             response.body = real_obj.io.read(bytes_to_read)
+             return
+           end
+         end
+         response['Content-Length'] = File::Stat.new(real_obj.io.path).size
+         response.body = real_obj.io
+       end
+     end
+
+     def do_PUT(request,response)
+       s_req = normalize_request(request)
+
+
+       case s_req.type
+       when Request::COPY
+         @store.copy_object(s_req.src_bucket,s_req.src_object,s_req.bucket,s_req.object)
+       when Request::STORE
+         real_obj = @store.store_object(s_req.bucket,s_req.object,s_req.webrick_request)
+         response['Etag'] = real_obj.md5
+       when Request::CREATE_BUCKET
+         @store.create_bucket(s_req.bucket)
+       end
+
+       response.status = 200
+       response.body = ""
+       response['Content-Type'] = "text/xml"
+     end
+
+     def do_POST(request,response)
+       p request
+     end
+
+     def do_DELETE(request,response)
+       p request
+     end
+
+     private
+
+     def normalize_get(webrick_req,s_req)
+       path = webrick_req.path
+       path_len = path.size
+       query = webrick_req.query
+       if path == "/" and s_req.is_path_style
+         s_req.type = Request::LIST_BUCKETS
+       else
+         if s_req.is_path_style
+           elems = path[1,path_len].split("/")
+           s_req.bucket = elems[0]
+         else
+           elems = path.split("/")
+         end
+
+         if elems.size == 0
+           # List buckets
+           s_req.type = Request::LIST_BUCKETS
+         elsif elems.size == 1
+           s_req.type = Request::LS_BUCKET
+         else
+           if query["acl"] == ""
+             s_req.type = Request::GET_ACL
+           else
+             s_req.type = Request::GET
+           end
+           object = elems[1,elems.size].join('/')
+           s_req.object = object
+         end
+       end
+     end
+
+     def normalize_put(webrick_req,s_req)
+       path = webrick_req.path
+       path_len = path.size
+       if path == "/"
+         if s_req.bucket
+           s_req.type = Request::CREATE_BUCKET
+         end
+       else
+         if s_req.is_path_style
+           elems = path[1,path_len].split("/")
+           s_req.bucket = elems[0]
+           if elems.size == 1
+             s_req.type = Request::CREATE_BUCKET
+           else
+             if webrick_req.request_line =~ /\?acl/
+               s_req.type = Request::SET_ACL
+             else
+               s_req.type = Request::STORE
+             end
+             s_req.object = elems[1,elems.size].join('/')
+           end
+         else
+           if webrick_req.request_line =~ /\?acl/
+             s_req.type = Request::SET_ACL
+           else
+             s_req.type = Request::STORE
+           end
+           s_req.object = webrick_req.path
+         end
+       end
+
+       copy_source = webrick_req.header["x-amz-copy-source"]
+       if copy_source and copy_source.size == 1
+         src_elems = copy_source.first.split("/")
+         root_offset = src_elems[0] == "" ? 1 : 0
+         s_req.src_bucket = src_elems[root_offset]
+         s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
+         s_req.type = Request::COPY
+       end
+
+       s_req.webrick_request = webrick_req
+     end
+
+     # This method takes a webrick request and generates a normalized FakeS3 request
+     def normalize_request(webrick_req)
+       host_header = webrick_req["Host"]
+       host = host_header.split(':')[0]
+
+       s_req = Request.new
+       s_req.path = webrick_req.path
+       s_req.is_path_style = true
+
+       if !@root_hostnames.include?(host)
+         s_req.bucket = host.split(".")[0]
+         s_req.is_path_style = false
+       end
+
+       case webrick_req.request_method
+       when 'PUT'
+         normalize_put(webrick_req,s_req)
+       when 'GET'
+         normalize_get(webrick_req,s_req)
+       else
+         raise "Unknown Request"
+       end
+
+       return s_req
+     end
+
+     def dump_request(request)
+       puts "----------Dump Request-------------"
+       puts request.request_method
+       puts request.path
+       request.each do |k,v|
+         puts "#{k}:#{v}"
+       end
+       puts "----------End Dump -------------"
+     end
+   end
+
+
+   class Server
+     def initialize(port,store,hostname)
+       @port = port
+       @store = store
+       @hostname = hostname
+     end
+
+     def serve
+       @server = WEBrick::HTTPServer.new(:Port => @port)
+       @server.mount "/", Servlet, @store,@hostname
+       trap "INT" do @server.shutdown end
+       @server.start
+     end
+
+     def shutdown
+       @server.shutdown
+     end
+   end
+ end
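normalize_request above supports both S3 addressing styles by checking the Host header against @root_hostnames. Two illustrative requests (bucket and key names are made up) that resolve identically:

    # Path style: the host is a root hostname, so the bucket is the first path element
    GET http://localhost:4567/mybucket/some/key   ->  bucket "mybucket", object "some/key"

    # Virtual-host style: the host is not a root hostname, so its first label is the bucket
    GET http://mybucket.localhost:4567/some/key   ->  bucket "mybucket", object "some/key"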
data/lib/fakes3/version.rb ADDED
@@ -0,0 +1,3 @@
+ module FakeS3
+   VERSION = "0.1.0"
+ end
data/lib/fakes3/xml_adapter.rb ADDED
@@ -0,0 +1,98 @@
+ require 'builder'
+ require 'time'
+
+ module FakeS3
+   class XmlAdapter
+     def self.buckets(bucket_objects)
+       output = ""
+       xml = Builder::XmlMarkup.new(:target => output)
+       xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+       xml.ListAllMyBucketsResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lam|
+         lam.Owner { |owner|
+           owner.ID("123")
+           owner.DisplayName("FakeS3")
+         }
+         lam.Buckets { |buckets|
+           bucket_objects.each do |bucket|
+             buckets.Bucket do |b|
+               b.Name(bucket.name)
+               b.CreationDate(bucket.creation_date.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
+             end
+           end
+         }
+       }
+       output
+     end
+
+     # <?xml version="1.0" encoding="UTF-8"?>
+     # <Error>
+     #   <Code>NoSuchKey</Code>
+     #   <Message>The resource you requested does not exist</Message>
+     #   <Resource>/mybucket/myfoto.jpg</Resource>
+     #   <RequestId>4442587FB7D0A2F9</RequestId>
+     # </Error>
+     def self.error_no_such_bucket(name)
+       output = ""
+       xml = Builder::XmlMarkup.new(:target => output)
+       xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+       xml.Error { |err|
+         err.Code("NoSuchBucket")
+         err.Message("The resource you requested does not exist")
+         err.Resource(name)
+         err.RequestId(1)
+       }
+       output
+     end
+
+     def self.error_no_such_key(name)
+       output = ""
+       xml = Builder::XmlMarkup.new(:target => output)
+       xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+       xml.Error { |err|
+         err.Code("NoSuchKey")
+         err.Message("The specified key does not exist")
+         err.Key(name)
+         err.RequestId(1)
+         err.HostId(2)
+       }
+       output
+     end
+
+     def self.bucket(bucket)
+       output = ""
+       xml = Builder::XmlMarkup.new(:target => output)
+       xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+       xml.ListBucketResult(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |lbr|
+         lbr.Name(bucket.name)
+         lbr.Prefix
+         lbr.Marker
+         lbr.MaxKeys("1000")
+         lbr.IsTruncated("false")
+       }
+       output
+     end
+
+     # ACL xml
+     def self.acl(object = nil)
+       output = ""
+       xml = Builder::XmlMarkup.new(:target => output)
+       xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+       xml.AccessControlPolicy(:xmlns => "http://s3.amazonaws.com/doc/2006-03-01/") { |acp|
+         acp.Owner do |owner|
+           owner.ID("abc")
+           owner.DisplayName("You")
+         end
+         acp.AccessControlList do |acl|
+           acl.Grant do |grant|
+             grant.Grantee("xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance", "xsi:type" => "CanonicalUser") do |grantee|
+               grantee.ID("abc")
+               grantee.DisplayName("You")
+             end
+             grant.Permission("FULL_CONTROL")
+           end
+         end
+       }
+       output
+     end
+   end
+ end
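For reference, XmlAdapter.buckets renders output shaped like the following (bucket name and date are made up, and whitespace is added here for readability; Builder emits it without indentation since no :indent option is given):

    <?xml version="1.0" encoding="UTF-8"?>
    <ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
      <Owner><ID>123</ID><DisplayName>FakeS3</DisplayName></Owner>
      <Buckets>
        <Bucket>
          <Name>mybucket</Name>
          <CreationDate>2012-04-13T00:00:00.000Z</CreationDate>
        </Bucket>
      </Buckets>
    </ListAllMyBucketsResult>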
data/test/local_s3_cfg ADDED
@@ -0,0 +1,34 @@
+ [default]
+ access_key = abc
+ acl_public = False
+ bucket_location = US
+ cloudfront_host = cloudfront.amazonaws.com
+ cloudfront_resource = /2008-06-30/distribution
+ default_mime_type = binary/octet-stream
+ delete_removed = False
+ dry_run = False
+ encoding = UTF-8
+ encrypt = False
+ force = False
+ get_continue = False
+ gpg_command = None
+ gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+ gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
+ gpg_passphrase =
+ guess_mime_type = True
+ host_base = localhost:10453
+ host_bucket = %(bucket)s.localhost:10453
+ human_readable_sizes = False
+ list_md5 = False
+ preserve_attrs = True
+ progress_meter = True
+ proxy_host =
+ proxy_port = 0
+ recursive = False
+ recv_chunk = 4096
+ secret_key = def
+ send_chunk = 4096
+ simpledb_host = sdb.amazonaws.com
+ skip_existing = False
+ use_https = False
+ verbosity = WARNING
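This config points s3cmd at the test server (host_base localhost:10453, HTTPS off, dummy keys). s3cmd_test.rb below drives it exactly this way, for example:

    s3cmd --config test/local_s3_cfg mb s3://s3cmd_bucket
    s3cmd --config test/local_s3_cfg ls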
data/test/right_aws_commands_test.rb ADDED
@@ -0,0 +1,63 @@
+ require 'test/test_helper'
+ require 'fileutils'
+ require 'fakes3/server'
+ require 'right_aws'
+
+ class RightAWSCommandsTest < Test::Unit::TestCase
+
+   def setup
+     @s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
+             {:multi_thread => false, :server => 'localhost',
+              :port => 10453, :protocol => 'http',:logger => Logger.new("/dev/null") })
+   end
+
+   def teardown
+   end
+
+   def test_create_bucket
+     bucket = @s3.create_bucket("s3media")
+     assert_not_nil bucket
+   end
+
+   def test_store
+     @s3.put("s3media","helloworld","Hello World Man!")
+     obj = @s3.get("s3media","helloworld")
+     assert_equal "Hello World Man!",obj[:object]
+
+     obj = @s3.get("s3media","helloworld")
+   end
+
+   def test_large_store
+     @s3.put("s3media","helloworld","Hello World Man!")
+     buffer = ""
+     500000.times do
+       buffer << "#{(rand * 100).to_i}"
+     end
+
+     buf_len = buffer.length
+     @s3.put("s3media","big",buffer)
+
+     output = ""
+     @s3.get("s3media","big") do |chunk|
+       output << chunk
+     end
+     assert_equal buf_len,output.size
+   end
+
+   def test_multi_directory
+     @s3.put("s3media","dir/right/123.txt","recursive")
+     output = ""
+     obj = @s3.get("s3media","dir/right/123.txt") do |chunk|
+       output << chunk
+     end
+     assert_equal "recursive", output
+   end
+
+   def test_intra_bucket_copy
+     @s3.put("s3media","original.txt","Hello World")
+     @s3.copy("s3media","original.txt","s3media","copy.txt")
+     obj = @s3.get("s3media","copy.txt")
+     assert_equal "Hello World",obj[:object]
+   end
+
+ end
data/test/s3_commands_test.rb ADDED
@@ -0,0 +1,69 @@
+ require 'test/test_helper'
+ require 'fileutils'
+ require 'fakes3/server'
+ require 'aws/s3'
+
+ class S3CommandsTest < Test::Unit::TestCase
+   include AWS::S3
+
+   def setup
+     AWS::S3::Base.establish_connection!(:access_key_id => "123", :secret_access_key => "abc", :server => "localhost", :port => "10453" )
+   end
+
+   def teardown
+     AWS::S3::Base.disconnect!
+   end
+
+   def test_create_bucket
+     bucket = Bucket.create("mybucket")
+     assert_not_nil bucket
+   end
+
+   def test_store
+     bucket = Bucket.create("mybucket")
+     S3Object.store("hello","world","mybucket")
+
+     output = ""
+     obj = S3Object.stream("hello","mybucket") do |chunk|
+       output << chunk
+     end
+     assert_equal "world", output
+   end
+
+   def test_large_store
+     bucket = Bucket.create("mybucket")
+     buffer = ""
+     500000.times do
+       buffer << "#{(rand * 100).to_i}"
+     end
+
+     buf_len = buffer.length
+     S3Object.store("big",buffer,"mybucket")
+
+     output = ""
+     S3Object.stream("big","mybucket") do |chunk|
+       output << chunk
+     end
+     assert_equal buf_len,output.size
+   end
+
+   def test_multi_directory
+     bucket = Bucket.create("mybucket")
+     S3Object.store("dir/myfile/123.txt","recursive","mybucket")
+
+     output = ""
+     obj = S3Object.stream("dir/myfile/123.txt","mybucket") do |chunk|
+       output << chunk
+     end
+     assert_equal "recursive", output
+   end
+
+   def test_find_nil_bucket
+     begin
+       bucket = Bucket.find("unknown")
+       assert_fail "Bucket.find didn't throw an exception"
+     rescue
+       assert_equal AWS::S3::NoSuchBucket,$!.class
+     end
+   end
+ end
data/test/s3cmd_test.rb ADDED
@@ -0,0 +1,55 @@
+ require 'test/test_helper'
+ require 'fileutils'
+ require 'fakes3/server'
+
+ # You need to have s3cmd installed to use this
+ class S3CmdTest < Test::Unit::TestCase
+
+   def setup
+     config = File.expand_path(File.join(File.dirname(__FILE__),'local_s3_cfg'))
+     @s3cmd = "s3cmd --config #{config}"
+   end
+
+   def teardown
+   end
+
+   def test_create_bucket
+     `#{@s3cmd} mb s3://s3cmd_bucket`
+     output = `#{@s3cmd} ls`
+     assert_match(/s3cmd_bucket/,output)
+   end
+
+   def test_store
+     File.open(__FILE__,'rb') do |input|
+       File.open("/tmp/fakes3_upload",'wb') do |output|
+         output << input.read
+       end
+     end
+     output = `#{@s3cmd} put /tmp/fakes3_upload s3://s3cmd_bucket/upload`
+     assert_match(/stored/,output)
+
+     FileUtils.rm("/tmp/fakes3_upload")
+   end
+
+   def test_acl
+     File.open(__FILE__,'rb') do |input|
+       File.open("/tmp/fakes3_acl_upload",'wb') do |output|
+         output << input.read
+       end
+     end
+     output = `#{@s3cmd} put /tmp/fakes3_acl_upload s3://s3cmd_bucket/acl_upload`
+     assert_match(/stored/,output)
+
+     output = `#{@s3cmd} --force setacl -P s3://s3cmd_bucket/acl_upload`
+   end
+
+   def test_large_store
+   end
+
+   def test_multi_directory
+   end
+
+   def test_intra_bucket_copy
+   end
+
+ end
data/test/test_helper.rb ADDED
@@ -0,0 +1,8 @@
+ require 'test/unit'
+ require 'bundler/setup'
+
+ testdir = File.dirname(__FILE__)
+ $LOAD_PATH.unshift testdir unless $LOAD_PATH.include?(testdir)
+
+ libdir = File.dirname(File.dirname(__FILE__)) + '/lib'
+ $LOAD_PATH.unshift libdir unless $LOAD_PATH.include?(libdir)
metadata ADDED
@@ -0,0 +1,155 @@
+ --- !ruby/object:Gem::Specification
+ name: fakes3
+ version: !ruby/object:Gem::Version
+   prerelease: false
+   segments:
+   - 0
+   - 1
+   - 0
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Curtis Spencer
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2012-04-13 00:00:00 -07:00
+ default_executable:
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   prerelease: false
+   requirement: &id001 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         segments:
+         - 1
+         - 0
+         - 0
+         version: 1.0.0
+   type: :development
+   version_requirements: *id001
+ - !ruby/object:Gem::Dependency
+   name: aws-s3
+   prerelease: false
+   requirement: &id002 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         segments:
+         - 0
+         version: "0"
+   type: :development
+   version_requirements: *id002
+ - !ruby/object:Gem::Dependency
+   name: right_aws
+   prerelease: false
+   requirement: &id003 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         segments:
+         - 0
+         version: "0"
+   type: :development
+   version_requirements: *id003
+ - !ruby/object:Gem::Dependency
+   name: thor
+   prerelease: false
+   requirement: &id004 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         segments:
+         - 0
+         version: "0"
+   type: :runtime
+   version_requirements: *id004
+ - !ruby/object:Gem::Dependency
+   name: builder
+   prerelease: false
+   requirement: &id005 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         segments:
+         - 0
+         version: "0"
+   type: :runtime
+   version_requirements: *id005
+ description: Use FakeS3 to test basic S3 functionality without actually connecting to S3
+ email:
+ - thorin@gmail.com
+ executables:
+ - fakes3
+ extensions: []
+
+ extra_rdoc_files: []
+
+ files:
+ - .gitignore
+ - Gemfile
+ - Gemfile.lock
+ - README.md
+ - Rakefile
+ - bin/fakes3
+ - fakes3.gemspec
+ - lib/fakes3.rb
+ - lib/fakes3/bucket.rb
+ - lib/fakes3/cli.rb
+ - lib/fakes3/file_store.rb
+ - lib/fakes3/rate_limitable_file.rb
+ - lib/fakes3/s3_object.rb
+ - lib/fakes3/server.rb
+ - lib/fakes3/version.rb
+ - lib/fakes3/xml_adapter.rb
+ - test/local_s3_cfg
+ - test/right_aws_commands_test.rb
+ - test/s3_commands_test.rb
+ - test/s3cmd_test.rb
+ - test/test_helper.rb
+ has_rdoc: true
+ homepage: ""
+ licenses: []
+
+ post_install_message:
+ rdoc_options: []
+
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       segments:
+       - 0
+       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       segments:
+       - 0
+       version: "0"
+ requirements: []
+
+ rubyforge_project: fakes3
+ rubygems_version: 1.3.7
+ signing_key:
+ specification_version: 3
+ summary: FakeS3 is a server that simulates S3 commands so you can test your S3 functionality in your projects
+ test_files:
+ - test/local_s3_cfg
+ - test/right_aws_commands_test.rb
+ - test/s3_commands_test.rb
+ - test/s3cmd_test.rb
+ - test/test_helper.rb