fakes3test10 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,320 @@
1
+ require 'fileutils'
2
+ require 'time'
3
+ require 'fakes3/s3_object'
4
+ require 'fakes3/bucket'
5
+ require 'fakes3/rate_limitable_file'
6
+ require 'digest/md5'
7
+ require 'yaml'
8
+
9
+ module FakeS3
10
+ class FileStore
11
+ FAKE_S3_METADATA_DIR = ".fakes3_metadataFFF"
12
+
13
+ # S3 clients with overly strict date parsing fails to parse ISO 8601 dates
14
+ # without any sub second precision (e.g. jets3t v0.7.2), and the examples
15
+ # given in the official AWS S3 documentation specify three (3) decimals for
16
+ # sub second precision.
17
+ SUBSECOND_PRECISION = 3
18
+
19
+ def initialize(root, quiet_mode)
20
+ @root = root
21
+ @buckets = []
22
+ @bucket_hash = {}
23
+ @quiet_mode = quiet_mode
24
+ Dir[File.join(root,"*")].each do |bucket|
25
+ bucket_name = File.basename(bucket)
26
+ bucket_obj = Bucket.new(bucket_name,Time.now,[])
27
+ @buckets << bucket_obj
28
+ @bucket_hash[bucket_name] = bucket_obj
29
+ end
30
+ end
31
+
32
+ # Pass a rate limit in bytes per second
33
+ def rate_limit=(rate_limit)
34
+ if rate_limit.is_a?(String)
35
+ if rate_limit =~ /^(\d+)$/
36
+ RateLimitableFile.rate_limit = rate_limit.to_i
37
+ elsif rate_limit =~ /^(.*)K$/
38
+ RateLimitableFile.rate_limit = $1.to_f * 1000
39
+ elsif rate_limit =~ /^(.*)M$/
40
+ RateLimitableFile.rate_limit = $1.to_f * 1000000
41
+ elsif rate_limit =~ /^(.*)G$/
42
+ RateLimitableFile.rate_limit = $1.to_f * 1000000000
43
+ else
44
+ raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
45
+ end
46
+ else
47
+ RateLimitableFile.rate_limit = nil
48
+ end
49
+ end
50
+
51
    # All buckets currently known to the store, in discovery/creation order.
    def buckets
      @buckets
    end
54
+
55
+ def get_bucket_folder(bucket)
56
+ File.join(@root, bucket.name)
57
+ end
58
+
59
+ def get_bucket(bucket)
60
+ @bucket_hash[bucket]
61
+ end
62
+
63
+ def create_bucket(bucket)
64
+ FileUtils.mkdir_p(File.join(@root, bucket))
65
+ bucket_obj = Bucket.new(bucket, Time.now, [])
66
+ if !@bucket_hash[bucket]
67
+ @buckets << bucket_obj
68
+ @bucket_hash[bucket] = bucket_obj
69
+ end
70
+ bucket_obj
71
+ end
72
+
73
+ def delete_bucket(bucket_name)
74
+ bucket = get_bucket(bucket_name)
75
+ raise NoSuchBucket if !bucket
76
+ raise BucketNotEmpty if bucket.objects.count > 0
77
+ FileUtils.rm_r(get_bucket_folder(bucket))
78
+ @bucket_hash.delete(bucket_name)
79
+ end
80
+
81
    # Load the object at <bucket>/<object_name>: metadata comes from the
    # YAML sidecar file, content is opened as a RateLimitableFile.
    # Returns an S3Object, or nil on any error (e.g. missing key), logging
    # the exception unless @quiet_mode.
    def get_object(bucket, object_name, request)
      begin
        real_obj = S3Object.new
        obj_root = File.join(@root,bucket,object_name,FAKE_S3_METADATA_DIR)
        metadata = File.open(File.join(obj_root, "metadata")) { |file| YAML::load(file) }
        real_obj.name = object_name
        real_obj.md5 = metadata[:md5]
        # response-content-* query params override the stored values.
        real_obj.content_type = request.query['response-content-type'] ||
          metadata.fetch(:content_type) { "application/octet-stream" }
        real_obj.content_disposition = request.query['response-content-disposition'] ||
          metadata[:content_disposition]
        # NOTE(review): fetch without a default raises KeyError when the
        # stored metadata predates :content_encoding; the rescue below then
        # maps the whole lookup to nil (a 404). Confirm this is intended.
        real_obj.content_encoding = metadata.fetch(:content_encoding) # if metadata.fetch(:content_encoding)
        real_obj.io = RateLimitableFile.open(File.join(obj_root, "content"), 'rb')
        real_obj.size = metadata.fetch(:size) { 0 }
        real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
        real_obj.modified_date = metadata.fetch(:modified_date) do
          File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
        end
        real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
        return real_obj
      rescue
        unless @quiet_mode
          puts $!
          $!.backtrace.each { |line| puts line }
        end
        return nil
      end
    end
109
+
110
    # TODO: unimplemented stub — always returns nil.
    def object_metadata(bucket, object)
    end
112
+
113
    # Copy <src_bucket>/<src_name> to <dst_bucket>/<dst_name>: duplicates
    # the content and metadata sidecar files, optionally rebuilding the
    # metadata when the client sent x-amz-metadata-directive: REPLACE.
    # Registers and returns an S3Object on the destination bucket.
    def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
      src_root = File.join(@root,src_bucket_name,src_name,FAKE_S3_METADATA_DIR)
      src_metadata_filename = File.join(src_root, "metadata")
      src_metadata = YAML.load(File.open(src_metadata_filename, 'rb').read)
      src_content_filename = File.join(src_root, "content")

      dst_filename= File.join(@root,dst_bucket_name,dst_name)
      FileUtils.mkdir_p(dst_filename)

      metadata_dir = File.join(dst_filename,FAKE_S3_METADATA_DIR)
      FileUtils.mkdir_p(metadata_dir)

      content = File.join(metadata_dir, "content")
      metadata = File.join(metadata_dir, "metadata")

      # Copying an object onto itself would truncate the source, so the
      # file copy is skipped when source and destination are identical.
      if src_bucket_name != dst_bucket_name || src_name != dst_name
        File.open(content, 'wb') do |f|
          File.open(src_content_filename, 'rb') do |input|
            f << input.read
          end
        end

        File.open(metadata,'w') do |f|
          File.open(src_metadata_filename,'r') do |input|
            f << input.read
          end
        end
      end

      # NOTE(review): .first on header["x-amz-metadata-directive"] assumes
      # the header lookup never returns nil — verify WEBrick's default here.
      metadata_directive = request.header["x-amz-metadata-directive"].first
      if metadata_directive == "REPLACE"
        metadata_struct = create_metadata(content, request)
        File.open(metadata,'w') do |f|
          f << YAML::dump(metadata_struct)
        end
      end

      src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
      dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)

      obj = S3Object.new
      obj.name = dst_name
      obj.md5 = src_metadata[:md5]
      obj.content_type = src_metadata[:content_type]
      obj.content_disposition = src_metadata[:content_disposition]
      obj.content_encoding = src_metadata[:content_encoding] # if src_metadata[:content_encoding]
      obj.size = src_metadata[:size]
      obj.modified_date = src_metadata[:modified_date]

      # NOTE(review): the return value of find is discarded — unclear why
      # the source bucket is probed here; confirm it has no side effect.
      src_bucket.find(src_name)
      dst_bucket.add(obj)
      return obj
    end
166
+
167
+ def store_object(bucket, object_name, request)
168
+ filedata = ""
169
+
170
+ # TODO put a tmpfile here first and mv it over at the end
171
+ content_type = request.content_type || ""
172
+
173
+ match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
174
+ boundary = match[1] if match
175
+ if boundary
176
+ boundary = WEBrick::HTTPUtils::dequote(boundary)
177
+ form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)
178
+
179
+ if form_data['file'] == nil || form_data['file'] == ""
180
+ raise WEBrick::HTTPStatus::BadRequest
181
+ end
182
+
183
+ filedata = form_data['file']
184
+ else
185
+ request.body { |chunk| filedata << chunk }
186
+ end
187
+
188
+ do_store_object(bucket, object_name, filedata, request)
189
+ end
190
+
191
    # Write the object's content and YAML metadata under
    # <root>/<bucket>/<object_name>/<FAKE_S3_METADATA_DIR>/ and register an
    # S3Object on the bucket. Returns the S3Object, or nil on error
    # (logged unless @quiet_mode).
    def do_store_object(bucket, object_name, filedata, request)
      begin
        filename = File.join(@root, bucket.name, object_name)
        FileUtils.mkdir_p(filename)

        metadata_dir = File.join(filename, FAKE_S3_METADATA_DIR)
        FileUtils.mkdir_p(metadata_dir)

        content = File.join(filename, FAKE_S3_METADATA_DIR, "content")
        metadata = File.join(filename, FAKE_S3_METADATA_DIR, "metadata")

        File.open(content,'wb') { |f| f << filedata }

        # Metadata (md5, size, mtime, headers) is derived from the content
        # file just written plus the request headers.
        metadata_struct = create_metadata(content, request)
        File.open(metadata,'w') do |f|
          f << YAML::dump(metadata_struct)
        end

        obj = S3Object.new
        obj.name = object_name
        obj.md5 = metadata_struct[:md5]
        obj.content_type = metadata_struct[:content_type]
        obj.content_disposition = metadata_struct[:content_disposition]
        obj.content_encoding = metadata_struct[:content_encoding] # if metadata_struct[:content_encoding]
        obj.size = metadata_struct[:size]
        obj.modified_date = metadata_struct[:modified_date]

        bucket.add(obj)
        return obj
      rescue
        unless @quiet_mode
          puts $!
          $!.backtrace.each { |line| puts line }
        end
        return nil
      end
    end
228
+
229
+ def combine_object_parts(bucket, upload_id, object_name, parts, request)
230
+ upload_path = File.join(@root, bucket.name)
231
+ base_path = File.join(upload_path, "#{upload_id}_#{object_name}")
232
+
233
+ complete_file = ""
234
+ chunk = ""
235
+ part_paths = []
236
+
237
+ parts.sort_by { |part| part[:number] }.each do |part|
238
+ part_path = "#{base_path}_part#{part[:number]}"
239
+ content_path = File.join(part_path, FAKE_S3_METADATA_DIR, 'content')
240
+
241
+ File.open(content_path, 'rb') { |f| chunk = f.read }
242
+ etag = Digest::MD5.hexdigest(chunk)
243
+
244
+ raise new Error "invalid file chunk" unless part[:etag] == etag
245
+ complete_file << chunk
246
+ part_paths << part_path
247
+ end
248
+
249
+ object = do_store_object(bucket, object_name, complete_file, request)
250
+
251
+ # clean up parts
252
+ part_paths.each do |path|
253
+ FileUtils.remove_dir(path)
254
+ end
255
+
256
+ object
257
+ end
258
+
259
+ def delete_object(bucket,object_name,request)
260
+ begin
261
+ filename = File.join(@root,bucket.name,object_name)
262
+ FileUtils.rm_rf(filename)
263
+ object = bucket.find(object_name)
264
+ bucket.remove(object)
265
+ rescue
266
+ puts $!
267
+ $!.backtrace.each { |line| puts line }
268
+ return nil
269
+ end
270
+ end
271
+
272
+ def delete_objects(bucket, objects, request)
273
+ begin
274
+ filenames = []
275
+ objects.each do |object_name|
276
+ filenames << File.join(@root,bucket.name,object_name)
277
+ object = bucket.find(object_name)
278
+ bucket.remove(object)
279
+ end
280
+
281
+ FileUtils.rm_rf(filenames)
282
+ rescue
283
+ puts $!
284
+ $!.backtrace.each { |line| puts line }
285
+ return nil
286
+ end
287
+ end
288
+
289
+ # TODO: abstract getting meta data from request.
290
+ def create_metadata(content, request)
291
+ metadata = {}
292
+ metadata[:md5] = Digest::MD5.file(content).hexdigest
293
+ metadata[:content_type] = request.header["content-type"].first
294
+ if request.header['content-disposition']
295
+ metadata[:content_disposition] = request.header['content-disposition'].first
296
+ end
297
+ content_encoding = request.header["content-encoding"].first
298
+ metadata[:content_encoding] = content_encoding
299
+ #if content_encoding
300
+ # metadata[:content_encoding] = content_encoding
301
+ #end
302
+ metadata[:size] = File.size(content)
303
+ metadata[:modified_date] = File.mtime(content).utc.iso8601(SUBSECOND_PRECISION)
304
+ metadata[:amazon_metadata] = {}
305
+ metadata[:custom_metadata] = {}
306
+
307
+ # Add custom metadata from the request header
308
+ request.header.each do |key, value|
309
+ match = /^x-amz-([^-]+)-(.*)$/.match(key)
310
+ next unless match
311
+ if match[1].eql?('meta') && (match_key = match[2])
312
+ metadata[:custom_metadata][match_key] = value.join(', ')
313
+ next
314
+ end
315
+ metadata[:amazon_metadata][key.gsub(/^x-amz-/, '')] = value.join(', ')
316
+ end
317
+ return metadata
318
+ end
319
+ end
320
+ end
@@ -0,0 +1,21 @@
1
module FakeS3
  # File subclass whose reads are throttled to a configurable rate
  # (bytes per second), shared process-wide via the class-level setting.
  class RateLimitableFile < File
    @@rate_limit = nil

    # Current rate limit in bytes per second (nil = unthrottled).
    def self.rate_limit
      @@rate_limit
    end

    # Specify a rate limit in bytes per second.
    def self.rate_limit=(rate_limit)
      @@rate_limit = rate_limit
    end

    # Sleep long enough that reading +length+ bytes takes length/rate
    # seconds, then delegate to File#read.
    #
    # Fix/generalization: the original `read(args)` required a length and
    # crashed (`nil / rate`) when callers read to EOF with no argument, or
    # passed the optional output buffer. Backward compatible: read(n)
    # behaves exactly as before.
    def read(length = nil, *rest)
      if @@rate_limit && length
        sleep(length / @@rate_limit)
      end
      super(length, *rest)
    end
  end
end
@@ -0,0 +1,19 @@
1
module FakeS3
  # Value object describing a stored S3 object and its metadata.
  # Identity is determined solely by +name+.
  class S3Object
    include Comparable

    attr_accessor :name, :size, :creation_date, :modified_date, :md5, :io,
                  :content_type, :content_disposition, :content_encoding,
                  :custom_metadata

    # Hash on the name so equal-named objects collide in Hash/Set keys.
    def hash
      @name.hash
    end

    # Equal when the other value is an S3Object with the same name.
    def eql?(object)
      object.is_a?(self.class) && @name == object.name
    end

    # Sort by the object's name; nil (incomparable) for foreign types.
    def <=>(object)
      @name <=> object.name if object.is_a?(self.class)
    end
  end
end
@@ -0,0 +1,585 @@
1
+ require 'time'
2
+ require 'webrick'
3
+ require 'webrick/https'
4
+ require 'openssl'
5
+ require 'securerandom'
6
+ require 'cgi'
7
+ require 'fakes3/util'
8
+ require 'fakes3/file_store'
9
+ require 'fakes3/xml_adapter'
10
+ require 'fakes3/xml_parser'
11
+ require 'fakes3/bucket_query'
12
+ require 'fakes3/unsupported_operation'
13
+ require 'fakes3/errors'
14
+ require 'ipaddr'
15
+
16
+ module FakeS3
17
+ class Request
18
+ CREATE_BUCKET = "CREATE_BUCKET"
19
+ LIST_BUCKETS = "LIST_BUCKETS"
20
+ LS_BUCKET = "LS_BUCKET"
21
+ HEAD = "HEAD"
22
+ STORE = "STORE"
23
+ COPY = "COPY"
24
+ GET = "GET"
25
+ GET_ACL = "GET_ACL"
26
+ SET_ACL = "SET_ACL"
27
+ MOVE = "MOVE"
28
+ DELETE_OBJECT = "DELETE_OBJECT"
29
+ DELETE_BUCKET = "DELETE_BUCKET"
30
+ DELETE_OBJECTS = "DELETE_OBJECTS"
31
+
32
+ attr_accessor :bucket, :object, :type, :src_bucket,
33
+ :src_object, :method, :webrick_request,
34
+ :path, :is_path_style, :query, :http_verb
35
+
36
+ def inspect
37
+ puts "-----Inspect FakeS3 Request"
38
+ puts "Type: #{@type}"
39
+ puts "Is Path Style: #{@is_path_style}"
40
+ puts "Request Method: #{@method}"
41
+ puts "Bucket: #{@bucket}"
42
+ puts "Object: #{@object}"
43
+ puts "Src Bucket: #{@src_bucket}"
44
+ puts "Src Object: #{@src_object}"
45
+ puts "Query: #{@query}"
46
+ puts "-----Done"
47
+ end
48
+ end
49
+
50
+ class Servlet < WEBrick::HTTPServlet::AbstractServlet
51
+ def initialize(server,store,hostname)
52
+ super(server)
53
+ @store = store
54
+ @hostname = hostname
55
+ @port = server.config[:Port]
56
+ @root_hostnames = [hostname,'localhost','s3.amazonaws.com','s3.localhost']
57
+ end
58
+
59
+ def validate_request(request)
60
+ req = request.webrick_request
61
+ return if req.nil?
62
+ return if not req.header.has_key?('expect')
63
+ req.continue if req.header['expect'].first=='100-continue'
64
+ end
65
+
66
    # GET/HEAD handler. Dispatches on the normalized request type:
    # bucket listing, bucket contents, ACL stub, or object download with
    # conditional (If-None-Match / If-Modified-Since) and Range support.
    def do_GET(request, response)
      s_req = normalize_request(request)

      case s_req.type
      when 'LIST_BUCKETS'
        response.status = 200
        response['Content-Type'] = 'application/xml'
        buckets = @store.buckets
        response.body = XmlAdapter.buckets(buckets)
      when 'LS_BUCKET'
        bucket_obj = @store.get_bucket(s_req.bucket)
        if bucket_obj
          response.status = 200
          response['Content-Type'] = "application/xml"
          # Translate the S3 list-objects query params for the bucket query.
          query = {
            :marker => s_req.query["marker"] ? s_req.query["marker"].to_s : nil,
            :prefix => s_req.query["prefix"] ? s_req.query["prefix"].to_s : nil,
            :max_keys => s_req.query["max-keys"] ? s_req.query["max-keys"].to_i : nil,
            :delimiter => s_req.query["delimiter"] ? s_req.query["delimiter"].to_s : nil
          }
          bq = bucket_obj.query_for_range(query)
          response.body = XmlAdapter.bucket_query(bq)
        else
          response.status = 404
          response.body = XmlAdapter.error_no_such_bucket(s_req.bucket)
          response['Content-Type'] = "application/xml"
        end
      when 'GET_ACL'
        # Canned ACL response; FakeS3 does not track real permissions.
        response.status = 200
        response.body = XmlAdapter.acl
        response['Content-Type'] = 'application/xml'
      when 'GET'
        real_obj = @store.get_object(s_req.bucket, s_req.object, request)
        if !real_obj
          response.status = 404
          response.body = XmlAdapter.error_no_such_key(s_req.object)
          response['Content-Type'] = "application/xml"
          return
        end

        # Conditional GET: ETag match short-circuits to 304.
        if_none_match = request["If-None-Match"]
        if if_none_match == "\"#{real_obj.md5}\"" or if_none_match == "*"
          response.status = 304
          return
        end

        # Conditional GET: not modified since the client's timestamp.
        if_modified_since = request["If-Modified-Since"]
        if if_modified_since
          time = Time.httpdate(if_modified_since)
          if time >= Time.iso8601(real_obj.modified_date)
            response.status = 304
            return
          end
        end

        response.status = 200
        response['Content-Type'] = real_obj.content_type

        if real_obj.content_encoding
          response.header['X-Content-Encoding'] = real_obj.content_encoding
          response.header['Content-Encoding'] = real_obj.content_encoding
        end

        response['Content-Disposition'] = real_obj.content_disposition if real_obj.content_disposition
        stat = File::Stat.new(real_obj.io.path)

        response['Last-Modified'] = Time.iso8601(real_obj.modified_date).httpdate
        response.header['ETag'] = "\"#{real_obj.md5}\""
        response['Accept-Ranges'] = "bytes"
        response['Last-Ranges'] = "bytes"
        response['Access-Control-Allow-Origin'] = '*'

        # Replay stored custom metadata as x-amz-meta-* response headers.
        real_obj.custom_metadata.each do |header, value|
          response.header['x-amz-meta-' + header] = value
        end

        content_length = stat.size

        # Added Range Query support
        range = request.header["range"].first
        if range
          response.status = 206
          if range =~ /bytes=(\d*)-(\d*)/
            start = $1.to_i
            finish = $2.to_i
            finish_str = ""
            # NOTE(review): an end byte of 0 is treated as open-ended
            # ("bytes=N-"), so "bytes=0-0" cannot request just the first
            # byte — confirm this is acceptable for this fake.
            if finish == 0
              finish = content_length - 1
              finish_str = "#{finish}"
            else
              finish_str = finish.to_s
            end

            bytes_to_read = finish - start + 1
            response['Content-Range'] = "bytes #{start}-#{finish_str}/#{content_length}"
            real_obj.io.pos = start
            response.body = real_obj.io.read(bytes_to_read)
            return
          end
        end
        response['Content-Length'] = File::Stat.new(real_obj.io.path).size
        response['Content-Disposition'] = 'attachment'
        # HEAD: send headers only and release the file handle; GET hands
        # the open IO to WEBrick to stream.
        if s_req.http_verb == 'HEAD'
          response.body = ""
          real_obj.io.close
        else
          response.body = real_obj.io
        end
      end
    end
176
+
177
    # PUT handler: multipart part uploads are delegated to do_multipartPUT;
    # otherwise handles server-side copy, object store (lazily creating the
    # bucket), and bucket creation.
    def do_PUT(request, response)
      s_req = normalize_request(request)
      query = CGI::parse(request.request_uri.query || "")

      return do_multipartPUT(request, response) if query['uploadId'].first

      response.status = 200
      response.body = ""
      response['Content-Type'] = "text/xml"
      response['Access-Control-Allow-Origin'] = '*'

      case s_req.type
      when Request::COPY
        object = @store.copy_object(s_req.src_bucket, s_req.src_object, s_req.bucket, s_req.object, request)
        response.body = XmlAdapter.copy_object_result(object)
      when Request::STORE
        bucket_obj = @store.get_bucket(s_req.bucket)
        if !bucket_obj
          # Lazily create a bucket. TODO fix this to return the proper error
          bucket_obj = @store.create_bucket(s_req.bucket)
        end

        real_obj = @store.store_object(bucket_obj, s_req.object, s_req.webrick_request)
        response.header['ETag'] = "\"#{real_obj.md5}\""
      when Request::CREATE_BUCKET
        @store.create_bucket(s_req.bucket)
      end
    end
205
+
206
    # Store one part of a multipart upload. The part is persisted as a
    # regular object named "<uploadId>_<key>_part<N>" (either copied from
    # another object or taken from the request body); parts are later
    # stitched together by FileStore#combine_object_parts.
    def do_multipartPUT(request, response)
      s_req = normalize_request(request)
      query = CGI::parse(request.request_uri.query)

      part_number = query['partNumber'].first
      upload_id = query['uploadId'].first
      part_name = "#{upload_id}_#{s_req.object}_part#{part_number}"

      # store the part
      if s_req.type == Request::COPY
        real_obj = @store.copy_object(
          s_req.src_bucket, s_req.src_object,
          s_req.bucket , part_name,
          request
        )

        response['Content-Type'] = "text/xml"
        response.body = XmlAdapter.copy_object_result real_obj
      else
        bucket_obj = @store.get_bucket(s_req.bucket)
        if !bucket_obj
          bucket_obj = @store.create_bucket(s_req.bucket)
        end
        real_obj = @store.store_object(
          bucket_obj, part_name,
          request
        )

        response.body = ""
        response.header['ETag'] = "\"#{real_obj.md5}\""
      end

      response['Access-Control-Allow-Origin'] = '*'
      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
      response['Access-Control-Expose-Headers'] = 'ETag'

      response.status = 200
    end
244
+
245
    # POST handler. Covers four cases: batch delete (?delete), initiate
    # multipart upload (?uploads), complete multipart upload (?uploadId),
    # and browser form (POST policy) uploads.
    def do_POST(request,response)
      # ?delete is batch-delete tunnelled over POST.
      if request.query_string === 'delete'
        return do_DELETE(request, response)
      end

      s_req = normalize_request(request)
      key = request.query['key']
      query = CGI::parse(request.request_uri.query || "")

      if query.has_key?('uploads')
        # Initiate multipart upload: mint a random upload id.
        upload_id = SecureRandom.hex

        response.body = <<-eos.strip
          <?xml version="1.0" encoding="UTF-8"?>
          <InitiateMultipartUploadResult>
            <Bucket>#{ s_req.bucket }</Bucket>
            <Key>#{ key }</Key>
            <UploadId>#{ upload_id }</UploadId>
          </InitiateMultipartUploadResult>
        eos
      elsif query.has_key?('uploadId')
        # Complete multipart upload: stitch the stored parts together.
        upload_id = query['uploadId'].first
        bucket_obj = @store.get_bucket(s_req.bucket)
        real_obj = @store.combine_object_parts(
          bucket_obj,
          upload_id,
          s_req.object,
          parse_complete_multipart_upload(request),
          request
        )

        response.body = XmlAdapter.complete_multipart_result real_obj
      elsif request.content_type =~ /^multipart\/form-data; boundary=(.+)/
        # Browser form upload (POST policy upload).
        key = request.query['key']

        success_action_redirect = request.query['success_action_redirect']
        success_action_status = request.query['success_action_status']

        # S3 substitutes ${filename} in the policy key; this fake looks for
        # the literal token '$(unknown)' and the uploaded filename.
        filename = 'default'
        filename = $1 if request.body =~ /filename="(.*)"/
        key = key.gsub('$(unknown)', filename)

        bucket_obj = @store.get_bucket(s_req.bucket) || @store.create_bucket(s_req.bucket)
        real_obj = @store.store_object(bucket_obj, key, s_req.webrick_request)

        response['Etag'] = "\"#{real_obj.md5}\""

        if success_action_redirect
          # 303 back to the caller with bucket/key appended to the query.
          object_params = [ [ :bucket, s_req.bucket ], [ :key, key ] ]
          location_uri = URI.parse(success_action_redirect)
          original_location_params = URI.decode_www_form(String(location_uri.query))
          location_uri.query = URI.encode_www_form(original_location_params + object_params)

          response.status = 303
          response.body = ""
          response['Location'] = location_uri.to_s
        else
          response.status = success_action_status || 204
          if response.status == "201"
            # NOTE(review): host/port are scraped out of request.inspect by
            # raw string indexing, and the <Location> embeds debug values —
            # fragile; verify before relying on this response body.
            str = request.inspect
            port = str[str.index('@port')+6..str[str.index('@port')..str.length].index(',')]
            host = str[str.index('@host')+6..str[str.index('@host')..str.length].index(',')]
            response.body = <<-eos.strip
              <?xml version="1.0" encoding="UTF-8"?>
              <PostResponse>
                <Location>http://length:#{str.length}:index of , :#{str[str.index('@port')..str.length].index(',')}/#{s_req.bucket}/#{key}</Location>
                <Bucket>#{s_req.bucket}</Bucket>
                <Key>#{key}</Key>
                <ETag>#{response['Etag']}</ETag>
              </PostResponse>
            eos
          end

        end
      else
        raise WEBrick::HTTPStatus::BadRequest
      end

      response['Content-Type'] = 'text/xml'
      response['Access-Control-Allow-Origin'] = '*'
      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
      response['Access-Control-Expose-Headers'] = 'ETag'
    end
328
+
329
+ def do_DELETE(request, response)
330
+ s_req = normalize_request(request)
331
+
332
+ case s_req.type
333
+ when Request::DELETE_OBJECTS
334
+ bucket_obj = @store.get_bucket(s_req.bucket)
335
+ keys = XmlParser.delete_objects(s_req.webrick_request)
336
+ @store.delete_objects(bucket_obj,keys,s_req.webrick_request)
337
+ when Request::DELETE_OBJECT
338
+ bucket_obj = @store.get_bucket(s_req.bucket)
339
+ @store.delete_object(bucket_obj,s_req.object,s_req.webrick_request)
340
+ when Request::DELETE_BUCKET
341
+ @store.delete_bucket(s_req.bucket)
342
+ end
343
+
344
+ response.status = 204
345
+ response.body = ""
346
+ end
347
+
348
+ def do_OPTIONS(request, response)
349
+ super
350
+ response['Access-Control-Allow-Origin'] = '*'
351
+ response['Access-Control-Allow-Methods'] = 'PUT, POST, HEAD, GET, OPTIONS'
352
+ response['Access-Control-Allow-Headers'] = 'Accept, Content-Type, Authorization, Content-Length, ETag, X-CSRF-Token, Content-Disposition'
353
+ response['Access-Control-Expose-Headers'] = 'ETag'
354
+ end
355
+
356
+ private
357
+
358
    # Classify a DELETE (or tunnelled POST ?delete) request: bucket delete,
    # single-object delete, or batch delete. Mutates s_req in place.
    def normalize_delete(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      query = webrick_req.query
      if path == "/" and s_req.is_path_style
        # Probably do a 404 here
        # NOTE(review): s_req.type is left unset in this branch, so
        # do_DELETE falls through and still answers 204.
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
        else
          # Virtual-host style: bucket was already set from the Host header.
          elems = path.split("/")
        end

        if elems.size == 0
          raise UnsupportedOperation
        elsif elems.size == 1
          # Bucket-level: ?delete means batch object delete, else drop bucket.
          s_req.type = webrick_req.query_string == 'delete' ? Request::DELETE_OBJECTS : Request::DELETE_BUCKET
          s_req.query = query
          s_req.webrick_request = webrick_req
        else
          s_req.type = Request::DELETE_OBJECT
          object = elems[1,elems.size].join('/')
          s_req.object = object
        end
      end
    end
385
+
386
    # Classify a GET/HEAD request: list buckets (root), list a bucket's
    # contents, fetch an object's ACL (?acl), or fetch an object.
    # Mutates s_req in place.
    def normalize_get(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      query = webrick_req.query
      if path == "/" and s_req.is_path_style
        s_req.type = Request::LIST_BUCKETS
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
        else
          # Virtual-host style: bucket already derived from the Host header.
          elems = path.split("/")
        end

        if elems.size < 2
          s_req.type = Request::LS_BUCKET
          s_req.query = query
        else
          # WEBrick parses a bare "?acl" as {"acl" => ""}.
          if query["acl"] == ""
            s_req.type = Request::GET_ACL
          else
            s_req.type = Request::GET
          end
          object = elems[1,elems.size].join('/')
          s_req.object = object
        end
      end
    end
414
+
415
    # Classify a PUT request: bucket creation, object store, ACL set
    # (?acl), or server-side copy (x-amz-copy-source header). Mutates
    # s_req in place.
    def normalize_put(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      if path == "/"
        # Virtual-host style PUT to the root creates the host's bucket.
        if s_req.bucket
          s_req.type = Request::CREATE_BUCKET
        end
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
          if elems.size == 1
            s_req.type = Request::CREATE_BUCKET
          else
            if webrick_req.request_line =~ /\?acl/
              s_req.type = Request::SET_ACL
            else
              s_req.type = Request::STORE
            end
            s_req.object = elems[1,elems.size].join('/')
          end
        else
          if webrick_req.request_line =~ /\?acl/
            s_req.type = Request::SET_ACL
          else
            s_req.type = Request::STORE
          end
          s_req.object = webrick_req.path[1..-1]
        end
      end

      # TODO: also parse the x-amz-copy-source-range:bytes=first-last header
      # for multipart copy
      # The copy-source header overrides whatever type was derived above.
      copy_source = webrick_req.header["x-amz-copy-source"]
      if copy_source and copy_source.size == 1
        src_elems = copy_source.first.split("/")
        # The header value may or may not carry a leading slash.
        root_offset = src_elems[0] == "" ? 1 : 0
        s_req.src_bucket = src_elems[root_offset]
        s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
        s_req.type = Request::COPY
      end

      s_req.webrick_request = webrick_req
    end
459
+
460
+ def normalize_post(webrick_req,s_req)
461
+ path = webrick_req.path
462
+ path_len = path.size
463
+
464
+ s_req.path = webrick_req.query['key']
465
+ s_req.webrick_request = webrick_req
466
+
467
+ if s_req.is_path_style
468
+ elems = path[1, path_len].split("/")
469
+ s_req.bucket = elems[0]
470
+ s_req.object = elems[1..-1].join('/') if elems.size >= 2
471
+ else
472
+ s_req.object = path[1..-1]
473
+ end
474
+ end
475
+
476
    # This method takes a webrick request and generates a normalized FakeS3 request
    # Determines path-style vs virtual-host-style addressing from the Host
    # header, then dispatches to the per-verb normalizer. Raises for
    # unsupported HTTP methods.
    def normalize_request(webrick_req)
      host_header= webrick_req["Host"]
      host = host_header.split(':')[0]

      s_req = Request.new
      s_req.path = webrick_req.path
      s_req.is_path_style = true

      # A host that is neither a known root hostname nor a bare IP address
      # is treated as virtual-host style: the first label is the bucket.
      if !@root_hostnames.include?(host) && !(IPAddr.new(host) rescue nil)
        s_req.bucket = host.split(".")[0]
        s_req.is_path_style = false
      end

      s_req.http_verb = webrick_req.request_method

      case webrick_req.request_method
      when 'PUT'
        normalize_put(webrick_req,s_req)
      when 'GET','HEAD'
        normalize_get(webrick_req,s_req)
      when 'DELETE'
        normalize_delete(webrick_req,s_req)
      when 'POST'
        # POST ?delete is the batch-delete tunnel and normalizes as DELETE.
        if webrick_req.query_string != 'delete'
          normalize_post(webrick_req,s_req)
        else
          normalize_delete(webrick_req,s_req)
        end
      else
        raise "Unknown Request"
      end

      validate_request(s_req)

      return s_req
    end
513
+
514
+ def parse_complete_multipart_upload(request)
515
+ parts_xml = ""
516
+ request.body { |chunk| parts_xml << chunk }
517
+
518
+ # TODO: improve parsing xml
519
+ parts_xml = parts_xml.scan(/<Part>.*?<\/Part>/m)
520
+
521
+ parts_xml.collect do |xml|
522
+ {
523
+ number: xml[/<PartNumber>(\d+)<\/PartNumber>/, 1].to_i,
524
+ etag: FakeS3::Util.strip_before_and_after(xml[/\<ETag\>(.+)<\/ETag>/, 1], '"')
525
+ }
526
+ end
527
+ end
528
+
529
+ def dump_request(request)
530
+ puts "----------Dump Request-------------"
531
+ puts request.request_method
532
+ puts request.path
533
+ request.each do |k,v|
534
+ puts "#{k}:#{v}"
535
+ end
536
+ puts "----------End Dump -------------"
537
+ end
538
+ end
539
+
540
+
541
  # Thin wrapper around WEBrick::HTTPServer that mounts the FakeS3 servlet
  # at "/" and handles optional SSL and quiet logging.
  class Server
    # address/port to bind, the store backend, the hostname used for
    # virtual-host-style bucket addressing, optional SSL cert/key paths,
    # and extra options (currently only :quiet).
    def initialize(address, port, store, hostname, ssl_cert_path, ssl_key_path, extra_options={})
      @address = address
      @port = port
      @store = store
      @hostname = hostname
      @ssl_cert_path = ssl_cert_path
      @ssl_key_path = ssl_key_path
      webrick_config = {
        :BindAddress => @address,
        :Port => @port
      }
      # Enable HTTPS only when a certificate path was supplied.
      if !@ssl_cert_path.to_s.empty?
        webrick_config.merge!(
          {
            :SSLEnable => true,
            :SSLCertificate => OpenSSL::X509::Certificate.new(File.read(@ssl_cert_path)),
            :SSLPrivateKey => OpenSSL::PKey::RSA.new(File.read(@ssl_key_path))
          }
        )
      end

      # Quiet mode: discard WEBrick's logger and access log entirely.
      if extra_options[:quiet]
        webrick_config.merge!(
          :Logger => WEBrick::Log.new("/dev/null"),
          :AccessLog => []
        )
      end

      @server = WEBrick::HTTPServer.new(webrick_config)
    end

    # Mount the servlet at the root and block serving requests until
    # INT/TERM is received.
    def serve
      @server.mount "/", Servlet, @store, @hostname
      shutdown = proc { @server.shutdown }
      trap "INT", &shutdown
      trap "TERM", &shutdown
      @server.start
    end

    # Stop the underlying WEBrick server.
    def shutdown
      @server.shutdown
    end
  end
585
+ end