fakes3as6 1.2.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,320 @@
1
+ require 'fileutils'
2
+ require 'time'
3
+ require 'fakes3/s3_object'
4
+ require 'fakes3/bucket'
5
+ require 'fakes3/rate_limitable_file'
6
+ require 'digest/md5'
7
+ require 'yaml'
8
+
9
+ module FakeS3
10
+ class FileStore
11
+ FAKE_S3_METADATA_DIR = ".fakes3_metadataFFF"
12
+
13
+ # S3 clients with overly strict date parsing fails to parse ISO 8601 dates
14
+ # without any sub second precision (e.g. jets3t v0.7.2), and the examples
15
+ # given in the official AWS S3 documentation specify three (3) decimals for
16
+ # sub second precision.
17
+ SUBSECOND_PRECISION = 3
18
+
19
+ def initialize(root, quiet_mode)
20
+ @root = root
21
+ @buckets = []
22
+ @bucket_hash = {}
23
+ @quiet_mode = quiet_mode
24
+ Dir[File.join(root,"*")].each do |bucket|
25
+ bucket_name = File.basename(bucket)
26
+ bucket_obj = Bucket.new(bucket_name,Time.now,[])
27
+ @buckets << bucket_obj
28
+ @bucket_hash[bucket_name] = bucket_obj
29
+ end
30
+ end
31
+
32
+ # Pass a rate limit in bytes per second
33
+ def rate_limit=(rate_limit)
34
+ if rate_limit.is_a?(String)
35
+ if rate_limit =~ /^(\d+)$/
36
+ RateLimitableFile.rate_limit = rate_limit.to_i
37
+ elsif rate_limit =~ /^(.*)K$/
38
+ RateLimitableFile.rate_limit = $1.to_f * 1000
39
+ elsif rate_limit =~ /^(.*)M$/
40
+ RateLimitableFile.rate_limit = $1.to_f * 1000000
41
+ elsif rate_limit =~ /^(.*)G$/
42
+ RateLimitableFile.rate_limit = $1.to_f * 1000000000
43
+ else
44
+ raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
45
+ end
46
+ else
47
+ RateLimitableFile.rate_limit = nil
48
+ end
49
+ end
50
+
51
+ def buckets
52
+ @buckets
53
+ end
54
+
55
+ def get_bucket_folder(bucket)
56
+ File.join(@root, bucket.name)
57
+ end
58
+
59
+ def get_bucket(bucket)
60
+ @bucket_hash[bucket]
61
+ end
62
+
63
+ def create_bucket(bucket)
64
+ FileUtils.mkdir_p(File.join(@root, bucket))
65
+ bucket_obj = Bucket.new(bucket, Time.now, [])
66
+ if !@bucket_hash[bucket]
67
+ @buckets << bucket_obj
68
+ @bucket_hash[bucket] = bucket_obj
69
+ end
70
+ bucket_obj
71
+ end
72
+
73
+ def delete_bucket(bucket_name)
74
+ bucket = get_bucket(bucket_name)
75
+ raise NoSuchBucket if !bucket
76
+ raise BucketNotEmpty if bucket.objects.count > 0
77
+ FileUtils.rm_r(get_bucket_folder(bucket))
78
+ @bucket_hash.delete(bucket_name)
79
+ end
80
+
81
    # Load the S3Object stored at <root>/<bucket>/<object_name>, hydrating
    # it from the YAML "metadata" file plus the "content" file's stat
    # info. Returns nil (after logging unless @quiet_mode) when anything
    # fails — e.g. the object does not exist.
    def get_object(bucket, object_name, request)
      begin
        real_obj = S3Object.new
        obj_root = File.join(@root,bucket,object_name,FAKE_S3_METADATA_DIR)
        metadata = File.open(File.join(obj_root, "metadata")) { |file| YAML::load(file) }
        real_obj.name = object_name
        real_obj.md5 = metadata[:md5]
        # Query params may override the stored type/disposition, mirroring
        # S3's response-content-* request overrides.
        real_obj.content_type = request.query['response-content-type'] ||
          metadata.fetch(:content_type) { "application/octet-stream" }
        real_obj.content_disposition = request.query['response-content-disposition'] ||
          metadata[:content_disposition]
        real_obj.content_encoding = metadata.fetch(:content_encoding) # if metadata.fetch(:content_encoding)
        # The caller is responsible for closing this handle (do_GET closes
        # it on HEAD, or hands it to WEBrick as the response body).
        real_obj.io = RateLimitableFile.open(File.join(obj_root, "content"), 'rb')
        real_obj.size = metadata.fetch(:size) { 0 }
        real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
        # Older stores may lack :modified_date; fall back to file mtime.
        real_obj.modified_date = metadata.fetch(:modified_date) do
          File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
        end
        real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
        return real_obj
      rescue
        unless @quiet_mode
          puts $!
          $!.backtrace.each { |line| puts line }
        end
        return nil
      end
    end
109
+
110
+ def object_metadata(bucket, object)
111
+ end
112
+
113
    # Copy an object between (possibly identical) buckets, duplicating
    # both its content and metadata files. Honors the
    # x-amz-metadata-directive header: "REPLACE" rebuilds the metadata
    # from the incoming request instead of copying it verbatim. Returns
    # the new S3Object registered in the destination bucket.
    def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
      src_root = File.join(@root,src_bucket_name,src_name,FAKE_S3_METADATA_DIR)
      src_metadata_filename = File.join(src_root, "metadata")
      src_metadata = YAML.load(File.open(src_metadata_filename, 'rb').read)
      src_content_filename = File.join(src_root, "content")

      dst_filename= File.join(@root,dst_bucket_name,dst_name)
      FileUtils.mkdir_p(dst_filename)

      metadata_dir = File.join(dst_filename,FAKE_S3_METADATA_DIR)
      FileUtils.mkdir_p(metadata_dir)

      content = File.join(metadata_dir, "content")
      metadata = File.join(metadata_dir, "metadata")

      # Copying an object onto itself must not truncate the very files
      # we are reading from, so skip the file copy in that case.
      if src_bucket_name != dst_bucket_name || src_name != dst_name
        File.open(content, 'wb') do |f|
          File.open(src_content_filename, 'rb') do |input|
            f << input.read
          end
        end

        File.open(metadata,'w') do |f|
          File.open(src_metadata_filename,'r') do |input|
            f << input.read
          end
        end
      end

      metadata_directive = request.header["x-amz-metadata-directive"].first
      if metadata_directive == "REPLACE"
        metadata_struct = create_metadata(content, request)
        File.open(metadata,'w') do |f|
          f << YAML::dump(metadata_struct)
        end
      end

      # Lazily create buckets so copies into unknown buckets succeed.
      src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
      dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)

      obj = S3Object.new
      obj.name = dst_name
      obj.md5 = src_metadata[:md5]
      obj.content_type = src_metadata[:content_type]
      obj.content_disposition = src_metadata[:content_disposition]
      obj.content_encoding = src_metadata[:content_encoding] # if src_metadata[:content_encoding]
      obj.size = src_metadata[:size]
      obj.modified_date = src_metadata[:modified_date]

      # NOTE(review): the result of this find is unused — presumably a
      # leftover; the source object is not removed (copy, not move).
      src_bucket.find(src_name)
      dst_bucket.add(obj)
      return obj
    end
166
+
167
    # Store an object from an incoming PUT/POST request. Browser-style
    # multipart/form-data uploads carry the payload in the "file" field;
    # otherwise the raw request body is streamed in chunks.
    def store_object(bucket, object_name, request)
      filedata = ""

      # TODO put a tmpfile here first and mv it over at the end
      content_type = request.content_type || ""

      match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
      boundary = match[1] if match
      if boundary
        boundary = WEBrick::HTTPUtils::dequote(boundary)
        form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)

        # A form upload without a usable "file" field is a client error.
        if form_data['file'] == nil || form_data['file'] == ""
          raise WEBrick::HTTPStatus::BadRequest
        end

        filedata = form_data['file']
      else
        # Stream the raw body; WEBrick yields it in chunks.
        request.body { |chunk| filedata << chunk }
      end

      do_store_object(bucket, object_name, filedata, request)
    end
190
+
191
    # Write +filedata+ and its derived metadata under the object's
    # directory, then register an S3Object in +bucket+. Returns the
    # object, or nil (after logging unless @quiet_mode) when any step
    # fails.
    def do_store_object(bucket, object_name, filedata, request)
      begin
        filename = File.join(@root, bucket.name, object_name)
        FileUtils.mkdir_p(filename)

        metadata_dir = File.join(filename, FAKE_S3_METADATA_DIR)
        FileUtils.mkdir_p(metadata_dir)

        content = File.join(filename, FAKE_S3_METADATA_DIR, "content")
        metadata = File.join(filename, FAKE_S3_METADATA_DIR, "metadata")

        File.open(content,'wb') { |f| f << filedata }

        # Metadata (md5, size, mtime, headers) is derived from the file
        # just written plus the request headers.
        metadata_struct = create_metadata(content, request)
        File.open(metadata,'w') do |f|
          f << YAML::dump(metadata_struct)
        end

        obj = S3Object.new
        obj.name = object_name
        obj.md5 = metadata_struct[:md5]
        obj.content_type = metadata_struct[:content_type]
        obj.content_disposition = metadata_struct[:content_disposition]
        obj.content_encoding = metadata_struct[:content_encoding] # if metadata_struct[:content_encoding]
        obj.size = metadata_struct[:size]
        obj.modified_date = metadata_struct[:modified_date]

        bucket.add(obj)
        return obj
      rescue
        unless @quiet_mode
          puts $!
          $!.backtrace.each { |line| puts line }
        end
        return nil
      end
    end
228
+
229
+ def combine_object_parts(bucket, upload_id, object_name, parts, request)
230
+ upload_path = File.join(@root, bucket.name)
231
+ base_path = File.join(upload_path, "#{upload_id}_#{object_name}")
232
+
233
+ complete_file = ""
234
+ chunk = ""
235
+ part_paths = []
236
+
237
+ parts.sort_by { |part| part[:number] }.each do |part|
238
+ part_path = "#{base_path}_part#{part[:number]}"
239
+ content_path = File.join(part_path, FAKE_S3_METADATA_DIR, 'content')
240
+
241
+ File.open(content_path, 'rb') { |f| chunk = f.read }
242
+ etag = Digest::MD5.hexdigest(chunk)
243
+
244
+ raise new Error "invalid file chunk" unless part[:etag] == etag
245
+ complete_file << chunk
246
+ part_paths << part_path
247
+ end
248
+
249
+ object = do_store_object(bucket, object_name, complete_file, request)
250
+
251
+ # clean up parts
252
+ part_paths.each do |path|
253
+ FileUtils.remove_dir(path)
254
+ end
255
+
256
+ object
257
+ end
258
+
259
+ def delete_object(bucket,object_name,request)
260
+ begin
261
+ filename = File.join(@root,bucket.name,object_name)
262
+ FileUtils.rm_rf(filename)
263
+ object = bucket.find(object_name)
264
+ bucket.remove(object)
265
+ rescue
266
+ puts $!
267
+ $!.backtrace.each { |line| puts line }
268
+ return nil
269
+ end
270
+ end
271
+
272
+ def delete_objects(bucket, objects, request)
273
+ begin
274
+ filenames = []
275
+ objects.each do |object_name|
276
+ filenames << File.join(@root,bucket.name,object_name)
277
+ object = bucket.find(object_name)
278
+ bucket.remove(object)
279
+ end
280
+
281
+ FileUtils.rm_rf(filenames)
282
+ rescue
283
+ puts $!
284
+ $!.backtrace.each { |line| puts line }
285
+ return nil
286
+ end
287
+ end
288
+
289
    # TODO: abstract getting meta data from request.
    # Build the metadata hash persisted alongside an object's content
    # file: MD5, content type/disposition/encoding, size and mtime, plus
    # any x-amz-meta-* headers (custom metadata) and remaining x-amz-*
    # headers (amazon metadata).
    def create_metadata(content, request)
      metadata = {}
      metadata[:md5] = Digest::MD5.file(content).hexdigest
      metadata[:content_type] = request.header["content-type"].first
      if request.header['content-disposition']
        metadata[:content_disposition] = request.header['content-disposition'].first
      end
      content_encoding = request.header["content-encoding"].first
      metadata[:content_encoding] = content_encoding
      #if content_encoding
      #  metadata[:content_encoding] = content_encoding
      #end
      metadata[:size] = File.size(content)
      metadata[:modified_date] = File.mtime(content).utc.iso8601(SUBSECOND_PRECISION)
      metadata[:amazon_metadata] = {}
      metadata[:custom_metadata] = {}

      # Add custom metadata from the request header
      request.header.each do |key, value|
        match = /^x-amz-([^-]+)-(.*)$/.match(key)
        next unless match
        # x-amz-meta-<key> → custom metadata; every other x-amz-* header
        # lands in :amazon_metadata with the prefix stripped.
        if match[1].eql?('meta') && (match_key = match[2])
          metadata[:custom_metadata][match_key] = value.join(', ')
          next
        end
        metadata[:amazon_metadata][key.gsub(/^x-amz-/, '')] = value.join(', ')
      end
      return metadata
    end
  end
end
@@ -0,0 +1,21 @@
1
module FakeS3
  # A File subclass whose reads are throttled to a global rate limit
  # expressed in bytes per second. With no limit set it behaves exactly
  # like File.
  class RateLimitableFile < File
    @@rate_limit = nil

    # Current global rate limit in bytes per second (nil = unthrottled).
    def self.rate_limit
      @@rate_limit
    end

    # Specify a rate limit in bytes per second (nil disables throttling).
    def self.rate_limit=(rate_limit)
      @@rate_limit = rate_limit
    end

    # Sleep long enough that reading +length+ bytes takes length/rate
    # seconds, then delegate to File#read.
    #
    # Fix: File#read's length and buffer arguments are optional, but the
    # old signature (`read(args)`) required exactly one argument — which
    # broke callers that use `read()` (read to EOF) or `read(len, buf)`
    # (buffered IO-copy loops), and crashed dividing nil by the limit.
    def read(length = nil, buffer = nil)
      sleep(length / @@rate_limit) if @@rate_limit && length
      if buffer.nil?
        super(length)
      else
        super(length, buffer)
      end
    end
  end
end
@@ -0,0 +1,19 @@
1
module FakeS3
  # Value object describing a stored S3 object. Identity — hashing,
  # equality and ordering — is based solely on the object's name.
  class S3Object
    include Comparable
    attr_accessor :name,:size,:creation_date,:modified_date,:md5,:io,:content_type,:content_disposition,:content_encoding,:custom_metadata

    # Hash by name so instances behave as identity keys in Hashes/Sets.
    def hash
      @name.hash
    end

    # Two S3Objects are eql? exactly when they share a name.
    def eql?(object)
      return false unless object.is_a?(self.class)
      @name == object.name
    end

    # Sort by the object's name; incomparable against other types (nil).
    def <=>(object)
      return nil unless object.is_a?(self.class)
      @name <=> object.name
    end
  end
end
@@ -0,0 +1,581 @@
1
+ require 'time'
2
+ require 'webrick'
3
+ require 'webrick/https'
4
+ require 'openssl'
5
+ require 'securerandom'
6
+ require 'cgi'
7
+ require 'fakes3/util'
8
+ require 'fakes3/file_store'
9
+ require 'fakes3/xml_adapter'
10
+ require 'fakes3/xml_parser'
11
+ require 'fakes3/bucket_query'
12
+ require 'fakes3/unsupported_operation'
13
+ require 'fakes3/errors'
14
+ require 'ipaddr'
15
+
16
module FakeS3
  # Normalized representation of an incoming S3 request, produced by
  # Servlet#normalize_request from the raw WEBrick request. +type+ holds
  # one of the operation constants below.
  class Request
    CREATE_BUCKET = "CREATE_BUCKET"
    LIST_BUCKETS = "LIST_BUCKETS"
    LS_BUCKET = "LS_BUCKET"
    HEAD = "HEAD"
    STORE = "STORE"
    COPY = "COPY"
    GET = "GET"
    GET_ACL = "GET_ACL"
    SET_ACL = "SET_ACL"
    MOVE = "MOVE"
    DELETE_OBJECT = "DELETE_OBJECT"
    DELETE_BUCKET = "DELETE_BUCKET"
    DELETE_OBJECTS = "DELETE_OBJECTS"

    attr_accessor :bucket, :object, :type, :src_bucket,
                  :src_object, :method, :webrick_request,
                  :path, :is_path_style, :query, :http_verb

    # Debugging aid: print a human-readable summary of this request to
    # stdout. (Note this shadows Object#inspect and returns nil.)
    def inspect
      puts "-----Inspect FakeS3 Request"
      puts "Type: #{@type}"
      puts "Is Path Style: #{@is_path_style}"
      puts "Request Method: #{@method}"
      puts "Bucket: #{@bucket}"
      puts "Object: #{@object}"
      puts "Src Bucket: #{@src_bucket}"
      puts "Src Object: #{@src_object}"
      puts "Query: #{@query}"
      puts "-----Done"
    end
  end
49
+
50
  # WEBrick servlet implementing the fake S3 REST API on top of a
  # FileStore.
  class Servlet < WEBrick::HTTPServlet::AbstractServlet
    def initialize(server,store,hostname)
      super(server)
      @store = store
      @hostname = hostname
      @port = server.config[:Port]
      # Hostnames treated as "root" (path-style) endpoints rather than
      # virtual-host style "bucket.host" names.
      @root_hostnames = [hostname,'localhost','s3.amazonaws.com','s3.localhost']
    end
58
+
59
+ def validate_request(request)
60
+ req = request.webrick_request
61
+ return if req.nil?
62
+ return if not req.header.has_key?('expect')
63
+ req.continue if req.header['expect'].first=='100-continue'
64
+ end
65
+
66
    # GET/HEAD dispatcher: service-level bucket listing, bucket listing,
    # ACL fetch, and object download with conditional-GET (ETag and
    # If-Modified-Since) plus byte-range support.
    def do_GET(request, response)
      s_req = normalize_request(request)

      case s_req.type
      when 'LIST_BUCKETS'
        response.status = 200
        response['Content-Type'] = 'application/xml'
        buckets = @store.buckets
        response.body = XmlAdapter.buckets(buckets)
      when 'LS_BUCKET'
        bucket_obj = @store.get_bucket(s_req.bucket)
        if bucket_obj
          response.status = 200
          response['Content-Type'] = "application/xml"
          # Standard S3 list-objects paging/filtering parameters.
          query = {
            :marker => s_req.query["marker"] ? s_req.query["marker"].to_s : nil,
            :prefix => s_req.query["prefix"] ? s_req.query["prefix"].to_s : nil,
            :max_keys => s_req.query["max-keys"] ? s_req.query["max-keys"].to_i : nil,
            :delimiter => s_req.query["delimiter"] ? s_req.query["delimiter"].to_s : nil
          }
          bq = bucket_obj.query_for_range(query)
          response.body = XmlAdapter.bucket_query(bq)
        else
          response.status = 404
          response.body = XmlAdapter.error_no_such_bucket(s_req.bucket)
          response['Content-Type'] = "application/xml"
        end
      when 'GET_ACL'
        response.status = 200
        response.body = XmlAdapter.acl
        response['Content-Type'] = 'application/xml'
      when 'GET'
        real_obj = @store.get_object(s_req.bucket, s_req.object, request)
        if !real_obj
          response.status = 404
          response.body = XmlAdapter.error_no_such_key(s_req.object)
          response['Content-Type'] = "application/xml"
          return
        end

        # Conditional GET on ETag.
        if_none_match = request["If-None-Match"]
        if if_none_match == "\"#{real_obj.md5}\"" or if_none_match == "*"
          response.status = 304
          return
        end

        # Conditional GET on modification time.
        if_modified_since = request["If-Modified-Since"]
        if if_modified_since
          time = Time.httpdate(if_modified_since)
          if time >= Time.iso8601(real_obj.modified_date)
            response.status = 304
            return
          end
        end

        response.status = 200
        response['Content-Type'] = real_obj.content_type

        if real_obj.content_encoding
          response.header['X-Content-Encoding'] = real_obj.content_encoding
          response.header['Content-Encoding'] = real_obj.content_encoding
        end

        response['Content-Disposition'] = real_obj.content_disposition if real_obj.content_disposition
        stat = File::Stat.new(real_obj.io.path)

        response['Last-Modified'] = Time.iso8601(real_obj.modified_date).httpdate
        response.header['ETag'] = "\"#{real_obj.md5}\""
        response['Accept-Ranges'] = "bytes"
        response['Last-Ranges'] = "bytes"
        response['Access-Control-Allow-Origin'] = '*'

        real_obj.custom_metadata.each do |header, value|
          response.header['x-amz-meta-' + header] = value
        end

        content_length = stat.size

        # Added Range Query support
        # NOTE(review): an end position of 0 (e.g. "bytes=N-" or
        # "bytes=0-0") is treated as "to end of file", so a literal 0-0
        # single-byte range returns the whole remainder.
        range = request.header["range"].first
        if range
          response.status = 206
          if range =~ /bytes=(\d*)-(\d*)/
            start = $1.to_i
            finish = $2.to_i
            finish_str = ""
            if finish == 0
              finish = content_length - 1
              finish_str = "#{finish}"
            else
              finish_str = finish.to_s
            end

            bytes_to_read = finish - start + 1
            response['Content-Range'] = "bytes #{start}-#{finish_str}/#{content_length}"
            real_obj.io.pos = start
            response.body = real_obj.io.read(bytes_to_read)
            return
          end
        end
        response['Content-Length'] = File::Stat.new(real_obj.io.path).size
        response['Content-Disposition'] = 'attachment'
        # HEAD must not send a body; close the handle get_object opened.
        # For GET, WEBrick streams the IO and closes it itself.
        if s_req.http_verb == 'HEAD'
          response.body = ""
          real_obj.io.close
        else
          response.body = real_obj.io
        end
      end
    end
176
+
177
    # PUT dispatcher: multipart part uploads (uploadId present), object
    # copies, object stores and bucket creation.
    def do_PUT(request, response)
      s_req = normalize_request(request)
      query = CGI::parse(request.request_uri.query || "")

      # A PUT carrying an uploadId is one part of a multipart upload.
      return do_multipartPUT(request, response) if query['uploadId'].first

      response.status = 200
      response.body = ""
      response['Content-Type'] = "text/xml"
      response['Access-Control-Allow-Origin'] = '*'

      case s_req.type
      when Request::COPY
        object = @store.copy_object(s_req.src_bucket, s_req.src_object, s_req.bucket, s_req.object, request)
        response.body = XmlAdapter.copy_object_result(object)
      when Request::STORE
        bucket_obj = @store.get_bucket(s_req.bucket)
        if !bucket_obj
          # Lazily create a bucket. TODO fix this to return the proper error
          bucket_obj = @store.create_bucket(s_req.bucket)
        end

        real_obj = @store.store_object(bucket_obj, s_req.object, s_req.webrick_request)
        response.header['ETag'] = "\"#{real_obj.md5}\""
      when Request::CREATE_BUCKET
        @store.create_bucket(s_req.bucket)
      end
    end
205
+
206
    # Store one part of a multipart upload under a synthetic
    # "<uploadId>_<object>_part<N>" key. Supports both regular part
    # uploads and UploadPartCopy (x-amz-copy-source).
    def do_multipartPUT(request, response)
      s_req = normalize_request(request)
      query = CGI::parse(request.request_uri.query)

      part_number = query['partNumber'].first
      upload_id = query['uploadId'].first
      part_name = "#{upload_id}_#{s_req.object}_part#{part_number}"

      # store the part
      if s_req.type == Request::COPY
        real_obj = @store.copy_object(
          s_req.src_bucket, s_req.src_object,
          s_req.bucket , part_name,
          request
        )

        response['Content-Type'] = "text/xml"
        response.body = XmlAdapter.copy_object_result real_obj
      else
        # Lazily create the target bucket, mirroring do_PUT.
        bucket_obj = @store.get_bucket(s_req.bucket)
        if !bucket_obj
          bucket_obj = @store.create_bucket(s_req.bucket)
        end
        real_obj = @store.store_object(
          bucket_obj, part_name,
          request
        )

        response.body = ""
        response.header['ETag'] = "\"#{real_obj.md5}\""
      end

      response['Access-Control-Allow-Origin'] = '*'
      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
      response['Access-Control-Expose-Headers'] = 'ETag'

      response.status = 200
    end
244
+
245
    # POST dispatcher: bulk delete (?delete), multipart-upload initiation
    # (?uploads), multipart completion (?uploadId) and browser
    # multipart/form-data uploads.
    def do_POST(request,response)
      # "POST /bucket?delete" is S3's bulk-delete API.
      if request.query_string === 'delete'
        return do_DELETE(request, response)
      end

      s_req = normalize_request(request)
      key = request.query['key']
      query = CGI::parse(request.request_uri.query || "")

      if query.has_key?('uploads')
        # Initiate a multipart upload: hand back a random upload id.
        upload_id = SecureRandom.hex

        response.body = <<-eos.strip
          <?xml version="1.0" encoding="UTF-8"?>
          <InitiateMultipartUploadResult>
            <Bucket>#{ s_req.bucket }</Bucket>
            <Key>#{ key }</Key>
            <UploadId>#{ upload_id }</UploadId>
          </InitiateMultipartUploadResult>
        eos
      elsif query.has_key?('uploadId')
        # Complete a multipart upload: stitch the parts together.
        upload_id = query['uploadId'].first
        bucket_obj = @store.get_bucket(s_req.bucket)
        real_obj = @store.combine_object_parts(
          bucket_obj,
          upload_id,
          s_req.object,
          parse_complete_multipart_upload(request),
          request
        )

        response.body = XmlAdapter.complete_multipart_result real_obj
      elsif request.content_type =~ /^multipart\/form-data; boundary=(.+)/
        # Browser form upload (HTML POST policy upload).
        key = request.query['key']

        success_action_redirect = request.query['success_action_redirect']
        success_action_status = request.query['success_action_status']

        filename = 'default'
        filename = $1 if request.body =~ /filename="(.*)"/
        # S3's "${filename}" substitution variant used by this fake.
        key = key.gsub('$(unknown)', filename)

        bucket_obj = @store.get_bucket(s_req.bucket) || @store.create_bucket(s_req.bucket)
        real_obj = @store.store_object(bucket_obj, key, s_req.webrick_request)

        response['Etag'] = "\"#{real_obj.md5}\""

        if success_action_redirect
          # Redirect back to the caller with bucket/key appended to the
          # redirect URL's query string.
          object_params = [ [ :bucket, s_req.bucket ], [ :key, key ] ]
          location_uri = URI.parse(success_action_redirect)
          original_location_params = URI.decode_www_form(String(location_uri.query))
          location_uri.query = URI.encode_www_form(original_location_params + object_params)

          response.status = 303
          response.body = ""
          response['Location'] = location_uri.to_s
        else
          response.status = success_action_status || 204
          # NOTE(review): compared as the string "201" because
          # success_action_status arrives as a query-string value.
          if response.status == "201"
            response.body = <<-eos.strip
              <?xml version="1.0" encoding="UTF-8"?>
              <PostResponse>
                <Location>http://35.167.69.239:4581/#{s_req.bucket}/#{key}</Location>
                <Bucket>#{s_req.bucket}</Bucket>
                <Key>#{key}</Key>
                <ETag>#{response['Etag']}</ETag>
              </PostResponse>
            eos
          end
        end
      else
        raise WEBrick::HTTPStatus::BadRequest
      end

      response['Content-Type'] = 'text/xml'
      response['Access-Control-Allow-Origin'] = '*'
      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
      response['Access-Control-Expose-Headers'] = 'ETag'
    end
324
+
325
+ def do_DELETE(request, response)
326
+ s_req = normalize_request(request)
327
+
328
+ case s_req.type
329
+ when Request::DELETE_OBJECTS
330
+ bucket_obj = @store.get_bucket(s_req.bucket)
331
+ keys = XmlParser.delete_objects(s_req.webrick_request)
332
+ @store.delete_objects(bucket_obj,keys,s_req.webrick_request)
333
+ when Request::DELETE_OBJECT
334
+ bucket_obj = @store.get_bucket(s_req.bucket)
335
+ @store.delete_object(bucket_obj,s_req.object,s_req.webrick_request)
336
+ when Request::DELETE_BUCKET
337
+ @store.delete_bucket(s_req.bucket)
338
+ end
339
+
340
+ response.status = 204
341
+ response.body = ""
342
+ end
343
+
344
+ def do_OPTIONS(request, response)
345
+ super
346
+ response['Access-Control-Allow-Origin'] = '*'
347
+ response['Access-Control-Allow-Methods'] = 'PUT, POST, HEAD, GET, OPTIONS'
348
+ response['Access-Control-Allow-Headers'] = 'Accept, Content-Type, Authorization, Content-Length, ETag, X-CSRF-Token, Content-Disposition'
349
+ response['Access-Control-Expose-Headers'] = 'ETag'
350
+ end
351
+
352
    private

    # Classify a DELETE (or POST ?delete) request as a bucket deletion,
    # single-object deletion or bulk deletion, filling in s_req fields.
    def normalize_delete(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      query = webrick_req.query
      if path == "/" and s_req.is_path_style
        # Probably do a 404 here
        # (deleting the service root is left unhandled: s_req.type stays
        # nil and do_DELETE falls through to a bare 204)
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
        else
          # Virtual-host style: bucket was already set from the Host
          # header in normalize_request.
          elems = path.split("/")
        end

        if elems.size == 0
          raise UnsupportedOperation
        elsif elems.size == 1
          # Bare bucket path: either a bulk delete (?delete) or a bucket
          # deletion.
          s_req.type = webrick_req.query_string == 'delete' ? Request::DELETE_OBJECTS : Request::DELETE_BUCKET
          s_req.query = query
          s_req.webrick_request = webrick_req
        else
          s_req.type = Request::DELETE_OBJECT
          object = elems[1,elems.size].join('/')
          s_req.object = object
        end
      end
    end
381
+
382
    # Classify a GET/HEAD request: service-level bucket listing, bucket
    # object listing, ACL fetch, or object fetch.
    def normalize_get(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      query = webrick_req.query
      if path == "/" and s_req.is_path_style
        s_req.type = Request::LIST_BUCKETS
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
        else
          # Virtual-host style: bucket was already set from Host.
          elems = path.split("/")
        end

        if elems.size < 2
          s_req.type = Request::LS_BUCKET
          s_req.query = query
        else
          # "?acl" arrives as a query key with an empty string value.
          if query["acl"] == ""
            s_req.type = Request::GET_ACL
          else
            s_req.type = Request::GET
          end
          object = elems[1,elems.size].join('/')
          s_req.object = object
        end
      end
    end
410
+
411
    # Classify a PUT request: bucket creation, ACL set, object store —
    # or, when x-amz-copy-source is present, an object copy (which
    # overrides whatever was classified from the path).
    def normalize_put(webrick_req, s_req)
      path = webrick_req.path
      path_len = path.size
      if path == "/"
        # Root path is only meaningful virtual-host style, where the
        # bucket came from the Host header.
        if s_req.bucket
          s_req.type = Request::CREATE_BUCKET
        end
      else
        if s_req.is_path_style
          elems = path[1,path_len].split("/")
          s_req.bucket = elems[0]
          if elems.size == 1
            s_req.type = Request::CREATE_BUCKET
          else
            if webrick_req.request_line =~ /\?acl/
              s_req.type = Request::SET_ACL
            else
              s_req.type = Request::STORE
            end
            s_req.object = elems[1,elems.size].join('/')
          end
        else
          if webrick_req.request_line =~ /\?acl/
            s_req.type = Request::SET_ACL
          else
            s_req.type = Request::STORE
          end
          s_req.object = webrick_req.path[1..-1]
        end
      end

      # TODO: also parse the x-amz-copy-source-range:bytes=first-last header
      # for multipart copy
      copy_source = webrick_req.header["x-amz-copy-source"]
      if copy_source and copy_source.size == 1
        # "/bucket/key" or "bucket/key" — skip a leading empty segment.
        src_elems = copy_source.first.split("/")
        root_offset = src_elems[0] == "" ? 1 : 0
        s_req.src_bucket = src_elems[root_offset]
        s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
        s_req.type = Request::COPY
      end

      s_req.webrick_request = webrick_req
    end
455
+
456
+ def normalize_post(webrick_req,s_req)
457
+ path = webrick_req.path
458
+ path_len = path.size
459
+
460
+ s_req.path = webrick_req.query['key']
461
+ s_req.webrick_request = webrick_req
462
+
463
+ if s_req.is_path_style
464
+ elems = path[1, path_len].split("/")
465
+ s_req.bucket = elems[0]
466
+ s_req.object = elems[1..-1].join('/') if elems.size >= 2
467
+ else
468
+ s_req.object = path[1..-1]
469
+ end
470
+ end
471
+
472
    # This method takes a webrick request and generates a normalized FakeS3 request
    # Decides path-style vs virtual-host style from the Host header, then
    # dispatches to the per-verb normalizer. Raises for unknown verbs.
    def normalize_request(webrick_req)
      host_header= webrick_req["Host"]
      host = host_header.split(':')[0]

      s_req = Request.new
      s_req.path = webrick_req.path
      s_req.is_path_style = true

      # Virtual-host style: "bucket.host" where host is neither a known
      # root hostname nor a bare IP address.
      if !@root_hostnames.include?(host) && !(IPAddr.new(host) rescue nil)
        s_req.bucket = host.split(".")[0]
        s_req.is_path_style = false
      end

      s_req.http_verb = webrick_req.request_method

      case webrick_req.request_method
      when 'PUT'
        normalize_put(webrick_req,s_req)
      when 'GET','HEAD'
        normalize_get(webrick_req,s_req)
      when 'DELETE'
        normalize_delete(webrick_req,s_req)
      when 'POST'
        # "POST ?delete" is a bulk delete in disguise.
        if webrick_req.query_string != 'delete'
          normalize_post(webrick_req,s_req)
        else
          normalize_delete(webrick_req,s_req)
        end
      else
        raise "Unknown Request"
      end

      validate_request(s_req)

      return s_req
    end
509
+
510
+ def parse_complete_multipart_upload(request)
511
+ parts_xml = ""
512
+ request.body { |chunk| parts_xml << chunk }
513
+
514
+ # TODO: improve parsing xml
515
+ parts_xml = parts_xml.scan(/<Part>.*?<\/Part>/m)
516
+
517
+ parts_xml.collect do |xml|
518
+ {
519
+ number: xml[/<PartNumber>(\d+)<\/PartNumber>/, 1].to_i,
520
+ etag: FakeS3::Util.strip_before_and_after(xml[/\<ETag\>(.+)<\/ETag>/, 1], '"')
521
+ }
522
+ end
523
+ end
524
+
525
+ def dump_request(request)
526
+ puts "----------Dump Request-------------"
527
+ puts request.request_method
528
+ puts request.path
529
+ request.each do |k,v|
530
+ puts "#{k}:#{v}"
531
+ end
532
+ puts "----------End Dump -------------"
533
+ end
534
+ end
535
+
536
+
537
  # Owns the WEBrick HTTPServer that hosts the Servlet; wires up optional
  # SSL and quiet-mode logging from the constructor arguments.
  class Server
    def initialize(address, port, store, hostname, ssl_cert_path, ssl_key_path, extra_options={})
      @address = address
      @port = port
      @store = store
      @hostname = hostname
      @ssl_cert_path = ssl_cert_path
      @ssl_key_path = ssl_key_path
      webrick_config = {
        :BindAddress => @address,
        :Port => @port
      }
      # SSL is enabled whenever a certificate path was supplied; the key
      # is read from ssl_key_path alongside it.
      if !@ssl_cert_path.to_s.empty?
        webrick_config.merge!(
          {
            :SSLEnable => true,
            :SSLCertificate => OpenSSL::X509::Certificate.new(File.read(@ssl_cert_path)),
            :SSLPrivateKey => OpenSSL::PKey::RSA.new(File.read(@ssl_key_path))
          }
        )
      end

      # :quiet silences both WEBrick's logger and its access log.
      if extra_options[:quiet]
        webrick_config.merge!(
          :Logger => WEBrick::Log.new("/dev/null"),
          :AccessLog => []
        )
      end

      @server = WEBrick::HTTPServer.new(webrick_config)
    end

    # Mount the servlet at "/" and serve until INT/TERM is received.
    def serve
      @server.mount "/", Servlet, @store, @hostname
      shutdown = proc { @server.shutdown }
      trap "INT", &shutdown
      trap "TERM", &shutdown
      @server.start
    end

    # Stop the running server.
    def shutdown
      @server.shutdown
    end
  end
end