fakes3 0.2.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: e4b5a4bfc64bf07bbf5ebaf3038c4ddf95c7a3e4
-  data.tar.gz: 86835f87289156cbbf1ef050d9892d020adbe281
+  metadata.gz: 35ac3fdceddeef49ddeb95137f2dbfc8a719a4e2
+  data.tar.gz: c88fa58f1a7b52b9fcbb1dae974175bed5c1c178
 SHA512:
-  metadata.gz: 76f712e660a295b7b1b7435ccf7890a8eb87eefdcbdfa7847d512077792206e6463d63b93786ffb60bbb250e7ebc2885817db2acf85577937dfcff8e9ec004ab
-  data.tar.gz: 691fecdbc4e377f2f23a4e19256db8a018e6357beff5ceb3d0b77915da3f529f25862bbb4177d6de4165c7f4aec2c411396ded814ce5d2a9f817435ff9b0b14f
+  metadata.gz: d9a5a1c68ee5187593a2bb3aedec7e888b6f400b6c0882a1ebb8cb4323d81ed7d29c53657833da2564fea09bb3e9732eb34f6f78cfa63e2274910ae2f128c8d8
+  data.tar.gz: 21cd19d83eed82e5b89511ad0a7b60e934c516f97295ce54b09135240f36c0523dd2927bf68765cb6ba219233c59595c0df17dbb425db84f562c215a8c3bf913
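
Only the packaged artifacts' digests change here. If you want to confirm a downloaded copy against these values, a rough sketch (file names are the standard members of a .gem tar archive; the expected digests are the new SHA1 entries above):

    # Sketch only: a .gem is a tar archive whose members include metadata.gz and
    # data.tar.gz; checksums.yaml records their digests. After fetching and
    # unpacking fakes3-0.2.1.gem, the SHA1 values above can be re-computed:
    require 'digest'

    {
      'metadata.gz' => '35ac3fdceddeef49ddeb95137f2dbfc8a719a4e2',
      'data.tar.gz' => 'c88fa58f1a7b52b9fcbb1dae974175bed5c1c178',
    }.each do |file, expected|
      actual = Digest::SHA1.file(file).hexdigest
      puts "#{file}: #{actual == expected ? 'ok' : 'MISMATCH'}"
    end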
Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    fakes3 (0.2.0)
+    fakes3 (0.2.1)
       builder
       thor
 
@@ -16,12 +16,17 @@ GEM
       json (~> 1.4)
       nokogiri (>= 1.4.4)
     builder (3.2.2)
+    byebug (4.0.1)
+      columnize (= 0.9.0)
+      rb-readline (= 0.5.2)
+    columnize (0.9.0)
     json (1.8.1)
     mime-types (1.25)
     mini_portile (0.6.1)
     nokogiri (1.6.4.1)
       mini_portile (~> 0.6.0)
     rake (10.1.0)
+    rb-readline (0.5.2)
     rest-client (1.6.7)
       mime-types (>= 1.16)
     right_aws (3.1.0)
@@ -37,6 +42,7 @@ DEPENDENCIES
   aws-s3
   aws-sdk-v1
   bundler (>= 1.0.0)
+  byebug
   fakes3!
   rake
   rest-client
lib/fakes3/file_store.rb CHANGED
@@ -103,7 +103,7 @@ module FakeS3
     def object_metadata(bucket,object)
     end
 
-    def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name,request)
+    def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
       src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR)
       src_metadata_filename = File.join(src_root,"metadata")
       src_metadata = YAML.load(File.open(src_metadata_filename,'rb').read)
@@ -140,8 +140,8 @@ module FakeS3
         end
       end
 
-      src_bucket = self.get_bucket(src_bucket_name)
-      dst_bucket = self.get_bucket(dst_bucket_name)
+      src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
+      dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)
 
       obj = S3Object.new
       obj.name = dst_name
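
copy_object now falls back to create_bucket when either bucket is missing, so a server-side copy into a bucket that was never explicitly created no longer fails. A rough aws-sdk-v1 sketch of that case, assuming a fakes3 server on localhost:10453 (endpoint, port, credentials, and names are placeholders, not values from the gem):

    require 'aws-sdk-v1'

    s3 = AWS::S3.new(
      :access_key_id     => '123',        # fakes3 ignores credentials
      :secret_access_key => 'abc',
      :s3_endpoint       => 'localhost',  # assumed local fakes3 endpoint
      :s3_port           => 10453,
      :use_ssl           => false
    )

    src = s3.buckets.create('source-bucket')
    src.objects['file.txt'].write('hello')

    # 'fresh-bucket' was never created; with 0.2.1 the server-side copy still
    # works because the store now falls back to create_bucket.
    src.objects['file.txt'].copy_to('file.txt', :bucket_name => 'fresh-bucket')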
@@ -155,7 +155,31 @@ module FakeS3
       return obj
     end
 
-    def store_object(bucket,object_name,request)
+    def store_object(bucket, object_name, request)
+      filedata = ""
+
+      # TODO put a tmpfile here first and mv it over at the end
+      content_type = request.content_type || ""
+
+      match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
+      boundary = match[1] if match
+      if boundary
+        boundary = WEBrick::HTTPUtils::dequote(boundary)
+        form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)
+
+        if form_data['file'] == nil or form_data['file'] == ""
+          raise WEBrick::HTTPStatus::BadRequest
+        end
+
+        filedata = form_data['file']
+      else
+        request.body { |chunk| filedata << chunk }
+      end
+
+      do_store_object(bucket, object_name, filedata, request)
+    end
+
+    def do_store_object(bucket, object_name, filedata, request)
       begin
         filename = File.join(@root,bucket.name,object_name)
         FileUtils.mkdir_p(filename)
@@ -163,29 +187,12 @@ module FakeS3
         metadata_dir = File.join(filename,SHUCK_METADATA_DIR)
         FileUtils.mkdir_p(metadata_dir)
 
-        content = File.join(filename,SHUCK_METADATA_DIR,"content")
+        content = File.join(filename,SHUCK_METADATA_DIR,"content")
         metadata = File.join(filename,SHUCK_METADATA_DIR,"metadata")
 
-        # TODO put a tmpfile here first and mv it over at the end
+        File.open(content,'wb') { |f| f << filedata }
 
-        match=request.content_type.match(/^multipart\/form-data; boundary=(.+)/)
-        boundary = match[1] if match
-        if boundary
-          boundary = WEBrick::HTTPUtils::dequote(boundary)
-          filedata = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)
-          raise HTTPStatus::BadRequest if filedata['file'].empty?
-          File.open(content, 'wb') do |f|
-            f << filedata['file']
-          end
-        else
-          File.open(content,'wb') do |f|
-            request.body do |chunk|
-              f << chunk
-            end
-          end
-        end
         metadata_struct = create_metadata(content,request)
-
         File.open(metadata,'w') do |f|
           f << YAML::dump(metadata_struct)
         end
@@ -206,6 +213,36 @@ module FakeS3
       end
     end
 
+    def combine_object_parts(bucket, upload_id, object_name, parts, request)
+      upload_path = File.join(@root, bucket.name)
+      base_path = File.join(upload_path, "#{upload_id}_#{object_name}")
+
+      complete_file = ""
+      chunk = ""
+      part_paths = []
+
+      parts.sort_by { |part| part[:number] }.each do |part|
+        part_path = "#{base_path}_part#{part[:number]}"
+        content_path = File.join(part_path, SHUCK_METADATA_DIR, 'content')
+
+        File.open(content_path, 'rb') { |f| chunk = f.read }
+        etag = Digest::MD5.hexdigest(chunk)
+
+        raise new Error "invalid file chunk" unless part[:etag] == etag
+        complete_file << chunk
+        part_paths << part_path
+      end
+
+      object = do_store_object(bucket, object_name, complete_file, request)
+
+      # clean up parts
+      part_paths.each do |path|
+        FileUtils.remove_dir(path)
+      end
+
+      object
+    end
+
     def delete_object(bucket,object_name,request)
       begin
         filename = File.join(@root,bucket.name,object_name)
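
combine_object_parts reassembles the uploaded parts in part-number order and refuses any part whose stored bytes no longer hash to the ETag the client reported; the ETag here is simply the MD5 hex digest of the part body. A small illustration (values are examples only):

    require 'digest/md5'

    part_body = "thisi"   # e.g. the first 5-byte chunk of a larger upload
    etag = Digest::MD5.hexdigest(part_body)

    # The client echoes this back when completing the upload:
    #   <Part><PartNumber>1</PartNumber><ETag>"#{etag}"</ETag></Part>
    # and combine_object_parts recomputes the digest from the part it stored
    # before concatenating the chunk into the final object.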
@@ -219,6 +256,7 @@ module FakeS3
       end
     end
 
+    # TODO: abstract getting meta data from request.
     def create_metadata(content,request)
       metadata = {}
       metadata[:md5] = Digest::MD5.file(content).hexdigest
lib/fakes3/server.rb CHANGED
@@ -2,6 +2,8 @@ require 'time'
 require 'webrick'
 require 'webrick/https'
 require 'openssl'
+require 'securerandom'
+require 'cgi'
 require 'fakes3/file_store'
 require 'fakes3/xml_adapter'
 require 'fakes3/bucket_query'
@@ -109,7 +111,7 @@ module FakeS3
         if time >= Time.iso8601(real_obj.modified_date)
           response.status = 304
           return
-        end
+        end
       end
 
       response.status = 200
@@ -160,6 +162,9 @@ module FakeS3
 
     def do_PUT(request,response)
       s_req = normalize_request(request)
+      query = CGI::parse(request.request_uri.query || "")
+
+      return do_multipartPUT(request, response) if query['uploadId'].first
 
       response.status = 200
       response.body = ""
@@ -184,44 +189,114 @@
       end
     end
 
-    def do_POST(request,response)
-      # check that we've received file data
-      unless request.content_type =~ /^multipart\/form-data; boundary=(.+)/
-        raise WEBrick::HTTPStatus::BadRequest
+    def do_multipartPUT(request, response)
+      s_req = normalize_request(request)
+      query = CGI::parse(request.request_uri.query)
+
+      part_number = query['partNumber'].first
+      upload_id = query['uploadId'].first
+      part_name = "#{upload_id}_#{s_req.object}_part#{part_number}"
+
+      # store the part
+      if s_req.type == Request::COPY
+        real_obj = @store.copy_object(
+          s_req.src_bucket, s_req.src_object,
+          s_req.bucket , part_name,
+          request
+        )
+
+        response['Content-Type'] = "text/xml"
+        response.body = XmlAdapter.copy_object_result real_obj
+      else
+        bucket_obj = @store.get_bucket(s_req.bucket)
+        if !bucket_obj
+          bucket_obj = @store.create_bucket(s_req.bucket)
+        end
+        real_obj = @store.store_object(
+          bucket_obj, part_name,
+          request
+        )
+
+        response.body = ""
+        response.header['ETag'] = "\"#{real_obj.md5}\""
       end
+
+      response['Access-Control-Allow-Origin'] = '*'
+      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
+      response['Access-Control-Expose-Headers'] = 'ETag'
+
+      response.status = 200
+    end
+
+    def do_POST(request,response)
       s_req = normalize_request(request)
-      key=request.query['key']
-      success_action_redirect=request.query['success_action_redirect']
-      success_action_status=request.query['success_action_status']
+      key = request.query['key']
+      query = CGI::parse(request.request_uri.query || "")
+
+      if query.has_key?('uploads')
+        upload_id = SecureRandom.hex
+
+        response.body = <<-eos.strip
+          <?xml version="1.0" encoding="UTF-8"?>
+          <InitiateMultipartUploadResult>
+            <Bucket>#{ s_req.bucket }</Bucket>
+            <Key>#{ key }</Key>
+            <UploadId>#{ upload_id }</UploadId>
+          </InitiateMultipartUploadResult>
+        eos
+      elsif query.has_key?('uploadId')
+        upload_id = query['uploadId'].first
+        bucket_obj = @store.get_bucket(s_req.bucket)
+        real_obj = @store.combine_object_parts(
+          bucket_obj,
+          upload_id,
+          s_req.object,
+          parse_complete_multipart_upload(request),
+          request
+        )
 
-      filename = 'default'
-      filename = $1 if request.body =~ /filename="(.*)"/
-      key=key.gsub('${filename}', filename)
+        response.body = XmlAdapter.complete_multipart_result real_obj
+      elsif request.content_type =~ /^multipart\/form-data; boundary=(.+)/
+        key=request.query['key']
 
-      bucket_obj = @store.get_bucket(s_req.bucket) || @store.create_bucket(s_req.bucket)
-      real_obj=@store.store_object(bucket_obj, key, s_req.webrick_request)
+        success_action_redirect = request.query['success_action_redirect']
+        success_action_status = request.query['success_action_status']
 
-      response['Etag'] = "\"#{real_obj.md5}\""
-      response.body = ""
-      if success_action_redirect
-        response.status = 307
-        response['Location']=success_action_redirect
-      else
-        response.status = success_action_status || 204
-        if response.status=="201"
-          response.body= <<-eos.strip
-            <?xml version="1.0" encoding="UTF-8"?>
-            <PostResponse>
-              <Location>http://#{s_req.bucket}.localhost:#{@port}/#{key}</Location>
-              <Bucket>#{s_req.bucket}</Bucket>
-              <Key>#{key}</Key>
-              <ETag>#{response['Etag']}</ETag>
-            </PostResponse>
-          eos
+        filename = 'default'
+        filename = $1 if request.body =~ /filename="(.*)"/
+        key = key.gsub('${filename}', filename)
+
+        bucket_obj = @store.get_bucket(s_req.bucket) || @store.create_bucket(s_req.bucket)
+        real_obj = @store.store_object(bucket_obj, key, s_req.webrick_request)
+
+        response['Etag'] = "\"#{real_obj.md5}\""
+
+        if success_action_redirect
+          response.status = 307
+          response.body = ""
+          response['Location'] = success_action_redirect
+        else
+          response.status = success_action_status || 204
+          if response.status == "201"
+            response.body = <<-eos.strip
+              <?xml version="1.0" encoding="UTF-8"?>
+              <PostResponse>
+                <Location>http://#{s_req.bucket}.localhost:#{@port}/#{key}</Location>
+                <Bucket>#{s_req.bucket}</Bucket>
+                <Key>#{key}</Key>
+                <ETag>#{response['Etag']}</ETag>
+              </PostResponse>
+            eos
+          end
         end
+      else
+        raise WEBrick::HTTPStatus::BadRequest
       end
-      response['Content-Type'] = 'text/xml'
-      response['Access-Control-Allow-Origin'] = '*'
+
+      response['Content-Type'] = 'text/xml'
+      response['Access-Control-Allow-Origin'] = '*'
+      response['Access-Control-Allow-Headers'] = 'Authorization, Content-Length'
+      response['Access-Control-Expose-Headers'] = 'ETag'
     end
 
     def do_DELETE(request,response)
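
Together with the do_PUT dispatch above, do_multipartPUT and the rewritten do_POST cover the three legs of S3's multipart upload protocol: POST ?uploads to obtain an UploadId, PUT ?partNumber=N&uploadId=... for each part, and POST ?uploadId=... with the part list to combine them. A rough hand-rolled sketch of that flow against a local server (host, port, bucket, and key are assumptions, not values taken from the gem):

    require 'net/http'

    http = Net::HTTP.new('localhost', 10453)  # assumes a fakes3 server on this port
    path = '/my_bucket/big_file'

    # 1. Initiate the upload; the server answers with an <UploadId>.
    init = http.post("#{path}?uploads", '', 'Content-Type' => 'application/xml')
    upload_id = init.body[/<UploadId>(.+)<\/UploadId>/, 1]

    # 2. Upload the parts; each response carries the part's ETag header.
    etags = []
    %w[hello world].each_with_index do |chunk, i|
      res = http.put("#{path}?partNumber=#{i + 1}&uploadId=#{upload_id}", chunk,
                     'Content-Type' => 'application/octet-stream')
      etags << res['ETag']
    end

    # 3. Complete the upload with the <Part> list; the server verifies each ETag
    #    in combine_object_parts and concatenates the parts into the final object.
    parts = etags.each_with_index.map do |etag, i|
      "<Part><PartNumber>#{i + 1}</PartNumber><ETag>#{etag}</ETag></Part>"
    end.join
    http.post("#{path}?uploadId=#{upload_id}",
              "<CompleteMultipartUpload>#{parts}</CompleteMultipartUpload>",
              'Content-Type' => 'application/xml')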
@@ -241,10 +316,11 @@ module FakeS3
 
     def do_OPTIONS(request, response)
       super
-      response["Access-Control-Allow-Origin"] = "*"
-      response["Access-Control-Allow-Methods"] = "HEAD, GET, PUT, POST"
-      response["Access-Control-Allow-Headers"] = "accept, content-type"
-      response["Access-Control-Expose-Headers"] = "ETag, x-amz-meta-custom-header"
+
+      response['Access-Control-Allow-Origin'] = '*'
+      response['Access-Control-Allow-Methods'] = 'PUT, POST, HEAD, GET, OPTIONS'
+      response['Access-Control-Allow-Headers'] = 'Accept, Content-Type, Authorization, Content-Length, ETag'
+      response['Access-Control-Expose-Headers'] = 'ETag'
     end
 
     private
@@ -336,9 +412,11 @@ module FakeS3
         end
       end
 
+      # TODO: also parse the x-amz-copy-source-range:bytes=first-last header
+      # for multipart copy
       copy_source = webrick_req.header["x-amz-copy-source"]
       if copy_source and copy_source.size == 1
-        src_elems = copy_source.first.split("/")
+        src_elems = copy_source.first.split("/")
         root_offset = src_elems[0] == "" ? 1 : 0
         s_req.src_bucket = src_elems[root_offset]
         s_req.src_object = src_elems[1 + root_offset,src_elems.size].join("/")
@@ -355,6 +433,14 @@ module FakeS3
       s_req.path = webrick_req.query['key']
 
       s_req.webrick_request = webrick_req
+
+      if s_req.is_path_style
+        elems = path[1,path_len].split("/")
+        s_req.bucket = elems[0]
+        s_req.object = elems[1..-1].join('/') if elems.size >= 2
+      else
+        s_req.object = path[1..-1]
+      end
     end
 
     # This method takes a webrick request and generates a normalized FakeS3 request
@@ -391,6 +477,21 @@ module FakeS3
       return s_req
     end
 
+    def parse_complete_multipart_upload request
+      parts_xml = ""
+      request.body { |chunk| parts_xml << chunk }
+
+      # TODO: I suck at parsing xml
+      parts_xml = parts_xml.scan /\<Part\>.*?<\/Part\>/m
+
+      parts_xml.collect do |xml|
+        {
+          number: xml[/\<PartNumber\>(\d+)\<\/PartNumber\>/, 1].to_i,
+          etag: xml[/\<ETag\>\"(.+)\"\<\/ETag\>/, 1]
+        }
+      end
+    end
+
     def dump_request(request)
       puts "----------Dump Request-------------"
       puts request.request_method
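
parse_complete_multipart_upload extracts part numbers and ETags from the CompleteMultipartUpload body with regular expressions rather than an XML parser. For illustration, a body like the one below (ETag values are placeholders) is reduced to the array of hashes that combine_object_parts consumes:

    body = <<-XML
      <CompleteMultipartUpload>
        <Part><PartNumber>1</PartNumber><ETag>"5d41402abc4b2a76b9719d911017c592"</ETag></Part>
        <Part><PartNumber>2</PartNumber><ETag>"7d793037a0760186574b0282f2f435e7"</ETag></Part>
      </CompleteMultipartUpload>
    XML

    parts = body.scan(/\<Part\>.*?<\/Part\>/m).collect do |xml|
      { number: xml[/\<PartNumber\>(\d+)\<\/PartNumber\>/, 1].to_i,
        etag:   xml[/\<ETag\>\"(.+)\"\<\/ETag\>/, 1] }
    end
    # => [{:number=>1, :etag=>"5d41402abc4b2a76b9719d911017c592"},
    #     {:number=>2, :etag=>"7d793037a0760186574b0282f2f435e7"}]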
lib/fakes3/version.rb CHANGED
@@ -1,3 +1,3 @@
 module FakeS3
-  VERSION = "0.2.0"
+  VERSION = "0.2.1"
 end
lib/fakes3/xml_adapter.rb CHANGED
@@ -199,5 +199,24 @@ module FakeS3
       }
       output
     end
+
+    # <CompleteMultipartUploadResult>
+    # <Location>http://Example-Bucket.s3.amazonaws.com/Example-Object</Location>
+    # <Bucket>Example-Bucket</Bucket>
+    # <Key>Example-Object</Key>
+    # <ETag>"3858f62230ac3c915f300c664312c11f-9"</ETag>
+    # </CompleteMultipartUploadResult>
+    def self.complete_multipart_result(object)
+      output = ""
+      xml = Builder::XmlMarkup.new(:target => output)
+      xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+      xml.CompleteMultipartUploadResult { |result|
+        result.Location("TODO: implement")
+        result.Bucket("TODO: implement")
+        result.Key(object.name)
+        result.ETag("\"#{object.md5}\"")
+      }
+      output
+    end
   end
 end
test/aws_sdk_commands_test.rb CHANGED
@@ -20,4 +20,12 @@ class AwsSdkCommandsTest < Test::Unit::TestCase
 
     assert_equal 2, bucket.objects.count
   end
+
+  def test_multipart_upload
+    bucket = @s3.buckets["test_multipart_upload"]
+    object = bucket.objects["key1"]
+    object.write("thisisaverybigfile", :multipart_threshold => 5)
+    assert object.exists?
+    assert_equal "thisisaverybigfile", object.read
+  end
 end
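
The new test forces aws-sdk-v1 down the multipart path by setting a tiny :multipart_threshold, so the object is uploaded via the initiate/part/complete flow and reassembled by the server. The @s3 client comes from the suite's test_helper; a hypothetical equivalent configuration (port and credentials are placeholders, not necessarily the gem's actual values):

    require 'aws-sdk-v1'

    AWS.config(
      :access_key_id     => '123',        # fakes3 does not check credentials
      :secret_access_key => 'abc',
      :s3_endpoint       => 'localhost',  # a locally running fakes3 server
      :s3_port           => 10453,
      :use_ssl           => false
    )
    @s3 = AWS::S3.new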
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fakes3
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.2.1
 platform: ruby
 authors:
 - Curtis Spencer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-03-05 00:00:00.000000000 Z
+date: 2015-03-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -196,4 +196,3 @@ test_files:
 - test/s3_commands_test.rb
 - test/s3cmd_test.rb
 - test/test_helper.rb
-has_rdoc: