fakes3 0.1.5.2 → 0.1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 0fd5e9498cd058b485b059f03ca06d9030d8d7c1
-  data.tar.gz: 31b5e10a57f2a723a8a04bbdec62f7c726c8cb5c
+  metadata.gz: afc9d8c94ab0b848399aae28b8031acf3a27cadb
+  data.tar.gz: d46b07e6f772a54bcece517ae7a7c6517a4243c8
 SHA512:
-  metadata.gz: 56353c5a81798343a51989019c484a5e203b91188e813a3bddbb33ff120d451b5d99c7c6c2d9cbf13b2c929f9c651eb9d0f846ddae3d1529fbfc3718b96d07e0
-  data.tar.gz: b24c2db7d546dd0772dd2c897f90d2ae331b9b286f326be62e8d5794eaa606b331e48a0216c3fddb0ccffa42719c7802b1b8952f3d9c0b796d8a27943470e6cb
+  metadata.gz: 9db326bbb641bfedc2be1553b1846dcaab6a886d133a50be6912de8bce365a4bf509e73399650508148d125bee1cd11039a2988d2a644d081f7bdc2ef79f79d3
+  data.tar.gz: 3dafd2e1c59227812fd5b004c99e8a9c41287402d154cc3eea8d70fba121b94b6d88822e37f173e5362de976f8d7a46c39ec1dedbcb63fb0ee8a854a33d3861b
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    fakes3 (0.1.5.2)
+    fakes3 (0.1.6.0)
       builder
       thor
 
@@ -15,6 +15,8 @@ GEM
     builder (3.2.2)
     mime-types (1.25)
     rake (10.1.0)
+    rest-client (1.6.7)
+      mime-types (>= 1.16)
     right_aws (3.1.0)
       right_http_connection (>= 1.2.5)
     right_http_connection (1.4.0)
@@ -29,4 +31,5 @@ DEPENDENCIES
   bundler (>= 1.0.0)
   fakes3!
   rake
+  rest-client
   right_aws
data/README.md CHANGED
@@ -1,7 +1,7 @@
 ## Introduction
-FakeS3 is a lightweight server that responds to the same calls Amazon S3 responds to.
+FakeS3 is a lightweight server that responds to the same calls Amazon S3 responds to.
 It is extremely useful for testing of S3 in a sandbox environment without actually
-making calls to Amazon, which not only require network, but also cost you precious dollars.
+making calls to Amazon, which not only require network, but also cost you precious dollars.
 
 The goal of Fake S3 is to minimize runtime dependencies and be more of a
 development tool to test S3 calls in your code rather than a production server
@@ -31,6 +31,20 @@ Here is a running list of [supported clients](https://github.com/jubos/fake-s3/wi
 
 ## Running Tests
 
+There are some pre-requesites to actually being able to run the unit/integration tests
+
+### On OSX
+
+Edit your /etc/hosts and add the following line:
+
+    127.0.0.1 posttest.localhost
+
+Then ensure that the following packages are installed (boto, s3cmd)
+
+> pip install boto
+> brew install s3cmd
+
+
 Start the test server using
 
     rake test_server
@@ -39,7 +53,7 @@ Then in another terminal window run
 
     rake test
 
-It is a TODO to get this to be just one command
+It is a still a TODO to get this to be just one command
 
 ## More Information
 
data/fakes3.gemspec CHANGED
@@ -16,10 +16,11 @@ Gem::Specification.new do |s|
   s.add_development_dependency "bundler", ">= 1.0.0"
   s.add_development_dependency "aws-s3"
   s.add_development_dependency "right_aws"
+  s.add_development_dependency "rest-client"
   s.add_development_dependency "rake"
   #s.add_development_dependency "aws-sdk"
   #s.add_development_dependency "ruby-debug"
-  #s.add_development_dependency "ruby-debug19"
+  #s.add_development_dependency "debugger"
   s.add_dependency "thor"
   s.add_dependency "builder"
 
data/lib/fakes3/file_store.rb CHANGED
@@ -82,8 +82,9 @@ module FakeS3
         #real_obj.io = File.open(File.join(obj_root,"content"),'rb')
         real_obj.io = RateLimitableFile.open(File.join(obj_root,"content"),'rb')
         real_obj.size = metadata.fetch(:size) { 0 }
-        real_obj.creation_date = File.ctime(obj_root).iso8601()
-        real_obj.modified_date = metadata.fetch(:modified_date) { File.mtime(File.join(obj_root,"content")).iso8601() }
+        real_obj.creation_date = File.ctime(obj_root).utc.iso8601()
+        real_obj.modified_date = metadata.fetch(:modified_date) { File.mtime(File.join(obj_root,"content")).utc.iso8601() }
+        real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
         return real_obj
       rescue
         puts $!
@@ -95,7 +96,7 @@ module FakeS3
     def object_metadata(bucket,object)
     end
 
-    def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name)
+    def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name,request)
       src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR)
       src_metadata_filename = File.join(src_root,"metadata")
       src_metadata = YAML.load(File.open(src_metadata_filename,'rb').read)
@@ -110,15 +111,25 @@ module FakeS3
       content = File.join(metadata_dir,"content")
       metadata = File.join(metadata_dir,"metadata")
 
-      File.open(content,'wb') do |f|
-        File.open(src_content_filename,'rb') do |input|
-          f << input.read
+      if src_bucket_name != dst_bucket_name || src_name != dst_name
+        File.open(content,'wb') do |f|
+          File.open(src_content_filename,'rb') do |input|
+            f << input.read
+          end
+        end
+
+        File.open(metadata,'w') do |f|
+          File.open(src_metadata_filename,'r') do |input|
+            f << input.read
+          end
         end
       end
 
-      File.open(metadata,'w') do |f|
-        File.open(src_metadata_filename,'r') do |input|
-          f << input.read
+      metadata_directive = request.header["x-amz-metadata-directive"].first
+      if metadata_directive == "REPLACE"
+        metadata_struct = create_metadata(content,request)
+        File.open(metadata,'w') do |f|
+          f << YAML::dump(metadata_struct)
         end
       end
 
@@ -149,21 +160,25 @@ module FakeS3
       content = File.join(filename,SHUCK_METADATA_DIR,"content")
       metadata = File.join(filename,SHUCK_METADATA_DIR,"metadata")
 
-      md5 = Digest::MD5.new
       # TODO put a tmpfile here first and mv it over at the end
 
-      File.open(content,'wb') do |f|
-        request.body do |chunk|
-          f << chunk
-          md5 << chunk
+      match=request.content_type.match(/^multipart\/form-data; boundary=(.+)/)
+      boundary = match[1] if match
+      if boundary
+        boundary = WEBrick::HTTPUtils::dequote(boundary)
+        filedata = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)
+        raise HTTPStatus::BadRequest if filedata['file'].empty?
+        File.open(content, 'wb') do |f|
+          f << filedata['file']
+        end
+      else
+        File.open(content,'wb') do |f|
+          request.body do |chunk|
+            f << chunk
+          end
         end
       end
-
-      metadata_struct = {}
-      metadata_struct[:md5] = md5.hexdigest
-      metadata_struct[:content_type] = request.header["content-type"].first
-      metadata_struct[:size] = File.size(content)
-      metadata_struct[:modified_date] = File.mtime(content).iso8601()
+      metadata_struct = create_metadata(content,request)
 
       File.open(metadata,'w') do |f|
         f << YAML::dump(metadata_struct)
@@ -197,5 +212,22 @@ module FakeS3
         return nil
       end
     end
+
+    def create_metadata(content,request)
+      metadata = {}
+      metadata[:md5] = Digest::MD5.file(content).hexdigest
+      metadata[:content_type] = request.header["content-type"].first
+      metadata[:size] = File.size(content)
+      metadata[:modified_date] = File.mtime(content).utc.iso8601()
+
+      # Add custom metadata from the request header
+      request.header.each do |key, value|
+        match = /^x-amz-meta-(.*)$/.match(key)
+        if match
+          metadata_struct[:custom_metadata][match[1]] = value.join(', ')
+        end
+      end
+      return metadata
+    end
   end
 end
data/lib/fakes3/s3_object.rb CHANGED
@@ -1,7 +1,7 @@
 module FakeS3
   class S3Object
     include Comparable
-    attr_accessor :name,:size,:creation_date,:modified_date,:md5,:io,:content_type
+    attr_accessor :name,:size,:creation_date,:modified_date,:md5,:io,:content_type,:custom_metadata
 
     def hash
       @name.hash
data/lib/fakes3/server.rb CHANGED
@@ -1,3 +1,4 @@
+require 'time'
 require 'webrick'
 require 'fakes3/file_store'
 require 'fakes3/xml_adapter'
@@ -43,6 +44,7 @@ module FakeS3
       super(server)
       @store = store
       @hostname = hostname
+      @port = server.config[:Port]
       @root_hostnames = [hostname,'localhost','s3.amazonaws.com','s3.localhost']
     end
 
@@ -81,7 +83,8 @@ module FakeS3
       real_obj = @store.get_object(s_req.bucket,s_req.object,request)
       if !real_obj
         response.status = 404
-        response.body = ""
+        response.body = XmlAdapter.error_no_such_key(s_req.object)
+        response['Content-Type'] = "application/xml"
         return
       end
 
@@ -89,11 +92,15 @@ module FakeS3
       response['Content-Type'] = real_obj.content_type
       stat = File::Stat.new(real_obj.io.path)
 
-      response['Last-Modified'] = stat.mtime.iso8601()
+      response['Last-Modified'] = Time.iso8601(real_obj.modified_date).httpdate()
      response.header['ETag'] = "\"#{real_obj.md5}\""
       response['Accept-Ranges'] = "bytes"
       response['Last-Ranges'] = "bytes"
 
+      real_obj.custom_metadata.each do |header, value|
+        response.header['x-amz-meta-' + header] = value
+      end
+
       content_length = stat.size
 
       # Added Range Query support
@@ -118,7 +125,6 @@ module FakeS3
         end
       end
       response['Content-Length'] = File::Stat.new(real_obj.io.path).size
-      response['Last-Modified'] = real_obj.modified_date
       if s_req.http_verb == 'HEAD'
         response.body = ""
       else
@@ -130,9 +136,14 @@ module FakeS3
     def do_PUT(request,response)
       s_req = normalize_request(request)
 
+      response.status = 200
+      response.body = ""
+      response['Content-Type'] = "text/xml"
+
       case s_req.type
       when Request::COPY
-        @store.copy_object(s_req.src_bucket,s_req.src_object,s_req.bucket,s_req.object)
+        object = @store.copy_object(s_req.src_bucket,s_req.src_object,s_req.bucket,s_req.object,request)
+        response.body = XmlAdapter.copy_object_result(object)
       when Request::STORE
         bucket_obj = @store.get_bucket(s_req.bucket)
         if !bucket_obj
@@ -145,14 +156,46 @@ module FakeS3
       when Request::CREATE_BUCKET
         @store.create_bucket(s_req.bucket)
       end
-
-      response.status = 200
-      response.body = ""
-      response['Content-Type'] = "text/xml"
     end
 
-    # Posts aren't supported yet
     def do_POST(request,response)
+      # check that we've received file data
+      unless request.content_type =~ /^multipart\/form-data; boundary=(.+)/
+        raise WEBrick::HTTPStatus::BadRequest
+      end
+      s_req = normalize_request(request)
+      key=request.query['key']
+      success_action_redirect=request.query['success_action_redirect']
+      success_action_status=request.query['success_action_status']
+
+      filename = 'default'
+      filename = $1 if request.body =~ /filename="(.*)"/
+      key=key.gsub('${filename}', filename)
+
+      bucket_obj = @store.get_bucket(s_req.bucket) || @store.create_bucket(s_req.bucket)
+      real_obj=@store.store_object(bucket_obj, key, s_req.webrick_request)
+
+      response['Etag'] = "\"#{real_obj.md5}\""
+      response.body = ""
+      if success_action_redirect
+        response.status = 307
+        response['Location']=success_action_redirect
+      else
+        response.status = success_action_status || 204
+        if response.status=="201"
+          response.body= <<-eos.strip
+            <?xml version="1.0" encoding="UTF-8"?>
+            <PostResponse>
+              <Location>http://#{s_req.bucket}.localhost:#{@port}/#{key}</Location>
+              <Bucket>#{s_req.bucket}</Bucket>
+              <Key>#{key}</Key>
+              <ETag>#{response['Etag']}</ETag>
+            </PostResponse>
+          eos
+        end
+      end
+      response['Content-Type'] = 'text/xml'
+      response['Access-Control-Allow-Origin']='*'
     end
 
     def do_DELETE(request,response)
@@ -169,6 +212,11 @@ module FakeS3
       response.status = 204
       response.body = ""
     end
+
+    def do_OPTIONS(request, response)
+      super
+      response["Access-Control-Allow-Origin"]="*"
+    end
 
     private
 
@@ -255,7 +303,7 @@ module FakeS3
         else
           s_req.type = Request::STORE
         end
-        s_req.object = webrick_req.path
+        s_req.object = webrick_req.path[1..-1]
       end
     end
 
@@ -271,6 +319,15 @@ module FakeS3
       s_req.webrick_request = webrick_req
     end
 
+    def normalize_post(webrick_req,s_req)
+      path = webrick_req.path
+      path_len = path.size
+
+      s_req.path = webrick_req.query['key']
+
+      s_req.webrick_request = webrick_req
+    end
+
     # This method takes a webrick request and generates a normalized FakeS3 request
     def normalize_request(webrick_req)
      host_header= webrick_req["Host"]
@@ -294,6 +351,8 @@ module FakeS3
         normalize_get(webrick_req,s_req)
       when 'DELETE'
         normalize_delete(webrick_req,s_req)
+      when 'POST'
+        normalize_post(webrick_req,s_req)
       else
         raise "Unknown Request"
       end
data/lib/fakes3/version.rb CHANGED
@@ -1,3 +1,3 @@
 module FakeS3
-  VERSION = "0.1.5.2"
+  VERSION = "0.1.6.0"
 end
data/lib/fakes3/xml_adapter.rb CHANGED
@@ -175,5 +175,20 @@ module FakeS3
       }
       output
     end
+
+    # <CopyObjectResult>
+    #   <LastModified>2009-10-28T22:32:00</LastModified>
+    #   <ETag>"9b2cf535f27731c974343645a3985328"</ETag>
+    # </CopyObjectResult>
+    def self.copy_object_result(object)
+      output = ""
+      xml = Builder::XmlMarkup.new(:target => output)
+      xml.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
+      xml.CopyObjectResult { |result|
+        result.LastModified(object.modified_date)
+        result.ETag("\"#{object.md5}\"")
+      }
+      output
+    end
   end
 end
data/test/post_test.rb ADDED
@@ -0,0 +1,54 @@
+require 'test/test_helper'
+require 'rest-client'
+
+class PostTest < Test::Unit::TestCase
+  # Make sure you have a posttest.localhost in your /etc/hosts/
+  def setup
+    @url='http://posttest.localhost:10453/'
+  end
+
+  def teardown
+  end
+
+  def test_options
+    res= RestClient.options(@url) { |response|
+      assert_equal(response.headers[:access_control_allow_origin],"*")
+    }
+  end
+
+  def test_redirect
+    res = RestClient.post(
+      @url,
+      'key'=>'uploads/12345/${filename}',
+      'success_action_redirect'=>'http://somewhere.else.com/',
+      'file'=>File.new(__FILE__,"rb")
+    ) { |response|
+      assert_equal(response.code, 307)
+      assert_equal(response.headers[:location], 'http://somewhere.else.com/')
+    }
+  end
+
+  def test_status_200
+    res = RestClient.post(
+      @url,
+      'key'=>'uploads/12345/${filename}',
+      'success_action_status'=>'200',
+      'file'=>File.new(__FILE__,"rb")
+    ) { |response|
+      assert_equal(response.code, 200)
+    }
+  end
+
+  def test_status_201
+    res = RestClient.post(
+      @url,
+      'key'=>'uploads/12345/${filename}',
+      'success_action_status'=>'201',
+      'file'=>File.new(__FILE__,"rb")
+    ) { |response|
+      assert_equal(response.code, 201)
+      assert_match(%r{^\<\?xml.*uploads/12345/post_test\.rb}m, response.body)
+    }
+  end
+
+end
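The new do_POST handler and the post_test.rb file above implement and exercise browser-style multipart/form-data uploads: a key field, an optional success_action_redirect or success_action_status, and a file part whose filename is substituted for ${filename} in the key. As an illustrative sketch only (not part of this release), the same request can be made with nothing but Ruby's standard library; the hostname and port, posttest.localhost:10453, are assumptions taken from the test setup described in the README changes above.

    # Hypothetical usage sketch, not shipped with the gem: POST a file to a
    # running FakeS3 test server using only net/http, mirroring what
    # post_test.rb does with rest-client.
    require 'net/http'
    require 'uri'

    uri = URI('http://posttest.localhost:10453/')

    Net::HTTP.start(uri.host, uri.port) do |http|
      post = Net::HTTP::Post.new(uri)
      # set_form with 'multipart/form-data' builds the multipart body; the
      # filename of the file part is what the server substitutes for ${filename}.
      post.set_form(
        [
          ['key', 'uploads/12345/${filename}'],
          ['success_action_status', '201'],
          ['file', File.open(__FILE__)]
        ],
        'multipart/form-data'
      )
      response = http.request(post)
      puts response.code     # 201 when success_action_status is honored
      puts response['ETag']  # quoted MD5 of the stored content
      puts response.body     # <PostResponse> XML in the 201 case
    end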
data/test/right_aws_commands_test.rb CHANGED
@@ -27,6 +27,16 @@ class RightAWSCommandsTest < Test::Unit::TestCase
     obj = @s3.get("s3media","helloworld")
   end
 
+  def test_store_not_found
+    begin
+      obj = @s3.get("s3media","helloworldnotexist")
+    rescue RightAws::AwsError
+      assert $!.message.include?('NoSuchKey')
+    rescue
+      fail 'Should have caught NoSuchKey Exception'
+    end
+  end
+
   def test_large_store
     @s3.put("s3media","helloworld","Hello World Man!")
     buffer = ""
@@ -60,6 +70,24 @@ class RightAWSCommandsTest < Test::Unit::TestCase
     assert_equal "Hello World",obj[:object]
   end
 
+  def test_copy_in_place
+    @s3.put("s3media","foo","Hello World")
+    @s3.copy("s3media","foo","s3media","foo")
+    obj = @s3.get("s3media","foo")
+    assert_equal "Hello World",obj[:object]
+  end
+
+  def test_copy_replace_metadata
+    @s3.put("s3media","foo","Hello World",{"content-type"=>"application/octet-stream"})
+    obj = @s3.get("s3media","foo")
+    assert_equal "Hello World",obj[:object]
+    assert_equal "application/octet-stream",obj[:headers]["content-type"]
+    @s3.copy("s3media","foo","s3media","foo",:replace,{"content-type"=>"text/plain"})
+    obj = @s3.get("s3media","foo")
+    assert_equal "Hello World",obj[:object]
+    assert_equal "text/plain",obj[:headers]["content-type"]
+  end
+
   def test_larger_lists
     @s3.create_bucket('right_aws_many')
     (0..50).each do |i|
data/test/s3_commands_test.rb CHANGED
@@ -67,6 +67,49 @@ class S3CommandsTest < Test::Unit::TestCase
     assert_equal buf_len,output.size
   end
 
+  def test_metadata_store
+    assert_equal true, Bucket.create("ruby_aws_s3")
+    bucket = Bucket.find("ruby_aws_s3")
+
+    # Note well: we can't seem to access obj.metadata until we've stored
+    # the object and found it again. Thus the store, find, store
+    # runaround below.
+    obj = bucket.new_object(:value => "foo")
+    obj.key = "key_with_metadata"
+    obj.store
+    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+    obj.metadata[:param1] = "one"
+    obj.metadata[:param2] = "two, three"
+    obj.store
+    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+
+    assert_equal "one", obj.metadata[:param1]
+    assert_equal "two, three", obj.metadata[:param2]
+  end
+
+  def test_metadata_copy
+    assert_equal true, Bucket.create("ruby_aws_s3")
+    bucket = Bucket.find("ruby_aws_s3")
+
+    # Note well: we can't seem to access obj.metadata until we've stored
+    # the object and found it again. Thus the store, find, store
+    # runaround below.
+    obj = bucket.new_object(:value => "foo")
+    obj.key = "key_with_metadata"
+    obj.store
+    obj = S3Object.find("key_with_metadata", "ruby_aws_s3")
+    obj.metadata[:param1] = "one"
+    obj.metadata[:param2] = "two, three"
+    obj.store
+
+    S3Object.copy("key_with_metadata", "key_with_metadata2", "ruby_aws_s3")
+    obj = S3Object.find("key_with_metadata2", "ruby_aws_s3")
+
+    assert_equal "one", obj.metadata[:param1]
+    assert_equal "two, three", obj.metadata[:param2]
+  end
+
+
   def test_multi_directory
     bucket = Bucket.create("ruby_aws_s3")
     S3Object.store("dir/myfile/123.txt","recursive","ruby_aws_s3")
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fakes3
 version: !ruby/object:Gem::Version
-  version: 0.1.5.2
+  version: 0.1.6.0
 platform: ruby
 authors:
 - Curtis Spencer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-10-21 00:00:00.000000000 Z
+date: 2014-11-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -52,6 +52,20 @@ dependencies:
     - - '>='
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: rest-client
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -127,6 +141,7 @@ files:
 - test/boto_test.rb
 - test/botocmd.py
 - test/local_s3_cfg
+- test/post_test.rb
 - test/right_aws_commands_test.rb
 - test/s3_commands_test.rb
 - test/s3cmd_test.rb
@@ -150,7 +165,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project: fakes3
-rubygems_version: 2.0.2
+rubygems_version: 2.0.14
 signing_key:
 specification_version: 4
 summary: FakeS3 is a server that simulates S3 commands so you can test your S3 functionality
@@ -159,6 +174,7 @@ test_files:
 - test/boto_test.rb
 - test/botocmd.py
 - test/local_s3_cfg
+- test/post_test.rb
 - test/right_aws_commands_test.rb
 - test/s3_commands_test.rb
 - test/s3cmd_test.rb