plntr-fakes3 1.0.0.pre.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,286 @@
1
+ require 'fileutils'
2
+ require 'time'
3
+ require 'fakes3/s3_object'
4
+ require 'fakes3/bucket'
5
+ require 'fakes3/rate_limitable_file'
6
+ require 'digest/md5'
7
+ require 'yaml'
8
+
9
+ module FakeS3
10
+ class FileStore
11
+ FAKE_S3_METADATA_DIR = ".fakes3_metadataFFF"
12
+
13
+ # S3 clients with overly strict date parsing fails to parse ISO 8601 dates
14
+ # without any sub second precision (e.g. jets3t v0.7.2), and the examples
15
+ # given in the official AWS S3 documentation specify three (3) decimals for
16
+ # sub second precision.
17
+ SUBSECOND_PRECISION = 3
18
+
19
# Build a file-backed store rooted at +root+, registering an in-memory
# Bucket for every sub-directory already present on disk.
def initialize(root)
  @root = root
  @buckets = []
  @bucket_hash = {}
  Dir.glob(File.join(root, "*")).each do |bucket_dir|
    name = File.basename(bucket_dir)
    registered = Bucket.new(name, Time.now, [])
    @buckets << registered
    @bucket_hash[name] = registered
  end
end
30
+
31
# Pass a rate limit in bytes per second.
# Accepts "1000" (bytes), "10K", "1.1M", "2G"; any non-String clears the
# limit. Raises on an unrecognised string format.
def rate_limit=(rate_limit)
  unless rate_limit.is_a?(String)
    RateLimitableFile.rate_limit = nil
    return
  end

  case rate_limit
  when /^(\d+)$/
    RateLimitableFile.rate_limit = rate_limit.to_i
  when /^(.*)K$/
    RateLimitableFile.rate_limit = Regexp.last_match(1).to_f * 1000
  when /^(.*)M$/
    RateLimitableFile.rate_limit = Regexp.last_match(1).to_f * 1000000
  when /^(.*)G$/
    RateLimitableFile.rate_limit = Regexp.last_match(1).to_f * 1000000000
  else
    raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
  end
end
49
+
50
# All buckets known to this store, in registration order.
def buckets
  @buckets
end
53
+
54
# Absolute path of the directory backing +bucket+ under the store root.
def get_bucket_folder(bucket)
  File.join(@root, bucket.name)
end
57
+
58
# Look up a registered bucket by name; returns nil when unknown.
def get_bucket(bucket)
  @bucket_hash[bucket]
end
61
+
62
# Ensure the bucket's directory exists on disk and register it in memory.
# Returns a freshly built Bucket object; when the name was already
# registered, the earlier registration is kept and the fresh object is
# simply returned (matching the original behaviour).
def create_bucket(bucket)
  FileUtils.mkdir_p(File.join(@root, bucket))
  fresh = Bucket.new(bucket, Time.now, [])
  unless @bucket_hash[bucket]
    @buckets << fresh
    @bucket_hash[bucket] = fresh
  end
  fresh
end
71
+
72
# Remove a bucket from disk and from the in-memory indexes.
#
# Raises NoSuchBucket when the bucket is unknown and BucketNotEmpty when
# it still contains objects.
def delete_bucket(bucket_name)
  bucket = get_bucket(bucket_name)
  raise NoSuchBucket if !bucket
  raise BucketNotEmpty if bucket.objects.count > 0
  FileUtils.rm_r(get_bucket_folder(bucket))
  @bucket_hash.delete(bucket_name)
  # Bug fix: also drop the bucket from the listing array; previously a
  # deleted bucket kept appearing in `buckets`.
  @buckets.delete(bucket)
end
79
+
80
# Load the object +object_name+ from +bucket+ off disk.
#
# Returns an S3Object with its metadata populated and `io` opened on the
# content file, or nil when the object cannot be read (preserving the
# original best-effort behaviour: the error is printed to stdout).
def get_object(bucket, object_name, request)
  begin
    real_obj = S3Object.new
    obj_root = File.join(@root, bucket, object_name, FAKE_S3_METADATA_DIR)
    metadata = File.open(File.join(obj_root, "metadata")) do |file|
      # Psych 4 (Ruby 3.1+) made YAML.load a safe load that rejects the
      # Symbol keys this store writes; fall back to unsafe_load there.
      YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(file) : YAML.load(file)
    end
    real_obj.name = object_name
    real_obj.md5 = metadata[:md5]
    real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" }
    # Bug fix: a bare fetch raised KeyError (and so the rescue returned
    # nil) for any object stored without a :content_encoding entry.
    real_obj.content_encoding = metadata.fetch(:content_encoding, nil)
    real_obj.io = RateLimitableFile.open(File.join(obj_root, "content"), 'rb')
    real_obj.size = metadata.fetch(:size) { 0 }
    real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
    real_obj.modified_date = metadata.fetch(:modified_date) do
      File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
    end
    real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
    return real_obj
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end
103
+
104
# Placeholder for per-object metadata retrieval; intentionally a no-op
# that returns nil.
def object_metadata(bucket, object)
end
106
+
107
# Copy the object +src_name+ in +src_bucket_name+ to +dst_name+ in
# +dst_bucket_name+. Content and metadata files are duplicated unless
# source and destination are identical. When the request carries
# "x-amz-metadata-directive: REPLACE", fresh metadata is generated from
# the request instead of copied. Both buckets are created on demand.
# Returns the S3Object registered on the destination bucket.
def copy_object(src_bucket_name, src_name, dst_bucket_name, dst_name, request)
  src_root = File.join(@root, src_bucket_name, src_name, FAKE_S3_METADATA_DIR)
  src_metadata_filename = File.join(src_root, "metadata")
  # Fix: File.read instead of File.open(...).read, which leaked an open
  # file handle; unsafe_load keeps the Symbol keys working on Psych 4+.
  src_metadata_yaml = File.read(src_metadata_filename)
  src_metadata =
    if YAML.respond_to?(:unsafe_load)
      YAML.unsafe_load(src_metadata_yaml)
    else
      YAML.load(src_metadata_yaml)
    end
  src_content_filename = File.join(src_root, "content")

  dst_filename = File.join(@root, dst_bucket_name, dst_name)
  FileUtils.mkdir_p(dst_filename)

  metadata_dir = File.join(dst_filename, FAKE_S3_METADATA_DIR)
  FileUtils.mkdir_p(metadata_dir)

  content = File.join(metadata_dir, "content")
  metadata = File.join(metadata_dir, "metadata")

  if src_bucket_name != dst_bucket_name || src_name != dst_name
    # Copy content in binary mode; block forms close both handles.
    File.open(content, 'wb') do |f|
      File.open(src_content_filename, 'rb') { |input| f << input.read }
    end

    File.open(metadata, 'w') do |f|
      File.open(src_metadata_filename, 'r') { |input| f << input.read }
    end
  end

  metadata_directive = request.header["x-amz-metadata-directive"].first
  if metadata_directive == "REPLACE"
    metadata_struct = create_metadata(content, request)
    File.open(metadata, 'w') do |f|
      f << YAML::dump(metadata_struct)
    end
  end

  src_bucket = get_bucket(src_bucket_name) || create_bucket(src_bucket_name)
  dst_bucket = get_bucket(dst_bucket_name) || create_bucket(dst_bucket_name)

  obj = S3Object.new
  obj.name = dst_name
  obj.md5 = src_metadata[:md5]
  obj.content_type = src_metadata[:content_type]
  obj.content_encoding = src_metadata[:content_encoding]
  obj.size = src_metadata[:size]
  obj.modified_date = src_metadata[:modified_date]

  src_bucket.find(src_name)
  dst_bucket.add(obj)
  return obj
end
159
+
160
# Extract the payload from +request+ — either the "file" field of a
# multipart/form-data upload or the raw request body — and hand it to
# do_store_object.
def store_object(bucket, object_name, request)
  filedata = ""

  # TODO put a tmpfile here first and mv it over at the end
  content_type = request.content_type || ""

  match = content_type.match(/^multipart\/form-data; boundary=(.+)/)
  boundary = match && match[1]
  if boundary
    boundary = WEBrick::HTTPUtils::dequote(boundary)
    form_data = WEBrick::HTTPUtils::parse_form_data(request.body, boundary)

    file_field = form_data['file']
    if file_field.nil? || file_field == ""
      raise WEBrick::HTTPStatus::BadRequest
    end

    filedata = file_field
  else
    request.body { |chunk| filedata << chunk }
  end

  do_store_object(bucket, object_name, filedata, request)
end
183
+
184
# Persist +filedata+ and its generated metadata under the bucket's
# directory and register the resulting S3Object on +bucket+.
# Returns the object, or nil (after printing the error) when anything
# goes wrong — matching the store's best-effort style.
def do_store_object(bucket, object_name, filedata, request)
  begin
    object_dir = File.join(@root, bucket.name, object_name)
    FileUtils.mkdir_p(object_dir)

    metadata_dir = File.join(object_dir, FAKE_S3_METADATA_DIR)
    FileUtils.mkdir_p(metadata_dir)

    content_path = File.join(object_dir, FAKE_S3_METADATA_DIR, "content")
    metadata_path = File.join(object_dir, FAKE_S3_METADATA_DIR, "metadata")

    File.open(content_path, 'wb') { |f| f << filedata }

    metadata_struct = create_metadata(content_path, request)
    File.open(metadata_path, 'w') do |f|
      f << YAML::dump(metadata_struct)
    end

    stored = S3Object.new
    stored.name = object_name
    stored.md5 = metadata_struct[:md5]
    stored.content_type = metadata_struct[:content_type]
    stored.content_encoding = metadata_struct[:content_encoding]
    stored.size = metadata_struct[:size]
    stored.modified_date = metadata_struct[:modified_date]

    bucket.add(stored)
    return stored
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end
218
+
219
# Reassemble the uploaded parts of a multipart upload into one object.
#
# Each part's MD5 is checked against the etag the client supplied; a
# mismatch aborts the whole operation. On success the part directories
# are removed and the combined object is stored and returned.
def combine_object_parts(bucket, upload_id, object_name, parts, request)
  upload_path = File.join(@root, bucket.name)
  base_path = File.join(upload_path, "#{upload_id}_#{object_name}")

  complete_file = ""
  chunk = ""
  part_paths = []

  parts.sort_by { |part| part[:number] }.each do |part|
    part_path = "#{base_path}_part#{part[:number]}"
    content_path = File.join(part_path, FAKE_S3_METADATA_DIR, 'content')

    File.open(content_path, 'rb') { |f| chunk = f.read }
    etag = Digest::MD5.hexdigest(chunk)

    # Bug fix: was `raise new Error "invalid file chunk"`, which crashed
    # with NoMethodError instead of reporting the checksum mismatch.
    raise "invalid file chunk" unless part[:etag] == etag
    complete_file << chunk
    part_paths << part_path
  end

  object = do_store_object(bucket, object_name, complete_file, request)

  # clean up parts
  part_paths.each do |path|
    FileUtils.remove_dir(path)
  end

  object
end
248
+
249
# Delete an object's backing directory and unregister it from +bucket+.
# Errors are printed and swallowed (nil is returned on failure), matching
# the store's best-effort style.
def delete_object(bucket, object_name, request)
  begin
    object_dir = File.join(@root, bucket.name, object_name)
    FileUtils.rm_rf(object_dir)
    bucket.remove(bucket.find(object_name))
  rescue
    puts $!
    $!.backtrace.each { |line| puts line }
    return nil
  end
end
261
+
262
# TODO: abstract getting meta data from request.
# Build the metadata hash persisted alongside an object's content file:
# md5/size/mtime computed from the file, content type/encoding taken from
# the request headers, and x-amz-* headers split into custom
# ("x-amz-meta-*") and amazon metadata.
def create_metadata(content, request)
  metadata = {
    md5: Digest::MD5.file(content).hexdigest,
    content_type: request.header["content-type"].first,
    content_encoding: request.header["content-encoding"].first,
    size: File.size(content),
    modified_date: File.mtime(content).utc.iso8601(SUBSECOND_PRECISION),
    amazon_metadata: {},
    custom_metadata: {},
  }

  # Add custom metadata from the request header
  request.header.each do |key, values|
    match = /^x-amz-([^-]+)-(.*)$/.match(key)
    next if match.nil?
    joined = values.join(', ')
    if match[1] == 'meta'
      metadata[:custom_metadata][match[2]] = joined
    else
      metadata[:amazon_metadata][key.sub(/^x-amz-/, '')] = joined
    end
  end
  metadata
end
285
+ end
286
+ end
@@ -0,0 +1,21 @@
1
module FakeS3
  # A File subclass whose reads can be throttled to a global byte rate,
  # used to simulate slow S3 downloads.
  class RateLimitableFile < File
    @@rate_limit = nil

    # The configured rate limit in bytes per second (nil = unthrottled).
    def self.rate_limit
      @@rate_limit
    end

    # Specify a rate limit in bytes per second
    def self.rate_limit=(rate_limit)
      @@rate_limit = rate_limit
    end

    # Sleep so that reading +length+ bytes takes length/rate seconds, then
    # delegate to File#read.
    #
    # Fix: +length+ is now optional — File#read may be called with no
    # arguments, which previously raised ArgumentError here (and would
    # have crashed on nil division when a limit was set). Unbounded reads
    # are not throttled.
    def read(length = nil)
      if @@rate_limit && length
        time_to_sleep = length / @@rate_limit
        sleep(time_to_sleep)
      end
      return super
    end
  end
end
@@ -0,0 +1,19 @@
1
module FakeS3
  # In-memory representation of a stored object. Instances hash and
  # compare by name so they can live in Hash/Set keys and sorted listings.
  class S3Object
    include Comparable

    attr_accessor :name, :size, :creation_date, :modified_date, :md5, :io,
                  :content_type, :content_encoding, :custom_metadata

    # Hash on the name so equal-named objects collide as hash keys.
    def hash
      @name.hash
    end

    # Two objects are equal when both are S3Objects with the same name.
    def eql?(object)
      return false unless object.is_a?(self.class)
      @name == object.name
    end

    # Sort by the object's name; incomparable (nil) against other types.
    def <=>(object)
      return nil unless object.is_a?(self.class)
      @name <=> object.name
    end
  end
end