revans_right_aws 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. data/.gemtest +0 -0
  2. data/History.txt +284 -0
  3. data/Manifest.txt +50 -0
  4. data/README.txt +167 -0
  5. data/Rakefile +110 -0
  6. data/lib/acf/right_acf_interface.rb +485 -0
  7. data/lib/acf/right_acf_origin_access_identities.rb +230 -0
  8. data/lib/acf/right_acf_streaming_interface.rb +236 -0
  9. data/lib/acw/right_acw_interface.rb +249 -0
  10. data/lib/as/right_as_interface.rb +699 -0
  11. data/lib/awsbase/benchmark_fix.rb +39 -0
  12. data/lib/awsbase/right_awsbase.rb +978 -0
  13. data/lib/awsbase/support.rb +115 -0
  14. data/lib/ec2/right_ec2.rb +395 -0
  15. data/lib/ec2/right_ec2_ebs.rb +452 -0
  16. data/lib/ec2/right_ec2_images.rb +373 -0
  17. data/lib/ec2/right_ec2_instances.rb +755 -0
  18. data/lib/ec2/right_ec2_monitoring.rb +70 -0
  19. data/lib/ec2/right_ec2_reserved_instances.rb +170 -0
  20. data/lib/ec2/right_ec2_security_groups.rb +277 -0
  21. data/lib/ec2/right_ec2_spot_instances.rb +399 -0
  22. data/lib/ec2/right_ec2_vpc.rb +571 -0
  23. data/lib/elb/right_elb_interface.rb +496 -0
  24. data/lib/rds/right_rds_interface.rb +998 -0
  25. data/lib/right_aws.rb +83 -0
  26. data/lib/s3/right_s3.rb +1126 -0
  27. data/lib/s3/right_s3_interface.rb +1199 -0
  28. data/lib/sdb/active_sdb.rb +1122 -0
  29. data/lib/sdb/right_sdb_interface.rb +721 -0
  30. data/lib/sqs/right_sqs.rb +388 -0
  31. data/lib/sqs/right_sqs_gen2.rb +343 -0
  32. data/lib/sqs/right_sqs_gen2_interface.rb +524 -0
  33. data/lib/sqs/right_sqs_interface.rb +594 -0
  34. data/test/acf/test_helper.rb +2 -0
  35. data/test/acf/test_right_acf.rb +138 -0
  36. data/test/ec2/test_helper.rb +2 -0
  37. data/test/ec2/test_right_ec2.rb +108 -0
  38. data/test/http_connection.rb +87 -0
  39. data/test/rds/test_helper.rb +2 -0
  40. data/test/rds/test_right_rds.rb +120 -0
  41. data/test/s3/test_helper.rb +2 -0
  42. data/test/s3/test_right_s3.rb +421 -0
  43. data/test/s3/test_right_s3_stubbed.rb +97 -0
  44. data/test/sdb/test_active_sdb.rb +357 -0
  45. data/test/sdb/test_helper.rb +3 -0
  46. data/test/sdb/test_right_sdb.rb +253 -0
  47. data/test/sqs/test_helper.rb +2 -0
  48. data/test/sqs/test_right_sqs.rb +291 -0
  49. data/test/sqs/test_right_sqs_gen2.rb +264 -0
  50. data/test/test_credentials.rb +37 -0
  51. data/test/ts_right_aws.rb +14 -0
  52. metadata +169 -0
@@ -0,0 +1,1199 @@
1
+ #
2
+ # Copyright (c) 2007-2008 RightScale Inc
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining
5
+ # a copy of this software and associated documentation files (the
6
+ # "Software"), to deal in the Software without restriction, including
7
+ # without limitation the rights to use, copy, modify, merge, publish,
8
+ # distribute, sublicense, and/or sell copies of the Software, and to
9
+ # permit persons to whom the Software is furnished to do so, subject to
10
+ # the following conditions:
11
+ #
12
+ # The above copyright notice and this permission notice shall be
13
+ # included in all copies or substantial portions of the Software.
14
+ #
15
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18
+ # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
19
+ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
20
+ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
21
+ # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22
+ #
23
+
24
+ module RightAws
25
+
26
+ class S3Interface < RightAwsBase
27
+
28
# Objects at or above this size (in bytes) are sent with an
# "Expect: 100-continue" header so S3 can reject the request before
# the body is uploaded.
USE_100_CONTINUE_PUT_SIZE = 1_000_000

include RightAwsBaseInterface

# Connection defaults; each can be overridden via the S3_URL
# environment variable or the params hash passed to #initialize.
DEFAULT_HOST           = 's3.amazonaws.com'
DEFAULT_PORT           = 443
DEFAULT_PROTOCOL       = 'https'
DEFAULT_SERVICE        = '/'
REQUEST_TTL            = 30
DEFAULT_EXPIRES_AFTER  = 1 * 24 * 60 * 60 # One day's worth of seconds
ONE_YEAR_IN_SECONDS    = 365 * 24 * 60 * 60
# Prefix marking Amazon-specific HTTP headers (included in signatures).
AMAZON_HEADER_PREFIX   = 'x-amz-'
# Prefix marking user-defined object metadata headers.
AMAZON_METADATA_PREFIX = 'x-amz-meta-'
41
+
42
# Shared benchmarking block: accumulates timing for XML parsing and
# raw S3 service calls across all S3Interface instances.
@@bench = AwsBenchmarkingBlock.new
# Benchmark data collected while parsing XML responses.
def self.bench_xml
  @@bench.xml
end
# Benchmark data collected during S3 service round trips.
def self.bench_s3
  @@bench.service
end
49
+
50
# Params supported:
#  :no_subdomains => true # do not use bucket as a part of domain name but as a part of path
# Class-level defaults shared by every instance; per-instance @params
# take precedence (see #param below).
@@params = {}
def self.params
  @@params
end

# get custom option
def param(name)
  # - check explicitly defined param (@params)
  # - otherwise check implicitly defined one (@@params)
  @params.has_key?(name) ? @params[name] : @@params[name]
end
63
+
64
+ # Creates new RightS3 instance.
65
+ #
66
+ # s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<RightAws::S3Interface:0xb7b3c27c>
67
+ #
68
+ # Params is a hash:
69
+ #
70
+ # {:server => 's3.amazonaws.com' # Amazon service host: 's3.amazonaws.com'(default)
71
+ # :port => 443 # Amazon service port: 80 or 443(default)
72
+ # :protocol => 'https' # Amazon service protocol: 'http' or 'https'(default)
73
+ # :multi_thread => true|false # Multi-threaded (connection per each thread): true or false(default)
74
+ # :logger => Logger Object} # Logger instance: logs to STDOUT if omitted }
75
+ #
76
# Creates a new S3Interface instance.
#
#  s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX',
#                                 {:multi_thread => true, :logger => Logger.new('/tmp/x.log')})
#
# Params is a hash:
#  {:server       => 's3.amazonaws.com' # Amazon service host: 's3.amazonaws.com'(default)
#   :port         => 443                # Amazon service port: 80 or 443(default)
#   :protocol     => 'https'            # Amazon service protocol: 'http' or 'https'(default)
#   :multi_thread => true|false         # Multi-threaded (connection per each thread): true or false(default)
#   :logger       => Logger Object}     # Logger instance: logs to STDOUT if omitted
#
# Credentials fall back to the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
# environment variables; endpoint defaults fall back to S3_URL.
def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
  # Parse the S3_URL override once instead of re-parsing it for every default.
  s3_url = ENV['S3_URL'] && URI.parse(ENV['S3_URL'])
  init({ :name             => 'S3',
         :default_host     => s3_url ? s3_url.host   : DEFAULT_HOST,
         :default_port     => s3_url ? s3_url.port   : DEFAULT_PORT,
         :default_service  => s3_url ? s3_url.path   : DEFAULT_SERVICE,
         :default_protocol => s3_url ? s3_url.scheme : DEFAULT_PROTOCOL },
       aws_access_key_id     || ENV['AWS_ACCESS_KEY_ID'],
       aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'],
       params)
end
86
+
87
+
88
+ #-----------------------------------------------------------------
89
+ # Requests
90
+ #-----------------------------------------------------------------
91
+ # Produces canonical string for signing.
92
# Builds the canonical string that gets signed for S3 authentication.
# Collects the signable request headers (x-amz-*, content-md5,
# content-type, date), then appends the resource path with the query
# string stripped — except for the sub-resources S3 includes in
# signatures (?acl, ?torrent, ?location, ?logging).
def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
  signable = {}
  headers.each do |name, raw|
    name = name.downcase
    flat = raw.is_a?(Array) ? raw.join('') : raw.to_s
    if name[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
      signable[name] = flat.strip
    end
  end
  signable['content-type'] ||= ''
  signable['content-md5']  ||= ''
  # An x-amz-date header supersedes the date header; a query-string
  # expiry (pre-signed URL) replaces the date entirely.
  signable['date'] = '' if signable.has_key? 'x-amz-date'
  signable['date'] = expires if expires
  result = "#{method}\n"
  signable.sort.each do |name, val|
    result << (name[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{name}:#{val}\n" : "#{val}\n")
  end
  # Drop everything after the question mark...
  result << path.gsub(/\?.*$/, '')
  # ...unless it addresses a signed sub-resource.
  result << '?acl'      if path[/[&?]acl($|&|=)/]
  result << '?torrent'  if path[/[&?]torrent($|&|=)/]
  result << '?location' if path[/[&?]location($|&|=)/]
  result << '?logging'  if path[/[&?]logging($|&|=)/] # this one is beta, no support for now
  result
end
120
+
121
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
# Returns true when +bucket_name+ is usable as a DNS host label
# (3..63 chars, dot-separated labels of lowercase letters, digits and
# inner dashes); returns nil otherwise.
def is_dns_bucket?(bucket_name)
  name = bucket_name.to_s
  return nil unless (3..63) === name.size
  labels_ok = name.split('.').all? { |label| label[/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/] }
  labels_ok || nil
end
130
+
131
# Splits headers[:url] into bucket / key / query string, then picks the
# request model: virtual-host style (bucket.s3.amazonaws.com) when the
# bucket name is DNS-compatible and :no_subdomains is not set,
# path-style otherwise. Returns [server, path, path_to_sign].
def fetch_request_params(headers) #:nodoc:
  server  = @params[:server]
  service = @params[:service].to_s
  service.chop! if service[%r{/$}] # remove trailing '/' from service
  # extract the bucket name, key path and query parameters
  match = headers[:url].to_s.match(%r{^([a-z0-9._-]*)(/[^?]*)?(\?.+)?}i)
  bucket_name, key_path, params_list = match[1], match[2], match[3]
  if !param(:no_subdomains) && is_dns_bucket?(bucket_name)
    # bucket moves into the host name; the path keeps only the key
    server   = "#{bucket_name}.#{server}"
    key_path ||= '/'
    path = "#{service}#{key_path}#{params_list}"
  else
    path = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  end
  # the signature always covers the path-style form
  path_to_sign = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  [server, path, path_to_sign]
end
152
+
153
# Generates request hash for REST API.
# Assumes that headers[:url] is URL encoded (use CGI::escape)
#
# Builds a Net::HTTP request object, signs it with the account's secret
# key and returns a hash ready for request_info:
#   { :request, :server, :port, :protocol }
def generate_rest_request(method, headers) # :nodoc:
  # calculate request data
  server, path, path_to_sign = fetch_request_params(headers)
  data = headers[:data]
  # remove unset(==optional) and symbolic keys
  # NOTE(review): this mutates the caller's headers hash in place.
  headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  #
  headers['content-type'] ||= ''
  headers['date'] = Time.now.httpdate
  # create request
  # 'constantize' comes from awsbase/support.rb and maps e.g. 'PUT'
  # to Net::HTTP::Put
  request = "Net::HTTP::#{method.capitalize}".constantize.new(path)
  request.body = data if data
  # set request headers and meta headers
  headers.each { |key, value| request[key.to_s] = value }
  # generate auth strings (sign the path-style path, not the host-style one)
  auth_string = canonical_string(request.method, path_to_sign, request.to_hash)
  signature = AwsUtils::sign(@aws_secret_access_key, auth_string)
  # set other headers
  request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
  # prepare output hash
  { :request => request,
    :server => server,
    :port => @params[:port],
    :protocol => @params[:protocol] }
end
180
+
181
# Sends request to Amazon and parses the response.
# Raises AwsError if any banana happened.
#
# Thin delegate to RightAwsBaseInterface#request_info_impl using the
# per-thread :s3_connection and the shared benchmarking block.
def request_info(request, parser, &block) # :nodoc:
  request_info_impl(:s3_connection, @@bench, request, parser, &block)
end
186
+
187
+ # Returns an array of customer's buckets. Each item is a +hash+.
188
+ #
189
+ # s3.list_all_my_buckets #=>
190
+ # [{:owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
191
+ # :owner_display_name => "root",
192
+ # :name => "bucket_name",
193
+ # :creation_date => "2007-04-19T18:47:43.000Z"}, ..., {...}]
194
+ #
195
# Returns an array of the account's buckets; each item is a hash with
# :owner_id, :owner_display_name, :name and :creation_date.
def list_all_my_buckets(headers={})
  request_hash = generate_rest_request('GET', headers.merge(:url => ''))
  request_info(request_hash, S3ListAllMyBucketsParser.new(:logger => @logger))
rescue
  on_exception
end
201
+
202
+ # Creates new bucket. Returns +true+ or an exception.
203
+ #
204
+ # # create a bucket at American server
205
+ # s3.create_bucket('my-awesome-bucket-us') #=> true
206
+ # # create a bucket at European server
207
+ # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
208
+ #
209
# Creates a new bucket. Returns +true+ or raises an exception.
#
#  # create a bucket at American server
#  s3.create_bucket('my-awesome-bucket-us') #=> true
#  # create a bucket at European server
#  s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
#
def create_bucket(bucket, headers={})
  data = nil
  location = case headers[:location].to_s
             when 'us','US' then '' # US Standard needs no location constraint
             when 'eu'      then 'EU'
             else                headers[:location].to_s
             end
  unless location.blank?
    data = "<CreateBucketConfiguration><LocationConstraint>#{location}</LocationConstraint></CreateBucketConfiguration>"
  end
  req_hash = generate_rest_request('PUT', headers.merge(:url => bucket, :data => data))
  request_info(req_hash, RightHttp2xxParser.new)
rescue StandardError => e
  # FIX: was "rescue Exception", which also swallowed SystemExit,
  # SignalException etc. StandardError still covers AwsError.
  # If the bucket already exists AWS returns an error for the location
  # constraint interface; treat that as success.
  e.is_a?(RightAws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true : on_exception
end
226
+
227
+ # Retrieve bucket location
228
+ #
229
+ # s3.create_bucket('my-awesome-bucket-us') #=> true
230
+ # puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)
231
+ #
232
+ # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
233
+ # puts s3.bucket_location('my-awesome-bucket-eu') #=> 'EU'
234
+ #
235
# Retrieves a bucket's location constraint: '' for US Standard
# (Amazon's default) or e.g. 'EU' for Europe.
def bucket_location(bucket, headers={})
  request_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}?location"))
  request_info(request_hash, S3BucketLocationParser.new)
rescue
  on_exception
end
241
+
242
+ # Retrieves the logging configuration for a bucket.
243
+ # Returns a hash of {:enabled, :targetbucket, :targetprefix}
244
+ #
245
+ # s3.interface.get_logging_parse(:bucket => "asset_bucket")
246
+ # => {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}
247
+ #
248
+ #
249
# Retrieves the logging configuration for a bucket as a hash of
# {:enabled, :targetbucket, :targetprefix}.
# Mandatory: :bucket. Optional: :headers.
def get_logging_parse(params)
  AwsUtils.mandatory_arguments([:bucket], params)
  AwsUtils.allow_only([:bucket, :headers], params)
  params[:headers] ||= {}
  request_hash = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}?logging"))
  request_info(request_hash, S3LoggingParser.new)
rescue
  on_exception
end
258
+
259
+ # Sets logging configuration for a bucket from the XML configuration document.
260
+ # params:
261
+ # :bucket
262
+ # :xmldoc
263
# Sets a bucket's logging configuration from an XML configuration
# document. Mandatory: :bucket, :xmldoc. Optional: :headers.
def put_logging(params)
  AwsUtils.mandatory_arguments([:bucket, :xmldoc], params)
  AwsUtils.allow_only([:bucket, :xmldoc, :headers], params)
  params[:headers] ||= {}
  request_hash = generate_rest_request('PUT', params[:headers].merge(:url => "#{params[:bucket]}?logging", :data => params[:xmldoc]))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
272
+
273
+ # Deletes new bucket. Bucket must be empty! Returns +true+ or an exception.
274
+ #
275
+ # s3.delete_bucket('my_awesome_bucket') #=> true
276
+ #
277
+ # See also: force_delete_bucket method
278
+ #
279
# Deletes a bucket. The bucket must be empty! Returns +true+ or raises.
# See also: force_delete_bucket.
def delete_bucket(bucket, headers={})
  request_hash = generate_rest_request('DELETE', headers.merge(:url => bucket))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
285
+
286
+ # Returns an array of bucket's keys. Each array item (key data) is a +hash+.
287
+ #
288
+ # s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) #=>
289
+ # [{:key => "test1",
290
+ # :last_modified => "2007-05-18T07:00:59.000Z",
291
+ # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
292
+ # :owner_display_name => "root",
293
+ # :e_tag => "000000000059075b964b07152d234b70",
294
+ # :storage_class => "STANDARD",
295
+ # :size => 3,
296
+ # :service=> {'is_truncated' => false,
297
+ # 'prefix' => "t",
298
+ # 'marker' => "",
299
+ # 'name' => "my_awesome_bucket",
300
+ # 'max-keys' => "5"}, ..., {...}]
301
+ #
302
# Returns an array of key hashes for a bucket. Supported options:
# 'prefix', 'marker', 'max-keys', 'delimiter' (see class docs for the
# shape of each returned item).
def list_bucket(bucket, options={}, headers={})
  unless options.blank?
    query = options.map { |name, value| "#{name}=#{CGI::escape value.to_s}" }.join('&')
    bucket += "?#{query}"
  end
  request_hash = generate_rest_request('GET', headers.merge(:url => bucket))
  request_info(request_hash, S3ListBucketParser.new(:logger => @logger))
rescue
  on_exception
end
309
+
310
+ # Incrementally list the contents of a bucket. Yields the following hash to a block:
311
+ # s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) yields
312
+ # {
313
+ # :name => 'bucketname',
314
+ # :prefix => 'subfolder/',
315
+ # :marker => 'fileN.jpg',
316
+ # :max_keys => 234,
317
+ # :delimiter => '/',
318
+ # :is_truncated => true,
319
+ # :next_marker => 'fileX.jpg',
320
+ # :contents => [
321
+ # { :key => "file1",
322
+ # :last_modified => "2007-05-18T07:00:59.000Z",
323
+ # :e_tag => "000000000059075b964b07152d234b70",
324
+ # :size => 3,
325
+ # :storage_class => "STANDARD",
326
+ # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
327
+ # :owner_display_name => "root"
328
+ # }, { :key, ...}, ... {:key, ...}
329
+ # ]
330
+ # :common_prefixes => [
331
+ # "prefix1",
332
+ # "prefix2",
333
+ # ...,
334
+ # "prefixN"
335
+ # ]
336
+ # }
337
# Incrementally lists a bucket's contents, yielding one page of results
# (a hash with :contents, :common_prefixes, :is_truncated, :next_marker
# etc.) per S3 round trip, until the listing is exhausted or the
# caller-supplied 'max-keys' budget is used up. Returns +true+.
def incrementally_list_bucket(bucket, options={}, headers={}, &block)
  internal_options = options.symbolize_keys
  begin
    internal_bucket = bucket.dup
    internal_bucket += '?'+internal_options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless internal_options.blank?
    req_hash = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
    response = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
    there_are_more_keys = response[:is_truncated]
    if(there_are_more_keys)
      # resume the next page right after the last key/prefix we saw
      internal_options[:marker] = decide_marker(response)
      # shrink the remaining 'max-keys' budget by what this page returned
      total_results = response[:contents].length + response[:common_prefixes].length
      internal_options[:'max-keys'] ? (internal_options[:'max-keys'] -= total_results) : nil
    end
    yield response
  end while there_are_more_keys && under_max_keys(internal_options)
  true
rescue
  on_exception
end
356
+
357
+
358
+ private
359
# Chooses the 'marker' for the next list request: the explicit
# :next_marker when the response carries one, otherwise whichever of
# the last key / last common prefix sorts higher. Returns nil when the
# page was empty.
def decide_marker(response)
  return response[:next_marker].dup if response[:next_marker]
  # BUGFIX: the original called response[:contents].last[:key] directly,
  # raising NoMethodError on a page containing only common prefixes.
  last_entry  = response[:contents].last
  last_key    = last_entry && last_entry[:key]
  last_prefix = response[:common_prefixes].last
  if !last_key
    return nil if !last_prefix
    last_prefix.dup
  elsif !last_prefix
    last_key.dup
  else
    last_key > last_prefix ? last_key.dup : last_prefix.dup
  end
end
372
+
373
# True while more keys may be fetched: either no 'max-keys' budget was
# given, or the remaining budget is still positive.
def under_max_keys(internal_options)
  remaining = internal_options[:'max-keys']
  return true unless remaining
  remaining > 0
end
376
+
377
+ public
378
+ # Saves object to Amazon. Returns +true+ or an exception.
379
+ # Any header starting with AMAZON_METADATA_PREFIX is considered
380
+ # user metadata. It will be stored with the object and returned
381
+ # when you retrieve the object. The total size of the HTTP
382
+ # request, not including the body, must be less than 4 KB.
383
+ #
384
+ # s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true
385
+ #
386
+ # This method is capable of 'streaming' uploads; that is, it can upload
387
+ # data from a file or other IO object without first reading all the data
388
+ # into memory. This is most useful for large PUTs - it is difficult to read
389
+ # a 2 GB file entirely into memory before sending it to S3.
390
+ # To stream an upload, pass an object that responds to 'read' (like the read
391
+ # method of IO) and to either 'lstat' or 'size'. For files, this means
392
+ # streaming is enabled by simply making the call:
393
+ #
394
+ # s3.put(bucket_name, 'S3keyname.forthisfile', File.open('localfilename.dat'))
395
+ #
396
+ # If the IO object you wish to stream from responds to the read method but
397
+ # doesn't implement lstat or size, you can extend the object dynamically
398
+ # to implement these methods, or define your own class which defines these
399
+ # methods. Be sure that your class returns 'nil' from read() after having
400
+ # read 'size' bytes. Otherwise S3 will drop the socket after
401
+ # 'Content-Length' bytes have been uploaded, and HttpConnection will
402
+ # interpret this as an error.
403
+ #
404
+ # This method now supports very large PUTs, where very large
405
+ # is > 2 GB.
406
+ #
407
+ # For Win32 users: Files and IO objects should be opened in binary mode. If
408
+ # a text mode IO object is passed to PUT, it will be converted to binary
409
+ # mode.
410
+ #
411
+
412
# Stores an object. Returns +true+ or raises. Headers starting with
# AMAZON_METADATA_PREFIX become user metadata. +data+ may be a String
# or any object responding to read plus lstat/size for streaming
# uploads (see the class-level documentation for details).
def put(bucket, key, data=nil, headers={})
  # Win32: reopen text-mode IO objects in binary mode so streaming works.
  data.binmode if data.respond_to?(:binmode)
  big_upload = (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
               (data.respond_to?(:size)  && data.size       >= USE_100_CONTINUE_PUT_SIZE)
  # Ask S3 to vet the request headers before we ship a large body.
  headers['expect'] = '100-continue' if big_upload
  request_hash = generate_rest_request('PUT', headers.merge(:url => "#{bucket}/#{CGI::escape key}", :data => data))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
427
+
428
+
429
+
430
+ # New experimental API for uploading objects, introduced in RightAws 1.8.1.
431
+ # store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification
432
+ # of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
433
+ # The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).
434
+ #
435
+ # If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
436
+ # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
437
+ #
438
+ # The optional argument of :headers allows the caller to specify arbitrary request header values.
439
+ #
440
+ # s3.store_object(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
441
+ # => {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
442
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
443
+ # "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
444
+ # :verified_md5=>true,
445
+ # "x-amz-request-id"=>"63916465939995BA",
446
+ # "server"=>"AmazonS3",
447
+ # "content-length"=>"0"}
448
+ #
449
+ # s3.store_object(:bucket => "foobucket", :key => "foo", :data => "polemonium" )
450
+ # => {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
451
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
452
+ # "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
453
+ # :verified_md5=>false,
454
+ # "x-amz-request-id"=>"3B25A996BC2CDD3B",
455
+ # "server"=>"AmazonS3",
456
+ # "content-length"=>"0"}
457
+
458
# Stores an object and returns the full response metadata hash
# (Amazon request id, ETag, etc). When :md5 is given, :verified_md5 in
# the response reports whether the returned ETag matches it; without
# :md5 it is always false.
# Mandatory: :bucket, :key, :data. Optional: :headers, :md5.
def store_object(params)
  AwsUtils.allow_only([:bucket, :key, :data, :headers, :md5], params)
  AwsUtils.mandatory_arguments([:bucket, :key, :data], params)
  params[:headers] ||= {}

  # Win32: reopen text-mode IO objects in binary mode so streaming works.
  params[:data].binmode if params[:data].respond_to?(:binmode)
  if (params[:data].respond_to?(:lstat) && params[:data].lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
     (params[:data].respond_to?(:size)  && params[:data].size       >= USE_100_CONTINUE_PUT_SIZE)
    # Ask S3 to vet the request headers before we ship a large body.
    params[:headers]['expect'] = '100-continue'
  end

  req_hash = generate_rest_request('PUT', params[:headers].merge(:url => "#{params[:bucket]}/#{CGI::escape params[:key]}", :data => params[:data]))
  resp = request_info(req_hash, S3HttpResponseHeadParser.new)
  # `==` already yields a boolean; the original wrapped it in a
  # redundant `? true : false` ternary.
  resp[:verified_md5] = params[:md5] ? (resp['etag'].gsub(/\"/, '') == params[:md5]) : false
  resp
rescue
  on_exception
end
480
+
481
+ # Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
482
+ # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
483
+ # This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.
484
+ #
485
+ # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
486
+ # => {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
487
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
488
+ # "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
489
+ # :verified_md5=>true,
490
+ # "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
491
+ # "server"=>"AmazonS3",
492
+ # "content-length"=>"0"}
493
+ #
494
+ # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2953", :data => "polemonium" )
495
+ # RightAws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
496
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
497
+ # "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
498
+ # :verified_md5=>false,
499
+ # "x-amz-request-id"=>"0D7ADE09F42606F2",
500
+ # "server"=>"AmazonS3",
501
+ # "content-length"=>"0"}
502
# Like store_object, but the :md5 argument is mandatory and an AwsError
# is raised unless the returned ETag matches it. On success returns the
# response metadata with :verified_md5 => true.
def store_object_and_verify(params)
  AwsUtils.mandatory_arguments([:md5], params)
  response = store_object(params)
  unless response[:verified_md5]
    raise AwsError.new("Uploaded object failed MD5 checksum verification: #{response.inspect}")
  end
  response
end
507
+
508
+ # Retrieves object data from Amazon. Returns a +hash+ or an exception.
509
+ #
510
+ # s3.get('my_awesome_bucket', 'log/curent/1.log') #=>
511
+ #
512
+ # {:object => "Ola-la!",
513
+ # :headers => {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
514
+ # "content-type" => "",
515
+ # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
516
+ # "date" => "Wed, 23 May 2007 09:08:03 GMT",
517
+ # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
518
+ # "x-amz-meta-family" => "Woho556!",
519
+ # "x-amz-request-id" => "0000000C246D770C",
520
+ # "server" => "AmazonS3",
521
+ # "content-length" => "7"}}
522
+ #
523
+ # If a block is provided, yields incrementally to the block as
524
+ # the response is read. For large responses, this function is ideal as
525
+ # the response can be 'streamed'. The hash containing header fields is
526
+ # still returned.
527
+ # Example:
528
+ # foo = File.new('./chunder.txt', File::CREAT|File::RDWR)
529
+ # rhdr = s3.get('aws-test', 'Cent5V1_7_1.img.part.00') do |chunk|
530
+ # foo.write(chunk)
531
+ # end
532
+ # foo.close
533
+ #
534
+
535
# Retrieves object data. Returns {:object => ..., :headers => {...}}.
# With a block, the body is yielded incrementally in chunks instead of
# being buffered (ideal for large responses); the header hash is still
# returned.
def get(bucket, key, headers={}, &block)
  request_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, S3HttpResponseBodyParser.new, &block)
rescue
  on_exception
end
541
+
542
+ # New experimental API for retrieving objects, introduced in RightAws 1.8.1.
543
+ # retrieve_object is similar in function to the older function get. It allows for optional verification
544
+ # of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
545
+ #
546
+ # If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
547
+ # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
548
+ #
549
+ # The optional argument of :headers allows the caller to specify arbitrary request header values.
550
+ # Mandatory arguments:
551
+ # :bucket - the bucket in which the object is stored
552
+ # :key - the object address (or path) within the bucket
553
+ # Optional arguments:
554
+ # :headers - hash of additional HTTP headers to include with the request
555
+ # :md5 - MD5 checksum against which to verify the retrieved object
556
+ #
557
+ # s3.retrieve_object(:bucket => "foobucket", :key => "foo")
558
+ # => {:verified_md5=>false,
559
+ # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
560
+ # "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
561
+ # "content-type"=>"",
562
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
563
+ # "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
564
+ # "x-amz-request-id"=>"EE4855DE27A2688C",
565
+ # "server"=>"AmazonS3",
566
+ # "content-length"=>"10"},
567
+ # :object=>"polemonium"}
568
+ #
569
+ # s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
570
+ # => {:verified_md5=>true,
571
+ # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
572
+ # "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
573
+ # "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
574
+ # "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
575
+ # "x-amz-request-id"=>"6E7F317356580599",
576
+ # "server"=>"AmazonS3",
577
+ # "content-length"=>"10"},
578
+ # :object=>"polemonium"}
579
+ # If a block is provided, yields incrementally to the block as
580
+ # the response is read. For large responses, this function is ideal as
581
+ # the response can be 'streamed'. The hash containing header fields is
582
+ # still returned.
583
# Retrieves an object; returns a hash with :object, :headers and
# :verified_md5. When :md5 is given, :verified_md5 reports whether the
# returned ETag matches it. A block receives the body incrementally,
# as with #get.
# Mandatory: :bucket, :key. Optional: :headers, :md5.
def retrieve_object(params, &block)
  AwsUtils.mandatory_arguments([:bucket, :key], params)
  AwsUtils.allow_only([:bucket, :key, :headers, :md5], params)
  params[:headers] ||= {}
  request_hash = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}/#{CGI::escape params[:key]}"))
  response = request_info(request_hash, S3HttpResponseBodyParser.new, &block)
  response[:verified_md5] = !!(params[:md5] && response[:headers]['etag'].gsub(/\"/, '') == params[:md5])
  response
rescue
  on_exception
end
597
+
598
+ # Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
599
+ # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
600
+ # This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.
601
# Like retrieve_object, but the :md5 argument is mandatory and an
# AwsError is raised unless the retrieved object's ETag matches it.
def retrieve_object_and_verify(params, &block)
  AwsUtils.mandatory_arguments([:md5], params)
  response = retrieve_object(params, &block)
  unless response[:verified_md5]
    raise AwsError.new("Retrieved object failed MD5 checksum verification: #{response.inspect}")
  end
  response
end
607
+
608
+ # Retrieves object metadata. Returns a +hash+ of http_response_headers.
609
+ #
610
+ # s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
611
+ # {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
612
+ # "content-type" => "",
613
+ # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
614
+ # "date" => "Wed, 23 May 2007 09:08:03 GMT",
615
+ # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
616
+ # "x-amz-meta-family" => "Woho556!",
617
+ # "x-amz-request-id" => "0000000C246D770C",
618
+ # "server" => "AmazonS3",
619
+ # "content-length" => "7"}
620
+ #
621
# Retrieves an object's metadata only (HTTP HEAD). Returns the
# response headers as a hash (etag, content-length, x-amz-meta-*, ...).
def head(bucket, key, headers={})
  request_hash = generate_rest_request('HEAD', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, S3HttpResponseHeadParser.new)
rescue
  on_exception
end
627
+
628
+ # Deletes key. Returns +true+ or an exception.
629
+ #
630
+ # s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true
631
+ #
632
# Deletes a key from a bucket. Returns +true+ or raises.
def delete(bucket, key='', headers={})
  request_hash = generate_rest_request('DELETE', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
638
+
639
+ # Copy an object.
640
+ # directive: :copy - copy meta-headers from source (default value)
641
+ # :replace - replace meta-headers by passed ones
642
+ #
643
+ # # copy a key with meta-headers
644
+ # s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}
645
+ #
646
+ # # copy a key, overwrite meta-headers
647
+ # s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}
648
+ #
649
+ # see: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html
650
+ # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html
651
+ #
652
+ def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
653
+ dest_key ||= src_key
654
+ headers['x-amz-metadata-directive'] = directive.to_s.upcase
655
+ headers['x-amz-copy-source'] = "#{src_bucket}/#{CGI::escape src_key}"
656
+ req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{dest_bucket}/#{CGI::escape dest_key}"))
657
+ request_info(req_hash, S3CopyParser.new)
658
+ rescue
659
+ on_exception
660
+ end
661
+
662
+ # Move an object.
663
+ # directive: :copy - copy meta-headers from source (default value)
664
+ # :replace - replace meta-headers by passed ones
665
+ #
666
+ # # move bucket1/key1 to bucket1/key2
667
+ # s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}
668
+ #
669
+ # # move bucket1/key1 to bucket2/key2 with new meta-headers assignment
670
+ # s3.copy('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}
671
+ #
672
+ def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
673
+ copy_result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
674
+ # delete an original key if it differs from a destination one
675
+ delete(src_bucket, src_key) unless src_bucket == dest_bucket && src_key == dest_key
676
+ copy_result
677
+ end
678
+
679
+ # Rename an object.
680
+ #
681
+ # # rename bucket1/key1 to bucket1/key2
682
+ # s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}
683
+ #
684
+ def rename(src_bucket, src_key, dest_key, headers={})
685
+ move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
686
+ end
687
+
688
+ # Retieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.
689
+ #
690
+ # s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
691
+ # {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
692
+ # "content-type"=>"application/xml;charset=ISO-8859-1",
693
+ # "date"=>"Wed, 23 May 2007 09:40:16 GMT",
694
+ # "x-amz-request-id"=>"B183FA7AB5FBB4DD",
695
+ # "server"=>"AmazonS3",
696
+ # "transfer-encoding"=>"chunked"},
697
+ # :object => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
698
+ # <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
699
+ # <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
700
+ # 16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
701
+ # <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }
702
+ #
703
+ def get_acl(bucket, key='', headers={})
704
+ key = key.blank? ? '' : "/#{CGI::escape key}"
705
+ req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
706
+ request_info(req_hash, S3HttpResponseBodyParser.new)
707
+ rescue
708
+ on_exception
709
+ end
710
+
711
+ # Retieves the ACL (access control policy) for a bucket or object.
712
+ # Returns a hash of {:owner, :grantees}
713
+ #
714
+ # s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>
715
+ #
716
+ # { :grantees=>
717
+ # { "16...2a"=>
718
+ # { :display_name=>"root",
719
+ # :permissions=>["FULL_CONTROL"],
720
+ # :attributes=>
721
+ # { "xsi:type"=>"CanonicalUser",
722
+ # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
723
+ # "http://acs.amazonaws.com/groups/global/AllUsers"=>
724
+ # { :display_name=>"AllUsers",
725
+ # :permissions=>["READ"],
726
+ # :attributes=>
727
+ # { "xsi:type"=>"Group",
728
+ # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
729
+ # :owner=>
730
+ # { :id=>"16..2a",
731
+ # :display_name=>"root"}}
732
+ #
733
+ def get_acl_parse(bucket, key='', headers={})
734
+ key = key.blank? ? '' : "/#{CGI::escape key}"
735
+ req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
736
+ acl = request_info(req_hash, S3AclParser.new(:logger => @logger))
737
+ result = {}
738
+ result[:owner] = acl[:owner]
739
+ result[:grantees] = {}
740
+ acl[:grantees].each do |grantee|
741
+ key = grantee[:id] || grantee[:uri]
742
+ if result[:grantees].key?(key)
743
+ result[:grantees][key][:permissions] << grantee[:permissions]
744
+ else
745
+ result[:grantees][key] =
746
+ { :display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
747
+ :permissions => Array(grantee[:permissions]),
748
+ :attributes => grantee[:attributes] }
749
+ end
750
+ end
751
+ result
752
+ rescue
753
+ on_exception
754
+ end
755
+
756
+ # Sets the ACL on a bucket or object.
757
+ def put_acl(bucket, key, acl_xml_doc, headers={})
758
+ key = key.blank? ? '' : "/#{CGI::escape key}"
759
+ req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}#{key}?acl", :data=>acl_xml_doc))
760
+ request_info(req_hash, S3HttpResponseBodyParser.new)
761
+ rescue
762
+ on_exception
763
+ end
764
+
765
+ # Retieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.
766
+ def get_bucket_acl(bucket, headers={})
767
+ return get_acl(bucket, '', headers)
768
+ rescue
769
+ on_exception
770
+ end
771
+
772
+ # Sets the ACL on a bucket only.
773
+ def put_bucket_acl(bucket, acl_xml_doc, headers={})
774
+ return put_acl(bucket, '', acl_xml_doc, headers)
775
+ rescue
776
+ on_exception
777
+ end
778
+
779
+
780
+ # Removes all keys from bucket. Returns +true+ or an exception.
781
+ #
782
+ # s3.clear_bucket('my_awesome_bucket') #=> true
783
+ #
784
+ def clear_bucket(bucket)
785
+ incrementally_list_bucket(bucket) do |results|
786
+ results[:contents].each { |key| delete(bucket, key[:key]) }
787
+ end
788
+ true
789
+ rescue
790
+ on_exception
791
+ end
792
+
793
+ # Deletes all keys in bucket then deletes bucket. Returns +true+ or an exception.
794
+ #
795
+ # s3.force_delete_bucket('my_awesome_bucket')
796
+ #
797
+ def force_delete_bucket(bucket)
798
+ clear_bucket(bucket)
799
+ delete_bucket(bucket)
800
+ rescue
801
+ on_exception
802
+ end
803
+
804
+ # Deletes all keys where the 'folder_key' may be assumed as 'folder' name. Returns an array of string keys that have been deleted.
805
+ #
806
+ # s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
807
+ # s3.delete_folder('my_awesome_bucket','test') #=> ['test','test/2/34','test/3']
808
+ #
809
+ def delete_folder(bucket, folder_key, separator='/')
810
+ folder_key.chomp!(separator)
811
+ allkeys = []
812
+ incrementally_list_bucket(bucket, { 'prefix' => folder_key }) do |results|
813
+ keys = results[:contents].map{ |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil}.compact
814
+ keys.each{ |key| delete(bucket, key) }
815
+ allkeys << keys
816
+ end
817
+ allkeys
818
+ rescue
819
+ on_exception
820
+ end
821
+
822
+ # Retrieves object data only (headers are omitted). Returns +string+ or an exception.
823
+ #
824
+ # s3.get('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'
825
+ #
826
+ def get_object(bucket, key, headers={})
827
+ get(bucket, key, headers)[:object]
828
+ rescue
829
+ on_exception
830
+ end
831
+
832
+ #-----------------------------------------------------------------
833
+ # Query API: Links
834
+ #-----------------------------------------------------------------
835
+
836
+ # Generates link for QUERY API
837
+ def generate_link(method, headers={}, expires=nil) #:nodoc:
838
+ # calculate request data
839
+ server, path, path_to_sign = fetch_request_params(headers)
840
+ # expiration time
841
+ expires ||= DEFAULT_EXPIRES_AFTER
842
+ expires = Time.now.utc + expires if expires.is_a?(Fixnum) && (expires < ONE_YEAR_IN_SECONDS)
843
+ expires = expires.to_i
844
+ # remove unset(==optional) and symbolyc keys
845
+ headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
846
+ #generate auth strings
847
+ auth_string = canonical_string(method, path_to_sign, headers, expires)
848
+ signature = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
849
+ # path building
850
+ addon = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
851
+ path += path[/\?/] ? "&#{addon}" : "?#{addon}"
852
+ "#{@params[:protocol]}://#{server}:#{@params[:port]}#{path}"
853
+ rescue
854
+ on_exception
855
+ end
856
+
857
+ # Generates link for 'ListAllMyBuckets'.
858
+ #
859
+ # s3.list_all_my_buckets_link #=> url string
860
+ #
861
+ def list_all_my_buckets_link(expires=nil, headers={})
862
+ generate_link('GET', headers.merge(:url=>''), expires)
863
+ rescue
864
+ on_exception
865
+ end
866
+
867
+ # Generates link for 'CreateBucket'.
868
+ #
869
+ # s3.create_bucket_link('my_awesome_bucket') #=> url string
870
+ #
871
+ def create_bucket_link(bucket, expires=nil, headers={})
872
+ generate_link('PUT', headers.merge(:url=>bucket), expires)
873
+ rescue
874
+ on_exception
875
+ end
876
+
877
+ # Generates link for 'DeleteBucket'.
878
+ #
879
+ # s3.delete_bucket_link('my_awesome_bucket') #=> url string
880
+ #
881
+ def delete_bucket_link(bucket, expires=nil, headers={})
882
+ generate_link('DELETE', headers.merge(:url=>bucket), expires)
883
+ rescue
884
+ on_exception
885
+ end
886
+
887
+ # Generates link for 'ListBucket'.
888
+ #
889
+ # s3.list_bucket_link('my_awesome_bucket') #=> url string
890
+ #
891
+ def list_bucket_link(bucket, options=nil, expires=nil, headers={})
892
+ bucket += '?' + options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
893
+ generate_link('GET', headers.merge(:url=>bucket), expires)
894
+ rescue
895
+ on_exception
896
+ end
897
+
898
+ # Generates link for 'PutObject'.
899
+ #
900
+ # s3.put_link('my_awesome_bucket',key, object) #=> url string
901
+ #
902
+ def put_link(bucket, key, data=nil, expires=nil, headers={})
903
+ generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}", :data=>data), expires)
904
+ rescue
905
+ on_exception
906
+ end
907
+
908
+ # Generates link for 'GetObject'.
909
+ #
910
+ # if a bucket comply with virtual hosting naming then retuns a link with the
911
+ # bucket as a part of host name:
912
+ #
913
+ # s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...
914
+ #
915
+ # otherwise returns an old style link (the bucket is a part of path):
916
+ #
917
+ # s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...
918
+ #
919
+ # see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html
920
+ def get_link(bucket, key, expires=nil, headers={})
921
+ generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
922
+ rescue
923
+ on_exception
924
+ end
925
+
926
+ # Generates link for 'HeadObject'.
927
+ #
928
+ # s3.head_link('my_awesome_bucket',key) #=> url string
929
+ #
930
+ def head_link(bucket, key, expires=nil, headers={})
931
+ generate_link('HEAD', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
932
+ rescue
933
+ on_exception
934
+ end
935
+
936
+ # Generates link for 'DeleteObject'.
937
+ #
938
+ # s3.delete_link('my_awesome_bucket',key) #=> url string
939
+ #
940
+ def delete_link(bucket, key, expires=nil, headers={})
941
+ generate_link('DELETE', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
942
+ rescue
943
+ on_exception
944
+ end
945
+
946
+
947
+ # Generates link for 'GetACL'.
948
+ #
949
+ # s3.get_acl_link('my_awesome_bucket',key) #=> url string
950
+ #
951
+ def get_acl_link(bucket, key='', headers={})
952
+ return generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
953
+ rescue
954
+ on_exception
955
+ end
956
+
957
+ # Generates link for 'PutACL'.
958
+ #
959
+ # s3.put_acl_link('my_awesome_bucket',key) #=> url string
960
+ #
961
+ def put_acl_link(bucket, key='', headers={})
962
+ return generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
963
+ rescue
964
+ on_exception
965
+ end
966
+
967
+ # Generates link for 'GetBucketACL'.
968
+ #
969
+ # s3.get_acl_link('my_awesome_bucket',key) #=> url string
970
+ #
971
+ def get_bucket_acl_link(bucket, headers={})
972
+ return get_acl_link(bucket, '', headers)
973
+ rescue
974
+ on_exception
975
+ end
976
+
977
+ # Generates link for 'PutBucketACL'.
978
+ #
979
+ # s3.put_acl_link('my_awesome_bucket',key) #=> url string
980
+ #
981
+ def put_bucket_acl_link(bucket, acl_xml_doc, headers={})
982
+ return put_acl_link(bucket, '', acl_xml_doc, headers)
983
+ rescue
984
+ on_exception
985
+ end
986
+
987
+ #-----------------------------------------------------------------
988
+ # PARSERS:
989
+ #-----------------------------------------------------------------
990
+
991
+ class S3ListAllMyBucketsParser < RightAWSParser # :nodoc:
992
+ def reset
993
+ @result = []
994
+ @owner = {}
995
+ end
996
+ def tagstart(name, attributes)
997
+ @current_bucket = {} if name == 'Bucket'
998
+ end
999
+ def tagend(name)
1000
+ case name
1001
+ when 'ID' then @owner[:owner_id] = @text
1002
+ when 'DisplayName' then @owner[:owner_display_name] = @text
1003
+ when 'Name' then @current_bucket[:name] = @text
1004
+ when 'CreationDate'then @current_bucket[:creation_date] = @text
1005
+ when 'Bucket' then @result << @current_bucket.merge(@owner)
1006
+ end
1007
+ end
1008
+ end
1009
+
1010
+ class S3ListBucketParser < RightAWSParser # :nodoc:
1011
+ def reset
1012
+ @result = []
1013
+ @service = {}
1014
+ @current_key = {}
1015
+ end
1016
+ def tagstart(name, attributes)
1017
+ @current_key = {} if name == 'Contents'
1018
+ end
1019
+ def tagend(name)
1020
+ case name
1021
+ # service info
1022
+ when 'Name' then @service['name'] = @text
1023
+ when 'Prefix' then @service['prefix'] = @text
1024
+ when 'Marker' then @service['marker'] = @text
1025
+ when 'MaxKeys' then @service['max-keys'] = @text
1026
+ when 'Delimiter' then @service['delimiter'] = @text
1027
+ when 'IsTruncated' then @service['is_truncated'] = (@text =~ /false/ ? false : true)
1028
+ # key data
1029
+ when 'Key' then @current_key[:key] = @text
1030
+ when 'LastModified'then @current_key[:last_modified] = @text
1031
+ when 'ETag' then @current_key[:e_tag] = @text
1032
+ when 'Size' then @current_key[:size] = @text.to_i
1033
+ when 'StorageClass'then @current_key[:storage_class] = @text
1034
+ when 'ID' then @current_key[:owner_id] = @text
1035
+ when 'DisplayName' then @current_key[:owner_display_name] = @text
1036
+ when 'Contents'
1037
+ @current_key[:service] = @service
1038
+ @result << @current_key
1039
+ end
1040
+ end
1041
+ end
1042
+
1043
+ class S3ImprovedListBucketParser < RightAWSParser # :nodoc:
1044
+ def reset
1045
+ @result = {}
1046
+ @result[:contents] = []
1047
+ @result[:common_prefixes] = []
1048
+ @contents = []
1049
+ @current_key = {}
1050
+ @common_prefixes = []
1051
+ @in_common_prefixes = false
1052
+ end
1053
+ def tagstart(name, attributes)
1054
+ @current_key = {} if name == 'Contents'
1055
+ @in_common_prefixes = true if name == 'CommonPrefixes'
1056
+ end
1057
+ def tagend(name)
1058
+ case name
1059
+ # service info
1060
+ when 'Name' then @result[:name] = @text
1061
+ # Amazon uses the same tag for the search prefix and for the entries
1062
+ # in common prefix...so use our simple flag to see which element
1063
+ # we are parsing
1064
+ when 'Prefix' then @in_common_prefixes ? @common_prefixes << @text : @result[:prefix] = @text
1065
+ when 'Marker' then @result[:marker] = @text
1066
+ when 'MaxKeys' then @result[:max_keys] = @text
1067
+ when 'Delimiter' then @result[:delimiter] = @text
1068
+ when 'IsTruncated' then @result[:is_truncated] = (@text =~ /false/ ? false : true)
1069
+ when 'NextMarker' then @result[:next_marker] = @text
1070
+ # key data
1071
+ when 'Key' then @current_key[:key] = @text
1072
+ when 'LastModified'then @current_key[:last_modified] = @text
1073
+ when 'ETag' then @current_key[:e_tag] = @text
1074
+ when 'Size' then @current_key[:size] = @text.to_i
1075
+ when 'StorageClass'then @current_key[:storage_class] = @text
1076
+ when 'ID' then @current_key[:owner_id] = @text
1077
+ when 'DisplayName' then @current_key[:owner_display_name] = @text
1078
+ when 'Contents' then @result[:contents] << @current_key
1079
+ # Common Prefix stuff
1080
+ when 'CommonPrefixes'
1081
+ @result[:common_prefixes] = @common_prefixes
1082
+ @in_common_prefixes = false
1083
+ end
1084
+ end
1085
+ end
1086
+
1087
+ class S3BucketLocationParser < RightAWSParser # :nodoc:
1088
+ def reset
1089
+ @result = ''
1090
+ end
1091
+ def tagend(name)
1092
+ @result = @text if name == 'LocationConstraint'
1093
+ end
1094
+ end
1095
+
1096
+ class S3AclParser < RightAWSParser # :nodoc:
1097
+ def reset
1098
+ @result = {:grantees=>[], :owner=>{}}
1099
+ @current_grantee = {}
1100
+ end
1101
+ def tagstart(name, attributes)
1102
+ @current_grantee = { :attributes => attributes } if name=='Grantee'
1103
+ end
1104
+ def tagend(name)
1105
+ case name
1106
+ # service info
1107
+ when 'ID'
1108
+ if @xmlpath == 'AccessControlPolicy/Owner'
1109
+ @result[:owner][:id] = @text
1110
+ else
1111
+ @current_grantee[:id] = @text
1112
+ end
1113
+ when 'DisplayName'
1114
+ if @xmlpath == 'AccessControlPolicy/Owner'
1115
+ @result[:owner][:display_name] = @text
1116
+ else
1117
+ @current_grantee[:display_name] = @text
1118
+ end
1119
+ when 'URI'
1120
+ @current_grantee[:uri] = @text
1121
+ when 'Permission'
1122
+ @current_grantee[:permissions] = @text
1123
+ when 'Grant'
1124
+ @result[:grantees] << @current_grantee
1125
+ end
1126
+ end
1127
+ end
1128
+
1129
+ class S3LoggingParser < RightAWSParser # :nodoc:
1130
+ def reset
1131
+ @result = {:enabled => false, :targetbucket => '', :targetprefix => ''}
1132
+ @current_grantee = {}
1133
+ end
1134
+ def tagend(name)
1135
+ case name
1136
+ # service info
1137
+ when 'TargetBucket'
1138
+ if @xmlpath == 'BucketLoggingStatus/LoggingEnabled'
1139
+ @result[:targetbucket] = @text
1140
+ @result[:enabled] = true
1141
+ end
1142
+ when 'TargetPrefix'
1143
+ if @xmlpath == 'BucketLoggingStatus/LoggingEnabled'
1144
+ @result[:targetprefix] = @text
1145
+ @result[:enabled] = true
1146
+ end
1147
+ end
1148
+ end
1149
+ end
1150
+
1151
+ class S3CopyParser < RightAWSParser # :nodoc:
1152
+ def reset
1153
+ @result = {}
1154
+ end
1155
+ def tagend(name)
1156
+ case name
1157
+ when 'LastModified' then @result[:last_modified] = @text
1158
+ when 'ETag' then @result[:e_tag] = @text
1159
+ end
1160
+ end
1161
+ end
1162
+
1163
+ #-----------------------------------------------------------------
1164
+ # PARSERS: Non XML
1165
+ #-----------------------------------------------------------------
1166
+
1167
+ class S3HttpResponseParser # :nodoc:
1168
+ attr_reader :result
1169
+ def parse(response)
1170
+ @result = response
1171
+ end
1172
+ def headers_to_string(headers)
1173
+ result = {}
1174
+ headers.each do |key, value|
1175
+ value = value.to_s if value.is_a?(Array) && value.size<2
1176
+ result[key] = value
1177
+ end
1178
+ result
1179
+ end
1180
+ end
1181
+
1182
+ class S3HttpResponseBodyParser < S3HttpResponseParser # :nodoc:
1183
+ def parse(response)
1184
+ @result = {
1185
+ :object => response.body,
1186
+ :headers => headers_to_string(response.to_hash)
1187
+ }
1188
+ end
1189
+ end
1190
+
1191
+ class S3HttpResponseHeadParser < S3HttpResponseParser # :nodoc:
1192
+ def parse(response)
1193
+ @result = headers_to_string(response.to_hash)
1194
+ end
1195
+ end
1196
+
1197
+ end
1198
+
1199
+ end