ijin-right_aws 1.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. data/History.txt +239 -0
  2. data/Manifest.txt +46 -0
  3. data/README.txt +167 -0
  4. data/Rakefile +125 -0
  5. data/VERSION +1 -0
  6. data/lib/acf/right_acf_interface.rb +413 -0
  7. data/lib/acw/right_acw_interface.rb +249 -0
  8. data/lib/as/right_as_interface.rb +690 -0
  9. data/lib/awsbase/benchmark_fix.rb +39 -0
  10. data/lib/awsbase/right_awsbase.rb +931 -0
  11. data/lib/awsbase/support.rb +115 -0
  12. data/lib/ec2/right_ec2.rb +617 -0
  13. data/lib/ec2/right_ec2_ebs.rb +451 -0
  14. data/lib/ec2/right_ec2_images.rb +373 -0
  15. data/lib/ec2/right_ec2_instances.rb +760 -0
  16. data/lib/ec2/right_ec2_monitoring.rb +70 -0
  17. data/lib/ec2/right_ec2_reserved_instances.rb +167 -0
  18. data/lib/ec2/right_ec2_vpc.rb +571 -0
  19. data/lib/elb/right_elb_interface.rb +407 -0
  20. data/lib/rds/right_rds_interface.rb +998 -0
  21. data/lib/right_aws.rb +79 -0
  22. data/lib/s3/right_s3.rb +1102 -0
  23. data/lib/s3/right_s3_interface.rb +1195 -0
  24. data/lib/sdb/active_sdb.rb +930 -0
  25. data/lib/sdb/right_sdb_interface.rb +672 -0
  26. data/lib/sqs/right_sqs.rb +388 -0
  27. data/lib/sqs/right_sqs_gen2.rb +343 -0
  28. data/lib/sqs/right_sqs_gen2_interface.rb +523 -0
  29. data/lib/sqs/right_sqs_interface.rb +594 -0
  30. data/test/acf/test_helper.rb +2 -0
  31. data/test/acf/test_right_acf.rb +146 -0
  32. data/test/awsbase/test_helper.rb +2 -0
  33. data/test/awsbase/test_right_awsbase.rb +12 -0
  34. data/test/ec2/test_helper.rb +2 -0
  35. data/test/ec2/test_right_ec2.rb +108 -0
  36. data/test/http_connection.rb +87 -0
  37. data/test/rds/test_helper.rb +2 -0
  38. data/test/rds/test_right_rds.rb +120 -0
  39. data/test/s3/test_helper.rb +2 -0
  40. data/test/s3/test_right_s3.rb +419 -0
  41. data/test/s3/test_right_s3_stubbed.rb +95 -0
  42. data/test/sdb/test_active_sdb.rb +299 -0
  43. data/test/sdb/test_helper.rb +3 -0
  44. data/test/sdb/test_right_sdb.rb +247 -0
  45. data/test/sqs/test_helper.rb +2 -0
  46. data/test/sqs/test_right_sqs.rb +291 -0
  47. data/test/sqs/test_right_sqs_gen2.rb +276 -0
  48. data/test/test_credentials.rb +37 -0
  49. data/test/ts_right_aws.rb +14 -0
  50. metadata +122 -0
@@ -0,0 +1,1195 @@
1
+ #
2
+ # Copyright (c) 2007-2008 RightScale Inc
3
+ #
4
+ # Permission is hereby granted, free of charge, to any person obtaining
5
+ # a copy of this software and associated documentation files (the
6
+ # "Software"), to deal in the Software without restriction, including
7
+ # without limitation the rights to use, copy, modify, merge, publish,
8
+ # distribute, sublicense, and/or sell copies of the Software, and to
9
+ # permit persons to whom the Software is furnished to do so, subject to
10
+ # the following conditions:
11
+ #
12
+ # The above copyright notice and this permission notice shall be
13
+ # included in all copies or substantial portions of the Software.
14
+ #
15
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18
+ # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
19
+ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
20
+ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
21
+ # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22
+ #
23
+
24
+ module RightAws
25
+
26
+ class S3Interface < RightAwsBase
27
+
28
# Upload bodies at or above this size (bytes) get an "Expect: 100-continue" header.
USE_100_CONTINUE_PUT_SIZE = 1_000_000

include RightAwsBaseInterface

# Default endpoint settings (overridable via the S3_URL environment variable).
DEFAULT_HOST           = 's3.amazonaws.com'
DEFAULT_PORT           = 443
DEFAULT_PROTOCOL       = 'https'
DEFAULT_SERVICE        = '/'
# Lifetime (seconds) of a signed request.
REQUEST_TTL            = 30
DEFAULT_EXPIRES_AFTER  = 1 * 24 * 60 * 60 # One day's worth of seconds
ONE_YEAR_IN_SECONDS    = 365 * 24 * 60 * 60
# Header prefixes that participate in request signing / user metadata.
AMAZON_HEADER_PREFIX   = 'x-amz-'
AMAZON_METADATA_PREFIX = 'x-amz-meta-'
41
+
42
# Shared benchmarking block: accumulates time spent in XML parsing vs. S3 calls.
@@bench = AwsBenchmarkingBlock.new
# Benchmark data for XML parsing.
def self.bench_xml
  @@bench.xml
end
# Benchmark data for S3 service calls.
def self.bench_s3
  @@bench.service
end
49
+
50
+ # Params supported:
51
+ # :no_subdomains => true # do not use bucket as a part of domain name but as a part of path
52
# Class-wide default options hash (e.g. :no_subdomains).
@@params = {}
# Accessor for the class-wide default options.
def self.params
  @@params
end
56
+
57
+ # get custom option
58
# Looks up a custom option: an explicitly set per-instance value (@params)
# takes precedence over the class-wide default (@@params).
def param(name)
  return @params[name] if @params.has_key?(name)
  @@params[name]
end
63
+
64
+ # Creates new RightS3 instance.
65
+ #
66
+ # s3 = RightAws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<RightAws::S3Interface:0xb7b3c27c>
67
+ #
68
+ # Params is a hash:
69
+ #
70
+ # {:server => 's3.amazonaws.com' # Amazon service host: 's3.amazonaws.com'(default)
71
+ # :port => 443 # Amazon service port: 80 or 443(default)
72
+ # :protocol => 'https' # Amazon service protocol: 'http' or 'https'(default)
73
+ # :multi_thread => true|false # Multi-threaded (connection per each thread): true or false(default)
74
+ # :logger => Logger Object} # Logger instance: logs to STDOUT if omitted }
75
+ #
76
# Creates a new S3Interface.
#
# Endpoint defaults come from the S3_URL environment variable (parsed once
# instead of four times, as before) and fall back to the DEFAULT_* constants;
# credentials fall back to AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY.
def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
  s3_url = ENV['S3_URL'] && URI.parse(ENV['S3_URL'])
  init({ :name             => 'S3',
         :default_host     => s3_url ? s3_url.host   : DEFAULT_HOST,
         :default_port     => s3_url ? s3_url.port   : DEFAULT_PORT,
         :default_service  => s3_url ? s3_url.path   : DEFAULT_SERVICE,
         :default_protocol => s3_url ? s3_url.scheme : DEFAULT_PROTOCOL },
       aws_access_key_id || ENV['AWS_ACCESS_KEY_ID'],
       aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'],
       params)
end
86
+
87
+
88
+ #-----------------------------------------------------------------
89
+ # Requests
90
+ #-----------------------------------------------------------------
91
+ # Produces canonical string for signing.
92
# Produces the canonical string that gets signed for an S3 request
# (AWS signature version 2 style).
def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
  s3_headers = {}
  headers.each do |key, value|
    key = key.downcase
    # Only 'x-amz-*', content-md5, content-type and date participate in signing.
    s3_headers[key] = value.to_s.strip if key[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
  end
  s3_headers['content-type'] ||= ''
  s3_headers['content-md5']  ||= ''
  # When x-amz-date is present, the Date slot must be blank in the signature.
  s3_headers['date'] = ''      if s3_headers.has_key? 'x-amz-date'
  # Query-string authentication puts the expiry timestamp where Date would go.
  s3_headers['date'] = expires if expires
  # prepare output string
  out_string = "#{method}\n"
  s3_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
    # Amazon headers appear as "key:value"; the positional ones as bare values.
    out_string << (key[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{key}:#{value}\n" : "#{value}\n")
  end
  # ignore everything after the question mark...
  out_string << path.gsub(/\?.*$/, '')
  # ...unless there is an acl or torrent parameter
  out_string << '?acl'      if path[/[&?]acl($|&|=)/]
  out_string << '?torrent'  if path[/[&?]torrent($|&|=)/]
  out_string << '?location' if path[/[&?]location($|&|=)/]
  out_string << '?logging'  if path[/[&?]logging($|&|=)/] # this one is beta, no support for now
  out_string
end
116
+
117
+ # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
118
# Returns a boolean: can this bucket name be used as a DNS name, i.e. in
# virtual-hosted style requests ("bucket.s3.amazonaws.com")?
# Rules: 3..63 characters; dot-separated components of lowercase letters,
# digits and dashes, each starting and ending with a letter or digit.
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
def is_dns_bucket?(bucket_name)
  bucket_name = bucket_name.to_s
  # Returns false (previously nil) so the method acts as a true predicate;
  # callers only rely on truthiness, so this is backward compatible.
  return false unless (3..63).include?(bucket_name.size)
  bucket_name.split('.').all? { |component| component =~ /^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/ }
end
126
+
127
# Splits headers[:url] into bucket name / key path / query string and chooses
# between virtual-hosted style ("bucket.server") and path style requests.
# Returns [server, path, path_to_sign].
def fetch_request_params(headers) #:nodoc:
  # default server to use
  server  = @params[:server]
  service = @params[:service].to_s
  service.chop! if service[%r{/$}] # remove trailing '/' from service
  # extract bucket name and check its dns compatibility
  headers[:url].to_s[%r{^([a-z0-9._-]*)(/[^?]*)?(\?.+)?}i]
  bucket_name, key_path, params_list = $1, $2, $3
  # select request model
  if !param(:no_subdomains) && is_dns_bucket?(bucket_name)
    # fix a path: bucket moves into the host name
    server = "#{bucket_name}.#{server}"
    key_path ||= '/'
    path = "#{service}#{key_path}#{params_list}"
  else
    path = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  end
  # the signature is always computed over the path-style form
  path_to_sign = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  # path_to_sign = "/#{bucket_name}#{key_path}#{params_list}"
  [ server, path, path_to_sign ]
end
148
+
149
+ # Generates request hash for REST API.
150
+ # Assumes that headers[:url] is URL encoded (use CGI::escape)
151
# Generates request hash for REST API.
# Assumes that headers[:url] is URL encoded (use CGI::escape).
# Returns {:request, :server, :port, :protocol} ready for request_info.
def generate_rest_request(method, headers) # :nodoc:
  # calculate request data
  server, path, path_to_sign = fetch_request_params(headers)
  data = headers[:data]
  # remove unset(==optional) and symbolic keys (symbols are internal options,
  # not real HTTP headers)
  headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  #
  headers['content-type'] ||= ''
  headers['date'] = Time.now.httpdate
  # create request
  request = "Net::HTTP::#{method.capitalize}".constantize.new(path)
  request.body = data if data
  # set request headers and meta headers
  headers.each { |key, value| request[key.to_s] = value }
  # generate auth strings
  auth_string = canonical_string(request.method, path_to_sign, request.to_hash)
  signature = AwsUtils::sign(@aws_secret_access_key, auth_string)
  # set other headers
  request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
  # prepare output hash
  { :request  => request,
    :server   => server,
    :port     => @params[:port],
    :protocol => @params[:protocol] }
end
176
+
177
+ # Sends request to Amazon and parses the response.
178
+ # Raises AwsError if anything goes wrong.
179
# Sends a request to Amazon and parses the response.
# Raises AwsError on failure. Delegates to the shared implementation,
# keyed by the per-thread :s3_connection and timed via @@bench.
def request_info(request, parser, &block) # :nodoc:
  request_info_impl(:s3_connection, @@bench, request, parser, &block)
end
182
+
183
+ # Returns an array of customer's buckets. Each item is a +hash+.
184
+ #
185
+ # s3.list_all_my_buckets #=>
186
+ # [{:owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
187
+ # :owner_display_name => "root",
188
+ # :name => "bucket_name",
189
+ # :creation_date => "2007-04-19T18:47:43.000Z"}, ..., {...}]
190
+ #
191
# Returns an array of the caller's buckets; each entry is a hash with
# :owner_id, :owner_display_name, :name and :creation_date.
def list_all_my_buckets(headers={})
  request_hash = generate_rest_request('GET', headers.merge(:url => ''))
  request_info(request_hash, S3ListAllMyBucketsParser.new(:logger => @logger))
rescue
  on_exception
end
197
+
198
+ # Creates new bucket. Returns +true+ or an exception.
199
+ #
200
+ # # create a bucket at American server
201
+ # s3.create_bucket('my-awesome-bucket-us') #=> true
202
+ # # create a bucket at European server
203
+ # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
204
+ #
205
# Creates a new bucket. Returns +true+ or raises.
# headers[:location] selects a region: 'us'/'US' map to the empty (classic)
# constraint, 'eu' maps to 'EU', anything else is passed through verbatim.
def create_bucket(bucket, headers={})
  location = headers[:location].to_s
  case location
  when 'us', 'US' then location = ''
  when 'eu'       then location = 'EU'
  end
  data = location.blank? ? nil :
         "<CreateBucketConfiguration><LocationConstraint>#{location}</LocationConstraint></CreateBucketConfiguration>"
  request_hash = generate_rest_request('PUT', headers.merge(:url => bucket, :data => data))
  request_info(request_hash, RightHttp2xxParser.new)
rescue Exception => e
  # NOTE(review): rescuing Exception is broad, but kept to preserve behavior.
  # If the bucket already exists AWS returns an error for the location
  # constraint interface - treat that as success.
  e.is_a?(RightAws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true : on_exception
end
222
+
223
+ # Retrieve bucket location
224
+ #
225
+ # s3.create_bucket('my-awesome-bucket-us') #=> true
226
+ # puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)
227
+ #
228
+ # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
229
+ # puts s3.bucket_location('my-awesome-bucket-eu') #=> 'EU'
230
+ #
231
# Returns the location constraint of a bucket ('' for US classic, 'EU', ...).
def bucket_location(bucket, headers={})
  request_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}?location"))
  request_info(request_hash, S3BucketLocationParser.new)
rescue
  on_exception
end
237
+
238
+ # Retrieves the logging configuration for a bucket.
239
+ # Returns a hash of {:enabled, :targetbucket, :targetprefix}
240
+ #
241
+ # s3.interface.get_logging_parse(:bucket => "asset_bucket")
242
+ # => {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}
243
+ #
244
+ #
245
# Fetches a bucket's logging configuration.
# Mandatory params: :bucket. Optional: :headers.
# Returns a hash of {:enabled, :targetbucket, :targetprefix}.
def get_logging_parse(params)
  AwsUtils.mandatory_arguments([:bucket], params)
  AwsUtils.allow_only([:bucket, :headers], params)
  params[:headers] ||= {}
  request_hash = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}?logging"))
  request_info(request_hash, S3LoggingParser.new)
rescue
  on_exception
end
254
+
255
+ # Sets logging configuration for a bucket from the XML configuration document.
256
+ # params:
257
+ # :bucket
258
+ # :xmldoc
259
# Sets a bucket's logging configuration from an XML document.
# Mandatory params: :bucket, :xmldoc. Optional: :headers.
def put_logging(params)
  AwsUtils.mandatory_arguments([:bucket, :xmldoc], params)
  AwsUtils.allow_only([:bucket, :xmldoc, :headers], params)
  params[:headers] ||= {}
  request_hash = generate_rest_request('PUT', params[:headers].merge(:url  => "#{params[:bucket]}?logging",
                                                                     :data => params[:xmldoc]))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
268
+
269
+ # Deletes a bucket. The bucket must be empty! Returns +true+ or an exception.
270
+ #
271
+ # s3.delete_bucket('my_awesome_bucket') #=> true
272
+ #
273
+ # See also: force_delete_bucket method
274
+ #
275
# Deletes a bucket (which must be empty). Returns +true+ or raises.
def delete_bucket(bucket, headers={})
  request_hash = generate_rest_request('DELETE', headers.merge(:url => bucket))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
281
+
282
+ # Returns an array of bucket's keys. Each array item (key data) is a +hash+.
283
+ #
284
+ # s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) #=>
285
+ # [{:key => "test1",
286
+ # :last_modified => "2007-05-18T07:00:59.000Z",
287
+ # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
288
+ # :owner_display_name => "root",
289
+ # :e_tag => "000000000059075b964b07152d234b70",
290
+ # :storage_class => "STANDARD",
291
+ # :size => 3,
292
+ # :service=> {'is_truncated' => false,
293
+ # 'prefix' => "t",
294
+ # 'marker' => "",
295
+ # 'name' => "my_awesome_bucket",
296
+ # 'max-keys' => "5"}, ..., {...}]
297
+ #
298
# Lists a bucket's keys. Options such as 'prefix', 'marker', 'max-keys'
# and 'delimiter' are appended to the URL as query parameters.
def list_bucket(bucket, options={}, headers={})
  unless options.blank?
    query = options.map { |name, value| "#{name}=#{CGI::escape value.to_s}" }.join('&')
    bucket += "?#{query}"
  end
  request_hash = generate_rest_request('GET', headers.merge(:url => bucket))
  request_info(request_hash, S3ListBucketParser.new(:logger => @logger))
rescue
  on_exception
end
305
+
306
+ # Incrementally list the contents of a bucket. Yields the following hash to a block:
307
+ # s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) yields
308
+ # {
309
+ # :name => 'bucketname',
310
+ # :prefix => 'subfolder/',
311
+ # :marker => 'fileN.jpg',
312
+ # :max_keys => 234,
313
+ # :delimiter => '/',
314
+ # :is_truncated => true,
315
+ # :next_marker => 'fileX.jpg',
316
+ # :contents => [
317
+ # { :key => "file1",
318
+ # :last_modified => "2007-05-18T07:00:59.000Z",
319
+ # :e_tag => "000000000059075b964b07152d234b70",
320
+ # :size => 3,
321
+ # :storage_class => "STANDARD",
322
+ # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
323
+ # :owner_display_name => "root"
324
+ # }, { :key, ...}, ... {:key, ...}
325
+ # ]
326
+ # :common_prefixes => [
327
+ # "prefix1",
328
+ # "prefix2",
329
+ # ...,
330
+ # "prefixN"
331
+ # ]
332
+ # }
333
# Incrementally lists a bucket, yielding each page of results to the block.
# Keeps requesting while the response is truncated and the optional
# 'max-keys' budget is not exhausted. Returns +true+.
def incrementally_list_bucket(bucket, options={}, headers={}, &block)
  internal_options = options.symbolize_keys
  begin
    internal_bucket = bucket.dup
    internal_bucket += '?'+internal_options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless internal_options.blank?
    req_hash = generate_rest_request('GET', headers.merge(:url=>internal_bucket))
    response = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
    there_are_more_keys = response[:is_truncated]
    if(there_are_more_keys)
      # resume the next request after the last key/prefix seen on this page
      internal_options[:marker] = decide_marker(response)
      # shrink the remaining 'max-keys' budget by what this page returned
      total_results = response[:contents].length + response[:common_prefixes].length
      internal_options[:'max-keys'] ? (internal_options[:'max-keys'] -= total_results) : nil
    end
    yield response
  end while there_are_more_keys && under_max_keys(internal_options)
  true
rescue
  on_exception
end
352
+
353
+
354
+ private
355
# Picks the marker from which the next incremental listing request should
# resume: the explicit NextMarker when Amazon supplies one, otherwise
# whichever of the last key / last common prefix sorts later.
# Returns nil when the page carried no entries at all.
def decide_marker(response)
  return response[:next_marker].dup if response[:next_marker]
  # :contents may be empty when a delimiter listing returns only common
  # prefixes; the old `response[:contents].last[:key]` raised NoMethodError.
  last_entry  = response[:contents].last
  last_key    = last_entry && last_entry[:key]
  last_prefix = response[:common_prefixes].last
  if !last_key
    return nil if !last_prefix
    last_prefix.dup
  elsif !last_prefix
    last_key.dup
  else
    last_key > last_prefix ? last_key.dup : last_prefix.dup
  end
end
368
+
369
# True while the caller's remaining 'max-keys' budget (if any) is positive;
# an absent limit means "keep going".
def under_max_keys(internal_options)
  limit = internal_options[:'max-keys']
  return true unless limit
  limit > 0
end
372
+
373
+ public
374
+ # Saves object to Amazon. Returns +true+ or an exception.
375
+ # Any header starting with AMAZON_METADATA_PREFIX is considered
376
+ # user metadata. It will be stored with the object and returned
377
+ # when you retrieve the object. The total size of the HTTP
378
+ # request, not including the body, must be less than 4 KB.
379
+ #
380
+ # s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true
381
+ #
382
+ # This method is capable of 'streaming' uploads; that is, it can upload
383
+ # data from a file or other IO object without first reading all the data
384
+ # into memory. This is most useful for large PUTs - it is difficult to read
385
+ # a 2 GB file entirely into memory before sending it to S3.
386
+ # To stream an upload, pass an object that responds to 'read' (like the read
387
+ # method of IO) and to either 'lstat' or 'size'. For files, this means
388
+ # streaming is enabled by simply making the call:
389
+ #
390
+ # s3.put(bucket_name, 'S3keyname.forthisfile', File.open('localfilename.dat'))
391
+ #
392
+ # If the IO object you wish to stream from responds to the read method but
393
+ # doesn't implement lstat or size, you can extend the object dynamically
394
+ # to implement these methods, or define your own class which defines these
395
+ # methods. Be sure that your class returns 'nil' from read() after having
396
+ # read 'size' bytes. Otherwise S3 will drop the socket after
397
+ # 'Content-Length' bytes have been uploaded, and HttpConnection will
398
+ # interpret this as an error.
399
+ #
400
+ # This method now supports very large PUTs, where very large
401
+ # is > 2 GB.
402
+ #
403
+ # For Win32 users: Files and IO objects should be opened in binary mode. If
404
+ # a text mode IO object is passed to PUT, it will be converted to binary
405
+ # mode.
406
+ #
407
+
408
# Stores an object; +data+ may be a String or any IO-like object responding
# to #read plus #lstat or #size (streaming upload). Returns +true+ or raises.
def put(bucket, key, data=nil, headers={})
  # Win32 text-mode handles break streaming, so force binary mode when possible.
  data.binmode if data.respond_to?(:binmode)
  large_upload = (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
                 (data.respond_to?(:size)  && data.size       >= USE_100_CONTINUE_PUT_SIZE)
  headers['expect'] = '100-continue' if large_upload
  request_hash = generate_rest_request('PUT', headers.merge(:url  => "#{bucket}/#{CGI::escape key}",
                                                            :data => data))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
423
+
424
+
425
+
426
+ # New experimental API for uploading objects, introduced in RightAws 1.8.1.
427
+ # store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification
428
+ # of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
429
+ # The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).
430
+ #
431
+ # If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
432
+ # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
433
+ #
434
+ # The optional argument of :headers allows the caller to specify arbitrary request header values.
435
+ #
436
+ # s3.store_object(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
437
+ # => {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
438
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
439
+ # "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
440
+ # :verified_md5=>true,
441
+ # "x-amz-request-id"=>"63916465939995BA",
442
+ # "server"=>"AmazonS3",
443
+ # "content-length"=>"0"}
444
+ #
445
+ # s3.store_object(:bucket => "foobucket", :key => "foo", :data => "polemonium" )
446
+ # => {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
447
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
448
+ # "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
449
+ # :verified_md5=>false,
450
+ # "x-amz-request-id"=>"3B25A996BC2CDD3B",
451
+ # "server"=>"AmazonS3",
452
+ # "content-length"=>"0"}
453
+
454
# Stores an object and returns the full response-header hash (including the
# Amazon request id and ETag). When :md5 is supplied, :verified_md5 in the
# result reports whether it matched the returned ETag; otherwise it is false.
# Mandatory params: :bucket, :key, :data. Optional: :headers, :md5.
def store_object(params)
  AwsUtils.allow_only([:bucket, :key, :data, :headers, :md5], params)
  AwsUtils.mandatory_arguments([:bucket, :key, :data], params)
  params[:headers] ||= {}

  data = params[:data]
  # Win32 text-mode handles break streaming, so force binary mode when possible.
  data.binmode if data.respond_to?(:binmode)
  large_upload = (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
                 (data.respond_to?(:size)  && data.size       >= USE_100_CONTINUE_PUT_SIZE)
  params[:headers]['expect'] = '100-continue' if large_upload

  url = "#{params[:bucket]}/#{CGI::escape params[:key]}"
  request_hash = generate_rest_request('PUT', params[:headers].merge(:url => url, :data => data))
  resp = request_info(request_hash, S3HttpResponseHeadParser.new)
  resp[:verified_md5] = params[:md5] ? (resp['etag'].gsub(/\"/, '') == params[:md5]) : false
  resp
rescue
  on_exception
end
476
+
477
+ # Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
478
+ # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
479
+ # This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.
480
+ #
481
+ # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
482
+ # => {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
483
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
484
+ # "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
485
+ # :verified_md5=>true,
486
+ # "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
487
+ # "server"=>"AmazonS3",
488
+ # "content-length"=>"0"}
489
+ #
490
+ # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2953", :data => "polemonium" )
491
+ # RightAws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
492
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
493
+ # "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
494
+ # :verified_md5=>false,
495
+ # "x-amz-request-id"=>"0D7ADE09F42606F2",
496
+ # "server"=>"AmazonS3",
497
+ # "content-length"=>"0"}
498
# Same as store_object, but :md5 is mandatory and a mismatch between it and
# the returned ETag raises AwsError instead of merely flagging :verified_md5.
def store_object_and_verify(params)
  AwsUtils.mandatory_arguments([:md5], params)
  result = store_object(params)
  unless result[:verified_md5]
    raise AwsError.new("Uploaded object failed MD5 checksum verification: #{result.inspect}")
  end
  result
end
503
+
504
+ # Retrieves object data from Amazon. Returns a +hash+ or an exception.
505
+ #
506
+ # s3.get('my_awesome_bucket', 'log/curent/1.log') #=>
507
+ #
508
+ # {:object => "Ola-la!",
509
+ # :headers => {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
510
+ # "content-type" => "",
511
+ # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
512
+ # "date" => "Wed, 23 May 2007 09:08:03 GMT",
513
+ # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
514
+ # "x-amz-meta-family" => "Woho556!",
515
+ # "x-amz-request-id" => "0000000C246D770C",
516
+ # "server" => "AmazonS3",
517
+ # "content-length" => "7"}}
518
+ #
519
+ # If a block is provided, yields incrementally to the block as
520
+ # the response is read. For large responses, this function is ideal as
521
+ # the response can be 'streamed'. The hash containing header fields is
522
+ # still returned.
523
+ # Example:
524
+ # foo = File.new('./chunder.txt', File::CREAT|File::RDWR)
525
+ # rhdr = s3.get('aws-test', 'Cent5V1_7_1.img.part.00') do |chunk|
526
+ # foo.write(chunk)
527
+ # end
528
+ # foo.close
529
+ #
530
+
531
# Retrieves an object; returns {:object, :headers}. When a block is given,
# the body is streamed to it chunk by chunk instead of buffered in :object.
def get(bucket, key, headers={}, &block)
  request_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, S3HttpResponseBodyParser.new, &block)
rescue
  on_exception
end
537
+
538
+ # New experimental API for retrieving objects, introduced in RightAws 1.8.1.
539
+ # retrieve_object is similar in function to the older function get. It allows for optional verification
540
+ # of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
541
+ #
542
+ # If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
543
+ # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
544
+ #
545
+ # The optional argument of :headers allows the caller to specify arbitrary request header values.
546
+ # Mandatory arguments:
547
+ # :bucket - the bucket in which the object is stored
548
+ # :key - the object address (or path) within the bucket
549
+ # Optional arguments:
550
+ # :headers - hash of additional HTTP headers to include with the request
551
+ # :md5 - MD5 checksum against which to verify the retrieved object
552
+ #
553
+ # s3.retrieve_object(:bucket => "foobucket", :key => "foo")
554
+ # => {:verified_md5=>false,
555
+ # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
556
+ # "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
557
+ # "content-type"=>"",
558
+ # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
559
+ # "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
560
+ # "x-amz-request-id"=>"EE4855DE27A2688C",
561
+ # "server"=>"AmazonS3",
562
+ # "content-length"=>"10"},
563
+ # :object=>"polemonium"}
564
+ #
565
+ # s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
566
+ # => {:verified_md5=>true,
567
+ # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
568
+ # "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
569
+ # "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
570
+ # "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
571
+ # "x-amz-request-id"=>"6E7F317356580599",
572
+ # "server"=>"AmazonS3",
573
+ # "content-length"=>"10"},
574
+ # :object=>"polemonium"}
575
+ # If a block is provided, yields incrementally to the block as
576
+ # the response is read. For large responses, this function is ideal as
577
+ # the response can be 'streamed'. The hash containing header fields is
578
+ # still returned.
579
# Retrieves an object with optional MD5 verification.
# Mandatory params: :bucket, :key. Optional: :headers, :md5.
# Returns {:object, :headers, :verified_md5}; :verified_md5 is true only when
# a supplied :md5 matches the returned ETag. Streams to a block when given.
def retrieve_object(params, &block)
  AwsUtils.mandatory_arguments([:bucket, :key], params)
  AwsUtils.allow_only([:bucket, :key, :headers, :md5], params)
  params[:headers] ||= {}
  url = "#{params[:bucket]}/#{CGI::escape params[:key]}"
  request_hash = generate_rest_request('GET', params[:headers].merge(:url => url))
  resp = request_info(request_hash, S3HttpResponseBodyParser.new, &block)
  resp[:verified_md5] = !!(params[:md5] && resp[:headers]['etag'].gsub(/\"/, '') == params[:md5])
  resp
rescue
  on_exception
end
593
+
594
+ # Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
595
+ # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
596
+ # This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.
597
# Same as retrieve_object, but :md5 is mandatory and a checksum mismatch
# raises AwsError instead of merely flagging :verified_md5.
def retrieve_object_and_verify(params, &block)
  AwsUtils.mandatory_arguments([:md5], params)
  result = retrieve_object(params, &block)
  unless result[:verified_md5]
    raise AwsError.new("Retrieved object failed MD5 checksum verification: #{result.inspect}")
  end
  result
end
603
+
604
+ # Retrieves object metadata. Returns a +hash+ of http_response_headers.
605
+ #
606
+ # s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
607
+ # {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
608
+ # "content-type" => "",
609
+ # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
610
+ # "date" => "Wed, 23 May 2007 09:08:03 GMT",
611
+ # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
612
+ # "x-amz-meta-family" => "Woho556!",
613
+ # "x-amz-request-id" => "0000000C246D770C",
614
+ # "server" => "AmazonS3",
615
+ # "content-length" => "7"}
616
+ #
617
# Returns an object's HTTP response headers (metadata) as a hash.
def head(bucket, key, headers={})
  request_hash = generate_rest_request('HEAD', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, S3HttpResponseHeadParser.new)
rescue
  on_exception
end
623
+
624
+ # Deletes key. Returns +true+ or an exception.
625
+ #
626
+ # s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true
627
+ #
628
# Deletes a key. Returns +true+ or raises.
def delete(bucket, key='', headers={})
  request_hash = generate_rest_request('DELETE', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(request_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
634
+
635
+ # Copy an object.
636
+ # directive: :copy - copy meta-headers from source (default value)
637
+ # :replace - replace meta-headers by passed ones
638
+ #
639
+ # # copy a key with meta-headers
640
+ # s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}
641
+ #
642
+ # # copy a key, overwrite meta-headers
643
+ # s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}
644
+ #
645
+ # see: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html
646
+ # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html
647
+ #
648
+ def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
649
+ dest_key ||= src_key
650
+ headers['x-amz-metadata-directive'] = directive.to_s.upcase
651
+ headers['x-amz-copy-source'] = "#{src_bucket}/#{CGI::escape src_key}"
652
+ req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{dest_bucket}/#{CGI::escape dest_key}"))
653
+ request_info(req_hash, S3CopyParser.new)
654
+ rescue
655
+ on_exception
656
+ end
657
+
658
+ # Move an object.
659
+ # directive: :copy - copy meta-headers from source (default value)
660
+ # :replace - replace meta-headers by passed ones
661
+ #
662
+ # # move bucket1/key1 to bucket1/key2
663
+ # s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}
664
+ #
665
+ # # move bucket1/key1 to bucket2/key2 with new meta-headers assignment
666
+ # s3.copy('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}
667
+ #
668
+ def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
669
+ copy_result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
670
+ # delete an original key if it differs from a destination one
671
+ delete(src_bucket, src_key) unless src_bucket == dest_bucket && src_key == dest_key
672
+ copy_result
673
+ end
674
+
675
+ # Rename an object.
676
+ #
677
+ # # rename bucket1/key1 to bucket1/key2
678
+ # s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}
679
+ #
680
+ def rename(src_bucket, src_key, dest_key, headers={})
681
+ move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
682
+ end
683
+
684
+ # Retieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.
685
+ #
686
+ # s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
687
+ # {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
688
+ # "content-type"=>"application/xml;charset=ISO-8859-1",
689
+ # "date"=>"Wed, 23 May 2007 09:40:16 GMT",
690
+ # "x-amz-request-id"=>"B183FA7AB5FBB4DD",
691
+ # "server"=>"AmazonS3",
692
+ # "transfer-encoding"=>"chunked"},
693
+ # :object => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
694
+ # <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
695
+ # <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
696
+ # 16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
697
+ # <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }
698
+ #
699
+ def get_acl(bucket, key='', headers={})
700
+ key = key.blank? ? '' : "/#{CGI::escape key}"
701
+ req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
702
+ request_info(req_hash, S3HttpResponseBodyParser.new)
703
+ rescue
704
+ on_exception
705
+ end
706
+
707
+ # Retieves the ACL (access control policy) for a bucket or object.
708
+ # Returns a hash of {:owner, :grantees}
709
+ #
710
+ # s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>
711
+ #
712
+ # { :grantees=>
713
+ # { "16...2a"=>
714
+ # { :display_name=>"root",
715
+ # :permissions=>["FULL_CONTROL"],
716
+ # :attributes=>
717
+ # { "xsi:type"=>"CanonicalUser",
718
+ # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
719
+ # "http://acs.amazonaws.com/groups/global/AllUsers"=>
720
+ # { :display_name=>"AllUsers",
721
+ # :permissions=>["READ"],
722
+ # :attributes=>
723
+ # { "xsi:type"=>"Group",
724
+ # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
725
+ # :owner=>
726
+ # { :id=>"16..2a",
727
+ # :display_name=>"root"}}
728
+ #
729
+ def get_acl_parse(bucket, key='', headers={})
730
+ key = key.blank? ? '' : "/#{CGI::escape key}"
731
+ req_hash = generate_rest_request('GET', headers.merge(:url=>"#{bucket}#{key}?acl"))
732
+ acl = request_info(req_hash, S3AclParser.new(:logger => @logger))
733
+ result = {}
734
+ result[:owner] = acl[:owner]
735
+ result[:grantees] = {}
736
+ acl[:grantees].each do |grantee|
737
+ key = grantee[:id] || grantee[:uri]
738
+ if result[:grantees].key?(key)
739
+ result[:grantees][key][:permissions] << grantee[:permissions]
740
+ else
741
+ result[:grantees][key] =
742
+ { :display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
743
+ :permissions => grantee[:permissions].to_a,
744
+ :attributes => grantee[:attributes] }
745
+ end
746
+ end
747
+ result
748
+ rescue
749
+ on_exception
750
+ end
751
+
752
+ # Sets the ACL on a bucket or object.
753
+ def put_acl(bucket, key, acl_xml_doc, headers={})
754
+ key = key.blank? ? '' : "/#{CGI::escape key}"
755
+ req_hash = generate_rest_request('PUT', headers.merge(:url=>"#{bucket}#{key}?acl", :data=>acl_xml_doc))
756
+ request_info(req_hash, S3HttpResponseBodyParser.new)
757
+ rescue
758
+ on_exception
759
+ end
760
+
761
+ # Retieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.
762
+ def get_bucket_acl(bucket, headers={})
763
+ return get_acl(bucket, '', headers)
764
+ rescue
765
+ on_exception
766
+ end
767
+
768
+ # Sets the ACL on a bucket only.
769
+ def put_bucket_acl(bucket, acl_xml_doc, headers={})
770
+ return put_acl(bucket, '', acl_xml_doc, headers)
771
+ rescue
772
+ on_exception
773
+ end
774
+
775
+
776
+ # Removes all keys from bucket. Returns +true+ or an exception.
777
+ #
778
+ # s3.clear_bucket('my_awesome_bucket') #=> true
779
+ #
780
+ def clear_bucket(bucket)
781
+ incrementally_list_bucket(bucket) do |results|
782
+ results[:contents].each { |key| delete(bucket, key[:key]) }
783
+ end
784
+ true
785
+ rescue
786
+ on_exception
787
+ end
788
+
789
+ # Deletes all keys in bucket then deletes bucket. Returns +true+ or an exception.
790
+ #
791
+ # s3.force_delete_bucket('my_awesome_bucket')
792
+ #
793
+ def force_delete_bucket(bucket)
794
+ clear_bucket(bucket)
795
+ delete_bucket(bucket)
796
+ rescue
797
+ on_exception
798
+ end
799
+
800
+ # Deletes all keys where the 'folder_key' may be assumed as 'folder' name. Returns an array of string keys that have been deleted.
801
+ #
802
+ # s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
803
+ # s3.delete_folder('my_awesome_bucket','test') #=> ['test','test/2/34','test/3']
804
+ #
805
+ def delete_folder(bucket, folder_key, separator='/')
806
+ folder_key.chomp!(separator)
807
+ allkeys = []
808
+ incrementally_list_bucket(bucket, { 'prefix' => folder_key }) do |results|
809
+ keys = results[:contents].map{ |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil}.compact
810
+ keys.each{ |key| delete(bucket, key) }
811
+ allkeys << keys
812
+ end
813
+ allkeys
814
+ rescue
815
+ on_exception
816
+ end
817
+
818
+ # Retrieves object data only (headers are omitted). Returns +string+ or an exception.
819
+ #
820
+ # s3.get('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'
821
+ #
822
+ def get_object(bucket, key, headers={})
823
+ get(bucket, key, headers)[:object]
824
+ rescue
825
+ on_exception
826
+ end
827
+
828
+ #-----------------------------------------------------------------
829
+ # Query API: Links
830
+ #-----------------------------------------------------------------
831
+
832
+ # Generates link for QUERY API
833
+ def generate_link(method, headers={}, expires=nil) #:nodoc:
834
+ # calculate request data
835
+ server, path, path_to_sign = fetch_request_params(headers)
836
+ # expiration time
837
+ expires ||= DEFAULT_EXPIRES_AFTER
838
+ expires = Time.now.utc + expires if expires.is_a?(Fixnum) && (expires < ONE_YEAR_IN_SECONDS)
839
+ expires = expires.to_i
840
+ # remove unset(==optional) and symbolyc keys
841
+ headers.each{ |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
842
+ #generate auth strings
843
+ auth_string = canonical_string(method, path_to_sign, headers, expires)
844
+ signature = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
845
+ # path building
846
+ addon = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
847
+ path += path[/\?/] ? "&#{addon}" : "?#{addon}"
848
+ "#{@params[:protocol]}://#{server}:#{@params[:port]}#{path}"
849
+ rescue
850
+ on_exception
851
+ end
852
+
853
+ # Generates link for 'ListAllMyBuckets'.
854
+ #
855
+ # s3.list_all_my_buckets_link #=> url string
856
+ #
857
+ def list_all_my_buckets_link(expires=nil, headers={})
858
+ generate_link('GET', headers.merge(:url=>''), expires)
859
+ rescue
860
+ on_exception
861
+ end
862
+
863
+ # Generates link for 'CreateBucket'.
864
+ #
865
+ # s3.create_bucket_link('my_awesome_bucket') #=> url string
866
+ #
867
+ def create_bucket_link(bucket, expires=nil, headers={})
868
+ generate_link('PUT', headers.merge(:url=>bucket), expires)
869
+ rescue
870
+ on_exception
871
+ end
872
+
873
+ # Generates link for 'DeleteBucket'.
874
+ #
875
+ # s3.delete_bucket_link('my_awesome_bucket') #=> url string
876
+ #
877
+ def delete_bucket_link(bucket, expires=nil, headers={})
878
+ generate_link('DELETE', headers.merge(:url=>bucket), expires)
879
+ rescue
880
+ on_exception
881
+ end
882
+
883
+ # Generates link for 'ListBucket'.
884
+ #
885
+ # s3.list_bucket_link('my_awesome_bucket') #=> url string
886
+ #
887
+ def list_bucket_link(bucket, options=nil, expires=nil, headers={})
888
+ bucket += '?' + options.map{|k, v| "#{k.to_s}=#{CGI::escape v.to_s}"}.join('&') unless options.blank?
889
+ generate_link('GET', headers.merge(:url=>bucket), expires)
890
+ rescue
891
+ on_exception
892
+ end
893
+
894
+ # Generates link for 'PutObject'.
895
+ #
896
+ # s3.put_link('my_awesome_bucket',key, object) #=> url string
897
+ #
898
+ def put_link(bucket, key, data=nil, expires=nil, headers={})
899
+ generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}", :data=>data), expires)
900
+ rescue
901
+ on_exception
902
+ end
903
+
904
+ # Generates link for 'GetObject'.
905
+ #
906
+ # if a bucket comply with virtual hosting naming then retuns a link with the
907
+ # bucket as a part of host name:
908
+ #
909
+ # s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...
910
+ #
911
+ # otherwise returns an old style link (the bucket is a part of path):
912
+ #
913
+ # s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...
914
+ #
915
+ # see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html
916
+ def get_link(bucket, key, expires=nil, headers={})
917
+ generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
918
+ rescue
919
+ on_exception
920
+ end
921
+
922
+ # Generates link for 'HeadObject'.
923
+ #
924
+ # s3.head_link('my_awesome_bucket',key) #=> url string
925
+ #
926
+ def head_link(bucket, key, expires=nil, headers={})
927
+ generate_link('HEAD', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
928
+ rescue
929
+ on_exception
930
+ end
931
+
932
+ # Generates link for 'DeleteObject'.
933
+ #
934
+ # s3.delete_link('my_awesome_bucket',key) #=> url string
935
+ #
936
+ def delete_link(bucket, key, expires=nil, headers={})
937
+ generate_link('DELETE', headers.merge(:url=>"#{bucket}/#{CGI::escape key}"), expires)
938
+ rescue
939
+ on_exception
940
+ end
941
+
942
+
943
+ # Generates link for 'GetACL'.
944
+ #
945
+ # s3.get_acl_link('my_awesome_bucket',key) #=> url string
946
+ #
947
+ def get_acl_link(bucket, key='', headers={})
948
+ return generate_link('GET', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
949
+ rescue
950
+ on_exception
951
+ end
952
+
953
+ # Generates link for 'PutACL'.
954
+ #
955
+ # s3.put_acl_link('my_awesome_bucket',key) #=> url string
956
+ #
957
+ def put_acl_link(bucket, key='', headers={})
958
+ return generate_link('PUT', headers.merge(:url=>"#{bucket}/#{CGI::escape key}?acl"))
959
+ rescue
960
+ on_exception
961
+ end
962
+
963
+ # Generates link for 'GetBucketACL'.
964
+ #
965
+ # s3.get_acl_link('my_awesome_bucket',key) #=> url string
966
+ #
967
+ def get_bucket_acl_link(bucket, headers={})
968
+ return get_acl_link(bucket, '', headers)
969
+ rescue
970
+ on_exception
971
+ end
972
+
973
+ # Generates link for 'PutBucketACL'.
974
+ #
975
+ # s3.put_acl_link('my_awesome_bucket',key) #=> url string
976
+ #
977
+ def put_bucket_acl_link(bucket, acl_xml_doc, headers={})
978
+ return put_acl_link(bucket, '', acl_xml_doc, headers)
979
+ rescue
980
+ on_exception
981
+ end
982
+
983
+ #-----------------------------------------------------------------
984
+ # PARSERS:
985
+ #-----------------------------------------------------------------
986
+
987
+ class S3ListAllMyBucketsParser < RightAWSParser # :nodoc:
988
+ def reset
989
+ @result = []
990
+ @owner = {}
991
+ end
992
+ def tagstart(name, attributes)
993
+ @current_bucket = {} if name == 'Bucket'
994
+ end
995
+ def tagend(name)
996
+ case name
997
+ when 'ID' then @owner[:owner_id] = @text
998
+ when 'DisplayName' then @owner[:owner_display_name] = @text
999
+ when 'Name' then @current_bucket[:name] = @text
1000
+ when 'CreationDate'then @current_bucket[:creation_date] = @text
1001
+ when 'Bucket' then @result << @current_bucket.merge(@owner)
1002
+ end
1003
+ end
1004
+ end
1005
+
1006
+ class S3ListBucketParser < RightAWSParser # :nodoc:
1007
+ def reset
1008
+ @result = []
1009
+ @service = {}
1010
+ @current_key = {}
1011
+ end
1012
+ def tagstart(name, attributes)
1013
+ @current_key = {} if name == 'Contents'
1014
+ end
1015
+ def tagend(name)
1016
+ case name
1017
+ # service info
1018
+ when 'Name' then @service['name'] = @text
1019
+ when 'Prefix' then @service['prefix'] = @text
1020
+ when 'Marker' then @service['marker'] = @text
1021
+ when 'MaxKeys' then @service['max-keys'] = @text
1022
+ when 'Delimiter' then @service['delimiter'] = @text
1023
+ when 'IsTruncated' then @service['is_truncated'] = (@text =~ /false/ ? false : true)
1024
+ # key data
1025
+ when 'Key' then @current_key[:key] = @text
1026
+ when 'LastModified'then @current_key[:last_modified] = @text
1027
+ when 'ETag' then @current_key[:e_tag] = @text
1028
+ when 'Size' then @current_key[:size] = @text.to_i
1029
+ when 'StorageClass'then @current_key[:storage_class] = @text
1030
+ when 'ID' then @current_key[:owner_id] = @text
1031
+ when 'DisplayName' then @current_key[:owner_display_name] = @text
1032
+ when 'Contents'
1033
+ @current_key[:service] = @service
1034
+ @result << @current_key
1035
+ end
1036
+ end
1037
+ end
1038
+
1039
+ class S3ImprovedListBucketParser < RightAWSParser # :nodoc:
1040
+ def reset
1041
+ @result = {}
1042
+ @result[:contents] = []
1043
+ @result[:common_prefixes] = []
1044
+ @contents = []
1045
+ @current_key = {}
1046
+ @common_prefixes = []
1047
+ @in_common_prefixes = false
1048
+ end
1049
+ def tagstart(name, attributes)
1050
+ @current_key = {} if name == 'Contents'
1051
+ @in_common_prefixes = true if name == 'CommonPrefixes'
1052
+ end
1053
+ def tagend(name)
1054
+ case name
1055
+ # service info
1056
+ when 'Name' then @result[:name] = @text
1057
+ # Amazon uses the same tag for the search prefix and for the entries
1058
+ # in common prefix...so use our simple flag to see which element
1059
+ # we are parsing
1060
+ when 'Prefix' then @in_common_prefixes ? @common_prefixes << @text : @result[:prefix] = @text
1061
+ when 'Marker' then @result[:marker] = @text
1062
+ when 'MaxKeys' then @result[:max_keys] = @text
1063
+ when 'Delimiter' then @result[:delimiter] = @text
1064
+ when 'IsTruncated' then @result[:is_truncated] = (@text =~ /false/ ? false : true)
1065
+ when 'NextMarker' then @result[:next_marker] = @text
1066
+ # key data
1067
+ when 'Key' then @current_key[:key] = @text
1068
+ when 'LastModified'then @current_key[:last_modified] = @text
1069
+ when 'ETag' then @current_key[:e_tag] = @text
1070
+ when 'Size' then @current_key[:size] = @text.to_i
1071
+ when 'StorageClass'then @current_key[:storage_class] = @text
1072
+ when 'ID' then @current_key[:owner_id] = @text
1073
+ when 'DisplayName' then @current_key[:owner_display_name] = @text
1074
+ when 'Contents' then @result[:contents] << @current_key
1075
+ # Common Prefix stuff
1076
+ when 'CommonPrefixes'
1077
+ @result[:common_prefixes] = @common_prefixes
1078
+ @in_common_prefixes = false
1079
+ end
1080
+ end
1081
+ end
1082
+
1083
+ class S3BucketLocationParser < RightAWSParser # :nodoc:
1084
+ def reset
1085
+ @result = ''
1086
+ end
1087
+ def tagend(name)
1088
+ @result = @text if name == 'LocationConstraint'
1089
+ end
1090
+ end
1091
+
1092
+ class S3AclParser < RightAWSParser # :nodoc:
1093
+ def reset
1094
+ @result = {:grantees=>[], :owner=>{}}
1095
+ @current_grantee = {}
1096
+ end
1097
+ def tagstart(name, attributes)
1098
+ @current_grantee = { :attributes => attributes } if name=='Grantee'
1099
+ end
1100
+ def tagend(name)
1101
+ case name
1102
+ # service info
1103
+ when 'ID'
1104
+ if @xmlpath == 'AccessControlPolicy/Owner'
1105
+ @result[:owner][:id] = @text
1106
+ else
1107
+ @current_grantee[:id] = @text
1108
+ end
1109
+ when 'DisplayName'
1110
+ if @xmlpath == 'AccessControlPolicy/Owner'
1111
+ @result[:owner][:display_name] = @text
1112
+ else
1113
+ @current_grantee[:display_name] = @text
1114
+ end
1115
+ when 'URI'
1116
+ @current_grantee[:uri] = @text
1117
+ when 'Permission'
1118
+ @current_grantee[:permissions] = @text
1119
+ when 'Grant'
1120
+ @result[:grantees] << @current_grantee
1121
+ end
1122
+ end
1123
+ end
1124
+
1125
+ class S3LoggingParser < RightAWSParser # :nodoc:
1126
+ def reset
1127
+ @result = {:enabled => false, :targetbucket => '', :targetprefix => ''}
1128
+ @current_grantee = {}
1129
+ end
1130
+ def tagend(name)
1131
+ case name
1132
+ # service info
1133
+ when 'TargetBucket'
1134
+ if @xmlpath == 'BucketLoggingStatus/LoggingEnabled'
1135
+ @result[:targetbucket] = @text
1136
+ @result[:enabled] = true
1137
+ end
1138
+ when 'TargetPrefix'
1139
+ if @xmlpath == 'BucketLoggingStatus/LoggingEnabled'
1140
+ @result[:targetprefix] = @text
1141
+ @result[:enabled] = true
1142
+ end
1143
+ end
1144
+ end
1145
+ end
1146
+
1147
+ class S3CopyParser < RightAWSParser # :nodoc:
1148
+ def reset
1149
+ @result = {}
1150
+ end
1151
+ def tagend(name)
1152
+ case name
1153
+ when 'LastModified' then @result[:last_modified] = @text
1154
+ when 'ETag' then @result[:e_tag] = @text
1155
+ end
1156
+ end
1157
+ end
1158
+
1159
+ #-----------------------------------------------------------------
1160
+ # PARSERS: Non XML
1161
+ #-----------------------------------------------------------------
1162
+
1163
+ class S3HttpResponseParser # :nodoc:
1164
+ attr_reader :result
1165
+ def parse(response)
1166
+ @result = response
1167
+ end
1168
+ def headers_to_string(headers)
1169
+ result = {}
1170
+ headers.each do |key, value|
1171
+ value = value.to_s if value.is_a?(Array) && value.size<2
1172
+ result[key] = value
1173
+ end
1174
+ result
1175
+ end
1176
+ end
1177
+
1178
+ class S3HttpResponseBodyParser < S3HttpResponseParser # :nodoc:
1179
+ def parse(response)
1180
+ @result = {
1181
+ :object => response.body,
1182
+ :headers => headers_to_string(response.to_hash)
1183
+ }
1184
+ end
1185
+ end
1186
+
1187
+ class S3HttpResponseHeadParser < S3HttpResponseParser # :nodoc:
1188
+ def parse(response)
1189
+ @result = headers_to_string(response.to_hash)
1190
+ end
1191
+ end
1192
+
1193
+ end
1194
+
1195
+ end