aws 2.3.34 → 2.4.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,1253 +0,0 @@
1
- #
2
- # Copyright (c) 2007-2008 RightScale Inc
3
- #
4
- # Permission is hereby granted, free of charge, to any person obtaining
5
- # a copy of this software and associated documentation files (the
6
- # "Software"), to deal in the Software without restriction, including
7
- # without limitation the rights to use, copy, modify, merge, publish,
8
- # distribute, sublicense, and/or sell copies of the Software, and to
9
- # permit persons to whom the Software is furnished to do so, subject to
10
- # the following conditions:
11
- #
12
- # The above copyright notice and this permission notice shall be
13
- # included in all copies or substantial portions of the Software.
14
- #
15
- # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16
- # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17
- # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18
- # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
19
- # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
20
- # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
21
- # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22
- #
23
-
24
- module Aws
25
-
26
- class S3Interface < AwsBase
27
-
28
# Bodies at or above this size are sent with 'Expect: 100-continue' so
# S3 can reject the request before the payload is transmitted.
USE_100_CONTINUE_PUT_SIZE = 1_000_000

include AwsBaseInterface

# Default endpoint settings (overridable via the S3_URL env var in #initialize).
DEFAULT_HOST           = 's3.amazonaws.com'
DEFAULT_PORT           = 443
DEFAULT_PROTOCOL       = 'https'
DEFAULT_SERVICE        = '/'
REQUEST_TTL            = 30
DEFAULT_EXPIRES_AFTER  = 1 * 24 * 60 * 60 # one day's worth of seconds
ONE_YEAR_IN_SECONDS    = 365 * 24 * 60 * 60
# Prefixes distinguishing Amazon-specific and user-metadata HTTP headers.
AMAZON_HEADER_PREFIX   = 'x-amz-'
AMAZON_METADATA_PREFIX = 'x-amz-meta-'

# Shared benchmarking block collecting XML-parsing and service timings.
@@bench = AwsBenchmarkingBlock.new
43
-
44
# Class-wide benchmarking block for XML parsing.
def self.bench_xml
  @@bench.xml
end
47
-
48
# Class-wide benchmarking block for S3 service calls.
def self.bench_s3
  @@bench.service
end
51
-
52
-
53
- # Creates new RightS3 instance.
54
- #
55
- # s3 = Aws::S3Interface.new('1E3GDYEOGFJPIT7XXXXXX','hgTHt68JY07JKUY08ftHYtERkjgtfERn57XXXXXX', {:multi_thread => true, :logger => Logger.new('/tmp/x.log')}) #=> #<Aws::S3Interface:0xb7b3c27c>
56
- #
57
- # Params is a hash:
58
- #
59
- # {:server => 's3.amazonaws.com' # Amazon service host: 's3.amazonaws.com'(default)
60
- # :port => 443 # Amazon service port: 80 or 443(default)
61
- # :protocol => 'https' # Amazon service protocol: 'http' or 'https'(default)
62
- # :connection_mode => :default # options are
63
- # :default (will use best known safe (as in won't need explicit close) option, may change in the future)
64
- # :per_request (opens and closes a connection on every request)
65
- # :single (one thread across entire app)
66
- # :per_thread (one connection per thread)
67
- # :logger => Logger Object} # Logger instance: logs to STDOUT if omitted }
68
- #
69
# Creates a new S3Interface.
#
# Credentials fall back to the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
# environment variables; the whole endpoint (host, port, path, scheme)
# may be overridden at once via the S3_URL environment variable.
def initialize(aws_access_key_id=nil, aws_secret_access_key=nil, params={})
  # Parse S3_URL once instead of re-parsing it for every component.
  s3_url = ENV['S3_URL'] ? URI.parse(ENV['S3_URL']) : nil
  init({:name             => 'S3',
        :default_host     => s3_url ? s3_url.host   : DEFAULT_HOST,
        :default_port     => s3_url ? s3_url.port   : DEFAULT_PORT,
        :default_service  => s3_url ? s3_url.path   : DEFAULT_SERVICE,
        :default_protocol => s3_url ? s3_url.scheme : DEFAULT_PROTOCOL},
       aws_access_key_id     || ENV['AWS_ACCESS_KEY_ID'],
       aws_secret_access_key || ENV['AWS_SECRET_ACCESS_KEY'],
       params)
end
79
-
80
-
81
# Closes the cached S3 HTTP connection (see AwsBaseInterface#close_conn).
def close_connection
  close_conn(:s3_connection)
end
84
-
85
- #-----------------------------------------------------------------
86
- # Requests
87
- #-----------------------------------------------------------------
88
- # Produces canonical string for signing.
89
# Builds the canonical string that is signed for S3 REST authentication.
# Only content-md5, content-type, date and x-amz-* headers participate;
# header values arrive as arrays (Net::HTTPHeader#to_hash form) and are
# joined and stripped. When +expires+ is given (query-string auth) it
# stands in for the date header.
def canonical_string(method, path, headers={}, expires=nil) # :nodoc:
  interesting = {}
  headers.each do |name, value|
    name = name.downcase
    if name[/^#{AMAZON_HEADER_PREFIX}|^content-md5$|^content-type$|^date$/o]
      interesting[name] = value.join("").strip
    end
  end
  interesting['content-type'] ||= ''
  interesting['content-md5']  ||= ''
  # An x-amz-date header, when present, replaces date in the signature.
  interesting['date'] = '' if interesting.has_key?('x-amz-date')
  interesting['date'] = expires if expires
  result = "#{method}\n"
  interesting.sort_by { |name, _| name }.each do |name, value|
    result << (name[/^#{AMAZON_HEADER_PREFIX}/o] ? "#{name}:#{value}\n" : "#{value}\n")
  end
  # Strip the query string, but re-append the sub-resource selectors
  # that S3 includes in the signature.
  result << path.gsub(/\?.*$/, '')
  result << '?acl'      if path[/[&?]acl($|&|=)/]
  result << '?policy'   if path[/[&?]policy($|&|=)/]
  result << '?torrent'  if path[/[&?]torrent($|&|=)/]
  result << '?location' if path[/[&?]location($|&|=)/]
  result << '?logging'  if path[/[&?]logging($|&|=)/] # beta, no support for now
  result
end
114
-
115
- # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
116
# True when +bucket_name+ is DNS-compatible (3..63 chars, dot-separated
# labels of lowercase alphanumerics/hyphens, each label starting and
# ending alphanumeric); nil otherwise.
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
def is_dns_bucket?(bucket_name)
  name = bucket_name.to_s
  return nil unless (3..63) === name.size
  # Every dot-separated label must match the DNS label shape.
  ok = name.split('.').all? { |label| label[/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/] }
  ok ? true : nil
end
124
-
125
# Splits headers[:url] ("bucket/key?query") into [server, path,
# path_to_sign], using virtual-host addressing (bucket as subdomain)
# when the bucket name is DNS-compatible, path-style addressing otherwise.
def fetch_request_params(headers) #:nodoc:
  server  = @params[:server]
  service = @params[:service].to_s
  service.chop! if service[%r{/$}] # remove trailing '/' from service
  # Extract bucket name, key path and query string.
  headers[:url].to_s[%r{^([a-z0-9._-]*)(/[^?]*)?(\?.+)?}i]
  bucket_name, key_path, params_list = $1, $2, $3
  if is_dns_bucket?(bucket_name)
    # Virtual-host style: bucket becomes a subdomain of the endpoint.
    server   = "#{bucket_name}.#{server}"
    key_path ||= '/'
    path = "#{service}#{key_path}#{params_list}"
  else
    # Path style: bucket stays in the request path.
    path = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  end
  # The signature always covers the path-style form.
  path_to_sign = "#{service}/#{bucket_name}#{key_path}#{params_list}"
  [server, path, path_to_sign]
end
146
-
147
- # Generates request hash for REST API.
148
- # Assumes that headers[:url] is URL encoded (use CGI::escape)
149
# Builds a signed Net::HTTP request hash for the REST API.
# Assumes headers[:url] is already URL-encoded (use CGI::escape).
# Returns {:request, :server, :port, :protocol}.
def generate_rest_request(method, headers) # :nodoc:
  server, path, path_to_sign = fetch_request_params(headers)
  data = headers[:data]
  # Drop unset (optional) values and the symbolic bookkeeping keys
  # (:url, :data) so only real HTTP headers remain.
  headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  headers['content-type'] ||= ''
  headers['date'] = Time.now.httpdate
  # Look up Net::HTTP::Get/Put/... with the stdlib Module#const_get
  # instead of ActiveSupport's String#constantize.
  request = Net::HTTP.const_get(method.capitalize).new(path)
  request.body = data if data
  # Copy request headers and x-amz meta headers onto the request.
  headers.each { |key, value| request[key.to_s] = value }
  # Sign the canonical form of the request.
  auth_string = canonical_string(request.method, path_to_sign, request.to_hash)
  signature   = AwsUtils::sign(@aws_secret_access_key, auth_string)
  request['Authorization'] = "AWS #{@aws_access_key_id}:#{signature}"
  {:request  => request,
   :server   => server,
   :port     => @params[:port],
   :protocol => @params[:protocol]}
end
174
-
175
- # Sends request to Amazon and parses the response.
176
- # Raises AwsError if any banana happened.
177
# Sends the request to Amazon and parses the response; raises AwsError
# on failure. Thin wrapper over AwsBaseInterface#request_info2.
def request_info(request, parser, options={}, &block) # :nodoc:
  request_info2(request, parser, @params, :s3_connection, @logger, @@bench, options, &block)
end
181
-
182
-
183
- # Returns an array of customer's buckets. Each item is a +hash+.
184
- #
185
- # s3.list_all_my_buckets #=>
186
- # [{:owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
187
- # :owner_display_name => "root",
188
- # :name => "bucket_name",
189
- # :creation_date => "2007-04-19T18:47:43.000Z"}, ..., {...}]
190
- #
191
# Returns the account's buckets as an array of hashes with
# :owner_id, :owner_display_name, :name and :creation_date.
def list_all_my_buckets(headers={})
  req_hash = generate_rest_request('GET', headers.merge(:url => ''))
  request_info(req_hash, S3ListAllMyBucketsParser.new(:logger => @logger))
rescue
  on_exception
end
197
-
198
- # Creates new bucket. Returns +true+ or an exception.
199
- #
200
- # # create a bucket at American server
201
- # s3.create_bucket('my-awesome-bucket-us') #=> true
202
- # # create a bucket at European server
203
- # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
204
- #
205
# Creates a new bucket, optionally with a location constraint
# (e.g. :location => :eu). Returns true or raises.
#
#   s3.create_bucket('my-awesome-bucket-us')                    #=> true
#   s3.create_bucket('my-awesome-bucket-eu', :location => :eu)  #=> true
def create_bucket(bucket, headers={})
  data = nil
  unless headers[:location].blank?
    location = headers[:location].to_s
    location.upcase! if location == 'eu'
    data = "<CreateBucketConfiguration><LocationConstraint>#{location}</LocationConstraint></CreateBucketConfiguration>"
  end
  req_hash = generate_rest_request('PUT', headers.merge(:url => bucket, :data => data))
  request_info(req_hash, RightHttp2xxParser.new)
rescue StandardError => e
  # Rescue StandardError rather than Exception so SystemExit,
  # SignalException etc. are not swallowed here.
  # If the bucket exists AWS returns an error for the location
  # constraint interface; treat "already owned by you" as success.
  e.is_a?(Aws::AwsError) && e.message.include?('BucketAlreadyOwnedByYou') ? true : on_exception
end
219
-
220
- # Retrieve bucket location
221
- #
222
- # s3.create_bucket('my-awesome-bucket-us') #=> true
223
- # puts s3.bucket_location('my-awesome-bucket-us') #=> '' (Amazon's default value assumed)
224
- #
225
- # s3.create_bucket('my-awesome-bucket-eu', :location => :eu) #=> true
226
- # puts s3.bucket_location('my-awesome-bucket-eu') #=> 'EU'
227
- #
228
# Retrieves a bucket's location constraint ('' for the US default,
# 'EU' for European buckets).
def bucket_location(bucket, headers={})
  req_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}?location"))
  request_info(req_hash, S3BucketLocationParser.new)
rescue
  on_exception
end
234
-
235
- # Retrieves the logging configuration for a bucket.
236
- # Returns a hash of {:enabled, :targetbucket, :targetprefix}
237
- #
238
- # s3.interface.get_logging_parse(:bucket => "asset_bucket")
239
- # => {:enabled=>true, :targetbucket=>"mylogbucket", :targetprefix=>"loggylogs/"}
240
- #
241
- #
242
# Retrieves the logging configuration for a bucket as a hash of
# {:enabled, :targetbucket, :targetprefix}. Requires :bucket; accepts
# optional :headers.
def get_logging_parse(params)
  AwsUtils.mandatory_arguments([:bucket], params)
  AwsUtils.allow_only([:bucket, :headers], params)
  params[:headers] ||= {}
  req_hash = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}?logging"))
  request_info(req_hash, S3LoggingParser.new)
rescue
  on_exception
end
251
-
252
- # Sets logging configuration for a bucket from the XML configuration document.
253
- # params:
254
- # :bucket
255
- # :xmldoc
256
# Sets a bucket's logging configuration from an XML document.
# Requires :bucket and :xmldoc; accepts optional :headers.
def put_logging(params)
  AwsUtils.mandatory_arguments([:bucket, :xmldoc], params)
  AwsUtils.allow_only([:bucket, :xmldoc, :headers], params)
  params[:headers] ||= {}
  req_hash = generate_rest_request('PUT', params[:headers].merge(:url  => "#{params[:bucket]}?logging",
                                                                 :data => params[:xmldoc]))
  request_info(req_hash, S3TrueParser.new)
rescue
  on_exception
end
265
-
266
- # Deletes new bucket. Bucket must be empty! Returns +true+ or an exception.
267
- #
268
- # s3.delete_bucket('my_awesome_bucket') #=> true
269
- #
270
- # See also: force_delete_bucket method
271
- #
272
# Deletes a bucket (which must already be empty). Returns true or raises.
def delete_bucket(bucket, headers={})
  req_hash = generate_rest_request('DELETE', headers.merge(:url => bucket))
  request_info(req_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
278
-
279
- # Returns an array of bucket's keys. Each array item (key data) is a +hash+.
280
- #
281
- # s3.list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) #=>
282
- # [{:key => "test1",
283
- # :last_modified => "2007-05-18T07:00:59.000Z",
284
- # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
285
- # :owner_display_name => "root",
286
- # :e_tag => "000000000059075b964b07152d234b70",
287
- # :storage_class => "STANDARD",
288
- # :size => 3,
289
- # :service=> {'is_truncated' => false,
290
- # 'prefix' => "t",
291
- # 'marker' => "",
292
- # 'name' => "my_awesome_bucket",
293
- # 'max-keys' => "5"}, ..., {...}]
294
- #
295
# Lists a bucket's keys, honoring the usual S3 listing options
# ('prefix', 'marker', 'max-keys', 'delimiter'). Returns an array of
# key-data hashes.
def list_bucket(bucket, options={}, headers={})
  unless options.blank?
    query = options.map { |k, v| "#{k}=#{CGI::escape v.to_s}" }.join('&')
    bucket += "?#{query}"
  end
  req_hash = generate_rest_request('GET', headers.merge(:url => bucket))
  request_info(req_hash, S3ListBucketParser.new(:logger => @logger))
rescue
  on_exception
end
302
-
303
- # Incrementally list the contents of a bucket. Yields the following hash to a block:
304
- # s3.incrementally_list_bucket('my_awesome_bucket', { 'prefix'=>'t', 'marker'=>'', 'max-keys'=>5, delimiter=>'' }) yields
305
- # {
306
- # :name => 'bucketname',
307
- # :prefix => 'subfolder/',
308
- # :marker => 'fileN.jpg',
309
- # :max_keys => 234,
310
- # :delimiter => '/',
311
- # :is_truncated => true,
312
- # :next_marker => 'fileX.jpg',
313
- # :contents => [
314
- # { :key => "file1",
315
- # :last_modified => "2007-05-18T07:00:59.000Z",
316
- # :e_tag => "000000000059075b964b07152d234b70",
317
- # :size => 3,
318
- # :storage_class => "STANDARD",
319
- # :owner_id => "00000000009314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a",
320
- # :owner_display_name => "root"
321
- # }, { :key, ...}, ... {:key, ...}
322
- # ]
323
- # :common_prefixes => [
324
- # "prefix1",
325
- # "prefix2",
326
- # ...,
327
- # "prefixN"
328
- # ]
329
- # }
330
# Incrementally lists a bucket, yielding one page of results
# (:contents, :common_prefixes, :is_truncated, ...) per request until
# S3 reports no more keys or the caller's max-keys budget is spent.
# Returns true.
def incrementally_list_bucket(bucket, options={}, headers={}, &block)
  internal_options = options.symbolize_keys
  begin
    internal_bucket = bucket.dup
    unless internal_options.blank?
      internal_bucket += '?' + internal_options.map { |k, v| "#{k}=#{CGI::escape v.to_s}" }.join('&')
    end
    req_hash = generate_rest_request('GET', headers.merge(:url => internal_bucket))
    response = request_info(req_hash, S3ImprovedListBucketParser.new(:logger => @logger))
    there_are_more_keys = response[:is_truncated]
    if there_are_more_keys
      # Resume the next page after the last key/prefix seen, and charge
      # this page's results against the caller's max-keys budget.
      internal_options[:marker] = decide_marker(response)
      total_results = response[:contents].length + response[:common_prefixes].length
      internal_options[:'max-keys'] ? (internal_options[:'max-keys'] -= total_results) : nil
    end
    yield response
  end while there_are_more_keys && under_max_keys(internal_options)
  true
rescue
  on_exception
end
349
-
350
-
351
- private
352
# Works out the marker for the next listing page: the explicit
# :next_marker when present, otherwise whichever sorts last of the
# final key and final common prefix returned on this page.
def decide_marker(response)
  return response[:next_marker].dup if response[:next_marker]
  last_key    = response[:contents].last[:key]
  last_prefix = response[:common_prefixes].last
  if last_key.nil?
    last_prefix.nil? ? nil : last_prefix.dup
  elsif last_prefix.nil?
    last_key.dup
  else
    [last_key, last_prefix].max.dup
  end
end
365
-
366
# True while the caller-requested max-keys budget (if any) is not yet spent.
def under_max_keys(internal_options)
  limit = internal_options[:'max-keys']
  limit ? limit > 0 : true
end
369
-
370
- public
371
- # Saves object to Amazon. Returns +true+ or an exception.
372
- # Any header starting with AMAZON_METADATA_PREFIX is considered
373
- # user metadata. It will be stored with the object and returned
374
- # when you retrieve the object. The total size of the HTTP
375
- # request, not including the body, must be less than 4 KB.
376
- #
377
- # s3.put('my_awesome_bucket', 'log/current/1.log', 'Ola-la!', 'x-amz-meta-family'=>'Woho556!') #=> true
378
- #
379
- # This method is capable of 'streaming' uploads; that is, it can upload
380
- # data from a file or other IO object without first reading all the data
381
- # into memory. This is most useful for large PUTs - it is difficult to read
382
- # a 2 GB file entirely into memory before sending it to S3.
383
- # To stream an upload, pass an object that responds to 'read' (like the read
384
- # method of IO) and to either 'lstat' or 'size'. For files, this means
385
- # streaming is enabled by simply making the call:
386
- #
387
- # s3.put(bucket_name, 'S3keyname.forthisfile', File.open('localfilename.dat'))
388
- #
389
- # If the IO object you wish to stream from responds to the read method but
390
- # doesn't implement lstat or size, you can extend the object dynamically
391
- # to implement these methods, or define your own class which defines these
392
- # methods. Be sure that your class returns 'nil' from read() after having
393
- # read 'size' bytes. Otherwise S3 will drop the socket after
394
- # 'Content-Length' bytes have been uploaded, and HttpConnection will
395
- # interpret this as an error.
396
- #
397
- # This method now supports very large PUTs, where very large
398
- # is > 2 GB.
399
- #
400
- # For Win32 users: Files and IO objects should be opened in binary mode. If
401
- # a text mode IO object is passed to PUT, it will be converted to binary
402
- # mode.
403
- #
404
-
405
# Stores an object; returns true or raises. Streams from any IO-like
# +data+ that responds to read and to lstat or size; strings are sent
# as-is. Headers starting with AMAZON_METADATA_PREFIX are stored as
# user metadata.
def put(bucket, key, data=nil, headers={})
  # Text-mode IO on Windows must be switched to binary for streaming.
  data.binmode if data.respond_to?(:binmode)
  data_size = if data.respond_to?(:lstat)
                data.lstat.size
              elsif data.respond_to?(:size)
                data.size
              else
                0
              end
  # Large bodies use Expect: 100-continue so S3 can refuse early.
  headers['expect'] = '100-continue' if data_size >= USE_100_CONTINUE_PUT_SIZE
  req_hash = generate_rest_request('PUT', headers.merge(:url  => "#{bucket}/#{CGI::escape key}",
                                                        :data => data,
                                                        'Content-Length' => data_size.to_s))
  request_info(req_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
422
-
423
-
424
- # New experimental API for uploading objects, introduced in Aws 1.8.1.
425
- # store_object is similar in function to the older function put, but returns the full response metadata. It also allows for optional verification
426
- # of object md5 checksums on upload. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
427
- # The hash of the response headers contains useful information like the Amazon request ID and the object ETag (MD5 checksum).
428
- #
429
- # If the optional :md5 argument is provided, store_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
430
- # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
431
- #
432
- # The optional argument of :headers allows the caller to specify arbitrary request header values.
433
- #
434
- # s3.store_object(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
435
- # => {"x-amz-id-2"=>"SVsnS2nfDaR+ixyJUlRKM8GndRyEMS16+oZRieamuL61pPxPaTuWrWtlYaEhYrI/",
436
- # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
437
- # "date"=>"Mon, 29 Sep 2008 18:57:46 GMT",
438
- # :verified_md5=>true,
439
- # "x-amz-request-id"=>"63916465939995BA",
440
- # "server"=>"AmazonS3",
441
- # "content-length"=>"0"}
442
- #
443
- # s3.store_object(:bucket => "foobucket", :key => "foo", :data => "polemonium" )
444
- # => {"x-amz-id-2"=>"MAt9PLjgLX9UYJ5tV2fI/5dBZdpFjlzRVpWgBDpvZpl+V+gJFcBMW2L+LBstYpbR",
445
- # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
446
- # "date"=>"Mon, 29 Sep 2008 18:58:56 GMT",
447
- # :verified_md5=>false,
448
- # "x-amz-request-id"=>"3B25A996BC2CDD3B",
449
- # "server"=>"AmazonS3",
450
- # "content-length"=>"0"}
451
-
452
# Stores an object and returns the full response-header hash. When :md5
# is given, :verified_md5 in the result reflects whether the returned
# ETag matches it; otherwise :verified_md5 is false.
# Requires :bucket, :key, :data; accepts :headers and :md5.
def store_object(params)
  AwsUtils.allow_only([:bucket, :key, :data, :headers, :md5], params)
  AwsUtils.mandatory_arguments([:bucket, :key, :data], params)
  params[:headers] ||= {}

  data = params[:data]
  # Windows text-mode IO must be reset to binary for streaming.
  data.binmode if data.respond_to?(:binmode)
  # Large bodies use Expect: 100-continue so S3 can refuse early.
  large = (data.respond_to?(:lstat) && data.lstat.size >= USE_100_CONTINUE_PUT_SIZE) ||
          (data.respond_to?(:size)  && data.size       >= USE_100_CONTINUE_PUT_SIZE)
  params[:headers]['expect'] = '100-continue' if large

  req_hash = generate_rest_request('PUT', params[:headers].merge(:url  => "#{params[:bucket]}/#{CGI::escape params[:key]}",
                                                                 :data => data))
  resp = request_info(req_hash, S3HttpResponseHeadParser.new)
  resp[:verified_md5] = params[:md5] ? (resp['etag'].gsub(/\"/, '') == params[:md5]) : false
  resp
rescue
  on_exception
end
474
-
475
- # Identical in function to store_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
476
- # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
477
- # This call is implemented as a wrapper around store_object and the user may gain different semantics by creating a custom wrapper.
478
- #
479
- # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2954", :data => "polemonium" )
480
- # => {"x-amz-id-2"=>"IZN3XsH4FlBU0+XYkFTfHwaiF1tNzrm6dIW2EM/cthKvl71nldfVC0oVQyydzWpb",
481
- # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
482
- # "date"=>"Mon, 29 Sep 2008 18:38:32 GMT",
483
- # :verified_md5=>true,
484
- # "x-amz-request-id"=>"E8D7EA4FE00F5DF7",
485
- # "server"=>"AmazonS3",
486
- # "content-length"=>"0"}
487
- #
488
- # s3.store_object_and_verify(:bucket => "foobucket", :key => "foo", :md5 => "a507841b1bc8115094b00bbe8c1b2953", :data => "polemonium" )
489
- # Aws::AwsError: Uploaded object failed MD5 checksum verification: {"x-amz-id-2"=>"HTxVtd2bf7UHHDn+WzEH43MkEjFZ26xuYvUzbstkV6nrWvECRWQWFSx91z/bl03n",
490
- # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
491
- # "date"=>"Mon, 29 Sep 2008 18:38:41 GMT",
492
- # :verified_md5=>false,
493
- # "x-amz-request-id"=>"0D7ADE09F42606F2",
494
- # "server"=>"AmazonS3",
495
- # "content-length"=>"0"}
496
# Same as store_object but raises AwsError unless the returned ETag
# matches the mandatory :md5 argument; on success returns the response
# metadata with :verified_md5 => true.
def store_object_and_verify(params)
  AwsUtils.mandatory_arguments([:md5], params)
  r = store_object(params)
  raise AwsError.new("Uploaded object failed MD5 checksum verification: #{r.inspect}") unless r[:verified_md5]
  r
end
501
-
502
- # Retrieves object data from Amazon. Returns a +hash+ or an exception.
503
- #
504
- # s3.get('my_awesome_bucket', 'log/curent/1.log') #=>
505
- #
506
- # {:object => "Ola-la!",
507
- # :headers => {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
508
- # "content-type" => "",
509
- # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
510
- # "date" => "Wed, 23 May 2007 09:08:03 GMT",
511
- # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
512
- # "x-amz-meta-family" => "Woho556!",
513
- # "x-amz-request-id" => "0000000C246D770C",
514
- # "server" => "AmazonS3",
515
- # "content-length" => "7"}}
516
- #
517
- # If a block is provided, yields incrementally to the block as
518
- # the response is read. For large responses, this function is ideal as
519
- # the response can be 'streamed'. The hash containing header fields is
520
- # still returned.
521
- # Example:
522
- # foo = File.new('./chunder.txt', File::CREAT|File::RDWR)
523
- # rhdr = s3.get('aws-test', 'Cent5V1_7_1.img.part.00') do |chunk|
524
- # foo.write(chunk)
525
- # end
526
- # foo.close
527
- #
528
-
529
# Retrieves an object, returning {:object, :headers}. With a block,
# yields the body incrementally in chunks ('streamed' download) and
# still returns the header hash.
def get(bucket, key, headers={}, &block)
  req_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(req_hash, S3HttpResponseBodyParser.new, &block)
rescue
  on_exception
end
535
-
536
- # New experimental API for retrieving objects, introduced in Aws 1.8.1.
537
- # retrieve_object is similar in function to the older function get. It allows for optional verification
538
- # of object md5 checksums on retrieval. Parameters are passed as hash entries and are checked for completeness as well as for spurious arguments.
539
- #
540
- # If the optional :md5 argument is provided, retrieve_object verifies that the given md5 matches the md5 returned by S3. The :verified_md5 field in the response hash is
541
- # set true or false depending on the outcome of this check. If no :md5 argument is given, :verified_md5 will be false in the response.
542
- #
543
- # The optional argument of :headers allows the caller to specify arbitrary request header values.
544
- # Mandatory arguments:
545
- # :bucket - the bucket in which the object is stored
546
- # :key - the object address (or path) within the bucket
547
- # Optional arguments:
548
- # :headers - hash of additional HTTP headers to include with the request
549
- # :md5 - MD5 checksum against which to verify the retrieved object
550
- #
551
- # s3.retrieve_object(:bucket => "foobucket", :key => "foo")
552
- # => {:verified_md5=>false,
553
- # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
554
- # "x-amz-id-2"=>"2Aj3TDz6HP5109qly//18uHZ2a1TNHGLns9hyAtq2ved7wmzEXDOPGRHOYEa3Qnp",
555
- # "content-type"=>"",
556
- # "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
557
- # "date"=>"Tue, 30 Sep 2008 00:52:44 GMT",
558
- # "x-amz-request-id"=>"EE4855DE27A2688C",
559
- # "server"=>"AmazonS3",
560
- # "content-length"=>"10"},
561
- # :object=>"polemonium"}
562
- #
563
- # s3.retrieve_object(:bucket => "foobucket", :key => "foo", :md5=>'a507841b1bc8115094b00bbe8c1b2954')
564
- # => {:verified_md5=>true,
565
- # :headers=>{"last-modified"=>"Mon, 29 Sep 2008 18:58:56 GMT",
566
- # "x-amz-id-2"=>"mLWQcI+VuKVIdpTaPXEo84g0cz+vzmRLbj79TS8eFPfw19cGFOPxuLy4uGYVCvdH",
567
- # "content-type"=>"", "etag"=>"\"a507841b1bc8115094b00bbe8c1b2954\"",
568
- # "date"=>"Tue, 30 Sep 2008 00:53:08 GMT",
569
- # "x-amz-request-id"=>"6E7F317356580599",
570
- # "server"=>"AmazonS3",
571
- # "content-length"=>"10"},
572
- # :object=>"polemonium"}
573
- # If a block is provided, yields incrementally to the block as
574
- # the response is read. For large responses, this function is ideal as
575
- # the response can be 'streamed'. The hash containing header fields is
576
- # still returned.
577
# Retrieves an object with optional MD5 verification. Requires :bucket
# and :key; accepts :headers and :md5. The result's :verified_md5 is
# true only when :md5 was given and matches the returned ETag. With a
# block, yields the body incrementally.
def retrieve_object(params, &block)
  AwsUtils.mandatory_arguments([:bucket, :key], params)
  AwsUtils.allow_only([:bucket, :key, :headers, :md5], params)
  params[:headers] ||= {}
  req_hash = generate_rest_request('GET', params[:headers].merge(:url => "#{params[:bucket]}/#{CGI::escape params[:key]}"))
  resp = request_info(req_hash, S3HttpResponseBodyParser.new, &block)
  resp[:verified_md5] = !!(params[:md5] && resp[:headers]['etag'].gsub(/\"/, '') == params[:md5])
  resp
rescue
  on_exception
end
591
-
592
- # Identical in function to retrieve_object, but requires verification that the returned ETag is identical to the checksum passed in by the user as the 'md5' argument.
593
- # If the check passes, returns the response metadata with the "verified_md5" field set true. Raises an exception if the checksums conflict.
594
- # This call is implemented as a wrapper around retrieve_object and the user may gain different semantics by creating a custom wrapper.
595
# Same as retrieve_object but raises AwsError unless the returned ETag
# matches the mandatory :md5 argument.
def retrieve_object_and_verify(params, &block)
  AwsUtils.mandatory_arguments([:md5], params)
  resp = retrieve_object(params, &block)
  resp[:verified_md5] ? resp : raise(AwsError.new("Retrieved object failed MD5 checksum verification: #{resp.inspect}"))
end
601
-
602
- # Retrieves object metadata. Returns a +hash+ of http_response_headers.
603
- #
604
- # s3.head('my_awesome_bucket', 'log/curent/1.log') #=>
605
- # {"last-modified" => "Wed, 23 May 2007 09:08:04 GMT",
606
- # "content-type" => "",
607
- # "etag" => "\"000000000096f4ee74bc4596443ef2a4\"",
608
- # "date" => "Wed, 23 May 2007 09:08:03 GMT",
609
- # "x-amz-id-2" => "ZZZZZZZZZZZZZZZZZZZZ1HJXZoehfrS4QxcxTdNGldR7w/FVqblP50fU8cuIMLiu",
610
- # "x-amz-meta-family" => "Woho556!",
611
- # "x-amz-request-id" => "0000000C246D770C",
612
- # "server" => "AmazonS3",
613
- # "content-length" => "7"}
614
- #
615
# Retrieves an object's metadata as a hash of HTTP response headers.
def head(bucket, key, headers={})
  req_hash = generate_rest_request('HEAD', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(req_hash, S3HttpResponseHeadParser.new)
rescue
  on_exception
end
621
-
622
- # Deletes key. Returns +true+ or an exception.
623
- #
624
- # s3.delete('my_awesome_bucket', 'log/curent/1.log') #=> true
625
- #
626
# Deletes a key from a bucket. Returns true or raises.
def delete(bucket, key='', headers={})
  req_hash = generate_rest_request('DELETE', headers.merge(:url => "#{bucket}/#{CGI::escape key}"))
  request_info(req_hash, RightHttp2xxParser.new)
rescue
  on_exception
end
632
-
633
- # Copy an object.
634
- # directive: :copy - copy meta-headers from source (default value)
635
- # :replace - replace meta-headers by passed ones
636
- #
637
- # # copy a key with meta-headers
638
- # s3.copy('b1', 'key1', 'b1', 'key1_copy') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:25:22.000Z"}
639
- #
640
- # # copy a key, overwrite meta-headers
641
- # s3.copy('b1', 'key2', 'b1', 'key2_copy', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:26:22.000Z"}
642
- #
643
- # see: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/UsingCopyingObjects.html
644
- # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html
645
- #
646
# Copies an object. +directive+ is :copy (keep the source's
# meta-headers, default) or :replace (use the passed headers instead).
# Returns {:e_tag, :last_modified}.
def copy(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
  dest_key ||= src_key
  headers['x-amz-metadata-directive'] = directive.to_s.upcase
  headers['x-amz-copy-source']        = "#{src_bucket}/#{CGI::escape src_key}"
  req_hash = generate_rest_request('PUT', headers.merge(:url => "#{dest_bucket}/#{CGI::escape dest_key}"))
  request_info(req_hash, S3CopyParser.new)
rescue
  on_exception
end
655
-
656
- # Move an object.
657
- # directive: :copy - copy meta-headers from source (default value)
658
- # :replace - replace meta-headers by passed ones
659
- #
660
- # # move bucket1/key1 to bucket1/key2
661
- # s3.move('bucket1', 'key1', 'bucket1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:27:22.000Z"}
662
- #
663
- # # move bucket1/key1 to bucket2/key2 with new meta-headers assignment
664
- # s3.copy('bucket1', 'key1', 'bucket2', 'key2', :replace, 'x-amz-meta-family'=>'Woho555!') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:28:22.000Z"}
665
- #
666
# Moves an object: copies it, then deletes the original unless source
# and destination are the same bucket/key. Returns the copy result.
def move(src_bucket, src_key, dest_bucket, dest_key=nil, directive=:copy, headers={})
  copy_result = copy(src_bucket, src_key, dest_bucket, dest_key, directive, headers)
  delete(src_bucket, src_key) unless src_bucket == dest_bucket && src_key == dest_key
  copy_result
end
672
-
673
- # Rename an object.
674
- #
675
- # # rename bucket1/key1 to bucket1/key2
676
- # s3.rename('bucket1', 'key1', 'key2') #=> {:e_tag=>"\"e8b...8d\"", :last_modified=>"2008-05-11T10:29:22.000Z"}
677
- #
678
# Renames an object within its bucket (move with the same bucket).
def rename(src_bucket, src_key, dest_key, headers={})
  move(src_bucket, src_key, src_bucket, dest_key, :copy, headers)
end
681
-
682
- # Retieves the ACL (access control policy) for a bucket or object. Returns a hash of headers and xml doc with ACL data. See: http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTAccessPolicy.html.
683
- #
684
- # s3.get_acl('my_awesome_bucket', 'log/curent/1.log') #=>
685
- # {:headers => {"x-amz-id-2"=>"B3BdDMDUz+phFF2mGBH04E46ZD4Qb9HF5PoPHqDRWBv+NVGeA3TOQ3BkVvPBjgxX",
686
- # "content-type"=>"application/xml;charset=ISO-8859-1",
687
- # "date"=>"Wed, 23 May 2007 09:40:16 GMT",
688
- # "x-amz-request-id"=>"B183FA7AB5FBB4DD",
689
- # "server"=>"AmazonS3",
690
- # "transfer-encoding"=>"chunked"},
691
- # :object => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner>
692
- # <ID>16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Owner>
693
- # <AccessControlList><Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\"><ID>
694
- # 16144ab2929314cc309ffe736daa2b264357476c7fea6efb2c3347ac3ab2792a</ID><DisplayName>root</DisplayName></Grantee>
695
- # <Permission>FULL_CONTROL</Permission></Grant></AccessControlList></AccessControlPolicy>" }
696
- #
697
# Retrieves the ACL (access control policy) for a bucket (key == '') or an
# object. Returns a hash with :headers and the raw ACL XML under :object.
def get_acl(bucket, key='', headers={})
  resource = key.blank? ? '' : "/#{CGI::escape key}"
  request  = generate_rest_request('GET', headers.merge(:url => "#{bucket}#{resource}?acl"))
  request_info(request, S3HttpResponseBodyParser.new)
rescue
  on_exception
end
704
-
705
- # Retieves the ACL (access control policy) for a bucket or object.
706
- # Returns a hash of {:owner, :grantees}
707
- #
708
- # s3.get_acl_parse('my_awesome_bucket', 'log/curent/1.log') #=>
709
- #
710
- # { :grantees=>
711
- # { "16...2a"=>
712
- # { :display_name=>"root",
713
- # :permissions=>["FULL_CONTROL"],
714
- # :attributes=>
715
- # { "xsi:type"=>"CanonicalUser",
716
- # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}},
717
- # "http://acs.amazonaws.com/groups/global/AllUsers"=>
718
- # { :display_name=>"AllUsers",
719
- # :permissions=>["READ"],
720
- # :attributes=>
721
- # { "xsi:type"=>"Group",
722
- # "xmlns:xsi"=>"http://www.w3.org/2001/XMLSchema-instance"}}},
723
- # :owner=>
724
- # { :id=>"16..2a",
725
- # :display_name=>"root"}}
726
- #
727
# Retrieves and parses the ACL for a bucket or object.
# Returns {:owner => {...}, :grantees => {id_or_uri => {:display_name, :permissions, :attributes}}}.
# Multiple grants for the same grantee are folded into one entry whose
# :permissions array accumulates each grant.
def get_acl_parse(bucket, key='', headers={})
  resource = key.blank? ? '' : "/#{CGI::escape key}"
  req_hash = generate_rest_request('GET', headers.merge(:url => "#{bucket}#{resource}?acl"))
  acl = request_info(req_hash, S3AclParser.new(:logger => @logger))
  grantees = {}
  acl[:grantees].each do |grantee|
    # Canonical users are keyed by :id, group grantees by :uri.
    gid = grantee[:id] || grantee[:uri]
    if grantees.key?(gid)
      grantees[gid][:permissions] << grantee[:permissions]
    else
      grantees[gid] = {
        # For group grantees the display name is the last path segment of the URI.
        :display_name => grantee[:display_name] || grantee[:uri].to_s[/[^\/]*$/],
        :permissions  => grantee[:permissions].lines.to_a,
        :attributes   => grantee[:attributes]
      }
    end
  end
  { :owner => acl[:owner], :grantees => grantees }
rescue
  on_exception
end
749
-
750
- # Sets the ACL on a bucket or object.
751
# Sets the ACL on a bucket (key == '') or object from an ACL XML document.
def put_acl(bucket, key, acl_xml_doc, headers={})
  resource = key.blank? ? '' : "/#{CGI::escape key}"
  request  = generate_rest_request('PUT', headers.merge(:url => "#{bucket}#{resource}?acl", :data => acl_xml_doc))
  request_info(request, S3HttpResponseBodyParser.new)
rescue
  on_exception
end
758
-
759
- # Retieves the ACL (access control policy) for a bucket. Returns a hash of headers and xml doc with ACL data.
760
# Retrieves the ACL for a bucket itself (delegates to get_acl with an empty key).
def get_bucket_acl(bucket, headers={})
  get_acl(bucket, '', headers)
rescue
  on_exception
end
765
-
766
- # Sets the ACL on a bucket only.
767
# Sets the ACL on a bucket itself (delegates to put_acl with an empty key).
def put_bucket_acl(bucket, acl_xml_doc, headers={})
  put_acl(bucket, '', acl_xml_doc, headers)
rescue
  on_exception
end
772
-
773
# Fetches the bucket policy document; result hash has the raw JSON under :object.
def get_bucket_policy(bucket)
  request = generate_rest_request('GET', :url => "#{bucket}?policy")
  request_info(request, S3HttpResponseBodyParser.new)
rescue
  on_exception
end
779
-
780
# Sets the bucket policy to the given policy document.
def put_bucket_policy(bucket, policy)
  # Bug fix: removed a stray copy-pasted line (`key = key.blank? ? ...`) —
  # this method has no key parameter and the computed value was never used.
  req_hash = generate_rest_request('PUT', {:url=>"#{bucket}?policy", :data=>policy})
  request_info(req_hash, S3HttpResponseBodyParser.new)
rescue
  on_exception
end
787
-
788
- # Removes all keys from bucket. Returns +true+ or an exception.
789
- #
790
- # s3.clear_bucket('my_awesome_bucket') #=> true
791
- #
792
# Removes every key from the bucket. Returns +true+ or an exception.
#
#  s3.clear_bucket('my_awesome_bucket') #=> true
#
def clear_bucket(bucket)
  incrementally_list_bucket(bucket) do |batch|
    batch[:contents].each { |entry| delete(bucket, entry[:key]) }
  end
  true
rescue
  on_exception
end
800
-
801
- # Deletes all keys in bucket then deletes bucket. Returns +true+ or an exception.
802
- #
803
- # s3.force_delete_bucket('my_awesome_bucket')
804
- #
805
# Empties the bucket, then deletes the bucket itself. Returns +true+ or an exception.
#
#  s3.force_delete_bucket('my_awesome_bucket')
#
def force_delete_bucket(bucket)
  clear_bucket(bucket)   # S3 refuses to delete a non-empty bucket
  delete_bucket(bucket)
rescue
  on_exception
end
811
-
812
- # Deletes all keys where the 'folder_key' may be assumed as 'folder' name. Returns an array of string keys that have been deleted.
813
- #
814
- # s3.list_bucket('my_awesome_bucket').map{|key_data| key_data[:key]} #=> ['test','test/2/34','test/3','test1','test1/logs']
815
- # s3.delete_folder('my_awesome_bucket','test') #=> ['test','test/2/34','test/3']
816
- #
817
# Deletes all keys under the pseudo-folder 'folder_key'. Returns an array of
# key-name batches that were deleted (one sub-array per listing page).
#
#  s3.delete_folder('my_awesome_bucket','test') #=> [['test','test/2/34','test/3']]
#
def delete_folder(bucket, folder_key, separator='/')
  # Bug fix: use the non-destructive chomp so the caller's string argument is
  # not mutated in place.
  folder_key = folder_key.chomp(separator)
  # NOTE(review): folder_key is interpolated into a regex unescaped — keys
  # containing regex metacharacters may match unexpectedly; confirm intent.
  allkeys = []
  incrementally_list_bucket(bucket, {'prefix' => folder_key}) do |results|
    # Keep only exact matches ('test') or keys under the folder ('test/...'),
    # excluding prefix-only collisions such as 'test1'.
    keys = results[:contents].map { |s3_key| s3_key[:key][/^#{folder_key}($|#{separator}.*)/] ? s3_key[:key] : nil }.compact
    keys.each { |key| delete(bucket, key) }
    allkeys << keys
  end
  allkeys
rescue
  on_exception
end
829
-
830
- # Retrieves object data only (headers are omitted). Returns +string+ or an exception.
831
- #
832
- # s3.get('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'
833
- #
834
# Retrieves object data only (response headers are discarded).
#
#  s3.get_object('my_awesome_bucket', 'log/curent/1.log') #=> 'Ola-la!'
#
def get_object(bucket, key, headers={})
  response = get(bucket, key, headers)
  response[:object]
rescue
  on_exception
end
839
-
840
- #-----------------------------------------------------------------
841
- # Query API: Links
842
- #-----------------------------------------------------------------
843
-
844
- # Generates link for QUERY API
845
# Generates a pre-signed Query-API link for the given HTTP method.
# headers must include :url (and optionally :data); expires is either a
# number of seconds from now (when below one year) or an absolute time.
def generate_link(method, headers={}, expires=nil) #:nodoc:
  # calculate request data
  server, path, path_to_sign = fetch_request_params(headers)
  # expiration time
  expires ||= DEFAULT_EXPIRES_AFTER
  # Bug fix: Fixnum was removed in Ruby 3.2; Integer covers the same values.
  expires = Time.now.utc + expires if expires.is_a?(Integer) && (expires < ONE_YEAR_IN_SECONDS)
  expires = expires.to_i
  # remove unset (== optional) and symbolic keys so only real HTTP headers are signed
  headers.each { |key, value| headers.delete(key) if (value.nil? || key.is_a?(Symbol)) }
  # generate auth strings
  auth_string = canonical_string(method, path_to_sign, headers, expires)
  # Bug fix: OpenSSL::Digest::Digest is deprecated and removed in modern
  # openssl releases; OpenSSL::Digest.new("sha1") is the supported spelling.
  signature = CGI::escape(Base64.encode64(OpenSSL::HMAC.digest(OpenSSL::Digest.new("sha1"), @aws_secret_access_key, auth_string)).strip)
  # path building: append the auth params with '&' if a query string already exists
  addon = "Signature=#{signature}&Expires=#{expires}&AWSAccessKeyId=#{@aws_access_key_id}"
  path += path[/\?/] ? "&#{addon}" : "?#{addon}"
  "#{@params[:protocol]}://#{server}:#{@params[:port]}#{path}"
rescue
  on_exception
end
864
-
865
- # Generates link for 'ListAllMyBuckets'.
866
- #
867
- # s3.list_all_my_buckets_link #=> url string
868
- #
869
# Generates a signed link for 'ListAllMyBuckets'.
#
#  s3.list_all_my_buckets_link #=> url string
#
def list_all_my_buckets_link(expires=nil, headers={})
  generate_link('GET', headers.merge(:url => ''), expires)
rescue
  on_exception
end
874
-
875
- # Generates link for 'CreateBucket'.
876
- #
877
- # s3.create_bucket_link('my_awesome_bucket') #=> url string
878
- #
879
# Generates a signed link for 'CreateBucket'.
#
#  s3.create_bucket_link('my_awesome_bucket') #=> url string
#
def create_bucket_link(bucket, expires=nil, headers={})
  generate_link('PUT', headers.merge(:url => bucket), expires)
rescue
  on_exception
end
884
-
885
- # Generates link for 'DeleteBucket'.
886
- #
887
- # s3.delete_bucket_link('my_awesome_bucket') #=> url string
888
- #
889
# Generates a signed link for 'DeleteBucket'.
#
#  s3.delete_bucket_link('my_awesome_bucket') #=> url string
#
def delete_bucket_link(bucket, expires=nil, headers={})
  generate_link('DELETE', headers.merge(:url => bucket), expires)
rescue
  on_exception
end
894
-
895
- # Generates link for 'ListBucket'.
896
- #
897
- # s3.list_bucket_link('my_awesome_bucket') #=> url string
898
- #
899
# Generates a signed link for 'ListBucket'; options become the query string.
#
#  s3.list_bucket_link('my_awesome_bucket') #=> url string
#
def list_bucket_link(bucket, options=nil, expires=nil, headers={})
  unless options.blank?
    query  = options.map { |name, value| "#{name.to_s}=#{CGI::escape value.to_s}" }.join('&')
    bucket = "#{bucket}?#{query}"
  end
  generate_link('GET', headers.merge(:url => bucket), expires)
rescue
  on_exception
end
905
-
906
- # Generates link for 'PutObject'.
907
- #
908
- # s3.put_link('my_awesome_bucket',key, object) #=> url string
909
- #
910
# Generates a signed link for 'PutObject'.
#
#  s3.put_link('my_awesome_bucket', key, object) #=> url string
#
def put_link(bucket, key, data=nil, expires=nil, headers={})
  generate_link('PUT', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}", :data => data), expires)
rescue
  on_exception
end
915
-
916
- # Generates link for 'GetObject'.
917
- #
918
- # if a bucket comply with virtual hosting naming then retuns a link with the
919
- # bucket as a part of host name:
920
- #
921
- # s3.get_link('my-awesome-bucket',key) #=> https://my-awesome-bucket.s3.amazonaws.com:443/asia%2Fcustomers?Signature=nh7...
922
- #
923
- # otherwise returns an old style link (the bucket is a part of path):
924
- #
925
- # s3.get_link('my_awesome_bucket',key) #=> https://s3.amazonaws.com:443/my_awesome_bucket/asia%2Fcustomers?Signature=QAO...
926
- #
927
- # see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html
928
# Generates a signed link for 'GetObject'.
#
# If the bucket complies with virtual-hosting naming, the returned link embeds
# the bucket in the host name; otherwise the bucket appears in the path.
# See http://docs.amazonwebservices.com/AmazonS3/2006-03-01/VirtualHosting.html
def get_link(bucket, key, expires=nil, headers={})
  generate_link('GET', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}"), expires)
rescue
  on_exception
end
933
-
934
- # Generates link for 'HeadObject'.
935
- #
936
- # s3.head_link('my_awesome_bucket',key) #=> url string
937
- #
938
# Generates a signed link for 'HeadObject'.
#
#  s3.head_link('my_awesome_bucket', key) #=> url string
#
def head_link(bucket, key, expires=nil, headers={})
  generate_link('HEAD', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}"), expires)
rescue
  on_exception
end
943
-
944
- # Generates link for 'DeleteObject'.
945
- #
946
- # s3.delete_link('my_awesome_bucket',key) #=> url string
947
- #
948
# Generates a signed link for 'DeleteObject'.
#
#  s3.delete_link('my_awesome_bucket', key) #=> url string
#
def delete_link(bucket, key, expires=nil, headers={})
  generate_link('DELETE', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}"), expires)
rescue
  on_exception
end
953
-
954
-
955
- # Generates link for 'GetACL'.
956
- #
957
- # s3.get_acl_link('my_awesome_bucket',key) #=> url string
958
- #
959
# Generates a signed link for 'GetACL'.
#
#  s3.get_acl_link('my_awesome_bucket', key) #=> url string
#
def get_acl_link(bucket, key='', headers={})
  generate_link('GET', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}?acl"))
rescue
  on_exception
end
964
-
965
- # Generates link for 'PutACL'.
966
- #
967
- # s3.put_acl_link('my_awesome_bucket',key) #=> url string
968
- #
969
# Generates a signed link for 'PutACL'.
#
#  s3.put_acl_link('my_awesome_bucket', key) #=> url string
#
def put_acl_link(bucket, key='', headers={})
  generate_link('PUT', headers.merge(:url => "#{bucket}/#{AwsUtils::URLencode key}?acl"))
rescue
  on_exception
end
974
-
975
- # Generates link for 'GetBucketACL'.
976
- #
977
- # s3.get_acl_link('my_awesome_bucket',key) #=> url string
978
- #
979
# Generates a signed link for 'GetBucketACL' (GetACL with an empty key).
#
#  s3.get_bucket_acl_link('my_awesome_bucket') #=> url string
#
def get_bucket_acl_link(bucket, headers={})
  get_acl_link(bucket, '', headers)
rescue
  on_exception
end
984
-
985
- # Generates link for 'PutBucketACL'.
986
- #
987
- # s3.put_acl_link('my_awesome_bucket',key) #=> url string
988
- #
989
# Generates a signed link for 'PutBucketACL'.
#
#  s3.put_bucket_acl_link('my_awesome_bucket') #=> url string
#
# acl_xml_doc is kept (now optional) for backward compatibility, but a signed
# query-string link carries no request body, so it is not used here.
def put_bucket_acl_link(bucket, acl_xml_doc=nil, headers={})
  # Bug fix: put_acl_link takes (bucket, key='', headers={}) — the old code
  # passed acl_xml_doc as a fourth argument, raising ArgumentError on every call.
  put_acl_link(bucket, '', headers)
rescue
  on_exception
end
994
-
995
- #-----------------------------------------------------------------
996
- # PARSERS:
997
- #-----------------------------------------------------------------
998
-
999
# SAX-style parser for the ListAllMyBuckets response: yields an array of
# {:name, :creation_date, :owner_id, :owner_display_name} hashes, one per bucket.
class S3ListAllMyBucketsParser < AwsParser # :nodoc:
  def reset
    @result = []
    @owner  = {}
  end

  def tagstart(name, attributes)
    # Each <Bucket> element opens a fresh bucket record.
    @current_bucket = {} if name == 'Bucket'
  end

  def tagend(name)
    case name
    when 'ID'           then @owner[:owner_id]               = @text
    when 'DisplayName'  then @owner[:owner_display_name]     = @text
    when 'Name'         then @current_bucket[:name]          = @text
    when 'CreationDate' then @current_bucket[:creation_date] = @text
    when 'Bucket'       then @result << @current_bucket.merge(@owner)
    end
  end
end
1024
-
1025
# SAX-style parser for the (legacy) ListBucket response. Produces a flat array
# of per-key hashes; each key hash carries the shared bucket-level info (name,
# prefix, marker, ...) under :service.
class S3ListBucketParser < AwsParser # :nodoc:
  def reset
    @result = []        # one hash per <Contents> entry
    @service = {}       # bucket-level listing info, shared by every key hash
    @current_key = {}
  end

  def tagstart(name, attributes)
    # Each <Contents> element starts a new key record.
    @current_key = {} if name == 'Contents'
  end

  def tagend(name)
    case name
      # service info
      when 'Name';
        @service['name'] = @text
      when 'Prefix';
        @service['prefix'] = @text
      when 'Marker';
        @service['marker'] = @text
      when 'MaxKeys';
        @service['max-keys'] = @text
      when 'Delimiter';
        @service['delimiter'] = @text
      when 'IsTruncated';
        # anything that is not literally "false" is treated as truncated
        @service['is_truncated'] = (@text =~ /false/ ? false : true)
      # key data
      when 'Key';
        @current_key[:key] = @text
      when 'LastModified';
        @current_key[:last_modified] = @text
      when 'ETag';
        @current_key[:e_tag] = @text
      when 'Size';
        @current_key[:size] = @text.to_i
      when 'StorageClass';
        @current_key[:storage_class] = @text
      when 'ID';
        @current_key[:owner_id] = @text
      when 'DisplayName';
        @current_key[:owner_display_name] = @text
      when 'Contents';
        # closing </Contents>: attach the shared service info and emit the key
        @current_key[:service] = @service; @result << @current_key
    end
  end
end
1071
-
1072
# SAX-style parser for ListBucket that returns a single structured hash:
# listing metadata at the top level plus :contents (key hashes) and
# :common_prefixes (when a delimiter is used).
class S3ImprovedListBucketParser < AwsParser # :nodoc:
  def reset
    @result = {}
    @result[:contents] = []
    @result[:common_prefixes] = []
    @contents = []
    @current_key = {}
    @common_prefixes = []
    # flag: are we currently inside a <CommonPrefixes> element?
    @in_common_prefixes = false
  end

  def tagstart(name, attributes)
    @current_key = {} if name == 'Contents'
    @in_common_prefixes = true if name == 'CommonPrefixes'
  end

  def tagend(name)
    case name
      # service info
      when 'Name';
        @result[:name] = @text
      # Amazon uses the same tag for the search prefix and for the entries
      # in common prefix...so use our simple flag to see which element
      # we are parsing
      when 'Prefix';
        @in_common_prefixes ? @common_prefixes << @text : @result[:prefix] = @text
      when 'Marker';
        @result[:marker] = @text
      when 'MaxKeys';
        @result[:max_keys] = @text
      when 'Delimiter';
        @result[:delimiter] = @text
      when 'IsTruncated';
        # anything that is not literally "false" is treated as truncated
        @result[:is_truncated] = (@text =~ /false/ ? false : true)
      when 'NextMarker';
        @result[:next_marker] = @text
      # key data
      when 'Key';
        @current_key[:key] = @text
      when 'LastModified';
        @current_key[:last_modified] = @text
      when 'ETag';
        @current_key[:e_tag] = @text
      when 'Size';
        @current_key[:size] = @text.to_i
      when 'StorageClass';
        @current_key[:storage_class] = @text
      when 'ID';
        @current_key[:owner_id] = @text
      when 'DisplayName';
        @current_key[:owner_display_name] = @text
      when 'Contents';
        @result[:contents] << @current_key
      # Common Prefix stuff
      when 'CommonPrefixes';
        # closing </CommonPrefixes>: publish the collected prefixes and clear the flag
        @result[:common_prefixes] = @common_prefixes; @in_common_prefixes = false
    end
  end
end
1131
-
1132
# Parser for GetBucketLocation: the result is the text of <LocationConstraint>.
class S3BucketLocationParser < AwsParser # :nodoc:
  def reset
    @result = ''
  end

  def tagend(name)
    return unless name == 'LocationConstraint'
    @result = @text
  end
end
1141
-
1142
# SAX-style parser for an AccessControlPolicy document. Produces
# {:owner => {:id, :display_name}, :grantees => [{:id/:uri, :display_name,
# :permissions, :attributes}, ...]} — one grantee hash per <Grant>.
class S3AclParser < AwsParser # :nodoc:
  def reset
    @result = {:grantees=>[], :owner=>{}}
    @current_grantee = {}
  end

  def tagstart(name, attributes)
    # <Grantee> carries its type info (CanonicalUser/Group) in XML attributes.
    @current_grantee = {:attributes => attributes} if name=='Grantee'
  end

  def tagend(name)
    case name
      # service info
      when 'ID'
        # <ID> appears under both Owner and Grantee — disambiguate via the XML path.
        if @xmlpath == 'AccessControlPolicy/Owner'
          @result[:owner][:id] = @text
        else
          @current_grantee[:id] = @text
        end
      when 'DisplayName'
        # Same ambiguity as <ID>: Owner vs. Grantee.
        if @xmlpath == 'AccessControlPolicy/Owner'
          @result[:owner][:display_name] = @text
        else
          @current_grantee[:display_name] = @text
        end
      when 'URI'
        @current_grantee[:uri] = @text
      when 'Permission'
        @current_grantee[:permissions] = @text
      when 'Grant'
        # closing </Grant>: emit the completed grantee record
        @result[:grantees] << @current_grantee
    end
  end
end
1176
-
1177
# Parser for GetBucketLogging: returns {:enabled, :targetbucket, :targetprefix}.
# :enabled becomes true only when target fields appear inside <LoggingEnabled>.
class S3LoggingParser < AwsParser # :nodoc:
  def reset
    @result = {:enabled => false, :targetbucket => '', :targetprefix => ''}
    @current_grantee = {}
  end

  def tagend(name)
    # Only tags nested under BucketLoggingStatus/LoggingEnabled are relevant.
    return unless @xmlpath == 'BucketLoggingStatus/LoggingEnabled'
    case name
    when 'TargetBucket'
      @result[:targetbucket] = @text
      @result[:enabled] = true
    when 'TargetPrefix'
      @result[:targetprefix] = @text
      @result[:enabled] = true
    end
  end
end
1199
-
1200
# Parser for a CopyObject response: extracts {:last_modified, :e_tag}.
class S3CopyParser < AwsParser # :nodoc:
  def reset
    @result = {}
  end

  def tagend(name)
    @result[:last_modified] = @text if name == 'LastModified'
    @result[:e_tag]         = @text if name == 'ETag'
  end
end
1214
-
1215
- #-----------------------------------------------------------------
1216
- # PARSERS: Non XML
1217
- #-----------------------------------------------------------------
1218
-
1219
# Pass-through "parser": stores the raw HTTP response as the result. Also
# provides the header-flattening helper shared by its subclasses.
class S3HttpResponseParser # :nodoc:
  attr_reader :result

  def parse(response)
    @result = response
  end

  # Flattens single-element header value arrays: {'etag'=>['x']} -> {'etag'=>'x'}.
  # Multi-valued headers are left as arrays.
  def headers_to_string(headers)
    headers.inject({}) do |flattened, (key, value)|
      flattened[key] = (value.is_a?(Array) && value.size < 2) ? value[0] : value
      flattened
    end
  end
end
1235
-
1236
# Result is a hash holding the response body (:object) plus flattened headers.
class S3HttpResponseBodyParser < S3HttpResponseParser # :nodoc:
  def parse(response)
    flattened = headers_to_string(response.to_hash)
    @result = { :object => response.body, :headers => flattened }
  end
end
1244
-
1245
# Result is the flattened response headers only (used for HEAD requests).
class S3HttpResponseHeadParser < S3HttpResponseParser # :nodoc:
  def parse(response)
    @result = headers_to_string(response.to_hash)
  end
end
1250
-
1251
- end
1252
-
1253
- end